devalang_wasm/engine/audio/interpreter/driver/
renderer_graph.rs

1/// Audio graph rendering - implements proper routing, node effects, and ducking
2use super::AudioInterpreter;
3use crate::engine::audio::interpreter::audio_graph::Connection;
4use std::collections::HashMap;
5
/// Per-node audio buffers, keyed by node name (e.g. "$master").
/// Each buffer stores interleaved stereo samples (L, R, L, R, ...),
/// so a buffer covering N frames has length 2 * N.
type NodeBuffers = HashMap<String, Vec<f32>>;
8
9/// Process audio through the routing graph
10pub fn render_audio_graph(
11    interpreter: &AudioInterpreter,
12    total_samples: usize,
13) -> anyhow::Result<Vec<f32>> {
14    let total_duration = total_samples as f32 / interpreter.sample_rate as f32;
15
16    // Create buffers for each node in the graph
17    let mut node_buffers: NodeBuffers = HashMap::new();
18    for node_name in interpreter.audio_graph.node_names() {
19        node_buffers.insert(node_name, vec![0.0f32; total_samples * 2]);
20    }
21
22    // Phase 1: Render audio events into their respective nodes
23    render_events_into_nodes(interpreter, &mut node_buffers, total_duration)?;
24
25    // Phase 2: Apply effects to each node
26    apply_node_effects(interpreter, &mut node_buffers)?;
27
28    // Phase 3: Apply ducks and route audio between nodes
29    apply_routing_and_ducking(interpreter, &mut node_buffers)?;
30
31    // Phase 4: Mix all nodes into master buffer
32    let master_buffer = mix_to_master(interpreter, &node_buffers)?;
33
34    Ok(master_buffer)
35}
36
37/// Determine which node an event belongs to based on its content
38/// Returns the node name where this event should be rendered
39fn get_event_target_node(
40    event: &crate::engine::audio::events::AudioEvent,
41    interpreter: &AudioInterpreter,
42) -> String {
43    use crate::engine::audio::events::AudioEvent;
44
45    match event {
46        AudioEvent::Note { synth_id, .. } | AudioEvent::Chord { synth_id, .. } => {
47            // Check if there's a routing node matching the synth ID
48            if let Some(_) = interpreter.routing.nodes.get(synth_id) {
49                synth_id.clone()
50            } else {
51                // Auto-route to $master if not explicitly defined in routing
52                "$master".to_string()
53            }
54        }
55        AudioEvent::Sample { source, .. } => {
56            // First, check if source (pattern name) matches a routing node
57            if let Some(src) = source {
58                if interpreter.routing.nodes.contains_key(src) {
59                    return src.clone();
60                }
61            }
62
63            // Auto-route to $master if pattern not explicitly defined in routing
64            "$master".to_string()
65        }
66        AudioEvent::Stop { .. } => {
67            // Stop events don't route to nodes
68            "$master".to_string()
69        }
70    }
71}
72
73/// Render audio events into their assigned nodes
74fn render_events_into_nodes(
75    interpreter: &AudioInterpreter,
76    node_buffers: &mut NodeBuffers,
77    total_duration: f32,
78) -> anyhow::Result<()> {
79    use crate::engine::audio::events::AudioEvent;
80    use crate::engine::audio::generator::{SynthParams, generate_note_with_options};
81
82    let total_samples = (total_duration * interpreter.sample_rate as f32).ceil() as usize;
83
84    for event in &interpreter.events.events {
85        // Determine target node for this event
86        let mut target_node = get_event_target_node(event, interpreter);
87
88        // If target node doesn't exist in buffers and it's not $master, route to $master instead
89        if !node_buffers.contains_key(&target_node) && target_node != "$master" {
90            target_node = "$master".to_string();
91        }
92
93        // Ensure the target buffer exists (create if needed)
94        if !node_buffers.contains_key(&target_node) {
95            node_buffers.insert(target_node.clone(), vec![0.0f32; total_samples * 2]);
96        }
97
98        // Get the target buffer
99        let target_buffer = node_buffers.get_mut(&target_node);
100        if target_buffer.is_none() {
101            continue;
102        }
103        let target_buffer = target_buffer.unwrap();
104
105        match event {
106            AudioEvent::Note {
107                midi,
108                start_time,
109                duration,
110                synth_def,
111                pan,
112                detune,
113                gain,
114                velocity,
115                attack,
116                release,
117                ..
118            } => {
119                let mut params = SynthParams {
120                    waveform: synth_def.waveform.clone(),
121                    attack: synth_def.attack,
122                    decay: synth_def.decay,
123                    sustain: synth_def.sustain,
124                    release: synth_def.release,
125                    synth_type: synth_def.synth_type.clone(),
126                    filters: synth_def.filters.clone(),
127                    options: synth_def.options.clone(),
128                    lfo: synth_def.lfo.clone(),
129                    plugin_author: synth_def.plugin_author.clone(),
130                    plugin_name: synth_def.plugin_name.clone(),
131                    plugin_export: synth_def.plugin_export.clone(),
132                };
133
134                if let Some(a) = attack {
135                    params.attack = a / 1000.0;
136                }
137                if let Some(r) = release {
138                    params.release = r / 1000.0;
139                }
140
141                let samples = generate_note_with_options(
142                    *midi,
143                    *duration * 1000.0, // Convert to milliseconds
144                    velocity * gain,    // Combined velocity and gain
145                    &params,
146                    interpreter.sample_rate,
147                    *pan,
148                    *detune,
149                )?;
150
151                let start_sample = (*start_time * interpreter.sample_rate as f32).ceil() as usize;
152                let start_idx = start_sample * 2; // Convert to sample index (stereo)
153
154                // Only write if start is within bounds
155                if start_idx < total_samples * 2 {
156                    let end_idx = (start_idx + samples.len()).min(total_samples * 2);
157                    let write_len = end_idx.saturating_sub(start_idx);
158
159                    if write_len > 0 {
160                        target_buffer[start_idx..end_idx]
161                            .iter_mut()
162                            .zip(samples[0..write_len].iter())
163                            .for_each(|(dst, src)| *dst += src);
164                    }
165                }
166            }
167            AudioEvent::Sample {
168                uri: _uri,
169                start_time: _start_time,
170                velocity: _velocity,
171                ..
172            } => {
173                // Load sample from bank (synthetic drums for CLI)
174                #[cfg(feature = "cli")]
175                {
176                    use crate::engine::audio::samples;
177
178                    if let Some(sample_data) = samples::get_sample(_uri) {
179                        let start_sample =
180                            (*_start_time * interpreter.sample_rate as f32).ceil() as usize;
181                        let start_idx = start_sample * 2; // Convert to stereo sample index
182
183                        // Only write if sample starts before buffer ends
184                        if start_idx < total_samples * 2 {
185                            let end_idx =
186                                (start_idx + sample_data.samples.len()).min(total_samples * 2);
187                            let write_len = end_idx - start_idx;
188
189                            if write_len > 0 {
190                                // Scale sample with velocity
191                                let velocity_scale = _velocity;
192                                target_buffer[start_idx..end_idx]
193                                    .iter_mut()
194                                    .zip(sample_data.samples[0..write_len].iter())
195                                    .for_each(|(dst, src)| *dst += src * velocity_scale);
196                            }
197                        }
198                    }
199                }
200            }
201            _ => {
202                // Chord and Stop events are not rendered directly in this graph rendering mode
203            }
204        }
205    }
206
207    Ok(())
208}
209
210/// Apply effects chains to each node
211fn apply_node_effects(
212    interpreter: &AudioInterpreter,
213    node_buffers: &mut NodeBuffers,
214) -> anyhow::Result<()> {
215    use crate::engine::audio::effects::chain::build_effect_chain;
216
217    for (node_name, node_config) in &interpreter.audio_graph.nodes {
218        if let Some(effects_value) = &node_config.effects {
219            // Build effect chain - need to convert single Value to array
220            let effects_array = match effects_value {
221                crate::language::syntax::ast::Value::Array(arr) => arr.clone(),
222                _ => vec![effects_value.clone()],
223            };
224
225            let mut effect_chain = build_effect_chain(&effects_array, false);
226
227            if let Some(buffer) = node_buffers.get_mut(node_name) {
228                // Apply effects to this node's buffer
229                effect_chain.process(buffer, interpreter.sample_rate);
230            }
231        }
232    }
233
234    Ok(())
235}
236
237/// Apply routing connections and duck effects
238fn apply_routing_and_ducking(
239    interpreter: &AudioInterpreter,
240    node_buffers: &mut NodeBuffers,
241) -> anyhow::Result<()> {
242    // Phase 1: Apply all ducks and sidechains first (these modify source buffers)
243    for connection in interpreter.audio_graph.connections.iter() {
244        match connection {
245            Connection::Duck {
246                source,
247                destination,
248                effect_params: _,
249            } => {
250                apply_duck(source, destination, node_buffers, interpreter.sample_rate)?;
251            }
252            Connection::Sidechain {
253                source,
254                destination,
255                effect_params: _,
256            } => {
257                apply_sidechain(source, destination, node_buffers, interpreter.sample_rate)?;
258            }
259            _ => {}
260        }
261    }
262
263    // Phase 2: Apply all routes (these mix audio between nodes)
264    for connection in interpreter.audio_graph.connections.iter() {
265        match connection {
266            Connection::Route {
267                source,
268                destination,
269                gain,
270            } => {
271                // Mix source buffer into destination buffer with gain
272                if let (Some(src_buf), Some(dst_buf)) = (
273                    node_buffers.get(source).cloned(),
274                    node_buffers.get_mut(destination),
275                ) {
276                    for j in 0..src_buf.len() {
277                        dst_buf[j] += src_buf[j] * gain;
278                    }
279                }
280            }
281            _ => {}
282        }
283    }
284
285    Ok(())
286}
287
288/// Apply duck effect - compress destination based on source envelope
289fn apply_duck(
290    source_name: &str,
291    destination_name: &str,
292    node_buffers: &mut NodeBuffers,
293    sample_rate: u32,
294) -> anyhow::Result<()> {
295    // Get source buffer (the trigger - kick) and destination buffer (the target - bassline)
296    let source_buf = if let Some(buf) = node_buffers.get(source_name) {
297        buf.clone()
298    } else {
299        return Ok(());
300    };
301
302    let dest_opt = node_buffers.get_mut(destination_name);
303    if dest_opt.is_none() {
304        return Ok(());
305    }
306
307    let dest_buf = dest_opt.unwrap();
308
309    // Simple duck: when source has significant amplitude, reduce destination
310    // Use RMS (root mean square) to detect activity in source
311    let window_size = (sample_rate as usize / 100) * 2; // 10ms windows in stereo samples
312
313    for frame_start in (0..dest_buf.len()).step_by(window_size.max(2)) {
314        let frame_end = (frame_start + window_size).min(dest_buf.len());
315
316        // Calculate RMS of source in this window
317        let source_rms = if frame_start < source_buf.len() && frame_end <= source_buf.len() {
318            let sum_sq: f32 = source_buf[frame_start..frame_end]
319                .iter()
320                .map(|&s| s * s)
321                .sum();
322            (sum_sq / (frame_end - frame_start) as f32).sqrt()
323        } else {
324            0.0
325        };
326
327        // Apply duck: if source is active (RMS > threshold), reduce destination
328        let threshold = 0.01; // Amplitude threshold for detecting kick
329        let duck_reduction = if source_rms > threshold {
330            // Smooth reduction: 1.0 (no reduction) when silent, 0.3 (70% reduction) when active
331            0.3 + (1.0 - 0.3) * (-source_rms * 100.0).exp()
332        } else {
333            1.0 // No reduction when source is silent
334        };
335
336        // Apply the reduction to destination buffer in this window
337        for i in frame_start..frame_end {
338            dest_buf[i] *= duck_reduction;
339        }
340    }
341
342    Ok(())
343}
344
345/// Apply sidechain effect - gate modulation between nodes
346fn apply_sidechain(
347    source_name: &str,
348    destination_name: &str,
349    node_buffers: &mut NodeBuffers,
350    sample_rate: u32,
351) -> anyhow::Result<()> {
352    // Get current volumes in destination buffer (envelope)
353    let dest_envelope = if let Some(dest_buf) = node_buffers.get(destination_name) {
354        compute_envelope(dest_buf, sample_rate)
355    } else {
356        return Ok(());
357    };
358
359    // Apply sidechain modulation based on destination envelope
360    if let Some(src_buf) = node_buffers.get_mut(source_name) {
361        // Map buffer indices to envelope indices
362        let frame_rate = 100;
363        let samples_per_frame = (sample_rate / frame_rate) as usize * 2;
364
365        for frame_idx in (0..src_buf.len()).step_by(2) {
366            let current_envelope_idx = frame_idx / samples_per_frame;
367
368            if current_envelope_idx < dest_envelope.len() {
369                let dest_level = dest_envelope[current_envelope_idx];
370
371                // Sidechain gate: proportional to destination level
372                let normalized_linear = (dest_level * 10.0).min(1.0);
373                let gate_open = 1.0 - (normalized_linear * 0.5); // Range [1.0, 0.5]
374
375                src_buf[frame_idx] *= gate_open;
376                if frame_idx + 1 < src_buf.len() {
377                    src_buf[frame_idx + 1] *= gate_open;
378                }
379            }
380        }
381    }
382
383    Ok(())
384}
385
386/// Compute RMS envelope of a buffer (stereo, 2 samples per frame)
/// Compute a 100 Hz RMS envelope of an interleaved stereo buffer.
///
/// Each envelope value is the RMS of one ~10ms window (both channels
/// pooled together), clamped to [0.0, 1.0]. An empty buffer yields an
/// empty envelope.
fn compute_envelope(buffer: &[f32], sample_rate: u32) -> Vec<f32> {
    let frame_rate = 100; // 100 Hz envelope resolution
    // Clamp to at least 1 so `chunks` can't panic on a zero chunk size
    // when sample_rate < frame_rate.
    let samples_per_frame = (sample_rate / frame_rate).max(1);

    buffer
        .chunks(samples_per_frame as usize * 2)
        .map(|chunk| {
            // `chunks` guarantees non-empty chunks, so this never divides by 0.
            let mean_sq = chunk.iter().map(|s| s * s).sum::<f32>() / chunk.len() as f32;
            mean_sq.sqrt().clamp(0.0, 1.0)
        })
        .collect()
}
399
400/// Mix all node buffers down to master
401fn mix_to_master(
402    _interpreter: &AudioInterpreter,
403    node_buffers: &NodeBuffers,
404) -> anyhow::Result<Vec<f32>> {
405    let master_buf = node_buffers
406        .get("$master")
407        .ok_or_else(|| anyhow::anyhow!("Master node not found"))?
408        .clone();
409
410    let mut result = master_buf;
411
412    // Mix all other nodes into master (except master itself)
413    for (node_name, buffer) in node_buffers {
414        if node_name != "$master" {
415            for i in 0..buffer.len() {
416                result[i] += buffer[i];
417            }
418        }
419    }
420
421    Ok(result)
422}