makepad_audio_graph/
audio_graph.rs

use {
    crate::{
        makepad_platform::*,
        audio_traits::*,
    },
    std::any::TypeId,
    std::sync::{Arc, Mutex},
};

live_design!{
    AudioGraph = {{AudioGraph}} {
    }
}

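/// Messages sent from the UI thread to the audio thread.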
pub enum FromUI {
    AllNotesOff,
    MidiData(MidiData),
    NewRoot(Box<dyn AudioGraphNode + Send>),
    DisplayAudio(AudioBuffer),
}

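/// Actions dispatched back to the UI by `handle_event_with`.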
pub enum AudioGraphAction<'a> {
    DisplayAudio {
        active: bool,
        voice: usize,
        buffer: &'a AudioBuffer
    },
    VoiceOff {voice: usize}
}

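/// The audio graph component: owns the root audio component and the channels to and from the audio thread.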
#[derive(Live, LiveRegister)]
pub struct AudioGraph {
    #[live] root: AudioComponentRef,
    #[rust] from_ui: FromUISender<FromUI>,
    #[rust] to_ui: ToUIReceiver<ToUIDisplayMsg>,
}

impl LiveHook for AudioGraph {
    fn after_new_from_doc(&mut self, cx: &mut Cx) {
        Self::start_audio_output(cx, self.from_ui.receiver(), self.to_ui.sender());
        // if a root component was instantiated from the live design, hand its graph node to the audio thread
        if let Some(root) = self.root.as_mut() {
            let graph_node = root.get_graph_node(cx);
            let _ = self.from_ui.send(FromUI::NewRoot(graph_node));
        }
    }
    
    fn skip_apply(&mut self, _cx: &mut Cx, apply: &mut Apply, index: usize, nodes: &[LiveNode]) -> Option<usize> {
        // skip UpdateFromDoc applies for this component's subtree
        if let ApplyFrom::UpdateFromDoc {..} = apply.from {
            return Some(nodes.skip_node(index))
        }
        None
    }
}

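/// Audio-thread state: the incoming message receiver, the display buffer pool and the root graph node.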
struct Node {
    from_ui: FromUIReceiver<FromUI>,
    display_buffers: Vec<AudioBuffer>,
    root: Option<Box<dyn AudioGraphNode + Send>>
}

impl AudioGraph {
    // currently unimplemented: always returns None
    pub fn state_by_type<T: 'static + AudioGraphNode + Send>(&mut self) -> Option<&mut T> {
        None
    }
    
    // query the component tree for the first child of concrete type T
    pub fn by_type<T: 'static + AudioComponent>(&mut self) -> Option<&mut T> {
        if let Some(child) = self.root.audio_query(&AudioQuery::TypeId(TypeId::of::<T>()), &mut None).into_found() {
            return child.downcast_mut::<T>()
        }
        None
    }
    
    // queue midi data for the audio thread
    pub fn send_midi_data(&self, data: MidiData) {
        let _ = self.from_ui.send(FromUI::MidiData(data));
    }
    
    // ask the audio thread to silence all voices
    pub fn all_notes_off(&self) {
        let _ = self.from_ui.send(FromUI::AllNotesOff);
    }
    
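    // audio-thread side: drain pending UI messages, then render the root node into the output buffer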
    fn render_to_output_buffer(node: &mut Node, to_ui: &ToUISender<ToUIDisplayMsg>, info: AudioInfo, output: &mut AudioBuffer) {
        while let Ok(msg) = node.from_ui.try_recv() {
            match msg {
                FromUI::DisplayAudio(buf) => {
                    node.display_buffers.push(buf);
                    //log!("{}", node.display_buffers.len())
                }
                FromUI::NewRoot(new_root) => {
                    node.root = Some(new_root);
                }
                FromUI::MidiData(data) => {
                    //if data.channel() == 0{
                    if let Some(root) = node.root.as_mut() {
                        root.handle_midi_data(data);
                    }
                    // }
                }
                FromUI::AllNotesOff => {
                    if let Some(root) = node.root.as_mut() {
                        root.all_notes_off();
                    }
                }
            }
        }
        if let Some(root) = node.root.as_mut() {
            // we should create a real output buffer
            //node.buffer.resize_like_output(output);
            let mut dg = DisplayAudioGraph {
                to_ui,
                buffers: &mut node.display_buffers
            };
            root.render_to_audio_buffer(info, &mut [output], &[], &mut dg);
            // lets output this buffer to the UI
            //if let Some(mut display_buffer) = dg.pop_buffer() {
            //    display_buffer.copy_from(&node.buffer);
            //    dg.send_buffer(0, display_buffer);
            //}
            //output.copy_from_buffer(&node.buffer);
        }
    }
    
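    // registers the platform audio output callback and moves the Node state into it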
    fn start_audio_output(cx: &mut Cx, from_ui: FromUIReceiver<FromUI>, to_ui: ToUISender<ToUIDisplayMsg>) {
        // pre-allocate a pool of display buffers for the audio thread
        let mut buffers = Vec::new();
        for _ in 0..512 {
            buffers.push(AudioBuffer::new_with_size(512, 2));
        }
        
        let state = Arc::new(Mutex::new(Node {
            from_ui,
            display_buffers: buffers,
            root: None
        }));
        
        let to_ui = Arc::new(Mutex::new(to_ui));
        
        // render the graph from the platform's audio output callback
        cx.audio_output(0, move |info, output_buffer| {
            let mut state = state.lock().unwrap();
            let to_ui = to_ui.lock().unwrap();
            Self::render_to_output_buffer(&mut state, &to_ui, info, output_buffer);
        });
    }
    
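    // UI-thread side: forward the event to the root component and drain messages coming back from the audio thread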
    pub fn handle_event_with(
        &mut self,
        cx: &mut Cx,
        event: &Event,
        dispatch_action: &mut dyn FnMut(&mut Cx, AudioGraphAction)
    ) {
        if let Some(root) = self.root.as_mut() {
            root.handle_event_with(cx, event, &mut |_, _| {});
        }
        
        while let Ok(to_ui) = self.to_ui.try_recv() {
            match to_ui {
                ToUIDisplayMsg::DisplayAudio {voice, buffer, active} => {
                    dispatch_action(cx, AudioGraphAction::DisplayAudio {buffer: &buffer, voice, active});
                    // hand the buffer back to the audio thread's pool once the UI has seen it
                    self.from_ui.send(FromUI::DisplayAudio(buffer)).unwrap();
                },
                ToUIDisplayMsg::VoiceOff {voice} => {
                    dispatch_action(cx, AudioGraphAction::VoiceOff {voice});
                },
                ToUIDisplayMsg::OutOfBuffers => {
                    // todo: inject some new buffers
                }
            }
        }
    }
}
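
// --- Usage sketch (not part of the original file) ---
// A minimal, hypothetical example of driving an AudioGraph from a UI-side owner.
// `InstrumentUI` and its field are assumptions for illustration; only the AudioGraph
// API defined above (`handle_event_with`, `all_notes_off`) is used.

pub struct InstrumentUI {
    pub audio_graph: AudioGraph,
}

impl InstrumentUI {
    pub fn handle_event(&mut self, cx: &mut Cx, event: &Event) {
        // forward the event and react to the actions the graph reports back from the audio thread
        self.audio_graph.handle_event_with(cx, event, &mut |_cx, action| match action {
            AudioGraphAction::DisplayAudio {voice, buffer, active} => {
                // e.g. hand `buffer` to a per-voice waveform display here
                let _ = (voice, buffer, active);
            }
            AudioGraphAction::VoiceOff {voice} => {
                let _ = voice;
            }
        });
    }
    
    pub fn silence(&self) {
        // ask the audio thread to silence every voice
        self.audio_graph.all_notes_off();
    }
}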