mimium_lang/runtime/vm.rs

use core::slice;
use slotmap::{DefaultKey, SlotMap};
use std::{cell::RefCell, cmp::Ordering, collections::HashMap, ops::Range, rc::Rc};
pub mod bytecode;
pub mod program;
mod ringbuffer;
pub use bytecode::*;
use ringbuffer::Ringbuffer;

use program::OpenUpValue;
pub use program::{FuncProto, Program};

use crate::{
    compiler::bytecodegen::ByteCodeGenerator,
    interner::Symbol,
    plugin::{ExtClsInfo, ExtClsType, ExtFunInfo, ExtFunType, MachineFunction},
    runtime::vm::program::WordSize,
    types::{Type, TypeSize},
};
pub type RawVal = u64;
pub type ReturnCode = i64;

#[derive(Debug, Default, PartialEq)]
struct StateStorage {
    pos: usize,
    rawdata: Vec<u64>,
}
impl StateStorage {
    fn resize(&mut self, size: usize) {
        self.rawdata.resize(size, 0)
    }
    fn get_state(&self, size: u64) -> &[RawVal] {
        unsafe {
            let head = self.rawdata.as_ptr().add(self.pos);
            slice::from_raw_parts(head, size as _)
        }
    }
    fn get_state_mut(&mut self, size: usize) -> &mut [RawVal] {
        unsafe {
            let head = self.rawdata.as_mut_ptr().add(self.pos);
            slice::from_raw_parts_mut(head, size as _)
        }
    }
    fn get_as_ringbuffer(&mut self, size_in_samples: u64) -> Ringbuffer<'_> {
        let data_head = unsafe { self.rawdata.as_mut_ptr().add(self.pos) };
        Ringbuffer::new(data_head, size_in_samples)
    }
    fn push_pos(&mut self, offset: StateOffset) {
        self.pos = (self.pos as u64 + Into::<u64>::into(offset)) as usize;
    }
    fn pop_pos(&mut self, offset: StateOffset) {
        self.pos = (self.pos as u64 - Into::<u64>::into(offset)) as usize;
    }
}
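
// A minimal sketch of how the state cursor is meant to move (hypothetical
// `offset_of_child`, for illustration only): each stateful sub-function
// pushes its state offset before recursing and pops it afterwards, so
// `get_state` always reads from the region owned by the currently
// executing function.
//
//     let mut storage = StateStorage::default();
//     storage.resize(4);
//     storage.push_pos(offset_of_child); // enter the child: pos advances
//     let _state = storage.get_state(1); // the child's state words
//     storage.pop_pos(offset_of_child);  // back to the parent's region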

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ClosureIdx(pub slotmap::DefaultKey);

#[derive(Debug, Clone, Default)]
struct StateStorageStack(Vec<ClosureIdx>);

impl StateStorageStack {
    pub fn push(&mut self, i: ClosureIdx) {
        self.0.push(i)
    }
    pub fn pop(&mut self) {
        let _ = self.0.pop();
    }
}

#[derive(Debug, Clone, Default)]
pub(crate) struct ArrayHeap {
    elem_word_size: u64,
    data: Vec<RawVal>,
}
impl ArrayHeap {
    pub fn get_length_array(&self) -> u64 {
        self.data.len() as u64 / self.elem_word_size
    }
}
#[derive(Debug, Clone, Default)]
pub(crate) struct ArrayStorage {
    data: SlotMap<DefaultKey, ArrayHeap>,
}
pub(crate) type ArrayIdx = slotmap::DefaultKey;
impl ArrayStorage {
    pub fn alloc_array(&mut self, len: u64, elem_size: u64) -> RawVal {
        let array = ArrayHeap {
            elem_word_size: elem_size,
            data: vec![0u64; (len * elem_size) as usize],
        };
        let key = self.data.insert(array);
        debug_assert!(
            std::mem::size_of::<ArrayIdx>() == 8,
            "ArrayIdx size must be 8 bytes"
        );
        unsafe { std::mem::transmute_copy::<ArrayIdx, RawVal>(&key) }
    }
    pub fn get_array(&self, id: RawVal) -> &ArrayHeap {
        let key: ArrayIdx = unsafe { std::mem::transmute_copy::<RawVal, ArrayIdx>(&id) };
        self.data.get(key).expect("Invalid ArrayIdx")
    }
    pub fn get_array_mut(&mut self, id: RawVal) -> &mut ArrayHeap {
        let key: ArrayIdx = unsafe { std::mem::transmute_copy::<RawVal, ArrayIdx>(&id) };
        self.data.get_mut(key).expect("Invalid ArrayIdx")
    }
}
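
// A sketch of the round trip through `ArrayStorage` (illustrative only):
// the slotmap key is bit-cast into a `RawVal` so an array reference fits in
// one stack word, and the same bits recover the key on access.
//
//     let mut arrays = ArrayStorage::default();
//     let handle: RawVal = arrays.alloc_array(16, 2); // 16 elements, 2 words each
//     assert_eq!(arrays.get_array(handle).get_length_array(), 16);
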
// Upvalues are wrapped in Rc<RefCell<UpValue>> because they may be shared
// between multiple closures.
// They may be managed with some GC mechanism in the future.
#[derive(Debug, Clone, PartialEq)]
enum UpValue {
    Open(OpenUpValue),
    Closed(Vec<RawVal>, bool),
}
type SharedUpValue = Rc<RefCell<UpValue>>;
impl From<OpenUpValue> for UpValue {
    fn from(value: OpenUpValue) -> Self {
        Self::Open(value)
    }
}

#[derive(Default)]
struct LocalUpValueMap(Vec<(Reg, SharedUpValue)>);

impl LocalUpValueMap {
    pub fn get_or_insert(&mut self, ov: OpenUpValue) -> SharedUpValue {
        let OpenUpValue { pos, .. } = ov;
        self.0
            .iter()
            .find_map(|(i2, v)| (pos == *i2 as _).then_some(v.clone()))
            .unwrap_or_else(|| {
                let v = Rc::new(RefCell::new(UpValue::Open(ov)));
                self.0.push((pos as Reg, v.clone()));
                v
            })
    }
}
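
// Why the map matters (a sketch; `ov` is a hypothetical OpenUpValue): if two
// sibling closures capture the same local variable, both must observe one
// shared cell, so closing the upvalue for one closure is visible to the
// other. `get_or_insert` guarantees one cell per stack position `pos`:
//
//     let mut m = LocalUpValueMap::default();
//     let a = m.get_or_insert(ov); // first capture allocates the cell
//     let b = m.get_or_insert(ov); // second capture reuses it
//     assert!(Rc::ptr_eq(&a, &b));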

/// A closure object, dynamically allocated at runtime.
#[derive(Debug, Default, PartialEq)]
pub struct Closure {
    pub fn_proto_pos: usize, // position of the function prototype in global_fn_table
    pub base_ptr: u64,       // base pointer of the frame that created this closure; used to resolve open upvalues
    pub is_closed: bool,
    pub refcount: u64,
    pub(self) upvalues: Vec<SharedUpValue>,
    state_storage: StateStorage,
}
impl Closure {
    pub(self) fn new(
        program: &Program,
        base_ptr: u64,
        fn_i: usize,
        upv_map: &mut LocalUpValueMap,
    ) -> Self {
        let fnproto = &program.global_fn_table[fn_i].1;
        let upvalues = fnproto
            .upindexes
            .iter()
            .map(|ov| upv_map.get_or_insert(*ov))
            .collect::<Vec<_>>();
        let mut state_storage = StateStorage::default();
        state_storage.resize(fnproto.state_skeleton.total_size() as usize);
        Self {
            fn_proto_pos: fn_i,
            upvalues,
            is_closed: false,
            refcount: 1,
            base_ptr,
            state_storage,
        }
    }
}

pub type ClosureStorage = SlotMap<DefaultKey, Closure>;
pub fn drop_closure(storage: &mut ClosureStorage, id: ClosureIdx) {
    let cls = storage.get_mut(id.0).unwrap();
    cls.refcount -= 1;
    if cls.refcount == 0 {
        // Collect the closures referenced through closed upvalues, then
        // release them recursively before removing this closure.
        let child_closures = storage
            .get_mut(id.0)
            .unwrap()
            .upvalues
            .iter()
            .map(|v| {
                let v = v.borrow();
                // Only upvalues whose flag marks them as holding a closure
                // contribute to another closure's reference count.
                if let UpValue::Closed(v, true) = &v as &UpValue {
                    Some(Machine::get_as::<ClosureIdx>(v[0]))
                } else {
                    None
                }
            })
            .collect::<Vec<_>>();
        child_closures.iter().filter_map(|i| *i).for_each(|clsi| {
            drop_closure(storage, clsi);
        });
        storage.remove(id.0);
    }
}
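
// A sketch of the reference-counting contract (hypothetical `some_closure`):
// `Closure::new` starts refcount at 1; closing an upvalue over a closure
// bumps the captured closure's count; `drop_closure` decrements and removes
// the closure only at zero, cascading into the closures it captured.
//
//     let mut storage = ClosureStorage::default();
//     let idx = ClosureIdx(storage.insert(some_closure)); // refcount == 1
//     drop_closure(&mut storage, idx);                    // refcount == 0: removed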

#[derive(Clone, Copy, Default)]
enum RawValType {
    Float,
    #[default]
    Int,
    // UInt,
}

#[derive(Clone, Copy)]
enum ExtFnIdx {
    Fun(usize),
    Cls(usize),
}
/// The virtual machine that executes mimium bytecode programs.
///
/// A [`Machine`] holds the compiled [`Program`], value stack and all live
/// closures. External functions and closures installed from plugins are also
/// managed here.
pub struct Machine {
    // The program may be modified during execution, e.g., when a higher-order
    // external closure creates its wrapper function.
    pub prog: Program,
    stack: Vec<RawVal>,
    base_pointer: u64,
    pub closures: ClosureStorage,
    pub ext_fun_table: Vec<(Symbol, ExtFunType)>,
    pub ext_cls_table: Vec<(Symbol, ExtClsType)>,
    pub arrays: ArrayStorage,
    fn_map: HashMap<usize, ExtFnIdx>, // maps an index in the program's ext_fun_table to an index in the machine's tables
    // cls_map: HashMap<usize, usize>, // maps an index in the program's fn table to an index in the machine's table
    global_states: StateStorage,
    states_stack: StateStorageStack,
    delaysizes_pos_stack: Vec<usize>,
    global_vals: Vec<RawVal>,
    debug_stacktype: Vec<RawValType>,
}

macro_rules! binop {
    ($op:tt,$t:ty, $dst:expr,$src1:expr,$src2:expr,$self:ident) => {{
        $self.set_stacktype($dst as i64, RawValType::Float);
        $self.set_stack($dst as i64, Self::to_value::<$t>(
            Self::get_as::<$t>($self.get_stack($src1 as i64))
            $op Self::get_as::<$t>($self.get_stack($src2 as i64))))
    }};
}
macro_rules! binop_bool {
    ($op:tt, $dst:expr,$src1:expr,$src2:expr,$self:ident) => {{
        $self.set_stacktype($dst as i64, RawValType::Float);
        let bres: bool =
            Self::get_as::<f64>($self.get_stack($src1 as i64))
            $op Self::get_as::<f64>($self.get_stack($src2 as i64));
        let fres = if bres { 1.0f64 } else { 0.0f64 };
        $self.set_stack($dst as i64, Self::to_value::<f64>(fres))
    }};
}
// for `and` & `or`
macro_rules! binop_bool_compose {
    ($op:tt, $dst:expr,$src1:expr,$src2:expr,$self:ident) => {{
        $self.set_stacktype($dst as i64, RawValType::Float);
        let bres: bool =
            Self::get_as::<f64>($self.get_stack($src1 as i64)) > 0.0
            $op Self::get_as::<f64>($self.get_stack($src2 as i64)) > 0.0;
        let fres = if bres { 1.0f64 } else { 0.0f64 };
        $self.set_stack($dst as i64, Self::to_value::<f64>(fres))
    }};
}
macro_rules! binopmethod {
    ($op:ident,$t:ty, $dst:expr,$src1:expr,$src2:expr,$self:ident) => {{
        $self.set_stacktype($dst as i64, RawValType::Float);
        $self.set_stack(
            $dst as i64,
            Self::to_value::<$t>(
                Self::get_as::<$t>($self.get_stack($src1 as i64))
                    .$op(Self::get_as::<$t>($self.get_stack($src2 as i64))),
            ),
        )
    }};
}
macro_rules! uniop {
    ($op:tt,$t:ty, $dst:expr,$src:expr,$self:ident) => {
        $self.set_stack($dst as i64,
            Self::to_value::<$t>(
            $op Self::get_as::<$t>($self.get_stack($src as i64))))
    };
}
macro_rules! uniop_bool {
    ($op:tt, $dst:expr,$src:expr,$self:ident) => {{
        let bres: bool = $op(matches!(
            Self::get_as::<f64>($self.get_stack($src as i64)).partial_cmp(&0.0),
            Some(std::cmp::Ordering::Greater)
        ));
        let fres = if bres { 1.0f64 } else { 0.0f64 };
        $self.set_stack($dst as i64, Self::to_value::<f64>(fres))
    }};
}
macro_rules! uniopmethod {
    ($op:tt,$t:ty, $dst:expr,$src:expr,$self:ident) => {{
        $self.set_stack(
            $dst as i64,
            Self::to_value::<$t>(Self::get_as::<$t>($self.get_stack($src as i64)).$op()),
        )
    }};
}

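// For reference, `binop!(+, f64, dst, src1, src2, self)` expands to roughly
// the following (whitespace adjusted; illustrative only):
//
//     self.set_stacktype(dst as i64, RawValType::Float);
//     self.set_stack(dst as i64, Self::to_value::<f64>(
//         Self::get_as::<f64>(self.get_stack(src1 as i64))
//             + Self::get_as::<f64>(self.get_stack(src2 as i64))));
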
fn set_vec<T>(vec: &mut Vec<T>, i: usize, value: T)
where
    T: Clone + std::default::Default,
{
    match i.cmp(&vec.len()) {
        Ordering::Less => vec[i] = value,
        Ordering::Equal => vec.push(value),
        Ordering::Greater => {
            vec.resize(i, T::default());
            vec.push(value);
        }
    }
}
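
// How `set_vec` behaves at each boundary case (hypothetical values):
//
//     let mut v = vec![1u64, 2, 3];
//     set_vec(&mut v, 1, 9); // Less:    overwrite   -> [1, 9, 3]
//     set_vec(&mut v, 3, 4); // Equal:   append      -> [1, 9, 3, 4]
//     set_vec(&mut v, 6, 5); // Greater: pad with 0s -> [1, 9, 3, 4, 0, 0, 5]
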
fn set_vec_range<T>(vec: &mut Vec<T>, i: usize, values: &[T])
where
    T: std::fmt::Debug + Copy + std::default::Default,
{
    // Do not use copy_from_slice or extend_from_slice here: the source and
    // destination pointer ranges may overlap, and copy_from_slice uses
    // ptr::copy_nonoverlapping internally.
    // vec[range].copy_from_slice(values)
    let start = i;
    let end = i + values.len();
    if end > vec.len() {
        vec.resize(i, T::default());
    }
    match start.cmp(&vec.len()) {
        Ordering::Less => {
            let range = i..(i + values.len());
            for (v, i) in values.iter().zip(range.into_iter()) {
                vec[i] = *v;
            }
        }
        Ordering::Equal | Ordering::Greater => values.iter().for_each(|v| vec.push(*v)),
    }
}
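
// Why the overlap caveat matters (a sketch): callers such as
// `set_stack_range` may pass a `values` slice that aliases `vec`'s own
// buffer (the slice is produced from a raw pointer into the stack), so a
// `copy_from_slice`, which lowers to ptr::copy_nonoverlapping, would be
// undefined behavior for such calls.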

impl Machine {
    /// Create a new VM from a compiled [`Program`] and external functions.
    pub fn new(
        prog: Program,
        extfns: impl Iterator<Item = ExtFunInfo>,
        extcls: impl Iterator<Item = Box<dyn MachineFunction>>,
    ) -> Self {
        let mut res = Self {
            prog,
            stack: vec![],
            base_pointer: 0,
            closures: Default::default(),
            ext_fun_table: vec![],
            ext_cls_table: vec![],
            fn_map: HashMap::new(),
            // cls_map: HashMap::new(),
            arrays: ArrayStorage::default(),
            global_states: Default::default(),
            states_stack: Default::default(),
            delaysizes_pos_stack: vec![0],
            global_vals: vec![],
            debug_stacktype: vec![RawValType::Int; 255],
        };
        extfns.for_each(|ExtFunInfo { name, fun, .. }| {
            let _ = res.install_extern_fn(name, fun);
        });
        extcls.for_each(|machine_function| {
            let _ = res.install_extern_cls(machine_function.get_name(), machine_function.get_fn());
        });
        res.link_functions();
        res
    }
    /// Create a new VM instance from a new program, preserving as much of the
    /// current state as possible.
    pub fn new_resume(&self, prog: Program) -> Self {
        let mut new_vm = Self {
            prog,
            stack: vec![],
            base_pointer: 0,
            closures: Default::default(),
            ext_fun_table: vec![],
            ext_cls_table: vec![],
            fn_map: HashMap::new(),
            // cls_map: HashMap::new(),
            arrays: ArrayStorage::default(),
            global_states: Default::default(),
            states_stack: Default::default(),
            delaysizes_pos_stack: vec![0],
            global_vals: vec![],
            debug_stacktype: vec![RawValType::Int; 255],
        };
        // We assume there are no changes in external function usage for now.

        new_vm.ext_fun_table = self.ext_fun_table.clone();
        new_vm.ext_cls_table = self.ext_cls_table.clone();
        new_vm.global_vals = self.global_vals.clone();
        new_vm.arrays = self.arrays.clone();

        let new_state = state_tree::update_state_storage(
            &self.global_states.rawdata,
            self.prog
                .get_dsp_state_skeleton()
                .cloned()
                .expect("dsp function not found"),
            new_vm
                .prog
                .get_dsp_state_skeleton()
                .cloned()
                .expect("dsp function not found"),
        );
        match new_state {
            Ok(Some(s)) => {
                new_vm.global_states.rawdata = s;
            }
            Ok(None) => {
                log::info!("No state structure change detected. Just copying the buffer.");
                new_vm.global_states.rawdata = self.global_states.rawdata.clone();
            }
            Err(e) => {
                log::error!("Failed to migrate global state: {e}");
            }
        }
        new_vm.link_functions();
        new_vm.execute_main();
        new_vm
    }
    pub fn clear_stack(&mut self) {
        self.stack.fill(0);
    }
    pub fn get_stack(&self, offset: i64) -> RawVal {
        // unsafe {
        //     *self
        //         .stack
        //         .get_unchecked((self.base_pointer + offset as u64) as usize)
        // }
        self.get_stack_range(offset, 1).1[0]
    }
    pub fn get_stack_range(&self, offset: i64, word_size: TypeSize) -> (Range<usize>, &[RawVal]) {
        let addr_start = self.base_pointer as usize + offset as usize;
        let addr_end = addr_start + word_size as usize;
        let start = self.stack.as_slice().as_ptr();
        let slice = unsafe {
            // w/ unstable feature
            // let (_, snd) = self.stack.as_slice().split_at_unchecked(offset as usize);
            // snd.split_at_unchecked(n as usize)
            let vstart = start.add(addr_start);
            slice::from_raw_parts(vstart, word_size as usize)
        };
        (addr_start..addr_end, slice)
    }
    pub fn get_stack_range_mut(
        &mut self,
        offset: i64,
        word_size: TypeSize,
    ) -> (Range<usize>, &mut [RawVal]) {
        let addr_start = self.base_pointer as usize + offset as usize;
        let addr_end = addr_start + word_size as usize;
        let start = self.stack.as_mut_ptr();
        let slice = unsafe {
            // w/ unstable feature
            // let (_, snd) = self.stack.as_slice().split_at_unchecked(offset as usize);
            // snd.split_at_unchecked(n as usize)
            let vstart = start.add(addr_start);
            slice::from_raw_parts_mut(vstart, word_size as usize)
        };
        (addr_start..addr_end, slice)
    }
    pub fn set_stack(&mut self, offset: i64, v: RawVal) {
        self.set_stack_range(offset, &[v])
    }
    pub fn set_stack_range(&mut self, offset: i64, vs: &[RawVal]) {
        // debug_assert!(!v.is_null());
        // debug_assert!(v.is_aligned());
        // let vs = unsafe { slice::from_raw_parts(v, size) };
        set_vec_range(
            &mut self.stack,
            (self.base_pointer as i64 + offset) as usize,
            vs,
        )
    }
    fn move_stack_range(&mut self, offset: i64, srcrange: Range<usize>) {
        let dest = (self.base_pointer as i64 + offset) as usize;
        if srcrange.end > self.stack.len() {
            self.stack.resize(srcrange.end, 0);
        }
        let dest_end = dest + (srcrange.end - srcrange.start);
        if dest_end > self.stack.len() {
            self.stack.resize(dest_end, 0);
        }
        self.stack.copy_within(srcrange, dest)
    }
    fn set_stacktype(&mut self, _offset: i64, _t: RawValType) {
        // Currently a no-op; kept as a hook for stack-type debugging.
        // set_vec(
        //     &mut self.debug_stacktype,
        //     (self.base_pointer as i64 + _offset) as usize,
        //     _t,
        // );
    }
    pub fn get_top_n(&self, n: usize) -> &[RawVal] {
        let len = self.stack.len();
        &self.stack[(len - n)..]
    }
    fn get_upvalue_offset(upper_base: usize, offset: OpenUpValue) -> usize {
        upper_base + offset.pos
    }
    pub fn get_open_upvalue(
        &self,
        upper_base: usize,
        ov: OpenUpValue,
    ) -> (Range<usize>, &[RawVal]) {
        let OpenUpValue { size, .. } = ov;
        // log::trace!("upper base:{}, upvalue:{}", upper_base, offset);
        let abs_pos = Self::get_upvalue_offset(upper_base, ov);
        let end = abs_pos + size as usize;
        let slice = unsafe {
            let vstart = self.stack.as_slice().as_ptr().add(abs_pos);
            slice::from_raw_parts(vstart, size as usize)
        };
        (abs_pos..end, slice)
    }
    pub fn get_closure(&self, idx: ClosureIdx) -> &Closure {
        debug_assert!(
            self.closures.contains_key(idx.0),
            "Invalid Closure Id referred"
        );
        unsafe { self.closures.get_unchecked(idx.0) }
    }
    pub(crate) fn get_closure_mut(&mut self, idx: ClosureIdx) -> &mut Closure {
        debug_assert!(
            self.closures.contains_key(idx.0),
            "Invalid Closure Id referred"
        );
        unsafe { self.closures.get_unchecked_mut(idx.0) }
    }
    fn get_current_state(&mut self) -> &mut StateStorage {
        if self.states_stack.0.is_empty() {
            &mut self.global_states
        } else {
            let idx = unsafe { self.states_stack.0.last().unwrap_unchecked() };
            &mut self.get_closure_mut(*idx).state_storage
        }
    }
    fn return_general(&mut self, iret: Reg, nret: Reg) -> &[u64] {
        let base = self.base_pointer as usize;
        let iret_abs = base + iret as usize;
        self.stack
            .copy_within(iret_abs..(iret_abs + nret as usize), base - 1);
        // Clean up temporary variables so that the `nret` return values sit
        // at the top of the stack.
        self.stack.truncate(base - 1 + nret as usize);
        self.stack.split_at(base).1
    }

    pub fn get_as<T>(v: RawVal) -> T {
        unsafe { std::mem::transmute_copy::<RawVal, T>(&v) }
    }
    pub fn get_as_array<T>(v: &[RawVal]) -> &[T] {
        unsafe { std::mem::transmute::<&[RawVal], &[T]>(v) }
    }
    pub fn to_value<T>(v: T) -> RawVal {
        assert_eq!(std::mem::size_of::<T>(), 8);
        unsafe { std::mem::transmute_copy::<T, RawVal>(&v) }
    }
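
    // The stack is untyped: every value is stored as a raw 64-bit word, and
    // `to_value`/`get_as` merely reinterpret the bits (illustrative):
    //
    //     let raw = Machine::to_value::<f64>(1.5); // same bits as 1.5f64.to_bits()
    //     let x = Machine::get_as::<f64>(raw);     // x == 1.5
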
    fn call_function<F>(
        &mut self,
        func_pos: u8,
        _nargs: u8,
        nret_req: u8,
        mut action: F,
    ) -> ReturnCode
    where
        F: FnMut(&mut Self) -> ReturnCode,
    {
        let offset = (func_pos + 1) as u64;
        self.delaysizes_pos_stack.push(0);
        self.base_pointer += offset;
        let nret = action(self);

        if nret_req > nret as u8 {
            panic!("invalid number of return values: {nret_req} required but only {nret} returned.");
        }
        // Shrink the stack so that it matches the number of return values.
        self.stack
            .truncate((self.base_pointer as i64 + nret_req as i64) as usize);
        self.base_pointer -= offset;
        self.delaysizes_pos_stack.pop();
        nret
    }
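
    // Call-frame layout assumed by `call_function` (a sketch; slot numbers
    // are relative to the caller's base pointer):
    //
    //     ... | r(func_pos): function value | r(func_pos+1): arg0 | arg1 | ...
    //                                         ^ callee's base_pointer
    //
    // The callee thus sees its arguments as registers 0.., and
    // `return_general` copies return values back over the function slot.
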
    fn allocate_closure(&mut self, fn_i: usize, upv_map: &mut LocalUpValueMap) -> ClosureIdx {
        let idx = self
            .closures
            .insert(Closure::new(&self.prog, self.base_pointer, fn_i, upv_map));
        ClosureIdx(idx)
    }
    /// This API is used for defining a higher-order external function that returns a native Rust closure.
    /// Because a native closure cannot be called with `CallCls` directly, the VM appends an additional
    /// function to the program that wraps the external closure call in an internal closure.
    pub fn wrap_extern_cls(&mut self, extcls: ExtClsInfo) -> ClosureIdx {
        let ExtClsInfo { name, fun, ty } = extcls;

        self.prog.ext_fun_table.push((name.to_string(), ty));
        let prog_funid = self.prog.ext_fun_table.len() - 1;
        self.ext_cls_table.push((name, fun));
        let vm_clsid = self.ext_cls_table.len() - 1;
        self.fn_map.insert(prog_funid, ExtFnIdx::Cls(vm_clsid));
        let (bytecodes, nargs, nret) = if let Type::Function { arg, ret } = ty.to_type() {
            let mut wrap_bytecode = Vec::<Instruction>::new();
            // todo: decouple the bytecode generator dependency
            let asize = ByteCodeGenerator::word_size_for_type(arg);
            // If there are 2 float arguments, for instance, the base register should be 2.
            let nargs = match arg.to_type() {
                Type::Tuple(args) => args.len(),
                Type::Record(fields) => fields.len(),
                _ => unreachable!("single argument should be 1 element record"),
            } as u8;
            let base = nargs;
            let nret = ByteCodeGenerator::word_size_for_type(ret);
            wrap_bytecode.push(Instruction::MoveConst(base, 0));
            wrap_bytecode.push(Instruction::MoveRange(base + 1, 0, asize));

            wrap_bytecode.extend_from_slice(&[
                Instruction::CallExtFun(base, nargs, nret as _),
                Instruction::Return(base, nret as _),
            ]);
            (wrap_bytecode, nargs, nret)
        } else {
            panic!("non-function type passed for wrapping an external closure");
        };
        let newfunc = FuncProto {
            nparam: nargs as _,
            nret: nret as _,
            bytecodes,
            constants: vec![prog_funid as _],
            ..Default::default()
        };
        self.prog.global_fn_table.push((name.to_string(), newfunc));
        let fn_i = self.prog.global_fn_table.len() - 1;
        let mut cls = Closure::new(
            &self.prog,
            self.base_pointer,
            fn_i,
            &mut LocalUpValueMap(vec![]),
        );
        // The wrapper closure will not be released automatically.
        cls.is_closed = true;
        let idx = self.closures.insert(cls);
        ClosureIdx(idx)
    }
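
    // The generated wrapper is tiny. For a wrapped closure of type
    // (float, float) -> float it would look roughly like this (illustrative
    // register numbers; constant 0 holds the ext_fun_table index):
    //
    //     MoveConst(2, 0)     // r2 = index of the native closure
    //     MoveRange(3, 0, 2)  // shift the two arguments up past r2
    //     CallExtFun(2, 2, 1) // call the native closure
    //     Return(2, 1)        // forward its single return value
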
    fn close_upvalues(&mut self, src: Reg) {
        let clsidx = Self::get_as::<ClosureIdx>(self.get_stack(src as _));

        // Copy every open upvalue off the stack into its shared cell, and
        // collect the closures captured through those upvalues so their
        // reference counts can be bumped afterwards.
        let clsidxs = self
            .get_closure(clsidx)
            .upvalues
            .iter()
            .map(|upv| {
                let upv = &mut *upv.borrow_mut();
                match upv {
                    UpValue::Open(ov) => {
                        let (_range, ov_raw) =
                            self.get_open_upvalue(self.base_pointer as usize, *ov);
                        let is_closure = ov.is_closure;
                        *upv = UpValue::Closed(ov_raw.to_vec(), is_closure);
                        is_closure.then_some(Self::get_as::<ClosureIdx>(ov_raw[0]))
                    }
                    UpValue::Closed(v, is_closure) => {
                        is_closure.then_some(Self::get_as::<ClosureIdx>(v[0]))
                    }
                }
            })
            .collect::<Vec<_>>();
        clsidxs.iter().for_each(|i| {
            if let Some(ci) = i {
                let cls = self.get_closure_mut(*ci);
                cls.refcount += 1;
            }
        });
        let cls = self.get_closure_mut(clsidx);
        cls.is_closed = true;
    }
    fn release_open_closures(&mut self, local_closures: &[ClosureIdx]) {
        for clsidx in local_closures.iter() {
            let cls = self.get_closure(*clsidx);
            if !cls.is_closed {
                // log::debug!("release {:?}", clsidx);
                drop_closure(&mut self.closures, *clsidx)
            }
        }
    }
    fn get_fnproto(&self, func_i: usize) -> &FuncProto {
        &self.prog.global_fn_table[func_i].1
    }
    /// Execute a function within the VM.
    ///
    /// `func_i` is an index into the program's function table and `cls_i` is an
    /// optional closure that provides the environment for the call.
    /// The returned [`ReturnCode`] is the number of values pushed on the stack
    /// as a result of the call.
    pub fn execute(&mut self, func_i: usize, cls_i: Option<ClosureIdx>) -> ReturnCode {
        let mut local_closures: Vec<ClosureIdx> = vec![];
        let mut upv_map = LocalUpValueMap::default();
        let mut pcounter = 0;
        // if cfg!(test) {
        //     log::trace!("{:?}", func);
        // }

        loop {
            // if cfg!(debug_assertions) && log::max_level() >= log::Level::Trace {
            //     let mut line = String::new();
            //     line += &format!("{: <20} {}", func.bytecodes[pcounter], ": [");
            //     for i in 0..self.stack.len() {
            //         if i == self.base_pointer as usize {
            //             line += "!";
            //         }
            //         line += &match self.debug_stacktype[i] {
            //             RawValType::Float => format!("{0:.5}f", Self::get_as::<f64>(self.stack[i])),
            //             RawValType::Int => format!("{0:.5}i", Self::get_as::<i64>(self.stack[i])),
            //             RawValType::UInt => format!("{0:.5}u", Self::get_as::<u64>(self.stack[i])),
            //         };
            //         if i < self.stack.len() - 1 {
            //             line += ",";
            //         }
            //     }
            //     line += "]";
            //     log::trace!("{line}");
            // }
            let mut increment = 1;
            match self.get_fnproto(func_i).bytecodes[pcounter] {
                Instruction::Move(dst, src) => {
                    self.set_stack(dst as i64, self.get_stack(src as i64));
                }
                Instruction::MoveConst(dst, pos) => {
                    self.set_stack(dst as i64, self.get_fnproto(func_i).constants[pos as usize]);
                }
                Instruction::MoveImmF(dst, v) => {
                    self.set_stack(dst as i64, Self::to_value(Into::<f64>::into(v)));
                }
                Instruction::MoveRange(dst, src, n) => {
                    let (range, _slice) = self.get_stack_range(src as _, n);
                    self.move_stack_range(dst as i64, range);
                }
                Instruction::CallCls(func, nargs, nret_req) => {
                    let addr = self.get_stack(func as i64);
                    let cls_i = Self::get_as::<ClosureIdx>(addr);
                    let cls = self.get_closure(cls_i);
                    let pos_of_f = cls.fn_proto_pos;
                    self.states_stack.push(cls_i);
                    self.call_function(func, nargs, nret_req, move |machine| {
                        machine.execute(pos_of_f, Some(cls_i))
                    });
                    self.states_stack.pop();
                }
                Instruction::Call(func, nargs, nret_req) => {
                    let pos_of_f = Self::get_as::<usize>(self.get_stack(func as i64));
                    self.call_function(func, nargs, nret_req, move |machine| {
                        machine.execute(pos_of_f, None)
                    });
                }
                Instruction::CallExtFun(func, nargs, nret_req) => {
                    let ext_fn_idx = self.get_stack(func as i64) as usize;
                    let fidx = self.fn_map.get(&ext_fn_idx).unwrap();
                    let nret = match fidx {
                        ExtFnIdx::Fun(fi) => {
                            let f = self.ext_fun_table[*fi].1;
                            self.call_function(func, nargs, nret_req, f)
                        }
                        ExtFnIdx::Cls(ci) => {
                            let (_name, cls) = &self.ext_cls_table[*ci];
                            let cls = cls.clone();
                            self.call_function(func, nargs, nret_req, move |machine| {
                                cls.borrow_mut()(machine)
                            })
                        }
                    };

                    // Copy the return values back into the caller's frame.
                    let base = self.base_pointer as usize;
                    let iret = base + func as usize + 1;
                    self.stack
                        .copy_within(iret..(iret + nret as usize), base + func as usize);
                    self.stack.truncate(base + func as usize + nret as usize);
                }
                Instruction::Closure(dst, fn_index) => {
                    let fn_proto_pos = self.get_stack(fn_index as i64) as usize;
                    let vaddr = self.allocate_closure(fn_proto_pos, &mut upv_map);
                    local_closures.push(vaddr);
                    self.set_stack(dst as i64, Self::to_value(vaddr));
                }
                Instruction::Close(src) => {
                    self.close_upvalues(src);
                }
                Instruction::Return0 => {
                    self.stack.truncate((self.base_pointer - 1) as usize);
                    self.release_open_closures(&local_closures);
                    return 0;
                }
                Instruction::Return(iret, nret) => {
                    let _ = self.return_general(iret, nret);
                    self.release_open_closures(&local_closures);
                    return nret.into();
                }
                Instruction::GetUpValue(dst, index, _size) => {
                    let up_i = cls_i.unwrap();
                    let cls = self.get_closure(up_i);
                    let upvalues = &cls.upvalues;
                    let rv = &upvalues[index as usize];
                    let vs = match &*rv.borrow() {
                        UpValue::Open(i) => {
                            let upper_base = cls.base_ptr as usize;
                            let (_range, rawv) = self.get_open_upvalue(upper_base, *i);
                            // log::trace!("open {}", unsafe {
                            //     std::mem::transmute::<u64, f64>(rawv[0])
                            // });
                            // assert_eq!(rawv.len(), size as usize);
                            let rawv: &[RawVal] = unsafe { std::mem::transmute(rawv) };
                            rawv
                        }
                        UpValue::Closed(rawval, _) => {
                            // Force the borrow: the closure cell and the stack never collide.
                            let rawv: &[RawVal] =
                                unsafe { std::mem::transmute(rawval.as_slice()) };
                            rawv
                        }
                    };
                    self.set_stack_range(dst as i64, vs);
                }
                Instruction::SetUpValue(index, src, size) => {
                    let up_i = cls_i.unwrap();
                    let cls = self.get_closure(up_i);
                    let upper_base = cls.base_ptr as usize;
                    let upvalues = &cls.upvalues;
                    let (_range, v) = self.get_stack_range(src as i64, size);
                    let rv = &mut *upvalues[index as usize].borrow_mut();
                    match rv {
                        UpValue::Open(OpenUpValue { pos: i, size, .. }) => {
                            let (range, _v) = self.get_stack_range(src as i64, *size);
                            let dest = upper_base + *i;
                            unsafe {
                                // Force the borrow: the closure cell and the stack never collide.
                                let dst = slice::from_raw_parts_mut(
                                    std::mem::transmute::<*const RawVal, *mut RawVal>(
                                        self.stack.as_ptr(),
                                    ),
                                    self.stack.len(),
                                );
                                dst.copy_within(range, dest);
                            }
                        }
                        UpValue::Closed(uv, _) => {
                            uv.as_mut_slice().copy_from_slice(v);
                        }
                    };
                }
                Instruction::GetGlobal(dst, gid, size) => {
                    let gvs = unsafe {
                        let vstart = self.global_vals.as_ptr().offset(gid as _);
                        debug_assert!(!vstart.is_null());
                        // debug_assert!(vstart.is_aligned());
                        slice::from_raw_parts(vstart, size as _)
                    };
                    self.set_stack_range(dst as i64, gvs)
                }
                Instruction::SetGlobal(gid, src, size) => {
                    let gvs = unsafe {
                        let vstart = self.global_vals.as_mut_ptr().offset(gid as _);
                        debug_assert!(!vstart.is_null());
                        // debug_assert!(vstart.is_aligned());
                        slice::from_raw_parts_mut(vstart, size as _)
                    };
                    let (_, slice) = self.get_stack_range(src as i64, size);
                    gvs.copy_from_slice(slice);
                }
                Instruction::Jmp(offset) => {
                    // The jump offset is taken relative to the current
                    // instruction; it replaces the default increment of 1.
                    increment = offset;
                }
                Instruction::JmpIfNeg(cond, offset) => {
                    let cond_v = self.get_stack(cond as i64);
                    if Self::get_as::<f64>(cond_v) <= 0.0 {
                        increment = offset;
                    }
                }
                Instruction::AddF(dst, src1, src2) => binop!(+,f64,dst,src1,src2,self),
                Instruction::SubF(dst, src1, src2) => {
                    binop!(-,f64,dst,src1,src2,self)
                }
                Instruction::MulF(dst, src1, src2) => binop!(*,f64,dst,src1,src2,self),
                Instruction::DivF(dst, src1, src2) => binop!(/,f64,dst,src1,src2,self),
                Instruction::ModF(dst, src1, src2) => binop!(%,f64,dst,src1,src2,self),
                Instruction::NegF(dst, src) => uniop!(-,f64,dst,src,self),
                Instruction::AbsF(dst, src) => uniopmethod!(abs, f64, dst, src, self),
                Instruction::SqrtF(dst, src) => uniopmethod!(sqrt, f64, dst, src, self),
                Instruction::SinF(dst, src) => uniopmethod!(sin, f64, dst, src, self),
                Instruction::CosF(dst, src) => uniopmethod!(cos, f64, dst, src, self),
                Instruction::PowF(dst, src1, src2) => {
                    binopmethod!(powf, f64, dst, src1, src2, self)
                }
                Instruction::LogF(dst, src) => uniopmethod!(ln, f64, dst, src, self),
                Instruction::AddI(dst, src1, src2) => binop!(+,i64,dst,src1,src2,self),
                Instruction::SubI(dst, src1, src2) => binop!(-,i64,dst,src1,src2,self),
                Instruction::MulI(dst, src1, src2) => binop!(*,i64,dst,src1,src2,self),
                Instruction::DivI(dst, src1, src2) => binop!(/,i64,dst,src1,src2,self),
                Instruction::ModI(dst, src1, src2) => binop!(%,i64,dst,src1,src2,self),
                Instruction::NegI(dst, src) => uniop!(-,i64,dst,src,self),
                Instruction::AbsI(dst, src) => uniopmethod!(abs, i64, dst, src, self),
                Instruction::PowI(dst, lhs, rhs) => {
                    // Note: `binop!(^, ...)` here would compute bitwise XOR,
                    // not exponentiation, so the power is computed explicitly.
                    let l = Self::get_as::<i64>(self.get_stack(lhs as i64));
                    let r = Self::get_as::<i64>(self.get_stack(rhs as i64));
                    self.set_stack(dst as i64, Self::to_value::<i64>(l.pow(r as u32)));
                }
                Instruction::LogI(_, _, _) => todo!(),
                Instruction::Not(dst, src) => uniop_bool!(!, dst, src, self),
                Instruction::Eq(dst, src1, src2) => binop_bool!(==,dst,src1,src2,self),
                Instruction::Ne(dst, src1, src2) => binop_bool!(!=,dst,src1,src2,self),
                Instruction::Gt(dst, src1, src2) => binop_bool!(>,dst,src1,src2,self),
                Instruction::Ge(dst, src1, src2) => binop_bool!(>=,dst,src1,src2,self),
                Instruction::Lt(dst, src1, src2) => binop_bool!(<,dst,src1,src2,self),
                Instruction::Le(dst, src1, src2) => binop_bool!(<=,dst,src1,src2,self),
                Instruction::And(dst, src1, src2) => binop_bool_compose!(&&,dst,src1,src2,self),
                Instruction::Or(dst, src1, src2) => binop_bool_compose!(||,dst,src1,src2,self),
                Instruction::CastFtoI(dst, src) => self.set_stack(
                    dst as i64,
                    Self::to_value::<i64>(Self::get_as::<f64>(self.get_stack(src as i64)) as i64),
                ),
                Instruction::CastItoF(dst, src) => self.set_stack(
                    dst as i64,
                    Self::to_value::<f64>(Self::get_as::<i64>(self.get_stack(src as i64)) as f64),
                ),
                Instruction::CastItoB(dst, src) => self.set_stack(
                    dst as i64,
                    // Booleans are represented as f64 0.0/1.0 on this stack
                    // (see the comparison ops above); transmuting a 1-byte
                    // bool through `to_value` would trip its 8-byte assert.
                    Self::to_value::<f64>(
                        if Self::get_as::<i64>(self.get_stack(src as i64)) != 0 {
                            1.0
                        } else {
                            0.0
                        },
                    ),
                ),
                Instruction::AllocArray(dst, len, elem_size) => {
                    // Allocate an array of the given length and element size.
                    let key = self.arrays.alloc_array(len as _, elem_size as _);
                    // Store the array handle on the stack.
                    self.set_stack(dst as i64, key);
                }
                Instruction::GetArrayElem(dst, arr, idx) => {
                    // Read the array handle and the (float) index.
                    let array = self.get_stack(arr as i64);
                    let index = self.get_stack(idx as i64);
                    let index_val = Self::get_as::<f64>(index);
                    let adata = self.arrays.get_array(array);
                    let elem_word_size = adata.elem_word_size as usize;
                    let buffer = unsafe {
                        let address = adata
                            .data
                            .as_ptr()
                            .wrapping_add(index_val as usize * elem_word_size);
                        std::slice::from_raw_parts(address, elem_word_size)
                    };
                    set_vec_range(
                        &mut self.stack,
                        (self.base_pointer + dst as u64) as usize,
                        buffer,
                    );
                    // todo: implement automatic interpolation and out-of-bounds handling for primitive arrays.
                }
                Instruction::SetArrayElem(arr, idx, val) => {
                    // Read the array handle, the index, and the value to store.
                    let array = self.get_stack(arr as i64);
                    let index = self.get_stack(idx as i64);
                    let index_val = Self::get_as::<f64>(index);
                    let index_int = index_val as usize;
                    let adata = self.arrays.get_array_mut(array);
                    let elem_word_size = adata.elem_word_size as usize;
                    let buffer = unsafe {
                        let address = adata
                            .data
                            .as_mut_ptr()
                            .wrapping_add(index_int * elem_word_size);
                        std::slice::from_raw_parts_mut(address, elem_word_size)
                    };
                    let (_range, buf_src) = self.get_stack_range(val as _, elem_word_size as _);
                    buffer.copy_from_slice(buf_src);
                }
                Instruction::GetState(dst, size) => {
                    // Force the borrow: the state storage and the stack never collide.
                    let v: &[RawVal] = unsafe {
                        std::mem::transmute(self.get_current_state().get_state(size as _))
                    };
                    self.set_stack_range(dst as i64, v);
                }
                Instruction::SetState(src, size) => {
                    let vs = {
                        let (_range, v) = self.get_stack_range(src as i64, size as _);
                        unsafe { std::mem::transmute::<&[RawVal], &[RawVal]>(v) }
                    };
                    let dst = self.get_current_state().get_state_mut(size as _);
                    dst.copy_from_slice(vs);
                }
                Instruction::PushStatePos(v) => self.get_current_state().push_pos(v),
                Instruction::PopStatePos(v) => self.get_current_state().pop_pos(v),
                Instruction::Delay(dst, src, time) => {
                    let i = self.get_stack(src as i64);
                    let t = self.get_stack(time as i64);
                    let delaysize_i =
                        unsafe { self.delaysizes_pos_stack.last().unwrap_unchecked() };

                    let size_in_samples = unsafe {
                        *self
                            .get_fnproto(func_i)
                            .delay_sizes
                            .get_unchecked(*delaysize_i)
                    };
                    let mut ringbuf = self.get_current_state().get_as_ringbuffer(size_in_samples);

                    let res = ringbuf.process(i, t);
                    self.set_stack(dst as i64, res);
                }
                Instruction::Mem(dst, src) => {
                    // `mem` is a one-sample delay: emit the previous sample's
                    // input, then store the current input into the state word.
                    let s = self.get_stack(src as i64);
                    let ptr = self.get_current_state().get_state_mut(1);
                    let v = Self::to_value(ptr[0]);
                    self.set_stack(dst as i64, v);
                    let ptr = self.get_current_state().get_state_mut(1);
                    ptr[0] = s;
                }
                Instruction::Dummy => {
                    unreachable!()
                }
            }
            pcounter = (pcounter as i64 + increment as i64) as usize;
        }
    }
    pub fn install_extern_fn(&mut self, name: Symbol, f: ExtFunType) -> usize {
        self.ext_fun_table.push((name, f));
        self.ext_fun_table.len() - 1
    }
    pub fn install_extern_cls(&mut self, name: Symbol, f: ExtClsType) -> usize {
        self.ext_cls_table.push((name, f));
        self.ext_cls_table.len() - 1
    }

    fn link_functions(&mut self) {
        // Allocate global value storage, then link the external functions.
        let global_mem_size = self
            .prog
            .global_vals
            .iter()
            .map(|WordSize(size)| *size as usize)
            .sum();
        self.global_vals = vec![0; global_mem_size];
        self.prog
            .ext_fun_table
            .iter_mut()
            .enumerate()
            .for_each(|(i, (name, _ty))| {
                if let Some((j, _)) = self
                    .ext_fun_table
                    .iter()
                    .enumerate()
                    .find(|(_j, (fname, _fn))| name == fname.as_str())
                {
                    let _ = self.fn_map.insert(i, ExtFnIdx::Fun(j));
                } else if let Some((j, _)) = self
                    .ext_cls_table
                    .iter()
                    .enumerate()
                    .find(|(_j, (fname, _fn))| name == fname.as_str())
                {
                    let _ = self.fn_map.insert(i, ExtFnIdx::Cls(j));
                } else {
                    panic!("external function {} cannot be found", name);
                }
            });
    }
    pub fn execute_idx(&mut self, idx: usize) -> ReturnCode {
        let (_name, func) = &self.prog.global_fn_table[idx];
        if !func.bytecodes.is_empty() {
            self.global_states
                .resize(func.state_skeleton.total_size() as usize);
            // Stack slot 0 always holds the base pointer of the main function.
            if !self.stack.is_empty() {
                self.stack[0] = 0;
            }
            self.base_pointer = 1;
            self.execute(idx, None)
        } else {
            0
        }
    }
    pub fn execute_entry(&mut self, entry: &str) -> ReturnCode {
        if let Some(idx) = self.prog.get_fun_index(entry) {
            self.execute_idx(idx)
        } else {
            -1
        }
    }
    pub fn execute_main(&mut self) -> ReturnCode {
        // Stack slot 0 always holds the base pointer of the main function.
        self.base_pointer += 1;
        self.execute(0, None)
    }
}
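
// A sketch of driving the VM from the host side (hypothetical `program`,
// `extfns`, and `extcls` values; the signatures are those defined above):
//
//     let mut vm = Machine::new(program, extfns.into_iter(), extcls.into_iter());
//     vm.execute_main();                  // run the toplevel (function index 0)
//     let n = vm.execute_entry("dsp");    // then call `dsp` once per sample
//     let out = vm.get_top_n(n as usize); // the values it returned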

#[cfg(test)]
mod test;