use core::slice;
use slotmap::{DefaultKey, Key, KeyData, SlotMap};
use std::{cell::RefCell, cmp::Ordering, collections::HashMap, ops::Range, rc::Rc};
pub mod bytecode;
pub mod heap;
mod primitives;
pub mod program;
mod ringbuffer;
// Assumed sibling module: provides the state patch-plan helpers used by
// `new_resume` below (`build_state_storage_patch_plan` et al.).
mod state_tree;
pub use bytecode::*;
use ringbuffer::Ringbuffer;

use program::OpenUpValue;
pub use program::{FuncProto, Program};

use crate::{
    compiler::bytecodegen::ByteCodeGenerator,
    interner::{ExprNodeId, Symbol, ToSymbol, TypeNodeId},
    plugin::{ExtClsInfo, ExtClsType, ExtFunInfo, ExtFunType, MachineFunction},
    runtime::vm::program::WordSize,
    types::{Type, TypeSize},
};
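/// A single VM word. Every runtime value (f64, i64, booleans, slotmap keys,
/// heap indices) is bit-cast into this 64-bit representation.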
pub type RawVal = u64;
/// Word count returned by a call; negative values signal failure
/// (e.g. `execute_entry` returns -1 for an unknown entry point).
pub type ReturnCode = i64;

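/// Flat storage for stateful primitives (`mem`, `delay`). `pos` is a cursor
/// into `rawdata` that `push_pos`/`pop_pos` move as the interpreter enters and
/// leaves stateful function scopes, so each operator reads its own slice.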
#[derive(Debug, Default, PartialEq)]
struct StateStorage {
    pos: usize,
    rawdata: Vec<u64>,
}
impl StateStorage {
    fn resize(&mut self, size: usize) {
        self.rawdata.resize(size, 0)
    }
    fn get_state(&self, size: u64) -> &[RawVal] {
        // Safety: callers guarantee that `pos + size` stays within `rawdata`.
        unsafe {
            let head = self.rawdata.as_ptr().add(self.pos);
            slice::from_raw_parts(head, size as _)
        }
    }
    fn get_state_mut(&mut self, size: usize) -> &mut [RawVal] {
        // Safety: same invariant as `get_state`.
        unsafe {
            let head = self.rawdata.as_mut_ptr().add(self.pos);
            slice::from_raw_parts_mut(head, size)
        }
    }
    fn get_as_ringbuffer(&mut self, size_in_samples: u64) -> Ringbuffer<'_> {
        let data_head = unsafe { self.rawdata.as_mut_ptr().add(self.pos) };
        Ringbuffer::new(data_head, size_in_samples)
    }
    fn push_pos(&mut self, offset: StateOffset) {
        let offset: u64 = offset.into();
        self.pos += offset as usize;
    }
    fn pop_pos(&mut self, offset: StateOffset) {
        let offset: u64 = offset.into();
        self.pos -= offset as usize;
    }
}

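/// Handle to a [`Closure`] in the VM's slotmap. It is `Copy` and 8 bytes wide,
/// so it can be carried on the value stack as an ordinary `RawVal`.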
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ClosureIdx(pub slotmap::DefaultKey);

#[derive(Debug, Clone, Default)]
struct StateStorageStack(Vec<ClosureIdx>);

impl StateStorageStack {
    pub fn push(&mut self, i: ClosureIdx) {
        self.0.push(i)
    }
    pub fn pop(&mut self) {
        let _ = self.0.pop();
    }
}

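/// Backing store for one array value: a flat word buffer plus the per-element
/// word size, so the element count is `data.len() / elem_word_size`.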
#[derive(Debug, Clone, Default)]
pub(crate) struct ArrayHeap {
    elem_word_size: u64,
    data: Vec<RawVal>,
}
impl ArrayHeap {
    pub fn get_length_array(&self) -> u64 {
        self.data.len() as u64 / self.elem_word_size
    }
    pub fn get_elem_word_size(&self) -> u64 {
        self.elem_word_size
    }
    pub fn get_data(&self) -> &[RawVal] {
        &self.data
    }
    pub fn get_data_mut(&mut self) -> &mut [RawVal] {
        &mut self.data
    }
}
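/// Arrays live in a slotmap; keys are round-tripped through slotmap's 64-bit
/// FFI encoding so they can travel on the stack as plain `RawVal`s. A minimal
/// round-trip sketch (not compiled as a doctest; the types are crate-private):
///
/// ```ignore
/// let mut storage = ArrayStorage::default();
/// let raw = storage.alloc_array(4, 1); // 4 elements, 1 word each
/// assert_eq!(storage.get_array(raw).get_length_array(), 4);
/// ```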
#[derive(Debug, Clone, Default)]
pub(crate) struct ArrayStorage {
    data: SlotMap<DefaultKey, ArrayHeap>,
}
pub(crate) type ArrayIdx = slotmap::DefaultKey;
impl ArrayStorage {
    pub fn alloc_array(&mut self, len: u64, elem_size: u64) -> RawVal {
        let array = ArrayHeap {
            elem_word_size: elem_size,
            data: vec![0u64; (len * elem_size) as usize],
        };
        let key = self.data.insert(array);
        debug_assert!(
            std::mem::size_of::<ArrayIdx>() == 8,
            "ArrayIdx size must be 8 bytes"
        );
        key.data().as_ffi() as RawVal
    }
    pub fn get_array(&self, id: RawVal) -> &ArrayHeap {
        let key = ArrayIdx::from(KeyData::from_ffi(id));
        self.data
            .get(key)
            .unwrap_or_else(|| panic!("Invalid ArrayIdx: raw_id={id}"))
    }
    pub fn try_get_array(&self, id: RawVal) -> Option<&ArrayHeap> {
        let key = ArrayIdx::from(KeyData::from_ffi(id));
        self.data.get(key)
    }
    pub fn get_array_mut(&mut self, id: RawVal) -> &mut ArrayHeap {
        let key = ArrayIdx::from(KeyData::from_ffi(id));
        self.data
            .get_mut(key)
            .unwrap_or_else(|| panic!("Invalid ArrayIdx (mut): raw_id={id}"))
    }
}
#[derive(Debug, Clone, PartialEq)]
enum UpValue {
    Open(OpenUpValue),
    /// The captured words, plus whether word 0 holds a closure reference.
    Closed(Vec<RawVal>, bool),
}
type SharedUpValue = Rc<RefCell<UpValue>>;
impl From<OpenUpValue> for UpValue {
    fn from(value: OpenUpValue) -> Self {
        Self::Open(value)
    }
}

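/// Per-frame cache mapping a stack position to the `SharedUpValue` created for
/// it, so sibling closures allocated in the same frame share one upvalue cell.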
#[derive(Default)]
struct LocalUpValueMap(Vec<(Reg, SharedUpValue)>);

impl LocalUpValueMap {
    pub fn get_or_insert(&mut self, ov: OpenUpValue) -> SharedUpValue {
        let OpenUpValue { pos, .. } = ov;
        self.0
            .iter()
            .find_map(|(i2, v)| (pos == *i2 as usize).then_some(v.clone()))
            .unwrap_or_else(|| {
                let v = Rc::new(RefCell::new(UpValue::Open(ov)));
                self.0.push((pos as Reg, v.clone()));
                v
            })
    }
}

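/// A runtime closure instance: which prototype it executes, the base pointer
/// of the frame it captured from, its shared upvalues, and its private state
/// storage for stateful operators.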
#[derive(Debug, Default, PartialEq)]
pub struct Closure {
    pub fn_proto_pos: usize,
    pub base_ptr: u64,
    pub is_closed: bool,
    pub refcount: u64,
    pub(self) upvalues: Vec<SharedUpValue>,
    state_storage: StateStorage,
}
impl Closure {
    pub(self) fn new(
        program: &Program,
        base_ptr: u64,
        fn_i: usize,
        upv_map: &mut LocalUpValueMap,
    ) -> Self {
        let fnproto = &program.global_fn_table[fn_i].1;
        let upvalues = fnproto
            .upindexes
            .iter()
            .map(|ov| upv_map.get_or_insert(*ov))
            .collect::<Vec<_>>();
        let mut state_storage = StateStorage::default();
        state_storage.resize(fnproto.state_skeleton.total_size() as usize);
        Self {
            fn_proto_pos: fn_i,
            upvalues,
            is_closed: false,
            refcount: 1,
            base_ptr,
            state_storage,
        }
    }
}

pub type ClosureStorage = SlotMap<DefaultKey, Closure>;

#[derive(Clone, Copy, Default)]
enum RawValType {
    Float,
    #[default]
    Int,
}

#[derive(Clone, Copy)]
enum ExtFnIdx {
    Fun(usize),
    Cls(usize),
}
pub struct Machine {
    pub prog: Program,
    stack: Vec<RawVal>,
    base_pointer: u64,
    pub closures: ClosureStorage,
    pub heap: heap::HeapStorage,
    pub ext_fun_table: Vec<(Symbol, ExtFunType)>,
    pub ext_cls_table: Vec<(Symbol, ExtClsType)>,
    pub arrays: ArrayStorage,
    // Maps a program-level external-function id to its slot in the tables above.
    fn_map: HashMap<usize, ExtFnIdx>,
    global_states: StateStorage,
    states_stack: StateStorageStack,
    delaysizes_pos_stack: Vec<usize>,
    global_vals: Vec<RawVal>,
    debug_stacktype: Vec<RawValType>,
    current_ext_call_nargs: u8,
    current_ext_call_idx: Option<usize>,
    code_values: Vec<ExprNodeId>,
}

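// The opcode bodies below share these macros: each reads operand registers
// relative to the base pointer, bit-casts through `RawVal`, applies the
// operator, and writes the result back. Roughly, `binop!(+, f64, dst, a, b,
// self)` expands to:
//
//     self.set_stack(dst, Self::to_value::<f64>(
//         Self::get_as::<f64>(self.get_stack(a))
//             + Self::get_as::<f64>(self.get_stack(b))));
//
// Boolean results are encoded as f64: 1.0 for true, 0.0 for false.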
macro_rules! binop {
    ($op:tt,$t:ty, $dst:expr,$src1:expr,$src2:expr,$self:ident) => {{
        $self.set_stacktype($dst as i64, RawValType::Float);
        $self.set_stack($dst as i64, Self::to_value::<$t>(
            Self::get_as::<$t>($self.get_stack($src1 as i64))
            $op Self::get_as::<$t>($self.get_stack($src2 as i64))))
    }};
}
macro_rules! binop_bool {
    ($op:tt, $dst:expr,$src1:expr,$src2:expr,$self:ident) => {{
        $self.set_stacktype($dst as i64, RawValType::Float);
        let bres: bool =
            Self::get_as::<f64>($self.get_stack($src1 as i64))
            $op Self::get_as::<f64>($self.get_stack($src2 as i64));
        let fres = if bres { 1.0f64 } else { 0.0f64 };
        $self.set_stack($dst as i64, Self::to_value::<f64>(fres))
    }};
}
macro_rules! binop_bool_compose {
    ($op:tt, $dst:expr,$src1:expr,$src2:expr,$self:ident) => {{
        $self.set_stacktype($dst as i64, RawValType::Float);
        let bres: bool =
            (Self::get_as::<f64>($self.get_stack($src1 as i64)) > 0.0)
            $op (Self::get_as::<f64>($self.get_stack($src2 as i64)) > 0.0);
        let fres = if bres { 1.0f64 } else { 0.0f64 };
        $self.set_stack($dst as i64, Self::to_value::<f64>(fres))
    }};
}
macro_rules! binopmethod {
    ($op:ident,$t:ty, $dst:expr,$src1:expr,$src2:expr,$self:ident) => {{
        $self.set_stacktype($dst as i64, RawValType::Float);
        $self.set_stack(
            $dst as i64,
            Self::to_value::<$t>(
                Self::get_as::<$t>($self.get_stack($src1 as i64))
                    .$op(Self::get_as::<$t>($self.get_stack($src2 as i64))),
            ),
        )
    }};
}
macro_rules! uniop {
    ($op:tt,$t:ty, $dst:expr,$src:expr,$self:ident) => {
        $self.set_stack($dst as i64,
            Self::to_value::<$t>(
                $op Self::get_as::<$t>($self.get_stack($src as i64))))
    };
}
macro_rules! uniop_bool {
    ($op:tt, $dst:expr,$src:expr,$self:ident) => {{
        let bres: bool = $op(matches!(
            Self::get_as::<f64>($self.get_stack($src as i64)).partial_cmp(&0.0),
            Some(std::cmp::Ordering::Greater)
        ));
        let fres = if bres { 1.0f64 } else { 0.0f64 };
        $self.set_stack($dst as i64, Self::to_value::<f64>(fres))
    }};
}
macro_rules! uniopmethod {
    ($op:tt,$t:ty, $dst:expr,$src:expr,$self:ident) => {{
        $self.set_stack(
            $dst as i64,
            Self::to_value::<$t>(Self::get_as::<$t>($self.get_stack($src as i64)).$op()),
        )
    }};
}

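// Helpers for writing into a Vec at an arbitrary index, growing it with
// default values when the index is at or past the end. E.g. writing one value
// at index 5 of a 3-element Vec default-fills slots 3..5 first, then pushes.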
fn set_vec<T>(vec: &mut Vec<T>, i: usize, value: T)
where
    T: Clone + std::default::Default,
{
    match i.cmp(&vec.len()) {
        Ordering::Less => vec[i] = value,
        Ordering::Equal => vec.push(value),
        Ordering::Greater => {
            vec.resize(i, T::default());
            vec.push(value);
        }
    }
}
fn set_vec_range<T>(vec: &mut Vec<T>, i: usize, values: &[T])
where
    T: std::fmt::Debug + Copy + std::default::Default,
{
    let start = i;
    let end = i + values.len();
    if end > vec.len() {
        // Truncate (or default-fill) to exactly `start`, so the values can be
        // appended in one pass below.
        vec.resize(start, T::default());
    }
    match start.cmp(&vec.len()) {
        Ordering::Less => {
            for (offset, v) in values.iter().enumerate() {
                vec[start + offset] = *v;
            }
        }
        // After the resize above, `start` cannot exceed `vec.len()`.
        Ordering::Equal | Ordering::Greater => vec.extend_from_slice(values),
    }
}

impl Machine {
    fn try_get_heap_backed_closure(&self, raw: RawVal) -> Option<(heap::HeapIdx, ClosureIdx)> {
        let heap_idx = Self::get_as::<heap::HeapIdx>(raw);
        self.heap.get(heap_idx).and_then(|obj| {
            obj.data
                .first()
                .map(|&closure_raw| (heap_idx, Self::get_as::<ClosureIdx>(closure_raw)))
        })
    }

    fn try_get_direct_closure(&self, raw: RawVal) -> Option<ClosureIdx> {
        let clsidx = Self::get_as::<ClosureIdx>(raw);
        self.closures.contains_key(clsidx.0).then_some(clsidx)
    }

    pub fn drop_closure(&mut self, id: ClosureIdx) {
        let cls = self.closures.get_mut(id.0).unwrap();
        cls.refcount -= 1;
        if cls.refcount == 0 {
            // Collect closure references held in closed upvalues, then drop
            // them recursively before removing this closure.
            let raw_refs = self
                .closures
                .get(id.0)
                .unwrap()
                .upvalues
                .iter()
                .filter_map(|v| {
                    let v = v.borrow();
                    match &*v {
                        UpValue::Closed(data, true) => Some(data[0]),
                        _ => None,
                    }
                })
                .collect::<Vec<_>>();
            let refs = raw_refs
                .into_iter()
                .filter_map(|raw| {
                    self.try_get_heap_backed_closure(raw)
                        .map(|(heap_idx, closure_idx)| (Some(heap_idx), closure_idx))
                        .or_else(|| {
                            self.try_get_direct_closure(raw)
                                .map(|closure_idx| (None, closure_idx))
                        })
                })
                .collect::<Vec<_>>();
            refs.iter().for_each(|(heap_idx, clsi)| {
                self.drop_closure(*clsi);
                if let Some(heap_idx) = heap_idx {
                    heap::heap_release(&mut self.heap, *heap_idx);
                }
            });
            self.closures.remove(id.0);
        }
    }

    pub fn new(
        prog: Program,
        extfns: impl Iterator<Item = ExtFunInfo>,
        extcls: impl Iterator<Item = Box<dyn MachineFunction>>,
    ) -> Self {
        let mut res = Self {
            prog,
            stack: vec![],
            base_pointer: 0,
            closures: Default::default(),
            heap: Default::default(),
            ext_fun_table: vec![],
            ext_cls_table: vec![],
            fn_map: HashMap::new(),
            arrays: ArrayStorage::default(),
            global_states: Default::default(),
            states_stack: Default::default(),
            delaysizes_pos_stack: vec![0],
            global_vals: vec![],
            debug_stacktype: vec![RawValType::Int; 255],
            current_ext_call_nargs: 0,
            current_ext_call_idx: None,
            code_values: vec![],
        };
        extfns.for_each(|ExtFunInfo { name, fun, .. }| {
            let _ = res.install_extern_fn(name, fun);
        });
        extcls.for_each(|machine_function| {
            let _ = res.install_extern_cls(machine_function.get_name(), machine_function.get_fn());
        });
        res.link_functions();
        res
    }
    /// Build a new VM that inherits this VM's runtime state (globals, arrays,
    /// external functions) but runs the freshly compiled `prog`, patching the
    /// dsp state buffer when its layout changed.
    pub fn new_resume(&self, prog: Program) -> Self {
        let mut new_vm = Self {
            prog,
            stack: vec![],
            base_pointer: 0,
            closures: Default::default(),
            heap: Default::default(),
            ext_fun_table: vec![],
            ext_cls_table: vec![],
            fn_map: HashMap::new(),
            arrays: ArrayStorage::default(),
            global_states: Default::default(),
            states_stack: Default::default(),
            delaysizes_pos_stack: vec![0],
            global_vals: vec![],
            debug_stacktype: vec![RawValType::Int; 255],
            current_ext_call_nargs: 0,
            current_ext_call_idx: None,
            code_values: vec![],
        };
        new_vm.ext_fun_table = self.ext_fun_table.clone();
        new_vm.ext_cls_table = self.ext_cls_table.clone();
        new_vm.global_vals = self.global_vals.clone();
        new_vm.arrays = self.arrays.clone();

        let patch_plan = state_tree::build_state_storage_patch_plan(
            self.prog
                .get_dsp_state_skeleton()
                .cloned()
                .expect("dsp function not found"),
            new_vm
                .prog
                .get_dsp_state_skeleton()
                .cloned()
                .expect("dsp function not found"),
        );
        if let Some(plan) = patch_plan {
            new_vm.global_states.rawdata =
                state_tree::apply_state_storage_patch_plan(&self.global_states.rawdata, &plan);
        } else {
            log::info!("No state structure change detected. Just copying the buffer.");
            new_vm.global_states.rawdata = self.global_states.rawdata.clone();
        }
        new_vm.link_functions();
        new_vm.execute_main();
        new_vm
    }
    pub fn clear_stack(&mut self) {
        self.stack.fill(0);
    }
    pub fn get_stack(&self, offset: i64) -> RawVal {
        self.get_stack_range(offset, 1).1[0]
    }
    pub fn get_stack_range(&self, offset: i64, word_size: TypeSize) -> (Range<usize>, &[RawVal]) {
        let addr_start = (self.base_pointer as i64 + offset) as usize;
        let addr_end = addr_start + word_size as usize;
        let slice = &self.stack[addr_start..addr_end];
        (addr_start..addr_end, slice)
    }
    pub fn get_stack_range_mut(
        &mut self,
        offset: i64,
        word_size: TypeSize,
    ) -> (Range<usize>, &mut [RawVal]) {
        let addr_start = (self.base_pointer as i64 + offset) as usize;
        let addr_end = addr_start + word_size as usize;
        let slice = &mut self.stack[addr_start..addr_end];
        (addr_start..addr_end, slice)
    }
    pub fn set_stack(&mut self, offset: i64, v: RawVal) {
        self.set_stack_range(offset, &[v])
    }
    pub fn set_stack_range(&mut self, offset: i64, vs: &[RawVal]) {
        set_vec_range(
            &mut self.stack,
            (self.base_pointer as i64 + offset) as usize,
            vs,
        )
    }
    fn move_stack_range(&mut self, offset: i64, srcrange: Range<usize>) {
        let dest = (self.base_pointer as i64 + offset) as usize;
        if srcrange.end > self.stack.len() {
            self.stack.resize(srcrange.end, 0);
        }
        let dest_end = dest + (srcrange.end - srcrange.start);
        if dest_end > self.stack.len() {
            self.stack.resize(dest_end, 0);
        }
        self.stack.copy_within(srcrange, dest)
    }
    fn set_stacktype(&mut self, _offset: i64, _t: RawValType) {
        // Stack-type tracking (see `debug_stacktype`) is currently a no-op.
    }
    pub fn get_top_n(&self, n: usize) -> &[RawVal] {
        let len = self.stack.len();
        &self.stack[(len - n)..]
    }

    pub fn get_current_ext_call_nargs(&self) -> u8 {
        self.current_ext_call_nargs
    }

    pub fn get_current_ext_call_idx(&self) -> Option<usize> {
        self.current_ext_call_idx
    }
    pub fn get_closure_idx_from_heap(&self, heap_idx: heap::HeapIdx) -> ClosureIdx {
        let heap_obj = self.heap.get(heap_idx).expect("Invalid HeapIdx");
        Self::get_as::<ClosureIdx>(heap_obj.data[0])
    }
    fn get_upvalue_offset(upper_base: usize, offset: OpenUpValue) -> usize {
        upper_base + offset.pos
    }
    pub fn get_open_upvalue(
        &self,
        upper_base: usize,
        ov: OpenUpValue,
    ) -> (Range<usize>, &[RawVal]) {
        let OpenUpValue { size, .. } = ov;
        let abs_pos = Self::get_upvalue_offset(upper_base, ov);
        let end = abs_pos + size as usize;
        // Safety: an open upvalue refers to a live slot in an enclosing
        // frame's portion of the stack.
        let slice = unsafe {
            let vstart = self.stack.as_slice().as_ptr().add(abs_pos);
            slice::from_raw_parts(vstart, size as usize)
        };
        (abs_pos..end, slice)
    }
    pub fn get_closure(&self, idx: ClosureIdx) -> &Closure {
        debug_assert!(
            self.closures.contains_key(idx.0),
            "Invalid Closure Id referred"
        );
        unsafe { self.closures.get_unchecked(idx.0) }
    }
    pub(crate) fn get_closure_mut(&mut self, idx: ClosureIdx) -> &mut Closure {
        debug_assert!(
            self.closures.contains_key(idx.0),
            "Invalid Closure Id referred"
        );
        unsafe { self.closures.get_unchecked_mut(idx.0) }
    }
    fn get_current_state(&mut self) -> &mut StateStorage {
        if self.states_stack.0.is_empty() {
            &mut self.global_states
        } else {
            let idx = unsafe { self.states_stack.0.last().unwrap_unchecked() };
            &mut self.get_closure_mut(*idx).state_storage
        }
    }
    fn return_general(&mut self, iret: Reg, nret: Reg) -> &[u64] {
        let base = self.base_pointer as usize;
        let iret_abs = base + iret as usize;
        // Move the return values down over the callee's frame (slot `base - 1`
        // held the function value) and shrink the stack accordingly.
        self.stack
            .copy_within(iret_abs..(iret_abs + nret as usize), base - 1);
        self.stack.truncate(base - 1 + nret as usize);

        self.stack.split_at(base).1
    }

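    /// Bit-cast helpers: values are stored as raw 64-bit words and these
    /// reinterpret the bits without any numeric conversion. A quick sketch
    /// (not compiled as a doctest):
    ///
    /// ```ignore
    /// let raw = Machine::to_value::<f64>(1.5);
    /// assert_eq!(Machine::get_as::<f64>(raw), 1.5);
    /// ```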
    pub fn get_as<T>(v: RawVal) -> T {
        debug_assert!(std::mem::size_of::<T>() <= std::mem::size_of::<RawVal>());
        unsafe { std::mem::transmute_copy::<RawVal, T>(&v) }
    }
    pub fn get_as_array<T>(v: &[RawVal]) -> &[T] {
        // Only valid for `T` with the same size and alignment as `RawVal`.
        unsafe { std::mem::transmute::<&[RawVal], &[T]>(v) }
    }
    pub fn to_value<T>(v: T) -> RawVal {
        assert_eq!(std::mem::size_of::<T>(), 8);
        unsafe { std::mem::transmute_copy::<T, RawVal>(&v) }
    }
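    /// Shared calling convention for `Call`, `CallCls`, and `CallExtFun`:
    /// shift the base pointer past the callee slot, run `action` (the callee),
    /// pad missing return words from the argument snapshot when the caller
    /// expects more words than were returned, then restore the frame.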
    fn call_function<F>(
        &mut self,
        func_pos: Reg,
        nargs: u8,
        nret_req: TypeSize,
        mut action: F,
    ) -> ReturnCode
    where
        F: FnMut(&mut Self) -> ReturnCode,
    {
        let offset = (func_pos + 1) as u64;
        self.delaysizes_pos_stack.push(0);
        self.base_pointer += offset;

        let snapshot_words = nargs as usize;
        let arg_snapshot = (0..snapshot_words)
            .map(|i| self.get_stack(i as i64))
            .collect::<Vec<_>>();

        let nret = action(self);

        if nret_req > nret as TypeSize {
            if nret == 1 && arg_snapshot.len() >= nret_req as usize {
                let base = self.base_pointer as usize;
                let ret_start = base - 1;
                let required_len = ret_start + nret_req as usize;
                if self.stack.len() < required_len {
                    self.stack.resize(required_len, 0);
                }
                (1..nret_req as usize).for_each(|i| {
                    self.stack[ret_start + i] = arg_snapshot[i];
                });
            } else {
                panic!(
                    "invalid number of return values: {nret_req} required but the callee returned only {nret}."
                );
            }
        }
        self.stack
            .truncate((self.base_pointer as i64 + nret_req as i64) as usize);
        self.base_pointer -= offset;
        self.delaysizes_pos_stack.pop();
        nret
    }
    fn allocate_closure(&mut self, fn_i: usize, upv_map: &mut LocalUpValueMap) -> ClosureIdx {
        let idx = self
            .closures
            .insert(Closure::new(&self.prog, self.base_pointer, fn_i, upv_map));
        ClosureIdx(idx)
    }

    fn allocate_heap_closure(
        &mut self,
        fn_i: usize,
        upv_map: &mut LocalUpValueMap,
    ) -> heap::HeapIdx {
        let closure_idx = self.allocate_closure(fn_i, upv_map);

        // The heap object stores the closure index as its single word.
        let heap_obj = heap::HeapObject::with_data(vec![Self::to_value(closure_idx)]);
        let heap_idx = self.heap.insert(heap_obj);

        log::trace!(
            "allocate_heap_closure: fn_i={fn_i}, heap_idx={heap_idx:?}, closure_idx={closure_idx:?}"
        );
        heap_idx
    }

    fn release_heap_closure(&mut self, heap_idx: heap::HeapIdx) {
        let maybe_closure = self.heap.get(heap_idx).and_then(|obj| {
            (!obj.data.is_empty()).then_some(Self::get_as::<ClosureIdx>(obj.data[0]))
        });

        if let Some(closure_idx) = maybe_closure {
            if !self.get_closure(closure_idx).is_closed {
                self.drop_closure(closure_idx);
            }
        }

        heap::heap_release(&mut self.heap, heap_idx);
    }

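    /// Close the upvalues of the closure wrapped by the given heap object, so
    /// that closure can outlive the stack frame it captured from.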
    fn close_heap_upvalues(&mut self, heap_idx: heap::HeapIdx) {
        log::trace!("close_heap_upvalues: heap_idx={heap_idx:?}");

        if let Some(heap_obj) = self.heap.get(heap_idx) {
            if !heap_obj.data.is_empty() {
                let closure_idx = Self::get_as::<ClosureIdx>(heap_obj.data[0]);
                self.close_upvalues_by_idx(closure_idx);
            }
        }
    }

    fn release_heap_closures(&mut self, local_heap_closures: &[heap::HeapIdx]) {
        for &heap_idx in local_heap_closures {
            self.release_heap_closure(heap_idx);
        }
    }

    pub fn wrap_extern_cls(&mut self, extcls: ExtClsInfo) -> ClosureIdx {
        let ExtClsInfo { name, fun, ty } = extcls;

        self.prog.ext_fun_table.push((name.to_string(), ty));
        let prog_funid = self.prog.ext_fun_table.len() - 1;
        self.ext_cls_table.push((name, fun));
        let vm_clsid = self.ext_cls_table.len() - 1;
        self.fn_map.insert(prog_funid, ExtFnIdx::Cls(vm_clsid));
        // Synthesize a small wrapper prototype: load the external-function id
        // from the constant pool, forward the arguments, call, then return.
        let (bytecodes, nargs, nret) = if let Type::Function { arg, ret } = ty.to_type() {
            let mut wrap_bytecode = Vec::<Instruction>::new();
            let asize = ByteCodeGenerator::word_size_for_type(arg);
            let nargs = match arg.to_type() {
                Type::Tuple(args) => args.len(),
                Type::Record(fields) => fields.len(),
                _ => unreachable!("a single argument should be a 1-element record"),
            } as u8;
            let base = nargs as Reg;
            let nret = ByteCodeGenerator::word_size_for_type(ret);
            wrap_bytecode.push(Instruction::MoveConst(base, 0));
            wrap_bytecode.push(Instruction::MoveRange(base + 1, 0, asize));

            wrap_bytecode.extend_from_slice(&[
                Instruction::CallExtFun(base, nargs, nret as _),
                Instruction::Return(base, nret as _),
            ]);
            (wrap_bytecode, nargs, nret)
        } else {
            panic!("non-function type called for wrapping external closure");
        };
        let newfunc = FuncProto {
            nparam: nargs as _,
            nret: nret as _,
            bytecodes,
            constants: vec![prog_funid as _],
            ..Default::default()
        };
        self.prog.global_fn_table.push((name.to_string(), newfunc));
        let fn_i = self.prog.global_fn_table.len() - 1;
        let mut cls = Closure::new(
            &self.prog,
            self.base_pointer,
            fn_i,
            &mut LocalUpValueMap(vec![]),
        );
        cls.is_closed = true;
        let idx = self.closures.insert(cls);
        ClosureIdx(idx)
    }
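    /// Convert every open upvalue of the closure at stack slot `src` into a
    /// closed one by copying the referenced stack words into the upvalue cell,
    /// retaining any closures those words point at.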
    fn close_upvalues(&mut self, src: Reg) {
        let clsidx = Self::get_as::<ClosureIdx>(self.get_stack(src as _));
        self.close_upvalues_by_idx(clsidx);
    }
    fn close_upvalues_by_idx(&mut self, clsidx: ClosureIdx) {
        let closure_base_ptr = self.get_closure(clsidx).base_ptr as usize;

        let raw_refs = self
            .get_closure(clsidx)
            .upvalues
            .iter()
            .filter_map(|upv| {
                let upv = &mut *upv.borrow_mut();
                match upv {
                    UpValue::Open(ov) => {
                        let (_range, ov_raw) = self.get_open_upvalue(closure_base_ptr, *ov);
                        let is_closure = ov.is_closure;
                        *upv = UpValue::Closed(ov_raw.to_vec(), is_closure);
                        is_closure.then_some(ov_raw[0])
                    }
                    UpValue::Closed(v, is_closure) => (*is_closure).then_some(v[0]),
                }
            })
            .collect::<Vec<_>>();
        let refs = raw_refs
            .into_iter()
            .filter_map(|raw| {
                self.try_get_heap_backed_closure(raw)
                    .map(|(heap_idx, closure_idx)| (Some(heap_idx), closure_idx))
                    .or_else(|| {
                        self.try_get_direct_closure(raw)
                            .map(|closure_idx| (None, closure_idx))
                    })
            })
            .collect::<Vec<_>>();
        refs.iter().for_each(|(heap_idx, closure_idx)| {
            if let Some(heap_idx) = heap_idx {
                heap::heap_retain(&mut self.heap, *heap_idx);
            }
            self.get_closure_mut(*closure_idx).refcount += 1;
        });
        let cls = self.get_closure_mut(clsidx);
        cls.is_closed = true;
    }
    fn release_open_closures(&mut self, local_closures: &[ClosureIdx]) {
        for clsidx in local_closures.iter() {
            let cls = self.get_closure(*clsidx);
            if !cls.is_closed {
                self.drop_closure(*clsidx)
            }
        }
    }
    fn get_fnproto(&self, func_i: usize) -> &FuncProto {
        &self.prog.global_fn_table[func_i].1
    }
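    /// The interpreter loop. Each opcode arm leaves `increment` at the signed
    /// distance to the next instruction (1 for straight-line code, a jump
    /// offset for branches); the program counter is advanced once at the
    /// bottom of the loop, so arms must fall through rather than `continue`.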
    pub fn execute(&mut self, func_i: usize, cls_i: Option<ClosureIdx>) -> ReturnCode {
        let mut local_closures: Vec<ClosureIdx> = vec![];
        let mut local_heap_closures: Vec<heap::HeapIdx> = vec![];
        let mut upv_map = LocalUpValueMap::default();
        let mut pcounter = 0;
        loop {
            let mut increment = 1;
            match self.get_fnproto(func_i).bytecodes[pcounter] {
                Instruction::Move(dst, src) => {
                    self.set_stack(dst as i64, self.get_stack(src as i64));
                }
                Instruction::MoveConst(dst, pos) => {
                    self.set_stack(dst as i64, self.get_fnproto(func_i).constants[pos as usize]);
                }
                Instruction::MoveImmF(dst, v) => {
                    self.set_stack(dst as i64, Self::to_value(Into::<f64>::into(v)));
                }
                Instruction::MoveRange(dst, src, n) => {
                    let (range, _slice) = self.get_stack_range(src as _, n);
                    self.move_stack_range(dst as i64, range);
                }
                Instruction::CallCls(func, nargs, nret_req) => {
                    let addr = self.get_stack(func as i64);
                    let cls_i = Self::get_as::<ClosureIdx>(addr);
                    let cls = self.get_closure(cls_i);
                    let pos_of_f = cls.fn_proto_pos;
                    self.states_stack.push(cls_i);
                    self.call_function(func, nargs, nret_req, move |machine| {
                        machine.execute(pos_of_f, Some(cls_i))
                    });
                    self.states_stack.pop();
                }
                Instruction::Call(func, nargs, nret_req) => {
                    let pos_of_f = Self::get_as::<usize>(self.get_stack(func as i64));
                    self.call_function(func, nargs, nret_req, move |machine| {
                        machine.execute(pos_of_f, None)
                    });
                }
                Instruction::CallExtFun(func, nargs, nret_req) => {
                    let ext_fn_idx = self.get_stack(func as i64) as usize;
                    let fidx = self.fn_map.get(&ext_fn_idx).unwrap();
                    let prev_nargs = self.current_ext_call_nargs;
                    let prev_ext_call_idx = self.current_ext_call_idx;
                    self.current_ext_call_nargs = nargs;
                    self.current_ext_call_idx = Some(ext_fn_idx);
                    let _nret = match fidx {
                        ExtFnIdx::Fun(fi) => {
                            let f = self.ext_fun_table[*fi].1;
                            self.call_function(func, nargs, nret_req, f)
                        }
                        ExtFnIdx::Cls(ci) => {
                            let (_name, cls) = &self.ext_cls_table[*ci];
                            let cls = cls.clone();
                            self.call_function(func, nargs, nret_req, move |machine| {
                                cls.borrow_mut()(machine)
                            })
                        }
                    };
                    self.current_ext_call_nargs = prev_nargs;
                    self.current_ext_call_idx = prev_ext_call_idx;

                    // Shift the returned words down over the function slot so
                    // the result sits where the caller expects it.
                    let base = self.base_pointer as usize;
                    let iret = base + func as usize + 1;
                    let n = nret_req as usize;
                    self.stack
                        .copy_within(iret..(iret + n), base + func as usize);
                    self.stack.truncate(base + func as usize + n);
                }
                Instruction::Closure(dst, fn_index) => {
                    let fn_proto_pos = self.get_stack(fn_index as i64) as usize;
                    let vaddr = self.allocate_closure(fn_proto_pos, &mut upv_map);
                    local_closures.push(vaddr);
                    self.set_stack(dst as i64, Self::to_value(vaddr));
                }
                Instruction::Close(src) => {
                    self.close_upvalues(src);
                }
                Instruction::MakeHeapClosure(dst, fn_index, _size) => {
                    let fn_proto_pos = self.get_stack(fn_index as i64) as usize;
                    let heap_idx = self.allocate_heap_closure(fn_proto_pos, &mut upv_map);
                    local_heap_closures.push(heap_idx);
                    self.set_stack(dst as i64, Self::to_value(heap_idx));
                }
                Instruction::CloseHeapClosure(src) => {
                    let heap_addr = self.get_stack(src as i64);
                    if let Some((heap_idx, _)) = self.try_get_heap_backed_closure(heap_addr) {
                        self.close_heap_upvalues(heap_idx);
                    } else if let Some(closure_idx) = self.try_get_direct_closure(heap_addr) {
                        self.close_upvalues_by_idx(closure_idx);
                    }
                }
                Instruction::CloneHeap(src) => {
                    let heap_addr = self.get_stack(src as i64);
                    if let Some((heap_idx, closure_idx)) =
                        self.try_get_heap_backed_closure(heap_addr)
                    {
                        heap::heap_retain(&mut self.heap, heap_idx);
                        if let Some(closure) = self.closures.get_mut(closure_idx.0) {
                            closure.refcount += 1;
                        }
                    } else if let Some(closure_idx) = self.try_get_direct_closure(heap_addr) {
                        if let Some(closure) = self.closures.get_mut(closure_idx.0) {
                            closure.refcount += 1;
                        }
                    }
                }
                Instruction::BoxAlloc(dst, src, inner_size) => {
                    let (_, src_data) = self.get_stack_range(src as i64, inner_size);
                    let data = src_data.to_vec();
                    let heap_idx = self.heap.insert(heap::HeapObject::with_data(data));
                    self.set_stack(dst as i64, Self::to_value(heap_idx));
                }
                Instruction::BoxLoad(dst, src, inner_size) => {
                    let heap_addr = self.get_stack(src as i64);
                    let heap_idx = Self::get_as::<heap::HeapIdx>(heap_addr);
                    let heap_obj = self
                        .heap
                        .get(heap_idx)
                        .expect("BoxLoad: invalid heap index");
                    let data: Vec<u64> = heap_obj.data[..inner_size as usize].to_vec();
                    self.set_stack_range(dst as i64, &data);
                }
                Instruction::BoxClone(src) => {
                    let heap_addr = self.get_stack(src as i64);
                    let heap_idx = Self::get_as::<heap::HeapIdx>(heap_addr);
                    heap::heap_retain(&mut self.heap, heap_idx);
                }
                Instruction::BoxRelease(src) => {
                    let heap_addr = self.get_stack(src as i64);
                    let heap_idx = Self::get_as::<heap::HeapIdx>(heap_addr);
                    heap::heap_release(&mut self.heap, heap_idx);
                }
                Instruction::BoxStore(ptr_reg, src, inner_size) => {
                    let heap_addr = self.get_stack(ptr_reg as i64);
                    let heap_idx = Self::get_as::<heap::HeapIdx>(heap_addr);
                    let (_, src_data) = self.get_stack_range(src as i64, inner_size);
                    let data = src_data.to_vec();
                    let heap_obj = self
                        .heap
                        .get_mut(heap_idx)
                        .expect("BoxStore: invalid heap index");
                    heap_obj.data[..inner_size as usize].copy_from_slice(&data);
                }
                Instruction::CloneUserSum(value_reg, value_size, type_idx) => {
                    let ty = self
                        .prog
                        .get_type_from_table(type_idx)
                        .expect("Invalid type table index in CloneUserSum");
                    let (_, value_data) = self.get_stack_range(value_reg as i64, value_size);
                    let value_vec = value_data.to_vec();
                    Self::clone_usersum_recursive(&value_vec, &ty, &mut self.heap);
                }
                Instruction::ReleaseUserSum(value_reg, value_size, type_idx) => {
                    let ty = self
                        .prog
                        .get_type_from_table(type_idx)
                        .expect("Invalid type table index in ReleaseUserSum");
                    let (_, value_data) = self.get_stack_range(value_reg as i64, value_size);
                    let value_vec = value_data.to_vec();
                    let tt = &self.prog.type_table;
                    Self::release_usersum_recursive(&value_vec, &ty, &mut self.heap, tt);
                }
                Instruction::CallIndirect(func, nargs, nret_req) => {
                    let callable = self.get_stack(func as i64);
                    let heap_idx = Self::get_as::<heap::HeapIdx>(callable);

                    // The callable may be (1) a heap-boxed closure, (2) a bare
                    // closure index, or (3) a raw function-table position.
                    let maybe_heap_closure = self.heap.get(heap_idx).and_then(|heap_obj| {
                        heap_obj.data.first().and_then(|&closure_raw| {
                            let cls_i = Self::get_as::<ClosureIdx>(closure_raw);
                            self.closures.contains_key(cls_i.0).then_some(cls_i)
                        })
                    });

                    if let Some(cls_i) = maybe_heap_closure {
                        let cls = self.get_closure(cls_i);
                        let pos_of_f = cls.fn_proto_pos;
                        self.states_stack.push(cls_i);
                        self.call_function(func, nargs, nret_req, move |machine| {
                            machine.execute(pos_of_f, Some(cls_i))
                        });
                        self.states_stack.pop();
                    } else {
                        let closure_idx = ClosureIdx(DefaultKey::from(KeyData::from_ffi(callable)));
                        if self.closures.contains_key(closure_idx.0) {
                            let cls = self.get_closure(closure_idx);
                            let pos_of_f = cls.fn_proto_pos;
                            self.states_stack.push(closure_idx);
                            self.call_function(func, nargs, nret_req, move |machine| {
                                machine.execute(pos_of_f, Some(closure_idx))
                            });
                            self.states_stack.pop();
                        } else {
                            let pos_of_f = callable as usize;
                            assert!(
                                pos_of_f < self.prog.global_fn_table.len(),
                                "Invalid indirect callable: raw={callable}, heap={heap_idx:?}, func_i={func_i}, pc={pcounter}"
                            );
                            self.call_function(func, nargs, nret_req, move |machine| {
                                machine.execute(pos_of_f, None)
                            });
                        }
                    }
                }
                Instruction::Return0 => {
                    self.stack.truncate((self.base_pointer - 1) as usize);
                    self.release_open_closures(&local_closures);
                    self.release_heap_closures(&local_heap_closures);
                    return 0;
                }
                Instruction::Return(iret, nret) => {
                    let _ = self.return_general(iret, nret);
                    self.release_open_closures(&local_closures);
                    self.release_heap_closures(&local_heap_closures);
                    return nret.into();
                }
                Instruction::GetUpValue(dst, index, _size) => {
                    let up_i = cls_i.unwrap();
                    let cls = self.get_closure(up_i);
                    let upvalues = &cls.upvalues;
                    let rv = &upvalues[index as usize];
                    let vs = match &*rv.borrow() {
                        UpValue::Open(i) => {
                            let upper_base = cls.base_ptr as usize;
                            let (_range, rawv) = self.get_open_upvalue(upper_base, *i);
                            // Identity transmute detaches the borrow so that
                            // `set_stack_range` below can take `&mut self`.
                            let rawv: &[RawVal] = unsafe { std::mem::transmute(rawv) };
                            rawv
                        }
                        UpValue::Closed(rawval, _) => {
                            let rawv: &[RawVal] =
                                unsafe { std::mem::transmute(rawval.as_slice()) };
                            rawv
                        }
                    };
                    self.set_stack_range(dst as i64, vs);
                }
                Instruction::SetUpValue(index, src, size) => {
                    let up_i = cls_i.unwrap();
                    let cls = self.get_closure(up_i);
                    let upper_base = cls.base_ptr as usize;
                    let upvalues = &cls.upvalues;
                    let (_range, v) = self.get_stack_range(src as i64, size);
                    let rv = &mut *upvalues[index as usize].borrow_mut();
                    match rv {
                        UpValue::Open(OpenUpValue { pos: i, size, .. }) => {
                            let (range, _v) = self.get_stack_range(src as i64, *size);
                            let dest = upper_base + *i;
                            // `cls` still immutably borrows `self` here, so
                            // copy within the stack through a raw alias
                            // instead of `Vec::copy_within`.
                            unsafe {
                                let dst = slice::from_raw_parts_mut(
                                    self.stack.as_ptr() as *mut RawVal,
                                    self.stack.len(),
                                );
                                dst.copy_within(range, dest);
                            }
                        }
                        UpValue::Closed(uv, _) => {
                            uv.as_mut_slice().copy_from_slice(v);
                        }
                    };
                }
                Instruction::GetGlobal(dst, gid, size) => {
                    let gvs = unsafe {
                        let vstart = self.global_vals.as_ptr().add(gid as usize);
                        debug_assert!(!vstart.is_null());
                        slice::from_raw_parts(vstart, size as _)
                    };
                    self.set_stack_range(dst as i64, gvs)
                }
                Instruction::SetGlobal(gid, src, size) => {
                    let gvs = unsafe {
                        let vstart = self.global_vals.as_mut_ptr().add(gid as usize);
                        debug_assert!(!vstart.is_null());
                        slice::from_raw_parts_mut(vstart, size as _)
                    };
                    let (_, slice) = self.get_stack_range(src as i64, size);
                    gvs.copy_from_slice(slice);
                }
                Instruction::Jmp(offset) => {
                    increment = offset;
                }
                Instruction::JmpIfNeg(cond, offset) => {
                    let cond_v = self.get_stack(cond as i64);
                    if Self::get_as::<f64>(cond_v) <= 0.0 {
                        increment = offset;
                    }
                }
                Instruction::JmpTable(scrut, table_idx) => {
                    let scrut_val = self.get_stack(scrut as i64);
                    let val = Self::get_as::<i64>(scrut_val);
                    let fn_proto = self.get_fnproto(func_i);
                    debug_assert!(
                        !fn_proto.jump_tables.is_empty(),
                        "JmpTable instruction requires non-empty jump_tables"
                    );
                    let table = &fn_proto.jump_tables[table_idx as usize];
                    // Out-of-range scrutinees fall through to the last offset
                    // (the default arm).
                    let idx = (val - table.min) as usize;
                    let default_idx = table.offsets.len() - 1;
                    increment = table
                        .offsets
                        .get(idx)
                        .copied()
                        .unwrap_or(table.offsets[default_idx]);
                }
                Instruction::AddF(dst, src1, src2) => binop!(+,f64,dst,src1,src2,self),
                Instruction::SubF(dst, src1, src2) => binop!(-,f64,dst,src1,src2,self),
                Instruction::MulF(dst, src1, src2) => binop!(*,f64,dst,src1,src2,self),
                Instruction::DivF(dst, src1, src2) => binop!(/,f64,dst,src1,src2,self),
                Instruction::ModF(dst, src1, src2) => binop!(%,f64,dst,src1,src2,self),
                Instruction::NegF(dst, src) => uniop!(-,f64,dst,src,self),
                Instruction::AbsF(dst, src) => uniopmethod!(abs, f64, dst, src, self),
                Instruction::SqrtF(dst, src) => uniopmethod!(sqrt, f64, dst, src, self),
                Instruction::SinF(dst, src) => uniopmethod!(sin, f64, dst, src, self),
                Instruction::CosF(dst, src) => uniopmethod!(cos, f64, dst, src, self),
                Instruction::PowF(dst, src1, src2) => {
                    binopmethod!(powf, f64, dst, src1, src2, self)
                }
                Instruction::LogF(dst, src) => uniopmethod!(ln, f64, dst, src, self),
                Instruction::AddI(dst, src1, src2) => binop!(+,i64,dst,src1,src2,self),
                Instruction::SubI(dst, src1, src2) => binop!(-,i64,dst,src1,src2,self),
                Instruction::MulI(dst, src1, src2) => binop!(*,i64,dst,src1,src2,self),
                Instruction::DivI(dst, src1, src2) => binop!(/,i64,dst,src1,src2,self),
                Instruction::ModI(dst, src1, src2) => binop!(%,i64,dst,src1,src2,self),
                Instruction::NegI(dst, src) => uniop!(-,i64,dst,src,self),
                Instruction::AbsI(dst, src) => uniopmethod!(abs, i64, dst, src, self),
                Instruction::PowI(dst, lhs, rhs) => {
                    // `binop!(^, ...)` would be bitwise XOR, not exponentiation;
                    // compute an integer power instead (negative exponents are
                    // clamped to 0, i.e. the result is 1).
                    let base = Self::get_as::<i64>(self.get_stack(lhs as i64));
                    let exp = Self::get_as::<i64>(self.get_stack(rhs as i64));
                    self.set_stack(
                        dst as i64,
                        Self::to_value::<i64>(base.wrapping_pow(exp.max(0) as u32)),
                    );
                }
                Instruction::LogI(_, _, _) => todo!(),
                Instruction::Not(dst, src) => uniop_bool!(!, dst, src, self),
                Instruction::Eq(dst, src1, src2) => binop_bool!(==,dst,src1,src2,self),
                Instruction::Ne(dst, src1, src2) => binop_bool!(!=,dst,src1,src2,self),
                Instruction::Gt(dst, src1, src2) => binop_bool!(>,dst,src1,src2,self),
                Instruction::Ge(dst, src1, src2) => binop_bool!(>=,dst,src1,src2,self),
                Instruction::Lt(dst, src1, src2) => binop_bool!(<,dst,src1,src2,self),
                Instruction::Le(dst, src1, src2) => binop_bool!(<=,dst,src1,src2,self),
                Instruction::And(dst, src1, src2) => binop_bool_compose!(&&,dst,src1,src2,self),
                Instruction::Or(dst, src1, src2) => binop_bool_compose!(||,dst,src1,src2,self),
                Instruction::CastFtoI(dst, src) => self.set_stack(
                    dst as i64,
                    Self::to_value::<i64>(Self::get_as::<f64>(self.get_stack(src as i64)) as i64),
                ),
                Instruction::CastItoF(dst, src) => self.set_stack(
                    dst as i64,
                    Self::to_value::<f64>(Self::get_as::<i64>(self.get_stack(src as i64)) as f64),
                ),
                Instruction::CastItoB(dst, src) => self.set_stack(
                    dst as i64,
                    // `to_value::<bool>` would trip the 8-byte size assertion,
                    // so encode the boolean as a full word (1 or 0).
                    (Self::get_as::<i64>(self.get_stack(src as i64)) != 0) as RawVal,
                ),
                Instruction::AllocArray(dst, len, elem_size) => {
                    let key = self.arrays.alloc_array(len as _, elem_size as _);
                    self.set_stack(dst as i64, key);
                }
                Instruction::GetArrayElem(dst, arr, idx) => {
                    let array = self.get_stack(arr as i64);
                    let index = self.get_stack(idx as i64);
                    let index_val = Self::get_as::<f64>(index);
                    let adata = self.arrays.get_array(array);
                    let elem_word_size = adata.elem_word_size as usize;
                    let len = adata.get_length_array() as usize;
                    if len == 0 {
                        // Reading from an empty array yields zeros. (A
                        // `continue` here would skip the program-counter
                        // update at the bottom of the loop and spin forever.)
                        let zeros = vec![0; elem_word_size];
                        set_vec_range(
                            &mut self.stack,
                            (self.base_pointer + dst as u64) as usize,
                            &zeros,
                        );
                    } else {
                        // Clamp non-finite and out-of-range indices into bounds.
                        let max_idx = len.saturating_sub(1);
                        let index_int = if !index_val.is_finite() {
                            0
                        } else {
                            let raw_idx = index_val as i64;
                            raw_idx.clamp(0, max_idx as i64) as usize
                        };
                        let start = index_int * elem_word_size;
                        let end = start + elem_word_size;
                        let buffer = &adata.data[start..end];
                        set_vec_range(
                            &mut self.stack,
                            (self.base_pointer + dst as u64) as usize,
                            buffer,
                        );
                    }
                }
                Instruction::SetArrayElem(arr, idx, val) => {
                    let array = self.get_stack(arr as i64);
                    let index = self.get_stack(idx as i64);
                    let index_val = Self::get_as::<f64>(index);
                    let elem_word_size = self.arrays.get_array(array).elem_word_size as usize;
                    let len = self.arrays.get_array(array).get_length_array() as usize;
                    // Writes to an empty array are dropped; as above, `continue`
                    // must not be used here.
                    if len > 0 {
                        let max_idx = len.saturating_sub(1);
                        let index_int = if !index_val.is_finite() {
                            0
                        } else {
                            let raw_idx = index_val as i64;
                            raw_idx.clamp(0, max_idx as i64) as usize
                        };
                        let (_range, buf_src) = self.get_stack_range(val as _, elem_word_size as _);
                        let src_words = buf_src.to_vec();
                        let adata = self.arrays.get_array_mut(array);
                        let start = index_int * elem_word_size;
                        let end = start + elem_word_size;
                        adata.data[start..end].copy_from_slice(&src_words);
                    }
                }
                Instruction::GetState(dst, size) => {
                    // Identity transmute detaches the borrow on the state
                    // storage so the stack can be written below.
                    let v: &[RawVal] = unsafe {
                        std::mem::transmute(self.get_current_state().get_state(size as _))
                    };
                    self.set_stack_range(dst as i64, v);
                }
                Instruction::SetState(src, size) => {
                    let vs = {
                        let (_range, v) = self.get_stack_range(src as i64, size as _);
                        unsafe { std::mem::transmute::<&[RawVal], &[RawVal]>(v) }
                    };
                    let dst = self.get_current_state().get_state_mut(size as _);
                    dst.copy_from_slice(vs);
                }
                Instruction::PushStatePos(v) => self.get_current_state().push_pos(v),
                Instruction::PopStatePos(v) => self.get_current_state().pop_pos(v),
                Instruction::Delay(dst, src, time) => {
                    let i = self.get_stack(src as i64);
                    let t = self.get_stack(time as i64);
                    let delaysize_i =
                        unsafe { self.delaysizes_pos_stack.last().unwrap_unchecked() };

                    let size_in_samples = unsafe {
                        *self
                            .get_fnproto(func_i)
                            .delay_sizes
                            .get_unchecked(*delaysize_i)
                    };
                    let mut ringbuf = self.get_current_state().get_as_ringbuffer(size_in_samples);

                    let res = ringbuf.process(i, t);
                    self.set_stack(dst as i64, res);
                }
                Instruction::Mem(dst, src) => {
                    // One-sample memory: emit the stored word, then store the
                    // current input.
                    let s = self.get_stack(src as i64);
                    let ptr = self.get_current_state().get_state_mut(1);
                    let v = Self::to_value(ptr[0]);
                    self.set_stack(dst as i64, v);
                    let ptr = self.get_current_state().get_state_mut(1);
                    ptr[0] = s;
                }
                Instruction::Dummy => {
                    unreachable!()
                }
            }
            pcounter = (pcounter as i64 + increment as i64) as usize;
        }
    }
    pub fn install_extern_fn(&mut self, name: Symbol, f: ExtFunType) -> usize {
        self.ext_fun_table.push((name, f));
        self.ext_fun_table.len() - 1
    }
    pub fn install_extern_cls(&mut self, name: Symbol, f: ExtClsType) -> usize {
        self.ext_cls_table.push((name, f));
        self.ext_cls_table.len() - 1
    }

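    /// Intern a quoted code expression, returning its index as a `RawVal` so
    /// staged code values can travel through the VM like any other word.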
    pub fn alloc_code(&mut self, expr: ExprNodeId) -> RawVal {
        let idx = self.code_values.len();
        self.code_values.push(expr);
        idx as RawVal
    }

    pub fn get_code(&self, val: RawVal) -> ExprNodeId {
        self.code_values[val as usize]
    }

    pub fn try_get_code(&self, val: RawVal) -> Option<ExprNodeId> {
        self.code_values.get(val as usize).copied()
    }

    fn link_functions(&mut self) {
        let global_mem_size = self
            .prog
            .global_vals
            .iter()
            .map(|WordSize(size)| *size as usize)
            .sum();
        self.global_vals = vec![0; global_mem_size];
        // Resolve each external function the program expects, preferring a
        // plain function, then an installed closure, then a plugin-provided
        // specialized closure.
        let ext_entries = self.prog.ext_fun_table.clone();
        ext_entries.iter().enumerate().for_each(|(i, (name, ty))| {
            if let Some((j, _)) = self
                .ext_fun_table
                .iter()
                .enumerate()
                .find(|(_j, (fname, _fn))| name == fname.as_str())
            {
                let _ = self.fn_map.insert(i, ExtFnIdx::Fun(j));
            } else if let Some((j, _)) = self
                .ext_cls_table
                .iter()
                .enumerate()
                .find(|(_j, (fname, _fn))| name == fname.as_str())
            {
                let _ = self.fn_map.insert(i, ExtFnIdx::Cls(j));
            } else if let Some(spec_cls) =
                crate::plugin::try_make_specialized_extcls(name.as_str().to_symbol(), *ty)
            {
                let cls_idx = self.install_extern_cls(spec_cls.name, spec_cls.fun);
                let _ = self.fn_map.insert(i, ExtFnIdx::Cls(cls_idx));
            } else {
                panic!("external function {name} cannot be found");
            }
        });
    }
    pub fn execute_idx(&mut self, idx: usize) -> ReturnCode {
        let (_name, func) = &self.prog.global_fn_table[idx];
        if !func.bytecodes.is_empty() {
            self.global_states
                .resize(func.state_skeleton.total_size() as usize);
            // Slot 0 is reserved; the entry function's frame starts at 1.
            if !self.stack.is_empty() {
                self.stack[0] = 0;
            }
            self.base_pointer = 1;
            self.execute(idx, None)
        } else {
            0
        }
    }
    pub fn execute_entry(&mut self, entry: &str) -> ReturnCode {
        if let Some(idx) = self.prog.get_fun_index(entry) {
            self.execute_idx(idx)
        } else {
            -1
        }
    }

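    /// Walk a user-defined sum value according to its type and retain every
    /// heap reference it transitively contains (boxed payloads and the fields
    /// of tuples/records). `release_usersum_recursive` is the mirror image.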
    fn clone_usersum_recursive(
        value_data: &[RawVal],
        ty: &TypeNodeId,
        heap: &mut heap::HeapStorage,
    ) {
        use crate::types::Type;
        match ty.to_type() {
            Type::Boxed(_) => {
                if !value_data.is_empty() {
                    let heap_idx = Self::get_as::<heap::HeapIdx>(value_data[0]);
                    heap::heap_retain(heap, heap_idx);
                }
            }
            Type::UserSum { variants, .. } => {
                if value_data.is_empty() {
                    return;
                }
                // Word 0 is the variant tag; the payload (if any) follows.
                let tag = value_data[0] as usize;
                if tag >= variants.len() {
                    return;
                }
                if let Some(payload_ty) = variants[tag].1 {
                    Self::clone_usersum_recursive(&value_data[1..], &payload_ty, heap);
                }
            }
            Type::Tuple(elem_types) => {
                let mut offset = 0;
                for elem_ty in elem_types {
                    let elem_size = elem_ty.word_size() as usize;
                    if offset + elem_size <= value_data.len() {
                        Self::clone_usersum_recursive(
                            &value_data[offset..offset + elem_size],
                            &elem_ty,
                            heap,
                        );
                    }
                    offset += elem_size;
                }
            }
            Type::Record(fields) => {
                let mut offset = 0;
                for field in fields {
                    let field_size = field.ty.word_size() as usize;
                    if offset + field_size <= value_data.len() {
                        Self::clone_usersum_recursive(
                            &value_data[offset..offset + field_size],
                            &field.ty,
                            heap,
                        );
                    }
                    offset += field_size;
                }
            }
            Type::TypeAlias(_name) => {
                // A type alias value holds a heap reference here; retaining the
                // box covers its payload.
                if !value_data.is_empty() {
                    let heap_idx = Self::get_as::<heap::HeapIdx>(value_data[0]);
                    heap::heap_retain(heap, heap_idx);
                }
            }
            _ => {}
        }
    }

    fn release_usersum_recursive(
        value_data: &[RawVal],
        ty: &TypeNodeId,
        heap: &mut heap::HeapStorage,
        type_table: &[TypeNodeId],
    ) {
        use crate::types::Type;
        match ty.to_type() {
            Type::Boxed(inner) => {
                if !value_data.is_empty() {
                    let heap_idx = Self::get_as::<heap::HeapIdx>(value_data[0]);
                    // Only recurse into the payload when this is the last
                    // reference; otherwise just drop our count.
                    let should_release_inner = heap
                        .get(heap_idx)
                        .map(|obj| obj.refcount <= 1)
                        .unwrap_or(false);
                    if should_release_inner {
                        let inner_data = heap
                            .get(heap_idx)
                            .map(|obj| obj.data.clone())
                            .unwrap_or_default();
                        Self::release_usersum_recursive(&inner_data, &inner, heap, type_table);
                    }
                    heap::heap_release(heap, heap_idx);
                }
            }
            Type::UserSum { variants, .. } => {
                if value_data.is_empty() {
                    return;
                }
                let tag = value_data[0] as usize;
                if tag >= variants.len() {
                    return;
                }
                if let Some(payload_ty) = variants[tag].1 {
                    Self::release_usersum_recursive(
                        &value_data[1..],
                        &payload_ty,
                        heap,
                        type_table,
                    );
                }
            }
            Type::Tuple(elem_types) => {
                let mut offset = 0;
                for elem_ty in elem_types {
                    let elem_size = elem_ty.word_size() as usize;
                    if offset + elem_size <= value_data.len() {
                        Self::release_usersum_recursive(
                            &value_data[offset..offset + elem_size],
                            &elem_ty,
                            heap,
                            type_table,
                        );
                    }
                    offset += elem_size;
                }
            }
            Type::Record(fields) => {
                let mut offset = 0;
                for field in fields {
                    let field_size = field.ty.word_size() as usize;
                    if offset + field_size <= value_data.len() {
                        Self::release_usersum_recursive(
                            &value_data[offset..offset + field_size],
                            &field.ty,
                            heap,
                            type_table,
                        );
                    }
                    offset += field_size;
                }
            }
            Type::TypeAlias(name) => {
                if !value_data.is_empty() {
                    let heap_idx = Self::get_as::<heap::HeapIdx>(value_data[0]);
                    // Resolve the alias to its sum type so the boxed payload
                    // can be released recursively.
                    let resolved_sum = type_table.iter().find(
                        |tid| matches!(tid.to_type(), Type::UserSum { name: n, .. } if n == name),
                    );
                    let should_release_inner = heap
                        .get(heap_idx)
                        .map(|obj| obj.refcount <= 1)
                        .unwrap_or(false);
                    if should_release_inner && let Some(inner_ty) = resolved_sum {
                        let inner_data = heap
                            .get(heap_idx)
                            .map(|obj| obj.data.clone())
                            .unwrap_or_default();
                        Self::release_usersum_recursive(&inner_data, inner_ty, heap, type_table);
                    }
                    heap::heap_release(heap, heap_idx);
                }
            }
            _ => {}
        }
    }

    /// Run the function at index 0 (the program entry).
    pub fn execute_main(&mut self) -> ReturnCode {
        // Reserve slot 0 for the (absent) caller's function-value slot.
        self.base_pointer += 1;
        self.execute(0, None)
    }
}

#[cfg(test)]
mod test;