use std::collections::HashMap;
use std::sync::Arc;

use crate::interner::{Symbol, TypeNodeId};
use crate::mir::{self, Mir};
use crate::runtime::vm::bytecode::{ConstPos, GlobalPos, Reg};
use crate::runtime::vm::program::WordSize;
use crate::runtime::vm::{self, StateOffset};
use crate::types::{PType, RecordTypeField, Type, TypeSize};
use crate::utils::half_float::HFloat;
use vm::bytecode::Instruction as VmInstruction;

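/// A contiguous run of VM registers: the starting register and its size in words.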
#[derive(Debug, Default)]
struct MemoryRegion(Reg, TypeSize);
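/// Register-allocation state for a single function: maps each MIR value to the
/// region of VM registers it currently occupies. `find` releases the slot of an
/// ordinary value once it is read (arguments and globals are kept), while
/// `find_keep` only looks the value up.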
#[derive(Debug, Default)]
struct VRegister(HashMap<Arc<mir::Value>, MemoryRegion>);

impl VRegister {
    pub fn push_stack(&mut self, v: &Arc<mir::Value>, size: u64) -> Reg {
        self.add_newvalue_range(v, size)
    }
    pub fn add_newvalue(&mut self, v: &Arc<mir::Value>) -> Reg {
        let pos = self
            .0
            .iter()
            .max_by_key(|(_v, MemoryRegion(address, size))| address + size)
            .map(|(_, MemoryRegion(address, size))| address + size)
            .unwrap_or(0);
        self.0.insert(v.clone(), MemoryRegion(pos, 1));
        log::trace!("add {:?}", self.0);
        pos as Reg
    }
    pub fn add_newvalue_range(&mut self, v: &Arc<mir::Value>, size: u64) -> Reg {
        let pos = self
            .0
            .iter()
            .max_by_key(|(_v, MemoryRegion(address, size))| address + size)
            .map(|(_, MemoryRegion(address, size))| address + size)
            .unwrap_or(0);
        self.0.insert(v.clone(), MemoryRegion(pos, size as _));
        log::trace!("add_range {:#?}", self.0);
        pos as Reg
    }
    pub fn find(&mut self, v: &Arc<mir::Value>) -> Option<Reg> {
        log::trace!("find {v}");
        let res = self.0.get(v).map(|r| r.0);
        match (res, v.as_ref()) {
            (Some(_), mir::Value::Argument(_)) | (Some(_), mir::Value::Global(_)) => res,
            (Some(_), _) => {
                self.0.remove(v);
                res
            }
            _ => None,
        }
    }
    pub fn find_keep(&self, v: &Arc<mir::Value>) -> Option<Reg> {
        log::trace!("findkeep {v}");
        self.0.get(v).map(|r| r.0)
    }
}

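/// Stack of `VRegister` scopes, one per function currently being generated.
/// `find_upvalue` searches the enclosing scopes (skipping the innermost one),
/// which is how upvalue positions are resolved.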
#[derive(Debug, Default)]
struct VStack(Vec<VRegister>);
impl VStack {
    fn get_top(&mut self) -> &mut VRegister {
        self.0.last_mut().unwrap()
    }
    fn find_upvalue(&self, v: &Arc<mir::Value>) -> Option<Reg> {
        self.0
            .iter()
            .rev()
            .skip(1)
            .find_map(|vreg| vreg.find_keep(v))
    }
    pub fn push_stack(&mut self, v: &Arc<mir::Value>, size: u64) -> Reg {
        self.get_top().push_stack(v, size)
    }
    pub fn add_newvalue(&mut self, v: &Arc<mir::Value>) -> Reg {
        self.get_top().add_newvalue(v)
    }
    pub fn find(&mut self, v: &Arc<mir::Value>) -> Option<Reg> {
        self.get_top().find(v)
    }
    pub fn find_keep(&mut self, v: &Arc<mir::Value>) -> Option<Reg> {
        self.get_top().find_keep(v)
    }
}

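/// Translates a typed `Mir` into a `vm::Program`. `vregister` tracks register
/// allocation per function, `varray` remembers array-literal values, `fnmap`
/// maps function names to their indices, and `globals` maps global values to
/// slots in the program's global table.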
#[derive(Debug, Default)]
pub struct ByteCodeGenerator {
    vregister: VStack,
    varray: Vec<Arc<mir::Value>>,
    fnmap: HashMap<Symbol, usize>,
    globals: HashMap<Arc<mir::Value>, usize>,
    program: vm::Program,
}

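/// Selects how `mir::Instruction::ReturnFeed` (the `self` feedback value) is
/// lowered: `SimpleState` stores the new value and returns it directly, while
/// `ZeroAtInit` returns the previously stored state (per the variant name,
/// zero-initialized) before overwriting it with the new value.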
#[derive(Default, Debug, Clone, Copy)]
pub enum SelfEvalMode {
    #[default]
    SimpleState,
    ZeroAtInit,
}
#[derive(Default, Debug, Clone, Copy)]
pub struct Config {
    pub self_eval_mode: SelfEvalMode,
}

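/// Reinterprets the bits of an `i64` as a `vm::RawVal` for storage in the constant table.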
fn gen_raw_int(n: &i64) -> vm::RawVal {
    let raw = {
        let iptr = n as *const i64;
        iptr as *const vm::RawVal
    };
    unsafe { *raw }
}

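/// Reinterprets the bits of an `f64` as a `vm::RawVal` for storage in the constant table.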
fn gen_raw_float(n: &f64) -> vm::RawVal {
    let raw = {
        let iptr = n as *const f64;
        iptr as *const vm::RawVal
    };
    unsafe { *raw }
}

impl ByteCodeGenerator {
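    /// Number of VM words occupied by a value of type `ty`: zero for unit, one
    /// for every other primitive, function, reference, and array handle, and
    /// the sum of the element sizes for tuples and records.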
    pub(crate) fn word_size_for_type(ty: TypeNodeId) -> TypeSize {
        match ty.to_type() {
            Type::Primitive(PType::Unit) => 0,
            Type::Primitive(PType::String) => 1,
            Type::Primitive(_) => 1,
            Type::Array(_) => 1,
            Type::Tuple(types) => types.iter().map(|t| Self::word_size_for_type(*t)).sum(),
            Type::Record(types) => types
                .iter()
                .map(|RecordTypeField { ty, .. }| Self::word_size_for_type(*ty))
                .sum(),
            Type::Function { arg: _, ret: _ } => 1,
            Type::Ref(_) => 1,
            Type::Code(_) => todo!(),
            _ => 1,
        }
    }

    fn get_binop(&mut self, v1: Arc<mir::Value>, v2: Arc<mir::Value>) -> (Reg, Reg) {
        let r1 = self.find(&v1);
        let r2 = self.find(&v2);
        (r1, r2)
    }
    fn emit_binop1<F>(
        &mut self,
        inst: F,
        dst: Arc<mir::Value>,
        v1: Arc<mir::Value>,
    ) -> Option<VmInstruction>
    where
        F: FnOnce(Reg, Reg) -> VmInstruction,
    {
        let r1 = self.find(&v1);
        let dst = self.get_destination(dst.clone(), 1);
        let i = inst(dst, r1);
        Some(i)
    }
    fn emit_binop2<F>(
        &mut self,
        inst: F,
        dst: Arc<mir::Value>,
        v1: Arc<mir::Value>,
        v2: Arc<mir::Value>,
    ) -> Option<VmInstruction>
    where
        F: FnOnce(Reg, Reg, Reg) -> VmInstruction,
    {
        let (r1, r2) = self.get_binop(v1, v2);
        let dst = self.get_destination(dst.clone(), 1);
        let i = inst(dst, r1, r2);
        Some(i)
    }
    fn get_destination(&mut self, dst: Arc<mir::Value>, size: TypeSize) -> Reg {
        self.vregister.push_stack(&dst, size as _)
    }
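    /// Returns the global-table position for `gv`, allocating a new entry sized
    /// for `ty` the first time the value is seen.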
    fn get_or_insert_global(&mut self, gv: Arc<mir::Value>, ty: TypeNodeId) -> GlobalPos {
        match self.globals.get(&gv) {
            Some(idx) => *idx as GlobalPos,
            None => {
                let size = Self::word_size_for_type(ty) as usize;
                let idx = if size == 0 && self.program.global_vals.is_empty() {
                    0
                } else {
                    self.program.global_vals.len() + size - 1
                };
                self.globals.insert(gv.clone(), idx);
                let size = WordSize(size as _);
                self.program.global_vals.push(size);
                idx as GlobalPos
            }
        }
    }
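    /// Resolves a MIR value to the register holding it, falling back to the
    /// globals table and then to array literals; panics if the value is unknown.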
    fn find(&mut self, v: &Arc<mir::Value>) -> Reg {
        self.vregister
            .find(v)
            .or_else(|| self.globals.get(v).map(|&v| v as Reg))
            .or_else(|| self.varray.iter().position(|av| v == av).map(|v| v as Reg))
            .unwrap_or_else(|| panic!("value {v} not found"))
    }
    fn find_keep(&mut self, v: &Arc<mir::Value>) -> Reg {
        self.vregister
            .find_keep(v)
            .or_else(|| self.globals.get(v).map(|&v| v as Reg))
            .unwrap_or_else(|| panic!("value {v} not found"))
    }
    fn find_upvalue(&self, upval: &Arc<mir::Value>) -> Reg {
        self.vregister
            .find_upvalue(upval)
            .expect("failed to find upvalue")
    }
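    /// Copies each argument into the registers immediately above the callee's
    /// register and returns the callee register together with the total
    /// argument size in words.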
    fn prepare_function(
        &mut self,
        bytecodes_dst: &mut Vec<VmInstruction>,
        faddress: &Arc<mir::Value>,
        args: &[(Arc<mir::Value>, TypeNodeId)],
    ) -> (Reg, TypeSize) {
        let mut aoffsets = vec![];
        let mut offset = 0;
        for (a, ty) in args.iter() {
            let src = self.find(a);
            let size = Self::word_size_for_type(*ty);
            aoffsets.push((offset, src, size));
            offset += size;
        }
        let faddress = self.find_keep(faddress);
        for (adst, src, size) in aoffsets.iter() {
            let address = *adst + faddress + 1;
            let is_samedst = address == *src;
            if !is_samedst {
                match size {
                    0 => unreachable!(),
                    1 => bytecodes_dst.push(VmInstruction::Move(address as Reg, *src)),
                    _ => bytecodes_dst.push(VmInstruction::MoveRange(address as Reg, *src, *size)),
                }
            }
        }
        (faddress, offset)
    }
    fn get_or_insert_extfunid(&mut self, label: Symbol, ty: TypeNodeId) -> ConstPos {
        self.program
            .ext_fun_table
            .iter()
            .position(|(name, _ty)| name.as_str() == label.as_str())
            .unwrap_or_else(|| {
                self.program.ext_fun_table.push((label.to_string(), ty));
                self.program.ext_fun_table.len() - 1
            }) as ConstPos
    }
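    /// Shared setup for external-function and closure calls: loads the callee
    /// index from the constant table into a fresh register, moves the arguments
    /// right after it, and returns `(callee_register, argument_size, return_size)`.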
    fn prepare_extfun_or_cls(
        &mut self,
        funcproto: &mut vm::FuncProto,
        bytecodes_dst: Option<&mut Vec<VmInstruction>>,
        dst: Arc<mir::Value>,
        args: &[(Arc<mir::Value>, TypeNodeId)],
        idx: ConstPos,
        ty: TypeNodeId,
    ) -> (Reg, Reg, TypeSize) {
        let fi = funcproto.add_new_constant(idx as u64);
        let rsize = Self::word_size_for_type(ty);
        let bytecodes_dst = bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());
        let f = self.vregister.push_stack(&dst, rsize as _);
        bytecodes_dst.push(VmInstruction::MoveConst(f, fi as ConstPos));
        let (dst, argsize) = self.prepare_function(bytecodes_dst, &dst, args);
        (dst, argsize, rsize)
    }
    fn prepare_extfun(
        &mut self,
        funcproto: &mut vm::FuncProto,
        bytecodes_dst: Option<&mut Vec<VmInstruction>>,
        dst: Arc<mir::Value>,
        args: &[(Arc<mir::Value>, TypeNodeId)],
        label: Symbol,
        ty: TypeNodeId,
    ) -> (Reg, Reg, TypeSize) {
        let idx = self.get_or_insert_extfunid(label, ty);
        self.prepare_extfun_or_cls(funcproto, bytecodes_dst, dst, args, idx, ty)
    }
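    /// Lowers a single MIR instruction. Sequences of several VM instructions are
    /// pushed into `bytecodes_dst` (or straight into `funcproto.bytecodes` when
    /// no destination buffer is given); the returned instruction, if any, is
    /// appended by the caller.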
    fn emit_instruction(
        &mut self,
        funcproto: &mut vm::FuncProto,
        bytecodes_dst: Option<&mut Vec<VmInstruction>>,
        mirfunc: mir::Function,
        dst: Arc<mir::Value>,
        mirinst: mir::Instruction,
        config: Config,
    ) -> Option<VmInstruction> {
        match mirinst {
            mir::Instruction::Uinteger(u) => {
                let pos = funcproto.add_new_constant(u);
                Some(VmInstruction::MoveConst(
                    self.get_destination(dst, 1),
                    pos as ConstPos,
                ))
            }
            mir::Instruction::Integer(i) => {
                let pos = funcproto.add_new_constant(gen_raw_int(&i));
                Some(VmInstruction::MoveConst(
                    self.get_destination(dst, 1),
                    pos as ConstPos,
                ))
            }
            mir::Instruction::Float(n) => {
                let dst = self.get_destination(dst, 1);
                if let Ok(half_f) = HFloat::try_from(n) {
                    Some(VmInstruction::MoveImmF(dst, half_f))
                } else {
                    let pos = funcproto.add_new_constant(gen_raw_float(&n));
                    Some(VmInstruction::MoveConst(dst, pos as ConstPos))
                }
            }
            mir::Instruction::String(s) => {
                let pos = self.program.add_new_str(s.to_string());
                let cpos = funcproto.add_new_constant(pos as u64);
                Some(VmInstruction::MoveConst(
                    self.get_destination(dst, 1),
                    cpos as ConstPos,
                ))
            }
            mir::Instruction::Alloc(t) => {
                let size = Self::word_size_for_type(t) as u64;
                let _ = self.vregister.push_stack(&dst, size);
                None
            }
            mir::Instruction::Load(ptr, ty) => {
                let d = self.get_destination(dst, Self::word_size_for_type(ty));
                let s = self.find_keep(&ptr);
                let size = Self::word_size_for_type(ty);
                match (d, s, size) {
                    (d, s, 1) if d != s => Some(VmInstruction::Move(d, s)),
                    (d, s, size) if d != s => Some(VmInstruction::MoveRange(d, s, size)),
                    _ => None,
                }
            }
            mir::Instruction::Store(dst, src, ty) => {
                let s = self.find(&src);
                let d = self.find_keep(&dst);
                let size = Self::word_size_for_type(ty);
                match (d, s, size) {
                    (d, s, 1) if d != s => Some(VmInstruction::Move(d, s)),
                    (d, s, size) if d != s => Some(VmInstruction::MoveRange(d, s, size)),
                    _ => None,
                }
            }
            mir::Instruction::GetGlobal(v, ty) => {
                let dst = self.get_destination(dst, Self::word_size_for_type(ty));
                let idx = self.get_or_insert_global(v.clone(), ty);
                Some(VmInstruction::GetGlobal(
                    dst,
                    idx,
                    Self::word_size_for_type(ty),
                ))
            }
            mir::Instruction::SetGlobal(v, src, ty) => {
                let idx = self.get_or_insert_global(v.clone(), ty);
                let s = self.find(&src);
                Some(VmInstruction::SetGlobal(
                    idx,
                    s,
                    Self::word_size_for_type(ty),
                ))
            }
            mir::Instruction::GetElement {
                value,
                ty,
                tuple_offset,
            } => {
                let ptr = self.find_keep(&value) as usize;
                let ty = ty.to_type();
                let tvec = ty.get_as_tuple().unwrap();
                let tsize = Self::word_size_for_type(tvec[tuple_offset as usize]);
                let t_offset: u64 = tvec[0..(tuple_offset as _)]
                    .iter()
                    .map(|t| Self::word_size_for_type(*t) as u64)
                    .sum();
                let address = (ptr + t_offset as usize) as Reg;
                self.vregister
                    .get_top()
                    .0
                    .insert(dst, MemoryRegion(address, tsize));
                None
            }
            mir::Instruction::Call(v, args, r_ty) => {
                let rsize = Self::word_size_for_type(r_ty);
                match v.as_ref() {
                    mir::Value::Register(_address) => {
                        let bytecodes_dst =
                            bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());
                        let d = self.get_destination(dst.clone(), rsize);
                        let s = self.find(&v);
                        bytecodes_dst.push(VmInstruction::Move(d, s));
                        let (fadd, argsize) = self.prepare_function(bytecodes_dst, &dst, &args);
                        Some(VmInstruction::Call(fadd, argsize, rsize))
                    }
                    mir::Value::Function(_idx) => {
                        unreachable!();
                    }
                    mir::Value::ExtFunction(label, _ty) => {
                        let (dst, argsize, nret) =
                            self.prepare_extfun(funcproto, bytecodes_dst, dst, &args, *label, r_ty);
                        Some(VmInstruction::CallExtFun(dst, argsize, nret))
                    }
                    _ => unreachable!(),
                }
            }
            mir::Instruction::CallCls(f, args, r_ty) => {
                let rsize = Self::word_size_for_type(r_ty);
                match f.as_ref() {
                    mir::Value::Register(_address) => {
                        let bytecodes_dst =
                            bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());

                        let (fadd, argsize) = self.prepare_function(bytecodes_dst, &f, &args);
                        let s = self.find(&f);
                        let d = self.get_destination(dst.clone(), rsize);
                        bytecodes_dst.push(VmInstruction::CallCls(fadd, argsize, rsize));
                        match rsize {
                            0 => None,
                            1 => Some(VmInstruction::Move(d, s)),
                            n => Some(VmInstruction::MoveRange(d, s, n)),
                        }
                    }
                    mir::Value::Function(_idx) => {
                        unreachable!();
                    }
                    mir::Value::ExtFunction(label, _ty) => {
                        let (dst, argsize, nret) =
                            self.prepare_extfun(funcproto, bytecodes_dst, dst, &args, *label, r_ty);
                        Some(VmInstruction::CallExtFun(dst, argsize, nret))
                    }
                    _ => unreachable!(),
                }
            }
            mir::Instruction::Closure(idxcell) => {
                let idx = self.find(&idxcell);
                let dst = self.get_destination(dst, 1);
                Some(VmInstruction::Closure(dst, idx))
            }
            mir::Instruction::CloseUpValues(src, ty) => {
                let flattened = ty.flatten();
                let base = self.vregister.find_keep(&src).unwrap();

                let mut offset = 0;
                let bytecodes_dst = bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());
                for elem_t in flattened {
                    let tsize = Self::word_size_for_type(elem_t);
                    if elem_t.to_type().is_function() {
                        bytecodes_dst.push(VmInstruction::Close(base + offset))
                    }
                    offset += tsize;
                }
                None
            }
            mir::Instruction::GetUpValue(i, ty) => {
                let upval = &mirfunc.upindexes[i as usize];
                let v = self.find_upvalue(upval);
                let size: u8 = Self::word_size_for_type(ty);
                let ouv = mir::OpenUpValue {
                    pos: v as usize,
                    size,
                    is_closure: ty.to_type().is_function(),
                };
                if let Some(ui) = funcproto.upindexes.get_mut(i as usize) {
                    *ui = ouv;
                } else {
                    funcproto.upindexes.push(ouv);
                }
                let d = self.vregister.get_top().add_newvalue_range(&dst, size as _);
                Some(VmInstruction::GetUpValue(
                    d,
                    i as Reg,
                    Self::word_size_for_type(ty),
                ))
            }
            mir::Instruction::SetUpValue(dst, src, ty) => {
                let upval = &mirfunc.upindexes[dst as usize];
                let v = self.find_upvalue(upval);
                let size: u8 = Self::word_size_for_type(ty);
                let ouv = mir::OpenUpValue {
                    pos: v as usize,
                    size,
                    is_closure: ty.to_type().is_function(),
                };
                if let Some(ui) = funcproto.upindexes.get_mut(dst as usize) {
                    *ui = ouv;
                } else {
                    funcproto.upindexes.push(ouv);
                }
                let s = self.find(&src);
                Some(VmInstruction::SetUpValue(
                    dst as Reg,
                    s,
                    Self::word_size_for_type(ty),
                ))
            }
            mir::Instruction::PushStateOffset(v) => {
                let state_size = StateOffset::try_from(v).expect("state offset is too large");
                Some(VmInstruction::PushStatePos(state_size))
            }
            mir::Instruction::PopStateOffset(v) => {
                let state_size = StateOffset::try_from(v).expect("state offset is too large");
                Some(VmInstruction::PopStatePos(state_size))
            }
            mir::Instruction::GetState(ty) => {
                let size = Self::word_size_for_type(ty);
                let d = self.vregister.push_stack(&dst, size as _);
                Some(VmInstruction::GetState(d, size))
            }

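            // A conditional branch is laid out as: JmpIfNeg over the then-block,
            // the then-block ending with a Jmp over the else-block, then the
            // else-block. Both branches finish by moving their result into the
            // register allocated for the phi destination.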
            mir::Instruction::JmpIf(cond, tbb, ebb, pbb) => {
                let c = self.find(&cond);

                let mut bytecodes_dst = bytecodes_dst;

                let mut then_bytecodes: Vec<VmInstruction> = vec![];
                let mut else_bytecodes: Vec<VmInstruction> = vec![];
                mirfunc.body[tbb as usize]
                    .0
                    .iter()
                    .for_each(|(dst, t_inst)| {
                        let res = self.emit_instruction(
                            funcproto,
                            Some(&mut then_bytecodes),
                            mirfunc.clone(),
                            dst.clone(),
                            t_inst.clone(),
                            config,
                        );
                        if let Some(inst) = res {
                            then_bytecodes.push(inst);
                        }
                    });

                mirfunc.body[ebb as usize]
                    .0
                    .iter()
                    .for_each(|(dst, t_inst)| {
                        if let Some(inst) = self.emit_instruction(
                            funcproto,
                            Some(&mut else_bytecodes),
                            mirfunc.clone(),
                            dst.clone(),
                            t_inst.clone(),
                            config,
                        ) {
                            else_bytecodes.push(inst);
                        };
                    });
                let phiblock = &mirfunc.body[pbb as usize].0;
                let (phidst, pinst) = phiblock.first().unwrap();
                let phi = self.vregister.add_newvalue(phidst);
                if let mir::Instruction::Phi(t, e) = pinst {
                    let t = self.find(t);
                    then_bytecodes.push(VmInstruction::Move(phi, t));
                    let e = self.find(e);
                    else_bytecodes.push(VmInstruction::Move(phi, e));
                } else {
                    unreachable!("Unexpected inst: {pinst:?}");
                }
                let else_offset = then_bytecodes.len() + 2;
                let inst = VmInstruction::JmpIfNeg(c, else_offset as _);
                match &mut bytecodes_dst {
                    Some(dst) => dst.push(inst),
                    None => funcproto.bytecodes.push(inst),
                }

                let ret_offset = else_bytecodes.len() + 1;

                then_bytecodes.push(VmInstruction::Jmp(ret_offset as i16));

                for mut b in [then_bytecodes, else_bytecodes] {
                    match &mut bytecodes_dst {
                        Some(dst) => dst.append(&mut b),
                        None => funcproto.bytecodes.append(&mut b),
                    }
                }

                phiblock.iter().skip(1).for_each(|(dst, p_inst)| {
                    if let Some(inst) = self.emit_instruction(
                        funcproto,
                        None,
                        mirfunc.clone(),
                        dst.clone(),
                        p_inst.clone(),
                        config,
                    ) {
                        match &mut bytecodes_dst {
                            Some(dst) => dst.push(inst),
                            None => funcproto.bytecodes.push(inst),
                        }
                    };
                });
                None
            }
            mir::Instruction::Jmp(offset) => Some(VmInstruction::Jmp(offset)),
            mir::Instruction::Phi(_, _) => {
                unreachable!()
            }
            mir::Instruction::Return(v, rty) => {
                let nret = Self::word_size_for_type(rty);
                let inst = match v.as_ref() {
                    mir::Value::None => VmInstruction::Return0,
                    _ => VmInstruction::Return(self.find(&v), nret),
                };
                Some(inst)
            }
            mir::Instruction::ReturnFeed(new, rty) => {
                let size = Self::word_size_for_type(rty);
                let bytecodes_dst = bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());
                match config.self_eval_mode {
                    SelfEvalMode::SimpleState => {
                        let new = self.find(&new);
                        bytecodes_dst.push(VmInstruction::SetState(new, size));
                        Some(VmInstruction::Return(new, size))
                    }
                    SelfEvalMode::ZeroAtInit => {
                        let old = self.vregister.add_newvalue(&dst);
                        bytecodes_dst.push(VmInstruction::GetState(old, size));
                        let new = self.find(&new);
                        bytecodes_dst.push(VmInstruction::SetState(new, size));
                        Some(VmInstruction::Return(old, size))
                    }
                }
            }
            mir::Instruction::Delay(max, src, time) => {
                let s = self.find(&src);
                let t = self.find(&time);

                let dst = self.vregister.add_newvalue(&dst);
                funcproto.delay_sizes.push(max);
                Some(VmInstruction::Delay(dst, s, t))
            }
            mir::Instruction::Mem(src) => {
                let s = self.find(&src);
                let dst = self.vregister.add_newvalue(&dst);
                Some(VmInstruction::Mem(dst, s))
            }
            mir::Instruction::NegF(v1) => self.emit_binop1(VmInstruction::NegF, dst, v1),
            mir::Instruction::AddF(v1, v2) => self.emit_binop2(VmInstruction::AddF, dst, v1, v2),
            mir::Instruction::SubF(v1, v2) => self.emit_binop2(VmInstruction::SubF, dst, v1, v2),
            mir::Instruction::MulF(v1, v2) => self.emit_binop2(VmInstruction::MulF, dst, v1, v2),
            mir::Instruction::DivF(v1, v2) => self.emit_binop2(VmInstruction::DivF, dst, v1, v2),
            mir::Instruction::ModF(v1, v2) => self.emit_binop2(VmInstruction::ModF, dst, v1, v2),
            mir::Instruction::PowF(v1, v2) => self.emit_binop2(VmInstruction::PowF, dst, v1, v2),
            mir::Instruction::LogF(v1) => self.emit_binop1(VmInstruction::LogF, dst, v1),

            mir::Instruction::SinF(v1) => self.emit_binop1(VmInstruction::SinF, dst, v1),
            mir::Instruction::CosF(v1) => self.emit_binop1(VmInstruction::CosF, dst, v1),
            mir::Instruction::AbsF(v1) => self.emit_binop1(VmInstruction::AbsF, dst, v1),
            mir::Instruction::SqrtF(v1) => self.emit_binop1(VmInstruction::SqrtF, dst, v1),
            mir::Instruction::AddI(v1, v2) => self.emit_binop2(VmInstruction::AddI, dst, v1, v2),
            mir::Instruction::SubI(v1, v2) => self.emit_binop2(VmInstruction::SubI, dst, v1, v2),
            mir::Instruction::MulI(v1, v2) => self.emit_binop2(VmInstruction::MulI, dst, v1, v2),
            mir::Instruction::DivI(v1, v2) => self.emit_binop2(VmInstruction::DivI, dst, v1, v2),
            mir::Instruction::ModI(v1, v2) => self.emit_binop2(VmInstruction::ModI, dst, v1, v2),
            mir::Instruction::Gt(v1, v2) => self.emit_binop2(VmInstruction::Gt, dst, v1, v2),
            mir::Instruction::Ge(v1, v2) => self.emit_binop2(VmInstruction::Ge, dst, v1, v2),
            mir::Instruction::Lt(v1, v2) => self.emit_binop2(VmInstruction::Lt, dst, v1, v2),
            mir::Instruction::Le(v1, v2) => self.emit_binop2(VmInstruction::Le, dst, v1, v2),
            mir::Instruction::Eq(v1, v2) => self.emit_binop2(VmInstruction::Eq, dst, v1, v2),
            mir::Instruction::Ne(v1, v2) => self.emit_binop2(VmInstruction::Ne, dst, v1, v2),
            mir::Instruction::And(v1, v2) => self.emit_binop2(VmInstruction::And, dst, v1, v2),
            mir::Instruction::Or(v1, v2) => self.emit_binop2(VmInstruction::Or, dst, v1, v2),

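            // An array literal is lowered to AllocArray followed by one
            // SetArrayElem per element, using a scratch register holding the
            // element index as an immediate float. The value is also recorded in
            // `varray` so later lookups can resolve it by position.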
            mir::Instruction::Array(values, ty) => {
                let elem_ty_size = Self::word_size_for_type(ty);
                let size = values.len();
                let bytecodes_dst = bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());
                let dst_reg = self.get_destination(dst.clone(), 1 as _);
                bytecodes_dst.push(VmInstruction::AllocArray(dst_reg, size as _, elem_ty_size));
                for (i, val) in values.iter().enumerate() {
                    let tmp_idx_ref = Arc::new(mir::Value::None);
                    let idx = self.vregister.add_newvalue(&tmp_idx_ref);
                    bytecodes_dst.push(VmInstruction::MoveImmF(
                        idx,
                        HFloat::try_from(i as f64).unwrap(),
                    ));
                    let idx = self.find(&tmp_idx_ref);
                    let src = self.find(val);
                    bytecodes_dst.push(VmInstruction::SetArrayElem(dst_reg, idx, src));
                }
                self.varray.push(dst);
                None
            }

            mir::Instruction::GetArrayElem(array, index, elem_ty) => {
                let array_reg = self.find(&array);
                let index_reg = self.find(&index);
                let dst_reg = self.get_destination(dst, Self::word_size_for_type(elem_ty));
                let bytecodes_dst = bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());
                bytecodes_dst.push(VmInstruction::GetArrayElem(dst_reg, array_reg, index_reg));
                None
            }

            mir::Instruction::SetArrayElem(_array, _index, _value, _elem_ty) => {
                todo!("SetArrayElem is not used in the current implementation");
            }

            _ => {
                unimplemented!()
            }
        }
    }
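    /// Builds the `vm::FuncProto` for one MIR function: sets up a fresh register
    /// scope, reserves registers for the arguments, and emits the instructions
    /// of the entry block (further blocks are reached through `JmpIf`).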
    fn generate_funcproto(
        &mut self,
        mirfunc: &mir::Function,
        fidx: usize,
        config: Config,
    ) -> (String, vm::FuncProto) {
        log::trace!("generating function {}", mirfunc.label.0);
        let mut func = vm::FuncProto {
            nparam: mirfunc.args.len(),
            nret: Self::word_size_for_type(
                *mirfunc
                    .return_type
                    .get()
                    .expect("return type not inferred correctly"),
            ) as _,
            state_skeleton: mirfunc.state_skeleton.clone(),
            ..Default::default()
        };
        self.vregister.0.push(VRegister::default());
        for (i, a) in mirfunc.args.iter().enumerate() {
            let size = Self::word_size_for_type(a.1);
            self.vregister
                .push_stack(&Arc::new(mir::Value::Argument(i)), size as _);
        }

        let block = &mirfunc.body[0];
        block.0.iter().for_each(|(dst, inst)| {
            let newinst = self.emit_instruction(
                &mut func,
                None,
                mirfunc.clone(),
                dst.clone(),
                inst.clone(),
                config,
            );
            if let Some(i) = newinst {
                func.bytecodes.push(i);
            }
        });
        (mirfunc.label.to_string(), func)
    }
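    /// Generates the whole `vm::Program` from the MIR: one function prototype
    /// per MIR function, plus the source file path, the I/O channel information
    /// of `dsp`, and the index of the `dsp` entry point.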
    pub fn generate(&mut self, mir: Mir, config: Config) -> vm::Program {
        self.program.global_fn_table = mir
            .functions
            .iter()
            .enumerate()
            .map(|(i, func)| {
                self.fnmap.insert(func.label, i);
                self.generate_funcproto(func, i, config)
            })
            .collect();
        self.program.file_path = mir.file_path.clone();
        self.program.iochannels = mir.get_dsp_iochannels();
        log::debug!("iochannels: {:?}", self.program.iochannels.unwrap());
        let _io = self.program.iochannels.unwrap();
        self.program.dsp_index = self.program.get_fun_index("dsp");
        self.program.clone()
    }
}
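/// Peephole pass over each function's bytecode that removes or fuses redundant
/// register moves: back-and-forth move pairs are dropped, and a Move or
/// MoveConst that is immediately forwarded by another Move is rewritten to
/// target the final destination. (Not currently wired into `optimize`.)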
fn remove_redundunt_mov(program: vm::Program) -> vm::Program {
    let mut res = program.clone();
    for (_, f) in res.global_fn_table.iter_mut() {
        let mut remove_idx = std::collections::HashSet::<usize>::new();
        let mut reduce_idx = std::collections::HashMap::<usize, VmInstruction>::new();

        let mut removeconst_idx = std::collections::HashMap::<usize, VmInstruction>::new();

        for (i, pair) in f.bytecodes.windows(2).enumerate() {
            match *pair {
                [
                    VmInstruction::Move(dst, src),
                    VmInstruction::Move(dst2, src2),
                ] if dst == src2 && src == dst2 => {
                    remove_idx.insert(i);
                    remove_idx.insert(i + 1);
                }
                [
                    VmInstruction::Move(dst, src),
                    VmInstruction::Move(dst2, src2),
                ] if dst == src2 => {
                    reduce_idx.insert(i, VmInstruction::Move(dst2, src));
                    remove_idx.insert(i + 1);
                }
                [
                    VmInstruction::MoveConst(dst, src),
                    VmInstruction::Move(dst2, src2),
                ] if dst == src2 => {
                    removeconst_idx.insert(i, VmInstruction::MoveConst(dst2, src));
                    remove_idx.insert(i + 1);
                }
                _ => {}
            }
        }
        let mut res_bytecodes = vec![];
        for (i, inst) in f.bytecodes.iter().enumerate() {
            if remove_idx.contains(&i) {
            } else if let Some(inst) = removeconst_idx.get(&i) {
                res_bytecodes.push(*inst);
            } else if let Some(inst) = reduce_idx.get(&i) {
                res_bytecodes.push(*inst);
            } else {
                res_bytecodes.push(*inst);
            }
        }
        f.bytecodes = res_bytecodes;
    }
    res
}
fn optimize(program: vm::Program) -> vm::Program {
    program
}
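/// Entry point of this module: lowers the given MIR into a VM program using the
/// supplied `Config`.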
pub fn gen_bytecode(mir: mir::Mir, config: Config) -> vm::Program {
    let mut generator = ByteCodeGenerator::default();
    let program = generator.generate(mir, config);
    optimize(program)
}

#[cfg(test)]
mod test {

    use crate::compiler::IoChannelInfo;
    use crate::interner::ToSymbol;
    #[test]
    fn build() {
        use super::*;
        use crate::numeric;
        use crate::types::PType;
        use crate::types::Type;
        extern crate colog;
        let mut src = mir::Mir::default();
        let arg = mir::Argument("hoge".to_symbol(), numeric!());
        let argv = Arc::new(mir::Value::Argument(0));
        let mut func = mir::Function::new(
            0,
            "dsp".to_symbol(),
            std::slice::from_ref(&arg),
            vec![],
            None,
        );
        func.return_type.get_or_init(|| numeric!());
        let mut block = mir::Block::default();
        let resint = Arc::new(mir::Value::Register(1));
        block.0.push((resint.clone(), mir::Instruction::Integer(1)));
        let res = Arc::new(mir::Value::Register(2));
        block
            .0
            .push((res.clone(), mir::Instruction::AddF(argv, resint)));
        block.0.push((
            Arc::new(mir::Value::None),
            mir::Instruction::Return(res.clone(), numeric!()),
        ));
        func.body = vec![block];
        src.functions.push(func);
        let mut generator = ByteCodeGenerator::default();
        let config = Config::default();
        let res = generator.generate(src, config);

        let mut answer = vm::Program {
            iochannels: Some(IoChannelInfo {
                input: 1,
                output: 1,
            }),
            ..Default::default()
        };

        let mut main = vm::FuncProto::new(1, 1);
        main.constants.push(1);
        main.bytecodes = vec![
            VmInstruction::MoveConst(1, 0),
            VmInstruction::AddF(1, 0, 1),
            VmInstruction::Return(1, 1),
        ];
        answer.global_fn_table.push(("dsp".to_string(), main));
        answer.dsp_index = Some(0);
        assert_eq!(res, answer);
    }
}