use std::collections::HashMap;
use std::sync::Arc;

use crate::interner::{Symbol, TypeNodeId};
use crate::mir::{self, Mir, StateSize};
use crate::runtime::vm::bytecode::{ConstPos, GlobalPos, Reg};
use crate::runtime::vm::{self, StateOffset};
use crate::types::{PType, Type, TypeSize};
use crate::utils::half_float::HFloat;
use vm::bytecode::Instruction as VmInstruction;

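/// A contiguous span of virtual registers: the starting register and its size
/// in machine words.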
#[derive(Debug, Default)]
struct MemoryRegion(Reg, TypeSize);
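/// Maps MIR values to the register regions allocated for them within a single
/// function frame. New values are placed just past the highest region
/// allocated so far.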
#[derive(Debug, Default)]
struct VRegister(HashMap<Arc<mir::Value>, MemoryRegion>);

impl VRegister {
    pub fn push_stack(&mut self, v: &Arc<mir::Value>, size: u64) -> Reg {
        self.add_newvalue_range(v, size)
    }
    pub fn add_newvalue(&mut self, v: &Arc<mir::Value>) -> Reg {
        let pos = self
            .0
            .iter()
            .max_by_key(|(_v, MemoryRegion(address, size))| address + size)
            .map(|(_, MemoryRegion(address, size))| address + size)
            .unwrap_or(0);
        self.0.insert(v.clone(), MemoryRegion(pos, 1));
        log::trace!("add {:?}", self.0);
        pos as Reg
    }
    pub fn add_newvalue_range(&mut self, v: &Arc<mir::Value>, size: u64) -> Reg {
        let pos = self
            .0
            .iter()
            .max_by_key(|(_v, MemoryRegion(address, size))| address + size)
            .map(|(_, MemoryRegion(address, size))| address + size)
            .unwrap_or(0);
        self.0.insert(v.clone(), MemoryRegion(pos, size as _));
        log::trace!("add_range {:#?}", self.0);
        pos as Reg
    }
    pub fn find(&mut self, v: &Arc<mir::Value>) -> Option<Reg> {
        log::trace!("find {v}");
        let res = self.0.get(v).map(|r| r.0);
        match (res, v.as_ref()) {
            (Some(_), mir::Value::Argument(_, _)) | (Some(_), mir::Value::Global(_)) => res,
            (Some(_), _) => {
                self.0.remove(v);
                res
            }
            _ => None,
        }
    }
    pub fn find_keep(&self, v: &Arc<mir::Value>) -> Option<Reg> {
        log::trace!("findkeep {v}");
        self.0.get(v).map(|r| r.0)
    }
}

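/// A stack of per-function register maps. The top entry belongs to the
/// function currently being generated; upvalue lookups walk the enclosing
/// entries below it.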
#[derive(Debug, Default)]
struct VStack(Vec<VRegister>);
impl VStack {
    fn get_top(&mut self) -> &mut VRegister {
        self.0.last_mut().unwrap()
    }
    fn find_upvalue(&self, v: &Arc<mir::Value>) -> Option<Reg> {
        self.0
            .iter()
            .rev()
            .skip(1)
            .find_map(|vreg| vreg.find_keep(v))
    }
    pub fn push_stack(&mut self, v: &Arc<mir::Value>, size: u64) -> Reg {
        self.get_top().push_stack(v, size)
    }
    pub fn add_newvalue(&mut self, v: &Arc<mir::Value>) -> Reg {
        self.get_top().add_newvalue(v)
    }
    pub fn find(&mut self, v: &Arc<mir::Value>) -> Option<Reg> {
        self.get_top().find(v)
    }
    pub fn find_keep(&mut self, v: &Arc<mir::Value>) -> Option<Reg> {
        self.get_top().find_keep(v)
    }
}

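/// Translates MIR into VM bytecode, tracking virtual-register allocation,
/// global values, array values, and the function table of the resulting
/// `vm::Program`.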
#[derive(Debug, Default)]
pub struct ByteCodeGenerator {
    vregister: VStack,
    varray: Vec<Arc<mir::Value>>,
    fnmap: HashMap<Symbol, usize>,
    globals: Vec<Arc<mir::Value>>,
    program: vm::Program,
}

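/// Selects how `ReturnFeed` (the `self` feedback value) is lowered:
/// `SimpleState` stores the new value and returns it directly, while
/// `ZeroAtInit` returns the previously stored state before updating it, so
/// the first evaluation presumably yields the initial state, per the
/// variant name.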
#[derive(Default, Debug, Clone, Copy)]
pub enum SelfEvalMode {
    #[default]
    SimpleState,
    ZeroAtInit,
}
#[derive(Default, Debug, Clone, Copy)]
pub struct Config {
    pub self_eval_mode: SelfEvalMode,
}

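/// Reinterprets the bits of an `i64` as a `vm::RawVal` without numeric
/// conversion.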
fn gen_raw_int(n: &i64) -> vm::RawVal {
    let raw = {
        let iptr = n as *const i64;
        iptr as *const vm::RawVal
    };
    unsafe { *raw }
}

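/// Reinterprets the bits of an `f64` as a `vm::RawVal` without numeric
/// conversion.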
fn gen_raw_float(n: &f64) -> vm::RawVal {
    let raw = {
        let iptr = n as *const f64;
        iptr as *const vm::RawVal
    };
    unsafe { *raw }
}

impl ByteCodeGenerator {
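    /// Returns how many machine words a value of the given type occupies on
    /// the VM stack. Aggregates are the sum of their fields; functions,
    /// references, and arrays take a single word.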
    pub(crate) fn word_size_for_type(ty: TypeNodeId) -> TypeSize {
        match ty.to_type() {
            Type::Primitive(PType::Unit) => 0,
            Type::Primitive(PType::String) => 1,
            Type::Primitive(_) => 1,
            Type::Array(_) => 1,
            Type::Tuple(types) => types.iter().map(|t| Self::word_size_for_type(*t)).sum(),
            Type::Struct(types) => types
                .iter()
                .map(|(_s, t)| Self::word_size_for_type(*t))
                .sum(),
            Type::Function(_, _, _) => 1,
            Type::Ref(_) => 1,
            Type::Code(_) => todo!(),
            _ => 1,
        }
    }
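    /// Total state size in words for a slice of `StateSize` entries: each
    /// entry's element count times the word size of its element type.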
    pub fn calc_state_size<T: AsRef<[StateSize]>>(state_sizes: T) -> u64 {
        state_sizes
            .as_ref()
            .iter()
            .map(|x| x.size * Self::word_size_for_type(x.ty) as u64)
            .sum()
    }
    fn get_binop(&mut self, v1: Arc<mir::Value>, v2: Arc<mir::Value>) -> (Reg, Reg) {
        let r1 = self.find(&v1);
        let r2 = self.find(&v2);
        (r1, r2)
    }
    fn emit_binop1<F>(
        &mut self,
        inst: F,
        dst: Arc<mir::Value>,
        v1: Arc<mir::Value>,
    ) -> Option<VmInstruction>
    where
        F: FnOnce(Reg, Reg) -> VmInstruction,
    {
        let r1 = self.find(&v1);
        let dst = self.get_destination(dst.clone(), 1);
        let i = inst(dst, r1);
        Some(i)
    }
    fn emit_binop2<F>(
        &mut self,
        inst: F,
        dst: Arc<mir::Value>,
        v1: Arc<mir::Value>,
        v2: Arc<mir::Value>,
    ) -> Option<VmInstruction>
    where
        F: FnOnce(Reg, Reg, Reg) -> VmInstruction,
    {
        let (r1, r2) = self.get_binop(v1, v2);
        let dst = self.get_destination(dst.clone(), 1);
        let i = inst(dst, r1, r2);
        Some(i)
    }
    fn get_destination(&mut self, dst: Arc<mir::Value>, size: TypeSize) -> Reg {
        self.vregister.push_stack(&dst, size as _)
    }
    fn get_or_insert_global(&mut self, gv: Arc<mir::Value>) -> GlobalPos {
        match self.globals.iter().position(|v| gv == *v) {
            Some(idx) => idx as GlobalPos,
            None => {
                self.globals.push(gv.clone());
                let idx = (self.globals.len() - 1) as GlobalPos;
                self.program.global_vals.push(idx as u64);
                idx
            }
        }
    }
    fn find(&mut self, v: &Arc<mir::Value>) -> Reg {
        self.vregister
            .find(v)
            .or_else(|| self.globals.iter().position(|gv| v == gv).map(|v| v as Reg))
            .or_else(|| self.varray.iter().position(|av| v == av).map(|v| v as Reg))
            .unwrap_or_else(|| panic!("value {v} not found"))
    }
    fn find_keep(&mut self, v: &Arc<mir::Value>) -> Reg {
        self.vregister
            .find_keep(v)
            .or_else(|| self.globals.iter().position(|gv| v == gv).map(|v| v as Reg))
            .unwrap_or_else(|| panic!("value {v} not found"))
    }
    fn find_upvalue(&self, upval: &Arc<mir::Value>) -> Reg {
        self.vregister
            .find_upvalue(upval)
            .expect("failed to find upvalue")
    }
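    /// Emits the moves that place the call arguments into the registers just
    /// above the callee register, and returns the callee register together
    /// with the total argument size in words.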
    fn prepare_function(
        &mut self,
        bytecodes_dst: &mut Vec<VmInstruction>,
        faddress: &Arc<mir::Value>,
        args: &[(Arc<mir::Value>, TypeNodeId)],
    ) -> (Reg, TypeSize) {
        let mut aoffsets = vec![];
        let mut offset = 0;
        for (a, ty) in args.iter() {
            let src = self.find(a);
            let size = Self::word_size_for_type(*ty);
            aoffsets.push((offset, src, size));
            offset += size;
        }
        let faddress = self.find_keep(faddress);
        for (adst, src, size) in aoffsets.iter() {
            let address = *adst + faddress + 1;
            let is_samedst = address == *src;
            if !is_samedst {
                match size {
                    0 => unreachable!(),
                    1 => bytecodes_dst.push(VmInstruction::Move(address as Reg, *src)),
                    _ => bytecodes_dst.push(VmInstruction::MoveRange(address as Reg, *src, *size)),
                }
            }
        }
        (faddress, offset)
    }
    fn get_or_insert_extfunid(&mut self, label: Symbol, ty: TypeNodeId) -> ConstPos {
        self.program
            .ext_fun_table
            .iter()
            .position(|(name, _ty)| *name == label)
            .unwrap_or_else(|| {
                self.program.ext_fun_table.push((label, ty));
                self.program.ext_fun_table.len() - 1
            }) as ConstPos
    }
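    /// Loads the external-function (or closure) index as a constant into a
    /// freshly allocated register, lays out the call arguments after it, and
    /// returns the callee register, the argument size, and the return size.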
    fn prepare_extfun_or_cls(
        &mut self,
        funcproto: &mut vm::FuncProto,
        bytecodes_dst: Option<&mut Vec<VmInstruction>>,
        dst: Arc<mir::Value>,
        args: &[(Arc<mir::Value>, TypeNodeId)],
        idx: ConstPos,
        ty: TypeNodeId,
    ) -> (Reg, Reg, TypeSize) {
        let fi = funcproto.add_new_constant(idx as u64);
        let rsize = Self::word_size_for_type(ty);
        let bytecodes_dst = bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());
        let f = self.vregister.push_stack(&dst, rsize as _);
        bytecodes_dst.push(VmInstruction::MoveConst(f, fi as ConstPos));
        let (dst, argsize) = self.prepare_function(bytecodes_dst, &dst, args);
        (dst, argsize, rsize)
    }
    fn prepare_extfun(
        &mut self,
        funcproto: &mut vm::FuncProto,
        bytecodes_dst: Option<&mut Vec<VmInstruction>>,
        dst: Arc<mir::Value>,
        args: &[(Arc<mir::Value>, TypeNodeId)],
        label: Symbol,
        ty: TypeNodeId,
    ) -> (Reg, Reg, TypeSize) {
        let idx = self.get_or_insert_extfunid(label, ty);
        self.prepare_extfun_or_cls(funcproto, bytecodes_dst, dst, args, idx, ty)
    }
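    /// Lowers a single MIR instruction. Most instructions map to one VM
    /// instruction returned to the caller; instructions that expand into
    /// several (calls, branches, arrays) push directly into `bytecodes_dst`
    /// (or into the function's own bytecode when it is `None`) and may
    /// return `None`.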
    fn emit_instruction(
        &mut self,
        funcproto: &mut vm::FuncProto,
        bytecodes_dst: Option<&mut Vec<VmInstruction>>,
        mirfunc: mir::Function,
        dst: Arc<mir::Value>,
        mirinst: mir::Instruction,
        config: Config,
    ) -> Option<VmInstruction> {
        match mirinst {
            mir::Instruction::Uinteger(u) => {
                let pos = funcproto.add_new_constant(u);
                Some(VmInstruction::MoveConst(
                    self.get_destination(dst, 1),
                    pos as ConstPos,
                ))
            }
            mir::Instruction::Integer(i) => {
                let pos = funcproto.add_new_constant(gen_raw_int(&i));
                Some(VmInstruction::MoveConst(
                    self.get_destination(dst, 1),
                    pos as ConstPos,
                ))
            }
            mir::Instruction::Float(n) => {
                let dst = self.get_destination(dst, 1);
                if let Ok(half_f) = HFloat::try_from(n) {
                    Some(VmInstruction::MoveImmF(dst, half_f))
                } else {
                    let pos = funcproto.add_new_constant(gen_raw_float(&n));
                    Some(VmInstruction::MoveConst(dst, pos as ConstPos))
                }
            }
            mir::Instruction::String(s) => {
                let pos = self.program.add_new_str(s);
                let cpos = funcproto.add_new_constant(pos as u64);
                Some(VmInstruction::MoveConst(
                    self.get_destination(dst, 1),
                    cpos as ConstPos,
                ))
            }
            mir::Instruction::Alloc(t) => {
                let size = Self::word_size_for_type(t) as u64;
                let _ = self.vregister.push_stack(&dst, size);
                None
            }
            mir::Instruction::Load(ptr, ty) => {
                let d = self.get_destination(dst, Self::word_size_for_type(ty));
                let s = self.find_keep(&ptr);
                let size = Self::word_size_for_type(ty);
                match (d, s, size) {
                    (d, s, 1) if d != s => Some(VmInstruction::Move(d, s)),
                    (d, s, size) if d != s => Some(VmInstruction::MoveRange(d, s, size)),
                    _ => None,
                }
            }
            mir::Instruction::Store(dst, src, ty) => {
                let s = self.find(&src);
                let d = self.find_keep(&dst);
                let size = Self::word_size_for_type(ty);
                match (d, s, size) {
                    (d, s, 1) if d != s => Some(VmInstruction::Move(d, s)),
                    (d, s, size) if d != s => Some(VmInstruction::MoveRange(d, s, size)),
                    _ => None,
                }
            }
            mir::Instruction::GetGlobal(v, ty) => {
                let dst = self.get_destination(dst, Self::word_size_for_type(ty));
                let idx = self.get_or_insert_global(v.clone());
                Some(VmInstruction::GetGlobal(
                    dst,
                    idx,
                    Self::word_size_for_type(ty),
                ))
            }
            mir::Instruction::SetGlobal(v, src, ty) => {
                let idx = self.get_or_insert_global(v.clone());
                let s = self.find(&src);
                Some(VmInstruction::SetGlobal(
                    idx,
                    s,
                    Self::word_size_for_type(ty),
                ))
            }
            mir::Instruction::GetElement {
                value,
                ty,
                tuple_offset,
            } => {
                let ptr = self.find_keep(&value) as usize;
                let ty = ty.to_type();
                let tvec = ty.get_as_tuple().unwrap();
                let tsize = Self::word_size_for_type(tvec[tuple_offset as usize]);
                let t_offset: u64 = tvec[0..(tuple_offset as _)]
                    .iter()
                    .map(|t| Self::word_size_for_type(*t) as u64)
                    .sum();
                let address = (ptr + t_offset as usize) as Reg;
                self.vregister
                    .get_top()
                    .0
                    .insert(dst, MemoryRegion(address, tsize));
                None
            }
            mir::Instruction::Call(v, args, r_ty) => {
                let rsize = Self::word_size_for_type(r_ty);
                match v.as_ref() {
                    mir::Value::Register(_address) => {
                        let bytecodes_dst =
                            bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());
                        let d = self.get_destination(dst.clone(), rsize);
                        let s = self.find(&v);
                        bytecodes_dst.push(VmInstruction::Move(d, s));
                        let (fadd, argsize) = self.prepare_function(bytecodes_dst, &dst, &args);
                        Some(VmInstruction::Call(fadd, argsize, rsize))
                    }
                    mir::Value::Function(_idx) => {
                        unreachable!();
                    }
                    mir::Value::ExtFunction(label, _ty) => {
                        let (dst, argsize, nret) =
                            self.prepare_extfun(funcproto, bytecodes_dst, dst, &args, *label, r_ty);
                        Some(VmInstruction::CallExtFun(dst, argsize, nret))
                    }
                    _ => unreachable!(),
                }
            }
            mir::Instruction::CallCls(f, args, r_ty) => {
                let rsize = Self::word_size_for_type(r_ty);
                match f.as_ref() {
                    mir::Value::Register(_address) => {
                        let bytecodes_dst =
                            bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());

                        let (fadd, argsize) = self.prepare_function(bytecodes_dst, &f, &args);
                        let s = self.find(&f);
                        let d = self.get_destination(dst.clone(), rsize);
                        bytecodes_dst.push(VmInstruction::CallCls(fadd, argsize, rsize));
                        match rsize {
                            0 => None,
                            1 => Some(VmInstruction::Move(d, s)),
                            n => Some(VmInstruction::MoveRange(d, s, n)),
                        }
                    }
                    mir::Value::Function(_idx) => {
                        unreachable!();
                    }
                    mir::Value::ExtFunction(label, _ty) => {
                        let (dst, argsize, nret) =
                            self.prepare_extfun(funcproto, bytecodes_dst, dst, &args, *label, r_ty);
                        Some(VmInstruction::CallExtFun(dst, argsize, nret))
                    }
                    _ => unreachable!(),
                }
            }
            mir::Instruction::Closure(idxcell) => {
                let idx = self.find(&idxcell);
                let dst = self.get_destination(dst, 1);
                Some(VmInstruction::Closure(dst, idx))
            }
            mir::Instruction::CloseUpValues(src, ty) => {
                let flattened = ty.flatten();
                let base = self.vregister.find_keep(&src).unwrap();

                let mut offset = 0;
                let bytecodes_dst = bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());
                for elem_t in flattened {
                    let tsize = Self::word_size_for_type(elem_t);
                    if elem_t.to_type().is_function() {
                        bytecodes_dst.push(VmInstruction::Close(base + offset))
                    }
                    offset += tsize;
                }
                None
            }
            mir::Instruction::GetUpValue(i, ty) => {
                let upval = &mirfunc.upindexes[i as usize];
                let v = self.find_upvalue(upval);
                let size: u8 = Self::word_size_for_type(ty);
                let ouv = mir::OpenUpValue {
                    pos: v as usize,
                    size,
                    is_closure: ty.to_type().is_function(),
                };
                if let Some(ui) = funcproto.upindexes.get_mut(i as usize) {
                    *ui = ouv;
                } else {
                    funcproto.upindexes.push(ouv);
                }
                let d = self.vregister.get_top().add_newvalue_range(&dst, size as _);
                Some(VmInstruction::GetUpValue(
                    d,
                    i as Reg,
                    Self::word_size_for_type(ty),
                ))
            }
            mir::Instruction::SetUpValue(dst, src, ty) => {
                let upval = &mirfunc.upindexes[dst as usize];
                let v = self.find_upvalue(upval);
                let size: u8 = Self::word_size_for_type(ty);
                let ouv = mir::OpenUpValue {
                    pos: v as usize,
                    size,
                    is_closure: ty.to_type().is_function(),
                };
                if let Some(ui) = funcproto.upindexes.get_mut(dst as usize) {
                    *ui = ouv;
                } else {
                    funcproto.upindexes.push(ouv);
                }
                let s = self.find(&src);
                Some(VmInstruction::SetUpValue(
                    dst as Reg,
                    s,
                    Self::word_size_for_type(ty),
                ))
            }
            mir::Instruction::PushStateOffset(v) => {
                let state_size = StateOffset::try_from(Self::calc_state_size(v))
                    .expect("state offset is too large");
                Some(VmInstruction::PushStatePos(state_size))
            }
            mir::Instruction::PopStateOffset(v) => {
                let state_size = StateOffset::try_from(Self::calc_state_size(v))
                    .expect("state offset is too large");
                Some(VmInstruction::PopStatePos(state_size))
            }
            mir::Instruction::GetState(ty) => {
                let size = Self::word_size_for_type(ty);
                let d = self.vregister.push_stack(&dst, size as _);
                Some(VmInstruction::GetState(d, size))
            }

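            // Conditional lowering: the then/else blocks are first generated
            // into temporary buffers so their lengths are known, the phi node
            // is resolved by moving each branch's value into a shared
            // register, and the `JmpIfNeg`/`Jmp` offsets are computed from
            // the buffer lengths before everything is appended.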
            mir::Instruction::JmpIf(cond, tbb, ebb, pbb) => {
                let c = self.find(&cond);

                let mut bytecodes_dst = bytecodes_dst;

                let mut then_bytecodes: Vec<VmInstruction> = vec![];
                let mut else_bytecodes: Vec<VmInstruction> = vec![];
                mirfunc.body[tbb as usize]
                    .0
                    .iter()
                    .for_each(|(dst, t_inst)| {
                        let res = self.emit_instruction(
                            funcproto,
                            Some(&mut then_bytecodes),
                            mirfunc.clone(),
                            dst.clone(),
                            t_inst.clone(),
                            config,
                        );
                        if let Some(inst) = res {
                            then_bytecodes.push(inst);
                        }
                    });

                mirfunc.body[ebb as usize]
                    .0
                    .iter()
                    .for_each(|(dst, t_inst)| {
                        if let Some(inst) = self.emit_instruction(
                            funcproto,
                            Some(&mut else_bytecodes),
                            mirfunc.clone(),
                            dst.clone(),
                            t_inst.clone(),
                            config,
                        ) {
                            else_bytecodes.push(inst);
                        };
                    });
                let phiblock = &mirfunc.body[pbb as usize].0;
                let (phidst, pinst) = phiblock.first().unwrap();
                let phi = self.vregister.add_newvalue(phidst);
                if let mir::Instruction::Phi(t, e) = pinst {
                    let t = self.find(t);
                    then_bytecodes.push(VmInstruction::Move(phi, t));
                    let e = self.find(e);
                    else_bytecodes.push(VmInstruction::Move(phi, e));
                } else {
                    unreachable!("Unexpected inst: {pinst:?}");
                }
                let else_offset = then_bytecodes.len() + 2;
                let inst = VmInstruction::JmpIfNeg(c, else_offset as _);
                match &mut bytecodes_dst {
                    Some(dst) => dst.push(inst),
                    None => funcproto.bytecodes.push(inst),
                }

                let ret_offset = else_bytecodes.len() + 1;

                then_bytecodes.push(VmInstruction::Jmp(ret_offset as i16));

                for mut b in [then_bytecodes, else_bytecodes] {
                    match &mut bytecodes_dst {
                        Some(dst) => dst.append(&mut b),
                        None => funcproto.bytecodes.append(&mut b),
                    }
                }

                phiblock.iter().skip(1).for_each(|(dst, p_inst)| {
                    if let Some(inst) = self.emit_instruction(
                        funcproto,
                        None,
                        mirfunc.clone(),
                        dst.clone(),
                        p_inst.clone(),
                        config,
                    ) {
                        match &mut bytecodes_dst {
                            Some(dst) => dst.push(inst),
                            None => funcproto.bytecodes.push(inst),
                        }
                    };
                });
                None
            }
            mir::Instruction::Jmp(offset) => Some(VmInstruction::Jmp(offset)),
            mir::Instruction::Phi(_, _) => {
                unreachable!()
            }
            mir::Instruction::Return(v, rty) => {
                let nret = Self::word_size_for_type(rty);
                let inst = match v.as_ref() {
                    mir::Value::None => VmInstruction::Return0,
                    _ => VmInstruction::Return(self.find(&v), nret),
                };
                Some(inst)
            }
            mir::Instruction::ReturnFeed(new, rty) => {
                let size = Self::word_size_for_type(rty);
                let bytecodes_dst = bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());
                match config.self_eval_mode {
                    SelfEvalMode::SimpleState => {
                        let new = self.find(&new);
                        bytecodes_dst.push(VmInstruction::SetState(new, size));
                        Some(VmInstruction::Return(new, size))
                    }
                    SelfEvalMode::ZeroAtInit => {
                        let old = self.vregister.add_newvalue(&dst);
                        bytecodes_dst.push(VmInstruction::GetState(old, size));
                        let new = self.find(&new);
                        bytecodes_dst.push(VmInstruction::SetState(new, size));
                        Some(VmInstruction::Return(old, size))
                    }
                }
            }
            mir::Instruction::Delay(max, src, time) => {
                let s = self.find(&src);
                let t = self.find(&time);

                let dst = self.vregister.add_newvalue(&dst);
                funcproto.delay_sizes.push(max);
                Some(VmInstruction::Delay(dst, s, t))
            }
            mir::Instruction::Mem(src) => {
                let s = self.find(&src);
                let dst = self.vregister.add_newvalue(&dst);
                Some(VmInstruction::Mem(dst, s))
            }
            mir::Instruction::NegF(v1) => self.emit_binop1(VmInstruction::NegF, dst, v1),
            mir::Instruction::AddF(v1, v2) => self.emit_binop2(VmInstruction::AddF, dst, v1, v2),
            mir::Instruction::SubF(v1, v2) => self.emit_binop2(VmInstruction::SubF, dst, v1, v2),
            mir::Instruction::MulF(v1, v2) => self.emit_binop2(VmInstruction::MulF, dst, v1, v2),
            mir::Instruction::DivF(v1, v2) => self.emit_binop2(VmInstruction::DivF, dst, v1, v2),
            mir::Instruction::ModF(v1, v2) => self.emit_binop2(VmInstruction::ModF, dst, v1, v2),
            mir::Instruction::PowF(v1, v2) => self.emit_binop2(VmInstruction::PowF, dst, v1, v2),
            mir::Instruction::LogF(v1) => self.emit_binop1(VmInstruction::LogF, dst, v1),

            mir::Instruction::SinF(v1) => self.emit_binop1(VmInstruction::SinF, dst, v1),
            mir::Instruction::CosF(v1) => self.emit_binop1(VmInstruction::CosF, dst, v1),
            mir::Instruction::AbsF(v1) => self.emit_binop1(VmInstruction::AbsF, dst, v1),
            mir::Instruction::SqrtF(v1) => self.emit_binop1(VmInstruction::SqrtF, dst, v1),
            mir::Instruction::AddI(v1, v2) => self.emit_binop2(VmInstruction::AddI, dst, v1, v2),
            mir::Instruction::SubI(v1, v2) => self.emit_binop2(VmInstruction::SubI, dst, v1, v2),
            mir::Instruction::MulI(v1, v2) => self.emit_binop2(VmInstruction::MulI, dst, v1, v2),
            mir::Instruction::DivI(v1, v2) => self.emit_binop2(VmInstruction::DivI, dst, v1, v2),
            mir::Instruction::ModI(v1, v2) => self.emit_binop2(VmInstruction::ModI, dst, v1, v2),
            mir::Instruction::Gt(v1, v2) => self.emit_binop2(VmInstruction::Gt, dst, v1, v2),
            mir::Instruction::Ge(v1, v2) => self.emit_binop2(VmInstruction::Ge, dst, v1, v2),
            mir::Instruction::Lt(v1, v2) => self.emit_binop2(VmInstruction::Lt, dst, v1, v2),
            mir::Instruction::Le(v1, v2) => self.emit_binop2(VmInstruction::Le, dst, v1, v2),
            mir::Instruction::Eq(v1, v2) => self.emit_binop2(VmInstruction::Eq, dst, v1, v2),
            mir::Instruction::Ne(v1, v2) => self.emit_binop2(VmInstruction::Ne, dst, v1, v2),
            mir::Instruction::And(v1, v2) => self.emit_binop2(VmInstruction::And, dst, v1, v2),
            mir::Instruction::Or(v1, v2) => self.emit_binop2(VmInstruction::Or, dst, v1, v2),

            mir::Instruction::Array(values, ty) => {
                let elem_ty_size = Self::word_size_for_type(ty);
                let size = values.len();
                let bytecodes_dst = bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());
                let dst_reg = self.get_destination(dst.clone(), 1 as _);
                bytecodes_dst.push(VmInstruction::AllocArray(dst_reg, size as _, elem_ty_size));
                for (i, val) in values.iter().enumerate() {
                    let tmp_idx_ref = Arc::new(mir::Value::None);
                    let idx = self.vregister.add_newvalue(&tmp_idx_ref);
                    bytecodes_dst.push(VmInstruction::MoveImmF(
                        idx,
                        HFloat::try_from(i as f64).unwrap(),
                    ));
                    let idx = self.find(&tmp_idx_ref);
                    let src = self.find(val);
                    bytecodes_dst.push(VmInstruction::SetArrayElem(dst_reg, idx, src));
                }
                self.varray.push(dst);
                None
            }

            mir::Instruction::GetArrayElem(array, index, elem_ty) => {
                let array_reg = self.find(&array);
                let index_reg = self.find(&index);
                let dst_reg = self.get_destination(dst, Self::word_size_for_type(elem_ty));
                let bytecodes_dst = bytecodes_dst.unwrap_or_else(|| funcproto.bytecodes.as_mut());
                bytecodes_dst.push(VmInstruction::GetArrayElem(dst_reg, array_reg, index_reg));
                None
            }

            mir::Instruction::SetArrayElem(_array, _index, _value, _elem_ty) => {
                todo!("SetArrayElem is not used in the current implementation");
            }

            _ => {
                unimplemented!()
            }
        }
    }
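    /// Builds a `vm::FuncProto` for one MIR function: fills in arity, return
    /// size, and state size, allocates the parameter registers, then lowers
    /// the instructions of the first basic block (branch arms are reached
    /// recursively from there).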
    fn generate_funcproto(
        &mut self,
        mirfunc: &mir::Function,
        fidx: usize,
        config: Config,
    ) -> (Symbol, vm::FuncProto) {
        log::trace!("generating function {}", mirfunc.label.0);
        let state_size = Self::calc_state_size(&mirfunc.state_sizes);
        let mut func = vm::FuncProto {
            nparam: mirfunc.args.len(),
            nret: Self::word_size_for_type(
                *mirfunc
                    .return_type
                    .get()
                    .expect("return type not inferred correctly"),
            ) as _,
            state_size,
            ..Default::default()
        };
        self.vregister.0.push(VRegister::default());
        for (a, t) in mirfunc.args.iter().zip(mirfunc.argtypes.iter()) {
            let size = Self::word_size_for_type(*t);
            self.vregister.push_stack(a, size as _);
        }

        let block = &mirfunc.body[0];
        block.0.iter().for_each(|(dst, inst)| {
            let newinst = self.emit_instruction(
                &mut func,
                None,
                mirfunc.clone(),
                dst.clone(),
                inst.clone(),
                config,
            );
            if let Some(i) = newinst {
                func.bytecodes.push(i);
            }
        });
        (mirfunc.label, func)
    }
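    /// Generates the whole `vm::Program`: one `FuncProto` per MIR function,
    /// plus the file path and the DSP I/O channel information taken from the
    /// MIR.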
    pub fn generate(&mut self, mir: Mir, config: Config) -> vm::Program {
        self.program.global_fn_table = mir
            .functions
            .iter()
            .enumerate()
            .map(|(i, func)| {
                self.fnmap.insert(func.label, i);
                self.generate_funcproto(func, i, config)
            })
            .collect();
        self.program.file_path = mir.file_path;
        self.program.iochannels = mir.get_dsp_iochannels();
        let _io = self.program.iochannels.unwrap();
        self.program.clone()
    }
}
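/// Peephole pass over each function's bytecode that drops a `Move` pair that
/// immediately undoes itself and fuses `Move`/`MoveConst` chains that copy
/// through an intermediate register.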
fn remove_redundunt_mov(program: vm::Program) -> vm::Program {
    let mut res = program.clone();
    for (_, f) in res.global_fn_table.iter_mut() {
        let mut remove_idx = std::collections::HashSet::<usize>::new();
        let mut reduce_idx = std::collections::HashMap::<usize, VmInstruction>::new();

        let mut removeconst_idx = std::collections::HashMap::<usize, VmInstruction>::new();

        for (i, pair) in f.bytecodes.windows(2).enumerate() {
            match pair {
                &[VmInstruction::Move(dst, src), VmInstruction::Move(dst2, src2)]
                    if dst == src2 && src == dst2 =>
                {
                    remove_idx.insert(i);
                    remove_idx.insert(i + 1);
                }
                &[VmInstruction::Move(dst, src), VmInstruction::Move(dst2, src2)]
                    if dst == src2 =>
                {
                    reduce_idx.insert(i, VmInstruction::Move(dst2, src));
                    remove_idx.insert(i + 1);
                }
                &[VmInstruction::MoveConst(dst, src), VmInstruction::Move(dst2, src2)]
                    if dst == src2 =>
                {
                    removeconst_idx.insert(i, VmInstruction::MoveConst(dst2, src));
                    remove_idx.insert(i + 1);
                }
                _ => {}
            }
        }
        let mut res_bytecodes = vec![];
        for (i, inst) in f.bytecodes.iter().enumerate() {
            if remove_idx.contains(&i) {
            } else if let Some(inst) = removeconst_idx.get(&i) {
                res_bytecodes.push(*inst);
            } else if let Some(inst) = reduce_idx.get(&i) {
                res_bytecodes.push(*inst);
            } else {
                res_bytecodes.push(*inst);
            }
        }
        f.bytecodes = res_bytecodes;
    }
    res
}
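/// Post-generation optimization hook. It currently returns the program
/// unchanged; `remove_redundunt_mov` above is not yet applied here.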
fn optimize(program: vm::Program) -> vm::Program {
    program
}
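/// Entry point: lowers a whole `mir::Mir` into a `vm::Program` with the given
/// `Config`, then runs the optimization pass.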
pub fn gen_bytecode(mir: mir::Mir, config: Config) -> vm::Program {
    let mut generator = ByteCodeGenerator::default();
    let program = generator.generate(mir, config);
    optimize(program)
}

#[cfg(test)]
mod test {

    use crate::{compiler::IoChannelInfo, interner::ToSymbol};

    #[test]
    fn build() {
        use super::*;
        use crate::numeric;
        use crate::types::PType;
        use crate::types::Type;
        extern crate colog;
        let mut src = mir::Mir::default();
        let arg = Arc::new(mir::Value::Argument(
            0,
            Arc::new(mir::Argument("hoge".to_symbol(), numeric!())),
        ));
        let mut func =
            mir::Function::new(0, "dsp".to_symbol(), &[arg.clone()], &[numeric!()], None);
        func.return_type.get_or_init(|| numeric!());
        let mut block = mir::Block::default();
        let resint = Arc::new(mir::Value::Register(1));
        block.0.push((resint.clone(), mir::Instruction::Integer(1)));
        let res = Arc::new(mir::Value::Register(2));
        block
            .0
            .push((res.clone(), mir::Instruction::AddF(arg, resint)));
        block.0.push((
            Arc::new(mir::Value::None),
            mir::Instruction::Return(res.clone(), numeric!()),
        ));
        func.body = vec![block];
        src.functions.push(func);
        let mut generator = ByteCodeGenerator::default();
        let config = Config::default();
        let res = generator.generate(src, config);

        let mut answer = vm::Program {
            iochannels: Some(IoChannelInfo {
                input: 1,
                output: 1,
            }),
            ..Default::default()
        };

        let mut main = vm::FuncProto::new(1, 1);
        main.constants.push(1);
        main.bytecodes = vec![
            VmInstruction::MoveConst(1, 0),
            VmInstruction::AddF(1, 0, 1),
            VmInstruction::Return(1, 1),
        ];
        answer.global_fn_table.push(("dsp".to_symbol(), main));
        assert_eq!(res, answer);
    }
}