use super::intrinsics;
use super::typing::{InferContext, infer_root};

use crate::compiler::parser;
use crate::interner::{ExprNodeId, Symbol, ToSymbol, TypeNodeId};
use crate::pattern::{Pattern, TypedId, TypedPattern};
use crate::plugin::MacroFunction;
use crate::utils::miniprint::MiniPrint;
use crate::{function, interpreter, numeric, unit};
pub mod convert_pronoun;
pub(crate) mod pattern_destructor;
pub(crate) mod recursecheck;

use crate::mir::{self, Argument, Instruction, Mir, VPtr, VReg, Value};

use state_tree::tree::StateTreeSkeleton;

use std::collections::BTreeMap;
use std::path::PathBuf;
use std::sync::Arc;

use crate::types::{PType, RecordTypeField, Type};
use crate::utils::environment::{Environment, LookupRes};
use crate::utils::error::ReportableError;
use crate::utils::metadata::{GLOBAL_LABEL, Location, Span};

use crate::ast::{Expr, Literal};

type StateSkeleton = StateTreeSkeleton<mir::StateType>;

#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord)]
struct FunctionId(pub u64);
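/// Per-function compilation state: which `mir::Function` is currently being
/// built, which basic block new instructions are appended to, and the
/// bookkeeping for pending state-offset pushes around stateful calls.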
#[derive(Debug, Default)]
struct ContextData {
    pub func_i: FunctionId,
    pub current_bb: usize,
    pub next_state_offset: Option<u64>,
    pub push_sum: u64,
}
#[derive(Debug, Default, Clone)]
struct DefaultArgData {
    pub name: Symbol,
    pub fid: FunctionId,
    pub ty: TypeNodeId,
}

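/// Working context for lowering a typed AST into MIR. It owns the type
/// inference results, the value environment mapping symbols to `VPtr`s,
/// the `Mir` program under construction, and a stack of `ContextData`
/// (one entry per function currently being generated).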
#[derive(Debug)]
struct Context {
    typeenv: InferContext,
    valenv: Environment<VPtr>,
    fn_label: Option<Symbol>,
    anonymous_fncount: u64,
    reg_count: VReg,
    program: Mir,
    default_args_map: BTreeMap<FunctionId, Vec<DefaultArgData>>,
    data: Vec<ContextData>,
    data_i: usize,
}
enum AssignDestination {
    Local(VPtr),
    UpValue(u64, VPtr),
    Global(VPtr),
}
impl Context {
    pub fn new(typeenv: InferContext, file_path: Option<PathBuf>) -> Self {
        Self {
            typeenv,
            valenv: Environment::new(),
            program: Mir::new(file_path),
            reg_count: 0,
            fn_label: None,
            anonymous_fncount: 0,
            default_args_map: BTreeMap::new(),
            data: vec![ContextData::default()],
            data_i: 0,
        }
    }
    fn get_loc_from_span(&self, span: &Span) -> Location {
        Location::new(
            span.clone(),
            self.program.file_path.clone().unwrap_or_default(),
        )
    }
    fn get_ctxdata(&mut self) -> &mut ContextData {
        self.data.get_mut(self.data_i).unwrap()
    }

    fn consume_fnlabel(&mut self) -> Symbol {
        let res = self.fn_label.unwrap_or_else(|| {
            let res = format!("lambda_{}", self.anonymous_fncount);
            self.anonymous_fncount += 1;
            res.to_symbol()
        });
        self.fn_label = None;
        res
    }

    fn get_current_fn(&mut self) -> &mut mir::Function {
        let i = self.get_ctxdata().func_i.0 as usize;
        &mut self.program.functions[i]
    }
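    /// Special-cases a call to the external `delay` function whose maximum
    /// length is a float literal: it is lowered directly to
    /// `Instruction::Delay` together with a `StateSkeleton::Delay` entry.
    /// Returns `None` when the callee is not `delay` or the argument shape
    /// does not match, so the caller falls back to a normal call.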
    fn try_make_delay(
        &mut self,
        f: &VPtr,
        args: &[ExprNodeId],
    ) -> Option<(VPtr, Vec<StateSkeleton>)> {
        let _rt = match f.as_ref() {
            Value::ExtFunction(name, ft) if *name == "delay".to_symbol() => ft,
            _ => return None,
        };

        let (max, src, time) = match args {
            [max, src, time] => (max, src, time),
            _ => return None,
        };
        match max.to_expr() {
            Expr::Literal(Literal::Float(max)) => {
                let (args, astates) = self.eval_args(&[*src, *time]);
                let max_time = max.as_str().parse::<f64>().unwrap();
                let new_skeleton = StateSkeleton::Delay {
                    len: max_time as u64,
                };
                self.consume_and_insert_pushoffset();
                self.get_ctxdata().next_state_offset = Some(new_skeleton.total_size());

                let (args, _types): (Vec<VPtr>, Vec<TypeNodeId>) = args.into_iter().unzip();
                Some((
                    self.push_inst(Instruction::Delay(
                        max_time as u64,
                        args[0].clone(),
                        args[1].clone(),
                    )),
                    [astates, vec![new_skeleton]].concat(),
                ))
            }
            _ => unreachable!("unbounded delay access, should be an error at typing stage."),
        }
    }
    fn make_binop_intrinsic(
        &self,
        label: Symbol,
        args: &[(VPtr, TypeNodeId)],
    ) -> Option<Instruction> {
        debug_assert_eq!(args.len(), 2);
        let a0 = args[0].0.clone();
        let a1 = args[1].0.clone();
        match label.as_str() {
            intrinsics::ADD => Some(Instruction::AddF(a0, a1)),
            intrinsics::SUB => Some(Instruction::SubF(a0, a1)),
            intrinsics::MULT => Some(Instruction::MulF(a0, a1)),
            intrinsics::DIV => Some(Instruction::DivF(a0, a1)),
            intrinsics::POW => Some(Instruction::PowF(a0, a1)),
            intrinsics::MODULO => Some(Instruction::ModF(a0, a1)),
            intrinsics::GT => Some(Instruction::Gt(a0, a1)),
            intrinsics::GE => Some(Instruction::Ge(a0, a1)),
            intrinsics::LT => Some(Instruction::Lt(a0, a1)),
            intrinsics::LE => Some(Instruction::Le(a0, a1)),
            intrinsics::EQ => Some(Instruction::Eq(a0, a1)),
            intrinsics::NE => Some(Instruction::Ne(a0, a1)),
            intrinsics::AND => Some(Instruction::And(a0, a1)),
            intrinsics::OR => Some(Instruction::Or(a0, a1)),
            _ => None,
        }
    }
    fn make_uniop_intrinsic(
        &mut self,
        label: Symbol,
        args: &[(VPtr, TypeNodeId)],
    ) -> (Option<Instruction>, Vec<StateSkeleton>) {
        debug_assert_eq!(args.len(), 1);
        let a0 = args[0].0.clone();
        let a0_ty = args[0].1;
        match label.as_str() {
            intrinsics::NEG => (Some(Instruction::NegF(a0)), vec![]),
            intrinsics::SQRT => (Some(Instruction::SqrtF(a0)), vec![]),
            intrinsics::LOG => (Some(Instruction::LogF(a0)), vec![]),
            intrinsics::ABS => (Some(Instruction::AbsF(a0)), vec![]),
            intrinsics::SIN => (Some(Instruction::SinF(a0)), vec![]),
            intrinsics::COS => (Some(Instruction::CosF(a0)), vec![]),
            intrinsics::MEM => {
                let skeleton = StateSkeleton::Mem(mir::StateType::from(numeric!()));
                (Some(Instruction::Mem(a0)), vec![skeleton])
            }
            _ => (None, vec![]),
        }
    }

    fn make_intrinsics(
        &mut self,
        label: Symbol,
        args: &[(VPtr, TypeNodeId)],
    ) -> (Option<VPtr>, Vec<StateSkeleton>) {
        let (inst, states) = match args.len() {
            1 => self.make_uniop_intrinsic(label, args),
            2 => (self.make_binop_intrinsic(label, args), vec![]),
            _ => return (None, vec![]),
        };
        let vptr = inst.map(|i| self.push_inst(i));
        (vptr, states)
    }
    fn get_current_basicblock(&mut self) -> &mut mir::Block {
        let bbid = self.get_ctxdata().current_bb;
        self.get_current_fn()
            .body
            .get_mut(bbid)
            .expect("no basic block found")
    }
    fn add_new_basicblock(&mut self) {
        let idx = self.get_current_fn().add_new_basicblock();
        self.get_ctxdata().current_bb = idx;
    }
    fn gen_new_register(&mut self) -> VPtr {
        let res = Arc::new(Value::Register(self.reg_count));
        self.reg_count += 1;
        res
    }
    fn push_inst(&mut self, inst: Instruction) -> VPtr {
        let res = self.gen_new_register();
        self.get_current_basicblock().0.push((res.clone(), inst));
        res
    }
    fn add_bind(&mut self, bind: (Symbol, VPtr)) {
        self.valenv.add_bind(&[bind]);
    }
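    /// Recursively binds the value `v` of type `ty` to the names in a
    /// (possibly nested) pattern. Tuple and record patterns are destructured
    /// with `GetElement`; at the global scope, non-function values are
    /// wrapped in `Value::Global` and stored with `SetGlobal`.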
    fn add_bind_pattern(
        &mut self,
        pattern: &TypedPattern,
        v: VPtr,
        ty: TypeNodeId,
        is_global: bool,
    ) {
        let TypedPattern { pat, .. } = pattern;
        let span = pattern.to_span();
        match (pat, ty.to_type()) {
            (Pattern::Single(id), t) => {
                if is_global && !matches!(v.as_ref(), Value::Function(_)) {
                    let gv = Arc::new(Value::Global(v.clone()));
                    if t.is_function() {
                        self.push_inst(Instruction::CloseUpValues(v.clone(), ty));
                    }
                    self.push_inst(Instruction::SetGlobal(gv.clone(), v.clone(), ty));
                    self.add_bind((*id, gv))
                } else if id.as_str() != GLOBAL_LABEL {
                    self.add_bind((*id, v))
                }
            }
            (Pattern::Tuple(patterns), Type::Tuple(tvec)) => {
                for ((i, pat), cty) in patterns.iter().enumerate().zip(tvec.iter()) {
                    let elem_v = self.push_inst(Instruction::GetElement {
                        value: v.clone(),
                        ty,
                        tuple_offset: i as u64,
                    });
                    let tid = Type::Unknown.into_id_with_location(self.get_loc_from_span(&span));
                    let tpat = TypedPattern::new(pat.clone(), tid);
                    self.add_bind_pattern(&tpat, elem_v, *cty, is_global);
                }
            }
            (Pattern::Record(patterns), Type::Record(kvvec)) => {
                for (k, pat) in patterns.iter() {
                    let i = kvvec
                        .iter()
                        .position(|RecordTypeField { key, .. }| key == k);
                    if let Some(offset) = i {
                        let elem_v = self.push_inst(Instruction::GetElement {
                            value: v.clone(),
                            ty,
                            tuple_offset: offset as u64,
                        });
                        let tid =
                            Type::Unknown.into_id_with_location(self.get_loc_from_span(&span));
                        let tpat = TypedPattern::new(pat.clone(), tid);
                        let elem_t = kvvec[offset].ty;
                        self.add_bind_pattern(&tpat, elem_v, elem_t, is_global);
                    };
                }
            }
            _ => {
                panic!("typing error in the previous stage")
            }
        }
    }

    fn make_new_function(
        &mut self,
        name: Symbol,
        args: &[Argument],
        state_skeleton: Vec<StateSkeleton>,
        parent_i: Option<FunctionId>,
    ) -> FunctionId {
        let index = self.program.functions.len();
        let newf = mir::Function::new(
            index,
            name,
            args,
            state_skeleton,
            parent_i.map(|FunctionId(f)| f as _),
        );
        self.program.functions.push(newf);
        FunctionId(index as _)
    }

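    /// Runs `action` inside a freshly created child function context:
    /// argument bindings are pushed onto the value environment, a new
    /// `mir::Function` is registered (together with getters for any default
    /// arguments), and the context stack and environment are restored
    /// afterwards. Returns the new function id, the value produced by
    /// `action`, and the collected state skeletons.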
    fn do_in_child_ctx<
        F: FnMut(&mut Self, FunctionId) -> (VPtr, TypeNodeId, Vec<StateSkeleton>),
    >(
        &mut self,
        fname: Symbol,
        abinds: &[(Symbol, TypeNodeId, Option<ExprNodeId>)],
        state_skeleton: Vec<StateSkeleton>,
        mut action: F,
    ) -> (FunctionId, VPtr, Vec<StateSkeleton>) {
        self.valenv.extend();
        self.valenv.add_bind(
            abinds
                .iter()
                .enumerate()
                .map(|(i, (s, _ty, _default))| (*s, Arc::new(Value::Argument(i))))
                .collect::<Vec<_>>()
                .as_slice(),
        );
        let args = abinds
            .iter()
            .map(|(s, ty, _default)| Argument(*s, *ty))
            .collect::<Vec<_>>();
        let parent_i = self.get_ctxdata().func_i;
        let c_idx = self.make_new_function(fname, &args, state_skeleton, Some(parent_i));

        let def_args = abinds
            .iter()
            .filter_map(|(s, ty, default)| {
                default.map(|d| {
                    let (fid, _state) = self.new_default_args_getter(c_idx, *s, d);
                    DefaultArgData {
                        name: *s,
                        fid,
                        ty: *ty,
                    }
                })
            })
            .collect::<Vec<_>>();
        self.default_args_map.insert(c_idx, def_args);

        self.data.push(ContextData {
            func_i: c_idx,
            ..Default::default()
        });
        self.data_i += 1;
        let (fptr, ty, states) = action(self, c_idx);

        let f = self.program.functions.get_mut(c_idx.0 as usize).unwrap();
        f.return_type.get_or_init(|| ty);

        let _ = self.data.pop();
        self.data_i -= 1;
        log::trace!("end of lexical scope {fname}");
        self.valenv.to_outer();
        (c_idx, fptr, states)
    }
    fn get_default_args_getter_name(name: Symbol, fid: FunctionId) -> Symbol {
        format!("__default_{}_{name}", fid.0).to_symbol()
    }
    fn new_default_args_getter(
        &mut self,
        fid: FunctionId,
        name: Symbol,
        e: ExprNodeId,
    ) -> (FunctionId, Vec<StateSkeleton>) {
        let (fid, _v, states) = self.do_in_child_ctx(
            Self::get_default_args_getter_name(name, fid),
            &[],
            vec![],
            |ctx, c_idx| {
                let (v, ty, states) = ctx.eval_expr(e);
                let _v = ctx.push_inst(Instruction::Return(v, ty));
                let f = Arc::new(Value::Function(c_idx.0 as usize));
                (f, ty, states)
            },
        );
        (fid, states)
    }
    fn get_default_arg_call(&mut self, name: Symbol, fid: FunctionId) -> Option<VPtr> {
        let args = self.default_args_map.get(&fid);
        args.cloned().and_then(|defv_fn_ids| {
            defv_fn_ids
                .iter()
                .find(|default_arg_data| default_arg_data.name == name)
                .map(|default_arg_data| {
                    let fid = self.push_inst(Instruction::Uinteger(default_arg_data.fid.0));
                    self.push_inst(Instruction::Call(fid, vec![], default_arg_data.ty))
                })
        })
    }
    fn lookup(&self, key: &Symbol) -> LookupRes<VPtr> {
        match self.valenv.lookup_cls(key) {
            LookupRes::Local(v) => LookupRes::Local(v.clone()),
            LookupRes::UpValue(level, v) => LookupRes::UpValue(level, v.clone()),
            LookupRes::Global(v) => LookupRes::Global(v.clone()),
            LookupRes::None => LookupRes::None,
        }
    }

    pub fn eval_literal(&mut self, lit: &Literal, _span: &Span) -> VPtr {
        match lit {
            Literal::String(s) => self.push_inst(Instruction::String(*s)),
            Literal::Int(i) => self.push_inst(Instruction::Integer(*i)),
            Literal::Float(f) => self.push_inst(Instruction::Float(
                f.as_str().parse::<f64>().expect("illegal float format"),
            )),
            Literal::Now => {
                let ftype = numeric!();
                let fntype = function!(vec![], ftype);
                let getnow = Arc::new(Value::ExtFunction("_mimium_getnow".to_symbol(), fntype));
                self.push_inst(Instruction::CallCls(getnow, vec![], ftype))
            }
            Literal::SampleRate => {
                let ftype = numeric!();
                let fntype = function!(vec![], ftype);
                let samplerate = Arc::new(Value::ExtFunction(
                    "_mimium_getsamplerate".to_symbol(),
                    fntype,
                ));
                self.push_inst(Instruction::CallCls(samplerate, vec![], ftype))
            }
            Literal::SelfLit | Literal::PlaceHolder => unreachable!(),
        }
    }
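    /// Loads the value of a variable reference. Locals are loaded through
    /// their stack address, upvalues are routed through `GetUpValue` in each
    /// intermediate function context, globals go through `GetGlobal`, and
    /// names unknown to the value environment are assumed to be external
    /// functions.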
    fn eval_rvar(&mut self, e: ExprNodeId, t: TypeNodeId) -> VPtr {
        let span = &e.to_span();
        let loc = self.get_loc_from_span(span);
        let name = match e.to_expr() {
            Expr::Var(name) => name,
            _ => unreachable!("eval_rvar called on non-variable expr"),
        };
        log::trace!("rv t:{} {}", name, t.to_type());

        match self.lookup(&name) {
            LookupRes::Local(v) => match v.as_ref() {
                Value::Function(i) => {
                    let reg = self.push_inst(Instruction::Uinteger(*i as u64));
                    self.push_inst(Instruction::Closure(reg))
                }
                _ => {
                    let ptr = self.eval_expr_as_address(e);
                    self.push_inst(Instruction::Load(ptr, t))
                }
            },
            LookupRes::UpValue(level, v) => {
                (0..level)
                    .rev()
                    .fold(v.clone(), |upv, i| match upv.as_ref() {
                        Value::Function(_fi) => v.clone(),
                        _ => {
                            let res = self.gen_new_register();
                            let current = self.data.get_mut(self.data_i - i).unwrap();
                            let currentf = self
                                .program
                                .functions
                                .get_mut(current.func_i.0 as usize)
                                .unwrap();
                            let upi = currentf.get_or_insert_upvalue(&upv) as _;
                            let currentbb = currentf.body.get_mut(current.current_bb).unwrap();
                            currentbb
                                .0
                                .push((res.clone(), Instruction::GetUpValue(upi, t)));
                            res
                        }
                    })
            }
            LookupRes::Global(v) => match v.as_ref() {
                Value::Global(_gv) => self.push_inst(Instruction::GetGlobal(v.clone(), t)),
                Value::Function(_) | Value::Register(_) => v.clone(),
                _ => unreachable!("non global_value"),
            },
            LookupRes::None => {
                let ty = self.typeenv.lookup(name, loc).expect(
                    format!(
                        "variable \"{name}\" not found. it should be detected at type checking stage"
                    )
                    .as_str(),
                );
                Arc::new(Value::ExtFunction(name, ty))
            }
        }
    }
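    /// Resolves the left-hand side of an assignment into a destination:
    /// a local or global pointer, an upvalue index, or a `GetElement`
    /// pointer for tuple projections and record field accesses.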
    fn eval_destination_ptr(&mut self, assignee: ExprNodeId) -> AssignDestination {
        match assignee.to_expr() {
            Expr::Var(name) => {
                match self.lookup(&name) {
                    LookupRes::Local(v_ptr) => AssignDestination::Local(v_ptr.clone()),
                    LookupRes::Global(v_ptr) => AssignDestination::Global(v_ptr.clone()),
                    LookupRes::UpValue(_level, v_ptr) => {
                        let currentf = self.get_current_fn();
                        let upi = currentf.get_or_insert_upvalue(&v_ptr) as _;
                        AssignDestination::UpValue(upi, v_ptr.clone())
                    }
                    LookupRes::None => {
                        unreachable!("Invalid assignment target: variable not found")
                    }
                }
            }
            Expr::Proj(expr, idx) => {
                let base_ptr = self.eval_expr_as_address(expr);
                let tuple_ty_id = self.typeenv.infer_type(expr).unwrap();
                let ptr = self.push_inst(Instruction::GetElement {
                    value: base_ptr.clone(),
                    ty: tuple_ty_id,
                    tuple_offset: idx as u64,
                });
                AssignDestination::Local(ptr)
            }
            Expr::FieldAccess(expr, accesskey) => {
                let base_ptr = self.eval_expr_as_address(expr);

                let record_ty_id = self.typeenv.infer_type(expr).unwrap();
                let record_ty = record_ty_id.to_type();

                if let Type::Record(fields) = record_ty {
                    let offset = fields
                        .iter()
                        .position(|RecordTypeField { key, .. }| *key == accesskey)
                        .expect("field access to non-existing field");

                    let ptr = self.push_inst(Instruction::GetElement {
                        value: base_ptr.clone(),
                        ty: record_ty_id,
                        tuple_offset: offset as u64,
                    });
                    AssignDestination::Local(ptr)
                } else {
                    panic!("Expected record type for field access assignment, but got {record_ty}");
                }
            }
            Expr::ArrayAccess(_, _) => {
                unimplemented!("Assignment to array is not implemented yet.")
            }
            _ => unreachable!("Invalid assignee expression"),
        }
    }

    fn eval_assign(&mut self, assignee: ExprNodeId, src: VPtr, t: TypeNodeId) {
        match self.eval_destination_ptr(assignee) {
            AssignDestination::Local(value) => {
                self.push_inst(Instruction::Store(value, src, t));
            }
            AssignDestination::UpValue(upi, _value) => {
                self.push_inst(Instruction::SetUpValue(upi, src, t));
            }
            AssignDestination::Global(value) => {
                self.push_inst(Instruction::SetGlobal(value, src, t));
            }
        }
    }
    fn consume_and_insert_pushoffset(&mut self) {
        if let Some(offset) = self.get_ctxdata().next_state_offset.take() {
            self.get_ctxdata().push_sum += offset;
            self.get_current_basicblock()
                .0
                .push((Arc::new(Value::None), Instruction::PushStateOffset(offset)));
        }
    }
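    /// Emits a direct call to the function with index `idx`. Any pending
    /// state offset is flushed before the call; if the callee is stateful,
    /// its state skeleton is returned to the caller and the next state
    /// offset is advanced past it.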
    fn emit_fncall(
        &mut self,
        idx: u64,
        args: Vec<(VPtr, TypeNodeId)>,
        ret_t: TypeNodeId,
    ) -> (VPtr, Vec<StateSkeleton>) {
        let target_fn = &self.program.functions[idx as usize];
        let is_stateful = target_fn.is_stateful();
        let child_skeleton = target_fn.state_skeleton.clone();

        self.consume_and_insert_pushoffset();

        let f = self.push_inst(Instruction::Uinteger(idx));

        let res = self.push_inst(Instruction::Call(f.clone(), args, ret_t));

        if is_stateful {
            self.get_ctxdata().next_state_offset = Some(child_skeleton.total_size());
        }
        let s = if is_stateful {
            vec![child_skeleton]
        } else {
            vec![]
        };
        (res, s)
    }

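    /// Evaluates call arguments left to right. Function values are wrapped
    /// into closures, and any argument whose type contains a function gets a
    /// `CloseUpValues` so its captured environment outlives the call.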
    fn eval_args(&mut self, args: &[ExprNodeId]) -> (Vec<(VPtr, TypeNodeId)>, Vec<StateSkeleton>) {
        let res = args
            .iter()
            .map(|a_meta| {
                let (v, t, s) = self.eval_expr(*a_meta);
                let res = match v.as_ref() {
                    Value::Function(idx) => {
                        let f = self.push_inst(Instruction::Uinteger(*idx as u64));
                        self.push_inst(Instruction::Closure(f))
                    }
                    _ => v.clone(),
                };
                if t.to_type().contains_function() {
                    self.push_inst(Instruction::CloseUpValues(res.clone(), t));
                }
                (res, t, s)
            })
            .collect::<Vec<_>>();

        let ats = res
            .iter()
            .map(|(a, t, _)| (a.clone(), *t))
            .collect::<Vec<_>>();
        let states = res.into_iter().flat_map(|(_, _, s)| s).collect::<Vec<_>>();
        (ats, states)
    }
    fn eval_block(&mut self, block: Option<ExprNodeId>) -> (VPtr, TypeNodeId, Vec<StateSkeleton>) {
        self.add_new_basicblock();
        let (e, rt, s) = match block {
            Some(e) => self.eval_expr(e),
            None => (Arc::new(Value::None), unit!(), vec![]),
        };
        let e = match e.as_ref() {
            Value::Function(idx) => {
                let cpos = self.push_inst(Instruction::Uinteger(*idx as u64));
                self.push_inst(Instruction::Closure(cpos))
            }
            _ => e,
        };
        (e, rt, s)
    }
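    /// Allocates an aggregate (tuple or record literal), evaluates each
    /// element, and stores it at its offset. The `Alloc` for the destination
    /// register is inserted retroactively at the position recorded before
    /// the element evaluations, so it precedes all of its uses.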
    fn alloc_aggregates(
        &mut self,
        items: &[ExprNodeId],
        ty: TypeNodeId,
    ) -> (VPtr, TypeNodeId, Vec<StateSkeleton>) {
        log::trace!("alloc_aggregates: items = {items:?}, ty = {ty:?}");
        let len = items.len();
        if len == 0 {
            return (
                Arc::new(Value::None),
                Type::Record(vec![]).into_id(),
                vec![],
            );
        }
        let alloc_insert_point = self.get_current_basicblock().0.len();
        let dst = self.gen_new_register();
        let mut states = vec![];
        for (i, e) in items.iter().enumerate() {
            let (v, elem_ty, s) = self.eval_expr(*e);
            let ptr = self.push_inst(Instruction::GetElement {
                value: dst.clone(),
                ty,
                tuple_offset: i as u64,
            });
            states.extend(s);
            self.push_inst(Instruction::Store(ptr, v, elem_ty));
        }
        self.get_current_basicblock()
            .0
            .insert(alloc_insert_point, (dst.clone(), Instruction::Alloc(ty)));

        (dst, ty, states)
    }
    fn eval_expr_as_address(&mut self, e: ExprNodeId) -> VPtr {
        match e.to_expr() {
            Expr::Var(name) => {
                match self.lookup(&name) {
                    LookupRes::Local(ptr) => ptr,
                    LookupRes::Global(ptr) => ptr,
                    _ => unreachable!("Cannot get address of this expression"),
                }
            }
            Expr::FieldAccess(base_expr, accesskey) => {
                let base_ptr = self.eval_expr_as_address(base_expr);

                let record_ty_id = self.typeenv.infer_type(base_expr).unwrap();
                let record_ty = record_ty_id.to_type();

                if let Type::Record(fields) = record_ty {
                    let offset = fields
                        .iter()
                        .position(|f| f.key == accesskey)
                        .expect("Field not found");

                    self.push_inst(Instruction::GetElement {
                        value: base_ptr,
                        ty: record_ty_id,
                        tuple_offset: offset as u64,
                    })
                } else {
                    panic!("Cannot access field on a non-record type");
                }
            }
            Expr::ArrayAccess(_, _) => {
                unimplemented!("Array element assignment is not implemented yet.")
            }
            _ => unreachable!("This expression cannot be used as an l-value"),
        }
    }
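    /// Main expression lowering routine: emits MIR instructions for `e` into
    /// the current basic block and returns the resulting value, its
    /// (substituted) type, and the state skeletons collected from stateful
    /// sub-expressions.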
    pub fn eval_expr(&mut self, e: ExprNodeId) -> (VPtr, TypeNodeId, Vec<StateSkeleton>) {
        let span = e.to_span();
        let ty = self
            .typeenv
            .infer_type(e)
            .expect("type inference failed, should be an error at type checker stage");
        let ty = InferContext::substitute_type(ty);
        match &e.to_expr() {
            Expr::Literal(lit) => {
                let v = self.eval_literal(lit, &span);
                (v, ty, vec![])
            }
            Expr::Var(_name) => (self.eval_rvar(e, ty), ty, vec![]),
            Expr::Block(b) => {
                if let Some(block) = b {
                    self.eval_expr(*block)
                } else {
                    (Arc::new(Value::None), unit!(), vec![])
                }
            }
            Expr::Tuple(items) => self.alloc_aggregates(items, ty),
            Expr::Proj(tup, idx) => {
                let i = *idx as usize;
                let (tup_v, tup_ty, states) = self.eval_expr(*tup);
                let elem_ty = match tup_ty.to_type() {
                    Type::Tuple(tys) if i < tys.len() => tys[i],
                    _ => panic!(
                        "expected tuple type for projection; likely a type error bug in the previous stage"
                    ),
                };
                let res = self.push_inst(Instruction::GetElement {
                    value: tup_v.clone(),
                    ty: tup_ty,
                    tuple_offset: i as u64,
                });
                (res, elem_ty, states)
            }
            Expr::RecordLiteral(fields) => {
                self.alloc_aggregates(&fields.iter().map(|f| f.expr).collect::<Vec<_>>(), ty)
            }
            Expr::ImcompleteRecord(fields) => {
                self.alloc_aggregates(&fields.iter().map(|f| f.expr).collect::<Vec<_>>(), ty)
            }
            Expr::RecordUpdate(_, _) => {
                unreachable!("RecordUpdate should be expanded during syntax sugar conversion")
            }
            Expr::FieldAccess(expr, accesskey) => {
                let (expr_v, expr_ty, states) = self.eval_expr(*expr);
                match expr_ty.to_type() {
                    Type::Record(fields) => {
                        let offset = fields
                            .iter()
                            .position(|RecordTypeField { key, .. }| *key == *accesskey)
                            .expect("field access to non-existing field");

                        let res = self.push_inst(Instruction::GetElement {
                            value: expr_v.clone(),
                            ty: expr_ty,
                            tuple_offset: offset as u64,
                        });
                        (res, fields[offset].ty, states)
                    }
                    _ => panic!("expected record type for field access"),
                }
            }
            Expr::ArrayLiteral(items) => {
                let (vts, states): (Vec<_>, Vec<_>) = items
                    .iter()
                    .map(|item| {
                        let (v, t, s) = self.eval_expr(*item);
                        ((v, t), s)
                    })
                    .unzip();
                let (values, tys): (Vec<_>, Vec<_>) = vts.into_iter().unzip();
                debug_assert!(tys.windows(2).all(|w| w[0] == w[1]));
                let elem_ty = if !tys.is_empty() { tys[0] } else { numeric!() };
                let reg = self.push_inst(Instruction::Array(values.clone(), elem_ty));
                (
                    reg,
                    Type::Array(elem_ty).into_id(),
                    states.into_iter().flatten().collect(),
                )
            }
            Expr::ArrayAccess(array, index) => {
                let (array_v, _array_ty, states) = self.eval_expr(*array);
                let (index_v, _ty, states2) = self.eval_expr(*index);

                let result = self.push_inst(Instruction::GetArrayElem(
                    array_v.clone(),
                    index_v.clone(),
                    ty,
                ));
                (result, ty, [states, states2].concat())
            }
            Expr::Apply(f, args) => {
                let (f, ft, app_state) = self.eval_expr(*f);
                let del = self.try_make_delay(&f, args);
                if let Some((d, states)) = del {
                    return (d, numeric!(), states);
                }
                let (at, rt) = if let Type::Function { arg, ret } = ft.to_type() {
                    (arg, ret)
                } else {
                    panic!("non function type {} {} ", ft.to_type(), ty.to_type());
                };

                let (atvvec, arg_states) = if args.len() == 1 {
                    let (ats, states) = self.eval_args(args);
                    let (arg_val, ty) = ats.first().unwrap().clone();
                    if ty.to_type().can_be_unpacked() {
                        log::trace!("Unpacking argument {ty} for {at}");
                        let ats = match ty.to_type() {
                            Type::Tuple(tys) => tys
                                .into_iter()
                                .enumerate()
                                .map(|(i, t)| {
                                    let elem_val = self.push_inst(Instruction::GetElement {
                                        value: arg_val.clone(),
                                        ty,
                                        tuple_offset: i as u64,
                                    });
                                    (elem_val, t)
                                })
                                .collect(),
                            Type::Record(kvs) => {
                                enum SearchRes {
                                    Found(usize),
                                    Default,
                                }
                                if let Type::Record(param_types) = at.to_type() {
                                    let search_res = param_types.iter().map(|param| {
                                        kvs.iter()
                                            .enumerate()
                                            .find_map(|(i, kv)| {
                                                (param.key == kv.key)
                                                    .then_some((SearchRes::Found(i), kv))
                                            })
                                            .or(param
                                                .has_default
                                                .then_some((SearchRes::Default, param)))
                                    });
                                    search_res
                                        .map(|searchres| match searchres {
                                            Some((SearchRes::Found(i), kv)) => {
                                                log::trace!(
                                                    "non-default argument {} found",
                                                    kv.key
                                                );

                                                let field_val =
                                                    self.push_inst(Instruction::GetElement {
                                                        value: arg_val.clone(),
                                                        ty,
                                                        tuple_offset: i as u64,
                                                    });
                                                (field_val, kv.ty)
                                            }
                                            Some((SearchRes::Default, kv)) => {
                                                if let Value::Function(fid) = f.as_ref() {
                                                    let fid = FunctionId(*fid as u64);
                                                    log::trace!(
                                                        "searching default argument for {} in function {}",
                                                        kv.key,
                                                        self.program.functions[fid.0 as usize]
                                                            .label
                                                            .as_str()
                                                    );
                                                    let default_val = self
                                                        .get_default_arg_call(kv.key, fid)
                                                        .expect(
                                                            format!(
                                                                "default argument {} not found",
                                                                kv.key
                                                            )
                                                            .as_str(),
                                                        );
                                                    (default_val, kv.ty)
                                                } else {
                                                    log::error!("default arguments are currently not supported for closures");
                                                    (Arc::new(Value::None), Type::Failure.into_id())
                                                }
                                            }
                                            None => {
                                                panic!("parameter pack failed, possible type inference bug")
                                            }
                                        })
                                        .collect::<Vec<_>>()
                                } else {
                                    unreachable!(
                                        "parameter pack failed, possible type inference bug"
                                    )
                                }
                            }
                            _ => vec![(arg_val, ty)],
                        };
                        (ats, states)
                    } else {
                        (vec![(arg_val, ty)], states)
                    }
                } else {
                    self.eval_args(args)
                };

                let (res, state) = match f.as_ref() {
                    Value::Global(v) => match v.as_ref() {
                        Value::Function(idx) => self.emit_fncall(*idx as u64, atvvec.clone(), rt),
                        Value::Register(_) => (
                            self.push_inst(Instruction::CallCls(v.clone(), atvvec.clone(), rt)),
                            vec![],
                        ),
                        _ => {
                            panic!("calling non-function global value")
                        }
                    },
                    Value::Register(_) => (
                        self.push_inst(Instruction::CallCls(f.clone(), atvvec.clone(), rt)),
                        vec![],
                    ),
                    Value::Function(idx) => self.emit_fncall(*idx as u64, atvvec.clone(), rt),
                    Value::ExtFunction(label, _ty) => {
                        let (res, states) = if let (Some(res), states) =
                            self.make_intrinsics(*label, &atvvec)
                        {
                            (res, states)
                        } else {
                            (
                                self.push_inst(Instruction::Call(f.clone(), atvvec.clone(), rt)),
                                vec![],
                            )
                        };
                        (res, states)
                    }
                    Value::None => unreachable!(),
                    _ => todo!(),
                };
                (res, rt, [app_state, arg_states, state].concat())
            }

            Expr::Lambda(ids, _rett, body) => {
                let (atype, rt) = match ty.to_type() {
                    Type::Function { arg, ret } => (arg, ret),
                    _ => panic!(),
                };
                let binds = match ids.len() {
                    0 => vec![],
                    1 => {
                        let id = ids[0].clone();
                        let label = id.id;
                        vec![(label, atype, id.default_value)]
                    }
                    _ => {
                        let tys = atype
                            .to_type()
                            .get_as_tuple()
                            .expect("must be tuple or record type. type inference failed");
                        ids.iter()
                            .zip(tys.iter())
                            .map(|(id, ty)| {
                                let label = id.id;
                                (label, *ty, id.default_value)
                            })
                            .collect()
                    }
                };

                let name = self.consume_fnlabel();
                let (c_idx, f, _astates) =
                    self.do_in_child_ctx(name, &binds, vec![], |ctx, c_idx| {
                        let (res, _, states) = ctx.eval_expr(*body);

                        let child = ctx.program.functions.get_mut(c_idx.0 as usize).unwrap();
                        if let StateTreeSkeleton::FnCall(child) = &mut child.state_skeleton {
                            *child = states.clone().into_iter().map(Box::new).collect();
                        }

                        let push_sum = ctx.get_ctxdata().push_sum;
                        if push_sum > 0 {
                            ctx.get_current_basicblock().0.push((
                                Arc::new(mir::Value::None),
                                Instruction::PopStateOffset(push_sum),
                            ));
                        }
                        match (res.as_ref(), rt.to_type()) {
                            (_, Type::Primitive(PType::Unit)) => {
                                let _ =
                                    ctx.push_inst(Instruction::Return(Arc::new(Value::None), rt));
                            }
                            (Value::State(v), _) => {
                                let _ = ctx.push_inst(Instruction::ReturnFeed(v.clone(), rt));
                            }
                            (Value::Function(i), _) => {
                                let idx = ctx.push_inst(Instruction::Uinteger(*i as u64));
                                let cls = ctx.push_inst(Instruction::Closure(idx));
                                let _ = ctx.push_inst(Instruction::CloseUpValues(cls.clone(), rt));
                                let _ = ctx.push_inst(Instruction::Return(cls, rt));
                            }
                            (_, _) => {
                                if rt.to_type().contains_function() {
                                    let _ =
                                        ctx.push_inst(Instruction::CloseUpValues(res.clone(), rt));
                                    let _ = ctx.push_inst(Instruction::Return(res.clone(), rt));
                                } else {
                                    let _ = ctx.push_inst(Instruction::Return(res.clone(), rt));
                                }
                            }
                        };

                        let f = Arc::new(Value::Function(c_idx.0 as usize));
                        (f, rt, states)
                    });
                let child = self.program.functions.get_mut(c_idx.0 as usize).unwrap();
                let res = if child.upindexes.is_empty() {
                    f
                } else {
                    let idxcell = self.push_inst(Instruction::Uinteger(c_idx.0));
                    self.push_inst(Instruction::Closure(idxcell))
                };
                (res, ty, vec![])
            }
            Expr::Feed(id, expr) => {
                debug_assert!(self.get_ctxdata().next_state_offset.is_none());

                let res = self.push_inst(Instruction::GetState(ty));
                let skeleton = StateTreeSkeleton::Feed(mir::StateType::from(ty));
                self.add_bind((*id, res.clone()));
                self.get_ctxdata().next_state_offset = Some(skeleton.total_size());
                let (retv, _t, states) = self.eval_expr(*expr);

                (
                    Arc::new(Value::State(retv)),
                    ty,
                    [states, vec![skeleton]].concat(),
                )
            }
            Expr::Let(pat, body, then) => {
                if let Ok(tid) = TypedId::try_from(pat.clone()) {
                    self.fn_label = Some(tid.id);
                    log::trace!(
                        "mirgen let {}",
                        self.fn_label.map_or("".to_string(), |s| s.to_string())
                    )
                };
                let (bodyv, t, states) = self.eval_expr(*body);
                self.fn_label = None;

                let is_global = self.get_ctxdata().func_i.0 == 0;
                let is_function = matches!(bodyv.as_ref(), Value::Function(_));

                if !is_global && !is_function {
                    let ptr = self.push_inst(Instruction::Alloc(t));
                    self.push_inst(Instruction::Store(ptr.clone(), bodyv, t));
                    self.add_bind_pattern(pat, ptr, t, false);
                } else {
                    self.add_bind_pattern(pat, bodyv, t, is_global);
                }

                if let Some(then_e) = then {
                    let (r, t, s) = self.eval_expr(*then_e);
                    (r, t, [states, s].concat())
                } else {
                    (Arc::new(Value::None), unit!(), states)
                }
            }
            Expr::LetRec(id, body, then) => {
                let is_global = self.get_ctxdata().func_i.0 == 0;
                self.fn_label = Some(id.id);
                let nextfunid = self.program.functions.len();
                let t = self
                    .typeenv
                    .infer_type(e)
                    .expect("type inference failed, should be an error at type checker stage");
                let t = InferContext::substitute_type(t);

                let v = if is_global {
                    Arc::new(Value::Function(nextfunid))
                } else {
                    self.push_inst(Instruction::Alloc(t))
                };
                let bind = (id.id, v.clone());
                self.add_bind(bind);
                let (b, _bt, states) = self.eval_expr(*body);
                if !is_global {
                    let _ = self.push_inst(Instruction::Store(v.clone(), b.clone(), t));
                }
                if let Some(then_e) = then {
                    let (r, t, s) = self.eval_expr(*then_e);
                    (r, t, [states, s].concat())
                } else {
                    (Arc::new(Value::None), unit!(), states)
                }
            }
            Expr::Assign(assignee, body) => {
                let (src, ty, states) = self.eval_expr(*body);
                self.eval_assign(*assignee, src, ty);
                (Arc::new(Value::None), unit!(), states)
            }
            Expr::Then(body, then) => {
                let (_, _, states) = self.eval_expr(*body);
                match then {
                    Some(t) => self.eval_expr(*t),
                    None => (Arc::new(Value::None), unit!(), states),
                }
            }
            Expr::If(cond, then, else_) => {
                let (c, _, state_c) = self.eval_expr(*cond);
                let cond_bidx = self.get_ctxdata().current_bb;

                let _ = self.push_inst(Instruction::JmpIf(c, 0, 0, 0));
                let then_bidx = cond_bidx + 1;
                let (t, _, state_t) = self.eval_block(Some(*then));
                let else_bidx = self.get_ctxdata().current_bb + 1;
                let (e, _, state_e) = self.eval_block(*else_);
                self.add_new_basicblock();
                let res = self.push_inst(Instruction::Phi(t, e));
                let phi_bidx = self.get_ctxdata().current_bb;

                let jmp_if = self
                    .get_current_fn()
                    .body
                    .get_mut(cond_bidx)
                    .expect("no basic block found")
                    .0
                    .last_mut()
                    .expect("the block contains no inst?");
                match &mut jmp_if.1 {
                    Instruction::JmpIf(_, then_dst, else_dst, phi_dst) => {
                        *then_dst = then_bidx as _;
                        *else_dst = else_bidx as _;
                        *phi_dst = phi_bidx as _;
                    }
                    _ => panic!("the last block should be Jmp"),
                }

                (res, ty, [state_c, state_t, state_e].concat())
            }
            Expr::Bracket(_) | Expr::Escape(_) | Expr::MacroExpand(_, _) => {
                unreachable!("Macro code should be expanded before mirgen")
            }
            Expr::BinOp(_, _, _) | Expr::UniOp(_, _) | Expr::Paren(_) => {
                unreachable!(
                    "syntactic sugar for infix & unary operators is removed before this stage"
                )
            }
            Expr::Error => {
                self.push_inst(Instruction::Error);
                (Arc::new(Value::None), unit!(), vec![])
            }
        }
    }
}

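/// Returns true when the top-level expression has a `Code` type, i.e. the
/// whole program is a top-level macro rather than an ordinary expression.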
fn is_toplevel_macro(typeenv: &mut InferContext, top_ast: ExprNodeId) -> bool {
    typeenv.infer_type(top_ast).is_ok_and(|t| {
        log::trace!("toplevel type: {}", t.to_type());
        matches!(t.to_type(), Type::Code(_))
    })
}

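/// Runs the front-end passes that precede MIR generation: pronoun
/// conversion, recursion checking, and type inference. Returns the converted
/// expression, the inference context, and all accumulated errors.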
pub fn typecheck(
    root_expr_id: ExprNodeId,
    builtin_types: &[(Symbol, TypeNodeId)],
    file_path: Option<PathBuf>,
) -> (ExprNodeId, InferContext, Vec<Box<dyn ReportableError>>) {
    let (expr, convert_errs) =
        convert_pronoun::convert_pronoun(root_expr_id, file_path.clone().unwrap_or_default());
    let expr = recursecheck::convert_recurse(expr, file_path.clone().unwrap_or_default());
    let infer_ctx = infer_root(expr, builtin_types, file_path.clone().unwrap_or_default());
    let errors = infer_ctx
        .errors
        .iter()
        .cloned()
        .map(|e| -> Box<dyn ReportableError> { Box::new(e) })
        .chain(
            convert_errs
                .into_iter()
                .map(|e| -> Box<dyn ReportableError> { Box::new(e) }),
        )
        .collect::<Vec<_>>();
    (expr, infer_ctx, errors)
}

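/// Compiles a typed AST into MIR: the program is wrapped into a staged
/// expression, type-checked, macro-expanded, wrapped in the global context,
/// and finally lowered by `Context::eval_expr`. Returns the resulting `Mir`
/// or the collected front-end errors.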
pub fn compile(
    root_expr_id: ExprNodeId,
    builtin_types: &[(Symbol, TypeNodeId)],
    macro_env: &[Box<dyn MacroFunction>],
    file_path: Option<PathBuf>,
) -> Result<Mir, Vec<Box<dyn ReportableError>>> {
    let expr = root_expr_id.wrap_to_staged_expr();
    let (expr, mut infer_ctx, errors) = typecheck(expr, builtin_types, file_path.clone());
    if errors.is_empty() {
        let top_type = infer_ctx.infer_type(expr).unwrap();
        let expr = interpreter::expand_macro(expr, top_type, macro_env);

        log::trace!(
            "ast after macro expansion: {:?}",
            expr.to_expr().simple_print()
        );
        let expr = parser::add_global_context(expr, file_path.clone().unwrap_or_default());
        let mut ctx = Context::new(infer_ctx, file_path.clone());
        let _res = ctx.eval_expr(expr);
        ctx.program.file_path = file_path.clone();
        Ok(ctx.program.clone())
    } else {
        Err(errors)
    }
}