runmat_ignition/compiler.rs
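//! Compiler from HIR to `Instr` bytecode for `runmat_ignition`: lowers programs
//! into flat instruction sequences, handling control flow, indexing/assignment
//! forms, import resolution and user-defined functions.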
use crate::functions::UserFunction;
use crate::instr::Instr;
use once_cell::sync::OnceCell;
use runmat_builtins::Type;
use runmat_hir::{HirExpr, HirExprKind, HirProgram, HirStmt};
use std::collections::HashMap;

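/// Placeholder jump sites recorded for the innermost loop being compiled;
/// `break`/`continue` emit `Jump(usize::MAX)` entries that are patched once the
/// loop's exit and continue targets are known.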
pub struct LoopLabels {
    pub break_jumps: Vec<usize>,
    pub continue_jumps: Vec<usize>,
}

pub struct Compiler {
    pub instructions: Vec<Instr>,
    pub var_count: usize,
    pub loop_stack: Vec<LoopLabels>,
    pub functions: HashMap<String, UserFunction>,
    pub imports: Vec<(Vec<String>, bool)>,
    pub var_types: Vec<Type>,
}

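/// Components of a recognized "stochastic evolution" loop that can be fused into
/// a single `Instr::StochasticEvolution`. The detector looks for a body of this
/// shape (illustrative MATLAB-style sketch; names are arbitrary):
///
/// ```text
/// for k = 1:n
///     z = randn();
///     s = s .* exp(mu + sigma .* z);   % state = s, drift = mu, scale = sigma
/// end
/// ```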
struct StochasticEvolutionPlan<'a> {
    state: runmat_hir::VarId,
    drift: &'a HirExpr,
    scale: &'a HirExpr,
    steps: &'a HirExpr,
}

fn expr_is_one(expr: &HirExpr) -> bool {
    parse_number(expr)
        .map(|v| (v - 1.0).abs() < 1e-9)
        .unwrap_or(false)
}

fn parse_number(expr: &HirExpr) -> Option<f64> {
    if let runmat_hir::HirExprKind::Number(raw) = &expr.kind {
        raw.parse::<f64>().ok()
    } else {
        None
    }
}

fn stochastic_evolution_disabled() -> bool {
    static DISABLE: OnceCell<bool> = OnceCell::new();
    *DISABLE.get_or_init(|| {
        std::env::var("RUNMAT_DISABLE_STOCHASTIC_EVOLUTION")
            .map(|v| matches!(v.trim().to_ascii_lowercase().as_str(), "1" | "true" | "yes"))
            .unwrap_or(false)
    })
}

fn is_randn_call(expr: &HirExpr) -> bool {
    match &expr.kind {
        runmat_hir::HirExprKind::FuncCall(name, _) => name.eq_ignore_ascii_case("randn"),
        _ => false,
    }
}

fn matches_var(expr: &HirExpr, var: runmat_hir::VarId) -> bool {
    matches!(expr.kind, runmat_hir::HirExprKind::Var(id) if id == var)
}

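/// Matches the multiplicative update `state .* exp(drift + z .* scale)` (with the
/// `ElemMul`/`Add` operands accepted in either order) and returns `(drift, scale)`.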
fn extract_drift_and_scale(
    expr: &HirExpr,
    state_var: runmat_hir::VarId,
    z_var: runmat_hir::VarId,
) -> Option<(&HirExpr, &HirExpr)> {
    use runmat_hir::HirExprKind as EK;
    use runmat_parser::BinOp;

    let (maybe_state_side, maybe_exp_side) = match &expr.kind {
        EK::Binary(lhs, BinOp::ElemMul, rhs) => (lhs.as_ref(), rhs.as_ref()),
        _ => return None,
    };

    let exp_side = if matches_var(maybe_state_side, state_var) && is_exp_call(maybe_exp_side) {
        maybe_exp_side
    } else if matches_var(maybe_exp_side, state_var) && is_exp_call(maybe_state_side) {
        maybe_state_side
    } else {
        return None;
    };

    let exp_arg = match &exp_side.kind {
        EK::FuncCall(name, args) if name.eq_ignore_ascii_case("exp") && args.len() == 1 => &args[0],
        _ => return None,
    };

    match &exp_arg.kind {
        EK::Binary(lhs, BinOp::Add, rhs) => {
            if let Some(scale_expr) = extract_scale_term(lhs, z_var) {
                Some((rhs.as_ref(), scale_expr))
            } else if let Some(scale_expr) = extract_scale_term(rhs, z_var) {
                Some((lhs.as_ref(), scale_expr))
            } else {
                None
            }
        }
        _ => None,
    }
}

fn extract_scale_term(expr: &HirExpr, z_var: runmat_hir::VarId) -> Option<&HirExpr> {
    use runmat_hir::HirExprKind as EK;
    use runmat_parser::BinOp;

    match &expr.kind {
        EK::Binary(lhs, BinOp::ElemMul, rhs) => {
            if matches_var(lhs, z_var) {
                Some(rhs.as_ref())
            } else if matches_var(rhs, z_var) {
                Some(lhs.as_ref())
            } else {
                None
            }
        }
        _ => None,
    }
}

fn is_exp_call(expr: &HirExpr) -> bool {
    matches!(
        &expr.kind,
        runmat_hir::HirExprKind::FuncCall(name, _) if name.eq_ignore_ascii_case("exp")
    )
}

impl Compiler {
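    /// Emit the fused lowering: push the current state, drift, scale and step
    /// count, then a single `StochasticEvolution` instruction whose result is
    /// stored back into the state variable.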
    fn compile_stochastic_evolution(
        &mut self,
        plan: StochasticEvolutionPlan<'_>,
    ) -> Result<(), String> {
        self.emit(Instr::LoadVar(plan.state.0));
        self.compile_expr(plan.drift)?;
        self.compile_expr(plan.scale)?;
        self.compile_expr(plan.steps)?;
        self.emit(Instr::StochasticEvolution);
        self.emit(Instr::StoreVar(plan.state.0));
        Ok(())
    }

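    /// Recognize the loop shape documented on `StochasticEvolutionPlan`: a `1:n`
    /// range (unit start and step) whose body is exactly `z = randn(...)` followed
    /// by the multiplicative state update. Returns `None` when the pattern does not
    /// match or the rewrite is disabled via `RUNMAT_DISABLE_STOCHASTIC_EVOLUTION`.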
    fn detect_stochastic_evolution<'a>(
        &self,
        expr: &'a HirExpr,
        body: &'a [HirStmt],
    ) -> Option<StochasticEvolutionPlan<'a>> {
        if stochastic_evolution_disabled() {
            return None;
        }
        use runmat_hir::HirExprKind as EK;
        match &expr.kind {
            EK::Range(start, step, end) => {
                if !expr_is_one(start) {
                    return None;
                }
                if let Some(step_expr) = step {
                    if !expr_is_one(step_expr) {
                        return None;
                    }
                }
                if body.len() != 2 {
                    return None;
                }
                let (z_var, randn_expr) = match &body[0] {
                    HirStmt::Assign(var, expr, _) => (*var, expr),
                    _ => return None,
                };
                if !is_randn_call(randn_expr) {
                    return None;
                }
                let (state_var, update_expr) = match &body[1] {
                    HirStmt::Assign(var, expr, _) => (*var, expr),
                    _ => return None,
                };
                let (drift, scale) = extract_drift_and_scale(update_expr, state_var, z_var)?;
                Some(StochasticEvolutionPlan {
                    state: state_var,
                    drift,
                    scale,
                    steps: end,
                })
            }
            _ => None,
        }
    }
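    /// Map a textual access attribute to `runmat_builtins::Access`; anything other
    /// than `private` (case-insensitive) is treated as `Public`.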
    fn attr_access_from_str(s: &str) -> runmat_builtins::Access {
        match s.to_ascii_lowercase().as_str() {
            "private" => runmat_builtins::Access::Private,
            _ => runmat_builtins::Access::Public,
        }
    }
    fn parse_prop_attrs(attrs: &Vec<runmat_parser::Attr>) -> (bool, bool, String, String) {
        // Defaults
        let mut is_static = false;
        let mut is_dependent = false;
        let mut get_acc = runmat_builtins::Access::Public;
        let mut set_acc = runmat_builtins::Access::Public;
        for a in attrs {
            if a.name.eq_ignore_ascii_case("Static") {
                is_static = true;
            }
            if a.name.eq_ignore_ascii_case("Dependent") {
                is_dependent = true;
            }
            if a.name.eq_ignore_ascii_case("Access") {
                if let Some(v) = &a.value {
                    let acc = Self::attr_access_from_str(v.trim_matches('\'').trim());
                    get_acc = acc.clone();
                    set_acc = acc;
                }
            }
            if a.name.eq_ignore_ascii_case("GetAccess") {
                if let Some(v) = &a.value {
                    get_acc = Self::attr_access_from_str(v.trim_matches('\'').trim());
                }
            }
            if a.name.eq_ignore_ascii_case("SetAccess") {
                if let Some(v) = &a.value {
                    set_acc = Self::attr_access_from_str(v.trim_matches('\'').trim());
                }
            }
        }
        let gs = match get_acc {
            runmat_builtins::Access::Private => "private".to_string(),
            _ => "public".to_string(),
        };
        let ss = match set_acc {
            runmat_builtins::Access::Private => "private".to_string(),
            _ => "public".to_string(),
        };
        (is_static, is_dependent, gs, ss)
    }
    fn parse_method_attrs(attrs: &Vec<runmat_parser::Attr>) -> (bool, String) {
        let mut is_static = false;
        let mut access = runmat_builtins::Access::Public;
        for a in attrs {
            if a.name.eq_ignore_ascii_case("Static") {
                is_static = true;
            }
            if a.name.eq_ignore_ascii_case("Access") {
                if let Some(v) = &a.value {
                    access = Self::attr_access_from_str(v.trim_matches('\'').trim());
                }
            }
        }
        let acc_str = match access {
            runmat_builtins::Access::Private => "private".to_string(),
            _ => "public".to_string(),
        };
        (is_static, acc_str)
    }
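    /// True if `expr` contains the `end` keyword anywhere in its index tree,
    /// e.g. `A(end - 1)` or `A(1:end)`.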
    fn expr_contains_end(expr: &runmat_hir::HirExpr) -> bool {
        use runmat_hir::HirExprKind as K;
        match &expr.kind {
            K::End => true,
            K::Unary(_, e) => Self::expr_contains_end(e),
            K::Binary(a, _, b) => Self::expr_contains_end(a) || Self::expr_contains_end(b),
            K::Tensor(rows) | K::Cell(rows) => rows
                .iter()
                .flat_map(|r| r.iter())
                .any(Self::expr_contains_end),
            K::Index(base, idxs) | K::IndexCell(base, idxs) => {
                if Self::expr_contains_end(base) {
                    return true;
                }
                idxs.iter().any(Self::expr_contains_end)
            }
            _ => false,
        }
    }
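    /// Collect variables that occur free in `expr` (used but not in `bound`),
    /// preserving first-occurrence order; e.g. in `@(x) x + a` the parameter `x` is
    /// bound while `a` is free and must be captured.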
    #[allow(clippy::only_used_in_recursion)]
    fn collect_free_vars(
        &self,
        expr: &runmat_hir::HirExpr,
        bound: &std::collections::HashSet<runmat_hir::VarId>,
        seen: &mut std::collections::HashSet<runmat_hir::VarId>,
        out: &mut Vec<runmat_hir::VarId>,
    ) {
        use runmat_hir::HirExprKind as K;
        match &expr.kind {
            K::Var(id) => {
                if !bound.contains(id) && !seen.contains(id) {
                    seen.insert(*id);
                    out.push(*id);
                }
            }
            K::Unary(_, e) => self.collect_free_vars(e, bound, seen, out),
            K::Binary(a, _, b) => {
                self.collect_free_vars(a, bound, seen, out);
                self.collect_free_vars(b, bound, seen, out);
            }
            K::Tensor(rows) | K::Cell(rows) => {
                for row in rows {
                    for e in row {
                        self.collect_free_vars(e, bound, seen, out);
                    }
                }
            }
            K::Index(base, idxs) | K::IndexCell(base, idxs) => {
                self.collect_free_vars(base, bound, seen, out);
                for i in idxs {
                    self.collect_free_vars(i, bound, seen, out);
                }
            }
            K::Range(s, st, e) => {
                self.collect_free_vars(s, bound, seen, out);
                if let Some(st) = st {
                    self.collect_free_vars(st, bound, seen, out);
                }
                self.collect_free_vars(e, bound, seen, out);
            }
            K::FuncCall(_, args) | K::MethodCall(_, _, args) => {
                for a in args {
                    self.collect_free_vars(a, bound, seen, out);
                }
            }
            K::Member(base, _) => self.collect_free_vars(base, bound, seen, out),
            K::MemberDynamic(base, name) => {
                self.collect_free_vars(base, bound, seen, out);
                self.collect_free_vars(name, bound, seen, out);
            }
            K::AnonFunc { params, body } => {
                let mut new_bound = bound.clone();
                for p in params {
                    new_bound.insert(*p);
                }
                self.collect_free_vars(body, &new_bound, seen, out);
            }
            _ => {}
        }
    }
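    /// Build a compiler for `prog`, sizing the variable table by scanning every
    /// statement and expression for the highest `VarId` in use.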
    pub fn new(prog: &HirProgram) -> Self {
        let mut max_var = 0;
        fn visit_expr(expr: &HirExpr, max: &mut usize) {
            match &expr.kind {
                HirExprKind::Var(id) => {
                    if id.0 + 1 > *max {
                        *max = id.0 + 1;
                    }
                }
                HirExprKind::Unary(_, e) => visit_expr(e, max),
                HirExprKind::Binary(left, _, right) => {
                    visit_expr(left, max);
                    visit_expr(right, max);
                }
                HirExprKind::Tensor(rows) | HirExprKind::Cell(rows) => {
                    for row in rows {
                        for expr in row {
                            visit_expr(expr, max);
                        }
                    }
                }
                HirExprKind::Index(expr, indices) | HirExprKind::IndexCell(expr, indices) => {
                    visit_expr(expr, max);
                    for idx in indices {
                        visit_expr(idx, max);
                    }
                }
                HirExprKind::Range(start, step, end) => {
                    visit_expr(start, max);
                    if let Some(step) = step {
                        visit_expr(step, max);
                    }
                    visit_expr(end, max);
                }
                HirExprKind::FuncCall(_, args) | HirExprKind::MethodCall(_, _, args) => {
                    for arg in args {
                        visit_expr(arg, max);
                    }
                }
                HirExprKind::Member(base, _) => visit_expr(base, max),
                HirExprKind::MemberDynamic(base, name) => {
                    visit_expr(base, max);
                    visit_expr(name, max);
                }
                HirExprKind::AnonFunc { body, .. } => visit_expr(body, max),
                HirExprKind::Number(_)
                | HirExprKind::String(_)
                | HirExprKind::Constant(_)
                | HirExprKind::Colon
                | HirExprKind::End
                | HirExprKind::FuncHandle(_)
                | HirExprKind::MetaClass(_) => {}
            }
        }

        fn visit_stmts(stmts: &[HirStmt], max: &mut usize) {
            for s in stmts {
                match s {
                    HirStmt::Assign(id, expr, _) => {
                        if id.0 + 1 > *max {
                            *max = id.0 + 1;
                        }
                        visit_expr(expr, max);
                    }
                    HirStmt::ExprStmt(expr, _) => visit_expr(expr, max),
                    HirStmt::Return => {}
                    HirStmt::If {
                        cond,
                        then_body,
                        elseif_blocks,
                        else_body,
                    } => {
                        visit_expr(cond, max);
                        visit_stmts(then_body, max);
                        for (cond, body) in elseif_blocks {
                            visit_expr(cond, max);
                            visit_stmts(body, max);
                        }
                        if let Some(body) = else_body {
                            visit_stmts(body, max);
                        }
                    }
                    HirStmt::While { cond, body } => {
                        visit_expr(cond, max);
                        visit_stmts(body, max);
                    }
                    HirStmt::For { var, expr, body } => {
                        if var.0 + 1 > *max {
                            *max = var.0 + 1;
                        }
                        visit_expr(expr, max);
                        visit_stmts(body, max);
                    }
                    HirStmt::Switch {
                        expr,
                        cases,
                        otherwise,
                    } => {
                        visit_expr(expr, max);
                        for (c, b) in cases {
                            visit_expr(c, max);
                            visit_stmts(b, max);
                        }
                        if let Some(b) = otherwise {
                            visit_stmts(b, max);
                        }
                    }
                    HirStmt::TryCatch {
                        try_body,
                        catch_var,
                        catch_body,
                    } => {
                        if let Some(v) = catch_var {
                            if v.0 + 1 > *max {
                                *max = v.0 + 1;
                            }
                        }
                        visit_stmts(try_body, max);
                        visit_stmts(catch_body, max);
                    }
                    HirStmt::Global(vars) | HirStmt::Persistent(vars) => {
                        for (v, _name) in vars {
                            if v.0 + 1 > *max {
                                *max = v.0 + 1;
                            }
                        }
                    }
                    HirStmt::AssignLValue(_, expr, _) => visit_expr(expr, max),
                    HirStmt::MultiAssign(vars, expr, _) => {
                        for v in vars.iter().flatten() {
                            if v.0 + 1 > *max {
                                *max = v.0 + 1;
                            }
                        }
                        visit_expr(expr, max);
                    }
                    HirStmt::Function { .. }
                    | HirStmt::ClassDef { .. }
                    | HirStmt::Import { .. }
                    | HirStmt::Break
                    | HirStmt::Continue => {}
                }
            }
        }

        visit_stmts(&prog.body, &mut max_var);
        let mut var_types = prog.var_types.clone();
        if var_types.len() < max_var {
            var_types.resize(max_var, Type::Unknown);
        }
        Self {
            instructions: Vec::new(),
            var_count: max_var,
            loop_stack: Vec::new(),
            functions: HashMap::new(),
            imports: Vec::new(),
            var_types,
        }
    }

    fn ensure_var(&mut self, id: usize) {
        if id + 1 > self.var_count {
            self.var_count = id + 1;
        }
        while self.var_types.len() <= id {
            self.var_types.push(Type::Unknown);
        }
    }

    fn alloc_temp(&mut self) -> usize {
        let id = self.var_count;
        self.var_count += 1;
        if self.var_types.len() <= id {
            self.var_types.push(Type::Unknown);
        }
        id
    }

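    /// Append an instruction and return its program counter. Forward jumps are
    /// emitted with a `usize::MAX` placeholder and rewritten via `patch` once the
    /// target is known, e.g.:
    ///
    /// ```text
    /// let j = self.emit(Instr::JumpIfFalse(usize::MAX));
    /// // ... compile the body ...
    /// self.patch(j, Instr::JumpIfFalse(self.instructions.len()));
    /// ```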
    pub fn emit(&mut self, instr: Instr) -> usize {
        match &instr {
            Instr::LoadVar(id) | Instr::StoreVar(id) => self.ensure_var(*id),
            _ => {}
        }
        let pc = self.instructions.len();
        self.instructions.push(instr);
        pc
    }

    pub fn patch(&mut self, idx: usize, instr: Instr) {
        self.instructions[idx] = instr;
    }

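    /// Compile a whole program. A first pass registers imports and named
    /// `global`/`persistent` declarations so later statements resolve against them;
    /// a second pass compiles the remaining statements in source order.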
    pub fn compile_program(&mut self, prog: &HirProgram) -> Result<(), String> {
        // Validate imports early for duplicate/specific-name ambiguities
        runmat_hir::validate_imports(prog)?;
        // Validate class definitions for attribute correctness and name conflicts
        runmat_hir::validate_classdefs(prog)?;
        // Pre-collect imports (both wildcard and specific) for name resolution
        for stmt in &prog.body {
            if let HirStmt::Import { path, wildcard } = stmt {
                self.imports.push((path.clone(), *wildcard));
                self.emit(Instr::RegisterImport {
                    path: path.clone(),
                    wildcard: *wildcard,
                });
            }
            if let HirStmt::Global(vars) = stmt {
                let ids: Vec<usize> = vars.iter().map(|(v, _n)| v.0).collect();
                let names: Vec<String> = vars.iter().map(|(_v, n)| n.clone()).collect();
                self.emit(Instr::DeclareGlobalNamed(ids, names));
            }
            if let HirStmt::Persistent(vars) = stmt {
                let ids: Vec<usize> = vars.iter().map(|(v, _n)| v.0).collect();
                let names: Vec<String> = vars.iter().map(|(_v, n)| n.clone()).collect();
                self.emit(Instr::DeclarePersistentNamed(ids, names));
            }
        }
        for stmt in &prog.body {
            if !matches!(
                stmt,
                HirStmt::Import { .. } | HirStmt::Global(_) | HirStmt::Persistent(_)
            ) {
                self.compile_stmt(stmt)?;
            }
        }
        Ok(())
    }

    pub fn compile_stmt(&mut self, stmt: &HirStmt) -> Result<(), String> {
        match stmt {
            HirStmt::ExprStmt(expr, _) => {
                self.compile_expr(expr)?;
                self.emit(Instr::Pop);
            }
            HirStmt::Assign(id, expr, _) => {
                self.compile_expr(expr)?;
                self.emit(Instr::StoreVar(id.0));
            }
            HirStmt::If {
                cond,
                then_body,
                elseif_blocks,
                else_body,
            } => {
                self.compile_expr(cond)?;
                let mut last_jump = self.emit(Instr::JumpIfFalse(usize::MAX));
                for s in then_body {
                    self.compile_stmt(s)?;
                }
                let mut end_jumps = Vec::new();
                end_jumps.push(self.emit(Instr::Jump(usize::MAX)));
                for (c, b) in elseif_blocks {
                    self.patch(last_jump, Instr::JumpIfFalse(self.instructions.len()));
                    self.compile_expr(c)?;
                    last_jump = self.emit(Instr::JumpIfFalse(usize::MAX));
                    for s in b {
                        self.compile_stmt(s)?;
                    }
                    end_jumps.push(self.emit(Instr::Jump(usize::MAX)));
                }
                self.patch(last_jump, Instr::JumpIfFalse(self.instructions.len()));
                if let Some(body) = else_body {
                    for s in body {
                        self.compile_stmt(s)?;
                    }
                }
                let end = self.instructions.len();
                for j in end_jumps {
                    self.patch(j, Instr::Jump(end));
                }
            }
            HirStmt::While { cond, body } => {
                let start = self.instructions.len();
                self.compile_expr(cond)?;
                let jump_end = self.emit(Instr::JumpIfFalse(usize::MAX));
                let labels = LoopLabels {
                    break_jumps: Vec::new(),
                    continue_jumps: Vec::new(),
                };
                self.loop_stack.push(labels);
                for s in body {
                    self.compile_stmt(s)?;
                }
                let labels = self.loop_stack.pop().unwrap();
                for j in labels.continue_jumps {
                    self.patch(j, Instr::Jump(start));
                }
                self.emit(Instr::Jump(start));
                let end = self.instructions.len();
                self.patch(jump_end, Instr::JumpIfFalse(end));
                for j in labels.break_jumps {
                    self.patch(j, Instr::Jump(end));
                }
            }
            HirStmt::For { var, expr, body } => {
                if let Some(plan) = self.detect_stochastic_evolution(expr, body) {
                    self.compile_stochastic_evolution(plan)?;
                    return Ok(());
                }
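                // Lowering sketch for `for var = start:step:end` (illustrative):
                //   var = start; end_t = end; step_t = step (or 1)
                //   loop: if step_t == 0 -> exit
                //         if step_t >= 0 { if !(var <= end_t) -> exit }
                //         else           { if !(var >= end_t) -> exit }
                //         <body>; var += step_t; goto loop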
                if let HirExprKind::Range(start, step, end) = &expr.kind {
                    // Initialize loop variable, end, and step
                    self.compile_expr(start)?;
                    self.emit(Instr::StoreVar(var.0));
                    self.compile_expr(end)?;
                    let end_var = self.alloc_temp();
                    self.emit(Instr::StoreVar(end_var));
                    let step_var = self.alloc_temp();
                    if let Some(step_expr) = step {
                        self.compile_expr(step_expr)?;
                        self.emit(Instr::StoreVar(step_var));
                    } else {
                        self.emit(Instr::LoadConst(1.0));
                        self.emit(Instr::StoreVar(step_var));
                    }

                    let loop_start = self.instructions.len();

                    // If step == 0 -> terminate loop immediately
                    self.emit(Instr::LoadVar(step_var));
                    self.emit(Instr::LoadConst(0.0));
                    self.emit(Instr::Equal);
                    let j_step_zero_skip = self.emit(Instr::JumpIfFalse(usize::MAX));
                    let jump_end = self.emit(Instr::Jump(usize::MAX));
                    let after_step_zero_check = self.instructions.len();
                    self.patch(j_step_zero_skip, Instr::JumpIfFalse(after_step_zero_check));

                    // Determine condition based on sign(step)
                    // if step >= 0: cond = var <= end else cond = var >= end
                    self.emit(Instr::LoadVar(step_var));
                    self.emit(Instr::LoadConst(0.0));
                    self.emit(Instr::GreaterEqual);
                    let j_neg_branch = self.emit(Instr::JumpIfFalse(usize::MAX));
                    // positive step: var <= end
                    self.emit(Instr::LoadVar(var.0));
                    self.emit(Instr::LoadVar(end_var));
                    self.emit(Instr::LessEqual);
                    let j_exit_pos = self.emit(Instr::JumpIfFalse(usize::MAX));
                    let j_cond_done = self.emit(Instr::Jump(usize::MAX));
                    let neg_branch = self.instructions.len();
                    self.patch(j_neg_branch, Instr::JumpIfFalse(neg_branch));
                    // negative step: var >= end
                    self.emit(Instr::LoadVar(var.0));
                    self.emit(Instr::LoadVar(end_var));
                    self.emit(Instr::GreaterEqual);
                    let j_exit_neg = self.emit(Instr::JumpIfFalse(usize::MAX));
                    let cond_done = self.instructions.len();
                    self.patch(j_cond_done, Instr::Jump(cond_done));

                    // At this point, loop condition is satisfied; execute body
                    self.loop_stack.push(LoopLabels {
                        break_jumps: Vec::new(),
                        continue_jumps: Vec::new(),
                    });
                    for s in body {
                        self.compile_stmt(s)?;
                    }
                    let labels = self.loop_stack.pop().unwrap();
                    for j in labels.continue_jumps {
                        self.patch(j, Instr::Jump(self.instructions.len()));
                    }
                    // increment: var = var + step
                    self.emit(Instr::LoadVar(var.0));
                    self.emit(Instr::LoadVar(step_var));
                    self.emit(Instr::Add);
                    self.emit(Instr::StoreVar(var.0));
                    self.emit(Instr::Jump(loop_start));

                    // Exit jump targets
                    let end_pc = self.instructions.len();
                    self.patch(jump_end, Instr::Jump(end_pc));
                    self.patch(j_exit_pos, Instr::JumpIfFalse(end_pc));
                    self.patch(j_exit_neg, Instr::JumpIfFalse(end_pc));
                    for j in labels.break_jumps {
                        self.patch(j, Instr::Jump(end_pc));
                    }
                } else {
                    return Err("for loop expects range".into());
                }
            }
            HirStmt::Break => {
                if let Some(labels) = self.loop_stack.last_mut() {
                    let idx = self.instructions.len();
                    self.instructions.push(Instr::Jump(usize::MAX));
                    labels.break_jumps.push(idx);
                } else {
                    return Err("break outside loop".into());
                }
            }
            HirStmt::Continue => {
                if let Some(labels) = self.loop_stack.last_mut() {
                    let idx = self.instructions.len();
                    self.instructions.push(Instr::Jump(usize::MAX));
                    labels.continue_jumps.push(idx);
                } else {
                    return Err("continue outside loop".into());
                }
            }
            HirStmt::Return => {
                self.emit(Instr::Return);
            }
            HirStmt::Function {
                name,
                params,
                outputs,
                body,
                has_varargin,
                has_varargout,
            } => {
                let mut max_local_var = 0;
                fn visit_expr_for_vars(expr: &HirExpr, max: &mut usize) {
                    match &expr.kind {
                        HirExprKind::Var(id) => {
                            if id.0 + 1 > *max {
                                *max = id.0 + 1;
                            }
                        }
                        HirExprKind::Unary(_, e) => visit_expr_for_vars(e, max),
                        HirExprKind::Binary(a, _, b) => {
                            visit_expr_for_vars(a, max);
                            visit_expr_for_vars(b, max);
                        }
                        HirExprKind::Tensor(rows) | HirExprKind::Cell(rows) => {
                            for row in rows {
                                for elem in row {
                                    visit_expr_for_vars(elem, max);
                                }
                            }
                        }
                        HirExprKind::Index(base, indices)
                        | HirExprKind::IndexCell(base, indices) => {
                            visit_expr_for_vars(base, max);
                            for idx in indices {
                                visit_expr_for_vars(idx, max);
                            }
                        }
                        HirExprKind::Range(start, step, end) => {
                            visit_expr_for_vars(start, max);
                            if let Some(step) = step {
                                visit_expr_for_vars(step, max);
                            }
                            visit_expr_for_vars(end, max);
                        }
                        HirExprKind::FuncCall(_, args) | HirExprKind::MethodCall(_, _, args) => {
                            for arg in args {
                                visit_expr_for_vars(arg, max);
                            }
                        }
                        _ => {}
                    }
                }
                fn visit_stmt_for_vars(stmt: &HirStmt, max: &mut usize) {
                    match stmt {
                        HirStmt::ExprStmt(expr, _) => visit_expr_for_vars(expr, max),
                        HirStmt::Assign(id, expr, _) => {
                            if id.0 + 1 > *max {
                                *max = id.0 + 1;
                            }
                            visit_expr_for_vars(expr, max);
                        }
                        HirStmt::If {
                            cond,
                            then_body,
                            elseif_blocks,
                            else_body,
                        } => {
                            visit_expr_for_vars(cond, max);
                            for stmt in then_body {
                                visit_stmt_for_vars(stmt, max);
                            }
                            for (cond, body) in elseif_blocks {
                                visit_expr_for_vars(cond, max);
                                for stmt in body {
                                    visit_stmt_for_vars(stmt, max);
                                }
                            }
                            if let Some(body) = else_body {
                                for stmt in body {
                                    visit_stmt_for_vars(stmt, max);
                                }
                            }
                        }
                        HirStmt::While { cond, body } => {
                            visit_expr_for_vars(cond, max);
                            for stmt in body {
                                visit_stmt_for_vars(stmt, max);
                            }
                        }
                        HirStmt::For { var, expr, body } => {
                            if var.0 + 1 > *max {
                                *max = var.0 + 1;
                            }
                            visit_expr_for_vars(expr, max);
                            for stmt in body {
                                visit_stmt_for_vars(stmt, max);
                            }
                        }
                        HirStmt::Break | HirStmt::Continue | HirStmt::Return => {}
                        HirStmt::Switch {
                            expr,
                            cases,
                            otherwise,
                        } => {
                            visit_expr_for_vars(expr, max);
                            for (c, b) in cases {
                                visit_expr_for_vars(c, max);
                                for s in b {
                                    visit_stmt_for_vars(s, max);
                                }
                            }
                            if let Some(b) = otherwise {
                                for s in b {
                                    visit_stmt_for_vars(s, max);
                                }
                            }
                        }
                        HirStmt::TryCatch {
                            try_body,
                            catch_var,
                            catch_body,
                        } => {
                            if let Some(v) = catch_var {
                                if v.0 + 1 > *max {
                                    *max = v.0 + 1;
                                }
                            }
                            for s in try_body {
                                visit_stmt_for_vars(s, max);
                            }
                            for s in catch_body {
                                visit_stmt_for_vars(s, max);
                            }
                        }
                        HirStmt::Global(vars) | HirStmt::Persistent(vars) => {
                            for (v, _name) in vars {
                                if v.0 + 1 > *max {
                                    *max = v.0 + 1;
                                }
                            }
                        }
                        HirStmt::AssignLValue(_, expr, _) => visit_expr_for_vars(expr, max),
                        HirStmt::MultiAssign(vars, expr, _) => {
                            for v in vars.iter().flatten() {
                                if v.0 + 1 > *max {
                                    *max = v.0 + 1;
                                }
                            }
                            visit_expr_for_vars(expr, max);
                        }
                        HirStmt::Function { .. }
                        | HirStmt::ClassDef { .. }
                        | HirStmt::Import { .. } => {}
                    }
                }
                for stmt in body {
                    visit_stmt_for_vars(stmt, &mut max_local_var);
                }
                let var_map =
                    runmat_hir::remapping::create_complete_function_var_map(params, outputs, body);
                let local_var_count = var_map.len();
                if local_var_count > max_local_var {
                    max_local_var = local_var_count;
                }
                let mut func_var_types = vec![Type::Unknown; local_var_count];
                for (orig, local) in &var_map {
                    if let Some(ty) = self.var_types.get(orig.0) {
                        if let Some(slot) = func_var_types.get_mut(local.0) {
                            *slot = ty.clone();
                        }
                    }
                }
                let user_func = UserFunction {
                    name: name.clone(),
                    params: params.clone(),
                    outputs: outputs.clone(),
                    body: body.clone(),
                    local_var_count: max_local_var,
                    has_varargin: *has_varargin,
                    has_varargout: *has_varargout,
                    var_types: func_var_types,
                };
                self.functions.insert(name.clone(), user_func);
            }
            HirStmt::Switch {
                expr,
                cases,
                otherwise,
            } => {
                let temp_id = self.alloc_temp();
                self.compile_expr(expr)?;
                self.emit(Instr::StoreVar(temp_id));
                let mut end_jumps: Vec<usize> = Vec::new();
                let mut next_case_jump_to_here: Option<usize> = None;
                for (case_expr, body) in cases {
                    if let Some(j) = next_case_jump_to_here.take() {
                        self.patch(j, Instr::JumpIfFalse(self.instructions.len()));
                    }
                    self.emit(Instr::LoadVar(temp_id));
                    self.compile_expr(case_expr)?;
                    self.emit(Instr::Equal);
                    let jmp = self.emit(Instr::JumpIfFalse(usize::MAX));
                    for s in body {
                        self.compile_stmt(s)?;
                    }
                    end_jumps.push(self.emit(Instr::Jump(usize::MAX)));
                    next_case_jump_to_here = Some(jmp);
                }
                let otherwise_start = self.instructions.len();
                if let Some(j) = next_case_jump_to_here.take() {
                    self.patch(j, Instr::JumpIfFalse(otherwise_start));
                }
                if let Some(body) = otherwise {
                    for s in body {
                        self.compile_stmt(s)?;
                    }
                }
                let end = self.instructions.len();
                for j in end_jumps {
                    self.patch(j, Instr::Jump(end));
                }
            }
            HirStmt::TryCatch {
                try_body,
                catch_var,
                catch_body,
            } => {
                // Reserve slot for EnterTry with placeholder
                let enter_idx = self.emit(Instr::EnterTry(usize::MAX, catch_var.map(|v| v.0)));
                // Compile try body
                for s in try_body {
                    self.compile_stmt(s)?;
                }
                // On normal completion, pop try frame and jump past catch
                self.emit(Instr::PopTry);
                let jmp_end = self.emit(Instr::Jump(usize::MAX));
                // Catch block starts here
                let catch_pc = self.instructions.len();
                // Patch EnterTry with catch_pc
                self.patch(enter_idx, Instr::EnterTry(catch_pc, catch_var.map(|v| v.0)));
                // Compile catch body
                for s in catch_body {
                    self.compile_stmt(s)?;
                }
                let end_pc = self.instructions.len();
                self.patch(jmp_end, Instr::Jump(end_pc));
            }
            HirStmt::AssignLValue(lv, rhs, _) => {
                match lv {
                    runmat_hir::HirLValue::Index(base, indices) => {
                        if let runmat_hir::HirExprKind::Var(var_id) = base.kind {
                            // Load base variable first
                            self.emit(Instr::LoadVar(var_id.0));
                            // Compute masks and numeric indices as in IndexSlice
                            let has_colon = indices
                                .iter()
                                .any(|e| matches!(e.kind, runmat_hir::HirExprKind::Colon));
                            let has_end = indices
                                .iter()
                                .any(|e| matches!(e.kind, runmat_hir::HirExprKind::End));
                            let has_vector = indices.iter().any(|e| {
                                matches!(
                                    e.kind,
                                    HirExprKind::Range(_, _, _) | HirExprKind::Tensor(_)
                                ) || matches!(e.ty, runmat_hir::Type::Tensor { .. })
                            });
                            if has_colon || has_end || has_vector || indices.len() > 2 {
                                let mut colon_mask: u32 = 0;
                                let mut end_mask: u32 = 0;
                                let mut numeric_count = 0usize;
                                let mut end_offsets: Vec<(usize, i64)> = Vec::new();
                                let mut lowered_range_end = false;
                                for (dim, index) in indices.iter().enumerate() {
                                    if matches!(index.kind, runmat_hir::HirExprKind::Colon) {
                                        colon_mask |= 1u32 << dim;
                                    } else if matches!(index.kind, runmat_hir::HirExprKind::End) {
                                        end_mask |= 1u32 << dim;
                                    } else {
                                        // If this index is a Range whose end references End (with or without offset),
                                        // skip compiling it here; it will be handled by StoreRangeEnd.
                                        if indices.len() > 1 {
                                            if let runmat_hir::HirExprKind::Range(
                                                _start,
                                                _step,
                                                end,
                                            ) = &index.kind
                                            {
                                                match &end.kind {
                                                    runmat_hir::HirExprKind::End => {
                                                        // offset 0
                                                        end_offsets.push((numeric_count, 0));
                                                        continue;
                                                    }
                                                    runmat_hir::HirExprKind::Binary(
                                                        left,
                                                        op,
                                                        right,
                                                    ) => {
                                                        if matches!(op, runmat_parser::BinOp::Sub)
                                                            && matches!(
                                                                left.kind,
                                                                runmat_hir::HirExprKind::End
                                                            )
                                                        {
                                                            if let runmat_hir::HirExprKind::Number(
                                                                ref s,
                                                            ) = right.kind
                                                            {
                                                                if let Ok(k) = s.parse::<i64>() {
                                                                    end_offsets
                                                                        .push((numeric_count, k));
                                                                } else {
                                                                    end_offsets
                                                                        .push((numeric_count, 0));
                                                                }
                                                            } else {
                                                                end_offsets
                                                                    .push((numeric_count, 0));
                                                            }
                                                            continue;
                                                        }
                                                    }
                                                    _ => {}
                                                }
                                            }
                                        }
                                        // Special-case 1-D range with end arithmetic when dims == 1
                                        if indices.len() == 1 {
                                            if let runmat_hir::HirExprKind::Range(
                                                start,
                                                step,
                                                end,
                                            ) = &index.kind
                                            {
                                                if let runmat_hir::HirExprKind::Binary(
                                                    left,
                                                    op,
                                                    right,
                                                ) = &end.kind
                                                {
                                                    if matches!(op, runmat_parser::BinOp::Sub)
                                                        && matches!(
                                                            left.kind,
                                                            runmat_hir::HirExprKind::End
                                                        )
                                                    {
                                                        // Emit StoreSlice1DRangeEnd: base is already on stack
                                                        self.compile_expr(start)?;
                                                        let offset = match right.kind {
                                                            runmat_hir::HirExprKind::Number(
                                                                ref s,
                                                            ) => s.parse::<i64>().unwrap_or(0),
                                                            _ => 0,
                                                        };
                                                        if let Some(st) = step {
                                                            self.compile_expr(st)?;
                                                            self.compile_expr(rhs)?;
                                                            self.emit(
                                                                Instr::StoreSlice1DRangeEnd {
                                                                    has_step: true,
                                                                    offset,
                                                                },
                                                            );
                                                        } else {
                                                            self.compile_expr(rhs)?;
                                                            self.emit(
                                                                Instr::StoreSlice1DRangeEnd {
                                                                    has_step: false,
                                                                    offset,
                                                                },
                                                            );
                                                        }
                                                        lowered_range_end = true;
                                                        break;
                                                    }
                                                }
                                            }
                                        }
                                        if !lowered_range_end {
                                            self.compile_expr(index)?;
                                            numeric_count += 1;
                                        }
                                    }
                                }
                                if lowered_range_end {
                                    // VM already pushed updated tensor; store back to var
                                    self.emit(Instr::StoreVar(var_id.0));
                                } else {
                                    // Push RHS last so VM pops it first
                                    // Detect any ranges with end arithmetic across dims
                                    let mut has_any_range_end = false;
                                    let mut range_dims: Vec<usize> = Vec::new();
                                    let mut range_has_step: Vec<bool> = Vec::new();
                                    let mut end_offs: Vec<i64> = Vec::new();
                                    for (dim, index) in indices.iter().enumerate() {
                                        if let runmat_hir::HirExprKind::Range(_start, step, end) =
                                            &index.kind
                                        {
                                            match &end.kind {
                                                runmat_hir::HirExprKind::End => {
                                                    has_any_range_end = true;
                                                    range_dims.push(dim);
                                                    range_has_step.push(step.is_some());
                                                    end_offs.push(0);
                                                }
                                                runmat_hir::HirExprKind::Binary(
                                                    left,
                                                    op,
                                                    right,
                                                ) => {
                                                    if matches!(op, runmat_parser::BinOp::Sub)
                                                        && matches!(
                                                            left.kind,
                                                            runmat_hir::HirExprKind::End
                                                        )
                                                    {
                                                        has_any_range_end = true;
                                                        range_dims.push(dim);
                                                        range_has_step.push(step.is_some());
                                                        if let runmat_hir::HirExprKind::Number(
                                                            ref s,
                                                        ) = right.kind
                                                        {
                                                            end_offs.push(
                                                                s.parse::<i64>().unwrap_or(0),
                                                            );
                                                        } else {
                                                            end_offs.push(0);
                                                        }
                                                    }
                                                }
                                                _ => {}
                                            }
                                        }
                                    }
                                    if has_any_range_end {
                                        // Push start[, step] for each range in dim order
                                        for &dim in &range_dims {
                                            if let runmat_hir::HirExprKind::Range(
                                                start,
                                                step,
                                                _end,
                                            ) = &indices[dim].kind
                                            {
                                                self.compile_expr(start)?;
                                                if let Some(st) = step {
                                                    self.compile_expr(st)?;
                                                }
                                            }
                                        }
                                        self.compile_expr(rhs)?;
                                        self.emit(Instr::StoreRangeEnd {
                                            dims: indices.len(),
                                            numeric_count,
                                            colon_mask,
                                            end_mask,
                                            range_dims,
                                            range_has_step,
                                            end_offsets: end_offs,
                                        });
                                    } else {
                                        // Attempt packing of function returns or cell expansion for 1-D slices
                                        let dims_len = indices.len();
                                        let idx_is_scalar = |e: &HirExpr| -> bool {
                                            matches!(
                                                e.kind,
                                                HirExprKind::Number(_) | HirExprKind::End
                                            )
                                        };
                                        let idx_is_vector = |e: &HirExpr| -> bool {
                                            matches!(
                                                e.kind,
                                                HirExprKind::Colon
                                                    | HirExprKind::Range(_, _, _)
                                                    | HirExprKind::Tensor(_)
                                            )
                                        };
                                        let (is_row_slice, is_col_slice) = if dims_len == 2 {
                                            (
                                                idx_is_scalar(&indices[0])
                                                    && idx_is_vector(&indices[1]),
                                                idx_is_vector(&indices[0])
                                                    && idx_is_scalar(&indices[1]),
                                            )
                                        } else {
                                            (false, false)
                                        };
                                        fn const_vec_len(e: &HirExpr) -> Option<usize> {
                                            match &e.kind {
                                                HirExprKind::Number(_) | HirExprKind::End => {
                                                    Some(1)
                                                }
                                                HirExprKind::Tensor(rows) => {
                                                    Some(rows.iter().map(|r| r.len()).sum())
                                                }
                                                HirExprKind::Range(start, step, end) => {
                                                    if let (
                                                        HirExprKind::Number(sa),
                                                        HirExprKind::Number(ea),
                                                    ) = (&start.kind, &end.kind)
                                                    {
                                                        let s: f64 = sa.parse().ok()?;
                                                        let en: f64 = ea.parse().ok()?;
                                                        let st: f64 = if let Some(st) = step {
                                                            if let HirExprKind::Number(x) = &st.kind
                                                            {
                                                                x.parse().ok()?
                                                            } else {
                                                                return None;
                                                            }
                                                        } else {
                                                            1.0
                                                        };
                                                        if st == 0.0 {
                                                            return None;
                                                        }
                                                        let n =
                                                            ((en - s) / st).floor() as isize + 1;
                                                        if n <= 0 {
                                                            Some(0)
                                                        } else {
                                                            Some(n as usize)
                                                        }
                                                    } else {
                                                        None
                                                    }
                                                }
                                                HirExprKind::Colon => None,
                                                _ => None,
                                            }
                                        }
                                        let mut packed = false;
                                        if let HirExprKind::FuncCall(fname, fargs) = &rhs.kind {
                                            if self.functions.contains_key(fname)
                                                && (dims_len == 1 || is_row_slice || is_col_slice)
                                            {
                                                for a in fargs {
                                                    self.compile_expr(a)?;
                                                }
                                                let outc = self
                                                    .functions
                                                    .get(fname)
                                                    .map(|f| f.outputs.len().max(1))
                                                    .unwrap_or(1);
                                                self.emit(Instr::CallFunctionMulti(
                                                    fname.clone(),
                                                    fargs.len(),
                                                    outc,
                                                ));
                                                if dims_len == 1 || is_col_slice {
                                                    self.emit(Instr::PackToCol(outc));
                                                } else {
                                                    self.emit(Instr::PackToRow(outc));
                                                }
                                                packed = true;
                                            }
                                        } else if let HirExprKind::IndexCell(cbase, cidx) =
                                            &rhs.kind
                                        {
                                            // Expand cell into vector matching selected slice length if determinable
                                            let outc = if dims_len == 1 {
                                                const_vec_len(&indices[0])
                                            } else if is_row_slice {
                                                const_vec_len(&indices[1])
                                            } else if is_col_slice {
                                                const_vec_len(&indices[0])
                                            } else {
                                                None
                                            };
                                            if let Some(n) = outc {
                                                self.compile_expr(cbase)?;
                                                // Special case: C{:} => expand all; do not compile colon index
                                                let expand_all = cidx.len() == 1
                                                    && matches!(cidx[0].kind, HirExprKind::Colon);
                                                if expand_all {
                                                    self.emit(Instr::IndexCellExpand(0, n));
                                                } else {
                                                    for i in cidx {
                                                        self.compile_expr(i)?;
                                                    }
                                                    self.emit(Instr::IndexCellExpand(
                                                        cidx.len(),
                                                        n,
                                                    ));
                                                }
                                                if dims_len == 1 || is_col_slice {
                                                    self.emit(Instr::PackToCol(n));
                                                } else {
                                                    self.emit(Instr::PackToRow(n));
                                                }
                                                packed = true;
                                            }
                                        }
                                        if !packed {
                                            self.compile_expr(rhs)?;
                                        }
                                        if end_offsets.is_empty() {
                                            self.emit(Instr::StoreSlice(
                                                indices.len(),
                                                numeric_count,
                                                colon_mask,
                                                end_mask,
                                            ));
                                        } else {
                                            self.emit(Instr::StoreSliceEx(
                                                indices.len(),
                                                numeric_count,
                                                colon_mask,
                                                end_mask,
                                                end_offsets,
                                            ));
                                        }
                                    }
                                    // Store updated base back to variable
                                    self.emit(Instr::StoreVar(var_id.0));
                                }
                            } else {
                                // Pure numeric indexing
                                for index in indices {
                                    self.compile_expr(index)?;
                                }
                                // If RHS is a user function call, request multiple outputs and pack to column for linear targets
                                if let HirExprKind::FuncCall(fname, fargs) = &rhs.kind {
                                    if self.functions.contains_key(fname) && indices.len() == 1 {
                                        for a in fargs {
                                            self.compile_expr(a)?;
                                        }
                                        let outc = self
                                            .functions
                                            .get(fname)
                                            .map(|f| f.outputs.len().max(1))
                                            .unwrap_or(1);
                                        self.emit(Instr::CallFunctionMulti(
                                            fname.clone(),
                                            fargs.len(),
                                            outc,
                                        ));
                                        self.emit(Instr::PackToCol(outc));
                                    } else {
                                        self.compile_expr(rhs)?;
                                    }
                                } else {
                                    self.compile_expr(rhs)?;
                                }
                                self.emit(Instr::StoreIndex(indices.len()));
                                self.emit(Instr::StoreVar(var_id.0));
                            }
                        } else if let runmat_hir::HirExprKind::Member(member_base, field) =
                            &base.kind
                        {
                            // Chain: base is a member access. Evaluate object, load member, update via index, then write member back.
                            // Evaluate object and get member value
                            self.compile_expr(member_base)?;
                            self.emit(Instr::LoadMember(field.clone()));
                            // Decide slice vs numeric
                            let has_colon = indices
                                .iter()
                                .any(|e| matches!(e.kind, runmat_hir::HirExprKind::Colon));
                            let has_end = indices
                                .iter()
                                .any(|e| matches!(e.kind, runmat_hir::HirExprKind::End));
                            let has_vector = indices.iter().any(|e| {
                                matches!(
                                    e.kind,
                                    HirExprKind::Range(_, _, _) | HirExprKind::Tensor(_)
                                ) || matches!(e.ty, runmat_hir::Type::Tensor { .. })
                            });
                            if has_colon || has_end || has_vector || indices.len() > 2 {
                                let mut colon_mask: u32 = 0;
                                let mut end_mask: u32 = 0;
                                let mut numeric_count = 0usize;
                                for (dim, index) in indices.iter().enumerate() {
                                    if matches!(index.kind, runmat_hir::HirExprKind::Colon) {
                                        colon_mask |= 1u32 << dim;
                                    } else if matches!(index.kind, runmat_hir::HirExprKind::End) {
                                        end_mask |= 1u32 << dim;
                                    } else {
                                        self.compile_expr(index)?;
                                        numeric_count += 1;
                                    }
                                }
                                self.compile_expr(rhs)?;
                                self.emit(Instr::StoreSlice(
                                    indices.len(),
                                    numeric_count,
                                    colon_mask,
                                    end_mask,
                                ));
                            } else {
                                for index in indices {
                                    self.compile_expr(index)?;
                                }
                                self.compile_expr(rhs)?;
                                self.emit(Instr::StoreIndex(indices.len()));
                            }
                            // Now updated member is on stack. Re-evaluate object, swap, and StoreMember
                            self.compile_expr(member_base)?;
                            self.emit(Instr::Swap);
                            self.emit(Instr::StoreMember(field.clone()));
                            // If object is a variable, also store back to var
                            if let runmat_hir::HirExprKind::Var(root_var) = member_base.kind {
                                self.emit(Instr::StoreVar(root_var.0));
                            }
                        } else {
                            return Err(
                                "unsupported lvalue target (index on non-variable/non-member)"
                                    .into(),
                            );
                        }
                    }
                    runmat_hir::HirLValue::IndexCell(base, indices) => {
                        if let runmat_hir::HirExprKind::Var(var_id) = base.kind {
                            self.emit(Instr::LoadVar(var_id.0));
                            for index in indices {
                                self.compile_expr(index)?;
                            }
                            self.compile_expr(rhs)?;
                            self.emit(Instr::StoreIndexCell(indices.len()));
                            self.emit(Instr::StoreVar(var_id.0));
                        } else if let runmat_hir::HirExprKind::Member(member_base, field) =
                            &base.kind
                        {
                            // Load object, load member, perform cell index store, then write member back to object
                            self.compile_expr(member_base)?;
                            self.emit(Instr::LoadMember(field.clone()));
                            for index in indices {
                                self.compile_expr(index)?;
                            }
                            self.compile_expr(rhs)?;
                            self.emit(Instr::StoreIndexCell(indices.len()));
                            // Updated member on stack; re-evaluate object, swap, store member
                            self.compile_expr(member_base)?;
                            self.emit(Instr::Swap);
                            self.emit(Instr::StoreMember(field.clone()));
                            if let runmat_hir::HirExprKind::Var(root_var) = member_base.kind {
                                self.emit(Instr::StoreVar(root_var.0));
                            }
                        } else {
                            // Fallback: evaluate base, indices, rhs, and store (for object chains via subsasgn)
                            self.compile_expr(base)?;
                            for index in indices {
                                self.compile_expr(index)?;
                            }
                            self.compile_expr(rhs)?;
                            self.emit(Instr::StoreIndexCell(indices.len()));
                        }
                    }
                    runmat_hir::HirLValue::Member(base, field) => {
                        // Member assignment. If base is a variable, ensure we store updated object back.
                        if let runmat_hir::HirExprKind::Var(var_id) = base.kind.clone() {
                            self.emit(Instr::LoadVar(var_id.0));
                            self.compile_expr(rhs)?;
                            self.emit(Instr::StoreMember(field.clone()));
                            self.emit(Instr::StoreVar(var_id.0));
                        } else {
                            // Complex base: evaluate to a value, then store member; updated object remains on stack
                            self.compile_expr(base)?;
                            self.compile_expr(rhs)?;
                            self.emit(Instr::StoreMember(field.clone()));
                        }
                    }
                    runmat_hir::HirLValue::MemberDynamic(base, name_expr) => {
                        if let runmat_hir::HirExprKind::Var(var_id) = base.kind.clone() {
                            self.emit(Instr::LoadVar(var_id.0));
                            self.compile_expr(name_expr)?;
                            self.compile_expr(rhs)?;
                            self.emit(Instr::StoreMemberDynamic);
                            self.emit(Instr::StoreVar(var_id.0));
                        } else {
                            self.compile_expr(base)?;
                            self.compile_expr(name_expr)?;
                            self.compile_expr(rhs)?;
                            self.emit(Instr::StoreMemberDynamic);
                        }
                    }
                    _ => return Err("unsupported lvalue target".into()),
                }
            }
            HirStmt::Global(vars) => {
                let ids: Vec<usize> = vars.iter().map(|(v, _n)| v.0).collect();
                let names: Vec<String> = vars.iter().map(|(_v, n)| n.clone()).collect();
                self.emit(Instr::DeclareGlobalNamed(ids, names));
            }
            HirStmt::Persistent(vars) => {
                let ids: Vec<usize> = vars.iter().map(|(v, _n)| v.0).collect();
                let names: Vec<String> = vars.iter().map(|(_v, n)| n.clone()).collect();
                self.emit(Instr::DeclarePersistentNamed(ids, names));
            }
            HirStmt::Import { path, wildcard } => {
                self.emit(Instr::RegisterImport {
                    path: path.clone(),
                    wildcard: *wildcard,
                });
            }
            HirStmt::ClassDef {
                name,
                super_class,
                members,
            } => {
                // Synthesize a minimal RegisterClass instruction by extracting property names and method names
                let mut props: Vec<(String, bool, String, String)> = Vec::new();
                let mut methods: Vec<(String, String, bool, String)> = Vec::new();
                for m in members {
                    match m {
                        runmat_hir::HirClassMember::Properties { names, attributes } => {
                            let (is_static, is_dependent, get_access, set_access) =
                                Self::parse_prop_attrs(attributes);
                            // Encode dependent flag by prefixing name with "@dep:"; VM will strip and set flag.
                            for n in names {
                                let enc = if is_dependent {
                                    format!("@dep:{n}")
                                } else {
                                    n.clone()
                                };
                                props.push((
                                    enc,
                                    is_static,
                                    get_access.clone(),
                                    set_access.clone(),
                                ));
                            }
                        }
                        runmat_hir::HirClassMember::Methods { body, attributes } => {
                            let (is_static, access) = Self::parse_method_attrs(attributes);
                            for s in body {
                                if let runmat_hir::HirStmt::Function { name: mname, .. } = s {
                                    methods.push((
                                        mname.clone(),
                                        mname.clone(),
                                        is_static,
                                        access.clone(),
                                    ));
                                }
                            }
                        }
                        _ => {}
                    }
                }
                self.emit(Instr::RegisterClass {
                    name: name.clone(),
                    super_class: super_class.clone(),
                    properties: props,
                    methods,
                });
            }
            HirStmt::MultiAssign(vars, expr, _) => {
                // Compile RHS once; calls and cell expansions yield multiple outputs
                match &expr.kind {
                    HirExprKind::FuncCall(name, args) => {
                        if self.functions.contains_key(name) {
                            for arg in args {
                                self.compile_expr(arg)?;
                            }
                            // Emit multi-call to request N outputs
                            self.emit(Instr::CallFunctionMulti(
                                name.clone(),
                                args.len(),
                                vars.len(),
                            ));
                            // Store outputs in reverse so stack pops match declaration order
                            for (_i, var) in vars.iter().enumerate().rev() {
                                if let Some(v) = var {
                                    self.emit(Instr::StoreVar(v.0));
                                } else {
                                    self.emit(Instr::Pop);
                                }
                            }
                        } else {
                            // Builtin or unknown: treat as single return value
                            for arg in args {
                                self.compile_expr(arg)?;
                            }
                            self.emit(Instr::CallBuiltinMulti(
                                name.clone(),
                                args.len(),
                                vars.len(),
                            ));
                            for (_i, var) in vars.iter().enumerate().rev() {
                                if let Some(v) = var {
                                    self.emit(Instr::StoreVar(v.0));
                                } else {
                                    self.emit(Instr::Pop);
                                }
                            }
                        }
                    }
                    HirExprKind::IndexCell(base, indices) => {
                        // Support comma-list expansion from cell indexing: [a,b,...] = C{idx}
                        self.compile_expr(base)?;
                        for index in indices {
                            self.compile_expr(index)?;
                        }
                        // Expand into N outputs
                        self.emit(Instr::IndexCellExpand(indices.len(), vars.len()));
                        for (_i, var) in vars.iter().enumerate().rev() {
                            if let Some(v) = var {
                                self.emit(Instr::StoreVar(v.0));
                            } else {
                                self.emit(Instr::Pop);
                            }
                        }
                    }
                    _ => {
                        // Non-call: assign expr to first non-placeholder, zeros to remaining non-placeholders
                        let first_real = vars.iter().position(|v| v.is_some());
                        if let Some(first_idx) = first_real {
                            self.compile_expr(expr)?;
                            if let Some(Some(first_var)) = vars.get(first_idx) {
                                self.emit(Instr::StoreVar(first_var.0));
                            }
                        }
                        for (i, var) in vars.iter().enumerate() {
                            if Some(i) == first_real {
                                continue;
                            }
                            if let Some(v) = var {
                                self.emit(Instr::LoadConst(0.0));
                                self.emit(Instr::StoreVar(v.0));
                            }
                        }
                    }
                }
            }
        }
        Ok(())
    }

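    /// Compile an expression so that one value is left on the VM stack;
    /// comma-list expansions such as `C{:}` in argument position are handled by
    /// the call-specific paths below.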
    pub fn compile_expr(&mut self, expr: &HirExpr) -> Result<(), String> {
        match &expr.kind {
            HirExprKind::Number(n) => {
                let val: f64 = n.parse().map_err(|_| "invalid number")?;
                self.emit(Instr::LoadConst(val));
            }
            HirExprKind::String(s) => {
                if s.starts_with('"') && s.ends_with('"') {
                    // String scalar
                    let inner = &s[1..s.len() - 1];
                    let clean = inner.replace("\"\"", "\"");
                    self.emit(Instr::LoadString(clean));
                } else if s.starts_with('\'') && s.ends_with('\'') {
                    // Char vector -> CharArray row
                    let inner = &s[1..s.len() - 1];
                    let clean = inner.replace("''", "'");
                    // Encode as CharArray(1, len)
                    self.emit(Instr::LoadCharRow(clean));
                } else {
                    self.emit(Instr::LoadString(s.clone()));
                }
            }
            HirExprKind::Var(id) => {
                self.emit(Instr::LoadVar(id.0));
            }
            // Fallback for unqualified static properties under `Class.*` imports: HIR
            // resolves known variables/constants/functions first, so a bare identifier
            // such as `v = staticValue;` under `import Point.*` reaches this arm and is
            // resolved here by probing the wildcard imports.
            HirExprKind::Constant(name) => {
                let constants = runmat_builtins::constants();
                if let Some(constant) = constants.iter().find(|c| c.name == name) {
                    match &constant.value {
                        runmat_builtins::Value::Num(val) => {
                            self.emit(Instr::LoadConst(*val));
                        }
                        runmat_builtins::Value::Complex(re, im) => {
                            self.emit(Instr::LoadComplex(*re, *im));
                        }
                        runmat_builtins::Value::Bool(b) => {
                            self.emit(Instr::LoadBool(*b));
                        }
                        _ => {
                            return Err(format!("Constant {name} is not a number or boolean"));
                        }
                    }
                } else {
                    // Try resolving as unqualified static property via Class.* imports (multi-segment)
                    let mut classes: Vec<String> = Vec::new();
                    for (path, wildcard) in &self.imports {
                        if !*wildcard {
                            continue;
                        }
                        if path.is_empty() {
                            continue;
                        }
                        let mut cls = String::new();
                        for (i, part) in path.iter().enumerate() {
                            if i > 0 {
                                cls.push('.');
                            }
                            cls.push_str(part);
                        }
                        if let Some((p, _owner)) = runmat_builtins::lookup_property(&cls, name) {
                            if p.is_static {
                                classes.push(cls.clone());
                            }
                        }
                    }
                    if classes.len() > 1 {
                        return Err(format!(
                            "ambiguous unqualified static property '{}' via Class.* imports: {}",
                            name,
                            classes.join(", ")
                        ));
                    }
                    if classes.len() == 1 {
                        self.emit(Instr::LoadStaticProperty(classes.remove(0), name.clone()));
                        return Ok(());
                    }
                    return Err(format!("Unknown constant or static property: {name}"));
                }
            }
            HirExprKind::Unary(op, e) => {
                self.compile_expr(e)?;
                match op {
                    runmat_parser::UnOp::Plus => {
                        self.emit(Instr::UPlus);
                    }
                    runmat_parser::UnOp::Minus => {
                        self.emit(Instr::Neg);
                    }
                    runmat_parser::UnOp::Transpose => {
                        self.emit(Instr::ConjugateTranspose);
                    }
                    runmat_parser::UnOp::NonConjugateTranspose => {
                        self.emit(Instr::Transpose);
                    }
                    runmat_parser::UnOp::Not => {
                        // Simple lowering: x -> (x == 0)
                        self.emit(Instr::LoadConst(0.0));
                        self.emit(Instr::Equal);
                    }
                }
            }
            HirExprKind::Binary(a, op, b) => {
                use runmat_parser::BinOp;
                match op {
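                    // Short-circuit `&&`/`||`: normalize each side with `!= 0` and
                    // branch so the unevaluated side is skipped, leaving a 0/1 scalar.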
1749 BinOp::AndAnd => {
1750 self.compile_expr(a)?;
1751 self.emit(Instr::LoadConst(0.0));
1752 self.emit(Instr::NotEqual);
1753 let j_false = self.emit(Instr::JumpIfFalse(usize::MAX));
1754 self.compile_expr(b)?;
1755 self.emit(Instr::LoadConst(0.0));
1756 self.emit(Instr::NotEqual);
1757 let end = self.emit(Instr::Jump(usize::MAX));
1758 let after_cond = self.instructions.len();
1759 self.patch(j_false, Instr::JumpIfFalse(after_cond));
1760 self.emit(Instr::LoadConst(0.0));
1761 let end_pc = self.instructions.len();
1762 self.patch(end, Instr::Jump(end_pc));
1763 }
1764 BinOp::OrOr => {
1765 self.compile_expr(a)?;
1766 self.emit(Instr::LoadConst(0.0));
1767 self.emit(Instr::NotEqual);
1768 let j_true = self.emit(Instr::JumpIfFalse(usize::MAX));
1769 self.emit(Instr::LoadConst(1.0));
1770 let end = self.emit(Instr::Jump(usize::MAX));
1771 let after_check = self.instructions.len();
1772 self.patch(j_true, Instr::JumpIfFalse(after_check));
1773 self.compile_expr(b)?;
1774 self.emit(Instr::LoadConst(0.0));
1775 self.emit(Instr::NotEqual);
1776 let end_pc = self.instructions.len();
1777 self.patch(end, Instr::Jump(end_pc));
1778 }
1779 BinOp::BitAnd => {
1780 self.compile_expr(a)?;
1781 self.emit(Instr::LoadConst(0.0));
1782 self.emit(Instr::CallBuiltin("ne".to_string(), 2));
1783 self.compile_expr(b)?;
1784 self.emit(Instr::LoadConst(0.0));
1785 self.emit(Instr::CallBuiltin("ne".to_string(), 2));
1786 self.emit(Instr::ElemMul);
1787 }
1788 BinOp::BitOr => {
1789 self.compile_expr(a)?;
1790 self.emit(Instr::LoadConst(0.0));
1791 self.emit(Instr::CallBuiltin("ne".to_string(), 2));
1792 self.compile_expr(b)?;
1793 self.emit(Instr::LoadConst(0.0));
1794 self.emit(Instr::CallBuiltin("ne".to_string(), 2));
1795 self.emit(Instr::Add);
1796 self.emit(Instr::LoadConst(0.0));
1797 self.emit(Instr::CallBuiltin("ne".to_string(), 2));
1798 // Stay element-wise by folding through elementwise ops
1799 }
1800 _ => {
1801 self.compile_expr(a)?;
1802 self.compile_expr(b)?;
1803 match op {
1804 BinOp::Add => {
1805 self.emit(Instr::Add);
1806 }
1807 BinOp::Sub => {
1808 self.emit(Instr::Sub);
1809 }
1810 BinOp::Mul => {
1811 self.emit(Instr::Mul);
1812 }
1813 BinOp::Div | BinOp::LeftDiv => {
1814 self.emit(Instr::Div);
1815 }
1816 BinOp::Pow => {
1817 self.emit(Instr::Pow);
1818 }
1819 BinOp::ElemMul => {
1820 self.emit(Instr::ElemMul);
1821 }
1822 BinOp::ElemDiv | BinOp::ElemLeftDiv => {
1823 self.emit(Instr::ElemDiv);
1824 }
1825 BinOp::ElemPow => {
1826 self.emit(Instr::ElemPow);
1827 }
1828 BinOp::Equal => {
1829 self.emit(Instr::Equal);
1830 }
1831 BinOp::NotEqual => {
1832 self.emit(Instr::NotEqual);
1833 }
1834 BinOp::Less => {
1835 self.emit(Instr::Less);
1836 }
1837 BinOp::LessEqual => {
1838 self.emit(Instr::LessEqual);
1839 }
1840 BinOp::Greater => {
1841 self.emit(Instr::Greater);
1842 }
1843 BinOp::GreaterEqual => {
1844 self.emit(Instr::GreaterEqual);
1845 }
1846 BinOp::Colon => {
1847 return Err("colon operator not supported".into());
1848 }
1849 _ => unreachable!(),
1850 }
1851 }
1852 }
1853 }
1854 HirExprKind::Range(start, step, end) => {
1855 self.compile_expr(start)?;
1856 if let Some(step) = step {
1857 self.compile_expr(step)?;
1858 self.compile_expr(end)?;
1859 self.emit(Instr::CreateRange(true));
1860 } else {
1861 self.compile_expr(end)?;
1862 self.emit(Instr::CreateRange(false));
1863 }
1864 }
1865 HirExprKind::FuncCall(name, args) => {
1866 // Special-case: feval(f, a1, a2, ...) compiles to VM feval to access user functions/closures
1867 if name == "feval" {
1868 if args.is_empty() {
1869 return Err("feval: missing function argument".into());
1870 }
1871 // Push function value first
1872 self.compile_expr(&args[0])?;
1873 let rest = &args[1..];
1874 let has_expand = rest
1875 .iter()
1876 .any(|a| matches!(a.kind, HirExprKind::IndexCell(_, _)));
1877 if has_expand {
1878 let mut specs: Vec<crate::instr::ArgSpec> = Vec::with_capacity(rest.len());
1879 for arg in rest {
1880 if let HirExprKind::IndexCell(base, indices) = &arg.kind {
1881 let is_expand_all = indices.len() == 1
1882 && matches!(indices[0].kind, HirExprKind::Colon);
1883 if is_expand_all {
1884 specs.push(crate::instr::ArgSpec {
1885 is_expand: true,
1886 num_indices: 0,
1887 expand_all: true,
1888 });
1889 self.compile_expr(base)?;
1890 } else {
1891 specs.push(crate::instr::ArgSpec {
1892 is_expand: true,
1893 num_indices: indices.len(),
1894 expand_all: false,
1895 });
1896 self.compile_expr(base)?;
1897 for i in indices {
1898 self.compile_expr(i)?;
1899 }
1900 }
1901 } else {
1902 specs.push(crate::instr::ArgSpec {
1903 is_expand: false,
1904 num_indices: 0,
1905 expand_all: false,
1906 });
1907 self.compile_expr(arg)?;
1908 }
1909 }
1910 self.emit(Instr::CallFevalExpandMulti(specs));
1911 } else {
1912 for arg in rest {
1913 self.compile_expr(arg)?;
1914 }
1915 self.emit(Instr::CallFeval(rest.len()));
1916 }
1917 return Ok(());
1918 }
1919 let has_any_expand = args
1920 .iter()
1921 .any(|a| matches!(a.kind, HirExprKind::IndexCell(_, _)));
1922 if self.functions.contains_key(name) {
1923 // existing path
1924 if has_any_expand {
1925 let mut specs: Vec<crate::instr::ArgSpec> = Vec::with_capacity(args.len());
1926 for arg in args {
1927 if let HirExprKind::IndexCell(base, indices) = &arg.kind {
1928 let is_expand_all = indices.len() == 1
1929 && matches!(indices[0].kind, HirExprKind::Colon);
1930 if is_expand_all {
1931 specs.push(crate::instr::ArgSpec {
1932 is_expand: true,
1933 num_indices: 0,
1934 expand_all: true,
1935 });
1936 self.compile_expr(base)?;
1937 } else {
1938 specs.push(crate::instr::ArgSpec {
1939 is_expand: true,
1940 num_indices: indices.len(),
1941 expand_all: false,
1942 });
1943 self.compile_expr(base)?;
1944 for i in indices {
1945 self.compile_expr(i)?;
1946 }
1947 }
1948 } else {
1949 specs.push(crate::instr::ArgSpec {
1950 is_expand: false,
1951 num_indices: 0,
1952 expand_all: false,
1953 });
1954 self.compile_expr(arg)?;
1955 }
1956 }
1957 self.emit(Instr::CallFunctionExpandMulti(name.clone(), specs));
1958 } else {
1959 for arg in args {
1960 self.compile_expr(arg)?;
1961 }
1962 self.emit(Instr::CallFunction(name.clone(), args.len()));
1963 }
1964 } else {
1965 // Existing import-based function/builtin resolution, extended with static method via Class.*
1966 // Attempt compile-time import resolution for builtins and user functions with ambiguity checks
1967 // Precedence for unqualified resolution:
1968 // locals > user functions in scope > specific imports > wildcard imports > Class.* static methods
1969 // 1) Specific imports: import pkg.foo => resolve 'foo' (takes precedence over wildcard)
1970 // 2) Wildcard imports: import pkg.* => resolve 'pkg.foo'
1971 // 3) Class.* static methods: import Class.* (or pkg.Class.*) => resolve static methods if unambiguous
                    let mut resolved = name.clone();
                    let mut static_candidates: Vec<(String, String)> = Vec::new();
                    if !runmat_builtins::builtin_functions()
                        .iter()
                        .any(|b| b.name == resolved)
                    {
                        // Specific import candidates
                        let mut specific_candidates: Vec<String> = Vec::new();
                        for (path, wildcard) in &self.imports {
                            if *wildcard {
                                continue;
                            }
                            if path.last().map(|s| s.as_str()) == Some(name.as_str()) {
                                let qual = path.join(".");
                                if runmat_builtins::builtin_functions()
                                    .iter()
                                    .any(|b| b.name == qual)
                                    || self.functions.contains_key(&qual)
                                {
                                    specific_candidates.push(qual);
                                }
                            }
                        }
                        if specific_candidates.len() > 1 {
                            return Err(format!(
                                "ambiguous unqualified reference '{}' via imports: {}",
                                name,
                                specific_candidates.join(", ")
                            ));
                        }
                        if specific_candidates.len() == 1 {
                            resolved = specific_candidates.remove(0);
                        } else {
                            // Wildcard candidates for functions
                            let mut wildcard_candidates: Vec<String> = Vec::new();
                            for (path, wildcard) in &self.imports {
                                if !*wildcard {
                                    continue;
                                }
                                if path.is_empty() {
                                    continue;
                                }
                                let cls = path.join(".");
                                let qual = format!("{}.{}", cls, name);
                                if runmat_builtins::builtin_functions()
                                    .iter()
                                    .any(|b| b.name == qual)
                                    || self.functions.contains_key(&qual)
                                {
                                    wildcard_candidates.push(qual);
                                }
                                // Accumulate Class.* static method candidates for any class path
                                if let Some((m, _owner)) = runmat_builtins::lookup_method(&cls, name) {
                                    if m.is_static {
                                        static_candidates.push((cls.clone(), name.clone()));
                                    }
                                }
                            }
                            if wildcard_candidates.len() > 1 {
                                return Err(format!(
                                    "ambiguous unqualified reference '{}' via wildcard imports: {}",
                                    name,
                                    wildcard_candidates.join(", ")
                                ));
                            }
                            if wildcard_candidates.len() == 1 {
                                resolved = wildcard_candidates.remove(0);
                            }
                        }
                    }
                    // If `resolved` names a user function, compile the call now
                    if self.functions.contains_key(&resolved) {
                        if has_any_expand {
                            let mut specs: Vec<crate::instr::ArgSpec> =
                                Vec::with_capacity(args.len());
                            for arg in args {
                                if let HirExprKind::IndexCell(base, indices) = &arg.kind {
                                    let is_expand_all = indices.len() == 1
                                        && matches!(indices[0].kind, HirExprKind::Colon);
                                    if is_expand_all {
                                        specs.push(crate::instr::ArgSpec {
                                            is_expand: true,
                                            num_indices: 0,
                                            expand_all: true,
                                        });
                                        self.compile_expr(base)?;
                                    } else {
                                        specs.push(crate::instr::ArgSpec {
                                            is_expand: true,
                                            num_indices: indices.len(),
                                            expand_all: false,
                                        });
                                        self.compile_expr(base)?;
                                        for i in indices {
                                            self.compile_expr(i)?;
                                        }
                                    }
                                } else {
                                    specs.push(crate::instr::ArgSpec {
                                        is_expand: false,
                                        num_indices: 0,
                                        expand_all: false,
                                    });
                                    self.compile_expr(arg)?;
                                }
                            }
                            self.emit(Instr::CallFunctionExpandMulti(resolved.clone(), specs));
                            return Ok(());
                        } else {
                            // Flatten inner user-function returns into the argument list for the call
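                            // Illustrative: for `g(f(x), y)` where user function `f` declares
                            // two outputs, this emits <x>; CallFunctionMulti("f", 1, 2); <y>;
                            // CallFunction("g", 3), so both outputs of `f` feed `g`.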
                            let mut total_argc: usize = 0;
                            for arg in args {
                                if let HirExprKind::FuncCall(inner, inner_args) = &arg.kind {
                                    if self.functions.contains_key(inner) {
                                        for a in inner_args {
                                            self.compile_expr(a)?;
                                        }
                                        let outc = self
                                            .functions
                                            .get(inner)
                                            .map(|f| f.outputs.len().max(1))
                                            .unwrap_or(1);
                                        self.emit(Instr::CallFunctionMulti(
                                            inner.clone(),
                                            inner_args.len(),
                                            outc,
                                        ));
                                        total_argc += outc;
                                        continue;
                                    }
                                }
                                self.compile_expr(arg)?;
                                total_argc += 1;
                            }
                            self.emit(Instr::CallFunction(resolved.clone(), total_argc));
                            return Ok(());
                        }
                    }
                    // If no function resolved and there is exactly one static candidate, call it
                    if !runmat_builtins::builtin_functions()
                        .iter()
                        .any(|b| b.name == resolved)
                        && static_candidates.len() == 1
                    {
                        let (cls, method) = static_candidates.remove(0);
                        for arg in args {
                            self.compile_expr(arg)?;
                        }
                        self.emit(Instr::CallStaticMethod(cls, method, args.len()));
                        return Ok(());
                    }
                    // If multiple static candidates and no function resolved, report the ambiguity
                    if !runmat_builtins::builtin_functions()
                        .iter()
                        .any(|b| b.name == resolved)
                        && static_candidates.len() > 1
                    {
                        return Err(format!(
                            "ambiguous unqualified static method '{}' via Class.* imports: {}",
                            name,
                            static_candidates
                                .iter()
                                .map(|(c, _)| c.clone())
                                .collect::<Vec<_>>()
                                .join(", ")
                        ));
                    }
                    // Remaining path: builtin call, with inner user-function return propagation
                    if !has_any_expand {
                        // First scan for user-defined inner function calls to expand. Avoid compiling
                        // simple args in this pass so they are not duplicated on the stack. If any
                        // inner user functions are found, compile them, then compile the remaining
                        // simple args once, emit the builtin call, and return. Otherwise, fall
                        // through to the normal single-pass compilation below.
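                        // Illustrative: for `b(f(x), y)` with user function `f` having `outc`
                        // outputs, this emits <x>; CallFunctionMulti("f", 1, outc); <y>;
                        // CallBuiltin(b, outc + 1). Expanded inner calls are compiled before
                        // the remaining simple args.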
                        let mut total_argc = 0usize;
                        let mut did_expand_inner = false;
                        let mut pending_simple: Vec<&runmat_hir::HirExpr> = Vec::new();
                        for arg in args {
                            if let HirExprKind::FuncCall(inner, inner_args) = &arg.kind {
                                if self.functions.contains_key(inner) {
                                    for a in inner_args {
                                        self.compile_expr(a)?;
                                    }
                                    let outc = self
                                        .functions
                                        .get(inner)
                                        .map(|f| f.outputs.len().max(1))
                                        .unwrap_or(1);
                                    self.emit(Instr::CallFunctionMulti(
                                        inner.clone(),
                                        inner_args.len(),
                                        outc,
                                    ));
                                    total_argc += outc;
                                    did_expand_inner = true;
                                } else {
                                    pending_simple.push(arg);
                                }
                            } else {
                                pending_simple.push(arg);
                            }
                        }
                        if did_expand_inner {
                            for arg in pending_simple {
                                self.compile_expr(arg)?;
                                total_argc += 1;
                            }
                            self.emit(Instr::CallBuiltin(resolved, total_argc));
                            return Ok(());
                        }
                    }
                    if has_any_expand {
                        let mut specs: Vec<crate::instr::ArgSpec> = Vec::with_capacity(args.len());
                        for arg in args {
                            if let HirExprKind::IndexCell(base, indices) = &arg.kind {
                                let is_expand_all = indices.len() == 1
                                    && matches!(indices[0].kind, HirExprKind::Colon);
                                if is_expand_all {
                                    specs.push(crate::instr::ArgSpec {
                                        is_expand: true,
                                        num_indices: 0,
                                        expand_all: true,
                                    });
                                    self.compile_expr(base)?;
                                } else {
                                    specs.push(crate::instr::ArgSpec {
                                        is_expand: true,
                                        num_indices: indices.len(),
                                        expand_all: false,
                                    });
                                    self.compile_expr(base)?;
                                    for i in indices {
                                        self.compile_expr(i)?;
                                    }
                                }
                            } else {
                                specs.push(crate::instr::ArgSpec {
                                    is_expand: false,
                                    num_indices: 0,
                                    expand_all: false,
                                });
                                self.compile_expr(arg)?;
                            }
                        }
                        self.emit(Instr::CallBuiltinExpandMulti(resolved, specs));
                    } else {
                        for arg in args {
                            self.compile_expr(arg)?;
                        }
                        self.emit(Instr::CallBuiltin(resolved, args.len()));
                    }
                    return Ok(());
                }
                return Ok(());
            }
            HirExprKind::Tensor(matrix_data) | HirExprKind::Cell(matrix_data) => {
                let rows = matrix_data.len();
                // Special case: a 1x1 tensor literal whose single element is IndexCell(base, {:}).
                // Lower `[C{:}]` into `cat(2, C{:})` so downstream expansion works
                // without compiling the colon directly.
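                // e.g. with C = {1, 2, 3}, `[C{:}]` evaluates like `cat(2, C{:})`,
                // i.e. the 1x3 row vector [1 2 3].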
                if matches!(expr.kind, HirExprKind::Tensor(_))
                    && rows == 1
                    && matrix_data.first().map(|r| r.len()).unwrap_or(0) == 1
                {
                    if let HirExprKind::IndexCell(base, indices) = &matrix_data[0][0].kind {
                        if indices.len() == 1 && matches!(indices[0].kind, HirExprKind::Colon) {
                            // Build specs: a fixed dim=2 argument, then expand-all for the base cell
                            let mut specs: Vec<crate::instr::ArgSpec> = Vec::with_capacity(2);
                            // Fixed dimension 2
                            specs.push(crate::instr::ArgSpec {
                                is_expand: false,
                                num_indices: 0,
                                expand_all: false,
                            });
                            self.emit(Instr::LoadConst(2.0));
                            // Expand all elements from the base cell
                            specs.push(crate::instr::ArgSpec {
                                is_expand: true,
                                num_indices: 0,
                                expand_all: true,
                            });
                            self.compile_expr(base)?;
                            self.emit(Instr::CallBuiltinExpandMulti("cat".to_string(), specs));
                            return Ok(());
                        }
                    }
                }
                let has_non_literals = matrix_data.iter().any(|row| {
                    row.iter()
                        .any(|expr| !matches!(expr.kind, HirExprKind::Number(_)))
                });
                if has_non_literals {
                    for row in matrix_data {
                        for element in row {
                            self.compile_expr(element)?;
                        }
                    }
                    let row_lengths: Vec<usize> = matrix_data.iter().map(|row| row.len()).collect();
                    if matches!(expr.kind, HirExprKind::Cell(_)) {
                        // For 2D cells we know the row count and row lengths; emit the 2D form when rectangular
                        let rectangular = row_lengths.iter().all(|&c| c == row_lengths[0]);
                        if rectangular {
                            let cols = if rows > 0 { row_lengths[0] } else { 0 };
                            self.emit(Instr::CreateCell2D(rows, cols));
                        } else {
                            // Ragged cells: fall back to a 1D create with the total count, row-major order
                            let total: usize = row_lengths.iter().sum();
                            self.emit(Instr::CreateCell2D(1, total));
                        }
                    } else {
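                        // Non-cell tensor: elements were pushed row-major above; now push each
                        // row length so the VM can assemble rows of runtime-determined width.
                        // e.g. `[x 2; y z]` -> <x> <2> <y> <z> 2.0 2.0 CreateMatrixDynamic(2)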
                        for &row_len in &row_lengths {
                            self.emit(Instr::LoadConst(row_len as f64));
                        }
                        self.emit(Instr::CreateMatrixDynamic(rows));
                    }
                } else {
                    let cols = if rows > 0 { matrix_data[0].len() } else { 0 };
                    for row in matrix_data {
                        for element in row {
                            self.compile_expr(element)?;
                        }
                    }
                    if matches!(expr.kind, HirExprKind::Cell(_)) {
                        self.emit(Instr::CreateCell2D(rows, cols));
                    } else {
                        self.emit(Instr::CreateMatrix(rows, cols));
                    }
                }
            }
            HirExprKind::Index(base, indices) => {
                let has_colon = indices.iter().any(|e| matches!(e.kind, HirExprKind::Colon));
                let has_end = indices.iter().any(|e| matches!(e.kind, HirExprKind::End));
                let has_vector = indices.iter().any(|e| {
                    matches!(e.kind, HirExprKind::Range(_, _, _) | HirExprKind::Tensor(_))
                        || matches!(e.ty, runmat_hir::Type::Tensor { .. })
                });
                // General case: ranges with `end` arithmetic in any dimension (e.g. A(:,2:2:end-1,...)).
                // Lower to IndexRangeEnd: push the base, then each range's start[, step] in increasing
                // dimension order, then any numeric scalar indices (in order). Colon and plain `end`
                // dimensions are recorded in the masks.
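                // Illustrative lowering for `A(:, 2:2:end-1)`:
                //   push A; push 2 (start); push 2 (step);
                //   IndexRangeEnd { dims: 2, numeric_count: 0, colon_mask: 0b01, end_mask: 0,
                //                   range_dims: [1], range_has_step: [true], end_offsets: [1] }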
                {
                    let mut has_any_range_end = false;
                    let mut range_dims: Vec<usize> = Vec::new();
                    let mut range_has_step: Vec<bool> = Vec::new();
                    let mut end_offsets: Vec<i64> = Vec::new();
                    // First pass: detect any range whose end expression is `end - k`
                    for (dim, index) in indices.iter().enumerate() {
                        if let HirExprKind::Range(_start, step, end) = &index.kind {
                            if let HirExprKind::Binary(left, op, right) = &end.kind {
                                if matches!(op, runmat_parser::BinOp::Sub)
                                    && matches!(left.kind, HirExprKind::End)
                                {
                                    has_any_range_end = true;
                                    range_dims.push(dim);
                                    range_has_step.push(step.is_some());
                                    let off = if let HirExprKind::Number(ref s) = right.kind {
                                        s.parse::<i64>().unwrap_or(0)
                                    } else {
                                        0
                                    };
                                    end_offsets.push(off);
                                }
                            }
                        }
                    }
                    if has_any_range_end {
                        self.compile_expr(base)?;
                        // Push each range's start and optional step, in dimension order
                        for &dim in &range_dims {
                            if let HirExprKind::Range(start, step, _end) = &indices[dim].kind {
                                self.compile_expr(start)?;
                                if let Some(st) = step {
                                    self.compile_expr(st)?;
                                }
                            }
                        }
                        // Count numeric scalar indices and push them
                        let mut colon_mask: u32 = 0;
                        let mut end_mask: u32 = 0;
                        let mut numeric_count = 0usize;
                        for (dim, index) in indices.iter().enumerate() {
                            match &index.kind {
                                HirExprKind::Colon => {
                                    colon_mask |= 1u32 << dim;
                                }
                                HirExprKind::End => {
                                    end_mask |= 1u32 << dim;
                                }
                                HirExprKind::Range(_, _, end) => {
                                    // Ranges using `end` arithmetic were handled in the first
                                    // pass; skip them so no numeric index is pushed for those dims.
                                    if let HirExprKind::Binary(left, op, _right) = &end.kind {
                                        if matches!(op, runmat_parser::BinOp::Sub)
                                            && matches!(left.kind, HirExprKind::End)
                                        {
                                            continue;
                                        }
                                    }
                                    // Plain (non-end) ranges are resolved at runtime by the VM's
                                    // range gather; nothing is pushed for them here.
                                }
                                _ => {
                                    self.compile_expr(index)?;
                                    numeric_count += 1;
                                }
                            }
                        }
                        // Pure 1-D case with a single range dim: fall back to the legacy
                        // Index1DRangeEnd instruction to keep existing behavior stable.
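                        // e.g. `v(2:end-1)` -> push v, push 2;
                        //      Index1DRangeEnd { has_step: false, offset: 1 }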
                        if indices.len() == 1 && range_dims.len() == 1 {
                            // start[, step] were already pushed above
                            self.emit(Instr::Index1DRangeEnd {
                                has_step: range_has_step[0],
                                offset: end_offsets[0],
                            });
                        } else {
                            self.emit(Instr::IndexRangeEnd {
                                dims: indices.len(),
                                numeric_count,
                                colon_mask,
                                end_mask,
                                range_dims,
                                range_has_step,
                                end_offsets,
                            });
                        }
                        return Ok(());
                    }
                }
                if has_colon
                    || has_vector
                    || has_end
                    || indices.len() > 2
                    || Self::expr_contains_end(base)
                {
                    // Push the base first, then numeric indices in order; record colon/end dims in masks
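                    // e.g. `A(1, end)` -> push A, push 1; IndexSlice(2, 1, 0b00, 0b10)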
                    self.compile_expr(base)?;
                    let mut colon_mask: u32 = 0;
                    let mut end_mask: u32 = 0;
                    let mut numeric_count = 0usize;
                    let mut end_offsets: Vec<(usize, i64)> = Vec::new();
                    for (dim, index) in indices.iter().enumerate() {
                        if matches!(index.kind, HirExprKind::Colon) {
                            colon_mask |= 1u32 << dim;
                        } else if matches!(index.kind, HirExprKind::End) {
                            end_mask |= 1u32 << dim;
                        } else {
                            // Detect simple end-arithmetic forms: end-1, end-2, ...
                            if let HirExprKind::Binary(left, op, right) = &index.kind {
                                if matches!(op, runmat_parser::BinOp::Sub)
                                    && matches!(left.kind, HirExprKind::End)
                                {
                                    // The right side should be a numeric literal; parse it as an integer offset
                                    if let HirExprKind::Number(ref s) = right.kind {
                                        if let Ok(k) = s.parse::<i64>() {
                                            // Reserve a numeric slot: push a placeholder and count it
                                            self.emit(Instr::LoadConst(0.0));
                                            end_offsets.push((numeric_count, k));
                                            numeric_count += 1;
                                            continue;
                                        }
                                    }
                                }
                            }
                            self.compile_expr(index)?;
                            numeric_count += 1;
                        }
                    }
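                    // e.g. `A(end-1, :)` pushed A and a 0.0 placeholder above, so
                    // end_offsets = [(0, 1)] and we emit IndexSliceEx(2, 1, 0b10, 0b00, [(0, 1)]);
                    // the VM is expected to resolve the placeholder slot from the dimension
                    // extent at runtime.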
                    if end_offsets.is_empty() {
                        self.emit(Instr::IndexSlice(
                            indices.len(),
                            numeric_count,
                            colon_mask,
                            end_mask,
                        ));
                    } else {
                        self.emit(Instr::IndexSliceEx(
                            indices.len(),
                            numeric_count,
                            colon_mask,
                            end_mask,
                            end_offsets,
                        ));
                    }
                } else {
                    self.compile_expr(base)?;
                    for index in indices {
                        self.compile_expr(index)?;
                    }
                    self.emit(Instr::Index(indices.len()));
                }
            }
            HirExprKind::Colon => {
                // Placeholder for contexts where a colon appears in RHS expansion; real colon
                // handling happens in the indexing logic. Emit a benign constant to keep stack
                // discipline if this is ever compiled directly.
                self.emit(Instr::LoadConst(0.0));
            }
            HirExprKind::End => {
                self.emit(Instr::LoadConst(-0.0)); // placeholder, resolved via end_mask in IndexSlice
            }
            HirExprKind::Member(base, field) => {
                // If the base is a known class reference (a string via the `classref` builtin)
                // or a metaclass literal, compile a static property access; otherwise compile
                // an instance member access.
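                // e.g. `?MyClass.Count` -> LoadStaticProperty("MyClass", "Count");
                //      `obj.count`      -> <obj>; LoadMember("count")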
                match &base.kind {
                    HirExprKind::MetaClass(cls_name) => {
                        self.emit(Instr::LoadStaticProperty(cls_name.clone(), field.clone()));
                    }
                    HirExprKind::FuncCall(name, args) if name == "classref" && args.len() == 1 => {
                        if let HirExprKind::String(cls) = &args[0].kind {
                            let cls_name = if cls.starts_with('\'') && cls.ends_with('\'') {
                                cls[1..cls.len() - 1].to_string()
                            } else {
                                cls.clone()
                            };
                            self.emit(Instr::LoadStaticProperty(cls_name, field.clone()));
                        } else {
                            self.compile_expr(base)?;
                            self.emit(Instr::LoadMember(field.clone()));
                        }
                    }
                    _ => {
                        // Default to instance property access; subsref overloading is handled
                        // at runtime via call_method when needed
                        self.compile_expr(base)?;
                        self.emit(Instr::LoadMember(field.clone()));
                    }
                }
            }
            HirExprKind::MemberDynamic(base, name_expr) => {
                // Dynamic member access: s.(expr)
                self.compile_expr(base)?;
                self.compile_expr(name_expr)?;
                self.emit(Instr::LoadMemberDynamic);
            }
            HirExprKind::MethodCall(b, m, a) if m.as_str() == "()" && a.len() == 1 => {
                // Note: the parser does not currently produce this form; placeholder for
                // dynamic member access expressed as a method call
                self.compile_expr(b)?;
                self.compile_expr(&a[0])?;
                self.emit(Instr::LoadMemberDynamic);
            }
            HirExprKind::MethodCall(base, method, args) => match &base.kind {
                HirExprKind::MetaClass(cls_name) => {
                    for arg in args {
                        self.compile_expr(arg)?;
                    }
                    self.emit(Instr::CallStaticMethod(
                        cls_name.clone(),
                        method.clone(),
                        args.len(),
                    ));
                }
                HirExprKind::FuncCall(name, bargs) if name == "classref" && bargs.len() == 1 => {
                    if let HirExprKind::String(cls) = &bargs[0].kind {
                        let cls_name = if cls.starts_with('\'') && cls.ends_with('\'') {
                            cls[1..cls.len() - 1].to_string()
                        } else {
                            cls.clone()
                        };
                        for arg in args {
                            self.compile_expr(arg)?;
                        }
                        self.emit(Instr::CallStaticMethod(
                            cls_name,
                            method.clone(),
                            args.len(),
                        ));
                    } else {
                        self.compile_expr(base)?;
                        for arg in args {
                            self.compile_expr(arg)?;
                        }
                        self.emit(Instr::CallMethod(method.clone(), args.len()));
                    }
                }
                _ => {
                    self.compile_expr(base)?;
                    for arg in args {
                        self.compile_expr(arg)?;
                    }
                    self.emit(Instr::CallMethod(method.clone(), args.len()));
                }
            },
            HirExprKind::AnonFunc { params, body } => {
                // Collect free variables in the body (in order of first appearance)
                use std::collections::{HashMap, HashSet};
                let mut seen: HashSet<runmat_hir::VarId> = HashSet::new();
                let mut captures_order: Vec<runmat_hir::VarId> = Vec::new();
                let bound: HashSet<runmat_hir::VarId> = params.iter().cloned().collect();
                self.collect_free_vars(body, &bound, &mut seen, &mut captures_order);

                // Build placeholder VarIds for captures and parameters
                let capture_count = captures_order.len();
                let mut placeholder_params: Vec<runmat_hir::VarId> =
                    Vec::with_capacity(capture_count + params.len());
                for i in 0..capture_count {
                    placeholder_params.push(runmat_hir::VarId(i));
                }
                for j in 0..params.len() {
                    placeholder_params.push(runmat_hir::VarId(capture_count + j));
                }
                let output_id = runmat_hir::VarId(capture_count + params.len());

                // Remap body vars: free vars -> capture placeholders; params -> shifted placeholders
                let mut var_map: HashMap<runmat_hir::VarId, runmat_hir::VarId> = HashMap::new();
                for (i, old) in captures_order.iter().enumerate() {
                    var_map.insert(*old, runmat_hir::VarId(i));
                }
                for (j, old) in params.iter().enumerate() {
                    var_map.insert(*old, runmat_hir::VarId(capture_count + j));
                }
                let remapped_body = runmat_hir::remapping::remap_expr(body, &var_map);
                let func_body = vec![runmat_hir::HirStmt::Assign(output_id, remapped_body, true)];

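                // Illustrative mapping for `@(x) a*x + b` (a and b free in the body):
                //   captures_order = [a, b]; var_map: a -> v0, b -> v1, x -> v2; output is v3.
                //   Below we emit LoadVar(a), LoadVar(b), then CreateClosure("__anon_N", 2).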
                // Synthesize a function name and register the function
                let synthesized = format!("__anon_{}", self.functions.len());
                let user_func = UserFunction {
                    name: synthesized.clone(),
                    params: placeholder_params,
                    outputs: vec![output_id],
                    body: func_body,
                    local_var_count: capture_count + params.len() + 1,
                    has_varargin: false,
                    has_varargout: false,
                    var_types: vec![Type::Unknown; capture_count + params.len() + 1],
                };
                self.functions.insert(synthesized.clone(), user_func);

                // Push the captured values on the stack, then create the closure
                for old in &captures_order {
                    self.emit(Instr::LoadVar(old.0));
                }
                self.emit(Instr::CreateClosure(synthesized, capture_count));
            }
            HirExprKind::FuncHandle(name) => {
                self.emit(Instr::LoadString(name.clone()));
                self.emit(Instr::CallBuiltin("make_handle".to_string(), 1));
            }
            HirExprKind::MetaClass(name) => {
                // Lower a metaclass literal to its class-name string; Member/MethodCall on a
                // metaclass is then handled at runtime as static property/method access via
                // classref, so those code paths remain unchanged.
                self.emit(Instr::LoadString(name.clone()));
            }
            HirExprKind::IndexCell(base, indices) => {
                self.compile_expr(base)?;
                for index in indices {
                    self.compile_expr(index)?;
                }
                self.emit(Instr::IndexCell(indices.len()));
            }
        }
        Ok(())
    }
}