1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
//! Bytecode compiler - AST to bytecode translation
//!
//! OPT-002: Bytecode Compiler
//!
//! Translates Ruchy AST to bytecode instructions with:
//! - Linear scan register allocation
//! - Constant pool management
//! - Jump target resolution
//! - Local variable tracking
//!
//! Reference: ../`ruchyruchy/OPTIMIZATION_REPORT_FOR_RUCHY.md`
//! Expected: Efficient bytecode generation with minimal overhead
use super::instruction::Instruction;
use super::opcode::OpCode;
use crate::frontend::ast::{BinaryOp, Expr, ExprKind, Literal, Param, UnaryOp};
use crate::runtime::Value;
use std::cell::RefCell; // ISSUE-119: For shared mutable environment
use std::collections::HashMap;
use std::rc::Rc; // ISSUE-119: For shared mutable environment
use std::sync::Arc;
/// Bytecode function chunk
///
/// Contains compiled bytecode and associated metadata for a function.
/// Several side tables (`loop_bodies`, `method_calls`, `match_exprs`,
/// `closures`) hold raw AST so the VM can delegate complex constructs
/// back to the tree-walking interpreter ("hybrid execution").
#[derive(Debug, Clone)]
pub struct BytecodeChunk {
    /// Function name (for debugging)
    pub name: String,
    /// Bytecode instructions
    pub instructions: Vec<Instruction>,
    /// Constant pool (literals used in the function); deduplicated by `add_constant`
    pub constants: Vec<Value>,
    /// Number of registers required
    pub register_count: u8,
    /// Number of parameters
    pub parameter_count: u8,
    /// Local variable names (for debugging)
    pub local_names: Vec<String>,
    /// Source line numbers (parallel to instructions for debugging)
    /// Invariant: same length as `instructions` — `emit` pushes to both.
    pub line_numbers: Vec<usize>,
    /// Loop bodies (for hybrid execution - OPT-012)
    /// Stores AST bodies for for-loops to enable interpreter delegation
    pub loop_bodies: Vec<Arc<Expr>>,
    /// Method calls (for hybrid execution - OPT-014)
    /// Stores AST for method calls to enable interpreter delegation
    /// Each entry: (`receiver_expr`, `method_name`, `args_exprs`)
    pub method_calls: Vec<(Arc<Expr>, String, Vec<Arc<Expr>>)>,
    /// Match expressions (for hybrid execution - OPT-018)
    /// Stores AST for match expressions to enable interpreter delegation
    /// Each entry: (`match_expr`, `match_arms`)
    pub match_exprs: Vec<(Arc<Expr>, Vec<crate::frontend::ast::MatchArm>)>,
    /// Closures (for hybrid execution - OPT-019)
    /// Stores AST for closures to enable interpreter delegation
    /// Each entry: (`params_with_defaults`, body) - environment captured at runtime
    /// RUNTIME-DEFAULT-PARAMS: Params now include default values
    pub closures: Vec<(Vec<(String, Option<Arc<Expr>>)>, Arc<Expr>)>,
    /// Array element registers (for runtime array construction - OPT-020)
    /// Stores register lists for `NewArray` opcodes (element registers may not be contiguous)
    /// Also reused by `NewTuple` (see `compile_tuple`).
    pub array_element_regs: Vec<Vec<u8>>,
    /// Object field data (for runtime object construction - OPT-020)
    /// Stores (`key`, `value_register`) pairs for `NewObject` opcodes
    pub object_fields: Vec<Vec<(String, u8)>>,
    /// Local variable name to register mapping (for hybrid execution)
    /// Enables synchronization between bytecode registers and interpreter scope
    pub locals_map: HashMap<String, u8>,
}
impl BytecodeChunk {
    /// Create a new empty bytecode chunk named after the function it will hold.
    pub fn new(name: String) -> Self {
        Self {
            name,
            instructions: Vec::new(),
            constants: Vec::new(),
            register_count: 0,
            parameter_count: 0,
            local_names: Vec::new(),
            line_numbers: Vec::new(),
            loop_bodies: Vec::new(),
            method_calls: Vec::new(),
            match_exprs: Vec::new(),
            closures: Vec::new(),
            array_element_regs: Vec::new(),
            object_fields: Vec::new(),
            locals_map: HashMap::new(),
        }
    }

    /// Add an instruction to the chunk.
    ///
    /// `line` is the source line recorded in the parallel `line_numbers`
    /// table (kept the same length as `instructions`). Returns the index
    /// where the instruction was added (used later for jump patching).
    pub fn emit(&mut self, instruction: Instruction, line: usize) -> usize {
        let index = self.instructions.len();
        self.instructions.push(instruction);
        self.line_numbers.push(line);
        index
    }

    /// Add a constant to the constant pool, deduplicating equal values.
    ///
    /// Returns the index of the constant (existing or newly added).
    ///
    /// # Panics
    /// Panics if the pool would exceed `u16::MAX` entries: constant indices
    /// are encoded in a 16-bit instruction field, and the previous bare
    /// `as u16` cast silently truncated in that case, making the bytecode
    /// reference the wrong constant.
    pub fn add_constant(&mut self, value: Value) -> u16 {
        // Reuse an existing slot when an equal constant is already pooled.
        if let Some(i) = self.constants.iter().position(|c| values_equal(c, &value)) {
            return u16::try_from(i).expect("constant pool exceeds u16::MAX entries");
        }
        let index = self.constants.len();
        self.constants.push(value);
        u16::try_from(index).expect("constant pool exceeds u16::MAX entries")
    }

    /// Patch a forward jump instruction at `jump_index`.
    ///
    /// Rewrites the jump's signed offset so it lands on the instruction that
    /// will be emitted next (the current end of the chunk); the offset is
    /// relative to the instruction *after* the jump, hence the `- 1`.
    /// The opcode and A operand of the original instruction are preserved.
    ///
    /// # Panics
    /// Panics if the stored opcode byte is invalid. The offset cast assumes
    /// the jump distance fits in an `i16` (the sBx field width).
    pub fn patch_jump(&mut self, jump_index: usize) {
        let offset = (self.instructions.len() - jump_index - 1) as i16;
        let instruction = &self.instructions[jump_index];
        // Instruction words are immutable, so rebuild with the patched offset.
        let patched = Instruction::asbx(
            OpCode::from_u8(instruction.opcode()).expect("Invalid opcode"),
            instruction.get_a(),
            offset,
        );
        self.instructions[jump_index] = patched;
    }
}
/// Simple register allocator using linear scan
///
/// Hands out register indices sequentially, recycles freed ones through a
/// stack, and tracks the high-water mark for function metadata.
#[derive(Debug)]
struct RegisterAllocator {
    /// Number of registers currently allocated
    next_register: u8,
    /// Maximum register count seen (for function metadata)
    max_registers: u8,
    /// Stack of free registers (for reuse)
    free_registers: Vec<u8>,
}

impl RegisterAllocator {
    /// Build an allocator with no registers handed out yet.
    fn new() -> Self {
        Self {
            next_register: 0,
            max_registers: 0,
            free_registers: Vec::new(),
        }
    }

    /// Hand out a register index, preferring a recycled one over a fresh one.
    fn allocate(&mut self) -> u8 {
        match self.free_registers.pop() {
            Some(recycled) => recycled,
            None => {
                let fresh = self.next_register;
                self.next_register += 1;
                if self.next_register > self.max_registers {
                    self.max_registers = self.next_register;
                }
                fresh
            }
        }
    }

    /// Return a register to the recycle stack for later reuse.
    fn free(&mut self, register: u8) {
        self.free_registers.push(register);
    }

    /// High-water mark of simultaneously allocated registers.
    fn max_count(&self) -> u8 {
        self.max_registers
    }
}
/// Bytecode compiler state
///
/// Maintains compilation context including local variables and register allocation.
pub struct Compiler {
    /// Current bytecode chunk being compiled
    chunk: BytecodeChunk,
    /// Register allocator
    registers: RegisterAllocator,
    /// Local variable mapping (name -> register); `let` shadows by overwrite
    locals: HashMap<String, u8>,
    /// Current scope depth
    /// NOTE(review): incremented nowhere in the visible code — confirm
    /// whether scope tracking is implemented elsewhere or still pending.
    scope_depth: usize,
    /// Last result register (for Return instruction); updated by `compile_expr`
    last_result: u8,
}
impl Compiler {
/// Create a new compiler
///
/// Initializes an empty chunk named `function_name`, a fresh register
/// allocator, and an empty locals table at scope depth zero.
pub fn new(function_name: String) -> Self {
    Self {
        chunk: BytecodeChunk::new(function_name),
        registers: RegisterAllocator::new(),
        locals: HashMap::new(),
        last_result: 0,
        scope_depth: 0,
    }
}
/// Compile an expression to bytecode
///
/// Central dispatch point of the compiler: routes each supported AST node
/// kind to its dedicated `compile_*` helper, then records the result
/// register in `self.last_result` so Return emission knows what to return.
///
/// Returns the register containing the result, or an error string for AST
/// kinds the bytecode backend does not support yet.
pub fn compile_expr(&mut self, expr: &Expr) -> Result<u8, String> {
    let result = match &expr.kind {
        ExprKind::Literal(lit) => self.compile_literal(lit),
        ExprKind::Binary { op, left, right } => self.compile_binary(op, left, right),
        ExprKind::Unary { op, operand } => self.compile_unary(op, operand),
        ExprKind::Identifier(name) => self.compile_variable(name),
        ExprKind::Let {
            name, value, body, ..
        } => self.compile_let(name, value, body),
        ExprKind::Block(exprs) => self.compile_block(exprs),
        ExprKind::If {
            condition,
            then_branch,
            else_branch,
        } => self.compile_if(condition, then_branch, else_branch.as_deref()),
        ExprKind::Call { func, args } => self.compile_call(func, args),
        ExprKind::While {
            condition, body, ..
        } => self.compile_while(condition, body),
        ExprKind::Assign { target, value } => self.compile_assign(target, value),
        ExprKind::Function {
            name, params, body, ..
        } => self.compile_function(name, params, body),
        ExprKind::List(elements) => self.compile_list(elements),
        ExprKind::Tuple(elements) => self.compile_tuple(elements),
        ExprKind::ObjectLiteral { fields } => self.compile_object_literal(fields),
        ExprKind::For {
            var, iter, body, ..
        } => self.compile_for(var, iter, body),
        ExprKind::IndexAccess { object, index } => self.compile_index_access(object, index),
        ExprKind::MethodCall {
            receiver,
            method,
            args,
        } => self.compile_method_call(receiver, method, args),
        ExprKind::FieldAccess { object, field } => self.compile_field_access(object, field),
        ExprKind::Match { expr, arms } => self.compile_match(expr, arms),
        ExprKind::Lambda { params, body } => self.compile_closure(params, body),
        // Any other AST kind has no bytecode lowering yet.
        _ => Err(format!("Unsupported expression kind: {:?}", expr.kind)),
    }?;
    // Remember where the value landed for the trailing Return instruction.
    self.last_result = result;
    Ok(result)
}
/// Compile a literal value
///
/// Lowers the literal into a runtime `Value`, interns it in the constant
/// pool, and emits `Const` to load it into a freshly allocated register.
/// Returns that register.
fn compile_literal(&mut self, literal: &Literal) -> Result<u8, String> {
    let dst = self.registers.allocate();
    let value = match literal {
        Literal::Integer(i, _) => Value::Integer(*i),
        Literal::Float(f) => Value::Float(*f),
        Literal::String(s) => Value::from_string(s.clone()),
        Literal::Bool(b) => Value::Bool(*b),
        // Unit and Null both lower to the runtime nil value.
        Literal::Unit | Literal::Null => Value::Nil,
        // Chars are represented as one-character strings at runtime.
        Literal::Char(c) => Value::from_string(c.to_string()),
        Literal::Byte(b) => Value::Integer(i64::from(*b)),
        Literal::Atom(s) => Value::Atom(s.clone()),
    };
    let const_index = self.chunk.add_constant(value);
    // R[dst] = constants[const_index]; line info not tracked yet, hence 0.
    self.chunk
        .emit(Instruction::abx(OpCode::Const, dst, const_index), 0);
    Ok(dst)
}
/// Compile a binary operation
///
/// Evaluates both operands, maps the AST operator onto its opcode, and
/// emits `R[dst] = R[lhs] op R[rhs]` (ABC encoding). Both operand
/// registers are freed afterwards; the result register is returned.
fn compile_binary(&mut self, op: &BinaryOp, left: &Expr, right: &Expr) -> Result<u8, String> {
    let lhs = self.compile_expr(left)?;
    let rhs = self.compile_expr(right)?;
    let dst = self.registers.allocate();
    let opcode = match op {
        BinaryOp::Add => OpCode::Add,
        BinaryOp::Subtract => OpCode::Sub,
        BinaryOp::Multiply => OpCode::Mul,
        BinaryOp::Divide => OpCode::Div,
        BinaryOp::Modulo => OpCode::Mod,
        BinaryOp::Equal => OpCode::Equal,
        BinaryOp::NotEqual => OpCode::NotEqual,
        BinaryOp::Greater | BinaryOp::Gt => OpCode::Greater,
        BinaryOp::GreaterEqual => OpCode::GreaterEqual,
        BinaryOp::Less => OpCode::Less,
        BinaryOp::LessEqual => OpCode::LessEqual,
        BinaryOp::And => OpCode::And,
        BinaryOp::Or => OpCode::Or,
        BinaryOp::BitwiseAnd => OpCode::BitAnd,
        BinaryOp::BitwiseOr => OpCode::BitOr,
        BinaryOp::BitwiseXor => OpCode::BitXor,
        BinaryOp::LeftShift => OpCode::ShiftLeft,
        BinaryOp::RightShift => OpCode::ShiftRight,
        _ => return Err(format!("Unsupported binary operator: {op:?}")),
    };
    self.chunk
        .emit(Instruction::abc(opcode, dst, lhs, rhs), 0);
    // Operands are dead once the instruction is emitted; recycle them.
    self.registers.free(lhs);
    self.registers.free(rhs);
    Ok(dst)
}
/// Compile a unary operation
///
/// Evaluates the operand, then emits the matching unary opcode into a new
/// result register (AB encoding: A = result, B = operand, C unused).
/// Reference-style operators have no bytecode lowering and report an error.
fn compile_unary(&mut self, op: &UnaryOp, operand: &Expr) -> Result<u8, String> {
    let src = self.compile_expr(operand)?;
    let dst = self.registers.allocate();
    let opcode = match op {
        UnaryOp::Negate => OpCode::Neg,
        UnaryOp::Not => OpCode::Not,
        UnaryOp::BitwiseNot => OpCode::BitNot,
        // PARSER-085: Issue #71 - MutableReference grouped with the other
        // unsupported reference operators.
        UnaryOp::Reference | UnaryOp::MutableReference | UnaryOp::Deref => {
            return Err(format!("Unsupported unary operator: {op:?}"));
        }
    };
    self.chunk
        .emit(Instruction::abc(opcode, dst, src, 0), 0);
    // The operand is dead once the instruction is emitted; recycle it.
    self.registers.free(src);
    Ok(dst)
}
/// Compile a variable reference
///
/// Locals are copied into a fresh temporary via `Move` so that consumers
/// (e.g. `compile_binary`) can free the returned register without
/// clobbering the variable's home register. Unknown names are treated as
/// globals and loaded by name through `LoadGlobal`.
fn compile_variable(&mut self, name: &str) -> Result<u8, String> {
    match self.locals.get(name).copied() {
        Some(home) => {
            let dst = self.registers.allocate();
            self.chunk
                .emit(Instruction::abc(OpCode::Move, dst, home, 0), 0);
            Ok(dst)
        }
        None => {
            // Global lookup: the variable name itself becomes a constant.
            let name_idx = self
                .chunk
                .add_constant(Value::from_string(name.to_string()));
            let dst = self.registers.allocate();
            self.chunk
                .emit(Instruction::abx(OpCode::LoadGlobal, dst, name_idx), 0);
            Ok(dst)
        }
    }
}
/// Compile a let binding
///
/// Evaluates the initializer, records its register under `name` in the
/// locals table (shadowing any previous binding of that name), then
/// compiles the body and returns the body's result register.
fn compile_let(&mut self, name: &str, value: &Expr, body: &Expr) -> Result<u8, String> {
    let value_reg = self.compile_expr(value)?;
    // Bind before compiling the body so identifier lookups inside the
    // body resolve to value_reg.
    self.chunk.local_names.push(name.to_string());
    self.locals.insert(name.to_string(), value_reg);
    self.compile_expr(body)
}
/// Compile a block expression
///
/// Compiles each expression in sequence, recycling every intermediate
/// result register except those still backing a local variable, and
/// returns the register holding the last expression's value. An empty
/// block compiles to a nil (`Unit`) literal.
fn compile_block(&mut self, exprs: &[Expr]) -> Result<u8, String> {
    let Some((first, rest)) = exprs.split_first() else {
        return self.compile_literal(&Literal::Unit);
    };
    let mut result = self.compile_expr(first)?;
    for expr in rest {
        // The previous result is dead — unless a `let` bound it, in which
        // case freeing would let later code clobber the variable.
        if !self.is_local_register(result) {
            self.registers.free(result);
        }
        result = self.compile_expr(expr)?;
    }
    Ok(result)
}
/// Check if a register is used by a local variable
///
/// Consulted by `compile_block` before freeing an intermediate result so
/// registers backing live `let` bindings are never recycled.
fn is_local_register(&self, reg: u8) -> bool {
    self.locals.iter().any(|(_, &bound)| bound == reg)
}
/// Compile an if expression
///
/// Generates conditional branching bytecode with optional else branch.
///
/// Emitted shape (with else):
///   cond ; JumpIfFalse -> ELSE ; then ; Move result ; Jump -> END ;
///   ELSE: else ; Move result ; END:
/// Without an else branch the JumpIfFalse simply skips past the then code.
fn compile_if(
    &mut self,
    condition: &Expr,
    then_branch: &Expr,
    else_branch: Option<&Expr>,
) -> Result<u8, String> {
    // Result register is allocated up front so both branches Move into it.
    let result_reg = self.registers.allocate();
    // Compile condition
    let cond_reg = self.compile_expr(condition)?;
    // Conditional jump with placeholder offset (0); patched below once the
    // target position is known.
    let jump_to_else = self
        .chunk
        .emit(Instruction::asbx(OpCode::JumpIfFalse, cond_reg, 0), 0);
    self.registers.free(cond_reg);
    // Compile then branch
    let then_reg = self.compile_expr(then_branch)?;
    // Funnel the branch value into the shared result register
    self.chunk
        .emit(Instruction::abc(OpCode::Move, result_reg, then_reg, 0), 0);
    self.registers.free(then_reg);
    if let Some(else_expr) = else_branch {
        // Unconditional jump so the then-path skips over the else code.
        let jump_to_end = self.chunk.emit(Instruction::asbx(OpCode::Jump, 0, 0), 0);
        // The false path lands here, at the start of the else code.
        self.chunk.patch_jump(jump_to_else);
        // Compile else branch
        let else_reg = self.compile_expr(else_expr)?;
        self.chunk
            .emit(Instruction::abc(OpCode::Move, result_reg, else_reg, 0), 0);
        self.registers.free(else_reg);
        // Patch jump to end
        self.chunk.patch_jump(jump_to_end);
    } else {
        // No else branch - result is nil if condition is false.
        // NOTE(review): on the false path result_reg is never written, so
        // this relies on VM registers defaulting to nil — confirm in the VM.
        self.chunk.patch_jump(jump_to_else);
    }
    Ok(result_reg)
}
/// Compile a while loop
///
/// Generates: `loop_start` → check condition → if false jump to end → body → jump to start → `loop_end`
///
/// The loop always evaluates to nil.
fn compile_while(&mut self, condition: &Expr, body: &Expr) -> Result<u8, String> {
    let result_reg = self.registers.allocate();
    // Mark loop start position; the condition is re-evaluated here every
    // iteration, so the backward jump must land exactly on this index.
    let loop_start = self.chunk.instructions.len();
    // Compile condition
    let cond_reg = self.compile_expr(condition)?;
    // Forward jump with placeholder offset; patched once loop end is known.
    let jump_to_end = self
        .chunk
        .emit(Instruction::asbx(OpCode::JumpIfFalse, cond_reg, 0), 0);
    self.registers.free(cond_reg);
    // Compile body; its value is discarded (the loop itself yields nil).
    let body_reg = self.compile_expr(body)?;
    self.registers.free(body_reg);
    // Backward jump to loop start. The +1 accounts for the jump instruction
    // itself: offsets are applied relative to the instruction *after* the
    // jump (the same convention patch_jump uses for forward jumps).
    let offset = -((self.chunk.instructions.len() - loop_start + 1) as i16);
    self.chunk
        .emit(Instruction::asbx(OpCode::Jump, 0, offset), 0);
    // Patch forward jump to end
    self.chunk.patch_jump(jump_to_end);
    // While loops return nil
    let nil_const = self.chunk.add_constant(Value::Nil);
    self.chunk
        .emit(Instruction::abx(OpCode::Const, result_reg, nil_const), 0);
    Ok(result_reg)
}
/// Compile an assignment expression
///
/// Evaluates the value expression and `Move`s it into the target
/// variable's home register. Returns that register (assignment is an
/// expression that yields the assigned value). Only simple identifier
/// targets are supported; the identifier must already be a local.
///
/// Fix: the lookup pattern had been corrupted to `Some(®)` — an HTML
/// `&reg;` entity mangling of `Some(&reg)` — which does not compile.
fn compile_assign(&mut self, target: &Expr, value: &Expr) -> Result<u8, String> {
    match &target.kind {
        ExprKind::Identifier(name) => {
            // Look up the variable's register
            let target_reg = if let Some(&reg) = self.locals.get(name) {
                reg
            } else {
                return Err(format!("Undefined variable: {name}"));
            };
            // Compile the value expression
            let value_reg = self.compile_expr(value)?;
            // Move value to target register
            self.chunk
                .emit(Instruction::abc(OpCode::Move, target_reg, value_reg, 0), 0);
            // Free temporary value register if different from target
            if value_reg != target_reg {
                self.registers.free(value_reg);
            }
            // Assignment returns the assigned value
            Ok(target_reg)
        }
        _ => Err(format!("Unsupported assignment target: {:?}", target.kind)),
    }
}
/// Compile a function call
///
/// OPT-011: simplified calling convention for hybrid execution. The
/// function and argument register numbers are packed into a constant-pool
/// array `[func_reg, arg_reg1, arg_reg2, ...]` that the VM unpacks when it
/// executes the `Call` instruction. This is a stopgap until a real
/// bytecode calling convention exists.
fn compile_call(&mut self, func: &Expr, args: &[Expr]) -> Result<u8, String> {
    let result_reg = self.registers.allocate();
    // Evaluate the callee first, then each argument in order, collecting
    // their register numbers directly into the call-info array.
    let func_reg = self.compile_expr(func)?;
    let mut call_info = Vec::with_capacity(args.len() + 1);
    call_info.push(Value::Integer(i64::from(func_reg)));
    for arg in args {
        let arg_reg = self.compile_expr(arg)?;
        call_info.push(Value::Integer(i64::from(arg_reg)));
    }
    let call_info_idx = self.chunk.add_constant(Value::from_array(call_info));
    // ABx: A = result register, Bx = call-info constant index.
    self.chunk
        .emit(Instruction::abx(OpCode::Call, result_reg, call_info_idx), 0);
    // OPT-011: func/arg registers are intentionally not freed — they hold
    // values the VM reads at runtime. DESIGN DECISION: accept some register
    // pressure for correctness; precise lifetime analysis is future work.
    Ok(result_reg)
}
/// Compile a function definition
///
/// Builds a `Value::Closure` from the cloned parameter list and body AST,
/// loads it from the constant pool into a register, and binds the function
/// name in the locals table so later identifier lookups resolve to it.
fn compile_function(
    &mut self,
    name: &str,
    params: &[Param],
    body: &Expr,
) -> Result<u8, String> {
    // RUNTIME-DEFAULT-PARAMS: carry each parameter's default-value AST
    // along with its name so missing arguments can be filled at runtime.
    let params_with_defaults: Vec<(String, Option<Arc<Expr>>)> = params
        .iter()
        .map(|p| {
            let default = p.default_value.as_ref().map(|d| Arc::new((**d).clone()));
            (p.name(), default)
        })
        .collect();
    // Empty captured environment for now; full lexical scoping comes later.
    // ISSUE-119: Rc<RefCell<..>> makes the environment shared and mutable.
    let closure = Value::Closure {
        params: params_with_defaults,
        body: Arc::new(body.clone()),
        env: Rc::new(RefCell::new(HashMap::new())),
    };
    let const_index = self.chunk.add_constant(closure);
    let closure_reg = self.registers.allocate();
    self.chunk
        .emit(Instruction::abx(OpCode::Const, closure_reg, const_index), 0);
    // Bind the function name for later retrieval by compile_variable.
    self.locals.insert(name.to_string(), closure_reg);
    self.chunk.local_names.push(name.to_string());
    Ok(closure_reg)
}
/// Compile a list/array literal
///
/// OPT-020: Support both literal and non-literal elements
/// - Literals: Compile to constant pool (optimization)
/// - Non-literals: Compile elements to registers, emit `NewArray` opcode
fn compile_list(&mut self, elements: &[Expr]) -> Result<u8, String> {
// Check if all elements are literals (can optimize)
let all_literals = elements
.iter()
.all(|elem| matches!(&elem.kind, ExprKind::Literal(_)));
if all_literals && !elements.is_empty() {
// Optimization: Create array at compile-time in constant pool
let mut element_values = Vec::new();
for elem in elements {
if let ExprKind::Literal(lit) = &elem.kind {
let value = match lit {
Literal::Integer(i, _) => Value::Integer(*i),
Literal::Float(f) => Value::Float(*f),
Literal::String(s) => Value::from_string(s.clone()),
Literal::Bool(b) => Value::Bool(*b),
Literal::Unit | Literal::Null => Value::Nil,
Literal::Char(c) => Value::from_string(c.to_string()),
Literal::Byte(b) => Value::Integer(i64::from(*b)),
Literal::Atom(s) => Value::Atom(s.clone()),
};
element_values.push(value);
}
}
let array_value = Value::from_array(element_values);
let const_index = self.chunk.add_constant(array_value);
let result_reg = self.registers.allocate();
self.chunk
.emit(Instruction::abx(OpCode::Const, result_reg, const_index), 0);
Ok(result_reg)
} else {
// Runtime array construction: compile elements to registers
let mut element_regs = Vec::new();
for elem in elements {
let elem_reg = self.compile_expr(elem)?;
element_regs.push(elem_reg);
}
// Store element registers in chunk (element registers may not be contiguous)
let element_regs_idx = self.chunk.array_element_regs.len();
self.chunk.array_element_regs.push(element_regs);
// Allocate destination register
let result_reg = self.registers.allocate();
// Emit NewArray instruction: result = new Array(elements...)
// Format: NewArray result_reg, element_regs_idx (stored in chunk)
// B field holds the index into chunk.array_element_regs
self.chunk.emit(
Instruction::abx(OpCode::NewArray, result_reg, element_regs_idx as u16),
0,
);
Ok(result_reg)
}
}
/// Compile a tuple literal
///
/// OPT-020: Support both literal and non-literal elements
/// - Literals: Compile to constant pool (optimization)
/// - Non-literals: Compile elements to registers, emit `NewTuple` opcode
fn compile_tuple(&mut self, elements: &[Expr]) -> Result<u8, String> {
// Check if all elements are literals (can optimize)
let all_literals = elements
.iter()
.all(|elem| matches!(&elem.kind, ExprKind::Literal(_)));
if all_literals && !elements.is_empty() {
// Optimization: Create tuple at compile-time in constant pool
let mut element_values = Vec::new();
for elem in elements {
if let ExprKind::Literal(lit) = &elem.kind {
let value = match lit {
Literal::Integer(i, _) => Value::Integer(*i),
Literal::Float(f) => Value::Float(*f),
Literal::String(s) => Value::from_string(s.clone()),
Literal::Bool(b) => Value::Bool(*b),
Literal::Unit | Literal::Null => Value::Nil,
Literal::Char(c) => Value::from_string(c.to_string()),
Literal::Byte(b) => Value::Integer(i64::from(*b)),
Literal::Atom(s) => Value::Atom(s.clone()),
};
element_values.push(value);
}
}
let tuple_value = Value::Tuple(Arc::from(element_values.as_slice()));
let const_index = self.chunk.add_constant(tuple_value);
let result_reg = self.registers.allocate();
self.chunk
.emit(Instruction::abx(OpCode::Const, result_reg, const_index), 0);
Ok(result_reg)
} else {
// Runtime tuple construction: compile elements to registers
let mut element_regs = Vec::new();
for elem in elements {
let elem_reg = self.compile_expr(elem)?;
element_regs.push(elem_reg);
}
// Store element registers in chunk (reuse array_element_regs for tuples)
let element_regs_idx = self.chunk.array_element_regs.len();
self.chunk.array_element_regs.push(element_regs);
// Allocate destination register
let result_reg = self.registers.allocate();
// Emit NewTuple instruction: result = new Tuple(elements...)
// Format: NewTuple result_reg, element_regs_idx (stored in chunk)
self.chunk.emit(
Instruction::abx(OpCode::NewTuple, result_reg, element_regs_idx as u16),
0,
);
Ok(result_reg)
}
}
/// Compile an object literal
///
/// OPT-020: Support both literal and non-literal field values
/// - All literals: Compile to constant pool (optimization)
/// - Non-literals: Compile values to registers, emit `NewObject` opcode
fn compile_object_literal(
&mut self,
fields: &[crate::frontend::ast::ObjectField],
) -> Result<u8, String> {
use crate::frontend::ast::ObjectField;
use std::collections::HashMap;
// Check if all field values are literals (can optimize)
let all_literals = fields.iter().all(|field| {
match field {
ObjectField::KeyValue { value, .. } => matches!(&value.kind, ExprKind::Literal(_)),
ObjectField::Spread { .. } => false, // Spread not supported yet
}
});
if all_literals && !fields.is_empty() {
// Optimization: Create object at compile-time in constant pool
let mut object_map = HashMap::new();
for field in fields {
if let ObjectField::KeyValue { key, value } = field {
if let ExprKind::Literal(lit) = &value.kind {
let val = match lit {
Literal::Integer(i, _) => Value::Integer(*i),
Literal::Float(f) => Value::Float(*f),
Literal::String(s) => Value::from_string(s.clone()),
Literal::Bool(b) => Value::Bool(*b),
Literal::Unit | Literal::Null => Value::Nil,
Literal::Char(c) => Value::from_string(c.to_string()),
Literal::Byte(b) => Value::Integer(i64::from(*b)),
Literal::Atom(s) => Value::Atom(s.clone()),
};
object_map.insert(key.clone(), val);
}
}
}
let object_value = Value::Object(Arc::new(object_map));
let const_index = self.chunk.add_constant(object_value);
let result_reg = self.registers.allocate();
self.chunk
.emit(Instruction::abx(OpCode::Const, result_reg, const_index), 0);
Ok(result_reg)
} else {
// Runtime object construction: compile field values to registers
let mut field_data = Vec::new();
for field in fields {
match field {
ObjectField::KeyValue { key, value } => {
let value_reg = self.compile_expr(value)?;
field_data.push((key.clone(), value_reg));
}
ObjectField::Spread { .. } => {
return Err(
"Spread operator in object literals not yet supported in bytecode mode"
.to_string(),
);
}
}
}
// Store field data in chunk
let field_data_idx = self.chunk.object_fields.len();
self.chunk.object_fields.push(field_data);
// Allocate destination register
let result_reg = self.registers.allocate();
// Emit NewObject instruction: result = new Object(fields...)
// Format: NewObject result_reg, field_data_idx (stored in chunk)
self.chunk.emit(
Instruction::abx(OpCode::NewObject, result_reg, field_data_idx as u16),
0,
);
Ok(result_reg)
}
}
/// Compile array indexing: arr[index]
///
/// OPT-013: evaluates the container and index expressions, then emits
/// `LoadIndex` (ABC encoding: A = result, B = object, C = index).
///
/// Fix: the two input registers are now freed after the instruction is
/// emitted, matching `compile_binary`/`compile_unary`. Like those ABC-format
/// operators, `LoadIndex` reads its operands when it executes, and any
/// instruction reusing a freed register is emitted — and therefore runs —
/// after it; leaving them allocated was simply a register leak.
fn compile_index_access(&mut self, object: &Expr, index: &Expr) -> Result<u8, String> {
    // Compile the array/object expression
    let object_reg = self.compile_expr(object)?;
    // Compile the index expression
    let index_reg = self.compile_expr(index)?;
    // Allocate result register
    let result_reg = self.registers.allocate();
    // Emit LoadIndex instruction: R[result] = R[object][R[index]]
    self.chunk.emit(
        Instruction::abc(OpCode::LoadIndex, result_reg, object_reg, index_reg),
        0,
    );
    // Inputs are dead once the instruction is emitted; recycle them
    // (consistent with the other ABC-format operators).
    self.registers.free(object_reg);
    self.registers.free(index_reg);
    Ok(result_reg)
}
/// Compile a for-loop
///
/// OPT-012: Hybrid approach - like function calls, delegate to interpreter
/// For-loops require array iteration which is complex to compile,
/// so we store loop info and let the VM execute via interpreter.
fn compile_for(&mut self, var: &str, iter: &Expr, body: &Expr) -> Result<u8, String> {
    // Compile iterator expression (the array/collection). Its register is
    // deliberately NOT freed: the register number is baked into the
    // constant-pool metadata below and read by the VM at runtime.
    let iter_reg = self.compile_expr(iter)?;
    // Keep the loop body as raw AST; the VM hands it to the interpreter.
    let body_idx = self.chunk.loop_bodies.len();
    self.chunk.loop_bodies.push(Arc::new(body.clone()));
    // Loop metadata, encoded as a constant array:
    // [iter_reg, var_name, body_index]
    let loop_info = vec![
        Value::Integer(i64::from(iter_reg)), // Register holding the iterator
        Value::from_string(var.to_string()), // Loop variable name
        Value::Integer(body_idx as i64),     // Index into chunk.loop_bodies
    ];
    let loop_info_value = Value::from_array(loop_info);
    let loop_info_idx = self.chunk.add_constant(loop_info_value);
    // Allocate result register
    let result_reg = self.registers.allocate();
    // Emit For instruction: ABx format
    // A = result register, Bx = loop_info constant index
    self.chunk
        .emit(Instruction::abx(OpCode::For, result_reg, loop_info_idx), 0);
    Ok(result_reg)
}
/// Compile a method call
///
/// OPT-014: Hybrid approach - the dispatch logic (stdlib, mutating methods,
/// `DataFrame`, Actor) lives in the interpreter, so the compiler parks the
/// call's AST in the chunk and emits a `MethodCall` opcode referencing it.
fn compile_method_call(
    &mut self,
    receiver: &Expr,
    method: &str,
    args: &[Expr],
) -> Result<u8, String> {
    // Park receiver, method name, and argument ASTs for the interpreter.
    let ast_args: Vec<Arc<Expr>> = args.iter().cloned().map(Arc::new).collect();
    let method_call_idx = self.chunk.method_calls.len();
    self.chunk
        .method_calls
        .push((Arc::new(receiver.clone()), method.to_string(), ast_args));
    // The instruction only carries a constant holding method_call_idx.
    let call_info_idx = self
        .chunk
        .add_constant(Value::Integer(method_call_idx as i64));
    let dst = self.registers.allocate();
    // MethodCall: ABx — A = result register, Bx = call-info constant index
    self.chunk.emit(
        Instruction::abx(OpCode::MethodCall, dst, call_info_idx),
        0,
    );
    Ok(dst)
}
/// Compile a match expression (hybrid execution).
///
/// OPT-018: pattern matching (destructuring, guards, scope management) stays
/// in the interpreter; compilation only records the AST and emits a dispatch
/// opcode, mirroring the for-loop strategy.
fn compile_match(
    &mut self,
    expr: &Expr,
    arms: &[crate::frontend::ast::MatchArm],
) -> Result<u8, String> {
    // Save the scrutinee and its arms for the interpreter.
    let match_idx = self.chunk.match_exprs.len();
    self.chunk
        .match_exprs
        .push((Arc::new(expr.clone()), arms.to_vec()));

    // Only the index into `match_exprs` goes through the constant pool.
    let info_idx = self.chunk.add_constant(Value::Integer(match_idx as i64));

    // Match (ABx): A = result register, Bx = match-info constant index.
    let dst = self.registers.allocate();
    self.chunk
        .emit(Instruction::abx(OpCode::Match, dst, info_idx), 0);
    Ok(dst)
}
/// Compile a closure expression (hybrid execution).
///
/// OPT-019: environment capture and scope management happen at runtime, so
/// parameter metadata plus the body AST are stored on the chunk and the VM
/// builds the closure (with its captured environment) when NewClosure runs.
fn compile_closure(
    &mut self,
    params: &[crate::frontend::ast::Param],
    body: &Expr,
) -> Result<u8, String> {
    // RUNTIME-DEFAULT-PARAMS: keep each parameter's default-value expression
    // alongside its name so the runtime can fill in missing arguments.
    let mut params_with_defaults: Vec<(String, Option<Arc<Expr>>)> =
        Vec::with_capacity(params.len());
    for p in params {
        let default = p
            .default_value
            .clone()
            .map(|expr| Arc::new((*expr).clone()));
        params_with_defaults.push((p.name(), default));
    }

    // Register the closure definition for runtime lookup.
    let closure_idx = self.chunk.closures.len();
    self.chunk
        .closures
        .push((params_with_defaults, Arc::new(body.clone())));

    // Only the closure's index travels through the constant pool.
    let info_idx = self.chunk.add_constant(Value::Integer(closure_idx as i64));

    // NewClosure (ABx): A = result register, Bx = closure-info constant index.
    let dst = self.registers.allocate();
    self.chunk
        .emit(Instruction::abx(OpCode::NewClosure, dst, info_idx), 0);
    Ok(dst)
}
/// Compile a field access `object.field`.
///
/// OPT-015: Direct VM implementation (not hybrid).
/// Field access is simpler than method calls - just extract field from Value.
/// We can implement the match logic directly in the VM for better performance.
///
/// # Errors
/// Returns an error if the field-name constant lands beyond index 255:
/// LoadField uses the ABC encoding, whose C operand is a single byte, so a
/// larger index cannot be represented (truncating it would silently make the
/// instruction read the wrong constant).
fn compile_field_access(&mut self, object: &Expr, field: &str) -> Result<u8, String> {
    // Compile object expression
    let object_reg = self.compile_expr(object)?;
    // Store field name in constant pool
    let field_idx = self.chunk.add_constant(Value::from_string(field.to_string()));
    // ABC's C operand is u8; reject (rather than truncate) overflowing indices.
    let field_operand = u8::try_from(field_idx).map_err(|_| {
        format!("field constant index {field_idx} exceeds LoadField's u8 operand range")
    })?;
    // Allocate result register
    let result_reg = self.registers.allocate();
    // Emit LoadField instruction: ABC format
    // A = result register, B = object register, C = field constant index
    self.chunk.emit(
        Instruction::abc(OpCode::LoadField, result_reg, object_reg, field_operand),
        0,
    );
    Ok(result_reg)
}
/// Finalize compilation, consuming the compiler and yielding the chunk.
pub fn finalize(mut self) -> BytecodeChunk {
    // Return whatever the last compiled expression produced.
    let ret = Instruction::abc(OpCode::Return, self.last_result, 0, 0);
    self.chunk.emit(ret, 0);
    // Tell the VM how many registers this chunk needs.
    self.chunk.register_count = self.registers.max_count();
    // Hybrid execution (for-loops delegate to the interpreter) needs the
    // locals mapping at runtime, so carry a copy on the chunk.
    self.chunk.locals_map = self.locals.clone();
    self.chunk
}
}
/// Compare two values for equality (for constant pool deduplication).
fn values_equal(a: &Value, b: &Value) -> bool {
    match (a, b) {
        (Value::Integer(lhs), Value::Integer(rhs)) => lhs == rhs,
        (Value::Float(lhs), Value::Float(rhs)) => lhs == rhs,
        (Value::Bool(lhs), Value::Bool(rhs)) => lhs == rhs,
        (Value::Nil, Value::Nil) => true,
        // Strings (and every other heap value) are never deduplicated here;
        // reference comparison would need interned strings to be useful.
        _ => false,
    }
}
#[cfg(test)]
#[path = "compiler_tests.rs"]
mod tests;