// silt_lua/lua.rs
use std::{cell::RefCell, mem::take, rc::Rc};

use crate::{
    chunk::Chunk,
    code::OpCode,
    compiler::Compiler,
    error::{ErrorTuple, ErrorTypes, SiltError},
    function::{CallFrame, Closure, FunctionObject, NativeObject, UpValue},
    table::Table,
    token::Operator,
    value::Value,
};

/** Convert Integer to Float, lossy for now */
macro_rules! int2f {
    ($left:ident) => {
        $left as f64
    };
}

macro_rules! intr2f {
    ($left:ident) => {
        *$left as f64
    };
}

macro_rules! devout {
    ($($arg:tt)*) => {
        #[cfg(feature = "dev-out")]
        println!($($arg)*);
    };
}

macro_rules! str_op_str{
    ($left:ident $op:tt $right:ident $enu:ident )=>{
        (||{
            if let Ok(n1) = $left.parse::<i64>() {
                if let Ok(n2) = $right.parse::<i64>() {
                    return Ok(Value::Integer(n1 $op n2));
                }
                if let Ok(n2) = $right.parse::<f64>() {
                    return Ok(Value::Number(int2f!(n1) $op n2));
                }
            }
            if let Ok(n1) = $left.parse::<f64>() {
                if let Ok(n2) = $right.parse::<f64>() {
                    return Ok(Value::Number(n1 $op n2));
                }
            }
            return Err(SiltError::ExpOpValueWithValue(
                ErrorTypes::String,
                Operator::$enu,
                ErrorTypes::String,
            ));
        })()
    }
}

macro_rules! str_op_int{
    ($left:ident $op:tt $right:ident $enu:ident)=>{
        (||{
            if let Ok(n1) = $left.parse::<i64>() {
                return Ok(Value::Integer(n1 $op $right));
            }
            if let Ok(n1) = $left.parse::<f64>() {
                return Ok(Value::Number(n1 $op int2f!($right)));
            }
            return Err(SiltError::ExpOpValueWithValue(
                ErrorTypes::String,
                Operator::$enu,
                ErrorTypes::Integer,
            ));
        })()
    }
}

macro_rules! int_op_str{
    ($left:ident $op:tt $right:ident $enu:ident)=>{
        (||{
            if let Ok(n1) = $right.parse::<i64>() {
                return Ok(Value::Integer($left $op n1));
            }
            if let Ok(n1) = $right.parse::<f64>() {
                return Ok(Value::Number(int2f!($left) $op n1));
            }
            return Err(SiltError::ExpOpValueWithValue(
                ErrorTypes::Integer,
                Operator::$enu,
                ErrorTypes::String,
            ));
        })()
    }
}

macro_rules! op_error {
    ($left:ident $op:ident $right:ident ) => {{
        return Err(SiltError::ExpOpValueWithValue($left, Operator::$op, $right));
    }};
}
macro_rules! str_op_num{
    ($left:ident $op:tt $right:ident $enu:ident)=>{
        if let Ok(n1) = $left.parse::<f64>() {
            Value::Number(n1 $op $right)
        } else {
            return Err(SiltError::ExpOpValueWithValue(
                ErrorTypes::String,
                Operator::$enu,
                ErrorTypes::Number,
            ))
        }
    }
}

macro_rules! num_op_str{
    ($left:ident $op:tt $right:ident $enu:ident)=>{
        if let Ok(n1) = $right.parse::<f64>() {
            Value::Number($left $op n1)
        } else {
            return Err(SiltError::ExpOpValueWithValue(
                ErrorTypes::Number,
                Operator::$enu,
                ErrorTypes::String,
            ))
        }
    }
}

macro_rules! binary_op_push {
    ($src:ident, $op:tt, $opp:tt) => {{
        // TODO test speed of this vs 1 pop and a mutate
        let r = $src.pop();
        let l = $src.pop();

        $src.push(binary_op!(l, $op, r, $opp));
    }};
}

macro_rules! binary_op {
    ($l:ident, $op:tt, $r:ident, $opp:tt) => {
        match ($l, $r) {
            (Value::Number(left), Value::Number(right)) => (Value::Number(left $op right)),
            (Value::Integer(left), Value::Integer(right)) => (Value::Integer(left $op right)),
            (Value::Number(left), Value::Integer(right)) => (Value::Number(left $op right as f64)),
            (Value::Integer(left), Value::Number(right)) => (Value::Number(left as f64 $op right)),
            (Value::String(left), Value::String(right)) => str_op_str!(left $op right $opp)?,
            (Value::String(left), Value::Integer(right)) => str_op_int!(left $op right $opp)?,
            (Value::Integer(left), Value::String(right)) => int_op_str!(left $op right $opp)?,
            (Value::String(left), Value::Number(right)) => str_op_num!(left $op right $opp),
            (Value::Number(left), Value::String(right)) => num_op_str!(left $op right $opp),
            // TODO userdata
            // (Value::UserData(left), Value::UserData(right)) => {
            //     if let Some(f) = left.get_meta_method($opp) {
            //         f(left, right)?
            //     } else {
            //         op_error!(left.to_error(), $opp, right.to_error())
            //     }
            // }
            (ll, rr) => return Err(SiltError::ExpOpValueWithValue(ll.to_error(), Operator::$opp, rr.to_error())),
        }
    };
}
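
// A coercion sketch (illustrative, derived from the macros above) of how mixed
// operands behave under an arithmetic opcode such as ADD:
//   Integer(2) + Number(1.5)  -> Number(3.5)
//   String("2") + Integer(3)  -> Integer(5)   (the string parses as an integer first)
//   String("x") + Integer(3)  -> Err(ExpOpValueWithValue(String, Add, Integer))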

pub struct Lua {
    body: Rc<FunctionObject>,
    compiler: Compiler,
    // frames: Vec<CallFrame>,
    // dummy_frame: CallFrame,
    /** Instruction to be run at start of loop */
    // ip: *const OpCode, // TODO usize vs *const OpCode, will rust optimize the same?
    // stack: Vec<Value>, // TODO fixed size array vs Vec, how much less overhead is there?
    stack: [Value; 256],
    stack_top: *mut Value,
    stack_count: usize,
    /** Next empty location */
    // stack_top: *mut Value,
    globals: hashbrown::HashMap<String, Value>, // TODO store strings as identifier usize and use that as key
    // the original C implementation uses a linked list: the most recently closed upvalue is first and links to previously closed values down the chain
    // allegedly a linked list is heavier than an array plus shifting values, but is that true here or the opposite?
    // resizing a sequential array is faster than non-sequential heap items, BUT since we'll USUALLY resolve the upvalue at the top of the list we're dereferencing once to get our UpValue vs an index lookup which is slightly slower.
    // TODO TLDR: benchmark this
    open_upvalues: Vec<Rc<RefCell<UpValue>>>,
    // references: Vec<Reference>,
    // TODO should we store all strings in their own table/array for better equality checks? is this cheaper?
    // obj

    // TODO GC gray_stack
    // gray_stack: Vec<Value>,
    // TODO temporary solution to a hash id
    table_counter: usize,
}

impl<'a> Lua {
    /** Create a new Lua compiler and runtime */
    pub fn new() -> Self {
        // TODO try the hard way
        // force array to be 256 Values
        // let stack = unsafe {
        //     std::alloc::alloc(std::alloc::Layout::array::<Value>(256).unwrap()) as *mut [Value; 256]
        // };
        // let stack: [Value; 256] = [const { Value::Nil }; 256];
        let mut stack = [(); 256].map(|_| Value::default());
        let stack_top = stack.as_mut_ptr() as *mut Value;
        // let stack = vec![];
        // let stack_top = stack.as_ptr() as *mut Value;
        Self {
            compiler: Compiler::new(),
            body: Rc::new(FunctionObject::new(None, false)),
            // dummy_frame: CallFrame::new(Rc::new(FunctionObject::new(None, false))),
            // frames: vec![],
            // ip: 0 as *const OpCode,
            // stack, //: unsafe { *stack },
            stack_count: 0,
            stack,
            stack_top,
            globals: hashbrown::HashMap::new(),
            open_upvalues: vec![],
            table_counter: 1,
        }
    }

    fn push(&mut self, value: Value) {
        devout!(" | push: {}", value);
        unsafe { self.stack_top.write(value) };
        self.stack_top = unsafe { self.stack_top.add(1) };
        self.stack_count += 1;
    }

    fn reserve(&mut self) -> *mut Value {
        self.stack_count += 1;
        let old = self.stack_top;
        self.stack_top = unsafe { self.stack_top.add(1) };
        old
    }

    /** Pop N values off the stack without returning them */
    fn popn_drop(&mut self, n: u8) {
        unsafe { self.stack_top = self.stack_top.sub(n as usize) };
        self.stack_count -= n as usize;
    }

    fn print_upvalues(&self) {
        self.open_upvalues.iter().enumerate().for_each(|(i, up)| {
            println!("{}:{}", i, up.borrow());
        });
    }

    fn close_n_upvalues(&mut self, n: u8) {
        #[cfg(feature = "dev-out")]
        self.print_upvalues();
        // remove n from end of list
        if n > 1 {
            self.open_upvalues
                .drain(self.open_upvalues.len() - n as usize..)
                .rev()
                .for_each(|up| {
                    let mut upvalue = up.borrow_mut();
                    upvalue.close_around(unsafe { self.stack_top.replace(Value::Nil) });
                });
            unsafe { self.stack_top = self.stack_top.sub(n as usize) };
        } else {
            let upvalue = self.open_upvalues.pop().unwrap();
            upvalue.borrow_mut().close_around(self.pop());
        }
    }

    fn close_upvalues_by_return(&mut self, last: *mut Value) {
        // devout!("value: {}", unsafe { &*last });
        #[cfg(feature = "dev-out")]
        self.print_upvalues();
        for upvalue in self.open_upvalues.iter().rev() {
            let mut up = upvalue.borrow_mut();
            // let upv = unsafe { &*up.get_location() };
            // let vv = unsafe { &*last };
            // let b = up.get_location() < last;
            // println!("upvalue {} less than {} is {} ", upv, vv, b);
            if up.get_location() < last {
                break;
            }
            up.close();
        }
    }

    /** pop and return top of stack */
    fn pop(&mut self) -> Value {
        self.stack_count -= 1;
        unsafe { self.stack_top = self.stack_top.sub(1) };
        let v = unsafe { self.stack_top.replace(Value::Nil) };
        // TODO is there a way to read without segfaulting?
        // We'd have to list the value to be forgotten, but is this even faster?
        // let v = unsafe { self.stack_top.read() };
        devout!(" | pop: {}", v);
        v
    }

    // TODO can we make this faster with slices? can we slice a pointer? 🤔
    fn popn(&mut self, n: u8) -> Vec<Value> {
        // println!("popn: {}", n);
        let mut values = vec![];
        for _ in 0..n {
            values.push(self.pop());
        }
        values.reverse();
        values
    }

    fn safe_pop(&mut self) -> Value {
        // let v3 = take(&mut self.stack[3]);
        // println!("we took {}", v3);
        // let v0 = take(&mut self.stack[self.stack_count - 1]);
        // println!("we took {}", v0);
        // let ve = v0.clone();
        // std::mem::forget(v3);
        // drop(v0);
        // println!("we took {}", ve);
        // self.print_raw_stack();
        // core::ptr::read()

        let v0 = take(&mut self.stack[self.stack_count - 1]);

        // for i in self.stack.iter_mut().enumerate() {
        //     *i = Value::Nil;
        // }

        v0
    }

    /** Dangerous! */
    fn read_top(&self) -> Value {
        // match self.peek(0) {
        //     Some(v) => v.clone(),
        //     None => Value::Nil,
        // }

        unsafe { self.stack_top.sub(1).read() }
    }

    /** Safer but clones! */
    fn duplicate(&self) -> Value {
        unsafe { (*self.stack_top.sub(1)).clone() }
    }

    /** Look and get immutable reference to top of stack */
    fn peek(&self) -> &Value {
        // self.stack.last()
        unsafe { &*self.stack_top.sub(1) }
    }

    /** Look and get mutable reference to top of stack */
    fn peek_mut(&mut self) -> &mut Value {
        unsafe { &mut *self.stack_top.sub(1) }
    }

    fn grab(&mut self, n: usize) -> &Value {
        unsafe { &*self.stack_top.sub(n) }
    }

    fn grab_mut(&mut self, n: usize) -> &mut Value {
        unsafe { &mut *self.stack_top.sub(n) }
    }

    /** Look N slots down the stack (0 = top of stack) and return an immutable reference */
    fn peekn(&self, n: u8) -> &Value {
        // unsafe { *self.stack_top.sub(n as usize) }
        // &self.stack[self.stack.len() - n as usize]
        unsafe { &*self.stack_top.sub((n as usize) + 1) }
    }

    pub fn evaluate(&mut self, source: &str) -> FunctionObject {
        self.compiler.compile(source.to_owned())
    }

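    /** Compile and run a source string in one shot. A minimal usage sketch
     * (hedged: the exact source syntax accepted and the resulting `Value` variant
     * depend on the compiler, so the snippet only prints whatever comes back):
     * ```ignore
     * let mut vm = Lua::new();
     * vm.load_standard_library();
     * match vm.run("return 1 + 2") {
     *     Ok(v) => println!("result: {}", v),
     *     Err(errors) => println!("{} error(s)", errors.len()),
     * }
     * ```
     */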
    pub fn run(&mut self, source: &str) -> Result<Value, Vec<ErrorTuple>> {
        let object = self.compiler.compile(source.to_owned());
        if object.chunk.is_valid() {
            match self.execute(object.into()) {
                Ok(v) => Ok(v),
                Err(e) => Err(vec![ErrorTuple {
                    code: e,
                    location: (0, 0),
                }]),
            }
        } else {
            Err(self.compiler.pop_errors())
        }
    }

    pub fn execute(&mut self, object: Rc<FunctionObject>) -> Result<Value, SiltError> {
        // TODO param is a reference of &'a
        // self.ip = object.chunk.code.as_ptr();
        // frame.ip = object.chunk.code.as_ptr();
        // frame.slots = self.stack ???
        // let rstack = self.stack.as_ptr();
        #[cfg(feature = "dev-out")]
        object.chunk.print_chunk(None);
        self.stack_top = self.stack.as_mut_ptr() as *mut Value;
        self.body = object.clone();
        let closure = Rc::new(Closure::new(object.clone(), vec![]));

        let mut frame = CallFrame::new(closure, 0);
        frame.ip = object.chunk.code.as_ptr();
        frame.local_stack = self.stack_top;
        // frame.stack.resize(256, Value::Nil); // TODO
        self.push(Value::Function(object)); // TODO this needs to store the function object itself somehow, RC?
        let frames = vec![frame];
        // self.body = object;
        let res = self.process(frames);

        res
    }

    fn process(&mut self, mut frames: Vec<CallFrame>) -> Result<Value, SiltError> {
        let mut last = Value::Nil; // TODO temporary for testing
        // let stack_pointer = self.stack.as_mut_ptr();
        // let mut dummy_frame = CallFrame::new(Rc::new(FunctionObject::new(None, false)), 0);
        let mut frame = frames.last_mut().unwrap();
        let mut frame_count = 1;
        loop {
            let instruction = frame.current_instruction();

            // devout!("ip: {:p} | {}", self.ip, instruction);
            devout!(" | {}", instruction);

            // TODO how much faster would it be to order these ops in order of usage, does match hash? probably.
            match instruction {
                OpCode::RETURN => {
                    frame_count -= 1;
                    if frame_count <= 0 {
                        if self.stack_count <= 1 {
                            return Ok(Value::Nil);
                        }
                        return Ok(self.safe_pop());
                    }
                    let res = self.pop();
                    self.stack_top = frame.local_stack;
                    self.close_upvalues_by_return(self.stack_top);
                    devout!("stack top {}", unsafe { &*self.stack_top });
                    self.stack_count = frame.stack_snapshot;
                    frames.pop();
                    frame = frames.last_mut().unwrap();
                    devout!("next instruction {}", frame.current_instruction());
                    #[cfg(feature = "dev-out")]
                    self.print_stack();
                    self.push(res);

                    // println!("<< {}", self.pop());
                    // match self.pop() {
                    //     Some(v) => return Ok(v),
                    //     None => return Ok(last),
                    // }
                }
                OpCode::CONSTANT { constant } => {
                    let value = Self::get_chunk(&frame).get_constant(*constant);
                    self.push(value.clone());
                    // match value {
                    //     Value::Number(f) => self.push(*f),
                    //     Value::Integer(i) => self.push(*i as f64),
                    //     _ => {}
                    // }
                }
                OpCode::DEFINE_GLOBAL { constant } => {
                    let value = self.body.chunk.get_constant(*constant);
                    if let Value::String(s) = value {
                        // DEV inline pop due to self lifetime nonsense
                        self.stack_count -= 1;
                        unsafe { self.stack_top = self.stack_top.sub(1) };
                        let v = unsafe { self.stack_top.read() };

                        // let v = self.pop();
                        self.globals.insert(s.to_string(), v);
                    } else {
                        return Err(SiltError::VmCorruptConstant);
                    }
                }
                // TODO does this need to exist?
                OpCode::SET_GLOBAL { constant } => {
                    let value = self.body.chunk.get_constant(*constant);
                    if let Value::String(s) = value {
                        let v = self.duplicate();
                        // TODO we could take; expression statements send a pop, so this is a hack of sorts. Ideally the compiler only sends a pop for non-assignment.
                        // alternatively we can peek the value, that might be better to prevent side effects
                        // do we want expressions to evaluate to a value? probably? is this ideal for implicit returns?

                        // if let Some(_) = self.globals.get(&**s) {
                        //     self.globals.insert(s.to_string(), v);
                        // } else {
                        //     self.globals.insert(s.to_string(), v);
                        // }
                        self.globals.insert(s.to_string(), v);
                    } else {
                        devout!("SET_GLOBAL: {}", value);
                        #[cfg(feature = "dev-out")]
                        self.body.chunk.print_constants();
                        return Err(SiltError::VmCorruptConstant);
                    }
                }
                OpCode::GET_GLOBAL { constant } => {
                    let value = Self::get_chunk(&frame).get_constant(*constant);
                    if let Value::String(s) = value {
                        if let Some(v) = self.globals.get(&**s) {
                            self.push(v.clone());
                        } else {
                            self.push(Value::Nil);
                        }
                    } else {
                        return Err(SiltError::VmCorruptConstant);
                    }
                }
                OpCode::SET_LOCAL { index } => {
                    let value = self.duplicate();
                    // frame.stack[*index as usize] = value;
                    frame.set_val(*index, value)
                }
                OpCode::GET_LOCAL { index } => {
                    self.push(frame.get_val(*index).clone());
                    // self.push(frame.stack[*index as usize].clone());
                    // TODO ew cloning, is our cloning optimized yet?
                    // TODO also we should convert from stack to register based so we can use the index as a reference instead
                }
                OpCode::DEFINE_LOCAL { constant } => todo!(),
                OpCode::ADD => binary_op_push!(self, +, Add),
                OpCode::SUB => binary_op_push!(self, -, Sub),
                OpCode::MULTIPLY => binary_op_push!(self, *, Multiply),
                OpCode::DIVIDE => {
                    let right = self.pop();
                    let left = self.pop();

                    match (left, right) {
                        (Value::Number(left), Value::Number(right)) => {
                            self.push(Value::Number(left / right))
                        }
                        (Value::Integer(left), Value::Integer(right)) => {
                            self.push(Value::Number(left as f64 / right as f64))
                        }
                        (Value::Number(left), Value::Integer(right)) => {
                            self.push(Value::Number(left / right as f64))
                        }
                        (Value::Integer(left), Value::Number(right)) => {
                            self.push(Value::Number(left as f64 / right))
                        }
                        (l, r) => {
                            return Err(SiltError::ExpOpValueWithValue(
                                l.to_error(),
                                Operator::Divide,
                                r.to_error(),
                            ))
                        }
                    }
                }
                OpCode::NEGATE => {
                    match self.peek() {
                        Value::Number(n) => {
                            let f = -n;
                            self.pop();
                            self.push(Value::Number(f))
                        }
                        Value::Integer(i) => {
                            let f = -i;
                            self.pop();
                            self.push(Value::Integer(f))
                        }
                        // None => Err(SiltError::EarlyEndOfFile)?,
                        c => Err(SiltError::ExpInvalidNegation(c.to_error()))?,
                    }
                    // TODO test this vs below: unsafe { *self.stack_top = -*self.stack_top };
                }
                OpCode::NIL => self.push(Value::Nil),
                OpCode::TRUE => self.push(Value::Bool(true)),
                OpCode::FALSE => self.push(Value::Bool(false)),
                OpCode::NOT => {
                    let value = self.pop();
                    self.push(Value::Bool(!Self::is_truthy(&value)));
                }
                OpCode::EQUAL => {
                    let r = self.pop();
                    let l = self.pop();
                    self.push(Value::Bool(Self::is_equal(&l, &r)));
                }
                OpCode::NOT_EQUAL => {
                    let r = self.pop();
                    let l = self.pop();
                    self.push(Value::Bool(!Self::is_equal(&l, &r)));
                }
                OpCode::LESS => {
                    let r = self.pop();
                    let l = self.pop();
                    self.push(Value::Bool(Self::is_less(&l, &r)?));
                }
                OpCode::LESS_EQUAL => {
                    let r = self.pop();
                    let l = self.pop();
                    self.push(Value::Bool(!Self::is_greater(&l, &r)?));
                }
                OpCode::GREATER => {
                    let r = self.pop();
                    let l = self.pop();
                    self.push(Value::Bool(Self::is_greater(&l, &r)?));
                }
                OpCode::GREATER_EQUAL => {
                    let r = self.pop();
                    let l = self.pop();
                    self.push(Value::Bool(!Self::is_less(&l, &r)?));
                }
                OpCode::CONCAT => {
                    let r = self.pop();
                    let l = self.pop();
                    match (l, r) {
                        (Value::String(left), Value::String(right)) => {
                            self.push(Value::String(Box::new(*left + &right)))
                        }
                        (Value::String(left), v2) => {
                            self.push(Value::String(Box::new(*left + &v2.to_string())))
                        }
                        (v1, Value::String(right)) => {
                            self.push(Value::String(Box::new(v1.to_string() + &right)))
                        }
                        (v1, v2) => {
                            self.push(Value::String(Box::new(v1.to_string() + &v2.to_string())))
                        }
                    }
                }

                OpCode::LITERAL { dest, literal } => {}
                OpCode::POP => {
                    last = self.pop();
                }
                OpCode::POPS(n) => self.popn_drop(*n), // TODO here's that 255 local limit again
                OpCode::CLOSE_UPVALUES(n) => {
                    self.close_n_upvalues(*n);
                }
                OpCode::GOTO_IF_FALSE(offset) => {
                    let value = self.peek();
                    // println!("GOTO_IF_FALSE: {}", value);
                    if !Self::is_truthy(value) {
                        frame.forward(*offset);
                    }
                }
                OpCode::POP_AND_GOTO_IF_FALSE(offset) => {
                    let value = &self.pop();
                    // println!("GOTO_IF_FALSE: {}", value);
                    if !Self::is_truthy(value) {
                        frame.forward(*offset);
                    }
                }
                OpCode::GOTO_IF_TRUE(offset) => {
                    let value = self.peek();
                    if Self::is_truthy(value) {
                        frame.forward(*offset);
                    }
                }
                OpCode::FORWARD(offset) => {
                    frame.forward(*offset);
                }
                OpCode::REWIND(offset) => {
                    frame.rewind(*offset);
                }
                OpCode::FOR_NUMERIC(skip) => {
                    // for needs its own version of the stack for upvalues?
                    // compare: if greater we skip; if less or equal we continue and then increment AFTER the block
                    // let increment = self.grab(1);
                    let iterator = unsafe { &mut *self.stack_top.sub(3) };
                    let compare = self.grab(2);
                    if Self::is_greater(iterator, compare)? {
                        frame.forward(*skip);
                    } else {
                        self.push(iterator.clone())
                    }
                    // self.push(iterator.clone());
                    // if iterator > compare {
                    //     frame.forward(*skip);
                    // }
                }
                OpCode::INCREMENT { index } => {
                    let value = frame.get_val_mut(*index);
                    let step = self.peek();
                    value.increment(step)?;
                }
                OpCode::CLOSURE { constant } => {
                    let value = Self::get_chunk(&frame).get_constant(*constant);
                    devout!(" | => {}", value);
                    if let Value::Function(f) = value {
                        // f.upvalue_count
                        let mut closure =
                            Closure::new(f.clone(), Vec::with_capacity(f.upvalue_count as usize));
                        // let reserved_value = self.reserve();
                        if f.upvalue_count >= 0 {
                            let next_instruction = frame.get_next_n_codes(f.upvalue_count as usize);
                            for i in 0..f.upvalue_count {
                                devout!(" | {}", next_instruction[i as usize]);
                                if let OpCode::REGISTER_UPVALUE { index, neighboring } =
                                    next_instruction[i as usize]
                                {
                                    closure.upvalues.push(if neighboring {
                                        // insert at i
                                        self.capture_upvalue(index, frame)
                                        // closure.upvalues.insert(
                                        //     i as usize,
                                        //     frame.function.upvalues[index as usize].clone(),
                                        // );
                                        // *slots.add(index) ?
                                    } else {
                                        frame.function.upvalues[index as usize].clone()
                                    });
                                } else {
                                    println!(
                                        "next instruction is not REGISTER_UPVALUE {}",
                                        next_instruction[i as usize]
                                    );
                                    unreachable!()
                                }
                            }

                            self.push(Value::Closure(Rc::new(closure)));
                        } else {
                            // devout!("closure is of type {}", closure.function.function.name);
                            return Err(SiltError::VmRuntimeError);
                        }
                        frame.shift(f.upvalue_count as usize);
                    }
                }
                OpCode::GET_UPVALUE { index } => {
                    let value = frame.function.upvalues[*index as usize]
                        .borrow()
                        .copy_value();

                    #[cfg(feature = "dev-out")]
                    {
                        frame.print_local_stack();
                        frame.function.print_upvalues();
                    }

                    devout!("GET_UPVALUE: {}", value);
                    self.push(value);
                }
                OpCode::SET_UPVALUE { index } => {
                    let value = self.peek(); // TODO pop and set would be faster, less cloning
                    let ff = &frame.function.upvalues;
                    ff[*index as usize].borrow_mut().set_value(value.clone());
                    // unsafe { *upvalue.value = value };
                }
                OpCode::CALL(param_count) => {
                    let value = self.peekn(*param_count);
                    devout!(" | -> {}", value);
                    match value {
                        Value::Closure(c) => {
                            // TODO this logic is identical to function, but to make this a function causes some lifetime issues. A macro would work but we're already a little macro heavy aren't we?
                            let frame_top =
                                unsafe { self.stack_top.sub((*param_count as usize) + 1) };
                            let new_frame = CallFrame::new(
                                c.clone(),
                                self.stack_count - (*param_count as usize) - 1,
                            );
                            frames.push(new_frame);
                            frame = frames.last_mut().unwrap();

                            frame.local_stack = frame_top;
                            devout!("top of frame stack {}", unsafe { &*frame.local_stack });
                            frame_count += 1;
                        }
                        Value::Function(func) => {
                            // let frame_top =
                            //     unsafe { self.stack_top.sub((*param_count as usize) + 1) };
                            // let new_frame = CallFrame::new(
                            //     func.clone(),
                            //     self.stack_count - (*param_count as usize) - 1,
                            // );
                            // frames.push(new_frame);
                            // frame = frames.last_mut().unwrap();

                            // frame.local_stack = frame_top;
                            // devout!("top of frame stack {}", unsafe { &*frame.local_stack });
                            // frame_count += 1;

                            // devout!("current stack count {}", frame.stack_snapshot);
                            // frame.ip = f.chunk.code.as_ptr();
                            // // frame.stack.resize(256, Value::Nil); // TODO
                            // self.push(Value::Function(f.clone())); // TODO this needs to store the function object itself somehow, RC?
                        }
                        Value::NativeFunction(_) => {
                            // get args including the function value at index 0. We do it here so we don't have mutability issues with the native fn
                            let mut args = self.popn(*param_count + 1);
                            if let Value::NativeFunction(f) = args.remove(0) {
                                let res = ((*f).function)(self, args);
                                // self.popn_drop(*param_count);
                                self.push(res);
                            } else {
                                unreachable!();
                            }
                        }
                        _ => {
                            return Err(SiltError::NotCallable(format!("Value: {}", value)));
                        }
                    }
                }
                OpCode::PRINT => {
                    println!("<<<<<< {} >>>>>>>", self.pop());
                }
                OpCode::META(_) => todo!(),
                OpCode::REGISTER_UPVALUE {
                    index: _,
                    neighboring: _,
                } => unreachable!(),
                OpCode::LENGTH => {
                    let value = self.pop();
                    match value {
                        Value::String(s) => self.push(Value::Integer(s.len() as i64)),
                        Value::Table(t) => self.push(Value::Integer(t.borrow().len() as i64)),
                        _ => Err(SiltError::ExpInvalidLength(value.to_error()))?,
                    }
                }
                OpCode::NEW_TABLE => {
                    self.push(self.new_table());
                    self.table_counter += 1;
                }
                OpCode::TABLE_INSERT { offset } => {
                    self.insert_immediate_table(*offset)?;
                }
                OpCode::TABLE_BUILD(n) => {
                    self.build_table(*n)?;
                }
                OpCode::TABLE_SET { depth } => {
                    let value = self.pop();
                    self.operate_table(*depth, Some(value))?;
                }
                // OpCode::TABLE_SET_BY_CONSTANT { constant } => {
                //     let value = self.pop();
                //     let key = Self::get_chunk(&frame).get_constant(*constant);
                //     let table = self.peek_mut();
                //     if let Value::Table(t) = table {
                //         // TODO can we pre-hash this to avoid a clone?
                //         t.borrow_mut().insert(key.clone(), value);
                //     } else {
                //         return Err(SiltError::VmNonTableOperations(table.to_error()));
                //     }
                // }
                OpCode::TABLE_GET { depth } => {
                    self.operate_table(*depth, None)?;
                }
                OpCode::TABLE_GET_FROM { index } => {
                    // let key = self.pop();

                    // let table = frame.get_val_mut(*index);
                    // if let Value::Table(t) = table {
                    //     let v = t.borrow().get_value(&key);
                    //     self.push(v);
                    // } else {
                    //     return Err(SiltError::VmNonTableOperations(table.to_error()));
                    // }
                    todo!("TABLE_GET_FROM")
                }

                OpCode::TABLE_GET_BY_CONSTANT { constant } => {
                    let key = Self::get_chunk(&frame).get_constant(*constant);
                    let table = self.peek_mut();
                    if let Value::Table(t) = table {
                        let v = t.borrow().get_value(&key);
                        self.push(v);
                    } else {
                        return Err(SiltError::VmNonTableOperations(table.to_error()));
                    }
                }
            }
            frame.iterate();
            // stack
            #[cfg(feature = "dev-out")]
            {
                self.print_stack();
                println!("--------------------------------------");
            }
        }
    }

    // TODO is having a default empty chunk cheaper?
    /** We're operating on the assumption that a chunk is always present when using this */
    fn get_chunk(frame: &CallFrame) -> &Chunk {
        &frame.function.function.chunk
    }

    // pub fn reset_stack(&mut self) {
    //     // TODO we probably dont even need to clear the stack, just reset the stack_top
    //     // self.stack.clear();
    //     // set to 0 index of stack
    //     self.stack_top = unsafe { self.stack.as_mut_ptr() };
    // }

    fn is_truthy(v: &Value) -> bool {
        match v {
            Value::Bool(b) => *b,
            Value::Nil => false,
            _ => true,
        }
    }

    fn is_equal(l: &Value, r: &Value) -> bool {
        match (l, r) {
            (Value::Number(left), Value::Number(right)) => left == right,
            (Value::Integer(left), Value::Integer(right)) => left == right,
            (Value::Number(left), Value::Integer(right)) => *left == *right as f64,
            (Value::Integer(left), Value::Number(right)) => *left as f64 == *right,
            (Value::String(left), Value::String(right)) => left == right,
            (Value::Bool(left), Value::Bool(right)) => left == right,
            (Value::Nil, Value::Nil) => true,
            (Value::Infinity(left), Value::Infinity(right)) => left == right,
            (_, _) => false,
        }
    }

    fn is_less(l: &Value, r: &Value) -> Result<bool, SiltError> {
        Ok(match (l, r) {
            (Value::Number(left), Value::Number(right)) => left < right,
            (Value::Integer(left), Value::Integer(right)) => left < right,
            (Value::Number(left), Value::Integer(right)) => *left < *right as f64,
            (Value::Integer(left), Value::Number(right)) => (*left as f64) < (*right),
            (Value::Infinity(left), Value::Infinity(right)) => left != right && *left,
            (_, _) => Err(SiltError::ExpOpValueWithValue(
                l.to_error(),
                Operator::Less,
                r.to_error(),
            ))?,
        })
    }

    fn is_greater(l: &Value, r: &Value) -> Result<bool, SiltError> {
        Ok(match (l, r) {
            (Value::Number(left), Value::Number(right)) => left > right,
            (Value::Integer(left), Value::Integer(right)) => {
                // println!(" is {} > {}", left, right);
                left > right
            }
            (Value::Number(left), Value::Integer(right)) => *left > *right as f64,
            (Value::Integer(left), Value::Number(right)) => (*left as f64) > (*right),
            (Value::Infinity(left), Value::Infinity(right)) => left != right && !*left,
            (_, _) => Err(SiltError::ExpOpValueWithValue(
                l.to_error(),
                Operator::Greater,
                r.to_error(),
            ))?,
        })
    }

    // /** unsafe as hell, we're relying on compiler*/
    // fn read_string(&mut self, constant: u8) -> String {
    //     let value = self.get_chunk().get_constant(constant);
    //     if let Value::String(s) = value {
    //         return s.to_string();
    //     } else {
    //         unreachable!("Only strings can be identifiers")
    //     }
    // }

    fn call(&self, function: &Rc<Closure>, param_count: u8) -> CallFrame {
        let frame_top = unsafe { self.stack_top.sub((param_count as usize) + 1) };
        let new_frame = CallFrame::new(
            function.clone(),
            self.stack_count - (param_count as usize) - 1,
        );
        new_frame
    }

    fn capture_upvalue(&mut self, index: u8, frame: &CallFrame) -> Rc<RefCell<UpValue>> {
        //, stack: *mut Value,
        //stack
        // self.print_stack();
        #[cfg(feature = "dev-out")]
        frame.print_local_stack();
        let value = unsafe { frame.local_stack.add(index as usize) };
        devout!("2capture_upvalue at index {} : {}", index, unsafe {
            &*value
        });
        let mut ind = None;
        for (i, up) in self.open_upvalues.iter().enumerate() {
            let upvalue = up.borrow();
            if upvalue.location == value {
                return up.clone();
            }

            if upvalue.location < value {
                break;
            }
            ind = Some(i);
        }

        let u = Rc::new(RefCell::new(UpValue::new(index, value)));

        match ind {
            Some(i) => self.open_upvalues.insert(i, u.clone()),
            None => self.open_upvalues.push(u.clone()),
        }

        #[cfg(feature = "dev-out")]
        self.print_upvalues();
        u

        // self
        //     .open_upvalues
        //     .iter()
        //     // DEV originally we loop through until the pointer is not greater than the stack pointer
        //     .find(|upvalue| upvalue.index == index)
        // {
        //     Some(u) => u.clone(),
        //     None => {
        //         // let v = unsafe { stack.sub(index as usize) };
        //         let u = Rc::new(UpValue::new(index));
        //         self.open_upvalues.push(u.clone());
        //         u
        //     }
        // }

        // let mut prev = stack;
        // for _ in 0..index {
        //     prev = unsafe { prev.sub(1) };
        // }
        // unsafe { prev.read() }
    }

    fn close_upvalue(&mut self, value: Value) {
        devout!("close_upvalue: {}", value);

        // for up in
        // self.open_upvalues
        //     .iter()
        //     .find(|up| {
        //         let mut upvalue = up.borrow_mut();
        //         if upvalue.index >= self.stack_count as u8 {
        //             false
        //         } else {
        //             true
        //         }
        //     })
        //     .unwrap()
        //     .borrow_mut()
        //     .close(value);
        // TODO
        // self.open_upvalues.retain(|up| {
        //     let mut upvalue = up.borrow_mut();
        //     if upvalue.index >= self.stack_count as u8 {
        //         upvalue.close(value);
        //         false
        //     } else {
        //         true
        //     }
        // });
    }

    fn new_table(&self) -> Value {
        let t = Table::new(self.table_counter);
        Value::Table(Rc::new(RefCell::new(t)))
    }

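    // Stack sketch for TABLE_BUILD(n) (illustrative, not normative): the table sits
    // below its n pending array values and absorbs them, leaving only the table:
    //   [ ..., table, v1, v2, ..., vn ]  ->  [ ..., table ]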
    fn build_table(&mut self, n: u8) -> Result<(), SiltError> {
        let offset = n as usize + 1;
        let table_point = unsafe { self.stack_top.sub(offset) };
        let table = unsafe { &*table_point };
        if let Value::Table(t) = table {
            let mut b = t.borrow_mut();
            // push in reverse
            for i in (0..n).rev() {
                let value = unsafe { self.stack_top.sub(i as usize + 1).replace(Value::Nil) };
                b.push(value);
            }

            self.stack_count -= offset - 1;
            self.stack_top = unsafe { table_point.add(1) };

            Ok(())
        } else {
            Err(SiltError::ChunkCorrupt) // shouldn't happen unless our compiler really screwed up
        }
    }

    /** Used at table creation to simplify direct index insertion */
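    // Stack sketch for TABLE_INSERT { offset } (illustrative): the target table sits
    // `offset` slots below the key/value pair, which gets popped and inserted:
    //   [ ..., table, ...offset slots..., key, value ]  ->  [ ..., table, ...offset slots... ]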
    fn insert_immediate_table(&mut self, offset: u8) -> Result<(), SiltError> {
        let table = unsafe { &*self.stack_top.sub(offset as usize + 3) }; // -3 because -1 for top of stack, -1 for key, -1 for value, and then offset from there
        if let Value::Table(t) = table {
            let value = self.pop();
            let key = self.pop();
            t.borrow_mut().insert(key, value);
            Ok(())
        } else {
            Err(SiltError::ChunkCorrupt) // shouldn't happen unless our compiler really screwed up
        }
    }

    /**
     * Operates on the table sitting `depth` keys down the stack. If a set value is passed we
     * act as a setter; otherwise we act as a getter and push the value found at the index onto the stack.
     * Unintentional pun
     */
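    // Stack sketches (illustrative) for depth = 2, i.e. `t[k1][k2]`:
    //   get:  [ ..., t, k1, k2 ]  ->  [ ..., value ]
    //   set:  [ ..., t, k1, k2 ]  ->  [ ... ]   (TABLE_SET pops the value beforehand)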
    fn operate_table(&mut self, depth: u8, set: Option<Value>) -> Result<(), SiltError> {
        // let value = unsafe { self.stack_top.read() };
        // let value = unsafe { self.stack_top.replace(Value::Nil) };

        let u = depth as usize + 1;
        let decrease = match set {
            Some(_) => u,
            None => u - 1,
        };
        let table_point = unsafe { self.stack_top.sub(u) };
        let table = unsafe { &*table_point };
        if let Value::Table(t) = table {
            let mut current = t;
            for i in 1..=depth {
                let key = unsafe { self.stack_top.sub(i as usize).replace(Value::Nil) };
                devout!("get from table with key: {}", key);
                if i == depth {
                    // let offset = depth as usize;
                    self.stack_count -= decrease;
                    unsafe { self.stack_top = self.stack_top.sub(decrease) };
                    // assert!(self.stack_top == table_point);
                    match set {
                        Some(value) => {
                            current.borrow_mut().insert(key, value);
                            unsafe { table_point.replace(Value::Nil) };
                        }
                        None => {
                            let out = current.borrow().get_value(&key);
                            unsafe { table_point.replace(out) };
                        }
                    }
                    return Ok(());
                } else {
                    let check = unsafe { current.try_borrow_unguarded() }.unwrap().get(&key);
                    match check {
                        Some(Value::Table(t)) => {
                            current = t;
                        }
                        Some(v) => {
                            return Err(SiltError::VmNonTableOperations(v.to_error()));
                        }
                        None => {
                            return Err(SiltError::VmNonTableOperations(ErrorTypes::Nil));
                        }
                    }
                }
            }
            Err(SiltError::VmRuntimeError)
        } else {
            Err(SiltError::VmNonTableOperations(table.to_error()))
        }

        // self.stack_count -= 1;
        // unsafe { self.stack_top = self.stack_top.sub(1) };
        // let v = unsafe { self.stack_top.replace(Value::Nil) };
        // // TODO is there a way to read without segfaulting?
        // // We'd have to list the value to be forgotten, but is this even faster?
        // // let v = unsafe { self.stack_top.read() };
        // devout!("pop: {}", v);
        // v

        // // let value = self.stack[self.stack_count - (index as usize) - 1].clone();
        // if let Value::Table(t) = value {
        //     t
        // } else {
        //     unreachable!("Only tables can be indexed")
        // }
    }

    /** Register a native function on the global table */
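    /**
     * A usage sketch (the `double` handler below is illustrative and not part of the VM;
     * it assumes integer arguments and ignores everything else):
     * ```ignore
     * fn double(_vm: &mut Lua, args: Vec<Value>) -> Value {
     *     match args.first() {
     *         Some(Value::Integer(i)) => Value::Integer(i * 2),
     *         _ => Value::Nil,
     *     }
     * }
     * let mut vm = Lua::new();
     * vm.register_native_function("double", double);
     * // scripts can now call `double(...)` by name
     * ```
     */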
    pub fn register_native_function(
        &mut self,
        name: &str,
        function: fn(&mut Self, Vec<Value>) -> Value,
    ) {
        let fn_obj = Rc::new(NativeObject::new(name.to_string(), function));
        self.globals
            .insert(name.to_string(), Value::NativeFunction(fn_obj));
    }

    /** Load standard library functions */
    pub fn load_standard_library(&mut self) {
        self.register_native_function("clock", crate::standard::clock);
        self.register_native_function("print", crate::standard::print);
    }

    fn print_raw_stack(&self) {
        println!("=== Stack ({}) ===", self.stack_count);
        // 0 to stack_top
        print!("[");
        for i in self.stack.iter() {
            print!("{} ", i);
        }
        print!("]");
        println!("---");
    }

    pub fn print_stack(&self) {
        println!("=== Stack ({}) ===", self.stack_count);
        print!("[");
        let mut c = 0;
        for i in self.stack.iter() {
            c += 1;
            if c > self.stack_count {
                break;
            }
            let s = format!("{:?}", i);
            if s == "nil" {
                print!("_");
            } else {
                print!("{} ", i);
            }
        }
        print!("]");
        println!("---");
    }
}
1205}