use crate::tagged_stack::{StackValue, TAG_FALSE, TAG_TRUE, is_tagged_int, tag_int, untag_int};
use crate::value::Value;
use std::sync::Arc;
/// Raw stack pointer. By convention it points one slot PAST the topmost
/// value (the next free slot): `push` writes at `*sp` then advances,
/// `pop` reads at `*sp.sub(1)`.
pub type Stack = *mut StackValue;
/// Size in bytes of one stack slot.
///
/// Made `const fn` so the size is also usable in constant contexts
/// (array lengths, const arithmetic); existing callers are unaffected.
#[inline]
pub const fn stack_value_size() -> usize {
    std::mem::size_of::<StackValue>()
}
// Stable integer discriminant codes for `Value` variants.
// NOTE(review): nothing in this file consumes these constants; presumably
// generated code / the JIT matches on them to identify the variant behind a
// heap stack word — keep the numbering in sync with `crate::value::Value`
// and with whatever emits them (TODO confirm the consumer).
pub const DISC_INT: u64 = 0;
pub const DISC_FLOAT: u64 = 1;
pub const DISC_BOOL: u64 = 2;
pub const DISC_STRING: u64 = 3;
pub const DISC_VARIANT: u64 = 4;
pub const DISC_MAP: u64 = 5;
pub const DISC_QUOTATION: u64 = 6;
pub const DISC_CLOSURE: u64 = 7;
pub const DISC_CHANNEL: u64 = 8;
pub const DISC_WEAVECTX: u64 = 9;
pub const DISC_SYMBOL: u64 = 10;
/// Encodes a `Value` as a single stack word.
///
/// Ints and booleans become tagged immediates; every other variant is boxed
/// into an `Arc<Value>` whose raw pointer (as `u64`) becomes the word. The
/// word then owns one strong reference, released by [`drop_stack_value`].
#[inline]
pub fn value_to_stack_value(value: Value) -> StackValue {
    match value {
        Value::Int(n) => tag_int(n),
        Value::Bool(b) => {
            if b { TAG_TRUE } else { TAG_FALSE }
        }
        boxed => Arc::into_raw(Arc::new(boxed)) as u64,
    }
}
/// Decodes a stack word back into an owned `Value`, consuming the word's
/// reference (for heap values the slot must not be used again afterwards).
///
/// # Safety
/// A non-immediate `sv` must be a live pointer produced by
/// [`value_to_stack_value`] / `Arc::into_raw`.
#[inline]
pub unsafe fn stack_value_to_value(sv: StackValue) -> Value {
    if is_tagged_int(sv) {
        return Value::Int(untag_int(sv));
    }
    match sv {
        TAG_FALSE => Value::Bool(false),
        TAG_TRUE => Value::Bool(true),
        bits => {
            // SAFETY: guaranteed by the caller — `bits` came from Arc::into_raw.
            let arc = unsafe { Arc::from_raw(bits as *const Value) };
            // Move out of the Arc when we hold the last reference; otherwise
            // deep-clone the shared value and let the Arc drop normally.
            Arc::try_unwrap(arc).unwrap_or_else(|shared| (*shared).clone())
        }
    }
}
/// FFI: clones the stack value at `src` into `dst` (refcount bump for heap
/// values, plain copy for immediates).
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_clone_value(src: *const StackValue, dst: *mut StackValue) {
    // SAFETY: caller guarantees both pointers are valid StackValue slots.
    unsafe { *dst = clone_stack_value(*src) }
}
/// Clones a stack word: immediates are copied as plain bits, heap values get
/// one additional strong reference.
///
/// # Safety
/// A non-immediate `sv` must be a live pointer produced by `Arc::into_raw`.
#[inline]
pub unsafe fn clone_stack_value(sv: StackValue) -> StackValue {
    if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
        return sv;
    }
    // SAFETY: guaranteed by the caller. `increment_strong_count` is the
    // stable, single-call equivalent of the previous
    // from_raw / clone / forget dance — it bumps the refcount without
    // materializing (and having to forget) a temporary Arc.
    unsafe {
        Arc::increment_strong_count(sv as *const Value);
    }
    sv
}
/// Releases the reference owned by a stack word. Immediates carry no
/// ownership and are ignored; for heap values one strong reference is
/// dropped, freeing the `Value` if it was the last.
///
/// # Safety
/// A non-immediate `sv` must be a live pointer produced by `Arc::into_raw`,
/// and the slot must not be read again afterwards.
#[inline]
pub unsafe fn drop_stack_value(sv: StackValue) {
    if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
        return;
    }
    // SAFETY: guaranteed by the caller. Equivalent to
    // `drop(Arc::from_raw(..))`, and symmetric with the
    // `increment_strong_count` used when cloning.
    unsafe { Arc::decrement_strong_count(sv as *const Value) }
}
/// Pushes `value` (encoded as a stack word) and returns the advanced pointer.
///
/// # Safety
/// `stack` must point at a writable slot with room for one more value.
#[inline]
pub unsafe fn push(stack: Stack, value: Value) -> Stack {
    unsafe {
        stack.write(value_to_stack_value(value));
        stack.add(1)
    }
}
/// Pushes an already-encoded stack word and returns the advanced pointer.
///
/// # Safety
/// `stack` must point at a writable slot with room for one more value.
#[inline]
pub unsafe fn push_sv(stack: Stack, sv: StackValue) -> Stack {
    unsafe {
        stack.write(sv);
        stack.add(1)
    }
}
/// Pops the top value, returning the lowered pointer and the decoded `Value`
/// (the slot's reference is consumed).
///
/// # Safety
/// The stack must hold at least one value.
#[inline]
pub unsafe fn pop(stack: Stack) -> (Stack, Value) {
    unsafe {
        let top = stack.sub(1);
        (top, stack_value_to_value(top.read()))
    }
}
/// Pops the top raw stack word without decoding it; ownership of any heap
/// reference transfers to the caller.
///
/// # Safety
/// The stack must hold at least one value.
#[inline]
pub unsafe fn pop_sv(stack: Stack) -> (Stack, StackValue) {
    unsafe {
        let top = stack.sub(1);
        (top, top.read())
    }
}
/// Pops two values, returning them in stack order: `(sp, lower, upper)`.
/// `_op_name` is kept for interface compatibility (currently unused).
///
/// # Safety
/// The stack must hold at least two values.
#[inline]
pub unsafe fn pop_two(stack: Stack, _op_name: &str) -> (Stack, Value, Value) {
    unsafe {
        let (after_top, upper) = pop(stack);
        let (rest, lower) = pop(after_top);
        (rest, lower, upper)
    }
}
/// Pops three values, returning them in stack order: `(sp, bottom, middle, top)`.
/// `_op_name` is kept for interface compatibility (currently unused).
///
/// # Safety
/// The stack must hold at least three values.
#[inline]
pub unsafe fn pop_three(stack: Stack, _op_name: &str) -> (Stack, Value, Value, Value) {
    unsafe {
        let (s1, top) = pop(stack);
        let (s2, middle) = pop(s1);
        let (rest, bottom) = pop(s2);
        (rest, bottom, middle, top)
    }
}
/// Returns a decoded copy of the top value without popping it.
///
/// # Safety
/// The stack must hold at least one value.
#[inline]
pub unsafe fn peek(stack: Stack) -> Value {
    unsafe {
        let top = *stack.sub(1);
        // Clone first: stack_value_to_value consumes a reference, and the
        // slot must keep its own.
        stack_value_to_value(clone_stack_value(top))
    }
}
/// Returns the raw top word without popping it (no refcount change).
///
/// # Safety
/// The stack must hold at least one value.
#[inline]
pub unsafe fn peek_sv(stack: Stack) -> StackValue {
    unsafe { stack.sub(1).read() }
}
/// Returns a mutable borrow of the heap `Value` stored in `slot`, or `None`
/// for immediates (tagged ints / booleans) and for heap values that are
/// shared (strong count > 1).
///
/// NOTE(review): the `&mut *(v as *mut Value)` cast launders the borrow's
/// lifetime into the caller-chosen `'a` while the Arc is forgotten back into
/// the slot. This is only sound while the slot is not cloned, dropped, or
/// read through another path during the borrow — callers must guarantee that.
#[inline]
pub unsafe fn heap_value_mut<'a>(slot: *mut StackValue) -> Option<&'a mut Value> {
unsafe {
let sv = *slot;
// Immediates have no heap storage to mutate.
if is_tagged_int(sv) || sv == TAG_FALSE || sv == TAG_TRUE {
return None;
}
// Temporarily rebuild the Arc owned by the slot (refcount unchanged).
let mut arc = Arc::from_raw(sv as *const Value);
// get_mut succeeds only when this Arc is the sole strong reference.
let val_ref = Arc::get_mut(&mut arc).map(|v| &mut *(v as *mut Value));
// Hand ownership back to the slot without touching the refcount.
std::mem::forget(arc); val_ref
}
}
/// Mutable access to the heap value in the top slot; see [`heap_value_mut`]
/// for the (significant) aliasing caveats.
#[inline]
pub unsafe fn peek_heap_mut<'a>(stack: Stack) -> Option<&'a mut Value> {
unsafe { heap_value_mut(stack.sub(1)) }
}
/// Mutable access to the heap value one below the top; see [`heap_value_mut`]
/// for the (significant) aliasing caveats.
#[inline]
pub unsafe fn peek_heap_mut_second<'a>(stack: Stack) -> Option<&'a mut Value> {
unsafe { heap_value_mut(stack.sub(2)) }
}
/// FFI: duplicates the top value (`a -- a a`).
///
/// # Safety
/// At least one value on the stack and room for one more.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_dup(stack: Stack) -> Stack {
    unsafe {
        let duplicate = clone_stack_value(peek_sv(stack));
        push_sv(stack, duplicate)
    }
}
/// Pops the top value, releases its reference, and returns the lowered
/// stack pointer.
///
/// # Safety
/// The stack must hold at least one value.
#[inline]
pub unsafe fn drop_top(stack: Stack) -> Stack {
    unsafe {
        let (rest, top) = pop_sv(stack);
        drop_stack_value(top);
        rest
    }
}
/// FFI wrapper around [`drop_top`]: pops and releases the top value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_drop_op(stack: Stack) -> Stack {
unsafe { drop_top(stack) }
}
/// FFI: pushes a `Value` passed by value.
///
/// `improper_ctypes_definitions` is allowed because `Value` is not an
/// FFI-safe type by declaration; presumably the generated caller is compiled
/// against this exact layout — TODO confirm against the codegen.
#[allow(improper_ctypes_definitions)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_push_value(stack: Stack, value: Value) -> Stack {
unsafe { push(stack, value) }
}
/// FFI: swaps the two topmost values (`a b -- b a`).
///
/// # Safety
/// The stack must hold at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_swap(stack: Stack) -> Stack {
    // Ownership only changes slots, so no refcount traffic is needed.
    unsafe {
        std::ptr::swap(stack.sub(1), stack.sub(2));
    }
    stack
}
/// FFI: copies the second value onto the top (`a b -- a b a`).
///
/// # Safety
/// At least two values on the stack and room for one more.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_over(stack: Stack) -> Stack {
    unsafe {
        let second = stack.sub(2).read();
        push_sv(stack, clone_stack_value(second))
    }
}
/// FFI: rotates the top three values left (`a b c -- b c a`).
///
/// # Safety
/// The stack must hold at least three values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_rot(stack: Stack) -> Stack {
    unsafe {
        let bottom = stack.sub(3);
        let middle = stack.sub(2);
        let top = stack.sub(1);
        // Two adjacent swaps implement the left-rotation:
        // (a b c) -> (b a c) -> (b c a). No refcount traffic needed.
        std::ptr::swap(bottom, middle);
        std::ptr::swap(middle, top);
    }
    stack
}
/// FFI: removes the value beneath the top (`a b -- b`).
///
/// # Safety
/// The stack must hold at least two values.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_nip(stack: Stack) -> Stack {
    unsafe {
        let top = stack.sub(1);
        let below = stack.sub(2);
        // Release the removed value's reference, then slide the top down.
        drop_stack_value(below.read());
        below.write(top.read());
        top
    }
}
/// FFI: copies the top value beneath the second (`a b -- b a b`).
///
/// # Safety
/// At least two values on the stack and room for one more.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_tuck(stack: Stack) -> Stack {
    unsafe {
        let top = stack.sub(1);
        let below = stack.sub(2);
        let extra = clone_stack_value(top.read());
        // Swap the two existing slots, then push the extra copy on top.
        std::ptr::swap(top, below);
        push_sv(stack, extra)
    }
}
/// FFI: duplicates the top pair (`a b -- a b a b`).
///
/// # Safety
/// At least two values on the stack and room for two more.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_2dup(stack: Stack) -> Stack {
    unsafe {
        let copy_lower = clone_stack_value(stack.sub(2).read());
        let copy_upper = clone_stack_value(stack.sub(1).read());
        let sp = push_sv(stack, copy_lower);
        push_sv(sp, copy_upper)
    }
}
/// FFI: `n pick` — copies the value `n` slots below the (post-pop) top onto
/// the top; `0 pick` behaves like `dup`. On a bad index the runtime error is
/// set and the stack is returned with the index already popped.
///
/// # Safety
/// The stack base must have been registered via `patch_seq_set_stack_base`.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_pick_op(stack: Stack) -> Stack {
    unsafe {
        let (sp, index) = pop(stack);
        let Value::Int(raw) = index else {
            crate::error::set_runtime_error("pick: expected Int index on top of stack");
            return sp;
        };
        if raw < 0 {
            crate::error::set_runtime_error(format!(
                "pick: index cannot be negative (got {})",
                raw
            ));
            return sp;
        }
        let n = raw as usize;
        // Depth in slots between the registered base and the current sp.
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "pick: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp;
        }
        let picked = clone_stack_value(*sp.sub(n + 1));
        push_sv(sp, picked)
    }
}
/// FFI: `n roll` — moves the value `n` slots below the (post-pop) top to the
/// top, sliding the values in between down one slot. `1 roll` is `swap`,
/// `2 roll` is `rot`, `0 roll` is a no-op. On a bad index the runtime error
/// is set and the stack is returned with the index already popped.
///
/// # Safety
/// The stack base must have been registered via `patch_seq_set_stack_base`.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_roll(stack: Stack) -> Stack {
    unsafe {
        let (sp, index) = pop(stack);
        let Value::Int(raw) = index else {
            crate::error::set_runtime_error("roll: expected Int index on top of stack");
            return sp;
        };
        if raw < 0 {
            crate::error::set_runtime_error(format!(
                "roll: index cannot be negative (got {})",
                raw
            ));
            return sp;
        }
        // Small cases map onto the existing primitives.
        match raw {
            0 => return sp,
            1 => return patch_seq_swap(sp),
            2 => return patch_seq_rot(sp),
            _ => {}
        }
        let n = raw as usize;
        let base = get_stack_base();
        let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
        if n >= depth {
            crate::error::set_runtime_error(format!(
                "roll: index {} exceeds stack depth {} (need at least {} values)",
                n,
                depth,
                n + 1
            ));
            return sp;
        }
        // Save the rolled value, slide the n words above it down one slot
        // (overlapping regions, so ptr::copy = memmove), and drop the saved
        // word on top. Ownership moves with the bits — no refcount traffic.
        let src = sp.sub(n + 1);
        let rolled = src.read();
        std::ptr::copy(src.add(1), src, n);
        sp.sub(1).write(rolled);
        sp
    }
}
/// Clones the `count` values ending just below `src` into ascending slots
/// starting at `dst` (bottom-most value first).
///
/// # Safety
/// `src.sub(count)..src` must be live values; `dst..dst.add(count)` must be
/// writable and must not overlap the source range.
pub unsafe fn clone_stack_segment(src: Stack, dst: Stack, count: usize) {
    unsafe {
        for offset in 0..count {
            let from = src.sub(count - offset);
            dst.add(offset).write(clone_stack_value(*from));
        }
    }
}
use std::cell::Cell;
// Per-coroutine stack base pointer stored as a usize (0 == unset).
// `may::coroutine_local!` gives each coroutine its own copy, analogous to
// thread-local storage.
may::coroutine_local!(static STACK_BASE: Cell<usize> = Cell::new(0));
/// FFI: records `base` as the current coroutine's stack bottom so depth
/// calculations (pick/roll/dump/clone) know where the stack starts.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_set_stack_base(base: Stack) {
STACK_BASE.with(|cell| {
cell.set(base as usize);
});
}
/// Returns the current coroutine's registered stack base, or null if
/// `patch_seq_set_stack_base` was never called on this coroutine.
#[inline]
pub fn get_stack_base() -> Stack {
    let bits = STACK_BASE.with(|cell| cell.get());
    bits as *mut StackValue
}
/// FFI: deep-clones the current stack, returning the new stack pointer.
/// Callers that also need the new base use [`clone_stack_with_base`].
#[unsafe(no_mangle)]
pub unsafe extern "C" fn clone_stack(sp: Stack) -> Stack {
    unsafe { clone_stack_with_base(sp).0 }
}
/// Deep-clones the current stack into freshly allocated (and leaked)
/// storage, returning `(new_sp, new_base)`.
///
/// The previous special case for an empty stack was redundant:
/// `depth.max(DEFAULT_STACK_CAPACITY)` already yields the default capacity
/// when `depth == 0`, the clone loop runs zero times, and
/// `new_base.add(0) == new_base` — so the general path produces the exact
/// same result. The duplicated `use` was hoisted too.
///
/// # Safety
/// `sp` must lie at or above the registered stack base, and every slot in
/// `base..sp` must hold a live value.
///
/// # Panics
/// Panics if no stack base was registered for this coroutine.
pub unsafe fn clone_stack_with_base(sp: Stack) -> (Stack, Stack) {
    use crate::tagged_stack::{DEFAULT_STACK_CAPACITY, TaggedStack};
    let base = get_stack_base();
    if base.is_null() {
        panic!("clone_stack: stack base not set");
    }
    let depth = unsafe { sp.offset_from(base) as usize };
    let capacity = depth.max(DEFAULT_STACK_CAPACITY);
    let new_stack = TaggedStack::new(capacity);
    let new_base = new_stack.base;
    // Leak the backing storage: ownership passes to the raw base pointer.
    std::mem::forget(new_stack);
    unsafe {
        for i in 0..depth {
            *new_base.add(i) = clone_stack_value(*base.add(i));
        }
        (new_base.add(depth), new_base)
    }
}
/// Allocates a default-capacity stack, leaks its backing storage, and
/// returns the base pointer (ownership passes to the raw pointer).
pub fn alloc_stack() -> Stack {
    use crate::tagged_stack::TaggedStack;
    let storage = TaggedStack::with_default_capacity();
    let base = storage.base;
    std::mem::forget(storage);
    base
}
/// Convenience for tests: allocates a stack and registers it as this
/// coroutine's stack base in one call.
pub fn alloc_test_stack() -> Stack {
    let base = alloc_stack();
    unsafe { patch_seq_set_stack_base(base) };
    base
}
/// FFI: prints the whole stack, bottom to top, as compact SON on one line
/// (prefixed `» `), then drains it — every value is released and the base
/// pointer is returned as the new (empty) stack pointer.
///
/// # Safety
/// The stack base must have been registered and every slot in `base..sp`
/// must hold a live value.
#[unsafe(no_mangle)]
pub unsafe extern "C" fn patch_seq_stack_dump(sp: Stack) -> Stack {
    let base = get_stack_base();
    if base.is_null() {
        eprintln!("[stack.dump: base not set]");
        return sp;
    }
    let depth = (sp as usize - base as usize) / std::mem::size_of::<StackValue>();
    if depth == 0 {
        println!("»");
        return base;
    }
    use std::io::Write;
    print!("» ");
    for slot in 0..depth {
        if slot > 0 {
            print!(" ");
        }
        // SAFETY: slots base..base+depth hold live values (caller contract).
        unsafe { print_stack_value(*base.add(slot)) };
    }
    println!();
    let _ = std::io::stdout().flush();
    // Drain: release every printed value; the stack is now empty.
    for slot in 0..depth {
        unsafe { drop_stack_value(*base.add(slot)) };
    }
    base
}
/// Prints one stack value in compact SON form (no trailing newline).
fn print_stack_value(sv: StackValue) {
    use crate::son::{SonConfig, value_to_son};
    // Clone first: stack_value_to_value consumes a reference, and the slot
    // being printed must keep its own.
    let owned = unsafe { stack_value_to_value(clone_stack_value(sv)) };
    print!("{}", value_to_son(&owned, &SonConfig::compact()));
}
// Short aliases for the FFI entry points, for calling from Rust code.
pub use patch_seq_2dup as two_dup;
pub use patch_seq_dup as dup;
pub use patch_seq_nip as nip;
pub use patch_seq_over as over;
pub use patch_seq_pick_op as pick;
pub use patch_seq_roll as roll;
pub use patch_seq_rot as rot;
pub use patch_seq_swap as swap;
pub use patch_seq_tuck as tuck;
/// Allocates a test stack and registers it as the coroutine's stack base
/// (see `alloc_test_stack`).
#[macro_export]
macro_rules! test_stack {
() => {{ $crate::stack::alloc_test_stack() }};
}
// Unit tests: round-trip every `Value` variant through the tagged stack and
// exercise the error paths of pick and roll.
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_pick_negative_index_sets_error() {
unsafe {
crate::error::clear_runtime_error();
let stack = alloc_test_stack();
let stack = push(stack, Value::Int(100));
let stack = push(stack, Value::Int(-1));
let _stack = patch_seq_pick_op(stack);
assert!(crate::error::has_runtime_error());
let error = crate::error::take_runtime_error().unwrap();
assert!(error.contains("negative"));
}
}
#[test]
fn test_pick_out_of_bounds_sets_error() {
unsafe {
crate::error::clear_runtime_error();
let stack = alloc_test_stack();
let stack = push(stack, Value::Int(100));
let stack = push(stack, Value::Int(10));
let _stack = patch_seq_pick_op(stack);
assert!(crate::error::has_runtime_error());
let error = crate::error::take_runtime_error().unwrap();
assert!(error.contains("exceeds stack depth"));
}
}
#[test]
fn test_roll_negative_index_sets_error() {
unsafe {
crate::error::clear_runtime_error();
let stack = alloc_test_stack();
let stack = push(stack, Value::Int(100));
let stack = push(stack, Value::Int(-1));
let _stack = patch_seq_roll(stack);
assert!(crate::error::has_runtime_error());
let error = crate::error::take_runtime_error().unwrap();
assert!(error.contains("negative"));
}
}
#[test]
fn test_roll_out_of_bounds_sets_error() {
unsafe {
crate::error::clear_runtime_error();
let stack = alloc_test_stack();
let stack = push(stack, Value::Int(100));
let stack = push(stack, Value::Int(10));
let _stack = patch_seq_roll(stack);
assert!(crate::error::has_runtime_error());
let error = crate::error::take_runtime_error().unwrap();
assert!(error.contains("exceeds stack depth"));
}
}
// Int and Bool are immediates (tagged words); the rest round-trip through
// an Arc-boxed heap value.
#[test]
fn test_int_roundtrip() {
unsafe {
let stack = alloc_test_stack();
let stack = push(stack, Value::Int(42));
let (_, val) = pop(stack);
assert_eq!(val, Value::Int(42));
}
}
#[test]
fn test_bool_roundtrip() {
unsafe {
let stack = alloc_test_stack();
let stack = push(stack, Value::Bool(true));
let stack = push(stack, Value::Bool(false));
let (stack, val_f) = pop(stack);
let (_, val_t) = pop(stack);
assert_eq!(val_f, Value::Bool(false));
assert_eq!(val_t, Value::Bool(true));
}
}
#[test]
fn test_float_roundtrip() {
unsafe {
let stack = alloc_test_stack();
let stack = push(stack, Value::Float(std::f64::consts::PI));
let (_, val) = pop(stack);
assert_eq!(val, Value::Float(std::f64::consts::PI));
}
}
#[test]
fn test_string_roundtrip() {
unsafe {
let stack = alloc_test_stack();
let s = crate::seqstring::SeqString::from("hello");
let stack = push(stack, Value::String(s));
let (_, val) = pop(stack);
match val {
Value::String(s) => assert_eq!(s.as_str(), "hello"),
other => panic!("Expected String, got {:?}", other),
}
}
}
#[test]
fn test_symbol_roundtrip() {
unsafe {
let stack = alloc_test_stack();
let s = crate::seqstring::SeqString::from("my-sym");
let stack = push(stack, Value::Symbol(s));
let (_, val) = pop(stack);
match val {
Value::Symbol(s) => assert_eq!(s.as_str(), "my-sym"),
other => panic!("Expected Symbol, got {:?}", other),
}
}
}
#[test]
fn test_variant_roundtrip() {
unsafe {
let stack = alloc_test_stack();
let tag = crate::seqstring::SeqString::from("Foo");
let data = crate::value::VariantData::new(tag, vec![Value::Int(1), Value::Int(2)]);
let stack = push(stack, Value::Variant(std::sync::Arc::new(data)));
let (_, val) = pop(stack);
match val {
Value::Variant(v) => {
assert_eq!(v.tag.as_str(), "Foo");
assert_eq!(v.fields.len(), 2);
}
other => panic!("Expected Variant, got {:?}", other),
}
}
}
#[test]
fn test_map_roundtrip() {
unsafe {
let stack = alloc_test_stack();
let mut map = std::collections::HashMap::new();
map.insert(crate::value::MapKey::Int(1), Value::Int(100));
let stack = push(stack, Value::Map(Box::new(map)));
let (_, val) = pop(stack);
match val {
Value::Map(m) => {
assert_eq!(m.len(), 1);
assert_eq!(m.get(&crate::value::MapKey::Int(1)), Some(&Value::Int(100)));
}
other => panic!("Expected Map, got {:?}", other),
}
}
}
#[test]
fn test_quotation_roundtrip() {
unsafe {
let stack = alloc_test_stack();
let stack = push(
stack,
Value::Quotation {
wrapper: 0x1000,
impl_: 0x2000,
},
);
let (_, val) = pop(stack);
match val {
Value::Quotation { wrapper, impl_ } => {
assert_eq!(wrapper, 0x1000);
assert_eq!(impl_, 0x2000);
}
other => panic!("Expected Quotation, got {:?}", other),
}
}
}
#[test]
fn test_closure_roundtrip() {
unsafe {
let stack = alloc_test_stack();
let env: std::sync::Arc<[Value]> = std::sync::Arc::from(vec![Value::Int(42)]);
let stack = push(
stack,
Value::Closure {
fn_ptr: 0x3000,
env,
},
);
let (_, val) = pop(stack);
match val {
Value::Closure { fn_ptr, env } => {
assert_eq!(fn_ptr, 0x3000);
assert_eq!(env.len(), 1);
}
other => panic!("Expected Closure, got {:?}", other),
}
}
}
#[test]
fn test_channel_roundtrip() {
unsafe {
let stack = alloc_test_stack();
let (sender, receiver) = may::sync::mpmc::channel();
let ch = std::sync::Arc::new(crate::value::ChannelData { sender, receiver });
let stack = push(stack, Value::Channel(ch));
let (_, val) = pop(stack);
assert!(matches!(val, Value::Channel(_)));
}
}
#[test]
fn test_weavectx_roundtrip() {
unsafe {
let stack = alloc_test_stack();
let (ys, yr) = may::sync::mpmc::channel();
let (rs, rr) = may::sync::mpmc::channel();
let yield_chan = std::sync::Arc::new(crate::value::WeaveChannelData {
sender: ys,
receiver: yr,
});
let resume_chan = std::sync::Arc::new(crate::value::WeaveChannelData {
sender: rs,
receiver: rr,
});
let stack = push(
stack,
Value::WeaveCtx {
yield_chan,
resume_chan,
},
);
let (_, val) = pop(stack);
assert!(matches!(val, Value::WeaveCtx { .. }));
}
}
// Float is heap-boxed (only Int/Bool are immediates), so dup must clone via
// the Arc refcount and both pops must decode the same value.
#[test]
fn test_dup_pop_pop_heap_type() {
unsafe {
let stack = alloc_test_stack();
let stack = push(stack, Value::Float(2.5));
let stack = patch_seq_dup(stack);
let (stack, val1) = pop(stack);
let (_, val2) = pop(stack);
assert_eq!(val1, Value::Float(2.5));
assert_eq!(val2, Value::Float(2.5));
}
}
}