use super::{
fmt::{Annotation, OutputNode},
Comparison, Filter, FilterKind, MirEdge,
};
use crate::stack::Overflow;
use ::core::convert::{TryFrom, TryInto};
use ::core::num::NonZeroU8;
use ::mbot_proc_macro_helpers::{Bytecode, BytecodeInstruction};
use ::petgraph::visit::EdgeRef;
use ::petgraph::Direction;
use ::rand::Rng;
use ::std::collections::{BTreeSet, HashMap};
mod debug;
/// Static description of how a [`Bytecode`] type is laid out when encoded.
///
/// Used (via the associated `Bytecode::INFO` constant) to compute encoded
/// widths at compile time.
pub(crate) enum BytecodeInfo {
    /// An enum with no payload on any variant; encoded as its discriminant.
    FieldlessEnum {
        // Total number of variants; determines how many bytes the
        // discriminant occupies (see `width`).
        variant_count: usize,
    },
    #[allow(dead_code)]
    /// A product type; encoded as the concatenation of its fields.
    Struct {
        fields: &'static [BytecodeInfo],
    },
    /// A fixed-width scalar (integers, unit), encoded as raw bytes.
    Primitive {
        // Encoded size in bytes.
        width: usize,
    },
}
impl BytecodeInfo {
    /// Encoded width in bytes of a value described by this layout.
    ///
    /// This is a `const fn`, so field iteration uses a manual `while` loop
    /// (iterators are not usable in const contexts).
    pub(crate) const fn width(&self) -> usize {
        match self {
            // One byte can distinguish up to 256 variants; add one byte per
            // additional block of 256.
            // NOTE(review): at exact multiples of 256 (e.g. variant_count ==
            // 256, whose discriminants 0..=255 still fit in one byte) this
            // yields one byte more than the minimum — confirm whether that
            // slack is intentional.
            Self::FieldlessEnum { variant_count } => variant_count.div_euclid(256) + 1,
            // Sum of field widths, in declaration order.
            Self::Struct { fields } => {
                let mut idx = 0;
                let mut sum = 0;
                while idx < fields.len() {
                    sum += fields[idx].width();
                    idx += 1;
                }
                sum
            }
            Self::Primitive { width } => *width,
        }
    }
}
/// Types that can be written to and read back from the VM's byte stream.
///
/// The default implementations of the `unfolding` methods make a type
/// non-unfoldable: `unfolding_code` never produces a compact one-byte code
/// and `from_unfolding_code` never reconstructs from one.
pub(crate) trait Bytecode: Sized {
    /// Static layout description used to compute encoded widths.
    const INFO: BytecodeInfo;
    /// Whether `self` lies in `range`; defaults to `false` for types with no
    /// meaningful ordering.
    fn contained_by(&self, _range: ::core::ops::RangeToInclusive<Self>) -> bool {
        false
    }
    /// Compact one-byte encoding of `self`, if it fits. `None` means the
    /// value must be encoded out-of-line.
    fn unfolding_code(&self) -> Option<NonZeroU8> {
        None
    }
    /// Inverse of `unfolding_code`; `None` when `code` is not a valid
    /// unfolded encoding for this type.
    fn from_unfolding_code(_code: u8) -> Option<Self> {
        None
    }
    /// Append the encoding of `self` to `out`, returning the number of bytes
    /// written.
    fn to_bytes(&self, _out: &mut Vec<u8>) -> usize;
    /// Decode one value from the front of `buf`, returning the remaining
    /// bytes and the value, or `None` if `buf` is too short.
    fn from_bytes(buf: &[u8]) -> Option<(&[u8], Self)>;
}
/// Implements [`Bytecode`] for fixed-width integer primitives using a
/// little-endian byte encoding.
macro_rules! impl_bytecode_prims {
    ($($t:ty),*) => {
        $(impl Bytecode for $t {
            const INFO: BytecodeInfo = BytecodeInfo::Primitive {
                width: ::core::mem::size_of::<$t>(),
            };
            fn contained_by(&self, range: ::core::ops::RangeToInclusive<Self>) -> bool {
                range.contains(self)
            }
            /// One-byte compact code: `value + 1`, so the code is never zero.
            ///
            /// NOTE(review): the bound `u8::MAX as _` wraps for narrow signed
            /// types (e.g. it is `-1` for i8), so which values unfold depends
            /// on the cast; the round-trip with `from_unfolding_code` still
            /// holds because both sides use wrapping `as` casts — confirm the
            /// intended unfolding range for signed types.
            fn unfolding_code(&self) -> Option<NonZeroU8> {
                if *self < u8::MAX as _ {
                    NonZeroU8::new((*self + 1) as u8)
                } else {
                    None
                }
            }
            fn from_unfolding_code(code: u8) -> Option<Self> {
                // `unfolding_code` never emits 0 (the code is NonZeroU8), but
                // guard with checked_sub anyway instead of panicking on
                // underflow in debug builds when handed an arbitrary byte.
                code.checked_sub(1).map(|prev| prev as $t)
            }
            fn to_bytes(&self, out: &mut Vec<u8>) -> usize {
                out.extend_from_slice(&self.to_le_bytes());
                ::core::mem::size_of::<Self>()
            }
            fn from_bytes(buf: &[u8]) -> Option<(&[u8], Self)> {
                use ::core::convert::TryFrom;
                // Split off exactly one integer's worth of bytes, if present.
                if buf.len() >= ::core::mem::size_of::<Self>() {
                    let (int, rest) = buf.split_at(::core::mem::size_of::<Self>());
                    Some((rest, Self::from_le_bytes(<_>::try_from(int).unwrap())))
                } else {
                    None
                }
            }
        })*
    }
}
impl_bytecode_prims! {
    u8, u16, u32, u64, usize,
    i8, i16, i32, i64, isize
}
/// Unit occupies zero bytes in the stream; decoding always succeeds and
/// consumes nothing.
impl Bytecode for () {
    const INFO: BytecodeInfo = BytecodeInfo::Primitive {
        // size_of::<()>() == 0
        width: ::core::mem::size_of::<()>(),
    };
    fn contained_by(&self, range: ::core::ops::RangeToInclusive<Self>) -> bool {
        // `()..=()` trivially contains `()`, so this is always true.
        range.contains(self)
    }
    fn to_bytes(&self, _out: &mut Vec<u8>) -> usize {
        0
    }
    fn from_bytes(buf: &[u8]) -> Option<(&[u8], Self)> {
        Some((buf, ()))
    }
}
/// A complete VM instruction (opcode plus operands) that can be encoded,
/// decoded, and patched in place. Implemented for [`Instruction`] by the
/// `BytecodeInstruction` derive macro.
trait BytecodeInstruction<'view>: Sized {
    /// Mutable-view type returned by `decode_mut`, used for backpatching.
    type View;
    /// Number of bytes occupied by the opcode prefix.
    const PREFIX_WIDTH: usize;
    /// Append the encoded instruction to `buf`, returning bytes written.
    fn encode(self, buf: &mut Vec<u8>) -> usize;
    /// Decode one instruction from the front of `cursor`; `Ok((rest, inst))`.
    fn decode(cursor: &[u8]) -> Result<(&[u8], Self), ()>;
    /// Like `decode`, but yields a mutable view for rewriting the
    /// instruction's bytes in place.
    fn decode_mut(cursor: &'view mut [u8]) -> Result<(&'view mut [u8], Self::View), ()>;
    /// Encoded width in bytes of this instruction.
    fn width(&self) -> usize;
}
/// Overflow policy for arithmetic instructions.
#[derive(Debug, Bytecode)]
enum ArithmeticKind {
    /// Abort execution on overflow (the only kind currently emitted).
    Aborting,
    /// Wrap/ignore overflow; not yet implemented by the interpreter.
    Unchecked,
}
// Slot index within a value-stack frame.
type StackOffset = u16;
// Slot index within a formatting-stack frame (kept distinct for readability;
// currently the same width as StackOffset).
type FmtStackOffset = u16;
/// The stack VM's instruction set.
///
/// The `BytecodeInstruction` derive generates encode/decode/width for each
/// variant. The `#[ty(...)]` attributes appear to declare a stack-effect
/// signature and `#[unfold(n)]` a compact encoding for operand `n` —
/// NOTE(review): both are interpreted by the external proc macro; confirm
/// their exact semantics there.
#[derive(Debug, BytecodeInstruction)]
enum Instruction {
    // --- control flow (offsets are relative to the following instruction) ---
    Jump(i16),
    /// Pops a boolean; jumps when it is true.
    ConditionalJump(i16),
    Call(isize),
    Return,
    // --- values ---
    /// Push an integer literal.
    #[ty(i64)]
    Integer(i64),
    /// Pop count and sides, push the set of individual die results.
    #[ty(i64 -> i64 -> Set<i64>)]
    Roll,
    #[ty(i64 -> i64 -> i64)]
    #[unfold(0)]
    RollNoPartials(ArithmeticKind),
    #[ty(i64 -> i64 -> i64)]
    #[unfold(0)]
    Add(ArithmeticKind),
    #[ty(i64 -> i64 -> i64)]
    #[unfold(0)]
    Subtract(ArithmeticKind),
    /// Collapse a set into its sum.
    #[ty(Set<i64> -> i64)]
    #[unfold(0)]
    Summate(ArithmeticKind),
    Count,
    // --- filtering ---
    #[ty(i64 -> Set<i64> -> Set<i64>)]
    #[unfold(0)]
    SimpleFilter(FilterKind),
    /// Filter a set by calling a predicate function at the given relative
    /// offset once per element.
    FilterSatisfies(i16),
    #[ty(i64 -> i64 -> bool)]
    #[unfold(0)]
    Compare(Comparison),
    // --- value-stack frames and slots ---
    PushStackFrame(StackOffset),
    PopStackFrame,
    Store(StackOffset),
    Load(StackOffset),
    /// Duplicate the top of stack (trivially copyable values only).
    Copy,
    /// Deep-duplicate the top of stack.
    Clone,
    Abort(u8),
    // --- formatting/output stack ---
    FmtMakeList,
    FmtPushToList,
    /// Snapshot the current top-of-stack value into the output stream.
    FmtRecord,
    /// Wrap the top output node with the annotation at the given table index.
    FmtAnnotate(u16),
    FmtPushStackFrame(FmtStackOffset),
    FmtPopStackFrame,
    FmtStore(FmtStackOffset),
    FmtLoad(FmtStackOffset),
    // --- execution budget / debugging ---
    UseFuel(u64),
    /// No-op carrying a debug tag (see `debug::NopTag`).
    TaggedNop(u16),
}
/// Inherent forwarding methods so call sites don't need the
/// [`BytecodeInstruction`] trait in scope.
impl Instruction {
    fn encode(self, buf: &mut Vec<u8>) -> usize {
        <Self as BytecodeInstruction>::encode(self, buf)
    }
    fn decode(cursor: &[u8]) -> Result<(&[u8], Self), ()> {
        <Self as BytecodeInstruction>::decode(cursor)
    }
    fn decode_mut(cursor: &mut [u8]) -> Result<(&mut [u8], InstructionView), ()> {
        <Self as BytecodeInstruction>::decode_mut(cursor)
    }
    fn width(&self) -> usize {
        <Self as BytecodeInstruction>::width(self)
    }
}
/// Mutable window over an encoded instruction inside the code buffer,
/// produced by `Instruction::decode_mut`; allows rewriting it in place.
struct InstructionView<'a> {
    // Bytes starting at the instruction; may extend past it to the end of
    // the code buffer.
    buf: &'a mut [u8],
}
impl InstructionView<'_> {
    /// Re-decode the instruction currently stored at this view.
    ///
    /// Panics if the bytes are not a valid instruction; views are only ever
    /// constructed over already-validated code.
    fn read(&self) -> Instruction {
        Instruction::decode(self.buf)
            .expect("InstructionViews should only be constructed for valid code")
            .1
    }
    /// Overwrite the viewed instruction with `inst`.
    ///
    /// Returns `Err(())` when the encoding of `inst` would not fit in the
    /// view. Only the first `inst.width()` bytes are rewritten; callers
    /// (backpatching) must replace an instruction with one of the same
    /// width, otherwise trailing bytes of the old encoding would survive.
    fn write(&mut self, inst: Instruction) -> Result<(), ()> {
        let width = inst.width();
        if width <= self.buf.len() {
            let mut encoded = Vec::with_capacity(width);
            inst.encode(&mut encoded);
            // Fix: copy into a prefix of the view. The original copied into
            // the whole view, which panics (`copy_from_slice` requires equal
            // lengths) whenever the view extends past the instruction.
            self.buf[..encoded.len()].copy_from_slice(&encoded);
            Ok(())
        } else {
            Err(())
        }
    }
}
/// Incrementally assembles a [`Program`]: appends encoded instructions,
/// tracks drafted instructions awaiting backpatching, and interns
/// annotations.
struct ProgramBuilder {
    // Encoded instruction stream built so far.
    code: Vec<u8>,
    // Byte offsets of drafted instructions that still need `backpatch`;
    // must be empty by `finalize`.
    backpatch_queue: BTreeSet<usize>,
    // Annotation table referenced by index from FmtAnnotate.
    annotations: Vec<Annotation>,
}
impl ProgramBuilder {
fn new() -> Self {
Self {
code: Vec::new(),
backpatch_queue: BTreeSet::new(),
annotations: Vec::new(),
}
}
fn inst(&mut self, instruction: Instruction) {
instruction.encode(&mut self.code);
}
fn draft_inst(&mut self, instruction: Instruction) -> usize {
let offset = self.code.len();
instruction.encode(&mut self.code);
self.backpatch_queue.insert(offset);
offset
}
fn backpatch<F: FnOnce(&mut Instruction)>(&mut self, offset: usize, func: F) -> Result<(), ()> {
let inst = Instruction::decode_mut(self.code.get_mut(offset..).ok_or(())?);
match inst {
Ok((_, mut inst)) => {
assert!(self.backpatch_queue.remove(&offset));
let mut temp = inst.read();
func(&mut temp);
inst.write(temp).expect(
"It is a bug in codegen if it ever tries to backpatch an instruction with a wider one."
);
Ok(())
}
Err(()) => Err(()),
}
}
fn add_annotation(&mut self, annotation: Annotation) -> u16 {
let offset = self.annotations.len();
self.annotations.push(annotation);
offset
.try_into()
.expect("only up to u16 annotations are supported")
}
fn current_offset(&self) -> usize {
self.code.len()
}
fn finalize(self) -> Program {
let Self {
code,
backpatch_queue,
annotations,
} = self;
assert!(backpatch_queue.is_empty());
let code = code.into_boxed_slice();
Program { code, annotations }
}
}
/// Traversal bookkeeping for `lower`: per-region stack-slot allocation and
/// the saved state (`Frame`) of enclosing regions.
mod event {
    use super::{FmtStackOffset, StackOffset};
    use crate::mir::MirGraph;
    use ::core::convert::{TryFrom, TryInto};
    use ::core::fmt::Debug;
    use ::petgraph::visit::{DfsPostOrder, GraphBase};
    use ::std::cell::Cell;
    use ::std::collections::BTreeMap;

    /// What kind of region is currently being lowered; controls the epilogue
    /// emitted when its DFS is exhausted.
    pub(super) enum Mode {
        /// The implicit top-level region.
        Normal,
        /// A loop body; ends with a backward ConditionalJump.
        Loop,
        /// A function body; `enclosure_start` is the offset of the drafted
        /// Jump that lets surrounding code skip over the emitted body.
        Function { enclosure_start: usize },
    }

    /// A stack slot assigned to an MIR node's result, plus a count of how
    /// many times it has been loaded so far (used to decide when the value
    /// must be cloned back into the slot).
    #[derive(Debug)]
    pub(super) struct StackSlot<Offset> {
        offset: Offset,
        // Cell so `get` can bump the count through a shared reference.
        accesses: Cell<usize>,
    }
    impl<O: Copy> StackSlot<O> {
        fn new(offset: O) -> Self {
            Self {
                offset,
                accesses: Cell::new(0),
            }
        }
        fn add_access(&self) {
            self.accesses.set(self.accesses.get() + 1);
        }
        pub(super) fn accesses(&self) -> usize {
            self.accesses.get()
        }
        pub(super) fn offset(&self) -> O {
            self.offset
        }
    }

    /// Maps MIR nodes to stack slots, allocating offsets densely in first-use
    /// order.
    #[derive(Debug)]
    pub(super) struct StackAllocation<Offset> {
        map: BTreeMap<<MirGraph as GraphBase>::NodeId, StackSlot<Offset>>,
    }
    impl<O: Copy + TryFrom<usize> + Debug> StackAllocation<O>
    where
        <O as TryFrom<usize>>::Error: Debug,
    {
        pub(super) fn new() -> Self {
            Self {
                map: BTreeMap::new(),
            }
        }
        /// Slot for `key`, allocating the next free offset on first use.
        ///
        /// Panics when the slot count no longer fits in the offset type.
        pub(super) fn get_or_allocate(
            &mut self,
            key: <MirGraph as GraphBase>::NodeId,
        ) -> &mut StackSlot<O> {
            let next_offset = self.map.len();
            // or_insert_with: build the slot (and any panic message) lazily,
            // and report the actual offset type `O` — the original printed
            // `StackOffset`'s name even for fmt allocations.
            self.map.entry(key).or_insert_with(|| {
                StackSlot::new(next_offset.try_into().unwrap_or_else(|e| {
                    panic!(
                        "stack slot count exceeds the capacity of {}: {:?}",
                        ::core::any::type_name::<O>(),
                        e
                    )
                }))
            })
        }
        /// Slot previously allocated for `key`; records the access.
        ///
        /// Panics if `key` was never allocated.
        pub(super) fn get(&self, key: <MirGraph as GraphBase>::NodeId) -> &StackSlot<O> {
            let entry = &self.map[&key];
            entry.add_access();
            entry
        }
        /// Number of slots allocated so far (the frame size to emit).
        pub(super) fn len(&self) -> usize {
            self.map.len()
        }
    }

    /// Saved traversal state of an enclosing region, restored when an inner
    /// region (loop/function body) has been fully lowered.
    pub(super) struct Frame<'a> {
        pub(super) graph: &'a MirGraph,
        pub(super) dfs: DfsPostOrder<<MirGraph as GraphBase>::NodeId, ::fixedbitset::FixedBitSet>,
        pub(super) locals: StackAllocation<StackOffset>,
        pub(super) fmt_locals: StackAllocation<FmtStackOffset>,
        pub(super) open_stack_frame: usize,
        pub(super) fmt_open_stack_frame: usize,
        pub(super) start: usize,
        pub(super) mode: Mode,
        pub(super) post: Option<super::Instruction>,
        pub(super) post_fmt: Option<super::Instruction>,
    }
}
/// Assigns each listed field of `$exp` into an already-declared local of the
/// same name — a field-by-field "destructuring assignment", used to restore a
/// saved `Frame` into `lower`'s working variables without re-binding them.
macro_rules! destructure_assign {
    ($_:ty { $($field:ident),* } = $exp:expr) => {
        // `unused_assignments`: a restored value may be overwritten before
        // ever being read, which is expected here.
        #[allow(unused_assignments)]
        match $exp {
            exp => {
                $(
                    $field = exp.$field;
                )*
            }
        }
    }
}
/// Lower an MIR graph into a linear stack-VM [`Program`].
///
/// Each region's graph is walked in DFS post-order (operands before their
/// consumers). Entering a nested region (loop body, function body) pushes the
/// current traversal state onto `stack` as an [`event::Frame`] and starts a
/// fresh DFS over the inner graph; when an inner DFS is exhausted, the outer
/// state is restored. Stack-frame sizes and jump targets are emitted as
/// drafts and backpatched once known.
pub fn lower(mir: &super::Mir) -> Program {
    use super::{BinaryOp, Coercion, FmtNode, MirGraph, MirNode};
    use ::petgraph::visit::GraphBase;
    use event::{Frame, Mode, StackAllocation, StackSlot};
    let mut builder = ProgramBuilder::new();
    // Saved states of enclosing regions.
    let mut stack = Vec::<Frame>::new();
    // Working state for the region currently being lowered.
    let mut graph = &mir.graph;
    let mut dfs = ::petgraph::visit::DfsPostOrder::new(&mir.graph, mir.top);
    let mut locals = StackAllocation::new();
    let mut fmt_locals = StackAllocation::new();
    // Drafted frame-push instructions; their sizes are backpatched at `End`.
    let mut open_stack_frame = builder.draft_inst(Instruction::PushStackFrame(0));
    let mut fmt_open_stack_frame = builder.draft_inst(Instruction::FmtPushStackFrame(0));
    let mut start = 0;
    let mut mode = Mode::Normal;
    // FunctionDefinition body identity (address) -> entry offset in code.
    let mut function_map = HashMap::<usize, usize>::new();
    // Pending Store/FmtStore for the previous node's results; flushed at the
    // top of the next iteration so a region switch can cancel them.
    let mut post = None;
    let mut post_fmt = None;
    loop {
        while let Some(node_id) = dfs.next(graph) {
            // Flush pending stores from the previous node (shadows `post`).
            for post in post.into_iter().chain(post_fmt) {
                builder.inst(post);
            }
            // A value operand of the current node: which input port it feeds
            // and where its value lives.
            struct Arg<'a> {
                port: u8,
                stack_slot: &'a StackSlot<StackOffset>,
                node_index: <MirGraph as GraphBase>::NodeId,
            }
            let mut args = Vec::new();
            for neighbor_id in graph.neighbors_directed(node_id, Direction::Outgoing) {
                let ports = graph
                    .edges_connecting(node_id, neighbor_id)
                    .filter_map(|x| match x.weight() {
                        &MirEdge::DataDependency { port }
                            if graph[neighbor_id].produces_value() =>
                        {
                            Some(port)
                        }
                        // Record also consumes intermediate results as values.
                        &MirEdge::IntermediateResultDependency { port }
                            if matches!(&graph[node_id], MirNode::Fmt(FmtNode::Record)) =>
                        {
                            Some(port)
                        }
                        _ => None,
                    })
                    .collect::<Vec<_>>();
                // At most one value edge per producer is supported.
                assert!(ports.len() <= 1);
                for port in ports {
                    args.push(Arg {
                        port,
                        stack_slot: locals.get(neighbor_id),
                        node_index: neighbor_id,
                    });
                }
            }
            // Push operands in port order so the stack layout matches the
            // instruction's expectations.
            args.sort_unstable_by(|a, b| a.port.cmp(&b.port));
            for Arg {
                port: _,
                stack_slot,
                node_index,
            } in args
            {
                builder.inst(Instruction::Load(stack_slot.offset()));
                // `Load` moves the value out of its slot; if more consumers
                // are still coming, clone it and put a copy back.
                if stack_slot.accesses()
                    < graph
                        .edges_directed(node_index, Direction::Incoming)
                        .count()
                {
                    builder.inst(Instruction::Clone);
                    builder.inst(Instruction::Store(stack_slot.offset()));
                }
            }
            // Same gathering for formatting (output) operands.
            struct FmtArg<'a> {
                port: u8,
                stack_slot: &'a StackSlot<FmtStackOffset>,
                node_index: <MirGraph as GraphBase>::NodeId,
            }
            let mut fmt_args = Vec::new();
            for neighbor_id in graph.neighbors_directed(node_id, Direction::Outgoing) {
                let ports = graph
                    .edges_connecting(node_id, neighbor_id)
                    .filter_map(|x| match x.weight() {
                        &MirEdge::IntermediateResultDependency { port }
                            if graph[neighbor_id].produces_output() =>
                        {
                            Some(port)
                        }
                        _ => None,
                    })
                    .collect::<Vec<_>>();
                assert!(ports.len() <= 1);
                for port in ports {
                    fmt_args.push(FmtArg {
                        port,
                        stack_slot: fmt_locals.get(neighbor_id),
                        node_index: neighbor_id,
                    });
                }
            }
            fmt_args.sort_unstable_by(|a, b| a.port.cmp(&b.port));
            for FmtArg {
                port: _,
                stack_slot,
                node_index: _,
            } in fmt_args
            {
                builder.inst(Instruction::FmtLoad(stack_slot.offset()));
            }
            // Schedule stores for this node's results; emitted on the next
            // iteration (or cancelled when this node opens a region).
            post = if graph[node_id].produces_value() {
                Some(Instruction::Store(locals.get_or_allocate(node_id).offset()))
            } else {
                None
            };
            post_fmt = if graph[node_id].produces_output() {
                Some(Instruction::FmtStore(
                    fmt_locals.get_or_allocate(node_id).offset(),
                ))
            } else {
                None
            };
            match &graph[node_id] {
                &MirNode::Integer(int) => {
                    builder.inst(Instruction::Integer(int));
                }
                MirNode::Coerce(Coercion::FromOutputToInt) => {
                    builder.inst(Instruction::Summate(ArithmeticKind::Aborting));
                }
                MirNode::Roll => {
                    builder.inst(Instruction::Roll);
                }
                MirNode::BinOp(BinaryOp::Add) => {
                    builder.inst(Instruction::Add(ArithmeticKind::Aborting));
                }
                MirNode::BinOp(BinaryOp::Subtract) => {
                    builder.inst(Instruction::Subtract(ArithmeticKind::Aborting));
                }
                MirNode::BinOp(BinaryOp::LogicalAnd) => {
                    todo!("lowering logical AND to the stack VM")
                }
                MirNode::Filter(Filter::Simple(filter)) => {
                    builder.inst(Instruction::SimpleFilter(*filter));
                }
                MirNode::Filter(Filter::SatisfiesPredicate) => {
                    // Find the predicate function this filter depends on; its
                    // body address is the key into `function_map`.
                    let mut func: Option<usize> = None;
                    for edge in graph.edges(node_id) {
                        match edge.weight() {
                            MirEdge::FunctionDependency { port: 0 } => {
                                let node = &graph[edge.target()];
                                match node {
                                    MirNode::FunctionDefinition(body) => {
                                        let id = body as *const _ as usize;
                                        func = Some(id);
                                        break;
                                    }
                                    _ => unreachable!(
                                        "invalid node type for filter predicate argument"
                                    ),
                                }
                            }
                            _ => (),
                        }
                    }
                    let func = func.unwrap();
                    let func = function_map[&func];
                    // Draft, then immediately patch in the (negative) offset
                    // back to the already-emitted function entry.
                    let filter = builder.current_offset();
                    builder.draft_inst(Instruction::FilterSatisfies(0));
                    let current = builder.current_offset();
                    builder
                        .backpatch(filter, |inst| {
                            let offset: i16 = (current - func).try_into().unwrap();
                            *inst = Instruction::FilterSatisfies(-offset);
                        })
                        .unwrap();
                }
                MirNode::Compare(comparison) => {
                    builder.inst(Instruction::Compare(*comparison));
                }
                MirNode::Count => builder.inst(Instruction::Count),
                MirNode::Apply => todo!("figure out functions on the stack VM"),
                MirNode::PartialApply => {
                    todo!("figure out partial function application on the stack VM")
                }
                // Enter a loop body: save state, start a fresh DFS over it.
                MirNode::Loop(body, _ty) => {
                    stack.push(Frame {
                        graph,
                        dfs,
                        locals,
                        fmt_locals,
                        open_stack_frame,
                        fmt_open_stack_frame,
                        start,
                        mode,
                        post,
                        post_fmt,
                    });
                    graph = &body.graph;
                    dfs = ::petgraph::visit::DfsPostOrder::new(graph, body.end);
                    locals = StackAllocation::new();
                    fmt_locals = StackAllocation::new();
                    start = builder.current_offset();
                    mode = Mode::Loop;
                    post = None;
                    post_fmt = None;
                    open_stack_frame = builder.current_offset();
                    builder.draft_inst(Instruction::PushStackFrame(0));
                    fmt_open_stack_frame = builder.current_offset();
                    builder.draft_inst(Instruction::FmtPushStackFrame(0));
                }
                MirNode::Decision(_branches) => todo!("lowering loops for the stack VM"),
                // Enter a function body: emitted inline, with a drafted Jump
                // so surrounding code skips over it (patched in the epilogue).
                MirNode::FunctionDefinition(body) => {
                    stack.push(Frame {
                        graph,
                        dfs,
                        locals,
                        fmt_locals,
                        open_stack_frame,
                        fmt_open_stack_frame,
                        start,
                        mode,
                        post,
                        post_fmt,
                    });
                    graph = &body.graph;
                    dfs = ::petgraph::visit::DfsPostOrder::new(graph, body.end);
                    locals = StackAllocation::new();
                    fmt_locals = StackAllocation::new();
                    let enclosure_start = builder.current_offset();
                    builder.draft_inst(Instruction::Jump(0));
                    start = builder.current_offset();
                    open_stack_frame = builder.current_offset();
                    builder.draft_inst(Instruction::PushStackFrame(0));
                    fmt_open_stack_frame = builder.current_offset();
                    builder.draft_inst(Instruction::FmtPushStackFrame(0));
                    function_map.insert(body as *const _ as usize, start);
                    mode = Mode::Function { enclosure_start };
                    post = None;
                    post_fmt = None;
                }
                MirNode::RecursiveEnvironment(_lambdas) => {
                    todo!("lowering recursive environments for the stack VM")
                }
                // Region arguments are materialized by the caller; nothing to
                // emit in Loop/Function modes yet.
                MirNode::RegionArgument(_port) => {
                    match mode {
                        Mode::Normal => {
                            unreachable!(
                                "the implicit containing region doesn't take arguments yet"
                            )
                        }
                        Mode::Loop => {}
                        Mode::Function { enclosure_start: _ } => {}
                    }
                }
                // Region end: close both frames and backpatch their sizes now
                // that the final slot counts are known.
                MirNode::End => {
                    builder.inst(Instruction::PopStackFrame);
                    builder.inst(Instruction::FmtPopStackFrame);
                    builder
                        .backpatch(open_stack_frame, |inst| {
                            *inst = Instruction::PushStackFrame(locals.len().try_into().unwrap());
                        })
                        .unwrap();
                    builder
                        .backpatch(fmt_open_stack_frame, |inst| {
                            *inst = Instruction::FmtPushStackFrame(
                                fmt_locals.len().try_into().unwrap(),
                            );
                        })
                        .unwrap();
                    match mode {
                        Mode::Normal => {}
                        Mode::Loop => (),
                        Mode::Function { .. } => builder.inst(Instruction::Return),
                    }
                }
                MirNode::Fmt(FmtNode::MakeList) => builder.inst(Instruction::FmtMakeList),
                MirNode::Fmt(FmtNode::PushToList) => builder.inst(Instruction::FmtPushToList),
                MirNode::Fmt(FmtNode::Record) => builder.inst(Instruction::FmtRecord),
                MirNode::Fmt(FmtNode::Annotate(annotation)) => {
                    let annotation = builder.add_annotation(annotation.clone());
                    builder.inst(Instruction::FmtAnnotate(annotation))
                }
                MirNode::Fmt(FmtNode::RegionArgument(_)) =>
                {
                    ()
                }
                MirNode::UseFuel(amt) => builder.inst(Instruction::UseFuel(*amt)),
            }
        }
        // Region epilogue once its DFS is exhausted.
        match mode {
            Mode::Normal => (),
            // Loop: conditional back-jump to the region start.
            Mode::Loop => {
                let jmp = builder.draft_inst(Instruction::ConditionalJump(0));
                let current = builder.current_offset();
                builder
                    .backpatch(jmp, |inst| {
                        let offset: i16 = (current - start).try_into().unwrap();
                        *inst = Instruction::ConditionalJump(-offset);
                    })
                    .unwrap();
            }
            // Function: patch the enclosing Jump to skip over the body.
            Mode::Function { enclosure_start } => {
                let current = builder.current_offset();
                builder
                    .backpatch(enclosure_start, |inst| {
                        let offset: i16 = (current - start).try_into().unwrap();
                        *inst = Instruction::Jump(offset);
                    })
                    .unwrap();
            }
        }
        // Restore the enclosing region's traversal state, or finish.
        if let Some(frame) = stack.pop() {
            destructure_assign! { Frame { graph, dfs, locals, fmt_locals, open_stack_frame, fmt_open_stack_frame, start, mode, post, post_fmt } = frame }
        } else {
            break;
        }
    }
    builder.finalize()
}
/// A fully assembled, immutable bytecode program, ready to be run by a
/// [`Machine`].
#[derive(Debug, Clone)]
pub struct Program {
    // Encoded instruction stream.
    code: Box<[u8]>,
    // Annotation table indexed by `FmtAnnotate` operands.
    annotations: Vec<Annotation>,
}
impl Program {
    /// Size of the compiled bytecode in bytes.
    pub fn len(&self) -> usize {
        self.code.len()
    }
    /// Whether the program contains no bytecode at all.
    ///
    /// Added to pair with `len` (clippy `len_without_is_empty`).
    pub fn is_empty(&self) -> bool {
        self.code.is_empty()
    }
    /// Human-readable disassembly: one `offset\t| instruction` line per
    /// decoded instruction.
    ///
    /// Panics if the code stream is malformed; programs produced by `lower`
    /// are always decodable.
    pub fn disasm(&self) -> String {
        use ::core::fmt::Write;
        let mut buf = String::new();
        let mut cursor = &*self.code;
        while !cursor.is_empty() {
            let (rest, inst) =
                Instruction::decode(cursor).expect("compiled programs are always valid");
            // Byte offset of this instruction within the program.
            write!(
                buf,
                "{}\t| ",
                cursor.as_ptr() as usize - self.code.as_ptr() as usize
            )
            .unwrap();
            match inst {
                // Pretty-print known debug nop tags.
                Instruction::TaggedNop(code) => match debug::NopTag::from_code(code) {
                    debug::NopTag::Unknown => writeln!(buf, "{:?}", inst).unwrap(),
                    tag => writeln!(buf, "TaggedNop({:?})", tag).unwrap(),
                },
                _ => writeln!(buf, "{:?}", inst).unwrap(),
            }
            cursor = rest;
        }
        buf
    }
}
/// A runtime value on the machine's value stack. The `Unwrap` derive
/// provides the `unwrap_integer`/`unwrap_boolean`/`unwrap_set` accessors
/// used throughout `Machine::step`.
#[derive(::derive_more::Unwrap, Debug, Clone)]
pub enum Value {
    Integer(i64),
    Boolean(bool),
    /// A multiset of individual die results.
    Set(Vec<i64>),
}
impl Value {
    /// Coerce this value to an integer: integers pass through, sets are
    /// summed (failing with [`Overflow`] when the sum doesn't fit in i64).
    ///
    /// Panics on booleans — the compiler never emits such a coercion.
    pub fn to_int(&self) -> Result<i64, Overflow> {
        match self {
            Self::Integer(int) => Ok(*int),
            Self::Set(set) => compute::summate(set),
            Self::Boolean(_) => unreachable!("attempted to coerce boolean to integer"),
        }
    }
}
/// One frame of local-variable slots with move semantics: `load` takes the
/// value out, leaving the slot empty (consumers that need the value again
/// must store a clone back — see the `Clone` instruction).
#[derive(Debug)]
struct VarFrame<T = Value> {
    slots: Vec<Option<T>>,
}
impl<T> VarFrame<T> {
    /// Build a frame with `size` empty slots.
    fn with_capacity(size: usize) -> Self {
        let mut slots = Vec::with_capacity(size);
        slots.resize_with(size, || None);
        Self { slots }
    }
    /// Put `value` into the slot at `offset`, replacing any previous value.
    fn store(&mut self, offset: usize, value: T) {
        self.slots[offset] = Some(value);
    }
    /// Move the value out of the slot at `offset`; panics when the slot is
    /// empty or out of range (a codegen bug).
    fn load(&mut self, offset: usize) -> T {
        self.slots[offset].take().unwrap()
    }
}
/// Resumable state for the `FilterSatisfies` instruction: filters `buf` in
/// place, one element per predicate call, compacting survivors to the front.
#[derive(Debug)]
struct FilterSat {
    // Elements being filtered; the first `new_len` are accepted survivors.
    buf: Vec<i64>,
    // Relative code offset of the predicate function (stored for re-dispatch
    // by `Machine::step` on each `Return`).
    func: i16,
    // Number of accepted elements so far.
    new_len: usize,
    // Index of the element currently under test.
    cursor: usize,
}
impl FilterSat {
    /// Start filtering `buf` with the predicate at relative offset `func`.
    fn new(buf: Vec<i64>, func: i16) -> Self {
        Self {
            buf,
            func,
            new_len: 0,
            cursor: 0,
        }
    }
    /// Next element awaiting a verdict, or `None` when all were processed.
    fn poll(&mut self) -> Option<i64> {
        // Idiomatic replacement for the manual `match Some/None` copy.
        self.buf.get(self.cursor).copied()
    }
    /// Keep the current element: compact it into the survivor prefix.
    fn accept(&mut self) {
        self.buf[self.new_len] = self.buf[self.cursor];
        self.new_len += 1;
        self.cursor += 1;
    }
    /// Drop the current element.
    fn discard(&mut self) {
        self.cursor += 1;
    }
    /// Finish filtering, returning only the accepted elements.
    fn done(mut self) -> Vec<i64> {
        self.buf.truncate(self.new_len);
        self.buf
    }
}
/// A saved return point on the call stack.
#[derive(Debug)]
enum CallFrame {
    /// Plain call: resume at the stored instruction pointer.
    Regular(usize),
    /// In-progress `FilterSatisfies`: resume point plus the filter's
    /// generator state, re-dispatched on each predicate `Return`.
    FilterSatisfies(usize, FilterSat),
}
/// The stack VM: executes a borrowed [`Program`] one instruction at a time.
#[derive(Debug)]
pub struct Machine<'a> {
    // Byte offset into `code` of the next instruction.
    instruction_pointer: usize,
    // Operand stack.
    value_stack: Vec<Value>,
    // Local-variable frames (PushStackFrame/PopStackFrame).
    var_stack: Vec<VarFrame>,
    // Return addresses and resumable filter states.
    call_stack: Vec<CallFrame>,
    code: &'a [u8],
    annotations: &'a [Annotation],
    // Formatting output being built (FmtMakeList/FmtRecord/...).
    output: Vec<OutputNode>,
    // Formatting-variable frames, parallel to `var_stack`.
    output_vars: Vec<VarFrame<OutputNode>>,
    // Remaining execution budget; exhaustion aborts with `Abort::OutOfFuel`.
    fuel: u64,
}
/// Outcome of a single [`Machine::step`].
#[derive(Debug)]
pub enum S {
    /// Instruction executed; more code remains.
    Normal,
    /// A breakpoint `TaggedNop` was hit.
    Break,
    /// The instruction pointer reached the end of the code.
    Finished,
}
/// Reasons execution terminated abnormally.
#[derive(Debug)]
pub enum Abort {
    /// Arithmetic overflowed under `ArithmeticKind::Aborting`.
    Overflow(Overflow),
    /// The fuel budget was exhausted by a `UseFuel` instruction.
    OutOfFuel,
}
impl From<Overflow> for Abort {
fn from(e: Overflow) -> Self {
Self::Overflow(e)
}
}
impl<'a> Machine<'a> {
pub fn new(program: &Program, fuel: u64) -> Machine<'_> {
Machine {
instruction_pointer: 0,
value_stack: Vec::new(),
var_stack: Vec::new(),
call_stack: Vec::new(),
code: &program.code,
annotations: &program.annotations,
output: Vec::new(),
output_vars: Vec::new(),
fuel,
}
}
pub fn add_fuel(&mut self, fuel: u64) {
self.fuel = self.fuel.saturating_add(fuel);
}
pub fn clear_fuel(&mut self) {
self.fuel = 0;
}
pub fn step<R: Rng>(&mut self, rng: &mut R) -> Result<S, Abort> {
let code = &self.code[self.instruction_pointer..];
let pre_ptr = code.as_ptr();
if code.is_empty() {
return Ok(S::Finished);
}
let (code, instruction) = Instruction::decode(code).unwrap();
self.instruction_pointer += code.as_ptr() as usize - pre_ptr as usize;
match instruction {
Instruction::Jump(offset) => {
self.instruction_pointer += offset as usize;
}
Instruction::ConditionalJump(offset) => {
if self.value_stack.pop().unwrap().unwrap_boolean() {
self.instruction_pointer =
self.instruction_pointer.wrapping_add(offset as usize);
}
}
Instruction::Call(offset) => {
self.call_stack
.push(CallFrame::Regular(self.instruction_pointer));
self.instruction_pointer += offset as usize;
}
Instruction::Return => {
let frame = self
.call_stack
.pop()
.expect("We would never Return from the global context");
match frame {
CallFrame::Regular(ptr) => {
self.instruction_pointer = ptr;
}
CallFrame::FilterSatisfies(ptr, mut generator) => {
let accepted = self.value_stack.pop().unwrap().unwrap_boolean();
if accepted {
generator.accept();
} else {
generator.discard()
}
match generator.poll() {
Some(item) => {
self.value_stack.push(Value::Integer(item));
self.instruction_pointer =
ptr.wrapping_add(generator.func as usize);
self.call_stack
.push(CallFrame::FilterSatisfies(ptr, generator));
}
None => {
self.value_stack.push(Value::Set(generator.done()));
self.instruction_pointer = ptr;
}
}
}
}
}
Instruction::Integer(int) => self.value_stack.push(Value::Integer(int)),
Instruction::Roll => {
let sides = self.value_stack.pop().unwrap().unwrap_integer();
let count = self.value_stack.pop().unwrap().unwrap_integer();
let mut partials = Vec::with_capacity(count as usize);
for _ in 0..count {
partials.push(rng.gen_range(1..=sides));
}
self.value_stack.push(Value::Set(partials));
}
Instruction::Add(ArithmeticKind::Aborting) => {
let first = self.value_stack.pop().unwrap().unwrap_integer();
let second = self.value_stack.pop().unwrap().unwrap_integer();
match first.checked_add(second) {
Some(x) => self.value_stack.push(Value::Integer(x)),
None => {
if first > 0 || second > 0 {
Err(Overflow::Positive)?
} else {
Err(Overflow::Negative)?
}
}
}
}
Instruction::Subtract(ArithmeticKind::Aborting) => {
let (right, left) = (
self.value_stack.pop().unwrap().unwrap_integer(),
self.value_stack.pop().unwrap().unwrap_integer(),
);
self.value_stack.push(match left.checked_sub(right) {
Some(x) => Value::Integer(x),
None => {
if left > 0 || right < 0 {
Err(Overflow::Positive)?
} else {
Err(Overflow::Negative)?
}
}
});
}
Instruction::Summate(ArithmeticKind::Aborting) => {
let set = self.value_stack.pop().unwrap().unwrap_set();
self.value_stack
.push(Value::Integer(compute::summate(&set)?));
}
Instruction::Count => {
let set = self.value_stack.pop().unwrap().unwrap_set();
self.value_stack
.push(Value::Integer(set.len().try_into().unwrap()));
}
Instruction::SimpleFilter(kind @ (FilterKind::KeepHigh | FilterKind::KeepLow)) => {
let keep_count = self.value_stack.pop().unwrap().unwrap_integer();
let mut partials = self.value_stack.pop().unwrap().unwrap_set();
match kind {
FilterKind::KeepHigh => partials.sort_unstable_by(|a, b| b.cmp(a)),
FilterKind::KeepLow => partials.sort_unstable_by(|a, b| a.cmp(b)),
_ => unreachable!(),
}
partials.truncate(keep_count as usize);
self.value_stack.push(Value::Set(partials));
}
Instruction::FilterSatisfies(offset) => {
let partials = self.value_stack.pop().unwrap().unwrap_set();
let mut generator = FilterSat::new(partials, offset);
match generator.poll() {
Some(item) => {
self.value_stack.push(Value::Integer(item));
self.call_stack.push(CallFrame::FilterSatisfies(
self.instruction_pointer,
generator,
));
self.instruction_pointer =
self.instruction_pointer.wrapping_add(offset as usize);
}
None => self.value_stack.push(Value::Set(generator.done())),
}
}
Instruction::Compare(Comparison::Equal) => {
let first = self.value_stack.pop().unwrap().unwrap_integer();
let second = self.value_stack.pop().unwrap().unwrap_integer();
self.value_stack.push(Value::Boolean(first == second));
}
Instruction::Compare(Comparison::GreaterThan) => {
let right = self.value_stack.pop().unwrap().unwrap_integer();
let left = self.value_stack.pop().unwrap().unwrap_integer();
self.value_stack.push(Value::Boolean(left > right));
}
Instruction::PushStackFrame(size) => {
self.var_stack.push(VarFrame::with_capacity(size as usize))
}
Instruction::PopStackFrame => {
self.var_stack.pop();
}
Instruction::Store(offset) => {
let only = self.value_stack.pop().unwrap();
self.var_stack
.last_mut()
.unwrap()
.store(offset as usize, only);
}
Instruction::Load(offset) => {
self.value_stack
.push(self.var_stack.last_mut().unwrap().load(offset as usize));
}
Instruction::Copy => {
let only = self.value_stack.last().unwrap();
match only {
&Value::Integer(int) => self.value_stack.push(Value::Integer(int)),
&Value::Boolean(b) => self.value_stack.push(Value::Boolean(b)),
val => unreachable!("value {:?} is not trivially copyable", val),
}
}
Instruction::Clone => {
let only = self.value_stack.last().unwrap().clone();
self.value_stack.push(only)
}
Instruction::FmtMakeList => self.output.push(OutputNode::List(Vec::new())),
Instruction::FmtPushToList => {
let only = self.output.pop().unwrap();
let mut list = self.output.pop().unwrap();
match list {
OutputNode::List(ref mut list) => list.push(only),
_ => unreachable!("can only push to list"),
}
self.output.push(list);
}
Instruction::FmtRecord => self
.output
.push(OutputNode::Value(self.value_stack.last().unwrap().clone())),
Instruction::FmtAnnotate(offset) => {
let top = self.output.pop().unwrap();
self.output.push(OutputNode::Annotated(
self.annotations[offset as usize].clone(),
Box::new(top),
))
}
Instruction::FmtPushStackFrame(size) => self
.output_vars
.push(VarFrame::<OutputNode>::with_capacity(size as usize)),
Instruction::FmtPopStackFrame => {
self.output_vars.pop();
}
Instruction::FmtStore(offset) => {
let only = self.output.pop().unwrap();
self.output_vars
.last_mut()
.unwrap()
.store(offset as usize, only);
}
Instruction::FmtLoad(offset) => {
self.output
.push(self.output_vars.last_mut().unwrap().load(offset as usize));
}
Instruction::UseFuel(amt) => {
self.fuel = self.fuel.saturating_sub(amt);
if self.fuel == 0 {
return Err(Abort::OutOfFuel);
}
}
Instruction::TaggedNop(code) => {
if let debug::NopTag::Breakpoint = debug::NopTag::from_code(code) {
return Ok(S::Break);
}
}
inst => todo!("execution of {:?}", inst),
}
Ok(S::Normal)
}
pub fn interpret<R: Rng>(&mut self, rng: &mut R) -> Result<(), Abort> {
loop {
match self.step(rng) {
Ok(S::Normal | S::Break) => (),
Ok(S::Finished) => return Ok(()),
Err(e) => return Err(e),
}
}
}
pub fn restart(&mut self) {
self.instruction_pointer = 0;
self.value_stack.clear();
self.var_stack.clear();
self.call_stack.clear();
self.output.clear();
}
pub fn debugger<R: Rng>(&mut self, rng: &mut R) {
debug::debugger_repl(self, rng);
}
pub fn stack_top(&self) -> Option<&Value> {
self.value_stack.last()
}
pub fn output(&self) -> Option<&OutputNode> {
self.output.last()
}
}
/// Shared arithmetic helpers used by both `Value::to_int` and the `Summate`
/// instruction.
mod compute {
    use super::Overflow;
    /// Sum a set of die results, reporting the overflow direction on
    /// failure.
    pub(super) fn summate(set: &[i64]) -> Result<i64, Overflow> {
        // `iter()` instead of `into_iter()` on a slice reference, and the
        // redundant `Ok(...?)` wrapper removed — try_fold already yields the
        // Result we need.
        set.iter().try_fold(0i64, |a, &x| {
            a.checked_add(x).ok_or_else(|| {
                // checked_add only fails when both operands push the same
                // way, so either sign test identifies the direction.
                if a > 0 || x > 0 {
                    Overflow::Positive
                } else {
                    Overflow::Negative
                }
            })
        })
    }
}
/// Result bundle of a complete interpretation run.
pub struct InterpOutput {
    /// Formatted output tree for display.
    pub output: OutputNode,
    /// Final value left on the stack.
    pub value: Value,
    /// `value` collapsed to a single integer total.
    pub total: i64,
}
/// Compile `ast` down to bytecode and run it to completion, returning the
/// final value, its integer total, and the formatted output tree.
///
/// Panics when MIR lowering fails or the program leaves the stacks empty;
/// returns `Err` on overflow or fuel exhaustion.
pub fn interpret<R: Rng>(
    ast: &crate::parse::Program,
    fuel: u64,
    rng: &mut R,
) -> Result<InterpOutput, Abort> {
    // AST -> MIR -> bytecode.
    let program = lower(&super::lower(ast).unwrap());
    let mut machine = Machine::new(&program, fuel);
    machine.interpret(rng)?;
    let Machine {
        mut output,
        mut value_stack,
        ..
    } = machine;
    let value = value_stack.pop().unwrap();
    Ok(InterpOutput {
        total: value.to_int()?,
        output: output.pop().unwrap(),
        value,
    })
}
#[cfg(test)]
mod tests {
    use super::{lower, Machine, Value};
    use crate::mir::{BinaryOp, Mir, MirEdge, MirGraph, MirNode};
    /// One producer feeding two input ports of the same consumer.
    /// NOTE(review): marked `should_panic`, so this currently fails somewhere
    /// in lower/interpret; the final assertion documents the intended
    /// behavior (the second consumer sees a clone, 10 + 10 == 20) once
    /// multiple dependents are supported.
    #[test]
    #[should_panic]
    fn multiple_dependents() {
        let mut graph = MirGraph::new();
        let int = graph.add_node(MirNode::Integer(10));
        let add = graph.add_node(MirNode::BinOp(BinaryOp::Add));
        graph.add_edge(add, int, MirEdge::DataDependency { port: 0 });
        graph.add_edge(add, int, MirEdge::DataDependency { port: 1 });
        let mir = Mir { graph, top: add };
        let prog = lower(&mir);
        let mut machine = Machine::new(&prog, 0);
        let _ = machine.interpret(&mut ::rand::thread_rng());
        let stack_top = machine.stack_top();
        assert!(matches!(stack_top, Some(Value::Integer(20))));
    }
}