#![no_std]
#[cfg(feature = "std")]
extern crate std;
#[macro_use]
extern crate alloc;
use alloc::vec::Vec;
use core::cell::RefCell;
use miden_air::trace::{
CHIPLETS_WIDTH, DECODER_TRACE_WIDTH, MIN_TRACE_LEN, RANGE_CHECK_TRACE_WIDTH, STACK_TRACE_WIDTH,
SYS_TRACE_WIDTH,
};
pub use miden_air::{ExecutionOptions, ExecutionOptionsError};
pub use vm_core::{
chiplets::hasher::Digest, crypto::merkle::SMT_DEPTH, errors::InputError,
utils::DeserializationError, AdviceInjector, AssemblyOp, Felt, Kernel, Operation, Program,
ProgramInfo, QuadExtension, StackInputs, StackOutputs, Word, EMPTY_WORD, ONE, ZERO,
};
use vm_core::{
code_blocks::{
Call, CodeBlock, Dyn, Join, Loop, OpBatch, Span, Split, OP_BATCH_SIZE, OP_GROUP_SIZE,
},
CodeBlockTable, Decorator, DecoratorIterator, FieldElement, StackTopState,
};
pub use winter_prover::matrix::ColMatrix;
mod operations;
mod system;
use system::System;
pub use system::{ContextId, FMP_MIN, SYSCALL_FMP_MIN};
mod decoder;
use decoder::Decoder;
mod stack;
use stack::Stack;
mod range;
use range::RangeChecker;
mod host;
pub use host::{
advice::{
AdviceExtractor, AdviceInputs, AdviceMap, AdviceProvider, AdviceSource, MemAdviceProvider,
RecAdviceProvider,
},
DefaultHost, Host, HostResponse,
};
mod chiplets;
use chiplets::Chiplets;
mod trace;
use trace::TraceFragment;
pub use trace::{ChipletsLengths, ExecutionTrace, TraceLenSummary};
mod errors;
pub use errors::{ExecutionError, Ext2InttError};
pub mod utils;
mod debug;
pub use debug::{AsmOpInfo, VmState, VmStateIterator};
/// Re-exports of the field and math primitives used by the VM.
pub mod math {
    pub use vm_core::{Felt, FieldElement, StarkField};
    pub use winter_prover::math::fft;
}
/// Re-exports of the hash functions, Merkle data structures, and random coins
/// used by the VM.
pub mod crypto {
    pub use vm_core::crypto::{
        hash::{Blake3_192, Blake3_256, ElementHasher, Hasher, Rpo256, RpoDigest},
        merkle::{
            MerkleError, MerklePath, MerkleStore, MerkleTree, NodeIndex, PartialMerkleTree,
            SimpleSmt,
        },
        random::{RandomCoin, RpoRandomCoin, WinterRandomCoin},
    };
}
// Quadratic extension of the base field element type.
type QuadFelt = QuadExtension<Felt>;
// System component trace: one column vector of field elements per system trace column.
type SysTrace = [Vec<Felt>; SYS_TRACE_WIDTH];
/// Trace columns produced by the decoder component, together with the builder used to
/// construct the decoder's auxiliary trace columns.
pub struct DecoderTrace {
    trace: [Vec<Felt>; DECODER_TRACE_WIDTH],
    aux_builder: decoder::AuxTraceBuilder,
}
/// Trace columns produced by the stack component, together with the builder used to
/// construct the stack's auxiliary trace columns.
pub struct StackTrace {
    trace: [Vec<Felt>; STACK_TRACE_WIDTH],
    aux_builder: stack::AuxTraceBuilder,
}
/// Trace columns produced by the range checker component, together with the builder used
/// to construct the range checker's auxiliary trace columns.
pub struct RangeCheckTrace {
    trace: [Vec<Felt>; RANGE_CHECK_TRACE_WIDTH],
    aux_builder: range::AuxTraceBuilder,
}
/// Trace columns produced by the chiplets component, together with the builder used to
/// construct the chiplets' auxiliary trace columns.
pub struct ChipletsTrace {
    trace: [Vec<Felt>; CHIPLETS_WIDTH],
    aux_builder: chiplets::AuxTraceBuilder,
}
/// Executes the provided program against the given stack inputs and host, and returns
/// the resulting execution trace.
///
/// The returned trace is built from the final process state; as a sanity check, the
/// program hash recorded in the trace is asserted to match the hash of the provided
/// program.
///
/// # Errors
/// Returns an error if the program fails to execute.
#[tracing::instrument("execute_program", skip_all)]
pub fn execute<H>(
    program: &Program,
    stack_inputs: StackInputs,
    host: H,
    options: ExecutionOptions,
) -> Result<ExecutionTrace, ExecutionError>
where
    H: Host,
{
    let mut proc = Process::new(program.kernel().clone(), stack_inputs, host, options);
    let outputs = proc.execute(program)?;
    let execution_trace = ExecutionTrace::new(proc, outputs);
    assert_eq!(&program.hash(), execution_trace.program_hash(), "inconsistent program hash");
    Ok(execution_trace)
}
/// Executes the provided program in debug mode (tracing and debugging enabled) and
/// returns an iterator over the VM states produced at each step of the execution.
///
/// If execution succeeded, the program hash recorded by the decoder is asserted to
/// match the hash of the provided program.
pub fn execute_iter<H>(program: &Program, stack_inputs: StackInputs, host: H) -> VmStateIterator
where
    H: Host,
{
    let mut proc = Process::new_debug(program.kernel().clone(), stack_inputs, host);
    let exec_result = proc.execute(program);
    match &exec_result {
        Ok(_) => {
            assert_eq!(
                program.hash(),
                proc.decoder.program_hash().into(),
                "inconsistent program hash"
            );
        }
        Err(_) => (),
    }
    VmStateIterator::new(proc, exec_result)
}
/// The state of a program execution: all VM components plus the host and execution limits.
///
/// This is the production variant with private fields; when compiled for tests or with
/// the `internals` feature, an identical struct with public fields is used instead.
#[cfg(not(any(test, feature = "internals")))]
struct Process<H>
where
    H: Host,
{
    system: System,
    decoder: Decoder,
    stack: Stack,
    range: RangeChecker,
    chiplets: Chiplets,
    // RefCell allows decorator handlers to borrow the host mutably while `&self` is
    // passed to them as the process state (see `execute_decorator`)
    host: RefCell<H>,
    // maximum number of cycles the program may run (from ExecutionOptions); enforcement
    // is not visible in this file
    max_cycles: u32,
    // when false, Trace decorators are skipped during execution
    enable_tracing: bool,
}
impl<H> Process<H>
where
    H: Host,
{
    // CONSTRUCTORS
    // --------------------------------------------------------------------------------------------

    /// Creates a new process with the provided kernel, stack inputs, host, and execution
    /// options.
    pub fn new(
        kernel: Kernel,
        stack_inputs: StackInputs,
        host: H,
        execution_options: ExecutionOptions,
    ) -> Self {
        Self::initialize(kernel, stack_inputs, host, execution_options)
    }

    /// Creates a new process in debug mode: default execution options with both tracing
    /// and debugging enabled.
    pub fn new_debug(kernel: Kernel, stack_inputs: StackInputs, host: H) -> Self {
        Self::initialize(
            kernel,
            stack_inputs,
            host,
            ExecutionOptions::default().with_tracing().with_debugging(),
        )
    }

    /// Common constructor: instantiates all VM components. The system and stack traces
    /// are pre-sized to the expected cycle count from the execution options.
    fn initialize(
        kernel: Kernel,
        stack: StackInputs,
        host: H,
        execution_options: ExecutionOptions,
    ) -> Self {
        let in_debug_mode = execution_options.enable_debugging();
        Self {
            system: System::new(execution_options.expected_cycles() as usize),
            decoder: Decoder::new(in_debug_mode),
            stack: Stack::new(&stack, execution_options.expected_cycles() as usize, in_debug_mode),
            range: RangeChecker::new(),
            chiplets: Chiplets::new(kernel),
            host: RefCell::new(host),
            max_cycles: execution_options.max_cycles(),
            enable_tracing: execution_options.enable_tracing(),
        }
    }

    // PROGRAM EXECUTION
    // --------------------------------------------------------------------------------------------

    /// Executes the provided program starting from its root code block and returns the
    /// resulting stack outputs.
    ///
    /// # Panics
    /// Panics if this process has already executed a program (i.e., the clock is not 0).
    pub fn execute(&mut self, program: &Program) -> Result<StackOutputs, ExecutionError> {
        assert_eq!(self.system.clk(), 0, "a program has already been executed in this process");
        self.execute_code_block(program.root(), program.cb_table())?;
        Ok(self.stack.build_stack_outputs())
    }

    // CODE BLOCK EXECUTORS
    // --------------------------------------------------------------------------------------------

    /// Executes the specified code block by dispatching on its variant.
    ///
    /// # Errors
    /// Returns an [ExecutionError::UnexecutableCodeBlock] if the block is a proxy block,
    /// or propagates any error raised while executing the block.
    fn execute_code_block(
        &mut self,
        block: &CodeBlock,
        cb_table: &CodeBlockTable,
    ) -> Result<(), ExecutionError> {
        match block {
            CodeBlock::Join(block) => self.execute_join_block(block, cb_table),
            CodeBlock::Split(block) => self.execute_split_block(block, cb_table),
            CodeBlock::Loop(block) => self.execute_loop_block(block, cb_table),
            CodeBlock::Call(block) => self.execute_call_block(block, cb_table),
            CodeBlock::Dyn(block) => self.execute_dyn_block(block, cb_table),
            CodeBlock::Span(block) => self.execute_span_block(block),
            CodeBlock::Proxy(_) => Err(ExecutionError::UnexecutableCodeBlock(block.clone())),
        }
    }

    /// Executes a JOIN block: its first child followed by its second child.
    #[inline(always)]
    fn execute_join_block(
        &mut self,
        block: &Join,
        cb_table: &CodeBlockTable,
    ) -> Result<(), ExecutionError> {
        self.start_join_block(block)?;
        self.execute_code_block(block.first(), cb_table)?;
        self.execute_code_block(block.second(), cb_table)?;
        self.end_join_block(block)
    }

    /// Executes a SPLIT block: the true branch if the condition (read when the block is
    /// started) is ONE, the false branch if it is ZERO.
    ///
    /// # Errors
    /// Returns a [ExecutionError::NotBinaryValue] if the condition is neither ONE nor ZERO.
    #[inline(always)]
    fn execute_split_block(
        &mut self,
        block: &Split,
        cb_table: &CodeBlockTable,
    ) -> Result<(), ExecutionError> {
        let condition = self.start_split_block(block)?;
        if condition == ONE {
            self.execute_code_block(block.on_true(), cb_table)?;
        } else if condition == ZERO {
            self.execute_code_block(block.on_false(), cb_table)?;
        } else {
            return Err(ExecutionError::NotBinaryValue(condition));
        }
        self.end_split_block(block)
    }

    /// Executes a LOOP block: if the entry condition is ONE, runs the loop body, then
    /// keeps repeating it while the top of the stack is ONE (dropping the condition
    /// before each repetition); if the entry condition is ZERO, the body is skipped.
    ///
    /// The flag passed to `end_loop_block` records whether the loop body was entered.
    ///
    /// # Errors
    /// Returns a [ExecutionError::NotBinaryValue] if the entry condition is neither ONE
    /// nor ZERO.
    #[inline(always)]
    fn execute_loop_block(
        &mut self,
        block: &Loop,
        cb_table: &CodeBlockTable,
    ) -> Result<(), ExecutionError> {
        let condition = self.start_loop_block(block)?;
        if condition == ONE {
            self.execute_code_block(block.body(), cb_table)?;
            while self.stack.peek() == ONE {
                self.decoder.repeat();
                self.execute_op(Operation::Drop)?;
                self.execute_code_block(block.body(), cb_table)?;
            }
            self.end_loop_block(block, true)
        } else if condition == ZERO {
            self.end_loop_block(block, false)
        } else {
            Err(ExecutionError::NotBinaryValue(condition))
        }
    }

    /// Executes a CALL or SYSCALL block. For a syscall, the called procedure is first
    /// recorded as a kernel procedure access. If the call target is the DYN hash, the
    /// call is executed as a dynamic block; otherwise the callee body is looked up in
    /// the code block table by its hash.
    ///
    /// # Errors
    /// Returns a [ExecutionError::CodeBlockNotFound] if the callee body is not in the
    /// code block table.
    #[inline(always)]
    fn execute_call_block(
        &mut self,
        block: &Call,
        cb_table: &CodeBlockTable,
    ) -> Result<(), ExecutionError> {
        if block.is_syscall() {
            self.chiplets.access_kernel_proc(block.fn_hash())?;
        }
        self.start_call_block(block)?;
        if block.fn_hash() == Dyn::dyn_hash() {
            self.execute_dyn_block(&Dyn::new(), cb_table)?;
        } else {
            let fn_body = cb_table
                .get(block.fn_hash())
                .ok_or_else(|| ExecutionError::CodeBlockNotFound(block.fn_hash()))?;
            self.execute_code_block(fn_body, cb_table)?;
        }
        self.end_call_block(block)
    }

    /// Executes a DYN block: the hash of the code to execute is read from the top word
    /// of the stack, and the corresponding body is looked up in the code block table.
    ///
    /// # Errors
    /// Returns a [ExecutionError::DynamicCodeBlockNotFound] if no block with the given
    /// hash is in the code block table.
    #[inline(always)]
    fn execute_dyn_block(
        &mut self,
        block: &Dyn,
        cb_table: &CodeBlockTable,
    ) -> Result<(), ExecutionError> {
        let dyn_hash = self.stack.get_word(0);
        self.start_dyn_block(block, dyn_hash)?;
        let dyn_digest = dyn_hash.into();
        let dyn_code = cb_table
            .get(dyn_digest)
            .ok_or_else(|| ExecutionError::DynamicCodeBlockNotFound(dyn_digest))?;
        self.execute_code_block(dyn_code, cb_table)?;
        self.end_dyn_block(block)
    }

    /// Executes a SPAN block: the first op batch is executed directly; each subsequent
    /// batch is preceded by a RESPAN (plus a NOOP). `op_offset` tracks the absolute
    /// index of the first op of each batch so decorators can be matched to positions.
    /// Any decorators remaining after the last operation are executed once the block
    /// has ended.
    #[inline(always)]
    fn execute_span_block(&mut self, block: &Span) -> Result<(), ExecutionError> {
        self.start_span_block(block)?;
        let mut op_offset = 0;
        let mut decorators = block.decorator_iter();
        self.execute_op_batch(&block.op_batches()[0], &mut decorators, op_offset)?;
        op_offset += block.op_batches()[0].ops().len();
        for op_batch in block.op_batches().iter().skip(1) {
            self.respan(op_batch);
            self.execute_op(Operation::Noop)?;
            self.execute_op_batch(op_batch, &mut decorators, op_offset)?;
            op_offset += op_batch.ops().len();
        }
        self.end_span_block(block)?;
        for decorator in decorators {
            self.execute_decorator(decorator)?;
        }
        Ok(())
    }

    /// Executes all operations in an op batch, padding the number of executed op groups
    /// out to the batch's group count rounded up to the next power of two.
    #[inline(always)]
    fn execute_op_batch(
        &mut self,
        batch: &OpBatch,
        decorators: &mut DecoratorIterator,
        op_offset: usize,
    ) -> Result<(), ExecutionError> {
        let op_counts = batch.op_counts();
        // position of the current op within its group
        let mut op_idx = 0;
        // index of the group currently being executed
        let mut group_idx = 0;
        // index of the next group to execute; immediate values occupy group slots, so
        // this can advance by more than one per group
        let mut next_group_idx = 1;
        // total number of groups the decoder must process for this batch
        let num_batch_groups = batch.num_groups().next_power_of_two();
        for (i, &op) in batch.ops().iter().enumerate() {
            // first, run all decorators scheduled at this absolute op position
            while let Some(decorator) = decorators.next_filtered(i + op_offset) {
                self.execute_decorator(decorator)?;
            }
            self.decoder.execute_user_op(op, op_idx);
            self.execute_op(op)?;
            // an op with an immediate value consumes an extra group slot
            let has_imm = op.imm_value().is_some();
            if has_imm {
                next_group_idx += 1;
            }
            // check if this op was the last one in its group
            if op_idx == op_counts[group_idx] - 1 {
                if has_imm {
                    // an op with an immediate value cannot sit in the last slot of a
                    // group, so the group is padded with a NOOP
                    debug_assert!(op_idx < OP_GROUP_SIZE - 1, "invalid op index");
                    self.decoder.execute_user_op(Operation::Noop, op_idx + 1);
                    self.execute_op(Operation::Noop)?;
                }
                // move to the next group, skipping over immediate-value slots
                group_idx = next_group_idx;
                next_group_idx += 1;
                op_idx = 0;
                if group_idx < num_batch_groups {
                    self.decoder.start_op_group(batch.groups()[group_idx]);
                }
            } else {
                op_idx += 1;
            }
        }
        // pad out any remaining groups (up to the power-of-two total) with NOOPs; a new
        // (empty) op group is started before every padded group except the last
        for group_idx in group_idx..num_batch_groups {
            self.decoder.execute_user_op(Operation::Noop, 0);
            self.execute_op(Operation::Noop)?;
            if group_idx < num_batch_groups - 1 {
                self.decoder.start_op_group(ZERO);
            }
        }
        Ok(())
    }

    /// Executes a single decorator by dispatching to the host (Advice, Debug, Event,
    /// Trace) or to the decoder (AsmOp). AsmOp decorators are recorded only in debug
    /// mode, and Trace decorators are forwarded only when tracing is enabled.
    fn execute_decorator(&mut self, decorator: &Decorator) -> Result<(), ExecutionError> {
        match decorator {
            Decorator::Advice(injector) => {
                self.host.borrow_mut().set_advice(self, *injector)?;
            }
            Decorator::Debug(options) => {
                self.host.borrow_mut().on_debug(self, options)?;
            }
            Decorator::AsmOp(assembly_op) => {
                if self.decoder.in_debug_mode() {
                    self.decoder.append_asmop(self.system.clk(), assembly_op.clone());
                }
            }
            Decorator::Event(id) => {
                self.host.borrow_mut().on_event(self, *id)?;
            }
            Decorator::Trace(id) => {
                if self.enable_tracing {
                    self.host.borrow_mut().on_trace(self, *id)?;
                }
            }
        }
        Ok(())
    }

    // PUBLIC ACCESSORS
    // --------------------------------------------------------------------------------------------

    /// Returns the kernel this process was instantiated with.
    pub const fn kernel(&self) -> &Kernel {
        self.chiplets.kernel()
    }

    /// Consumes the process and returns its components, including the host.
    pub fn into_parts(self) -> (System, Decoder, Stack, RangeChecker, Chiplets, H) {
        (
            self.system,
            self.decoder,
            self.stack,
            self.range,
            self.chiplets,
            self.host.into_inner(),
        )
    }
}
/// A read-only view into the state of a process, as exposed to hosts and debug tooling.
pub trait ProcessState {
    /// Returns the current clock cycle of the process.
    fn clk(&self) -> u32;
    /// Returns the current execution context ID.
    fn ctx(&self) -> ContextId;
    /// Returns the current value of the free memory pointer as an integer.
    fn fmp(&self) -> u64;
    /// Returns the stack element at the specified position.
    fn get_stack_item(&self, pos: usize) -> Felt;
    /// Returns the stack word at the specified word index.
    fn get_stack_word(&self, word_idx: usize) -> Word;
    /// Returns the full state of the stack at the current clock cycle.
    fn get_stack_state(&self) -> Vec<Felt>;
    /// Returns the memory word at the specified address in the specified context, or
    /// `None` (presumably when no value was stored at that address — see the memory
    /// chiplet for the exact semantics).
    fn get_mem_value(&self, ctx: ContextId, addr: u32) -> Option<Word>;
    /// Returns the memory state of the specified context at the current clock cycle as
    /// (address, word) pairs.
    fn get_mem_state(&self, ctx: ContextId) -> Vec<(u64, Word)>;
}
// Straight delegation of the ProcessState view onto the process components: system for
// clock/context/fmp, stack for stack queries, chiplets for memory queries.
impl<H: Host> ProcessState for Process<H> {
    fn clk(&self) -> u32 {
        self.system.clk()
    }

    fn ctx(&self) -> ContextId {
        self.system.ctx()
    }

    fn fmp(&self) -> u64 {
        self.system.fmp().as_int()
    }

    fn get_stack_item(&self, pos: usize) -> Felt {
        self.stack.get(pos)
    }

    fn get_stack_word(&self, word_idx: usize) -> Word {
        self.stack.get_word(word_idx)
    }

    fn get_stack_state(&self) -> Vec<Felt> {
        self.stack.get_state_at(self.system.clk())
    }

    fn get_mem_value(&self, ctx: ContextId, addr: u32) -> Option<Word> {
        self.chiplets.get_mem_value(ctx, addr)
    }

    fn get_mem_state(&self, ctx: ContextId) -> Vec<(u64, Word)> {
        self.chiplets.get_mem_state_at(ctx, self.system.clk())
    }
}
/// The state of a program execution: all VM components plus the host and execution limits.
///
/// This variant is compiled for tests or with the `internals` feature; it is identical
/// to the production struct except that it and all of its fields are public, so tests
/// and internal tooling can inspect component state directly.
#[cfg(any(test, feature = "internals"))]
pub struct Process<H>
where
    H: Host,
{
    pub system: System,
    pub decoder: Decoder,
    pub stack: Stack,
    pub range: RangeChecker,
    pub chiplets: Chiplets,
    pub host: RefCell<H>,
    pub max_cycles: u32,
    pub enable_tracing: bool,
}