#[cfg(test)]
use alloc::rc::Rc;
use alloc::{boxed::Box, sync::Arc, vec, vec::Vec};
#[cfg(test)]
use core::cell::Cell;
use core::{cmp::min, ops::ControlFlow};
use miden_air::{Felt, trace::RowIndex};
use miden_core::{
EMPTY_WORD, WORD_SIZE, Word, ZERO,
mast::{MastForest, MastNodeExt, MastNodeId},
operations::Decorator,
precompile::PrecompileTranscript,
program::{MIN_STACK_DEPTH, Program, StackInputs, StackOutputs},
utils::range,
};
use crate::{
AdviceInputs, AdviceProvider, BaseHost, ContextId, ExecutionError, ExecutionOptions,
ProcessorState,
advice::AdviceError,
continuation_stack::{Continuation, ContinuationStack},
errors::MapExecErrNoCtx,
tracer::{OperationHelperRegisters, Tracer},
};
mod basic_block;
mod call_and_dyn;
mod execution_api;
mod external;
mod memory;
mod operation;
mod step;
pub use basic_block::SystemEventError;
pub use memory::Memory;
pub use step::{BreakReason, ResumeContext};
#[cfg(test)]
mod tests;
// Length (in field elements) of the stack buffer allocated at processor creation.
const INITIAL_STACK_BUFFER_SIZE: usize = 6850;
// Index (one past the top element) at which the operand stack initially sits inside the
// buffer. Keeping the stack away from index 0 leaves headroom below `stack_bot_idx` so the
// stack can shrink (e.g. on drops) without immediately hitting the buffer's lower edge.
const INITIAL_STACK_TOP_IDX: usize = 250;
// Maximum stack depth representable without growing the buffer beyond its initial size:
// the space above the initial top, plus the always-present top `MIN_STACK_DEPTH` elements.
const DEFAULT_MAX_STACK_DEPTH: usize =
    INITIAL_STACK_BUFFER_SIZE - INITIAL_STACK_TOP_IDX - 1 + MIN_STACK_DEPTH;
// Compile-time assertion: if `ExecutionOptions::DEFAULT_MAX_STACK_DEPTH` ever diverges from
// the value derived above, the right-hand array has length 0 and this fails to compile.
const _: [(); 1] =
    [(); (ExecutionOptions::DEFAULT_MAX_STACK_DEPTH == DEFAULT_MAX_STACK_DEPTH) as usize];
// Buffer index at which the *bottom* of a freshly (re)centered stack is placed; used when
// the buffer is regrown or the stack is reset within the buffer.
const STACK_BUFFER_BASE_IDX: usize = INITIAL_STACK_TOP_IDX - MIN_STACK_DEPTH;
/// State of the fast processor: the operand stack (stored in a growable flat buffer),
/// memory, advice provider, execution context bookkeeping, and execution options.
#[derive(Debug)]
pub struct FastProcessor {
    // Backing buffer for the operand stack. Slots outside
    // `stack_bot_idx..stack_top_idx` are kept at ZERO (the buffer is zero-initialized,
    // and reset/grow operations re-zero vacated regions).
    stack: Box<[Felt]>,
    // Index one past the current top-of-stack element (stack index 0 lives at
    // `stack_top_idx - 1`).
    stack_top_idx: usize,
    // Index of the bottom-most live stack element; the live stack is
    // `stack[stack_bot_idx..stack_top_idx]`.
    stack_bot_idx: usize,
    // Current clock cycle (trace row index); starts at 0.
    clk: RowIndex,
    // Current execution context id; starts at 0 (root context).
    ctx: ContextId,
    // Hash word identifying the caller of the current context; EMPTY_WORD initially.
    // NOTE(review): presumably updated by call/syscall handling in `call_and_dyn` —
    // confirm against that module.
    caller_hash: Word,
    // Provider of non-deterministic (advice) inputs.
    advice: AdviceProvider,
    // Linear memory of the processor.
    memory: Memory,
    // Saved execution-context snapshots; pushed/popped around context switches.
    // NOTE(review): restore logic lives outside this chunk — confirm in `call_and_dyn`.
    call_stack: Vec<ExecutionContextInfo>,
    // Execution options (debugging/tracing flags, stack-depth and advice limits).
    options: ExecutionOptions,
    // Transcript of precompile invocations, returned as part of the execution output.
    pc_transcript: PrecompileTranscript,
    // Test-only counter of how many times node decorator lists were retrieved; used to
    // verify that decorator retrieval is skipped when debugging/tracing are disabled.
    #[cfg(test)]
    pub decorator_retrieval_count: Rc<Cell<usize>>,
}
impl FastProcessor {
#[inline(always)]
fn into_execution_output(self, stack: StackOutputs) -> ExecutionOutput {
ExecutionOutput {
stack,
advice: self.advice,
memory: self.memory,
final_precompile_transcript: self.pc_transcript,
}
}
#[inline(always)]
fn execution_result_from_flow(
flow: ControlFlow<BreakReason, StackOutputs>,
processor: Self,
) -> Result<ExecutionOutput, ExecutionError> {
match flow {
ControlFlow::Continue(stack_outputs) => {
Ok(processor.into_execution_output(stack_outputs))
},
ControlFlow::Break(break_reason) => match break_reason {
BreakReason::Err(err) => Err(err),
BreakReason::Stopped(_) => {
unreachable!("Execution never stops prematurely with NeverStopper")
},
},
}
}
#[cfg(any(test, feature = "testing"))]
#[inline(always)]
fn stack_result_from_flow(
flow: ControlFlow<BreakReason, StackOutputs>,
) -> Result<StackOutputs, ExecutionError> {
match flow {
ControlFlow::Continue(stack_outputs) => Ok(stack_outputs),
ControlFlow::Break(break_reason) => match break_reason {
BreakReason::Err(err) => Err(err),
BreakReason::Stopped(_) => {
unreachable!("Execution never stops prematurely with NeverStopper")
},
},
}
}
pub fn new(stack_inputs: StackInputs) -> Self {
Self::new_with_options(stack_inputs, AdviceInputs::default(), ExecutionOptions::default())
.expect("empty advice inputs should fit default advice map limits")
}
pub fn with_advice(mut self, advice_inputs: AdviceInputs) -> Result<Self, AdviceError> {
self.advice = AdviceProvider::new(advice_inputs, &self.options)?;
Ok(self)
}
pub fn with_options(mut self, options: ExecutionOptions) -> Result<Self, AdviceError> {
self.advice.set_options(&options)?;
self.options = options;
Ok(self)
}
pub fn with_debugging(mut self, enabled: bool) -> Self {
self.options = self.options.with_debugging(enabled);
self
}
pub fn with_tracing(mut self, enabled: bool) -> Self {
self.options = self.options.with_tracing(enabled);
self
}
pub fn new_with_options(
stack_inputs: StackInputs,
advice_inputs: AdviceInputs,
options: ExecutionOptions,
) -> Result<Self, AdviceError> {
let stack_top_idx = INITIAL_STACK_TOP_IDX;
let stack = {
let mut stack = vec![ZERO; INITIAL_STACK_BUFFER_SIZE].into_boxed_slice();
for (i, &input) in stack_inputs.iter().enumerate() {
stack[stack_top_idx - 1 - i] = input;
}
stack
};
Ok(Self {
advice: AdviceProvider::new(advice_inputs, &options)?,
stack,
stack_top_idx,
stack_bot_idx: stack_top_idx - MIN_STACK_DEPTH,
clk: 0_u32.into(),
ctx: 0_u32.into(),
caller_hash: EMPTY_WORD,
memory: Memory::new(),
call_stack: Vec::new(),
options,
pc_transcript: PrecompileTranscript::new(),
#[cfg(test)]
decorator_retrieval_count: Rc::new(Cell::new(0)),
})
}
pub fn get_initial_resume_context(
&mut self,
program: &Program,
) -> Result<ResumeContext, ExecutionError> {
self.advice
.extend_map(program.mast_forest().advice_map())
.map_exec_err_no_ctx()?;
Ok(ResumeContext {
current_forest: program.mast_forest().clone(),
continuation_stack: ContinuationStack::new(program),
kernel: program.kernel().clone(),
})
}
#[inline(always)]
pub fn in_debug_mode(&self) -> bool {
self.options.enable_debugging()
}
#[inline(always)]
fn should_execute_decorators(&self) -> bool {
self.in_debug_mode() || self.options.enable_tracing()
}
#[cfg(test)]
#[inline(always)]
fn record_decorator_retrieval(&self) {
self.decorator_retrieval_count.set(self.decorator_retrieval_count.get() + 1);
}
#[inline(always)]
fn stack_size(&self) -> usize {
self.stack_top_idx - self.stack_bot_idx
}
pub fn stack(&self) -> &[Felt] {
&self.stack[self.stack_bot_idx..self.stack_top_idx]
}
pub fn stack_top(&self) -> &[Felt] {
&self.stack[self.stack_top_idx - MIN_STACK_DEPTH..self.stack_top_idx]
}
pub fn stack_top_mut(&mut self) -> &mut [Felt] {
&mut self.stack[self.stack_top_idx - MIN_STACK_DEPTH..self.stack_top_idx]
}
#[inline(always)]
pub fn stack_get(&self, idx: usize) -> Felt {
self.stack[self.stack_top_idx - idx - 1]
}
#[inline(always)]
pub fn stack_get_safe(&self, idx: usize) -> Felt {
if idx < self.stack_top_idx {
self.stack[self.stack_top_idx - idx - 1]
} else {
ZERO
}
}
#[inline(always)]
pub fn stack_get_mut(&mut self, idx: usize) -> &mut Felt {
&mut self.stack[self.stack_top_idx - idx - 1]
}
#[inline(always)]
pub fn stack_get_word(&self, start_idx: usize) -> Word {
debug_assert!(
start_idx + WORD_SIZE <= self.stack_depth() as usize,
"Not enough elements on stack to read word starting at index {start_idx}"
);
let word_start_idx = self.stack_top_idx - start_idx - WORD_SIZE;
let mut result: [Felt; WORD_SIZE] =
self.stack[range(word_start_idx, WORD_SIZE)].try_into().unwrap();
result.reverse();
result.into()
}
#[inline(always)]
pub fn stack_get_word_safe(&self, start_idx: usize) -> Word {
let buf_end = self.stack_top_idx.saturating_sub(start_idx);
let buf_start = self.stack_top_idx.saturating_sub(start_idx.saturating_add(WORD_SIZE));
let num_elements_to_read_from_buf = buf_end - buf_start;
let mut result = [ZERO; WORD_SIZE];
if num_elements_to_read_from_buf == WORD_SIZE {
result.copy_from_slice(&self.stack[range(buf_start, WORD_SIZE)]);
} else if num_elements_to_read_from_buf > 0 {
let offset = WORD_SIZE - num_elements_to_read_from_buf;
result[offset..]
.copy_from_slice(&self.stack[range(buf_start, num_elements_to_read_from_buf)]);
}
result.reverse();
result.into()
}
#[inline(always)]
pub fn stack_depth(&self) -> u32 {
(self.stack_top_idx - self.stack_bot_idx) as u32
}
pub fn memory(&self) -> &Memory {
&self.memory
}
pub fn into_parts(self) -> (AdviceProvider, Memory, PrecompileTranscript) {
(self.advice, self.memory, self.pc_transcript)
}
pub fn execution_options(&self) -> &ExecutionOptions {
&self.options
}
#[inline(always)]
pub fn state(&self) -> ProcessorState<'_> {
ProcessorState { processor: self }
}
#[inline(always)]
pub fn stack_write(&mut self, idx: usize, element: Felt) {
self.stack[self.stack_top_idx - idx - 1] = element
}
#[inline(always)]
pub fn stack_write_word(&mut self, start_idx: usize, word: &Word) {
debug_assert!(start_idx <= MIN_STACK_DEPTH - WORD_SIZE);
let word_start_idx = self.stack_top_idx - start_idx - 4;
let mut source: [Felt; WORD_SIZE] = (*word).into();
source.reverse();
self.stack[range(word_start_idx, WORD_SIZE)].copy_from_slice(&source)
}
#[inline(always)]
pub fn stack_swap(&mut self, idx1: usize, idx2: usize) {
let a = self.stack_get(idx1);
let b = self.stack_get(idx2);
self.stack_write(idx1, b);
self.stack_write(idx2, a);
}
fn execute_before_enter_decorators(
&self,
node_id: MastNodeId,
current_forest: &MastForest,
host: &mut impl BaseHost,
) -> ControlFlow<BreakReason> {
if !self.should_execute_decorators() {
return ControlFlow::Continue(());
}
#[cfg(test)]
self.record_decorator_retrieval();
let node = current_forest
.get_node_by_id(node_id)
.expect("internal error: node id {node_id} not found in current forest");
for &decorator_id in node.before_enter(current_forest) {
self.execute_decorator(¤t_forest[decorator_id], host)?;
}
ControlFlow::Continue(())
}
fn execute_after_exit_decorators(
&self,
node_id: MastNodeId,
current_forest: &MastForest,
host: &mut impl BaseHost,
) -> ControlFlow<BreakReason> {
if !self.should_execute_decorators() {
return ControlFlow::Continue(());
}
#[cfg(test)]
self.record_decorator_retrieval();
let node = current_forest
.get_node_by_id(node_id)
.expect("internal error: node id {node_id} not found in current forest");
for &decorator_id in node.after_exit(current_forest) {
self.execute_decorator(¤t_forest[decorator_id], host)?;
}
ControlFlow::Continue(())
}
fn execute_decorator(
&self,
decorator: &Decorator,
host: &mut impl BaseHost,
) -> ControlFlow<BreakReason> {
match decorator {
Decorator::Debug(options) => {
if self.in_debug_mode() {
let processor_state = self.state();
if let Err(err) = host.on_debug(&processor_state, options) {
return ControlFlow::Break(BreakReason::Err(
crate::errors::HostError::DebugHandlerError { err }.into(),
));
}
}
},
Decorator::Trace(id) => {
if self.options.enable_tracing() {
let processor_state = self.state();
if let Err(err) = host.on_trace(&processor_state, *id) {
return ControlFlow::Break(BreakReason::Err(
crate::errors::HostError::TraceHandlerError { trace_id: *id, err }
.into(),
));
}
}
},
};
ControlFlow::Continue(())
}
#[inline(always)]
fn increment_stack_size(&mut self) {
self.stack_top_idx += 1;
}
#[inline(always)]
fn ensure_stack_capacity_for_push(&mut self) -> Result<(), ExecutionError> {
let depth = self.stack_size() + 1;
let max = self.options.max_stack_depth();
if depth > max {
return Err(ExecutionError::StackDepthLimitExceeded { depth, max });
}
if self.stack_top_idx >= self.stack.len() - 1 {
self.grow_stack_buffer(self.stack_top_idx + 2);
}
Ok(())
}
fn ensure_stack_capacity_for_top_idx(&mut self, top_idx: usize) {
if top_idx >= self.stack.len() {
self.grow_stack_buffer(top_idx + 1);
}
}
fn grow_stack_buffer(&mut self, requested_min_len: usize) {
let max_len = STACK_BUFFER_BASE_IDX
.saturating_add(self.options.max_stack_depth())
.saturating_add(1);
let live_len = self.stack_size();
let recentered_min_len = STACK_BUFFER_BASE_IDX.saturating_add(live_len).saturating_add(2);
debug_assert!(recentered_min_len <= max_len);
let new_len = recentered_min_len.saturating_mul(2).max(requested_min_len).min(max_len);
debug_assert!(new_len <= max_len);
let mut new_stack = vec![ZERO; new_len].into_boxed_slice();
let new_stack_bot_idx = STACK_BUFFER_BASE_IDX;
let new_stack_top_idx = new_stack_bot_idx + live_len;
new_stack[new_stack_bot_idx..new_stack_top_idx]
.copy_from_slice(&self.stack[self.stack_bot_idx..self.stack_top_idx]);
self.stack = new_stack;
self.stack_bot_idx = new_stack_bot_idx;
self.stack_top_idx = new_stack_top_idx;
}
#[inline(always)]
fn decrement_stack_size(&mut self) {
if self.stack_top_idx == MIN_STACK_DEPTH {
self.reset_stack_in_buffer(INITIAL_STACK_TOP_IDX);
}
self.stack_top_idx -= 1;
self.stack_bot_idx = min(self.stack_bot_idx, self.stack_top_idx - MIN_STACK_DEPTH);
}
#[inline(always)]
fn reset_stack_in_buffer(&mut self, new_stack_top_idx: usize) {
debug_assert_eq!(self.stack_depth(), MIN_STACK_DEPTH as u32);
let new_stack_bot_idx = new_stack_top_idx - MIN_STACK_DEPTH;
self.stack
.copy_within(self.stack_bot_idx..self.stack_top_idx, new_stack_bot_idx);
self.stack[0..new_stack_bot_idx].fill(ZERO);
self.stack_bot_idx = new_stack_bot_idx;
self.stack_top_idx = new_stack_top_idx;
}
}
/// Final state produced by a completed execution: the stack outputs plus the advice
/// provider, memory, and precompile transcript as they stood at the end of the run.
#[derive(Debug)]
pub struct ExecutionOutput {
    // Final contents of the operand stack.
    pub stack: StackOutputs,
    // Advice provider in its final state.
    pub advice: AdviceProvider,
    // Processor memory in its final state.
    pub memory: Memory,
    // Transcript of precompile invocations accumulated during execution.
    pub final_precompile_transcript: PrecompileTranscript,
}
/// Snapshot of an execution context saved on the call stack.
// NOTE(review): push/pop sites live in other modules (e.g. `call_and_dyn`) — the exact
// save/restore protocol should be confirmed there.
#[derive(Debug)]
struct ExecutionContextInfo {
    // Stack elements beyond the top MIN_STACK_DEPTH saved for the suspended context.
    overflow_stack: Vec<Felt>,
    // Context id of the suspended context.
    ctx: ContextId,
    // Hash word of the function associated with the suspended context.
    fn_hash: Word,
}
/// A [`Tracer`] implementation that does nothing; used when no per-cycle tracing is
/// needed. Both hooks are empty and `#[inline(always)]` so they compile away entirely.
pub struct NoopTracer;
impl Tracer for NoopTracer {
    type Processor = FastProcessor;

    /// No-op: ignores the start of a clock cycle.
    #[inline(always)]
    fn start_clock_cycle(
        &mut self,
        _processor: &FastProcessor,
        _continuation: Continuation,
        _continuation_stack: &ContinuationStack,
        _current_forest: &Arc<MastForest>,
    ) {
    }

    /// No-op: ignores the end of a clock cycle.
    #[inline(always)]
    fn finalize_clock_cycle(
        &mut self,
        _processor: &FastProcessor,
        _op_helper_registers: OperationHelperRegisters,
        _current_forest: &Arc<MastForest>,
    ) {
    }
}