#[cfg(test)]
use alloc::rc::Rc;
use alloc::{boxed::Box, sync::Arc, vec::Vec};
#[cfg(test)]
use core::cell::Cell;
use core::{cmp::min, ops::ControlFlow};
use miden_air::{Felt, trace::RowIndex};
use miden_core::{
EMPTY_WORD, WORD_SIZE, Word, ZERO,
mast::{MastForest, MastNodeExt, MastNodeId},
operations::Decorator,
precompile::PrecompileTranscript,
program::{Kernel, MIN_STACK_DEPTH, Program, StackInputs, StackOutputs},
utils::range,
};
use tracing::instrument;
use crate::{
AdviceInputs, AdviceProvider, ContextId, ExecutionError, ExecutionOptions, Host,
ProcessorState, Stopper,
continuation_stack::{Continuation, ContinuationStack},
errors::{MapExecErr, MapExecErrNoCtx, OperationError},
execution::{
InternalBreakReason, execute_impl, finish_emit_op_execution,
finish_load_mast_forest_from_dyn_start, finish_load_mast_forest_from_external,
},
trace::execution_tracer::{ExecutionTracer, TraceGenerationContext},
tracer::{OperationHelperRegisters, Tracer},
};
mod basic_block;
mod call_and_dyn;
mod external;
mod memory;
mod operation;
mod step;
pub use basic_block::SystemEventError;
use external::maybe_use_caller_error_context;
pub use memory::Memory;
pub use step::{BreakReason, ResumeContext};
use step::{NeverStopper, StepStopper};
#[cfg(test)]
mod tests;
// Size (in field elements) of the fixed backing buffer for the operand stack.
const STACK_BUFFER_SIZE: usize = 6850;
// Initial position of the stack top within the buffer; leaves headroom so the stack
// can grow upward and shrink downward before `reset_stack_in_buffer` must relocate it.
const INITIAL_STACK_TOP_IDX: usize = 250;
/// A fast processor for executing Miden programs without building an execution trace.
#[derive(Debug)]
pub struct FastProcessor {
// Operand-stack buffer; the live stack occupies `stack_bot_idx..stack_top_idx`.
stack: Box<[Felt; STACK_BUFFER_SIZE]>,
// Buffer index one past the top stack element.
stack_top_idx: usize,
// Buffer index of the bottom-most live stack element.
stack_bot_idx: usize,
// Current clock cycle (a trace row index).
clk: RowIndex,
// Current execution context id.
ctx: ContextId,
// Hash identifying the caller of the current context (EMPTY_WORD at the root).
caller_hash: Word,
// Advice provider supplying nondeterministic inputs.
advice: AdviceProvider,
// Processor memory.
memory: Memory,
// Saved context info for active calls -- restored on return (see ExecutionContextInfo).
call_stack: Vec<ExecutionContextInfo>,
// Execution options (debugging/tracing flags, trace parameters).
options: ExecutionOptions,
// Transcript of precompile calls accumulated during execution.
pc_transcript: PrecompileTranscript,
// Test-only counter of how many times decorator lists were retrieved.
#[cfg(test)]
pub decorator_retrieval_count: Rc<Cell<usize>>,
}
impl FastProcessor {
/// Creates a processor with the given stack inputs and default advice inputs and
/// execution options.
pub fn new(stack_inputs: StackInputs) -> Self {
    let advice = AdviceInputs::default();
    let options = ExecutionOptions::default();
    Self::new_with_options(stack_inputs, advice, options)
}
pub fn with_advice(mut self, advice_inputs: AdviceInputs) -> Self {
self.advice = advice_inputs.into();
self
}
pub fn with_options(mut self, options: ExecutionOptions) -> Self {
self.options = options;
self
}
/// Returns this processor with debug mode enabled or disabled.
pub fn with_debugging(mut self, enabled: bool) -> Self {
    let updated_options = self.options.with_debugging(enabled);
    self.options = updated_options;
    self
}
/// Returns this processor with tracing enabled or disabled.
pub fn with_tracing(mut self, enabled: bool) -> Self {
    let updated_options = self.options.with_tracing(enabled);
    self.options = updated_options;
    self
}
/// Creates a processor from stack inputs, advice inputs, and execution options.
///
/// The operand stack lives in a fixed-size zeroed buffer; the stack top starts at
/// `INITIAL_STACK_TOP_IDX` so the stack can grow and shrink without reallocating.
pub fn new_with_options(
stack_inputs: StackInputs,
advice_inputs: AdviceInputs,
options: ExecutionOptions,
) -> Self {
let stack_top_idx = INITIAL_STACK_TOP_IDX;
let stack = {
// Zero-initialized buffer; `try_into` converts the boxed slice into a boxed array.
let mut stack: Box<[Felt; STACK_BUFFER_SIZE]> =
vec![ZERO; STACK_BUFFER_SIZE].into_boxed_slice().try_into().unwrap();
// Inputs are written in reverse buffer order so the first input ends up on top.
for (i, &input) in stack_inputs.iter().enumerate() {
stack[stack_top_idx - 1 - i] = input;
}
stack
};
Self {
advice: advice_inputs.into(),
stack,
stack_top_idx,
// The visible stack initially spans exactly MIN_STACK_DEPTH elements.
stack_bot_idx: stack_top_idx - MIN_STACK_DEPTH,
clk: 0_u32.into(),
ctx: 0_u32.into(),
caller_hash: EMPTY_WORD,
memory: Memory::new(),
call_stack: Vec::new(),
options,
pc_transcript: PrecompileTranscript::new(),
#[cfg(test)]
decorator_retrieval_count: Rc::new(Cell::new(0)),
}
}
/// Builds the initial [`ResumeContext`] for step-wise execution of `program`.
///
/// Loads the program's advice map into the advice provider before execution begins.
pub fn get_initial_resume_context(
&mut self,
program: &Program,
) -> Result<ResumeContext, ExecutionError> {
self.advice
.extend_map(program.mast_forest().advice_map())
.map_exec_err_no_ctx()?;
Ok(ResumeContext {
current_forest: program.mast_forest().clone(),
continuation_stack: ContinuationStack::new(program),
kernel: program.kernel().clone(),
})
}
/// Returns true when the processor was configured with debugging enabled.
#[inline(always)]
pub fn in_debug_mode(&self) -> bool {
    let debugging_enabled = self.options.enable_debugging();
    debugging_enabled
}
/// Returns true when decorators must be executed (debug or tracing mode).
#[inline(always)]
fn should_execute_decorators(&self) -> bool {
    let tracing_enabled = self.options.enable_tracing();
    self.in_debug_mode() || tracing_enabled
}
/// (test only) Increments the counter tracking decorator-list retrievals.
#[cfg(test)]
#[inline(always)]
fn record_decorator_retrieval(&self) {
    let counter = &self.decorator_retrieval_count;
    counter.set(counter.get() + 1);
}
/// Returns the number of live stack elements.
#[inline(always)]
fn stack_size(&self) -> usize {
    let top = self.stack_top_idx;
    top - self.stack_bot_idx
}
/// Returns the live portion of the operand stack (bottom to top in buffer order).
pub fn stack(&self) -> &[Felt] {
    let (bot, top) = (self.stack_bot_idx, self.stack_top_idx);
    &self.stack[bot..top]
}
/// Returns the top `MIN_STACK_DEPTH` elements of the stack.
pub fn stack_top(&self) -> &[Felt] {
    let start = self.stack_top_idx - MIN_STACK_DEPTH;
    &self.stack[start..self.stack_top_idx]
}
/// Returns a mutable view of the top `MIN_STACK_DEPTH` elements of the stack.
pub fn stack_top_mut(&mut self) -> &mut [Felt] {
    let start = self.stack_top_idx - MIN_STACK_DEPTH;
    &mut self.stack[start..self.stack_top_idx]
}
/// Returns the stack element at `idx`, where index 0 is the top of the stack.
#[inline(always)]
pub fn stack_get(&self, idx: usize) -> Felt {
    let buf_idx = self.stack_top_idx - idx - 1;
    self.stack[buf_idx]
}
/// Returns the stack element at `idx` (0 = top), or ZERO when the read would fall
/// off the front of the buffer.
///
/// NOTE(review): the bound checked here is the buffer offset (`stack_top_idx`), not
/// the logical stack depth; indices between the logical bottom and the buffer start
/// return whatever the buffer holds there (zeroed by construction). Confirm this is
/// the intended "safe" semantics.
#[inline(always)]
pub fn stack_get_safe(&self, idx: usize) -> Felt {
if idx < self.stack_top_idx {
self.stack[self.stack_top_idx - idx - 1]
} else {
ZERO
}
}
/// Returns a mutable reference to the stack element at `idx` (0 = top).
#[inline(always)]
pub fn stack_get_mut(&mut self, idx: usize) -> &mut Felt {
    let buf_idx = self.stack_top_idx - idx - 1;
    &mut self.stack[buf_idx]
}
/// Returns the word whose top element sits at stack index `start_idx`.
///
/// The buffer slice is reversed so the element at `start_idx` becomes the first
/// element of the returned word.
#[inline(always)]
pub fn stack_get_word(&self, start_idx: usize) -> Word {
debug_assert!(
start_idx + WORD_SIZE <= self.stack_depth() as usize,
"Not enough elements on stack to read word starting at index {start_idx}"
);
let word_start_idx = self.stack_top_idx - start_idx - WORD_SIZE;
let mut result: [Felt; WORD_SIZE] =
self.stack[range(word_start_idx, WORD_SIZE)].try_into().unwrap();
result.reverse();
result.into()
}
/// Like [`Self::stack_get_word`], but pads with ZEROs instead of panicking when the
/// requested word extends past the front of the buffer.
#[inline(always)]
pub fn stack_get_word_safe(&self, start_idx: usize) -> Word {
// Clamp the read window to the buffer bounds.
let buf_end = self.stack_top_idx.saturating_sub(start_idx);
let buf_start = self.stack_top_idx.saturating_sub(start_idx.saturating_add(WORD_SIZE));
let num_elements_to_read_from_buf = buf_end - buf_start;
let mut result = [ZERO; WORD_SIZE];
if num_elements_to_read_from_buf == WORD_SIZE {
// Full word available: copy it in one shot.
result.copy_from_slice(&self.stack[range(buf_start, WORD_SIZE)]);
} else if num_elements_to_read_from_buf > 0 {
// Partial word: copy what exists into the tail; the head stays ZERO.
let offset = WORD_SIZE - num_elements_to_read_from_buf;
result[offset..]
.copy_from_slice(&self.stack[range(buf_start, num_elements_to_read_from_buf)]);
}
// Reverse into stack order, mirroring `stack_get_word`.
result.reverse();
result.into()
}
/// Returns the current stack depth as a `u32`.
#[inline(always)]
pub fn stack_depth(&self) -> u32 {
    let depth = self.stack_top_idx - self.stack_bot_idx;
    depth as u32
}
pub fn memory(&self) -> &Memory {
&self.memory
}
pub fn into_parts(self) -> (AdviceProvider, Memory, PrecompileTranscript) {
(self.advice, self.memory, self.pc_transcript)
}
pub fn execution_options(&self) -> &ExecutionOptions {
&self.options
}
/// Returns a read-only view of the processor state.
#[inline(always)]
pub fn state(&self) -> ProcessorState<'_> {
    let processor = self;
    ProcessorState { processor }
}
/// Writes `element` to the stack at index `idx` (0 = top).
#[inline(always)]
pub fn stack_write(&mut self, idx: usize, element: Felt) {
    let buf_idx = self.stack_top_idx - idx - 1;
    self.stack[buf_idx] = element;
}
/// Writes `word` to the stack starting at index `start_idx` (0 = top).
///
/// The word's elements are reversed before writing so that reading the same
/// position back with `stack_get_word` returns the original word.
#[inline(always)]
pub fn stack_write_word(&mut self, start_idx: usize, word: &Word) {
    debug_assert!(start_idx <= MIN_STACK_DEPTH - WORD_SIZE);
    // Use WORD_SIZE instead of the magic literal `4`, for consistency with
    // `stack_get_word` (the two must agree on the word's footprint).
    let word_start_idx = self.stack_top_idx - start_idx - WORD_SIZE;
    let mut source: [Felt; WORD_SIZE] = (*word).into();
    source.reverse();
    self.stack[range(word_start_idx, WORD_SIZE)].copy_from_slice(&source)
}
/// Swaps the stack elements at indices `idx1` and `idx2` (0 = top).
#[inline(always)]
pub fn stack_swap(&mut self, idx1: usize, idx2: usize) {
    let i = self.stack_top_idx - idx1 - 1;
    let j = self.stack_top_idx - idx2 - 1;
    self.stack.swap(i, j);
}
/// Executes `program` to completion without recording any trace data.
pub async fn execute(
    self,
    program: &Program,
    host: &mut impl Host,
) -> Result<ExecutionOutput, ExecutionError> {
    let mut tracer = NoopTracer;
    self.execute_with_tracer(program, host, &mut tracer).await
}
/// Executes `program` while recording the data needed for trace generation.
#[instrument(name = "execute_for_trace", skip_all)]
pub async fn execute_for_trace(
    self,
    program: &Program,
    host: &mut impl Host,
) -> Result<(ExecutionOutput, TraceGenerationContext), ExecutionError> {
    let fragment_size = self.options.core_trace_fragment_size();
    let mut tracer = ExecutionTracer::new(fragment_size);
    let output = self.execute_with_tracer(program, host, &mut tracer).await?;
    Ok((output, tracer.into_trace_generation_context()))
}
/// Executes `program`, reporting execution events to `tracer`.
///
/// Consumes the processor; on success returns the final stack outputs, advice
/// provider, memory, and precompile transcript.
pub async fn execute_with_tracer<T>(
mut self,
program: &Program,
host: &mut impl Host,
tracer: &mut T,
) -> Result<ExecutionOutput, ExecutionError>
where
T: Tracer<Processor = Self>,
{
let mut continuation_stack = ContinuationStack::new(program);
let mut current_forest = program.mast_forest().clone();
// Make the program's advice map available before execution starts.
self.advice.extend_map(current_forest.advice_map()).map_exec_err_no_ctx()?;
match self
.execute_impl(
&mut continuation_stack,
&mut current_forest,
program.kernel(),
host,
tracer,
&NeverStopper,
)
.await
{
ControlFlow::Continue(stack_outputs) => Ok(ExecutionOutput {
stack: stack_outputs,
advice: self.advice,
memory: self.memory,
final_pc_transcript: self.pc_transcript,
}),
ControlFlow::Break(break_reason) => match break_reason {
BreakReason::Err(err) => Err(err),
// `NeverStopper` never requests a stop, so this arm cannot be reached.
BreakReason::Stopped(_) => {
unreachable!("Execution never stops prematurely with NeverStopper")
},
},
}
}
/// Advances execution by one step, returning the context needed to resume, or
/// `None` when the program has run to completion.
pub async fn step(
&mut self,
host: &mut impl Host,
resume_ctx: ResumeContext,
) -> Result<Option<ResumeContext>, ExecutionError> {
let ResumeContext {
mut current_forest,
mut continuation_stack,
kernel,
} = resume_ctx;
match self
.execute_impl(
&mut continuation_stack,
&mut current_forest,
&kernel,
host,
&mut NoopTracer,
&StepStopper,
)
.await
{
// Execution finished; nothing left to resume.
ControlFlow::Continue(_) => Ok(None),
ControlFlow::Break(break_reason) => match break_reason {
BreakReason::Err(err) => Err(err),
BreakReason::Stopped(maybe_continuation) => {
// Re-push the interrupted continuation so the next `step` resumes it.
if let Some(continuation) = maybe_continuation {
continuation_stack.push_continuation(continuation);
}
Ok(Some(ResumeContext {
current_forest,
continuation_stack,
kernel,
}))
},
},
}
}
/// Core async driver loop shared by `execute_with_tracer`, `step`, and the sync
/// test helpers.
///
/// Repeatedly invokes the synchronous inner `execute_impl` free function and
/// services the internal break reasons that need async work (emit operations and
/// loading MAST forests from the host), until execution completes, errors, or the
/// stopper requests a stop. On completion, the live stack is converted (top-first)
/// into [`StackOutputs`].
async fn execute_impl<S, T>(
&mut self,
continuation_stack: &mut ContinuationStack,
current_forest: &mut Arc<MastForest>,
kernel: &Kernel,
host: &mut impl Host,
tracer: &mut T,
stopper: &S,
) -> ControlFlow<BreakReason, StackOutputs>
where
S: Stopper<Processor = Self>,
T: Tracer<Processor = Self>,
{
while let ControlFlow::Break(internal_break_reason) =
execute_impl(self, continuation_stack, current_forest, kernel, host, tracer, stopper)
{
match internal_break_reason {
// Error or stopper-requested stop: surface it unchanged.
InternalBreakReason::User(break_reason) => return ControlFlow::Break(break_reason),
// An `emit` operation needs the async host; run it, then let the sync
// code finish the operation before re-entering the loop.
InternalBreakReason::Emit {
basic_block_node_id,
op_idx,
continuation,
} => {
self.op_emit(host, current_forest, basic_block_node_id, op_idx).await?;
finish_emit_op_execution(
continuation,
self,
continuation_stack,
current_forest,
tracer,
stopper,
)?;
},
// A dyn node needs a MAST forest that is not currently loaded.
InternalBreakReason::LoadMastForestFromDyn { dyn_node_id, callee_hash } => {
let (root_id, new_forest) = match self
.load_mast_forest(callee_hash, host, current_forest, dyn_node_id)
.await
{
Ok(result) => result,
Err(err) => return ControlFlow::Break(BreakReason::Err(err)),
};
finish_load_mast_forest_from_dyn_start(
root_id,
new_forest,
self,
current_forest,
continuation_stack,
tracer,
stopper,
)?;
},
// An external node references a procedure in another forest.
InternalBreakReason::LoadMastForestFromExternal {
external_node_id,
procedure_hash,
} => {
let (root_id, new_forest) = match self
.load_mast_forest(procedure_hash, host, current_forest, external_node_id)
.await
{
Ok(result) => result,
// Enrich the error with the caller's context when available.
Err(err) => {
let maybe_enriched_err = maybe_use_caller_error_context(
err,
current_forest,
continuation_stack,
host,
);
return ControlFlow::Break(BreakReason::Err(maybe_enriched_err));
},
};
finish_load_mast_forest_from_external(
root_id,
new_forest,
external_node_id,
current_forest,
continuation_stack,
host,
tracer,
)?;
},
}
}
// Execution completed: collect the live stack, top element first.
match StackOutputs::new(
&self.stack[self.stack_bot_idx..self.stack_top_idx]
.iter()
.rev()
.copied()
.collect::<Vec<_>>(),
) {
Ok(stack_outputs) => ControlFlow::Continue(stack_outputs),
// More than MIN_STACK_DEPTH elements remained on the stack at exit.
Err(_) => ControlFlow::Break(BreakReason::Err(ExecutionError::OutputStackOverflow(
self.stack_top_idx - self.stack_bot_idx - MIN_STACK_DEPTH,
))),
}
}
/// Executes the `before_enter` decorators attached to `node_id`, when decorator
/// execution is enabled (debug or tracing mode).
fn execute_before_enter_decorators(
    &self,
    node_id: MastNodeId,
    current_forest: &MastForest,
    host: &mut impl Host,
) -> ControlFlow<BreakReason> {
    // Fast path: decorators only run in debug or tracing mode.
    if !self.should_execute_decorators() {
        return ControlFlow::Continue(());
    }
    #[cfg(test)]
    self.record_decorator_retrieval();
    // Fix: the previous `expect` message contained a literal "{node_id}" that was
    // never interpolated (`expect` takes a plain &str); panic with real formatting.
    let node = current_forest.get_node_by_id(node_id).unwrap_or_else(|| {
        panic!("internal error: node id {node_id:?} not found in current forest")
    });
    for &decorator_id in node.before_enter(current_forest) {
        self.execute_decorator(&current_forest[decorator_id], host)?;
    }
    ControlFlow::Continue(())
}
/// Executes the `after_exit` decorators attached to `node_id`.
///
/// NOTE(review): unlike `execute_before_enter_decorators`, this gate is debug mode
/// only (tracing alone does not trigger it) — confirm the asymmetry is intentional.
fn execute_after_exit_decorators(
    &self,
    node_id: MastNodeId,
    current_forest: &MastForest,
    host: &mut impl Host,
) -> ControlFlow<BreakReason> {
    if !self.in_debug_mode() {
        return ControlFlow::Continue(());
    }
    #[cfg(test)]
    self.record_decorator_retrieval();
    // Fix: the previous `expect` message contained a literal "{node_id}" that was
    // never interpolated (`expect` takes a plain &str); panic with real formatting.
    let node = current_forest.get_node_by_id(node_id).unwrap_or_else(|| {
        panic!("internal error: node id {node_id:?} not found in current forest")
    });
    for &decorator_id in node.after_exit(current_forest) {
        self.execute_decorator(&current_forest[decorator_id], host)?;
    }
    ControlFlow::Continue(())
}
/// Executes a single decorator by dispatching to the host's debug/trace handlers.
///
/// Debug decorators run only in debug mode; trace decorators only when tracing is
/// enabled. A handler failure is converted into `BreakReason::Err`.
fn execute_decorator(
&self,
decorator: &Decorator,
host: &mut impl Host,
) -> ControlFlow<BreakReason> {
match decorator {
Decorator::Debug(options) => {
if self.in_debug_mode() {
let processor_state = self.state();
if let Err(err) = host.on_debug(&processor_state, options) {
return ControlFlow::Break(BreakReason::Err(
crate::errors::HostError::DebugHandlerError { err }.into(),
));
}
}
},
Decorator::Trace(id) => {
if self.options.enable_tracing() {
let processor_state = self.state();
if let Err(err) = host.on_trace(&processor_state, *id) {
return ControlFlow::Break(BreakReason::Err(
crate::errors::HostError::TraceHandlerError { trace_id: *id, err }
.into(),
));
}
}
},
};
ControlFlow::Continue(())
}
/// Resolves `node_digest` to a MAST forest via the host and returns the forest
/// along with the id of the procedure root matching the digest.
///
/// Also merges the loaded forest's advice map into the advice provider.
///
/// # Errors
/// - the host has no forest for the digest,
/// - the forest lacks a procedure root with the digest (`MalformedMastForestInHost`),
/// - extending the advice map fails.
async fn load_mast_forest(
&mut self,
node_digest: Word,
host: &mut impl Host,
current_forest: &MastForest,
node_id: MastNodeId,
) -> Result<(MastNodeId, Arc<MastForest>), ExecutionError> {
let mast_forest = host.get_mast_forest(&node_digest).await.ok_or_else(|| {
crate::errors::procedure_not_found_with_context(
node_digest,
current_forest,
node_id,
host,
)
})?;
// Build the contextualized error through the `map_exec_err` machinery, then
// extract it; `unwrap_err` is safe because we start from an `Err`.
let root_id = mast_forest.find_procedure_root(node_digest).ok_or_else(|| {
Err::<(), _>(OperationError::MalformedMastForestInHost { root_digest: node_digest })
.map_exec_err(current_forest, node_id, host)
.unwrap_err()
})?;
self.advice.extend_map(mast_forest.advice_map()).map_exec_err(
current_forest,
node_id,
host,
)?;
Ok((root_id, mast_forest))
}
/// Advances the stack top pointer by one (stack grows by one element).
///
/// NOTE(review): there is no check against STACK_BUFFER_SIZE here; presumably
/// callers guarantee the buffer cannot overflow — confirm.
#[inline(always)]
fn increment_stack_size(&mut self) {
self.stack_top_idx += 1;
}
/// Shrinks the stack by one element.
///
/// If the stack top has reached the lowest position that still leaves room for a
/// MIN_STACK_DEPTH-deep stack, the stack is first relocated back to its initial
/// position in the buffer. The bottom index is then clamped so that the visible
/// stack never drops below MIN_STACK_DEPTH elements.
#[inline(always)]
fn decrement_stack_size(&mut self) {
if self.stack_top_idx == MIN_STACK_DEPTH {
self.reset_stack_in_buffer(INITIAL_STACK_TOP_IDX);
}
self.stack_top_idx -= 1;
self.stack_bot_idx = min(self.stack_bot_idx, self.stack_top_idx - MIN_STACK_DEPTH);
}
/// Relocates the live stack (which must be exactly MIN_STACK_DEPTH deep) so that
/// its top sits at `new_stack_top_idx`, zeroing the buffer below the new bottom.
#[inline(always)]
fn reset_stack_in_buffer(&mut self, new_stack_top_idx: usize) {
debug_assert_eq!(self.stack_depth(), MIN_STACK_DEPTH as u32);
let new_stack_bot_idx = new_stack_top_idx - MIN_STACK_DEPTH;
// Move the live elements, then clear everything below them so later
// out-of-range "safe" reads observe ZERO.
self.stack
.copy_within(self.stack_bot_idx..self.stack_top_idx, new_stack_bot_idx);
self.stack[0..new_stack_bot_idx].fill(ZERO);
self.stack_bot_idx = new_stack_bot_idx;
self.stack_top_idx = new_stack_top_idx;
}
/// Blocking wrapper around [`Self::step`] using a current-thread Tokio runtime.
pub fn step_sync(
    &mut self,
    host: &mut impl Host,
    resume_ctx: ResumeContext,
) -> Result<Option<ResumeContext>, ExecutionError> {
    let runtime = tokio::runtime::Builder::new_current_thread().build().unwrap();
    runtime.block_on(self.step(host, resume_ctx))
}
pub fn execute_by_step_sync(
mut self,
program: &Program,
host: &mut impl Host,
) -> Result<StackOutputs, ExecutionError> {
let rt = tokio::runtime::Builder::new_current_thread().build().unwrap();
let mut current_resume_ctx = self.get_initial_resume_context(program).unwrap();
rt.block_on(async {
loop {
match self.step(host, current_resume_ctx).await {
Ok(maybe_resume_ctx) => match maybe_resume_ctx {
Some(next_resume_ctx) => {
current_resume_ctx = next_resume_ctx;
},
None => {
break Ok(StackOutputs::new(
&self.stack[self.stack_bot_idx..self.stack_top_idx]
.iter()
.rev()
.copied()
.collect::<Vec<_>>(),
)
.unwrap());
},
},
Err(err) => {
break Err(err);
},
}
}
})
}
/// Blocking wrapper around [`Self::execute`].
///
/// # Panics
/// Panics when invoked from within an existing Tokio runtime; use the async
/// `execute()` method in that case.
#[cfg(not(target_family = "wasm"))]
pub fn execute_sync(
    self,
    program: &Program,
    host: &mut impl Host,
) -> Result<ExecutionOutput, ExecutionError> {
    assert!(
        tokio::runtime::Handle::try_current().is_err(),
        "Cannot call execute_sync from within a Tokio runtime. Use the async execute() method instead."
    );
    let runtime = tokio::runtime::Builder::new_current_thread().build().unwrap();
    runtime.block_on(self.execute(program, host))
}
/// Blocking wrapper around [`Self::execute_for_trace`].
///
/// # Panics
/// Panics when invoked from within an existing Tokio runtime; use the async
/// `execute_for_trace()` method in that case.
#[cfg(not(target_family = "wasm"))]
#[instrument(name = "execute_for_trace_sync", skip_all)]
pub fn execute_for_trace_sync(
    self,
    program: &Program,
    host: &mut impl Host,
) -> Result<(ExecutionOutput, TraceGenerationContext), ExecutionError> {
    assert!(
        tokio::runtime::Handle::try_current().is_err(),
        "Cannot call execute_for_trace_sync from within a Tokio runtime. Use the async execute_for_trace() method instead."
    );
    let runtime = tokio::runtime::Builder::new_current_thread().build().unwrap();
    runtime.block_on(self.execute_for_trace(program, host))
}
/// (test only) Executes `program` on `&mut self`, blocking on a current-thread
/// Tokio runtime. Panics when called from within an existing Tokio runtime.
#[cfg(all(any(test, feature = "testing"), not(target_family = "wasm")))]
pub fn execute_sync_mut(
&mut self,
program: &Program,
host: &mut impl Host,
) -> Result<StackOutputs, ExecutionError> {
let mut continuation_stack = ContinuationStack::new(program);
let mut current_forest = program.mast_forest().clone();
// Make the program's advice map available before execution starts.
self.advice.extend_map(current_forest.advice_map()).map_exec_err_no_ctx()?;
let execute_fut = async {
match self
.execute_impl(
&mut continuation_stack,
&mut current_forest,
program.kernel(),
host,
&mut NoopTracer,
&NeverStopper,
)
.await
{
ControlFlow::Continue(stack_outputs) => Ok(stack_outputs),
ControlFlow::Break(break_reason) => match break_reason {
BreakReason::Err(err) => Err(err),
// `NeverStopper` never requests a stop, so this arm cannot be reached.
BreakReason::Stopped(_) => {
unreachable!("Execution never stops prematurely with NeverStopper")
},
},
}
};
match tokio::runtime::Handle::try_current() {
Ok(_handle) => {
panic!(
"Cannot call execute_sync_mut from within a Tokio runtime. \
Use async execution methods instead."
)
},
Err(_) => {
let rt = tokio::runtime::Builder::new_current_thread().build().unwrap();
rt.block_on(execute_fut)
},
}
}
}
/// The outputs of a successful program execution.
#[derive(Debug)]
pub struct ExecutionOutput {
// Final operand stack outputs.
pub stack: StackOutputs,
// Advice provider in its final state.
pub advice: AdviceProvider,
// Processor memory in its final state.
pub memory: Memory,
// Final transcript of precompile calls.
pub final_pc_transcript: PrecompileTranscript,
}
/// Execution-context state saved when entering a call and restored on return.
#[derive(Debug)]
struct ExecutionContextInfo {
// Saved stack elements -- presumably those beyond the visible top at the call
// boundary; TODO(review) confirm against the call/return handling code.
overflow_stack: Vec<Felt>,
// Context id to restore for the caller.
ctx: ContextId,
// Hash of the caller's function.
fn_hash: Word,
}
/// A [`Tracer`] that records nothing; used when execution need not produce a trace.
pub struct NoopTracer;
impl Tracer for NoopTracer {
type Processor = FastProcessor;
// No-op: nothing to record at the start of a clock cycle.
#[inline(always)]
fn start_clock_cycle(
&mut self,
_processor: &FastProcessor,
_continuation: Continuation,
_continuation_stack: &ContinuationStack,
_current_forest: &Arc<MastForest>,
) {
}
// No-op: nothing to record at the end of a clock cycle.
#[inline(always)]
fn finalize_clock_cycle(
&mut self,
_processor: &FastProcessor,
_op_helper_registers: OperationHelperRegisters,
_current_forest: &Arc<MastForest>,
) {
}
}