1#![no_std]
2
3#[macro_use]
4extern crate alloc;
5
6#[cfg(feature = "std")]
7extern crate std;
8
9use alloc::{sync::Arc, vec::Vec};
10use core::fmt::{Display, LowerHex};
11
12use fast::FastProcessor;
13use miden_air::trace::{
14 CHIPLETS_WIDTH, DECODER_TRACE_WIDTH, MIN_TRACE_LEN, RANGE_CHECK_TRACE_WIDTH, STACK_TRACE_WIDTH,
15 SYS_TRACE_WIDTH,
16};
17pub use miden_air::{ExecutionOptions, ExecutionOptionsError, RowIndex};
18use utils::resolve_external_node;
19pub use vm_core::{
20 AssemblyOp, EMPTY_WORD, Felt, Kernel, ONE, Operation, Program, ProgramInfo, QuadExtension,
21 StackInputs, StackOutputs, Word, ZERO,
22 chiplets::hasher::Digest,
23 crypto::merkle::SMT_DEPTH,
24 debuginfo::{DefaultSourceManager, SourceManager, SourceSpan},
25 errors::InputError,
26 mast::{MastForest, MastNode, MastNodeId},
27 sys_events::SystemEvent,
28 utils::{DeserializationError, collections::KvMap},
29};
30use vm_core::{
31 Decorator, DecoratorIterator, FieldElement, WORD_SIZE,
32 mast::{
33 BasicBlockNode, CallNode, DynNode, JoinNode, LoopNode, MastNodeExt, OP_GROUP_SIZE, OpBatch,
34 SplitNode,
35 },
36};
37pub use winter_prover::matrix::ColMatrix;
38
39pub mod fast;
40
41mod operations;
42
43mod system;
44use system::System;
45pub use system::{ContextId, FMP_MIN, SYSCALL_FMP_MIN};
46
47mod decoder;
48use decoder::Decoder;
49
50mod stack;
51use stack::Stack;
52
53mod range;
54use range::RangeChecker;
55
56mod host;
57pub use host::{
58 DefaultHost, Host, MastForestStore, MemMastForestStore,
59 advice::{AdviceInputs, AdviceProvider, AdviceSource, MemAdviceProvider, RecAdviceProvider},
60};
61
62mod chiplets;
63use chiplets::Chiplets;
64pub use chiplets::MemoryError;
65
66mod trace;
67use trace::TraceFragment;
68pub use trace::{ChipletsLengths, ExecutionTrace, NUM_RAND_ROWS, TraceLenSummary};
69
70mod errors;
71pub use errors::{ErrorContext, ExecutionError, Ext2InttError};
72
73pub mod utils;
74
75#[cfg(test)]
76mod tests;
77
78mod debug;
79pub use debug::{AsmOpInfo, VmState, VmStateIterator};
80
/// Re-exports of the math primitives (field elements and FFT) used by the processor.
pub mod math {
    pub use vm_core::{Felt, FieldElement, StarkField};
    pub use winter_prover::math::fft;
}
88
/// Re-exports of the hashing, Merkle-tree, and random-coin primitives from `vm_core`.
pub mod crypto {
    pub use vm_core::crypto::{
        hash::{
            Blake3_192, Blake3_256, ElementHasher, Hasher, Rpo256, RpoDigest, Rpx256, RpxDigest,
        },
        merkle::{
            MerkleError, MerklePath, MerkleStore, MerkleTree, NodeIndex, PartialMerkleTree,
            SimpleSmt,
        },
        random::{RandomCoin, RpoRandomCoin, RpxRandomCoin, WinterRandomCoin},
    };
}
101
102type QuadFelt = QuadExtension<Felt>;
106
/// A VM memory address: a thin newtype over `u32`.
///
/// Formatting (`Display`/`LowerHex`) delegates to the wrapped integer so that width, fill,
/// and `#` flags are honored; addition delegates to plain `u32` addition.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct MemoryAddress(u32);

impl From<u32> for MemoryAddress {
    // Wraps a raw `u32` into a typed memory address.
    fn from(addr: u32) -> Self {
        Self(addr)
    }
}

impl From<MemoryAddress> for u32 {
    // Unwraps the address back into its raw `u32` form.
    fn from(value: MemoryAddress) -> Self {
        let MemoryAddress(raw) = value;
        raw
    }
}

impl Display for MemoryAddress {
    // Forward to the inner `u32` so all formatting flags behave as for a plain integer.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        Display::fmt(&self.0, f)
    }
}

impl LowerHex for MemoryAddress {
    // Forward to the inner `u32` so `{:x}`/`{:#x}` behave as for a plain integer.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        LowerHex::fmt(&self.0, f)
    }
}

impl core::ops::Add<MemoryAddress> for MemoryAddress {
    type Output = Self;

    // Plain `u32` addition: panics on overflow in debug builds, wraps in release.
    fn add(self, rhs: MemoryAddress) -> Self::Output {
        let MemoryAddress(lhs) = self;
        Self(lhs + rhs.0)
    }
}

impl core::ops::Add<u32> for MemoryAddress {
    type Output = Self;

    fn add(self, rhs: u32) -> Self::Output {
        Self(self.0 + rhs)
    }
}
149
/// Columns of the system component of the execution trace.
type SysTrace = [Vec<Felt>; SYS_TRACE_WIDTH];

/// Main-trace columns produced by the decoder, together with the builder for its auxiliary
/// trace columns.
pub struct DecoderTrace {
    trace: [Vec<Felt>; DECODER_TRACE_WIDTH],
    aux_builder: decoder::AuxTraceBuilder,
}

/// Main-trace columns produced by the operand stack.
pub struct StackTrace {
    trace: [Vec<Felt>; STACK_TRACE_WIDTH],
}

/// Main-trace columns produced by the range checker, together with the builder for its
/// auxiliary trace columns.
pub struct RangeCheckTrace {
    trace: [Vec<Felt>; RANGE_CHECK_TRACE_WIDTH],
    aux_builder: range::AuxTraceBuilder,
}

/// Main-trace columns produced by the chiplets, together with the builder for their
/// auxiliary trace columns.
pub struct ChipletsTrace {
    trace: [Vec<Felt>; CHIPLETS_WIDTH],
    aux_builder: chiplets::AuxTraceBuilder,
}
170
171#[tracing::instrument("execute_program", skip_all)]
180pub fn execute(
181 program: &Program,
182 stack_inputs: StackInputs,
183 host: &mut impl Host,
184 options: ExecutionOptions,
185 source_manager: Arc<dyn SourceManager>,
186) -> Result<ExecutionTrace, ExecutionError> {
187 let mut process = Process::new(program.kernel().clone(), stack_inputs, options)
188 .with_source_manager(source_manager);
189 let stack_outputs = process.execute(program, host)?;
190 let trace = ExecutionTrace::new(process, stack_outputs);
191 assert_eq!(&program.hash(), trace.program_hash(), "inconsistent program hash");
192 Ok(trace)
193}
194
195pub fn execute_iter(
198 program: &Program,
199 stack_inputs: StackInputs,
200 host: &mut impl Host,
201 source_manager: Arc<dyn SourceManager>,
202) -> VmStateIterator {
203 let mut process = Process::new_debug(program.kernel().clone(), stack_inputs)
204 .with_source_manager(source_manager);
205 let result = process.execute(program, host);
206 if result.is_ok() {
207 assert_eq!(
208 program.hash(),
209 process.decoder.program_hash().into(),
210 "inconsistent program hash"
211 );
212 }
213 VmStateIterator::new(process, result)
214}
215
// Production variant of the process: fields are private. The `testing`/test variant below is
// field-for-field identical but exposes everything publicly so tests can inspect component
// state directly. Keep the two definitions in sync.
#[cfg(not(any(test, feature = "testing")))]
pub struct Process {
    system: System,                       // clock, context, fmp, and related system columns
    decoder: Decoder,                     // MAST decoding trace
    stack: Stack,                         // operand stack trace
    range: RangeChecker,                  // 16-bit range-check trace
    chiplets: Chiplets,                   // hasher/bitwise/memory/kernel-ROM chiplets
    max_cycles: u32,                      // upper bound on execution length
    enable_tracing: bool,                 // whether `Trace` decorators fire host callbacks
    source_manager: Arc<dyn SourceManager>, // source mapping for error reporting
}

// Testing variant of [Process] with public fields; see the note on the variant above.
#[cfg(any(test, feature = "testing"))]
pub struct Process {
    pub system: System,
    pub decoder: Decoder,
    pub stack: Stack,
    pub range: RangeChecker,
    pub chiplets: Chiplets,
    pub max_cycles: u32,
    pub enable_tracing: bool,
    pub source_manager: Arc<dyn SourceManager>,
}
251
impl Process {
    // --------------------------------------------------------------------------------------------
    // CONSTRUCTORS
    // --------------------------------------------------------------------------------------------

    /// Creates a new process with the provided kernel, stack inputs, and execution options.
    pub fn new(
        kernel: Kernel,
        stack_inputs: StackInputs,
        execution_options: ExecutionOptions,
    ) -> Self {
        Self::initialize(kernel, stack_inputs, execution_options)
    }

    /// Creates a new process with both debugging and tracing enabled.
    pub fn new_debug(kernel: Kernel, stack_inputs: StackInputs) -> Self {
        Self::initialize(
            kernel,
            stack_inputs,
            ExecutionOptions::default().with_tracing().with_debugging(true),
        )
    }

    /// Instantiates every VM component from the provided kernel, stack inputs, and options.
    fn initialize(kernel: Kernel, stack: StackInputs, execution_options: ExecutionOptions) -> Self {
        let in_debug_mode = execution_options.enable_debugging();
        // A default source manager is installed here; callers can replace it via
        // `with_source_manager()`.
        let source_manager = Arc::new(DefaultSourceManager::default());
        Self {
            // System and stack traces are pre-allocated for the expected number of cycles.
            system: System::new(execution_options.expected_cycles() as usize),
            decoder: Decoder::new(in_debug_mode),
            stack: Stack::new(&stack, execution_options.expected_cycles() as usize, in_debug_mode),
            range: RangeChecker::new(),
            chiplets: Chiplets::new(kernel),
            max_cycles: execution_options.max_cycles(),
            enable_tracing: execution_options.enable_tracing(),
            source_manager,
        }
    }

    /// Replaces the process' source manager with the provided one (builder-style).
    pub fn with_source_manager(mut self, source_manager: Arc<dyn SourceManager>) -> Self {
        self.source_manager = source_manager;
        self
    }

    // --------------------------------------------------------------------------------------------
    // PROGRAM EXECUTION
    // --------------------------------------------------------------------------------------------

    /// Executes the provided program from its entrypoint and returns the resulting stack
    /// outputs.
    ///
    /// # Errors
    /// Returns an error if the process has already executed a program (clock is non-zero),
    /// if the program's advice map conflicts with entries already present in the host's
    /// advice provider, or if any operation fails during execution.
    pub fn execute(
        &mut self,
        program: &Program,
        host: &mut impl Host,
    ) -> Result<StackOutputs, ExecutionError> {
        // A process is single-use: a non-zero clock means a program already ran on it.
        if self.system.clk() != 0 {
            return Err(ExecutionError::ProgramAlreadyExecuted);
        }

        // Merge the program's advice map into the host's advice provider. Re-inserting the
        // same key is allowed only if the values are identical.
        for (digest, values) in program.mast_forest().advice_map().iter() {
            if let Some(stored_values) = host.advice_provider().get_mapped_values(digest) {
                if stored_values != values {
                    return Err(ExecutionError::AdviceMapKeyAlreadyPresent {
                        key: digest.into(),
                        prev_values: stored_values.to_vec(),
                        new_values: values.clone(),
                    });
                }
            } else {
                host.advice_provider_mut().insert_into_map(digest.into(), values.clone());
            }
        }

        // NOTE(review): the forest handle is cloned here — presumably a cheap `Arc` clone;
        // confirm against `Program::mast_forest`.
        self.execute_mast_node(program.entrypoint(), &program.mast_forest().clone(), host)?;

        self.stack.build_stack_outputs()
    }

    /// Executes the MAST node with the specified ID, dispatching on the node type and running
    /// any `before_enter`/`after_exit` decorators around it.
    fn execute_mast_node(
        &mut self,
        node_id: MastNodeId,
        program: &MastForest,
        host: &mut impl Host,
    ) -> Result<(), ExecutionError> {
        let node = program
            .get_node_by_id(node_id)
            .ok_or(ExecutionError::MastNodeNotFoundInForest { node_id })?;

        for &decorator_id in node.before_enter() {
            self.execute_decorator(&program[decorator_id], host)?;
        }

        match node {
            MastNode::Block(node) => self.execute_basic_block_node(node, program, host)?,
            MastNode::Join(node) => self.execute_join_node(node, program, host)?,
            MastNode::Split(node) => self.execute_split_node(node, program, host)?,
            MastNode::Loop(node) => self.execute_loop_node(node, program, host)?,
            MastNode::Call(node) => {
                // Call/Dyn nodes may surface external-resolution errors; attach this node's
                // error context to those (see `add_error_ctx_to_external_error`).
                let err_ctx = ErrorContext::new(program, node, self.source_manager.clone());
                add_error_ctx_to_external_error(
                    self.execute_call_node(node, program, host),
                    err_ctx,
                )?
            },
            MastNode::Dyn(node) => {
                let err_ctx = ErrorContext::new(program, node, self.source_manager.clone());
                add_error_ctx_to_external_error(
                    self.execute_dyn_node(node, program, host),
                    err_ctx,
                )?
            },
            MastNode::External(external_node) => {
                // Resolve the external reference via the host, then execute its root in the
                // resolved forest.
                let (root_id, mast_forest) = resolve_external_node(external_node, host)?;

                self.execute_mast_node(root_id, &mast_forest, host)?;
            },
        }

        for &decorator_id in node.after_exit() {
            self.execute_decorator(&program[decorator_id], host)?;
        }

        Ok(())
    }

    /// Executes a [JoinNode]: the first child, then the second.
    #[inline(always)]
    fn execute_join_node(
        &mut self,
        node: &JoinNode,
        program: &MastForest,
        host: &mut impl Host,
    ) -> Result<(), ExecutionError> {
        self.start_join_node(node, program, host)?;

        self.execute_mast_node(node.first(), program, host)?;
        self.execute_mast_node(node.second(), program, host)?;

        self.end_join_node(node, program, host)
    }

    /// Executes a [SplitNode]: the `on_true` branch when the condition popped by
    /// `start_split_node` is ONE, the `on_false` branch when it is ZERO.
    #[inline(always)]
    fn execute_split_node(
        &mut self,
        node: &SplitNode,
        program: &MastForest,
        host: &mut impl Host,
    ) -> Result<(), ExecutionError> {
        let condition = self.start_split_node(node, program, host)?;

        if condition == ONE {
            self.execute_mast_node(node.on_true(), program, host)?;
        } else if condition == ZERO {
            self.execute_mast_node(node.on_false(), program, host)?;
        } else {
            // The condition must be binary.
            let err_ctx = ErrorContext::new(program, node, self.source_manager.clone());
            return Err(ExecutionError::not_binary_value_if(condition, &err_ctx));
        }

        self.end_split_node(node, program, host)
    }

    /// Executes a [LoopNode]: runs the body while the value at the top of the stack is ONE,
    /// dropping the condition before each iteration. A non-binary condition is an error.
    #[inline(always)]
    fn execute_loop_node(
        &mut self,
        node: &LoopNode,
        program: &MastForest,
        host: &mut impl Host,
    ) -> Result<(), ExecutionError> {
        let condition = self.start_loop_node(node, program, host)?;

        if condition == ONE {
            // First iteration: the entry condition was already consumed by `start_loop_node`.
            self.execute_mast_node(node.body(), program, host)?;

            // Subsequent iterations: each is signaled by a REPEAT in the decoder and the
            // condition on top of the stack is dropped before re-running the body.
            while self.stack.peek() == ONE {
                self.decoder.repeat();
                self.execute_op(Operation::Drop, program, host)?;
                self.execute_mast_node(node.body(), program, host)?;
            }

            // The loop must terminate with a binary ZERO on top of the stack.
            if self.stack.peek() != ZERO {
                let err_ctx = ErrorContext::new(program, node, self.source_manager.clone());
                return Err(ExecutionError::not_binary_value_loop(self.stack.peek(), &err_ctx));
            }

            // `true` indicates the loop body was entered at least once.
            self.end_loop_node(node, true, program, host)
        } else if condition == ZERO {
            self.end_loop_node(node, false, program, host)
        } else {
            let err_ctx = ErrorContext::new(program, node, self.source_manager.clone());
            Err(ExecutionError::not_binary_value_loop(condition, &err_ctx))
        }
    }

    /// Executes a [CallNode]: verifies syscall constraints, then runs the callee in a new
    /// call frame.
    #[inline(always)]
    fn execute_call_node(
        &mut self,
        call_node: &CallNode,
        program: &MastForest,
        host: &mut impl Host,
    ) -> Result<(), ExecutionError> {
        // Nested calls/syscalls are not allowed while already inside a syscall.
        if self.system.in_syscall() {
            let instruction = if call_node.is_syscall() { "syscall" } else { "call" };
            return Err(ExecutionError::CallInSyscall(instruction));
        }

        // For syscalls, the callee must be a procedure registered in the kernel ROM.
        if call_node.is_syscall() {
            let callee = program.get_node_by_id(call_node.callee()).ok_or_else(|| {
                ExecutionError::MastNodeNotFoundInForest { node_id: call_node.callee() }
            })?;
            let err_ctx = ErrorContext::new(program, call_node, self.source_manager.clone());
            self.chiplets.kernel_rom.access_proc(callee.digest(), &err_ctx)?;
        }
        let err_ctx = ErrorContext::new(program, call_node, self.source_manager.clone());

        self.start_call_node(call_node, program, host)?;
        self.execute_mast_node(call_node.callee(), program, host)?;
        self.end_call_node(call_node, program, host, &err_ctx)
    }

    /// Executes a [DynNode]: resolves the callee hash provided at runtime and executes the
    /// corresponding procedure, looking it up first in the current forest and then via the
    /// host.
    #[inline(always)]
    fn execute_dyn_node(
        &mut self,
        node: &DynNode,
        program: &MastForest,
        host: &mut impl Host,
    ) -> Result<(), ExecutionError> {
        // dyncall is disallowed inside a syscall (same restriction as call/syscall above).
        if node.is_dyncall() && self.system.in_syscall() {
            return Err(ExecutionError::CallInSyscall("dyncall"));
        }

        let error_ctx = ErrorContext::new(program, node, self.source_manager.clone());

        // The start handler yields the hash of the procedure to be invoked.
        let callee_hash = if node.is_dyncall() {
            self.start_dyncall_node(node, &error_ctx)?
        } else {
            self.start_dyn_node(node, program, host, &error_ctx)?
        };

        // Prefer a procedure in the current forest; otherwise ask the host for a forest
        // containing the callee.
        match program.find_procedure_root(callee_hash.into()) {
            Some(callee_id) => self.execute_mast_node(callee_id, program, host)?,
            None => {
                let mast_forest = host.get_mast_forest(&callee_hash.into()).ok_or_else(|| {
                    ExecutionError::dynamic_node_not_found(callee_hash.into(), &error_ctx)
                })?;

                // NOTE(review): `ok_or` constructs this error eagerly even on success
                // (`ok_or_else` would be lazier), and the constructor name `malfored_…` is
                // misspelled in the errors module — both worth a follow-up there.
                let root_id = mast_forest.find_procedure_root(callee_hash.into()).ok_or(
                    ExecutionError::malfored_mast_forest_in_host(
                        callee_hash.into(),
                        &ErrorContext::default(),
                    ),
                )?;

                self.execute_mast_node(root_id, &mast_forest, host)?
            },
        }

        if node.is_dyncall() {
            self.end_dyncall_node(node, program, host, &error_ctx)
        } else {
            self.end_dyn_node(node, program, host)
        }
    }

    /// Executes a [BasicBlockNode]: runs every operation batch in order, inserting a RESPAN
    /// (plus a NOOP cycle) between consecutive batches, then drains any remaining decorators.
    #[inline(always)]
    fn execute_basic_block_node(
        &mut self,
        basic_block: &BasicBlockNode,
        program: &MastForest,
        host: &mut impl Host,
    ) -> Result<(), ExecutionError> {
        self.start_basic_block_node(basic_block, program, host)?;

        // Tracks the absolute index of the next operation across batches so decorators can
        // be matched to the operations they precede.
        let mut op_offset = 0;
        let mut decorator_ids = basic_block.decorator_iter();

        // First batch executes immediately; subsequent batches require a RESPAN first.
        self.execute_op_batch(
            basic_block,
            &basic_block.op_batches()[0],
            &mut decorator_ids,
            op_offset,
            program,
            host,
        )?;
        op_offset += basic_block.op_batches()[0].ops().len();

        for op_batch in basic_block.op_batches().iter().skip(1) {
            // RESPAN consumes one cycle, modeled here by an explicit NOOP.
            self.respan(op_batch);
            self.execute_op(Operation::Noop, program, host)?;
            self.execute_op_batch(
                basic_block,
                op_batch,
                &mut decorator_ids,
                op_offset,
                program,
                host,
            )?;
            op_offset += op_batch.ops().len();
        }

        self.end_basic_block_node(basic_block, program, host)?;

        // Execute any decorators positioned after the last operation of the block.
        for &decorator_id in decorator_ids {
            let decorator = program
                .get_decorator_by_id(decorator_id)
                .ok_or(ExecutionError::DecoratorNotFoundInForest { decorator_id })?;
            self.execute_decorator(decorator, host)?;
        }

        Ok(())
    }

    /// Executes a single operation batch, maintaining the decoder's op-group bookkeeping:
    /// immediate values occupy their own groups, group boundaries may require NOOP padding,
    /// and the group count is padded to a power of two.
    #[inline(always)]
    fn execute_op_batch(
        &mut self,
        basic_block: &BasicBlockNode,
        batch: &OpBatch,
        decorators: &mut DecoratorIterator,
        op_offset: usize,
        program: &MastForest,
        host: &mut impl Host,
    ) -> Result<(), ExecutionError> {
        let op_counts = batch.op_counts();
        let mut op_idx = 0;      // index of the current op within its group
        let mut group_idx = 0;   // index of the group currently being executed
        let mut next_group_idx = 1; // index of the next group to start

        // The decoder pads the group count of each batch to a power of two.
        let num_batch_groups = batch.num_groups().next_power_of_two();

        for (i, &op) in batch.ops().iter().enumerate() {
            // Run all decorators attached to this operation position before executing it.
            while let Some(&decorator_id) = decorators.next_filtered(i + op_offset) {
                let decorator = program
                    .get_decorator_by_id(decorator_id)
                    .ok_or(ExecutionError::DecoratorNotFoundInForest { decorator_id })?;
                self.execute_decorator(decorator, host)?;
            }

            let error_ctx = ErrorContext::new_with_op_idx(
                program,
                basic_block,
                self.source_manager.clone(),
                i + op_offset,
            );
            self.decoder.execute_user_op(op, op_idx);
            self.execute_op_with_error_ctx(op, program, host, &error_ctx)?;

            // An op with an immediate value consumes an extra group for that immediate.
            let has_imm = op.imm_value().is_some();
            if has_imm {
                next_group_idx += 1;
            }

            // Detect the end of the current op group and advance to the next one.
            if op_idx == op_counts[group_idx] - 1 {
                // An immediate-carrying op cannot be the last op slot of a group; pad the
                // group with a NOOP cycle in that case.
                if has_imm {
                    debug_assert!(op_idx < OP_GROUP_SIZE - 1, "invalid op index");
                    self.decoder.execute_user_op(Operation::Noop, op_idx + 1);
                    self.execute_op(Operation::Noop, program, host)?;
                }

                group_idx = next_group_idx;
                next_group_idx += 1;
                op_idx = 0;

                // Start decoding the next group, unless all real groups are exhausted.
                if group_idx < num_batch_groups {
                    self.decoder.start_op_group(batch.groups()[group_idx]);
                }
            } else {
                op_idx += 1;
            }
        }

        // Pad out the batch with NOOPs for any remaining (power-of-two padding) groups.
        for group_idx in group_idx..num_batch_groups {
            self.decoder.execute_user_op(Operation::Noop, 0);
            self.execute_op(Operation::Noop, program, host)?;

            if group_idx < num_batch_groups - 1 {
                self.decoder.start_op_group(ZERO);
            }
        }

        Ok(())
    }

    /// Executes a single decorator: Debug/AsmOp take effect only in debug mode, Trace only
    /// when tracing is enabled.
    fn execute_decorator(
        &mut self,
        decorator: &Decorator,
        host: &mut impl Host,
    ) -> Result<(), ExecutionError> {
        match decorator {
            Decorator::Debug(options) => {
                if self.decoder.in_debug_mode() {
                    host.on_debug(self.into(), options)?;
                }
            },
            Decorator::AsmOp(assembly_op) => {
                if self.decoder.in_debug_mode() {
                    self.decoder.append_asmop(self.system.clk(), assembly_op.clone());
                }
            },
            Decorator::Trace(id) => {
                if self.enable_tracing {
                    host.on_trace(self.into(), *id)?;
                }
            },
        };
        Ok(())
    }

    // --------------------------------------------------------------------------------------------
    // ACCESSORS
    // --------------------------------------------------------------------------------------------

    /// Returns the kernel against which this process executes programs.
    pub const fn kernel(&self) -> &Kernel {
        self.chiplets.kernel_rom.kernel()
    }

    /// Consumes the process, returning its component parts (system, decoder, stack, range
    /// checker, chiplets) for trace construction.
    pub fn into_parts(self) -> (System, Decoder, Stack, RangeChecker, Chiplets) {
        (self.system, self.decoder, self.stack, self.range, self.chiplets)
    }
}
736
/// Borrowed view into the state of the slow ([`Process`]) processor.
#[derive(Debug, Clone, Copy)]
pub struct SlowProcessState<'a> {
    system: &'a System,
    stack: &'a Stack,
    chiplets: &'a Chiplets,
}

/// Borrowed view into the state of the [`FastProcessor`], pinned to a specific operation.
#[derive(Debug, Clone, Copy)]
pub struct FastProcessState<'a> {
    processor: &'a FastProcessor,
    // Index of the operation in flight; added to the processor clock in `clk()`.
    op_idx: usize,
}

/// Read-only snapshot of processor state exposed to hosts and debuggers, abstracting over
/// the slow and fast processor implementations.
#[derive(Debug, Clone, Copy)]
pub enum ProcessState<'a> {
    Slow(SlowProcessState<'a>),
    Fast(FastProcessState<'a>),
}
759
impl<'a> ProcessState<'a> {
    /// Wraps a [`FastProcessor`] and the index of the operation currently being executed into
    /// a process state.
    pub fn new_fast(processor: &'a FastProcessor, op_idx: usize) -> Self {
        Self::Fast(FastProcessState { processor, op_idx })
    }

    /// Returns the current clock cycle of the process.
    #[inline(always)]
    pub fn clk(&self) -> RowIndex {
        match self {
            ProcessState::Slow(state) => state.system.clk(),
            // The fast processor's clock is offset by the in-flight operation index.
            ProcessState::Fast(state) => state.processor.clk + state.op_idx,
        }
    }

    /// Returns the current execution context ID.
    #[inline(always)]
    pub fn ctx(&self) -> ContextId {
        match self {
            ProcessState::Slow(state) => state.system.ctx(),
            ProcessState::Fast(state) => state.processor.ctx,
        }
    }

    /// Returns the current value of the free memory pointer as an integer.
    #[inline(always)]
    pub fn fmp(&self) -> u64 {
        match self {
            ProcessState::Slow(state) => state.system.fmp().as_int(),
            ProcessState::Fast(state) => state.processor.fmp.as_int(),
        }
    }

    /// Returns the element at the specified position on the operand stack.
    #[inline(always)]
    pub fn get_stack_item(&self, pos: usize) -> Felt {
        match self {
            ProcessState::Slow(state) => state.stack.get(pos),
            ProcessState::Fast(state) => state.processor.stack_get(pos),
        }
    }

    /// Returns the word at the specified word index on the operand stack.
    #[inline(always)]
    pub fn get_stack_word(&self, word_idx: usize) -> Word {
        match self {
            ProcessState::Slow(state) => state.stack.get_word(word_idx),
            // The fast processor addresses the stack per element, so scale by the word size.
            ProcessState::Fast(state) => state.processor.stack_get_word(word_idx * WORD_SIZE),
        }
    }

    /// Returns the full state of the operand stack as a vector of elements.
    #[inline(always)]
    pub fn get_stack_state(&self) -> Vec<Felt> {
        match self {
            ProcessState::Slow(state) => state.stack.get_state_at(state.system.clk()),
            // Reversed so the element order matches the slow processor's convention.
            // NOTE(review): ordering inferred from the `.rev()`; confirm against
            // `Stack::get_state_at`.
            ProcessState::Fast(state) => state.processor.stack().iter().rev().copied().collect(),
        }
    }

    /// Returns the element stored at the specified context/address, or `None` if that address
    /// has not been written.
    #[inline(always)]
    pub fn get_mem_value(&self, ctx: ContextId, addr: u32) -> Option<Felt> {
        match self {
            ProcessState::Slow(state) => state.chiplets.memory.get_value(ctx, addr),
            ProcessState::Fast(state) => state.processor.memory.read_element_impl(ctx, addr),
        }
    }

    /// Returns the word stored at the specified context/address; errors are surfaced as
    /// [`ExecutionError`] (e.g. from the memory chiplet).
    #[inline(always)]
    pub fn get_mem_word(&self, ctx: ContextId, addr: u32) -> Result<Option<Word>, ExecutionError> {
        match self {
            ProcessState::Slow(state) => {
                state.chiplets.memory.get_word(ctx, addr).map_err(ExecutionError::MemoryError)
            },
            ProcessState::Fast(state) => {
                Ok(state.processor.memory.read_word_impl(ctx, addr, None)?.copied())
            },
        }
    }

    /// Returns the entire memory state for the specified context as (address, value) pairs.
    #[inline(always)]
    pub fn get_mem_state(&self, ctx: ContextId) -> Vec<(MemoryAddress, Felt)> {
        match self {
            ProcessState::Slow(state) => {
                state.chiplets.memory.get_state_at(ctx, state.system.clk())
            },
            ProcessState::Fast(state) => state.processor.memory.get_memory_state(ctx),
        }
    }
}
870
// A shared borrow of a process converts into a slow-processor state snapshot.
impl<'a> From<&'a Process> for ProcessState<'a> {
    fn from(process: &'a Process) -> Self {
        Self::Slow(SlowProcessState {
            system: &process.system,
            stack: &process.stack,
            chiplets: &process.chiplets,
        })
    }
}

// Same conversion from a mutable borrow; the snapshot itself is still read-only.
impl<'a> From<&'a mut Process> for ProcessState<'a> {
    fn from(process: &'a mut Process) -> Self {
        Self::Slow(SlowProcessState {
            system: &process.system,
            stack: &process.stack,
            chiplets: &process.chiplets,
        })
    }
}
893
894fn add_error_ctx_to_external_error(
900 result: Result<(), ExecutionError>,
901 err_ctx: ErrorContext<impl MastNodeExt>,
902) -> Result<(), ExecutionError> {
903 match result {
904 Ok(_) => Ok(()),
905 Err(err) => match err {
907 ExecutionError::NoMastForestWithProcedure { label, source_file: _, root_digest }
908 | ExecutionError::MalformedMastForestInHost { label, source_file: _, root_digest } => {
909 if label == SourceSpan::UNKNOWN {
910 let err_with_ctx =
911 ExecutionError::no_mast_forest_with_procedure(root_digest, &err_ctx);
912 Err(err_with_ctx)
913 } else {
914 Err(err)
918 }
919 },
920
921 _ => {
922 Err(err)
924 },
925 },
926 }
927}