use alloc::{
    string::{String, ToString},
    vec::Vec,
};
use core::fmt;

use miden_air::RowIndex;
use vm_core::{AssemblyOp, Operation, StackOutputs};

use crate::{
    range::RangeChecker, system::ContextId, Chiplets, ChipletsLengths, Decoder, ExecutionError,
    Felt, Process, Stack, System, TraceLenSummary,
};

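/// Holds the state of the VM at a specific clock cycle: the operation executed at that cycle,
/// the assembly instruction this operation is a part of (if available), and snapshots of the
/// free memory pointer, stack, and memory.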
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct VmState {
    pub clk: RowIndex,
    pub ctx: ContextId,
    pub op: Option<Operation>,
    pub asmop: Option<AsmOpInfo>,
    pub fmp: Felt,
    pub stack: Vec<Felt>,
    pub memory: Vec<(u64, Felt)>,
}

impl fmt::Display for VmState {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let stack: Vec<u64> = self.stack.iter().map(|x| x.as_int()).collect();
        write!(
            f,
            "clk={}{}{}, fmp={}, stack={stack:?}, memory={:?}",
            self.clk,
            match self.op {
                Some(op) => format!(", op={op}"),
                None => "".to_string(),
            },
            match &self.asmop {
                Some(op) => format!(", {op}"),
                None => "".to_string(),
            },
            self.fmp,
            self.memory
        )
    }
}

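/// Iterator over the states of the VM, yielding one [VmState] per clock cycle of an execution.
///
/// If the execution ended in an error, the iterator yields that error once, after the last
/// reached state. The [back](Self::back) method allows stepping in reverse for debugging.
///
/// A minimal usage sketch (illustrative only; assumes `process` has already finished executing
/// a program and `result` holds its `Result<StackOutputs, ExecutionError>`):
///
/// ```ignore
/// for state in VmStateIterator::new(process, result) {
///     match state {
///         Ok(state) => println!("{state}"),
///         Err(err) => eprintln!("execution failed: {err}"),
///     }
/// }
/// ```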
pub struct VmStateIterator {
    chiplets: Chiplets,
    decoder: Decoder,
    stack: Stack,
    system: System,
    error: Option<ExecutionError>,
    clk: RowIndex,
    asmop_idx: usize,
    forward: bool,
    trace_len_summary: TraceLenSummary,
}

impl VmStateIterator {
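    /// Returns a new [VmStateIterator] instantiated from the specified process and the result
    /// of its execution.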
    pub fn new(process: Process, result: Result<StackOutputs, ExecutionError>) -> Self {
        let (system, decoder, stack, mut range, chiplets) = process.into_parts();
        let trace_len_summary = Self::build_trace_len_summary(&system, &mut range, &chiplets);

        Self {
            chiplets,
            decoder,
            stack,
            system,
            error: result.err(),
            clk: RowIndex::from(0),
            asmop_idx: 0,
            forward: true,
            trace_len_summary,
        }
    }

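    /// Returns the [AsmOpInfo] corresponding to the current clock cycle, together with a flag
    /// indicating whether this cycle is the first cycle of that assembly instruction.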
    fn get_asmop(&self) -> (Option<AsmOpInfo>, bool) {
        let assembly_ops = self.decoder.debug_info().assembly_ops();

        if self.clk == 0 || assembly_ops.is_empty() || self.asmop_idx > assembly_ops.len() {
            return (None, false);
        }

        // tracks the next assembly op in the list; it is the same as the current op when the
        // current op is the last one in the list or when iterating backward
        let next_asmop = if self.forward && self.asmop_idx < assembly_ops.len() {
            &assembly_ops[self.asmop_idx]
        } else {
            &assembly_ops[self.asmop_idx.saturating_sub(1)]
        };

        // tracks the current assembly op in the list together with the number of cycles elapsed
        // since that op started
        let (curr_asmop, cycle_idx) = if self.asmop_idx > 0 {
            let a = self.clk;
            let b = RowIndex::from(assembly_ops[self.asmop_idx - 1].0);
            (
                &assembly_ops[self.asmop_idx - 1],
                // distance between the current clock cycle and the op's start cycle
                (a.max(b) - a.min(b)) as u8,
            )
        } else {
            (next_asmop, 0) // dummy value; never read because the branch below checks asmop_idx > 0
        };

        // if this is the first cycle of the next asmop, return a new instance of [AsmOpInfo]
        // based on the next asmop; cycle_idx starts at 1 instead of 0 to remove ambiguity
        if next_asmop.0 == (self.clk - 1).as_usize() {
            let cycle_idx = 1;
            let asmop = AsmOpInfo::new(next_asmop.1.clone(), cycle_idx);
            (Some(asmop), true)
        }
        // if we are still within the cycles of the current asmop, return a new instance of
        // [AsmOpInfo] based on the current asmop and the current cycle index
        else if self.asmop_idx > 0 && cycle_idx <= curr_asmop.1.num_cycles() {
            let asmop = AsmOpInfo::new(curr_asmop.1.clone(), cycle_idx);
            (Some(asmop), false)
        }
        // otherwise, the current clock cycle precedes the first asmop in the list
        else {
            (None, false)
        }
    }

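    /// Returns the [VmState] at the previous clock cycle, or None if the iterator is already at
    /// the first cycle. This enables stepping backward through an execution for debugging.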
    pub fn back(&mut self) -> Option<VmState> {
        if self.clk == 0 {
            return None;
        }

        // when switching from forward to backward iteration, bring clk back to the last cycle
        // returned by next(), which leaves clk one cycle past the state it yielded
        if self.forward {
            self.clk = self.clk.saturating_sub(1);
            self.forward = false;
        }

        let ctx = self.system.get_ctx_at(self.clk);

        // no operation is executed at cycle 0
        let op = if self.clk == 0 {
            None
        } else {
            Some(self.decoder.debug_info().operations()[self.clk - 1])
        };

        let (asmop, is_start) = self.get_asmop();
        if is_start {
            self.asmop_idx -= 1;
        }

        let result = Some(VmState {
            clk: self.clk,
            ctx,
            op,
            asmop,
            fmp: self.system.get_fmp_at(self.clk),
            stack: self.stack.get_state_at(self.clk),
            memory: self.chiplets.memory().get_state_at(ctx, self.clk),
        });

        self.clk -= 1;

        result
    }

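    /// Consumes the iterator and returns its system, decoder, stack, and chiplets components,
    /// along with the execution error (if any).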
    pub fn into_parts(self) -> (System, Decoder, Stack, Chiplets, Option<ExecutionError>) {
        (self.system, self.decoder, self.stack, self.chiplets, self.error)
    }

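    /// Returns a summary of the lengths of all components of the execution trace.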
    pub fn trace_len_summary(&self) -> &TraceLenSummary {
        &self.trace_len_summary
    }

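    /// Builds a [TraceLenSummary] from the lengths of each component of the execution trace.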
    fn build_trace_len_summary(
        system: &System,
        range: &mut RangeChecker,
        chiplets: &Chiplets,
    ) -> TraceLenSummary {
        let clk = system.clk();
        let range_table_len = range.get_number_range_checker_rows();
        chiplets.append_range_checks(range);

        TraceLenSummary::new(clk.into(), range_table_len, ChipletsLengths::new(chiplets))
    }
}

impl Iterator for VmStateIterator {
    type Item = Result<VmState, ExecutionError>;

    fn next(&mut self) -> Option<Self::Item> {
        // once we move past the last clock cycle, return the execution error (if any) exactly
        // once, then signal the end of iteration
        if self.clk > self.system.clk() {
            match &self.error {
                Some(_) => {
                    let error = core::mem::take(&mut self.error);
                    return Some(Err(error.unwrap()));
                },
                None => return None,
            }
        }

        // when switching from backward to forward iteration, advance clk, which back() left one
        // cycle below the last state it yielded
        if !self.forward && self.clk < self.system.clk() {
            self.clk += 1;
            self.forward = true;
        }

        let ctx = self.system.get_ctx_at(self.clk);

        // no operation is executed at cycle 0
        let op = if self.clk == 0 {
            None
        } else {
            Some(self.decoder.debug_info().operations()[self.clk - 1])
        };

        let (asmop, is_start) = self.get_asmop();
        if is_start {
            self.asmop_idx += 1;
        }

        let result = Some(Ok(VmState {
            clk: self.clk,
            ctx,
            op,
            asmop,
            fmp: self.system.get_fmp_at(self.clk),
            stack: self.stack.get_state_at(self.clk),
            memory: self.chiplets.memory().get_state_at(ctx, self.clk),
        }));

        self.clk += 1;

        result
    }
}

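/// Pairs an [AssemblyOp] decorator with the cycle index of the VM operation currently executing
/// within that assembly instruction (the first operation has index 1, not 0).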
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct AsmOpInfo {
    asmop: AssemblyOp,
    cycle_idx: u8,
}

impl AsmOpInfo {
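    /// Returns a new [AsmOpInfo] instantiated with the specified assembly instruction and the
    /// cycle index of the currently executing VM operation within that instruction.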
    pub fn new(asmop: AssemblyOp, cycle_idx: u8) -> Self {
        Self { asmop, cycle_idx }
    }

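    /// Returns the context name of this assembly instruction.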
    pub fn context_name(&self) -> &str {
        self.asmop.context_name()
    }

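    /// Returns the assembly instruction as a string.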
    pub fn op(&self) -> &str {
        self.asmop.op()
    }

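    /// Returns the generalized form of the assembly instruction: any trailing numeric immediate
    /// is stripped (e.g., `push.2` becomes `push`), except for `movdn` and `movup` instructions,
    /// whose parameters are kept.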
    pub fn op_generalized(&self) -> String {
        let op_vec: Vec<&str> = self.op().split('.').collect();
        // movdn/movup are parametrized by stack position, so their parameters are meaningful
        // and must be preserved
        let keep_params = matches!(op_vec[0], "movdn" | "movup");
        if !keep_params && op_vec.last().unwrap().parse::<usize>().is_ok() {
            op_vec.split_last().unwrap().1.join(".")
        } else {
            self.op().to_string()
        }
    }

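    /// Returns the number of VM cycles taken to execute this assembly instruction.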
    pub fn num_cycles(&self) -> u8 {
        self.asmop.num_cycles()
    }

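    /// Returns the 1-based index of the currently executing VM operation within the sequence of
    /// operations corresponding to this assembly instruction.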
    pub fn cycle_idx(&self) -> u8 {
        self.cycle_idx
    }

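    /// Returns true if a breakpoint is set for this assembly instruction.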
    pub const fn should_break(&self) -> bool {
        self.asmop.should_break()
    }
}

impl fmt::Display for AsmOpInfo {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}, cycles={}", self.asmop, self.cycle_idx)
    }
}

impl AsRef<AssemblyOp> for AsmOpInfo {
    #[inline]
    fn as_ref(&self) -> &AssemblyOp {
        &self.asmop
    }
}