1use alloc::vec::Vec;
2use core::slice;
3
4use miden_air::trace::MIN_TRACE_LEN;
5
6use super::chiplets::Chiplets;
7use crate::{Felt, RowIndex};
8#[cfg(test)]
9use crate::{operation::Operation, utils::ToElements};
10
/// Writes rows into a row-major matrix backed by a flat mutable slice.
#[derive(Debug)]
pub struct RowMajorTraceWriter<'a, E> {
    /// Flat backing buffer; row `i` occupies `data[i * width..(i + 1) * width]`.
    data: &'a mut [E],
    /// Number of elements per row.
    width: usize,
}

impl<'a, E: Copy> RowMajorTraceWriter<'a, E> {
    /// Wraps `data` as a row-major matrix with `width` elements per row.
    ///
    /// In debug builds, panics if the buffer length is not a multiple of `width`.
    pub fn new(data: &'a mut [E], width: usize) -> Self {
        debug_assert_eq!(data.len() % width, 0, "buffer length must be a multiple of width");
        Self { data, width }
    }

    /// Copies `values` into row `row` of the matrix.
    ///
    /// Panics if the target row lies outside the buffer; in debug builds, also panics if
    /// `values` is not exactly one row wide.
    #[inline(always)]
    pub fn write_row(&mut self, row: usize, values: &[E]) {
        debug_assert_eq!(values.len(), self.width);
        let offset = row * self.width;
        let dest = &mut self.data[offset..offset + self.width];
        dest.copy_from_slice(values);
    }
}
35
/// A mutable view into a contiguous set of trace columns, limited to `num_rows` rows.
///
/// Columns are attached one at a time (see `push_column_slice` in the impl); each attached
/// slice is expected to be exactly `num_rows` long.
pub struct TraceFragment<'a> {
    // One mutable slice per column, covering this fragment's rows of that column.
    data: Vec<&'a mut [Felt]>,
    // Number of rows in this fragment.
    num_rows: usize,
}
44
45impl<'a> TraceFragment<'a> {
46 pub fn new(num_columns: usize, num_rows: usize) -> Self {
50 TraceFragment {
51 data: Vec::with_capacity(num_columns),
52 num_rows,
53 }
54 }
55
56 pub fn width(&self) -> usize {
61 self.data.len()
62 }
63
64 pub fn len(&self) -> usize {
66 self.num_rows
67 }
68
69 #[inline(always)]
74 pub fn set(&mut self, row_idx: RowIndex, col_idx: usize, value: Felt) {
75 self.data[col_idx][row_idx] = value;
76 }
77
78 pub fn columns(&mut self) -> slice::IterMut<'_, &'a mut [Felt]> {
80 self.data.iter_mut()
81 }
82
83 pub fn push_column_slice(&mut self, column: &'a mut [Felt]) -> &'a mut [Felt] {
88 let (column_fragment, rest) = column.split_at_mut(self.num_rows);
89 self.data.push(column_fragment);
90 rest
91 }
92
93 #[cfg(test)]
97 pub fn trace_to_fragment(trace: &'a mut [Vec<Felt>]) -> Self {
98 assert!(!trace.is_empty(), "expected trace to have at least one column");
99 let mut data = Vec::new();
100 for column in trace.iter_mut() {
101 data.push(column.as_mut_slice());
102 }
103
104 let num_rows = data[0].len();
105 Self { data, num_rows }
106 }
107}
108
/// Summary of the (unpadded) lengths of the trace segments produced by one execution.
///
/// Holds the main trace, range checker trace, and per-chiplet trace lengths; the padded
/// length of the final trace is derived from these (see the impl below).
#[derive(Debug, Default, Eq, PartialEq, Clone, Copy)]
pub struct TraceLenSummary {
    // Number of rows in the main trace (before padding).
    main_trace_len: usize,
    // Number of rows required by the range checker.
    range_trace_len: usize,
    // Per-chiplet breakdown of the chiplets trace length.
    chiplets_trace_len: ChipletsLengths,
}
124
125impl TraceLenSummary {
126 pub fn new(
127 main_trace_len: usize,
128 range_trace_len: usize,
129 chiplets_trace_len: ChipletsLengths,
130 ) -> Self {
131 TraceLenSummary {
132 main_trace_len,
133 range_trace_len,
134 chiplets_trace_len,
135 }
136 }
137
138 pub fn main_trace_len(&self) -> usize {
140 self.main_trace_len
141 }
142
143 pub fn range_trace_len(&self) -> usize {
145 self.range_trace_len
146 }
147
148 pub fn chiplets_trace_len(&self) -> ChipletsLengths {
150 self.chiplets_trace_len
151 }
152
153 pub fn trace_len(&self) -> usize {
155 self.range_trace_len
156 .max(self.main_trace_len)
157 .max(self.chiplets_trace_len.trace_len())
158 }
159
160 pub fn padded_trace_len(&self) -> usize {
162 self.trace_len().next_power_of_two().max(MIN_TRACE_LEN)
163 }
164
165 pub fn padding_percentage(&self) -> usize {
168 (self.padded_trace_len() - self.trace_len()) * 100 / self.padded_trace_len()
169 }
170}
171
/// Lengths of the individual chiplet segments within the chiplets trace.
///
/// The chiplets are stacked one after another in a single trace segment, in the order:
/// hash, bitwise, memory, ACE, kernel ROM (this ordering is reflected by the start-offset
/// subtractions in `ChipletsLengths::new`).
#[derive(Default, Clone, Copy, Debug, PartialEq, Eq)]
pub struct ChipletsLengths {
    // Number of rows occupied by the hash chiplet.
    hash_chiplet_len: usize,
    // Number of rows occupied by the bitwise chiplet.
    bitwise_chiplet_len: usize,
    // Number of rows occupied by the memory chiplet.
    memory_chiplet_len: usize,
    // Number of rows occupied by the ACE chiplet.
    ace_chiplet_len: usize,
    // Number of rows occupied by the kernel ROM.
    kernel_rom_len: usize,
}
184
185impl ChipletsLengths {
186 pub fn new(chiplets: &Chiplets) -> Self {
187 ChipletsLengths {
188 hash_chiplet_len: chiplets.bitwise_start().into(),
189 bitwise_chiplet_len: chiplets.memory_start() - chiplets.bitwise_start(),
190 memory_chiplet_len: chiplets.ace_start() - chiplets.memory_start(),
191 ace_chiplet_len: chiplets.kernel_rom_start() - chiplets.ace_start(),
192 kernel_rom_len: chiplets.padding_start() - chiplets.kernel_rom_start(),
193 }
194 }
195
196 pub fn from_parts(
197 hash_len: usize,
198 bitwise_len: usize,
199 memory_len: usize,
200 ace_len: usize,
201 kernel_len: usize,
202 ) -> Self {
203 ChipletsLengths {
204 hash_chiplet_len: hash_len,
205 bitwise_chiplet_len: bitwise_len,
206 memory_chiplet_len: memory_len,
207 ace_chiplet_len: ace_len,
208 kernel_rom_len: kernel_len,
209 }
210 }
211
212 pub fn hash_chiplet_len(&self) -> usize {
214 self.hash_chiplet_len
215 }
216
217 pub fn bitwise_chiplet_len(&self) -> usize {
219 self.bitwise_chiplet_len
220 }
221
222 pub fn memory_chiplet_len(&self) -> usize {
224 self.memory_chiplet_len
225 }
226
227 pub fn ace_chiplet_len(&self) -> usize {
229 self.ace_chiplet_len
230 }
231
232 pub fn kernel_rom_len(&self) -> usize {
234 self.kernel_rom_len
235 }
236
237 pub fn trace_len(&self) -> usize {
241 self.hash_chiplet_len()
242 + self.bitwise_chiplet_len()
243 + self.memory_chiplet_len()
244 + self.ace_chiplet_len()
245 + self.kernel_rom_len()
246 + 1
247 }
248}
249
250pub(crate) fn split_element_u32_into_u16(value: Felt) -> (Felt, Felt) {
256 let (hi, lo) = split_u32_into_u16(value.as_canonical_u64());
257 (Felt::new_unchecked(hi as u64), Felt::new_unchecked(lo as u64))
258}
259
/// Splits a `u64` holding a 32-bit value into its high and low 16-bit halves, returned as
/// `(hi, lo)`.
///
/// In debug builds, panics if `value` does not fit into 32 bits.
pub(crate) fn split_u32_into_u16(value: u64) -> (u16, u16) {
    debug_assert!(value <= u64::from(u32::MAX), "not a 32-bit value");

    let hi = (value >> 16) as u16;
    let lo = (value & 0xFFFF) as u16;

    (hi, lo)
}
273
/// Test helper: builds an operation sequence (nine pushes, an add, and eight drops) long
/// enough to force a RESPAN, along with the pushed immediate values.
#[cfg(test)]
pub fn build_span_with_respan_ops() -> (Vec<Operation>, Vec<Felt>) {
    let iv = [1, 3, 5, 7, 9, 11, 13, 15, 17].to_elements();

    // Push every immediate, add the top two values, then drop the remaining nine elements
    // back off the stack.
    let mut ops: Vec<Operation> = iv.iter().map(|&value| Operation::Push(value)).collect();
    ops.push(Operation::Add);
    ops.extend((0..8).map(|_| Operation::Drop));

    (ops, iv)
}