1use core::ops::{Add, AddAssign, Mul, MulAssign, Sub, SubAssign};
6use zkboo::{
7 backend::{Backend, Frontend},
8 crypto::{Digest, Seed},
9 memory::{FlexibleMemoryManager, MemoryManager, RefCount},
10 word::{ByWordType, CompositeWord, Shape, Word, WordIdx},
11};
12
/// Estimated memory footprint, in bytes, split by where the bytes live.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub struct MemoryUsage {
    /// Bytes expected to live inline / on the stack (headers, fixed fields).
    pub stack: usize,
    /// Bytes expected to live on the heap (word payload buffers).
    pub heap: usize,
}
19
20impl MemoryUsage {
21 pub fn total(&self) -> usize {
23 return self.stack + self.heap;
24 }
25
26 pub fn from_num_words(num_words: Shape) -> MemoryUsage {
28 let usize_num_bytes = core::mem::size_of::<usize>();
29 let mut usage = MemoryUsage { heap: 0, stack: 0 };
30 let num_word_types = num_words.map(|_| 1).sum();
31 usage.stack += 3 * usize_num_bytes * num_word_types;
32 usage.heap += num_words.map_with_width(|w, n| w / 8 * n).sum();
33 return usage;
34 }
35}
36
37impl Add<Self> for MemoryUsage {
38 type Output = Self;
39
40 fn add(self, other: Self) -> Self {
42 return Self {
43 stack: self.stack + other.stack,
44 heap: self.heap + other.heap,
45 };
46 }
47}
48
49impl AddAssign<Self> for MemoryUsage {
50 fn add_assign(&mut self, other: Self) {
51 self.stack += other.stack;
52 self.heap += other.heap;
53 }
54}
55
56impl Sub<Self> for MemoryUsage {
57 type Output = Self;
58
59 fn sub(self, other: Self) -> Self {
61 return Self {
62 stack: self.stack - other.stack,
63 heap: self.heap - other.heap,
64 };
65 }
66}
67
68impl SubAssign<Self> for MemoryUsage {
69 fn sub_assign(&mut self, other: Self) {
70 self.stack -= other.stack;
71 self.heap -= other.heap;
72 }
73}
74
75impl Mul<usize> for MemoryUsage {
76 type Output = Self;
77
78 fn mul(self, rhs: usize) -> Self {
80 return Self {
81 stack: self.stack * rhs,
82 heap: self.heap * rhs,
83 };
84 }
85}
86
87impl MulAssign<usize> for MemoryUsage {
88 fn mul_assign(&mut self, rhs: usize) {
89 self.stack *= rhs;
90 self.heap *= rhs;
91 }
92}
93
/// Number of gates of each kind issued to the backend, tracked per
/// word type (`Shape` holds one counter per word width).
#[derive(Debug, Clone, Copy, Default)]
pub struct GateCounts {
    /// Input words fed into the circuit.
    pub input: Shape,
    /// Explicitly allocated words.
    pub alloc: Shape,
    /// Constant loads.
    pub constant: Shape,
    /// Composite words assembled from little-endian word arrays.
    pub from_le_words: Shape,
    /// Composite words decomposed into little-endian word arrays.
    pub to_le_words: Shape,
    /// Output words produced by the circuit.
    pub output: Shape,
    /// Bitwise NOT gates.
    pub not: Shape,
    /// Bitwise XOR gates (word-with-word).
    pub bitxor: Shape,
    /// Bitwise AND gates (word-with-word).
    pub bitand: Shape,
    /// XOR with a constant operand.
    pub bitxor_const: Shape,
    /// AND with a constant operand.
    pub bitand_const: Shape,
    /// Left shifts whose shift amount may reach/exceed the word width.
    pub unbounded_shl: Shape,
    /// Right shifts whose shift amount may reach/exceed the word width.
    pub unbounded_shr: Shape,
    /// Left rotations.
    pub rotate_left: Shape,
    /// Right rotations.
    pub rotate_right: Shape,
    /// Bit-order reversals.
    pub reverse_bits: Shape,
    /// Byte-order swaps.
    pub swap_bytes: Shape,
    /// Word-type casts, indexed by source word type, then destination.
    pub cast: ByWordType<Shape>,
    /// Carry gates (take propagate/generate words plus a carry-in).
    pub carry: Shape,
}
118
/// Aggregated statistics collected by `ProfilingBackend` over one run;
/// returned from `Backend::finalize`.
#[derive(Debug, Clone, Copy, Default)]
pub struct ProfilingData {
    /// How many gates of each kind were executed.
    gate_counts: GateCounts,
    /// Final allocator pool size per word type.
    state_size: Shape,
    /// Peak number of simultaneously live word references.
    max_live_wordrefs: Shape,
    /// Peak sum of all outstanding reference counts.
    max_cumulative_refcount: Shape,
    /// Peak reference count observed on any single word.
    max_refcount: Shape,
}
128
/// Sizes of the components of a single proof response.
#[derive(Debug, Clone, Copy, Default)]
pub struct ResponseData {
    /// AND/carry-message words revealed in a response.
    and_msg_size: Shape,
    /// Input-share words revealed in a response.
    input_share_size: Shape,
}
135
136impl ResponseData {
137 pub fn and_msg_size(&self) -> Shape {
139 return self.and_msg_size;
140 }
141
142 pub fn input_share_size(&self) -> Shape {
144 return self.input_share_size;
145 }
146
147 pub fn mem_usage<D: Digest, S: Seed>(&self) -> MemoryUsage {
149 let mut usage = MemoryUsage { heap: 0, stack: 0 };
150 usage.stack += core::mem::size_of::<u8>(); usage.stack += 2 * core::mem::size_of::<S>(); usage.stack += core::mem::size_of::<D>(); usage.heap += self.and_msg_size.map_with_width(|w, n| w / 8 * n).sum();
154 usage.heap += self.input_share_size.map_with_width(|w, n| w / 8 * n).sum();
155 return usage;
156 }
157}
158
/// Sizes of the per-party view data (scaled by the three MPC parties
/// where all views are covered — see `ProfilingData::views_data`).
#[derive(Debug, Clone, Copy, Default)]
pub struct ViewsData {
    /// AND/carry messages across all three parties.
    pub and_msgs_size: Shape,
    /// Input share held by the second party.
    pub input_share2_size: Shape,
    /// Output shares across all three parties.
    pub output_shares_size: Shape,
}
166
167impl ViewsData {
168 pub fn and_msgs_size(&self) -> Shape {
170 return self.and_msgs_size;
171 }
172
173 pub fn input_share2_size(&self) -> Shape {
175 return self.input_share2_size;
176 }
177
178 pub fn output_shares_size(&self) -> Shape {
180 return self.output_shares_size;
181 }
182}
183
184impl ProfilingData {
185 pub fn state_size(&self) -> Shape {
187 return self.state_size;
188 }
189
190 pub fn gate_counts(&self) -> &GateCounts {
192 return &self.gate_counts;
193 }
194
195 pub fn max_live_wordrefs(&self) -> Shape {
198 return self.max_live_wordrefs;
199 }
200
201 pub fn max_cumulative_refcount(&self) -> Shape {
204 return self.max_cumulative_refcount;
205 }
206
207 pub fn max_refcount(&self) -> Shape {
209 return self.max_refcount;
210 }
211
212 pub fn and_msg_size(&self) -> Shape {
214 return self
215 .gate_counts
216 .bitand
217 .zip(&self.gate_counts.carry, |bitand_count, carry_count| {
218 bitand_count + carry_count
219 });
220 }
221
222 pub fn response_data(&self) -> ResponseData {
224 return ResponseData {
225 and_msg_size: self.and_msg_size(),
226 input_share_size: self.gate_counts.input,
227 };
228 }
229
230 pub fn views_data(&self) -> ViewsData {
232 return ViewsData {
233 and_msgs_size: self.and_msg_size().map(|n| n * 3),
234 input_share2_size: self.gate_counts.input,
235 output_shares_size: self.gate_counts.output.map(|n| n * 3),
236 };
237 }
238
239 pub fn state_mem_usage(&self) -> MemoryUsage {
241 return MemoryUsage::from_num_words(self.state_size);
242 }
243
244 pub fn output_mem_usage(&self) -> MemoryUsage {
246 return MemoryUsage::from_num_words(self.gate_counts.output);
247 }
248
249 pub fn wordrefs_mem_usage(&self) -> MemoryUsage {
252 let usize_num_bytes = core::mem::size_of::<usize>();
253 let mut usage = MemoryUsage { heap: 0, stack: 0 };
254 let num_wordrefs = self.max_live_wordrefs.sum();
255 let num_idxs = self.max_cumulative_refcount.sum();
256 usage.stack += usize_num_bytes * num_wordrefs; usage.stack += usize_num_bytes * num_idxs; return usage;
259 }
260
261 pub fn memory_manager_mem_usage<RC: RefCount>(&self) -> MemoryUsage {
264 let usize_num_bytes = core::mem::size_of::<usize>();
265 let rc_num_bytes = core::mem::size_of::<RC>();
266 let mut usage = MemoryUsage { heap: 0, stack: 0 };
267 let state_size = self.state_size;
268 let num_word_types = state_size.map(|_| 1).sum();
269 usage.stack += 3 * usize_num_bytes * num_word_types; usage.stack += 6 * usize_num_bytes * num_word_types; usage.heap += rc_num_bytes * state_size.sum(); usage.heap += rc_num_bytes * state_size.map(|v| (v + 63) / 64).sum(); return usage;
274 }
275
276 pub fn executor_mem_usage<RC: RefCount>(&self) -> MemoryUsage {
279 let mut usage = MemoryUsage { heap: 0, stack: 0 };
280 usage += self.state_mem_usage();
281 usage += self.wordrefs_mem_usage();
282 usage += self.memory_manager_mem_usage::<RC>();
283 usage += self.output_mem_usage();
284 return usage;
285 }
286
287 pub fn prover_mem_usage<RC: RefCount>(&self) -> MemoryUsage {
290 let mut usage = MemoryUsage { heap: 0, stack: 0 };
291 usage += self.state_mem_usage() * 3;
292 usage += self.wordrefs_mem_usage();
293 usage += self.memory_manager_mem_usage::<RC>();
294 return usage;
295 }
296
297 pub fn verifier_mem_usage<RC: RefCount>(&self) -> MemoryUsage {
300 let mut usage = MemoryUsage { heap: 0, stack: 0 };
301 usage += self.state_mem_usage() * 2;
302 usage += self.wordrefs_mem_usage();
303 usage += self.memory_manager_mem_usage::<RC>();
304 return usage;
305 }
306}
307
/// A `Backend` that performs no real computation: every gate callback
/// only bumps counters and tracks allocation/refcount high-water marks,
/// so circuit costs can be estimated without running the cryptography.
#[derive(Debug)]
pub struct ProfilingBackend {
    /// Accumulated statistics, returned by `finalize`.
    data: ProfilingData,
    /// Real allocator, driven so indices and sizes match an actual run.
    memory_manager: FlexibleMemoryManager<usize>,
    /// Currently live word references (current value, not the peak).
    live_wordrefs: Shape,
    /// Current sum of all reference counts (current value, not the peak).
    cumulative_refcount: Shape,
}
316
317impl ProfilingBackend {
318 pub fn new() -> Self {
320 return Self {
321 data: ProfilingData::default(),
322 memory_manager: FlexibleMemoryManager::new(),
323 live_wordrefs: Shape::zero(),
324 cumulative_refcount: Shape::zero(),
325 };
326 }
327
328 pub fn into_profiler(self) -> Frontend<Self> {
332 return self.into_frontend();
333 }
334}
335
/// Counting implementation of `Backend`: each gate callback increments
/// the matching `GateCounts` field by the number of words touched and
/// keeps the allocator/refcount statistics up to date.
impl Backend for ProfilingBackend {
    // Finalization needs no extra input and yields the collected stats.
    type FinalizeArg = ();
    type FinalizeResult = ProfilingData;

    /// Consumes the backend, returning the accumulated statistics.
    fn finalize(self, _arg: Self::FinalizeArg) -> Self::FinalizeResult {
        return self.data;
    }

    /// Records a circuit input: allocates a slot (so indices match a
    /// real run), counts `N` input words, and snapshots the allocator's
    /// pool size for word type `W`.
    fn input<W: Word, const N: usize>(&mut self, _word: CompositeWord<W, N>) -> WordIdx<W, N> {
        let (idx, size) = self.memory_manager.alloc::<W, N>();
        *self.data.gate_counts.input.as_value_mut::<W>() += N;
        // Plain assignment (not max/accumulate): assumes the pool size
        // reported by `alloc` grows monotonically — TODO confirm.
        *self.data.state_size.as_value_mut::<W>() = size;
        return idx;
    }

    /// Records an explicit allocation; bookkeeping mirrors `input`.
    fn alloc<W: Word, const N: usize>(&mut self) -> WordIdx<W, N> {
        let (idx, size) = self.memory_manager.alloc::<W, N>();
        *self.data.gate_counts.alloc.as_value_mut::<W>() += N;
        // Same monotone-pool-size assumption as in `input`.
        *self.data.state_size.as_value_mut::<W>() = size;
        return idx;
    }

    /// Counts a constant load into an already-allocated slot.
    fn constant<W: Word, const N: usize>(
        &mut self,
        _word: CompositeWord<W, N>,
        _out: WordIdx<W, N>,
    ) {
        *self.data.gate_counts.constant.as_value_mut::<W>() += N;
    }

    /// Counts assembling a composite word from `N` little-endian words.
    fn from_le_words<W: Word, const N: usize>(
        &mut self,
        _ins: [WordIdx<W, 1>; N],
        _out: WordIdx<W, N>,
    ) {
        *self.data.gate_counts.from_le_words.as_value_mut::<W>() += N;
    }

    /// Counts splitting a composite word into `N` little-endian words.
    fn to_le_words<W: Word, const N: usize>(
        &mut self,
        _in_: WordIdx<W, N>,
        _outs: [WordIdx<W, 1>; N],
    ) {
        *self.data.gate_counts.to_le_words.as_value_mut::<W>() += N;
    }

    /// Counts a circuit output.
    fn output<W: Word, const N: usize>(&mut self, _out: WordIdx<W, N>) {
        *self.data.gate_counts.output.as_value_mut::<W>() += N;
    }

    /// Forwards the refcount bump to the real allocator, then updates
    /// the three high-water marks: cumulative refcount, live word
    /// references, and the per-word maximum refcount.
    fn increase_refcount<W: Word, const N: usize>(&mut self, idx: WordIdx<W, N>) {
        self.memory_manager.increase_refcount(idx);
        let cumulative_refcount = self.cumulative_refcount.as_value_mut::<W>();
        let max_cumulative_refcount = self.data.max_cumulative_refcount.as_value_mut::<W>();
        *cumulative_refcount += N;
        // Note: `>` on two `&mut usize` compares the pointed-to values.
        if cumulative_refcount > max_cumulative_refcount {
            *max_cumulative_refcount = *cumulative_refcount;
        }
        let live_wordrefs = self.live_wordrefs.as_value_mut::<W>();
        let max_live_wordrefs = self.data.max_live_wordrefs.as_value_mut::<W>();
        // One reference covers the whole composite word (+= 1, not
        // += N), matching the -= 1 in `decrease_refcount`.
        *live_wordrefs += 1;
        if live_wordrefs > max_live_wordrefs {
            *max_live_wordrefs = *live_wordrefs;
        }
        // Peak refcount is sampled from the first word of the composite;
        // assumes all N words share the same refcount — TODO confirm.
        let refcount = self.memory_manager.refcounts().as_vec::<W>()[idx.into_array()[0]];
        let max_refcount = self.data.max_refcount.as_value_mut::<W>();
        if refcount > *max_refcount {
            *max_refcount = refcount;
        }
    }

    /// Forwards the refcount drop and reverses the running counters
    /// (the high-water marks are deliberately left untouched).
    fn decrease_refcount<W: Word, const N: usize>(&mut self, idx: WordIdx<W, N>) {
        self.memory_manager.decrease_refcount(idx);
        *self.cumulative_refcount.as_value_mut::<W>() -= N;
        *self.live_wordrefs.as_value_mut::<W>() -= 1;
    }

    /// Counts a bitwise NOT.
    fn not<W: Word, const N: usize>(&mut self, _in_: WordIdx<W, N>, _out: WordIdx<W, N>) {
        *self.data.gate_counts.not.as_value_mut::<W>() += N;
    }

    /// Counts a word-with-word XOR.
    fn bitxor<W: Word, const N: usize>(
        &mut self,
        _inl: WordIdx<W, N>,
        _inr: WordIdx<W, N>,
        _out: WordIdx<W, N>,
    ) {
        *self.data.gate_counts.bitxor.as_value_mut::<W>() += N;
    }

    /// Counts a word-with-word AND.
    fn bitand<W: Word, const N: usize>(
        &mut self,
        _inl: WordIdx<W, N>,
        _inr: WordIdx<W, N>,
        _out: WordIdx<W, N>,
    ) {
        *self.data.gate_counts.bitand.as_value_mut::<W>() += N;
    }

    /// Counts an XOR with a constant operand.
    fn bitxor_const<W: Word, const N: usize>(
        &mut self,
        _inl: WordIdx<W, N>,
        _inr: CompositeWord<W, N>,
        _out: WordIdx<W, N>,
    ) {
        *self.data.gate_counts.bitxor_const.as_value_mut::<W>() += N;
    }

    /// Counts an AND with a constant operand.
    fn bitand_const<W: Word, const N: usize>(
        &mut self,
        _inl: WordIdx<W, N>,
        _inr: CompositeWord<W, N>,
        _out: WordIdx<W, N>,
    ) {
        *self.data.gate_counts.bitand_const.as_value_mut::<W>() += N;
    }

    /// Counts an unbounded left shift. For composite words (N > 1) the
    /// shift is counted as its lowering — N rotates, 2N constant ANDs,
    /// and N XORs — rather than as a single shift gate.
    fn unbounded_shl<W: Word, const N: usize>(
        &mut self,
        _in_: WordIdx<W, N>,
        _shift: usize,
        _out: WordIdx<W, N>,
    ) {
        if N == 1 {
            *self.data.gate_counts.unbounded_shl.as_value_mut::<W>() += 1;
        } else {
            *self.data.gate_counts.rotate_left.as_value_mut::<W>() += N;
            *self.data.gate_counts.bitand_const.as_value_mut::<W>() += 2 * N;
            *self.data.gate_counts.bitxor.as_value_mut::<W>() += N;
        }
    }

    /// Counts an unbounded right shift; same lowering-based counting as
    /// `unbounded_shl`, but rotating right.
    fn unbounded_shr<W: Word, const N: usize>(
        &mut self,
        _in_: WordIdx<W, N>,
        _shift: usize,
        _out: WordIdx<W, N>,
    ) {
        if N == 1 {
            *self.data.gate_counts.unbounded_shr.as_value_mut::<W>() += 1;
        } else {
            *self.data.gate_counts.rotate_right.as_value_mut::<W>() += N;
            *self.data.gate_counts.bitand_const.as_value_mut::<W>() += 2 * N;
            *self.data.gate_counts.bitxor.as_value_mut::<W>() += N;
        }
    }

    /// Counts a left rotation. Composite rotations additionally cost
    /// the mask/xor recombination gates of their lowering.
    fn rotate_left<W: Word, const N: usize>(
        &mut self,
        _in_: WordIdx<W, N>,
        _shift: usize,
        _out: WordIdx<W, N>,
    ) {
        if N == 1 {
            *self.data.gate_counts.rotate_left.as_value_mut::<W>() += 1;
        } else {
            *self.data.gate_counts.rotate_left.as_value_mut::<W>() += N;
            *self.data.gate_counts.bitand_const.as_value_mut::<W>() += 2 * N;
            *self.data.gate_counts.bitxor.as_value_mut::<W>() += N;
        }
    }

    /// Counts a right rotation; mirror of `rotate_left`.
    fn rotate_right<W: Word, const N: usize>(
        &mut self,
        _in_: WordIdx<W, N>,
        _shift: usize,
        _out: WordIdx<W, N>,
    ) {
        if N == 1 {
            *self.data.gate_counts.rotate_right.as_value_mut::<W>() += 1;
        } else {
            *self.data.gate_counts.rotate_right.as_value_mut::<W>() += N;
            *self.data.gate_counts.bitand_const.as_value_mut::<W>() += 2 * N;
            *self.data.gate_counts.bitxor.as_value_mut::<W>() += N;
        }
    }

    /// Counts a bit-order reversal.
    fn reverse_bits<W: Word, const N: usize>(&mut self, _in_: WordIdx<W, N>, _out: WordIdx<W, N>) {
        *self.data.gate_counts.reverse_bits.as_value_mut::<W>() += N;
    }

    /// Counts a byte-order swap.
    fn swap_bytes<W: Word, const N: usize>(&mut self, _in_: WordIdx<W, N>, _out: WordIdx<W, N>) {
        *self.data.gate_counts.swap_bytes.as_value_mut::<W>() += N;
    }

    /// Counts a cast in the (source `W`, destination `T`) bucket.
    fn cast<W: Word, T: Word>(&mut self, _in_: WordIdx<W, 1>, _out: WordIdx<T, 1>) {
        *self
            .data
            .gate_counts
            .cast
            .as_value_mut::<W>()
            .as_value_mut::<T>() += 1;
    }

    /// Counts a carry gate (propagate word, generate word, carry-in).
    fn carry<W: Word, const N: usize>(
        &mut self,
        _p: WordIdx<W, N>,
        _g: WordIdx<W, N>,
        _carry_in: bool,
        _out: WordIdx<W, N>,
    ) {
        *self.data.gate_counts.carry.as_value_mut::<W>() += N;
    }
}