miden_crypto/lib.rs
#![no_std]

#[macro_use]
extern crate alloc;
#[cfg(feature = "std")]
extern crate std;

use field::PrimeCharacteristicRing;

pub mod aead;
pub mod dsa;
pub mod ecdh;
pub mod hash;
pub mod ies;
pub mod merkle;
pub mod rand;
pub mod utils;
pub mod word;

// RE-EXPORTS
// ================================================================================================
pub use p3_goldilocks::Goldilocks as Felt;
pub use word::{Word, WordError};

pub mod field {
    //! Traits and utilities for working with the Goldilocks finite field (i.e.,
    //! [Felt](super::Felt)).
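    //!
    //! A minimal usage sketch of the re-exported field traits (the values are illustrative
    //! only):
    //!
    //! ```
    //! use miden_crypto::field::Field;
    //! use miden_crypto::{Felt, ONE};
    //!
    //! let x: Felt = ONE + ONE;
    //! assert_eq!(x * x.inverse(), ONE);
    //! ```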

    pub use p3_field::{
        BasedVectorSpace, ExtensionField, Field, PrimeCharacteristicRing, PrimeField64,
        TwoAdicField, batch_multiplicative_inverse, extension::BinomialExtensionField,
        integers::QuotientMap,
    };

    pub use super::batch_inversion::batch_inversion_allow_zeros;
}

pub mod parallel {
    //! Conditional parallel iteration primitives.
    //!
    //! When the `concurrent` feature is enabled, this module re-exports parallel iterator
    //! traits from `p3-maybe-rayon` backed by rayon. Without `concurrent`, these traits
    //! fall back to sequential iteration.
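    //!
    //! A minimal sketch of the chunked pattern used elsewhere in this crate (the data is
    //! illustrative only). With `concurrent` enabled, the closure runs on the rayon thread
    //! pool; otherwise the chunks are processed sequentially:
    //!
    //! ```
    //! use miden_crypto::parallel::*;
    //! use miden_crypto::{Felt, ONE};
    //!
    //! let mut values: Vec<Felt> = vec![ONE; 8];
    //! values.par_chunks_mut(4).for_each(|chunk| {
    //!     for value in chunk.iter_mut() {
    //!         *value += ONE;
    //!     }
    //! });
    //! assert!(values.iter().all(|&value| value == ONE + ONE));
    //! ```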
    pub use p3_maybe_rayon::prelude::*;
}

pub mod stark {
    //! Foundational components for the STARK proving system based on Plonky3.
    //!
    //! This module contains the components needed to build a STARK prover/verifier and to
    //! define the Algebraic Intermediate Representation (AIR) for the Miden VM and other
    //! components. It primarily consists of re-exports from the Plonky3 project with some
    //! Miden-specific adaptations.
    pub use p3_miden_prover::{
        Commitments, Domain, Entry, OpenedValues, PackedChallenge, PackedVal, PcsError, Proof,
        ProverConstraintFolder, StarkConfig, StarkGenericConfig, SymbolicAirBuilder,
        SymbolicExpression, SymbolicVariable, Val, VerificationError, VerifierConstraintFolder,
        generate_logup_trace, get_log_quotient_degree, get_max_constraint_degree,
        get_symbolic_constraints, prove, quotient_values, recompose_quotient_from_chunks, verify,
        verify_constraints,
    };

    pub mod air {
        pub use p3_air::{
            Air, AirBuilder, AirBuilderWithPublicValues, BaseAir, BaseAirWithPublicValues,
            ExtensionBuilder, FilteredAirBuilder, PairBuilder, PairCol, PermutationAirBuilder,
            VirtualPairCol,
        };
        pub use p3_miden_air::{
            BaseAirWithAuxTrace, FilteredMidenAirBuilder, MidenAir, MidenAirBuilder,
        };
    }

    pub mod challenger {
        pub use p3_challenger::{HashChallenger, SerializingChallenger64};
    }

    pub mod commit {
        pub use p3_commit::ExtensionMmcs;
        pub use p3_merkle_tree::MerkleTreeMmcs;
    }

    pub mod dft {
        pub use p3_dft::Radix2DitParallel;
    }

    pub mod matrix {
        pub use p3_matrix::{Matrix, dense::RowMajorMatrix};
    }

    pub mod pcs {
        pub use p3_miden_fri::{FriParameters, TwoAdicFriPcs};
    }

    pub mod symmetric {
        pub use p3_symmetric::{
            CompressionFunctionFromHasher, PaddingFreeSponge, SerializingHasher,
        };
    }
}

// TYPE ALIASES
// ================================================================================================

/// An alias for a key-value map.
///
/// By default, this is an alias for [`alloc::collections::BTreeMap`]; however, when the
/// `hashmaps` feature is enabled, it is an alias for `hashbrown`'s `HashMap`.
#[cfg(feature = "hashmaps")]
pub type Map<K, V> = hashbrown::HashMap<K, V>;

#[cfg(feature = "hashmaps")]
pub use hashbrown::hash_map::Entry as MapEntry;

/// An alias for a key-value map.
///
/// By default, this is an alias for [`alloc::collections::BTreeMap`]; however, when the
/// `hashmaps` feature is enabled, it is an alias for `hashbrown`'s `HashMap`.
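///
/// A minimal usage sketch (using the default `BTreeMap`-backed alias; the values are
/// illustrative only):
///
/// ```
/// use miden_crypto::{Map, MapEntry};
///
/// let mut map: Map<u64, u64> = Map::new();
/// if let MapEntry::Vacant(entry) = map.entry(1) {
///     entry.insert(10);
/// }
/// assert_eq!(map.get(&1), Some(&10));
/// ```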
#[cfg(not(feature = "hashmaps"))]
pub type Map<K, V> = alloc::collections::BTreeMap<K, V>;

#[cfg(not(feature = "hashmaps"))]
pub use alloc::collections::btree_map::Entry as MapEntry;

/// An alias for a simple set.
///
/// By default, this is an alias for [`alloc::collections::BTreeSet`]. However, when the
/// `hashmaps` feature is enabled, this becomes an alias for `hashbrown`'s `HashSet`.
#[cfg(feature = "hashmaps")]
pub type Set<V> = hashbrown::HashSet<V>;

/// An alias for a simple set.
///
/// By default, this is an alias for [`alloc::collections::BTreeSet`]. However, when the
/// `hashmaps` feature is enabled, this becomes an alias for `hashbrown`'s `HashSet`.
#[cfg(not(feature = "hashmaps"))]
pub type Set<V> = alloc::collections::BTreeSet<V>;

// CONSTANTS
// ================================================================================================

/// Number of field elements in a word.
pub const WORD_SIZE: usize = 4;

/// Field element representing ZERO in the Miden base field.
pub const ZERO: Felt = Felt::ZERO;

/// Field element representing ONE in the Miden base field.
pub const ONE: Felt = Felt::ONE;

/// Array of field elements representing a word of ZEROs in the Miden base field.
pub const EMPTY_WORD: Word = Word::new([ZERO; WORD_SIZE]);

// TRAITS
// ================================================================================================

/// Defines how to compute a commitment to an object represented as a sequence of field elements.
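///
/// A minimal sketch of an implementation; the `Pair` type below is hypothetical and used only
/// for illustration.
///
/// ```
/// use miden_crypto::{Felt, ONE, SequentialCommit, Word, ZERO};
///
/// struct Pair(Felt, Felt);
///
/// impl SequentialCommit for Pair {
///     // the commitment is a plain word, which is trivially derivable from `Word`
///     type Commitment = Word;
///
///     fn to_elements(&self) -> Vec<Felt> {
///         vec![self.0, self.1]
///     }
/// }
///
/// // the default `to_commitment()` hashes the elements with Poseidon2
/// let _commitment: Word = Pair(ONE, ZERO).to_commitment();
/// ```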
pub trait SequentialCommit {
    /// The type of the commitment, which must be derivable from [Word].
    type Commitment: From<Word>;

    /// Computes the commitment to the object.
    ///
    /// The default implementation of this function uses the Poseidon2 hash function to hash
    /// the sequence of elements returned by [Self::to_elements()].
    fn to_commitment(&self) -> Self::Commitment {
        hash::poseidon2::Poseidon2::hash_elements(&self.to_elements()).into()
    }

    /// Returns a representation of the object as a sequence of field elements.
    fn to_elements(&self) -> alloc::vec::Vec<Felt>;
}

// BATCH INVERSION
// ================================================================================================

mod batch_inversion {

    use alloc::vec::Vec;

    use p3_maybe_rayon::prelude::*;

    use super::{Felt, ONE, ZERO, field::Field};

    /// Parallel batch inversion using Montgomery's trick, with zero entries left unchanged.
    ///
    /// The input is split into chunks that are processed in parallel via rayon, with
    /// Montgomery's trick applied within each chunk.
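    ///
    /// A minimal usage sketch (illustrative values only):
    ///
    /// ```
    /// use miden_crypto::field::{Field, batch_inversion_allow_zeros};
    /// use miden_crypto::{Felt, ONE, ZERO};
    ///
    /// let two: Felt = ONE + ONE;
    /// let mut values = [two, ZERO, ONE];
    /// batch_inversion_allow_zeros(&mut values);
    ///
    /// assert_eq!(values[0], two.inverse());
    /// assert_eq!(values[1], ZERO); // zeros pass through unchanged
    /// assert_eq!(values[2], ONE);
    /// ```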
    pub fn batch_inversion_allow_zeros(values: &mut [Felt]) {
        const CHUNK_SIZE: usize = 1024;

        // Work on a copy of the input since the results are written into `values` in place.
        let input: Vec<Felt> = values.to_vec();

        input.par_chunks(CHUNK_SIZE).zip(values.par_chunks_mut(CHUNK_SIZE)).for_each(
            |(input_chunk, output_chunk)| {
                batch_inversion_helper(input_chunk, output_chunk);
            },
        );
    }

    /// Montgomery's trick for batch inversion, handling zeros.
    fn batch_inversion_helper(values: &[Felt], result: &mut [Felt]) {
        debug_assert_eq!(values.len(), result.len());

        if values.is_empty() {
            return;
        }

        // Forward pass: compute cumulative products, skipping zeros
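        // Montgomery's trick: after this pass, `result[i]` holds the product of all nonzero
        // values at indices < i. Inverting the total product once then lets the backward pass
        // below recover each individual inverse using only multiplications.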
        let mut last = ONE;
        for (result, &value) in result.iter_mut().zip(values.iter()) {
            *result = last;
            if value != ZERO {
                last *= value;
            }
        }

        // Invert the final cumulative product
        last = last.inverse();

        // Backward pass: compute individual inverses
        for i in (0..values.len()).rev() {
            if values[i] == ZERO {
                result[i] = ZERO;
            } else {
                result[i] *= last;
                last *= values[i];
            }
        }
    }

    #[cfg(test)]
    mod tests {
        use super::*;

        #[test]
        fn test_batch_inversion_allow_zeros() {
            let mut column = Vec::from([Felt::new(2), ZERO, Felt::new(4), Felt::new(5)]);
            batch_inversion_allow_zeros(&mut column);

            assert_eq!(column[0], Felt::new(2).inverse());
            assert_eq!(column[1], ZERO);
            assert_eq!(column[2], Felt::new(4).inverse());
            assert_eq!(column[3], Felt::new(5).inverse());
        }
    }
}

// TESTS
// ================================================================================================

#[cfg(test)]
mod tests {

    #[test]
    #[should_panic]
    fn debug_assert_is_checked() {
        // ensure that release-mode test runs always set `RUSTFLAGS="-C debug-assertions"`.
        //
        // some upstream tests rely on `debug_assert`, and we want to ensure it is actually
        // enabled downstream.
        //
        // for reference, check
        // https://github.com/0xMiden/miden-vm/issues/433
        debug_assert!(false);
    }

    #[test]
    #[should_panic]
    #[allow(arithmetic_overflow)]
    fn overflow_panics_for_test() {
        // overflows might be disabled if tests are performed in release mode. these are critical,
        // mandatory checks as overflows might be attack vectors.
        //
        // to enable overflow checks in release mode, ensure `RUSTFLAGS="-C overflow-checks"`
        let a = 1_u64;
        let b = 64;
        assert_ne!(a << b, 0);
    }
}