// miden_crypto/hash/blake/mod.rs

1use alloc::{string::String, vec::Vec};
2use core::{
3    mem::{size_of, transmute, transmute_copy},
4    ops::Deref,
5    slice::{self, from_raw_parts},
6};
7
8use super::{Digest, ElementHasher, Felt, FieldElement, Hasher};
9use crate::utils::{
10    bytes_to_hex_string, hex_to_bytes, ByteReader, ByteWriter, Deserializable,
11    DeserializationError, HexParseError, Serializable,
12};
13
// unit tests for this module live in the sibling `tests.rs` file
#[cfg(test)]
mod tests;
16
// CONSTANTS
// ================================================================================================

/// Byte size of a [`Blake3_256`] digest.
const DIGEST32_BYTES: usize = 32;
/// Byte size of a [`Blake3_192`] digest.
const DIGEST24_BYTES: usize = 24;
/// Byte size of a [`Blake3_160`] digest.
const DIGEST20_BYTES: usize = 20;
23
// BLAKE3 N-BIT OUTPUT
// ================================================================================================

/// N-bytes output of a blake3 function.
///
/// Note: `N` can't be greater than `32` because [`Digest::as_bytes`] currently supports only 32
/// bytes.
///
/// The struct is `#[repr(transparent)]` so it is guaranteed to have the same memory layout as
/// `[u8; N]`; the unsafe pointer casts in this module (e.g. reinterpreting a
/// `&[Blake3Digest<N>]` as `&[u8]`) rely on this layout guarantee.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
#[cfg_attr(feature = "serde", serde(into = "String", try_from = "&str"))]
#[repr(transparent)]
pub struct Blake3Digest<const N: usize>([u8; N]);
35
impl<const N: usize> Blake3Digest<N> {
    /// Reinterprets a slice of digests as one flat, contiguous byte slice without copying.
    ///
    /// The returned slice has length `digests.len() * N`.
    pub fn digests_as_bytes(digests: &[Blake3Digest<N>]) -> &[u8] {
        let p = digests.as_ptr();
        let len = digests.len() * N;
        // SAFETY: `Blake3Digest<N>` is a newtype over `[u8; N]`, so `digests.len()` digests are
        // assumed to occupy `len` contiguous bytes starting at `p`.
        // NOTE(review): this relies on the struct having the same layout as `[u8; N]`; consider
        // marking `Blake3Digest` as `#[repr(transparent)]` to make that guarantee explicit.
        unsafe { slice::from_raw_parts(p as *const u8, len) }
    }
}
43
44impl<const N: usize> Default for Blake3Digest<N> {
45    fn default() -> Self {
46        Self([0; N])
47    }
48}
49
50impl<const N: usize> Deref for Blake3Digest<N> {
51    type Target = [u8];
52
53    fn deref(&self) -> &Self::Target {
54        &self.0
55    }
56}
57
impl<const N: usize> From<Blake3Digest<N>> for [u8; N] {
    /// Unwraps the digest into its underlying byte array.
    fn from(value: Blake3Digest<N>) -> Self {
        value.0
    }
}
63
impl<const N: usize> From<[u8; N]> for Blake3Digest<N> {
    /// Wraps a byte array as a digest.
    fn from(value: [u8; N]) -> Self {
        Self(value)
    }
}
69
impl<const N: usize> From<Blake3Digest<N>> for String {
    /// Encodes the digest as a hex string (via `Digest::as_bytes`, i.e. zero-padded to 32
    /// bytes when `N < 32`).
    fn from(value: Blake3Digest<N>) -> Self {
        bytes_to_hex_string(value.as_bytes())
    }
}
75
76impl<const N: usize> TryFrom<&str> for Blake3Digest<N> {
77    type Error = HexParseError;
78
79    fn try_from(value: &str) -> Result<Self, Self::Error> {
80        hex_to_bytes(value).map(|v| v.into())
81    }
82}
83
impl<const N: usize> Serializable for Blake3Digest<N> {
    /// Writes the raw `N` digest bytes into the target writer.
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        target.write_bytes(&self.0);
    }
}
89
impl<const N: usize> Deserializable for Blake3Digest<N> {
    /// Reads exactly `N` bytes from the source and wraps them as a digest.
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        source.read_array().map(Self)
    }
}
95
impl<const N: usize> Digest for Blake3Digest<N> {
    /// Returns the digest as a 32-byte array, zero-padded on the right when `N < 32`.
    fn as_bytes(&self) -> [u8; 32] {
        // the assert is evaluated against the const-generic `N`, so for valid instantiations it
        // is optimized away; note it is a runtime check, not a true compile-time assertion
        assert!(N <= 32, "digest currently supports only 32 bytes!");
        expand_bytes(&self.0)
    }
}
103
// BLAKE3 256-BIT OUTPUT
// ================================================================================================

/// 256-bit (32-byte) output blake3 hasher.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3_256;
110
impl Hasher for Blake3_256 {
    /// Blake3 collision resistance is 128-bits for 32-bytes output.
    const COLLISION_RESISTANCE: u32 = 128;

    type Digest = Blake3Digest<32>;

    /// Hashes the provided bytes, returning the full 32-byte blake3 output.
    fn hash(bytes: &[u8]) -> Self::Digest {
        Blake3Digest(blake3::hash(bytes).into())
    }

    /// Merges two digests by hashing their 64 contiguous bytes; used for Merkle-tree nodes.
    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        Self::hash(prepare_merge(values))
    }

    /// Merges a sequence of digests by hashing their concatenated raw bytes (zero-copy).
    fn merge_many(values: &[Self::Digest]) -> Self::Digest {
        Blake3Digest(blake3::hash(Blake3Digest::digests_as_bytes(values)).into())
    }

    /// Hashes `seed` followed by the little-endian bytes of `value`.
    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(hasher.finalize().into())
    }
}
136
impl ElementHasher for Blake3_256 {
    type BaseField = Felt;

    /// Hashes the provided field elements into a 32-byte digest.
    fn hash_elements<E>(elements: &[E]) -> Self::Digest
    where
        E: FieldElement<BaseField = Self::BaseField>,
    {
        Blake3Digest(hash_elements(elements))
    }
}
147
148impl Blake3_256 {
149    /// Returns a hash of the provided sequence of bytes.
150    #[inline(always)]
151    pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST32_BYTES> {
152        <Self as Hasher>::hash(bytes)
153    }
154
155    /// Returns a hash of two digests. This method is intended for use in construction of
156    /// Merkle trees and verification of Merkle paths.
157    #[inline(always)]
158    pub fn merge(values: &[Blake3Digest<DIGEST32_BYTES>; 2]) -> Blake3Digest<DIGEST32_BYTES> {
159        <Self as Hasher>::merge(values)
160    }
161
162    /// Returns a hash of the provided field elements.
163    #[inline(always)]
164    pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST32_BYTES>
165    where
166        E: FieldElement<BaseField = Felt>,
167    {
168        <Self as ElementHasher>::hash_elements(elements)
169    }
170}
171
// BLAKE3 192-BIT OUTPUT
// ================================================================================================

/// 192-bit (24-byte) output blake3 hasher.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3_192;
178
impl Hasher for Blake3_192 {
    /// Blake3 collision resistance is 96-bits for 24-bytes output.
    const COLLISION_RESISTANCE: u32 = 96;

    type Digest = Blake3Digest<24>;

    /// Hashes the provided bytes and truncates the 32-byte blake3 output to 24 bytes.
    fn hash(bytes: &[u8]) -> Self::Digest {
        Blake3Digest(*shrink_bytes(&blake3::hash(bytes).into()))
    }

    /// Merges a sequence of digests by hashing their concatenated bytes.
    ///
    /// NOTE(review): `v.as_bytes()` resolves to [`Digest::as_bytes`], which zero-pads each
    /// 24-byte digest to 32 bytes. So `merge_many(&[a, b])` hashes a different byte string than
    /// `merge(&[a, b])` (which concatenates the raw 24-byte digests) — confirm this padding is
    /// intentional before relying on cross-method consistency.
    fn merge_many(values: &[Self::Digest]) -> Self::Digest {
        let bytes: Vec<u8> = values.iter().flat_map(|v| v.as_bytes()).collect();
        Blake3Digest(*shrink_bytes(&blake3::hash(&bytes).into()))
    }

    /// Merges two digests by hashing their 48 contiguous bytes; used for Merkle-tree nodes.
    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        Self::hash(prepare_merge(values))
    }

    /// Hashes `seed` followed by the little-endian bytes of `value`, truncated to 24 bytes.
    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(*shrink_bytes(&hasher.finalize().into()))
    }
}
205
impl ElementHasher for Blake3_192 {
    type BaseField = Felt;

    /// Hashes the provided field elements into a 24-byte digest.
    fn hash_elements<E>(elements: &[E]) -> Self::Digest
    where
        E: FieldElement<BaseField = Self::BaseField>,
    {
        Blake3Digest(hash_elements(elements))
    }
}
216
217impl Blake3_192 {
218    /// Returns a hash of the provided sequence of bytes.
219    #[inline(always)]
220    pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST24_BYTES> {
221        <Self as Hasher>::hash(bytes)
222    }
223
224    /// Returns a hash of two digests. This method is intended for use in construction of
225    /// Merkle trees and verification of Merkle paths.
226    #[inline(always)]
227    pub fn merge(values: &[Blake3Digest<DIGEST24_BYTES>; 2]) -> Blake3Digest<DIGEST24_BYTES> {
228        <Self as Hasher>::merge(values)
229    }
230
231    /// Returns a hash of the provided field elements.
232    #[inline(always)]
233    pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST24_BYTES>
234    where
235        E: FieldElement<BaseField = Felt>,
236    {
237        <Self as ElementHasher>::hash_elements(elements)
238    }
239}
240
// BLAKE3 160-BIT OUTPUT
// ================================================================================================

/// 160-bit (20-byte) output blake3 hasher.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3_160;
247
impl Hasher for Blake3_160 {
    /// Blake3 collision resistance is 80-bits for 20-bytes output.
    const COLLISION_RESISTANCE: u32 = 80;

    type Digest = Blake3Digest<20>;

    /// Hashes the provided bytes and truncates the 32-byte blake3 output to 20 bytes.
    fn hash(bytes: &[u8]) -> Self::Digest {
        Blake3Digest(*shrink_bytes(&blake3::hash(bytes).into()))
    }

    /// Merges two digests by hashing their 40 contiguous bytes; used for Merkle-tree nodes.
    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        Self::hash(prepare_merge(values))
    }

    /// Merges a sequence of digests by hashing their concatenated bytes.
    ///
    /// NOTE(review): `v.as_bytes()` resolves to [`Digest::as_bytes`], which zero-pads each
    /// 20-byte digest to 32 bytes. So `merge_many(&[a, b])` hashes a different byte string than
    /// `merge(&[a, b])` (which concatenates the raw 20-byte digests) — confirm this padding is
    /// intentional before relying on cross-method consistency.
    fn merge_many(values: &[Self::Digest]) -> Self::Digest {
        let bytes: Vec<u8> = values.iter().flat_map(|v| v.as_bytes()).collect();
        Blake3Digest(*shrink_bytes(&blake3::hash(&bytes).into()))
    }

    /// Hashes `seed` followed by the little-endian bytes of `value`, truncated to 20 bytes.
    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(*shrink_bytes(&hasher.finalize().into()))
    }
}
274
impl ElementHasher for Blake3_160 {
    type BaseField = Felt;

    /// Hashes the provided field elements into a 20-byte digest.
    fn hash_elements<E>(elements: &[E]) -> Self::Digest
    where
        E: FieldElement<BaseField = Self::BaseField>,
    {
        Blake3Digest(hash_elements(elements))
    }
}
285
286impl Blake3_160 {
287    /// Returns a hash of the provided sequence of bytes.
288    #[inline(always)]
289    pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST20_BYTES> {
290        <Self as Hasher>::hash(bytes)
291    }
292
293    /// Returns a hash of two digests. This method is intended for use in construction of
294    /// Merkle trees and verification of Merkle paths.
295    #[inline(always)]
296    pub fn merge(values: &[Blake3Digest<DIGEST20_BYTES>; 2]) -> Blake3Digest<DIGEST20_BYTES> {
297        <Self as Hasher>::merge(values)
298    }
299
300    /// Returns a hash of the provided field elements.
301    #[inline(always)]
302    pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST20_BYTES>
303    where
304        E: FieldElement<BaseField = Felt>,
305    {
306        <Self as ElementHasher>::hash_elements(elements)
307    }
308}
309
// HELPER FUNCTIONS
// ================================================================================================

/// Zero-copy shrink: returns a reference to the first `N` bytes of `bytes` as a fixed-size
/// array reference.
///
/// # Panics
/// Panics if `N > M`.
fn shrink_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> &[u8; N] {
    // evaluated with const-generic parameters, so for valid instantiations the check (and the
    // panic branch) is eliminated during monomorphization in optimized builds
    assert!(M >= N, "N should fit in M so it can be safely transmuted into a smaller slice!");
    // safe replacement for the previous `transmute`: slice off the first N bytes and convert
    // the slice back into an array reference; this cannot fail because of the assert above
    <&[u8; N]>::try_from(&bytes[..N]).expect("length checked above")
}
320
/// Hash the elements into bytes and shrink the output to `N` bytes.
///
/// Elements are absorbed in their canonical byte representation; non-canonical element types are
/// first converted element-by-element via `as_int()`.
fn hash_elements<const N: usize, E>(elements: &[E]) -> [u8; N]
where
    E: FieldElement<BaseField = Felt>,
{
    // don't leak assumptions from felt and check its actual implementation.
    // this is a compile-time branch so it is for free
    let digest = if Felt::IS_CANONICAL {
        // canonical representation: the elements' in-memory bytes can be hashed directly
        blake3::hash(E::elements_as_bytes(elements))
    } else {
        let mut hasher = blake3::Hasher::new();

        // BLAKE3 state is 64 bytes - so, we can absorb 64 bytes into the state in a single
        // permutation. we move the elements into the hasher via the buffer to give the CPU
        // a chance to process multiple element-to-byte conversions in parallel
        let mut buf = [0_u8; 64];
        // 8 base elements per chunk, 8 bytes each (`as_int().to_le_bytes()` must yield 8 bytes
        // to exactly fill the buffer -- holds for Felt's u64 representation)
        let mut chunk_iter = E::slice_as_base_elements(elements).chunks_exact(8);
        for chunk in chunk_iter.by_ref() {
            for i in 0..8 {
                buf[i * 8..(i + 1) * 8].copy_from_slice(&chunk[i].as_int().to_le_bytes());
            }
            hasher.update(&buf);
        }

        // absorb the trailing (fewer than 8) elements one at a time
        for element in chunk_iter.remainder() {
            hasher.update(&element.as_int().to_le_bytes());
        }

        hasher.finalize()
    };
    // blake3 always outputs 32 bytes; truncate to the requested digest size
    *shrink_bytes(&digest.into())
}
353
/// Owned bytes expansion: copies `bytes` into the first `M` positions of a zero-initialized
/// `N`-byte array (right-padding with zeros when `M < N`).
///
/// # Panics
/// Panics if `M > N`.
fn expand_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> [u8; N] {
    // evaluated with const-generic parameters, so for valid instantiations the check (and the
    // panic branch) is eliminated during monomorphization in optimized builds
    assert!(M <= N, "M should fit in N so M can be expanded!");
    // `copy_from_slice` compiles to a memcpy and also covers the `M == N` case, so the previous
    // unsafe `transmute_copy` fast path is unnecessary
    let mut expanded = [0u8; N];
    expanded[..M].copy_from_slice(bytes);
    expanded
}
369
// Cast the slice of digests into one contiguous byte slice, without copying.
//
// NOTE(review): soundness relies on `D` being a plain byte-array newtype whose size equals the
// number of bytes it dereferences to (true for `Blake3Digest<N>` in this module); the
// `debug_assert_eq!` below verifies this only in debug builds.
fn prepare_merge<const N: usize, D>(args: &[D; N]) -> &[u8]
where
    D: Deref<Target = [u8]>,
{
    // evaluated with the const-generic `N`; an empty array would make the `args[0]` access
    // below panic
    assert!(N > 0, "N shouldn't represent an empty slice!");
    let values = args.as_ptr() as *const u8;
    // total byte length: `size_of::<D>()` bytes per element -- assumes `D` carries no padding
    // or metadata beyond its byte content
    let len = size_of::<D>() * N;
    // safety: the values are tested to be contiguous
    let bytes = unsafe { from_raw_parts(values, len) };
    // sanity check (debug builds only): the first element's bytes appear at the start
    debug_assert_eq!(args[0].deref(), &bytes[..len / N]);
    bytes
}