miden_crypto/hash/blake/mod.rs

use alloc::string::String;
use core::{
    mem::{size_of, transmute, transmute_copy},
    ops::Deref,
    slice::{self, from_raw_parts},
};

use super::{Digest, ElementHasher, Felt, FieldElement, Hasher, HasherExt};
use crate::utils::{
    ByteReader, ByteWriter, Deserializable, DeserializationError, HexParseError, Serializable,
    bytes_to_hex_string, hex_to_bytes,
};

#[cfg(test)]
mod tests;

// CONSTANTS
// ================================================================================================

const DIGEST32_BYTES: usize = 32;
const DIGEST24_BYTES: usize = 24;
const DIGEST20_BYTES: usize = 20;

// BLAKE3 N-BIT OUTPUT
// ================================================================================================

/// N-byte output of a BLAKE3 hash function.
///
/// Note: `N` can't be greater than `32` because [`Digest::as_bytes`] currently supports only 32
/// bytes.
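///
/// A minimal usage sketch; the `miden_crypto::hash::blake` import path below is assumed from the
/// crate layout:
///
/// ```
/// use miden_crypto::hash::blake::{Blake3Digest, Blake3_256};
///
/// // hash some bytes, then round-trip the digest through its hex-string representation
/// let digest: Blake3Digest<32> = Blake3_256::hash(b"hello");
/// let hex = String::from(digest);
/// let parsed = Blake3Digest::<32>::try_from(hex.as_str()).unwrap();
/// assert_eq!(digest, parsed);
/// ```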
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
#[cfg_attr(feature = "serde", serde(into = "String", try_from = "&str"))]
#[repr(transparent)]
pub struct Blake3Digest<const N: usize>([u8; N]);

impl<const N: usize> Blake3Digest<N> {
    /// Reinterprets a slice of digests as a single contiguous byte slice.
    pub fn digests_as_bytes(digests: &[Blake3Digest<N>]) -> &[u8] {
        let p = digests.as_ptr();
        let len = digests.len() * N;
        // safety: `Blake3Digest<N>` is a `repr(transparent)` wrapper around `[u8; N]`, so a slice
        // of digests is layout-compatible with a contiguous slice of `len` bytes
        unsafe { slice::from_raw_parts(p as *const u8, len) }
    }
}

impl<const N: usize> Default for Blake3Digest<N> {
    fn default() -> Self {
        Self([0; N])
    }
}

impl<const N: usize> Deref for Blake3Digest<N> {
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<const N: usize> From<Blake3Digest<N>> for [u8; N] {
    fn from(value: Blake3Digest<N>) -> Self {
        value.0
    }
}

impl<const N: usize> From<[u8; N]> for Blake3Digest<N> {
    fn from(value: [u8; N]) -> Self {
        Self(value)
    }
}

impl<const N: usize> From<Blake3Digest<N>> for String {
    fn from(value: Blake3Digest<N>) -> Self {
        bytes_to_hex_string(value.as_bytes())
    }
}

impl<const N: usize> TryFrom<&str> for Blake3Digest<N> {
    type Error = HexParseError;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        hex_to_bytes(value).map(|v| v.into())
    }
}

impl<const N: usize> Serializable for Blake3Digest<N> {
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        target.write_bytes(&self.0);
    }
}

impl<const N: usize> Deserializable for Blake3Digest<N> {
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        source.read_array().map(Self)
    }
}

impl<const N: usize> Digest for Blake3Digest<N> {
    fn as_bytes(&self) -> [u8; 32] {
        // compile-time assertion
        assert!(N <= 32, "digest currently supports only 32 bytes!");
        expand_bytes(&self.0)
    }
}

// BLAKE3 256-BIT OUTPUT
// ================================================================================================

/// 256-bit output BLAKE3 hasher.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3_256;

impl Hasher for Blake3_256 {
    /// BLAKE3 collision resistance is 128 bits for a 32-byte output.
    const COLLISION_RESISTANCE: u32 = 128;

    type Digest = Blake3Digest<32>;

    fn hash(bytes: &[u8]) -> Self::Digest {
        Blake3Digest(blake3::hash(bytes).into())
    }

    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        Self::hash(prepare_merge(values))
    }

    fn merge_many(values: &[Self::Digest]) -> Self::Digest {
        Blake3Digest(blake3::hash(Blake3Digest::digests_as_bytes(values)).into())
    }

    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(hasher.finalize().into())
    }
}

impl ElementHasher for Blake3_256 {
    type BaseField = Felt;

    fn hash_elements<E>(elements: &[E]) -> Self::Digest
    where
        E: FieldElement<BaseField = Self::BaseField>,
    {
        Blake3Digest(hash_elements(elements))
    }
}

impl HasherExt for Blake3_256 {
    fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        for slice in slices {
            hasher.update(slice);
        }
        Blake3Digest(hasher.finalize().into())
    }
}

impl Blake3_256 {
    /// Returns a hash of the provided sequence of bytes.
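    ///
    /// A minimal usage sketch; the import path is assumed from the crate layout:
    ///
    /// ```
    /// use miden_crypto::hash::blake::Blake3_256;
    ///
    /// // hashing the same input twice yields the same 32-byte digest
    /// let digest = Blake3_256::hash(b"some data");
    /// assert_eq!(digest, Blake3_256::hash(b"some data"));
    /// assert_eq!(digest.len(), 32);
    /// ```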
    #[inline(always)]
    pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST32_BYTES> {
        <Self as Hasher>::hash(bytes)
    }

    /// Returns a hash of two digests. This method is intended for use in construction of
    /// Merkle trees and verification of Merkle paths.
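    ///
    /// A minimal sketch of computing a Merkle parent node from two child digests; the import
    /// path is assumed from the crate layout:
    ///
    /// ```
    /// use miden_crypto::hash::blake::Blake3_256;
    ///
    /// let left = Blake3_256::hash(b"left leaf");
    /// let right = Blake3_256::hash(b"right leaf");
    /// // the parent commits to both children, and their order matters
    /// let parent = Blake3_256::merge(&[left, right]);
    /// assert_ne!(parent, Blake3_256::merge(&[right, left]));
    /// ```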
    #[inline(always)]
    pub fn merge(values: &[Blake3Digest<DIGEST32_BYTES>; 2]) -> Blake3Digest<DIGEST32_BYTES> {
        <Self as Hasher>::merge(values)
    }

    /// Returns a hash of the provided field elements.
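    ///
    /// A minimal sketch; `Felt` is assumed to be re-exported at the crate root:
    ///
    /// ```
    /// use miden_crypto::{Felt, hash::blake::Blake3_256};
    ///
    /// // the hasher converts the elements to bytes internally and hashes the result
    /// let elements = [Felt::new(1), Felt::new(2), Felt::new(3)];
    /// let digest = Blake3_256::hash_elements(&elements);
    /// assert_eq!(digest, Blake3_256::hash_elements(&elements));
    /// ```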
    #[inline(always)]
    pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST32_BYTES>
    where
        E: FieldElement<BaseField = Felt>,
    {
        <Self as ElementHasher>::hash_elements(elements)
    }

    /// Hashes an iterator of byte slices.
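    ///
    /// A minimal sketch; the import path is assumed from the crate layout. The slices are fed
    /// into a single hasher state, so the result equals hashing their concatenation:
    ///
    /// ```
    /// use miden_crypto::hash::blake::Blake3_256;
    ///
    /// let parts: [&[u8]; 2] = [b"hello, ", b"world"];
    /// let from_parts = Blake3_256::hash_iter(parts.into_iter());
    /// assert_eq!(from_parts, Blake3_256::hash(b"hello, world"));
    /// ```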
    #[inline(always)]
    pub fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Blake3Digest<DIGEST32_BYTES> {
        <Self as HasherExt>::hash_iter(slices)
    }
}

// BLAKE3 192-BIT OUTPUT
// ================================================================================================

/// 192-bit output BLAKE3 hasher.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3_192;

impl HasherExt for Blake3_192 {
    fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        for slice in slices {
            hasher.update(slice);
        }
        Blake3Digest(*shrink_bytes(&hasher.finalize().into()))
    }
}

impl Hasher for Blake3_192 {
    /// BLAKE3 collision resistance is 96 bits for a 24-byte output.
    const COLLISION_RESISTANCE: u32 = 96;

    type Digest = Blake3Digest<24>;

    fn hash(bytes: &[u8]) -> Self::Digest {
        Blake3Digest(*shrink_bytes(&blake3::hash(bytes).into()))
    }

    fn merge_many(values: &[Self::Digest]) -> Self::Digest {
        let bytes = Blake3Digest::digests_as_bytes(values);
        Blake3Digest(*shrink_bytes(&blake3::hash(bytes).into()))
    }

    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        Self::hash(prepare_merge(values))
    }

    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(*shrink_bytes(&hasher.finalize().into()))
    }
}

impl ElementHasher for Blake3_192 {
    type BaseField = Felt;

    fn hash_elements<E>(elements: &[E]) -> Self::Digest
    where
        E: FieldElement<BaseField = Self::BaseField>,
    {
        Blake3Digest(hash_elements(elements))
    }
}

impl Blake3_192 {
    /// Returns a hash of the provided sequence of bytes.
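    ///
    /// A minimal sketch showing the truncated digest size; the import path is assumed from the
    /// crate layout:
    ///
    /// ```
    /// use miden_crypto::hash::blake::Blake3_192;
    ///
    /// // the 256-bit BLAKE3 output is truncated to 24 bytes (192 bits)
    /// let digest = Blake3_192::hash(b"some data");
    /// assert_eq!(digest.len(), 24);
    /// ```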
    #[inline(always)]
    pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST24_BYTES> {
        <Self as Hasher>::hash(bytes)
    }

    /// Returns a hash of two digests. This method is intended for use in construction of
    /// Merkle trees and verification of Merkle paths.
    #[inline(always)]
    pub fn merge(values: &[Blake3Digest<DIGEST24_BYTES>; 2]) -> Blake3Digest<DIGEST24_BYTES> {
        <Self as Hasher>::merge(values)
    }

    /// Returns a hash of the provided field elements.
    #[inline(always)]
    pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST24_BYTES>
    where
        E: FieldElement<BaseField = Felt>,
    {
        <Self as ElementHasher>::hash_elements(elements)
    }

    /// Hashes an iterator of byte slices.
    #[inline(always)]
    pub fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Blake3Digest<DIGEST24_BYTES> {
        <Self as HasherExt>::hash_iter(slices)
    }
}

// BLAKE3 160-BIT OUTPUT
// ================================================================================================

/// 160-bit output BLAKE3 hasher.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3_160;

impl HasherExt for Blake3_160 {
    fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        for slice in slices {
            hasher.update(slice);
        }
        Blake3Digest(*shrink_bytes(&hasher.finalize().into()))
    }
}

impl Hasher for Blake3_160 {
    /// BLAKE3 collision resistance is 80 bits for a 20-byte output.
    const COLLISION_RESISTANCE: u32 = 80;

    type Digest = Blake3Digest<20>;

    fn hash(bytes: &[u8]) -> Self::Digest {
        Blake3Digest(*shrink_bytes(&blake3::hash(bytes).into()))
    }

    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        Self::hash(prepare_merge(values))
    }

    fn merge_many(values: &[Self::Digest]) -> Self::Digest {
        let bytes = Blake3Digest::digests_as_bytes(values);
        Blake3Digest(*shrink_bytes(&blake3::hash(bytes).into()))
    }

    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(*shrink_bytes(&hasher.finalize().into()))
    }
}

impl ElementHasher for Blake3_160 {
    type BaseField = Felt;

    fn hash_elements<E>(elements: &[E]) -> Self::Digest
    where
        E: FieldElement<BaseField = Self::BaseField>,
    {
        Blake3Digest(hash_elements(elements))
    }
}

impl Blake3_160 {
    /// Returns a hash of the provided sequence of bytes.
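    ///
    /// A minimal sketch; the import path is assumed from the crate layout. The 20-byte digest is
    /// a truncation of the full 32-byte BLAKE3 output for the same input:
    ///
    /// ```
    /// use miden_crypto::hash::blake::{Blake3_160, Blake3_256};
    ///
    /// let short = Blake3_160::hash(b"some data");
    /// let full = Blake3_256::hash(b"some data");
    /// assert_eq!(&short[..], &full[..20]);
    /// ```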
    #[inline(always)]
    pub fn hash(bytes: &[u8]) -> Blake3Digest<DIGEST20_BYTES> {
        <Self as Hasher>::hash(bytes)
    }

    /// Returns a hash of two digests. This method is intended for use in construction of
    /// Merkle trees and verification of Merkle paths.
    #[inline(always)]
    pub fn merge(values: &[Blake3Digest<DIGEST20_BYTES>; 2]) -> Blake3Digest<DIGEST20_BYTES> {
        <Self as Hasher>::merge(values)
    }

    /// Returns a hash of the provided field elements.
    #[inline(always)]
    pub fn hash_elements<E>(elements: &[E]) -> Blake3Digest<DIGEST20_BYTES>
    where
        E: FieldElement<BaseField = Felt>,
    {
        <Self as ElementHasher>::hash_elements(elements)
    }

    /// Hashes an iterator of byte slices.
    #[inline(always)]
    pub fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Blake3Digest<DIGEST20_BYTES> {
        <Self as HasherExt>::hash_iter(slices)
    }
}

// HELPER FUNCTIONS
// ================================================================================================

/// Zero-copy shrink of a byte-array reference to a reference to a smaller (prefix) array.
fn shrink_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> &[u8; N] {
    // compile-time assertion
    assert!(M >= N, "N should fit in M so it can be safely transmuted into a smaller slice!");
    // safety: the assertion above guarantees that `[u8; N]` is no larger than `[u8; M]`, so
    // reinterpreting the reference as a reference to the first N bytes is valid
    unsafe { transmute(bytes) }
}

/// Hashes the provided field elements into bytes and shrinks the output to `N` bytes.
fn hash_elements<const N: usize, E>(elements: &[E]) -> [u8; N]
where
    E: FieldElement<BaseField = Felt>,
{
    // don't bake in assumptions about Felt's internal representation; check whether it is
    // actually canonical. this is a compile-time constant branch, so it costs nothing at runtime
    let digest = if Felt::IS_CANONICAL {
        blake3::hash(E::elements_as_bytes(elements))
    } else {
        let mut hasher = blake3::Hasher::new();

        // BLAKE3's rate is 64 bytes, so we can absorb 64 bytes into the state in a single
        // permutation. we move the elements into the hasher via the buffer to give the CPU
        // a chance to process multiple element-to-byte conversions in parallel
        let mut buf = [0_u8; 64];
        let mut chunk_iter = E::slice_as_base_elements(elements).chunks_exact(8);
        for chunk in chunk_iter.by_ref() {
            for i in 0..8 {
                buf[i * 8..(i + 1) * 8].copy_from_slice(&chunk[i].as_int().to_le_bytes());
            }
            hasher.update(&buf);
        }

        for element in chunk_iter.remainder() {
            hasher.update(&element.as_int().to_le_bytes());
        }

        hasher.finalize()
    };
    *shrink_bytes(&digest.into())
}

/// Expands a byte array into a larger owned array, zero-padding the tail.
fn expand_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> [u8; N] {
    // compile-time assertion
    assert!(M <= N, "M should fit in N so M can be expanded!");
    // this branch is resolved at compile time, so only one of the variants remains in release
    // mode
    if M == N {
        // safety: the sizes are checked to be the same
        unsafe { transmute_copy(bytes) }
    } else {
        let mut expanded = [0u8; N];
        expanded[..M].copy_from_slice(bytes);
        expanded
    }
}

/// Casts the array into a contiguous byte slice.
fn prepare_merge<const N: usize, D>(args: &[D; N]) -> &[u8]
where
    D: Deref<Target = [u8]>,
{
    // compile-time assertion
    assert!(N > 0, "N shouldn't represent an empty slice!");
    let values = args.as_ptr() as *const u8;
    let len = size_of::<D>() * N;
    // safety: within this module `D` is always `Blake3Digest`, which wraps a plain byte array,
    // so the array of digests is contiguous bytes; the debug assertion below sanity-checks this
    // for the first element
    let bytes = unsafe { from_raw_parts(values, len) };
    debug_assert_eq!(args[0].deref(), &bytes[..len / N]);
    bytes
}