miden_crypto/hash/blake/mod.rs

use alloc::string::String;
use core::{mem::size_of, ops::Deref, slice};

use super::HasherExt;
use crate::{
    Felt,
    field::{BasedVectorSpace, PrimeField64},
    utils::{
        ByteReader, ByteWriter, Deserializable, DeserializationError, HexParseError, Serializable,
        bytes_to_hex_string, hex_to_bytes,
    },
};

#[cfg(test)]
mod tests;

pub use p3_blake3::Blake3 as Blake3Hasher;
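
/// Size of a 256-bit BLAKE3 digest in bytes.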
const DIGEST32_BYTES: usize = 32;
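
/// Size of a 192-bit BLAKE3 digest in bytes.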
const DIGEST24_BYTES: usize = 24;
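
/// A BLAKE3 digest of `N` bytes.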
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
#[cfg_attr(feature = "serde", serde(into = "String", try_from = "&str"))]
#[repr(transparent)]
pub struct Blake3Digest<const N: usize>([u8; N]);

impl<const N: usize> Blake3Digest<N> {
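    /// Returns this digest as a 32-byte array, zero-padding on the right when `N` is less than 32.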
    pub fn as_bytes(&self) -> [u8; 32] {
        assert!(N <= 32, "digest currently supports only 32 bytes!");
        expand_bytes(&self.0)
    }
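
    /// Reinterprets a slice of digests as a flat slice of bytes.
    ///
    /// The cast is sound because `Blake3Digest<N>` is `#[repr(transparent)]` over `[u8; N]`,
    /// so a sequence of digests has the same memory layout as its underlying bytes.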
    pub fn digests_as_bytes(digests: &[Blake3Digest<N>]) -> &[u8] {
        let p = digests.as_ptr();
        let len = digests.len() * N;
        unsafe { slice::from_raw_parts(p as *const u8, len) }
    }
}

impl<const N: usize> Default for Blake3Digest<N> {
    fn default() -> Self {
        Self([0; N])
    }
}

impl<const N: usize> Deref for Blake3Digest<N> {
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<const N: usize> From<Blake3Digest<N>> for [u8; N] {
    fn from(value: Blake3Digest<N>) -> Self {
        value.0
    }
}

impl<const N: usize> From<[u8; N]> for Blake3Digest<N> {
    fn from(value: [u8; N]) -> Self {
        Self(value)
    }
}

impl<const N: usize> From<Blake3Digest<N>> for String {
    fn from(value: Blake3Digest<N>) -> Self {
        bytes_to_hex_string(value.as_bytes())
    }
}

impl<const N: usize> TryFrom<&str> for Blake3Digest<N> {
    type Error = HexParseError;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        hex_to_bytes(value).map(|v| v.into())
    }
}

impl<const N: usize> Serializable for Blake3Digest<N> {
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        target.write_bytes(&self.0);
    }
}

impl<const N: usize> Deserializable for Blake3Digest<N> {
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        source.read_array().map(Self)
    }
}
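
/// BLAKE3 hasher with 256-bit (32-byte) output.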
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3_256;

impl HasherExt for Blake3_256 {
    type Digest = Blake3Digest<32>;

    fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        for slice in slices {
            hasher.update(slice);
        }
        Blake3Digest(hasher.finalize().into())
    }
}

impl Blake3_256 {
    /// Collision resistance of this hasher, in bits.
    pub const COLLISION_RESISTANCE: u32 = 128;

    /// Returns a BLAKE3 hash of the provided sequence of bytes.
    pub fn hash(bytes: &[u8]) -> Blake3Digest<32> {
        Blake3Digest(blake3::hash(bytes).into())
    }

    /// Returns a hash of the two provided digests concatenated together.
    pub fn merge(values: &[Blake3Digest<32>; 2]) -> Blake3Digest<32> {
        Self::hash(Blake3Digest::digests_as_bytes(values))
    }

    /// Returns a hash of the concatenation of the provided digests.
    pub fn merge_many(values: &[Blake3Digest<32>]) -> Blake3Digest<32> {
        Blake3Digest(blake3::hash(Blake3Digest::digests_as_bytes(values)).into())
    }

    /// Returns a hash of the seed digest followed by the little-endian encoding of `value`.
    pub fn merge_with_int(seed: Blake3Digest<32>, value: u64) -> Blake3Digest<32> {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(hasher.finalize().into())
    }

    /// Returns a hash of the provided field elements.
    #[inline(always)]
    pub fn hash_elements<E: BasedVectorSpace<Felt>>(elements: &[E]) -> Blake3Digest<32> {
        Blake3Digest(hash_elements(elements))
    }

    /// Hashes the provided byte slices as one continuous input.
    #[inline(always)]
    pub fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Blake3Digest<DIGEST32_BYTES> {
        <Self as HasherExt>::hash_iter(slices)
    }
}
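
/// BLAKE3 hasher with 192-bit (24-byte) output; the digest is a 32-byte BLAKE3 hash truncated to
/// 24 bytes.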
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3_192;

impl HasherExt for Blake3_192 {
    type Digest = Blake3Digest<24>;

    fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        for slice in slices {
            hasher.update(slice);
        }
        Blake3Digest(shrink_array(hasher.finalize().into()))
    }
}

impl Blake3_192 {
    /// Collision resistance of this hasher, in bits.
    pub const COLLISION_RESISTANCE: u32 = 96;

    /// Returns a BLAKE3 hash of the provided sequence of bytes, truncated to 24 bytes.
    pub fn hash(bytes: &[u8]) -> Blake3Digest<24> {
        Blake3Digest(shrink_array(blake3::hash(bytes).into()))
    }

    /// Returns a hash of the concatenation of the provided digests, truncated to 24 bytes.
    pub fn merge_many(values: &[Blake3Digest<24>]) -> Blake3Digest<24> {
        let bytes = Blake3Digest::digests_as_bytes(values);
        Blake3Digest(shrink_array(blake3::hash(bytes).into()))
    }

    /// Returns a hash of the two provided digests concatenated together.
    pub fn merge(values: &[Blake3Digest<24>; 2]) -> Blake3Digest<24> {
        Self::hash(Blake3Digest::digests_as_bytes(values))
    }

    /// Returns a hash of the seed digest followed by the little-endian encoding of `value`.
    pub fn merge_with_int(seed: Blake3Digest<24>, value: u64) -> Blake3Digest<24> {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(shrink_array(hasher.finalize().into()))
    }

    /// Returns a hash of the provided field elements, truncated to 24 bytes.
    #[inline(always)]
    pub fn hash_elements<E: BasedVectorSpace<Felt>>(elements: &[E]) -> Blake3Digest<24> {
        Blake3Digest(hash_elements(elements))
    }

    /// Hashes the provided byte slices as one continuous input.
    #[inline(always)]
    pub fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Blake3Digest<DIGEST24_BYTES> {
        <Self as HasherExt>::hash_iter(slices)
    }
}
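
/// Hashes a slice of field elements by writing the canonical 8-byte little-endian encoding of
/// each base-field coefficient into a 64-byte buffer, feeding the buffer to BLAKE3 whenever it
/// fills up, and truncating the final digest to `N` bytes.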
fn hash_elements<const N: usize, E>(elements: &[E]) -> [u8; N]
where
    E: BasedVectorSpace<Felt>,
{
    let digest = {
        const FELT_BYTES: usize = size_of::<u64>();
        const { assert!(FELT_BYTES == 8, "buffer arithmetic assumes 8-byte field elements") };

        let mut hasher = blake3::Hasher::new();
        let mut buf = [0_u8; 64];
        let mut buf_offset = 0;

        for elem in elements.iter() {
            for &felt in E::as_basis_coefficients_slice(elem) {
                buf[buf_offset..buf_offset + FELT_BYTES]
                    .copy_from_slice(&felt.as_canonical_u64().to_le_bytes());
                buf_offset += FELT_BYTES;

                if buf_offset == 64 {
                    hasher.update(&buf);
                    buf_offset = 0;
                }
            }
        }

        if buf_offset > 0 {
            hasher.update(&buf[..buf_offset]);
        }

        hasher.finalize()
    };

    shrink_array(digest.into())
}
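
/// Truncates an `M`-byte array to its first `N` bytes; `M >= N` is checked at compile time.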
fn shrink_array<const M: usize, const N: usize>(source: [u8; M]) -> [u8; N] {
    const {
        assert!(M >= N, "size of destination should be smaller or equal than source");
    }
    core::array::from_fn(|i| source[i])
}
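
/// Copies an `M`-byte array into the front of a zero-initialized `N`-byte array; panics if `M > N`.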
fn expand_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> [u8; N] {
    assert!(M <= N, "M should fit in N so M can be expanded!");
    let mut expanded = [0u8; N];
    expanded[..M].copy_from_slice(bytes);
    expanded
}