miden_crypto/hash/blake/mod.rs

//! Wrappers around the BLAKE3 hash function exposing 256-bit and 192-bit digest variants.

use alloc::string::String;
use core::{
    mem::size_of,
    ops::Deref,
    slice::{self, from_raw_parts},
};

use p3_field::{BasedVectorSpace, PrimeField64};
use p3_goldilocks::Goldilocks as Felt;

use super::HasherExt;
use crate::utils::{
    ByteReader, ByteWriter, Deserializable, DeserializationError, HexParseError, Serializable,
    bytes_to_hex_string, hex_to_bytes,
};

#[cfg(test)]
mod tests;
const DIGEST32_BYTES: usize = 32;
const DIGEST24_BYTES: usize = 24;
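/// A BLAKE3 digest consisting of `N` bytes.
///
/// The digest is a thin wrapper around a fixed-size byte array and can be serialized,
/// deserialized, and converted to and from hex strings.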
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
#[cfg_attr(feature = "serde", serde(into = "String", try_from = "&str"))]
#[repr(transparent)]
pub struct Blake3Digest<const N: usize>([u8; N]);

impl<const N: usize> Blake3Digest<N> {
    /// Returns this digest as a 32-byte array, zero-padded on the right when `N < 32`.
    pub fn as_bytes(&self) -> [u8; 32] {
        assert!(N <= 32, "digest currently supports at most 32 bytes!");
        expand_bytes(&self.0)
    }

    /// Reinterprets a slice of digests as a flat slice of bytes without copying.
    pub fn digests_as_bytes(digests: &[Blake3Digest<N>]) -> &[u8] {
        let p = digests.as_ptr();
        let len = digests.len() * N;
        // SAFETY: `Blake3Digest<N>` is `#[repr(transparent)]` over `[u8; N]`, so `digests`
        // points to `digests.len() * N` contiguous, initialized bytes.
        unsafe { slice::from_raw_parts(p as *const u8, len) }
    }
}
impl<const N: usize> Default for Blake3Digest<N> {
    fn default() -> Self {
        Self([0; N])
    }
}

impl<const N: usize> Deref for Blake3Digest<N> {
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<const N: usize> From<Blake3Digest<N>> for [u8; N] {
    fn from(value: Blake3Digest<N>) -> Self {
        value.0
    }
}

impl<const N: usize> From<[u8; N]> for Blake3Digest<N> {
    fn from(value: [u8; N]) -> Self {
        Self(value)
    }
}

impl<const N: usize> From<Blake3Digest<N>> for String {
    fn from(value: Blake3Digest<N>) -> Self {
        bytes_to_hex_string(value.as_bytes())
    }
}

impl<const N: usize> TryFrom<&str> for Blake3Digest<N> {
    type Error = HexParseError;

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        hex_to_bytes(value).map(|v| v.into())
    }
}

impl<const N: usize> Serializable for Blake3Digest<N> {
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        target.write_bytes(&self.0);
    }
}

impl<const N: usize> Deserializable for Blake3Digest<N> {
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        source.read_array().map(Self)
    }
}
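/// BLAKE3 hash function with a 256-bit (32-byte) output.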
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3_256;

impl HasherExt for Blake3_256 {
    type Digest = Blake3Digest<32>;

    fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        for slice in slices {
            hasher.update(slice);
        }
        Blake3Digest(hasher.finalize().into())
    }
}
impl Blake3_256 {
    /// Collision resistance of the hash function, in bits.
    pub const COLLISION_RESISTANCE: u32 = 128;

    /// Returns a hash of the provided sequence of bytes.
    pub fn hash(bytes: &[u8]) -> Blake3Digest<32> {
        Blake3Digest(blake3::hash(bytes).into())
    }

    /// Returns a hash of two digests.
    pub fn merge(values: &[Blake3Digest<32>; 2]) -> Blake3Digest<32> {
        Self::hash(prepare_merge(values))
    }

    /// Returns a hash of many digests.
    pub fn merge_many(values: &[Blake3Digest<32>]) -> Blake3Digest<32> {
        Blake3Digest(blake3::hash(Blake3Digest::digests_as_bytes(values)).into())
    }

    /// Returns a hash of the seed digest together with a `u64` value (in little-endian byte
    /// order).
    pub fn merge_with_int(seed: Blake3Digest<32>, value: u64) -> Blake3Digest<32> {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(hasher.finalize().into())
    }

    /// Returns a hash of the provided field elements.
    #[inline(always)]
    pub fn hash_elements<E: BasedVectorSpace<Felt>>(elements: &[E]) -> Blake3Digest<32> {
        Blake3Digest(hash_elements(elements))
    }

    /// Returns a hash of the byte slices produced by the provided iterator.
    #[inline(always)]
    pub fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Blake3Digest<DIGEST32_BYTES> {
        <Self as HasherExt>::hash_iter(slices)
    }
}
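/// BLAKE3 hash function with a 192-bit (24-byte) output, obtained by truncating the 256-bit
/// BLAKE3 digest.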
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Blake3_192;

impl HasherExt for Blake3_192 {
    type Digest = Blake3Digest<24>;

    fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Self::Digest {
        let mut hasher = blake3::Hasher::new();
        for slice in slices {
            hasher.update(slice);
        }
        Blake3Digest(shrink_array(hasher.finalize().into()))
    }
}
impl Blake3_192 {
    /// Collision resistance of the hash function, in bits.
    pub const COLLISION_RESISTANCE: u32 = 96;

    /// Returns a hash of the provided sequence of bytes.
    pub fn hash(bytes: &[u8]) -> Blake3Digest<24> {
        Blake3Digest(shrink_array(blake3::hash(bytes).into()))
    }

    /// Returns a hash of many digests.
    pub fn merge_many(values: &[Blake3Digest<24>]) -> Blake3Digest<24> {
        let bytes = Blake3Digest::digests_as_bytes(values);
        Blake3Digest(shrink_array(blake3::hash(bytes).into()))
    }

    /// Returns a hash of two digests.
    pub fn merge(values: &[Blake3Digest<24>; 2]) -> Blake3Digest<24> {
        Self::hash(prepare_merge(values))
    }

    /// Returns a hash of the seed digest together with a `u64` value (in little-endian byte
    /// order).
    pub fn merge_with_int(seed: Blake3Digest<24>, value: u64) -> Blake3Digest<24> {
        let mut hasher = blake3::Hasher::new();
        hasher.update(&seed.0);
        hasher.update(&value.to_le_bytes());
        Blake3Digest(shrink_array(hasher.finalize().into()))
    }

    /// Returns a hash of the provided field elements.
    #[inline(always)]
    pub fn hash_elements<E: BasedVectorSpace<Felt>>(elements: &[E]) -> Blake3Digest<24> {
        Blake3Digest(hash_elements(elements))
    }

    /// Returns a hash of the byte slices produced by the provided iterator.
    #[inline(always)]
    pub fn hash_iter<'a>(slices: impl Iterator<Item = &'a [u8]>) -> Blake3Digest<DIGEST24_BYTES> {
        <Self as HasherExt>::hash_iter(slices)
    }
}
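/// Hashes a slice of field elements into `N` bytes by serializing each element's basis
/// coefficients as canonical little-endian `u64` values, feeding them to BLAKE3, and
/// truncating the resulting 32-byte digest to `N` bytes.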
fn hash_elements<const N: usize, E>(elements: &[E]) -> [u8; N]
where
    E: BasedVectorSpace<Felt>,
{
    let digest = {
        const FELT_BYTES: usize = size_of::<u64>();
        const { assert!(FELT_BYTES == 8, "buffer arithmetic assumes 8-byte field elements") };

        let mut hasher = blake3::Hasher::new();

        // Accumulate serialized coefficients into a 64-byte buffer and feed the hasher one
        // full buffer at a time.
        let mut buf = [0_u8; 64];
        let mut buf_offset = 0;

        for elem in elements.iter() {
            for &felt in E::as_basis_coefficients_slice(elem) {
                buf[buf_offset..buf_offset + FELT_BYTES]
                    .copy_from_slice(&felt.as_canonical_u64().to_le_bytes());
                buf_offset += FELT_BYTES;

                if buf_offset == 64 {
                    hasher.update(&buf);
                    buf_offset = 0;
                }
            }
        }

        // Hash any remaining bytes that did not fill a complete buffer.
        if buf_offset > 0 {
            hasher.update(&buf[..buf_offset]);
        }

        hasher.finalize()
    };

    shrink_array(digest.into())
}
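/// Copies the first `N` bytes of `source` into a new `[u8; N]` array; `M >= N` is enforced at
/// compile time.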
fn shrink_array<const M: usize, const N: usize>(source: [u8; M]) -> [u8; N] {
    const {
        assert!(M >= N, "size of destination should be smaller than or equal to the source");
    }
    core::array::from_fn(|i| source[i])
}
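/// Copies `bytes` into the low-order positions of a new `[u8; N]` array, zero-padding the
/// remaining `N - M` bytes.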
fn expand_bytes<const M: usize, const N: usize>(bytes: &[u8; M]) -> [u8; N] {
    assert!(M <= N, "source must fit into the destination array so it can be expanded");
    let mut expanded = [0u8; N];
    expanded[..M].copy_from_slice(bytes);
    expanded
}
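/// Reinterprets an array of digests as a flat byte slice so it can be hashed in a single call.
///
/// This assumes that `D` is a transparent wrapper around its byte array, so that the `N` values
/// are laid out as `size_of::<D>() * N` contiguous bytes.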
fn prepare_merge<const N: usize, D>(args: &[D; N]) -> &[u8]
where
    D: Deref<Target = [u8]>,
{
    assert!(N > 0, "N shouldn't represent an empty slice!");
    let values = args.as_ptr() as *const u8;
    let len = size_of::<D>() * N;
    // SAFETY: `args` points to `N` contiguous values of `D`, i.e. to `size_of::<D>() * N`
    // initialized bytes; the debug assertion below checks that this layout matches the bytes
    // exposed through `Deref`.
    let bytes = unsafe { from_raw_parts(values, len) };
    debug_assert_eq!(args[0].deref(), &bytes[..len / N]);
    bytes
}