light_indexed_merkle_tree/zero_copy.rs
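//! Zero-copy wrappers around [`IndexedMerkleTree`] that reinterpret an
//! existing byte buffer (such as an account's data) as a tree instead of
//! allocating one on the heap.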

use std::{
    fmt,
    marker::PhantomData,
    mem,
    ops::{Deref, DerefMut},
};

use light_bounded_vec::{CyclicBoundedVec, CyclicBoundedVecMetadata};
use light_concurrent_merkle_tree::{
    errors::ConcurrentMerkleTreeError,
    offset::zero_copy::{read_array_like_ptr_at, read_ptr_at, write_at},
    zero_copy::{ConcurrentMerkleTreeZeroCopy, ConcurrentMerkleTreeZeroCopyMut},
    ConcurrentMerkleTree,
};
use light_hasher::Hasher;
use num_traits::{CheckedAdd, CheckedSub, ToBytes, Unsigned};

use crate::{errors::IndexedMerkleTreeError, IndexedMerkleTree};

/// Read-only zero-copy wrapper around an [`IndexedMerkleTree`] whose data
/// lives in a borrowed byte buffer.
#[derive(Debug)]
pub struct IndexedMerkleTreeZeroCopy<'a, H, I, const HEIGHT: usize, const NET_HEIGHT: usize>
where
    H: Hasher,
    I: CheckedAdd
        + CheckedSub
        + Copy
        + Clone
        + fmt::Debug
        + PartialOrd
        + ToBytes
        + TryFrom<usize>
        + Unsigned,
    usize: From<I>,
{
    pub merkle_tree: mem::ManuallyDrop<IndexedMerkleTree<H, I, HEIGHT, NET_HEIGHT>>,
    // This field ensures that the wrapper does not outlive the buffer it
    // borrows from.
    _bytes: &'a [u8],
}

impl<'a, H, I, const HEIGHT: usize, const NET_HEIGHT: usize>
    IndexedMerkleTreeZeroCopy<'a, H, I, HEIGHT, NET_HEIGHT>
where
    H: Hasher,
    I: CheckedAdd
        + CheckedSub
        + Copy
        + Clone
        + fmt::Debug
        + PartialOrd
        + ToBytes
        + TryFrom<usize>
        + Unsigned,
    usize: From<I>,
{
    /// Returns a zero-copy wrapper of `IndexedMerkleTree` created from the
    /// data in the provided `bytes` buffer.
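    ///
    /// # Example
    ///
    /// A minimal sketch (hence `ignore`): `bytes` is assumed to already hold
    /// a tree serialized with matching type parameters, `Poseidon` is just
    /// one possible [`Hasher`], and the const parameters mirror the ones used
    /// in the test below.
    ///
    /// ```ignore
    /// use light_hasher::Poseidon;
    ///
    /// // `bytes` holds the serialized tree, e.g. an account's data slice.
    /// let tree =
    ///     IndexedMerkleTreeZeroCopy::<Poseidon, usize, 26, 16>::from_bytes_zero_copy(&bytes)?;
    /// // Read-only access goes through `Deref<Target = IndexedMerkleTree<...>>`.
    /// let inner: &IndexedMerkleTree<Poseidon, usize, 26, 16> = &tree;
    /// ```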
    pub fn from_bytes_zero_copy(bytes: &'a [u8]) -> Result<Self, IndexedMerkleTreeError> {
        // Reinterpret the leading part of the buffer as the concurrent
        // Merkle tree and get the offset at which the indexed fields start.
        let (merkle_tree, mut offset) =
            ConcurrentMerkleTreeZeroCopy::struct_from_bytes_zero_copy(bytes)?;

        let indexed_changelog_metadata: *mut CyclicBoundedVecMetadata =
            unsafe { read_ptr_at(bytes, &mut offset) };

        // Make sure the buffer is large enough for the whole tree before
        // touching the dynamically sized fields.
        let expected_size = IndexedMerkleTree::<H, I, HEIGHT, NET_HEIGHT>::size_in_account(
            merkle_tree.height,
            merkle_tree.changelog.capacity(),
            merkle_tree.roots.capacity(),
            merkle_tree.canopy_depth,
            unsafe { (*indexed_changelog_metadata).capacity() },
        );
        if bytes.len() < expected_size {
            return Err(IndexedMerkleTreeError::ConcurrentMerkleTree(
                ConcurrentMerkleTreeError::BufferSize(expected_size, bytes.len()),
            ));
        }

        let indexed_changelog = unsafe {
            CyclicBoundedVec::from_raw_parts(
                indexed_changelog_metadata,
                read_array_like_ptr_at(
                    bytes,
                    &mut offset,
                    (*indexed_changelog_metadata).capacity(),
                ),
            )
        };

        Ok(Self {
            merkle_tree: mem::ManuallyDrop::new(IndexedMerkleTree {
                merkle_tree,
                indexed_changelog,
                _index: PhantomData,
            }),
            _bytes: bytes,
        })
    }
}

impl<H, I, const HEIGHT: usize, const NET_HEIGHT: usize> Deref
    for IndexedMerkleTreeZeroCopy<'_, H, I, HEIGHT, NET_HEIGHT>
where
    H: Hasher,
    I: CheckedAdd
        + CheckedSub
        + Copy
        + Clone
        + fmt::Debug
        + PartialOrd
        + ToBytes
        + TryFrom<usize>
        + Unsigned,
    usize: From<I>,
{
    type Target = IndexedMerkleTree<H, I, HEIGHT, NET_HEIGHT>;

    fn deref(&self) -> &Self::Target {
        &self.merkle_tree
    }
}

/// Mutable zero-copy wrapper around an [`IndexedMerkleTree`] whose data lives
/// in a borrowed byte buffer.
#[derive(Debug)]
pub struct IndexedMerkleTreeZeroCopyMut<'a, H, I, const HEIGHT: usize, const NET_HEIGHT: usize>(
    IndexedMerkleTreeZeroCopy<'a, H, I, HEIGHT, NET_HEIGHT>,
)
where
    H: Hasher,
    I: CheckedAdd
        + CheckedSub
        + Copy
        + Clone
        + fmt::Debug
        + PartialOrd
        + ToBytes
        + TryFrom<usize>
        + Unsigned,
    usize: From<I>;

impl<'a, H, I, const HEIGHT: usize, const NET_HEIGHT: usize>
    IndexedMerkleTreeZeroCopyMut<'a, H, I, HEIGHT, NET_HEIGHT>
where
    H: Hasher,
    I: CheckedAdd
        + CheckedSub
        + Copy
        + Clone
        + fmt::Debug
        + PartialOrd
        + ToBytes
        + TryFrom<usize>
        + Unsigned,
    usize: From<I>,
{
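    /// Returns a mutable zero-copy wrapper over an already initialized tree
    /// stored in `bytes`.
    ///
    /// A minimal sketch (hence `ignore`); the buffer and the 32-byte `leaf`
    /// value are assumed to exist, and `Poseidon` is just one possible
    /// [`Hasher`]:
    ///
    /// ```ignore
    /// use light_hasher::Poseidon;
    ///
    /// let mut tree =
    ///     IndexedMerkleTreeZeroCopyMut::<Poseidon, usize, 26, 16>::from_bytes_zero_copy_mut(&mut bytes)?;
    /// tree.append(&leaf)?;
    /// ```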
    pub fn from_bytes_zero_copy_mut(bytes: &'a mut [u8]) -> Result<Self, IndexedMerkleTreeError> {
        Ok(Self(IndexedMerkleTreeZeroCopy::from_bytes_zero_copy(
            bytes,
        )?))
    }

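    /// Initializes the static fields of a new tree directly in `bytes` and
    /// returns a mutable zero-copy wrapper over it.
    ///
    /// A minimal sketch (hence `ignore`), sizing the buffer with
    /// `size_in_account` the same way the test below does; `Poseidon` is just
    /// one possible [`Hasher`]:
    ///
    /// ```ignore
    /// use light_hasher::Poseidon;
    ///
    /// let mut bytes = vec![
    ///     0u8;
    ///     IndexedMerkleTree::<Poseidon, usize, 26, 16>::size_in_account(26, 1400, 2400, 10, 256)
    /// ];
    /// let mut tree =
    ///     IndexedMerkleTreeZeroCopyMut::<Poseidon, usize, 26, 16>::from_bytes_zero_copy_init(
    ///         &mut bytes, 26, 10, 1400, 2400, 256,
    ///     )?;
    /// tree.init()?;
    /// ```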
    pub fn from_bytes_zero_copy_init(
        bytes: &'a mut [u8],
        height: usize,
        canopy_depth: usize,
        changelog_capacity: usize,
        roots_capacity: usize,
        indexed_changelog_capacity: usize,
    ) -> Result<Self, IndexedMerkleTreeError> {
        // Write the statically sized fields of the underlying concurrent
        // Merkle tree into the buffer.
        let _ = ConcurrentMerkleTreeZeroCopyMut::<H, HEIGHT>::fill_non_dyn_fields_in_buffer(
            bytes,
            height,
            canopy_depth,
            changelog_capacity,
            roots_capacity,
        )?;

        let expected_size = IndexedMerkleTree::<H, I, HEIGHT, NET_HEIGHT>::size_in_account(
            height,
            changelog_capacity,
            roots_capacity,
            canopy_depth,
            indexed_changelog_capacity,
        );
        if bytes.len() < expected_size {
            return Err(IndexedMerkleTreeError::ConcurrentMerkleTree(
                ConcurrentMerkleTreeError::BufferSize(expected_size, bytes.len()),
            ));
        }

        // The indexed changelog metadata lives right after the concurrent
        // Merkle tree part of the account.
        let mut offset = ConcurrentMerkleTree::<H, HEIGHT>::size_in_account(
            height,
            changelog_capacity,
            roots_capacity,
            canopy_depth,
        );

        let indexed_changelog_metadata = CyclicBoundedVecMetadata::new(indexed_changelog_capacity);
        write_at::<CyclicBoundedVecMetadata>(
            bytes,
            &indexed_changelog_metadata.to_le_bytes(),
            &mut offset,
        );

        Self::from_bytes_zero_copy_mut(bytes)
    }
}

impl<H, I, const HEIGHT: usize, const NET_HEIGHT: usize> Deref
    for IndexedMerkleTreeZeroCopyMut<'_, H, I, HEIGHT, NET_HEIGHT>
where
    H: Hasher,
    I: CheckedAdd
        + CheckedSub
        + Copy
        + Clone
        + fmt::Debug
        + PartialOrd
        + ToBytes
        + TryFrom<usize>
        + Unsigned,
    usize: From<I>,
{
    type Target = IndexedMerkleTree<H, I, HEIGHT, NET_HEIGHT>;

    fn deref(&self) -> &Self::Target {
        &self.0.merkle_tree
    }
}

impl<H, I, const HEIGHT: usize, const NET_HEIGHT: usize> DerefMut
    for IndexedMerkleTreeZeroCopyMut<'_, H, I, HEIGHT, NET_HEIGHT>
where
    H: Hasher,
    I: CheckedAdd
        + CheckedSub
        + Copy
        + Clone
        + fmt::Debug
        + PartialOrd
        + ToBytes
        + TryFrom<usize>
        + Unsigned,
    usize: From<I>,
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0.merkle_tree
    }
}

#[cfg(test)]
mod test {
    use light_hasher::{bigint::bigint_to_be_bytes_array, Poseidon};
    use num_bigint::RandBigInt;
    use rand::thread_rng;

    use super::*;

    /// Builds a heap-allocated tree and a zero-copy tree backed by a byte
    /// buffer, then checks that both stay identical across random appends.
    fn from_bytes_zero_copy<
        const HEIGHT: usize,
        const NET_HEIGHT: usize,
        const CHANGELOG_SIZE: usize,
        const ROOTS: usize,
        const CANOPY_DEPTH: usize,
        const INDEXED_CHANGELOG_SIZE: usize,
        const OPERATIONS: usize,
    >() {
        let mut mt_1 = IndexedMerkleTree::<Poseidon, usize, HEIGHT, NET_HEIGHT>::new(
            HEIGHT,
            CHANGELOG_SIZE,
            ROOTS,
            CANOPY_DEPTH,
            INDEXED_CHANGELOG_SIZE,
        )
        .unwrap();
        mt_1.init().unwrap();

        let mut bytes = vec![
            0u8;
            IndexedMerkleTree::<Poseidon, usize, HEIGHT, NET_HEIGHT>::size_in_account(
                HEIGHT,
                CHANGELOG_SIZE,
                ROOTS,
                CANOPY_DEPTH,
                INDEXED_CHANGELOG_SIZE
            )
        ];

        {
            let mut mt_2 =
                IndexedMerkleTreeZeroCopyMut::<Poseidon, usize, HEIGHT, NET_HEIGHT>::from_bytes_zero_copy_init(
                    &mut bytes,
                    HEIGHT,
                    CANOPY_DEPTH,
                    CHANGELOG_SIZE,
                    ROOTS,
                    INDEXED_CHANGELOG_SIZE,
                )
                .unwrap();
            mt_2.init().unwrap();

            assert_eq!(mt_1, *mt_2);
        }

        let mut rng = thread_rng();

        for _ in 0..OPERATIONS {
            // Reload the tree from bytes on each iteration.
            let mut mt_2 =
                IndexedMerkleTreeZeroCopyMut::<Poseidon, usize, HEIGHT, NET_HEIGHT>::from_bytes_zero_copy_mut(
                    &mut bytes,
                )
                .unwrap();

            let leaf: [u8; 32] = bigint_to_be_bytes_array::<32>(&rng.gen_biguint(248)).unwrap();
            mt_1.append(&leaf).unwrap();
            mt_2.append(&leaf).unwrap();

            assert_eq!(mt_1, *mt_2);
        }
    }

    #[test]
    fn test_from_bytes_zero_copy_26_1400_2400_10_256_1024() {
        const HEIGHT: usize = 26;
        const NET_HEIGHT: usize = 16;
        const CHANGELOG_SIZE: usize = 1400;
        const ROOTS: usize = 2400;
        const CANOPY_DEPTH: usize = 10;
        const INDEXED_CHANGELOG_SIZE: usize = 256;

        const OPERATIONS: usize = 1024;

        from_bytes_zero_copy::<
            HEIGHT,
            NET_HEIGHT,
            CHANGELOG_SIZE,
            ROOTS,
            CANOPY_DEPTH,
            INDEXED_CHANGELOG_SIZE,
            OPERATIONS,
        >()
    }
}