light_concurrent_merkle_tree/copy.rs

use std::ops::Deref;

use light_bounded_vec::{BoundedVecMetadata, CyclicBoundedVecMetadata};
use light_hasher::Hasher;
use light_utils::offset::copy::{read_bounded_vec_at, read_cyclic_bounded_vec_at, read_value_at};
use memoffset::{offset_of, span_of};

use crate::{errors::ConcurrentMerkleTreeError, ConcurrentMerkleTree};

/// Owned copy of a `ConcurrentMerkleTree` deserialized from a byte slice
/// (e.g. account data), in contrast to the zero-copy wrappers which borrow
/// the underlying buffer.
#[derive(Debug)]
pub struct ConcurrentMerkleTreeCopy<H, const HEIGHT: usize>(ConcurrentMerkleTree<H, HEIGHT>)
where
    H: Hasher;

impl<H, const HEIGHT: usize> ConcurrentMerkleTreeCopy<H, HEIGHT>
where
    H: Hasher,
{
    /// Deserializes a `ConcurrentMerkleTree` from `bytes`, copying all
    /// dynamic fields into freshly allocated vectors. Returns the tree
    /// together with the offset right after the last byte that was read.
    pub fn struct_from_bytes_copy(
        bytes: &[u8],
    ) -> Result<(ConcurrentMerkleTree<H, HEIGHT>, usize), ConcurrentMerkleTreeError> {
        // The buffer has to contain at least the statically sized fields.
        let expected_size = ConcurrentMerkleTree::<H, HEIGHT>::non_dyn_fields_size();
        if bytes.len() < expected_size {
            return Err(ConcurrentMerkleTreeError::BufferSize(
                expected_size,
                bytes.len(),
            ));
        }

        let height = usize::from_le_bytes(
            bytes[span_of!(ConcurrentMerkleTree<H, HEIGHT>, height)]
                .try_into()
                .unwrap(),
        );
        let canopy_depth = usize::from_le_bytes(
            bytes[span_of!(ConcurrentMerkleTree<H, HEIGHT>, canopy_depth)]
                .try_into()
                .unwrap(),
        );

        let mut offset = offset_of!(ConcurrentMerkleTree<H, HEIGHT>, next_index);

        // Read the remaining fixed-size fields and the metadata of the
        // dynamic vectors, advancing `offset` with each read.
        let next_index = unsafe { read_value_at(bytes, &mut offset) };
        let sequence_number = unsafe { read_value_at(bytes, &mut offset) };
        let rightmost_leaf = unsafe { read_value_at(bytes, &mut offset) };
        let filled_subtrees_metadata: BoundedVecMetadata =
            unsafe { read_value_at(bytes, &mut offset) };
        let changelog_metadata: CyclicBoundedVecMetadata =
            unsafe { read_value_at(bytes, &mut offset) };
        let roots_metadata: CyclicBoundedVecMetadata = unsafe { read_value_at(bytes, &mut offset) };
        let canopy_metadata: BoundedVecMetadata = unsafe { read_value_at(bytes, &mut offset) };

        // With the vector capacities known, check the full expected size of
        // the account.
        let expected_size = ConcurrentMerkleTree::<H, HEIGHT>::size_in_account(
            height,
            changelog_metadata.capacity(),
            roots_metadata.capacity(),
            canopy_depth,
        );
        if bytes.len() < expected_size {
            return Err(ConcurrentMerkleTreeError::BufferSize(
                expected_size,
                bytes.len(),
            ));
        }

        let filled_subtrees =
            unsafe { read_bounded_vec_at(bytes, &mut offset, &filled_subtrees_metadata) };
        let changelog =
            unsafe { read_cyclic_bounded_vec_at(bytes, &mut offset, &changelog_metadata) };
        let roots = unsafe { read_cyclic_bounded_vec_at(bytes, &mut offset, &roots_metadata) };
        let canopy = unsafe { read_bounded_vec_at(bytes, &mut offset, &canopy_metadata) };

        // Allocate an owned tree and move the deserialized fields into it.
        let mut merkle_tree = ConcurrentMerkleTree::new(
            height,
            changelog_metadata.capacity(),
            roots_metadata.capacity(),
            canopy_depth,
        )?;
        unsafe {
            *merkle_tree.next_index = next_index;
            *merkle_tree.sequence_number = sequence_number;
            *merkle_tree.rightmost_leaf = rightmost_leaf;
        }
        merkle_tree.filled_subtrees = filled_subtrees;
        merkle_tree.changelog = changelog;
        merkle_tree.roots = roots;
        merkle_tree.canopy = canopy;

        Ok((merkle_tree, offset))
    }

    /// Copies a tree out of `bytes` and validates its size constraints.
    pub fn from_bytes_copy(bytes: &[u8]) -> Result<Self, ConcurrentMerkleTreeError> {
        let (merkle_tree, _) = Self::struct_from_bytes_copy(bytes)?;
        merkle_tree.check_size_constraints()?;
        Ok(Self(merkle_tree))
    }
}

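// Illustrative usage sketch (not part of the original module): deserializing
// an owned tree from fetched account data. `account_data: &[u8]`, the
// `Poseidon` hasher and a height of 26 are assumptions made for the example.
//
//     let tree =
//         ConcurrentMerkleTreeCopy::<Poseidon, 26>::from_bytes_copy(account_data)?;
//     // `Deref` (below) exposes the inner `ConcurrentMerkleTree`.
//     let root = tree.root();
//     let next_index = tree.next_index();
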
impl<H, const HEIGHT: usize> Deref for ConcurrentMerkleTreeCopy<H, HEIGHT>
where
    H: Hasher,
{
    type Target = ConcurrentMerkleTree<H, HEIGHT>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[cfg(test)]
mod test {
    use crate::zero_copy::ConcurrentMerkleTreeZeroCopyMut;

    use super::*;

    use ark_bn254::Fr;
    use ark_ff::{BigInteger, PrimeField, UniformRand};
    use light_hasher::Poseidon;
    use rand::{thread_rng, Rng};

    fn from_bytes_copy<
        const HEIGHT: usize,
        const CHANGELOG: usize,
        const ROOTS: usize,
        const CANOPY_DEPTH: usize,
        const OPERATIONS: usize,
    >() {
        // Reference tree kept in regular (owned) memory.
        let mut mt_1 =
            ConcurrentMerkleTree::<Poseidon, HEIGHT>::new(HEIGHT, CHANGELOG, ROOTS, CANOPY_DEPTH)
                .unwrap();
        mt_1.init().unwrap();

        // Buffer emulating account data; fill it with random bytes before
        // initialization.
        let mut bytes = vec![
            0u8;
            ConcurrentMerkleTree::<Poseidon, HEIGHT>::size_in_account(
                HEIGHT,
                CHANGELOG,
                ROOTS,
                CANOPY_DEPTH
            )
        ];
        thread_rng().fill(bytes.as_mut_slice());

        // Initialize the tree inside the buffer via the zero-copy wrapper and
        // check its freshly initialized state.
        {
            let mut mt =
                ConcurrentMerkleTreeZeroCopyMut::<Poseidon, HEIGHT>::from_bytes_zero_copy_init(
                    bytes.as_mut_slice(),
                    HEIGHT,
                    CANOPY_DEPTH,
                    CHANGELOG,
                    ROOTS,
                )
                .unwrap();
            mt.init().unwrap();

            assert_eq!(mt.height, HEIGHT);
            assert_eq!(mt.canopy_depth, CANOPY_DEPTH);
            assert_eq!(mt.next_index(), 0);
            assert_eq!(mt.sequence_number(), 0);
            assert_eq!(mt.rightmost_leaf(), Poseidon::zero_bytes()[0]);

            assert_eq!(mt.filled_subtrees.capacity(), HEIGHT);
            assert_eq!(mt.filled_subtrees.len(), HEIGHT);

            assert_eq!(mt.changelog.capacity(), CHANGELOG);
            assert_eq!(mt.changelog.len(), 1);

            assert_eq!(mt.roots.capacity(), ROOTS);
            assert_eq!(mt.roots.len(), 1);

            assert_eq!(
                mt.canopy.capacity(),
                ConcurrentMerkleTree::<Poseidon, HEIGHT>::canopy_size(CANOPY_DEPTH)
            );

            assert_eq!(mt.root(), Poseidon::zero_bytes()[HEIGHT]);
        }

        let mut rng = thread_rng();

        // Append the same random leaves to both trees and make sure they stay
        // in sync.
        for _ in 0..OPERATIONS {
            let mut mt_2 =
                ConcurrentMerkleTreeZeroCopyMut::<Poseidon, HEIGHT>::from_bytes_zero_copy_mut(
                    &mut bytes,
                )
                .unwrap();

            let leaf: [u8; 32] = Fr::rand(&mut rng)
                .into_bigint()
                .to_bytes_be()
                .try_into()
                .unwrap();

            mt_1.append(&leaf).unwrap();
            mt_2.append(&leaf).unwrap();

            assert_eq!(mt_1, *mt_2);
        }

        // Deserialize an owned copy from the buffer and compare it with the
        // reference tree.
        let mt_2 = ConcurrentMerkleTreeCopy::<Poseidon, HEIGHT>::from_bytes_copy(&bytes).unwrap();

        assert_eq!(mt_1.height, mt_2.height);
        assert_eq!(mt_1.canopy_depth, mt_2.canopy_depth);
        assert_eq!(mt_1.next_index(), mt_2.next_index());
        assert_eq!(mt_1.sequence_number(), mt_2.sequence_number());
        assert_eq!(mt_1.rightmost_leaf(), mt_2.rightmost_leaf());
        assert_eq!(
            mt_1.filled_subtrees.as_slice(),
            mt_2.filled_subtrees.as_slice()
        );
    }

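    // A minimal round-trip sketch added for illustration (not in the original
    // test suite): initialize an empty tree in a byte buffer, copy it back out
    // with `from_bytes_copy`, and check the empty root. The small parameters
    // (height 8, changelog 8, roots 8, canopy 2) are assumptions chosen to
    // keep the example cheap.
    #[test]
    fn test_from_bytes_copy_empty_roundtrip() {
        const HEIGHT: usize = 8;
        const CHANGELOG: usize = 8;
        const ROOTS: usize = 8;
        const CANOPY_DEPTH: usize = 2;

        let mut bytes = vec![
            0u8;
            ConcurrentMerkleTree::<Poseidon, HEIGHT>::size_in_account(
                HEIGHT,
                CHANGELOG,
                ROOTS,
                CANOPY_DEPTH
            )
        ];

        // Initialize the tree in place through the zero-copy wrapper...
        {
            let mut mt =
                ConcurrentMerkleTreeZeroCopyMut::<Poseidon, HEIGHT>::from_bytes_zero_copy_init(
                    bytes.as_mut_slice(),
                    HEIGHT,
                    CANOPY_DEPTH,
                    CHANGELOG,
                    ROOTS,
                )
                .unwrap();
            mt.init().unwrap();
        }

        // ...then deserialize an owned copy and compare against the empty root.
        let mt = ConcurrentMerkleTreeCopy::<Poseidon, HEIGHT>::from_bytes_copy(&bytes).unwrap();
        assert_eq!(mt.root(), Poseidon::zero_bytes()[HEIGHT]);
        assert_eq!(mt.next_index(), 0);
    }
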
    #[test]
    fn test_from_bytes_copy_26_1400_2400_10_256_1024() {
        const HEIGHT: usize = 26;
        const CHANGELOG_SIZE: usize = 1400;
        const ROOTS: usize = 2400;
        const CANOPY_DEPTH: usize = 10;

        const OPERATIONS: usize = 1024;

        from_bytes_copy::<HEIGHT, CHANGELOG_SIZE, ROOTS, CANOPY_DEPTH, OPERATIONS>()
    }
}