ssz/encode/impls.rs

use super::*;
use core::num::NonZeroUsize;
use ethereum_types::{H256, U128, U256};
use smallvec::SmallVec;
use std::sync::Arc;

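// Implements `Encode` for an unsigned integer type as its fixed-width little-endian bytes.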
macro_rules! impl_encodable_for_uint {
    ($type: ident, $bit_size: expr) => {
        impl Encode for $type {
            fn is_ssz_fixed_len() -> bool {
                true
            }

            fn ssz_fixed_len() -> usize {
                $bit_size / 8
            }

            fn ssz_bytes_len(&self) -> usize {
                $bit_size / 8
            }

            fn ssz_append(&self, buf: &mut Vec<u8>) {
                buf.extend_from_slice(&self.to_le_bytes());
            }
        }
    };
}

impl_encodable_for_uint!(u8, 8);
impl_encodable_for_uint!(u16, 16);
impl_encodable_for_uint!(u32, 32);
impl_encodable_for_uint!(u64, 64);

#[cfg(target_pointer_width = "32")]
impl_encodable_for_uint!(usize, 32);

#[cfg(target_pointer_width = "64")]
impl_encodable_for_uint!(usize, 64);

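// Tuples are encoded as SSZ containers: fixed-size fields are written in place,
// while variable-size fields are replaced by four-byte offsets and appended at the end.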
// Based on the `tuple_impls` macro from the standard library.
macro_rules! impl_encode_for_tuples {
    ($(
        $Tuple:ident {
            $(($idx:tt) -> $T:ident)+
        }
    )+) => {
        $(
            impl<$($T: Encode),+> Encode for ($($T,)+) {
                fn is_ssz_fixed_len() -> bool {
                    $(
                        <$T as Encode>::is_ssz_fixed_len() &&
                    )*
                        true
                }

                fn ssz_fixed_len() -> usize {
                    if <Self as Encode>::is_ssz_fixed_len() {
                        $(
                            <$T as Encode>::ssz_fixed_len() +
                        )*
                            0
                    } else {
                        BYTES_PER_LENGTH_OFFSET
                    }
                }

                fn ssz_bytes_len(&self) -> usize {
                    if <Self as Encode>::is_ssz_fixed_len() {
                        <Self as Encode>::ssz_fixed_len()
                    } else {
                        let mut len = 0;
                        $(
                            len += if <$T as Encode>::is_ssz_fixed_len() {
                                <$T as Encode>::ssz_fixed_len()
                            } else {
                                BYTES_PER_LENGTH_OFFSET +
                                self.$idx.ssz_bytes_len()
                            };
                        )*
                        len
                    }
                }

                fn ssz_append(&self, buf: &mut Vec<u8>) {
                    let offset = $(
                            <$T as Encode>::ssz_fixed_len() +
                        )*
                            0;

                    let mut encoder = SszEncoder::container(buf, offset);

                    $(
                        encoder.append(&self.$idx);
                    )*

                    encoder.finalize();
                }
            }
        )+
    }
}

impl_encode_for_tuples! {
    Tuple2 {
        (0) -> A
        (1) -> B
    }
    Tuple3 {
        (0) -> A
        (1) -> B
        (2) -> C
    }
    Tuple4 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
    }
    Tuple5 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
    }
    Tuple6 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
    }
    Tuple7 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
    }
    Tuple8 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
    }
    Tuple9 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
        (8) -> I
    }
    Tuple10 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
        (8) -> I
        (9) -> J
    }
    Tuple11 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
        (8) -> I
        (9) -> J
        (10) -> K
    }
    Tuple12 {
        (0) -> A
        (1) -> B
        (2) -> C
        (3) -> D
        (4) -> E
        (5) -> F
        (6) -> G
        (7) -> H
        (8) -> I
        (9) -> J
        (10) -> K
        (11) -> L
    }
}

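// `Arc<T>` is transparent to SSZ: it encodes exactly as the inner `T`.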
impl<T: Encode> Encode for Arc<T> {
    fn is_ssz_fixed_len() -> bool {
        T::is_ssz_fixed_len()
    }

    fn ssz_fixed_len() -> usize {
        T::ssz_fixed_len()
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        self.as_ref().ssz_append(buf)
    }

    fn ssz_bytes_len(&self) -> usize {
        self.as_ref().ssz_bytes_len()
    }
}

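// Implements `Encode` for list-like collections. SSZ lists are variable-size:
// fixed-size items are concatenated directly, while variable-size items are
// written behind four-byte offsets.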
macro_rules! impl_for_vec {
    ($type: ty) => {
        impl<T: Encode> Encode for $type {
            fn is_ssz_fixed_len() -> bool {
                false
            }

            fn ssz_bytes_len(&self) -> usize {
                if <T as Encode>::is_ssz_fixed_len() {
                    <T as Encode>::ssz_fixed_len() * self.len()
                } else {
                    let mut len = self.iter().map(|item| item.ssz_bytes_len()).sum();
                    len += BYTES_PER_LENGTH_OFFSET * self.len();
                    len
                }
            }

            fn ssz_append(&self, buf: &mut Vec<u8>) {
                if T::is_ssz_fixed_len() {
                    buf.reserve(T::ssz_fixed_len() * self.len());

                    for item in self {
                        item.ssz_append(buf);
                    }
                } else {
                    let mut encoder =
                        SszEncoder::container(buf, self.len() * BYTES_PER_LENGTH_OFFSET);

                    for item in self {
                        encoder.append(item);
                    }

                    encoder.finalize();
                }
            }
        }
    };
}

impl_for_vec!(Vec<T>);
impl_for_vec!(SmallVec<[T; 1]>);
impl_for_vec!(SmallVec<[T; 2]>);
impl_for_vec!(SmallVec<[T; 3]>);
impl_for_vec!(SmallVec<[T; 4]>);
impl_for_vec!(SmallVec<[T; 5]>);
impl_for_vec!(SmallVec<[T; 6]>);
impl_for_vec!(SmallVec<[T; 7]>);
impl_for_vec!(SmallVec<[T; 8]>);

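// `bool` encodes as a single byte: `0x01` for `true`, `0x00` for `false`.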
impl Encode for bool {
    fn is_ssz_fixed_len() -> bool {
        true
    }

    fn ssz_fixed_len() -> usize {
        1
    }

    fn ssz_bytes_len(&self) -> usize {
        1
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        buf.extend_from_slice(&(*self as u8).to_le_bytes());
    }
}

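// `NonZeroUsize` encodes as the underlying `usize` value.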
impl Encode for NonZeroUsize {
    fn is_ssz_fixed_len() -> bool {
        <usize as Encode>::is_ssz_fixed_len()
    }

    fn ssz_fixed_len() -> usize {
        <usize as Encode>::ssz_fixed_len()
    }

    fn ssz_bytes_len(&self) -> usize {
        std::mem::size_of::<usize>()
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        self.get().ssz_append(buf)
    }
}

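// `H256` encodes as its raw 32 bytes.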
impl Encode for H256 {
    fn is_ssz_fixed_len() -> bool {
        true
    }

    fn ssz_fixed_len() -> usize {
        32
    }

    fn ssz_bytes_len(&self) -> usize {
        32
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        buf.extend_from_slice(self.as_bytes());
    }
}

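// `U256` encodes as 32 little-endian bytes.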
impl Encode for U256 {
    fn is_ssz_fixed_len() -> bool {
        true
    }

    fn ssz_fixed_len() -> usize {
        32
    }

    fn ssz_bytes_len(&self) -> usize {
        32
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        let n = <Self as Encode>::ssz_fixed_len();
        let s = buf.len();

        buf.resize(s + n, 0);
        self.to_little_endian(&mut buf[s..]);
    }
}

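// `U128` encodes as 16 little-endian bytes.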
impl Encode for U128 {
    fn is_ssz_fixed_len() -> bool {
        true
    }

    fn ssz_fixed_len() -> usize {
        16
    }

    fn ssz_bytes_len(&self) -> usize {
        16
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        let n = <Self as Encode>::ssz_fixed_len();
        let s = buf.len();

        buf.resize(s + n, 0);
        self.to_little_endian(&mut buf[s..]);
    }
}

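// Implements `Encode` for a fixed-length byte array, which encodes as its raw bytes.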
macro_rules! impl_encodable_for_u8_array {
    ($len: expr) => {
        impl Encode for [u8; $len] {
            fn is_ssz_fixed_len() -> bool {
                true
            }

            fn ssz_fixed_len() -> usize {
                $len
            }

            fn ssz_bytes_len(&self) -> usize {
                $len
            }

            fn ssz_append(&self, buf: &mut Vec<u8>) {
                buf.extend_from_slice(&self[..]);
            }
        }
    };
}

impl_encodable_for_u8_array!(4);
impl_encodable_for_u8_array!(32);

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn vec_of_u8() {
        let vec: Vec<u8> = vec![];
        assert_eq!(vec.as_ssz_bytes(), vec![]);

        let vec: Vec<u8> = vec![1];
        assert_eq!(vec.as_ssz_bytes(), vec![1]);

        let vec: Vec<u8> = vec![0, 1, 2, 3];
        assert_eq!(vec.as_ssz_bytes(), vec![0, 1, 2, 3]);
    }

    #[test]
    fn vec_of_vec_of_u8() {
        let vec: Vec<Vec<u8>> = vec![];
        assert_eq!(vec.as_ssz_bytes(), vec![]);

        let vec: Vec<Vec<u8>> = vec![vec![]];
        assert_eq!(vec.as_ssz_bytes(), vec![4, 0, 0, 0]);

        let vec: Vec<Vec<u8>> = vec![vec![], vec![]];
        assert_eq!(vec.as_ssz_bytes(), vec![8, 0, 0, 0, 8, 0, 0, 0]);

        let vec: Vec<Vec<u8>> = vec![vec![0, 1, 2], vec![11, 22, 33]];
        assert_eq!(
            vec.as_ssz_bytes(),
            vec![8, 0, 0, 0, 11, 0, 0, 0, 0, 1, 2, 11, 22, 33]
        );
    }

    #[test]
    fn ssz_encode_u8() {
        assert_eq!(0_u8.as_ssz_bytes(), vec![0]);
        assert_eq!(1_u8.as_ssz_bytes(), vec![1]);
        assert_eq!(100_u8.as_ssz_bytes(), vec![100]);
        assert_eq!(255_u8.as_ssz_bytes(), vec![255]);
    }

    #[test]
    fn ssz_encode_u16() {
        assert_eq!(1_u16.as_ssz_bytes(), vec![1, 0]);
        assert_eq!(100_u16.as_ssz_bytes(), vec![100, 0]);
        assert_eq!((1_u16 << 8).as_ssz_bytes(), vec![0, 1]);
        assert_eq!(65535_u16.as_ssz_bytes(), vec![255, 255]);
    }

    #[test]
    fn ssz_encode_u32() {
        assert_eq!(1_u32.as_ssz_bytes(), vec![1, 0, 0, 0]);
        assert_eq!(100_u32.as_ssz_bytes(), vec![100, 0, 0, 0]);
        assert_eq!((1_u32 << 16).as_ssz_bytes(), vec![0, 0, 1, 0]);
        assert_eq!((1_u32 << 24).as_ssz_bytes(), vec![0, 0, 0, 1]);
        assert_eq!((!0_u32).as_ssz_bytes(), vec![255, 255, 255, 255]);
    }

    #[test]
    fn ssz_encode_u64() {
        assert_eq!(1_u64.as_ssz_bytes(), vec![1, 0, 0, 0, 0, 0, 0, 0]);
        assert_eq!(
            (!0_u64).as_ssz_bytes(),
            vec![255, 255, 255, 255, 255, 255, 255, 255]
        );
    }

    #[test]
    fn ssz_encode_usize() {
        assert_eq!(1_usize.as_ssz_bytes(), vec![1, 0, 0, 0, 0, 0, 0, 0]);
        assert_eq!(
            (!0_usize).as_ssz_bytes(),
            vec![255, 255, 255, 255, 255, 255, 255, 255]
        );
    }

    #[test]
    fn ssz_encode_bool() {
        assert_eq!(true.as_ssz_bytes(), vec![1]);
        assert_eq!(false.as_ssz_bytes(), vec![0]);
    }

    #[test]
    fn ssz_encode_h256() {
        assert_eq!(H256::from(&[0; 32]).as_ssz_bytes(), vec![0; 32]);
        assert_eq!(H256::from(&[1; 32]).as_ssz_bytes(), vec![1; 32]);

        let bytes = vec![
            1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0,
        ];

        assert_eq!(H256::from_slice(&bytes).as_ssz_bytes(), bytes);
    }

    #[test]
    fn ssz_encode_u8_array_4() {
        assert_eq!([0, 0, 0, 0].as_ssz_bytes(), vec![0; 4]);
        assert_eq!([1, 0, 0, 0].as_ssz_bytes(), vec![1, 0, 0, 0]);
        assert_eq!([1, 2, 3, 4].as_ssz_bytes(), vec![1, 2, 3, 4]);
    }

    #[test]
    fn tuple() {
        assert_eq!((10u8, 11u8).as_ssz_bytes(), vec![10, 11]);
        assert_eq!((10u32, 11u8).as_ssz_bytes(), vec![10, 0, 0, 0, 11]);
        assert_eq!((10u8, 11u8, 12u8).as_ssz_bytes(), vec![10, 11, 12]);
    }
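
    #[test]
    fn vec_of_u16() {
        // Fixed-size items are concatenated directly, each as little-endian bytes.
        let vec: Vec<u16> = vec![1, 2, 3];
        assert_eq!(vec.as_ssz_bytes(), vec![1, 0, 2, 0, 3, 0]);
    }

    #[test]
    fn arc_encodes_as_inner() {
        // `Arc<T>` delegates to the inner `T`, so the encodings match.
        assert_eq!(Arc::new(7_u32).as_ssz_bytes(), 7_u32.as_ssz_bytes());
    }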
}