//! Flattening helpers for slices and vectors of `FixedBytes` (alloy_primitives/bits/flatten.rs).
1use crate::FixedBytes;
2use alloc::vec::Vec;
3use core::slice;
4
/// Extension trait for flattening a slice of `FixedBytes` to a byte slice.
///
/// This mirrors the standard library's `as_flattened` and `as_flattened_mut` methods for
/// `&[[T; N]]`.
pub trait FixedBytesSliceExt {
    /// Takes a `&[FixedBytes<N>]` and flattens it to a `&[u8]`.
    ///
    /// The returned slice views the same memory and has length `self.len() * N`.
    ///
    /// # Panics
    ///
    /// This panics if the length of the resulting slice would overflow a `usize`.
    ///
    /// This mirrors the std contract for `as_flattened`; because the flattened element type
    /// here is `u8`, the product `self.len() * N` cannot actually overflow (for `N == 0` it
    /// is simply 0), so the implementations provided by this crate never panic.
    ///
    /// # Examples
    ///
    /// ```
    /// use alloy_primitives::{FixedBytes, FixedBytesSliceExt};
    ///
    /// let arr = [FixedBytes::<4>::new([1, 2, 3, 4]), FixedBytes::new([5, 6, 7, 8])];
    /// assert_eq!(arr.as_flattened(), &[1, 2, 3, 4, 5, 6, 7, 8]);
    /// ```
    fn as_flattened(&self) -> &[u8];

    /// Takes a `&mut [FixedBytes<N>]` and flattens it to a `&mut [u8]`.
    ///
    /// The returned slice views the same memory and has length `self.len() * N`.
    ///
    /// # Panics
    ///
    /// This panics if the length of the resulting slice would overflow a `usize`.
    ///
    /// This mirrors the std contract for `as_flattened_mut`; because the flattened element
    /// type here is `u8`, the product `self.len() * N` cannot actually overflow (for `N == 0`
    /// it is simply 0), so the implementations provided by this crate never panic.
    ///
    /// # Examples
    ///
    /// ```
    /// use alloy_primitives::{FixedBytes, FixedBytesSliceExt};
    ///
    /// fn add_one(slice: &mut [u8]) {
    ///     for b in slice {
    ///         *b = b.wrapping_add(1);
    ///     }
    /// }
    ///
    /// let mut arr = [FixedBytes::<4>::new([1, 2, 3, 4]), FixedBytes::new([5, 6, 7, 8])];
    /// add_one(arr.as_flattened_mut());
    /// assert_eq!(arr[0].as_slice(), &[2, 3, 4, 5]);
    /// ```
    fn as_flattened_mut(&mut self) -> &mut [u8];
}
53
impl<const N: usize> FixedBytesSliceExt for [FixedBytes<N>] {
    #[inline]
    fn as_flattened(&self) -> &[u8] {
        // SAFETY: `self.len() * N` cannot overflow: for `N > 0` the slice already occupies
        // `len * N` bytes of the address space (so the product is at most `isize::MAX`), and
        // for `N == 0` the product is 0.
        let len = unsafe { self.len().unchecked_mul(N) };
        // SAFETY: `FixedBytes<N>` is `repr(transparent)` over `[u8; N]`, so the pointee is
        // `len` contiguous, initialized `u8`s, and any pointer is aligned for `u8`.
        unsafe { slice::from_raw_parts(self.as_ptr().cast(), len) }
    }

    #[inline]
    fn as_flattened_mut(&mut self) -> &mut [u8] {
        // SAFETY: same overflow argument as in `as_flattened`: the bytes already exist in the
        // address space for `N > 0`, and the product is 0 for `N == 0`.
        let len = unsafe { self.len().unchecked_mul(N) };
        // SAFETY: `FixedBytes<N>` is `repr(transparent)` over `[u8; N]`, and `&mut self`
        // guarantees exclusive access to the underlying bytes for the returned lifetime.
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr().cast(), len) }
    }
}
73
/// Extension trait for flattening a `Vec` of `FixedBytes` to a `Vec<u8>`.
///
/// This mirrors the standard library's `into_flattened` method for `Vec<[T; N]>`.
pub trait FixedBytesVecExt {
    /// Takes a `Vec<FixedBytes<N>>` and flattens it into a `Vec<u8>`.
    ///
    /// The returned vector reuses the original allocation; its length is `self.len() * N`.
    ///
    /// # Panics
    ///
    /// This panics if the length of the resulting vector would overflow a `usize`.
    ///
    /// This mirrors the std contract for `into_flattened`; because the flattened element type
    /// here is `u8`, the product `self.len() * N` cannot actually overflow (for `N == 0` it
    /// is simply 0), so the implementations provided by this crate never panic.
    ///
    /// # Examples
    ///
    /// ```
    /// use alloy_primitives::{FixedBytes, FixedBytesVecExt};
    ///
    /// let mut vec = vec![
    ///     FixedBytes::<4>::new([1, 2, 3, 4]),
    ///     FixedBytes::new([5, 6, 7, 8]),
    ///     FixedBytes::new([9, 10, 11, 12]),
    /// ];
    /// assert_eq!(vec.pop(), Some(FixedBytes::new([9, 10, 11, 12])));
    ///
    /// let mut flattened = vec.into_flattened();
    /// assert_eq!(flattened.pop(), Some(8));
    /// ```
    fn into_flattened(self) -> Vec<u8>;
}
103
impl<const N: usize> FixedBytesVecExt for Vec<FixedBytes<N>> {
    #[inline]
    fn into_flattened(self) -> Vec<u8> {
        // `ManuallyDrop` keeps `self`'s buffer alive past this scope; ownership of the
        // allocation is handed to the `Vec<u8>` constructed below, so nothing is freed twice.
        let mut this = core::mem::ManuallyDrop::new(self);
        let (ptr, len, cap) = (this.as_mut_ptr(), this.len(), this.capacity());
        // SAFETY:
        // - For `N > 0`, the allocation already spans `cap * N` bytes of the address space,
        //   so neither `cap * N` nor `len * N` (`len <= cap`) can overflow.
        // - For `N == 0`, both products are 0.
        // - Each `FixedBytes<N>` holds `N` initialized bytes, so there are `len * N` valid
        //   `u8` elements in the allocation.
        let (new_len, new_cap) = unsafe { (len.unchecked_mul(N), cap.unchecked_mul(N)) };
        // SAFETY:
        // - `ptr` was allocated by `self`
        // - `ptr` is well-aligned because `FixedBytes<N>` has the same alignment as `u8` (since
        //   `FixedBytes<N>` is `repr(transparent)` over `[u8; N]`)
        // - `new_cap * size_of::<u8>()` == `cap * size_of::<FixedBytes<N>>()`, so the
        //   allocation size reported to the allocator is unchanged
        // - `len <= cap`, so `len * N <= cap * N`
        unsafe { Vec::from_raw_parts(ptr.cast(), new_len, new_cap) }
    }
}
124
// Can't put in `wrap_fixed_bytes` macro due to orphan rules.
//
// Safety requirement for every invocation: `$t` must be layout-compatible with
// `FixedBytes<$n>` (e.g. a `repr(transparent)` wrapper around it or around `[u8; $n]`),
// otherwise the transmutes in the expansion are unsound.
//
// All paths in the expansion are fully qualified via `$crate` and trait calls use UFCS, so
// the macro does not depend on any imports being in scope at the invocation site.
macro_rules! impl_flatten {
    ([$($gen:tt)*] $t:ty, $n:expr) => {
        impl<$($gen)*> $crate::FixedBytesSliceExt for [$t] {
            #[inline]
            fn as_flattened(&self) -> &[u8] {
                // SAFETY: `$t` is layout-compatible with `FixedBytes<$n>` (see above).
                let bytes =
                    unsafe { core::mem::transmute::<&[$t], &[$crate::FixedBytes<$n>]>(self) };
                $crate::FixedBytesSliceExt::as_flattened(bytes)
            }

            #[inline]
            fn as_flattened_mut(&mut self) -> &mut [u8] {
                // SAFETY: `$t` is layout-compatible with `FixedBytes<$n>` (see above).
                let bytes = unsafe {
                    core::mem::transmute::<&mut [$t], &mut [$crate::FixedBytes<$n>]>(self)
                };
                $crate::FixedBytesSliceExt::as_flattened_mut(bytes)
            }
        }

        impl<$($gen)*> $crate::FixedBytesVecExt for $crate::private::Vec<$t> {
            #[inline]
            fn into_flattened(self) -> $crate::private::Vec<u8> {
                // SAFETY: `$t` is layout-compatible with `FixedBytes<$n>` (see above).
                let bytes = unsafe {
                    core::mem::transmute::<
                        $crate::private::Vec<$t>,
                        $crate::private::Vec<$crate::FixedBytes<$n>>,
                    >(self)
                };
                $crate::FixedBytesVecExt::into_flattened(bytes)
            }
        }
    };
}
150
151impl_flatten!([] crate::Address, 20);
152impl_flatten!([] crate::Bloom, 256);
153impl_flatten!([const BITS: usize, const LIMBS: usize] crate::Uint<BITS, LIMBS>, 32);
154
#[cfg(test)]
mod tests {
    use super::*;
    use crate::Address;

    #[test]
    fn test_as_flattened() {
        let fixed = [FixedBytes::<4>::new([1, 2, 3, 4]), FixedBytes::new([5, 6, 7, 8])];
        let flat: &[u8] = fixed.as_flattened();
        assert_eq!(flat, &[1, 2, 3, 4, 5, 6, 7, 8]);
    }

    #[test]
    fn test_as_flattened_empty() {
        let empty: [FixedBytes<4>; 0] = [];
        assert_eq!(empty.as_flattened().len(), 0);
    }

    #[test]
    fn test_as_flattened_mut() {
        let mut fixed = [FixedBytes::<4>::new([1, 2, 3, 4]), FixedBytes::new([5, 6, 7, 8])];
        // Mutating through the flattened view must be visible through the original elements.
        fixed.as_flattened_mut().iter_mut().for_each(|b| *b = b.wrapping_add(1));
        assert_eq!(fixed[0].as_slice(), &[2, 3, 4, 5]);
        assert_eq!(fixed[1].as_slice(), &[6, 7, 8, 9]);
    }

    #[test]
    fn test_into_flattened() {
        let fixed = vec![FixedBytes::<4>::new([1, 2, 3, 4]), FixedBytes::new([5, 6, 7, 8])];
        let flat = fixed.into_flattened();
        assert_eq!(flat, vec![1, 2, 3, 4, 5, 6, 7, 8]);
    }

    #[test]
    fn test_into_flattened_empty() {
        let empty: Vec<FixedBytes<4>> = Vec::new();
        assert!(empty.into_flattened().is_empty());
    }

    #[test]
    fn test_address_as_flattened() {
        let addrs = [Address::repeat_byte(0x11), Address::repeat_byte(0x22)];
        let flat = addrs.as_flattened();
        assert_eq!(flat.len(), 40);
        assert_eq!(&flat[..20], &[0x11; 20]);
        assert_eq!(&flat[20..], &[0x22; 20]);
    }

    #[test]
    fn test_address_as_flattened_mut() {
        let mut addrs = [Address::repeat_byte(0x11), Address::repeat_byte(0x22)];
        addrs.as_flattened_mut()[0] = 0xff;
        assert_eq!(addrs[0].0[0], 0xff);
    }

    #[test]
    fn test_address_into_flattened() {
        let addrs = vec![Address::repeat_byte(0x11), Address::repeat_byte(0x22)];
        let flat = addrs.into_flattened();
        assert_eq!(flat.len(), 40);
        assert_eq!(&flat[..20], &[0x11; 20]);
        assert_eq!(&flat[20..], &[0x22; 20]);
    }
}
218}