// hopper_core/collections/fixed_vec.rs

use crate::account::{FixedLayout, Pod};
use hopper_runtime::error::ProgramError;

/// Bytes reserved at the front of the buffer for the little-endian `u32` length.
const HEADER_SIZE: usize = 4;
17pub struct FixedVec<'a, T: Pod + FixedLayout> {
22 data: &'a mut [u8],
23 _phantom: core::marker::PhantomData<T>,
24}
25
26impl<'a, T: Pod + FixedLayout> FixedVec<'a, T> {
27 #[inline]
32 pub fn from_bytes(data: &'a mut [u8]) -> Result<Self, ProgramError> {
33 if data.len() < HEADER_SIZE {
34 return Err(ProgramError::AccountDataTooSmall);
35 }
36 Ok(Self {
37 data,
38 _phantom: core::marker::PhantomData,
39 })
40 }
41
42 #[inline(always)]
44 pub fn len(&self) -> usize {
45 let bytes = [self.data[0], self.data[1], self.data[2], self.data[3]];
46 u32::from_le_bytes(bytes) as usize
47 }
48
49 #[inline(always)]
51 pub fn capacity(&self) -> usize {
52 (self.data.len() - HEADER_SIZE) / T::SIZE
53 }
54
55 #[inline(always)]
57 pub fn is_empty(&self) -> bool {
58 self.len() == 0
59 }
60
61 #[inline(always)]
63 pub fn is_full(&self) -> bool {
64 self.len() >= self.capacity()
65 }
66
67 #[inline(always)]
69 fn set_len(&mut self, len: usize) {
70 let bytes = (len as u32).to_le_bytes();
71 self.data[0] = bytes[0];
72 self.data[1] = bytes[1];
73 self.data[2] = bytes[2];
74 self.data[3] = bytes[3];
75 }
76
77 #[inline(always)]
79 fn element_offset(&self, index: usize) -> usize {
80 HEADER_SIZE + index * T::SIZE
81 }
82
83 #[inline]
85 pub fn get(&self, index: usize) -> Result<T, ProgramError> {
86 let len = self.len();
87 if index >= len {
88 return Err(ProgramError::InvalidArgument);
89 }
90 let offset = self.element_offset(index);
91 Ok(unsafe { core::ptr::read_unaligned(self.data.as_ptr().add(offset) as *const T) })
93 }
94
95 #[inline]
97 pub fn get_ref(&self, index: usize) -> Result<&T, ProgramError> {
98 let len = self.len();
99 if index >= len {
100 return Err(ProgramError::InvalidArgument);
101 }
102 let offset = self.element_offset(index);
103 Ok(unsafe { &*(self.data.as_ptr().add(offset) as *const T) })
105 }
106
107 #[inline]
109 pub fn set(&mut self, index: usize, value: T) -> Result<(), ProgramError> {
110 let len = self.len();
111 if index >= len {
112 return Err(ProgramError::InvalidArgument);
113 }
114 let offset = self.element_offset(index);
115 unsafe {
117 core::ptr::write_unaligned(self.data.as_mut_ptr().add(offset) as *mut T, value);
118 }
119 Ok(())
120 }
121
122 #[inline]
124 pub fn push(&mut self, value: T) -> Result<(), ProgramError> {
125 let len = self.len();
126 if len >= self.capacity() {
127 return Err(ProgramError::AccountDataTooSmall);
128 }
129 let offset = self.element_offset(len);
130 unsafe {
132 core::ptr::write_unaligned(self.data.as_mut_ptr().add(offset) as *mut T, value);
133 }
134 self.set_len(len + 1);
135 Ok(())
136 }
137
138 #[inline]
140 pub fn pop(&mut self) -> Result<T, ProgramError> {
141 let len = self.len();
142 if len == 0 {
143 return Err(ProgramError::InvalidArgument);
144 }
145 let offset = self.element_offset(len - 1);
146 let value =
148 unsafe { core::ptr::read_unaligned(self.data.as_ptr().add(offset) as *const T) };
149 for byte in &mut self.data[offset..offset + T::SIZE] {
151 *byte = 0;
152 }
153 self.set_len(len - 1);
154 Ok(value)
155 }
156
157 #[inline]
159 pub fn swap_remove(&mut self, index: usize) -> Result<T, ProgramError> {
160 let len = self.len();
161 if index >= len {
162 return Err(ProgramError::InvalidArgument);
163 }
164 let removed_offset = self.element_offset(index);
165 let removed = unsafe {
167 core::ptr::read_unaligned(self.data.as_ptr().add(removed_offset) as *const T)
168 };
169
170 let last_index = len - 1;
171 if index != last_index {
172 let last_offset = self.element_offset(last_index);
173 let size = T::SIZE;
175 for i in 0..size {
176 self.data[removed_offset + i] = self.data[last_offset + i];
177 }
178 for byte in &mut self.data[last_offset..last_offset + size] {
180 *byte = 0;
181 }
182 } else {
183 for byte in &mut self.data[removed_offset..removed_offset + T::SIZE] {
185 *byte = 0;
186 }
187 }
188 self.set_len(last_index);
189 Ok(removed)
190 }
191
192 #[inline]
194 pub fn clear(&mut self) {
195 let len = self.len();
196 let end = self.element_offset(len);
197 for byte in &mut self.data[HEADER_SIZE..end] {
198 *byte = 0;
199 }
200 self.set_len(0);
201 }
202
203 #[inline(always)]
205 pub const fn required_bytes(capacity: usize) -> usize {
206 HEADER_SIZE + capacity * T::SIZE
207 }
208}
209
#[cfg(test)]
mod tests {
    use super::*;
    use crate::abi::WireU64;

    #[test]
    fn push_pop_roundtrip() {
        // Header (4 bytes) + room for four 8-byte elements.
        let mut storage = [0u8; 4 + 8 * 4];
        let mut v = FixedVec::<WireU64>::from_bytes(&mut storage).unwrap();
        assert_eq!(v.len(), 0);
        assert_eq!(v.capacity(), 4);

        v.push(WireU64::new(10)).unwrap();
        v.push(WireU64::new(20)).unwrap();
        assert_eq!(v.len(), 2);

        let popped = v.pop().unwrap();
        assert_eq!(popped.get(), 20);
        assert_eq!(v.len(), 1);

        assert_eq!(v.get(0).unwrap().get(), 10);
    }

    #[test]
    fn swap_remove_works() {
        let mut storage = [0u8; 4 + 8 * 4];
        let mut v = FixedVec::<WireU64>::from_bytes(&mut storage).unwrap();
        v.push(WireU64::new(100)).unwrap();
        v.push(WireU64::new(200)).unwrap();
        v.push(WireU64::new(300)).unwrap();

        // Removing index 0 moves the last element (300) into its place.
        let taken = v.swap_remove(0).unwrap();
        assert_eq!(taken.get(), 100);
        assert_eq!(v.len(), 2);
        assert_eq!(v.get(0).unwrap().get(), 300);
        assert_eq!(v.get(1).unwrap().get(), 200);
    }

    #[test]
    fn full_returns_error() {
        // Capacity of exactly one element.
        let mut storage = [0u8; 4 + 8];
        let mut v = FixedVec::<WireU64>::from_bytes(&mut storage).unwrap();
        v.push(WireU64::new(1)).unwrap();
        assert!(v.push(WireU64::new(2)).is_err());
    }
}