// hopper_core/account/segment.rs

use super::pod::{FixedLayout, Pod};
use hopper_runtime::error::ProgramError;
/// Size in bytes of one serialized [`SegmentDescriptor`] (4 + 2 + 2 + 2 + 2).
pub const SEGMENT_DESC_SIZE: usize = 12;

/// Upper bound on the number of segments a single account may describe.
pub const MAX_SEGMENTS: usize = 256;
/// Fixed-layout header describing one data segment inside an account buffer.
///
/// All multi-byte fields are stored as little-endian byte arrays so the
/// struct has alignment 1 and size 12, and can therefore be overlaid on any
/// byte offset of the account data without alignment concerns.
#[derive(Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct SegmentDescriptor {
    // Byte offset of the segment's payload from the start of the account data (LE u32).
    offset_bytes: [u8; 4],
    // Number of live elements currently stored (LE u16).
    count_bytes: [u8; 2],
    // Maximum number of elements the segment can hold (LE u16).
    capacity_bytes: [u8; 2],
    // Size in bytes of a single element (LE u16).
    element_size_bytes: [u8; 2],
    // Flag bits (LE u16); their meaning is not defined in this module.
    flags_bytes: [u8; 2],
}

// Compile-time layout guarantees: exactly SEGMENT_DESC_SIZE bytes with
// alignment 1 — the unsafe pointer casts in the tables below rely on both.
const _: () = assert!(core::mem::size_of::<SegmentDescriptor>() == SEGMENT_DESC_SIZE);
const _: () = assert!(core::mem::align_of::<SegmentDescriptor>() == 1);
// SAFETY: SegmentDescriptor is #[repr(C)] and contains only u8 arrays — no
// padding and no invalid bit patterns — so the all-zero value and any other
// byte pattern are valid instances, satisfying bytemuck's contracts.
#[cfg(feature = "hopper-native-backend")]
unsafe impl ::hopper_runtime::__hopper_native::bytemuck::Zeroable for SegmentDescriptor {}
#[cfg(feature = "hopper-native-backend")]
unsafe impl ::hopper_runtime::__hopper_native::bytemuck::Pod for SegmentDescriptor {}

// SAFETY: same layout argument as above; size and alignment are additionally
// pinned by the compile-time assertions, so the crate-local zero-copy
// contract holds for this type.
unsafe impl Pod for SegmentDescriptor {}
unsafe impl ::hopper_runtime::__sealed::HopperZeroCopySealed for SegmentDescriptor {}

impl FixedLayout for SegmentDescriptor {
    // Wire size; matches the compile-time size assertion above.
    const SIZE: usize = SEGMENT_DESC_SIZE;
}
51impl SegmentDescriptor {
52 #[inline(always)]
54 pub fn offset(&self) -> u32 {
55 u32::from_le_bytes(self.offset_bytes)
56 }
57
58 #[inline(always)]
60 pub fn count(&self) -> u16 {
61 u16::from_le_bytes(self.count_bytes)
62 }
63
64 #[inline(always)]
66 pub fn capacity(&self) -> u16 {
67 u16::from_le_bytes(self.capacity_bytes)
68 }
69
70 #[inline(always)]
72 pub fn element_size(&self) -> u16 {
73 u16::from_le_bytes(self.element_size_bytes)
74 }
75
76 #[inline(always)]
78 pub fn flags(&self) -> u16 {
79 u16::from_le_bytes(self.flags_bytes)
80 }
81
82 #[inline(always)]
84 pub fn is_full(&self) -> bool {
85 self.count() >= self.capacity()
86 }
87
88 #[inline(always)]
90 pub fn data_len(&self) -> usize {
91 (self.count() as usize) * (self.element_size() as usize)
92 }
93
94 #[inline(always)]
96 pub fn allocated_len(&self) -> usize {
97 (self.capacity() as usize) * (self.element_size() as usize)
98 }
99
100 #[inline(always)]
102 pub fn set_count(&mut self, count: u16) {
103 self.count_bytes = count.to_le_bytes();
104 }
105
106 #[inline(always)]
108 pub fn set_offset(&mut self, offset: u32) {
109 self.offset_bytes = offset.to_le_bytes();
110 }
111
112 #[inline(always)]
114 pub fn set_capacity(&mut self, capacity: u16) {
115 self.capacity_bytes = capacity.to_le_bytes();
116 }
117
118 #[inline(always)]
120 pub fn set_element_size(&mut self, size: u16) {
121 self.element_size_bytes = size.to_le_bytes();
122 }
123}
124
125pub struct SegmentTable<'a> {
127 data: &'a [u8],
128 count: usize,
129}
130
131impl<'a> SegmentTable<'a> {
132 #[inline]
134 pub fn from_bytes(data: &'a [u8], count: usize) -> Result<Self, ProgramError> {
135 if data.len() < count * SEGMENT_DESC_SIZE {
136 return Err(ProgramError::AccountDataTooSmall);
137 }
138 Ok(Self { data, count })
139 }
140
141 #[inline(always)]
143 pub fn segment_count(&self) -> usize {
144 self.count
145 }
146
147 #[inline(always)]
149 pub fn descriptor(&self, index: usize) -> Result<&SegmentDescriptor, ProgramError> {
150 if index >= self.count {
151 return Err(ProgramError::InvalidArgument);
152 }
153 let offset = index * SEGMENT_DESC_SIZE;
154 Ok(unsafe { &*(self.data.as_ptr().add(offset) as *const SegmentDescriptor) })
156 }
157}
158
159pub struct SegmentTableMut<'a> {
161 data: &'a mut [u8],
162 count: usize,
163}
164
165impl<'a> SegmentTableMut<'a> {
166 #[inline]
168 pub fn from_bytes_mut(data: &'a mut [u8], count: usize) -> Result<Self, ProgramError> {
169 if data.len() < count * SEGMENT_DESC_SIZE {
170 return Err(ProgramError::AccountDataTooSmall);
171 }
172 Ok(Self { data, count })
173 }
174
175 #[inline(always)]
177 pub fn descriptor_mut(&mut self, index: usize) -> Result<&mut SegmentDescriptor, ProgramError> {
178 if index >= self.count {
179 return Err(ProgramError::InvalidArgument);
180 }
181 let offset = index * SEGMENT_DESC_SIZE;
182 Ok(unsafe { &mut *(self.data.as_mut_ptr().add(offset) as *mut SegmentDescriptor) })
184 }
185
186 #[inline]
191 pub fn init(&mut self, data_start: u32, specs: &[(u16, u16, u16)]) -> Result<(), ProgramError> {
192 if specs.len() > self.count {
193 return Err(ProgramError::InvalidArgument);
194 }
195
196 let mut current_offset = data_start;
197 for (i, &(element_size, count, capacity)) in specs.iter().enumerate() {
198 let desc = self.descriptor_mut(i)?;
199 desc.set_offset(current_offset);
200 desc.set_count(count);
201 desc.set_capacity(capacity);
202 desc.set_element_size(element_size);
203 current_offset += (capacity as u32) * (element_size as u32);
204 }
205 Ok(())
206 }
207}
208
209pub struct SegmentSlice<'a, T: Pod + FixedLayout> {
211 data: &'a [u8],
212 count: usize,
213 _phantom: core::marker::PhantomData<T>,
214}
215
216impl<'a, T: Pod + FixedLayout> SegmentSlice<'a, T> {
217 #[inline]
219 pub fn from_descriptor(
220 account_data: &'a [u8],
221 desc: &SegmentDescriptor,
222 ) -> Result<Self, ProgramError> {
223 let offset = desc.offset() as usize;
224 let count = desc.count() as usize;
225 let needed = offset + count * T::SIZE;
226 if needed > account_data.len() {
227 return Err(ProgramError::AccountDataTooSmall);
228 }
229 Ok(Self {
230 data: &account_data[offset..],
231 count,
232 _phantom: core::marker::PhantomData,
233 })
234 }
235
236 #[inline(always)]
238 pub fn len(&self) -> usize {
239 self.count
240 }
241
242 #[inline(always)]
244 pub fn is_empty(&self) -> bool {
245 self.count == 0
246 }
247
248 #[inline(always)]
250 pub fn read(&self, index: usize) -> Result<T, ProgramError> {
251 if index >= self.count {
252 return Err(ProgramError::InvalidArgument);
253 }
254 let offset = index * T::SIZE;
255 Ok(unsafe { core::ptr::read_unaligned(self.data.as_ptr().add(offset) as *const T) })
257 }
258
259 #[inline(always)]
261 pub fn get(&self, index: usize) -> Result<&T, ProgramError> {
262 if index >= self.count {
263 return Err(ProgramError::InvalidArgument);
264 }
265 let offset = index * T::SIZE;
266 Ok(unsafe { &*(self.data.as_ptr().add(offset) as *const T) })
268 }
269}
270
271pub struct SegmentSliceMut<'a, T: Pod + FixedLayout> {
273 data: &'a mut [u8],
274 count: usize,
275 capacity: usize,
276 _phantom: core::marker::PhantomData<T>,
277}
278
279impl<'a, T: Pod + FixedLayout> SegmentSliceMut<'a, T> {
280 #[inline]
282 pub fn from_descriptor(
283 account_data: &'a mut [u8],
284 desc: &SegmentDescriptor,
285 ) -> Result<Self, ProgramError> {
286 let offset = desc.offset() as usize;
287 let count = desc.count() as usize;
288 let capacity = desc.capacity() as usize;
289 let needed = offset + capacity * T::SIZE;
290 if needed > account_data.len() {
291 return Err(ProgramError::AccountDataTooSmall);
292 }
293 Ok(Self {
294 data: &mut account_data[offset..],
295 count,
296 capacity,
297 _phantom: core::marker::PhantomData,
298 })
299 }
300
301 #[inline(always)]
303 pub fn len(&self) -> usize {
304 self.count
305 }
306
307 #[inline(always)]
309 pub fn is_empty(&self) -> bool {
310 self.count == 0
311 }
312
313 #[inline(always)]
315 pub fn capacity(&self) -> usize {
316 self.capacity
317 }
318
319 #[inline(always)]
321 pub fn read(&self, index: usize) -> Result<T, ProgramError> {
322 if index >= self.count {
323 return Err(ProgramError::InvalidArgument);
324 }
325 let offset = index * T::SIZE;
326 Ok(unsafe { core::ptr::read_unaligned(self.data.as_ptr().add(offset) as *const T) })
328 }
329
330 #[inline(always)]
332 pub fn write(&mut self, index: usize, value: T) -> Result<(), ProgramError> {
333 if index >= self.count {
334 return Err(ProgramError::InvalidArgument);
335 }
336 let offset = index * T::SIZE;
337 unsafe {
339 core::ptr::write_unaligned(self.data.as_mut_ptr().add(offset) as *mut T, value);
340 }
341 Ok(())
342 }
343
344 #[inline]
346 pub fn swap(&mut self, i: usize, j: usize) -> Result<(), ProgramError> {
347 if i >= self.count || j >= self.count {
348 return Err(ProgramError::InvalidArgument);
349 }
350 if i == j {
351 return Ok(());
352 }
353 let size = T::SIZE;
354 let oi = i * size;
355 let oj = j * size;
356 for k in 0..size {
358 self.data.swap(oi + k, oj + k);
359 }
360 Ok(())
361 }
362}