// memlink_protocol/arena.rs

use alloc::string::ToString;

use core::ptr::NonNull;

use crate::error::{ProtocolError, Result};
/// An `(offset, len)` pair describing a byte range inside an arena.
///
/// Position-independent: it stores an offset rather than a pointer, so the
/// same descriptor can be resolved against any arena base pointer (see
/// `resolve` / `resolve_mut`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ArenaSlice {
    // Byte offset from the arena base.
    offset: usize,
    // Length of the range in bytes.
    len: usize,
}
17
impl ArenaSlice {
    /// Creates a descriptor for `len` bytes starting `offset` bytes into an arena.
    pub const fn new(offset: usize, len: usize) -> Self {
        Self { offset, len }
    }

    /// The canonical empty slice (offset 0, length 0).
    pub const fn empty() -> Self {
        Self { offset: 0, len: 0 }
    }

    /// Byte offset of the slice from the arena base.
    pub const fn offset(&self) -> usize {
        self.offset
    }

    /// Length of the slice in bytes.
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if the slice covers zero bytes.
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Resolves the descriptor into a shared byte slice.
    ///
    /// # Safety
    ///
    /// `arena_base` must point to the start of a live arena at least
    /// `self.end()` bytes long, and those bytes must stay valid, initialized,
    /// and free of mutable aliases for the caller-chosen lifetime `'a` (note
    /// `'a` is unconstrained — it is NOT tied to `arena_base`). Call
    /// [`ArenaSlice::validate`] against the arena size first to rule out
    /// out-of-bounds ranges.
    pub unsafe fn resolve<'a>(&self, arena_base: *const u8) -> &'a [u8] {
        if self.len == 0 {
            // Skip pointer arithmetic entirely for the empty case.
            return &[];
        }

        let ptr = arena_base.add(self.offset);
        core::slice::from_raw_parts(ptr, self.len)
    }

    /// Resolves the descriptor into a mutable byte slice.
    ///
    /// # Safety
    ///
    /// Same contract as [`ArenaSlice::resolve`], and additionally no other
    /// reference (shared or mutable) to these bytes may exist during `'a`.
    pub unsafe fn resolve_mut<'a>(&self, arena_base: *mut u8) -> &'a mut [u8] {
        if self.len == 0 {
            return &mut [];
        }

        let ptr = arena_base.add(self.offset);
        core::slice::from_raw_parts_mut(ptr, self.len)
    }

    /// Checks that the slice lies entirely within an arena of `arena_size`
    /// bytes, including the case where `offset + len` overflows `usize`.
    ///
    /// # Errors
    ///
    /// Returns [`ProtocolError::BufferOverflow`] when the slice would extend
    /// past the arena end; `required` is reported as `usize::MAX` when
    /// `offset + len` itself overflows.
    pub fn validate(&self, arena_size: usize) -> Result<()> {
        let end = self
            .offset
            .checked_add(self.len)
            .ok_or(ProtocolError::BufferOverflow {
                required: usize::MAX,
                available: arena_size,
            })?;

        if end > arena_size {
            return Err(ProtocolError::BufferOverflow {
                required: end,
                available: arena_size,
            });
        }

        Ok(())
    }

    /// Recovers a slice descriptor from a raw pointer into the arena.
    ///
    /// # Safety
    ///
    /// `ptr` must be derived from the same allocation as `arena_base`;
    /// otherwise the ordering comparison and address subtraction below are
    /// not meaningful. The result is NOT length-checked against the arena —
    /// run [`ArenaSlice::validate`] before resolving it.
    ///
    /// # Errors
    ///
    /// Returns [`ProtocolError::InvalidHeader`] when `ptr` lies before
    /// `arena_base`.
    pub unsafe fn from_ptr(arena_base: *const u8, ptr: *const u8, len: usize) -> Result<Self> {
        if ptr < arena_base {
            return Err(ProtocolError::InvalidHeader(
                "pointer is before arena base".to_string(),
            ));
        }

        let offset = (ptr as usize) - (arena_base as usize);
        Ok(Self::new(offset, len))
    }

    /// One-past-the-end offset, i.e. `offset + len`.
    ///
    /// NOTE(review): unchecked addition — a descriptor built with values near
    /// `usize::MAX` overflows here (panic in debug, wrap in release);
    /// `validate` is the checked path and catches that case.
    pub const fn end(&self) -> usize {
        self.offset + self.len
    }

    /// Returns a sub-range of this slice: `len` bytes starting `start` bytes
    /// in. Offsets in the result remain arena-relative.
    ///
    /// # Errors
    ///
    /// Returns [`ProtocolError::BufferOverflow`] when `start` or
    /// `start + len` exceeds this slice's length, or `start + len` overflows.
    pub fn sub_slice(&self, start: usize, len: usize) -> Result<Self> {
        if start > self.len {
            return Err(ProtocolError::BufferOverflow {
                required: self.offset + start,
                available: self.offset + self.len,
            });
        }

        let end = start
            .checked_add(len)
            .ok_or(ProtocolError::BufferOverflow {
                required: usize::MAX,
                available: self.len,
            })?;

        if end > self.len {
            return Err(ProtocolError::BufferOverflow {
                required: self.offset + end,
                available: self.offset + self.len,
            });
        }

        Ok(Self::new(self.offset + start, len))
    }
}
116
/// Sizing limits for an arena.
#[derive(Debug, Clone, Copy)]
pub struct ArenaConfig {
    /// Total arena capacity in bytes.
    pub size: usize,
    /// Allocation alignment in bytes. NOTE(review): not enforced anywhere in
    /// this file — presumably consumed by the allocator; confirm at call sites.
    pub alignment: usize,
    /// Upper bound on a single allocation, in bytes.
    pub max_allocation: usize,
}
123
124impl Default for ArenaConfig {
125 fn default() -> Self {
126 Self {
127 size: 64 * 1024 * 1024,
128 alignment: 8,
129 max_allocation: 16 * 1024 * 1024,
130 }
131 }
132}
133
134impl ArenaConfig {
135 pub const fn new(size: usize, alignment: usize, max_allocation: usize) -> Self {
136 Self {
137 size,
138 alignment,
139 max_allocation,
140 }
141 }
142
143 pub fn validate_allocation(&self, size: usize) -> Result<()> {
144 if size > self.max_allocation {
145 return Err(ProtocolError::PayloadTooLarge(size, self.max_allocation));
146 }
147
148 if size > self.size {
149 return Err(ProtocolError::PayloadTooLarge(size, self.size));
150 }
151
152 Ok(())
153 }
154}
155
/// A borrowed view of an arena: a non-null base pointer plus its size.
#[derive(Debug)]
pub struct ArenaRef<'a> {
    // Non-null by construction (`new` panics on null); also gives
    // `Option<ArenaRef>` the pointer-niche layout for free.
    base: NonNull<u8>,
    // Arena size in bytes; used to bounds-check slice resolution.
    size: usize,
    // Ties the view to the arena owner's lifetime without storing a reference.
    _phantom: core::marker::PhantomData<&'a ()>,
}
162
impl<'a> ArenaRef<'a> {
    /// Wraps a raw arena region.
    ///
    /// # Safety
    ///
    /// `base` must point to `size` readable bytes that remain valid for `'a`.
    ///
    /// # Panics
    ///
    /// Panics if `base` is null.
    pub unsafe fn new(base: *mut u8, size: usize) -> Self {
        Self {
            base: NonNull::new(base).expect("ArenaRef base pointer is null"),
            size,
            _phantom: core::marker::PhantomData,
        }
    }

    /// Base pointer of the arena.
    pub fn base(&self) -> *const u8 {
        self.base.as_ptr()
    }

    /// Total arena size in bytes.
    pub fn size(&self) -> usize {
        self.size
    }

    /// Bounds-checks `slice` against this arena, then resolves it.
    ///
    /// # Safety
    ///
    /// The arena bytes in the slice's range must be initialized and free of
    /// mutable aliases for `'a`; the `validate` call only guarantees the
    /// range lies inside the arena, not that its contents are safe to read.
    ///
    /// # Errors
    ///
    /// Propagates [`ProtocolError::BufferOverflow`] from
    /// [`ArenaSlice::validate`] when the slice exceeds the arena bounds.
    pub unsafe fn resolve_slice(&self, slice: &ArenaSlice) -> Result<&'a [u8]> {
        slice.validate(self.size)?;
        Ok(slice.resolve(self.base.as_ptr()))
    }

    /// Bytes remaining past `offset`; saturates to 0 when `offset` exceeds
    /// the arena size instead of underflowing.
    pub fn remaining(&self, offset: usize) -> usize {
        self.size.saturating_sub(offset)
    }
}