// miraland_memory_management/aligned_memory.rs
use std::{mem, ptr};

/// Marker trait for "plain old data" types that may be copied into an
/// `AlignedMemory` byte-for-byte via `AlignedMemory::write_unchecked`.
pub trait Pod {}

// Fixed-size primitives whose raw in-memory bytes are safe to write out.
impl Pod for bool {}
impl Pod for u8 {}
impl Pod for u16 {}
impl Pod for u32 {}
impl Pod for u64 {}
impl Pod for i8 {}
impl Pod for i16 {}
impl Pod for i32 {}
impl Pod for i64 {}
/// Byte buffer whose payload is guaranteed to start at an address aligned to
/// `ALIGN`, implemented as a `Vec<u8>` with leading padding bytes.
// NOTE(review): the derived PartialEq compares the raw backing vector
// including the alignment padding and `align_offset`, so two logically equal
// buffers allocated at different alignments can compare unequal — confirm
// this is intended.
#[derive(Debug, PartialEq, Eq)]
pub struct AlignedMemory<const ALIGN: usize> {
    // Maximum number of payload bytes the buffer may ever hold.
    max_len: usize,
    // Number of padding bytes before the aligned payload region.
    align_offset: usize,
    // Backing storage; the payload lives at mem[align_offset..].
    mem: Vec<u8>,
    // True when the allocation is zero-initialized up to
    // align_offset + max_len, allowing fill_write(_, 0) to use set_len.
    zero_up_to_max_len: bool,
}
27impl<const ALIGN: usize> AlignedMemory<ALIGN> {
28 fn get_mem(max_len: usize) -> (Vec<u8>, usize) {
29 let mut mem: Vec<u8> = Vec::with_capacity(max_len.saturating_add(ALIGN));
30 mem.push(0);
31 let align_offset = mem.as_ptr().align_offset(ALIGN);
32 mem.resize(align_offset, 0);
33 (mem, align_offset)
34 }
35 fn get_mem_zeroed(max_len: usize) -> (Vec<u8>, usize) {
36 let mut mem = vec![0; max_len];
40 let align_offset = mem.as_ptr().align_offset(ALIGN);
41 mem.resize(max_len.saturating_add(align_offset), 0);
42 (mem, align_offset)
43 }
44 pub fn from_slice(data: &[u8]) -> Self {
46 let max_len = data.len();
47 let (mut mem, align_offset) = Self::get_mem(max_len);
48 mem.extend_from_slice(data);
49 Self {
50 max_len,
51 align_offset,
52 mem,
53 zero_up_to_max_len: false,
54 }
55 }
56 pub fn with_capacity(max_len: usize) -> Self {
58 let (mem, align_offset) = Self::get_mem(max_len);
59 Self {
60 max_len,
61 align_offset,
62 mem,
63 zero_up_to_max_len: false,
64 }
65 }
66 pub fn with_capacity_zeroed(max_len: usize) -> Self {
68 let (mut mem, align_offset) = Self::get_mem_zeroed(max_len);
69 mem.truncate(align_offset);
70 Self {
71 max_len,
72 align_offset,
73 mem,
74 zero_up_to_max_len: true,
75 }
76 }
77 pub fn zero_filled(max_len: usize) -> Self {
79 let (mem, align_offset) = Self::get_mem_zeroed(max_len);
80 Self {
81 max_len,
82 align_offset,
83 mem,
84 zero_up_to_max_len: true,
85 }
86 }
87 pub fn mem_size(&self) -> usize {
89 self.mem.capacity().saturating_add(mem::size_of::<Self>())
90 }
91 pub fn len(&self) -> usize {
93 self.mem.len().saturating_sub(self.align_offset)
94 }
95 pub fn is_empty(&self) -> bool {
97 self.mem.len() == self.align_offset
98 }
99 pub fn write_index(&self) -> usize {
101 self.mem.len()
102 }
103 pub fn as_slice(&self) -> &[u8] {
105 let start = self.align_offset;
106 let end = self.mem.len();
107 &self.mem[start..end]
108 }
109 pub fn as_slice_mut(&mut self) -> &mut [u8] {
111 let start = self.align_offset;
112 let end = self.mem.len();
113 &mut self.mem[start..end]
114 }
115 pub fn fill_write(&mut self, num: usize, value: u8) -> std::io::Result<()> {
117 let new_len = match (
118 self.mem.len().checked_add(num),
119 self.align_offset.checked_add(self.max_len),
120 ) {
121 (Some(new_len), Some(allocation_end)) if new_len <= allocation_end => new_len,
122 _ => {
123 return Err(std::io::Error::new(
124 std::io::ErrorKind::InvalidInput,
125 "aligned memory resize failed",
126 ))
127 }
128 };
129 if self.zero_up_to_max_len && value == 0 {
130 unsafe {
132 self.mem.set_len(new_len);
133 }
134 } else {
135 self.mem.resize(new_len, value);
136 }
137 Ok(())
138 }
139
140 pub unsafe fn write_unchecked<T: Pod>(&mut self, value: T) {
146 let pos = self.mem.len();
147 let new_len = pos.saturating_add(mem::size_of::<T>());
148 debug_assert!(new_len <= self.align_offset.saturating_add(self.max_len));
149 self.mem.set_len(new_len);
150 ptr::write_unaligned(
151 self.mem.get_unchecked_mut(pos..new_len).as_mut_ptr().cast(),
152 value,
153 );
154 }
155
156 pub unsafe fn write_all_unchecked(&mut self, value: &[u8]) {
162 let pos = self.mem.len();
163 let new_len = pos.saturating_add(value.len());
164 debug_assert!(new_len <= self.align_offset.saturating_add(self.max_len));
165 self.mem.set_len(new_len);
166 self.mem
167 .get_unchecked_mut(pos..new_len)
168 .copy_from_slice(value);
169 }
170}
// A derived Clone would copy `mem` into a fresh allocation with no guarantee
// of the same alignment, so re-align by rebuilding from the payload slice.
// NOTE(review): the clone's max_len becomes the current payload length and
// zero_up_to_max_len resets to false, so spare capacity is not preserved —
// confirm callers never grow a clone.
impl<const ALIGN: usize> Clone for AlignedMemory<ALIGN> {
    fn clone(&self) -> Self {
        AlignedMemory::from_slice(self.as_slice())
    }
}
181impl<const ALIGN: usize> std::io::Write for AlignedMemory<ALIGN> {
182 fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
183 match (
184 self.mem.len().checked_add(buf.len()),
185 self.align_offset.checked_add(self.max_len),
186 ) {
187 (Some(new_len), Some(allocation_end)) if new_len <= allocation_end => {}
188 _ => {
189 return Err(std::io::Error::new(
190 std::io::ErrorKind::InvalidInput,
191 "aligned memory write failed",
192 ))
193 }
194 }
195 self.mem.extend_from_slice(buf);
196 Ok(buf.len())
197 }
198 fn flush(&mut self) -> std::io::Result<()> {
199 Ok(())
200 }
201}
203impl<const ALIGN: usize, T: AsRef<[u8]>> From<T> for AlignedMemory<ALIGN> {
204 fn from(bytes: T) -> Self {
205 AlignedMemory::from_slice(bytes.as_ref())
206 }
207}
#[cfg(test)]
mod tests {
    #![allow(clippy::arithmetic_side_effects)]
    use {super::*, std::io::Write};

    // Exercises the full public API for one alignment value; called below
    // with both the trivial alignment (1) and a large one (32768).
    fn do_test<const ALIGN: usize>() {
        let mut aligned_memory = AlignedMemory::<ALIGN>::with_capacity(10);

        // Writes up to max_len succeed; zero-length writes are no-ops.
        assert_eq!(aligned_memory.write(&[42u8; 1]).unwrap(), 1);
        assert_eq!(aligned_memory.write(&[42u8; 9]).unwrap(), 9);
        assert_eq!(aligned_memory.as_slice(), &[42u8; 10]);
        assert_eq!(aligned_memory.write(&[42u8; 0]).unwrap(), 0);
        assert_eq!(aligned_memory.as_slice(), &[42u8; 10]);
        // Writing past max_len fails and leaves contents untouched.
        aligned_memory.write(&[42u8; 1]).unwrap_err();
        assert_eq!(aligned_memory.as_slice(), &[42u8; 10]);
        // In-place mutation through the mutable slice view.
        aligned_memory.as_slice_mut().copy_from_slice(&[84u8; 10]);
        assert_eq!(aligned_memory.as_slice(), &[84u8; 10]);

        // fill_write: the zero fast path (set_len) and the resize path,
        // mixed with regular Write, must interleave correctly.
        let mut aligned_memory = AlignedMemory::<ALIGN>::with_capacity_zeroed(10);
        aligned_memory.fill_write(5, 0).unwrap();
        aligned_memory.fill_write(2, 1).unwrap();
        assert_eq!(aligned_memory.write(&[2u8; 3]).unwrap(), 3);
        assert_eq!(aligned_memory.as_slice(), &[0, 0, 0, 0, 0, 1, 1, 2, 2, 2]);
        // Both overflow paths fail without corrupting the buffer.
        aligned_memory.fill_write(1, 3).unwrap_err();
        aligned_memory.write(&[4u8; 1]).unwrap_err();
        assert_eq!(aligned_memory.as_slice(), &[0, 0, 0, 0, 0, 1, 1, 2, 2, 2]);

        // zero_filled starts at full length, all zeros.
        let aligned_memory = AlignedMemory::<ALIGN>::zero_filled(10);
        assert_eq!(aligned_memory.len(), 10);
        assert_eq!(aligned_memory.as_slice(), &[0u8; 10]);

        // Unchecked writers: 1 (u8) + 8 (u64) + 3 (fill) + 3 ("foo") = 15.
        let mut aligned_memory = AlignedMemory::<ALIGN>::with_capacity_zeroed(15);
        unsafe {
            aligned_memory.write_unchecked::<u8>(42);
            assert_eq!(aligned_memory.len(), 1);
            aligned_memory.write_unchecked::<u64>(0xCAFEBADDDEADCAFE);
            assert_eq!(aligned_memory.len(), 9);
            aligned_memory.fill_write(3, 0).unwrap();
            aligned_memory.write_all_unchecked(b"foo");
            assert_eq!(aligned_memory.len(), 15);
        }
        let mem = aligned_memory.as_slice();
        assert_eq!(mem[0], 42);
        // The u64 was written at offset 1, hence the unaligned read-back.
        assert_eq!(
            unsafe {
                ptr::read_unaligned::<u64>(mem[1..1 + mem::size_of::<u64>()].as_ptr().cast())
            },
            0xCAFEBADDDEADCAFE
        );
        assert_eq!(&mem[1 + mem::size_of::<u64>()..][..3], &[0, 0, 0]);
        assert_eq!(&mem[1 + mem::size_of::<u64>() + 3..], b"foo");
    }

    #[test]
    fn test_aligned_memory() {
        do_test::<1>();
        do_test::<32768>();
    }

    // The debug_assert in write_unchecked must fire when the second u64
    // would push the length past max_len (8 + 8 > 15).
    #[cfg(debug_assertions)]
    #[test]
    #[should_panic(expected = "<= self.align_offset.saturating_add(self.max_len)")]
    fn test_write_unchecked_debug_assert() {
        let mut aligned_memory = AlignedMemory::<8>::with_capacity(15);
        unsafe {
            aligned_memory.write_unchecked::<u64>(42);
            aligned_memory.write_unchecked::<u64>(24);
        }
    }

    // Same guard for write_all_unchecked (3 + 3 > 5).
    #[cfg(debug_assertions)]
    #[test]
    #[should_panic(expected = "<= self.align_offset.saturating_add(self.max_len)")]
    fn test_write_all_unchecked_debug_assert() {
        let mut aligned_memory = AlignedMemory::<8>::with_capacity(5);
        unsafe {
            aligned_memory.write_all_unchecked(b"foo");
            aligned_memory.write_all_unchecked(b"bar");
        }
    }
}