use core::{
    cell::UnsafeCell,
    sync::atomic::{AtomicBool, Ordering},
};

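/// Statically allocated memory for endpoint buffers.
///
/// The memory is zeroed at compile time and can be handed out exactly once
/// via `allocator`. A minimal, crate-internal usage sketch (`EP_MEMORY` and
/// the `4096` size are illustrative choices, not names from this crate):
///
/// ```ignore
/// static EP_MEMORY: EndpointMemory<4096> = EndpointMemory::new();
/// // The first call takes the allocator; later calls return None.
/// let mut alloc = EP_MEMORY.allocator().expect("taken at most once");
/// let buffer = alloc.allocate(64);
/// ```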
pub struct EndpointMemory<const SIZE: usize> {
    buffer: UnsafeCell<[u8; SIZE]>,
    taken: AtomicBool,
}

impl<const SIZE: usize> Default for EndpointMemory<SIZE> {
    fn default() -> Self {
        Self::new()
    }
}

impl<const SIZE: usize> EndpointMemory<SIZE> {
    /// Creates a new, untaken endpoint memory region, zeroed at compile time.
    pub const fn new() -> Self {
        Self {
            buffer: UnsafeCell::new([0; SIZE]),
            taken: AtomicBool::new(false),
        }
    }

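    /// Returns the allocator for this memory region, or `None` if the
    /// allocator was already taken.
    ///
    /// The `taken` flag is swapped atomically, so only the first caller
    /// ever observes `Some`.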
    pub(crate) fn allocator(&'static self) -> Option<Allocator> {
        if self.taken.swap(true, Ordering::SeqCst) {
            None
        } else {
            // Safety: the swap above guarantees that this branch runs at
            // most once, so we create at most one exclusive reference to
            // the buffer. The 'static receiver keeps the memory alive.
            Some(Allocator::new(unsafe { &mut *self.buffer.get() }))
        }
    }
}

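// Safety: all shared access to the inner buffer goes through the atomic
// `taken` flag, which hands out the memory exclusively, at most once.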
unsafe impl<const SIZE: usize> Sync for EndpointMemory<SIZE> {}

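/// A bump allocator that carves fixed-size buffers out of an
/// [`EndpointMemory`] region, handing out memory from the end of the
/// region toward its start.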
pub struct Allocator {
    start: *mut u8,
    ptr: *mut u8,
}

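// Safety: the allocator exclusively owns the memory between `start` and
// `ptr`, so it may be moved to another execution context.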
unsafe impl Send for Allocator {}

impl Allocator {
    fn new(buffer: &'static mut [u8]) -> Self {
        // Safety: the buffer is 'static, and we hold the only mutable
        // reference to it.
        unsafe { Self::from_buffer(buffer) }
    }

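    /// Creates an allocator over `buffer`, placing the bump pointer one
    /// past the end of the slice.
    ///
    /// # Safety
    ///
    /// The memory referenced by `buffer` must remain valid, and must not
    /// be aliased, for as long as this allocator and any [`Buffer`]s it
    /// produces are in use.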
    pub(crate) unsafe fn from_buffer(buffer: &mut [u8]) -> Self {
        let start = buffer.as_mut_ptr();
        // Safety: one past the end of an allocation is a valid pointer.
        let ptr = unsafe { start.add(buffer.len()) };
        Allocator { start, ptr }
    }

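    /// Allocates a `size`-byte buffer, or returns `None` if fewer than
    /// `size` bytes remain.
    ///
    /// Allocation moves the bump pointer down by `size` bytes, so buffers
    /// are handed out from the end of the region toward its start, and
    /// memory is never reclaimed. For example, a 32-byte region can serve
    /// two 7-byte allocations (at offsets 25 and 18) but will then refuse
    /// a 19-byte request.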
    pub fn allocate(&mut self, size: usize) -> Option<Buffer> {
        // Work in address space so the downward bump gets checked
        // arithmetic.
        let ptr = self.ptr as usize;
        let ptr = ptr.checked_sub(size)?;
        let start = self.start as usize;
        if ptr < start {
            None
        } else {
            self.ptr = ptr as *mut u8;
            Some(Buffer {
                ptr: self.ptr,
                len: size,
            })
        }
    }
}

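/// A fixed-size region of endpoint memory produced by an [`Allocator`].
///
/// Reads and writes are volatile, since the underlying memory may also be
/// accessed by hardware.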
pub struct Buffer {
    ptr: *mut u8,
    len: usize,
}

// Safety: the buffer exclusively owns its region of memory, so it may be
// sent to another execution context.
unsafe impl Send for Buffer {}

impl Buffer {
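    /// Copies the contents of this buffer into `buffer` using volatile
    /// reads, returning the number of bytes copied.
    ///
    /// The copy length is the smaller of the two buffer lengths.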
    pub fn volatile_read(&self, buffer: &mut [u8]) -> usize {
        let size = buffer.len().min(self.len);
        buffer
            .iter_mut()
            .take(size)
            .fold(self.ptr, |src, dst| unsafe {
                // Safety: `src` stays within this buffer's allocation,
                // since we take at most `self.len` bytes.
                *dst = src.read_volatile();
                src.add(1)
            });
        size
    }

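    /// Copies the contents of `buffer` into this buffer using volatile
    /// writes, returning the number of bytes copied.
    ///
    /// The copy length is the smaller of the two buffer lengths.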
    pub fn volatile_write(&mut self, buffer: &[u8]) -> usize {
        let size = buffer.len().min(self.len);
        buffer.iter().take(size).fold(self.ptr, |dst, src| unsafe {
            // Safety: `dst` stays within this buffer's allocation,
            // since we take at most `self.len` bytes.
            dst.write_volatile(*src);
            dst.add(1)
        });
        size
    }

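    /// Returns the raw pointer to the start of this buffer's memory.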
    pub fn as_ptr_mut(&mut self) -> *mut u8 {
        self.ptr
    }

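    /// Returns the size of this buffer in bytes.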
    pub fn len(&self) -> usize {
        self.len
    }

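    /// Cleans and invalidates the data cache for the first `len` bytes of
    /// this buffer (clamped to the buffer's size), so that the CPU and
    /// hardware observe the same contents.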
    pub fn clean_invalidate_dcache(&self, len: usize) {
        crate::cache::clean_invalidate_dcache_by_address(self.ptr as usize, self.len.min(len));
    }
}

#[cfg(test)]
mod test {
    use super::Allocator;

    #[test]
    fn allocate_entire_buffer() {
        let mut buffer: [u8; 32] = [0; 32];
        let mut alloc = unsafe { Allocator::from_buffer(&mut buffer) };
        let ptr = alloc.allocate(32);
        assert!(ptr.is_some());
        assert_eq!(ptr.unwrap().ptr, buffer.as_mut_ptr());

        let ptr = alloc.allocate(1);
        assert!(ptr.is_none());
    }

    #[test]
    fn allocate_partial_buffers() {
        let mut buffer: [u8; 32] = [0; 32];
        let mut alloc = unsafe { Allocator::from_buffer(&mut buffer) };

        let ptr = alloc.allocate(7);
        assert!(ptr.is_some());
        assert_eq!(ptr.unwrap().ptr, unsafe { buffer.as_mut_ptr().add(32 - 7) });

        let ptr = alloc.allocate(7);
        assert!(ptr.is_some());
        assert_eq!(ptr.unwrap().ptr, unsafe {
            buffer.as_mut_ptr().add(32 - 14)
        });

        let ptr = alloc.allocate(19);
        assert!(ptr.is_none());
    }

    #[test]
    fn allocate_empty() {
        let mut alloc = Allocator {
            start: core::ptr::null_mut(),
            ptr: core::ptr::null_mut(),
        };
        assert!(alloc.allocate(1).is_none());
    }
}