use core::mem::MaybeUninit;
use std::marker::PhantomData;

use rkyv::{
    Archive, Deserialize, Place, Portable, Serialize,
    api::low::LowSerializer,
    rancor::{Failure, Strategy},
    ser::{Positional, allocator::SubAllocator, writer::Buffer as RkyvBuffer},
    with::{ArchiveWith, Identity, SerializeWith},
};
use xous::{
    CID, Error, MemoryAddress, MemoryFlags, MemoryMessage, MemoryRange, MemorySize, Message, Result,
    map_memory, send_message, try_send_message, unmap_memory,
};

#[derive(Debug)]
pub struct Buffer<'buf> {
    pages: MemoryRange,
    used: usize,
    slice: &'buf mut [u8],
    should_drop: bool,
    memory_message: Option<&'buf mut MemoryMessage>,
}
const PAGE_SIZE: usize = 0x1000;

type Serializer<'a, 'b> = LowSerializer<RkyvBuffer<'b>, SubAllocator<'a>, Failure>;

impl<'buf> Buffer<'buf> {
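    /// Allocate a new buffer backed by freshly mapped memory. `len` is rounded up to the next
    /// multiple of `PAGE_SIZE`; the pages are unmapped again when the buffer is dropped.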
    #[allow(dead_code)]
    pub fn new(len: usize) -> Self {
        let flags = MemoryFlags::R | MemoryFlags::W;
        let len_to_page = (len + (PAGE_SIZE - 1)) & !(PAGE_SIZE - 1);

        let new_mem = map_memory(None, None, len_to_page, flags).expect("xous-ipc: OOM in buffer allocation");

        Buffer {
            pages: new_mem,
            slice: unsafe { core::slice::from_raw_parts_mut(new_mem.as_mut_ptr(), len_to_page) },
            used: 0,
            should_drop: true,
            memory_message: None,
        }
    }

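    /// Zero the entire backing slice with volatile writes, followed by a compiler fence, so the
    /// clears cannot be optimized away (useful for scrubbing sensitive data before reuse).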
    pub fn volatile_clear(&mut self) {
        let b = self.slice.as_mut_ptr();
        for i in 0..self.slice.len() {
            unsafe {
                b.add(i).write_volatile(core::mem::zeroed());
            }
        }
        core::sync::atomic::compiler_fence(core::sync::atomic::Ordering::SeqCst);
    }

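    /// Decompose the buffer into its raw `(address, length, used)` triple. The pages remain owned
    /// by this `Buffer`, so they are still unmapped when it is dropped.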
    #[allow(dead_code)]
    pub unsafe fn to_raw_parts(&self) -> (usize, usize, usize) {
        (self.pages.as_ptr() as usize, self.pages.len(), self.used)
    }

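    /// Reassemble a `Buffer` from a raw `(address, length, used)` triple, such as one produced by
    /// `to_raw_parts`. The caller must guarantee that the range describes valid, mapped memory;
    /// the resulting buffer does not unmap the pages on drop.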
    #[allow(dead_code)]
    pub unsafe fn from_raw_parts(address: usize, len: usize, offset: usize) -> Self {
        let mem = MemoryRange::new(address, len).expect("invalid memory range args");
        Buffer {
            pages: mem,
            slice: core::slice::from_raw_parts_mut(mem.as_mut_ptr(), mem.len()),
            used: offset,
            should_drop: false,
            memory_message: None,
        }
    }

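    /// Consume the buffer and hand back its backing `MemoryRange` plus the number of bytes used,
    /// without unmapping the pages. Fails with `Error::ShareViolation` if the buffer wraps a
    /// mutably borrowed `MemoryMessage`, since that memory belongs to an in-flight message.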
    pub fn into_inner(mut self) -> core::result::Result<(MemoryRange, usize), Error> {
        if self.memory_message.is_none() {
            self.should_drop = false;
            Ok((self.pages, self.used))
        } else {
            Err(Error::ShareViolation)
        }
    }

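    /// Rebuild a `Buffer` from a `MemoryRange` and used-byte count previously returned by
    /// `into_inner`. The pages must still be mapped; the buffer will not unmap them on drop.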
    pub unsafe fn from_inner(pages: MemoryRange, used: usize) -> Self {
        Buffer {
            pages,
            slice: core::slice::from_raw_parts_mut(pages.as_mut_ptr(), pages.len()),
            used,
            should_drop: false,
            memory_message: None,
        }
    }

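    /// Wrap the memory carried by an incoming `MemoryMessage` (e.g. a lent `Borrow`) without
    /// taking ownership: `used` is taken from the message's `offset` field and the pages are not
    /// unmapped on drop.
    ///
    /// Illustrative server-side sketch only; `msg` and the `Ping` type are hypothetical:
    ///
    /// ```ignore
    /// // `msg: &MemoryMessage` taken from the incoming message envelope.
    /// let buffer = unsafe { Buffer::from_memory_message(msg) };
    /// let ping = buffer.to_original::<Ping, _>().expect("failed to decode Ping");
    /// ```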
    #[allow(dead_code)]
    pub unsafe fn from_memory_message(mem: &'buf MemoryMessage) -> Self {
        Buffer {
            pages: mem.buf,
            slice: core::slice::from_raw_parts_mut(mem.buf.as_mut_ptr(), mem.buf.len()),
            used: mem.offset.map_or(0, |v| v.get()),
            should_drop: false,
            memory_message: None,
        }
    }

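    /// Like `from_memory_message`, but keeps a mutable reference to the message so that `replace`
    /// can update the message's `offset` before the memory is returned to the caller.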
    #[allow(dead_code)]
    pub unsafe fn from_memory_message_mut(mem: &'buf mut MemoryMessage) -> Self {
        Buffer {
            pages: mem.buf,
            slice: core::slice::from_raw_parts_mut(mem.buf.as_mut_ptr(), mem.buf.len()),
            used: mem.offset.map_or(0, |v| v.get()),
            should_drop: false,
            memory_message: Some(mem),
        }
    }

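    /// Lend the buffer to a server as a `MutableBorrow` and block until it is returned. If the
    /// server responds with `Result::MemoryReturned`, `used` is updated from the returned offset.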
    #[allow(dead_code)]
    pub fn lend_mut(&mut self, connection: CID, id: u32) -> core::result::Result<Result, Error> {
        let msg = MemoryMessage {
            id: id as usize,
            buf: self.pages,
            offset: MemoryAddress::new(self.used),
            valid: MemorySize::new(self.pages.len()),
        };

        let result = send_message(connection, Message::MutableBorrow(msg));

        // If the server returned the memory, record the offset it reported as the new `used` length.
        if let Ok(Result::MemoryReturned(offset, _valid)) = result {
            self.used = offset.map_or(0, |v| v.get());
        }

        result
    }

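    /// Lend the buffer to a server as an immutable `Borrow` and block until it is returned.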
    #[allow(dead_code)]
    pub fn lend(&self, connection: CID, id: u32) -> core::result::Result<Result, Error> {
        let msg = MemoryMessage {
            id: id as usize,
            buf: self.pages,
            offset: MemoryAddress::new(self.used),
            valid: MemorySize::new(self.pages.len()),
        };
        send_message(connection, Message::Borrow(msg))
    }

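    /// Move the buffer to a server via `Message::Move`. Ownership of the pages transfers with the
    /// message, so the local mapping is not unmapped when `self` is consumed.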
    #[allow(dead_code)]
    pub fn send(mut self, connection: CID, id: u32) -> core::result::Result<Result, Error> {
        let msg = MemoryMessage {
            id: id as usize,
            buf: self.pages,
            offset: MemoryAddress::new(self.used),
            valid: MemorySize::new(self.pages.len()),
        };
        let result = send_message(connection, Message::Move(msg))?;

        // The pages now belong to the recipient; don't unmap them when this Buffer is dropped.
        self.should_drop = false;

        Ok(result)
    }

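    /// Non-blocking variant of `send`: moves the buffer with `try_send_message` and fails instead
    /// of blocking when the message cannot be delivered immediately.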
    #[allow(dead_code)]
    pub fn try_send(mut self, connection: CID, id: u32) -> core::result::Result<Result, Error> {
        let msg = MemoryMessage {
            id: id as usize,
            buf: self.pages,
            offset: MemoryAddress::new(self.used),
            valid: MemorySize::new(self.pages.len()),
        };
        let result = try_send_message(connection, Message::Move(msg))?;

        // As with `send`, ownership of the pages moves with the message; skip unmapping on drop.
        self.should_drop = false;

        Ok(result)
    }

    fn into_buf_inner<F, T>(src: &T) -> core::result::Result<Self, ()>
    where
        F: for<'a, 'b> SerializeWith<T, Serializer<'a, 'b>>,
    {
        struct Wrap<'a, F, T>(&'a T, PhantomData<F>);

        impl<F, T> Archive for Wrap<'_, F, T>
        where
            F: ArchiveWith<T>,
        {
            type Archived = <F as ArchiveWith<T>>::Archived;
            type Resolver = <F as ArchiveWith<T>>::Resolver;

            fn resolve(&self, resolver: Self::Resolver, out: Place<Self::Archived>) {
                F::resolve_with(self.0, resolver, out)
            }
        }

        impl<'a, 'b, F, T> Serialize<Serializer<'a, 'b>> for Wrap<'_, F, T>
        where
            F: SerializeWith<T, Serializer<'a, 'b>>,
        {
            fn serialize(
                &self,
                serializer: &mut Serializer<'a, 'b>,
            ) -> core::result::Result<Self::Resolver, Failure> {
                F::serialize_with(self.0, serializer)
            }
        }
        let mut xous_buf = Self::new(core::mem::size_of::<T>());
        let mut scratch = [MaybeUninit::<u8>::uninit(); 256];

        let wrap = Wrap(src, PhantomData::<F>);
        let writer = RkyvBuffer::from(&mut xous_buf.slice[..]);
        let alloc = SubAllocator::new(&mut scratch);

        let serialized_buf =
            rkyv::api::low::to_bytes_in_with_alloc::<_, _, Failure>(&wrap, writer, alloc).map_err(|_| ())?;
        xous_buf.used = serialized_buf.pos();
        Ok(xous_buf)
    }

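    /// Serialize `src` with rkyv into a freshly allocated `Buffer`. The allocation is sized from
    /// `size_of::<T>()` and rounded up to at least one whole page.
    ///
    /// Illustrative sketch only; the `Ping` type, `conn` and `PING_OPCODE` are hypothetical:
    ///
    /// ```ignore
    /// #[derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize)]
    /// struct Ping { seq: u32 }
    ///
    /// let buf = Buffer::into_buf(Ping { seq: 7 }).expect("allocation or serialization failed");
    /// buf.lend(conn, PING_OPCODE).expect("lend failed");
    /// ```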
    #[allow(dead_code)]
    pub fn into_buf<T>(src: T) -> core::result::Result<Self, ()>
    where
        T: for<'b, 'a> rkyv::Serialize<
            rkyv::rancor::Strategy<
                rkyv::ser::Serializer<
                    rkyv::ser::writer::Buffer<'b>,
                    rkyv::ser::allocator::SubAllocator<'a>,
                    (),
                >,
                rkyv::rancor::Failure,
            >,
        >,
    {
        Buffer::into_buf_inner::<Identity, T>(&src)
    }

    fn replace_inner<F, T>(&mut self, src: T) -> core::result::Result<(), &'static str>
    where
        F: for<'a, 'b> SerializeWith<T, Serializer<'a, 'b>>,
    {
        struct Wrap<'a, F, T>(&'a T, PhantomData<F>);

        impl<F, T> Archive for Wrap<'_, F, T>
        where
            F: ArchiveWith<T>,
        {
            type Archived = <F as ArchiveWith<T>>::Archived;
            type Resolver = <F as ArchiveWith<T>>::Resolver;

            fn resolve(&self, resolver: Self::Resolver, out: Place<Self::Archived>) {
                F::resolve_with(self.0, resolver, out)
            }
        }

        impl<'a, 'b, F, T> Serialize<Serializer<'a, 'b>> for Wrap<'_, F, T>
        where
            F: SerializeWith<T, Serializer<'a, 'b>>,
        {
            fn serialize(
                &self,
                serializer: &mut Serializer<'a, 'b>,
            ) -> core::result::Result<Self::Resolver, Failure> {
                F::serialize_with(self.0, serializer)
            }
        }

        let mut scratch = [MaybeUninit::<u8>::uninit(); 256];

        let wrap = Wrap(&src, PhantomData::<F>);
        let writer = RkyvBuffer::from(&mut self.slice[..]);
        let alloc = SubAllocator::new(&mut scratch);

        // Propagate serialization failures instead of panicking, matching the Result signature.
        let serialized_buf = rkyv::api::low::to_bytes_in_with_alloc::<_, _, Failure>(&wrap, writer, alloc)
            .map_err(|_| "rkyv serialization failed")?;
        self.used = serialized_buf.pos();

        if let Some(msg) = self.memory_message.as_mut() {
            msg.offset = MemoryAddress::new(self.used);
        }
        Ok(())
    }

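    /// Re-serialize `src` into this buffer in place, updating `used` and, when the buffer wraps a
    /// mutably borrowed `MemoryMessage`, the message's `offset`, so a server can return a reply in
    /// the same pages it received.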
    #[allow(dead_code)]
    pub fn replace<T>(&mut self, src: T) -> core::result::Result<(), &'static str>
    where
        T: for<'b, 'a> rkyv::Serialize<
            rkyv::rancor::Strategy<
                rkyv::ser::Serializer<
                    rkyv::ser::writer::Buffer<'b>,
                    rkyv::ser::allocator::SubAllocator<'a>,
                    (),
                >,
                rkyv::rancor::Failure,
            >,
        >,
    {
        self.replace_inner::<Identity, T>(src)
    }

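    /// Borrow the archived form of `T` directly from the buffer without deserializing. No
    /// validation is performed (`access_unchecked`), so the buffer must actually contain an
    /// archived `T`.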
    #[allow(dead_code)]
    pub fn as_flat<T, U>(&self) -> core::result::Result<&U, ()>
    where
        T: rkyv::Archive<Archived = U>,
        U: Portable,
    {
        let r = unsafe { rkyv::access_unchecked::<U>(&self.slice[..self.used]) };
        Ok(r)
    }

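    /// Deserialize the archived `T` in this buffer back into an owned `T` using rkyv's `Pool`
    /// deserializer. As with `as_flat`, the archive is accessed without validation; a failed
    /// deserialization is reported as `Error::InternalError`.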
    #[allow(dead_code)]
    pub fn to_original<T, U>(&self) -> core::result::Result<T, Error>
    where
        T: rkyv::Archive<Archived = U>,
        U: Portable,
        <T as Archive>::Archived: Deserialize<T, Strategy<rkyv::de::Pool, rkyv::rancor::Error>>,
    {
        let r = unsafe { rkyv::access_unchecked::<U>(&self.slice[..self.used]) };
        rkyv::deserialize::<T, rkyv::rancor::Error>(r).map_err(|_| Error::InternalError)
    }

    pub fn used(&self) -> usize { self.used }
}

impl<'a> core::convert::AsRef<[u8]> for Buffer<'a> {
    fn as_ref(&self) -> &[u8] { &self.slice[..self.used] }
}

impl<'a> core::convert::AsMut<[u8]> for Buffer<'a> {
    fn as_mut(&mut self) -> &mut [u8] { &mut self.slice[..self.used] }
}

impl<'a> core::ops::Deref for Buffer<'a> {
    type Target = [u8];

    fn deref(&self) -> &Self::Target { &self.slice[..self.used] }
}

impl<'a> core::ops::DerefMut for Buffer<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target { &mut self.slice[..self.used] }
}

impl<'a> Drop for Buffer<'a> {
    fn drop(&mut self) {
        if self.should_drop {
            unmap_memory(self.pages).expect("Buffer: failed to drop memory");
        }
    }
}