1use crate::Atom;
11use std::cell::Cell;
12use std::marker::Unpin;
13use std::mem::{size_of, size_of_val};
14use urid::URID;
15
/// A read-only slice of bytes an atom may be read from.
///
/// Since `Space`s are only a view into borrowed memory, they are small and
/// freely `Copy`able.
#[derive(Clone, Copy)]
pub struct Space<'a> {
    // The viewed bytes. `None` marks an exhausted/invalid space (e.g. when
    // `split_raw` cannot skip the trailing padding); all accessors then
    // return `None`.
    data: Option<&'a [u8]>,
}
23
impl<'a> Space<'a> {
    /// Create a space that covers an atom header together with its body.
    ///
    /// The length of the created space is the size of the header plus the
    /// body size stored in `atom.size`.
    ///
    /// # Safety
    ///
    /// `atom.size` must correctly describe the memory that follows the
    /// header; otherwise the created slice reaches into unrelated memory.
    #[allow(clippy::trivially_copy_pass_by_ref)]
    pub unsafe fn from_atom(atom: &sys::LV2_Atom) -> Self {
        let size = atom.size as usize;
        let data = std::slice::from_raw_parts(
            atom as *const sys::LV2_Atom as *const u8,
            size + size_of::<sys::LV2_Atom>(),
        );
        Self::from_slice(data)
    }

    /// Wrap a plain byte slice in a `Space`.
    pub fn from_slice(data: &'a [u8]) -> Self {
        Space { data: Some(data) }
    }

    /// Split off the first `size` bytes as a raw slice.
    ///
    /// Returns the lower slice and the remaining space. The remainder is
    /// advanced past the padding needed to keep the next read 64-bit-aligned.
    /// Returns `None` when `size` exceeds the available data; when only the
    /// padding does not fit, the lower slice is still returned, paired with
    /// an exhausted (`None`) remainder.
    pub fn split_raw(self, size: usize) -> Option<(&'a [u8], Self)> {
        let data = self.data?;

        if size > data.len() {
            return None;
        }
        let (lower_space, upper_space) = data.split_at(size);

        // Round `size` up to the next multiple of 8 by skipping padding bytes
        // before handing out the remainder.
        let padding = if size % 8 == 0 { 0 } else { 8 - size % 8 };
        let upper_space = if padding <= upper_space.len() {
            let upper_space = upper_space.split_at(padding).1;
            Some(upper_space)
        } else {
            None
        };
        let upper_space = Self { data: upper_space };

        Some((lower_space, upper_space))
    }

    /// Like [`Space::split_raw`], but wrapping the lower slice in a `Space` too.
    pub fn split_space(self, size: usize) -> Option<(Self, Self)> {
        self.split_raw(size)
            .map(|(data, rhs)| (Self::from_slice(data), rhs))
    }

    /// Split off a reference to an instance of `T` from the beginning.
    ///
    /// The bounds restrict `T` to self-contained plain-old-data types, which
    /// makes the byte reinterpretation below defensible.
    /// NOTE(review): the cast assumes the slice start is suitably aligned for
    /// `T`; this relies on the module-wide 64-bit alignment discipline —
    /// confirm for types with alignment > 8.
    pub fn split_type<T>(self) -> Option<(&'a T, Self)>
    where
        T: Unpin + Copy + Send + Sync + Sized + 'static,
    {
        self.split_raw(size_of::<T>())
            .map(|(data, rhs)| (unsafe { &*(data.as_ptr() as *const T) }, rhs))
    }

    /// Split off one complete atom: the header plus as many body bytes as the
    /// header's `size` field declares.
    pub fn split_atom(self) -> Option<(Self, Self)> {
        let (header, _) = self.split_type::<sys::LV2_Atom>()?;
        self.split_space(size_of::<sys::LV2_Atom>() + header.size as usize)
    }

    /// Split off the body of the leading atom, verifying that the atom's type
    /// matches `urid`. Returns `None` on a type mismatch.
    pub fn split_atom_body<T: ?Sized>(self, urid: URID<T>) -> Option<(Self, Self)> {
        let (header, space) = self.split_type::<sys::LV2_Atom>()?;
        if header.type_ != urid.get() {
            return None;
        }
        space.split_space(header.size as usize)
    }

    /// View an arbitrary reference as a byte space.
    ///
    /// # Panics
    ///
    /// Panics if `instance` is not 64-bit-aligned.
    pub fn from_reference<T: ?Sized>(instance: &'a T) -> Self {
        let data = unsafe {
            std::slice::from_raw_parts(instance as *const T as *const u8, size_of_val(instance))
        };
        assert_eq!(data.as_ptr() as usize % 8, 0);
        Space { data: Some(data) }
    }

    /// Re-join two spaces into one.
    ///
    /// Succeeds only when `rhs` starts exactly one-past-the-end of `lhs` in
    /// memory; an exhausted side simply yields the other side.
    pub fn concat(lhs: Self, rhs: Self) -> Option<Self> {
        let lhs_data = match lhs.data {
            Some(data) => data,
            None => return Some(rhs),
        };
        let rhs_data = match rhs.data {
            Some(data) => data,
            None => return Some(lhs),
        };
        // SAFETY-relevant: `add(len)` produces the one-past-the-end pointer,
        // which is valid to compute for any allocation.
        if unsafe { lhs_data.as_ptr().add(lhs_data.len()) } == rhs_data.as_ptr() {
            Some(Self::from_slice(unsafe {
                std::slice::from_raw_parts(lhs_data.as_ptr(), lhs_data.len() + rhs_data.len())
            }))
        } else {
            None
        }
    }

    /// Borrow the underlying slice; `None` if the space is exhausted.
    pub fn data(&self) -> Option<&'a [u8]> {
        self.data
    }

    /// Mutable access to the underlying slice option.
    pub fn mut_data(&mut self) -> &mut Option<&'a [u8]> {
        &mut self.data
    }
}
155
/// An object that can dynamically hand out regions of writable memory.
pub trait MutSpace<'a> {
    /// Try to allocate `size` bytes of memory.
    ///
    /// If `apply_padding` is `true`, the allocation may be preceded by up to
    /// seven padding bytes so that the returned region starts 64-bit-aligned
    /// relative to the start of the space. The returned tuple contains the
    /// number of padding bytes inserted and the allocated region itself;
    /// `None` means the request could not be satisfied.
    fn allocate(&mut self, size: usize, apply_padding: bool) -> Option<(usize, &'a mut [u8])>;

    /// Allocate room for `data` and copy it into the space.
    ///
    /// Returns the freshly written region, or `None` if allocation failed.
    fn write_raw(&mut self, data: &[u8], apply_padding: bool) -> Option<&'a mut [u8]> {
        let (_, target) = self.allocate(data.len(), apply_padding)?;
        target.copy_from_slice(data);
        Some(target)
    }
}
177
/// A `MutSpace` that directly manages one contiguous byte buffer.
pub struct RootMutSpace<'a> {
    // The not-yet-allocated tail of the buffer. Kept in a `Cell` so the slice
    // can be taken out and put back without re-borrowing the whole struct.
    // `None` once the space has been exhausted by a failed allocation.
    space: Cell<Option<&'a mut [u8]>>,
    // Number of bytes handed out so far (payload plus padding); used by the
    // `MutSpace` impl to compute 64-bit padding relative to the buffer start.
    allocated_bytes: usize,
}
183
184impl<'a> RootMutSpace<'a> {
185 pub unsafe fn from_atom(atom: &mut sys::LV2_Atom) -> Self {
193 let space = std::slice::from_raw_parts_mut(
194 atom as *mut _ as *mut u8,
195 atom.size as usize + size_of::<sys::LV2_Atom>(),
196 );
197 Self::new(space)
198 }
199
200 pub fn new(space: &'a mut [u8]) -> Self {
204 RootMutSpace {
205 space: Cell::new(Some(space)),
206 allocated_bytes: 0,
207 }
208 }
209}
210
impl<'a> MutSpace<'a> for RootMutSpace<'a> {
    fn allocate(&mut self, size: usize, apply_padding: bool) -> Option<(usize, &'a mut [u8])> {
        // Once the space is gone, every further allocation fails.
        if self.space.get_mut().is_none() {
            return None;
        }
        // Take the remaining slice out of the cell; it is only put back on
        // success. NOTE(review): on the failure paths below the slice is
        // dropped, permanently exhausting this space — this looks deliberate
        // (a failed atom write is terminal), but confirm.
        let mut space = self.space.replace(None).unwrap();

        let padding = if apply_padding {
            // Padding is computed from the number of bytes already handed
            // out, i.e. relative to the buffer start — not from the absolute
            // address (see the `unaligned_root_write` test below).
            let alignment = self.allocated_bytes % 8;
            let padding = if alignment == 0 { 0 } else { 8 - alignment };
            if padding > space.len() {
                return None;
            }
            space = space.split_at_mut(padding).1;
            self.allocated_bytes += padding;
            padding
        } else {
            0
        };

        if size > space.len() {
            return None;
        }
        let (lower_slice, upper_slice) = space.split_at_mut(size);
        self.allocated_bytes += size;

        // Store the unused tail for the next allocation.
        self.space.set(Some(upper_slice));
        Some((padding, lower_slice))
    }
}
241
/// A linked chain of heap-allocated memory chunks.
///
/// Every element owns at most one successor, consisting of the next chain
/// element and the bytes allocated for it; a `None` successor marks the tail
/// of the chain.
#[derive(Default)]
pub struct SpaceElement {
    // The successor element together with its data chunk; `None` for the
    // tail element of the chain.
    next: Option<(Box<Self>, Box<[u8]>)>,
}

impl SpaceElement {
    /// Allocate a fresh, zero-initialized chunk of `size` bytes behind this
    /// element.
    ///
    /// Returns mutable references to the new tail element and the new chunk.
    /// Only the current tail may allocate: if this element already has a
    /// successor, `None` is returned.
    pub fn allocate(&mut self, size: usize) -> Option<(&mut Self, &mut [u8])> {
        if self.next.is_some() {
            return None;
        }

        let new_data = vec![0u8; size].into_boxed_slice();
        self.next = Some((Box::new(Self::default()), new_data));
        // `next` was just set, so this always yields `Some`.
        self.next
            .as_mut()
            .map(|(element, data)| (element.as_mut(), data.as_mut()))
    }

    /// Copy the data of all chunks behind this element into one contiguous
    /// vector.
    pub fn to_vec(&self) -> Vec<u8> {
        // `iter` yields `&[u8]` chunks; flattening and copying the bytes
        // avoids the explicit per-slice iterator (clippy: `map_flatten`) and
        // uses `copied` since `u8: Copy`.
        self.iter().flatten().copied().collect()
    }

    /// Iterate over the data chunks of the chain, starting with the chunk
    /// allocated behind `self`.
    pub fn iter(&self) -> impl Iterator<Item = &[u8]> {
        std::iter::successors(self.next.as_ref(), |element| element.0.next.as_ref())
            .map(|(_, data)| data.as_ref())
    }
}
289
/// A `MutSpace` that allocates by growing a chain of [`SpaceElement`]s.
pub struct SpaceHead<'a> {
    // The current tail of the chain; `None` after an allocation has failed.
    element: Option<&'a mut SpaceElement>,
    // Total number of bytes handed out so far, used for the 64-bit padding
    // calculation in the `MutSpace` impl.
    allocated_space: usize,
}
324
325impl<'a> SpaceHead<'a> {
326 pub fn new(element: &'a mut SpaceElement) -> Self {
328 Self {
329 element: Some(element),
330 allocated_space: 0,
331 }
332 }
333
334 fn internal_allocate(&mut self, size: usize) -> Option<&'a mut [u8]> {
335 let element = self.element.take()?;
336 let (new_element, new_space) = element.allocate(size)?;
337 self.element = Some(new_element);
338 self.allocated_space += size;
339 Some(new_space)
340 }
341}
342
343impl<'a> MutSpace<'a> for SpaceHead<'a> {
344 fn allocate(&mut self, size: usize, apply_padding: bool) -> Option<(usize, &'a mut [u8])> {
345 let padding: usize = if apply_padding {
346 (8 - self.allocated_space % 8) % 8
347 } else {
348 0
349 };
350
351 if padding != 0 {
352 self.internal_allocate(padding);
353 }
354
355 self.internal_allocate(size)
356 .map(|new_space| (padding, new_space))
357 }
358}
359
/// A `MutSpace` that allocates from a parent space while keeping the size
/// field of a previously written atom header up to date.
pub struct FramedMutSpace<'a, 'b> {
    // The header written by `FramedMutSpace::new`; its `size` field grows
    // with every allocation made through this frame.
    atom: &'a mut sys::LV2_Atom,
    // The space all actual allocations are delegated to.
    parent: &'b mut dyn MutSpace<'a>,
}
365
366impl<'a, 'b> FramedMutSpace<'a, 'b> {
367 pub fn new<A: ?Sized>(parent: &'b mut dyn MutSpace<'a>, urid: URID<A>) -> Option<Self> {
369 let atom = sys::LV2_Atom {
370 size: 0,
371 type_: urid.get(),
372 };
373 let atom: &'a mut sys::LV2_Atom = parent.write(&atom, true)?;
374 Some(Self { atom, parent })
375 }
376}
377
impl<'a, 'b> MutSpace<'a> for FramedMutSpace<'a, 'b> {
    fn allocate(&mut self, size: usize, apply_padding: bool) -> Option<(usize, &'a mut [u8])> {
        // Delegate to the parent, then book both the payload and its padding
        // into the tracked atom header.
        // NOTE(review): `as u32` silently truncates sizes above u32::MAX —
        // implausible for atoms, but unchecked.
        self.parent
            .allocate(size, apply_padding)
            .map(|(padding, data)| {
                self.atom.size += (size + padding) as u32;
                (padding, data)
            })
    }
}
388
impl<'a, 'b> dyn MutSpace<'a> + 'b {
    /// Write a sized value into the space and return a mutable reference to
    /// the written copy.
    ///
    /// The trait bounds restrict `T` to self-contained plain-old-data types,
    /// which is what makes the raw byte copy below defensible.
    pub fn write<T>(&mut self, instance: &T, apply_padding: bool) -> Option<&'a mut T>
    where
        T: Unpin + Copy + Send + Sync + Sized + 'static,
    {
        let size = std::mem::size_of::<T>();
        // View the value as raw bytes; sound because `T: Copy + Sized`.
        let input_data =
            unsafe { std::slice::from_raw_parts(instance as *const T as *const u8, size) };

        let output_data = self.write_raw(input_data, apply_padding)?;

        assert_eq!(size, output_data.len());
        // NOTE(review): reinterpreting the written bytes as `T` assumes the
        // allocation is aligned for `T`; this holds while padding keeps
        // writes 8-aligned and `align_of::<T>() <= 8` — confirm for larger
        // alignments.
        Some(unsafe { &mut *(output_data.as_mut_ptr() as *mut T) })
    }

    /// Write an atom of type `A`: emits the atom header via a new
    /// [`FramedMutSpace`] and delegates body initialization to `A::init`.
    pub fn init<'c, A: Atom<'a, 'c>>(
        &'c mut self,
        urid: URID<A>,
        parameter: A::WriteParameter,
    ) -> Option<A::WriteHandle> {
        let new_space = FramedMutSpace::new(self, urid)?;
        A::init(new_space, parameter)
    }
}
417
#[cfg(test)]
mod tests {
    use crate::space::*;
    use std::mem::{size_of, size_of_val};
    use urid::*;

    // Reading a raw byte pattern and a trailing integer back out of a Space.
    #[test]
    fn test_space() {
        let mut vector: Vec<u8> = vec![0; 256];
        for i in 0..128 {
            vector[i] = i as u8;
        }
        // Plant a u32 marker directly behind the counting pattern.
        unsafe {
            let ptr = vector.as_mut_slice().as_mut_ptr().add(128) as *mut u32;
            *(ptr) = 0x42424242;
        }

        let space = Space::from_slice(vector.as_slice());
        let (lower_space, space) = space.split_raw(128).unwrap();
        for i in 0..128 {
            assert_eq!(lower_space[i], i as u8);
        }

        // 128 is a multiple of 8, so the remainder starts right at the marker.
        let (integer, _) = space.split_type::<u32>().unwrap();
        assert_eq!(*integer, 0x42424242);
    }

    // split_atom/split_atom_body must recover a hand-crafted int atom.
    #[test]
    fn test_split_atom() {
        // A u64 array guarantees the 8-byte alignment `from_reference` asserts.
        let mut data: Box<[u64]> = Box::new([0; 256]);
        let urid: URID = unsafe { URID::new_unchecked(17) };

        unsafe {
            *(data.as_mut_ptr() as *mut sys::LV2_Atom_Int) = sys::LV2_Atom_Int {
                atom: sys::LV2_Atom {
                    size: size_of::<i32>() as u32,
                    type_: urid.get(),
                },
                body: 42,
            }
        }

        let space = Space::from_reference(data.as_ref());
        let (atom, _) = space.split_atom().unwrap();
        let (body, _) = atom.split_atom_body(urid).unwrap();
        let body = body.data().unwrap();

        assert_eq!(size_of::<i32>(), size_of_val(body));
        assert_eq!(42, unsafe { *(body.as_ptr() as *const i32) });
    }

    #[test]
    fn test_from_reference() {
        let value: u64 = 0x42424242;
        let space = Space::from_reference(&value);
        assert_eq!(value, *space.split_type::<u64>().unwrap().0);
    }

    // Splitting and re-concatenating adjacent spaces must be lossless.
    #[test]
    fn test_concat() {
        let data: Box<[u64]> = Box::new([0; 64]);
        let space = Space::from_reference(data.as_ref());
        let (lhs, rhs) = space.split_space(8).unwrap();
        let concated_space = Space::concat(lhs, rhs).unwrap();
        assert_eq!(
            space.data().unwrap().as_ptr(),
            concated_space.data().unwrap().as_ptr()
        );
        assert_eq!(
            space.data().unwrap().len(),
            concated_space.data().unwrap().len()
        );
    }

    // Checks shared by all MutSpace implementations: raw writes, typed
    // writes, and the header bookkeeping of FramedMutSpace.
    fn test_mut_space<'a, S: MutSpace<'a>>(mut space: S) {
        let map = HashURIDMapper::new();
        let urids = crate::AtomURIDCollection::from_map(&map).unwrap();

        let mut test_data: Vec<u8> = vec![0; 24];
        for i in 0..test_data.len() {
            test_data[i] = i as u8;
        }

        match space.write_raw(test_data.as_slice(), true) {
            Some(written_data) => assert_eq!(test_data.as_slice(), written_data),
            None => panic!("Writing failed!"),
        }

        let test_atom = sys::LV2_Atom { size: 42, type_: 1 };
        let written_atom = (&mut space as &mut dyn MutSpace)
            .write(&test_atom, true)
            .unwrap();
        assert_eq!(written_atom.size, test_atom.size);
        assert_eq!(written_atom.type_, test_atom.type_);

        // A root space built over the freshly written atom must cover exactly
        // header + body.
        let created_space = unsafe { RootMutSpace::from_atom(written_atom) }
            .space
            .take()
            .unwrap();
        assert_eq!(
            created_space.as_ptr() as usize,
            written_atom as *mut _ as usize
        );
        assert_eq!(created_space.len(), size_of::<sys::LV2_Atom>() + 42);

        let mut atom_frame =
            FramedMutSpace::new(&mut space as &mut dyn MutSpace, urids.chunk).unwrap();

        let mut test_data: Vec<u8> = vec![0; 24];
        for i in 0..test_data.len() {
            test_data[i] = i as u8;
        }

        // Every write through the frame must be booked into the atom header.
        let written_data = atom_frame.write_raw(test_data.as_slice(), true).unwrap();
        assert_eq!(test_data.as_slice(), written_data);
        assert_eq!(atom_frame.atom.size, test_data.len() as u32);

        let test_atom = sys::LV2_Atom { size: 42, type_: 1 };
        let borrowed_frame = &mut atom_frame as &mut dyn MutSpace;
        let written_atom = borrowed_frame.write(&test_atom, true).unwrap();
        assert_eq!(written_atom.size, test_atom.size);
        assert_eq!(written_atom.type_, test_atom.type_);
        assert_eq!(
            atom_frame.atom.size as usize,
            test_data.len() + size_of_val(&test_atom)
        );
    }

    #[test]
    fn test_root_mut_space() {
        const MEMORY_SIZE: usize = 256;
        // A u64 backing store keeps the buffer 8-aligned.
        let mut memory: [u64; MEMORY_SIZE] = [0; MEMORY_SIZE];
        let frame: RootMutSpace = RootMutSpace::new(unsafe {
            std::slice::from_raw_parts_mut(
                (&mut memory).as_mut_ptr() as *mut u8,
                MEMORY_SIZE * size_of::<u64>(),
            )
        });

        test_mut_space(frame);
    }

    #[test]
    fn test_space_head() {
        let mut space = SpaceElement::default();
        let head = SpaceHead::new(&mut space);
        test_mut_space(head);
    }

    // Two u32 writes through a frame must be padded apart, and the padding
    // must be booked into the header: 4 + 4 padding + 4 = 12 bytes.
    #[test]
    fn test_padding_inside_frame() {
        const MEMORY_SIZE: usize = 256;
        let mut memory: [u64; MEMORY_SIZE] = [0; MEMORY_SIZE];
        let raw_space: &mut [u8] = unsafe {
            std::slice::from_raw_parts_mut(
                (&mut memory).as_mut_ptr() as *mut u8,
                MEMORY_SIZE * size_of::<u64>(),
            )
        };

        // Write the atom.
        {
            let mut root: RootMutSpace = RootMutSpace::new(raw_space);
            let mut frame =
                FramedMutSpace::new(&mut root as &mut dyn MutSpace, URID::<()>::new(1).unwrap())
                    .unwrap();
            {
                let frame = &mut frame as &mut dyn MutSpace;
                frame.write::<u32>(&42, true).unwrap();
                frame.write::<u32>(&17, true).unwrap();
            }
        }

        // Verify the produced layout byte by byte.
        {
            let (atom, space) = raw_space.split_at(size_of::<sys::LV2_Atom>());
            let atom = unsafe { &*(atom.as_ptr() as *const sys::LV2_Atom) };
            assert_eq!(atom.type_, 1);
            assert_eq!(atom.size as usize, 12);

            let (value, space) = space.split_at(size_of::<u32>());
            let value = unsafe { *(value.as_ptr() as *const u32) };
            assert_eq!(value, 42);
            // Skip the four padding bytes between the two values.
            let (_, space) = space.split_at(4);

            let (value, _) = space.split_at(size_of::<u32>());
            let value = unsafe { *(value.as_ptr() as *const u32) };
            assert_eq!(value, 17);
        }
    }

    // RootMutSpace pads relative to the slice start, not to absolute
    // addresses: a buffer starting at offset 3 still writes its first byte
    // at that offset.
    #[test]
    fn unaligned_root_write() {
        let mut raw_space = Box::new([0u8; 8]);

        {
            let mut root_space = RootMutSpace::new(&mut raw_space[3..]);
            (&mut root_space as &mut dyn MutSpace)
                .write(&42u8, true)
                .unwrap();
        }

        assert_eq!(&[0, 0, 0, 42, 0, 0, 0, 0], raw_space.as_ref());
    }
}