lv2_atom/space.rs

//! Smart pointers with safe atom reading and writing methods.
//!
//! # Safety
//!
//! The only unsafe things that happen in this module are when a space is created from a reference to a `sys::LV2_Atom` and when a space is re-interpreted as typed data.
//!
//! In the first case, we have to trust that the space behind the atom header is accessible, since we have no way to check whether it is or not. Therefore, we have to assume that it is sound.
//!
//! The second case is sound since a) the data is contained in a slice and therefore is accessible, b) generic type parameter bounds ensure that the type is plain-old-data and c) 64-bit padding is ensured.
use crate::Atom;
use std::cell::Cell;
use std::marker::Unpin;
use std::mem::{size_of, size_of_val};
use urid::URID;

/// Specialized smart pointer to retrieve struct instances from a slice of memory.
///
/// The accessor methods of this struct all behave in a similar way: If the internal slice is big enough, they create a reference to the start of the slice with the desired type and return a new space object that contains the space after the referenced instance.
#[derive(Clone, Copy)]
pub struct Space<'a> {
    data: Option<&'a [u8]>,
}

impl<'a> Space<'a> {
    /// Create a new space from an atom pointer.
    ///
    /// The method creates a space that contains the atom as well as its body.
    ///
    /// # Safety
    ///
    /// Since the body is not included in the atom reference, this method has to assume that it is valid memory and is therefore unsafe, but sound.
    #[allow(clippy::trivially_copy_pass_by_ref)]
    pub unsafe fn from_atom(atom: &sys::LV2_Atom) -> Self {
        let size = atom.size as usize;
        let data = std::slice::from_raw_parts(
            atom as *const sys::LV2_Atom as *const u8,
            size + size_of::<sys::LV2_Atom>(),
        );
        Self::from_slice(data)
    }

    /// Create a new space from a slice.
    ///
    /// Since everything regarding atoms is 64-bit-aligned, the caller must ensure that the data slice is 64-bit-aligned; this is not checked by the method.
    pub fn from_slice(data: &'a [u8]) -> Self {
        Space { data: Some(data) }
    }

    /// Try to retrieve a slice of bytes.
    ///
    /// This method splits off the lower part of the internal byte slice and creates a new atom space from the upper part. Since atoms have to be 64-bit-aligned, there may be padding bytes that belong to neither the lower nor the upper part.
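    ///
    /// # Example
    ///
    /// A minimal sketch of a split, using a 64-bit-aligned buffer of zeros as the space:
    ///
    /// ```
    /// # use lv2_atom::space::Space;
    /// let data = [0u64; 2];
    /// let space = Space::from_reference(&data);
    /// let (lower, rest) = space.split_raw(4).unwrap();
    /// assert_eq!(4, lower.len());
    /// // Four padding bytes are skipped; eight of the original 16 bytes remain.
    /// assert_eq!(8, rest.data().unwrap().len());
    /// ```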
    pub fn split_raw(self, size: usize) -> Option<(&'a [u8], Self)> {
        let data = self.data?;

        if size > data.len() {
            return None;
        }
        let (lower_space, upper_space) = data.split_at(size);

        // Apply padding.
        let padding = if size % 8 == 0 { 0 } else { 8 - size % 8 };
        let upper_space = if padding <= upper_space.len() {
            let upper_space = upper_space.split_at(padding).1;
            Some(upper_space)
        } else {
            None
        };
        let upper_space = Self { data: upper_space };

        Some((lower_space, upper_space))
    }

    /// Try to retrieve space.
    ///
    /// This method calls [`split_raw`](#method.split_raw) and wraps the returned slice in an atom space. The second space is the space after the first one.
    pub fn split_space(self, size: usize) -> Option<(Self, Self)> {
        self.split_raw(size)
            .map(|(data, rhs)| (Self::from_slice(data), rhs))
    }

    /// Try to retrieve a reference to a sized type.
    ///
    /// This method retrieves a slice of memory using the [`split_raw`](#method.split_raw) method and interprets it as an instance of `T`. The generic type bounds ensure that `T` is plain-old-data, which makes this reinterpretation sound. The second return value is the space after the instance of `T`.
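    ///
    /// # Example
    ///
    /// A minimal sketch that reads back a 64-bit-aligned value:
    ///
    /// ```
    /// # use lv2_atom::space::Space;
    /// let value: u64 = 0x42424242;
    /// let space = Space::from_reference(&value);
    /// let (read_value, _) = space.split_type::<u64>().unwrap();
    /// assert_eq!(0x42424242, *read_value);
    /// ```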
    pub fn split_type<T>(self) -> Option<(&'a T, Self)>
    where
        T: Unpin + Copy + Send + Sync + Sized + 'static,
    {
        self.split_raw(size_of::<T>())
            .map(|(data, rhs)| (unsafe { &*(data.as_ptr() as *const T) }, rhs))
    }

    /// Try to retrieve the space occupied by an atom.
    ///
    /// This method assumes that the space contains an atom and retrieves the space occupied by it, including the atom header. The second return value is the rest of the space behind the atom.
    ///
    /// The difference from [`split_atom_body`](#method.split_atom_body) is that the returned space contains the header of the atom and that the type of the atom is not checked.
    pub fn split_atom(self) -> Option<(Self, Self)> {
        let (header, _) = self.split_type::<sys::LV2_Atom>()?;
        self.split_space(size_of::<sys::LV2_Atom>() + header.size as usize)
    }

    /// Try to retrieve the body of the atom.
    ///
    /// This method retrieves the header of the atom. If the type URID in the header matches the given URID, it returns the body of the atom; if not, it returns `None`. The first space is the body of the atom, the second one is the space behind it.
    ///
    /// The difference from [`split_atom`](#method.split_atom) is that the returned space does not contain the header of the atom and that the type of the atom is checked.
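    ///
    /// # Example
    ///
    /// A minimal sketch that writes an integer atom and splits its body off again; the URID setup mirrors the [`SpaceHead`](struct.SpaceHead.html) example:
    ///
    /// ```
    /// # use lv2_atom::prelude::*;
    /// # use lv2_atom::space::*;
    /// # use urid::*;
    /// let map = HashURIDMapper::new();
    /// let urids: AtomURIDCollection = map.populate_collection().unwrap();
    ///
    /// let mut element = SpaceElement::default();
    /// let mut head = SpaceHead::new(&mut element);
    /// (&mut head as &mut dyn MutSpace).init(urids.int, 42).unwrap();
    ///
    /// let data = element.to_vec();
    /// let space = Space::from_slice(data.as_ref());
    /// let (body, _) = space.split_atom_body(urids.int).unwrap();
    /// assert!(body.data().is_some());
    /// ```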
    pub fn split_atom_body<T: ?Sized>(self, urid: URID<T>) -> Option<(Self, Self)> {
        let (header, space) = self.split_type::<sys::LV2_Atom>()?;
        if header.type_ != urid.get() {
            return None;
        }
        space.split_space(header.size as usize)
    }

    /// Create a space from a reference.
    ///
    /// # Panics
    ///
    /// This method panics if the referenced instance is not 64-bit-aligned.
    pub fn from_reference<T: ?Sized>(instance: &'a T) -> Self {
        let data = unsafe {
            std::slice::from_raw_parts(instance as *const T as *const u8, size_of_val(instance))
        };
        assert_eq!(data.as_ptr() as usize % 8, 0);
        Space { data: Some(data) }
    }

    /// Concatenate two spaces.
    ///
    /// There are situations where a space is split more often than necessary and you may want to reunite two adjacent spaces. This method checks whether the given spaces are adjacent, which means that the left space has to end exactly where the right one begins. In that case, the concatenated space is returned. Otherwise, this method returns `None`.
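    ///
    /// # Example
    ///
    /// A minimal sketch that splits a space and reunites the two halves:
    ///
    /// ```
    /// # use lv2_atom::space::Space;
    /// let data = [0u64; 4];
    /// let space = Space::from_reference(&data);
    /// let (lhs, rhs) = space.split_space(8).unwrap();
    /// let reunited = Space::concat(lhs, rhs).unwrap();
    /// assert_eq!(space.data().unwrap().len(), reunited.data().unwrap().len());
    /// ```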
    pub fn concat(lhs: Self, rhs: Self) -> Option<Self> {
        let lhs_data = match lhs.data {
            Some(data) => data,
            None => return Some(rhs),
        };
        let rhs_data = match rhs.data {
            Some(data) => data,
            None => return Some(lhs),
        };
        if unsafe { lhs_data.as_ptr().add(lhs_data.len()) } == rhs_data.as_ptr() {
            Some(Self::from_slice(unsafe {
                std::slice::from_raw_parts(lhs_data.as_ptr(), lhs_data.len() + rhs_data.len())
            }))
        } else {
            None
        }
    }

    /// Return the internal slice of the space.
    pub fn data(&self) -> Option<&'a [u8]> {
        self.data
    }

    /// Return a mutable reference to the internal slice of the space.
    pub fn mut_data(&mut self) -> &mut Option<&'a [u8]> {
        &mut self.data
    }
}

/// A smart pointer that writes atom data to an internal slice.
///
/// The methods provided by this trait are fairly minimalistic. More convenient writing methods are implemented for `dyn MutSpace`.
pub trait MutSpace<'a> {
    /// Try to allocate memory on the internal data slice.
    ///
    /// If `apply_padding` is `true`, the method ensures that the allocated memory is 64-bit-aligned. The first return value is the number of padding bytes that have been used and the second return value is a mutable slice referencing the allocated data.
    ///
    /// Once memory has been allocated, the `MutSpace` cannot allocate it again. The next allocated slice is directly behind it.
    fn allocate(&mut self, size: usize, apply_padding: bool) -> Option<(usize, &'a mut [u8])>;

    /// Try to write data to the internal data slice.
    ///
    /// The method allocates a slice with the [`allocate`](#tymethod.allocate) method and copies the data to it.
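    ///
    /// # Example
    ///
    /// A minimal sketch using [`RootMutSpace`](struct.RootMutSpace.html) as the allocator:
    ///
    /// ```
    /// # use lv2_atom::space::{MutSpace, RootMutSpace};
    /// let mut memory = [0u8; 32];
    /// let mut space = RootMutSpace::new(&mut memory);
    /// let written = space.write_raw(&[1, 2, 3, 4], true).unwrap();
    /// assert_eq!(&[1, 2, 3, 4], written);
    /// ```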
    fn write_raw(&mut self, data: &[u8], apply_padding: bool) -> Option<&'a mut [u8]> {
        self.allocate(data.len(), apply_padding).map(|(_, space)| {
            space.copy_from_slice(data);
            space
        })
    }
}

/// A `MutSpace` that directly manages its own internal data slice.
pub struct RootMutSpace<'a> {
    space: Cell<Option<&'a mut [u8]>>,
    allocated_bytes: usize,
}

impl<'a> RootMutSpace<'a> {
    /// Create a new space from an atom.
    ///
    /// The method creates a space that contains the atom as well as its body.
    ///
    /// # Safety
    ///
    /// Since the body is not included in the atom reference, this method has to assume that it is valid memory and is therefore unsafe.
    pub unsafe fn from_atom(atom: &mut sys::LV2_Atom) -> Self {
        let space = std::slice::from_raw_parts_mut(
            atom as *mut _ as *mut u8,
            atom.size as usize + size_of::<sys::LV2_Atom>(),
        );
        Self::new(space)
    }

    /// Create a new instance.
    ///
    /// This method takes the mutable byte slice that the space will write to.
    pub fn new(space: &'a mut [u8]) -> Self {
        RootMutSpace {
            space: Cell::new(Some(space)),
            allocated_bytes: 0,
        }
    }
}

impl<'a> MutSpace<'a> for RootMutSpace<'a> {
    fn allocate(&mut self, size: usize, apply_padding: bool) -> Option<(usize, &'a mut [u8])> {
        if self.space.get_mut().is_none() {
            return None;
        }
        let mut space = self.space.replace(None).unwrap();

        let padding = if apply_padding {
            let alignment = self.allocated_bytes % 8;
            let padding = if alignment == 0 { 0 } else { 8 - alignment };
            if padding > space.len() {
                return None;
            }
            space = space.split_at_mut(padding).1;
            self.allocated_bytes += padding;
            padding
        } else {
            0
        };

        if size > space.len() {
            return None;
        }
        let (lower_slice, upper_slice) = space.split_at_mut(size);
        self.allocated_bytes += size;

        self.space.set(Some(upper_slice));
        Some((padding, lower_slice))
    }
}

/// Linked list element for dynamic atom writing.
///
/// This struct works in conjunction with [`SpaceHead`](struct.SpaceHead.html) to provide a way to write atoms to dynamically allocated memory.
pub struct SpaceElement {
    next: Option<(Box<Self>, Box<[u8]>)>,
}

impl Default for SpaceElement {
    fn default() -> Self {
        Self { next: None }
    }
}

impl SpaceElement {
    /// Append an element to the list.
    ///
    /// If this is the last element of the list, this method allocates a slice of the required length, appends a new element to the list and returns the new element together with the allocated slice. If not, it does nothing and returns `None`.
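    ///
    /// # Example
    ///
    /// A minimal sketch that allocates a chunk and reads it back:
    ///
    /// ```
    /// # use lv2_atom::space::SpaceElement;
    /// let mut element = SpaceElement::default();
    /// {
    ///     let (_, data) = element.allocate(4).unwrap();
    ///     data.copy_from_slice(&[1, 2, 3, 4]);
    /// }
    /// assert_eq!(vec![1, 2, 3, 4], element.to_vec());
    /// ```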
    pub fn allocate(&mut self, size: usize) -> Option<(&mut Self, &mut [u8])> {
        if self.next.is_some() {
            return None;
        }

        let new_data = vec![0u8; size].into_boxed_slice();
        let new_element = Box::new(Self::default());
        self.next = Some((new_element, new_data));
        self.next
            .as_mut()
            .map(|(new_element, new_data): &mut (Box<Self>, Box<[u8]>)| {
                (new_element.as_mut(), new_data.as_mut())
            })
    }

    /// Create a vector containing the data from all elements following this one.
    pub fn to_vec(&self) -> Vec<u8> {
        self.iter()
            .map(|slice| slice.iter())
            .flatten()
            .cloned()
            .collect()
    }

    /// Return an iterator over the chunks of all elements following this one.
    pub fn iter(&self) -> impl Iterator<Item = &[u8]> {
        std::iter::successors(self.next.as_ref(), |element| element.0.next.as_ref())
            .map(|(_, data)| data.as_ref())
    }
}

/// A mutable space that dynamically allocates memory.
///
/// This space uses a linked list of [`SpaceElement`s](struct.SpaceElement.html) to allocate memory. Every time `allocate` is called, a new element is appended to the list and a new byte slice is created.
///
/// In order to use this space and retrieve the data once it has been written, you create a `SpaceElement` and create a new head with it. Then, you use the head like any other `MutSpace` and, when you're done, you retrieve the written data by calling either [`to_vec`](struct.SpaceElement.html#method.to_vec) or [`iter`](struct.SpaceElement.html#method.iter).
///
/// # Usage example
///
/// ```
/// # use lv2_core::prelude::*;
/// # use lv2_atom::prelude::*;
/// # use lv2_atom::space::*;
/// # use urid::*;
/// # use std::pin::Pin;
/// # let map = HashURIDMapper::new();
/// // URID cache creation is omitted.
/// let urids: AtomURIDCollection = map.populate_collection().unwrap();
///
/// // Creating the first element in the list and the writing head.
/// let mut element = SpaceElement::default();
/// let mut head = SpaceHead::new(&mut element);
///
/// // Writing an integer.
/// (&mut head as &mut dyn MutSpace).init(urids.int, 42).unwrap();
///
/// // Retrieving a continuous vector with the written data and verifying its contents.
/// let written_data: Vec<u8> = element.to_vec();
/// let atom = UnidentifiedAtom::new(Space::from_slice(written_data.as_ref()));
/// assert_eq!(42, atom.read(urids.int, ()).unwrap());
/// ```
pub struct SpaceHead<'a> {
    element: Option<&'a mut SpaceElement>,
    allocated_space: usize,
}

impl<'a> SpaceHead<'a> {
    /// Create a new head that references the given element.
    pub fn new(element: &'a mut SpaceElement) -> Self {
        Self {
            element: Some(element),
            allocated_space: 0,
        }
    }

    fn internal_allocate(&mut self, size: usize) -> Option<&'a mut [u8]> {
        let element = self.element.take()?;
        let (new_element, new_space) = element.allocate(size)?;
        self.element = Some(new_element);
        self.allocated_space += size;
        Some(new_space)
    }
}

impl<'a> MutSpace<'a> for SpaceHead<'a> {
    fn allocate(&mut self, size: usize, apply_padding: bool) -> Option<(usize, &'a mut [u8])> {
        let padding: usize = if apply_padding {
            (8 - self.allocated_space % 8) % 8
        } else {
            0
        };

        if padding != 0 {
            self.internal_allocate(padding);
        }

        self.internal_allocate(size)
            .map(|new_space| (padding, new_space))
    }
}

/// A `MutSpace` that notes the amount of allocated space in an atom header.
pub struct FramedMutSpace<'a, 'b> {
    atom: &'a mut sys::LV2_Atom,
    parent: &'b mut dyn MutSpace<'a>,
}

impl<'a, 'b> FramedMutSpace<'a, 'b> {
    /// Create a new framed space with the given parent and type URID.
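    ///
    /// # Example
    ///
    /// A minimal sketch, assuming an arbitrary type URID of 1:
    ///
    /// ```
    /// # use lv2_atom::space::*;
    /// # use urid::URID;
    /// let mut memory = [0u8; 64];
    /// let mut root = RootMutSpace::new(&mut memory);
    /// let mut frame =
    ///     FramedMutSpace::new(&mut root as &mut dyn MutSpace, URID::<()>::new(1).unwrap())
    ///         .unwrap();
    /// // Every byte written through the frame is added to the size noted in the atom header.
    /// (&mut frame as &mut dyn MutSpace).write::<u32>(&42, true).unwrap();
    /// ```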
    pub fn new<A: ?Sized>(parent: &'b mut dyn MutSpace<'a>, urid: URID<A>) -> Option<Self> {
        let atom = sys::LV2_Atom {
            size: 0,
            type_: urid.get(),
        };
        let atom: &'a mut sys::LV2_Atom = parent.write(&atom, true)?;
        Some(Self { atom, parent })
    }
}

impl<'a, 'b> MutSpace<'a> for FramedMutSpace<'a, 'b> {
    fn allocate(&mut self, size: usize, apply_padding: bool) -> Option<(usize, &'a mut [u8])> {
        self.parent
            .allocate(size, apply_padding)
            .map(|(padding, data)| {
                self.atom.size += (size + padding) as u32;
                (padding, data)
            })
    }
}

impl<'a, 'b> dyn MutSpace<'a> + 'b {
    /// Write a sized object to the space.
    ///
    /// If `apply_padding` is `true`, the method ensures that the written instance is 64-bit-aligned.
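    ///
    /// # Example
    ///
    /// A minimal sketch that writes a `u32` into a root space:
    ///
    /// ```
    /// # use lv2_atom::space::{MutSpace, RootMutSpace};
    /// let mut memory = [0u8; 32];
    /// let mut space = RootMutSpace::new(&mut memory);
    /// let written = (&mut space as &mut dyn MutSpace).write(&42u32, true).unwrap();
    /// assert_eq!(42, *written);
    /// ```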
    pub fn write<T>(&mut self, instance: &T, apply_padding: bool) -> Option<&'a mut T>
    where
        T: Unpin + Copy + Send + Sync + Sized + 'static,
    {
        let size = std::mem::size_of::<T>();
        let input_data =
            unsafe { std::slice::from_raw_parts(instance as *const T as *const u8, size) };

        let output_data = self.write_raw(input_data, apply_padding)?;

        assert_eq!(size, output_data.len());
        Some(unsafe { &mut *(output_data.as_mut_ptr() as *mut T) })
    }

    /// Initialize a new atom in the space.
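    ///
    /// # Example
    ///
    /// A minimal sketch that initializes an integer atom, assuming a populated URID collection:
    ///
    /// ```
    /// # use lv2_atom::prelude::*;
    /// # use lv2_atom::space::*;
    /// # use urid::*;
    /// let map = HashURIDMapper::new();
    /// let urids: AtomURIDCollection = map.populate_collection().unwrap();
    ///
    /// let mut memory = [0u8; 64];
    /// let mut space = RootMutSpace::new(&mut memory);
    /// (&mut space as &mut dyn MutSpace).init(urids.int, 42).unwrap();
    /// ```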
    pub fn init<'c, A: Atom<'a, 'c>>(
        &'c mut self,
        urid: URID<A>,
        parameter: A::WriteParameter,
    ) -> Option<A::WriteHandle> {
        let new_space = FramedMutSpace::new(self, urid)?;
        A::init(new_space, parameter)
    }
}

#[cfg(test)]
mod tests {
    use crate::space::*;
    use std::mem::{size_of, size_of_val};
    use urid::*;

    #[test]
    fn test_space() {
        let mut vector: Vec<u8> = vec![0; 256];
        for (i, byte) in vector.iter_mut().enumerate().take(128) {
            *byte = i as u8;
        }
        unsafe {
            let ptr = vector.as_mut_slice().as_mut_ptr().add(128) as *mut u32;
            *(ptr) = 0x42424242;
        }

        let space = Space::from_slice(vector.as_slice());
        let (lower_space, space) = space.split_raw(128).unwrap();
        for (i, byte) in lower_space.iter().enumerate() {
            assert_eq!(*byte, i as u8);
        }

        let (integer, _) = space.split_type::<u32>().unwrap();
        assert_eq!(*integer, 0x42424242);
    }

    #[test]
    fn test_split_atom() {
        let mut data: Box<[u64]> = Box::new([0; 256]);
        let urid: URID = unsafe { URID::new_unchecked(17) };

        // Writing an integer atom.
        unsafe {
            *(data.as_mut_ptr() as *mut sys::LV2_Atom_Int) = sys::LV2_Atom_Int {
                atom: sys::LV2_Atom {
                    size: size_of::<i32>() as u32,
                    type_: urid.get(),
                },
                body: 42,
            }
        }

        let space = Space::from_reference(data.as_ref());
        let (atom, _) = space.split_atom().unwrap();
        let (body, _) = atom.split_atom_body(urid).unwrap();
        let body = body.data().unwrap();

        assert_eq!(size_of::<i32>(), size_of_val(body));
        assert_eq!(42, unsafe { *(body.as_ptr() as *const i32) });
    }

    #[test]
    fn test_from_reference() {
        let value: u64 = 0x42424242;
        let space = Space::from_reference(&value);
        assert_eq!(value, *space.split_type::<u64>().unwrap().0);
    }

    #[test]
    fn test_concat() {
        let data: Box<[u64]> = Box::new([0; 64]);
        let space = Space::from_reference(data.as_ref());
        let (lhs, rhs) = space.split_space(8).unwrap();
        let concatenated_space = Space::concat(lhs, rhs).unwrap();
        assert_eq!(
            space.data().unwrap().as_ptr(),
            concatenated_space.data().unwrap().as_ptr()
        );
        assert_eq!(
            space.data().unwrap().len(),
            concatenated_space.data().unwrap().len()
        );
    }

    fn test_mut_space<'a, S: MutSpace<'a>>(mut space: S) {
        let map = HashURIDMapper::new();
        let urids = crate::AtomURIDCollection::from_map(&map).unwrap();

        let mut test_data: Vec<u8> = vec![0; 24];
        for (i, byte) in test_data.iter_mut().enumerate() {
            *byte = i as u8;
        }

        match space.write_raw(test_data.as_slice(), true) {
            Some(written_data) => assert_eq!(test_data.as_slice(), written_data),
            None => panic!("Writing failed!"),
        }

        let test_atom = sys::LV2_Atom { size: 42, type_: 1 };
        let written_atom = (&mut space as &mut dyn MutSpace)
            .write(&test_atom, true)
            .unwrap();
        assert_eq!(written_atom.size, test_atom.size);
        assert_eq!(written_atom.type_, test_atom.type_);

        let created_space = unsafe { RootMutSpace::from_atom(written_atom) }
            .space
            .take()
            .unwrap();
        assert_eq!(
            created_space.as_ptr() as usize,
            written_atom as *mut _ as usize
        );
        assert_eq!(created_space.len(), size_of::<sys::LV2_Atom>() + 42);

        let mut atom_frame =
            FramedMutSpace::new(&mut space as &mut dyn MutSpace, urids.chunk).unwrap();

        let mut test_data: Vec<u8> = vec![0; 24];
        for (i, byte) in test_data.iter_mut().enumerate() {
            *byte = i as u8;
        }

        let written_data = atom_frame.write_raw(test_data.as_slice(), true).unwrap();
        assert_eq!(test_data.as_slice(), written_data);
        assert_eq!(atom_frame.atom.size, test_data.len() as u32);

        let test_atom = sys::LV2_Atom { size: 42, type_: 1 };
        let borrowed_frame = &mut atom_frame as &mut dyn MutSpace;
        let written_atom = borrowed_frame.write(&test_atom, true).unwrap();
        assert_eq!(written_atom.size, test_atom.size);
        assert_eq!(written_atom.type_, test_atom.type_);
        assert_eq!(
            atom_frame.atom.size as usize,
            test_data.len() + size_of_val(&test_atom)
        );
    }

    #[test]
    fn test_root_mut_space() {
        const MEMORY_SIZE: usize = 256;
        let mut memory: [u64; MEMORY_SIZE] = [0; MEMORY_SIZE];
        let frame: RootMutSpace = RootMutSpace::new(unsafe {
            std::slice::from_raw_parts_mut(
                (&mut memory).as_mut_ptr() as *mut u8,
                MEMORY_SIZE * size_of::<u64>(),
            )
        });

        test_mut_space(frame);
    }

    #[test]
    fn test_space_head() {
        let mut space = SpaceElement::default();
        let head = SpaceHead::new(&mut space);
        test_mut_space(head);
    }

    #[test]
    fn test_padding_inside_frame() {
        const MEMORY_SIZE: usize = 256;
        let mut memory: [u64; MEMORY_SIZE] = [0; MEMORY_SIZE];
        let raw_space: &mut [u8] = unsafe {
            std::slice::from_raw_parts_mut(
                (&mut memory).as_mut_ptr() as *mut u8,
                MEMORY_SIZE * size_of::<u64>(),
            )
        };

        // writing
        {
            let mut root: RootMutSpace = RootMutSpace::new(raw_space);
            let mut frame =
                FramedMutSpace::new(&mut root as &mut dyn MutSpace, URID::<()>::new(1).unwrap())
                    .unwrap();
            {
                let frame = &mut frame as &mut dyn MutSpace;
                frame.write::<u32>(&42, true).unwrap();
                frame.write::<u32>(&17, true).unwrap();
            }
        }

        // checking
        {
            let (atom, space) = raw_space.split_at(size_of::<sys::LV2_Atom>());
            let atom = unsafe { &*(atom.as_ptr() as *const sys::LV2_Atom) };
            assert_eq!(atom.type_, 1);
            assert_eq!(atom.size as usize, 12);

            let (value, space) = space.split_at(size_of::<u32>());
            let value = unsafe { *(value.as_ptr() as *const u32) };
            assert_eq!(value, 42);
            let (_, space) = space.split_at(4);

            let (value, _) = space.split_at(size_of::<u32>());
            let value = unsafe { *(value.as_ptr() as *const u32) };
            assert_eq!(value, 17);
        }
    }

    #[test]
    fn unaligned_root_write() {
        let mut raw_space = Box::new([0u8; 8]);

        {
            let mut root_space = RootMutSpace::new(&mut raw_space[3..]);
            (&mut root_space as &mut dyn MutSpace)
                .write(&42u8, true)
                .unwrap();
        }

        assert_eq!(&[0, 0, 0, 42, 0, 0, 0, 0], raw_space.as_ref());
    }
}