1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
use core::cell::UnsafeCell;
use core::mem;
use core::ptr;
use core::slice;

use musli::context::Buffer;

use crate::allocator::Allocator;
use crate::fixed::FixedVec;

/// Buffer used in combination with a `Context`.
///
/// This type of allocator has a fixed capacity specified by `C` and can be
/// constructed statically.
pub struct NoStd<const C: usize> {
    // This must be an `UnsafeCell`, since it is mutably accessed through
    // shared (`&self`) references. We simply make sure that those accesses do
    // not clobber each other, which we can do since the API is restricted
    // through the `Buffer` trait.
    scratch: UnsafeCell<FixedVec<u8, C>>,
}

impl<const C: usize> NoStd<C> {
    /// Construct a new no-std allocator with a fixed capacity of `C` bytes.
    ///
    /// This is a `const fn`, so the allocator can be placed in a `static`.
    pub const fn new() -> Self {
        let scratch = UnsafeCell::new(FixedVec::new());
        Self { scratch }
    }
}

impl<const C: usize> Default for NoStd<C> {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl<'a, const C: usize> Allocator for &'a NoStd<C> {
    type Buf = Buf<'a, C>;

    /// Allocate a new buffer on top of the shared scratch storage.
    ///
    /// The new buffer starts at the current end of the scratch vector and is
    /// initially empty.
    #[inline(always)]
    fn alloc(&self) -> Self::Buf {
        // SAFETY: The shared scratch vector is only read for the duration of
        // this expression; no other reference to it is held here.
        let base = unsafe { (*self.scratch.get()).len() };

        Buf {
            base,
            len: 0,
            data: &self.scratch,
        }
    }
}

/// A no-std allocated buffer.
///
/// Represents the region `base..base + len` of the shared scratch vector
/// owned by the [`NoStd`] allocator it was allocated from.
pub struct Buf<'a, const C: usize> {
    // Offset into the shared scratch vector where this buffer's data begins.
    base: usize,
    // Number of bytes written to this buffer so far.
    len: usize,
    // The allocator's shared backing storage.
    data: &'a UnsafeCell<FixedVec<u8, C>>,
}

impl<'a, const C: usize> Buffer for Buf<'a, C> {
    /// Append `bytes` to the end of this buffer, growing the shared vector.
    ///
    /// Returns `false` without writing anything if the fixed capacity `C`
    /// would be exceeded. Panics if this buffer is not the topmost (most
    /// recently grown) region of the shared vector, since appending would
    /// otherwise clobber data belonging to a later allocation.
    #[inline]
    fn write(&mut self, bytes: &[u8]) -> bool {
        // SAFETY: The mutable access to the shared scratch vector is confined
        // to this call; see the note on `NoStd::scratch`.
        unsafe {
            let data = &mut *self.data.get();
            // This buffer must end exactly where the shared vector ends.
            assert_eq!(data.len(), self.len.wrapping_add(self.base));

            if data.try_extend_from_slice(bytes).is_err() {
                return false;
            }

            self.len = self.len.wrapping_add(bytes.len());
        }

        true
    }

    /// Overwrite previously written bytes starting at offset `at`.
    ///
    /// Returns `false` if the destination range does not fall entirely within
    /// already-written data.
    ///
    /// NOTE(review): `at` is bounds-checked against this buffer's logical
    /// length (`self.len`), but is then used as an absolute index into the
    /// shared vector without adding `self.base`. For buffers with a non-zero
    /// `base`, these two interpretations disagree — confirm the intended
    /// semantics of `at` against the `Buffer` trait contract.
    #[inline]
    fn write_at(&mut self, at: usize, bytes: &[u8]) -> bool {
        // SAFETY: The mutable access to the shared scratch vector is confined
        // to this call; see the note on `NoStd::scratch`.
        unsafe {
            if at.wrapping_add(bytes.len()) > self.len {
                return false;
            }

            let data = &mut *self.data.get();

            let Some(data) = data.get_mut(at..at.wrapping_add(bytes.len())) else {
                return false;
            };

            data.copy_from_slice(bytes);
            true
        }
    }

    /// Move the contents of `other` onto the end of this buffer.
    ///
    /// Returns `false` if the combined contents would exceed the backing
    /// storage's capacity.
    #[inline]
    fn copy_back<B>(&mut self, other: B) -> bool
    where
        B: Buffer,
    {
        let (ptr, from, len) = other.raw_parts();

        // SAFETY: All pointer arithmetic below stays within the backing
        // storage; the capacity check and the assert guard the copy bounds.
        unsafe {
            let data = &mut *self.data.get();
            // `other` lives in the same backing storage iff its base pointer
            // equals ours, in which case the source and destination regions
            // may overlap.
            let same = ptr::eq(ptr, data.as_ptr());
            let to = self.base.wrapping_add(self.len);

            if to.wrapping_add(len) > data.capacity() {
                return false;
            }

            if same {
                if from != to {
                    // The source region must lie within initialized data.
                    assert!(from.wrapping_add(len) <= data.len());
                    let from = data.as_ptr().wrapping_add(from);
                    let to = data.as_mut_ptr().wrapping_add(to);
                    // `ptr::copy` (memmove) is safe for overlapping regions.
                    ptr::copy(from, to, len);
                }

                // We forget the other buffer, so that it doesn't clobber the
                // underlying allocator data when dropped — its `Drop` would
                // truncate the shared vector back to its own base.
                mem::forget(other);
            } else {
                let from = ptr.wrapping_add(from);
                let to = data.as_mut_ptr().wrapping_add(to);
                // Distinct backing storages cannot overlap.
                ptr::copy_nonoverlapping(from, to, len);
            }

            self.len = self.len.wrapping_add(len);
            data.set_len(to.wrapping_add(len));
            true
        }
    }

    /// Number of bytes written to this buffer so far.
    #[inline(always)]
    fn len(&self) -> usize {
        self.len
    }

    /// Decompose into `(base pointer of the backing storage, offset of this
    /// buffer's region within it, length of this buffer's region)`.
    #[inline(always)]
    fn raw_parts(&self) -> (*const u8, usize, usize) {
        // SAFETY: Only a shared read of the vector's base pointer is taken.
        unsafe {
            let data = &*self.data.get();
            (data.as_ptr(), self.base, self.len)
        }
    }

    /// View this buffer's written bytes as a slice.
    #[inline(always)]
    unsafe fn as_slice(&self) -> &[u8] {
        // SAFETY: `base..base + len` covers bytes initialized through
        // `write`/`copy_back`; the caller upholds the trait's aliasing
        // requirements for the returned slice's lifetime.
        unsafe {
            let data = &*self.data.get();
            slice::from_raw_parts(data.as_ptr().wrapping_add(self.base), self.len)
        }
    }
}

impl<'a, const C: usize> Drop for Buf<'a, C> {
    fn drop(&mut self) {
        // SAFETY: During construction of the buffer, we fetched the length of
        // the vector (`base`), which is known to cover initialized bytes.
        // Truncating back to `base` only ever shrinks the length, so no
        // uninitialized data is exposed.
        //
        // NOTE(review): this assumes buffers are released in LIFO order —
        // dropping a buffer while a later allocation from the same `NoStd` is
        // still live would truncate that allocation's data as well; the
        // assert in `write` enforces the same stack discipline on writes.
        unsafe {
            let data = &mut *self.data.get();
            data.set_len(self.base);
        }
    }
}