// teaspoon/segment.rs

1// Copyright © 2024 Andrea Corbellini and contributors
2// SPDX-License-Identifier: BSD-3-Clause
3
4use crate::arena::Arena;
5use crate::ptr::SegmentHeaderPtr;
6use crate::sizing::Sizing;
7use core::alloc::Layout;
8use core::num::NonZero;
9use core::ptr::NonNull;
10
/// An in-memory view of one segment in the arena: its header location, its data
/// size, and links to the neighboring segments.
///
/// This is a decoded, by-value snapshot; changes to the links are not persisted
/// until `write()` is called on the segment.
#[derive(Copy, Clone, Debug)]
pub(crate) struct Segment<'a, S: Sizing> {
    // The arena this segment belongs to.
    arena: Arena<'a, S>,
    // Pointer to the segment header inside the arena.
    ptr: SegmentHeaderPtr<S>,
    // Size in bytes of the segment data (the header itself is not counted).
    size: usize,
    // Header pointer of the previous segment in the list, if any.
    prev_ptr: Option<SegmentHeaderPtr<S>>,
    // Header pointer of the next segment in the list, if any.
    next_ptr: Option<SegmentHeaderPtr<S>>,
}
19
impl<'a, S: Sizing> Segment<'a, S> {
    /// Find an optimal placement for a segment containing data with the given [`Layout`].
    ///
    /// This method returns a pointer contained in `slice` such that:
    ///
    /// 1. it allows to place a segment header directly followed by the data (with no padding in
    ///    between);
    /// 2. the header is correctly aligned (as specified by the layout of `S::SegmentHeaderRepr`);
    /// 3. the data is correctly aligned (as specified by `data_layout`).
    ///
    /// Note that the returned slice starts at the *header* position, while its length counts
    /// only the data bytes that follow the header (see [`usable()`](Self::usable)).
    ///
    /// If the segment cannot fit in `slice`, this method returns `None`.
    ///
    /// # Safety
    ///
    /// `slice` must point to a contiguous memory location that is part of the same [allocated
    /// object](https://doc.rust-lang.org/std/ptr/index.html#allocated-object).
    #[must_use]
    unsafe fn placement(slice: NonNull<[u8]>, data_layout: Layout) -> Option<NonNull<[u8]>> {
        let header_layout = Layout::new::<S::SegmentHeaderRepr>();
        // Minimum size before accounting for alignment padding: header immediately
        // followed by the data. `checked_add` guards against overflow.
        let mut required_size = header_layout.size().checked_add(data_layout.size())?;
        if slice.len() < required_size {
            return None;
        }

        let start = slice.cast::<u8>();

        // Find the minimum value for `header_pad` necessary to ensure that the header is properly
        // aligned.
        let header_pad = start.align_offset(header_layout.align());
        required_size = required_size.checked_add(header_pad)?;
        if slice.len() < required_size {
            return None;
        }

        // SAFETY (implicit in this `unsafe fn`): both offsets are within `required_size`, which
        // was just checked to fit in `slice`, and the caller guarantees that `slice` is a single
        // contiguous allocated object.
        let header_start = start.byte_add(header_pad);
        let header_end = header_start.byte_add(header_layout.size());

        // Find an initial (not optimized) value for `data_pad` necessary to ensure that the data
        // is properly aligned.
        let data_pad = header_end.align_offset(data_layout.align());
        required_size = required_size.checked_add(data_pad)?;
        if slice.len() < required_size {
            return None;
        }

        // Move the header to the "right", closer to the data, still keeping the header aligned.
        // This keeps the header directly adjacent to the data (invariant 1 above).
        debug_assert_eq!(
            data_pad % header_layout.align(),
            0,
            "expected data padding to be a multiple of the header alignment"
        );
        let header_start = header_start.byte_add(data_pad);

        // Ensure that the final data size is a multiple of `S::min_align()`
        let data_size = data_layout
            .size()
            .checked_next_multiple_of(S::min_align())?;
        // Account for the extra bytes added by rounding the data size up.
        required_size = required_size.checked_add(data_size - data_layout.size())?;
        if slice.len() < required_size {
            return None;
        }

        Some(NonNull::slice_from_raw_parts(header_start, data_size))
    }

    /// Create a new segment large enough to hold data with the given [`Layout`].
    ///
    /// This method returns `None` if the `slice` is not large enough to contain the segment.
    ///
    /// You need to call [`write()`](Self::write) in order to actually write the segment to memory.
    ///
    /// # Safety
    ///
    /// * `slice` must be part of `arena`.
    /// * `slice` must point to unallocated memory.
    #[must_use]
    pub(crate) unsafe fn new_in(
        arena: Arena<'a, S>,
        slice: NonNull<[u8]>,
        data_layout: Layout,
    ) -> Option<Self> {
        debug_assert!(arena.contains_bytes(slice.cast(), slice.len()));

        let placement = Self::placement(slice, data_layout)?;
        let ptr = SegmentHeaderPtr::new(placement.cast::<u8>());
        let size = placement.len();

        // Reject placements whose offset or size cannot be encoded in the header
        // representation chosen by `S`.
        if arena.segment_ptr_to_offset(ptr).get() > S::max_offset() || size > S::max_size() {
            return None;
        }

        // A freshly-created segment starts out unlinked; `connect()` establishes links.
        Some(Self {
            arena,
            ptr,
            size,
            prev_ptr: None,
            next_ptr: None,
        })
    }

    /// Return a pointer to the segment header.
    #[inline]
    pub(crate) const fn ptr(&self) -> SegmentHeaderPtr<S> {
        self.ptr
    }

    /// Returns a pointer to the segment data.
    ///
    /// The pointer is correctly aligned for the given [`Layout`].
    ///
    /// The layout does not necessarily need to be the same layout that was passed to [`new_in()`].
    ///
    /// # Panics
    ///
    /// If the layout does not fit into the usable space of the segment.
    pub(crate) fn data(&self, data_layout: Layout) -> NonNull<[u8]> {
        let usable = self.usable();
        let usable_start = usable.cast::<u8>();

        // Padding needed so that the returned data pointer satisfies `data_layout.align()`.
        let data_pad = usable_start.align_offset(data_layout.align());
        assert!(
            data_layout.size().saturating_add(data_pad) <= usable.len(),
            "segment too small to contain `data_layout`"
        );

        // SAFETY: Both `usable_start` and `data_start` are in bounds of `usable` (this is checked
        // by the `assert` above), and `usable` points to a contiguous memory location part of the
        // same allocated object.
        let data_start = unsafe { usable_start.byte_add(data_pad) };
        let data_size = usable.len() - data_pad;
        NonNull::slice_from_raw_parts(data_start, data_size)
    }

    /// Returns a pointer to the memory space of the segment that can contain data.
    ///
    /// This method is similar to [`data()`]. The main difference is that [`data()`] returns a
    /// pointer with size and alignment specified by a given [`Layout`], while this method returns
    /// *all* available data. As such, [`data()`] may return a subset of [`usable()`].
    pub(crate) fn usable(&self) -> NonNull<[u8]> {
        let header_layout = Layout::new::<S::SegmentHeaderRepr>();
        // SAFETY: It is assumed that `self.ptr` points to a contiguous memory location large
        // enough to contain the segment header and its data. This is enforced by `new_in()` and is
        // required by `read()`.
        let header_end = unsafe { self.ptr.byte_add(header_layout.size()) };
        NonNull::slice_from_raw_parts(header_end, self.size)
    }

    /// Returns a pointer to the memory space that comes right after the segment.
    ///
    /// The returned memory is essentially the unused memory space between this segment and the
    /// next one (if any), or between this segment and the end of the arena.
    pub(crate) fn trailing(&self) -> NonNull<[u8]> {
        let header_layout = Layout::new::<S::SegmentHeaderRepr>();
        // SAFETY: `self.ptr` points to a segment header followed by `self.size` data bytes,
        // all within the arena. This is enforced by `new_in()` and required by `read()`.
        let header_end = unsafe { self.ptr.byte_add(header_layout.size()) };
        let data_end = unsafe { header_end.byte_add(self.size) };

        // The trailing space is bounded by the next segment's header, or by the arena end if
        // this is the last segment.
        let limit = match self.next_ptr {
            Some(next_ptr) => next_ptr.as_nonnull(),
            None => self.arena.end(),
        };

        // TODO: Use `limit.sub_ptr(data_end)` once that's stabilized
        // SAFETY: Both `limit` and `data_end` point into the arena's allocation, and `limit`
        // is expected to be at or past `data_end` (checked in debug builds below).
        let unused_size = unsafe { limit.offset_from(data_end) };
        debug_assert!(unused_size >= 0, "negative offset");

        NonNull::slice_from_raw_parts(data_end, unused_size as usize)
    }

    /// Returns a pointer to the contiguous memory space that the segment may grow into.
    ///
    /// The returned memory space includes: the segment header, the segment data, and any unused
    /// memory that follows the segment.
    ///
    /// This can be thought as the concatenation of the segment header memory, the memory from
    /// [`usable()`], the memory from [`trailing()`].
    pub(crate) fn available(&self) -> NonNull<[u8]> {
        let header_start = self.ptr.as_nonnull();

        // Bounded by the next segment's header, or by the arena end if this is the last segment.
        let limit = match self.next_ptr {
            Some(next_ptr) => next_ptr.as_nonnull(),
            None => self.arena.end(),
        };

        // TODO: Use `limit.sub_ptr(header_start)` once that's stabilized
        // SAFETY: Both `limit` and `header_start` point into the arena's allocation, and `limit`
        // is expected to be at or past `header_start` (checked in debug builds below).
        let available_size = unsafe { limit.offset_from(header_start) };
        debug_assert!(available_size >= 0, "negative offset");

        NonNull::slice_from_raw_parts(header_start, available_size as usize)
    }

    /// Returns the previous segment in the list, read from memory, if any.
    pub(crate) fn prev(&self) -> Option<Self> {
        // SAFETY: `prev_ptr`, when set, refers to a segment header in the same arena
        // (established by `connect()` or decoded by `read()`).
        self.prev_ptr
            .map(|prev_ptr| unsafe { Self::read(self.arena, prev_ptr) })
    }

    /// Returns the header pointer of the previous segment, if any (no memory access).
    pub(crate) const fn prev_ptr(&self) -> Option<SegmentHeaderPtr<S>> {
        self.prev_ptr
    }

    /// Returns the next segment in the list, read from memory, if any.
    pub(crate) fn next(&self) -> Option<Self> {
        // SAFETY: `next_ptr`, when set, refers to a segment header in the same arena
        // (established by `connect()` or decoded by `read()`).
        self.next_ptr
            .map(|next_ptr| unsafe { Self::read(self.arena, next_ptr) })
    }

    /// Returns the header pointer of the next segment, if any (no memory access).
    pub(crate) const fn next_ptr(&self) -> Option<SegmentHeaderPtr<S>> {
        self.next_ptr
    }

    /// Read the segment header stored at `ptr` and decode it into a [`Segment`].
    ///
    /// # Safety
    ///
    /// `ptr` must point to a properly aligned, initialized segment header inside `arena`
    /// (i.e. one previously persisted via [`write()`](Self::write)).
    #[inline]
    #[must_use]
    pub(crate) unsafe fn read(arena: Arena<'a, S>, ptr: SegmentHeaderPtr<S>) -> Self {
        let header_ptr = ptr.cast::<S::SegmentHeaderRepr>();
        debug_assert!(header_ptr.is_aligned(), "pointer is not aligned");
        debug_assert!(
            arena.contains_segment(ptr),
            "pointer does not belong to the arena"
        );

        // SAFETY: The caller guarantees that `header_ptr` points to a valid, initialized
        // `S::SegmentHeaderRepr`.
        let repr_ref = unsafe { header_ptr.as_ref() };
        let header = S::read_segment_header(repr_ref);

        // Convert the arena-relative offsets stored in the header back into pointers.
        let prev_ptr = header
            .prev_offset
            .map(|offset| arena.segment_offset_to_ptr(offset));
        let next_ptr = header
            .next_offset
            .map(|offset| arena.segment_offset_to_ptr(offset));
        let size = header.size;

        Self {
            arena,
            ptr,
            size,
            prev_ptr,
            next_ptr,
        }
    }

    /// Persist this segment's header (links and size) to memory at `self.ptr`.
    ///
    /// The segment data is not touched; only the header is (re)written.
    #[inline]
    pub(crate) fn write(&self) {
        let mut header_ptr = self.ptr.cast::<S::SegmentHeaderRepr>();
        debug_assert!(header_ptr.is_aligned(), "pointer is not aligned");
        debug_assert!(
            self.arena.contains_segment(self.ptr),
            "pointer does not belong to the arena"
        );

        // Encode the neighbor pointers as arena-relative offsets for storage.
        let header = SegmentHeader {
            prev_offset: self
                .prev_ptr
                .map(|ptr| self.arena.segment_ptr_to_offset(ptr)),
            next_offset: self
                .next_ptr
                .map(|ptr| self.arena.segment_ptr_to_offset(ptr)),
            size: self.size,
        };

        // SAFETY: `self.ptr` points to properly aligned header space inside the arena
        // (checked above in debug builds; enforced by `new_in()` and required by `read()`).
        let repr_mut = unsafe { header_ptr.as_mut() };
        S::write_segment_header(repr_mut, &header);
    }

    /// Link `left` and `right` as adjacent neighbors and persist both headers.
    pub(crate) fn connect(left: &mut Self, right: &mut Self) {
        debug_assert_eq!(
            left.arena, right.arena,
            "segments do not belong to the same arena"
        );
        left.next_ptr = Some(right.ptr);
        right.prev_ptr = Some(left.ptr);
        left.write();
        right.write();
    }

    /// Unlink this segment from the list by re-linking its neighbors to each other.
    ///
    /// Only the neighbors' headers are rewritten; this segment's own header is left untouched
    /// in memory.
    pub(crate) fn disconnect(self) {
        if let Some(mut prev) = self.prev() {
            prev.next_ptr = self.next_ptr;
            prev.write();
        }
        if let Some(mut next) = self.next() {
            next.prev_ptr = self.prev_ptr;
            next.write();
        }
    }
}
303
/// The decoded contents of a segment header, as stored in the arena.
///
/// Neighbor links are stored as arena-relative offsets rather than pointers; they are
/// converted to/from [`SegmentHeaderPtr`] by `Segment::read()` and `Segment::write()`.
/// `NonZero` offsets let `Option` use the zero niche, so the `Option` wrapper costs no
/// extra space.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub(crate) struct SegmentHeader {
    // Offset of the previous segment's header, if any.
    pub(crate) prev_offset: Option<NonZero<usize>>,
    // Offset of the next segment's header, if any.
    pub(crate) next_offset: Option<NonZero<usize>>,
    // Size in bytes of the segment data (header not included).
    pub(crate) size: usize,
}