1use crate::arena::Arena;
5use crate::ptr::SegmentHeaderPtr;
6use crate::sizing::Sizing;
7use core::alloc::Layout;
8use core::num::NonZero;
9use core::ptr::NonNull;
10
/// In-memory view of one segment of an arena: a header placed inside the
/// arena's backing memory, the size of the data region that follows it, and
/// links to the neighbouring segments.
///
/// NOTE(review): this is a `Copy` snapshot; the authoritative list state is
/// the header bytes written by `write` — confirm callers re-`read` after a
/// neighbour mutates the list.
#[derive(Copy, Clone, Debug)]
pub(crate) struct Segment<'a, S: Sizing> {
    // Arena that owns the backing memory this segment lives in.
    arena: Arena<'a, S>,
    // Start of this segment's header within the arena.
    ptr: SegmentHeaderPtr<S>,
    // Size in bytes of the data region (excludes the header; see `placement`).
    size: usize,
    // Header pointer of the previous segment, if any.
    prev_ptr: Option<SegmentHeaderPtr<S>>,
    // Header pointer of the next segment, if any.
    next_ptr: Option<SegmentHeaderPtr<S>>,
}
19
20impl<'a, S: Sizing> Segment<'a, S> {
    /// Computes where a segment holding `data_layout`-shaped data can be
    /// placed inside `slice`.
    ///
    /// On success, returns a slice whose pointer is the aligned header
    /// position and whose length is the data size rounded up to
    /// `S::min_align()` (the header itself is not counted in the length);
    /// the data region starts immediately after the header. Returns `None`
    /// when `slice` cannot fit header + padding + data, or on size overflow.
    ///
    /// # Safety
    /// `slice` must describe a valid allocation; the `byte_add` calls below
    /// stay within `slice.len()` bytes of its start, which each preceding
    /// `required_size` check guarantees.
    #[must_use]
    unsafe fn placement(slice: NonNull<[u8]>, data_layout: Layout) -> Option<NonNull<[u8]>> {
        let header_layout = Layout::new::<S::SegmentHeaderRepr>();
        // Running total of bytes needed from the start of `slice`; every
        // addition is checked so overflow yields `None` instead of wrapping.
        let mut required_size = header_layout.size().checked_add(data_layout.size())?;
        if slice.len() < required_size {
            return None;
        }

        let start = slice.cast::<u8>();

        // Padding needed to align the header itself.
        let header_pad = start.align_offset(header_layout.align());
        required_size = required_size.checked_add(header_pad)?;
        if slice.len() < required_size {
            return None;
        }

        let header_start = start.byte_add(header_pad);
        let header_end = header_start.byte_add(header_layout.size());

        // Padding needed after the header so the data region is aligned.
        let data_pad = header_end.align_offset(data_layout.align());
        required_size = required_size.checked_add(data_pad)?;
        if slice.len() < required_size {
            return None;
        }

        // Holds because the header's size is a multiple of its alignment,
        // so `header_end` is header-aligned and any data-alignment padding
        // computed from it is a whole number of header-alignment steps.
        debug_assert_eq!(
            data_pad % header_layout.align(),
            0,
            "expected data padding to be a multiple of the header alignment"
        );
        // Shift the header forward by `data_pad` so the data region begins
        // right at the header's end; the assertion above keeps the shifted
        // header aligned.
        let header_start = header_start.byte_add(data_pad);

        // Round the data size up to the sizing scheme's minimum alignment.
        let data_size = data_layout
            .size()
            .checked_next_multiple_of(S::min_align())?;
        required_size = required_size.checked_add(data_size - data_layout.size())?;
        if slice.len() < required_size {
            return None;
        }

        Some(NonNull::slice_from_raw_parts(header_start, data_size))
    }
84
85 #[must_use]
96 pub(crate) unsafe fn new_in(
97 arena: Arena<'a, S>,
98 slice: NonNull<[u8]>,
99 data_layout: Layout,
100 ) -> Option<Self> {
101 debug_assert!(arena.contains_bytes(slice.cast(), slice.len()));
102
103 let placement = Self::placement(slice, data_layout)?;
104 let ptr = SegmentHeaderPtr::new(placement.cast::<u8>());
105 let size = placement.len();
106
107 if arena.segment_ptr_to_offset(ptr).get() > S::max_offset() || size > S::max_size() {
108 return None;
109 }
110
111 Some(Self {
112 arena,
113 ptr,
114 size,
115 prev_ptr: None,
116 next_ptr: None,
117 })
118 }
119
    /// Pointer to this segment's header within the arena.
    #[inline]
    pub(crate) const fn ptr(&self) -> SegmentHeaderPtr<S> {
        self.ptr
    }
125
126 pub(crate) fn data(&self, data_layout: Layout) -> NonNull<[u8]> {
136 let usable = self.usable();
137 let usable_start = usable.cast::<u8>();
138
139 let data_pad = usable_start.align_offset(data_layout.align());
140 assert!(
141 data_layout.size().saturating_add(data_pad) <= usable.len(),
142 "segment too small to contain `data_layout`"
143 );
144
145 let data_start = unsafe { usable_start.byte_add(data_pad) };
149 let data_size = usable.len() - data_pad;
150 NonNull::slice_from_raw_parts(data_start, data_size)
151 }
152
    /// The data region owned by this segment: `self.size` bytes starting
    /// immediately after the header.
    pub(crate) fn usable(&self) -> NonNull<[u8]> {
        let header_layout = Layout::new::<S::SegmentHeaderRepr>();
        // SAFETY: NOTE(review) — relies on the segment having been placed
        // with room for the header plus `self.size` data bytes (see
        // `placement`), so stepping past the header stays in the arena.
        let header_end = unsafe { self.ptr.byte_add(header_layout.size()) };
        NonNull::slice_from_raw_parts(header_end, self.size)
    }
166
167 pub(crate) fn trailing(&self) -> NonNull<[u8]> {
172 let header_layout = Layout::new::<S::SegmentHeaderRepr>();
173 let header_end = unsafe { self.ptr.byte_add(header_layout.size()) };
174 let data_end = unsafe { header_end.byte_add(self.size) };
175
176 let limit = match self.next_ptr {
177 Some(next_ptr) => next_ptr.as_nonnull(),
178 None => self.arena.end(),
179 };
180
181 let unused_size = unsafe { limit.offset_from(data_end) };
183 debug_assert!(unused_size >= 0, "negative offset");
184
185 NonNull::slice_from_raw_parts(data_end, unused_size as usize)
186 }
187
188 pub(crate) fn available(&self) -> NonNull<[u8]> {
196 let header_start = self.ptr.as_nonnull();
197
198 let limit = match self.next_ptr {
199 Some(next_ptr) => next_ptr.as_nonnull(),
200 None => self.arena.end(),
201 };
202
203 let available_size = unsafe { limit.offset_from(header_start) };
205 debug_assert!(available_size >= 0, "negative offset");
206
207 NonNull::slice_from_raw_parts(header_start, available_size as usize)
208 }
209
210 pub(crate) fn prev(&self) -> Option<Self> {
211 self.prev_ptr
212 .map(|prev_ptr| unsafe { Self::read(self.arena, prev_ptr) })
213 }
214
    /// Header pointer of the previous segment, or `None` at the list head.
    pub(crate) const fn prev_ptr(&self) -> Option<SegmentHeaderPtr<S>> {
        self.prev_ptr
    }
218
219 pub(crate) fn next(&self) -> Option<Self> {
220 self.next_ptr
221 .map(|next_ptr| unsafe { Self::read(self.arena, next_ptr) })
222 }
223
    /// Header pointer of the next segment, or `None` at the list tail.
    pub(crate) const fn next_ptr(&self) -> Option<SegmentHeaderPtr<S>> {
        self.next_ptr
    }
227
    /// Decodes the segment whose header lives at `ptr`, translating the
    /// stored neighbour offsets back into pointers.
    ///
    /// # Safety
    /// `ptr` must point at a properly aligned, initialized
    /// `S::SegmentHeaderRepr` inside `arena` (both conditions are checked
    /// in debug builds below).
    #[inline]
    #[must_use]
    pub(crate) unsafe fn read(arena: Arena<'a, S>, ptr: SegmentHeaderPtr<S>) -> Self {
        let header_ptr = ptr.cast::<S::SegmentHeaderRepr>();
        debug_assert!(header_ptr.is_aligned(), "pointer is not aligned");
        debug_assert!(
            arena.contains_segment(ptr),
            "pointer does not belong to the arena"
        );

        // SAFETY: the caller guarantees an aligned, initialized header at
        // `ptr` (see `# Safety`).
        let repr_ref = unsafe { header_ptr.as_ref() };
        let header = S::read_segment_header(repr_ref);

        // Stored offsets are arena-relative; rebuild the pointers.
        let prev_ptr = header
            .prev_offset
            .map(|offset| arena.segment_offset_to_ptr(offset));
        let next_ptr = header
            .next_offset
            .map(|offset| arena.segment_offset_to_ptr(offset));
        let size = header.size;

        Self {
            arena,
            ptr,
            size,
            prev_ptr,
            next_ptr,
        }
    }
257
    /// Serializes this segment's neighbour links and size into the header
    /// bytes at `self.ptr` via `S::write_segment_header`.
    #[inline]
    pub(crate) fn write(&self) {
        // `mut` because `NonNull::as_mut` below takes `&mut self`.
        let mut header_ptr = self.ptr.cast::<S::SegmentHeaderRepr>();
        debug_assert!(header_ptr.is_aligned(), "pointer is not aligned");
        debug_assert!(
            self.arena.contains_segment(self.ptr),
            "pointer does not belong to the arena"
        );

        // Translate in-memory neighbour pointers back into arena offsets.
        let header = SegmentHeader {
            prev_offset: self
                .prev_ptr
                .map(|ptr| self.arena.segment_ptr_to_offset(ptr)),
            next_offset: self
                .next_ptr
                .map(|ptr| self.arena.segment_ptr_to_offset(ptr)),
            size: self.size,
        };

        // SAFETY: NOTE(review) — relies on `self.ptr` referencing aligned,
        // writable header storage inside the arena (debug-checked above);
        // confirm callers guarantee no aliasing access during the write.
        let repr_mut = unsafe { header_ptr.as_mut() };
        S::write_segment_header(repr_mut, &header);
    }
280
281 pub(crate) fn connect(left: &mut Self, right: &mut Self) {
282 debug_assert_eq!(
283 left.arena, right.arena,
284 "segments do not belong to the same arena"
285 );
286 left.next_ptr = Some(right.ptr);
287 right.prev_ptr = Some(left.ptr);
288 left.write();
289 right.write();
290 }
291
292 pub(crate) fn disconnect(self) {
293 if let Some(mut prev) = self.prev() {
294 prev.next_ptr = self.next_ptr;
295 prev.write();
296 }
297 if let Some(mut next) = self.next() {
298 next.prev_ptr = self.prev_ptr;
299 next.write();
300 }
301 }
302}
303
/// Decoded form of a segment's on-arena header, as produced by
/// `S::read_segment_header` and consumed by `S::write_segment_header`.
///
/// Offsets are arena-relative (see `Segment::write`, which fills them via
/// `Arena::segment_ptr_to_offset`); `None` means no neighbour in that
/// direction.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub(crate) struct SegmentHeader {
    // Arena offset of the previous segment's header, if any.
    pub(crate) prev_offset: Option<NonZero<usize>>,
    // Arena offset of the next segment's header, if any.
    pub(crate) next_offset: Option<NonZero<usize>>,
    // Size in bytes of the segment's data region.
    pub(crate) size: usize,
}