use core::{alloc::Layout, cell::Cell, fmt, mem::align_of, num::NonZeroUsize, ops::Range, ptr::NonNull};

use crate::{
    ChunkHeader, ErrorBehavior, SupportedMinimumAlignment,
    alloc::{AllocError, Allocator},
    bumping::{BumpProps, BumpUp, MIN_CHUNK_ALIGN, bump_down, bump_prepare_down, bump_prepare_up, bump_up},
    chunk_size::{ChunkSize, ChunkSizeHint},
    layout::LayoutProps,
    polyfill::{self, non_null},
    stats::Stats,
};

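/// A raw, `Copy` handle to a chunk of a bump allocator.
///
/// This is a thin wrapper around a pointer to the [`ChunkHeader`] stored
/// within the chunk itself. `UP` selects the bump direction, and
/// `GUARANTEED_ALLOCATED` records at the type level whether this handle is
/// known to point at real memory rather than the `ChunkHeader::UNALLOCATED`
/// sentinel.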
#[repr(transparent)]
pub(crate) struct RawChunk<A, const UP: bool, const GUARANTEED_ALLOCATED: bool> {
    header: NonNull<ChunkHeader<A>>,
}

impl<A, const UP: bool, const GUARANTEED_ALLOCATED: bool> Copy for RawChunk<A, UP, GUARANTEED_ALLOCATED> {}

impl<A, const UP: bool, const GUARANTEED_ALLOCATED: bool> Clone for RawChunk<A, UP, GUARANTEED_ALLOCATED> {
    #[inline(always)]
    fn clone(&self) -> Self {
        *self
    }
}

impl<A, const UP: bool, const GUARANTEED_ALLOCATED: bool> PartialEq for RawChunk<A, UP, GUARANTEED_ALLOCATED> {
    #[inline(always)]
    fn eq(&self, other: &Self) -> bool {
        self.header == other.header
    }

    #[inline(always)]
    fn ne(&self, other: &Self) -> bool {
        self.header != other.header
    }
}

impl<A, const UP: bool, const GUARANTEED_ALLOCATED: bool> Eq for RawChunk<A, UP, GUARANTEED_ALLOCATED> {}

impl<A, const UP: bool, const GUARANTEED_ALLOCATED: bool> fmt::Debug for RawChunk<A, UP, GUARANTEED_ALLOCATED> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("RawChunk").field(&self.header.as_ptr().cast::<u8>()).finish()
    }
}

impl<A, const UP: bool> RawChunk<A, UP, false> {
    pub(crate) const UNALLOCATED: Self = Self {
        header: ChunkHeader::UNALLOCATED.cast(),
    };
}

impl<A, const UP: bool> RawChunk<A, UP, true> {
    pub(crate) fn new_in<E: ErrorBehavior>(chunk_size: ChunkSize<A, UP>, prev: Option<Self>, allocator: A) -> Result<Self, E>
    where
        A: Allocator,
    {
        let layout = chunk_size.layout().ok_or_else(E::capacity_overflow)?;

        let allocation = match allocator.allocate(layout) {
            Ok(ok) => ok,
            Err(AllocError) => return Err(E::allocation(layout)),
        };

        let ptr = non_null::as_non_null_ptr(allocation);
        let size = allocation.len();

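        // The allocator may have handed us more than `layout.size()` bytes;
        // adjust that size so the invariants asserted below still hold.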
        let size = chunk_size.align_allocation_size(size);

        debug_assert!(size >= layout.size());
        debug_assert!(size % MIN_CHUNK_ALIGN == 0);

        let prev = Cell::new(prev.map(|c| c.header));
        let next = Cell::new(None);

        let header = unsafe {
            if UP {
                // Bumping upwards: the header sits at the start of the chunk
                // and the content region follows it.
                let header = ptr.cast::<ChunkHeader<A>>();

                header.as_ptr().write(ChunkHeader {
                    pos: Cell::new(header.add(1).cast()),
                    end: ptr.add(size),
                    prev,
                    next,
                    allocator,
                });

                header
            } else {
                // Bumping downwards: the header sits at the end of the chunk
                // and the content region precedes it.
                let header = ptr.add(size).cast::<ChunkHeader<A>>().sub(1);

                header.as_ptr().write(ChunkHeader {
                    pos: Cell::new(header.cast()),
                    end: ptr,
                    prev,
                    next,
                    allocator,
                });

                header
            }
        };

        Ok(RawChunk { header })
    }

    pub(crate) fn coerce_guaranteed_allocated<const NEW_GUARANTEED_ALLOCATED: bool>(
        self,
    ) -> RawChunk<A, UP, NEW_GUARANTEED_ALLOCATED> {
        RawChunk { header: self.header }
    }

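    /// Allocates a new chunk big enough to fit `layout`, then links it in as
    /// this chunk's successor. `self` must currently be the last chunk.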
    pub(crate) fn append_for<B: ErrorBehavior>(self, layout: Layout) -> Result<Self, B>
    where
        A: Allocator + Clone,
    {
        debug_assert!(self.next().is_none());

        let required_size = ChunkSizeHint::for_capacity(layout).ok_or_else(B::capacity_overflow)?;
        let grown_size = self.grow_size()?;
        let size = required_size.max(grown_size).calc_size().ok_or_else(B::capacity_overflow)?;

        let allocator = unsafe { self.header.as_ref().allocator.clone() };
        let new_chunk = RawChunk::new_in::<B>(size, Some(self), allocator)?;

        self.set_next(Some(new_chunk));
        Ok(new_chunk)
    }

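    /// # Safety
    ///
    /// `ptr` must point into this chunk's content range (its one-past-the-end
    /// position is allowed).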
    #[inline(always)]
    pub(crate) unsafe fn set_pos(self, ptr: NonNull<u8>) {
        unsafe { self.set_pos_addr(ptr.addr().get()) };
    }

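    /// # Safety
    ///
    /// `addr` must be a nonzero address within this chunk's content range
    /// (its one-past-the-end address is allowed).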
    #[inline(always)]
    pub(crate) unsafe fn set_pos_addr(self, addr: usize) {
        unsafe { self.header.as_ref().pos.set(self.with_addr(addr)) };
    }

    #[inline(always)]
    pub(crate) fn reset(self) {
        unsafe {
            if UP {
                self.set_pos(self.content_start());
            } else {
                self.set_pos(self.content_end());
            }
        }
    }

    #[inline(always)]
    pub(crate) fn allocator<'a>(self) -> &'a A {
        unsafe { &self.header.as_ref().allocator }
    }

    #[inline(always)]
    pub(crate) fn set_prev(self, value: Option<Self>) {
        unsafe {
            self.header.as_ref().prev.set(value.map(|c| c.header));
        }
    }

    #[inline(always)]
    pub(crate) fn set_next(self, value: Option<Self>) {
        unsafe {
            self.header.as_ref().next.set(value.map(|c| c.header));
        }
    }

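    /// # Safety
    ///
    /// This chunk must not be used in any way afterwards: the allocator is
    /// moved out of the header and the chunk's memory is returned to it.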
    pub(crate) unsafe fn deallocate(self)
    where
        A: Allocator,
    {
        let ptr = self.chunk_start();
        let layout = self.layout();

        unsafe {
            let allocator_ptr = &raw const (*self.header.as_ptr()).allocator;
            let allocator = allocator_ptr.read();
            allocator.deallocate(ptr, layout);
        }
    }

    #[inline(always)]
    fn after_header(self) -> NonNull<u8> {
        unsafe { self.header.add(1).cast() }
    }

    #[inline(always)]
    pub(crate) fn chunk_start(self) -> NonNull<u8> {
        unsafe { if UP { self.header.cast() } else { self.header.as_ref().end } }
    }

    #[inline(always)]
    pub(crate) fn chunk_end(self) -> NonNull<u8> {
        unsafe { if UP { self.header.as_ref().end } else { self.after_header() } }
    }

    #[inline(always)]
    pub(crate) fn content_start(self) -> NonNull<u8> {
        if UP { self.after_header() } else { self.chunk_start() }
    }

    #[inline(always)]
    pub(crate) fn content_end(self) -> NonNull<u8> {
        if UP { self.chunk_end() } else { self.header.cast() }
    }

    #[inline(always)]
    pub(crate) fn capacity(self) -> usize {
        let start = self.content_start().addr().get();
        let end = self.content_end().addr().get();
        end - start
    }

    #[inline(always)]
    fn allocated_range(self) -> Range<NonNull<u8>> {
        if UP {
            self.content_start()..self.pos()
        } else {
            self.pos()..self.content_end()
        }
    }

    #[inline(always)]
    pub(crate) fn contains_addr_or_end(self, addr: usize) -> bool {
        let start = self.content_start().addr().get();
        let end = self.content_end().addr().get();
        addr >= start && addr <= end
    }

    #[inline(always)]
    pub(crate) fn allocated(self) -> usize {
        let range = self.allocated_range();
        let start = range.start.addr().get();
        let end = range.end.addr().get();
        end - start
    }

    #[inline(always)]
    pub(crate) fn remaining(self) -> usize {
        let range = self.remaining_range();
        let start = range.start.addr().get();
        let end = range.end.addr().get();
        end - start
    }

    pub(crate) fn remaining_range(self) -> Range<NonNull<u8>> {
        if UP {
            let start = self.pos();
            let end = self.content_end();
            start..end
        } else {
            let start = self.content_start();
            let end = self.pos();
            start..end
        }
    }

    #[inline(always)]
    pub(crate) fn size(self) -> NonZeroUsize {
        let start = self.chunk_start().addr().get();
        let end = self.chunk_end().addr().get();
        unsafe { NonZeroUsize::new_unchecked(end - start) }
    }

    #[inline(always)]
    pub(crate) fn layout(self) -> Layout {
        unsafe { Layout::from_size_align_unchecked(self.size().get(), align_of::<ChunkHeader<A>>()) }
    }

    #[inline(always)]
    fn grow_size<B: ErrorBehavior>(self) -> Result<ChunkSizeHint<A, UP>, B> {
        let Some(size) = self.size().get().checked_mul(2) else {
            return Err(B::capacity_overflow());
        };

        Ok(ChunkSizeHint::<A, UP>::new(size))
    }
}

impl<A, const UP: bool, const GUARANTEED_ALLOCATED: bool> RawChunk<A, UP, GUARANTEED_ALLOCATED> {
    pub(crate) fn is_allocated(self) -> bool {
        GUARANTEED_ALLOCATED || self.header.cast() != ChunkHeader::UNALLOCATED
    }

    pub(crate) fn is_unallocated(self) -> bool {
        !GUARANTEED_ALLOCATED && self.header.cast() == ChunkHeader::UNALLOCATED
    }

    pub(crate) fn guaranteed_allocated(self) -> Option<RawChunk<A, UP, true>> {
        if self.is_unallocated() {
            return None;
        }

        Some(RawChunk { header: self.header })
    }

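    /// # Safety
    ///
    /// This chunk must actually be allocated, i.e. `self.is_allocated()`
    /// must be true.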
    pub(crate) unsafe fn guaranteed_allocated_unchecked(self) -> RawChunk<A, UP, true> {
        debug_assert!(self.is_allocated());
        RawChunk { header: self.header }
    }

    pub(crate) fn not_guaranteed_allocated(self) -> RawChunk<A, UP, false> {
        RawChunk { header: self.header }
    }

    pub(crate) fn header(self) -> NonNull<ChunkHeader<A>> {
        self.header
    }

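    /// # Safety
    ///
    /// `header` must point to a valid chunk header (or be the
    /// `ChunkHeader::UNALLOCATED` sentinel when `GUARANTEED_ALLOCATED`
    /// is false).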
    pub(crate) const unsafe fn from_header(header: NonNull<ChunkHeader<A>>) -> Self {
        Self { header }
    }

    #[inline(always)]
    pub(crate) fn bump_props<M, L>(self, _: M, layout: L) -> BumpProps
    where
        M: SupportedMinimumAlignment,
        L: LayoutProps,
    {
        debug_assert!(non_null::is_aligned_to(self.pos(), M::MIN_ALIGN));

        let pos = self.pos().addr().get();
        let end = unsafe { self.header.as_ref() }.end.addr().get();

        let start = if UP { pos } else { end };
        let end = if UP { end } else { pos };

        debug_assert!(if self.is_unallocated() { end - start == 0 } else { true });

        BumpProps {
            start,
            end,
            layout: *layout,
            min_align: M::MIN_ALIGN,
            align_is_const: L::ALIGN_IS_CONST,
            size_is_const: L::SIZE_IS_CONST,
            size_is_multiple_of_align: L::SIZE_IS_MULTIPLE_OF_ALIGN,
        }
    }

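    /// Attempts to allocate `layout` within this chunk, bumping the position
    /// pointer up or down depending on `UP`. Returns `None` if the remaining
    /// space does not fit the layout.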
    #[inline(always)]
    pub(crate) fn alloc<M, L>(self, minimum_alignment: M, layout: L) -> Option<NonNull<u8>>
    where
        M: SupportedMinimumAlignment,
        L: LayoutProps,
    {
        if !GUARANTEED_ALLOCATED && layout.size() == 0 {
            // A possibly unallocated chunk has no usable memory, but a
            // zero-sized allocation doesn't need any; hand out a dangling,
            // suitably aligned pointer instead.
            return Some(polyfill::layout::dangling(*layout));
        }

        let props = self.bump_props(minimum_alignment, layout);

        unsafe {
            if UP {
                let BumpUp { new_pos, ptr } = bump_up(props)?;

                // A successful bump means this chunk has capacity, so it
                // cannot be the unallocated sentinel.
                let chunk = self.guaranteed_allocated_unchecked();
                chunk.set_pos_addr(new_pos);
                Some(chunk.with_addr(ptr))
            } else {
                let ptr = bump_down(props)?;

                // Same reasoning as above: the bump succeeded.
                let chunk = self.guaranteed_allocated_unchecked();
                let ptr = chunk.with_addr(ptr);
                chunk.set_pos(ptr);
                Some(ptr)
            }
        }
    }

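    /// Like [`alloc`](Self::alloc), except the position pointer is left
    /// untouched; only the prospective allocation's pointer is computed.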
    #[inline(always)]
    pub(crate) fn prepare_allocation<M, L>(self, minimum_alignment: M, layout: L) -> Option<NonNull<u8>>
    where
        M: SupportedMinimumAlignment,
        L: LayoutProps,
    {
        let props = self.bump_props(minimum_alignment, layout);

        unsafe {
            if UP {
                let BumpUp { ptr, .. } = bump_up(props)?;
                Some(self.with_addr(ptr))
            } else {
                let ptr = bump_down(props)?;
                Some(self.with_addr(ptr))
            }
        }
    }

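    /// Computes the range this chunk could devote to an allocation of
    /// `layout` without moving the position pointer. Returns `None` if the
    /// layout doesn't fit. `layout.size()` must not be zero.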
    #[inline(always)]
    pub(crate) fn prepare_allocation_range(
        self,
        minimum_alignment: impl SupportedMinimumAlignment,
        layout: impl LayoutProps,
    ) -> Option<Range<NonNull<u8>>> {
        debug_assert_ne!(layout.size(), 0);
        let props = self.bump_props(minimum_alignment, layout);

        unsafe {
            if UP {
                let range = bump_prepare_up(props)?;
                Some(self.with_addr_range(range))
            } else {
                let range = bump_prepare_down(props)?;
                Some(self.with_addr_range(range))
            }
        }
    }

    #[inline(always)]
    pub(crate) fn pos(self) -> NonNull<u8> {
        unsafe { self.header.as_ref().pos.get() }
    }

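    /// # Safety
    ///
    /// `addr` must be nonzero, and for an allocated chunk it must lie within
    /// the chunk (the one-past-the-end address is allowed).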
    #[inline(always)]
    pub(crate) unsafe fn with_addr(self, addr: usize) -> NonNull<u8> {
        unsafe {
            debug_assert!(if let Some(chunk) = self.guaranteed_allocated() {
                chunk.contains_addr_or_end(addr)
            } else {
                true
            });

            let ptr = self.header.cast();
            let addr = NonZeroUsize::new_unchecked(addr);
            ptr.with_addr(addr)
        }
    }

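    /// # Safety
    ///
    /// Both `range.start` and `range.end` must satisfy the contract of
    /// [`with_addr`](Self::with_addr), and `range.start` must not exceed
    /// `range.end`.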
    #[inline(always)]
    pub(crate) unsafe fn with_addr_range(self, range: Range<usize>) -> Range<NonNull<u8>> {
        unsafe {
            debug_assert!(range.start <= range.end);
            let start = self.with_addr(range.start);
            let end = self.with_addr(range.end);
            start..end
        }
    }

    #[inline(always)]
    pub(crate) fn prev(self) -> Option<RawChunk<A, UP, true>> {
        unsafe { Some(RawChunk::from_header(self.header.as_ref().prev.get()?)) }
    }

    #[inline(always)]
    pub(crate) fn next(self) -> Option<RawChunk<A, UP, true>> {
        unsafe { Some(RawChunk::from_header(self.header.as_ref().next.get()?)) }
    }

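    /// Calls `f` on every chunk before this one, from the nearest to the
    /// farthest. The `prev` link is read before `f` runs, so `f` is free to
    /// deallocate the chunk it is handed.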
    pub(crate) fn for_each_prev(self, mut f: impl FnMut(RawChunk<A, UP, true>)) {
        let mut iter = self.prev();

        while let Some(chunk) = iter {
            iter = chunk.prev();
            f(chunk);
        }
    }

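    /// Calls `f` on every chunk after this one, from the nearest to the
    /// farthest. The `next` link is read before `f` runs, so `f` is free to
    /// deallocate the chunk it is handed.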
    pub(crate) fn for_each_next(self, mut f: impl FnMut(RawChunk<A, UP, true>)) {
        let mut iter = self.next();

        while let Some(chunk) = iter {
            iter = chunk.next();
            f(chunk);
        }
    }

    #[inline(always)]
    pub(crate) fn stats<'a>(self) -> Stats<'a, A, UP, GUARANTEED_ALLOCATED> {
        Stats::from_raw_chunk(self)
    }
}