// static_alloc/unsync/chain.rs

use core::{
    alloc::{Layout, LayoutErr},
    cell::Cell,
    mem::MaybeUninit,
    ptr::{self, NonNull},
};

use alloc::{
    alloc::alloc_zeroed,
    boxed::Box,
};

use crate::bump::Failure;
use crate::unsync::bump::MemBump;
use crate::leaked::LeakBox;

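/// Error returned when a new link for a [`Chain`] could not be allocated.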
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct TryNewError {
    inner: RawAllocError,
}

impl TryNewError {
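    /// The size, in bytes, of the allocation that could not be satisfied.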
    pub const fn allocation_size(&self) -> usize {
        self.inner.allocation_size()
    }
}

type LinkPtr = Option<NonNull<Link>>;

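/// A heap-allocated node of the chain: the pointer to the next link,
/// followed by the link's own bump memory.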
struct Link {
    next: Cell<LinkPtr>,
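    /// The bump allocator owned by this link. `MemBump` is unsized, so
    /// this field must come last.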
    bump: MemBump,
}

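/// A chain of bump allocators.
///
/// Allocations are served from the most recently added link; see
/// [`Chain::chain`] for prepending a fresh link once the current one is
/// exhausted. A rough usage sketch (an assumed example, not from the
/// original docs; `LeakBox::write` is taken from the crate's `leaked`
/// module):
///
/// ```ignore
/// let chain = Chain::new(4096).unwrap();
/// // Reserve an uninitialized slot, then initialize it.
/// let slot = chain.bump_box::<u32>().unwrap();
/// let value = LeakBox::write(slot, 42);
/// assert_eq!(*value, 42);
/// // When the link fills up, prepend a fresh one; old links stay alive.
/// chain.chain(Chain::new(4096).unwrap());
/// ```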
pub struct Chain {
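    /// The head of the chain, if any link has been allocated.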
    root: Cell<LinkPtr>,
}

impl Chain {
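    /// Create a chain with a single link of `size` bytes of bump memory.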
    pub fn new(size: usize) -> Result<Self, TryNewError> {
        let link = Link::alloc(size).map_err(|e| TryNewError { inner: e })?;
        Ok(Chain {
            root: Cell::new(Some(link)),
        })
    }

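    /// Reserve a slot for a value of type `T` in the active link.
    ///
    /// Fails with `Failure::Exhausted` if no link exists; allocation may
    /// also fail if the active link has no room left.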
    pub fn bump_box<'bump, T: 'bump>(&'bump self)
        -> Result<LeakBox<'bump, MaybeUninit<T>>, Failure>
    {
        let root = self.root().ok_or(Failure::Exhausted)?;
        root.as_bump().bump_box()
    }

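    /// Prepend the links of `new` to this chain.
    ///
    /// The root of `new` becomes the new active allocator and the previous
    /// links of `self` become its tail. If `new` is empty, `self` is left
    /// unchanged.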
    pub fn chain(&self, new: Chain) {
        let self_bump = self.root.take();
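        // The head we just took is reattached below, either behind the
        // incoming links or unchanged if `new` is empty.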

        match new.root() {
            None => {
                self.root.set(self_bump)
            }
            Some(root) => {
                unsafe { root.set_next(self_bump) };
                self.root.set(new.root.take())
            }
        }
    }

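    /// The capacity, in bytes, of the active link.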
    pub fn capacity(&self) -> usize {
        match self.root() {
            None => 0,
            Some(root) => root.as_bump().capacity(),
        }
    }

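    /// The number of bytes still unused in the active link.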
    pub fn remaining_capacity(&self) -> usize {
        match self.root() {
            None => 0,
            Some(root) => self.capacity() - root.as_bump().level().0,
        }
    }

    fn root(&self) -> Option<&Link> {
        unsafe {
            let bump_ptr = self.root.get()?.as_ptr();
            Some(&*bump_ptr)
        }
    }
}

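/// The raw failure of a link allocation, recording the requested size.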
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub(crate) struct RawAllocError {
    allocation_size: usize,
    kind: RawAllocFailure,
}

#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
enum RawAllocFailure {
    Exhausted,
    Layout,
}

impl Link {
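    /// Replace the `next` pointer, dropping any link it previously held.
    ///
    /// # Safety
    /// `next` must be `None` or a unique pointer to a live `Link` whose
    /// allocation can be reclaimed with `Box::from_raw`, as replaced
    /// pointers are dropped that way.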
    pub(crate) unsafe fn set_next(&self, next: LinkPtr) {
        if let Some(next) = self.next.replace(next) {
            let _ = Box::from_raw(next.as_ptr());
        }
    }

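    /// Detach the tail of this link and return ownership of it.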
    pub(crate) fn take_next(&self) -> Option<Box<Link>> {
        let ptr = self.next.take()?;
        Some(unsafe {
            Box::from_raw(ptr.as_ptr())
        })
    }

    pub(crate) fn as_bump(&self) -> &MemBump {
        &self.bump
    }

    pub(crate) fn layout_from_size(size: usize) -> Result<Layout, LayoutErr> {
        Layout::new::<Cell<LinkPtr>>()
            .extend(MemBump::layout_from_size(size)?)
            .map(|layout| layout.0)
    }

    unsafe fn alloc_raw(layout: Layout) -> Result<NonNull<u8>, RawAllocError> {
        let ptr = alloc_zeroed(layout);
        NonNull::new(ptr).ok_or_else(|| {
            RawAllocError::new(layout.size(), RawAllocFailure::Exhausted)
        })
    }

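    /// Allocate a zeroed link with room for `capacity` bytes of bump
    /// memory, using the layout from `layout_from_size`.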
    pub(crate) fn alloc(capacity: usize) -> Result<NonNull<Self>, RawAllocError> {
        let layout = Self::layout_from_size(capacity)
            .map_err(|_| {
                RawAllocError::new(capacity, RawAllocFailure::Layout)
            })?;

        unsafe {
            let raw = Link::alloc_raw(layout)?;
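            // `Link` is a slice-based DST: fabricate a fat pointer whose
            // metadata is the byte capacity of the trailing bump region.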
            let raw_mut: *mut [Cell<MaybeUninit<u8>>] =
                ptr::slice_from_raw_parts_mut(raw.cast().as_ptr(), capacity);
            Ok(NonNull::new_unchecked(raw_mut as *mut Link))
        }
    }
}

impl RawAllocError {
    const fn new(allocation_size: usize, kind: RawAllocFailure) -> Self {
        Self {
            allocation_size,
            kind,
        }
    }

    pub(crate) const fn allocation_size(&self) -> usize {
        self.allocation_size
    }
}

impl Drop for Chain {
    fn drop(&mut self) {
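        // Free the links iteratively rather than letting `Link::drop`
        // recurse through the whole chain.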
        let mut current = self.root.take();
        while let Some(non_null) = current {
            let link = unsafe { Box::from_raw(non_null.as_ptr()) };
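            // Detach the tail before `link` drops at the end of this
            // iteration, so its own drop has nothing left to free.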
            current = link.next.take();
        }
    }
}

impl Drop for Link {
    fn drop(&mut self) {
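        // Unlink iteratively here as well: dropping a long chain through
        // nested boxes could otherwise overflow the stack.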
        let mut current = self.take_next();
        while let Some(link) = current {
            current = link.take_next();
        }
    }
}