use core::{
    alloc::Layout,
    marker::PhantomData,
    mem::ManuallyDrop,
    ptr::{self, NonNull},
};

use thiserror::Error;

use crate::Allocator;

/// A low-level buffer of uninitialized `T`s allocated from `A`.
///
/// `RawBuffer` owns the allocation but never reads, writes, or drops the
/// elements; callers are responsible for initializing and dropping them.
#[derive(Debug)]
pub struct RawBuffer<T, A: Allocator> {
    inner: RawBufferInner<A>,
    _marker: PhantomData<T>,
}

#[derive(Debug)]
struct RawBufferInner<A> {
    ptr: NonNull<u8>,
    cap: usize,
    alloc: A,
}

/// The error type returned by the fallible allocation methods.
#[derive(Copy, Clone, PartialEq, Eq, Debug, Error)]
pub enum TryReserveError {
    /// The computed capacity exceeded the maximum supported size.
    #[error("capacity overflow")]
    CapacityOverflow,

    /// The allocator reported a failure for the given layout.
    #[error("allocation error for layout {layout:?}")]
    AllocError {
        /// The layout of the allocation request that failed.
        layout: Layout,
    },
}

impl<T, A: Allocator> RawBuffer<T, A> {
    /// Creates an empty buffer without allocating.
    #[inline]
    pub const fn new_in(alloc: A) -> Self {
        Self { inner: RawBufferInner::new_in(alloc, align_of::<T>()), _marker: PhantomData }
    }

    /// Creates a buffer with space for `capacity` elements, panicking on
    /// capacity overflow or allocation failure.
    #[inline]
    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
        Self { inner: RawBufferInner::with_capacity_in::<T>(capacity, alloc), _marker: PhantomData }
    }

    /// Fallible version of [`with_capacity_in`](Self::with_capacity_in).
    #[inline]
    pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
        RawBufferInner::try_with_capacity_in::<T>(capacity, alloc)
            .map(|inner| Self { inner, _marker: PhantomData })
    }
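
    // A minimal usage sketch for the fallible constructor; `len` and `alloc`
    // stand in for a requested capacity and a value of some concrete
    // `A: Allocator`, neither of which is defined in this module:
    //
    //     match RawBuffer::<u32, A>::try_with_capacity_in(len, alloc) {
    //         Ok(buf) => { /* buf.ptr() and buf.capacity() are ready to use */ }
    //         Err(TryReserveError::CapacityOverflow) => { /* len too large */ }
    //         Err(TryReserveError::AllocError { layout }) => { /* allocator failed */ }
    //     }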

    /// Decomposes the buffer into its raw parts: the pointer to the
    /// allocation, the capacity in elements, and the allocator.
    #[must_use = "losing the pointer will leak memory"]
    pub fn into_raw_parts(self) -> (*mut T, usize, A) {
        let me = ManuallyDrop::new(self);
        let capacity = me.capacity();
        let ptr = me.ptr();
        // `me` is never dropped, so the allocator can be moved out by value.
        let alloc = unsafe { ptr::read(me.allocator()) };
        (ptr, capacity, alloc)
    }

    /// Rebuilds a buffer from the parts returned by
    /// [`into_raw_parts`](Self::into_raw_parts).
    ///
    /// # Safety
    ///
    /// `ptr`, `capacity`, and `alloc` must originate from a matching call to
    /// `into_raw_parts`, or otherwise describe a live allocation owned by
    /// `alloc` with the layout of `[T; capacity]`.
    #[inline]
    pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self {
        Self {
            inner: unsafe { RawBufferInner::from_raw_parts_in(ptr.cast(), capacity, alloc) },
            _marker: PhantomData,
        }
    }
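
    // Round-trip sketch (assumes a `buf: RawBuffer<u64, A>` built with one of
    // the constructors above):
    //
    //     let (ptr, cap, alloc) = buf.into_raw_parts();
    //     // ... hand ptr/cap across a type-erasure or FFI boundary ...
    //     let buf = unsafe { RawBuffer::<u64, A>::from_raw_parts_in(ptr, cap, alloc) };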

    /// Returns a raw pointer to the start of the allocation.
    #[inline]
    pub fn ptr(&self) -> *mut T {
        self.inner.ptr()
    }

    /// Returns the start of the allocation as a [`NonNull`].
    #[inline]
    pub fn non_null(&self) -> NonNull<T> {
        self.inner.non_null()
    }

    /// Returns the capacity in elements, or `usize::MAX` if `T` is zero-sized.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.inner.capacity(size_of::<T>())
    }
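
    // Zero-sized element types never allocate, so `capacity` reports
    // `usize::MAX` (sketch; `alloc` is again a stand-in allocator value):
    //
    //     let buf: RawBuffer<(), A> = RawBuffer::new_in(alloc);
    //     assert_eq!(buf.capacity(), usize::MAX);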

    /// Returns a shared reference to the allocator.
    #[inline]
    pub const fn allocator(&self) -> &A {
        self.inner.allocator()
    }

    /// Returns a mutable reference to the allocator.
    ///
    /// # Safety
    ///
    /// The caller must not use the reference to invalidate the buffer's
    /// current allocation, e.g. by replacing or resetting the allocator.
    #[inline]
    pub unsafe fn allocator_mut(&mut self) -> &mut A {
        &mut self.inner.alloc
    }
}

impl<A: Allocator> RawBufferInner<A> {
    #[inline]
    const fn new_in(alloc: A, align: usize) -> Self {
        // An empty buffer never touches memory, so a dangling, well-aligned
        // pointer stands in for the allocation, like `NonNull::dangling` but
        // for a runtime alignment. `align` is a valid alignment and therefore
        // nonzero, so `new_unchecked` is sound.
        let ptr = unsafe { NonNull::new_unchecked(ptr::without_provenance_mut(align)) };
        Self { ptr, cap: 0, alloc }
    }

    #[inline]
    fn with_capacity_in<T>(capacity: usize, alloc: A) -> Self {
        match Self::try_allocate_in::<T>(capacity, alloc) {
            Ok(this) => this,
            Err(err) => handle_error(err),
        }
    }

    fn try_allocate_in<T>(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
        let layout = Layout::array::<T>(capacity).map_err(|_| TryReserveError::CapacityOverflow)?;

        // Zero-sized layouts (ZST elements or zero capacity) never allocate.
        if layout.size() == 0 {
            return Ok(Self::new_in(alloc, layout.align()));
        }

        alloc_guard(layout.size())?;

        let ptr = match unsafe { alloc.allocate(layout) } {
            Ok(ptr) => ptr,
            Err(_) => return Err(TryReserveError::AllocError { layout }),
        };

        Ok(Self { ptr: ptr.cast(), cap: capacity, alloc })
    }

    #[inline]
    fn ptr<T>(&self) -> *mut T {
        self.non_null::<T>().as_ptr()
    }

    #[inline]
    fn non_null<T>(&self) -> NonNull<T> {
        self.ptr.cast()
    }

    #[inline]
    fn capacity(&self, elem_size: usize) -> usize {
        // A buffer of zero-sized elements can hold any number of them.
        if elem_size == 0 {
            usize::MAX
        } else {
            self.cap
        }
    }

    /// # Safety
    ///
    /// `ptr` must be non-null.
    #[inline]
    unsafe fn from_raw_parts_in(ptr: *mut u8, cap: usize, alloc: A) -> Self {
        Self { ptr: unsafe { NonNull::new_unchecked(ptr) }, cap, alloc }
    }

    #[inline]
    const fn allocator(&self) -> &A {
        &self.alloc
    }

    /// Returns the pointer and layout of the current allocation, or `None`
    /// if nothing was ever allocated (ZST elements or zero capacity).
    #[inline]
    fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
        if elem_layout.size() == 0 || self.cap == 0 {
            None
        } else {
            // SAFETY: the allocation was produced from `Layout::array`, so
            // `size * cap` fits in `isize` and the layout invariants hold.
            unsafe {
                let alloc_size = elem_layout.size().unchecked_mul(self.cap);
                let layout = Layout::from_size_align_unchecked(alloc_size, elem_layout.align());
                Some((self.ptr, layout))
            }
        }
    }

    #[inline]
    fn try_with_capacity_in<T>(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
        Self::try_allocate_in::<T>(capacity, alloc)
    }

    /// # Safety
    ///
    /// `elem_layout` must describe the element type this buffer was allocated
    /// for, and the buffer must not be used after this call.
    unsafe fn deallocate(&mut self, elem_layout: Layout) {
        if let Some((ptr, layout)) = self.current_memory(elem_layout) {
            unsafe {
                self.alloc.deallocate(ptr, layout);
            }
        }
    }
}

impl<T, A: Allocator> Drop for RawBuffer<T, A> {
    fn drop(&mut self) {
        // Free the backing allocation only; the elements are treated as
        // uninitialized and are never dropped here.
        unsafe { self.inner.deallocate(Layout::new::<T>()) }
    }
}

#[cold]
fn handle_error(e: TryReserveError) -> ! {
    match e {
        TryReserveError::CapacityOverflow => capacity_overflow(),
        TryReserveError::AllocError { layout } => handle_alloc_error(layout),
    }
}

#[cold]
#[inline(never)]
fn capacity_overflow() -> ! {
    panic!("capacity overflow");
}

#[cold]
pub const fn handle_alloc_error(_layout: Layout) -> ! {
    // A message naming the layout would require runtime formatting, which is
    // unavailable in `const fn`, so the panic message is a plain literal.
    panic!("allocation failed");
}

// Rust's allocation APIs do not support requests larger than `isize::MAX`
// bytes. On 64-bit targets such a request cannot realistically succeed, so
// this check compiles to a no-op there; on 16-bit and 32-bit targets, where
// the full address space may be usable, the guard must be explicit.
#[inline]
fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> {
    if usize::BITS < 64 && alloc_size > isize::MAX as usize {
        Err(TryReserveError::CapacityOverflow)
    } else {
        Ok(())
    }
}
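
// Minimal sanity checks for `alloc_guard`. These avoid the `Allocator` trait
// entirely; the second test only compiles on targets narrower than 64 bits,
// where the guard is active.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn alloc_guard_accepts_small_sizes() {
        assert_eq!(alloc_guard(4096), Ok(()));
    }

    #[cfg(not(target_pointer_width = "64"))]
    #[test]
    fn alloc_guard_rejects_oversized_requests() {
        assert_eq!(alloc_guard(isize::MAX as usize + 1), Err(TryReserveError::CapacityOverflow));
    }
}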