use core::alloc::{Layout, LayoutError};
use core::fmt;
use core::marker::PhantomData;
use core::mem::{align_of, ManuallyDrop};
use core::ptr;
use core::ptr::NonNull;

use const_default::ConstDefault;

use super::RawBuffer;
use crate::alloc::{AllocateIn, Allocator, AllocatorDefault};
use crate::error::StorageError;

#[cfg(feature = "zeroize")]
use crate::alloc::AllocatorZeroizes;

/// A type which tracks the state of a buffer allocation.
pub trait BufferHeader<T: ?Sized>: Copy + fmt::Debug + Sized {
    /// The header value for an empty, unallocated buffer.
    const EMPTY: Self;

    /// Determine whether this header describes an empty buffer.
    fn is_empty(&self) -> bool;

    /// Calculate the memory layout required by the described buffer.
    fn layout(&self) -> Result<Layout, LayoutError>;

    /// Update the header to reflect a new allocation, returning a pointer to
    /// the contained data. When `exact` is not set, the header may claim any
    /// extra capacity present in the allocated block.
    fn update_for_alloc(&mut self, ptr: NonNull<[u8]>, exact: bool) -> NonNull<T>;
}
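
// A minimal sketch of a `BufferHeader` implementation for a slice buffer
// tracking only a capacity. `ExampleHeader` is hypothetical and not part of
// the crate API; it illustrates the contract: `layout` describes the
// requested allocation, and `update_for_alloc` records the block actually
// received.
#[cfg(test)]
mod buffer_header_example {
    use super::*;
    use core::mem::size_of;

    #[derive(Clone, Copy, Debug)]
    struct ExampleHeader {
        capacity: usize,
    }

    impl<T> BufferHeader<[T]> for ExampleHeader {
        const EMPTY: Self = Self { capacity: 0 };

        fn is_empty(&self) -> bool {
            self.capacity == 0
        }

        fn layout(&self) -> Result<Layout, LayoutError> {
            Layout::array::<T>(self.capacity)
        }

        fn update_for_alloc(&mut self, ptr: NonNull<[u8]>, _exact: bool) -> NonNull<[T]> {
            // Claim the capacity backed by the allocation; an implementation
            // may instead decline slack capacity when `exact` is set. The
            // `max(1)` guards against zero-sized `T`.
            self.capacity = ptr.len() / size_of::<T>().max(1);
            NonNull::slice_from_raw_parts(ptr.cast::<T>(), self.capacity)
        }
    }

    #[test]
    fn example_header_contract() {
        let header = ExampleHeader { capacity: 4 };
        let layout = <ExampleHeader as BufferHeader<[u32]>>::layout(&header).unwrap();
        assert_eq!(layout.size(), 4 * size_of::<u32>());

        // Simulate receiving a 32-byte block: the header claims the full
        // capacity it can back.
        let mut buf = [0u8; 32];
        let block: *mut [u8] = &mut buf[..];
        let block = NonNull::new(block).unwrap();
        let mut header = <ExampleHeader as BufferHeader<[u32]>>::EMPTY;
        let data: NonNull<[u32]> = header.update_for_alloc(block, false);
        assert_eq!(header.capacity, 8);
        assert_eq!(data.len(), 8);
    }
}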

/// A buffer which stores its header inline, alongside the data pointer and
/// the allocator instance.
#[derive(Debug)]
pub struct FatBuffer<T: ?Sized, H: BufferHeader<T>, A: Allocator> {
    pub(crate) header: H,
    pub(crate) data: NonNull<T>,
    pub(crate) alloc: A,
}

impl<T, H: BufferHeader<T>, A: Allocator> FatBuffer<T, H, A> {
    #[inline]
    pub(crate) fn allocate_in<I>(header: H, alloc_in: I, exact: bool) -> Result<Self, StorageError>
    where
        I: AllocateIn<Alloc = A>,
    {
        let layout = header.layout()?;
        let (ptr, alloc) = alloc_in
            .allocate_in(layout)
            .map_err(|_| StorageError::AllocError(layout))?;
        // Start from the empty header: `update_for_alloc` fills in the
        // capacity according to the block actually received.
        let mut header = H::EMPTY;
        let data = header.update_for_alloc(ptr, exact);
        Ok(Self {
            header,
            data,
            alloc,
        })
    }

    #[inline]
    pub(crate) fn grow(&mut self, mut new_header: H, exact: bool) -> Result<(), StorageError> {
        let new_layout = new_header.layout()?;
        let ptr = if self.is_dangling() {
            self.alloc
                .allocate(new_layout)
                .map_err(|_| StorageError::AllocError(new_layout))?
        } else {
            let old_layout: Layout = self.header.layout()?;
            // SAFETY: the buffer is not dangling, so `data` refers to an
            // active allocation described by `old_layout`.
            unsafe { self.alloc.grow(self.data.cast(), old_layout, new_layout) }
                .map_err(|_| StorageError::AllocError(new_layout))?
        };
        self.data = new_header.update_for_alloc(ptr, exact);
        self.header = new_header;
        Ok(())
    }

    #[inline]
    pub(crate) fn shrink(&mut self, mut new_header: H) -> Result<(), StorageError> {
        if new_header.is_empty() {
            if !self.is_dangling() {
                let layout = self.header.layout()?;
                // SAFETY: the buffer is not dangling, so `data` refers to an
                // active allocation described by `layout`.
                unsafe { self.alloc.deallocate(self.data.cast(), layout) };
                self.data = NonNull::dangling();
            }
        } else {
            let new_layout = new_header.layout()?;
            let ptr = if self.is_dangling() {
                self.alloc
                    .allocate(new_layout)
                    .map_err(|_| StorageError::AllocError(new_layout))?
            } else {
                let old_layout: Layout = self.header.layout()?;
                // SAFETY: the buffer is not dangling, so `data` refers to an
                // active allocation described by `old_layout`.
                unsafe { self.alloc.shrink(self.data.cast(), old_layout, new_layout) }
                    .map_err(|_| StorageError::AllocError(new_layout))?
            };
            self.data = new_header.update_for_alloc(ptr, true);
        }
        self.header = new_header;
        Ok(())
    }
}

impl<T, H: BufferHeader<T>, A: Allocator> FatBuffer<T, H, A> {
    #[inline]
    pub(crate) const fn dangling(alloc: A) -> Self {
        Self {
            header: H::EMPTY,
            data: NonNull::dangling(),
            alloc,
        }
    }

    #[inline]
    pub(crate) fn is_dangling(&self) -> bool {
        self.data == NonNull::dangling()
    }
}

impl<T: ?Sized, H: BufferHeader<T>, A: Allocator> FatBuffer<T, H, A> {
    #[inline]
    pub(crate) fn from_parts(header: H, data: NonNull<T>, alloc: A) -> Self {
        Self {
            header,
            data,
            alloc,
        }
    }

    #[inline]
    pub(crate) fn into_parts(self) -> (H, NonNull<T>, A) {
        // Wrap in `ManuallyDrop` so the allocation is not released, then
        // move the allocator out without running its destructor.
        let slf = ManuallyDrop::new(self);
        (slf.header, slf.data, unsafe { ptr::read(&slf.alloc) })
    }
}

impl<T, H, A> ConstDefault for FatBuffer<T, H, A>
where
    H: BufferHeader<T>,
    A: AllocatorDefault,
{
    const DEFAULT: Self = Self::dangling(A::DEFAULT);
}

impl<T: ?Sized, H: BufferHeader<T>, A: Allocator> RawBuffer for FatBuffer<T, H, A> {
    type RawData = T;

    #[inline]
    fn data_ptr(&self) -> *const T {
        self.data.as_ptr()
    }

    #[inline]
    fn data_ptr_mut(&mut self) -> *mut T {
        self.data.as_ptr()
    }
}

impl<T: ?Sized, H: BufferHeader<T>, A: Allocator> Drop for FatBuffer<T, H, A> {
    fn drop(&mut self) {
        let layout = self.header.layout().expect("Layout error");
        if layout.size() > 0 {
            unsafe {
                self.alloc.deallocate(self.data.cast(), layout);
            }
        }
    }
}

#[cfg(feature = "zeroize")]
impl<T: ?Sized, H: BufferHeader<T>, A: AllocatorZeroizes> zeroize::ZeroizeOnDrop
    for FatBuffer<T, H, A>
{
}

pub(crate) struct ThinPtr<T, H: BufferHeader<T>>(NonNull<T>, PhantomData<H>);

impl<T, H: BufferHeader<T>> ThinPtr<T, H> {
    /// The offset of the data from the start of the allocation: the size of
    /// the header, rounded up to the alignment of the data type. For
    /// example, a 4-byte header preceding 8-byte aligned data gives an
    /// offset of 8.
    const DATA_OFFSET: usize = {
        let header = Layout::new::<H>();
        let data_align = align_of::<T>();
        header.size().wrapping_add(data_align).wrapping_sub(1) & !data_align.wrapping_sub(1)
    };

    #[inline]
    pub const fn dangling() -> Self {
        Self(NonNull::dangling(), PhantomData)
    }

    #[inline]
    pub fn is_dangling(&self) -> bool {
        ptr::eq(self.0.as_ptr(), NonNull::dangling().as_ptr())
    }

    #[inline]
    pub fn from_alloc(mut header: H, ptr: NonNull<[u8]>, exact: bool) -> Self {
        #[allow(clippy::len_zero)]
        if ptr.len() == 0 {
            Self::dangling()
        } else {
            assert!(
                ptr.len() >= Self::DATA_OFFSET,
                "allocation too small for thin ptr"
            );
            // SAFETY: the allocation is at least `DATA_OFFSET` bytes, so the
            // offset data pointer remains within it.
            let head = unsafe { ptr.cast::<u8>().add(Self::DATA_OFFSET) };
            // The remainder of the block is available to store the data.
            let data_alloc = NonNull::slice_from_raw_parts(head, ptr.len() - Self::DATA_OFFSET);
            let data = header.update_for_alloc(data_alloc, exact);
            // Write the updated header at the start of the allocation.
            unsafe { ptr.cast::<H>().as_ptr().write(header) };
            Self(data, PhantomData)
        }
    }

    #[inline]
    fn layout(header: &H) -> Result<Layout, LayoutError> {
        if header.is_empty() {
            // SAFETY: the alignment of `H` is a non-zero power of two.
            Ok(unsafe { Layout::from_size_align_unchecked(0, align_of::<H>()) })
        } else {
            let data_layout = header.layout()?;
            let (layout, _) = Layout::new::<H>().extend(data_layout)?;
            Ok(layout)
        }
    }

    /// Obtain a pointer to the start of the underlying allocation.
    ///
    /// # Safety
    /// The pointer must not be dangling.
    #[inline]
    pub const unsafe fn to_alloc(&self) -> NonNull<u8> {
        NonNull::new_unchecked(self.header_ptr()).cast()
    }

    #[inline]
    pub const fn as_ptr(&self) -> *mut T {
        self.0.as_ptr()
    }

    #[inline]
    pub const fn header_ptr(&self) -> *mut H {
        unsafe { (self.0.as_ptr() as *mut u8).sub(Self::DATA_OFFSET) as *mut _ }
    }
}
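
// A minimal test-only sketch checking the `DATA_OFFSET` computation: the
// data must begin at the first offset past the header which satisfies the
// data type's alignment. `SampleHeader` is hypothetical and exists only for
// this check.
#[cfg(test)]
mod thin_ptr_offset_check {
    use super::*;

    #[derive(Clone, Copy, Debug)]
    struct SampleHeader(u32);

    impl BufferHeader<u64> for SampleHeader {
        const EMPTY: Self = Self(0);

        fn is_empty(&self) -> bool {
            self.0 == 0
        }

        fn layout(&self) -> Result<Layout, LayoutError> {
            Layout::array::<u64>(self.0 as usize)
        }

        fn update_for_alloc(&mut self, ptr: NonNull<[u8]>, _exact: bool) -> NonNull<u64> {
            ptr.cast()
        }
    }

    #[test]
    fn data_offset_is_aligned() {
        // With a 4-byte header and (on typical targets) 8-byte aligned
        // `u64` data, the data is expected to begin at offset 8.
        let offset = ThinPtr::<u64, SampleHeader>::DATA_OFFSET;
        assert!(offset >= core::mem::size_of::<SampleHeader>());
        assert_eq!(offset % align_of::<u64>(), 0);
    }
}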

impl<T, H: BufferHeader<T>> fmt::Debug for ThinPtr<T, H> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}", &self.0)
    }
}

/// A buffer which stores its header within the allocation itself, keeping
/// the handle a single pointer wide.
#[derive(Debug)]
pub struct ThinBuffer<T, H: BufferHeader<T>, A: Allocator> {
    pub(crate) data: ThinPtr<T, H>,
    pub(crate) alloc: A,
}

impl<T, H: BufferHeader<T>, A: Allocator> ThinBuffer<T, H, A> {
    #[inline]
    pub(crate) fn allocate_in<I>(header: H, alloc_in: I, exact: bool) -> Result<Self, StorageError>
    where
        I: AllocateIn<Alloc = A>,
    {
        let layout = ThinPtr::layout(&header)?;
        let (ptr, alloc) = alloc_in
            .allocate_in(layout)
            .map_err(|_| StorageError::AllocError(layout))?;
        let data = ThinPtr::from_alloc(header, ptr, exact);
        Ok(Self { data, alloc })
    }

    #[inline]
    pub(crate) fn grow(&mut self, new_header: H, exact: bool) -> Result<(), StorageError> {
        let old_header = self.header();
        let new_layout = ThinPtr::layout(&new_header)?;
        assert!(new_layout.size() != 0, "Cannot grow to empty buffer");
        let ptr = if old_header.is_empty() {
            self.alloc
                .allocate(new_layout)
                .map_err(|_| StorageError::AllocError(new_layout))?
        } else {
            let old_layout: Layout = ThinPtr::<T, H>::layout(&old_header)?;
            // SAFETY: a non-empty header implies an active allocation
            // described by `old_layout`.
            unsafe {
                self.alloc
                    .grow(self.data.to_alloc(), old_layout, new_layout)
            }
            .map_err(|_| StorageError::AllocError(new_layout))?
        };
        self.data = ThinPtr::from_alloc(new_header, ptr, exact);
        Ok(())
    }

    #[inline]
    pub(crate) fn shrink(&mut self, new_header: H) -> Result<(), StorageError> {
        let old_header = self.header();
        let old_layout = if old_header.is_empty() {
            None
        } else {
            Some(ThinPtr::<T, H>::layout(&old_header)?)
        };
        if new_header.is_empty() {
            if let Some(old_layout) = old_layout {
                // SAFETY: a non-empty header implies an active allocation.
                unsafe { self.alloc.deallocate(self.data.to_alloc(), old_layout) };
                self.data = ThinPtr::dangling();
            }
        } else {
            let new_layout = ThinPtr::<T, H>::layout(&new_header)?;
            let ptr = if let Some(old_layout) = old_layout {
                // SAFETY: a non-empty header implies an active allocation
                // described by `old_layout`.
                unsafe {
                    self.alloc
                        .shrink(self.data.to_alloc(), old_layout, new_layout)
                }
                .map_err(|_| StorageError::AllocError(new_layout))?
            } else {
                self.alloc
                    .allocate(new_layout)
                    .map_err(|_| StorageError::AllocError(new_layout))?
            };
            self.data = ThinPtr::from_alloc(new_header, ptr, true);
        }
        Ok(())
    }
}

impl<T, H: BufferHeader<T>, A: Allocator> ThinBuffer<T, H, A> {
    #[inline]
    pub(crate) const fn dangling(alloc: A) -> Self {
        Self {
            data: ThinPtr::dangling(),
            alloc,
        }
    }

    #[inline]
    pub(crate) fn is_dangling(&self) -> bool {
        self.data.is_dangling()
    }

    #[inline]
    pub(crate) fn header(&self) -> H {
        if self.is_dangling() {
            H::EMPTY
        } else {
            // SAFETY: a non-dangling buffer stores its header at the start
            // of the allocation.
            unsafe { ptr::read(self.data.header_ptr()) }
        }
    }

    /// Replace the stored header value.
    ///
    /// # Safety
    /// The buffer must not be dangling.
    #[inline]
    pub(crate) unsafe fn set_header(&mut self, header: H) {
        self.data.header_ptr().write(header)
    }
}

impl<T, H: BufferHeader<T>, A: Allocator> ThinBuffer<T, H, A> {
    #[inline]
    pub(crate) fn from_parts(_header: H, data: NonNull<T>, alloc: A) -> Self {
        // The header is already stored within the allocation, so the header
        // parameter is unused here.
        Self {
            data: ThinPtr(data, PhantomData),
            alloc,
        }
    }

    #[inline]
    pub(crate) fn into_parts(self) -> (H, NonNull<T>, A) {
        let slf = ManuallyDrop::new(self);
        let header = if slf.is_dangling() {
            H::EMPTY
        } else {
            unsafe { ptr::read(slf.data.header_ptr()) }
        };
        (header, slf.data.0, unsafe { ptr::read(&slf.alloc) })
    }
}

impl<T, H, A> ConstDefault for ThinBuffer<T, H, A>
where
    H: BufferHeader<T>,
    A: AllocatorDefault,
{
    const DEFAULT: Self = Self::dangling(A::DEFAULT);
}

impl<T, H: BufferHeader<T>, A: Allocator> RawBuffer for ThinBuffer<T, H, A> {
    type RawData = T;

    #[inline]
    fn data_ptr(&self) -> *const T {
        self.data.as_ptr()
    }

    #[inline]
    fn data_ptr_mut(&mut self) -> *mut T {
        self.data.as_ptr()
    }
}

#[cfg(feature = "zeroize")]
impl<T, H: BufferHeader<T>, A: AllocatorZeroizes> zeroize::ZeroizeOnDrop for ThinBuffer<T, H, A> {}

impl<T, H: BufferHeader<T>, A: Allocator> Drop for ThinBuffer<T, H, A> {
    fn drop(&mut self) {
        let header = self.header();
        if !header.is_empty() {
            let layout = ThinPtr::<T, H>::layout(&header).expect("Layout error");
            // SAFETY: a non-empty header implies an active allocation.
            unsafe { self.alloc.deallocate(self.data.to_alloc(), layout) };
        }
    }
}