stabby_abi/alloc/mod.rs
//
// Copyright (c) 2023 ZettaScale Technology
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0
// which is available at https://www.apache.org/licenses/LICENSE-2.0.
//
// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
//
// Contributors:
//   Pierre Avital, <pierre.avital@me.com>
//

#![allow(deprecated)]
use core::{marker::PhantomData, mem::MaybeUninit, ptr::NonNull, sync::atomic::AtomicUsize};

use self::vec::ptr_diff;

/// Allocators provided by `stabby`.
pub mod allocators;

/// A generic allocation error.
#[crate::stabby]
#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
pub struct AllocationError();
impl core::fmt::Display for AllocationError {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.write_str("AllocationError")
    }
}
#[cfg(feature = "std")]
impl std::error::Error for AllocationError {}

/// [`alloc::boxed`](https://doc.rust-lang.org/stable/alloc/boxed/), but ABI-stable.
pub mod boxed;
/// Allocated collections, including immutable ones.
pub mod collections;
/// A vector that stores a single element on the stack until allocation is necessary.
pub mod single_or_vec;
/// [`alloc::string`](https://doc.rust-lang.org/stable/alloc/string/), but ABI-stable.
pub mod string;
/// [`alloc::sync`](https://doc.rust-lang.org/stable/alloc/sync/), but ABI-stable.
pub mod sync;
/// [`alloc::vec`](https://doc.rust-lang.org/stable/alloc/vec/), but ABI-stable.
pub mod vec;

/// The default allocator, depending on which of the following is available:
/// - `RustAlloc`: Rust's `GlobalAlloc`, through a vtable that ensures FFI-safety.
/// - `LibcAlloc`: `libc::malloc`, which is 0-sized.
/// - None. I _am_ working on getting a 0-dependency allocator working, but you should probably go with `feature = "alloc-rs"` anyway.
///
/// You can also use the `stabby_default_alloc` cfg to override the default allocator regardless of feature flags.
pub type DefaultAllocator = allocators::DefaultAllocator;

#[crate::stabby]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
/// An ABI-stable equivalent of [`core::alloc::Layout`].
pub struct Layout {
    /// The expected size of the allocation.
    pub size: usize,
    /// The expected alignment of the allocation.
    pub align: usize,
}
impl Layout {
    /// Returns the [`Layout`] corresponding to `T`.
    pub const fn of<T: Sized>() -> Self {
        Layout {
            size: core::mem::size_of::<T>(),
            align: core::mem::align_of::<T>(),
        }
    }
    /// Returns the [`Layout`] corresponding to `[T; n]`.
    ///
    /// Note that this guarantees that even if `T`'s size is not a multiple of its alignment,
    /// the layout will have sufficient memory to store `n` values of `T` in an aligned fashion.
    pub const fn array<T: Sized>(n: usize) -> Self {
        let Self { size, align } = Self::of::<T>();
        Layout {
            size: size * n,
            align,
        }
    }
    /// Concatenates a layout to `self`, ensuring that alignment padding is taken into account.
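    ///
    /// A small worked example (illustrative; it assumes this type is reachable as
    /// `stabby_abi::alloc::Layout`, and relies only on the portable layouts of `u32` and `u16`):
    /// ```
    /// use stabby_abi::alloc::Layout;
    /// let a = Layout::of::<u32>(); // { size: 4, align: 4 }
    /// let b = Layout::of::<u16>(); // { size: 2, align: 2 }
    /// // 4 + 2 = 6 bytes, padded up to a multiple of the larger alignment (4).
    /// assert_eq!(a.concat(b), Layout { size: 8, align: 4 });
    /// ```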
    pub const fn concat(mut self, other: Self) -> Self {
        self.size += other.size;
        self.realign(if self.align < other.align {
            other.align
        } else {
            self.align
        })
    }
    /// Returns the first pointer `output` such that `output >= ptr` and `output as usize % self.align == 0`.
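    ///
    /// A minimal sketch of what this means in practice (illustrative; assumes the
    /// `stabby_abi::alloc::Layout` path):
    /// ```
    /// use stabby_abi::alloc::Layout;
    /// let mut buf = [0u8; 32];
    /// let base = buf.as_mut_ptr();
    /// let aligned = Layout::of::<u64>().next_matching(base);
    /// assert_eq!(aligned as usize % core::mem::align_of::<u64>(), 0);
    /// assert!(aligned as usize >= base as usize);
    /// ```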
    #[inline]
    pub fn next_matching<T>(self, ptr: *mut T) -> *mut T {
        fn next_matching(align: usize, ptr: *mut u8) -> *mut u8 {
            unsafe { ptr.add(ptr.align_offset(align)) }
        }
        next_matching(self.align, ptr.cast()).cast()
    }
    /// Changes the alignment of the layout, adding padding if necessary.
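    ///
    /// For example (illustrative; assumes the `stabby_abi::alloc::Layout` path):
    /// ```
    /// use stabby_abi::alloc::Layout;
    /// // 5 bytes padded up to the next multiple of the new 4-byte alignment.
    /// assert_eq!(Layout { size: 5, align: 1 }.realign(4), Layout { size: 8, align: 4 });
    /// ```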
    pub const fn realign(mut self, new_align: usize) -> Self {
        self.align = new_align;
        self.size = self.size
            + (new_align - (self.size % new_align)) * (((self.size % new_align) != 0) as usize);
        self
    }
}

/// An interface to an allocator.
///
/// Note that `stabby` often stores allocators inside the allocations they make, so an allocator
/// that cannot allocate more memory than its own size on the stack will systematically fail to
/// construct common `stabby` types.
///
/// Since the allocator may be moved, it must also be safe to do so, including after it has performed allocations.
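///
/// A minimal usage sketch, generic over any implementor (illustrative; assumes the
/// `stabby_abi::alloc` paths and a 16-byte allocation grown to 32 bytes):
/// ```
/// use stabby_abi::alloc::{IAlloc, Layout};
///
/// fn round_trip<A: IAlloc>(alloc: &mut A) {
///     let layout = Layout::array::<u8>(16);
///     let ptr = alloc.alloc(layout);
///     assert!(!ptr.is_null());
///     // SAFETY: `ptr` was allocated by `alloc` with `layout`, and is never used after being freed.
///     unsafe {
///         let grown = alloc.realloc(ptr, layout, 32);
///         assert!(!grown.is_null());
///         alloc.free(grown);
///     }
/// }
/// ```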
pub trait IAlloc: Unpin {
    /// Allocates at least as much memory as requested by layout, ensuring the requested alignment is respected.
    ///
    /// If the requested size is 0, or allocation failed, then a null pointer is returned.
    fn alloc(&mut self, layout: Layout) -> *mut ();
    /// Frees the allocation.
    ///
    /// # Safety
    /// `ptr` MUST have been allocated through a successful call to `Self::alloc` or `Self::realloc` with the same instance of `Self`.
    unsafe fn free(&mut self, ptr: *mut ());
    /// Reallocates `ptr`, ensuring that it has enough memory for the newly requested layout.
    ///
    /// If the requested size is 0, or allocation failed, then a null pointer is returned, and `ptr` is not freed.
    ///
    /// # Safety
    /// `ptr` MUST have been allocated through a successful call to `Self::alloc` with the same instance of `Self`.
    unsafe fn realloc(&mut self, ptr: *mut (), prev_layout: Layout, new_size: usize) -> *mut () {
        let ret = self.alloc(Layout {
            size: new_size,
            align: prev_layout.align,
        });
        if !ret.is_null() {
            unsafe {
                core::ptr::copy_nonoverlapping(ptr.cast::<u8>(), ret.cast(), prev_layout.size);
                self.free(ptr);
            }
        }
        ret
    }
}

/// An ABI-stable equivalent of [`IAlloc`].
#[crate::stabby]
#[deprecated = "Stabby doesn't actually use this trait due to conflicts."]
pub trait IStableAlloc: Unpin {
    /// Allocates at least as much memory as requested by layout, ensuring the requested alignment is respected.
    ///
    /// If the requested size is 0, or allocation failed, then a null pointer is returned.
    extern "C" fn alloc(&mut self, layout: Layout) -> *mut ();
    /// Frees the allocation.
    ///
    /// # Safety
    /// `ptr` MUST have been allocated through a successful call to `Self::alloc` or `Self::realloc` with the same instance of `Self`.
    extern "C" fn free(&mut self, ptr: *mut ());
    /// Reallocates `ptr`, ensuring that it has enough memory for the newly requested layout.
    ///
    /// If the requested size is 0, or allocation failed, then a null pointer is returned, and `ptr` is not freed.
    ///
    /// # Safety
    /// `ptr` MUST have been allocated through a successful call to `Self::alloc` with the same instance of `Self`.
    extern "C" fn realloc(
        &mut self,
        ptr: *mut (),
        prev_layout: Layout,
        new_size: usize,
    ) -> *mut () {
        let ret = self.alloc(Layout {
            size: new_size,
            align: prev_layout.align,
        });
        if !ret.is_null() {
            unsafe {
                core::ptr::copy_nonoverlapping(ptr.cast::<u8>(), ret.cast(), prev_layout.size);
                self.free(ptr);
            }
        }
        ret
    }
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
impl<T: IAlloc> IStableAlloc for T {
    extern "C" fn alloc(&mut self, layout: Layout) -> *mut () {
        IAlloc::alloc(self, layout)
    }
    extern "C" fn free(&mut self, ptr: *mut ()) {
        unsafe { IAlloc::free(self, ptr) }
    }
    extern "C" fn realloc(
        &mut self,
        ptr: *mut (),
        prev_layout: Layout,
        new_size: usize,
    ) -> *mut () {
        unsafe { IAlloc::realloc(self, ptr, prev_layout, new_size) }
    }
}

impl<T: IStableAllocDynMut<crate::vtable::H> + Unpin> IAlloc for T {
    fn alloc(&mut self, layout: Layout) -> *mut () {
        IStableAllocDynMut::alloc(self, layout)
    }
    unsafe fn free(&mut self, ptr: *mut ()) {
        IStableAllocDynMut::free(self, ptr)
    }
    unsafe fn realloc(&mut self, ptr: *mut (), prev_layout: Layout, new_size: usize) -> *mut () {
        IStableAllocDynMut::realloc(self, ptr, prev_layout, new_size)
    }
}
impl IAlloc for core::convert::Infallible {
    fn alloc(&mut self, _layout: Layout) -> *mut () {
        unreachable!()
    }
    unsafe fn free(&mut self, _ptr: *mut ()) {
        unreachable!()
    }
}

/// The prefix common to all allocations in [`stabby::alloc`](crate::alloc).
///
/// This allows reuse of allocations when converting between container types.
#[crate::stabby]
pub struct AllocPrefix<Alloc> {
    /// The strong count for reference counted types.
    pub strong: core::sync::atomic::AtomicUsize,
    /// The weak count for reference counted types.
    pub weak: core::sync::atomic::AtomicUsize,
    /// A slot to store a vector's capacity when it's turned into a boxed/arced slice.
    pub capacity: core::sync::atomic::AtomicUsize,
    /// The origin of the prefix, i.e. the pointer returned by the allocator.
    pub origin: NonNull<()>,
    /// A slot for the allocator.
    pub alloc: core::mem::MaybeUninit<Alloc>,
}
impl<Alloc> AllocPrefix<Alloc> {
    /// The offset from the start of the prefix at which a field of type `T` may start:
    /// the prefix's size, rounded up to `T`'s alignment.
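    ///
    /// For example, skipping to a `u8` requires no padding, while skipping to a more strictly
    /// aligned type rounds the offset up (illustrative; assumes the `stabby_abi::alloc::AllocPrefix` path):
    /// ```
    /// use stabby_abi::alloc::AllocPrefix;
    /// assert_eq!(
    ///     AllocPrefix::<()>::skip_to::<u8>(),
    ///     core::mem::size_of::<AllocPrefix<()>>()
    /// );
    /// assert_eq!(AllocPrefix::<()>::skip_to::<u128>() % core::mem::align_of::<u128>(), 0);
    /// ```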
    pub const fn skip_to<T>() -> usize {
        let mut size = core::mem::size_of::<Self>();
        let align = core::mem::align_of::<T>();
        let sizemodalign = size % align;
        if sizemodalign != 0 {
            size += align;
            size -= sizemodalign;
        }
        size
    }
}

/// A non-null pointer guaranteed to be preceded by a valid
/// [`AllocPrefix`], unless the pointer is dangling.
///
/// This means that unless `T` is a ZST, the pointer is guaranteed to be aligned to the maximum of
/// `T`'s alignment and the alignment of the prefix, which itself is pointer-size aligned.
#[crate::stabby]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct AllocPtr<T, Alloc> {
    /// The pointer to the data.
    pub ptr: NonNull<T>,
    /// Remembers the allocator's type.
    pub marker: PhantomData<Alloc>,
}
impl<T, Alloc> Copy for AllocPtr<T, Alloc> {}
impl<T, Alloc> Clone for AllocPtr<T, Alloc> {
    fn clone(&self) -> Self {
        *self
    }
}
impl<T, Alloc> core::ops::Deref for AllocPtr<T, Alloc> {
    type Target = NonNull<T>;
    fn deref(&self) -> &Self::Target {
        &self.ptr
    }
}
impl<T, Alloc> core::ops::DerefMut for AllocPtr<T, Alloc> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.ptr
    }
}
impl<T, Alloc> AllocPtr<MaybeUninit<T>, Alloc> {
    /// Assumes the internals of the pointer have been initialized.
    /// # Safety
    /// The internals of the pointer must have been initialized.
    pub const unsafe fn assume_init(self) -> AllocPtr<T, Alloc> {
        unsafe { core::mem::transmute::<Self, AllocPtr<T, Alloc>>(self) }
    }
}
impl<T, Alloc> AllocPtr<T, Alloc> {
    /// Constructs a dangling pointer.
    pub const fn dangling() -> Self {
        Self {
            ptr: NonNull::dangling(),
            marker: PhantomData,
        }
    }
    /// Casts an allocated pointer.
    pub const fn cast<U>(self) -> AllocPtr<U, Alloc> {
        AllocPtr {
            ptr: self.ptr.cast(),
            marker: PhantomData,
        }
    }
    /// The pointer to the prefix for this allocation.
    const fn prefix_ptr(&self) -> NonNull<AllocPrefix<Alloc>> {
        unsafe { NonNull::new_unchecked(self.ptr.as_ptr().cast::<AllocPrefix<Alloc>>().sub(1)) }
    }
    /// A reference to the prefix for this allocation.
    /// # Safety
    /// `self` must not be dangling, and must have been properly allocated, using [`Self::alloc`] or [`Self::realloc`] for example.
    #[rustversion::attr(since(1.73), const)]
    pub unsafe fn prefix(&self) -> &AllocPrefix<Alloc> {
        unsafe { self.prefix_ptr().as_ref() }
    }
    /// A mutable reference to the prefix for this allocation.
    /// # Safety
    /// `self` must not be dangling, and must have been properly allocated, using [`Self::alloc`] or [`Self::realloc`] for example.
    /// Since this type is [`Copy`], the `&mut self` is not a sufficient guarantee of uniqueness.
    #[rustversion::attr(since(1.86), const)]
    pub unsafe fn prefix_mut(&mut self) -> &mut AllocPrefix<Alloc> {
        unsafe { self.prefix_ptr().as_mut() }
    }
    /// Returns mutable access to the prefix and the data.
    /// # Safety
    /// `self` must not be dangling, and must have been properly allocated, using [`Self::alloc`] or [`Self::realloc`] for example.
    #[rustversion::attr(since(1.86), const)]
    pub unsafe fn split_mut(&mut self) -> (&mut AllocPrefix<Alloc>, &mut T) {
        let prefix = self.prefix_ptr().as_mut();
        let data = self.ptr.as_mut();
        (prefix, data)
    }
    /// Initializes any given pointer:
    /// - The returned pointer is guaranteed to be correctly aligned for `T`.
    /// - It is guaranteed to be immediately preceded (without padding) by an `AllocPrefix<Alloc>`.
    /// # Safety
    /// `ptr` MUST be word-aligned, and MUST be valid for writes for at least the size of
    /// `#[repr(C)] struct { prefix: AllocPrefix<Alloc>, data: [T; capacity] }`
    pub unsafe fn init(ptr: NonNull<()>, capacity: usize) -> Self {
        let shifted_for_prefix = ptr
            .as_ptr()
            .cast::<AllocPrefix<Alloc>>()
            .add(1)
            .cast::<u8>();
        let inited = shifted_for_prefix
            .cast::<u8>()
            .add(shifted_for_prefix.align_offset(core::mem::align_of::<T>()))
            .cast::<T>();
        let this: Self = AllocPtr {
            ptr: NonNull::new_unchecked(inited),
            marker: core::marker::PhantomData,
        };
        this.prefix_ptr().as_ptr().write(AllocPrefix {
            strong: AtomicUsize::new(1),
            weak: AtomicUsize::new(1),
            capacity: AtomicUsize::new(capacity),
            origin: ptr,
            alloc: core::mem::MaybeUninit::uninit(),
        });
        this
    }
}
impl<T, Alloc: IAlloc> AllocPtr<T, Alloc> {
    /// Allocates a pointer to a single element of `T`, prefixed by an [`AllocPrefix`].
    pub fn alloc(alloc: &mut Alloc) -> Option<Self> {
        Self::alloc_array(alloc, 1)
    }
    /// Allocates a pointer to an array of `capacity` `T`, prefixed by an [`AllocPrefix`].
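    ///
    /// A minimal usage sketch, generic over any allocator (illustrative; assumes the
    /// `stabby_abi::alloc` paths):
    /// ```
    /// use stabby_abi::alloc::{AllocPtr, IAlloc};
    ///
    /// fn demo<A: IAlloc>(alloc: &mut A) {
    ///     if let Some(ptr) = AllocPtr::<u32, A>::alloc_array(alloc, 4) {
    ///         // SAFETY: `ptr` was just allocated for 4 `u32`s and is freed exactly once.
    ///         unsafe {
    ///             for i in 0..4 {
    ///                 ptr.ptr.as_ptr().add(i).write(i as u32);
    ///             }
    ///             assert_eq!(
    ///                 ptr.prefix().capacity.load(core::sync::atomic::Ordering::Relaxed),
    ///                 4
    ///             );
    ///             ptr.free(alloc);
    ///         }
    ///     }
    /// }
    /// ```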
    pub fn alloc_array(alloc: &mut Alloc, capacity: usize) -> Option<Self> {
        let mut layout = Layout::of::<AllocPrefix<Alloc>>().concat(Layout::array::<T>(capacity));
        layout.align = core::mem::align_of::<AllocPrefix<Alloc>>();
        let ptr = alloc.alloc(layout);
        NonNull::new(ptr).map(|ptr| unsafe { Self::init(ptr, capacity) })
    }
    /// Reallocates a pointer to an array of `capacity` `T`, prefixed by an [`AllocPrefix`].
    ///
    /// In case of failure of the allocator, this will return `None` and `self` will not have been freed.
    ///
    /// # Safety
    /// `self` must not be dangling.
    pub unsafe fn realloc(
        self,
        alloc: &mut Alloc,
        prev_capacity: usize,
        new_capacity: usize,
    ) -> Option<Self> {
        let mut layout =
            Layout::of::<AllocPrefix<Alloc>>().concat(Layout::array::<T>(prev_capacity));
        layout.align = core::mem::align_of::<AllocPrefix<Alloc>>();
        let ptr = alloc.realloc(
            self.prefix_ptr().cast().as_ptr(),
            layout,
            Layout::of::<AllocPrefix<Alloc>>()
                .concat(Layout::array::<T>(new_capacity))
                .size,
        );
        NonNull::new(ptr).map(|ptr| unsafe { Self::init(ptr, new_capacity) })
    }
    /// Frees the allocation backing `self`.
    /// # Safety
    /// `self` must not be dangling, and is freed after this returns.
    pub unsafe fn free(self, alloc: &mut Alloc) {
        alloc.free(self.prefix().origin.as_ptr())
    }
}

/// A helper to work with allocated slices.
#[crate::stabby]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct AllocSlice<T, Alloc> {
    /// The start of the slice.
    pub start: AllocPtr<T, Alloc>,
    /// The end of the slice (exclusive).
    pub end: NonNull<T>,
}
impl<T, Alloc> AllocSlice<T, Alloc> {
    /// Returns the number of elements in the slice.
    pub const fn len(&self) -> usize {
        ptr_diff(self.end, self.start.ptr)
    }
    /// Returns `true` if the slice is empty.
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Returns this slice.
    /// # Safety
    /// `self` must be valid.
    pub const unsafe fn as_slice(&self) -> &[T] {
        core::slice::from_raw_parts(self.start.ptr.as_ptr(), ptr_diff(self.end, self.start.ptr))
    }
}
impl<T, Alloc> Copy for AllocSlice<T, Alloc> {}
impl<T, Alloc> Clone for AllocSlice<T, Alloc> {
    fn clone(&self) -> Self {
        *self
    }
}