without_alloc/alloc.rs
1//! Allocator extension traits.
2use core::{alloc, mem};
3use alloc_traits::{NonZeroLayout, LocalAlloc};
4use super::{
5 boxed::Box,
6 fixed_vec::FixedVec,
7 rc::Rc,
8 uninit::Uninit,
9};
10
/// Values for some allocation including the [`Uninit`].
///
/// See [`Uninit`] for a better picture of the potential usage of this result.
///
/// [`Uninit`]: ../uninit/struct.Uninit.html
#[derive(Debug)]
pub struct LeakedAllocation<'a, T: ?Sized=()> {
    /// Uninit pointer to the region with specified layout.
    pub uninit: Uninit<'a, T>,
}
21
/// Leak allocations into uninit regions.
///
/// All methods hand out memory bound only by the `'alloc` borrow of the
/// allocator — nothing is ever deallocated, hence "leak".
pub trait LocalAllocLeakExt<'alloc>: LocalAlloc<'alloc> {
    /// Leak an allocation with detailed layout.
    ///
    /// Provides an [`Uninit`] wrapping several aspects of initialization in a safe interface,
    /// bound by the lifetime of the reference to the allocator.
    ///
    /// [`Uninit`]: ../uninit/struct.Uninit.html
    fn alloc_layout(&'alloc self, layout: NonZeroLayout)
        -> Option<LeakedAllocation<'alloc>>
    {
        // `None` from the underlying allocator signals allocation failure.
        let alloc = self.alloc(layout)?;
        // SAFETY: `alloc.ptr` is a fresh allocation of `alloc.layout.size()`
        // bytes handed out by `self.alloc`, valid for the `'alloc` borrow.
        let uninit = unsafe {
            Uninit::from_memory(alloc.ptr, alloc.layout.size().into())
        };

        Some(LeakedAllocation {
            uninit,
        })
    }

    /// Leak an allocation for a specific type.
    ///
    /// It is not yet initialized but provides a safe interface for that initialization. Note that
    /// the type **can** be a ZST in which case a dangling pointer is substituted for the true
    /// allocation.
    ///
    /// ## Usage
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// # use without_alloc::alloc::LocalAllocLeakExt;
    /// use core::cell::{Ref, RefCell};
    ///
    /// let slab: Bump<[Ref<'static, usize>; 1]> = Bump::uninit();
    /// let data = RefCell::new(0xff);
    ///
    /// // We can place a `Ref` here but we did not yet.
    /// let alloc = slab.alloc_t::<Ref<usize>>().unwrap();
    /// let cell_ref = alloc.uninit.init(data.borrow());
    ///
    /// assert_eq!(**cell_ref, 0xff);
    /// ```
    fn alloc_t<V>(&'alloc self) -> Option<LeakedAllocation<'alloc, V>> {
        match NonZeroLayout::new::<V>() {
            // `V` is zero-sized: no real memory is needed, substitute a
            // dangling fake allocation instead of bothering the allocator.
            None => Some(LeakedAllocation::zst_fake_alloc()),
            Some(alloc) => {
                let allocation = self.alloc_layout(alloc)?;
                // The untyped allocation was made with exactly the layout
                // of `V`, so the typed cast cannot fail.
                let right_type = allocation.cast().unwrap();
                Some(right_type)
            },
        }
    }

    /// Allocate a [`Box`].
    ///
    /// This will allocate some memory with the correct layout for a [`Box`], then place the
    /// provided value into the allocation by constructing an [`Box`].
    ///
    /// [`Box`]: ../boxed/struct.Box.html
    fn boxed<V>(&'alloc self, val: V) -> Option<Box<'alloc, V>> {
        let alloc = self.alloc_t::<V>()?;
        Some(Box::new(val, alloc.uninit))
    }

    /// Allocate a [`FixedVec`].
    ///
    /// This will allocate some memory with the correct layout for a [`FixedVec`] of the given
    /// capacity (in elements) and wrap it. Returns `None` if it is not possible to allocate the
    /// layout.
    ///
    /// [`FixedVec`]: ../fixed_vec/struct.FixedVec.html
    fn fixed_vec<V>(&'alloc self, capacity: usize) -> Option<FixedVec<'alloc, V>> {
        // Guard against arithmetic overflow of the total byte size.
        let size = mem::size_of::<V>().checked_mul(capacity)?;
        let layout = alloc::Layout::from_size_align(size, mem::align_of::<V>()).ok()?;

        let uninit = match NonZeroLayout::from_layout(layout.into()) {
            // Zero total size (ZST element or zero capacity): no backing
            // memory required, an empty region suffices.
            None => Uninit::empty(),
            Some(layout) => {
                let allocation = self.alloc_layout(layout)?;
                // Allocated with a `[V]`-compatible size and alignment, so
                // the slice cast cannot fail.
                let right_type = allocation.cast_slice().unwrap();
                right_type.uninit
            }
        };

        Some(FixedVec::new(uninit))
    }

    /// Allocate an [`Rc`].
    ///
    /// This will allocate some memory with the correct layout for an [`Rc`], then place the
    /// provided value into the allocation by constructing an [`Rc`].
    ///
    /// [`Rc`]: ../rc/struct.Rc.html
    fn rc<V>(&'alloc self, val: V) -> Option<Rc<'alloc, V>> {
        let layout = Rc::<V>::layout();
        // Unwrap since this is surely never an empty layout, always have counter.
        let layout = NonZeroLayout::from_layout(layout.into()).unwrap();
        let alloc = self.alloc_layout(layout)?;
        Some(Rc::new(val, alloc.uninit))
    }

    /// Allocate a slice of a copyable type.
    ///
    /// This will allocate some memory with the same layout as required by the slice, then copy all
    /// values into the new allocation via a byte copy.
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// # use without_alloc::alloc::LocalAllocLeakExt;
    /// let slab: Bump<[usize; 16]> = Bump::uninit();
    /// let data: &[u8] = b"Hello, World!";
    ///
    /// let slice = slab.copy_slice(data).unwrap();
    /// assert_eq!(data, slice);
    /// ```
    fn copy_slice<T: Copy>(&'alloc self, slice: &[T]) -> Option<&'alloc mut [T]> {
        let layout = alloc::Layout::for_value(slice);
        let uninit = match NonZeroLayout::from_layout(layout.into()) {
            // Empty slice or ZST element: nothing to copy, empty region.
            None => Uninit::empty(),
            Some(layout) => {
                let allocation = self.alloc_layout(layout)?;
                let right_type = allocation.cast_slice().unwrap();
                right_type.uninit
            }
        };

        unsafe {
            // SAFETY:
            // * the source is trivially valid for reads as it is a slice
            // * the memory is valid for the same layout as slice, so aligned and large enough
            // * both are aligned, uninit due to allocator requirements
            // NOTE(review): destination is a fresh allocation so the regions
            // cannot overlap; `copy_nonoverlapping` would also be sound here.
            core::ptr::copy(slice.as_ptr(), uninit.as_begin_ptr(), slice.len());
        }

        Some(unsafe {
            // SAFETY: this is a copy of `slice` which is initialized.
            uninit.into_mut()
        })
    }

    /// Allocate a dynamically sized string.
    ///
    /// This will allocate some memory with the same layout as required by the string, then copy
    /// all characters into the new allocation via a byte copy.
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// # use without_alloc::alloc::LocalAllocLeakExt;
    /// let slab: Bump<[u8; 16]> = Bump::uninit();
    /// let data: &str = "Hello, World!";
    ///
    /// let slice = slab.copy_str(data).unwrap();
    /// assert_eq!(data, slice);
    /// ```
    fn copy_str(&'alloc self, st: &str) -> Option<&'alloc str> {
        // Delegate the allocation and byte copy to `copy_slice`.
        let bytes = self.copy_slice(st.as_bytes())?;

        Some(unsafe {
            // SAFETY: this is a copy of `st` which is valid utf-8
            core::str::from_utf8_unchecked(bytes)
        })
    }

    /// Allocate a copy of a generic dynamically sized type.
    ///
    /// This method takes a `ManuallyDrop<T>` wrapper instead of a `T` directly. These types are of
    /// course layout compatible and you may soundly cast one reference type to the other. However
    /// this choice forces acknowledgment that the value _must not_ be dropped by the caller
    /// afterwards and makes this reasonably more safe in case of panics.
    ///
    /// Note further that mutable access is however explicitly _not_ required in contrast to
    /// `ManuallyDrop::take`. Otherwise, the caller would have to ensure that the value is not
    /// aliased and actually mutable. Keeping these guarantees often involves moving the value into
    /// a new stack slot which is obviously not possible for dynamically sized values. This
    /// interfaces promises not to overwrite any byte which does not restrict its functionality.
    ///
    /// # Safety
    ///
    /// This is quite unsafe and relies on the nightly `set_ptr_value` feature. Furthermore this
    /// method does not require that `T` is in fact `Copy` as doing so would not be possible for
    /// dynamically sized values. You must either require this bound on the expose interface or
    /// must ensure the source value behind the pointer is not used further, not dropped and
    /// basically discarded. You should act as if `take` had been called on the supplied value.
    ///
    /// # Example
    ///
    /// ```
    /// # use static_alloc::Bump;
    /// # use without_alloc::alloc::LocalAllocLeakExt;
    /// use core::fmt::Debug;
    /// use core::mem::ManuallyDrop;
    ///
    /// let slab: Bump<[u8; 16]> = Bump::uninit();
    /// let debuggable = ManuallyDrop::new(1usize);
    /// let debug = unsafe {
    ///     slab.copy_dst::<dyn Debug>(&debuggable).unwrap()
    /// };
    /// assert_eq!(format!("{:?}", debug), "1");
    /// ```
    #[cfg(feature = "nightly_set_ptr_value")]
    #[allow(unused_unsafe)]
    unsafe fn copy_dst<T: ?Sized>(&'alloc self, val: &core::mem::ManuallyDrop<T>) -> Option<&'alloc mut T> {
        // Layout of the *pointee*, including any unsized tail.
        let layout = alloc::Layout::for_value(val);
        let uninit = match NonZeroLayout::from_layout(layout.into()) {
            None => Uninit::invent_for_zst(),
            Some(layout) => self.alloc_layout(layout)?.uninit,
        };

        unsafe {
            // SAFETY:
            // * the source is valid for reads for its own layout
            // * the memory is valid for the same layout as val, so aligned and large enough
            // * both are aligned, uninit due to allocator requirements
            core::ptr::copy(val as *const _ as *const u8, uninit.as_ptr() as *mut u8, layout.size());
        }

        // Re-attach the source's pointer metadata (e.g. vtable or slice
        // length) to the freshly filled allocation.
        let ptr = val as *const _ as *mut T;
        let ptr = uninit.as_ptr().with_metadata_of(ptr);
        Some(unsafe {
            // SAFETY: The byte copy above put the value into a valid state. Caller promises that
            // we can logically move the value.
            &mut *ptr
        })
    }
}
248
249impl<'alloc, T> LocalAllocLeakExt<'alloc> for T
250 where T: LocalAlloc<'alloc>,
251{ }
252
253impl<Zst> LeakedAllocation<'_, Zst> {
254 /// Invent a new allocation for a zero-sized type (ZST).
255 ///
256 /// # Panics
257 /// This method panics when the type parameter is not a zero sized type.
258 pub fn zst_fake_alloc() -> Self {
259 LeakedAllocation {
260 uninit: Uninit::invent_for_zst(),
261 }
262 }
263}
264
265impl<'a, T> LeakedAllocation<'a, T> {
266 fn cast<U>(self) -> Option<LeakedAllocation<'a, U>> {
267 Some(LeakedAllocation {
268 uninit: self.uninit.cast().ok()?,
269 })
270 }
271
272 fn cast_slice<U>(self) -> Option<LeakedAllocation<'a, [U]>> {
273 Some(LeakedAllocation {
274 uninit: self.uninit.cast_slice().ok()?,
275 })
276 }
277}