static_alloc/leaked.rs
//! This module contains an owning wrapper of a leaked struct.
//!
//! FIXME(breaking): Naming. `leaking` implies skipping the `Drop` of the value as well, but we do
//! the precise opposite.
use core::pin::Pin;
use alloc_traits::AllocTime;

use core::{
    alloc::Layout,
    fmt,
    hash,
    marker::PhantomData,
    mem::{ManuallyDrop, MaybeUninit},
    ops::{Deref, DerefMut},
    ptr::{self, NonNull},
};

/// Marker struct that allows running one or several methods with a stack-allocated slice.
///
/// This ensures that the allocation does not exceed certain limits that would likely blow the
/// stack and run into Rust's stack canary, thus aborting the process.
pub struct Alloca<T> {
    marker: PhantomData<[T]>,
    len: usize,
}

impl<T> Alloca<T> {
    /// Try to create a representation that allows running functions with dynamically
    /// stack-allocated slices.
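    ///
    /// # Usage
    ///
    /// A minimal sketch (the path assumes this module is exported as `static_alloc::leaked`): a
    /// request whose layout would overflow yields `None`, a small one succeeds.
    ///
    /// ```
    /// use static_alloc::leaked::Alloca;
    ///
    /// assert!(Alloca::<u64>::new(16).is_some());
    /// assert!(Alloca::<u64>::new(usize::MAX / 8).is_none());
    /// ```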
    pub fn new(len: usize) -> Option<Self> {
        // Check that it's okay to create the padded layout. This is pure so it will again work
        // when we try during `run`.
        let _padded_layout = Layout::array::<T>(len + 1).ok()?;
        Some(Alloca {
            marker: PhantomData,
            len,
        })
    }

    fn padded_layout(&self) -> Layout {
        Layout::array::<T>(self.len + 1).expect("Checked this in the constructor")
    }

    /// Allocate a slice of elements and run a closure on it.
    ///
    /// Please note that instantiating this method relies on the optimizer, to an extent. In
    /// particular we will create stack slots of differing sizes depending on the required size.
    /// This shouldn't have an effect other than moving the stack pointer by various amounts and
    /// should never incur more than one `T` of overhead. However, we can't enforce this. In
    /// theory LLVM might still reserve stack space for all variants, including a stack probe, and
    /// thus prematurely assume we have hit the bottom of the available stack space. This is not
    /// very likely to occur in practice.
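    ///
    /// # Usage
    ///
    /// A minimal sketch of running a closure on temporary scratch memory; the closure receives
    /// the slice uninitialized and may initialize as much of it as it needs.
    ///
    /// ```
    /// use core::mem::MaybeUninit;
    /// use static_alloc::leaked::Alloca;
    ///
    /// let alloca = Alloca::<u8>::new(32).expect("Small layout is always valid");
    /// let len = alloca.run(|slice| {
    ///     // Write something into the uninitialized scratch buffer before using it.
    ///     for slot in slice.iter_mut() {
    ///         *slot = MaybeUninit::new(0u8);
    ///     }
    ///     slice.len()
    /// });
    /// assert_eq!(len, 32);
    /// ```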
    pub fn run<R>(
        &self,
        run: impl FnOnce(&mut [MaybeUninit<T>]) -> R
    ) -> R {
        // Required size to surely have enough space for an aligned allocation.
        let required_size = self.padded_layout().size();

        if required_size <= 8 {
            self.run_with::<[u64; 1], _, _>(run)
        } else if required_size <= 16 {
            self.run_with::<[u64; 2], _, _>(run)
        } else if required_size <= 32 {
            self.run_with::<[u64; 4], _, _>(run)
        } else if required_size <= 64 {
            self.run_with::<[u64; 8], _, _>(run)
        } else if required_size <= 128 {
            self.run_with::<[u64; 16], _, _>(run)
        } else if required_size <= 256 {
            self.run_with::<[u64; 32], _, _>(run)
        } else if required_size <= 512 {
            self.run_with::<[u64; 64], _, _>(run)
        } else if required_size <= 1024 {
            self.run_with::<[u64; 128], _, _>(run)
        } else if required_size <= 2048 {
            self.run_with::<[u64; 256], _, _>(run)
        } else if required_size <= (1 << 12) {
            self.run_with::<[u64; 512], _, _>(run)
        } else if required_size <= (1 << 13) {
            self.run_with::<[u64; 1 << 10], _, _>(run)
        } else if required_size <= (1 << 14) {
            self.run_with::<[u64; 1 << 11], _, _>(run)
        } else if required_size <= (1 << 15) {
            self.run_with::<[u64; 1 << 12], _, _>(run)
        } else if required_size <= (1 << 16) {
            self.run_with::<[u64; 1 << 13], _, _>(run)
        } else if required_size <= (1 << 17) {
            self.run_with::<[u64; 1 << 14], _, _>(run)
        } else if required_size <= (1 << 18) {
            self.run_with::<[u64; 1 << 15], _, _>(run)
        } else if required_size <= (1 << 19) {
            self.run_with::<[u64; 1 << 16], _, _>(run)
        } else if required_size <= (1 << 20) {
            self.run_with::<[u64; 1 << 17], _, _>(run)
        } else {
            panic!("Stack allocation is too big");
        }
    }

    fn run_with<I, R, F: FnOnce(&mut [MaybeUninit<T>]) -> R>(
        &self,
        run: F
    ) -> R {
        use crate::unsync::Bump;
        // Place the backing store `I`, chosen by `run` to cover the padded layout, on the stack
        // and carve an aligned `[MaybeUninit<T>]` of the requested length out of it.
        let mem = Bump::<I>::uninit();
        let slot = mem.bump_array::<T>(self.len).unwrap();
        run(LeakBox::leak(slot))
    }
}

/// Represents an allocation within a Bump.
///
/// This is an owning pointer comparable to `Box`. It drops the contained value when it is dropped
/// itself. The difference is that no deallocation logic is ever executed.
///
/// FIXME(non-breaking): the name is rather confusing. Maybe it should be `BumpBox` or `RefBox`?
/// Not `StackBox` because the value's location in memory is not the defining feature.
///
/// # Usage
///
/// This box can be used to manage one valid instance constructed within the memory provided by a
/// `MaybeUninit` instance.
///
/// ```
/// use core::mem::MaybeUninit;
/// use static_alloc::leaked::LeakBox;
///
/// let mut storage = MaybeUninit::uninit();
/// let leak_box = LeakBox::from(&mut storage);
/// // The string itself is not managed by `static_alloc`.
/// let mut instance = LeakBox::write(leak_box, String::new());
///
/// instance.push_str("Hello world!");
/// ```
///
/// This box is the result of allocating from one of the `Bump` allocators using its explicit API.
///
/// Being a box-like type, an `Option` of it has the same size.
///
/// ```
/// use core::mem::size_of;
/// use static_alloc::leaked::LeakBox;
///
/// type Boxed = LeakBox<'static, usize>;
/// type Optional = Option<Boxed>;
///
/// assert_eq!(size_of::<Boxed>(), size_of::<Optional>());
/// ```
///
/// TODO: On nightly the inner type should be [unsizable][unsize-coercion].
///
/// [unsize-coercion]: https://doc.rust-lang.org/reference/type-coercions.html#coercion-types
pub struct LeakBox<'ctx, T: ?Sized> {
    #[allow(unused)]
    lifetime: AllocTime<'ctx>,
    // Covariance should be OK.
    pointer: NonNull<T>,
}

impl<'ctx, T> LeakBox<'ctx, T> {
    /// Construct from a raw pointer.
    ///
    /// # Safety
    ///
    /// The allocation must be valid for a write of the value. The memory must also outlive the
    /// lifetime `'ctx` and the pointer must not be aliased by any other reference for that scope.
    pub(crate) unsafe fn new_from_raw_non_null(
        pointer: NonNull<T>,
        val: T,
        lifetime: AllocTime<'ctx>,
    ) -> Self {
        // SAFETY:
        // * `pointer` points to an allocation with correct layout for `T`.
        // * It is valid for write as it is the only pointer to it.
        // * The allocation lives for at least `'ctx`.
        core::ptr::write(pointer.as_ptr(), val);
        Self { pointer, lifetime }
    }
}

impl<'ctx, T: ?Sized> LeakBox<'ctx, T> {
    /// Retrieve the raw pointer wrapped by this box.
    ///
    /// After this method the caller is responsible for managing the value in the place behind the
    /// pointer. It will need to be dropped manually.
    ///
    /// # Usage
    ///
    /// You might manually drop the contained instance at a later point.
    ///
    /// ```
    /// use static_alloc::{Bump, leaked::LeakBox};
    ///
    /// # fn fake() -> Option<()> {
    /// let bump: Bump<[usize; 128]> = Bump::uninit();
    /// let leak_box = bump.leak_box(String::from("Hello"))?;
    /// let ptr = LeakBox::into_raw(leak_box);
    ///
    /// unsafe {
    ///     core::ptr::drop_in_place(ptr);
    /// }
    /// # Some(()) }
    /// ```
    ///
    /// An alternative is to later re-wrap the pointer:
    ///
    /// ```
    /// use static_alloc::{Bump, leaked::LeakBox};
    ///
    /// # fn fake() -> Option<()> {
    /// let bump: Bump<[usize; 128]> = Bump::uninit();
    /// let leak_box = bump.leak_box(String::from("Hello"))?;
    /// let ptr = LeakBox::into_raw(leak_box);
    ///
    /// unsafe {
    ///     let _ = LeakBox::from_raw(ptr);
    /// };
    /// # Some(()) }
    /// ```
    pub fn into_raw(this: Self) -> *mut T {
        let this = ManuallyDrop::new(this);
        this.pointer.as_ptr()
    }

    /// Wrap a raw pointer.
    ///
    /// The most immediate use is to rewrap a pointer returned from [`into_raw`].
    ///
    /// [`into_raw`]: #method.into_raw
    ///
    /// # Safety
    ///
    /// The pointer must point to a valid instance of `T` that is not aliased by any other
    /// reference for the lifetime `'ctx`. In particular it must be valid, aligned, and
    /// initialized. Dropping this `LeakBox` will drop the instance, which the caller must also
    /// guarantee to be sound.
    pub unsafe fn from_raw(pointer: *mut T) -> Self {
        debug_assert!(!pointer.is_null(), "Null pointer passed to LeakBox::from_raw");
        LeakBox {
            lifetime: AllocTime::default(),
            pointer: NonNull::new_unchecked(pointer),
        }
    }

    /// Wrap a mutable reference to a complex value as if it were owned.
    ///
    /// # Safety
    ///
    /// The value must be owned by the caller. That is, the mutable reference must not be used
    /// after the `LeakBox` is dropped. In particular the value must not be dropped by the caller.
    ///
    /// # Example
    ///
    /// ```rust
    /// use core::mem::ManuallyDrop;
    /// use static_alloc::leaked::LeakBox;
    ///
    /// fn with_stack_drop<T>(val: T) {
    ///     let mut val = ManuallyDrop::new(val);
    ///     // Safety:
    ///     // - Shadows the variable, rendering the prior binding inaccessible.
    ///     // - Dropping is now the responsibility of `LeakBox`.
    ///     let val = unsafe { LeakBox::from_mut_unchecked(&mut *val) };
    /// }
    ///
    /// // Demonstrate that it is correctly dropped.
    /// let variable = core::cell::RefCell::new(0);
    /// with_stack_drop(variable.borrow_mut());
    /// assert!(variable.try_borrow_mut().is_ok());
    /// ```
    #[allow(unused_unsafe)]
    pub unsafe fn from_mut_unchecked(val: &'ctx mut T) -> Self {
        // SAFETY:
        // * Is a valid instance.
        // * Not aliased, as guaranteed by the mutable reference.
        // * Dropping soundness is guaranteed by the caller.
        // * We don't invalidate any value, nor can the caller.
        unsafe { LeakBox::from_raw(val) }
    }

    /// Leak the instance as a mutable reference.
    ///
    /// After calling this method the value is no longer managed by `LeakBox`. Its `Drop` impl
    /// will not be automatically called.
    ///
    /// # Usage
    ///
    /// ```
    /// use static_alloc::{Bump, leaked::LeakBox};
    ///
    /// # fn fake() -> Option<()> {
    /// let bump: Bump<[usize; 128]> = Bump::uninit();
    /// let leak_box = bump.leak_box(String::from("Hello"))?;
    ///
    /// let st: &mut String = LeakBox::leak(leak_box);
    /// # Some(()) }
    /// ```
    ///
    /// You can't leak past the lifetime of the allocator.
    ///
    /// ```compile_fail
    /// # use static_alloc::{Bump, leaked::LeakBox};
    /// # fn fake() -> Option<()> {
    /// let bump: Bump<[usize; 128]> = Bump::uninit();
    /// let leak_box = bump.leak_box(String::from("Hello"))?;
    /// let st: &mut String = LeakBox::leak(leak_box);
    ///
    /// drop(bump);
    /// // error[E0505]: cannot move out of `bump` because it is borrowed
    /// st.to_lowercase();
    /// //-- borrow later used here
    /// # Some(()) }
    /// ```
    pub fn leak<'a>(this: Self) -> &'a mut T
        where 'ctx: 'a
    {
        let pointer = LeakBox::into_raw(this);
        // SAFETY:
        // * The LeakBox type guarantees this is initialized and not mutably aliased.
        // * For the lifetime 'a which is at most 'ctx.
        unsafe { &mut *pointer }
    }
}

impl<T: 'static> LeakBox<'static, T> {
    /// Pin an instance that's leaked for the remaining program runtime.
    ///
    /// After calling this method the value can only safely be referenced mutably if it is `Unpin`,
    /// otherwise it is only accessible behind a `Pin`. Note that this does _not_ imply that the
    /// `Drop` glue, or explicit `Drop`-impl, is guaranteed to run.
    ///
    /// # Usage
    ///
    /// A decent portion of futures must be _pinned_ before they can be awaited inside another
    /// future. In particular this is required for self-referential futures that store pointers
    /// into their own object's memory. This is the case for the future type of an `async fn` if
    /// there are potentially any stack references when it is suspended/waiting on another future.
    /// Consider this example:
    ///
    /// ```compile_fail
    /// use static_alloc::{Bump, leaked::LeakBox};
    ///
    /// async fn example(x: usize) -> usize {
    ///     // Holding reference across yield point.
    ///     // This requires pinning to run this future.
    ///     let y = &x;
    ///     core::future::ready(()).await;
    ///     *y
    /// }
    ///
    /// static POOL: Bump<[usize; 128]> = Bump::uninit();
    /// let mut future = POOL.leak_box(example(0))
    ///     .expect("Enough space for small async fn");
    ///
    /// let usage = async move {
    ///     // error[E0277]: `GenFuture<[static generator@src/leaked.rs …]>` cannot be unpinned
    ///     let _ = (&mut *future).await;
    /// };
    /// ```
    ///
    /// This method can be used to pin instances allocated from a global pool without requiring the
    /// use of a macro or unsafe on the caller's part. Now, with the correct usage of `into_pin`:
    ///
    /// ```
    /// use static_alloc::{Bump, leaked::LeakBox};
    ///
    /// async fn example(x: usize) -> usize {
    ///     // Holding reference across yield point.
    ///     // This requires pinning to run this future.
    ///     let y = &x;
    ///     core::future::ready(()).await;
    ///     *y
    /// }
    ///
    /// static POOL: Bump<[usize; 128]> = Bump::uninit();
    /// let future = POOL.leak_box(example(0))
    ///     .expect("Enough space for small async fn");
    ///
    /// // PIN this future!
    /// let mut future = LeakBox::into_pin(future);
    ///
    /// let usage = async move {
    ///     let _ = future.as_mut().await;
    /// };
    /// ```
    pub fn into_pin(this: Self) -> Pin<Self> {
        // SAFETY:
        // * This memory is valid for `'static` duration, independent of the fate of `this` and
        //   even when it is forgotten. This trivially implies that any Drop is called before the
        //   memory is invalidated, as required by `Pin`.
        unsafe { Pin::new_unchecked(this) }
    }
}

impl<'ctx, T> LeakBox<'ctx, T> {
    /// Remove the value, forgetting the box in the process.
    ///
    /// This is similar to dereferencing a box (`*leak_box`) but no deallocation is involved. This
    /// becomes useful when the allocator turns out to have too short a lifetime.
    ///
    /// # Usage
    ///
    /// You may want to move a long-lived value out of the current scope where it's been allocated.
    ///
    /// ```
    /// # use core::cell::RefCell;
    /// use static_alloc::{Bump, leaked::LeakBox};
    ///
    /// let cell = RefCell::new(0usize);
    ///
    /// let guard = {
    ///     let bump: Bump<[usize; 128]> = Bump::uninit();
    ///
    ///     let mut leaked = bump.leak_box(cell.borrow_mut()).unwrap();
    ///     **leaked = 1usize;
    ///
    ///     // Take the value, allowing use independent of the lifetime of bump
    ///     LeakBox::take(leaked)
    /// };
    ///
    /// assert!(cell.try_borrow().is_err());
    /// drop(guard);
    /// assert!(cell.try_borrow().is_ok());
    /// ```
    pub fn take(this: Self) -> T {
        // Do not drop this.
        let this = ManuallyDrop::new(this);
        // SAFETY:
        // * `pointer` points to an initialized allocation according to the constructors of
        //   `LeakBox`.
        // * The old value is forgotten and no longer dropped.
        unsafe { core::ptr::read(this.pointer.as_ptr()) }
    }

    /// Wrap a mutable reference to a trivial value as if it were a box.
    ///
    /// This is safe because such values can not have any Drop code and can be duplicated at will.
    ///
    /// The usefulness of this operation is questionable but the author would be delighted to hear
    /// about any actual use case.
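    ///
    /// # Usage
    ///
    /// A minimal sketch with a `Copy` value on the stack:
    ///
    /// ```
    /// use static_alloc::leaked::LeakBox;
    ///
    /// let mut value = 0u32;
    /// let mut boxed = LeakBox::from_mut(&mut value);
    /// *boxed = 42;
    /// drop(boxed);
    /// assert_eq!(value, 42);
    /// ```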
    pub fn from_mut(val: &'ctx mut T) -> Self
    where
        T: Copy
    {
        // SAFETY:
        // * Is a valid instance.
        // * Not aliased, as guaranteed by the mutable reference.
        // * Dropping is a no-op.
        // * We don't invalidate anyone's value.
        unsafe { LeakBox::from_raw(val) }
    }
}

impl<'ctx, T> LeakBox<'ctx, MaybeUninit<T>> {
    /// Write a value into this box, initializing it.
    ///
    /// This can be used to delay the computation of a value until after an allocation succeeded
    /// while maintaining all types necessary for a safe initialization.
    ///
    /// # Usage
    ///
    /// ```
    /// # fn some_expensive_operation() -> [u8; 4] { [0u8; 4] }
    /// # use core::mem::MaybeUninit;
    /// #
    /// # fn fake_main() -> Option<()> {
    /// #
    /// use static_alloc::{Bump, leaked::LeakBox};
    ///
    /// let bump: Bump<[usize; 128]> = Bump::uninit();
    /// let memory = bump.leak_box(MaybeUninit::uninit())?;
    ///
    /// let value = LeakBox::write(memory, some_expensive_operation());
    /// # Some(()) } fn main() {}
    /// ```
    pub fn write(mut this: Self, val: T) -> LeakBox<'ctx, T> {
        unsafe {
            // SAFETY: MaybeUninit<T> is valid for writing a T.
            ptr::write(this.as_mut_ptr(), val);
            // SAFETY: initialized by the write before.
            LeakBox::assume_init(this)
        }
    }

    /// Converts to `LeakBox<T>`.
    ///
    /// # Safety
    ///
    /// The value must have been initialized as required by `MaybeUninit::assume_init`. Calling
    /// this when the content is not yet fully initialized causes immediate undefined behavior.
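    ///
    /// # Usage
    ///
    /// A minimal sketch of manual initialization through the raw pointer; [`write`] above is the
    /// safe shorthand for this pattern.
    ///
    /// [`write`]: #method.write
    ///
    /// ```
    /// use core::mem::MaybeUninit;
    /// use static_alloc::{Bump, leaked::LeakBox};
    ///
    /// # fn fake() -> Option<()> {
    /// let bump: Bump<[usize; 128]> = Bump::uninit();
    /// let mut memory = bump.leak_box(MaybeUninit::<u8>::uninit())?;
    ///
    /// let value = unsafe {
    ///     // Initialize the contents through the `MaybeUninit`.
    ///     memory.as_mut_ptr().write(42u8);
    ///     // SAFETY: the write above fully initialized the `u8`.
    ///     LeakBox::assume_init(memory)
    /// };
    ///
    /// assert_eq!(*value, 42);
    /// # Some(()) }
    /// ```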
    pub unsafe fn assume_init(this: Self) -> LeakBox<'ctx, T> {
        LeakBox {
            pointer: this.pointer.cast(),
            lifetime: this.lifetime,
        }
    }
}

impl<'ctx, T: ?Sized> Deref for LeakBox<'ctx, T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        // SAFETY: constructor guarantees this is initialized and not mutably aliased.
        unsafe { self.pointer.as_ref() }
    }
}

impl<'ctx, T: ?Sized> DerefMut for LeakBox<'ctx, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: constructor guarantees this is initialized and not aliased.
        unsafe { self.pointer.as_mut() }
    }
}

impl<T: ?Sized> Drop for LeakBox<'_, T> {
    fn drop(&mut self) {
        // SAFETY: constructor guarantees this was initialized.
        unsafe { ptr::drop_in_place(self.pointer.as_ptr()) }
    }
}

/// Construct a LeakBox to an existing MaybeUninit.
///
/// The MaybeUninit type is special in that we can treat any unique reference to an owned value as
/// an owned value itself since it has no representational invariants.
impl<'ctx, T> From<&'ctx mut MaybeUninit<T>> for LeakBox<'ctx, MaybeUninit<T>> {
    fn from(uninit: &'ctx mut MaybeUninit<T>) -> Self {
        // SAFETY:
        // * An instance of MaybeUninit is always valid.
        // * The mut reference means it can not be aliased.
        // * Dropping a MaybeUninit is a no-op and can not invalidate any validity or security
        //   invariants of this MaybeUninit or the contained T.
        unsafe { LeakBox::from_raw(uninit) }
    }
}

/// Construct a LeakBox to an existing slice of MaybeUninit.
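///
/// # Usage
///
/// A minimal sketch wrapping a stack array of `Copy` elements:
///
/// ```
/// use core::mem::MaybeUninit;
/// use static_alloc::leaked::LeakBox;
///
/// let mut storage = [MaybeUninit::<u8>::uninit(); 4];
/// let boxed: LeakBox<'_, [MaybeUninit<u8>]> = LeakBox::from(&mut storage[..]);
/// assert_eq!(boxed.len(), 4);
/// ```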
impl<'ctx, T> From<&'ctx mut [MaybeUninit<T>]> for LeakBox<'ctx, [MaybeUninit<T>]> {
    fn from(uninit: &'ctx mut [MaybeUninit<T>]) -> Self {
        // SAFETY:
        // * An instance of MaybeUninit is always valid.
        // * The mut reference means it can not be aliased.
        // * Dropping a MaybeUninit is a no-op and can not invalidate any validity or security
        //   invariants of this MaybeUninit or the contained T.
        unsafe { LeakBox::from_raw(uninit) }
    }
}

impl<T: ?Sized> AsRef<T> for LeakBox<'_, T> {
    fn as_ref(&self) -> &T {
        &**self
    }
}

impl<T: ?Sized> AsMut<T> for LeakBox<'_, T> {
    fn as_mut(&mut self) -> &mut T {
        &mut **self
    }
}

impl<T: fmt::Debug + ?Sized> fmt::Debug for LeakBox<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.as_ref().fmt(f)
    }
}

impl<T: fmt::Display + ?Sized> fmt::Display for LeakBox<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.as_ref().fmt(f)
    }
}

impl<T: ?Sized> fmt::Pointer for LeakBox<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.pointer.fmt(f)
    }
}

impl<T: hash::Hash + ?Sized> hash::Hash for LeakBox<'_, T> {
    fn hash<H: hash::Hasher>(&self, h: &mut H) {
        self.as_ref().hash(h)
    }
}

// TODO: iterators, read, write?