lockedbox/
lib.rs

#![doc = include_str!("../README.md")]
#![warn(clippy::pedantic, clippy::cargo)]
#![no_std]

#[cfg(test)]
extern crate std;

use core::mem::{size_of, ManuallyDrop};
use core::ops::{Deref, DerefMut};
use core::panic::UnwindSafe;
use core::ptr::{self, NonNull};

/// A [`Box`]-like type that uses `mlock` to prevent paging the allocated memory
/// to disk.
///
/// **Note: This type allocates in multiples of the operating system's page
/// size. This could lead to more memory usage than expected if many instances
/// of this type are used.**
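///
/// # Examples
///
/// A minimal usage sketch; the contained value is reached through
/// `Deref`/`DerefMut`:
///
/// ```rust
/// use lockedbox::LockedBox;
///
/// let mut secret = LockedBox::new(42_u32);
/// *secret += 1;
/// assert_eq!(*secret, 43);
/// ```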
pub struct LockedBox<T>(NonNull<T>);

impl<T> LockedBox<T> {
    /// Creates a new locked box with `contained` in a newly allocated,
    /// `mlock`-protected region of memory.
    ///
    /// # Panics
    ///
    /// This function panics if `size_of::<T>() >= usize::MAX - 4 * PAGE_SIZE`
    /// or the underlying allocation fails.
    pub fn new(contained: T) -> Self {
        Self::try_new(contained).expect("unable to allocate locked memory")
    }

    /// Creates a new locked box with `contained` in a newly allocated,
    /// `mlock`-protected region of memory.
    ///
    /// Returns `None` if `size_of::<T>() >= usize::MAX - 4 * PAGE_SIZE` or the
    /// underlying allocation fails.
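    ///
    /// # Examples
    ///
    /// A minimal sketch of handling the fallible path:
    ///
    /// ```rust
    /// use lockedbox::LockedBox;
    ///
    /// if let Some(key) = LockedBox::try_new([0_u8; 32]) {
    ///     assert_eq!(key.len(), 32);
    /// }
    /// ```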
    pub fn try_new(contained: T) -> Option<Self> {
        // SAFETY: no references are made to the data contained by the allocated
        // memory until after `contained` has been written. The size of the
        // allocation is checked by `memsec`.
        let memory = unsafe {
            let memory = memsec::malloc::<T>()?;
            // It is important to lock the memory before storing the value,
            // otherwise the process could be preempted between the write and
            // the mlock calls, and the memory theoretically could be paged to
            // disk during this preemption. By locking before writing, we ensure
            // `contained` is not paged to disk. The stack, however, could still
            // be paged, but that is a problem for another crate to solve.
            memsec::mlock(memory.as_ptr().cast(), size_of::<T>());
            ptr::write(memory.as_ptr(), contained);
            memory
        };
        Some(Self(memory))
    }

    /// Returns the pointer to the underlying data.
    ///
    /// The pointer remains valid until `boxed` is dropped or unboxed.
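    ///
    /// # Examples
    ///
    /// A minimal sketch of reading through the raw pointer while the box is
    /// still alive:
    ///
    /// ```rust
    /// use lockedbox::LockedBox;
    ///
    /// let boxed = LockedBox::new(5_u8);
    /// let raw = LockedBox::ptr(&boxed);
    /// // SAFETY: `boxed` is still alive and not mutably borrowed, so the
    /// // pointer is valid for reads.
    /// assert_eq!(unsafe { *raw }, 5);
    /// ```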
    #[must_use]
    pub const fn ptr(boxed: &LockedBox<T>) -> *mut T {
        boxed.0.as_ptr()
    }

    /// Extracts the contained value from `boxed`.
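    ///
    /// # Examples
    ///
    /// A minimal sketch; the value is moved out and the locked allocation is
    /// freed:
    ///
    /// ```rust
    /// use lockedbox::LockedBox;
    ///
    /// let boxed = LockedBox::new(7_u64);
    /// assert_eq!(LockedBox::unbox(boxed), 7);
    /// ```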
    #[must_use]
    pub fn unbox(boxed: LockedBox<T>) -> T {
        // Prevent our `drop` implementation from double-dropping the contained value.
        let boxed = ManuallyDrop::new(boxed);
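        // SAFETY: The allocation holds an initialized `T`, and the `ManuallyDrop`
        // wrapper above ensures `drop` will not read it or free the allocation a
        // second time.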
        unsafe {
            let contained = ptr::read(boxed.0.as_ptr());
            memsec::free(boxed.0);
            contained
        }
    }
}

// SAFETY: `LockedBox` owns its allocation exclusively, like `Box`, so sharing
// it across threads is safe whenever `T` is `Sync`.
unsafe impl<T> Sync for LockedBox<T> where T: Sync {}
// SAFETY: `LockedBox` owns its allocation exclusively, like `Box`, so sending
// it across threads is safe whenever `T` is `Send`.
unsafe impl<T> Send for LockedBox<T> where T: Send {}
impl<T> UnwindSafe for LockedBox<T> where T: UnwindSafe {}

impl<T> Deref for LockedBox<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        // SAFETY: The borrow checker prevents any invalid reference
        // attempts since the `NonNull` is owned by `LockedBox` and never
        // exposed.
        unsafe { self.0.as_ref() }
    }
}

impl<T> DerefMut for LockedBox<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: The borrow checker prevents any invalid reference
        // attempts since the `NonNull` is owned by `LockedBox` and never
        // exposed.
        unsafe { self.0.as_mut() }
    }
}

impl<T> Drop for LockedBox<T> {
    fn drop(&mut self) {
        // SAFETY: The `NonNull` is exclusively owned by us, and `unbox` uses
        // `ManuallyDrop` to ensure this code isn't executed during that function.
        unsafe {
            ptr::drop_in_place(self.0.as_ptr());
            memsec::free(self.0);
        }
    }
}

#[test]
fn doesnt_crash() {
    let locked = LockedBox::new(1_u8);
    assert_eq!(*locked, 1);
}

#[test]
fn drops_correctly() {
    use std::{cell::RefCell, rc::Rc};

    #[derive(Default)]
    struct Droppable(Rc<RefCell<bool>>);

    impl Drop for Droppable {
        fn drop(&mut self) {
            let mut dropped = (*self.0).borrow_mut();
            assert!(!*dropped, "already dropped");
            *dropped = true;
        }
    }

    // Verify drop is called exactly once when dropping.
    let locked = LockedBox::new(Droppable::default());
    let dropped = (*locked).0.clone();
    drop(locked);
    let mut dropped = Rc::try_unwrap(dropped).expect("Rc has clones");
    assert!(*dropped.get_mut());

    // Verify drop is called exactly once when unboxing.
    let locked = LockedBox::new(Droppable::default());
    let unboxed = LockedBox::unbox(locked);
    drop(unboxed);
}

#[test]
fn allows_zero_sized() {
    LockedBox::new(());
}