flex_alloc_secure/
boxed.rs

1//! Support for memory protection around collection types.
2
3use core::any::type_name;
4use core::cell::UnsafeCell;
5use core::fmt;
6use core::mem::ManuallyDrop;
7use core::ptr::{addr_of, addr_of_mut};
8use core::slice;
9use core::sync::atomic;
10use std::sync::Once;
11
12use chacha20poly1305::{
13    aead::{AeadInPlace, KeyInit},
14    ChaCha8Poly1305,
15};
16use flex_alloc::boxed::Box;
17use rand_core::{OsRng, RngCore};
18use zeroize::ZeroizeOnDrop;
19
20use crate::alloc::{ProtectionMode, SecureAlloc};
21use crate::bytes::FillBytes;
22use crate::protect::{ExposeProtected, SecureRef};
23use crate::vec::SecureVec;
24
/// Size in bytes (16KiB) of the static random buffer used as associated
/// data when encrypting/decrypting [`ShieldedBox`] contents.
const ASSOC_DATA_SIZE: usize = 16384;

/// A [`flex-alloc Box`](flex_alloc::boxed::Box) which is backed by a
/// secured allocator and keeps its contents in physical memory. When
/// released, the allocated memory is securely zeroed.
///
/// This container should be converted into a
/// [`ProtectedBox`] or [`ShieldedBox`] to protect secret data.
///
/// This type does NOT protect against accidental output of
/// contained values using the [`Debug`] trait.
///
/// When possible, prefer initialization of the protected container
/// using the [`ProtectedInit`](`crate::ProtectedInit`) or
/// [`ProtectedInitSlice`](`crate::ProtectedInitSlice`) traits.
pub type SecureBox<T> = Box<T, SecureAlloc>;
41
/// A [`flex-alloc Box`](flex_alloc::boxed::Box) container type which applies
/// additional protections around the memory allocation.
///
/// - The memory is allocated using [`SecureAlloc`] and flagged to remain
///   resident in physical memory (using `mlock`/`VirtualLock`).
/// - When released, the allocated memory is securely zeroed.
/// - When not currently being accessed by the methods of the
///   [`ExposeProtected`] trait, the allocated memory pages are flagged
///   for protection from other processes (using `mprotect`/`VirtualProtect`).
pub struct ProtectedBox<T: ?Sized> {
    // Secured allocation plus reader-coordination state; pages are kept
    // in `NoAccess` mode except while exposed.
    shared: SharedAccess<SecureBox<T>>,
}
54
55impl<T: Default> Default for ProtectedBox<T> {
56    fn default() -> Self {
57        Self::from(SecureBox::default())
58    }
59}
60
61impl<T: ?Sized> fmt::Debug for ProtectedBox<T> {
62    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
63        f.write_fmt(format_args!("ProtectedBox<{}>", type_name::<T>()))
64    }
65}
66
impl<T: ?Sized> Drop for ProtectedBox<T> {
    fn drop(&mut self) {
        // Restore read-write access before the inner SecureBox drops, so
        // that zeroization and deallocation of the pages can proceed.
        BoxData::for_boxed(self.shared.as_mut()).set_protection_mode(ProtectionMode::ReadWrite);
    }
}
72
impl<T: ?Sized> ExposeProtected for ProtectedBox<T> {
    type Target = T;

    /// Expose the protected value for reading. The first concurrent
    /// reader flips the pages to read-only; the last reader out flips
    /// them back to no-access.
    fn expose_read<F>(&self, f: F)
    where
        F: FnOnce(SecureRef<&T>),
    {
        let shared = &self.shared;
        let data = shared.acquire_read(|boxed| {
            // Runs only for the first active reader (see SharedAccess).
            BoxData::for_boxed(boxed).set_protection_mode(ProtectionMode::ReadOnly);
        });
        // Guard re-protects the pages even if `f` panics.
        let guard = OnDrop::new(|| {
            shared.release_read(|boxed| {
                // Runs only for the last reader leaving.
                BoxData::for_boxed(boxed).set_protection_mode(ProtectionMode::NoAccess);
            });
        });
        f(SecureRef::new(data));
        drop(guard);
    }

    /// Expose the protected value for mutation. Requires exclusive
    /// access (`&mut self`), so no reader coordination is needed.
    fn expose_write<F>(&mut self, f: F)
    where
        F: FnOnce(SecureRef<&mut Self::Target>),
    {
        let boxed = self.shared.as_mut();
        let mut data = BoxData::for_boxed(boxed);
        data.set_protection_mode(ProtectionMode::ReadWrite);
        // Guard restores no-access even if `f` panics.
        let guard = OnDrop::new(|| {
            data.set_protection_mode(ProtectionMode::NoAccess);
        });
        f(SecureRef::new_mut(boxed.as_mut()));
        drop(guard);
    }

    /// Consume the wrapper and return the inner `SecureBox` with the
    /// page protections removed.
    fn unprotect(self) -> SecureBox<Self::Target> {
        // Wrap in ManuallyDrop so `Drop for ProtectedBox` does not run,
        // then move `shared` out via a raw pointer read.
        let mut shared = unsafe { addr_of!(ManuallyDrop::new(self).shared).read() };
        BoxData::for_boxed(shared.as_mut()).set_protection_mode(ProtectionMode::ReadWrite);
        shared.into_inner()
    }
}
113
impl<T> From<T> for ProtectedBox<T> {
    fn from(value: T) -> Self {
        // Move the value into a secured allocation, then protect it.
        Self::from(SecureBox::from(value))
    }
}

impl<T: Clone> From<&[T]> for ProtectedBox<[T]> {
    fn from(data: &[T]) -> Self {
        // Copy the slice into a secured allocation, then protect it.
        Self::from(SecureBox::from(data))
    }
}

impl<T, const N: usize> From<[T; N]> for ProtectedBox<[T]> {
    fn from(data: [T; N]) -> Self {
        // Move the array into a secured boxed slice, then protect it.
        Self::from(SecureBox::from(data))
    }
}

impl From<&str> for ProtectedBox<str> {
    fn from(data: &str) -> Self {
        // Copy the string into a secured allocation, then protect it.
        Self::from(SecureBox::from(data))
    }
}
137
impl<T: ?Sized> From<SecureBox<T>> for ProtectedBox<T> {
    fn from(boxed: SecureBox<T>) -> Self {
        let mut wrapper = Self {
            shared: SharedAccess::new(boxed),
        };
        // Flag the pages inaccessible until exposed via ExposeProtected.
        BoxData::for_boxed(wrapper.shared.as_mut()).set_protection_mode(ProtectionMode::NoAccess);
        wrapper
    }
}
147
impl<T> From<SecureVec<T>> for ProtectedBox<[T]> {
    fn from(vec: SecureVec<T>) -> Self {
        // Convert the vector into a boxed slice, then protect it.
        Self::from(vec.into_boxed_slice())
    }
}

// SAFETY: the wrapper owns its allocation and concurrent read access is
// coordinated through `SharedAccess` atomics, so thread-safety follows
// from `T`'s own `Send`/`Sync` bounds.
unsafe impl<T: Send + ?Sized> Send for ProtectedBox<T> {}
unsafe impl<T: Sync + ?Sized> Sync for ProtectedBox<T> {}
156
157impl<T: ?Sized> ZeroizeOnDrop for ProtectedBox<T> {}
158
/// A [`flex-alloc Box`](flex_alloc::boxed::Box) container type which applies
/// additional protections around the allocated memory, and is encrypted when
/// not currently being accessed.
///
/// - The memory is allocated using [`SecureAlloc`] and flagged to remain
///   resident in physical memory (using `mlock`/`VirtualLock`).
/// - When released, the allocated memory is securely zeroed.
/// - When not currently being accessed by the methods of the
///   [`ExposeProtected`] trait, the allocated memory pages are flagged
///   for protection from other processes using (`mprotect`/`VirtualProtect`).
/// - When not currently being accessed, the allocated memory is
///   encrypted using the ChaCha8 encryption cipher. A large (16Kb)
///   buffer of randomized bytes is used as associated data during the
///   encryption and decryption process.
pub struct ShieldedBox<T: ?Sized> {
    // Secured allocation plus reader-coordination state.
    shared: SharedAccess<SecureBox<T>>,
    // Per-box random ChaCha8Poly1305 key used to encrypt contents at rest.
    key: chacha20poly1305::Key,
    // Authentication tag from the most recent encryption; checked on decrypt.
    tag: chacha20poly1305::Tag,
}
178
179impl<T: Default> Default for ShieldedBox<T> {
180    fn default() -> Self {
181        Self::from(SecureBox::default())
182    }
183}
184
185impl<T: ?Sized> fmt::Debug for ShieldedBox<T> {
186    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
187        f.write_fmt(format_args!("ShieldedBox<{}>", type_name::<T>()))
188    }
189}
190
impl<T: ?Sized> Drop for ShieldedBox<T> {
    fn drop(&mut self) {
        // Restore read-write access so the inner SecureBox can zeroize
        // and free its pages. The contents remain encrypted here; only
        // the ciphertext is zeroized on release.
        BoxData::for_boxed(self.shared.as_mut()).set_protection_mode(ProtectionMode::ReadWrite);
    }
}
196
impl<T: ?Sized> ExposeProtected for ShieldedBox<T> {
    type Target = T;

    /// Expose the shielded value for reading. The first concurrent reader
    /// unprotects and decrypts the contents in place; the last reader out
    /// re-encrypts and re-protects them.
    fn expose_read<F>(&self, f: F)
    where
        F: FnOnce(SecureRef<&T>),
    {
        let shared = &self.shared;
        let expose = shared.acquire_read(|boxed| {
            // Runs only for the first active reader (see SharedAccess).
            let mut data = BoxData::for_boxed(boxed);
            data.set_protection_mode(ProtectionMode::ReadWrite);
            data.decrypt(&self.key, self.tag);
        });
        // Guard restores the shielded state even if `f` panics.
        let guard = OnDrop::new(|| {
            shared.release_read(|boxed| {
                // Runs only for the last reader leaving.
                let mut data = BoxData::for_boxed(boxed);
                data.set_protection_mode(ProtectionMode::ReadWrite);
                // Re-encrypting unchanged plaintext with the same key (and
                // the same default nonce) must reproduce the original tag;
                // a mismatch means the contents changed while read-only.
                let tag = data.encrypt(&self.key);
                if self.tag != tag {
                    panic!("Unshielded box was modified while read-only");
                }
                data.set_protection_mode(ProtectionMode::NoAccess);
            });
        });
        f(SecureRef::new(expose));
        drop(guard);
    }

    /// Expose the shielded value for mutation; the contents are decrypted
    /// for the duration of `f` and re-encrypted (recording a fresh tag)
    /// afterwards, even if `f` panics.
    fn expose_write<F>(&mut self, f: F)
    where
        F: FnOnce(SecureRef<&mut Self::Target>),
    {
        let boxed = self.shared.as_mut();
        let mut data = BoxData::for_boxed(boxed);
        data.set_protection_mode(ProtectionMode::ReadWrite);
        data.decrypt(&self.key, self.tag);
        let guard = OnDrop::new(|| {
            // The contents may have changed, so store the new tag.
            self.tag = data.encrypt(&self.key);
            data.set_protection_mode(ProtectionMode::NoAccess);
        });
        f(SecureRef::new_mut(boxed.as_mut()));
        drop(guard);
    }

    /// Consume the wrapper, returning the decrypted, unprotected inner
    /// `SecureBox`.
    fn unprotect(self) -> SecureBox<Self::Target> {
        // Copy key/tag out first; then suppress `Drop for ShieldedBox`
        // with ManuallyDrop and move `shared` out via a raw pointer read.
        let (key, tag) = (self.key, self.tag);
        let mut shared = unsafe { addr_of!(ManuallyDrop::new(self).shared).read() };
        let mut data = BoxData::for_boxed(shared.as_mut());
        data.set_protection_mode(ProtectionMode::ReadWrite);
        data.decrypt(&key, tag);
        shared.into_inner()
    }
}
250
impl<T> From<T> for ShieldedBox<T> {
    fn from(value: T) -> Self {
        // Move the value into a secured allocation, then shield it.
        Self::from(SecureBox::from(value))
    }
}

impl<T: Clone> From<&[T]> for ShieldedBox<[T]> {
    fn from(data: &[T]) -> Self {
        // Copy the slice into a secured allocation, then shield it.
        Self::from(SecureBox::from(data))
    }
}

impl<T, const N: usize> From<[T; N]> for ShieldedBox<[T]> {
    fn from(data: [T; N]) -> Self {
        // Move the array into a secured boxed slice, then shield it.
        Self::from(SecureBox::from(data))
    }
}

impl From<&str> for ShieldedBox<str> {
    fn from(data: &str) -> Self {
        // Copy the string into a secured allocation, then shield it.
        Self::from(SecureBox::from(data))
    }
}
274
impl<T: ?Sized> From<SecureBox<T>> for ShieldedBox<T> {
    fn from(boxed: SecureBox<T>) -> Self {
        let mut wrapper = Self {
            shared: SharedAccess::new(boxed),
            key: Default::default(),
            tag: Default::default(),
        };
        // Each box gets a fresh random key from the OS RNG; the tag
        // authenticates the current ciphertext and is refreshed on
        // every re-encryption.
        wrapper.key.fill_random(OsRng);
        let mut data = BoxData::for_boxed(wrapper.shared.as_mut());
        wrapper.tag = data.encrypt(&wrapper.key);
        // Flag the pages inaccessible until exposed via ExposeProtected.
        data.set_protection_mode(ProtectionMode::NoAccess);
        wrapper
    }
}
289
impl<T> From<SecureVec<T>> for ShieldedBox<[T]> {
    fn from(vec: SecureVec<T>) -> Self {
        // Convert the vector into a boxed slice, then shield it.
        Self::from(vec.into_boxed_slice())
    }
}

// SAFETY: the wrapper owns its allocation and concurrent read access is
// coordinated through `SharedAccess` atomics, so thread-safety follows
// from `T`'s own `Send`/`Sync` bounds.
unsafe impl<T: Send + ?Sized> Send for ShieldedBox<T> {}
unsafe impl<T: Sync + ?Sized> Sync for ShieldedBox<T> {}

// The inner SecureBox securely zeroes its allocation when released.
impl<T: ?Sized> ZeroizeOnDrop for ShieldedBox<T> {}
300
/// Guard which runs a closure when dropped (including during unwinding),
/// used above to restore memory protections after exposing contents.
///
/// Marked `#[must_use]`: constructing the guard without binding it would
/// drop it immediately and run the handler too early.
#[must_use]
struct OnDrop<F: FnOnce()>(Option<F>);

impl<F: FnOnce()> OnDrop<F> {
    /// Create a guard around the given drop handler.
    pub fn new(f: F) -> Self {
        Self(Some(f))
    }
}

impl<F: FnOnce()> Drop for OnDrop<F> {
    #[inline]
    fn drop(&mut self) {
        // `take` guarantees the handler runs at most once.
        if let Some(f) = self.0.take() {
            f()
        }
    }
}
317
/// A minimal spin-lock-plus-reader-count primitive used to coordinate
/// concurrent `expose_read` calls over a shared allocation.
///
/// The atomic `refs` value packs two fields:
/// - bit 0: a lock bit, held while an acquire/release callback runs;
/// - the remaining bits: the active reader count, in increments of 2.
struct SharedAccess<T> {
    data: UnsafeCell<T>,
    refs: atomic::AtomicUsize,
}

impl<T> SharedAccess<T> {
    const fn new(data: T) -> Self {
        Self {
            data: UnsafeCell::new(data),
            refs: atomic::AtomicUsize::new(0),
        }
    }

    /// Register a reader, invoking `acquire` only if this is the first
    /// active reader. Spins (yielding periodically) while another thread
    /// holds the lock bit.
    fn acquire_read(&self, acquire: impl FnOnce(&mut T)) -> &T {
        let mut rounds = 0;
        loop {
            // Attempt to take the lock bit; `prev` tells us the state.
            let prev = self.refs.fetch_or(1, atomic::Ordering::Acquire);
            if prev == 0 {
                // our responsibility to acquire
                let data = unsafe { &mut *self.data.get() };
                acquire(data);
                // any other readers are queued; store count=1 reader (2)
                // with the lock bit cleared.
                self.refs.store(2, atomic::Ordering::Release);
                break;
            } else if prev & 1 == 0 {
                // Guard against the reader count overflowing into the
                // sign bit.
                if (prev + 2) >> (usize::BITS - 1) != 0 {
                    panic!("exceeded maximum number of references");
                }
                // other readers could leave while lock is held.
                // Adding 1 to the (now odd) value clears the lock bit and
                // bumps the reader count by one increment of 2.
                self.refs.fetch_add(1, atomic::Ordering::Release);
                break;
            } else {
                // busy loop: another thread holds the lock bit.
                rounds += 1;
                if rounds >= 100 {
                    std::thread::yield_now();
                    rounds = 0;
                }
            }
        }
        unsafe { &*self.data.get() }
    }

    /// Unregister a reader, invoking `release` only if this was the last
    /// active reader.
    fn release_read(&self, release: impl FnOnce(&mut T)) {
        let prev = self.refs.fetch_or(1, atomic::Ordering::Acquire);
        if prev == 2 {
            // our responsibility to release: we were the sole reader and
            // no one held the lock.
            let data = unsafe { &mut *self.data.get() };
            release(data);
            self.refs.store(0, atomic::Ordering::Release);
        } else if prev & 1 == 0 {
            // acquired lock, but not our responsibility: drop our reader
            // increment (2) plus the lock bit (1) we just set.
            self.refs.fetch_sub(3, atomic::Ordering::Release);
        } else {
            // did not acquire lock, but we can leave: drop our reader
            // increment only.
            self.refs.fetch_sub(2, atomic::Ordering::Release);
        }
    }

    /// Exclusive access to the contained value (no atomics needed).
    pub fn as_mut(&mut self) -> &mut T {
        self.data.get_mut()
    }

    /// Consume the wrapper and return the contained value.
    pub fn into_inner(self) -> T {
        self.data.into_inner()
    }
}
385
/// Raw pointer/length view of a `SecureBox` allocation, used to apply
/// page protection and in-place encryption without holding a borrow.
struct BoxData {
    ptr: *mut u8,
    len: usize,
}

impl BoxData {
    /// Capture the address and byte length of the boxed value (works for
    /// unsized targets such as slices and `str` via `size_of_val`).
    #[inline]
    pub fn for_boxed<T: ?Sized>(boxed: &mut SecureBox<T>) -> Self {
        let len = size_of_val(boxed.as_ref());
        let ptr = boxed.as_mut_ptr() as *mut u8;
        Self { ptr, len }
    }

    /// Apply the given protection mode to the allocation's pages.
    ///
    /// Panics if the underlying page-protection call fails.
    pub fn set_protection_mode(&mut self, mode: ProtectionMode) {
        SecureAlloc
            .set_page_protection(self.ptr, self.len, mode)
            .expect("Error setting page protection");
    }

    /// Encrypt the contents in place with ChaCha8-Poly1305, returning the
    /// authentication tag. A default (all-zero) nonce is used; NOTE(review):
    /// this appears to rely on each box having its own freshly randomized
    /// key (see `From<SecureBox<T>> for ShieldedBox<T>`) — confirm.
    #[must_use]
    fn encrypt(&mut self, key: &chacha20poly1305::Key) -> chacha20poly1305::Tag {
        let buffer = unsafe { slice::from_raw_parts_mut(self.ptr, self.len) };
        let engine = ChaCha8Poly1305::new(key);
        let nonce = Default::default();
        engine
            .encrypt_in_place_detached(&nonce, encryption_assoc_data(), buffer)
            .expect("Shielded box encryption error")
    }

    /// Decrypt the contents in place, verifying the tag produced by the
    /// matching `encrypt` call.
    ///
    /// Panics if authentication fails.
    pub fn decrypt(&mut self, key: &chacha20poly1305::Key, tag: chacha20poly1305::Tag) {
        let buffer = unsafe { slice::from_raw_parts_mut(self.ptr, self.len) };
        let engine = ChaCha8Poly1305::new(key);
        let nonce = Default::default();
        engine
            .decrypt_in_place_detached(&nonce, encryption_assoc_data(), buffer, &tag)
            .expect("Shielded box decryption error")
    }
}
424
425fn encryption_assoc_data() -> &'static [u8; ASSOC_DATA_SIZE] {
426    static mut DATA: [u8; ASSOC_DATA_SIZE] = [0u8; ASSOC_DATA_SIZE];
427    static INIT: Once = Once::new();
428    INIT.call_once(|| {
429        OsRng.fill_bytes(unsafe { &mut *addr_of_mut!(DATA) });
430    });
431    unsafe { &*addr_of!(DATA) }
432}
433
#[cfg(test)]
mod tests {

    use super::{
        encryption_assoc_data, ExposeProtected, ProtectedBox, ShieldedBox, ASSOC_DATA_SIZE,
    };
    use crate::vec::SecureVec;

    // The associated-data buffer must be randomized (non-zero) after
    // first access.
    #[test]
    fn enc_assoc_data_init() {
        let data = encryption_assoc_data();
        assert_ne!(data, &[0u8; ASSOC_DATA_SIZE]);
    }

    // Round-trip a value through read and write exposure on a ProtectedBox.
    #[test]
    fn protected_mut() {
        let mut prot = ProtectedBox::<usize>::default();
        prot.expose_read(|r| {
            assert_eq!(r.as_ref(), &0);
        });
        prot.expose_write(|mut w| {
            *w = 10;
        });
        prot.expose_read(|r| {
            assert_eq!(r.as_ref(), &10);
        });
    }

    // Round-trip a value through read and write exposure on a ShieldedBox,
    // exercising the decrypt/encrypt cycle.
    #[test]
    fn shielded_mut() {
        let mut prot = ShieldedBox::<usize>::default();
        prot.expose_read(|r| {
            assert_eq!(r.as_ref(), &0);
        });
        prot.expose_write(|mut w| {
            *w = 10;
        });
        prot.expose_read(|r| {
            assert_eq!(r.as_ref(), &10);
        });
    }

    // #[test]
    // fn protected_ref_count() {
    //     let prot = ProtectedBox::<usize>::default();
    //     prot.expose_read(|r1| {
    //         assert_eq!(prot.refs.load(atomic::Ordering::Relaxed), 3);
    //         prot.expose_read(|r2| {
    //             assert_eq!(prot.refs.load(atomic::Ordering::Relaxed), 5);
    //         });
    //         assert_eq!(prot.refs.load(atomic::Ordering::Relaxed), 3);
    //     });
    //     assert_eq!(prot.refs.load(atomic::Ordering::Relaxed), 0);
    // }

    // A SecureVec converts into a protected boxed slice of the same length.
    #[test]
    fn protected_vec() {
        let mut vec = SecureVec::new();
        vec.resize(100, 1usize);
        let boxed = ProtectedBox::<[usize]>::from(vec);
        boxed.expose_read(|r| {
            assert_eq!(r.len(), 100);
        });
    }

    // #[test]
    // fn protected_check_protection_crash() {
    //     use crate::boxed::SecureBox;
    //     let boxed = SecureBox::<usize>::default();
    //     let ptr = boxed.as_ptr();
    //     let prot = boxed.protect();
    //     // let read = prot.read_protected(); // would change protection mode
    //     println!("inner: {}", unsafe { &*ptr });
    // }
}