#![allow(unsafe_code)]
use crate::ffi::sodium;
use crate::traits::*;
use std::cell::Cell;
use std::fmt::{self, Debug};
use std::ptr::NonNull;
use std::slice;
use std::thread;
/// The memory-protection state of a [`Box`]'s underlying allocation.
///
/// Transitions are driven by `Box::retain` / `Box::release`, which call
/// into libsodium's `mprotect_*` functions to enforce the state in
/// hardware page protections, not just in bookkeeping.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Prot {
    /// Pages may be neither read nor written (the locked, at-rest state).
    NoAccess,
    /// Pages may be read but not written.
    ReadOnly,
    /// Pages may be read and written (also the state during initialization).
    ReadWrite,
}
type RefCount = u8;
/// A heap allocation of `len` elements of `T`, obtained from libsodium's
/// guarded allocator and kept `mprotect`ed to `NoAccess` except while
/// explicitly unlocked.
///
/// `PartialEq` is implemented manually (constant-time) below; `Eq` can be
/// derived since it adds no methods.
#[derive(Eq)]
pub(crate) struct Box<T: Bytes> {
    // Pointer to the sodium-allocated array; never dangling after `new_unlocked`.
    ptr: NonNull<T>,
    // Number of `T` elements allocated (not bytes).
    len: usize,
    // Current protection level; `Cell` because `unlock`/`lock` take `&self`.
    prot: Cell<Prot>,
    // Count of outstanding unlocks; `Cell` for the same reason.
    refs: Cell<RefCount>,
}
impl<T: Bytes> Box<T> {
    /// Allocates a new `Box` of `len` elements, runs `init` on it while it
    /// is still writable, then locks it (`NoAccess`) before returning.
    ///
    /// NOTE(review): `proven!`, `tested!`, and `never!` appear to be
    /// project-local debug-assertion/coverage macros — confirm their exact
    /// release-mode behavior before relying on them for safety.
    pub(crate) fn new<F>(len: usize, init: F) -> Self
    where
        F: FnOnce(&mut Self),
    {
        let mut boxed = Self::new_unlocked(len);
        proven!(boxed.ptr != NonNull::dangling());
        proven!(boxed.len == len);
        init(&mut boxed);
        boxed.lock();
        boxed
    }

    /// Fallible variant of [`new`](Self::new): the allocation is locked
    /// whether or not `init` succeeds, and the `Box` is only returned on
    /// `Ok`. On `Err`, the locked box is dropped here.
    pub(crate) fn try_new<U, E, F>(len: usize, init: F) -> Result<Self, E>
    where
        F: FnOnce(&mut Self) -> Result<U, E>
    {
        let mut boxed = Self::new_unlocked(len);
        proven!(boxed.ptr != NonNull::dangling());
        proven!(boxed.len == len);
        let result = init(&mut boxed);
        boxed.lock();
        result.map(|_| boxed)
    }

    /// Number of `T` elements held (not a byte count).
    #[allow(clippy::missing_const_for_fn)]
    pub(crate) fn len(&self) -> usize {
        self.len
    }

    /// True when the box holds zero elements.
    #[allow(clippy::missing_const_for_fn)]
    pub(crate) fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Total size of the contents in bytes (`len * T::size()`).
    pub(crate) fn size(&self) -> usize {
        self.len * T::size()
    }

    /// Unlocks the box for reading; must be balanced by a later `lock`.
    pub(crate) fn unlock(&self) -> &Self {
        self.retain(Prot::ReadOnly);
        self
    }

    /// Unlocks the box for writing; must be balanced by a later `lock`.
    /// Only one mutable unlock may be outstanding (enforced in `retain`).
    pub(crate) fn unlock_mut(&mut self) -> &mut Self {
        self.retain(Prot::ReadWrite);
        self
    }

    /// Releases one outstanding unlock; when the count reaches zero the
    /// pages are re-protected to `NoAccess`.
    pub(crate) fn lock(&self) {
        self.release();
    }

    /// Borrows the (single) contained value. Caller must hold an unlock.
    pub(crate) fn as_ref(&self) -> &T {
        // Zero-length boxes have nothing valid to point at.
        never!(self.is_empty(),
            "secrets: attempted to dereference a zero-length pointer");
        proven!(self.prot.get() != Prot::NoAccess,
            "secrets: may not call Box::as_ref while locked");
        unsafe { self.ptr.as_ref() }
    }

    /// Mutably borrows the contained value. Caller must hold a mutable unlock.
    pub(crate) fn as_mut(&mut self) -> &mut T {
        never!(self.is_empty(),
            "secrets: attempted to dereference a zero-length pointer");
        proven!(self.prot.get() == Prot::ReadWrite,
            "secrets: may not call Box::as_mut unless mutably unlocked");
        unsafe { self.ptr.as_mut() }
    }

    /// Borrows the contents as a slice. Caller must hold an unlock.
    /// (A zero-length slice is fine here, unlike `as_ref`.)
    pub(crate) fn as_slice(&self) -> &[T] {
        proven!(self.prot.get() != Prot::NoAccess,
            "secrets: may not call Box::as_slice while locked");
        unsafe {
            slice::from_raw_parts(
                self.ptr.as_ptr(),
                self.len,
            )
        }
    }

    /// Mutably borrows the contents as a slice. Caller must hold a mutable
    /// unlock.
    pub(crate) fn as_mut_slice(&mut self) -> &mut [T] {
        proven!(self.prot.get() == Prot::ReadWrite,
            "secrets: may not call Box::as_mut_slice unless mutably unlocked");
        unsafe {
            slice::from_raw_parts_mut(
                self.ptr.as_ptr(),
                self.len,
            )
        }
    }

    /// Allocates via sodium's guarded allocator and returns the box in the
    /// writable (`ReadWrite`, refs == 1) state, ready for initialization.
    /// Panics if libsodium fails to initialize or allocate.
    fn new_unlocked(len: usize) -> Self {
        tested!(len == 0);
        tested!(size_of::<T>() == 0);
        assert!(sodium::init(), "secrets: failed to initialize libsodium");
        let ptr = NonNull::new(unsafe { sodium::allocarray::<T>(len) })
            .expect("secrets: failed to allocate memory");
        Self {
            ptr,
            len,
            prot: Cell::new(Prot::ReadWrite),
            refs: Cell::new(1),
        }
    }

    /// Increments the unlock count, applying `prot` via `mprotect` on the
    /// first (0 -> 1) retain. Subsequent retains may only add readers, and
    /// only while the current protection is `ReadOnly`.
    fn retain(&self, prot: Prot) {
        let refs = self.refs.get();
        tested!(refs == RefCount::min_value());
        tested!(refs == RefCount::max_value());
        tested!(prot == Prot::NoAccess);
        if refs == 0 {
            // First retain: record the requested protection, then enforce it.
            proven!(prot != Prot::NoAccess,
                "secrets: must retain readably or writably");
            self.prot.set(prot);
            mprotect(self.ptr.as_ptr(), prot);
        } else {
            // Nested retain: only additional read-only access is legal.
            proven!(Prot::NoAccess != self.prot.get(),
                "secrets: out-of-order retain/release detected");
            proven!(Prot::ReadWrite != self.prot.get(),
                "secrets: cannot unlock mutably more than once");
            proven!(Prot::ReadOnly == prot,
                "secrets: cannot unlock mutably while unlocked immutably");
        }
        // Overflow of the u8 counter is always a caller bug; distinguish
        // "too many retains" from "retain while locked" for diagnostics.
        match refs.checked_add(1) {
            Some(v) => self.refs.set(v),
            None if self.is_locked() => panic!("secrets: out-of-order retain/release detected"),
            None => panic!("secrets: retained too many times"),
        }
    }

    /// Decrements the unlock count; re-locks the pages when it hits zero.
    /// Underflow is caught by the debug assertions; `wrapping_sub` keeps
    /// release builds from panicking on the subtraction itself.
    fn release(&self) {
        proven!(self.refs.get() != 0,
            "secrets: releases exceeded retains");
        proven!(self.prot.get() != Prot::NoAccess,
            "secrets: releasing memory that's already locked");
        let refs = self.refs.get().wrapping_sub(1);
        self.refs.set(refs);
        if refs == 0 {
            // Enforce NoAccess first, then record it.
            mprotect(self.ptr.as_ptr(), Prot::NoAccess);
            self.prot.set(Prot::NoAccess);
        }
    }

    /// True when the pages are currently protected to `NoAccess`.
    fn is_locked(&self) -> bool {
        self.prot.get() == Prot::NoAccess
    }
}
impl<T: Bytes + Randomizable> Box<T> {
    /// Creates a new `Box` of `len` elements whose contents are filled
    /// with random data before the box is locked.
    pub(crate) fn random(len: usize) -> Self {
        Self::new(len, |boxed| {
            boxed.as_mut_slice().randomize();
        })
    }
}
impl<T: Bytes + Zeroable> Box<T> {
    /// Creates a new `Box` of `len` elements whose contents are zeroed
    /// before the box is locked.
    pub(crate) fn zero(len: usize) -> Self {
        Self::new(len, |boxed| {
            boxed.as_mut_slice().zero();
        })
    }
}
impl<T: Bytes> Drop for Box<T> {
    fn drop(&mut self) {
        // Skip the balance checks while unwinding: a panic mid-unlock
        // legitimately leaves the box retained, and a double panic aborts.
        if !thread::panicking() {
            proven!(self.refs.get() == 0,
                "secrets: retains exceeded releases");
            proven!(self.prot.get() == Prot::NoAccess,
                "secrets: dropped secret was still accessible");
        }
        // Return the allocation to sodium's guarded allocator, which also
        // handles zeroing/unprotecting (see sodium_free semantics).
        unsafe { sodium::free(self.ptr.as_mut()) }
    }
}
impl<T: Bytes> Debug for Box<T> {
    /// Never prints the secret contents — only how many bytes are hidden.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        let bytes = self.size();
        write!(fmt, "{{ {} bytes redacted }}", bytes)
    }
}
impl<T: Bytes> Clone for Box<T> {
    /// Allocates a fresh box of the same length and copies the contents
    /// across, unlocking the source for reading only for the duration of
    /// the copy.
    fn clone(&self) -> Self {
        Self::new(self.len, |dst| {
            let src = self.unlock().as_slice();
            dst.as_mut_slice().copy_from_slice(src);
            self.lock();
        })
    }
}
impl<T: Bytes + ConstantEq> PartialEq for Box<T> {
    /// Constant-time comparison of the contents. Length is compared first
    /// (and early-returned on mismatch) since it is not secret.
    fn eq(&self, other: &Self) -> bool {
        if self.len != other.len {
            return false;
        }
        // Unlock both sides for reading, compare, then re-lock both.
        let equal = self
            .unlock()
            .as_slice()
            .constant_eq(other.unlock().as_slice());
        self.lock();
        other.lock();
        equal
    }
}
impl<T: Bytes + Zeroable> From<&mut T> for Box<T> {
    /// Moves the value into a new one-element `Box`, zeroing the source
    /// (via `transfer`) so the secret no longer lives outside the box.
    fn from(data: &mut T) -> Self {
        Self::new(1, |b| {
            // NOTE(review): `let _ = &data;` looks like it forces the
            // closure's capture mode for `data` — confirm before removing.
            let _ = &data; unsafe { data.transfer(b.as_mut()) }
        })
    }
}
impl<T: Bytes + Zeroable> From<&mut [T]> for Box<T> {
    /// Moves the slice's contents into a new `Box` of the same length,
    /// zeroing the source (via `transfer`) in the process.
    fn from(data: &mut [T]) -> Self {
        Self::new(data.len(), |b| {
            // NOTE(review): `let _ = &data;` looks like it forces the
            // closure's capture mode for `data` — confirm before removing.
            let _ = &data; unsafe { data.transfer(b.as_mut_slice()) }
        })
    }
}
unsafe impl<T: Bytes + Send> Send for Box<T> {}
/// Applies the page protection `prot` to the sodium-allocated region at
/// `ptr` by dispatching to the matching libsodium `mprotect_*` call.
///
/// Panics if libsodium reports failure.
fn mprotect<T>(ptr: *mut T, prot: Prot) {
    let success = match prot {
        Prot::NoAccess => unsafe { sodium::mprotect_noaccess(ptr) },
        Prot::ReadOnly => unsafe { sodium::mprotect_readonly(ptr) },
        Prot::ReadWrite => unsafe { sodium::mprotect_readwrite(ptr) },
    };
    assert!(success, "secrets: error setting memory protection to {prot:?}");
}
#[cfg(test)]
mod tests {
    use super::*;

    // The init callback can write contents before the box locks.
    #[test]
    fn it_allows_custom_initialization() {
        let boxed = Box::<u8>::new(1, |secret| {
            secret.as_mut_slice().clone_from_slice(b"\x04");
        });
        assert_eq!(boxed.unlock().as_slice(), [0x04]);
        boxed.lock();
    }

    // With a no-op init, contents should be sodium's garbage fill pattern,
    // not zeroes.
    #[test]
    fn it_initializes_with_garbage() {
        let boxed = Box::<u8>::new(4, |_| {});
        let unboxed = boxed.unlock().as_slice();
        // Discover the fill byte by allocating and inspecting a fresh byte.
        let garbage = unsafe {
            let garbage_ptr = sodium::allocarray::<u8>(1);
            let garbage_byte = *garbage_ptr;
            sodium::free(garbage_ptr);
            vec![garbage_byte; unboxed.len()]
        };
        // Sanity check that the garbage pattern isn't zero.
        assert_ne!(garbage, vec![0; garbage.len()]);
        assert_eq!(unboxed, &garbage[..]);
        boxed.lock();
    }

    // Box::zero really zeroes the contents.
    #[test]
    fn it_initializes_with_zero() {
        let boxed = Box::<u32>::zero(4);
        assert_eq!(boxed.unlock().as_slice(), [0, 0, 0, 0]);
        boxed.lock();
    }

    // From<&mut [T]> moves the data in and zeroes the source.
    #[test]
    fn it_initializes_from_values() {
        let mut value = [4_u64];
        let boxed = Box::from(&mut value[..]);
        assert_eq!(value, [0]);
        assert_eq!(boxed.unlock().as_slice(), [4]);
        boxed.lock();
    }

    // Clones compare equal in both directions.
    #[test]
    fn it_compares_equality() {
        let boxed_1 = Box::<u8>::random(1);
        let boxed_2 = boxed_1.clone();
        assert_eq!(boxed_1, boxed_2);
        assert_eq!(boxed_2, boxed_1);
    }

    // Independently random boxes compare unequal (overwhelmingly likely).
    #[test]
    fn it_compares_inequality() {
        let boxed_1 = Box::<u128>::random(32);
        let boxed_2 = Box::<u128>::random(32);
        assert_ne!(boxed_1, boxed_2);
        assert_ne!(boxed_2, boxed_1);
    }

    // Length mismatch alone makes boxes unequal, even with equal prefixes.
    #[test]
    fn it_compares_inequality_using_size() {
        let boxed_1 = Box::<u8>::from(&mut [0, 0, 0, 0][..]);
        let boxed_2 = Box::<u8>::from(&mut [0, 0, 0, 0, 0][..]);
        assert_ne!(boxed_1, boxed_2);
        assert_ne!(boxed_2, boxed_1);
    }

    // After construction the box is locked: zero outstanding refs.
    #[test]
    fn it_initializes_with_zero_refs() {
        let boxed = Box::<u8>::zero(10);
        assert_eq!(0, boxed.refs.get());
    }

    // Refcount goes up with unlocks and back down with locks.
    #[test]
    fn it_tracks_ref_counts_accurately() {
        let mut boxed = Box::<u8>::random(10);
        let _ = boxed.unlock();
        let _ = boxed.unlock();
        let _ = boxed.unlock();
        assert_eq!(3, boxed.refs.get());
        boxed.lock(); boxed.lock(); boxed.lock();
        assert_eq!(0, boxed.refs.get());
        let _ = boxed.unlock_mut();
        assert_eq!(1, boxed.refs.get());
        boxed.lock();
        assert_eq!(0, boxed.refs.get());
    }

    // Exactly u8::MAX readers is fine; only one more would overflow.
    #[test]
    fn it_doesnt_overflow_early() {
        let boxed = Box::<u64>::zero(4);
        for _ in 0..u8::max_value() {
            let _ = boxed.unlock();
        }
        for _ in 0..u8::max_value() {
            boxed.lock();
        }
    }

    // Any random (in-range) number of simultaneous readers is allowed.
    #[test]
    fn it_allows_arbitrary_readers() {
        let boxed = Box::<u8>::zero(1);
        let mut count = 0_u8;
        sodium::memrandom(count.as_mut_bytes());
        for _ in 0..count {
            let _ = boxed.unlock();
        }
        for _ in 0..count {
            boxed.lock()
        }
    }

    // A Box (even one unlocked for reading) can cross thread boundaries.
    #[test]
    fn it_can_be_sent_between_threads() {
        use std::sync::mpsc;
        use std::thread;
        let (tx, rx) = mpsc::channel();
        let child = thread::spawn(move || {
            let boxed = Box::<u64>::random(1);
            let value = boxed.unlock().as_slice().to_vec();
            // Sent while still unlocked; the receiver re-locks it.
            tx.send((boxed, value)).expect("failed to send to channel");
        });
        let (boxed, value) = rx.recv().expect("failed to read from channel");
        assert_eq!(Prot::ReadOnly, boxed.prot.get());
        assert_eq!(value, boxed.as_slice());
        child.join().expect("child terminated");
        boxed.lock();
    }

    // u8::MAX + 1 unlocks must overflow the refcount and panic.
    #[test]
    #[should_panic(expected = "secrets: retained too many times")]
    fn it_doesnt_allow_overflowing_readers() {
        let boxed = Box::<[u64; 8]>::zero(4);
        for _ in 0..=u8::max_value() {
            let _ = boxed.unlock();
        }
        // Unreached (the loop above panics); keeps the box balanced if not.
        for _ in 0..boxed.refs.get() {
            boxed.lock()
        }
    }

    // Forged refs/prot state (refs == MAX while locked) must be detected.
    #[test]
    #[should_panic(expected = "secrets: out-of-order retain/release detected")]
    fn it_detects_out_of_order_retains_and_releases_that_underflow() {
        let boxed = Box::<u8>::zero(5);
        boxed.refs.set(boxed.refs.get().wrapping_sub(1));
        boxed.prot.set(Prot::NoAccess);
        boxed.retain(Prot::ReadOnly);
    }

    // sodium::fail() appears to force init failure for testing — the
    // allocation path must surface it as a panic.
    #[test]
    #[should_panic(expected = "secrets: failed to initialize libsodium")]
    fn it_detects_sodium_init_failure() {
        sodium::fail();
        let _ = Box::<u8>::zero(0);
    }

    // mprotect failures must also panic rather than silently continue.
    #[test]
    #[should_panic(expected = "secrets: error setting memory protection to NoAccess")]
    fn it_detects_sodium_mprotect_failure() {
        sodium::fail();
        mprotect(std::ptr::null_mut::<u8>(), Prot::NoAccess);
    }
}
#[cfg(all(test, target_family = "unix"))]
mod tests_sigsegv {
    use super::*;
    use std::process;

    /// Runs `f` in a forked child and asserts the child dies from a memory
    /// protection fault (SIGSEGV, or SIGBUS on some platforms).
    fn assert_sigsegv<F>(f: F)
    where
        F: FnOnce(),
    {
        unsafe {
            let pid : libc::pid_t = libc::fork();
            let mut stat : libc::c_int = 0;
            match pid {
                -1 => panic!("`fork(2)` failed"),
                // Child: run the probe; exiting 0 means it did NOT fault,
                // which the parent's WIFSIGNALED assert will catch.
                0 => { f(); process::exit(0) },
                // Parent: reap the child and check how it terminated.
                _ => {
                    if libc::waitpid(pid, &mut stat, 0) == -1 {
                        panic!("`waitpid(2)` failed");
                    };
                    assert!(libc::WIFSIGNALED(stat));
                    assert!(
                        libc::WTERMSIG(stat) == libc::SIGBUS ||
                        libc::WTERMSIG(stat) == libc::SIGSEGV
                    );
                }
            }
        }
    }

    // Reading through the raw pointer of a locked box must fault.
    #[test]
    fn it_kills_attempts_to_read_while_locked() {
        assert_sigsegv(|| {
            let val = unsafe { Box::<u32>::zero(1).ptr.as_ptr().read() };
            let _ = sodium::memcmp(val.as_bytes(), val.as_bytes());
        });
    }

    // Writing through the raw pointer of a locked box must fault.
    #[test]
    fn it_kills_attempts_to_write_while_locked() {
        assert_sigsegv(|| {
            unsafe { Box::<u64>::zero(1).ptr.as_ptr().write(1) };
        });
    }

    // A slice obtained while unlocked must become unreadable once the
    // unlocks are fully balanced out again.
    #[test]
    fn it_kills_attempts_to_read_after_explicitly_locked() {
        assert_sigsegv(|| {
            let boxed = Box::<u32>::random(4);
            let val = boxed.unlock().as_slice();
            let _ = boxed.unlock();
            boxed.lock();
            boxed.lock();
            let _ = sodium::memcmp(
                val.as_bytes(),
                val.as_bytes(),
            );
        });
    }
}
// NOTE(review): `profile = "debug"` is a custom cfg — presumably set by
// the build script so these tests only run when `proven!` assertions are
// compiled in. Confirm against the crate's build configuration.
#[cfg(all(test, profile = "debug"))]
mod tests_proven_statements {
    use super::*;

    // Dereferencing a zero-length box must be rejected.
    #[test]
    #[should_panic(expected = "secrets: attempted to dereference a zero-length pointer")]
    fn it_doesnt_allow_referencing_zero_length() {
        let boxed = Box::<u8>::new_unlocked(0);
        let _ = boxed.as_ref();
    }

    // Only one mutable unlock may be outstanding at a time.
    #[test]
    #[should_panic(expected = "secrets: cannot unlock mutably more than once")]
    fn it_doesnt_allow_multiple_writers() {
        let mut boxed = Box::<u64>::zero(1);
        let _ = boxed.unlock_mut();
        let _ = boxed.unlock_mut();
    }

    // A lock with no matching unlock underflows the refcount.
    #[test]
    #[should_panic(expected = "secrets: releases exceeded retains")]
    fn it_doesnt_allow_negative_users() {
        Box::<u64>::zero(10).lock();
    }

    // One unlock cannot be balanced by two locks.
    #[test]
    #[should_panic(expected = "secrets: releases exceeded retains")]
    fn it_doesnt_allow_unbalanced_locking() {
        let boxed = Box::<u64>::zero(4);
        let _ = boxed.unlock();
        boxed.lock();
        boxed.lock();
    }

    // A mutable unlock cannot coexist with an immutable one.
    #[test]
    #[should_panic(expected = "secrets: cannot unlock mutably while unlocked immutably")]
    fn it_doesnt_allow_different_access_types() {
        let mut boxed = Box::<[u128; 128]>::zero(5);
        let _ = boxed.unlock();
        let _ = boxed.unlock_mut();
    }

    // Dropping a box with an outstanding read unlock is a caught bug.
    #[test]
    #[should_panic(expected = "secrets: retains exceeded releases")]
    fn it_doesnt_allow_outstanding_readers() {
        let _ = Box::<u8>::zero(1).unlock();
    }

    // Dropping a box with an outstanding write unlock is a caught bug.
    #[test]
    #[should_panic(expected = "secrets: retains exceeded releases")]
    fn it_doesnt_allow_outstanding_writers() {
        let _ = Box::<u8>::zero(1).unlock_mut();
    }

    // Accessors must refuse to run while the box is locked...
    #[test]
    #[should_panic(expected = "secrets: may not call Box::as_ref while locked")]
    fn it_doesnt_allow_as_ref_while_locked() {
        let _ = Box::<u8>::zero(1).as_ref();
    }

    #[test]
    #[should_panic(expected = "secrets: may not call Box::as_mut unless mutably unlocked")]
    fn it_doesnt_allow_as_mut_while_locked() {
        let _ = Box::<u8>::zero(1).as_mut();
    }

    // ...and mutable accessors must refuse read-only unlocks too.
    #[test]
    #[should_panic(expected = "secrets: may not call Box::as_mut unless mutably unlocked")]
    fn it_doesnt_allow_as_mut_while_readonly() {
        let mut boxed = Box::<u8>::zero(1);
        let _ = boxed.unlock();
        let _ = boxed.as_mut();
    }

    #[test]
    #[should_panic(expected = "secrets: may not call Box::as_slice while locked")]
    fn it_doesnt_allow_as_slice_while_locked() {
        let _ = Box::<u8>::zero(1).as_slice();
    }

    #[test]
    #[should_panic(expected = "secrets: may not call Box::as_mut_slice unless mutably unlocked")]
    fn it_doesnt_allow_as_mut_slice_while_locked() {
        let _ = Box::<u8>::zero(1).as_mut_slice();
    }

    #[test]
    #[should_panic(expected = "secrets: may not call Box::as_mut_slice unless mutably unlocked")]
    fn it_doesnt_allow_as_mut_slice_while_readonly() {
        let mut boxed = Box::<u8>::zero(1);
        let _ = boxed.unlock();
        let _ = boxed.as_mut_slice();
    }
}