#![deny(unsafe_code)]
#![deny(missing_docs)]
#![deny(clippy::unwrap_used)]
#![deny(clippy::panic)]
use crate::prelude::error::Result;
use std::ops::{Deref, DerefMut};
use zeroize::{Zeroize, ZeroizeOnDrop};
/// Byte buffer for secret material.
///
/// The contents are wiped from memory when the value is dropped (via the
/// derived [`ZeroizeOnDrop`]) and are never printed by the `Debug` impl.
#[derive(Zeroize, ZeroizeOnDrop)]
pub struct SecureBytes {
    // Heap storage; zeroized by the derived `Zeroize`/`ZeroizeOnDrop` impls.
    inner: Vec<u8>,
}
impl SecureBytes {
    /// Wraps an existing buffer; the bytes are zeroized when `self` drops.
    #[must_use]
    pub fn new(data: Vec<u8>) -> Self {
        Self { inner: data }
    }
    /// Copies `data` into a new protected buffer.
    ///
    /// NOTE(review): this inherent `from` shadows nothing but reads like a
    /// `From` impl; callers already depend on it, so it is kept as-is.
    #[must_use]
    pub fn from(data: &[u8]) -> Self {
        Self { inner: data.to_vec() }
    }
    /// Creates a buffer of `len` zero bytes.
    #[must_use]
    pub fn zeros(len: usize) -> Self {
        Self { inner: vec![0u8; len] }
    }
    /// Returns the number of bytes stored.
    #[must_use]
    pub fn len(&self) -> usize {
        self.inner.len()
    }
    /// Returns `true` when no bytes are stored.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }
    /// Returns the capacity of the underlying buffer.
    #[must_use]
    pub fn capacity(&self) -> usize {
        self.inner.capacity()
    }
    /// Appends `other` to the end of the buffer.
    pub fn extend_from_slice(&mut self, other: &[u8]) {
        self.inner.extend_from_slice(other);
    }
    /// Borrows the bytes as an immutable slice.
    #[must_use]
    pub fn as_slice(&self) -> &[u8] {
        &self.inner
    }
    /// Borrows the bytes as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        &mut self.inner
    }
    /// Consumes `self` and returns the raw bytes.
    ///
    /// `mem::take` moves the data out before `self` drops, so the returned
    /// `Vec` is NOT zeroized automatically — the caller takes over that
    /// responsibility.
    #[must_use]
    pub fn into_vec(mut self) -> Vec<u8> {
        std::mem::take(&mut self.inner)
    }
    /// Resizes the buffer to `new_len`, zero-filling any newly added bytes.
    pub fn resize(&mut self, new_len: usize) {
        self.inner.resize(new_len, 0);
    }
}
impl Deref for SecureBytes {
    type Target = [u8];

    /// Borrows the protected bytes as a plain slice.
    fn deref(&self) -> &Self::Target {
        self.inner.as_slice()
    }
}
impl DerefMut for SecureBytes {
    /// Borrows the protected bytes as a mutable slice.
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.inner.as_mut_slice()
    }
}
impl AsRef<[u8]> for SecureBytes {
    /// Cheap, borrowing view of the protected bytes.
    fn as_ref(&self) -> &[u8] {
        self.inner.as_slice()
    }
}
impl std::fmt::Debug for SecureBytes {
    /// Redacted debug output: only the byte count is revealed, never the
    /// contents, so secrets cannot leak through logs or panics.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let byte_count = self.len();
        write!(f, "SecureBytes([REDACTED; {} bytes])", byte_count)
    }
}
impl subtle::ConstantTimeEq for SecureBytes {
    // Constant-time equality for secret buffers.
    //
    // The length check uses `ct_eq` and is combined with `&` (not `&&`) so
    // the evaluation is branch-free. Slice `ct_eq` from `subtle` already
    // yields 0 for unequal lengths, so the explicit length check is defense
    // in depth. Note the *lengths* themselves are not hidden from a timing
    // observer, only the contents.
    fn ct_eq(&self, other: &Self) -> subtle::Choice {
        let len_equal = self.inner.len().ct_eq(&other.inner.len());
        len_equal & self.inner.ct_eq(&other.inner)
    }
}
impl PartialEq for SecureBytes {
    /// Equality is delegated to the constant-time comparison so `==` on
    /// secret buffers never short-circuits on the first differing byte.
    fn eq(&self, other: &Self) -> bool {
        use subtle::ConstantTimeEq;
        bool::from(self.ct_eq(other))
    }
}
impl Eq for SecureBytes {}
impl From<Vec<u8>> for SecureBytes {
fn from(data: Vec<u8>) -> Self {
SecureBytes::new(data)
}
}
#[must_use]
pub fn secure_compare(a: &[u8], b: &[u8]) -> bool {
use subtle::ConstantTimeEq;
let max_len = a.len().max(b.len());
let mut padded_a = vec![0u8; max_len];
let mut padded_b = vec![0u8; max_len];
if let Some(dest) = padded_a.get_mut(..a.len()) {
dest.copy_from_slice(a);
}
if let Some(dest) = padded_b.get_mut(..b.len()) {
dest.copy_from_slice(b);
}
let len_equal = a.len().ct_eq(&b.len());
let content_equal = padded_a.ct_eq(&padded_b);
(len_equal & content_equal).into()
}
/// Overwrites `data` with zeros via the `zeroize` crate, which is designed
/// to keep the writes from being optimized away.
pub fn secure_zeroize(data: &mut [u8]) {
    use zeroize::Zeroize;
    data.zeroize();
}
/// Returns the process-wide shared [`MemoryPool`], creating it on first use.
pub fn get_memory_pool() -> &'static MemoryPool {
    // Lazily initialized exactly once; every caller gets the same instance.
    static POOL: OnceLock<MemoryPool> = OnceLock::new();
    POOL.get_or_init(|| MemoryPool::new())
}
/// Size-bucketed free list of [`SecureBytes`] buffers so secure allocations
/// can be recycled instead of repeatedly allocated and freed.
pub struct MemoryPool {
    // Map from buffer size to a stack of pooled buffers of exactly that size.
    pool: Mutex<std::collections::HashMap<usize, Vec<SecureBytes>>>,
}
impl MemoryPool {
    /// Creates an empty memory pool.
    #[must_use]
    pub fn new() -> Self {
        Self { pool: Mutex::new(std::collections::HashMap::new()) }
    }
    /// Hands out a zero-filled buffer of exactly `size` bytes, reusing a
    /// pooled buffer of the right size when one is available.
    ///
    /// # Errors
    /// Returns a `MemoryError` when the pool lock is poisoned, `size` is
    /// zero, or `size` exceeds the maximum allowed secure allocation.
    pub fn allocate(&self, size: usize) -> Result<SecureBytes> {
        let mut pool = self.pool.lock().map_err(|_e| {
            crate::prelude::error::LatticeArcError::MemoryError(
                "Memory pool lock poisoned".to_string(),
            )
        })?;
        if let Some(allocations) = pool.get_mut(&size)
            && let Some(mut memory) = allocations.pop()
        {
            // Defense in depth: recycled buffers are scrubbed in
            // `deallocate`, but zero again here so callers can always rely
            // on `allocate` returning all-zero memory.
            memory.as_mut_slice().zeroize();
            return Ok(memory);
        }
        Self::allocate_secure(size)
    }
    /// Returns `memory` to the pool for later reuse, up to a per-size cap.
    /// If the cap is reached (or the lock is poisoned) the buffer is simply
    /// dropped, which zeroizes it via `ZeroizeOnDrop`.
    pub fn deallocate(&self, mut memory: SecureBytes) {
        // Scrub immediately: pooled buffers are NOT dropped, so without this
        // `ZeroizeOnDrop` never fires and stale secrets would sit in the
        // pool until the buffer is reused.
        memory.as_mut_slice().zeroize();
        let size = memory.len();
        if let Ok(mut pool) = self.pool.lock() {
            // Cap each size bucket so the pool cannot grow without bound.
            const MAX_POOL_SIZE: usize = 100;
            let allocations = pool.entry(size).or_default();
            if allocations.len() < MAX_POOL_SIZE {
                allocations.push(memory);
            }
        }
    }
    /// Allocates a fresh zero-filled buffer, enforcing the size limits.
    fn allocate_secure(size: usize) -> Result<SecureBytes> {
        if size == 0 {
            return Err(crate::prelude::error::LatticeArcError::MemoryError(
                "Cannot allocate zero-sized secure memory".to_string(),
            ));
        }
        // Cap single secure allocations at 1 MiB to bound memory pressure.
        const MAX_SECURE_ALLOCATION_SIZE: usize = 1024 * 1024;
        if size > MAX_SECURE_ALLOCATION_SIZE {
            return Err(crate::prelude::error::LatticeArcError::MemoryError(format!(
                "Secure memory allocation size {} exceeds maximum allowed size {}",
                size, MAX_SECURE_ALLOCATION_SIZE
            )));
        }
        Ok(SecureBytes { inner: vec![0u8; size] })
    }
}
impl Default for MemoryPool {
    /// Equivalent to [`MemoryPool::new`]: an empty pool.
    fn default() -> Self {
        Self::new()
    }
}
use rand::rngs::OsRng;
/// The operating-system-backed RNG used as the primary source of secure randomness.
pub type SecureRng = OsRng;
use rand::{RngCore, SeedableRng};
use rand_chacha::ChaCha20Rng;
use std::sync::{Mutex, OnceLock};
thread_local! {
    // Per-thread fallback CSPRNG (ChaCha20 seeded from OS entropy), used when
    // the global OsRng is unavailable or its lock is poisoned. The Mutex is
    // never contended (the value is thread-local); it exists to keep the
    // lock-based call sites uniform with the global path.
    static FALLBACK_RNG: Mutex<ChaCha20Rng> = Mutex::new(ChaCha20Rng::from_entropy());
}
/// Handle to a cryptographically secure randomness source.
#[non_exhaustive]
pub enum RngHandle<'a> {
    /// The process-wide OS-backed RNG, shared behind a mutex.
    Global(&'a Mutex<OsRng>),
    /// The per-thread ChaCha20 fallback RNG.
    ThreadLocal,
}
impl<'a> RngHandle<'a> {
pub fn secure() -> Result<RngHandle<'a>> {
match get_global_secure_rng() {
Ok(global) => Ok(RngHandle::Global(global)),
Err(_err) => {
#[cfg(feature = "fips")]
{
Err(crate::prelude::error::LatticeArcError::RandomError)
}
#[cfg(not(feature = "fips"))]
{
tracing::warn!("Global OsRng unavailable; falling back to thread-local RNG");
Ok(RngHandle::ThreadLocal)
}
}
}
}
pub fn fill_bytes(&self, dest: &mut [u8]) -> Result<()> {
match self {
RngHandle::Global(mutex) => {
match mutex.lock() {
Ok(mut rng) => {
rng.fill_bytes(dest);
Ok(())
}
Err(_) => {
FALLBACK_RNG.with(|rng| match rng.lock() {
Ok(mut rng) => {
rng.fill_bytes(dest);
Ok(())
}
Err(_) => Err(crate::prelude::error::LatticeArcError::RandomError),
})
}
}
}
RngHandle::ThreadLocal => FALLBACK_RNG.with(|rng| match rng.lock() {
Ok(mut rng) => {
rng.fill_bytes(dest);
Ok(())
}
Err(_) => Err(crate::prelude::error::LatticeArcError::RandomError),
}),
}
}
pub fn next_u64(&self) -> Result<u64> {
match self {
RngHandle::Global(mutex) => {
match mutex.lock() {
Ok(mut rng) => Ok(rng.next_u64()),
Err(_) => {
FALLBACK_RNG.with(|rng| match rng.lock() {
Ok(mut rng) => Ok(rng.next_u64()),
Err(_) => Err(crate::prelude::error::LatticeArcError::RandomError),
})
}
}
}
RngHandle::ThreadLocal => FALLBACK_RNG.with(|rng| match rng.lock() {
Ok(mut rng) => Ok(rng.next_u64()),
Err(_) => Err(crate::prelude::error::LatticeArcError::RandomError),
}),
}
}
pub fn next_u32(&self) -> Result<u32> {
match self {
RngHandle::Global(mutex) => {
match mutex.lock() {
Ok(mut rng) => Ok(rng.next_u32()),
Err(_) => {
FALLBACK_RNG.with(|rng| match rng.lock() {
Ok(mut rng) => Ok(rng.next_u32()),
Err(_) => Err(crate::prelude::error::LatticeArcError::RandomError),
})
}
}
}
RngHandle::ThreadLocal => FALLBACK_RNG.with(|rng| match rng.lock() {
Ok(mut rng) => Ok(rng.next_u32()),
Err(_) => Err(crate::prelude::error::LatticeArcError::RandomError),
}),
}
}
}
/// Returns the process-wide `OsRng`, initializing it on first access.
///
/// # Errors
/// Currently infallible; the `Result` return type is kept for API stability.
pub fn get_global_secure_rng() -> Result<&'static Mutex<OsRng>> {
    // The static lives inside the function since this is its only user.
    static GLOBAL_SECURE_RNG: OnceLock<Mutex<OsRng>> = OnceLock::new();
    Ok(GLOBAL_SECURE_RNG.get_or_init(|| Mutex::new(OsRng)))
}
/// Eagerly initializes the global secure RNG.
///
/// # Errors
/// Propagates any error from [`get_global_secure_rng`].
pub fn initialize_global_secure_rng() -> Result<()> {
    get_global_secure_rng().map(|_rng| ())
}
/// Generates `len` cryptographically secure random bytes.
///
/// # Errors
/// Returns an error when no secure RNG source is available.
pub fn generate_secure_random_bytes(len: usize) -> Result<Vec<u8>> {
    let mut buffer = vec![0u8; len];
    let rng = RngHandle::secure()?;
    rng.fill_bytes(&mut buffer)?;
    Ok(buffer)
}
/// Generates a cryptographically secure random `u64`.
///
/// # Errors
/// Returns an error when no secure RNG source is available.
pub fn generate_secure_random_u64() -> Result<u64> {
    let rng = RngHandle::secure()?;
    rng.next_u64()
}
/// Generates a cryptographically secure random `u32`.
///
/// # Errors
/// Returns an error when no secure RNG source is available.
pub fn generate_secure_random_u32() -> Result<u32> {
    let rng = RngHandle::secure()?;
    rng.next_u32()
}
// Unit tests: constant-time comparison, SecureBytes container semantics,
// memory-pool allocation/recycling, and the RNG handle / generator helpers.
#[cfg(test)]
#[allow(clippy::unwrap_used)]
#[allow(clippy::expect_used)]
#[allow(clippy::indexing_slicing)]
mod tests {
    use super::*;

    // --- secure_compare ---

    #[test]
    fn test_secure_compare_equal_is_secure_succeeds() {
        let a = b"hello world";
        let b = b"hello world";
        assert!(secure_compare(a, b));
    }
    #[test]
    fn test_secure_compare_different_is_secure_succeeds() {
        let a = b"hello world";
        let b = b"hello xorld";
        assert!(!secure_compare(a, b));
    }
    #[test]
    fn test_secure_compare_different_lengths_is_secure_has_correct_size() {
        let a = b"hello";
        let b = b"hello world";
        assert!(!secure_compare(a, b));
    }
    #[test]
    fn test_secure_compare_empty_is_secure_succeeds() {
        let a = b"";
        let b = b"";
        assert!(secure_compare(a, b));
    }
    #[test]
    fn test_secure_compare_empty_vs_nonempty_is_secure_succeeds() {
        let a = b"";
        let b = b"hello";
        assert!(!secure_compare(a, b));
    }
    // Repeated comparison as a (weak) smoke check that results are stable.
    #[test]
    fn test_secure_compare_constant_time_is_secure_succeeds() {
        let a = b"hello world";
        let b = b"hello xorld";
        for _ in 0..100 {
            assert!(!secure_compare(a, b));
        }
    }

    // --- SecureBytes container behavior ---

    #[test]
    fn test_secure_bytes_new_succeeds() {
        let data = vec![1, 2, 3, 4, 5];
        let sb = SecureBytes::new(data.clone());
        assert_eq!(sb.as_slice(), &data[..]);
    }
    #[test]
    fn test_secure_bytes_from_slice_succeeds() {
        let data = [10, 20, 30];
        let sb = SecureBytes::from(&data);
        assert_eq!(sb.as_slice(), &data);
    }
    #[test]
    fn test_secure_bytes_zeros_clears_bytes_succeeds() {
        let sb = SecureBytes::zeros(16);
        assert_eq!(sb.len(), 16);
        assert!(sb.as_slice().iter().all(|&b| b == 0));
    }
    #[test]
    fn test_secure_bytes_len_and_is_empty_succeeds() {
        let sb = SecureBytes::new(vec![1, 2, 3]);
        assert_eq!(sb.len(), 3);
        assert!(!sb.is_empty());
        let empty = SecureBytes::new(vec![]);
        assert_eq!(empty.len(), 0);
        assert!(empty.is_empty());
    }
    #[test]
    fn test_secure_bytes_capacity_succeeds() {
        let sb = SecureBytes::new(Vec::with_capacity(64));
        assert!(sb.capacity() >= 64);
    }
    #[test]
    fn test_secure_bytes_extend_from_slice_succeeds() {
        let mut sb = SecureBytes::new(vec![1, 2]);
        sb.extend_from_slice(&[3, 4, 5]);
        assert_eq!(sb.as_slice(), &[1, 2, 3, 4, 5]);
    }
    #[test]
    fn test_secure_bytes_as_mut_slice_succeeds() {
        let mut sb = SecureBytes::new(vec![0, 0, 0]);
        let s = sb.as_mut_slice();
        s[0] = 1;
        s[1] = 2;
        s[2] = 3;
        assert_eq!(sb.as_slice(), &[1, 2, 3]);
    }
    #[test]
    fn test_secure_bytes_into_vec_succeeds() {
        let sb = SecureBytes::new(vec![10, 20, 30]);
        let v = sb.into_vec();
        assert_eq!(v, vec![10, 20, 30]);
    }
    #[test]
    fn test_secure_bytes_resize_succeeds() {
        let mut sb = SecureBytes::new(vec![1, 2, 3]);
        sb.resize(5);
        assert_eq!(sb.len(), 5);
        assert_eq!(sb.as_slice(), &[1, 2, 3, 0, 0]);
        sb.resize(2);
        assert_eq!(sb.len(), 2);
        assert_eq!(sb.as_slice(), &[1, 2]);
    }
    #[test]
    fn test_secure_bytes_deref_succeeds() {
        let sb = SecureBytes::new(vec![1, 2, 3]);
        let slice: &[u8] = &sb;
        assert_eq!(slice, &[1, 2, 3]);
    }
    #[test]
    fn test_secure_bytes_deref_mut_succeeds() {
        let mut sb = SecureBytes::new(vec![0, 0]);
        let slice: &mut [u8] = &mut sb;
        slice[0] = 42;
        assert_eq!(sb.as_slice()[0], 42);
    }
    #[test]
    fn test_secure_bytes_as_ref_succeeds() {
        let sb = SecureBytes::new(vec![5, 6, 7]);
        let r: &[u8] = sb.as_ref();
        assert_eq!(r, &[5, 6, 7]);
    }
    // Debug output must never reveal buffer contents.
    #[test]
    fn test_secure_bytes_debug_redacted_is_secure_succeeds() {
        let sb = SecureBytes::new(vec![0xDE, 0xAD]);
        let debug = format!("{:?}", sb);
        assert!(debug.contains("REDACTED"));
        assert!(debug.contains("2 bytes"));
        assert!(!debug.contains("DE"));
    }
    #[test]
    fn test_secure_bytes_partial_eq_equal_is_secure_succeeds() {
        let a = SecureBytes::new(vec![1, 2, 3]);
        let b = SecureBytes::new(vec![1, 2, 3]);
        assert_eq!(a, b);
    }
    #[test]
    fn test_secure_bytes_partial_eq_different_is_secure_succeeds() {
        let a = SecureBytes::new(vec![1, 2, 3]);
        let b = SecureBytes::new(vec![1, 2, 4]);
        assert_ne!(a, b);
    }
    #[test]
    fn test_secure_bytes_partial_eq_different_lengths_is_secure_has_correct_size() {
        let a = SecureBytes::new(vec![1, 2]);
        let b = SecureBytes::new(vec![1, 2, 3]);
        assert_ne!(a, b);
    }
    #[test]
    fn test_secure_bytes_from_vec_succeeds() {
        let sb: SecureBytes = vec![9, 8, 7].into();
        assert_eq!(sb.as_slice(), &[9, 8, 7]);
    }
    #[test]
    fn test_secure_zeroize_clears_bytes_succeeds() {
        let mut data = vec![0xFF; 32];
        secure_zeroize(&mut data);
        assert!(data.iter().all(|&b| b == 0));
    }

    // --- MemoryPool ---

    #[test]
    fn test_memory_pool_new_succeeds() {
        let pool = MemoryPool::new();
        let _default = MemoryPool::default();
        let mem = pool.allocate(32).unwrap();
        assert_eq!(mem.len(), 32);
    }
    #[test]
    fn test_memory_pool_allocate_and_deallocate_succeeds() {
        let pool = MemoryPool::new();
        let mem = pool.allocate(64).unwrap();
        assert_eq!(mem.len(), 64);
        assert!(mem.as_slice().iter().all(|&b| b == 0));
        pool.deallocate(mem);
        let mem2 = pool.allocate(64).unwrap();
        assert_eq!(mem2.len(), 64);
    }
    #[test]
    fn test_memory_pool_zero_size_error_fails() {
        let pool = MemoryPool::new();
        let result = pool.allocate(0);
        assert!(result.is_err());
    }
    #[test]
    fn test_memory_pool_too_large_error_fails() {
        let pool = MemoryPool::new();
        let result = pool.allocate(2 * 1024 * 1024);
        assert!(result.is_err());
    }
    // The global pool accessor must return the same instance every time.
    #[test]
    fn test_memory_pool_global_succeeds() {
        let pool1 = get_memory_pool();
        let pool2 = get_memory_pool();
        assert!(std::ptr::eq(pool1, pool2));
    }

    // --- RngHandle and random generators ---

    #[test]
    fn test_rng_handle_secure_is_secure_succeeds() {
        let handle = RngHandle::secure().unwrap();
        let mut buf = [0u8; 32];
        handle.fill_bytes(&mut buf).unwrap();
        assert!(buf.iter().any(|&b| b != 0));
    }
    // Two successive fills should (overwhelmingly likely) differ.
    #[test]
    fn test_rng_handle_fill_bytes_global_succeeds() {
        let handle = RngHandle::secure().unwrap();
        let mut buf1 = [0u8; 16];
        let mut buf2 = [0u8; 16];
        handle.fill_bytes(&mut buf1).unwrap();
        handle.fill_bytes(&mut buf2).unwrap();
        assert_ne!(buf1, buf2);
    }
    #[test]
    fn test_rng_handle_next_u64_succeeds() {
        let handle = RngHandle::secure().unwrap();
        let v1 = handle.next_u64().unwrap();
        let v2 = handle.next_u64().unwrap();
        assert_ne!(v1, v2);
    }
    #[test]
    fn test_rng_handle_next_u32_succeeds() {
        let handle = RngHandle::secure().unwrap();
        let v = handle.next_u32().unwrap();
        let _ = v;
    }
    #[test]
    fn test_rng_handle_thread_local_fill_succeeds() {
        let handle = RngHandle::ThreadLocal;
        let mut buf = [0u8; 32];
        handle.fill_bytes(&mut buf).unwrap();
        assert!(buf.iter().any(|&b| b != 0));
    }
    #[test]
    fn test_rng_handle_thread_local_next_u64_succeeds() {
        let handle = RngHandle::ThreadLocal;
        let v = handle.next_u64().unwrap();
        let _ = v;
    }
    #[test]
    fn test_rng_handle_thread_local_next_u32_succeeds() {
        let handle = RngHandle::ThreadLocal;
        let v = handle.next_u32().unwrap();
        let _ = v;
    }
    #[test]
    fn test_get_global_secure_rng_succeeds() {
        let rng = get_global_secure_rng().unwrap();
        let _ = rng;
    }
    #[test]
    fn test_initialize_global_secure_rng_succeeds() {
        assert!(initialize_global_secure_rng().is_ok());
    }
    #[test]
    fn test_generate_secure_random_bytes_is_secure_succeeds() {
        let bytes = generate_secure_random_bytes(32).unwrap();
        assert_eq!(bytes.len(), 32);
        assert!(bytes.iter().any(|&b| b != 0));
    }
    #[test]
    fn test_generate_secure_random_bytes_zero_len_succeeds() {
        let bytes = generate_secure_random_bytes(0).unwrap();
        assert!(bytes.is_empty());
    }
    #[test]
    fn test_generate_secure_random_u64_is_secure_succeeds() {
        let v = generate_secure_random_u64().unwrap();
        let _ = v;
    }
    #[test]
    fn test_generate_secure_random_u32_is_secure_succeeds() {
        let v = generate_secure_random_u32().unwrap();
        let _ = v;
    }

    // --- MemoryPool recycling edge cases ---

    #[test]
    fn test_memory_pool_deallocate_and_reuse_succeeds() {
        let pool = MemoryPool::new();
        let mut mem = pool.allocate(16).unwrap();
        mem.as_mut_slice()[0] = 0xFF;
        pool.deallocate(mem);
        let mem2 = pool.allocate(16).unwrap();
        assert_eq!(mem2.len(), 16);
    }
    // Exercises the per-size pool cap (MAX_POOL_SIZE) path.
    #[test]
    fn test_memory_pool_deallocate_pool_full_succeeds() {
        let pool = MemoryPool::new();
        for _ in 0..100 {
            let mem = pool.allocate(8).unwrap();
            pool.deallocate(mem);
        }
        let extra = pool.allocate(8).unwrap();
        pool.deallocate(extra);
        let mem = pool.allocate(8).unwrap();
        assert_eq!(mem.len(), 8);
    }
    #[test]
    fn test_memory_pool_multiple_sizes_succeeds() {
        let pool = MemoryPool::new();
        let m1 = pool.allocate(16).unwrap();
        let m2 = pool.allocate(32).unwrap();
        let m3 = pool.allocate(64).unwrap();
        assert_eq!(m1.len(), 16);
        assert_eq!(m2.len(), 32);
        assert_eq!(m3.len(), 64);
        pool.deallocate(m1);
        pool.deallocate(m2);
        pool.deallocate(m3);
        let r1 = pool.allocate(32).unwrap();
        assert_eq!(r1.len(), 32);
    }
    // 1 MiB is the documented maximum; one byte more must fail.
    #[test]
    fn test_memory_pool_allocate_boundary_sizes_succeeds() {
        let pool = MemoryPool::new();
        let mem = pool.allocate(1024 * 1024).unwrap();
        assert_eq!(mem.len(), 1024 * 1024);
        let result = pool.allocate(1024 * 1024 + 1);
        assert!(result.is_err());
    }
    #[test]
    fn test_secure_bytes_empty_operations_succeeds() {
        let mut sb = SecureBytes::zeros(0);
        assert!(sb.is_empty());
        assert_eq!(sb.len(), 0);
        sb.extend_from_slice(&[1, 2, 3]);
        assert_eq!(sb.len(), 3);
        assert_eq!(sb.as_slice(), &[1, 2, 3]);
    }
    #[test]
    fn test_secure_bytes_resize_larger_then_smaller_succeeds() {
        let mut sb = SecureBytes::new(vec![1, 2, 3, 4, 5]);
        sb.resize(10);
        assert_eq!(sb.len(), 10);
        assert_eq!(&sb.as_slice()[..5], &[1, 2, 3, 4, 5]);
        assert_eq!(&sb.as_slice()[5..], &[0, 0, 0, 0, 0]);
        sb.resize(3);
        assert_eq!(sb.len(), 3);
        assert_eq!(sb.as_slice(), &[1, 2, 3]);
    }
    #[test]
    fn test_generate_secure_random_bytes_various_lengths_is_secure_has_correct_size() {
        for len in [1, 16, 32, 64, 128, 256] {
            let bytes = generate_secure_random_bytes(len).unwrap();
            assert_eq!(bytes.len(), len);
        }
    }
}