#![no_std]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(
html_logo_url = "https://raw.githubusercontent.com/RustCrypto/media/6ee8e381/logo.svg",
html_favicon_url = "https://raw.githubusercontent.com/RustCrypto/media/6ee8e381/logo.svg"
)]
#![warn(missing_docs, rust_2018_idioms, unused_qualifications)]
#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(feature = "std")]
extern crate std;
#[cfg(feature = "zeroize_derive")]
pub use zeroize_derive::{Zeroize, ZeroizeOnDrop};
#[cfg(target_arch = "aarch64")]
mod aarch64;
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
mod x86;
use core::{
marker::{PhantomData, PhantomPinned},
mem::{self, MaybeUninit},
num::{
self, NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroIsize,
NonZeroU128, NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize,
},
ops, ptr,
slice::IterMut,
sync::atomic,
};
#[cfg(feature = "alloc")]
use alloc::{boxed::Box, string::String, vec::Vec};
#[cfg(feature = "std")]
use std::ffi::CString;
/// Trait for securely erasing values from memory.
pub trait Zeroize {
    /// Zero out this object from memory using operations (volatile writes and
    /// a compiler fence) intended to prevent the compiler from "optimizing
    /// away" the zeroization as a dead store.
    fn zeroize(&mut self);
}
/// Marker trait signifying that the implementing type zeroizes its contents
/// when it is dropped (either via a `Drop` impl or because it is inherently
/// zero-sized — see the impls for `()`, `PhantomData`, and `PhantomPinned`).
pub trait ZeroizeOnDrop {}
/// Marker trait for types whose [`Default`] value consists solely of zero
/// bytes. Implementing it opts the type into the blanket [`Zeroize`] impl
/// below, which overwrites the value with `Default::default()`.
pub trait DefaultIsZeroes: Copy + Default + Sized {}
/// Fallible counterpart to [`Zeroize`], for types where zeroization may not
/// always be possible.
pub trait TryZeroize {
    /// Attempt to zero out this object from memory, returning whether the
    /// operation succeeded.
    #[must_use]
    fn try_zeroize(&mut self) -> bool;
}
// Blanket impl: any `DefaultIsZeroes` type is zeroized by volatile-writing
// its all-zero `Default` value, then issuing a compiler fence so the write
// cannot be reordered away.
impl<Z> Zeroize for Z
where
    Z: DefaultIsZeroes,
{
    fn zeroize(&mut self) {
        volatile_write(self, Z::default());
        atomic_fence();
    }
}
// Implements `DefaultIsZeroes` (and thereby `Zeroize`, via the blanket impl
// above) for each listed type.
macro_rules! impl_zeroize_with_default {
    ($($type:ty),+) => {
        $(impl DefaultIsZeroes for $type {})+
    };
}
// Primitive types whose `Default` value is represented by all-zero bytes.
#[rustfmt::skip]
impl_zeroize_with_default! {
    PhantomPinned, (), bool, char,
    f32, f64,
    i8, i16, i32, i64, i128, isize,
    u8, u16, u32, u64, u128, usize
}

// Zero-sized types hold no data, so they trivially satisfy `ZeroizeOnDrop`.
impl ZeroizeOnDrop for PhantomPinned {}
impl ZeroizeOnDrop for () {}
// `NonZero*` types cannot hold the value zero, so "zeroizing" them instead
// volatile-writes the constant 1, followed by a compiler fence.
macro_rules! impl_zeroize_for_non_zero {
    ($($type:ty),+) => {
        $(
            impl Zeroize for $type {
                fn zeroize(&mut self) {
                    // Evaluated at compile time; `new(1)` is always `Some`
                    // for a nonzero type, so the `unreachable!` cannot fire.
                    const ONE: $type = match <$type>::new(1) {
                        Some(one) => one,
                        None => unreachable!(),
                    };
                    volatile_write(self, ONE);
                    atomic_fence();
                }
            }
        )+
    };
}
// All of core's `NonZero` integer types.
impl_zeroize_for_non_zero!(
    NonZeroI8,
    NonZeroI16,
    NonZeroI32,
    NonZeroI64,
    NonZeroI128,
    NonZeroIsize,
    NonZeroU8,
    NonZeroU16,
    NonZeroU32,
    NonZeroU64,
    NonZeroU128,
    NonZeroUsize
);
// `Wrapping<Z>` is a newtype around `Z`; delegate to the inner value.
impl<Z> Zeroize for num::Wrapping<Z>
where
    Z: Zeroize,
{
    fn zeroize(&mut self) {
        self.0.zeroize();
    }
}
// Arrays zeroize element-by-element via the `IterMut` impl below.
impl<Z, const N: usize> Zeroize for [Z; N]
where
    Z: Zeroize,
{
    fn zeroize(&mut self) {
        self.iter_mut().zeroize();
    }
}

// An array of `ZeroizeOnDrop` elements zeroizes on drop element-wise.
impl<Z, const N: usize> ZeroizeOnDrop for [Z; N] where Z: ZeroizeOnDrop {}
// Zeroizes every element yielded by a mutable slice iterator. This is the
// common path used by the array, `Vec`, and `Box<[Z]>` impls.
impl<Z> Zeroize for IterMut<'_, Z>
where
    Z: Zeroize,
{
    fn zeroize(&mut self) {
        self.for_each(Z::zeroize);
    }
}
// `Option<Z>`: zeroize the contained value, drop it, then wipe the remaining
// storage (discriminant and padding) so no trace of the prior `Some` state is
// left behind.
impl<Z> Zeroize for Option<Z>
where
    Z: Zeroize,
{
    fn zeroize(&mut self) {
        if let Some(value) = self {
            value.zeroize();
            // Drop the inner value, setting `self` to `None`.
            self.take();
        }
        // Ensure that even if the value was previously moved out, the whole
        // `Option`'s storage is overwritten with zero bytes.
        // SAFETY: `self` is a valid, aligned `Option<Z>`; the raw zero bytes
        // are never observed through a typed reference because a valid `None`
        // is volatile-written immediately below.
        unsafe {
            volatile_set((self as *mut Self).cast::<u8>(), 0, mem::size_of::<Self>());
        }
        // Re-establish a valid `None`, volatile so the compiler cannot assume
        // the previous state survived.
        unsafe { ptr::write_volatile(self, None) }
        atomic_fence();
    }
}

impl<Z> ZeroizeOnDrop for Option<Z> where Z: ZeroizeOnDrop {}
// `MaybeUninit` may hold uninitialized memory, so it must not be read or
// dropped; simply volatile-write a fully-zeroed value over it.
impl<Z> Zeroize for MaybeUninit<Z> {
    fn zeroize(&mut self) {
        // SAFETY: `self` is a valid mutable reference, and any byte pattern
        // (including all zeroes) is a valid `MaybeUninit<Z>`.
        unsafe { ptr::write_volatile(self, MaybeUninit::zeroed()) }
        atomic_fence();
    }
}
// Zeroizes a slice of possibly-uninitialized values by overwriting it
// byte-by-byte (the element type may not be readable, so we work in bytes).
impl<Z> Zeroize for [MaybeUninit<Z>] {
    fn zeroize(&mut self) {
        let ptr = self.as_mut_ptr().cast::<MaybeUninit<u8>>();
        // Panics on overflow rather than wrapping and under-zeroizing.
        let size = self.len().checked_mul(mem::size_of::<Z>()).unwrap();
        // Volatile writes are only defined for regions up to `isize::MAX` bytes.
        assert!(size <= isize::MAX as usize);
        // SAFETY: the slice owns `size` contiguous bytes starting at `ptr`,
        // and a zero byte is a valid `MaybeUninit<u8>`.
        unsafe { volatile_set(ptr, MaybeUninit::zeroed(), size) }
        atomic_fence();
    }
}
// Fast path for slices of `DefaultIsZeroes` types: volatile-write the
// all-zero `Default` value over every element in one pass.
impl<Z> Zeroize for [Z]
where
    Z: DefaultIsZeroes,
{
    fn zeroize(&mut self) {
        // Volatile writes are only defined for regions up to `isize::MAX`
        // bytes; the element count is a conservative bound check here.
        assert!(self.len() <= isize::MAX as usize);
        // SAFETY: the slice owns `len` contiguous, initialized elements.
        unsafe { volatile_set(self.as_mut_ptr(), Z::default(), self.len()) };
        atomic_fence();
    }
}
// Zeroizes the UTF-8 bytes of a string slice in place.
impl Zeroize for str {
    fn zeroize(&mut self) {
        // SAFETY: zeroing leaves the buffer all-NUL bytes, which is valid
        // UTF-8, so the `str` invariant still holds after the mutation.
        unsafe { self.as_bytes_mut().zeroize() }
    }
}
// `PhantomData` is zero-sized: there is nothing to zeroize.
impl<Z> Zeroize for PhantomData<Z> {
    fn zeroize(&mut self) {}
}

impl<Z> ZeroizeOnDrop for PhantomData<Z> {}
// Implements `Zeroize`/`ZeroizeOnDrop` for tuples by destructuring and
// delegating to every field in order.
macro_rules! impl_zeroize_tuple {
    ( $( $type_name:ident ),+ ) => {
        impl<$($type_name: Zeroize),+> Zeroize for ($($type_name,)+) {
            fn zeroize(&mut self) {
                // Reuses the type parameter idents as binding names, hence
                // the lint allowance.
                #[allow(non_snake_case)]
                let ($($type_name,)+) = self;
                $($type_name.zeroize());+
            }
        }
        impl<$($type_name: ZeroizeOnDrop),+> ZeroizeOnDrop for ($($type_name,)+) { }
    }
}
// Tuples of arity 1 through 10.
impl_zeroize_tuple!(A);
impl_zeroize_tuple!(A, B);
impl_zeroize_tuple!(A, B, C);
impl_zeroize_tuple!(A, B, C, D);
impl_zeroize_tuple!(A, B, C, D, E);
impl_zeroize_tuple!(A, B, C, D, E, F);
impl_zeroize_tuple!(A, B, C, D, E, F, G);
impl_zeroize_tuple!(A, B, C, D, E, F, G, H);
impl_zeroize_tuple!(A, B, C, D, E, F, G, H, I);
impl_zeroize_tuple!(A, B, C, D, E, F, G, H, I, J);
// `Vec<Z>`: zeroize the live elements, drop them, then wipe the spare
// capacity so moved-out or previously-held elements leave no residue in the
// allocation. Note this does not wipe memory freed by earlier reallocations.
#[cfg(feature = "alloc")]
impl<Z> Zeroize for Vec<Z>
where
    Z: Zeroize,
{
    fn zeroize(&mut self) {
        // Zeroize all the initialized elements.
        self.iter_mut().zeroize();
        // Drop them, setting the length to 0...
        self.clear();
        // ...then zero the uninitialized tail of the buffer byte-by-byte.
        self.spare_capacity_mut().zeroize();
    }
}

#[cfg(feature = "alloc")]
impl<Z> ZeroizeOnDrop for Vec<Z> where Z: ZeroizeOnDrop {}
// `Box<[Z]>` has no spare capacity, so zeroizing the elements suffices.
#[cfg(feature = "alloc")]
impl<Z> Zeroize for Box<[Z]>
where
    Z: Zeroize,
{
    fn zeroize(&mut self) {
        self.iter_mut().zeroize();
    }
}

#[cfg(feature = "alloc")]
impl<Z> ZeroizeOnDrop for Box<[Z]> where Z: ZeroizeOnDrop {}
// `Box<str>`: delegate to the `str` impl, which zeroes the UTF-8 bytes.
#[cfg(feature = "alloc")]
impl Zeroize for Box<str> {
    fn zeroize(&mut self) {
        self.as_mut().zeroize();
    }
}
// `String`: zeroize the underlying byte buffer, including spare capacity.
#[cfg(feature = "alloc")]
impl Zeroize for String {
    fn zeroize(&mut self) {
        // SAFETY: `Vec<u8>::zeroize` leaves the vector empty, and an empty
        // buffer is valid UTF-8, so the `String` invariant is preserved.
        unsafe { self.as_mut_vec() }.zeroize();
    }
}
// `CString` offers no mutable access to its buffer, so take ownership, wipe
// the bytes (including the trailing NUL), and replace `self` with an empty
// string.
#[cfg(feature = "std")]
impl Zeroize for CString {
    fn zeroize(&mut self) {
        // `mem::take` leaves a default (empty) CString in `self` while we
        // work on the original buffer.
        let this = mem::take(self);
        let mut buf = this.into_bytes_with_nul();
        // `Vec<u8>::zeroize` wipes the buffer (and spare capacity) and clears
        // it, so `buf` is empty — and thus free of interior NULs — afterward.
        buf.zeroize();
        let zeroed = CString::new(buf).expect("buf not truncated");
        let _ = mem::replace(self, zeroed);
    }
}
/// `Zeroizing` is a wrapper for any `Z: Zeroize` type which implements a
/// `Drop` handler that zeroizes the wrapped value when it goes out of scope.
#[derive(Debug, Default, Eq, PartialEq)]
pub struct Zeroizing<Z: Zeroize>(Z);
impl<Z> Zeroizing<Z>
where
    Z: Zeroize,
{
    /// Move `value` into a `Zeroizing` wrapper which zeroizes it on drop.
    #[inline(always)]
    pub fn new(value: Z) -> Self {
        Self(value)
    }
}
impl<Z: Zeroize + Clone> Clone for Zeroizing<Z> {
    #[inline(always)]
    fn clone(&self) -> Self {
        Self(self.0.clone())
    }

    #[inline(always)]
    fn clone_from(&mut self, source: &Self) {
        // Wipe the current contents before the in-place overwrite so the old
        // secret cannot be left behind by `clone_from`'s buffer reuse.
        self.0.zeroize();
        self.0.clone_from(&source.0);
    }
}
// Allows `Zeroizing::from(value)` / `value.into()` as an alternative to
// `Zeroizing::new`.
impl<Z> From<Z> for Zeroizing<Z>
where
    Z: Zeroize,
{
    #[inline(always)]
    fn from(value: Z) -> Zeroizing<Z> {
        Zeroizing(value)
    }
}
// Transparent access to the wrapped value.
impl<Z> ops::Deref for Zeroizing<Z>
where
    Z: Zeroize,
{
    type Target = Z;

    #[inline(always)]
    fn deref(&self) -> &Z {
        &self.0
    }
}

impl<Z> ops::DerefMut for Zeroizing<Z>
where
    Z: Zeroize,
{
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Z {
        &mut self.0
    }
}
// Forward `AsRef`/`AsMut` conversions to the wrapped value, so e.g.
// `Zeroizing<Vec<u8>>` can be passed where `&[u8]` is expected.
impl<T, Z> AsRef<T> for Zeroizing<Z>
where
    T: ?Sized,
    Z: AsRef<T> + Zeroize,
{
    #[inline(always)]
    fn as_ref(&self) -> &T {
        self.0.as_ref()
    }
}

impl<T, Z> AsMut<T> for Zeroizing<Z>
where
    T: ?Sized,
    Z: AsMut<T> + Zeroize,
{
    #[inline(always)]
    fn as_mut(&mut self) -> &mut T {
        self.0.as_mut()
    }
}
// Manual zeroization delegates to the inner value.
impl<Z> Zeroize for Zeroizing<Z>
where
    Z: Zeroize,
{
    fn zeroize(&mut self) {
        self.0.zeroize();
    }
}

// The `Drop` impl below is what makes this marker claim true.
impl<Z> ZeroizeOnDrop for Zeroizing<Z> where Z: Zeroize {}

impl<Z> Drop for Zeroizing<Z>
where
    Z: Zeroize,
{
    fn drop(&mut self) {
        self.0.zeroize()
    }
}
// Serde support: serialize/deserialize transparently as the inner value.
// NOTE(review): serializers/deserializers may hold intermediate copies of the
// secret in buffers this crate cannot zeroize.
#[cfg(feature = "serde")]
impl<Z> serde::Serialize for Zeroizing<Z>
where
    Z: Zeroize + serde::Serialize,
{
    #[inline(always)]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        self.0.serialize(serializer)
    }
}

#[cfg(feature = "serde")]
impl<'de, Z> serde::Deserialize<'de> for Zeroizing<Z>
where
    Z: Zeroize + serde::Deserialize<'de>,
{
    #[inline(always)]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        Ok(Self(Z::deserialize(deserializer)?))
    }
}
/// Perform a compiler fence so the preceding volatile writes cannot be
/// reordered or elided during codegen. `compiler_fence` constrains the
/// compiler only; it emits no CPU-level memory fence instruction.
#[inline(always)]
fn atomic_fence() {
    atomic::compiler_fence(atomic::Ordering::SeqCst);
}
/// Perform a volatile write of `src` to `dst`, preventing the compiler from
/// treating the store as dead and removing it.
#[inline(always)]
fn volatile_write<T: Copy + Sized>(dst: &mut T, src: T) {
    let target: *mut T = dst;
    // SAFETY: `target` is derived from an exclusive reference, so it is
    // non-null, properly aligned, and valid for writes of `T`.
    unsafe { ptr::write_volatile(target, src) }
}
/// Perform a volatile `memset`-style operation, writing `src` to `count`
/// consecutive elements starting at `dst`. Volatile writes keep the optimizer
/// from coalescing or eliminating the stores.
///
/// # Safety
///
/// `dst` must be non-null, properly aligned, and valid for `count`
/// consecutive writes of `T`.
#[inline(always)]
unsafe fn volatile_set<T: Copy + Sized>(dst: *mut T, src: T, count: usize) {
    let mut index = 0;
    while index < count {
        // Caller guarantees `dst` is valid for `count` writes, so `index`
        // stays in bounds of the target region.
        ptr::write_volatile(dst.add(index), src);
        index += 1;
    }
}
/// Zeroizes a "flat" (`Sized`) value by volatile-writing zero over every byte
/// of its storage, followed by a compiler fence.
///
/// Only the `size_of::<F>()` bytes at `data` are wiped; anything reachable
/// through pointers or references stored inside `F` is NOT zeroized.
///
/// # Safety
///
/// `data` must be non-null, properly aligned, and valid for writes of
/// `size_of::<F>()` bytes. After the call the memory is all zero bytes, so
/// the caller must ensure `F` is either never read again or that all-zero is
/// a valid bit pattern for `F`.
#[inline(always)]
pub unsafe fn zeroize_flat_type<F: Sized>(data: *mut F) {
    let size = mem::size_of::<F>();
    // Byte-wise wipe of the whole object representation.
    volatile_set(data as *mut u8, 0, size);
    atomic_fence()
}
// Hidden support machinery. The two traits below form an autoref-based
// dispatch: calling `(&&mut value).zeroize_or_on_drop()` prefers the
// `AssertZeroizeOnDrop` impl (matched on `&&mut T`) when `T: ZeroizeOnDrop`,
// and otherwise auto-derefs to the blanket `AssertZeroize` impl.
// NOTE(review): presumably consumed by the derive-macro output — confirm
// against the `zeroize_derive` crate.
#[doc(hidden)]
pub mod __internal {
    use super::*;

    pub trait AssertZeroizeOnDrop {
        fn zeroize_or_on_drop(self);
    }

    impl<T: ZeroizeOnDrop + ?Sized> AssertZeroizeOnDrop for &&mut T {
        // Nothing to do here: the value zeroizes itself when dropped.
        fn zeroize_or_on_drop(self) {}
    }

    pub trait AssertZeroize {
        fn zeroize_or_on_drop(&mut self);
    }

    impl<T: Zeroize + ?Sized> AssertZeroize for T {
        fn zeroize_or_on_drop(&mut self) {
            self.zeroize()
        }
    }
}