use std::cmp;
use std::convert::TryInto;
use std::fmt;
use std::io;
use buffered_reader::BufferedReader;
use crate::types::{
AEADAlgorithm,
SymmetricAlgorithm,
};
use crate::utils::{
write_be_u64,
};
use crate::Error;
use crate::Result;
use crate::crypto::SessionKey;
use crate::seal;
use crate::parse::Cookie;
use crate::crypto::backend::{Backend, interface::Kdf};
/// Maximum nonce size of any supported AEAD algorithm, in bytes.
///
/// Used to size stack-allocated nonce buffers.
pub const MAX_NONCE_LEN: usize = 16;
pub(crate) fn chunk_size_usize(chunk_size: u64) -> Result<usize> {
chunk_size.try_into()
.map_err(|_| Error::InvalidOperation(
format!("AEAD chunk size exceeds size of \
virtual memory: {}", chunk_size)).into())
}
/// Collects the parameters needed to create an AEAD context.
///
/// Created via [`AEADAlgorithm::context`]; turned into a usable
/// context with [`Builder::for_encryption`] or
/// [`Builder::for_decryption`].
pub struct Builder<'a> {
    /// The symmetric cipher to combine with the AEAD mode.
    symm: SymmetricAlgorithm,
    /// The AEAD mode of operation.
    aead: AEADAlgorithm,
    /// The encryption/decryption key.
    key: &'a SessionKey,
    /// Additional authenticated data.
    aad: &'a [u8],
    /// The nonce.
    nonce: &'a [u8],
}
impl AEADAlgorithm {
pub fn context<'s>(self,
symm: SymmetricAlgorithm,
key: &'s SessionKey,
aad: &'s [u8],
nonce: &'s [u8])
-> Result<Builder<'s>>
{
if ! symm.is_supported() {
return Err(Error::UnsupportedSymmetricAlgorithm(symm).into());
}
use crate::crypto::backend::{Backend, interface::Aead};
if ! Backend::supports_algo_with_symmetric(self, symm) {
return Err(Error::UnsupportedAEADAlgorithm(self).into());
}
Ok(Builder {
symm,
aead: self,
key,
aad,
nonce,
})
}
}
impl Builder<'_> {
    /// Consumes the builder and returns a context for encryption.
    pub fn for_encryption(self) -> Result<EncryptionContext> {
        use crate::crypto::backend::{Backend, interface::Aead};
        let ctx = Backend::context(self.aead, self.symm,
                                   self.key.as_protected(),
                                   self.aad, self.nonce,
                                   CipherOp::Encrypt)?;
        Ok(EncryptionContext(ctx))
    }

    /// Consumes the builder and returns a context for decryption.
    pub fn for_decryption(self) -> Result<DecryptionContext> {
        use crate::crypto::backend::{Backend, interface::Aead};
        let ctx = Backend::context(self.aead, self.symm,
                                   self.key.as_protected(),
                                   self.aad, self.nonce,
                                   CipherOp::Decrypt)?;
        Ok(DecryptionContext(ctx))
    }
}
pub struct EncryptionContext(Box<dyn Context>);
impl EncryptionContext {
pub fn encrypt_seal(&mut self, dst: &mut [u8], src: &[u8]) -> Result<()> {
if dst.len() != src.len() + self.digest_size() {
return Err(Error::InvalidOperation(
"invalid buffer length".into()).into());
}
self.0.encrypt_seal(dst, src)
}
pub fn digest_size(&self) -> usize {
self.0.digest_size()
}
}
pub struct DecryptionContext(Box<dyn Context>);
impl DecryptionContext {
pub fn decrypt_verify(&mut self, dst: &mut [u8], src: &[u8]) -> Result<()> {
if dst.len() + self.digest_size() != src.len() {
return Err(Error::InvalidOperation(
"invalid buffer length".into()).into());
}
self.0.decrypt_verify(dst, src)
}
pub fn digest_size(&self) -> usize {
self.0.digest_size()
}
}
/// Low-level interface to an AEAD cipher, implemented by the crypto
/// backends.
///
/// Sealed so that only this crate can provide implementations.
pub(crate) trait Context: seal::Sealed {
    /// Encrypts `src` into `dst`; `dst` holds the ciphertext plus
    /// the authentication tag.
    fn encrypt_seal(&mut self, dst: &mut [u8], src: &[u8]) -> Result<()>;

    /// Returns the size of the authentication tag in bytes.
    //
    // Not used by every backend, hence the allow.
    #[allow(dead_code)] fn digest_size(&self) -> usize;

    /// Decrypts `src` into `dst`, verifying the trailing
    /// authentication tag.
    fn decrypt_verify(&mut self, dst: &mut [u8], src: &[u8]) -> Result<()>;
}
/// Direction a cipher context is created for.
pub(crate) enum CipherOp {
    /// The context will be used to encrypt.
    Encrypt,
    /// The context will be used to decrypt.
    Decrypt,
}
impl AEADAlgorithm {
    /// Returns the size of the authentication tag in bytes.
    ///
    /// Fails with `UnsupportedAEADAlgorithm` for algorithms this
    /// crate does not know about.
    pub fn digest_size(&self) -> Result<usize> {
        use self::AEADAlgorithm::*;
        match self {
            // All supported algorithms use a 16 byte tag.
            EAX | OCB | GCM => Ok(16),
            _ => Err(Error::UnsupportedAEADAlgorithm(*self).into()),
        }
    }

    /// Returns the size of the nonce in bytes.
    ///
    /// Fails with `UnsupportedAEADAlgorithm` for algorithms this
    /// crate does not know about.
    pub fn nonce_size(&self) -> Result<usize> {
        use self::AEADAlgorithm::*;
        let size = match self {
            EAX => 16,
            OCB => 15,
            GCM => 12,
            _ => return Err(Error::UnsupportedAEADAlgorithm(*self).into()),
        };
        Ok(size)
    }
}
/// Supplies the per-chunk cryptographic parameters (key, nonce, and
/// additional authenticated data) for a chunked AEAD stream.
pub trait Schedule<T>: Send + Sync {
    /// Invokes `fun` with the key, nonce, and additional data for
    /// the chunk with the given `index`, returning its result.
    fn chunk(&self,
             index: u64,
             fun: &mut dyn FnMut(&SessionKey, &[u8], &[u8]) -> Result<T>)
             -> Result<T>;

    /// Invokes `fun` with the key, nonce, and additional data for
    /// the final authentication tag.
    ///
    /// `index` is the index following the last chunk, `length` the
    /// total number of plaintext bytes processed.
    fn finalizer(&self,
                 index: u64,
                 length: u64,
                 fun: &mut dyn FnMut(&SessionKey, &[u8], &[u8]) -> Result<T>)
                 -> Result<T>;
}
/// A [`Schedule`] for SEIPDv2-style chunked AEAD messages.
#[derive(Clone)]
pub struct SEIPv2Schedule {
    /// The message key derived via HKDF-SHA256.
    key: SessionKey,
    /// The derived nonce prefix; the remaining bytes of each nonce
    /// carry the big-endian chunk index.
    nonce: Box<[u8]>,
    /// The additional-data prefix (pretended packet header).
    ad: [u8; Self::AD_PREFIX_LEN],
    /// The full nonce length of the AEAD algorithm in use.
    nonce_len: usize,
}
impl SEIPv2Schedule {
    /// Smallest allowed chunk size: 2^6 bytes.
    const MIN_CHUNK_SIZE: usize = 1 << 6;
    /// Largest allowed chunk size: 2^22 bytes.
    const MAX_CHUNK_SIZE: usize = 1 << 22;
    /// Length of the additional-data prefix.
    const AD_PREFIX_LEN: usize = 5;

    /// Creates a schedule by deriving the message key and nonce
    /// prefix from `session_key` and `salt` using HKDF-SHA256.
    ///
    /// `chunk_size` must lie in
    /// `MIN_CHUNK_SIZE..=MAX_CHUNK_SIZE`, otherwise an
    /// `InvalidArgument` error is returned.
    pub fn new(session_key: &SessionKey,
               sym_algo: SymmetricAlgorithm,
               aead: AEADAlgorithm,
               chunk_size: usize,
               salt: &[u8]) -> Result<Self>
    {
        if chunk_size < Self::MIN_CHUNK_SIZE
            || chunk_size > Self::MAX_CHUNK_SIZE
        {
            return Err(Error::InvalidArgument(
                format!("Invalid AEAD chunk size: {}", chunk_size)).into());
        }

        let key_size = sym_algo.key_size()?;
        let nonce_len = aead.nonce_size()?;
        // The last eight bytes of each nonce hold the chunk index,
        // so only the prefix is derived here.
        let prefix_size = nonce_len - 8;

        // Additional data: pretended packet header, the algorithms,
        // and the chunk size exponent.
        let ad = [
            0xd2, 2, sym_algo.into(),
            aead.into(),
            chunk_size.trailing_zeros() as u8 - 6,
        ];

        // Derive the key material and the nonce prefix in one go.
        let mut key_nonce: SessionKey =
            vec![0; key_size + prefix_size].into();
        Backend::hkdf_sha256(session_key, Some(salt), &ad, &mut key_nonce)?;

        let key: SessionKey = key_nonce[..key_size].to_vec().into();
        let nonce: Box<[u8]> = key_nonce[key_size..].to_vec().into();

        Ok(Self {
            key,
            nonce,
            ad,
            nonce_len,
        })
    }
}
impl<T> Schedule<T> for SEIPv2Schedule {
    fn chunk(&self,
             index: u64,
             fun: &mut dyn FnMut(&SessionKey, &[u8], &[u8]) -> Result<T>)
             -> Result<T>
    {
        // The nonce is the derived prefix followed by the big-endian
        // chunk index.
        let mut nonce_store = [0u8; MAX_NONCE_LEN];
        let nonce = &mut nonce_store[..self.nonce_len];
        let prefix_len = self.nonce.len();
        nonce[..prefix_len].copy_from_slice(&self.nonce);
        nonce[prefix_len..].copy_from_slice(&index.to_be_bytes());

        fun(&self.key, nonce, &self.ad)
    }

    fn finalizer(&self,
                 index: u64,
                 length: u64,
                 fun: &mut dyn FnMut(&SessionKey, &[u8], &[u8]) -> Result<T>)
                 -> Result<T>
    {
        // For the final tag, the additional data is extended with
        // the total plaintext length in big-endian.
        let mut ad = [0u8; Self::AD_PREFIX_LEN + 8];
        ad[..Self::AD_PREFIX_LEN].copy_from_slice(&self.ad);
        write_be_u64(&mut ad[Self::AD_PREFIX_LEN..], length);

        // The nonce is constructed exactly as for a regular chunk.
        let mut nonce_store = [0u8; MAX_NONCE_LEN];
        let nonce = &mut nonce_store[..self.nonce_len];
        let prefix_len = self.nonce.len();
        nonce[..prefix_len].copy_from_slice(&self.nonce);
        nonce[prefix_len..].copy_from_slice(&index.to_be_bytes());

        fun(&self.key, nonce, &ad)
    }
}
/// A `Read`er decrypting and verifying a chunked AEAD stream.
pub(crate) struct InternalDecryptor<'a, 's> {
    /// The ciphertext source.
    source: Box<dyn BufferedReader<Cookie> + 'a>,
    /// The symmetric cipher in use.
    sym_algo: SymmetricAlgorithm,
    /// The AEAD mode in use.
    aead: AEADAlgorithm,
    /// Supplies per-chunk keys, nonces, and additional data.
    schedule: Box<dyn Schedule<DecryptionContext> + 's>,
    /// Size of the authentication tag in bytes.
    digest_size: usize,
    /// Number of plaintext bytes per chunk.
    chunk_size: usize,
    /// Index of the next chunk to decrypt.
    chunk_index: u64,
    /// Total number of plaintext bytes decrypted so far.
    bytes_decrypted: u64,
    /// Plaintext that was decrypted but not yet handed to the
    /// caller.
    buffer: Vec<u8>,
}
assert_send_and_sync!(InternalDecryptor<'_, '_>);
impl<'a, 's> InternalDecryptor<'a, 's> {
    /// Creates a decryptor for a chunked AEAD stream read from
    /// `source`.
    ///
    /// `chunk_size` is the plaintext chunk size; each ciphertext
    /// chunk additionally carries an authentication tag of
    /// `aead.digest_size()` bytes.
    pub fn new<R, S>(sym_algo: SymmetricAlgorithm,
                     aead: AEADAlgorithm, chunk_size: usize,
                     schedule: S, source: R)
                     -> Result<Self>
    where
        R: BufferedReader<Cookie> + 'a,
        S: Schedule<DecryptionContext> + 's,
    {
        Ok(InternalDecryptor {
            source: source.into_boxed(),
            sym_algo,
            aead,
            schedule: Box::new(schedule),
            digest_size: aead.digest_size()?,
            chunk_size,
            chunk_index: 0,
            bytes_decrypted: 0,
            buffer: Vec::with_capacity(chunk_size),
        })
    }

    /// Decrypts into `plaintext`, returning the number of bytes
    /// written.
    ///
    /// Errors with `ManipulatedMessage` on truncation, and
    /// propagates tag-verification failures from the backend.
    fn read_helper(&mut self, plaintext: &mut [u8]) -> Result<usize> {
        let mut pos = 0;

        // First, drain any plaintext left over from a previous call.
        if !self.buffer.is_empty() {
            let to_copy = cmp::min(self.buffer.len(), plaintext.len());
            plaintext[..to_copy].copy_from_slice(&self.buffer[..to_copy]);
            crate::vec_drain_prefix(&mut self.buffer, to_copy);
            pos = to_copy;
            if pos == plaintext.len() {
                return Ok(pos);
            }
        }

        // Number of chunks needed to satisfy the request, rounding
        // up.
        let n_chunks
            = (plaintext.len() - pos + self.chunk_size - 1) / self.chunk_size;
        let chunk_digest_size = self.chunk_size + self.digest_size;
        let final_digest_size = self.digest_size;

        for _ in 0..n_chunks {
            // Request one full chunk *plus* the final tag: if the
            // source returns less than that, we know the trailing
            // bytes are the final tag and this is the last chunk.
            let to_read = chunk_digest_size + final_digest_size;
            let result = {
                match self.source.data(to_read) {
                    Ok(_) => Ok(self.source.buffer()),
                    Err(err) => Err(err),
                }
            };

            let check_final_tag;
            let chunk = match result {
                Ok(chunk) => {
                    if chunk.is_empty() {
                        // Exhausted source.
                        return Ok(pos);
                    }
                    if chunk.len() < final_digest_size {
                        // Too short to even hold the final tag:
                        // the message was truncated.
                        return Err(Error::ManipulatedMessage.into());
                    }
                    // A short read means the end of the stream is in
                    // sight; the final tag must be verified below.
                    check_final_tag = chunk.len() < to_read;
                    // Strip the final tag from the current chunk.
                    &chunk[..cmp::min(chunk.len(), to_read) - final_digest_size]
                },
                Err(e) => return Err(e.into()),
            };
            assert!(chunk.len() <= chunk_digest_size);

            if chunk.is_empty() {
                // Nothing left to decrypt; only the final tag
                // remains (handled below).
            } else if chunk.len() <= self.digest_size {
                // A chunk no longer than its own tag is truncated.
                return Err(Error::ManipulatedMessage.into());
            } else {
                // Derive the per-chunk parameters and decrypt.
                let mut aead = self.schedule.chunk(
                    self.chunk_index,
                    &mut |key, iv, ad| {
                        self.aead.context(self.sym_algo, key, ad, iv)?
                            .for_decryption()
                    })?;

                let to_decrypt = chunk.len() - self.digest_size;
                // If the caller's buffer cannot hold the whole
                // chunk, decrypt into our internal buffer and copy
                // out what fits; the rest is returned on the next
                // call.
                let double_buffer = to_decrypt > plaintext.len() - pos;
                let buffer = if double_buffer {
                    self.buffer.resize(to_decrypt, 0);
                    &mut self.buffer[..]
                } else {
                    &mut plaintext[pos..pos + to_decrypt]
                };
                aead.decrypt_verify(buffer, chunk)?;
                if double_buffer {
                    let to_copy = plaintext.len() - pos;
                    assert!(0 < to_copy);
                    assert!(to_copy < self.chunk_size);
                    plaintext[pos..pos + to_copy]
                        .copy_from_slice(&self.buffer[..to_copy]);
                    crate::vec_drain_prefix(&mut self.buffer, to_copy);
                    pos += to_copy;
                } else {
                    pos += to_decrypt;
                }

                self.chunk_index += 1;
                self.bytes_decrypted += to_decrypt as u64;
                let chunk_len = chunk.len();
                self.source.consume(chunk_len);
            }

            if check_final_tag {
                // End of stream: verify the final tag authenticating
                // the whole message.
                let mut aead = self.schedule.finalizer(
                    self.chunk_index, self.bytes_decrypted,
                    &mut |key, iv, ad| {
                        self.aead.context(self.sym_algo, key, ad, iv)?
                            .for_decryption()
                    })?;
                let final_digest = self.source.data(final_digest_size)?;
                aead.decrypt_verify(&mut [], final_digest)?;
                self.source.consume(final_digest_size);
                break;
            }
        }

        Ok(pos)
    }
}
impl io::Read for InternalDecryptor<'_, '_> {
    /// Reads decrypted plaintext, mapping crate errors to
    /// `io::Error` (unwrapping wrapped `io::Error`s directly).
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.read_helper(buf).map_err(|e| {
            e.downcast::<io::Error>()
                .unwrap_or_else(|e| io::Error::new(io::ErrorKind::Other, e))
        })
    }
}
/// A `BufferedReader` decrypting a chunked AEAD stream.
pub struct Decryptor<'a, 's> {
    /// Buffering wrapper around the actual decryptor.
    reader: buffered_reader::Generic<InternalDecryptor<'a, 's>, Cookie>,
}
impl<'a, 's> Decryptor<'a, 's> {
    /// Instantiates a new decryptor with a default cookie.
    pub fn new<S>(symm: SymmetricAlgorithm,
                  aead: AEADAlgorithm,
                  chunk_size: usize,
                  schedule: S,
                  source: Box<dyn BufferedReader<Cookie> + 'a>)
                  -> Result<Self>
    where
        S: Schedule<DecryptionContext> + 's,
    {
        Self::with_cookie(symm, aead, chunk_size, schedule, source,
                          Default::default())
    }

    /// Instantiates a new decryptor using the given cookie.
    pub fn with_cookie<S>(symm: SymmetricAlgorithm,
                          aead: AEADAlgorithm,
                          chunk_size: usize,
                          schedule: S,
                          source: Box<dyn BufferedReader<Cookie> + 'a>,
                          cookie: Cookie)
                          -> Result<Self>
    where
        S: Schedule<DecryptionContext> + 's,
    {
        let inner = InternalDecryptor::new(
            symm, aead, chunk_size, schedule, source)?;
        let reader = buffered_reader::Generic::with_cookie(
            inner, None, cookie);
        Ok(Decryptor { reader })
    }
}
impl io::Read for Decryptor<'_, '_> {
    // Reads decrypted plaintext by delegating to the buffering
    // reader.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.reader.read(buf)
    }
}
impl fmt::Display for Decryptor<'_, '_> {
    // Only the type name is shown; no internal state is exposed.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("Decryptor")
    }
}
impl fmt::Debug for Decryptor<'_, '_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Decryptor")
            // Shows the underlying ciphertext source (via
            // `get_ref`), not the decrypted data.
            .field("reader", &self.get_ref().unwrap())
            .finish()
    }
}
// `BufferedReader` is implemented by delegating to the internal
// buffering reader.  Note that `get_mut`, `get_ref`, and
// `into_inner` bypass the decryptor and expose the underlying
// ciphertext source.
impl BufferedReader<Cookie> for Decryptor<'_, '_> {
    fn buffer(&self) -> &[u8] {
        self.reader.buffer()
    }

    fn data(&mut self, amount: usize) -> io::Result<&[u8]> {
        self.reader.data(amount)
    }

    fn data_hard(&mut self, amount: usize) -> io::Result<&[u8]> {
        self.reader.data_hard(amount)
    }

    fn data_eof(&mut self) -> io::Result<&[u8]> {
        self.reader.data_eof()
    }

    fn consume(&mut self, amount: usize) -> &[u8] {
        self.reader.consume(amount)
    }

    fn data_consume(&mut self, amount: usize)
                    -> io::Result<&[u8]> {
        self.reader.data_consume(amount)
    }

    fn data_consume_hard(&mut self, amount: usize) -> io::Result<&[u8]> {
        self.reader.data_consume_hard(amount)
    }

    fn read_be_u16(&mut self) -> io::Result<u16> {
        self.reader.read_be_u16()
    }

    fn read_be_u32(&mut self) -> io::Result<u32> {
        self.reader.read_be_u32()
    }

    fn steal(&mut self, amount: usize) -> io::Result<Vec<u8>> {
        self.reader.steal(amount)
    }

    fn steal_eof(&mut self) -> io::Result<Vec<u8>> {
        self.reader.steal_eof()
    }

    // Returns the ciphertext source, not the decrypting reader.
    fn get_mut(&mut self) -> Option<&mut dyn BufferedReader<Cookie>> {
        Some(&mut self.reader.reader_mut().source)
    }

    // Returns the ciphertext source, not the decrypting reader.
    fn get_ref(&self) -> Option<&dyn BufferedReader<Cookie>> {
        Some(&self.reader.reader_ref().source)
    }

    // Unwraps down to the ciphertext source.
    fn into_inner<'b>(self: Box<Self>)
                      -> Option<Box<dyn BufferedReader<Cookie> + 'b>> where Self: 'b {
        Some(self.reader.into_reader().source.into_boxed())
    }

    fn cookie_set(&mut self, cookie: Cookie) -> Cookie {
        self.reader.cookie_set(cookie)
    }

    fn cookie_ref(&self) -> &Cookie {
        self.reader.cookie_ref()
    }

    fn cookie_mut(&mut self) -> &mut Cookie {
        self.reader.cookie_mut()
    }
}
/// A `Write`r encrypting a chunked AEAD stream into `inner`.
pub struct Encryptor<'s, W: io::Write> {
    /// The sink; `None` once the stream has been finalized (or the
    /// writer taken).
    inner: Option<W>,
    /// The symmetric cipher in use.
    sym_algo: SymmetricAlgorithm,
    /// The AEAD mode in use.
    aead: AEADAlgorithm,
    /// Supplies per-chunk keys, nonces, and additional data.
    schedule: Box<dyn Schedule<EncryptionContext> + 's>,
    /// Size of the authentication tag in bytes.
    digest_size: usize,
    /// Number of plaintext bytes per chunk.
    chunk_size: usize,
    /// Index of the next chunk to encrypt.
    chunk_index: u64,
    /// Total number of plaintext bytes encrypted so far.
    bytes_encrypted: u64,
    /// Plaintext of the current, not-yet-complete chunk.
    buffer: Vec<u8>,
    /// Scratch space holding one encrypted chunk plus its tag.
    scratch: Vec<u8>,
}
assert_send_and_sync!(Encryptor<'_, W> where W: io::Write);
impl<'s, W: io::Write> Encryptor<'s, W> {
    /// Instantiates a new encryptor writing the chunked AEAD stream
    /// to `sink`.
    pub fn new<S>(sym_algo: SymmetricAlgorithm, aead: AEADAlgorithm,
                  chunk_size: usize, schedule: S, sink: W)
                  -> Result<Self>
    where
        S: Schedule<EncryptionContext> + 's,
    {
        Ok(Encryptor {
            inner: Some(sink),
            sym_algo,
            aead,
            schedule: Box::new(schedule),
            digest_size: aead.digest_size()?,
            chunk_size,
            chunk_index: 0,
            bytes_encrypted: 0,
            buffer: Vec::with_capacity(chunk_size),
            // Sized for one full ciphertext chunk plus its tag.
            scratch: vec![0; chunk_size + aead.digest_size()?],
        })
    }

    /// Encrypts and writes the data in `buf`; on success all of
    /// `buf` has been consumed.
    fn write_helper(&mut self, mut buf: &[u8]) -> Result<usize> {
        if self.inner.is_none() {
            return Err(io::Error::new(io::ErrorKind::BrokenPipe,
                                      "Inner writer was taken").into());
        }
        let amount = buf.len();

        // First, top up the partial chunk buffered from previous
        // writes, and flush it if it becomes complete.
        if !self.buffer.is_empty() {
            let n = cmp::min(buf.len(), self.chunk_size - self.buffer.len());
            self.buffer.extend_from_slice(&buf[..n]);
            assert!(self.buffer.len() <= self.chunk_size);
            buf = &buf[n..];

            if self.buffer.len() == self.chunk_size {
                // The buffer now holds a whole chunk: encrypt and
                // write it out.
                let mut aead =
                    self.schedule.chunk(self.chunk_index, &mut |key, iv, ad| {
                        self.aead.context(self.sym_algo, key, ad, iv)?
                            .for_encryption()
                    })?;

                let inner = self.inner.as_mut().unwrap();
                aead.encrypt_seal(&mut self.scratch, &self.buffer)?;
                self.bytes_encrypted += self.chunk_size as u64;
                self.chunk_index += 1;
                crate::vec_truncate(&mut self.buffer, 0);
                inner.write_all(&self.scratch)?;
            }
        }

        // Then, encrypt every complete chunk of the remaining input
        // directly, buffering any trailing partial chunk.
        for chunk in buf.chunks(self.chunk_size) {
            if chunk.len() == self.chunk_size {
                // Complete chunk: encrypt and write.
                let mut aead =
                    self.schedule.chunk(self.chunk_index, &mut |key, iv, ad| {
                        self.aead.context(self.sym_algo, key, ad, iv)?
                            .for_encryption()
                    })?;

                let inner = self.inner.as_mut().unwrap();
                aead.encrypt_seal(&mut self.scratch, chunk)?;
                self.bytes_encrypted += self.chunk_size as u64;
                self.chunk_index += 1;
                inner.write_all(&self.scratch)?;
            } else {
                // Partial trailing chunk: stash it for the next
                // write or for finalization.
                assert!(self.buffer.is_empty());
                self.buffer.extend_from_slice(chunk);
            }
        }

        Ok(amount)
    }

    /// Finishes the stream and returns the underlying sink.
    ///
    /// Prefer this over relying on `Drop`, which must swallow any
    /// finalization error.
    pub fn finalize(mut self) -> Result<W> {
        self.finalize_intern()
    }

    /// Encrypts any buffered partial chunk, then writes the final
    /// authentication tag.  Fails with `BrokenPipe` if the sink was
    /// already taken.
    fn finalize_intern(&mut self) -> Result<W> {
        if let Some(mut inner) = self.inner.take() {
            // Encrypt the trailing partial chunk, if any.
            if !self.buffer.is_empty() {
                let mut aead =
                    self.schedule.chunk(self.chunk_index, &mut |key, iv, ad| {
                        self.aead.context(self.sym_algo, key, ad, iv)?
                            .for_encryption()
                    })?;

                unsafe {
                    // SAFETY: `scratch` was created with
                    // `chunk_size + digest_size` zero-initialized
                    // elements, and `buffer.len() < chunk_size`
                    // (checked below), so the new length neither
                    // exceeds the capacity nor exposes
                    // uninitialized memory.
                    debug_assert!(self.buffer.len() < self.chunk_size);
                    self.scratch.set_len(self.buffer.len() + self.digest_size)
                }
                aead.encrypt_seal(&mut self.scratch, &self.buffer)?;
                self.bytes_encrypted += self.buffer.len() as u64;
                self.chunk_index += 1;
                crate::vec_truncate(&mut self.buffer, 0);
                inner.write_all(&self.scratch)?;
            }

            // Write the final tag authenticating the whole message.
            let mut aead = self.schedule.finalizer(
                self.chunk_index, self.bytes_encrypted,
                &mut |key, iv, ad| {
                    self.aead.context(self.sym_algo, key, ad, iv)?
                        .for_encryption()
                })?;

            debug_assert!(self.digest_size <= self.scratch.len());
            aead.encrypt_seal(&mut self.scratch[..self.digest_size], b"")?;
            inner.write_all(&self.scratch[..self.digest_size])?;

            Ok(inner)
        } else {
            Err(io::Error::new(io::ErrorKind::BrokenPipe,
                               "Inner writer was taken").into())
        }
    }

    /// Acquires a reference to the underlying writer, if any.
    pub(crate) fn get_ref(&self) -> Option<&W> {
        self.inner.as_ref()
    }

    /// Acquires a mutable reference to the underlying writer, if
    /// any.
    #[allow(dead_code)]
    pub(crate) fn get_mut(&mut self) -> Option<&mut W> {
        self.inner.as_mut()
    }
}
impl<W: io::Write> io::Write for Encryptor<'_, W> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
match self.write_helper(buf) {
Ok(n) => Ok(n),
Err(e) => match e.downcast::<io::Error>() {
Ok(e) => Err(e),
Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)),
},
}
}
fn flush(&mut self) -> io::Result<()> {
if let Some(ref mut inner) = self.inner {
inner.flush()
} else {
Err(io::Error::new(io::ErrorKind::BrokenPipe,
"Inner writer was taken"))
}
}
}
impl<W: io::Write> Drop for Encryptor<'_, W> {
    fn drop(&mut self) {
        // Try to finalize the stream.  Errors cannot be reported
        // from `drop`, so they are deliberately ignored; call
        // `finalize()` explicitly to observe them.
        let _ = self.finalize_intern();
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::{Read, Write};

    /// Encrypts and decrypts the manifesto with every supported
    /// combination of symmetric cipher and AEAD mode, checking that
    /// the plaintext round-trips.
    #[test]
    fn roundtrip() {
        for sym_algo in [SymmetricAlgorithm::AES128,
                         SymmetricAlgorithm::AES192,
                         SymmetricAlgorithm::AES256,
                         SymmetricAlgorithm::Twofish,
                         SymmetricAlgorithm::Camellia128,
                         SymmetricAlgorithm::Camellia192,
                         SymmetricAlgorithm::Camellia256]
            .iter()
            .filter(|algo| algo.is_supported()) {
            for aead in [
                AEADAlgorithm::EAX,
                AEADAlgorithm::OCB,
                AEADAlgorithm::GCM,
            ].iter().filter(|algo| {
                use crate::crypto::backend::{Backend, interface::Aead};
                // Skip combinations the backend does not support.
                Backend::supports_algo_with_symmetric(**algo, *sym_algo)
            }) {
                let chunk_size = 64;

                // Fresh random key and salt for each combination.
                let mut key = vec![0; sym_algo.key_size().unwrap()];
                crate::crypto::random(&mut key).unwrap();
                let key: SessionKey = key.into();
                let mut iv = vec![0; aead.nonce_size().unwrap()];
                crate::crypto::random(&mut iv).unwrap();

                let mut ciphertext = Vec::new();
                {
                    let schedule = SEIPv2Schedule::new(
                        &key,
                        *sym_algo,
                        *aead,
                        chunk_size,
                        &iv).expect("valid parameters");
                    let mut encryptor = Encryptor::new(*sym_algo,
                                                       *aead,
                                                       chunk_size,
                                                       schedule,
                                                       &mut ciphertext)
                        .unwrap();

                    encryptor.write_all(crate::tests::manifesto()).unwrap();
                    // Dropping the encryptor finalizes the stream.
                }

                let mut plaintext = Vec::new();
                {
                    let cur = buffered_reader::Memory::with_cookie(
                        &ciphertext, Default::default());

                    let schedule = SEIPv2Schedule::new(
                        &key,
                        *sym_algo,
                        *aead,
                        chunk_size,
                        &iv).expect("valid parameters");
                    let mut decryptor = Decryptor::new(*sym_algo,
                                                       *aead,
                                                       chunk_size,
                                                       schedule,
                                                       cur.into_boxed())
                        .unwrap();

                    decryptor.read_to_end(&mut plaintext).unwrap();
                }

                assert_eq!(&plaintext[..], crate::tests::manifesto());
            }
        }
    }
}