use std::cmp;
use std::fmt;
use std::io;
use nettle::{aead, cipher};
use buffered_reader::BufferedReader;
use crate::types::{
AEADAlgorithm,
SymmetricAlgorithm,
};
use crate::utils::{
write_be_u64,
};
use crate::Error;
use crate::Result;
use crate::crypto::SessionKey;
use crate::crypto::mem::secure_cmp;
use crate::parse::Cookie;
/// Disables authentication checks.
///
/// This is DANGEROUS, and is only useful for debugging problems with
/// malformed AEAD-encrypted messages.  If set to `true`, the chunk
/// and final tag comparisons in the decryptor are ignored, voiding
/// all integrity guarantees.  Must remain `false` in production.
const DANGER_DISABLE_AUTHENTICATION: bool = false;
impl AEADAlgorithm {
    /// Returns the size in bytes of the authentication tag produced
    /// by this algorithm.
    ///
    /// Returns `Error::UnsupportedAEADAlgorithm` for algorithms this
    /// build does not implement.
    pub fn digest_size(&self) -> Result<usize> {
        use self::AEADAlgorithm::*;
        match *self {
            EAX => Ok(aead::Eax::<cipher::Aes128>::DIGEST_SIZE),
            OCB => Ok(16),
            _ => Err(Error::UnsupportedAEADAlgorithm(*self).into()),
        }
    }

    /// Returns the size in bytes of the initialization vector (nonce)
    /// this algorithm expects.
    pub fn iv_size(&self) -> Result<usize> {
        use self::AEADAlgorithm::*;
        match *self {
            EAX => Ok(16),
            // The OCB nonce is at most 15 octets.
            OCB => Ok(15),
            _ => Err(Error::UnsupportedAEADAlgorithm(*self).into()),
        }
    }

    /// Creates a nettle AEAD context for this algorithm, keyed with
    /// `key` and using the nonce `nonce`, with `sym_algo` as the
    /// underlying block cipher.
    pub fn context(&self, sym_algo: SymmetricAlgorithm, key: &[u8], nonce: &[u8])
                   -> Result<Box<dyn aead::Aead>>
    {
        // All supported combinations only differ in the cipher type
        // parameter; generate each arm from one template.
        macro_rules! eax {
            ($cipher:ty) => {
                Ok(Box::new(aead::Eax::<$cipher>::with_key_and_nonce(key, nonce)?)
                   as Box<dyn aead::Aead>)
            };
        }

        match self {
            AEADAlgorithm::EAX => match sym_algo {
                SymmetricAlgorithm::AES128 => eax!(cipher::Aes128),
                SymmetricAlgorithm::AES192 => eax!(cipher::Aes192),
                SymmetricAlgorithm::AES256 => eax!(cipher::Aes256),
                SymmetricAlgorithm::Twofish => eax!(cipher::Twofish),
                SymmetricAlgorithm::Camellia128 => eax!(cipher::Camellia128),
                SymmetricAlgorithm::Camellia192 => eax!(cipher::Camellia192),
                SymmetricAlgorithm::Camellia256 => eax!(cipher::Camellia256),
                _ =>
                    Err(Error::UnsupportedSymmetricAlgorithm(sym_algo).into()),
            },
            _ =>
                Err(Error::UnsupportedAEADAlgorithm(self.clone()).into()),
        }
    }
}
/// Size of the constant prefix of the associated data: packet tag
/// (0xd4), version, cipher octet, AEAD mode octet, and the chunk size
/// exponent (see the `ad` arrays below).
const AD_PREFIX_LEN: usize = 5;
/// A `Read`er for decrypting and authenticating AEAD-encrypted,
/// chunked data.
pub struct Decryptor<'a> {
    // The ciphertext source.
    source: Box<dyn BufferedReader<Cookie> + 'a>,
    sym_algo: SymmetricAlgorithm,
    aead: AEADAlgorithm,
    key: SessionKey,
    // Starting IV; the chunk index is XORed in per chunk (and undone
    // again) by `make_aead`.
    iv: Box<[u8]>,
    // Associated-data template: constant 5-byte prefix, 8-byte chunk
    // index, 8-byte total length (the length is only hashed for the
    // final tag).
    ad: [u8; AD_PREFIX_LEN + 8 + 8],
    digest_size: usize,
    chunk_size: usize,
    // Index of the chunk being decrypted next, starting at 0.
    chunk_index: u64,
    // Total plaintext bytes decrypted so far; authenticated by the
    // final tag.
    bytes_decrypted: u64,
    // Decrypted plaintext that did not fit into the caller's buffer.
    buffer: Vec<u8>,
}
impl<'a> Decryptor<'a> {
    /// Instantiates a new AEAD decryptor reading ciphertext from
    /// `source`.
    ///
    /// The ciphertext is a sequence of encrypted chunks of
    /// `chunk_size` bytes, each followed by its authentication tag,
    /// terminated by a final tag authenticating the whole message.
    pub fn new<R: io::Read>(version: u8, sym_algo: SymmetricAlgorithm,
                            aead: AEADAlgorithm, chunk_size: usize,
                            iv: &[u8], key: &SessionKey, source: R)
                            -> Result<Self>
        where R: 'a
    {
        Self::from_buffered_reader(
            version, sym_algo, aead, chunk_size, iv, key,
            Box::new(buffered_reader::Generic::with_cookie(
                source, None, Default::default())))
    }

    fn from_buffered_reader(version: u8, sym_algo: SymmetricAlgorithm,
                            aead: AEADAlgorithm, chunk_size: usize,
                            iv: &[u8], key: &SessionKey,
                            source: Box<dyn 'a + BufferedReader<Cookie>>)
                            -> Result<Self>
    {
        Ok(Decryptor {
            source,
            sym_algo,
            aead,
            key: key.clone(),
            iv: Vec::from(iv).into_boxed_slice(),
            // Associated data: packet tag, version, algorithm octets,
            // chunk size exponent, then room for the chunk index and
            // (for the final tag) the total plaintext length.
            ad: [
                0xd4, version, sym_algo.into(), aead.into(),
                chunk_size.trailing_zeros() as u8 - 6,
                0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0,
            ],
            digest_size: aead.digest_size()?,
            chunk_size,
            chunk_index: 0,
            bytes_decrypted: 0,
            buffer: Vec::with_capacity(chunk_size),
        })
    }

    /// Hashes the associated data for the current chunk into `aead`.
    ///
    /// If `final_digest` is set, the total number of plaintext bytes
    /// is included, as required for the message's final tag.
    fn hash_associated_data(&mut self, aead: &mut Box<dyn aead::Aead>,
                            final_digest: bool) {
        write_be_u64(&mut self.ad[AD_PREFIX_LEN..AD_PREFIX_LEN + 8],
                     self.chunk_index);

        if final_digest {
            write_be_u64(&mut self.ad[AD_PREFIX_LEN + 8..],
                         self.bytes_decrypted);
            aead.update(&self.ad);
        } else {
            aead.update(&self.ad[..AD_PREFIX_LEN + 8]);
        }
    }

    /// Creates an AEAD context for the current chunk.
    ///
    /// The per-chunk nonce is derived by XORing the big-endian chunk
    /// index into the last eight octets of the starting IV; the XOR
    /// is undone afterwards so `self.iv` retains its original value.
    fn make_aead(&mut self) -> Result<Box<dyn aead::Aead>> {
        // Stack array instead of a heap-allocated Vec; this runs once
        // per chunk.
        let mut chunk_index_be64 = [0u8; 8];
        write_be_u64(&mut chunk_index_be64, self.chunk_index);

        match self.aead {
            AEADAlgorithm::EAX => {
                let iv_len = self.iv.len();
                for (i, o) in self.iv[iv_len - 8..].iter_mut().enumerate() {
                    *o ^= chunk_index_be64[i];
                }

                let aead = self.aead.context(self.sym_algo, &self.key, &self.iv)?;

                // Undo the XOR so the IV can be reused for the next
                // chunk.
                for (i, o) in self.iv[iv_len - 8..].iter_mut().enumerate() {
                    *o ^= chunk_index_be64[i];
                }

                Ok(aead)
            }
            _ => Err(Error::UnsupportedAEADAlgorithm(self.aead).into()),
        }
    }

    /// Fills `plaintext` with decrypted, authenticated data, reading
    /// as many chunks from the source as necessary.  Returns the
    /// number of bytes written, which may be short at end of message.
    fn read_helper(&mut self, plaintext: &mut [u8]) -> Result<usize> {
        use std::cmp::Ordering;

        let mut pos = 0;
        let mut digest = vec![0u8; self.digest_size];

        // First, serve any plaintext decrypted on a previous call
        // that did not fit into the caller's buffer then.
        if self.buffer.len() > 0 {
            let to_copy = cmp::min(self.buffer.len(), plaintext.len());
            // Note: no stray `&` here -- copy_from_slice is called
            // for its effect, not its (unit) value.
            plaintext[..to_copy].copy_from_slice(&self.buffer[..to_copy]);
            crate::vec_drain_prefix(&mut self.buffer, to_copy);
            pos = to_copy;

            if pos == plaintext.len() {
                return Ok(pos);
            }
        }

        // Number of chunks needed to fill the rest of `plaintext`.
        let n_chunks
            = (plaintext.len() - pos + self.chunk_size - 1) / self.chunk_size;
        let chunk_digest_size = self.chunk_size + self.digest_size;
        let final_digest_size = self.digest_size;

        for _ in 0..n_chunks {
            let mut aead = self.make_aead()?;
            self.hash_associated_data(&mut aead, false);

            // Request one chunk, its tag, and enough extra to tell
            // whether the final tag immediately follows.
            let to_read = chunk_digest_size + final_digest_size;
            let result = {
                match self.source.data(to_read) {
                    Ok(_) => Ok(self.source.buffer()),
                    Err(err) => Err(err),
                }
            };

            let check_final_tag;
            let chunk = match result {
                Ok(chunk) => {
                    if chunk.len() == 0 {
                        // Exhausted source.
                        return Ok(pos);
                    }

                    if chunk.len() < final_digest_size {
                        // Truncated: even the final tag is missing.
                        return Err(Error::ManipulatedMessage.into());
                    }

                    // If we got less than requested, the final tag
                    // must be contained in what we got.
                    check_final_tag = chunk.len() < to_read;

                    // Strip the trailing final tag from the chunk data.
                    &chunk[..cmp::min(chunk.len(), to_read) - final_digest_size]
                },
                Err(e) => return Err(e.into()),
            };

            assert!(chunk.len() <= chunk_digest_size);

            if chunk.len() == 0 {
                // Nothing to decrypt; only the final tag remains.
            } else if chunk.len() <= self.digest_size {
                // A non-empty chunk must carry at least one byte of
                // ciphertext plus its tag.
                return Err(Error::ManipulatedMessage.into());
            } else {
                let to_decrypt = chunk.len() - self.digest_size;

                // If the chunk exceeds the remaining space in
                // `plaintext`, decrypt into self.buffer and copy what
                // fits; the remainder is served by the next call.
                let double_buffer = to_decrypt > plaintext.len() - pos;

                let buffer = if double_buffer {
                    self.buffer.resize(to_decrypt, 0);
                    &mut self.buffer[..]
                } else {
                    &mut plaintext[pos..pos + to_decrypt]
                };

                aead.decrypt(buffer, &chunk[..to_decrypt]);

                // Check the chunk's tag in constant time.
                aead.digest(&mut digest);
                if secure_cmp(&digest[..], &chunk[to_decrypt..])
                    != Ordering::Equal && ! DANGER_DISABLE_AUTHENTICATION
                {
                    return Err(Error::ManipulatedMessage.into());
                }

                if double_buffer {
                    let to_copy = plaintext.len() - pos;
                    assert!(0 < to_copy);
                    assert!(to_copy < self.chunk_size);
                    plaintext[pos..pos + to_copy]
                        .copy_from_slice(&self.buffer[..to_copy]);
                    crate::vec_drain_prefix(&mut self.buffer, to_copy);
                    pos += to_copy;
                } else {
                    pos += to_decrypt;
                }

                self.chunk_index += 1;
                self.bytes_decrypted += to_decrypt as u64;

                // Consume ciphertext and tag; length saved first so
                // the borrow of `chunk` ends before the mutable call.
                let chunk_len = chunk.len();
                self.source.consume(chunk_len);
            }

            if check_final_tag {
                // All chunks read; verify the final tag over the
                // empty string with the full associated data.
                let mut aead = self.make_aead()?;
                self.hash_associated_data(&mut aead, true);

                let mut nada = [0; 0];
                aead.decrypt(&mut nada, b"");
                aead.digest(&mut digest);

                let final_digest = self.source.data(final_digest_size)?;

                if final_digest.len() != final_digest_size
                    || secure_cmp(&digest[..], final_digest) != Ordering::Equal
                    && ! DANGER_DISABLE_AUTHENTICATION
                {
                    return Err(Error::ManipulatedMessage.into());
                }

                self.source.consume(final_digest_size);
                break;
            }
        }

        Ok(pos)
    }
}
impl<'a> io::Read for Decryptor<'a> {
    /// Delegates to `read_helper`, converting the crate's error type
    /// into `io::Error` (unwrapping genuine I/O errors, wrapping
    /// everything else as `ErrorKind::Other`).
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.read_helper(buf).map_err(|e| match e.downcast::<io::Error>() {
            Ok(io_error) => io_error,
            Err(other) => io::Error::new(io::ErrorKind::Other, other.compat()),
        })
    }
}
/// A `BufferedReader` specialized for reading from AEAD-encrypted
/// streams: wraps a `Decryptor` in a generic buffered reader.
pub(crate) struct BufferedReaderDecryptor<'a> {
    reader: buffered_reader::Generic<Decryptor<'a>, Cookie>,
}
impl<'a> BufferedReaderDecryptor<'a> {
    /// Instantiates a new buffered AEAD decryptor over `source`,
    /// attaching `cookie` to the buffered reader.
    pub fn with_cookie(version: u8, sym_algo: SymmetricAlgorithm,
                       aead: AEADAlgorithm, chunk_size: usize, iv: &[u8],
                       key: &SessionKey, source: Box<dyn BufferedReader<Cookie> + 'a>,
                       cookie: Cookie)
                       -> Result<Self>
    {
        let decryptor = Decryptor::new(
            version, sym_algo, aead, chunk_size, iv, key, source)?;
        let reader = buffered_reader::Generic::with_cookie(
            decryptor, None, cookie);
        Ok(BufferedReaderDecryptor { reader })
    }
}
impl<'a> io::Read for BufferedReaderDecryptor<'a> {
    /// Reads decrypted plaintext via the wrapped buffered reader.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        io::Read::read(&mut self.reader, buf)
    }
}
impl<'a> fmt::Display for BufferedReaderDecryptor<'a> {
    /// Displays the reader by its type name only.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("BufferedReaderDecryptor")
    }
}
impl<'a> fmt::Debug for BufferedReaderDecryptor<'a> {
    /// Debug-formats the reader, showing the underlying (encrypted)
    /// source rather than the decryptor's internal state.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut dbg = f.debug_struct("BufferedReaderDecryptor");
        dbg.field("reader", &self.get_ref().unwrap());
        dbg.finish()
    }
}
impl<'a> BufferedReader<Cookie> for BufferedReaderDecryptor<'a> {
    // All buffering operations simply delegate to the wrapped
    // Generic reader (idiomatic tail expressions instead of
    // `return ...;`).

    fn buffer(&self) -> &[u8] {
        self.reader.buffer()
    }

    fn data(&mut self, amount: usize) -> io::Result<&[u8]> {
        self.reader.data(amount)
    }

    fn data_hard(&mut self, amount: usize) -> io::Result<&[u8]> {
        self.reader.data_hard(amount)
    }

    fn data_eof(&mut self) -> io::Result<&[u8]> {
        self.reader.data_eof()
    }

    fn consume(&mut self, amount: usize) -> &[u8] {
        self.reader.consume(amount)
    }

    fn data_consume(&mut self, amount: usize)
                    -> io::Result<&[u8]> {
        self.reader.data_consume(amount)
    }

    fn data_consume_hard(&mut self, amount: usize) -> io::Result<&[u8]> {
        self.reader.data_consume_hard(amount)
    }

    fn read_be_u16(&mut self) -> io::Result<u16> {
        self.reader.read_be_u16()
    }

    fn read_be_u32(&mut self) -> io::Result<u32> {
        self.reader.read_be_u32()
    }

    fn steal(&mut self, amount: usize) -> io::Result<Vec<u8>> {
        self.reader.steal(amount)
    }

    fn steal_eof(&mut self) -> io::Result<Vec<u8>> {
        self.reader.steal_eof()
    }

    // The accessors below skip past the plaintext buffer and the
    // decryptor, exposing the underlying encrypted source.

    fn get_mut(&mut self) -> Option<&mut dyn BufferedReader<Cookie>> {
        Some(&mut self.reader.reader.source)
    }

    fn get_ref(&self) -> Option<&dyn BufferedReader<Cookie>> {
        Some(&self.reader.reader.source)
    }

    fn into_inner<'b>(self: Box<Self>)
                      -> Option<Box<dyn BufferedReader<Cookie> + 'b>>
        where Self: 'b
    {
        Some(self.reader.reader.source.as_boxed())
    }

    fn cookie_set(&mut self, cookie: Cookie) -> Cookie {
        self.reader.cookie_set(cookie)
    }

    fn cookie_ref(&self) -> &Cookie {
        self.reader.cookie_ref()
    }

    fn cookie_mut(&mut self) -> &mut Cookie {
        self.reader.cookie_mut()
    }
}
/// A `Write`r for AEAD-encrypting data in chunks.
pub struct Encryptor<W: io::Write> {
    // The sink receiving the ciphertext; `None` after `finish` (or a
    // failed write) has taken it.
    inner: Option<W>,
    sym_algo: SymmetricAlgorithm,
    aead: AEADAlgorithm,
    key: SessionKey,
    // Starting IV; the chunk index is XORed in per chunk (and undone
    // again) by `make_aead`.
    iv: Box<[u8]>,
    // Associated-data template: constant 5-byte prefix, 8-byte chunk
    // index, 8-byte total length (the length is only hashed for the
    // final tag).
    ad: [u8; AD_PREFIX_LEN + 8 + 8],
    digest_size: usize,
    chunk_size: usize,
    // Index of the chunk being encrypted next, starting at 0.
    chunk_index: u64,
    // Total plaintext bytes encrypted so far; authenticated by the
    // final tag.
    bytes_encrypted: u64,
    // Up to a chunk of unencrypted data.
    buffer: Vec<u8>,
    // Ciphertext staging area, one chunk in size.
    scratch: Vec<u8>,
}
impl<W: io::Write> Encryptor<W> {
    /// Instantiates a new AEAD encryptor writing ciphertext to
    /// `sink`.
    ///
    /// Plaintext is encrypted in chunks of `chunk_size` bytes, each
    /// followed by its tag; `finish` writes any partial final chunk
    /// and the message's final tag.
    pub fn new(version: u8, sym_algo: SymmetricAlgorithm, aead: AEADAlgorithm,
               chunk_size: usize, iv: &[u8], key: &SessionKey, sink: W)
               -> Result<Self> {
        Ok(Encryptor {
            inner: Some(sink),
            sym_algo,
            aead,
            key: key.clone(),
            iv: Vec::from(iv).into_boxed_slice(),
            // Associated data: packet tag, version, algorithm octets,
            // chunk size exponent, then room for the chunk index and
            // (for the final tag) the total plaintext length.
            ad: [
                0xd4, version, sym_algo.into(), aead.into(),
                chunk_size.trailing_zeros() as u8 - 6,
                0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0,
            ],
            digest_size: aead.digest_size()?,
            chunk_size,
            chunk_index: 0,
            bytes_encrypted: 0,
            buffer: Vec::with_capacity(chunk_size),
            // Zero-initialized instead of `with_capacity` +
            // `set_len`: exposing uninitialized bytes through a &mut
            // [u8] is undefined behavior, and zeroing one chunk is
            // cheap, one-time cost.
            scratch: vec![0; chunk_size],
        })
    }

    /// Hashes the associated data for the current chunk into `aead`.
    ///
    /// If `final_digest` is set, the total number of plaintext bytes
    /// is included, as required for the message's final tag.
    fn hash_associated_data(&mut self, aead: &mut Box<dyn aead::Aead>,
                            final_digest: bool) {
        write_be_u64(&mut self.ad[AD_PREFIX_LEN..AD_PREFIX_LEN + 8],
                     self.chunk_index);

        if final_digest {
            write_be_u64(&mut self.ad[AD_PREFIX_LEN + 8..],
                         self.bytes_encrypted);
            aead.update(&self.ad);
        } else {
            aead.update(&self.ad[..AD_PREFIX_LEN + 8]);
        }
    }

    /// Creates an AEAD context for the current chunk.
    ///
    /// The per-chunk nonce is derived by XORing the big-endian chunk
    /// index into the last eight octets of the starting IV; the XOR
    /// is undone afterwards so `self.iv` retains its original value.
    /// Mirrors `Decryptor::make_aead`.
    fn make_aead(&mut self) -> Result<Box<dyn aead::Aead>> {
        // Stack array instead of a heap-allocated Vec; this runs once
        // per chunk.
        let mut chunk_index_be64 = [0u8; 8];
        write_be_u64(&mut chunk_index_be64, self.chunk_index);

        match self.aead {
            AEADAlgorithm::EAX => {
                let iv_len = self.iv.len();
                for (i, o) in self.iv[iv_len - 8..].iter_mut().enumerate() {
                    *o ^= chunk_index_be64[i];
                }

                let aead = self.aead.context(self.sym_algo, &self.key, &self.iv)?;

                // Undo the XOR so the IV can be reused for the next
                // chunk.
                for (i, o) in self.iv[iv_len - 8..].iter_mut().enumerate() {
                    *o ^= chunk_index_be64[i];
                }

                Ok(aead)
            }
            _ => Err(Error::UnsupportedAEADAlgorithm(self.aead).into()),
        }
    }

    /// Encrypts and writes out every whole chunk that `buf`
    /// completes; a trailing partial chunk is stashed in
    /// `self.buffer` until more data arrives or `finish` is called.
    fn write_helper(&mut self, mut buf: &[u8]) -> Result<usize> {
        if self.inner.is_none() {
            return Err(io::Error::new(io::ErrorKind::BrokenPipe,
                                      "Inner writer was taken").into());
        }
        let amount = buf.len();

        // First, top up any partially-filled buffer from a previous
        // write.
        if self.buffer.len() > 0 {
            let n = cmp::min(buf.len(), self.chunk_size - self.buffer.len());
            self.buffer.extend_from_slice(&buf[..n]);
            assert!(self.buffer.len() <= self.chunk_size);
            buf = &buf[n..];

            // If that completed a chunk, encrypt and flush it.
            if self.buffer.len() == self.chunk_size {
                let mut aead = self.make_aead()?;
                self.hash_associated_data(&mut aead, false);

                let inner = self.inner.as_mut().unwrap();

                aead.encrypt(&mut self.scratch, &self.buffer);
                self.bytes_encrypted += self.scratch.len() as u64;
                self.chunk_index += 1;
                crate::vec_truncate(&mut self.buffer, 0);
                inner.write_all(&self.scratch)?;

                // Followed by the chunk's tag.
                aead.digest(&mut self.scratch[..self.digest_size]);
                inner.write_all(&self.scratch[..self.digest_size])?;
            }
        }

        // Then, encrypt all whole chunks of the remaining input.
        for chunk in buf.chunks(self.chunk_size) {
            if chunk.len() == self.chunk_size {
                let mut aead = self.make_aead()?;
                self.hash_associated_data(&mut aead, false);

                let inner = self.inner.as_mut().unwrap();

                aead.encrypt(&mut self.scratch, chunk);
                self.bytes_encrypted += self.scratch.len() as u64;
                self.chunk_index += 1;
                inner.write_all(&self.scratch)?;

                // Followed by the chunk's tag.
                aead.digest(&mut self.scratch[..self.digest_size]);
                inner.write_all(&self.scratch[..self.digest_size])?;
            } else {
                // Stash the partial chunk for later.
                assert!(self.buffer.is_empty());
                self.buffer.extend_from_slice(chunk);
            }
        }

        Ok(amount)
    }

    /// Finishes the encryption: encrypts any buffered partial chunk,
    /// writes the final tag, and returns the inner writer.
    ///
    /// Returns `BrokenPipe` if the inner writer was already taken.
    pub fn finish(&mut self) -> Result<W> {
        if let Some(mut inner) = self.inner.take() {
            if self.buffer.len() > 0 {
                let mut aead = self.make_aead()?;
                self.hash_associated_data(&mut aead, false);

                // Shrink the scratch space to the partial chunk's
                // size (resize, not unsafe set_len, so no
                // uninitialized bytes are ever exposed).
                self.scratch.resize(self.buffer.len(), 0);
                aead.encrypt(&mut self.scratch, &self.buffer);
                self.bytes_encrypted += self.scratch.len() as u64;
                self.chunk_index += 1;
                crate::vec_truncate(&mut self.buffer, 0);
                inner.write_all(&self.scratch)?;

                // Write the partial chunk's tag.  `resize` also
                // handles the case where the partial chunk was
                // smaller than the tag.
                self.scratch.resize(self.digest_size, 0);
                aead.digest(&mut self.scratch[..self.digest_size]);
                inner.write_all(&self.scratch[..self.digest_size])?;
            }

            // Write the final tag, computed over the empty string
            // with the full associated data (including the total
            // plaintext length).
            let mut aead = self.make_aead()?;
            self.hash_associated_data(&mut aead, true);
            let mut nada = [0; 0];
            aead.encrypt(&mut nada, b"");
            aead.digest(&mut self.scratch[..self.digest_size]);
            inner.write_all(&self.scratch[..self.digest_size])?;
            Ok(inner)
        } else {
            Err(io::Error::new(io::ErrorKind::BrokenPipe,
                               "Inner writer was taken").into())
        }
    }

    /// Acquires a reference to the inner writer, if it has not been
    /// taken by `finish`.
    pub fn get_ref(&self) -> Option<&W> {
        self.inner.as_ref()
    }

    /// Acquires a mutable reference to the inner writer, if it has
    /// not been taken by `finish`.
    #[allow(dead_code)]
    pub fn get_mut(&mut self) -> Option<&mut W> {
        self.inner.as_mut()
    }
}
impl<W: io::Write> io::Write for Encryptor<W> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
match self.write_helper(buf) {
Ok(n) => Ok(n),
Err(e) => match e.downcast::<io::Error>() {
Ok(e) => Err(e),
Err(e) => Err(io::Error::new(io::ErrorKind::Other,
e.compat())),
},
}
}
fn flush(&mut self) -> io::Result<()> {
if let Some(ref mut inner) = self.inner {
inner.flush()
} else {
Err(io::Error::new(io::ErrorKind::BrokenPipe,
"Inner writer was taken"))
}
}
}
impl<W: io::Write> Drop for Encryptor<W> {
    /// Finalizes the stream on drop if `finish` was not called
    /// explicitly.  Errors cannot be reported from `drop` and are
    /// deliberately discarded; call `finish` to observe them.
    fn drop(&mut self) {
        if self.inner.is_some() {
            let _ = self.finish();
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::{Read, Write};

    /// Encrypts the manifesto with every supported cipher, then
    /// decrypts and checks that the plaintext round-trips.
    #[test]
    fn roundtrip() {
        use std::io::Cursor;

        const VERSION: u8 = 1;
        const CHUNK_SIZE: usize = 64;

        let sym_algos = [
            SymmetricAlgorithm::AES128,
            SymmetricAlgorithm::AES192,
            SymmetricAlgorithm::AES256,
            SymmetricAlgorithm::Twofish,
            SymmetricAlgorithm::Camellia128,
            SymmetricAlgorithm::Camellia192,
            SymmetricAlgorithm::Camellia256,
        ];

        for &sym_algo in sym_algos.iter() {
            for &aead in [AEADAlgorithm::EAX].iter() {
                // Fresh random key and IV for each combination.
                let mut key = vec![0; sym_algo.key_size().unwrap()];
                crate::crypto::random(&mut key);
                let key: SessionKey = key.into();
                let mut iv = vec![0; aead.iv_size().unwrap()];
                crate::crypto::random(&mut iv);

                // Encrypt...
                let mut ciphertext = Vec::new();
                {
                    let mut encryptor = Encryptor::new(
                        VERSION, sym_algo, aead, CHUNK_SIZE, &iv, &key,
                        &mut ciphertext)
                        .unwrap();
                    encryptor.write_all(crate::tests::manifesto()).unwrap();
                }

                // ... decrypt, and compare.
                let mut plaintext = Vec::new();
                {
                    let mut decryptor = Decryptor::new(
                        VERSION, sym_algo, aead, CHUNK_SIZE, &iv, &key,
                        Cursor::new(&ciphertext))
                        .unwrap();
                    decryptor.read_to_end(&mut plaintext).unwrap();
                }

                assert_eq!(&plaintext[..], crate::tests::manifesto());
            }
        }
    }
}