1#![no_std]
15#![deny(unsafe_code)]
16#![cfg_attr(docsrs, feature(doc_cfg))]
17#![feature(array_chunks)]
18#![feature(let_chains)]
19#![feature(slice_as_chunks)]
20#![feature(split_array)]
21#![warn(missing_docs, rust_2018_idioms)]
22#![allow(non_snake_case)]
24
25use byteorder::{ByteOrder, LittleEndian};
26use cipher::{
27 consts::U16, BlockCipher, BlockDecrypt, BlockEncrypt, Key, KeyInit,
28 KeySizeUser,
29};
30use core::iter::zip;
31use polyval::{universal_hash::UniversalHash, Polyval};
32
33#[cfg(feature = "zeroize")]
34use zeroize::{Zeroize, ZeroizeOnDrop};
35
/// The size in bytes of one cipher block; the underlying block cipher
/// is required to have a 16-byte (128-bit) block (`BlockSize = U16`).
pub const BLOCK_SIZE: usize = 16;
38
/// An HCTR2 tweakable, length-preserving cipher built on a block
/// cipher `C` with a 16-byte block.
#[derive(Clone)]
pub struct Cipher<C>
where
    C: BlockCipher<BlockSize = U16>,
{
    /// The underlying block cipher, keyed in `KeyInit::new`.
    block: C,
    /// POLYVAL hash key, derived as `E_k(0^16)` in `KeyInit::new`.
    h: cipher::Block<C>,
    /// Masking block, derived as `E_k(le64(1) || 0^8)` in `KeyInit::new`.
    L: cipher::Block<C>,
    /// The tweak length that `state0`/`state1` are currently initialized
    /// for; `None` until the first encryption/decryption. Used to skip
    /// re-deriving the states when the tweak length is unchanged.
    tweak_len: Option<usize>,
    /// POLYVAL state pre-seeded with the tweak-length block used when the
    /// message tail is an exact multiple of the block size.
    state0: Polyval,
    /// POLYVAL state pre-seeded with the tweak-length block used when the
    /// message tail is NOT block-aligned.
    state1: Polyval,
}
60
#[cfg(feature = "zeroize")]
#[cfg_attr(docsrs, doc(cfg(feature = "zeroize")))]
impl<C> Drop for Cipher<C>
where
    C: BlockCipher<BlockSize = U16>,
{
    fn drop(&mut self) {
        // Wipe the key-derived secrets stored directly in this struct.
        // NOTE(review): the inner cipher `self.block` and the POLYVAL
        // states are not wiped here — presumably their own (feature-gated)
        // Drop impls handle that; confirm for the concrete `C` in use.
        self.h.zeroize();
        self.L.zeroize();
    }
}
73
#[cfg(feature = "zeroize")]
#[cfg_attr(docsrs, doc(cfg(feature = "zeroize")))]
// Marker trait: advertises that `Cipher`'s manual `Drop` zeroizes its
// sensitive fields when the "zeroize" feature is enabled.
impl<C> ZeroizeOnDrop for Cipher<C> where C: BlockCipher<BlockSize = U16> {}
77
// `Cipher` takes exactly the key size of the underlying block cipher;
// all subkeys are derived from it in `KeyInit::new`.
impl<C> KeySizeUser for Cipher<C>
where
    C: BlockCipher<BlockSize = U16> + KeyInit,
{
    type KeySize = C::KeySize;
}
84
85impl<C> KeyInit for Cipher<C>
86where
87 C: BlockCipher<BlockSize = U16> + BlockEncrypt + KeyInit,
88{
89 fn new(key: &Key<C>) -> Self {
94 let block = C::new(key);
95
96 let mut h = cipher::Block::<C>::default();
98 block.encrypt_block(&mut h);
99
100 let mut L = cipher::Block::<C>::default();
102 LittleEndian::write_u64(&mut L, 1);
103 block.encrypt_block(&mut L);
104
105 Cipher {
106 block,
107 h,
108 L,
109 tweak_len: None,
110 state0: Polyval::new(&h),
111 state1: Polyval::new(&h),
112 }
113 }
114}
115
116impl<C> Cipher<C>
117where
118 C: BlockCipher<BlockSize = U16> + BlockEncrypt + BlockDecrypt,
119{
120 pub fn encrypt(
125 &mut self,
126 ciphertext: &mut [u8],
127 plaintext: &[u8],
128 tweak: &[u8],
129 ) {
130 self.hctr2(&mut ciphertext[..plaintext.len()], plaintext, tweak, true)
131 }
132
133 pub fn decrypt(
138 &mut self,
139 plaintext: &mut [u8],
140 ciphertext: &[u8],
141 tweak: &[u8],
142 ) {
143 self.hctr2(&mut plaintext[..ciphertext.len()], ciphertext, tweak, false)
144 }
145
146 fn hctr2(&mut self, dst: &mut [u8], src: &[u8], tweak: &[u8], seal: bool) {
147 assert!(dst.len() >= BLOCK_SIZE);
148 assert!(src.len() >= BLOCK_SIZE);
149 assert!(dst.len() == src.len());
150
151 let (M, N) = src.split_array_ref::<BLOCK_SIZE>();
153
154 self.init_tweak(tweak);
155
156 let mut poly = match N.len() % BLOCK_SIZE {
157 0 => self.state0.clone(),
158 _ => self.state1.clone(),
159 };
160 poly.update_padded(tweak);
161
162 let mut state = poly.clone();
164
165 polyhash(&mut poly, N);
167 let MM = xor2(M, &poly.finalize_reset().into());
168
169 let mut UU = cipher::Block::<C>::default();
171 if seal {
172 self.block.encrypt_block_b2b(&MM.into(), &mut UU);
173 } else {
174 self.block.decrypt_block_b2b(&MM.into(), &mut UU);
175 }
176
177 let S = xor3(&MM, &UU.into(), &self.L.into());
179
180 let (U, V) = dst.split_array_mut::<BLOCK_SIZE>();
181
182 self.xctr(V, N, &S);
184
185 polyhash(&mut state, V);
187 xor_block_into(U, &UU.into(), &state.finalize_reset().into());
188 }
189
190 fn xctr(&self, dst: &mut [u8], src: &[u8], nonce: &[u8; BLOCK_SIZE]) {
191 assert!(dst.len() == src.len());
192
193 let mut ctr = [0u8; BLOCK_SIZE];
194 let mut i = 1u64;
195
196 let (dstHead, dstTail) = dst.as_chunks_mut::<BLOCK_SIZE>();
197 let (srcHead, srcTail) = src.as_chunks::<BLOCK_SIZE>();
198
199 for (dst, src) in zip(dstHead, srcHead) {
200 LittleEndian::write_u64(&mut ctr[..8], i);
202 LittleEndian::write_u64(&mut ctr[8..], 0);
203
204 xor_block_in_place(&mut ctr, nonce);
205 self.block.encrypt_block((&mut ctr).into());
206 xor_block_into(dst, src, &ctr);
207 i += 1;
208 }
209
210 if !dstTail.is_empty() {
211 LittleEndian::write_u64(&mut ctr[..8], i);
212 LittleEndian::write_u64(&mut ctr[8..], 0);
213
214 xor_block_in_place(&mut ctr, nonce);
215 self.block.encrypt_block((&mut ctr).into());
216 xor_into(dstTail, srcTail, &ctr);
217 }
218 }
219
220 pub fn encrypt_in_place(&mut self, plaintext: &mut [u8], tweak: &[u8]) {
224 self.hctr2_in_place(plaintext, tweak, true)
225 }
226
227 pub fn decrypt_in_place(&mut self, ciphertext: &mut [u8], tweak: &[u8]) {
231 self.hctr2_in_place(ciphertext, tweak, false)
232 }
233
234 fn hctr2_in_place(&mut self, data: &mut [u8], tweak: &[u8], seal: bool) {
235 assert!(data.len() >= BLOCK_SIZE);
236
237 let (M, N) = data.split_array_mut::<BLOCK_SIZE>();
239
240 self.init_tweak(tweak);
241
242 let mut poly = match N.len() % BLOCK_SIZE {
243 0 => self.state0.clone(),
244 _ => self.state1.clone(),
245 };
246 poly.update_padded(tweak);
247
248 let mut state = poly.clone();
250
251 polyhash(&mut poly, N);
253 let MM = xor2(M, &poly.finalize_reset().into());
254
255 let mut UU = cipher::Block::<C>::default();
257 if seal {
258 self.block.encrypt_block_b2b(&MM.into(), &mut UU);
259 } else {
260 self.block.decrypt_block_b2b(&MM.into(), &mut UU);
261 }
262
263 let S = xor3(&MM, &UU.into(), &self.L.into());
265
266 self.xctr_in_place(N, &S);
268
269 polyhash(&mut state, N);
271 xor_block_into(M, &UU.into(), &state.finalize_reset().into());
272 }
273
274 fn xctr_in_place(&self, data: &mut [u8], nonce: &[u8; BLOCK_SIZE]) {
275 let mut ctr = [0u8; BLOCK_SIZE];
276 let mut i = 1u64;
277
278 let (head, tail) = data.as_chunks_mut::<BLOCK_SIZE>();
279 for chunk in head {
280 LittleEndian::write_u64(&mut ctr[..8], i);
282 LittleEndian::write_u64(&mut ctr[8..], 0);
283
284 xor_block_in_place(&mut ctr, nonce);
285 self.block.encrypt_block((&mut ctr).into());
286 xor_block_in_place(chunk, &ctr);
287 i += 1;
288 }
289
290 if !tail.is_empty() {
291 LittleEndian::write_u64(&mut ctr[..8], i);
292 LittleEndian::write_u64(&mut ctr[8..], 0);
293
294 xor_block_in_place(&mut ctr, nonce);
295 self.block.encrypt_block((&mut ctr).into());
296 xor_in_place(tail, &ctr);
297 }
298 }
299
300 fn init_tweak(&mut self, tweak: &[u8]) {
301 if let Some(n) = self.tweak_len && n == tweak.len() {
305 return;
306 }
307
308 let l = (tweak.len() as u64) * 8 * 2 + 2;
316 let mut block = polyval::Block::default();
317
318 let poly = Polyval::new(&self.h);
319
320 LittleEndian::write_u64(&mut block, l);
321 self.state1.clone_from(&poly);
322 self.state0.update(&[block]);
323
324 LittleEndian::write_u64(&mut block, l + 1);
325 self.state1.clone_from(&poly);
326 self.state1.update(&[block]);
327
328 self.tweak_len = Some(tweak.len());
329 }
330}
331
332fn polyhash(p: &mut Polyval, src: &[u8]) {
333 let (head, tail) =
334 src.split_at((src.len() / polyval::BLOCK_SIZE) * polyval::BLOCK_SIZE);
335 if !head.is_empty() {
336 p.update_padded(head);
337 }
338 if !tail.is_empty() {
339 let mut block = polyval::Block::default();
340 block[..tail.len()].copy_from_slice(tail);
341 block[tail.len()] = 1;
342 p.update(&[block]);
343 }
344}
345
/// Writes `x ^ y` into `z` for the first `z.len()` bytes.
///
/// # Panics
///
/// Panics if `z` is longer than `N` or `x` is shorter than `z`.
fn xor_into<const N: usize>(z: &mut [u8], x: &[u8], y: &[u8; N]) {
    assert!(z.len() <= N);
    assert!(x.len() >= z.len());

    for (i, zi) in z.iter_mut().enumerate() {
        *zi = x[i] ^ y[i];
    }
}
355
/// XORs `x` into `z` byte-wise over `z.len()` bytes.
///
/// # Panics
///
/// Panics if `z` is longer than `N`.
#[inline(always)]
fn xor_in_place<const N: usize>(z: &mut [u8], x: &[u8; N]) {
    // Fail fast, matching `xor_into`'s precondition checks: without
    // this, an over-long `z` would be partially mutated before the
    // out-of-bounds index on `x[i]` panicked.
    assert!(z.len() <= N);

    for i in 0..z.len() {
        z[i] ^= x[i];
    }
}
363
/// Writes the byte-wise XOR of the equal-length blocks `x` and `y`
/// into `z`.
#[inline(always)]
fn xor_block_into<const N: usize>(z: &mut [u8; N], x: &[u8; N], y: &[u8; N]) {
    for (zi, (xi, yi)) in z.iter_mut().zip(x.iter().zip(y.iter())) {
        *zi = xi ^ yi;
    }
}
371
/// XORs the block `x` into the equal-length block `z` in place.
#[inline(always)]
fn xor_block_in_place<const N: usize>(z: &mut [u8; N], x: &[u8; N]) {
    for (zi, xi) in z.iter_mut().zip(x.iter()) {
        *zi ^= xi;
    }
}
379
/// Returns the byte-wise XOR of the two equal-length blocks `x` and
/// `y` as a new array.
#[inline(always)]
fn xor2<const N: usize>(x: &[u8; N], y: &[u8; N]) -> [u8; N] {
    core::array::from_fn(|i| x[i] ^ y[i])
}
387
/// Returns the byte-wise XOR of the three equal-length blocks `v`,
/// `x`, and `y` as a new array.
#[inline(always)]
fn xor3<const N: usize>(v: &[u8; N], x: &[u8; N], y: &[u8; N]) -> [u8; N] {
    core::array::from_fn(|i| v[i] ^ x[i] ^ y[i])
}