1#![warn(
2 clippy::cargo,
3 clippy::nursery,
4 clippy::pedantic,
5 missing_debug_implementations,
6 missing_docs,
7 rust_2018_idioms
8)]
9#![deny(unsafe_code)]
10#![allow(clippy::inline_always)]
11#![no_std]
12#![cfg_attr(docsrs, feature(doc_cfg))]
13
14#[cfg(all(feature = "block-padding", feature = "alloc"))]
55extern crate alloc;
56
57use cipher::{
58 consts::{True, U1, U16, U2048},
59 crypto_common::{InnerUser, IvSizeUser},
60 generic_array::{ArrayLength, GenericArray},
61 inout::{InOut, InOutBuf},
62 typenum::{IsLessOrEqual, PartialDiv},
63 AlgorithmName, Block, BlockBackend, BlockCipher, BlockClosure, BlockDecryptMut, BlockEncryptMut,
64 BlockSizeUser, InnerIvInit, Iv, IvState, ParBlocksSizeUser,
65};
66use core::{fmt, marker::PhantomData};
67
68#[cfg(all(feature = "block-padding", feature = "alloc"))]
69use alloc::{vec, vec::Vec};
70
71#[cfg(all(feature = "block-padding", feature = "alloc"))]
72use cipher::Unsigned;
73
74#[cfg(feature = "block-padding")]
75use cipher::{
76 block_padding::{Padding, UnpadError},
77 inout::{InOutBufReserved, PadError, PaddedInOutBuf},
78};
79
80#[cfg(feature = "zeroize")]
81use cipher::zeroize::{Zeroize, ZeroizeOnDrop};
82
83pub use cipher;
84
85#[inline(always)]
86fn xor<N: ArrayLength<u8>>(out: &mut GenericArray<u8, N>, buf: &GenericArray<u8, N>) {
87 for (a, b) in out.iter_mut().zip(buf) {
88 *a ^= *b;
89 }
90}
91
92#[inline(always)]
96fn multiply_by_2(out: &mut GenericArray<u8, U16>, input: &GenericArray<u8, U16>) {
97 out.iter_mut().zip(input).fold(false, |carry, (o, i)| {
98 let (n, overflow) = i.overflowing_mul(2);
99 *o = n + u8::from(carry);
100 overflow
101 });
102 if input[15] >= 128 {
103 out[0] ^= 135;
104 }
105}
106
107#[inline(always)]
108fn multiply_by_2_ip(out: &mut GenericArray<u8, U16>) {
109 let tmp = *out;
110 multiply_by_2(out, &tmp);
111}
112
/// EME wide-block cipher mode with a compile-time wide-block size.
///
/// Wraps a 16-byte block cipher `C` together with a 16-byte tweak. `BS` is
/// the wide-block size in bytes; the trait impls below require it to be a
/// multiple of 16 and at most 2048.
#[derive(Clone)]
pub struct Eme<C: BlockCipher, BS> {
    // Underlying 16-byte block cipher.
    cipher: C,
    // 16-byte tweak, set from the IV (see the `InnerIvInit` impl).
    t: Block<C>,
    // Zero-sized marker carrying the wide-block size type parameter.
    _bs: PhantomData<BS>,
}
125
// The mode's block size is the wide-block size `BS`, constrained to a
// multiple of 16 bytes (`PartialDiv<U16>`) no larger than 2048
// (`IsLessOrEqual<U2048>`).
impl<C, BS> BlockSizeUser for Eme<C, BS>
where
    C: BlockCipher,
    BS: ArrayLength<u8> + PartialDiv<U16> + IsLessOrEqual<U2048, Output = True>,
{
    type BlockSize = BS;
}
133
// EME encryption: derives the whitening base `l0` and hands the caller's
// closure to the EME `Closure`/`Backend` adapters.
impl<C, BS> BlockEncryptMut for Eme<C, BS>
where
    C: BlockEncryptMut + BlockCipher + BlockSizeUser<BlockSize = U16>,
    BS: ArrayLength<u8> + PartialDiv<U16> + IsLessOrEqual<U2048, Output = True>,
{
    fn encrypt_with_backend_mut(&mut self, f: impl BlockClosure<BlockSize = Self::BlockSize>) {
        let Self { cipher, t, _bs } = self;
        // l0 = E_K(0^16): encrypt the all-zero block; the backend derives
        // all per-chunk whitening masks from this value.
        let mut l0 = GenericArray::default();
        cipher.encrypt_block_mut(&mut l0);
        cipher.encrypt_with_backend_mut(Closure { t, l0, f });
    }
}
146
// EME decryption. Note that `l0` is still produced by *encrypting* the
// all-zero block — the whitening masks are the same in both directions;
// only the per-chunk block operation switches to decryption.
impl<C, BS> BlockDecryptMut for Eme<C, BS>
where
    C: BlockEncryptMut + BlockDecryptMut + BlockCipher + BlockSizeUser<BlockSize = U16>,
    BS: ArrayLength<u8> + PartialDiv<U16> + IsLessOrEqual<U2048, Output = True>,
{
    fn decrypt_with_backend_mut(&mut self, f: impl BlockClosure<BlockSize = Self::BlockSize>) {
        let Self { cipher, t, _bs } = self;
        // l0 = E_K(0^16), same as on the encryption path.
        let mut l0 = GenericArray::default();
        cipher.encrypt_block_mut(&mut l0);
        cipher.decrypt_with_backend_mut(Closure { t, l0, f });
    }
}
159
// The inner primitive is the wrapped 16-byte block cipher.
impl<C, BS> InnerUser for Eme<C, BS>
where
    C: BlockCipher + BlockSizeUser<BlockSize = U16>,
{
    type Inner = C;
}
// The IV (tweak) is always 16 bytes, matching the inner cipher's block size.
impl<C, BS> IvSizeUser for Eme<C, BS>
where
    C: BlockCipher + BlockSizeUser<BlockSize = U16>,
{
    type IvSize = U16;
}
173
174impl<C, BS> InnerIvInit for Eme<C, BS>
175where
176 C: BlockCipher + BlockSizeUser<BlockSize = U16>,
177{
178 #[inline]
179 fn inner_iv_init(cipher: C, iv: &Iv<Self>) -> Self {
180 Self {
181 cipher,
182 t: *iv,
183 _bs: PhantomData,
184 }
185 }
186}
187
// Exposes the current tweak as the IV state (it is never modified by
// encryption or decryption, only set at construction).
impl<C, BS> IvState for Eme<C, BS>
where
    C: BlockCipher + BlockSizeUser<BlockSize = U16>,
{
    #[inline]
    fn iv_state(&self) -> Iv<Self> {
        self.t
    }
}
197
198impl<C, BS> AlgorithmName for Eme<C, BS>
199where
200 C: BlockCipher + BlockSizeUser<BlockSize = U16> + AlgorithmName,
201{
202 fn write_alg_name(f: &mut fmt::Formatter<'_>) -> fmt::Result {
203 f.write_str("eme_mode::Eme<")?;
204 <C as AlgorithmName>::write_alg_name(f)?;
205 f.write_str(">")
206 }
207}
208
209impl<C, BS> fmt::Debug for Eme<C, BS>
210where
211 C: BlockCipher + BlockSizeUser<BlockSize = U16> + AlgorithmName,
212{
213 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
214 f.write_str("eme_mode::Eme<")?;
215 <C as AlgorithmName>::write_alg_name(f)?;
216 f.write_str("> { ... }")
217 }
218}
219
// With the `zeroize` feature, wipe the tweak on drop. Only `t` is zeroized
// here; clearing the inner cipher's key material is the cipher's own
// responsibility (see the `ZeroizeOnDrop` bound below).
#[cfg(feature = "zeroize")]
#[cfg_attr(docsrs, doc(cfg(feature = "zeroize")))]
impl<C: BlockCipher, BS> Drop for Eme<C, BS> {
    fn drop(&mut self) {
        self.t.zeroize();
    }
}
227
// Marker: the whole mode zeroizes on drop provided the inner cipher does.
#[cfg(feature = "zeroize")]
#[cfg_attr(docsrs, doc(cfg(feature = "zeroize")))]
impl<C: BlockCipher + ZeroizeOnDrop, BS> ZeroizeOnDrop for Eme<C, BS> {}
231
// Adapter closure handed to the inner cipher: carries the tweak `t`, the
// precomputed `l0 = E_K(0^16)`, and the user's wide-block closure `f` into
// the cipher's backend dispatch.
struct Closure<'a, BS, BC>
where
    BC: BlockClosure<BlockSize = BS>,
{
    t: &'a mut GenericArray<u8, U16>,
    l0: GenericArray<u8, U16>,
    f: BC,
}
240
// The closure is invoked by the 16-byte inner cipher, hence `U16` here
// (the user-facing wide block size `BS` appears on `Backend` instead).
impl<'a, BS, BC> BlockSizeUser for Closure<'a, BS, BC>
where
    BS: ArrayLength<u8>,
    BC: BlockClosure<BlockSize = BS>,
{
    type BlockSize = U16;
}
248
// Bridges the cipher's 16-byte backend to the user's wide-block closure by
// wrapping it in an EME `Backend`.
impl<'a, BS, BC> BlockClosure for Closure<'a, BS, BC>
where
    BS: ArrayLength<u8>,
    BC: BlockClosure<BlockSize = BS>,
{
    #[inline]
    fn call<B: BlockBackend<BlockSize = Self::BlockSize>>(self, backend: &mut B) {
        let Self { t, l0, f } = self;
        // `f` now sees a backend whose block size is the wide block `BS`,
        // implemented on top of the cipher's 16-byte `backend`.
        f.call(&mut Backend {
            t,
            l0,
            backend,
            _bs: PhantomData,
        });
    }
}
265
// Wide-block backend: presents block size `BS` to the caller while driving
// the inner cipher's 16-byte backend `BK` for the actual block operations.
struct Backend<'a, BS, BK>
where
    BS: ArrayLength<u8>,
    BK: BlockBackend<BlockSize = U16>,
{
    // 16-byte tweak.
    t: &'a mut GenericArray<u8, U16>,
    // l0 = E_K(0^16), the base value for the whitening masks.
    l0: GenericArray<u8, U16>,
    // The inner cipher's 16-byte block backend (encrypting or decrypting,
    // depending on which path constructed us).
    backend: &'a mut BK,
    _bs: PhantomData<BS>,
}
276
// The backend's externally visible block size is the wide block `BS`.
impl<'a, BS, BK> BlockSizeUser for Backend<'a, BS, BK>
where
    BS: ArrayLength<u8>,
    BK: BlockBackend<BlockSize = U16>,
{
    type BlockSize = BS;
}
284
// No parallel wide-block processing: one wide block at a time.
impl<'a, BS, BK> ParBlocksSizeUser for Backend<'a, BS, BK>
where
    BS: ArrayLength<u8>,
    BK: BlockBackend<BlockSize = U16>,
{
    type ParBlocksSize = U1;
}
292
// Core EME transform for one wide block, built from the 16-byte primitive
// in `backend` (which encrypts or decrypts depending on the caller; the
// mixing layers below are direction-agnostic).
//
// Structure (ECB pass / mix / ECB pass):
//   1. whiten each 16-byte chunk with a distinct multiple of l0, then run
//      it through the block cipher;
//   2. mix: fold the tweak and all intermediate chunks into `mp`, derive
//      the mixing mask `m`, and xor doubled copies of `m` into chunks 1..;
//      chunk 0 is recomputed from the processed `mp` so that it depends on
//      every other chunk and the tweak;
//   3. run every chunk through the block cipher again and whiten with the
//      same sequence of l0 multiples.
impl<'a, BS, BK> BlockBackend for Backend<'a, BS, BK>
where
    BS: ArrayLength<u8>,
    BK: BlockBackend<BlockSize = U16>,
{
    #[inline]
    fn proc_block(&mut self, block: InOut<'_, '_, Block<Self>>) {
        let Self {
            t,
            l0,
            backend,
            _bs,
        } = self;
        // `l` starts at 2*l0 and is doubled in GF(2^128) after each chunk,
        // giving chunk i the mask 2^(i+1) * l0.
        let mut l = GenericArray::default();
        multiply_by_2(&mut l, l0);

        // Split the wide block into 16-byte chunks. `BS` is a multiple of
        // 16 (enforced by `PartialDiv<U16>` on `Eme`), so no remainder.
        let (mut chunks, rest) = block.into_buf().into_chunks::<U16>();
        assert!(rest.is_empty());

        // Pass 1: whiten with `l`, apply the block cipher, advance `l`.
        for i in 0..(chunks.get_in().len()) {
            let mut block = chunks.get(i);
            block.xor_in2out(&l);
            backend.proc_block(block.get_out().into()); multiply_by_2_ip(&mut l);
        }

        // mp = tweak XOR (xor of all pass-1 outputs).
        let mut mp = GenericArray::clone_from_slice(t);
        for i in 0..(chunks.get_in().len()) {
            let mut block = chunks.get(i);

            xor(&mut mp, block.get_out());
        }

        // block0 = cipher(mp); m = mp XOR block0. Chunks 1.. each get
        // xored with a successively doubled copy of `m`.
        let mut m = GenericArray::clone_from_slice(&mp);
        let mut block0 = GenericArray::clone_from_slice(&mp);
        backend.proc_block((&mut block0).into()); xor(&mut m, &block0); for i in 1..(chunks.get_in().len()) {
            let mut block = chunks.get(i);
            multiply_by_2_ip(&mut m);
            xor(block.get_out(), &m); }
        // Chunk 0 becomes block0 XOR tweak XOR (xor of the mixed chunks 1..),
        // tying it to the tweak and every other chunk.
        xor(&mut block0, t); for i in 1..(chunks.get_in().len()) {
            xor(&mut block0, chunks.get(i).get_out());
        }
        chunks.get(0).get_out().copy_from_slice(&block0);
        // Pass 2: apply the block cipher again, then whiten with the same
        // 2^(i+1) * l0 mask sequence (l is reset to 2*l0 first).
        multiply_by_2(&mut l, l0); for i in 0..(chunks.get_in().len()) {
            let mut block = chunks.get(i);
            backend.proc_block(block.get_out().into()); xor(block.get_out(), &l); multiply_by_2_ip(&mut l);
        }
    }
}
351
/// EME mode whose wide-block size is chosen at call time.
///
/// Unlike [`Eme`], the number of 16-byte blocks processed per call is taken
/// from the input length; the `*_blocks_*` methods accept at most 128
/// blocks (2048 bytes) per call.
#[derive(Clone)]
pub struct DynamicEme<C: BlockCipher> {
    // Underlying 16-byte block cipher.
    cipher: C,
    // 16-byte tweak, set from the IV (see the `InnerIvInit` impl).
    tweak: Block<C>,
}
364
impl<C: BlockEncryptMut + BlockCipher + BlockSizeUser<BlockSize = U16>> DynamicEme<C> {
    /// Encrypts `block` in place as a single EME wide block.
    ///
    /// # Panics
    ///
    /// Panics if `block.len()` is not a multiple of 16 or exceeds 2048
    /// bytes (128 inner blocks).
    pub fn encrypt_block_mut(&mut self, block: &mut [u8]) {
        let (chunks, rest) = InOutBuf::from(block).into_chunks();
        assert!(rest.is_empty());
        self.encrypt_blocks_inout_mut(chunks);
    }

    /// Encrypts the buffer of 16-byte blocks as one EME wide block,
    /// reading from the input half and writing to the output half.
    ///
    /// An empty buffer is a no-op.
    ///
    /// # Panics
    ///
    /// Panics if the buffer holds more than 128 blocks (2048 bytes).
    pub fn encrypt_blocks_inout_mut(&mut self, mut chunks: InOutBuf<'_, '_, Block<C>>) {
        assert!(chunks.len() <= 128);
        if chunks.is_empty() {
            return;
        }

        // l0 = E_K(0^16): the base of all whitening masks.
        let mut l0 = GenericArray::default();
        self.cipher.encrypt_block_mut(&mut l0);

        // l = 2*l0, doubled in GF(2^128) after each chunk below.
        let mut l = GenericArray::default();
        multiply_by_2(&mut l, &l0);

        // Pass 1: whiten each chunk with `l`, encrypt it, advance `l`.
        for i in 0..(chunks.get_in().len()) {
            let mut block = chunks.get(i);
            block.xor_in2out(&l);
            self.cipher.encrypt_block_mut(block.get_out()); multiply_by_2_ip(&mut l);
        }

        // mp = tweak XOR (xor of all pass-1 outputs).
        let mut mp = GenericArray::clone_from_slice(&self.tweak);
        for i in 0..(chunks.get_in().len()) {
            let mut block = chunks.get(i);

            xor(&mut mp, block.get_out());
        }

        // block0 = E_K(mp); m = mp XOR block0; chunks 1.. get xored with
        // successively doubled copies of `m`.
        let mut m = GenericArray::clone_from_slice(&mp);
        let mut block0 = GenericArray::clone_from_slice(&mp);
        self.cipher.encrypt_block_mut(&mut block0); xor(&mut m, &block0); for i in 1..(chunks.get_in().len()) {
            let mut block = chunks.get(i);
            multiply_by_2_ip(&mut m);
            xor(block.get_out(), &m); }
        // Chunk 0 = block0 XOR tweak XOR (xor of the mixed chunks 1..).
        xor(&mut block0, &self.tweak); for i in 1..(chunks.get_in().len()) {
            xor(&mut block0, chunks.get(i).get_out());
        }
        chunks.get(0).get_out().copy_from_slice(&block0);
        // Pass 2: encrypt each chunk again, then whiten with the same mask
        // sequence (l reset to 2*l0 first).
        multiply_by_2(&mut l, &l0); for i in 0..(chunks.get_in().len()) {
            let mut block = chunks.get(i);
            self.cipher.encrypt_block_mut(block.get_out()); xor(block.get_out(), &l); multiply_by_2_ip(&mut l);
        }
    }

    /// Encrypts the slice of 16-byte blocks in place as one EME wide block.
    ///
    /// # Panics
    ///
    /// Panics if `blocks` holds more than 128 blocks.
    #[inline]
    pub fn encrypt_blocks_mut(&mut self, blocks: &mut [Block<C>]) {
        self.encrypt_blocks_inout_mut(blocks.into());
    }

    /// Pads the message with `P`, encrypts it, and returns the resulting
    /// ciphertext slice.
    ///
    /// # Errors
    ///
    /// Returns [`PadError`] if the output buffer cannot hold the padded
    /// message.
    #[cfg(feature = "block-padding")]
    #[cfg_attr(docsrs, doc(cfg(feature = "block-padding")))]
    #[inline]
    pub fn encrypt_padded_inout_mut<'inp, 'out, P: Padding<C::BlockSize>>(
        &mut self,
        data: InOutBufReserved<'inp, 'out, u8>,
    ) -> Result<&'out [u8], PadError> {
        // Collapse the padded-block view into one contiguous in-place
        // buffer over the output (see `padded_in_out_buf_to_in_out_buf`).
        let mut buf = padded_in_out_buf_to_in_out_buf(data.into_padded_blocks::<P, C::BlockSize>()?);

        self.encrypt_blocks_inout_mut(buf.reborrow());
        Ok(chunks_into_out(buf))
    }

    /// Pads and encrypts the first `msg_len` bytes of `buf` in place,
    /// returning the ciphertext slice.
    ///
    /// # Errors
    ///
    /// Returns [`PadError`] if `buf` is too small for the padded message.
    #[cfg(feature = "block-padding")]
    #[cfg_attr(docsrs, doc(cfg(feature = "block-padding")))]
    #[inline]
    pub fn encrypt_padded_mut<'b, P: Padding<C::BlockSize>>(
        &mut self,
        buf: &'b mut [u8],
        msg_len: usize,
    ) -> Result<&'b [u8], PadError> {
        let buf = InOutBufReserved::from_mut_slice(buf, msg_len).map_err(|_| PadError)?;
        self.encrypt_padded_inout_mut::<P>(buf)
    }

    /// Pads and encrypts `msg` buffer-to-buffer into `out_buf`, returning
    /// the ciphertext slice.
    ///
    /// # Errors
    ///
    /// Returns [`PadError`] if `out_buf` is too small for the padded
    /// message.
    #[cfg(feature = "block-padding")]
    #[cfg_attr(docsrs, doc(cfg(feature = "block-padding")))]
    #[inline]
    pub fn encrypt_padded_b2b_mut<'a, P: Padding<C::BlockSize>>(
        &mut self,
        msg: &[u8],
        out_buf: &'a mut [u8],
    ) -> Result<&'a [u8], PadError> {
        let buf = InOutBufReserved::from_slices(msg, out_buf).map_err(|_| PadError)?;
        self.encrypt_padded_inout_mut::<P>(buf)
    }

    /// Pads and encrypts `msg`, returning the ciphertext in a freshly
    /// allocated `Vec`.
    #[cfg(all(feature = "block-padding", feature = "alloc"))]
    #[cfg_attr(docsrs, doc(cfg(all(feature = "block-padding", feature = "alloc"))))]
    #[inline]
    pub fn encrypt_padded_vec_mut<P: Padding<C::BlockSize>>(&mut self, msg: &[u8]) -> Vec<u8> {
        // The helper always allocates at least one extra block, so padding
        // cannot fail.
        let mut out = allocate_out_vec::<C>(msg.len());
        let len = self
            .encrypt_padded_b2b_mut::<P>(msg, &mut out)
            .expect("enough space for encrypting is allocated")
            .len();
        out.truncate(len);
        out
    }
}
528
impl<C: BlockEncryptMut + BlockDecryptMut + BlockCipher + BlockSizeUser<BlockSize = U16>>
    DynamicEme<C>
{
    /// Decrypts `block` in place as a single EME wide block.
    ///
    /// # Panics
    ///
    /// Panics if `block.len()` is not a multiple of 16 or exceeds 2048
    /// bytes (128 inner blocks).
    pub fn decrypt_block_mut(&mut self, block: &mut [u8]) {
        let (chunks, rest) = InOutBuf::from(block).into_chunks();
        assert!(rest.is_empty());
        self.decrypt_blocks_inout_mut(chunks);
    }

    /// Decrypts the buffer of 16-byte blocks as one EME wide block. This is
    /// the mirror of `encrypt_blocks_inout_mut`: identical mixing layers,
    /// with the per-chunk cipher calls switched to decryption.
    ///
    /// An empty buffer is a no-op.
    ///
    /// # Panics
    ///
    /// Panics if the buffer holds more than 128 blocks (2048 bytes).
    pub fn decrypt_blocks_inout_mut(&mut self, mut chunks: InOutBuf<'_, '_, Block<C>>) {
        assert!(chunks.len() <= 128);
        if chunks.is_empty() {
            return;
        }

        // l0 = E_K(0^16): still *encrypted*, matching the encryption path,
        // since the whitening masks are the same in both directions.
        let mut l0 = GenericArray::default();
        self.cipher.encrypt_block_mut(&mut l0);

        // l = 2*l0, doubled in GF(2^128) after each chunk below.
        let mut l = GenericArray::default();
        multiply_by_2(&mut l, &l0);

        // Pass 1: whiten each chunk with `l`, decrypt it, advance `l`.
        for i in 0..(chunks.get_in().len()) {
            let mut block = chunks.get(i);
            block.xor_in2out(&l);
            self.cipher.decrypt_block_mut(block.get_out()); multiply_by_2_ip(&mut l);
        }

        // mp = tweak XOR (xor of all pass-1 outputs).
        let mut mp = GenericArray::clone_from_slice(&self.tweak);
        for i in 0..(chunks.get_in().len()) {
            let mut block = chunks.get(i);

            xor(&mut mp, block.get_out());
        }

        // block0 = D_K(mp); m = mp XOR block0; chunks 1.. get xored with
        // successively doubled copies of `m`.
        let mut m = GenericArray::clone_from_slice(&mp);
        let mut block0 = GenericArray::clone_from_slice(&mp);
        self.cipher.decrypt_block_mut(&mut block0); xor(&mut m, &block0); for i in 1..(chunks.get_in().len()) {
            let mut block = chunks.get(i);
            multiply_by_2_ip(&mut m);
            xor(block.get_out(), &m); }
        // Chunk 0 = block0 XOR tweak XOR (xor of the mixed chunks 1..).
        xor(&mut block0, &self.tweak); for i in 1..(chunks.get_in().len()) {
            xor(&mut block0, chunks.get(i).get_out());
        }
        chunks.get(0).get_out().copy_from_slice(&block0);
        // Pass 2: decrypt each chunk again, then whiten with the same mask
        // sequence (l reset to 2*l0 first).
        multiply_by_2(&mut l, &l0); for i in 0..(chunks.get_in().len()) {
            let mut block = chunks.get(i);
            self.cipher.decrypt_block_mut(block.get_out()); xor(block.get_out(), &l); multiply_by_2_ip(&mut l);
        }
    }

    /// Decrypts the slice of 16-byte blocks in place as one EME wide block.
    ///
    /// # Panics
    ///
    /// Panics if `blocks` holds more than 128 blocks.
    #[inline]
    pub fn decrypt_blocks_mut(&mut self, blocks: &mut [Block<C>]) {
        self.decrypt_blocks_inout_mut(blocks.into());
    }

    /// Decrypts the buffer and strips the `P` padding, returning the
    /// plaintext slice. Consumes `self`.
    ///
    /// # Errors
    ///
    /// Returns [`UnpadError`] if the input length is not a multiple of the
    /// block size or the padding is malformed.
    #[cfg(feature = "block-padding")]
    #[cfg_attr(docsrs, doc(cfg(feature = "block-padding")))]
    #[inline]
    pub fn decrypt_padded_inout_mut<'inp, 'out, P: Padding<C::BlockSize>>(
        mut self,
        data: InOutBuf<'inp, 'out, u8>,
    ) -> Result<&'out [u8], UnpadError> {
        let (mut blocks, tail) = data.into_chunks();
        if !tail.is_empty() {
            return Err(UnpadError);
        }
        self.decrypt_blocks_inout_mut(blocks.reborrow());
        P::unpad_blocks(blocks.into_out())
    }

    /// Decrypts `buf` in place and strips the `P` padding, returning the
    /// plaintext slice. Consumes `self`.
    ///
    /// # Errors
    ///
    /// Returns [`UnpadError`] on length or padding mismatch.
    #[cfg(feature = "block-padding")]
    #[cfg_attr(docsrs, doc(cfg(feature = "block-padding")))]
    #[inline]
    pub fn decrypt_padded_mut<P: Padding<C::BlockSize>>(
        self,
        buf: &mut [u8],
    ) -> Result<&[u8], UnpadError> {
        self.decrypt_padded_inout_mut::<P>(buf.into())
    }

    /// Decrypts `in_buf` buffer-to-buffer into `out_buf` and strips the
    /// `P` padding, returning the plaintext slice. Consumes `self`.
    ///
    /// # Errors
    ///
    /// Returns [`UnpadError`] if `out_buf` is shorter than `in_buf`, on
    /// length mismatch, or on malformed padding.
    #[cfg(feature = "block-padding")]
    #[cfg_attr(docsrs, doc(cfg(feature = "block-padding")))]
    #[inline]
    pub fn decrypt_padded_b2b_mut<'a, P: Padding<C::BlockSize>>(
        self,
        in_buf: &[u8],
        out_buf: &'a mut [u8],
    ) -> Result<&'a [u8], UnpadError> {
        if out_buf.len() < in_buf.len() {
            return Err(UnpadError);
        }
        let n = in_buf.len();
        let buf = InOutBuf::new(in_buf, &mut out_buf[..n]).map_err(|_| UnpadError)?;
        self.decrypt_padded_inout_mut::<P>(buf)
    }

    /// Decrypts `buf` into a freshly allocated `Vec` and strips the `P`
    /// padding. Consumes `self`.
    ///
    /// # Errors
    ///
    /// Returns [`UnpadError`] on length or padding mismatch.
    #[cfg(all(feature = "block-padding", feature = "alloc"))]
    #[cfg_attr(docsrs, doc(cfg(all(feature = "block-padding", feature = "alloc"))))]
    #[inline]
    pub fn decrypt_padded_vec_mut<P: Padding<C::BlockSize>>(
        self,
        buf: &[u8],
    ) -> Result<Vec<u8>, UnpadError> {
        let mut out = vec![0; buf.len()];
        let len = self.decrypt_padded_b2b_mut::<P>(buf, &mut out)?.len();
        out.truncate(len);
        Ok(out)
    }
}
711
// The inner primitive is the wrapped 16-byte block cipher.
impl<C> InnerUser for DynamicEme<C>
where
    C: BlockCipher + BlockSizeUser<BlockSize = U16>,
{
    type Inner = C;
}
718
// The IV (tweak) is always 16 bytes, matching the inner cipher's block size.
impl<C> IvSizeUser for DynamicEme<C>
where
    C: BlockCipher + BlockSizeUser<BlockSize = U16>,
{
    type IvSize = U16;
}
725
726impl<C> InnerIvInit for DynamicEme<C>
727where
728 C: BlockCipher + BlockSizeUser<BlockSize = U16>,
729{
730 #[inline]
731 fn inner_iv_init(cipher: C, iv: &Iv<Self>) -> Self {
732 Self { cipher, tweak: *iv }
733 }
734}
735
// Exposes the current tweak as the IV state (set once at construction).
impl<C> IvState for DynamicEme<C>
where
    C: BlockCipher + BlockSizeUser<BlockSize = U16>,
{
    #[inline]
    fn iv_state(&self) -> Iv<Self> {
        self.tweak
    }
}
745
746impl<C> AlgorithmName for DynamicEme<C>
747where
748 C: BlockCipher + BlockSizeUser<BlockSize = U16> + AlgorithmName,
749{
750 fn write_alg_name(f: &mut fmt::Formatter<'_>) -> fmt::Result {
751 f.write_str("eme_mode::DynamicEme<")?;
752 <C as AlgorithmName>::write_alg_name(f)?;
753 f.write_str(">")
754 }
755}
756
757impl<C> fmt::Debug for DynamicEme<C>
758where
759 C: BlockCipher + BlockSizeUser<BlockSize = U16> + AlgorithmName,
760{
761 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
762 f.write_str("eme_mode::DynamicEme<")?;
763 <C as AlgorithmName>::write_alg_name(f)?;
764 f.write_str("> { ... }")
765 }
766}
767
// With the `zeroize` feature, wipe the tweak on drop. Only `tweak` is
// zeroized here; clearing the inner cipher's key material is the cipher's
// own responsibility (see the `ZeroizeOnDrop` bound below).
#[cfg(feature = "zeroize")]
#[cfg_attr(docsrs, doc(cfg(feature = "zeroize")))]
impl<C: BlockCipher> Drop for DynamicEme<C> {
    fn drop(&mut self) {
        self.tweak.zeroize();
    }
}
775
// Marker: the whole mode zeroizes on drop provided the inner cipher does.
#[cfg(feature = "zeroize")]
#[cfg_attr(docsrs, doc(cfg(feature = "zeroize")))]
impl<C: BlockCipher + ZeroizeOnDrop> ZeroizeOnDrop for DynamicEme<C> {}
779
// Collapses a `PaddedInOutBuf` (full blocks plus an optional padded tail
// block) into a single contiguous in-place `InOutBuf` over the *output*
// buffer: input blocks are copied to the output, the tail block is appended,
// and the result aliases the output for both halves.
#[cfg(feature = "block-padding")]
#[allow(unsafe_code)]
fn padded_in_out_buf_to_in_out_buf<'inp, 'out, BS: ArrayLength<u8>>(
    mut buf: PaddedInOutBuf<'inp, 'out, BS>,
) -> InOutBuf<'out, 'out, GenericArray<u8, BS>> {
    let blocks = buf.get_blocks();
    let mut blocks_len = blocks.len();
    let (blocks_in, blocks_out) = blocks.into_raw();

    // Skip the copy for truly in-place buffers (input and output alias).
    if blocks_in != blocks_out {
        // SAFETY: both pointers come from the same `InOutBuf`, so each is
        // valid for `blocks_len` blocks; they were just checked unequal.
        // NOTE(review): assumes unequal in/out pointers from
        // `InOutBufReserved` never partially overlap — confirm against the
        // `inout` crate's buffer invariants.
        unsafe {
            core::ptr::copy_nonoverlapping(blocks_in, blocks_out, blocks_len);
        }
    }

    if let Some(tail) = buf.get_tail_block() {
        let (tail_in, tail_out) = tail.into_raw();

        // SAFETY: the assert verifies the tail output block sits directly
        // after the full output blocks, so extending `blocks_len` by one
        // keeps the region contiguous; the one-block copy targets that slot.
        unsafe {
            assert_eq!(blocks_out.add(blocks_len), tail_out);
            core::ptr::copy_nonoverlapping(tail_in, tail_out, 1);
            blocks_len += 1;
        }
    }

    // SAFETY: `blocks_out` is valid for `blocks_len` blocks (full blocks
    // plus the verified-adjacent tail); using it for both halves yields an
    // in-place buffer tied to the output lifetime `'out`.
    unsafe { InOutBuf::from_raw(blocks_out, blocks_out, blocks_len) }
}
813
// Reinterprets the output half of a block-level `InOutBuf` as a flat byte
// slice of `len * BS` bytes.
#[cfg(feature = "block-padding")]
#[allow(unsafe_code)]
fn chunks_into_out<'inp, 'out, BS: ArrayLength<u8>>(
    buf: InOutBuf<'inp, 'out, GenericArray<u8, BS>>,
) -> &'out [u8] {
    let total_blocks = buf.len();
    let res_len = BS::USIZE * total_blocks;
    let (_, out_ptr) = buf.into_raw();

    // SAFETY: `out_ptr` points at `total_blocks` contiguous
    // `GenericArray<u8, BS>` values (exactly `res_len` bytes), valid for
    // `'out`; `GenericArray<u8, BS>` is a plain byte array, so viewing it
    // as `&[u8]` is sound.
    unsafe { core::slice::from_raw_parts(out_ptr as *const u8, res_len) }
}
827
/// Allocates a zeroed output buffer large enough for a `len`-byte message
/// after padding: the message length rounded down to whole blocks, plus one
/// extra block (padding always adds at least one byte).
#[cfg(all(feature = "block-padding", feature = "alloc"))]
fn allocate_out_vec<BS: BlockSizeUser>(len: usize) -> Vec<u8> {
    let block_size = BS::BlockSize::USIZE;
    let padded_len = block_size * (len / block_size + 1);
    vec![0; padded_len]
}