use core::{
    convert::{AsMut, AsRef},
    num::Wrapping,
    ops::{BitXor, BitXorAssign},
    slice,
};

#[cfg(feature = "zeroize")]
use zeroize::Zeroize;

/// Mask selecting the low 32 bits of a 64-bit word.
const TRUNC: u64 = u32::MAX as u64;

// Mixing step of the permutation: `a <- a + b + 2 * lo32(a) * lo32(b)`, with
// all arithmetic wrapping modulo 2^64, followed by XOR-and-rotate diffusion
// using the same rotation constants (32, 24, 16, 63) as BLAKE2b's G function.
#[rustfmt::skip]
macro_rules! permute_step {
    ($a:expr, $b:expr, $c:expr, $d:expr) => {
        $a = (Wrapping($a) + Wrapping($b) + (Wrapping(2) * Wrapping(($a & TRUNC) * ($b & TRUNC)))).0;
        $d = ($d ^ $a).rotate_right(32);
        $c = (Wrapping($c) + Wrapping($d) + (Wrapping(2) * Wrapping(($c & TRUNC) * ($d & TRUNC)))).0;
        $b = ($b ^ $c).rotate_right(24);

        $a = (Wrapping($a) + Wrapping($b) + (Wrapping(2) * Wrapping(($a & TRUNC) * ($b & TRUNC)))).0;
        $d = ($d ^ $a).rotate_right(16);
        $c = (Wrapping($c) + Wrapping($d) + (Wrapping(2) * Wrapping(($c & TRUNC) * ($d & TRUNC)))).0;
        $b = ($b ^ $c).rotate_right(63);
    };
}

// Runs `permute_step` over the columns of a 4x4 matrix of 16 words, then over
// its diagonals, mirroring the round structure of BLAKE2b's permutation.
macro_rules! permute {
    (
        $v0:expr, $v1:expr, $v2:expr, $v3:expr,
        $v4:expr, $v5:expr, $v6:expr, $v7:expr,
        $v8:expr, $v9:expr, $v10:expr, $v11:expr,
        $v12:expr, $v13:expr, $v14:expr, $v15:expr,
    ) => {
        permute_step!($v0, $v4, $v8, $v12);
        permute_step!($v1, $v5, $v9, $v13);
        permute_step!($v2, $v6, $v10, $v14);
        permute_step!($v3, $v7, $v11, $v15);
        permute_step!($v0, $v5, $v10, $v15);
        permute_step!($v1, $v6, $v11, $v12);
        permute_step!($v2, $v7, $v8, $v13);
        permute_step!($v3, $v4, $v9, $v14);
    };
}

/// A 1 KiB memory block, stored as 128 64-bit words.
#[derive(Copy, Clone, Debug)]
#[repr(align(64))]
pub struct Block([u64; Self::SIZE / 8]);

impl Block {
    /// Memory block size in bytes.
    pub const SIZE: usize = 1024;

    /// Returns a block initialized with zeros.
    pub const fn new() -> Self {
        Self([0u64; Self::SIZE / 8])
    }

    /// Loads a `Block::SIZE`-byte buffer into the block as little-endian
    /// 64-bit words.
    #[inline(always)]
    pub(crate) fn load(&mut self, input: &[u8; Block::SIZE]) {
        for (i, chunk) in input.chunks(8).enumerate() {
            self.0[i] = u64::from_le_bytes(chunk.try_into().expect("should be 8 bytes"));
        }
    }

    /// Returns an iterator over the block's 64-bit words.
    #[inline(always)]
    pub(crate) fn iter(&self) -> slice::Iter<'_, u64> {
        self.0.iter()
    }

    /// Compresses two blocks into a new one: XORs them, applies the
    /// permutation to the result row-wise and then column-wise, and finally
    /// XORs the original XOR back in.
    #[inline(always)]
    pub(crate) fn compress(rhs: &Self, lhs: &Self) -> Self {
        let r = *rhs ^ lhs;

        // Apply the permutation row-wise (16 words per row).
        let mut q = r;
        for chunk in q.0.chunks_exact_mut(16) {
            #[rustfmt::skip]
            permute!(
                chunk[0], chunk[1], chunk[2], chunk[3],
                chunk[4], chunk[5], chunk[6], chunk[7],
                chunk[8], chunk[9], chunk[10], chunk[11],
                chunk[12], chunk[13], chunk[14], chunk[15],
            );
        }

        // Apply the permutation column-wise (two words drawn from each row).
        for i in 0..8 {
            let b = i * 2;

            #[rustfmt::skip]
            permute!(
                q.0[b], q.0[b + 1],
                q.0[b + 16], q.0[b + 17],
                q.0[b + 32], q.0[b + 33],
                q.0[b + 48], q.0[b + 49],
                q.0[b + 64], q.0[b + 65],
                q.0[b + 80], q.0[b + 81],
                q.0[b + 96], q.0[b + 97],
                q.0[b + 112], q.0[b + 113],
            );
        }

        // Feed the original XOR of the inputs back in.
        q ^= &r;
        q
    }
}

impl Default for Block {
    fn default() -> Self {
        Self([0u64; Self::SIZE / 8])
    }
}

impl AsRef<[u64]> for Block {
    fn as_ref(&self) -> &[u64] {
        &self.0
    }
}

impl AsMut<[u64]> for Block {
    fn as_mut(&mut self) -> &mut [u64] {
        &mut self.0
    }
}

impl BitXor<&Block> for Block {
    type Output = Block;

    fn bitxor(mut self, rhs: &Block) -> Self::Output {
        self ^= rhs;
        self
    }
}

impl BitXorAssign<&Block> for Block {
    fn bitxor_assign(&mut self, rhs: &Block) {
        for (dst, src) in self.0.iter_mut().zip(rhs.0.iter()) {
            *dst ^= src;
        }
    }
}

#[cfg(feature = "zeroize")]
impl Zeroize for Block {
    fn zeroize(&mut self) {
        self.0.zeroize();
    }
}
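
// A minimal sanity-check sketch for the operations defined above. These tests
// only exercise items from this module and assume the usual `cargo test`
// harness is available for the crate.
#[cfg(test)]
mod tests {
    use super::Block;

    #[test]
    fn xor_with_self_clears_block() {
        let mut block = Block::new();
        block.load(&[0xAB; Block::SIZE]);
        // `Block` is `Copy`, so keep a copy and XOR the block with itself.
        let copy = block;
        block ^= &copy;
        assert!(block.as_ref().iter().all(|&word| word == 0));
    }

    #[test]
    fn compress_of_zero_blocks_is_zero() {
        // With all-zero inputs the XOR is zero and the permutation maps the
        // zero state to itself, so the compressed block must also be zero.
        let zero = Block::new();
        let out = Block::compress(&zero, &zero);
        assert!(out.as_ref().iter().all(|&word| word == 0));
    }
}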