// eva_crypto/generic.rs
/// A four-byte word: one row or column of a block-cipher state.
// Lower-case SIMD-style alias name (four u8 lanes), hence the lint allowance.
#[allow(non_camel_case_types)]
pub type u8x4 = [u8; 4];

/// A 4x4 byte matrix: the full 16-byte state used by ciphers such as AES.
#[allow(non_camel_case_types)]
pub type u8x4x4 = [u8x4; 4];
6
/// Rotation, bitwise, and Galois-field operations implemented uniformly for
/// `u8`, `u8x4`, and `u8x4x4`.
pub trait Ops {
    /// Left rotation (bit-wise for `u8`, element/row-wise for the array types).
    fn lrot(&self) -> Self;
    /// Right rotation; the inverse of `lrot`.
    fn rrot(&self) -> Self;
    /// Element-wise XOR with `rhs`.
    fn xor(&self, rhs: &Self) -> Self;
    /// Element-wise AND with `rhs`.
    fn and(&self, rhs: &Self) -> Self;
    /// Multiplication over GF(2^`bits`); only `bits` of 8 and 4 are supported.
    fn gmul(&self, rhs: &Self, bits: u8) -> Self;
}
14
/// Byte substitution through a lookup table (S-box).
pub trait Permutation {
    /// Replace every byte `b` of `self` with `sbox[b]`.
    /// Panics if `sbox` is too short for a byte value contained in `self`.
    fn sub_sbox(&self, sbox: &[u8]) -> Self;
}
18
19impl Ops for u8x4x4 {
20 /// ```
21 /// use eva_crypto::generic::Ops;
22 /// assert_eq!(
23 /// [[1, 2, 3, 4]; 4].lrot(),
24 /// [
25 /// [1, 2, 3, 4],
26 /// [2, 3, 4, 1],
27 /// [3, 4, 1, 2],
28 /// [4, 1, 2, 3]
29 /// ]
30 /// );
31 /// ```
32 fn lrot(&self) -> Self {
33 [
34 self[0],
35 self[1].lrot(),
36 self[2].lrot().lrot(),
37 self[3].lrot().lrot().lrot(),
38 ]
39 }
40
41 /// ```
42 /// use eva_crypto::generic::Ops;
43 /// assert_eq!(
44 /// [[1, 2, 3, 4]; 4].lrot().rrot(),
45 /// [[1, 2, 3, 4]; 4]
46 /// );
47 /// ```
48 fn rrot(&self) -> Self {
49 [
50 self[0],
51 self[1].rrot(),
52 self[2].rrot().rrot(),
53 self[3].rrot().rrot().rrot(),
54 ]
55 }
56
57 /// ```
58 /// use eva_crypto::generic::Ops;
59 /// assert_eq!(
60 /// [[0x1, 0x2, 0x3, 0x4]; 4].xor(&[[0x11, 0x22, 0x33, 0x44]; 4]),
61 /// [[0x10, 0x20, 0x30, 0x40]; 4]
62 /// );
63 /// ```
64 fn xor(&self, rhs: &Self) -> Self {
65 [
66 self[0].xor(&rhs[0]),
67 self[1].xor(&rhs[1]),
68 self[2].xor(&rhs[2]),
69 self[3].xor(&rhs[3]),
70 ]
71 }
72
73 /// ```
74 /// use eva_crypto::generic::Ops;
75 /// assert_eq!(
76 /// [[0x1, 0x2, 0x3, 0x4]; 4].and(&[[0x10, 0x20, 0x30, 0x40]; 4]),
77 /// [[0; 4]; 4]
78 /// );
79 /// ```
80 fn and(&self, rhs: &Self) -> Self {
81 [
82 self[0].and(&rhs[0]),
83 self[1].and(&rhs[1]),
84 self[2].and(&rhs[2]),
85 self[3].and(&rhs[3]),
86 ]
87 }
88
89 /// ```
90 /// use eva_crypto::generic::Ops;
91 /// assert_eq!(
92 /// [
93 /// [0x0e, 0x09, 0x0d, 0x0b],
94 /// [0x0b, 0x0e, 0x09, 0x0d],
95 /// [0x0d, 0x0b, 0x0e, 0x09],
96 /// [0x09, 0x0d, 0x0b, 0x0e],
97 /// ].gmul(
98 /// &[
99 /// [0x02, 0x01, 0x01, 0x03],
100 /// [0x03, 0x02, 0x01, 0x01],
101 /// [0x01, 0x03, 0x02, 0x01],
102 /// [0x01, 0x01, 0x03, 0x02],
103 /// ], 8),
104 /// [[1, 0, 0, 0]; 4].rrot()
105 /// );
106 /// ```
107 fn gmul(&self, rhs: &Self, bits: u8) -> Self {
108 [
109 self[0].gmul(&[rhs[0][0]; 4], bits),
110 self[0].gmul(&[rhs[1][0]; 4], bits),
111 self[0].gmul(&[rhs[2][0]; 4], bits),
112 self[0].gmul(&[rhs[3][0]; 4], bits),
113 ]
114 .xor(&[
115 self[1].gmul(&[rhs[0][1]; 4], bits),
116 self[1].gmul(&[rhs[1][1]; 4], bits),
117 self[1].gmul(&[rhs[2][1]; 4], bits),
118 self[1].gmul(&[rhs[3][1]; 4], bits),
119 ])
120 .xor(&[
121 self[2].gmul(&[rhs[0][2]; 4], bits),
122 self[2].gmul(&[rhs[1][2]; 4], bits),
123 self[2].gmul(&[rhs[2][2]; 4], bits),
124 self[2].gmul(&[rhs[3][2]; 4], bits),
125 ])
126 .xor(&[
127 self[3].gmul(&[rhs[0][3]; 4], bits),
128 self[3].gmul(&[rhs[1][3]; 4], bits),
129 self[3].gmul(&[rhs[2][3]; 4], bits),
130 self[3].gmul(&[rhs[3][3]; 4], bits),
131 ])
132 }
133}
134
135impl Ops for u8x4 {
136 /// ```
137 /// use eva_crypto::generic::Ops;
138 /// assert_eq!(
139 /// [1, 2, 3, 4].lrot(),
140 /// [2, 3, 4, 1]
141 /// );
142 /// ```
143 fn lrot(&self) -> Self {
144 [self[1], self[2], self[3], self[0]]
145 }
146
147 /// ```
148 /// use eva_crypto::generic::Ops;
149 /// assert_eq!(
150 /// [1, 2, 3, 4].lrot().rrot(),
151 /// [1, 2, 3, 4]
152 /// );
153 /// ```
154 fn rrot(&self) -> Self {
155 [self[3], self[0], self[1], self[2]]
156 }
157
158 /// ```
159 /// use eva_crypto::generic::Ops;
160 /// assert_eq!(
161 /// [0x0, 0x1, 0x2, 0x3].xor(&[0x3, 0x2, 0x1, 0x0]),
162 /// [0x3, 0x3, 0x3, 0x3]
163 /// );
164 /// ```
165
166 fn xor(&self, rhs: &Self) -> Self {
167 [
168 self[0] ^ rhs[0],
169 self[1] ^ rhs[1],
170 self[2] ^ rhs[2],
171 self[3] ^ rhs[3],
172 ]
173 }
174
175 /// ```
176 /// use eva_crypto::generic::Ops;
177 /// assert_eq!(
178 /// [0x0, 0x1, 0x1, 0x0].and(&[0x1, 0x3, 0x3, 0x1]),
179 /// [0x0, 0x1, 0x1, 0x0]
180 /// );
181 /// ```
182 fn and(&self, rhs: &Self) -> Self {
183 [
184 self[0] & rhs[0],
185 self[1] & rhs[1],
186 self[2] & rhs[2],
187 self[3] & rhs[3],
188 ]
189 }
190
191 fn gmul(&self, rhs: &Self, bits: u8) -> Self {
192 [
193 self[0].gmul(&rhs[0], bits),
194 self[1].gmul(&rhs[1], bits),
195 self[2].gmul(&rhs[2], bits),
196 self[3].gmul(&rhs[3], bits),
197 ]
198 }
199}
200
201impl Ops for u8 {
202 fn lrot(&self) -> Self {
203 self.rotate_left(1)
204 }
205 fn rrot(&self) -> Self {
206 self.rotate_right(1)
207 }
208 fn xor(&self, rhs: &Self) -> Self {
209 self ^ rhs
210 }
211 fn and(&self, rhs: &Self) -> Self {
212 self & rhs
213 }
214 fn gmul(&self, rhs: &Self, bits: u8) -> Self {
215 match bits {
216 8 => gmul_x(*self, *rhs, 0x1b, 8),
217 4 => gmul_x(*self, *rhs, 0x03, 4),
218 _ => unimplemented!(),
219 }
220 }
221}
222
/// Implementation of Galois field multiplication over GF(2^`bits`).
///
/// `poly` is the low part of the irreducible reduction polynomial
/// (0x1b for the AES field GF(2^8), 0x03 for GF(2^4)); the implicit
/// leading term x^`bits` is handled by the shift itself.
pub fn gmul_x(a: u8, b: u8, poly: u8, bits: u8) -> u8 {
    let top_bit = 1u8 << (bits - 1);
    let mut shifted = a;
    let mut rest = b;
    let mut product = 0u8;
    // Russian-peasant multiplication: XOR in a shifted copy of `a` for every
    // set bit of `b`, reducing by `poly` whenever the top field bit overflows.
    while shifted != 0 && rest != 0 {
        if rest & 1 != 0 {
            product ^= shifted;
        }
        let overflow = shifted & top_bit != 0;
        shifted <<= 1;
        if overflow {
            shifted ^= poly;
        }
        rest >>= 1;
    }
    // Mask away anything above the field width.
    product & (0xff >> (8 - bits))
}
239
240impl Permutation for u8x4 {
241 fn sub_sbox(&self, sbox: &[u8]) -> Self {
242 [
243 sbox[self[0] as usize],
244 sbox[self[1] as usize],
245 sbox[self[2] as usize],
246 sbox[self[3] as usize],
247 ]
248 }
249}
250
251impl Permutation for u8x4x4 {
252 fn sub_sbox(&self, sbox: &[u8]) -> Self {
253 [
254 self[0].sub_sbox(sbox),
255 self[1].sub_sbox(sbox),
256 self[2].sub_sbox(sbox),
257 self[3].sub_sbox(sbox),
258 ]
259 }
260}
261
262/// Create a 4x4 state matrix from a 16 sized u8 array, used in many blcok ciphers.
263/// ```
264/// use eva_crypto::generic::create_u8x4x4;
265/// assert_eq!(
266/// create_u8x4x4(&[0x1; 16]),
267/// [[0x1;4]; 4]
268/// );
269/// ```
270pub fn create_u8x4x4(data: &[u8]) -> u8x4x4 {
271 assert_eq!(data.len(), 16);
272 let mut state = [[0; 4]; 4];
273 for (i, &j) in data.iter().enumerate() {
274 state[i / 4][i % 4] = j;
275 }
276 state
277}
278
279pub fn create_u8x16(data: &u8x4x4) -> [u8; 16] {
280 let mut state = [0u8; 16];
281 for i in 0..16 {
282 state[i] = data[i / 4 as usize][i % 4 as usize];
283 }
284 state
285}
286
287/// Create u8x4 from an u32 value.
288/// ```
289/// use eva_crypto::generic::create_u8x4;
290/// assert_eq!(
291/// create_u8x4(0x0102_0304),
292/// [0x01, 0x02, 0x03, 0x04]
293/// );
294/// ```
295pub fn create_u8x4(data: u32) -> u8x4 {
296 [
297 (data >> 24) as u8,
298 (data >> 16) as u8,
299 (data >> 8) as u8,
300 data as u8,
301 ]
302}
303
304/// Create u32 from an u8 array.
305/// ```
306/// use eva_crypto::generic::u8x4_to_u32;
307/// assert_eq!(
308/// u8x4_to_u32([0x01, 0x02, 0x03, 0x04]),
309/// 0x0102_0304
310/// );
311/// ```
312pub fn u8x4_to_u32(data: u8x4) -> u32 {
313 let mut ret: u32 = 0;
314 for i in 0..4 {
315 ret ^= (data[3 - i] as u32) << (i * 8);
316 }
317 ret
318}
319
320/// Transpose a 4x4 state matrix, used in many blcok ciphers.
321/// ```
322/// use eva_crypto::generic::transpose;
323/// assert_eq!(
324/// transpose(&[
325/// [0x1, 0x2, 0x3, 0x4],
326/// [0x0, 0x0, 0x0, 0x0],
327/// [0x0, 0x0, 0x0, 0x0],
328/// [0x0, 0x0, 0x0, 0x0],
329/// ]),
330/// [
331/// [0x1, 0x0, 0x0, 0x0],
332/// [0x2, 0x0, 0x0, 0x0],
333/// [0x3, 0x0, 0x0, 0x0],
334/// [0x4, 0x0, 0x0, 0x0],
335/// ]
336/// );
337/// ```
338pub fn transpose(input: &u8x4x4) -> u8x4x4 {
339 let mut out = [[0; 4]; 4];
340 for (i, &n) in input.iter().enumerate() {
341 for (j, &u) in n.iter().enumerate() {
342 out[j][i] = u;
343 }
344 }
345 out
346}
347
/// Expand the data to a bits vector, most significant bit first.
///
/// The top `skip` bits of every byte are discarded, so each byte contributes
/// its low `8 - skip` bits. Panics if `skip` is outside 0 - 7.
/// ```
/// use eva_crypto::generic::expand_bits;
/// assert_eq!(
///     expand_bits(&vec![0b0001, 0b1101], 4),
///     [false, false, false, true, true, true, false, true]
/// );
/// assert_eq!(
///     expand_bits(&vec![0b0001_1101], 0),
///     [false, false, false, true, true, true, false, true]
/// );
/// ```
pub fn expand_bits(data: &[u8], skip: usize) -> Vec<bool> {
    // Validate once, up front: the old code re-checked `skip` on every byte
    // and skipped the check entirely for empty input.
    match skip {
        0..=7 => (),
        _ => panic!("Skip size should be in range 0 - 7."),
    };
    let width = 8 - skip;
    let mut bits = Vec::with_capacity(data.len() * width);
    for &byte in data {
        // Drop the skipped high bits, then peel off the rest MSB-first.
        let mut shifted = byte << skip;
        for _ in 0..width {
            bits.push(shifted & 0b1000_0000 != 0);
            shifted <<= 1;
        }
    }
    bits
}
377
/// Restore the data from a bit vector (the inverse of `expand_bits`).
///
/// Every `8 - skip` bits become one byte, aligned to the low end of the
/// byte; a short final group fills only the high positions of its span.
/// Panics if `skip` is outside 0 - 7.
/// ```
/// use eva_crypto::generic::restore_data;
/// assert_eq!(
///     restore_data(&vec![false, false, false, true, true, true, false, true], 4),
///     [0b0001, 0b1101]
/// );
/// assert_eq!(
///     restore_data(&vec![false, false, false, true, true, true, false, true], 0),
///     [0b0001_1101]
/// );
/// ```
pub fn restore_data(bits: &[bool], skip: usize) -> Vec<u8> {
    // Validate like `expand_bits`: previously skip = 8 died inside
    // `step_by(0)` with an unrelated panic message.
    match skip {
        0..=7 => (),
        _ => panic!("Skip size should be in range 0 - 7."),
    };
    let width = 8 - skip;
    let mut bytes = Vec::with_capacity(bits.len() / width + 1);
    // `chunks` tolerates a short final chunk; the old indexing panicked
    // out-of-bounds when `bits.len()` was not a multiple of `width`.
    for chunk in bits.chunks(width) {
        let mut byte = 0u8;
        for (offset, &bit) in chunk.iter().enumerate() {
            if bit {
                byte |= 0b1000_0000 >> (skip + offset);
            }
        }
        bytes.push(byte);
    }
    bytes
}