// noxtls_crypto/sym/encryption/aria.rs — ARIA block cipher and modes of operation.

1// Copyright (c) 2019-2026, Argenox Technologies LLC
2// All rights reserved.
3//
4// SPDX-License-Identifier: GPL-2.0-only OR LicenseRef-Argenox-Commercial-License
5//
6// This file is part of the NoxTLS Library.
7//
8// This program is free software: you can redistribute it and/or modify
9// it under the terms of the GNU General Public License as published by the
10// Free Software Foundation; version 2 of the License.
11//
12// Alternatively, this file may be used under the terms of a commercial
13// license from Argenox Technologies LLC.
14//
15// See `noxtls/LICENSE` and `noxtls/LICENSE.md` in this repository for full details.
16// CONTACT: info@argenox.com
17
18use crate::internal_alloc::Vec;
19use noxtls_core::{Error, Result};
20
21const ARIA_S1: [u8; 256] = [
22    0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76,
23    0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0,
24    0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15,
25    0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75,
26    0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84,
27    0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf,
28    0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8,
29    0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2,
30    0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73,
31    0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb,
32    0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79,
33    0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08,
34    0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a,
35    0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e,
36    0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf,
37    0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16,
38];
39
40const ARIA_S2: [u8; 256] = [
41    0xe2, 0x4e, 0x54, 0xfc, 0x94, 0xc2, 0x4a, 0xcc, 0x62, 0x0d, 0x6a, 0x46, 0x3c, 0x4d, 0x8b, 0xd1,
42    0x5e, 0xfa, 0x64, 0xcb, 0xb4, 0x97, 0xbe, 0x2b, 0xbc, 0x77, 0x2e, 0x03, 0xd3, 0x19, 0x59, 0xc1,
43    0x1d, 0x06, 0x41, 0x6b, 0x55, 0xf0, 0x99, 0x69, 0xea, 0x9c, 0x18, 0xae, 0x63, 0xdf, 0xe7, 0xbb,
44    0x00, 0x73, 0x66, 0xfb, 0x96, 0x4c, 0x85, 0xe4, 0x3a, 0x09, 0x45, 0xaa, 0x0f, 0xee, 0x10, 0xeb,
45    0x2d, 0x7f, 0xf4, 0x29, 0xac, 0xcf, 0xad, 0x91, 0x8d, 0x78, 0xc8, 0x95, 0xf9, 0x2f, 0xce, 0xcd,
46    0x08, 0x7a, 0x88, 0x38, 0x5c, 0x83, 0x2a, 0x28, 0x47, 0xdb, 0xb8, 0xc7, 0x93, 0xa4, 0x12, 0x53,
47    0xff, 0x87, 0x0e, 0x31, 0x36, 0x21, 0x58, 0x48, 0x01, 0x8e, 0x37, 0x74, 0x32, 0xca, 0xe9, 0xb1,
48    0xb7, 0xab, 0x0c, 0xd7, 0xc4, 0x56, 0x42, 0x26, 0x07, 0x98, 0x60, 0xd9, 0xb6, 0xb9, 0x11, 0x40,
49    0xec, 0x20, 0x8c, 0xbd, 0xa0, 0xc9, 0x84, 0x04, 0x49, 0x23, 0xf1, 0x4f, 0x50, 0x1f, 0x13, 0xdc,
50    0xd8, 0xc0, 0x9e, 0x57, 0xe3, 0xc3, 0x7b, 0x65, 0x3b, 0x02, 0x8f, 0x3e, 0xe8, 0x25, 0x92, 0xe5,
51    0x15, 0xdd, 0xfd, 0x17, 0xa9, 0xbf, 0xd4, 0x9a, 0x7e, 0xc5, 0x39, 0x67, 0xfe, 0x76, 0x9d, 0x43,
52    0xa7, 0xe1, 0xd0, 0xf5, 0x68, 0xf2, 0x1b, 0x34, 0x70, 0x05, 0xa3, 0x8a, 0xd5, 0x79, 0x86, 0xa8,
53    0x30, 0xc6, 0x51, 0x4b, 0x1e, 0xa6, 0x27, 0xf6, 0x35, 0xd2, 0x6e, 0x24, 0x16, 0x82, 0x5f, 0xda,
54    0xe6, 0x75, 0xa2, 0xef, 0x2c, 0xb2, 0x1c, 0x9f, 0x5d, 0x6f, 0x80, 0x0a, 0x72, 0x44, 0x9b, 0x6c,
55    0x90, 0x0b, 0x5b, 0x33, 0x7d, 0x5a, 0x52, 0xf3, 0x61, 0xa1, 0xf7, 0xb0, 0xd6, 0x3f, 0x7c, 0x6d,
56    0xed, 0x14, 0xe0, 0xa5, 0x3d, 0x22, 0xb3, 0xf8, 0x89, 0xde, 0x71, 0x1a, 0xaf, 0xba, 0xb5, 0x81,
57];
58
59const C1: [u8; 16] = [
60    0x51, 0x7c, 0xc1, 0xb7, 0x27, 0x22, 0x0a, 0x94, 0xfe, 0x13, 0xab, 0xe8, 0xfa, 0x9a, 0x6e, 0xe0,
61];
62const C2: [u8; 16] = [
63    0x6d, 0xb1, 0x4a, 0xcc, 0x9e, 0x21, 0xc8, 0x20, 0xff, 0x28, 0xb1, 0xd5, 0xef, 0x5d, 0xe2, 0xb0,
64];
65const C3: [u8; 16] = [
66    0xdb, 0x92, 0x37, 0x1d, 0x21, 0x26, 0xe9, 0x70, 0x03, 0x24, 0x97, 0x75, 0x04, 0xe8, 0xc9, 0x0e,
67];
68
69/// Implements ARIA block cipher with key schedule and block operations.
70#[derive(Debug, Clone)]
71pub struct AriaCipher {
72    round_keys: [[u8; 16]; 17],
73    rounds: usize,
74}
75
76impl AriaCipher {
77    /// Constructs ARIA key schedule for 128/192/256-bit keys.
78    ///
79    /// # Arguments
80    /// * `key`: ARIA key bytes (16, 24, or 32 bytes).
81    ///
82    /// # Returns
83    /// Initialized `AriaCipher` with round keys.
84    pub fn new(key: &[u8]) -> Result<Self> {
85        let (rounds, ck1, ck2, ck3) = match key.len() {
86            16 => (12, &C1, &C2, &C3),
87            24 => (14, &C2, &C3, &C1),
88            32 => (16, &C3, &C1, &C2),
89            _ => {
90                return Err(Error::InvalidLength(
91                    "aria key length must be 16, 24, or 32 bytes",
92                ));
93            }
94        };
95
96        let mut kl = [0_u8; 16];
97        kl.copy_from_slice(&key[..16]);
98        let mut kr = [0_u8; 16];
99        match key.len() {
100            16 => {}
101            24 => {
102                kr[..8].copy_from_slice(&key[16..24]);
103            }
104            32 => {
105                kr.copy_from_slice(&key[16..32]);
106            }
107            _ => unreachable!(),
108        }
109
110        let w0 = kl;
111        let mut w1 = fo(&w0, ck1);
112        xor_block_in_place(&mut w1, &kr);
113        let mut w2 = fe(&w1, ck2);
114        xor_block_in_place(&mut w2, &w0);
115        let mut w3 = fo(&w2, ck3);
116        xor_block_in_place(&mut w3, &w1);
117
118        let mut ek = [[0_u8; 16]; 17];
119        let mut rot = [0_u8; 16];
120
121        rotate_right_128(&w1, &mut rot, 19);
122        ek[0] = xor_block(&w0, &rot);
123        rotate_right_128(&w2, &mut rot, 19);
124        ek[1] = xor_block(&w1, &rot);
125        rotate_right_128(&w3, &mut rot, 19);
126        ek[2] = xor_block(&w2, &rot);
127        rotate_right_128(&w0, &mut rot, 19);
128        ek[3] = xor_block(&rot, &w3);
129
130        rotate_right_128(&w1, &mut rot, 31);
131        ek[4] = xor_block(&w0, &rot);
132        rotate_right_128(&w2, &mut rot, 31);
133        ek[5] = xor_block(&w1, &rot);
134        rotate_right_128(&w3, &mut rot, 31);
135        ek[6] = xor_block(&w2, &rot);
136        rotate_right_128(&w0, &mut rot, 31);
137        ek[7] = xor_block(&rot, &w3);
138
139        rotate_left_128(&w1, &mut rot, 61);
140        ek[8] = xor_block(&w0, &rot);
141        rotate_left_128(&w2, &mut rot, 61);
142        ek[9] = xor_block(&w1, &rot);
143        rotate_left_128(&w3, &mut rot, 61);
144        ek[10] = xor_block(&w2, &rot);
145        rotate_left_128(&w0, &mut rot, 61);
146        ek[11] = xor_block(&rot, &w3);
147
148        rotate_left_128(&w1, &mut rot, 31);
149        ek[12] = xor_block(&w0, &rot);
150        rotate_left_128(&w2, &mut rot, 31);
151        ek[13] = xor_block(&w1, &rot);
152        rotate_left_128(&w3, &mut rot, 31);
153        ek[14] = xor_block(&w2, &rot);
154        rotate_left_128(&w0, &mut rot, 31);
155        ek[15] = xor_block(&rot, &w3);
156
157        rotate_left_128(&w1, &mut rot, 19);
158        ek[16] = xor_block(&w0, &rot);
159
160        let mut enc = Self {
161            round_keys: [[0_u8; 16]; 17],
162            rounds,
163        };
164        enc.round_keys[..=rounds].copy_from_slice(&ek[..=rounds]);
165        Ok(enc)
166    }
167
168    /// Encrypts one 16-byte ARIA block in place.
169    ///
170    /// # Arguments
171    /// * `block`: Mutable 16-byte block to encrypt in place.
172    pub fn encrypt_block(&self, block: &mut [u8; 16]) -> Result<()> {
173        let mut state = *block;
174        for round in 1..self.rounds {
175            if (round & 1) != 0 {
176                state = fo(&state, &self.round_keys[round - 1]);
177            } else {
178                state = fe(&state, &self.round_keys[round - 1]);
179            }
180        }
181        xor_block_in_place(&mut state, &self.round_keys[self.rounds - 1]);
182        sl2(&mut state);
183        xor_block_in_place(&mut state, &self.round_keys[self.rounds]);
184        *block = state;
185        Ok(())
186    }
187
188    /// Decrypts one 16-byte ARIA block in place.
189    ///
190    /// # Arguments
191    /// * `block`: Mutable 16-byte block to decrypt in place.
192    pub fn decrypt_block(&self, block: &mut [u8; 16]) -> Result<()> {
193        let mut temp = self.round_keys;
194        let rounds = self.rounds;
195
196        let mut dec_keys = [[0_u8; 16]; 17];
197        dec_keys[0] = temp[rounds];
198        for i in 1..rounds {
199            dec_keys[i] = temp[rounds - i];
200            diffusion_layer(&mut dec_keys[i]);
201        }
202        dec_keys[rounds] = temp[0];
203
204        temp = dec_keys;
205        let mut state = *block;
206        for round in 1..rounds {
207            if (round & 1) != 0 {
208                state = fo(&state, &temp[round - 1]);
209            } else {
210                state = fe(&state, &temp[round - 1]);
211            }
212        }
213        xor_block_in_place(&mut state, &temp[rounds - 1]);
214        sl2(&mut state);
215        xor_block_in_place(&mut state, &temp[rounds]);
216        *block = state;
217        Ok(())
218    }
219}
220
221/// Encrypts ARIA-ECB over full blocks; input length must be multiple of 16.
222#[cfg(feature = "hazardous-legacy-crypto")]
223pub fn aria_ecb_encrypt(cipher: &AriaCipher, input: &[u8]) -> Result<Vec<u8>> {
224    if !input.len().is_multiple_of(16) {
225        return Err(Error::InvalidLength("aria ecb input must be block-aligned"));
226    }
227    let mut out = input.to_vec();
228    for chunk in out.chunks_exact_mut(16) {
229        let mut block = [0_u8; 16];
230        block.copy_from_slice(chunk);
231        cipher.encrypt_block(&mut block)?;
232        chunk.copy_from_slice(&block);
233    }
234    Ok(out)
235}
236
237/// Decrypts ARIA-ECB over full blocks; input length must be multiple of 16.
238#[cfg(feature = "hazardous-legacy-crypto")]
239pub fn aria_ecb_decrypt(cipher: &AriaCipher, input: &[u8]) -> Result<Vec<u8>> {
240    if !input.len().is_multiple_of(16) {
241        return Err(Error::InvalidLength("aria ecb input must be block-aligned"));
242    }
243    let mut out = input.to_vec();
244    for chunk in out.chunks_exact_mut(16) {
245        let mut block = [0_u8; 16];
246        block.copy_from_slice(chunk);
247        cipher.decrypt_block(&mut block)?;
248        chunk.copy_from_slice(&block);
249    }
250    Ok(out)
251}
252
253/// Encrypts ARIA-CBC with a 16-byte IV and block-aligned plaintext.
254pub fn aria_cbc_encrypt(cipher: &AriaCipher, iv: &[u8; 16], plaintext: &[u8]) -> Result<Vec<u8>> {
255    if !plaintext.len().is_multiple_of(16) {
256        return Err(Error::InvalidLength("aria cbc input must be block-aligned"));
257    }
258    let mut out = plaintext.to_vec();
259    let mut prev = *iv;
260    for chunk in out.chunks_exact_mut(16) {
261        for (i, byte) in chunk.iter_mut().enumerate() {
262            *byte ^= prev[i];
263        }
264        let mut block = [0_u8; 16];
265        block.copy_from_slice(chunk);
266        cipher.encrypt_block(&mut block)?;
267        chunk.copy_from_slice(&block);
268        prev = block;
269    }
270    Ok(out)
271}
272
273/// Decrypts ARIA-CBC with a 16-byte IV and block-aligned ciphertext.
274pub fn aria_cbc_decrypt(cipher: &AriaCipher, iv: &[u8; 16], ciphertext: &[u8]) -> Result<Vec<u8>> {
275    if !ciphertext.len().is_multiple_of(16) {
276        return Err(Error::InvalidLength("aria cbc input must be block-aligned"));
277    }
278    let mut out = ciphertext.to_vec();
279    let mut prev = *iv;
280    for chunk in out.chunks_exact_mut(16) {
281        let mut cur = [0_u8; 16];
282        cur.copy_from_slice(chunk);
283        let mut block = cur;
284        cipher.decrypt_block(&mut block)?;
285        for i in 0..16 {
286            block[i] ^= prev[i];
287        }
288        chunk.copy_from_slice(&block);
289        prev = cur;
290    }
291    Ok(out)
292}
293
294/// Applies ARIA-CTR transformation using a 16-byte initial counter block.
295#[must_use]
296pub fn aria_ctr_apply(cipher: &AriaCipher, nonce_counter: &[u8; 16], input: &[u8]) -> Vec<u8> {
297    aria_ctr_encrypt(cipher, nonce_counter, input)
298}
299
300/// Encrypts bytes with ARIA-CTR using a 16-byte initial counter block.
301#[must_use]
302pub fn aria_ctr_encrypt(
303    cipher: &AriaCipher,
304    nonce_counter: &[u8; 16],
305    plaintext: &[u8],
306) -> Vec<u8> {
307    aria_ctr_process(cipher, nonce_counter, plaintext)
308}
309
310/// Decrypts bytes with ARIA-CTR using a 16-byte initial counter block.
311#[must_use]
312pub fn aria_ctr_decrypt(
313    cipher: &AriaCipher,
314    nonce_counter: &[u8; 16],
315    ciphertext: &[u8],
316) -> Vec<u8> {
317    aria_ctr_process(cipher, nonce_counter, ciphertext)
318}
319
320/// Applies CTR keystream XOR (same operation for encrypt/decrypt).
321///
322/// # Arguments
323///
324/// * `cipher` — `&AriaCipher`.
325/// * `nonce_counter` — `&[u8; 16]`.
326/// * `input` — `&[u8]`.
327///
328/// # Returns
329///
330/// `Vec<u8>` produced by `aria_ctr_process` (see implementation).
331///
332/// # Panics
333///
334/// This function does not panic unless otherwise noted.
335fn aria_ctr_process(cipher: &AriaCipher, nonce_counter: &[u8; 16], input: &[u8]) -> Vec<u8> {
336    let mut out = vec![0_u8; input.len()];
337    let mut counter = *nonce_counter;
338    let mut offset = 0;
339    while offset < input.len() {
340        let mut stream = counter;
341        cipher
342            .encrypt_block(&mut stream)
343            .expect("aria block encryption should not fail");
344        let chunk_len = (input.len() - offset).min(16);
345        for i in 0..chunk_len {
346            out[offset + i] = input[offset + i] ^ stream[i];
347        }
348        increment_be(&mut counter);
349        offset += chunk_len;
350    }
351    out
352}
353
354/// Applies ARIA-CFB-128 transformation with a 16-byte IV.
355#[must_use]
356pub fn aria_cfb_apply(cipher: &AriaCipher, iv: &[u8; 16], input: &[u8]) -> Vec<u8> {
357    aria_cfb_encrypt(cipher, iv, input)
358}
359
360/// Encrypts bytes with ARIA-CFB-128 using a 16-byte IV/register.
361#[must_use]
362pub fn aria_cfb_encrypt(cipher: &AriaCipher, iv: &[u8; 16], plaintext: &[u8]) -> Vec<u8> {
363    aria_cfb_process(cipher, iv, plaintext, true)
364}
365
366/// Decrypts bytes with ARIA-CFB-128 using a 16-byte IV/register.
367#[must_use]
368pub fn aria_cfb_decrypt(cipher: &AriaCipher, iv: &[u8; 16], ciphertext: &[u8]) -> Vec<u8> {
369    aria_cfb_process(cipher, iv, ciphertext, false)
370}
371
372/// Applies CFB keystream XOR with direction-specific register updates.
373///
374/// # Arguments
375///
376/// * `cipher` — `&AriaCipher`.
377/// * `iv` — `&[u8; 16]`.
378/// * `input` — `&[u8]`.
379/// * `encrypt` — `bool`.
380///
381/// # Returns
382///
383/// `Vec<u8>` produced by `aria_cfb_process` (see implementation).
384///
385/// # Panics
386///
387/// This function does not panic unless otherwise noted.
388fn aria_cfb_process(cipher: &AriaCipher, iv: &[u8; 16], input: &[u8], encrypt: bool) -> Vec<u8> {
389    let mut out = vec![0_u8; input.len()];
390    let mut reg = *iv;
391    let mut offset = 0;
392    while offset < input.len() {
393        let mut stream = reg;
394        cipher
395            .encrypt_block(&mut stream)
396            .expect("aria block encryption should not fail");
397        let chunk_len = (input.len() - offset).min(16);
398        for i in 0..chunk_len {
399            out[offset + i] = input[offset + i] ^ stream[i];
400        }
401        if encrypt {
402            shift_register_append(&mut reg, &out[offset..offset + chunk_len]);
403        } else {
404            shift_register_append(&mut reg, &input[offset..offset + chunk_len]);
405        }
406        offset += chunk_len;
407    }
408    out
409}
410
411/// Applies ARIA-OFB transformation with a 16-byte IV.
412#[must_use]
413pub fn aria_ofb_apply(cipher: &AriaCipher, iv: &[u8; 16], input: &[u8]) -> Vec<u8> {
414    aria_ofb_encrypt(cipher, iv, input)
415}
416
417/// Encrypts bytes with ARIA-OFB using a 16-byte IV.
418#[must_use]
419pub fn aria_ofb_encrypt(cipher: &AriaCipher, iv: &[u8; 16], plaintext: &[u8]) -> Vec<u8> {
420    aria_ofb_process(cipher, iv, plaintext)
421}
422
423/// Decrypts bytes with ARIA-OFB using a 16-byte IV.
424#[must_use]
425pub fn aria_ofb_decrypt(cipher: &AriaCipher, iv: &[u8; 16], ciphertext: &[u8]) -> Vec<u8> {
426    aria_ofb_process(cipher, iv, ciphertext)
427}
428
429/// Applies OFB keystream XOR (same operation for encrypt/decrypt).
430///
431/// # Arguments
432///
433/// * `cipher` — `&AriaCipher`.
434/// * `iv` — `&[u8; 16]`.
435/// * `input` — `&[u8]`.
436///
437/// # Returns
438///
439/// `Vec<u8>` produced by `aria_ofb_process` (see implementation).
440///
441/// # Panics
442///
443/// This function does not panic unless otherwise noted.
444fn aria_ofb_process(cipher: &AriaCipher, iv: &[u8; 16], input: &[u8]) -> Vec<u8> {
445    let mut out = vec![0_u8; input.len()];
446    let mut stream = *iv;
447    let mut offset = 0;
448    while offset < input.len() {
449        cipher
450            .encrypt_block(&mut stream)
451            .expect("aria block encryption should not fail");
452        let chunk_len = (input.len() - offset).min(16);
453        for i in 0..chunk_len {
454            out[offset + i] = input[offset + i] ^ stream[i];
455        }
456        offset += chunk_len;
457    }
458    out
459}
460
461/// Increments a big-endian 128-bit counter in place.
462///
463/// # Arguments
464///
465/// * `counter` — `&mut [u8; 16]`.
466///
467/// # Returns
468///
469/// `()` when there is no return data.
470///
471/// # Panics
472///
473/// This function does not panic unless otherwise noted.
474fn increment_be(counter: &mut [u8; 16]) {
475    for b in counter.iter_mut().rev() {
476        *b = b.wrapping_add(1);
477        if *b != 0 {
478            break;
479        }
480    }
481}
482
483/// Applies ARIA FO layer.
484///
485/// # Arguments
486///
487/// * `input` — `&[u8; 16]`.
488/// * `rk` — `&[u8; 16]`.
489///
490/// # Returns
491///
492/// `[u8` produced by `fo` (see implementation).
493///
494/// # Panics
495///
496/// This function does not panic unless otherwise noted.
497fn fo(input: &[u8; 16], rk: &[u8; 16]) -> [u8; 16] {
498    let mut out = *input;
499    xor_block_in_place(&mut out, rk);
500    sl1(&mut out);
501    diffusion_layer(&mut out);
502    out
503}
504
505/// Applies ARIA FE layer.
506///
507/// # Arguments
508///
509/// * `input` — `&[u8; 16]`.
510/// * `rk` — `&[u8; 16]`.
511///
512/// # Returns
513///
514/// `[u8` produced by `fe` (see implementation).
515///
516/// # Panics
517///
518/// This function does not panic unless otherwise noted.
519fn fe(input: &[u8; 16], rk: &[u8; 16]) -> [u8; 16] {
520    let mut out = *input;
521    xor_block_in_place(&mut out, rk);
522    sl2(&mut out);
523    diffusion_layer(&mut out);
524    out
525}
526
527/// Applies ARIA substitution layer type 1.
528///
529/// # Arguments
530///
531/// * `state` — `&mut [u8; 16]`.
532///
533/// # Returns
534///
535/// `()` when there is no return data.
536///
537/// # Panics
538///
539/// This function does not panic unless otherwise noted.
540fn sl1(state: &mut [u8; 16]) {
541    let (inv_s1, inv_s2) = inverse_sboxes();
542    state[0] = ARIA_S1[state[0] as usize];
543    state[1] = ARIA_S2[state[1] as usize];
544    state[2] = inv_s1[state[2] as usize];
545    state[3] = inv_s2[state[3] as usize];
546    state[4] = ARIA_S1[state[4] as usize];
547    state[5] = ARIA_S2[state[5] as usize];
548    state[6] = inv_s1[state[6] as usize];
549    state[7] = inv_s2[state[7] as usize];
550    state[8] = ARIA_S1[state[8] as usize];
551    state[9] = ARIA_S2[state[9] as usize];
552    state[10] = inv_s1[state[10] as usize];
553    state[11] = inv_s2[state[11] as usize];
554    state[12] = ARIA_S1[state[12] as usize];
555    state[13] = ARIA_S2[state[13] as usize];
556    state[14] = inv_s1[state[14] as usize];
557    state[15] = inv_s2[state[15] as usize];
558}
559
560/// Applies ARIA substitution layer type 2.
561///
562/// # Arguments
563///
564/// * `state` — `&mut [u8; 16]`.
565///
566/// # Returns
567///
568/// `()` when there is no return data.
569///
570/// # Panics
571///
572/// This function does not panic unless otherwise noted.
573fn sl2(state: &mut [u8; 16]) {
574    let (inv_s1, inv_s2) = inverse_sboxes();
575    state[0] = inv_s1[state[0] as usize];
576    state[1] = inv_s2[state[1] as usize];
577    state[2] = ARIA_S1[state[2] as usize];
578    state[3] = ARIA_S2[state[3] as usize];
579    state[4] = inv_s1[state[4] as usize];
580    state[5] = inv_s2[state[5] as usize];
581    state[6] = ARIA_S1[state[6] as usize];
582    state[7] = ARIA_S2[state[7] as usize];
583    state[8] = inv_s1[state[8] as usize];
584    state[9] = inv_s2[state[9] as usize];
585    state[10] = ARIA_S1[state[10] as usize];
586    state[11] = ARIA_S2[state[11] as usize];
587    state[12] = inv_s1[state[12] as usize];
588    state[13] = inv_s2[state[13] as usize];
589    state[14] = ARIA_S1[state[14] as usize];
590    state[15] = ARIA_S2[state[15] as usize];
591}
592
593/// Applies ARIA diffusion layer matrix multiplication.
594///
595/// # Arguments
596///
597/// * `state` — `&mut [u8; 16]`.
598///
599/// # Returns
600///
601/// `()` when there is no return data.
602///
603/// # Panics
604///
605/// This function does not panic unless otherwise noted.
606fn diffusion_layer(state: &mut [u8; 16]) {
607    let mut temp = [0_u8; 16];
608    temp[0] = state[3] ^ state[4] ^ state[6] ^ state[8] ^ state[9] ^ state[13] ^ state[14];
609    temp[1] = state[2] ^ state[5] ^ state[7] ^ state[8] ^ state[9] ^ state[12] ^ state[15];
610    temp[2] = state[1] ^ state[4] ^ state[6] ^ state[10] ^ state[11] ^ state[12] ^ state[15];
611    temp[3] = state[0] ^ state[5] ^ state[7] ^ state[10] ^ state[11] ^ state[13] ^ state[14];
612    temp[4] = state[0] ^ state[2] ^ state[5] ^ state[8] ^ state[11] ^ state[14] ^ state[15];
613    temp[5] = state[1] ^ state[3] ^ state[4] ^ state[9] ^ state[10] ^ state[14] ^ state[15];
614    temp[6] = state[0] ^ state[2] ^ state[7] ^ state[9] ^ state[10] ^ state[12] ^ state[13];
615    temp[7] = state[1] ^ state[3] ^ state[6] ^ state[8] ^ state[11] ^ state[12] ^ state[13];
616    temp[8] = state[0] ^ state[1] ^ state[4] ^ state[7] ^ state[10] ^ state[13] ^ state[15];
617    temp[9] = state[0] ^ state[1] ^ state[5] ^ state[6] ^ state[11] ^ state[12] ^ state[14];
618    temp[10] = state[2] ^ state[3] ^ state[5] ^ state[6] ^ state[8] ^ state[13] ^ state[15];
619    temp[11] = state[2] ^ state[3] ^ state[4] ^ state[7] ^ state[9] ^ state[12] ^ state[14];
620    temp[12] = state[1] ^ state[2] ^ state[6] ^ state[7] ^ state[9] ^ state[11] ^ state[12];
621    temp[13] = state[0] ^ state[3] ^ state[6] ^ state[7] ^ state[8] ^ state[10] ^ state[13];
622    temp[14] = state[0] ^ state[3] ^ state[4] ^ state[5] ^ state[9] ^ state[11] ^ state[14];
623    temp[15] = state[1] ^ state[2] ^ state[4] ^ state[5] ^ state[8] ^ state[10] ^ state[15];
624    *state = temp;
625}
626
627/// Builds inverse S-box tables from forward S-box constants.
628///
629/// # Arguments
630///
631/// * *(none)* — This function takes no parameters.
632///
633/// # Returns
634///
635/// `([u8` produced by `inverse_sboxes` (see implementation).
636///
637/// # Panics
638///
639/// This function does not panic unless otherwise noted.
640fn inverse_sboxes() -> ([u8; 256], [u8; 256]) {
641    let mut inv_s1 = [0_u8; 256];
642    let mut inv_s2 = [0_u8; 256];
643    for (idx, val) in ARIA_S1.iter().enumerate() {
644        inv_s1[*val as usize] = idx as u8;
645    }
646    for (idx, val) in ARIA_S2.iter().enumerate() {
647        inv_s2[*val as usize] = idx as u8;
648    }
649    (inv_s1, inv_s2)
650}
651
652/// XORs two 128-bit blocks.
653///
654/// # Arguments
655///
656/// * `a` — `&[u8; 16]`.
657/// * `b` — `&[u8; 16]`.
658///
659/// # Returns
660///
661/// `[u8` produced by `xor_block` (see implementation).
662///
663/// # Panics
664///
665/// This function does not panic unless otherwise noted.
666fn xor_block(a: &[u8; 16], b: &[u8; 16]) -> [u8; 16] {
667    let mut out = [0_u8; 16];
668    for i in 0..16 {
669        out[i] = a[i] ^ b[i];
670    }
671    out
672}
673
674/// XORs `rhs` into `lhs` block in place.
675///
676/// # Arguments
677///
678/// * `lhs` — `&mut [u8; 16]`.
679/// * `rhs` — `&[u8; 16]`.
680///
681/// # Returns
682///
683/// `()` when there is no return data.
684///
685/// # Panics
686///
687/// This function does not panic unless otherwise noted.
688fn xor_block_in_place(lhs: &mut [u8; 16], rhs: &[u8; 16]) {
689    for i in 0..16 {
690        lhs[i] ^= rhs[i];
691    }
692}
693
694/// Shifts CFB register left by segment length and appends segment bytes.
695///
696/// # Arguments
697///
698/// * `reg` — `&mut [u8; 16]`.
699/// * `segment` — `&[u8]`.
700///
701/// # Returns
702///
703/// `()` when there is no return data.
704///
705/// # Panics
706///
707/// This function does not panic unless otherwise noted.
708fn shift_register_append(reg: &mut [u8; 16], segment: &[u8]) {
709    debug_assert!(segment.len() <= 16);
710    if segment.len() == 16 {
711        reg.copy_from_slice(segment);
712        return;
713    }
714    let keep = 16 - segment.len();
715    reg.copy_within(segment.len().., 0);
716    reg[keep..].copy_from_slice(segment);
717}
718
719/// Rotates a 128-bit block right by `bits`.
720///
721/// # Arguments
722///
723/// * `input` — `&[u8; 16]`.
724/// * `out` — `&mut [u8; 16]`.
725/// * `bits` — `usize`.
726///
727/// # Returns
728///
729/// `()` when there is no return data.
730///
731/// # Panics
732///
733/// This function does not panic unless otherwise noted.
734fn rotate_right_128(input: &[u8; 16], out: &mut [u8; 16], bits: usize) {
735    let b = bits & 127;
736    if b == 0 {
737        *out = *input;
738        return;
739    }
740    let hi = u64::from_be_bytes(input[..8].try_into().expect("slice is 8 bytes"));
741    let lo = u64::from_be_bytes(input[8..].try_into().expect("slice is 8 bytes"));
742    let (new_hi, new_lo) = if b < 64 {
743        ((hi >> b) | (lo << (64 - b)), (lo >> b) | (hi << (64 - b)))
744    } else if b == 64 {
745        (lo, hi)
746    } else {
747        let s = b - 64;
748        ((lo >> s) | (hi << (64 - s)), (hi >> s) | (lo << (64 - s)))
749    };
750    out[..8].copy_from_slice(&new_hi.to_be_bytes());
751    out[8..].copy_from_slice(&new_lo.to_be_bytes());
752}
753
754/// Rotates a 128-bit block left by `bits`.
755///
756/// # Arguments
757///
758/// * `input` — `&[u8; 16]`.
759/// * `out` — `&mut [u8; 16]`.
760/// * `bits` — `usize`.
761///
762/// # Returns
763///
764/// `()` when there is no return data.
765///
766/// # Panics
767///
768/// This function does not panic unless otherwise noted.
769fn rotate_left_128(input: &[u8; 16], out: &mut [u8; 16], bits: usize) {
770    rotate_right_128(input, out, 128 - (bits & 127));
771}