1use aes::cipher::generic_array::{typenum::U16, typenum::U8, GenericArray};
63use aes::cipher::{BlockEncrypt, KeyInit};
64use aes::Aes128;
65use byteorder::{ByteOrder, LittleEndian};
66use rand::{CryptoRng, Error, RngCore, SeedableRng};
67use std::mem;
68use std::slice;
69
// Size in bytes of one AES block (AES-128 block size).
const AES_BLK_SIZE: usize = 16;
// Number of AES blocks encrypted per batch. Kept in two widths so the
// 128-bit counter arithmetic and the byte/slice indexing each use their
// natural integer type without casts.
const PIPELINES_U128: u128 = 8;
const PIPELINES_USIZE: usize = 8;
// Total byte size of the buffered random state: 8 blocks * 16 bytes = 128.
const STATE_SIZE: usize = PIPELINES_USIZE * AES_BLK_SIZE;
// A seed is exactly one AES-128 key.
pub const SEED_SIZE: usize = AES_BLK_SIZE;
pub type RngSeed = [u8; SEED_SIZE];

// One 16-byte AES block, and the batch of 8 blocks encrypted together.
type Block128 = GenericArray<u8, U16>;
type Block128x8 = GenericArray<Block128, U8>;
79
/// Counter-mode state of the AES-based PRNG: one batch of (encrypted)
/// output blocks plus bookkeeping for how much of it has been handed out.
#[derive(Clone)]
pub struct AesRngState {
    // Current batch of output blocks (counters after `next`, ciphertext
    // once the owner has encrypted them).
    blocks: Block128x8,
    // Counter value to load into the first block of the *next* batch.
    next_index: u128,
    // Number of bytes of `blocks` already consumed by readers.
    used_bytes: usize,
}
86
87impl Default for AesRngState {
88 fn default() -> Self {
89 AesRngState::init()
90 }
91}
92
93fn create_init_state() -> Block128x8 {
97 let mut state = [0_u8; STATE_SIZE];
98 Block128x8::from_exact_iter((0..PIPELINES_USIZE).map(|i| {
99 LittleEndian::write_u128(
100 &mut state[i * AES_BLK_SIZE..(i + 1) * AES_BLK_SIZE],
101 i as u128,
102 );
103 let sliced_state = &mut state[i * AES_BLK_SIZE..(i + 1) * AES_BLK_SIZE];
104 let block = GenericArray::from_mut_slice(sliced_state);
105 *block
106 }))
107 .unwrap()
108}
109
impl AesRngState {
    /// Views the eight output blocks as one contiguous mutable byte slice.
    fn as_mut_bytes(&mut self) -> &mut [u8] {
        #[allow(unsafe_code)]
        // SAFETY: `Block128x8` is a `GenericArray` of `GenericArray`s of
        // `u8`, i.e. 8 * 16 = STATE_SIZE contiguous bytes with no padding,
        // so reinterpreting the field as STATE_SIZE bytes stays within its
        // allocation. The returned slice borrows `self` mutably, so no
        // aliasing access is possible for its lifetime.
        unsafe {
            slice::from_raw_parts_mut(&mut self.blocks as *mut Block128x8 as *mut u8, STATE_SIZE)
        }
    }

    /// Initial state: blocks hold counters `0..8`; the next batch starts
    /// at counter 8 and no bytes have been consumed yet.
    fn init() -> Self {
        AesRngState {
            blocks: create_init_state(),
            next_index: PIPELINES_U128,
            used_bytes: 0,
        }
    }

    /// Advances to the next batch: reloads every block with its fresh
    /// little-endian counter and marks the buffer as unconsumed.
    /// The caller is expected to re-encrypt `blocks` afterwards
    /// (see `AesRng::next`).
    fn next(&mut self) {
        let counter = self.next_index;
        let blocks_bytes = self.as_mut_bytes();
        for i in 0..PIPELINES_USIZE {
            LittleEndian::write_u128(
                &mut blocks_bytes[i * AES_BLK_SIZE..(i + 1) * AES_BLK_SIZE],
                counter + i as u128,
            );
        }
        self.next_index += PIPELINES_U128;
        self.used_bytes = 0;
    }
}
143
/// AES-128 counter-mode pseudorandom generator: encrypts eight counter
/// blocks per batch, with a small cache for single-bit extraction.
#[derive(Clone)]
pub struct AesRng {
    // Counter blocks and consumption bookkeeping.
    state: AesRngState,
    // AES-128 keyed with the seed.
    cipher: Aes128,
    // Number of bits still available in `cached_bits`.
    n_cached_bits: usize,
    // Bit cache, refilled from `next_u64` by `get_bit`.
    cached_bits: u64,
}
152
153impl SeedableRng for AesRng {
154 type Seed = RngSeed;
155 #[inline]
159 fn from_seed(seed: Self::Seed) -> Self {
160 let key = GenericArray::from(seed);
161 let mut out = AesRng {
162 state: AesRngState::default(),
163 cipher: Aes128::new(&key),
164 n_cached_bits: 0,
165 cached_bits: 0,
166 };
167 out.init();
168 out
169 }
170}
171
172impl AesRng {
173 fn init(&mut self) {
176 self.cipher.encrypt_blocks(&mut self.state.blocks);
177 }
178
179 fn next(&mut self) {
183 self.state.next();
184 self.cipher.encrypt_blocks(&mut self.state.blocks);
185 }
186
187 #[deprecated(since = "0.2.0")]
188 pub fn generate_random_key() -> [u8; SEED_SIZE] {
189 Self::generate_random_seed()
190 }
191
192 pub fn generate_random_seed() -> [u8; SEED_SIZE] {
193 let mut seed = [0u8; SEED_SIZE];
194 let mut rng = rand::thread_rng();
195 rng.fill_bytes(&mut seed);
196 seed
197 }
198
199 pub fn from_random_seed() -> Self {
207 let seed = AesRng::generate_random_seed();
208 Self::from_seed(seed)
209 }
210
211 pub fn get_bit(&mut self) -> u8 {
213 if self.n_cached_bits == 0 {
214 self.cached_bits = self.next_u64();
215 self.n_cached_bits = 64;
216 }
217 self.n_cached_bits -= 1;
218 let result: u8 = (self.cached_bits & 1) as u8;
219 self.cached_bits >>= 1;
220 result
221 }
222}
223
impl RngCore for AesRng {
    /// Returns 4 random bytes read little-endian from the state buffer,
    /// refreshing the buffer when fewer than 4 bytes would remain.
    fn next_u32(&mut self) -> u32 {
        let u32_size = mem::size_of::<u32>();
        // NOTE(review): `>=` refreshes even when exactly 4 bytes remain,
        // discarding them. This matches the pinned test vectors below, so
        // it is deliberately kept as-is.
        if self.state.used_bytes >= STATE_SIZE - u32_size {
            self.next();
        }
        let used_bytes = self.state.used_bytes;
        self.state.used_bytes += u32_size;
        let blocks_bytes = self.state.as_mut_bytes();
        LittleEndian::read_u32(&blocks_bytes[used_bytes..used_bytes + u32_size])
    }

    /// Returns 8 random bytes read little-endian from the state buffer,
    /// refreshing the buffer when fewer than 8 bytes would remain (same
    /// `>=` edge behaviour as `next_u32`).
    fn next_u64(&mut self) -> u64 {
        let u64_size = mem::size_of::<u64>();
        if self.state.used_bytes >= STATE_SIZE - u64_size {
            self.next();
        }
        let used_bytes = self.state.used_bytes;
        self.state.used_bytes += u64_size;
        LittleEndian::read_u64(&self.state.as_mut_bytes()[used_bytes..used_bytes + u64_size])
    }

    /// Fills `dest` with random bytes: first drains what remains of the
    /// current buffer, then copies whole refreshed buffers, then a final
    /// partial copy. Leftover bytes stay buffered for later calls.
    fn fill_bytes(&mut self, dest: &mut [u8]) {
        // Bytes of `dest` we can serve before the next refresh.
        let mut read_len = STATE_SIZE - self.state.used_bytes;
        let mut dest_start = 0;

        // Copy full chunks while `dest` needs more than what is buffered.
        while read_len < dest.len() {
            let src_start = self.state.used_bytes;
            dest[dest_start..read_len]
                .copy_from_slice(&self.state.as_mut_bytes()[src_start..STATE_SIZE]);
            self.next();
            dest_start = read_len;
            read_len += STATE_SIZE;
        }

        // Final (possibly partial, possibly empty) copy from the buffer.
        let src_start = self.state.used_bytes;
        let remainder = dest.len() - dest_start;
        let dest_len = dest.len();

        dest[dest_start..dest_len]
            .copy_from_slice(&self.state.as_mut_bytes()[src_start..src_start + remainder]);
        self.state.used_bytes += remainder;
    }

    /// Infallible: delegates to `fill_bytes` and always returns `Ok`.
    fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {
        self.fill_bytes(dest);
        Ok(())
    }
}
276
// Marker trait: advertises this AES-CTR generator as cryptographically secure.
impl CryptoRng for AesRng {}
278
#[cfg(test)]
mod tests {
    use super::*;

    /// The first batch of output must equal AES-128_k applied to the
    /// little-endian counter blocks 0..=7.
    #[test]
    fn test_prng_match_aes() {
        let seed = [0u8; SEED_SIZE];
        let cipher = Aes128::new(&GenericArray::from(seed));

        // Counter blocks 0..=7: little-endian, so only byte 0 varies.
        let mut expected = Block128x8::default();
        for (i, block) in expected.iter_mut().enumerate() {
            block[0] = i as u8;
        }
        cipher.encrypt_blocks(&mut expected);

        let mut rng = AesRng::from_seed(seed);
        let mut out = [0u8; 16 * 8];
        rng.try_fill_bytes(&mut out).expect("");

        assert_eq!(rng.state.blocks, expected);
    }

    /// Known-answer vector after 129 fills of a 16-byte buffer.
    #[test]
    fn test_prng_vector1() {
        let mut rng = AesRng::from_seed([0u8; SEED_SIZE]);
        let mut out = [0u8; 16];

        for _ in 0..129 {
            rng.try_fill_bytes(&mut out).expect("");
        }

        let expected: [u8; 16] = [
            58, 215, 142, 114, 108, 30, 192, 43, 126, 191, 233, 43, 35, 217, 236, 52,
        ];
        assert_eq!(expected, out);
    }

    /// Known-answer vector after 17 fills of a 16-byte buffer.
    #[test]
    fn test_prng_vector2() {
        let mut rng = AesRng::from_seed([0u8; SEED_SIZE]);
        let mut out = [0u8; 16];

        for _ in 0..17 {
            rng.try_fill_bytes(&mut out).expect("");
        }

        let expected: [u8; 16] = [
            245, 86, 155, 58, 182, 166, 209, 30, 253, 225, 191, 10, 100, 198, 133, 74,
        ];
        assert_eq!(expected, out);
    }

    /// Bookkeeping: a full-buffer fill consumes every byte, and the
    /// following `next_u32` refreshes the buffer and consumes 4 bytes.
    #[test]
    fn test_prng_used_bytes() {
        let mut rng = AesRng::from_random_seed();
        let mut out = [0u8; 16 * 8];
        rng.try_fill_bytes(&mut out).expect("");

        assert_eq!(rng.state.used_bytes, 16 * 8);

        let _ = rng.next_u32();
        assert_eq!(rng.state.used_bytes, 4);
    }

    /// Smoke test: a randomly seeded generator produces values.
    #[test]
    fn test_seeded_prng() {
        let mut rng = AesRng::from_random_seed();
        let _ = rng.next_u32();
        let _ = rng.next_u64();
    }

    /// A clone must continue the exact same output stream as its source.
    #[test]
    fn test_cloned_prng() {
        let mut rng = AesRng::from_random_seed();
        let mut twin = rng.clone();

        assert_eq!(rng.next_u32(), twin.next_u32());
    }
}