ya_rand/secure.rs
use crate::rng::*;
use chachacha::{BUF_LEN_U64, ChaCha8Djb, SEED_LEN_U8};
use core::mem::MaybeUninit;
use getrandom::fill;

/// A cryptographically secure random number generator.
///
/// The current implementation uses ChaCha with 8 rounds and a 64-bit counter.
/// This allows for 1 ZiB (2<sup>70</sup> bytes) of output before cycling.
/// That's over 147 **quintillion** calls to [`SecureRng::u64`].
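///
/// # Example
///
/// A minimal usage sketch (illustrative; assumes `SecureRng` and the
/// generator traits are re-exported from the crate root):
///
/// ```ignore
/// use ya_rand::*;
///
/// let mut rng = SecureRng::try_new().expect("failed to gather OS entropy");
/// let value = rng.u64();    // one buffered 64-bit draw
/// let mut key = [0u8; 32];
/// rng.fill_bytes(&mut key); // bulk output directly from ChaCha8
/// ```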
pub struct SecureRng {
    buf: [u64; BUF_LEN_U64],
    index: usize,
    internal: ChaCha8Djb,
}

impl SecureYARandGenerator for SecureRng {
    #[inline]
    fn fill_bytes(&mut self, dst: &mut [u8]) {
        // The `chachacha` crate provides a thoroughly tested and
        // extremely fast fill implementation.
        self.internal.fill(dst);
    }
}

impl YARandGenerator for SecureRng {
    #[inline]
    fn try_new() -> Result<Self, getrandom::Error> {
        // We randomize **all** bits of the matrix, even the counter.
        // If used as a cipher this approach is completely braindead,
        // but since this is exclusively for use as a CRNG it's fine.
        #[allow(invalid_value)]
        let mut state = unsafe { MaybeUninit::<[u8; SEED_LEN_U8]>::uninit().assume_init() };
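        // Note: `state` is uninitialized at this point; `fill` overwrites
        // every byte before anything reads it, and the `?` below returns
        // early on failure, so the buffer is never observed uninitialized.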
        fill(&mut state)?;
        let mut internal = ChaCha8Djb::from(state);
        let buf = internal.get_block_u64();
        let index = 0;
        Ok(Self {
            buf,
            index,
            internal,
        })
    }

    #[cfg_attr(feature = "inline", inline)]
    fn u64(&mut self) -> u64 {
        // TODO: This is the cold path, occurring only once every
        // 32 calls to `Self::u64`. If a cold/unlikely intrinsic ever
        // lands on stable, test the performance impact of applying it
        // to this branch.
        if self.index >= self.buf.len() {
            self.index = 0;
            self.internal.fill_block_u64(&mut self.buf);
        }
        // Bounds check is elided thanks to the above code.
        let result = self.buf[self.index];
        self.index += 1;
        result
    }
}
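
#[cfg(test)]
mod tests {
    use super::*;

    // A minimal sanity-check sketch (not from the original source): drawing
    // more than `BUF_LEN_U64` values forces at least one buffer refill,
    // exercising both the hot path and the cold (refill) branch of `u64`.
    #[test]
    fn u64_survives_buffer_refill() {
        let mut rng = SecureRng::try_new().expect("OS entropy should be available");
        // Consume two full buffers plus one extra value.
        let draws = BUF_LEN_U64 * 2 + 1;
        let mut any_nonzero = false;
        for _ in 0..draws {
            any_nonzero |= rng.u64() != 0;
        }
        // With overwhelming probability, at least one of ~65 random
        // 64-bit words is nonzero.
        assert!(any_nonzero);
    }
}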