use rand::{rngs::StdRng, CryptoRng, RngCore, SeedableRng};

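/// Returns a [`StdRng`] seeded with `0`, for deterministic tests.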
pub fn test_rng() -> StdRng {
    StdRng::seed_from_u64(0)
}

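/// Returns a [`StdRng`] seeded with the provided value, for deterministic tests.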
pub fn test_rng_seeded(seed: u64) -> StdRng {
    StdRng::seed_from_u64(seed)
}

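/// Computes the 64-bit FNV-1a hash of `bytes`, used to derive a deterministic
/// fallback seed for [`BytesRng`].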
fn fnv1a_hash(bytes: &[u8]) -> u64 {
    const FNV_OFFSET: u64 = 0xcbf29ce484222325;
    const FNV_PRIME: u64 = 0x100000001b3;

    let mut hash = FNV_OFFSET;
    for &byte in bytes {
        hash ^= byte as u64;
        hash = hash.wrapping_mul(FNV_PRIME);
    }
    hash
}

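/// An RNG that replays a caller-provided byte buffer and, once the buffer is
/// exhausted, falls back to a [`StdRng`] seeded from the FNV-1a hash of that
/// buffer. The entire output stream is therefore a deterministic function of
/// the input bytes.
///
/// Illustrative usage (a sketch; assumes the type is in scope):
///
/// ```ignore
/// use rand::RngCore;
///
/// let mut rng = BytesRng::new(vec![1, 2, 3, 4]);
/// let mut buf = [0u8; 4];
/// rng.fill_bytes(&mut buf); // served verbatim from the buffer
/// assert_eq!(buf, [1, 2, 3, 4]);
/// let _ = rng.next_u64(); // buffer exhausted: produced by the seeded fallback
/// ```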
pub struct BytesRng {
    /// Bytes to replay before switching to the fallback RNG.
    bytes: Vec<u8>,
    /// Number of buffered bytes consumed so far.
    offset: usize,
    /// Deterministic RNG used once the buffer is exhausted.
    fallback: StdRng,
}

impl BytesRng {
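    /// Creates a [`BytesRng`] that replays `bytes` and seeds the fallback RNG
    /// from the FNV-1a hash of the buffer.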
    pub fn new(bytes: Vec<u8>) -> Self {
        let fallback = StdRng::seed_from_u64(fnv1a_hash(&bytes));
        Self {
            bytes,
            offset: 0,
            fallback,
        }
    }

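    /// Returns the number of buffered bytes not yet consumed.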
    pub const fn remaining(&self) -> usize {
        self.bytes.len().saturating_sub(self.offset)
    }

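    /// Returns the number of buffered bytes consumed so far.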
    pub fn consumed(&self) -> usize {
        self.offset.min(self.bytes.len())
    }
}

impl RngCore for BytesRng {
    fn next_u32(&mut self) -> u32 {
        let mut buf = [0u8; 4];
        self.fill_bytes(&mut buf);
        u32::from_be_bytes(buf)
    }

    fn next_u64(&mut self) -> u64 {
        let mut buf = [0u8; 8];
        self.fill_bytes(&mut buf);
        u64::from_be_bytes(buf)
    }

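    // Serve from the buffer first; once it is exhausted, top up from the fallback RNG.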
    fn fill_bytes(&mut self, dest: &mut [u8]) {
        let from_buffer = dest.len().min(self.bytes.len().saturating_sub(self.offset));
        dest[..from_buffer].copy_from_slice(&self.bytes[self.offset..self.offset + from_buffer]);
        self.offset += from_buffer;
        if from_buffer < dest.len() {
            self.fallback.fill_bytes(&mut dest[from_buffer..]);
        }
    }

    fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand::Error> {
        self.fill_bytes(dest);
        Ok(())
    }
}

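// Output is fully determined by the input bytes; this marker only lets
// `BytesRng` satisfy `CryptoRng` bounds in tests and must not be relied on
// for security.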
impl CryptoRng for BytesRng {}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_empty_bytes() {
        let mut rng = BytesRng::new(vec![]);
        assert_eq!(rng.remaining(), 0);
        assert_eq!(rng.consumed(), 0);

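        // With no buffered bytes, all output comes from the fallback RNG.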
        let v1 = rng.next_u64();
        let v2 = rng.next_u64();
        assert_ne!(v1, v2);
    }

    #[test]
    fn test_consumes_bytes_in_order() {
        let bytes = vec![1, 2, 3, 4, 5, 6, 7, 8];
        let mut rng = BytesRng::new(bytes);

        assert_eq!(rng.remaining(), 8);
        assert_eq!(rng.consumed(), 0);

        let mut buf = [0u8; 4];
        rng.fill_bytes(&mut buf);
        assert_eq!(buf, [1, 2, 3, 4]);
        assert_eq!(rng.remaining(), 4);
        assert_eq!(rng.consumed(), 4);

        rng.fill_bytes(&mut buf);
        assert_eq!(buf, [5, 6, 7, 8]);
        assert_eq!(rng.remaining(), 0);
        assert_eq!(rng.consumed(), 8);
    }

    #[test]
    fn test_fallback_after_exhaustion() {
        let bytes = vec![1, 2, 3, 4];
        let mut rng = BytesRng::new(bytes.clone());

        let mut buf = [0u8; 4];
        rng.fill_bytes(&mut buf);
        assert_eq!(buf, [1, 2, 3, 4]);

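        // Buffer exhausted: this fill is served by the hash-seeded fallback RNG.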
        rng.fill_bytes(&mut buf);
        let first_fallback = buf;

        let mut rng2 = BytesRng::new(bytes);
        let mut buf2 = [0u8; 4];
        rng2.fill_bytes(&mut buf2);
        rng2.fill_bytes(&mut buf2);
        assert_eq!(first_fallback, buf2);
    }

    #[test]
    fn test_fallback_seed_from_hash() {
        let bytes1 = vec![1, 2, 3, 4];
        let bytes2 = vec![1, 2, 3, 5];

        let mut rng1 = BytesRng::new(bytes1);
        let mut rng2 = BytesRng::new(bytes2);

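        // Drain both buffers so the next values come from each fallback RNG.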
        let mut buf = [0u8; 4];
        rng1.fill_bytes(&mut buf);
        rng2.fill_bytes(&mut buf);

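        // Different input bytes hash to different fallback seeds.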
        assert_ne!(rng1.next_u64(), rng2.next_u64());
    }

    #[test]
    fn test_short_buffer_fallback_seed() {
        let bytes = vec![1, 2, 3];
        let mut rng = BytesRng::new(bytes);

        let mut buf = [0u8; 3];
        rng.fill_bytes(&mut buf);
        assert_eq!(buf, [1, 2, 3]);

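        // Only three bytes were buffered, so this u64 comes entirely from the fallback.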
        let v = rng.next_u64();
        assert_ne!(v, 0);
    }

    #[test]
    fn test_deterministic_with_same_input() {
        let bytes = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];

        let mut rng1 = BytesRng::new(bytes.clone());
        let mut rng2 = BytesRng::new(bytes);

        for _ in 0..100 {
            assert_eq!(rng1.next_u64(), rng2.next_u64());
        }
    }

    #[test]
    fn test_next_u32() {
        let bytes = vec![0x01, 0x02, 0x03, 0x04];
        let mut rng = BytesRng::new(bytes);

        let v = rng.next_u32();
        assert_eq!(v, u32::from_be_bytes([0x01, 0x02, 0x03, 0x04]));
    }

    #[test]
    fn test_next_u64() {
        let bytes = vec![0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08];
        let mut rng = BytesRng::new(bytes);

        let v = rng.next_u64();
        assert_eq!(
            v,
            u64::from_be_bytes([0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08])
        );
    }

    mod conformance {
        use super::*;
        use commonware_conformance::Conformance;

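        /// Conformance harness: derives the input buffer from `seed` and
        /// commits the first 64 bytes of [`BytesRng`] output.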
        struct BytesRngConformance;

        impl Conformance for BytesRngConformance {
            async fn commit(seed: u64) -> Vec<u8> {
                let mut rng = BytesRng::new(seed.to_be_bytes().to_vec());

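                // Emit 64 bytes: the first u64 replays the seed bytes, the rest
                // come from the hash-seeded fallback.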
                let mut output = Vec::with_capacity(64);
                for _ in 0..8 {
                    output.extend_from_slice(&rng.next_u64().to_be_bytes());
                }
                output
            }
        }

        commonware_conformance::conformance_tests! {
            BytesRngConformance => 1024,
        }
    }
}