1#![forbid(unsafe_code)]
2#![deny(clippy::all, clippy::cargo)]
3#![warn(clippy::pedantic, clippy::nursery)]
4#![allow(
5 clippy::module_name_repetitions,
6 clippy::doc_markdown,
7 clippy::missing_panics_doc
8)]
9#![doc = r#"
10# xoroshiro256** PRNG (fast, non-cryptographic)
11
A tiny, fast implementation of **xoroshiro256** without bit waste, intended for simulations,
games, and other **deterministic** workloads. This generator is **not
cryptographically secure**. If you need a CSPRNG, enable the `"crypto"`
feature and use `CryptoRng256` (ChaCha20).
16
17## Features
18
- `rand_core` — implements `rand_core::RngCore` and `SeedableRng`.
- `serde` — enables `Serialize`/`Deserialize` for `Xoroshiro256State`.
- `crypto` — provides `CryptoRng256` (ChaCha20) for cryptographic use.
22
23## Examples
24
25### Fast deterministic numbers (no features required)
26```rust
27use xoroshiro256_full::Xoroshiro256State;
28
29let mut rng = Xoroshiro256State::init([1, 2, 3, 4], 4);
30let x = rng.next_u64(); // fast, deterministic
31```
32
33### Integrate with `rand` ecosystem (enable `rand_core`)
34```rust
35# #[cfg(feature="rand_core")] {
36use rand_core::{RngCore, SeedableRng};
37use xoroshiro256_full::Xoroshiro256State;
38
39let mut rng = Xoroshiro256State::seed_from_u64(42);
40let _ = rng.next_u32();
41# }
42```
43
44### Cryptographically secure RNG (enable `crypto`)
45```rust
46# #[cfg(feature="crypto")] {
47use xoroshiro256_full::CryptoRng256;
48
49let mut crng = CryptoRng256::from_os();
50let x = crng.next_u64();
51# }
52```
53
54## Notes
55
56- This crate uses **wrapping arithmetic** to match the original reference.
57- Endianness: byte conversions use **little-endian** (`to_le_bytes`).
58
59### No bit waste (implementation detail)
60When the `rand_core` feature is enabled, this implementation avoids bit waste in two places:
61
62- `next_u32()` caches the unused 32 bits from a generated `u64` and returns them on the next call.
63- `fill_bytes()` caches any unused tail bytes (up to 7) from a generated `u64` and uses them at the start of the next call.
64
65These caches are **not** part of the algorithmic state and are skipped during `serde` serialization; only the 256-bit xoroshiro state is serialized.
66
67"#]
68
69#[cfg(feature = "rand_core")]
70use rand_core::{Error, RngCore, SeedableRng};
71
72#[cfg(feature = "serde")]
73use serde::{Deserialize, Serialize};
74
/// State of the xoroshiro256 generator: four 64-bit words plus, when the
/// `rand_core` feature is enabled, small caches that let `next_u32` and
/// `fill_bytes` reuse bits that would otherwise be discarded.
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Clone, Copy, Debug)]
pub struct Xoroshiro256State {
    /// The 256-bit algorithmic state (the only part that is serialized).
    parts: [u64; 4],

    /// Low 32 bits left over from the previous `next_u32` call, if any.
    /// Not algorithmic state; skipped by `serde`.
    #[cfg(feature = "rand_core")]
    #[cfg_attr(feature = "serde", serde(skip))]
    u32_spare: Option<u32>,
    /// Unused tail bytes from the last `fill_bytes` call (valid prefix only).
    #[cfg(feature = "rand_core")]
    #[cfg_attr(feature = "serde", serde(skip))]
    byte_spare: [u8; 8],
    /// Number of valid bytes at the front of `byte_spare` (0..=7).
    #[cfg(feature = "rand_core")]
    #[cfg_attr(feature = "serde", serde(skip))]
    byte_spare_len: u8,
}

impl Default for Xoroshiro256State {
    /// All-zero state with empty caches.
    ///
    /// NOTE: the all-zero state is a fixed point of the generator
    /// (it emits only zeros); use [`Xoroshiro256State::init`] or the
    /// seeding APIs to obtain a usable state.
    #[inline]
    fn default() -> Self {
        Self {
            parts: [0u64; 4],
            #[cfg(feature = "rand_core")]
            u32_spare: None,
            #[cfg(feature = "rand_core")]
            byte_spare: [0u8; 8],
            #[cfg(feature = "rand_core")]
            byte_spare_len: 0,
        }
    }
}

impl Xoroshiro256State {
    /// Builds a state from up to four key words; words beyond
    /// `key_length` are derived from the previous word with an LCG step.
    /// An all-zero result is replaced by a fixed non-zero constant so the
    /// generator cannot start at its zero fixed point.
    ///
    /// # Panics
    /// Panics unless `1 <= key_length <= 4`.
    #[must_use]
    pub fn init(init_key: [u64; 4], key_length: usize) -> Self {
        assert!((1..=4).contains(&key_length), "key_length must be 1..=4");

        let mut parts = [0u64; 4];
        let mut prev = 0u64;
        for (idx, word) in parts.iter_mut().enumerate() {
            *word = if idx < key_length {
                init_key[idx]
            } else {
                // LCG derivation for unspecified words (wrapping on purpose,
                // matching the reference implementation).
                prev.wrapping_mul(6_364_136_223_846_793_005_u64)
                    .wrapping_add(1)
            };
            prev = *word;
        }

        // The all-zero state would generate only zeros forever; escape it.
        if parts == [0u64; 4] {
            parts[0] = 0x9E37_79B9_7F4A_7C15;
        }

        Self {
            parts,
            ..Self::default()
        }
    }

    /// Advances the state one step and returns the next 64-bit output
    /// (scrambler: `rotl(s1 * 5, 7) * 9`, all multiplies wrapping).
    #[inline]
    pub const fn next_u64(&mut self) -> u64 {
        let s = &mut self.parts;
        let output = s[1].wrapping_mul(5).rotate_left(7).wrapping_mul(9);
        let shifted = s[1] << 17;

        // State transition — statement order is significant.
        s[2] ^= s[0];
        s[3] ^= s[1];
        s[1] ^= s[2];
        s[0] ^= s[3];
        s[2] ^= shifted;
        s[3] = s[3].rotate_left(45);

        output
    }

    /// Writes the current 256-bit state into the first 32 bytes of `buf`,
    /// word by word in little-endian order. Does not advance the generator.
    ///
    /// # Panics
    /// Panics when `buf` is shorter than 32 bytes.
    #[inline]
    pub fn genrand_uint256_to_buf(&self, buf: &mut [u8]) {
        assert!(buf.len() >= 32, "Buffer must have at least 32 bytes");
        for (dst, word) in buf.chunks_exact_mut(8).zip(self.parts.iter()) {
            dst.copy_from_slice(&word.to_le_bytes());
        }
    }
}
195
#[cfg(feature = "rand_core")]
impl RngCore for Xoroshiro256State {
    /// Returns 32 random bits without wasting entropy: each generated
    /// `u64` yields two `u32`s — the high half immediately, the low
    /// half cached for the next call.
    #[inline]
    fn next_u32(&mut self) -> u32 {
        // Serve the cached low half from the previous call, if present.
        if let Some(lo) = self.u32_spare.take() {
            return lo;
        }
        let x = self.next_u64();
        let hi = (x >> 32) as u32;
        let lo = (x & 0xFFFF_FFFF) as u32;
        // Stash the low half; return the high half now.
        self.u32_spare = Some(lo);
        hi
    }

    #[inline]
    fn next_u64(&mut self) -> u64 {
        // Fully-qualified call to the inherent method, so this trait
        // method does not recurse into itself.
        Self::next_u64(self)
    }

    /// Fills `dest` with random bytes, first draining up to 7 bytes
    /// cached from a previous call so no generated entropy is discarded.
    fn fill_bytes(&mut self, dest: &mut [u8]) {
        let mut written = 0;

        // Step 1: drain the spare-byte cache.
        if self.byte_spare_len > 0 {
            let n = self.byte_spare_len as usize;
            let take = n.min(dest.len());
            dest[..take].copy_from_slice(&self.byte_spare[..take]);
            if take < n {
                // `dest` was smaller than the cache: shift the unconsumed
                // tail to the front of the cache and finish early.
                self.byte_spare.copy_within(take..n, 0);
                self.byte_spare_len = u8::try_from(n - take).expect("spare len <= 8");
                return;
            }
            // Cache fully consumed.
            self.byte_spare_len = 0;
            written += take;
        }

        // Step 2: fill every whole 8-byte chunk straight from fresh
        // u64s, little-endian (matching the crate's byte convention).
        let mut chunks = dest[written..].chunks_exact_mut(8);
        for chunk in &mut chunks {
            chunk.copy_from_slice(&self.next_u64().to_le_bytes());
        }

        // Step 3: a sub-8-byte tail takes the leading bytes of one more
        // u64; the unused remainder is cached for the next call.
        let rem = chunks.into_remainder();
        if !rem.is_empty() {
            let r = self.next_u64().to_le_bytes();
            let need = rem.len();
            rem.copy_from_slice(&r[..need]);
            let leftover = 8 - need;
            if leftover > 0 {
                self.byte_spare[..leftover].copy_from_slice(&r[need..]);
                self.byte_spare_len = u8::try_from(leftover).expect("leftover <= 8");
            }
        }
    }

    /// Infallible for this in-memory PRNG; always succeeds.
    fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {
        self.fill_bytes(dest);
        Ok(())
    }
}
261
#[cfg(feature = "rand_core")]
impl SeedableRng for Xoroshiro256State {
    type Seed = [u8; 32];

    /// Builds a generator from 32 seed bytes, read as four little-endian
    /// `u64` words (the same layout `genrand_uint256_to_buf` writes).
    ///
    /// An all-zero seed is mapped to a fixed non-zero state by `init`,
    /// as `SeedableRng` requires the generator to handle zero seeds.
    fn from_seed(seed: Self::Seed) -> Self {
        let mut parts = [0u64; 4];
        // Iterate words and 8-byte chunks in lockstep instead of
        // indexing (clippy::needless_range_loop, enabled via pedantic).
        for (slot, chunk) in parts.iter_mut().zip(seed.chunks_exact(8)) {
            *slot = u64::from_le_bytes(chunk.try_into().expect("chunks_exact yields 8-byte slices"));
        }
        Self::init(parts, 4)
    }

    /// Expands one `u64` into a full 256-bit state using an xorshift64*
    /// scrambler so that nearby inputs produce dissimilar states.
    fn seed_from_u64(mut x: u64) -> Self {
        let mut parts = [0u64; 4];
        for slot in &mut parts {
            // xorshift64* step (shifts 12/25/27 plus the standard
            // finalizing multiplier).
            x ^= x >> 12;
            x ^= x << 25;
            x ^= x >> 27;
            *slot = x.wrapping_mul(2_685_821_657_736_338_717);
        }
        Self::init(parts, 4)
    }
}
288
289#[cfg(feature = "crypto")]
308pub use crypto_rng::CryptoRng256;
309
#[cfg(feature = "crypto")]
mod crypto_rng {
    use getrandom::getrandom;
    use rand_chacha::ChaCha20Rng;
    use rand_core::{CryptoRng, RngCore, SeedableRng};

    /// Cryptographically secure RNG backed by ChaCha20.
    #[derive(Clone, Debug)]
    pub struct CryptoRng256 {
        pub(crate) inner: ChaCha20Rng,
    }

    impl CryptoRng256 {
        /// Creates a generator seeded with 32 bytes of OS entropy.
        ///
        /// Panics if the operating system cannot supply randomness.
        #[must_use]
        pub fn from_os() -> Self {
            let mut seed = <ChaCha20Rng as SeedableRng>::Seed::default();
            getrandom(&mut seed).expect("OS randomness unavailable");
            Self {
                inner: ChaCha20Rng::from_seed(seed),
            }
        }

        /// Creates a deterministic generator from a fixed 32-byte seed.
        #[must_use]
        pub fn from_seed(seed: [u8; 32]) -> Self {
            Self {
                inner: ChaCha20Rng::from_seed(seed),
            }
        }

        /// Fills `buf` with random bytes (inherent convenience wrapper
        /// so callers don't need the `RngCore` trait in scope).
        #[inline]
        pub fn fill_bytes(&mut self, buf: &mut [u8]) {
            self.inner.fill_bytes(buf);
        }

        /// Returns the next random `u64` (inherent convenience wrapper).
        #[inline]
        pub fn next_u64(&mut self) -> u64 {
            self.inner.next_u64()
        }
    }

    impl RngCore for CryptoRng256 {
        #[inline]
        fn next_u32(&mut self) -> u32 {
            self.inner.next_u32()
        }

        #[inline]
        fn next_u64(&mut self) -> u64 {
            self.inner.next_u64()
        }

        fn fill_bytes(&mut self, dest: &mut [u8]) {
            self.inner.fill_bytes(dest);
        }

        fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand_core::Error> {
            self.inner.try_fill_bytes(dest)
        }
    }

    // Marker trait: everything delegates to ChaCha20, which is a CSPRNG.
    impl CryptoRng for CryptoRng256 {}
}
371
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashSet;

    // ---- construction / init ------------------------------------------

    // One key word must seed the remaining words via the LCG derivation.
    #[test]
    fn init_1_key_derives_rest_and_nonzero() {
        let s = Xoroshiro256State::init([0xDEAD_BEEF_CAFE_BABE, 0, 0, 0], 1);
        assert!(s.parts.iter().any(|&p| p != 0));
    }

    #[test]
    #[should_panic(expected = "key_length must be 1..=4")]
    fn init_panics_on_zero_length() {
        let _ = Xoroshiro256State::init([0; 4], 0);
    }

    #[test]
    #[should_panic(expected = "key_length must be 1..=4")]
    fn init_panics_on_too_long() {
        let _ = Xoroshiro256State::init([0; 4], 5);
    }

    // The all-zero state is a fixed point of the generator; init must
    // replace it with a non-zero state.
    #[test]
    fn init_all_zero_forced_nonzero() {
        let s = Xoroshiro256State::init([0, 0, 0, 0], 4);
        assert!(s.parts.iter().any(|&p| p != 0));
    }

    // ---- core generation ----------------------------------------------

    #[test]
    fn next_u64_progresses() {
        let mut r = Xoroshiro256State::init([1, 2, 3, 4], 4);
        let a = r.next_u64();
        let b = r.next_u64();
        assert_ne!(a, b);
    }

    // Identical seeds must produce identical sequences.
    #[test]
    fn determinism_same_seed_same_sequence() {
        let seed = [
            0xAABB_CCDD_EEFF_0011,
            0x1122_3344_5566_7788,
            0x99AA_BBCC_DDEE_FF00,
            0x1234_5678_90AB_CDEF,
        ];
        let mut r1 = Xoroshiro256State::init(seed, 4);
        let mut r2 = Xoroshiro256State::init(seed, 4);
        for _ in 0..64 {
            assert_eq!(r1.next_u64(), r2.next_u64());
        }
    }

    // ---- state dump ----------------------------------------------------

    #[test]
    #[should_panic(expected = "Buffer must have at least 32 bytes")]
    fn dump_panics_on_small_buffer() {
        let r = Xoroshiro256State::init([1, 2, 3, 4], 4);
        let mut buf = [0u8; 31];
        r.genrand_uint256_to_buf(&mut buf);
    }

    #[test]
    fn dump_writes_exact_32_bytes_little_endian() {
        let r = Xoroshiro256State::init([1, 2, 3, 4], 4);
        let mut buf = [0u8; 32];
        r.genrand_uint256_to_buf(&mut buf);
        // Each state word must appear in order, little-endian.
        for i in 0..4 {
            let mut le = [0u8; 8];
            le.copy_from_slice(&buf[i * 8..(i + 1) * 8]);
            assert_eq!(u64::from_le_bytes(le), r.parts[i]);
        }
    }

    // ---- rand_core integration ----------------------------------------

    #[cfg(feature = "rand_core")]
    #[test]
    fn rand_core_fill_bytes_various_lengths() {
        use rand_core::RngCore;
        let mut r = Xoroshiro256State::seed_from_u64(42);
        // Lengths straddling the 8-byte chunk boundary exercise both the
        // full-chunk path and the spare-byte cache paths.
        for &len in &[0usize, 1, 7, 8, 9, 15, 16, 31, 32, 33, 64, 65] {
            let mut buf = vec![0u8; len];
            r.fill_bytes(&mut buf);
            if len > 0 {
                assert!(buf.iter().any(|&b| b != 0));
            }
        }
    }

    #[cfg(feature = "rand_core")]
    #[test]
    fn seedable_from_seed_roundtrip() {
        let seed = [7u8; 32];
        let mut r1 = Xoroshiro256State::from_seed(seed);
        let mut r2 = Xoroshiro256State::from_seed(seed);
        for _ in 0..16 {
            assert_eq!(r1.next_u64(), r2.next_u64());
        }
    }

    #[cfg(feature = "rand_core")]
    #[test]
    fn seed_from_u64_reproducible() {
        let mut r1 = Xoroshiro256State::seed_from_u64(123_456_789);
        let mut r2 = Xoroshiro256State::seed_from_u64(123_456_789);
        for _ in 0..16 {
            assert_eq!(r1.next_u64(), r2.next_u64());
        }
    }

    // ---- statistical smoke test ---------------------------------------

    // Weak sanity check: 100 draws should be (nearly) collision-free.
    #[test]
    fn basic_spread_low_collision() {
        let mut r = Xoroshiro256State::init([1, 2, 3, 4], 4);
        let mut set = HashSet::new();
        for _ in 0..100 {
            set.insert(r.next_u64());
        }
        assert!(set.len() > 95, "too many collisions for 100 samples");
    }

    // ---- crypto feature ------------------------------------------------

    #[cfg(feature = "crypto")]
    #[test]
    fn crypto_rng_basic() {
        let mut r = super::CryptoRng256::from_os();
        let a = r.next_u64();
        let b = r.next_u64();
        assert_ne!(a, b);
        let mut buf = [0u8; 64];
        r.fill_bytes(&mut buf);
        assert!(buf.iter().any(|&b| b != 0));
    }

    #[cfg(feature = "crypto")]
    #[test]
    fn crypto_rng_reproducible_seed() {
        let seed = [7u8; 32];
        let mut r1 = super::CryptoRng256::from_seed(seed);
        let mut r2 = super::CryptoRng256::from_seed(seed);
        for _ in 0..8 {
            assert_eq!(r1.next_u64(), r2.next_u64());
        }
    }

    // ---- no-bit-waste caches ------------------------------------------

    // Two next_u32 calls must reconstruct exactly one next_u64 output
    // (high half first, cached low half second).
    #[cfg(feature = "rand_core")]
    #[test]
    fn next_u32_consumes_full_u64_in_pairs() {
        use rand_core::RngCore;
        let mut a = Xoroshiro256State::seed_from_u64(9999);
        let mut b = Xoroshiro256State::seed_from_u64(9999);
        let x = a.next_u64();
        let hi = u64::from(b.next_u32());
        let lo = u64::from(b.next_u32());
        assert_eq!((hi << 32) | lo, x);
    }

    // A 3-byte fill followed by a 5-byte fill must consume exactly one
    // u64 in total: the 5 leftover bytes come from the spare cache.
    #[cfg(feature = "rand_core")]
    #[test]
    fn fill_bytes_tail_is_reused_across_calls() {
        use rand_core::RngCore;
        let mut a = Xoroshiro256State::seed_from_u64(2024);
        let mut b = Xoroshiro256State::seed_from_u64(2024);
        let mut buf8 = [0u8; 8];
        b.fill_bytes(&mut buf8);

        let mut buf3 = [0u8; 3];
        a.fill_bytes(&mut buf3);
        let mut buf5 = [0u8; 5];
        a.fill_bytes(&mut buf5);
        let mut joined = [0u8; 8];
        joined[..3].copy_from_slice(&buf3);
        joined[3..].copy_from_slice(&buf5);

        assert_eq!(joined, buf8);
    }
}
562}