1#![doc = include_str!("../README.md")]
2#![no_std]
3
4#[cfg(feature = "std")]
5extern crate std;
6
7use core::fmt::Debug;
8use core::fmt::Formatter;
9use core::fmt;
10use core::hint::cold_path;
11use core::hint::select_unpredictable;
12use core::mem::MaybeUninit;
13use core::num::NonZeroU128;
14use core::num::NonZeroU32;
15use core::num::NonZeroU64;
16
/// A small pseudorandom number generator with 128 bits of nonzero state.
///
/// The state is a `NonZeroU128`, so `Option<Rng>` benefits from the niche and
/// the all-zero "stuck" state is unrepresentable by construction.
// NOTE(review): `Clone` but deliberately not `Copy`, presumably to avoid
// accidentally duplicating the stream by value — confirm intent.
#[derive(Clone)]
pub struct Rng { state: NonZeroU128 }
20
/// Packs two 64-bit words into a `u128`: `x` becomes the low half and `y`
/// the high half.
#[inline(always)]
const fn concat(x: u64, y: u64) -> u128 {
  // The two operands occupy disjoint bit ranges, so `|` here is equivalent
  // to the xor formulation.
  (x as u128) | ((y as u128) << 64)
}
25
/// Returns the low 64 bits of `x`.
#[inline(always)]
const fn lo(x: u128) -> u64 {
  // Masking before the narrowing cast makes the truncation explicit; the
  // result is identical to a plain `x as u64`.
  (x & (u64::MAX as u128)) as u64
}
30
/// Returns the high 64 bits of `x`.
#[inline(always)]
const fn hi(x: u128) -> u64 {
  (x >> u64::BITS) as u64
}
35
/// Arithmetic (sign-extending) shift right of `value` by `shift` bits,
/// performed on the bits reinterpreted as `i64`.
///
/// As with any Rust shift, `shift >= 64` overflows (panics in debug builds);
/// callers in this file use small constants.
#[inline(always)]
const fn asr(value: u64, shift: usize) -> u64 {
  let signed = value as i64;
  (signed >> shift) as u64
}
40
/// Logical shift left of `value` by `shift` bits.
///
/// Exists alongside [`asr`] so the shift direction and kind read explicitly
/// at the call site. `shift >= 64` overflows (panics in debug builds).
#[inline(always)]
const fn lsl(value: u64, shift: usize) -> u64 {
  value << shift
}
45
/// Returns the high 64 bits of the full 128-bit product `a * b`.
#[inline(always)]
const fn mulhi(a: u64, b: u64) -> u64 {
  // The widening multiply cannot overflow: (2^64 - 1)^2 < 2^128.
  let wide = (a as u128) * (b as u128);
  (wide >> 64) as u64
}
50
impl Rng {
  /// Creates a generator from a 128-bit nonzero seed, scrambling it so that
  /// similar seeds yield unrelated initial states.
  pub const fn new(seed: NonZeroU128) -> Self {
    // Odd multiplier: multiplication by an odd constant is a bijection on
    // u128, and `swap_bytes` is a bijection, so the whole three-round mix is
    // invertible.
    const M: u128 = 0x93c4_67e3_7db0_c7a4_d1be_3f81_0152_cb57;
    let x = seed.get();
    let x = x.wrapping_mul(M);
    let x = x.swap_bytes();
    let x = x.wrapping_mul(M);
    let x = x.swap_bytes();
    let x = x.wrapping_mul(M);
    // SAFETY: the mix above is a composition of bijections, so `x` is
    // nonzero because `seed` is nonzero.
    let s = unsafe { NonZeroU128::new_unchecked(x) };
    Self { state: s }
  }

  /// Creates a generator from a 64-bit seed (any value, including zero).
  pub const fn from_u64(seed: u64) -> Self {
    // High word is fixed to 1, so the packed value is never zero.
    let s = concat(seed, 1);
    // SAFETY: the high 64 bits of `s` are 1, so `s` is nonzero.
    let s = unsafe { NonZeroU128::new_unchecked(s) };
    Self::new(s)
  }

  /// Returns the current internal state.
  ///
  /// Together with [`Rng::from_state`] this allows saving and restoring the
  /// generator exactly (see the `thread_local` module).
  #[inline(always)]
  pub const fn state(&self) -> NonZeroU128 {
    self.state
  }

  /// Rebuilds a generator from a previously captured state, with no mixing.
  #[inline(always)]
  pub const fn from_state(state: NonZeroU128) -> Self {
    Self { state }
  }

  /// Creates a generator seeded from the operating system's entropy source.
  ///
  /// # Panics
  ///
  /// Panics if `getrandom::fill` fails.
  #[cfg(feature = "getrandom")]
  #[inline(never)]
  #[cold]
  pub fn from_operating_system() -> Self {
    let mut buf = [0; 16];
    getrandom::fill(&mut buf).expect("getrandom::fill failed!");
    // Forcing bit 0 guarantees a nonzero state (at the cost of fixing that
    // one bit of the OS-provided seed).
    let s = 1 | u128::from_le_bytes(buf);
    // SAFETY: bit 0 of `s` is set, so `s` is nonzero.
    let s = unsafe { NonZeroU128::new_unchecked(s) };
    Self { state: s }
  }

  /// Returns `true` with probability approximately `p`.
  ///
  /// The float-to-int cast saturates, so `p >= 1.0` always yields `true`
  /// and `p <= 0.0` (or NaN, which casts to 0) always yields `false`.
  #[inline(always)]
  pub fn bernoulli(&mut self, p: f64) -> bool {
    // 0x43f0_0000_0000_0000 is the bit pattern of 2^64, so `n` is the
    // threshold floor(p * 2^64), saturated to the u64 range.
    let n = (p * f64::from_bits(0x43f0_0000_0000_0000)) as u64;
    let x = self.u64();
    // The `n == u64::MAX` test makes the saturated p >= 1.0 case certain.
    x < n || n == u64::MAX
  }

  /// Returns a pseudorandom `bool`.
  #[inline(always)]
  pub fn bool(&mut self) -> bool {
    // Uses the sign bit of the next output word.
    self.i64() < 0
  }

  /// Returns a pseudorandom `i32` (truncated from the next `u64`).
  #[inline(always)]
  pub fn i32(&mut self) -> i32 {
    self.u64() as i32
  }

  /// Returns a pseudorandom `i64`.
  #[inline(always)]
  pub fn i64(&mut self) -> i64 {
    self.u64() as i64
  }

  /// Returns a pseudorandom `i128`.
  #[inline(always)]
  pub fn i128(&mut self) -> i128 {
    self.u128() as i128
  }

  /// Returns a pseudorandom `u32` (truncated from the next `u64`).
  #[inline(always)]
  pub fn u32(&mut self) -> u32 {
    self.u64() as u32
  }

  /// Returns the next pseudorandom `u64` and advances the state.
  ///
  /// This is the core step; every other sampling method is built on it.
  #[inline(always)]
  pub fn u64(&mut self) -> u64 {
    let s = self.state.get();
    let x = lo(s);
    let y = hi(s);
    // State transition: xors of shifted halves — linear over GF(2)
    // (sign extension in `asr` just copies bit 63, which is linear).
    let s = concat(y ^ asr(x, 4), x ^ lsl(y, 7));
    // SAFETY: relies on the transition above being an invertible linear
    // map, so a nonzero state never steps to zero. NOTE(review): invariant
    // asserted by the original author; not re-verified here.
    let s = unsafe { NonZeroU128::new_unchecked(s) };
    self.state = s;
    // Nonlinear output mix of the previous state words: y plus the low
    // half of x*x, xored with the high half of x*x.
    y.wrapping_add(x.wrapping_mul(x)) ^ mulhi(x, x)
  }

  /// Returns a pseudorandom `u128` built from two successive `u64` outputs
  /// (first output in the low half).
  #[inline(always)]
  pub fn u128(&mut self) -> u128 {
    let x = self.u64();
    let y = self.u64();
    concat(x, y)
  }

  /// Returns a pseudorandom nonzero `u32` by rejection of zero.
  #[inline(always)]
  pub fn non_zero_u32(&mut self) -> NonZeroU32 {
    loop {
      if let Some(x) = NonZeroU32::new(self.u32()) {
        break x
      }
    }
  }

  /// Returns a pseudorandom nonzero `u64` by rejection of zero.
  #[inline(always)]
  pub fn non_zero_u64(&mut self) -> NonZeroU64 {
    loop {
      if let Some(x) = NonZeroU64::new(self.u64()) {
        break x
      }
    }
  }

  /// Returns a pseudorandom nonzero `u128` by rejection of zero.
  #[inline(always)]
  pub fn non_zero_u128(&mut self) -> NonZeroU128 {
    loop {
      if let Some(x) = NonZeroU128::new(self.u128()) {
        break x
      }
    }
  }

  /// Returns a uniform integer in `0 ..= n` (inclusive upper bound).
  #[inline(always)]
  pub fn bounded_u32(&mut self, n: u32) -> u32 {
    let x = self.u64();
    // Modulus m = n + 1; computed in u64, so it cannot overflow here.
    let n = n as u64;
    let m = n + 1;
    // a = floor(x * m / 2^64) is the candidate; b is the fractional part
    // scaled by 2^64.
    let a = mulhi(x, m);
    let b = x.wrapping_mul(m);
    if b.overflowing_add(n).1 {
      // Ambiguous fraction: extend it with further random words until the
      // rounding decision resolves. NOTE(review): this looks like an
      // arbitrary-precision extension of Lemire's multiply-shift bounded
      // reduction (making the result exactly unbiased) — confirm against
      // the crate's design notes.
      debug_assert!(m != 0);
      let mut r = b;
      loop {
        let y = self.u64();
        let c = mulhi(y, m);
        let d = y.wrapping_mul(m);
        let s = r.overflowing_add(c);
        let w = s.0;
        let p = s.1;
        // `p` is the carry into the candidate: the true value rounds up
        // by at most one.
        if w != u64::MAX { break (a + p as u64) as u32 }
        r = d;
        cold_path();
      }
    } else {
      a as u32
    }
  }

  /// Returns a uniform integer in `0 ..= n` (inclusive upper bound).
  ///
  /// `n == u64::MAX` is handled specially: the raw output word is returned.
  #[inline(always)]
  pub fn bounded_u64(&mut self, n: u64) -> u64 {
    let x = self.u64();
    // m = n + 1 wraps to 0 exactly when n == u64::MAX (full range).
    let m = n.wrapping_add(1);
    let a = mulhi(x, m);
    let b = x.wrapping_mul(m);
    // Full-range case: pass the raw word through instead of `a`.
    let u = select_unpredictable(m == 0, x, a);
    if b.overflowing_add(n).1 {
      // Same fraction-extension rejection loop as `bounded_u32`; when
      // m == 0 this branch is unreachable because b + n cannot overflow.
      debug_assert!(m != 0);
      let mut r = b;
      loop {
        let y = self.u64();
        let c = mulhi(y, m);
        let d = y.wrapping_mul(m);
        let s = r.overflowing_add(c);
        let w = s.0;
        let p = s.1;
        if w != u64::MAX { break a + p as u64 }
        r = d;
        cold_path();
      }
    } else {
      u
    }
  }

  /// Returns a uniform integer in `0 ..= n`, dispatching on pointer width.
  #[inline(always)]
  pub fn bounded_usize(&mut self, n: usize) -> usize {
    match const { usize::BITS } {
      32 => self.bounded_u32(n as u32) as usize,
      64 => self.bounded_u64(n as u64) as usize,
      // Only 32- and 64-bit targets are supported.
      _ => unimplemented!(),
    }
  }

  /// Returns a uniform integer in `a ..= b` (inclusive), assuming `a <= b`.
  // NOTE(review): for a > b the wrapping arithmetic samples the wrapped
  // interval instead of panicking — confirm that is the intended contract.
  #[inline(always)]
  pub fn range_i32(&mut self, a: i32, b: i32) -> i32 {
    // Two's-complement reinterpretation makes the unsigned version correct
    // for signed endpoints as well.
    self.range_u32(a as u32, b as u32) as i32
  }

  /// Returns a uniform integer in `a ..= b` (inclusive), assuming `a <= b`.
  #[inline(always)]
  pub fn range_i64(&mut self, a: i64, b: i64) -> i64 {
    self.range_u64(a as u64, b as u64) as i64
  }

  /// Returns a uniform integer in `a ..= b` (inclusive), assuming `a <= b`.
  #[inline(always)]
  pub fn range_isize(&mut self, a: isize, b: isize) -> isize {
    self.range_usize(a as usize, b as usize) as isize
  }

  /// Returns a uniform integer in `a ..= b` (inclusive), assuming `a <= b`.
  #[inline(always)]
  pub fn range_u32(&mut self, a: u32, b: u32) -> u32 {
    // Offset a uniform draw from 0 ..= (b - a).
    a.wrapping_add(self.bounded_u32(b.wrapping_sub(a)))
  }

  /// Returns a uniform integer in `a ..= b` (inclusive), assuming `a <= b`.
  #[inline(always)]
  pub fn range_u64(&mut self, a: u64, b: u64) -> u64 {
    a.wrapping_add(self.bounded_u64(b.wrapping_sub(a)))
  }

  /// Returns a uniform integer in `a ..= b` (inclusive), assuming `a <= b`.
  #[inline(always)]
  pub fn range_usize(&mut self, a: usize, b: usize) -> usize {
    a.wrapping_add(self.bounded_usize(b.wrapping_sub(a)))
  }

  /// Returns a pseudorandom `f32` in `[0.0, 1.0]`.
  #[inline(always)]
  pub fn f32(&mut self) -> f32 {
    let x = self.i64();
    // 0x2000_0000 is the bit pattern of 2^-63; |i64| <= 2^63, so the
    // scaled magnitude is at most 1.0.
    let x = f32::from_bits(0x2000_0000) * (x as f32);
    x.abs()
  }

  /// Returns a pseudorandom `f64` in `[0.0, 1.0]`.
  #[inline(always)]
  pub fn f64(&mut self) -> f64 {
    let x = self.i64();
    // 0x3c00_0000_0000_0000 is the bit pattern of 2^-63.
    let x = f64::from_bits(0x3c00_0000_0000_0000) * (x as f64);
    x.abs()
  }

  /// Returns a pseudorandom `f32` in `[-1.0, 1.0]` ("bi-unit" interval).
  #[inline(always)]
  pub fn biunit_f32(&mut self) -> f32 {
    let x = self.i64();
    // (x >> 1) is in [-2^62, 2^62); adding the low bit back extends the
    // reach to [-2^62, 2^62], which scales exactly onto [-1, 1].
    let x = (x & 1) + (x >> 1);
    // 0x2080_0000 is the bit pattern of 2^-62.
    f32::from_bits(0x2080_0000) * (x as f32)
  }

  /// Returns a pseudorandom `f64` in `[-1.0, 1.0]` ("bi-unit" interval).
  #[inline(always)]
  pub fn biunit_f64(&mut self) -> f64 {
    let x = self.i64();
    let x = (x & 1) + (x >> 1);
    // 0x3c10_0000_0000_0000 is the bit pattern of 2^-62.
    f64::from_bits(0x3c10_0000_0000_0000) * (x as f64)
  }

  /// Fills `len` bytes at `dst` with pseudorandom data.
  ///
  /// Caller guarantees `dst` is valid for `len` bytes of writes (see
  /// `fill_unchecked`). The 17-byte loop threshold ensures at least one
  /// byte remains for the final 1..=8-byte tail write.
  #[inline(always)]
  unsafe fn fill_unchecked_inlined(&mut self, dst: *mut u8, len: usize) {
    let mut p = dst;
    let mut n = len;
    if n == 0 { return }
    // Bulk phase: two 8-byte stores per iteration while more than 16
    // bytes (plus the reserved tail byte) remain.
    while n >= 17 {
      let x = self.u64().to_le_bytes();
      let y = self.u64().to_le_bytes();
      unsafe { p.copy_from_nonoverlapping(&raw const x as _, 8) };
      p = unsafe { p.add(8) };
      n = n - 8;
      unsafe { p.copy_from_nonoverlapping(&raw const y as _, 8) };
      p = unsafe { p.add(8) };
      n = n - 8;
    }
    // Here 1 <= n <= 16; one more 8-byte store brings n into 1 ..= 8.
    if n >= 9 {
      let x = self.u64().to_le_bytes();
      unsafe { p.copy_from_nonoverlapping(&raw const x as _, 8) };
      p = unsafe { p.add(8) };
      n = n - 8;
    }
    // Tail: write the remaining 1 ..= 8 bytes from one output word.
    let x = self.u64().to_le_bytes();
    match n {
      1 => unsafe { p.copy_from_nonoverlapping(&raw const x as _, 1) },
      2 => unsafe { p.copy_from_nonoverlapping(&raw const x as _, 2) },
      3 => unsafe { p.copy_from_nonoverlapping(&raw const x as _, 3) },
      4 => unsafe { p.copy_from_nonoverlapping(&raw const x as _, 4) },
      5 => unsafe { p.copy_from_nonoverlapping(&raw const x as _, 5) },
      6 => unsafe { p.copy_from_nonoverlapping(&raw const x as _, 6) },
      7 => unsafe { p.copy_from_nonoverlapping(&raw const x as _, 7) },
      8 => unsafe { p.copy_from_nonoverlapping(&raw const x as _, 8) },
      // n is 1 ..= 8 by the arithmetic above.
      _ => unreachable!(),
    }
  }

  /// Fills the slice with pseudorandom bytes.
  pub fn fill(&mut self, dst: &mut [u8]) {
    let n = dst.len();
    // SAFETY: the pointer and length come from a valid mutable slice.
    unsafe { self.fill_unchecked(&raw mut *dst as _, n) };
  }

  /// Fills `len` bytes at `dst` with pseudorandom bytes.
  ///
  /// # Safety
  ///
  /// `dst` must be valid for writes of `len` bytes.
  pub unsafe fn fill_unchecked(&mut self, dst: *mut u8, len: usize) {
    // Out-of-line entry point; the always-inline body is shared with the
    // other fill helpers.
    unsafe { self.fill_unchecked_inlined(dst, len) };
  }

  /// Fills an uninitialized buffer with pseudorandom bytes and returns the
  /// initialized slice.
  pub fn fill_uninit<'a>(&mut self, dst: &'a mut [MaybeUninit<u8>]) -> &'a mut [u8] {
    let n = dst.len();
    // SAFETY: `MaybeUninit<u8>` has the same layout as `u8`, and the slice
    // is valid for `n` writes.
    unsafe { self.fill_unchecked(&raw mut *dst as _, n) };
    // SAFETY: every byte was just written by `fill_unchecked`.
    unsafe { dst.assume_init_mut() }
  }

  /// Returns an array of `N` pseudorandom bytes.
  pub fn byte_array<const N: usize>(&mut self) -> [u8; N] {
    let mut buf = [0; N];
    // SAFETY: `buf` is a local array valid for `N` byte writes.
    unsafe { self.fill_unchecked_inlined(&raw mut buf as _, N) };
    buf
  }

  /// Shuffles the slice in place (Fisher–Yates: each `i` swaps with a
  /// uniform `j` in `0 ..= i`, yielding a uniform permutation).
  pub fn shuffle<T>(&mut self, slice: &mut [T]) {
    let n = slice.len();
    if n >= 2 {
      let p = &raw mut *slice as *mut T;
      for i in 1 .. n {
        // `bounded_usize(i)` is inclusive, so j may equal i (no-op swap).
        let j = self.bounded_usize(i);
        // SAFETY: i < n and j <= i < n, so both offsets are in bounds.
        unsafe { p.add(i).swap(p.add(j)) };
      }
    }
  }
}
508
509impl Debug for Rng {
510 fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
511 f.debug_struct("Rng").finish_non_exhaustive()
512 }
513}
514
// `rand_core` interop: this generator is infallible, so every `try_*`
// method simply wraps the corresponding inherent method in `Ok`.
// NOTE(review): the trait/type names here (`TryRng`, `Infallible`) don't
// match published rand_core 0.9 (`TryRngCore`, `core::convert::Infallible`)
// — confirm against the rand_core version pinned in Cargo.toml.
#[cfg(feature = "rand_core")]
impl rand_core::TryRng for Rng {
  type Error = rand_core::Infallible;

  #[inline(always)]
  fn try_next_u32(&mut self) -> Result<u32, Self::Error> {
    Ok(self.u32())
  }

  #[inline(always)]
  fn try_next_u64(&mut self) -> Result<u64, Self::Error> {
    Ok(self.u64())
  }

  fn try_fill_bytes(&mut self, dst: &mut [u8]) -> Result<(), Self::Error> {
    self.fill(dst);
    Ok(())
  }
}
534
#[cfg(feature = "rand_core")]
impl rand_core::SeedableRng for Rng {
  type Seed = [u8; 16];

  /// Builds a generator directly from 16 little-endian seed bytes.
  ///
  /// Bit 0 is forced to 1 so the state is nonzero (fixing that one seed
  /// bit). NOTE(review): unlike `Rng::new`, the seed is used as the raw
  /// state with no scrambling — confirm that is intended for this trait.
  fn from_seed(seed: Self::Seed) -> Self {
    let s = 1 | u128::from_le_bytes(seed);
    // SAFETY: bit 0 of `s` is set, so `s` is nonzero.
    let s = unsafe { NonZeroU128::new_unchecked(s) };
    Self { state: s }
  }

  /// Delegates to the inherent scrambling constructor.
  fn seed_from_u64(seed: u64) -> Self {
    Self::from_u64(seed)
  }

  /// Seeds from another generator: two output words packed into the state,
  /// with bit 0 forced to 1 to keep it nonzero.
  fn from_rng<T: rand_core::Rng + ?Sized>(g: &mut T) -> Self {
    let x = g.next_u64();
    let y = g.next_u64();
    let s = 1 | concat(x, y);
    // SAFETY: bit 0 of `s` is set, so `s` is nonzero.
    let s = unsafe { NonZeroU128::new_unchecked(s) };
    Self { state: s }
  }

  /// Fallible variant of [`from_rng`], propagating the source's error.
  fn try_from_rng<T: rand_core::TryRng + ?Sized>(g: &mut T) -> Result<Self, T::Error> {
    let x = g.try_next_u64()?;
    let y = g.try_next_u64()?;
    let s = 1 | concat(x, y);
    // SAFETY: bit 0 of `s` is set, so `s` is nonzero.
    let s = unsafe { NonZeroU128::new_unchecked(s) };
    Ok(Self { state: s })
  }

}
567
/// Free-function conveniences backed by a lazily seeded, per-thread
/// generator. Each call loads the cached state, runs one operation, and
/// stores the advanced state back.
// NOTE(review): `with` calls `Rng::from_operating_system`, which is gated
// on the `getrandom` feature — confirm Cargo.toml makes `thread_local`
// imply both `std` and `getrandom`.
#[cfg(feature = "thread_local")]
pub mod thread_local {
  use std::cell::Cell;
  use std::mem::MaybeUninit;
  use std::num::NonZeroU128;
  use std::num::NonZeroU32;
  use std::num::NonZeroU64;
  use std::thread_local;
  use super::Rng;

  thread_local! {
    // `None` until first use; seeded from the OS on demand in `with`.
    static RNG: Cell<Option<NonZeroU128>> = const {
      Cell::new(None)
    };
  }

  /// Runs `f` with this thread's generator, persisting the advanced state.
  // NOTE(review): if `f` re-enters this module, the inner call starts from
  // the same cached state and the outer call overwrites its result —
  // harmless for the closures used here, but worth confirming.
  #[inline(always)]
  fn with<T>(f: impl FnOnce(&mut Rng) -> T) -> T {
    RNG.with(|state| {
      let mut g =
        match state.get() {
          None => Rng::from_operating_system(),
          Some(s) => Rng::from_state(s),
        };
      let x = f(&mut g);
      state.set(Some(g.state()));
      x
    })
  }

  /// Returns `true` with probability approximately `p`.
  pub fn bernoulli(p: f64) -> bool {
    with(|g| g.bernoulli(p))
  }

  /// Returns a pseudorandom `bool`.
  pub fn bool() -> bool {
    with(|g| g.bool())
  }

  /// Returns a pseudorandom `i32`.
  pub fn i32() -> i32 {
    with(|g| g.i32())
  }

  /// Returns a pseudorandom `i64`.
  pub fn i64() -> i64 {
    with(|g| g.i64())
  }

  /// Returns a pseudorandom `i128`.
  pub fn i128() -> i128 {
    with(|g| g.i128())
  }

  /// Returns a pseudorandom `u32`.
  pub fn u32() -> u32 {
    with(|g| g.u32())
  }

  /// Returns a pseudorandom `u64`.
  pub fn u64() -> u64 {
    with(|g| g.u64())
  }

  /// Returns a pseudorandom `u128`.
  pub fn u128() -> u128 {
    with(|g| g.u128())
  }

  /// Returns a pseudorandom nonzero `u32`.
  pub fn non_zero_u32() -> NonZeroU32 {
    with(|g| g.non_zero_u32())
  }

  /// Returns a pseudorandom nonzero `u64`.
  pub fn non_zero_u64() -> NonZeroU64 {
    with(|g| g.non_zero_u64())
  }

  /// Returns a pseudorandom nonzero `u128`.
  pub fn non_zero_u128() -> NonZeroU128 {
    with(|g| g.non_zero_u128())
  }

  /// Returns a uniform integer in `0 ..= n` (inclusive).
  pub fn bounded_u32(n: u32) -> u32 {
    with(|g| g.bounded_u32(n))
  }

  /// Returns a uniform integer in `0 ..= n` (inclusive).
  pub fn bounded_u64(n: u64) -> u64 {
    with(|g| g.bounded_u64(n))
  }

  /// Returns a uniform integer in `0 ..= n` (inclusive).
  pub fn bounded_usize(n: usize) -> usize {
    with(|g| g.bounded_usize(n))
  }

  /// Returns a uniform integer in `lo ..= hi`, assuming `lo <= hi`.
  pub fn range_i32(lo: i32, hi: i32) -> i32 {
    with(|g| g.range_i32(lo, hi))
  }

  /// Returns a uniform integer in `lo ..= hi`, assuming `lo <= hi`.
  pub fn range_i64(lo: i64, hi: i64) -> i64 {
    with(|g| g.range_i64(lo, hi))
  }

  /// Returns a uniform integer in `lo ..= hi`, assuming `lo <= hi`.
  pub fn range_isize(lo: isize, hi: isize) -> isize {
    with(|g| g.range_isize(lo, hi))
  }

  /// Returns a uniform integer in `lo ..= hi`, assuming `lo <= hi`.
  pub fn range_u32(lo: u32, hi: u32) -> u32 {
    with(|g| g.range_u32(lo, hi))
  }

  /// Returns a uniform integer in `lo ..= hi`, assuming `lo <= hi`.
  pub fn range_u64(lo: u64, hi: u64) -> u64 {
    with(|g| g.range_u64(lo, hi))
  }

  /// Returns a uniform integer in `lo ..= hi`, assuming `lo <= hi`.
  pub fn range_usize(lo: usize, hi: usize) -> usize {
    with(|g| g.range_usize(lo, hi))
  }

  /// Returns a pseudorandom `f32` in `[0.0, 1.0]`.
  pub fn f32() -> f32 {
    with(|g| g.f32())
  }

  /// Returns a pseudorandom `f64` in `[0.0, 1.0]`.
  pub fn f64() -> f64 {
    with(|g| g.f64())
  }

  /// Returns a pseudorandom `f32` in `[-1.0, 1.0]`.
  pub fn biunit_f32() -> f32 {
    with(|g| g.biunit_f32())
  }

  /// Returns a pseudorandom `f64` in `[-1.0, 1.0]`.
  pub fn biunit_f64() -> f64 {
    with(|g| g.biunit_f64())
  }

  /// Fills the slice with pseudorandom bytes.
  pub fn fill(dst: &mut [u8]) {
    with(|g| g.fill(dst))
  }

  /// Fills `len` bytes at `dst` with pseudorandom bytes.
  ///
  /// # Safety
  ///
  /// `dst` must be valid for writes of `len` bytes.
  pub unsafe fn fill_unchecked(dst: *mut u8, len: usize) {
    with(|g| unsafe { g.fill_unchecked(dst, len) })
  }

  /// Fills an uninitialized buffer and returns the initialized slice.
  pub fn fill_uninit(dst: &mut [MaybeUninit<u8>]) -> &mut [u8] {
    with(|g| g.fill_uninit(dst))
  }

  /// Returns an array of `N` pseudorandom bytes.
  pub fn byte_array<const N: usize>() -> [u8; N] {
    with(|g| g.byte_array())
  }
}