use crate::impls::fill_via_chunks;
use crate::{CryptoRng, RngCore, SeedableRng, TryRngCore};
use core::fmt;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
/// A trait for RNGs which generate random numbers not individually but in
/// whole blocks at a time (typically an array such as `[u32; 16]`).
pub trait BlockRngCore {
    /// Element type of the results block, e.g. `u32`.
    type Item;

    /// The block type produced by [`generate`](Self::generate); usually a
    /// fixed-size array of `Item`. Must be viewable as a slice and have a
    /// `Default` value to serve as the initial (empty) buffer.
    type Results: AsRef<[Self::Item]> + AsMut<[Self::Item]> + Default;

    /// Generate a new block of results, overwriting `results`.
    fn generate(&mut self, results: &mut Self::Results);
}
pub trait CryptoBlockRng: BlockRngCore {}
/// A wrapper type implementing `RngCore` on top of a [`BlockRngCore`] with a
/// `u32` results buffer: values are generated in blocks by `core` and handed
/// out one at a time through the buffer.
#[derive(Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(
    feature = "serde",
    serde(
        bound = "for<'x> R: Serialize + Deserialize<'x>, for<'x> R::Results: Serialize + Deserialize<'x>"
    )
)]
pub struct BlockRng<R: BlockRngCore> {
    // Most recently generated block of results.
    results: R::Results,
    // Cursor into `results`; equals the buffer length when exhausted.
    index: usize,
    /// The core RNG providing the `generate` function; directly accessible,
    /// unlike the buffer.
    pub core: R,
}
impl<R: BlockRngCore + fmt::Debug> fmt::Debug for BlockRng<R> {
    /// Debug-formats the wrapper state. Only the buffer length and cursor
    /// are shown — buffered random values themselves are not printed.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let mut dbg = fmt.debug_struct("BlockRng");
        dbg.field("core", &self.core);
        dbg.field("result_len", &self.results.as_ref().len());
        dbg.field("index", &self.index);
        dbg.finish()
    }
}
impl<R: BlockRngCore> BlockRng<R> {
    /// Wrap an RNG core. The buffer starts logically empty: the cursor is
    /// placed past the end so the first read triggers generation.
    #[inline]
    pub fn new(core: R) -> BlockRng<R> {
        let results = R::Results::default();
        let exhausted = results.as_ref().len();
        BlockRng {
            core,
            index: exhausted,
            results,
        }
    }

    /// Current cursor into the results buffer; equals the buffer length
    /// when no buffered values remain.
    #[inline(always)]
    pub fn index(&self) -> usize {
        self.index
    }

    /// Discard all buffered results, forcing regeneration on the next read.
    #[inline]
    pub fn reset(&mut self) {
        let exhausted = self.results.as_ref().len();
        self.index = exhausted;
    }

    /// Generate a fresh block of results and position the cursor at `index`.
    ///
    /// # Panics
    /// Panics unless `index` is strictly less than the buffer length.
    #[inline]
    pub fn generate_and_set(&mut self, index: usize) {
        let len = self.results.as_ref().len();
        assert!(index < len);
        self.core.generate(&mut self.results);
        self.index = index;
    }
}
impl<R: BlockRngCore<Item = u32>> RngCore for BlockRng<R> {
    #[inline]
    fn next_u32(&mut self) -> u32 {
        // Refill when all buffered values have been consumed.
        if self.index >= self.results.as_ref().len() {
            self.generate_and_set(0);
        }

        let value = self.results.as_ref()[self.index];
        self.index += 1;
        value
    }

    #[inline]
    fn next_u64(&mut self) -> u64 {
        // Combine two consecutive u32 values: the one at `index` supplies
        // the low 32 bits, the next one the high 32 bits.
        let read_u64 = |results: &[u32], index| {
            let data = &results[index..=index + 1];
            (u64::from(data[1]) << 32) | u64::from(data[0])
        };

        let len = self.results.as_ref().len();
        let index = self.index;
        if index < len - 1 {
            // At least two buffered values remain: consume both.
            self.index += 2;
            read_u64(self.results.as_ref(), index)
        } else if index >= len {
            // Buffer exhausted: refill, consume the first two values of the
            // new block, and leave the cursor at 2.
            // NOTE(review): `generate_and_set(2)` asserts `2 < len`, so this
            // path assumes a results buffer of at least 3 items — confirm.
            self.generate_and_set(2);
            read_u64(self.results.as_ref(), 0)
        } else {
            // Exactly one value left: it becomes the low word; refill and
            // take the first value of the new block as the high word.
            let x = u64::from(self.results.as_ref()[len - 1]);
            self.generate_and_set(1);
            let y = u64::from(self.results.as_ref()[0]);
            (y << 32) | x
        }
    }

    #[inline]
    fn fill_bytes(&mut self, dest: &mut [u8]) {
        let mut read_len = 0;
        while read_len < dest.len() {
            // Refill when the buffer is exhausted.
            if self.index >= self.results.as_ref().len() {
                self.generate_and_set(0);
            }
            // Copy as many bytes as possible from the remaining buffered
            // u32 values into the unfilled tail of `dest`, then advance
            // both cursors by the amounts actually transferred.
            let (consumed_u32, filled_u8) =
                fill_via_chunks(&self.results.as_mut()[self.index..], &mut dest[read_len..]);
            self.index += consumed_u32;
            read_len += filled_u8;
        }
    }
}
impl<R: BlockRngCore + SeedableRng> SeedableRng for BlockRng<R> {
type Seed = R::Seed;
#[inline(always)]
fn from_seed(seed: Self::Seed) -> Self {
Self::new(R::from_seed(seed))
}
#[inline(always)]
fn seed_from_u64(seed: u64) -> Self {
Self::new(R::seed_from_u64(seed))
}
#[inline(always)]
fn from_rng(rng: &mut impl RngCore) -> Self {
Self::new(R::from_rng(rng))
}
#[inline(always)]
fn try_from_rng<S: TryRngCore>(rng: &mut S) -> Result<Self, S::Error> {
R::try_from_rng(rng).map(Self::new)
}
}
impl<R: CryptoBlockRng + BlockRngCore<Item = u32>> CryptoRng for BlockRng<R> {}
/// A wrapper type implementing `RngCore` on top of a [`BlockRngCore`] with a
/// `u64` results buffer. Each buffered `u64` can serve two `next_u32` calls;
/// `half_used` records whether the upper half of the value before the cursor
/// is still pending consumption.
#[derive(Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct BlockRng64<R: BlockRngCore + ?Sized> {
    // Most recently generated block of results.
    results: R::Results,
    // Cursor into `results`; equals the buffer length when exhausted.
    index: usize,
    // True when only the low 32 bits of the previous value were consumed.
    half_used: bool,
    /// The core RNG providing the `generate` function; directly accessible,
    /// unlike the buffer.
    pub core: R,
}
impl<R: BlockRngCore + fmt::Debug> fmt::Debug for BlockRng64<R> {
    /// Debug-formats the wrapper state. Only the buffer length, cursor and
    /// half-word flag are shown — buffered random values are not printed.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let mut dbg = fmt.debug_struct("BlockRng64");
        dbg.field("core", &self.core);
        dbg.field("result_len", &self.results.as_ref().len());
        dbg.field("index", &self.index);
        dbg.field("half_used", &self.half_used);
        dbg.finish()
    }
}
impl<R: BlockRngCore> BlockRng64<R> {
    /// Wrap an RNG core. The buffer starts logically empty: the cursor is
    /// placed past the end so the first read triggers generation.
    #[inline]
    pub fn new(core: R) -> BlockRng64<R> {
        let results = R::Results::default();
        let exhausted = results.as_ref().len();
        BlockRng64 {
            core,
            index: exhausted,
            half_used: false,
            results,
        }
    }

    /// Current cursor into the results buffer; equals the buffer length
    /// when no buffered values remain.
    #[inline(always)]
    pub fn index(&self) -> usize {
        self.index
    }

    /// Discard all buffered results, including any half-consumed value,
    /// forcing regeneration on the next read.
    #[inline]
    pub fn reset(&mut self) {
        let exhausted = self.results.as_ref().len();
        self.index = exhausted;
        self.half_used = false;
    }

    /// Generate a fresh block of results, position the cursor at `index`,
    /// and clear the half-word flag.
    ///
    /// # Panics
    /// Panics unless `index` is strictly less than the buffer length.
    #[inline]
    pub fn generate_and_set(&mut self, index: usize) {
        let len = self.results.as_ref().len();
        assert!(index < len);
        self.core.generate(&mut self.results);
        self.index = index;
        self.half_used = false;
    }
}
impl<R: BlockRngCore<Item = u64>> RngCore for BlockRng64<R> {
    #[inline]
    fn next_u32(&mut self) -> u32 {
        // When the previous call consumed only the low half of a value, the
        // cursor has already advanced past it; step back to reuse it.
        let mut index = self.index - self.half_used as usize;
        if index >= self.results.as_ref().len() {
            // Buffer exhausted: regenerate in place (not via
            // `generate_and_set`, since `index` is also tracked locally).
            self.core.generate(&mut self.results);
            self.index = 0;
            index = 0;
            self.half_used = false;
        }

        // Take the low 32 bits first, then the high 32 bits on the next call.
        let shift = 32 * (self.half_used as usize);

        // Toggle the half-word flag; advance the cursor only when starting a
        // fresh value (i.e. after consuming its low half).
        self.half_used = !self.half_used;
        self.index += self.half_used as usize;

        (self.results.as_ref()[index] >> shift) as u32
    }

    #[inline]
    fn next_u64(&mut self) -> u64 {
        if self.index >= self.results.as_ref().len() {
            self.core.generate(&mut self.results);
            self.index = 0;
        }

        let value = self.results.as_ref()[self.index];
        self.index += 1;
        // Any half-consumed value is abandoned: a full u64 read discards the
        // pending upper half.
        self.half_used = false;
        value
    }

    #[inline]
    fn fill_bytes(&mut self, dest: &mut [u8]) {
        let mut read_len = 0;
        // Byte filling always starts on a whole-value boundary; a pending
        // upper half is discarded.
        self.half_used = false;
        while read_len < dest.len() {
            if self.index >= self.results.as_ref().len() {
                self.core.generate(&mut self.results);
                self.index = 0;
            }

            // Copy as many bytes as possible from the remaining buffered
            // u64 values into the unfilled tail of `dest`.
            let (consumed_u64, filled_u8) =
                fill_via_chunks(&self.results.as_mut()[self.index..], &mut dest[read_len..]);

            self.index += consumed_u64;
            read_len += filled_u8;
        }
    }
}
impl<R: BlockRngCore + SeedableRng> SeedableRng for BlockRng64<R> {
type Seed = R::Seed;
#[inline(always)]
fn from_seed(seed: Self::Seed) -> Self {
Self::new(R::from_seed(seed))
}
#[inline(always)]
fn seed_from_u64(seed: u64) -> Self {
Self::new(R::seed_from_u64(seed))
}
#[inline(always)]
fn from_rng(rng: &mut impl RngCore) -> Self {
Self::new(R::from_rng(rng))
}
#[inline(always)]
fn try_from_rng<S: TryRngCore>(rng: &mut S) -> Result<Self, S::Error> {
R::try_from_rng(rng).map(Self::new)
}
}
impl<R: CryptoBlockRng + BlockRngCore<Item = u64>> CryptoRng for BlockRng64<R> {}
#[cfg(test)]
mod test {
    use crate::block::{BlockRng, BlockRng64, BlockRngCore};
    use crate::{RngCore, SeedableRng};

    // Deterministic u32 block generator: fills each block with a counter
    // stepped by a fixed odd constant, so every output position is unique
    // and reproducible from the seed.
    #[derive(Debug, Clone)]
    struct DummyRng {
        counter: u32,
    }

    impl BlockRngCore for DummyRng {
        type Item = u32;
        type Results = [u32; 16];

        fn generate(&mut self, results: &mut Self::Results) {
            for r in results {
                *r = self.counter;
                self.counter = self.counter.wrapping_add(3511615421);
            }
        }
    }

    impl SeedableRng for DummyRng {
        type Seed = [u8; 4];

        fn from_seed(seed: Self::Seed) -> Self {
            DummyRng {
                counter: u32::from_le_bytes(seed),
            }
        }
    }

    // For a u32-based BlockRng the byte stream must be identical no matter
    // how reads are interleaved: a next_u64 consumes exactly the same two
    // u32 values as two next_u32 calls would.
    #[test]
    fn blockrng_next_u32_vs_next_u64() {
        let mut rng1 = BlockRng::<DummyRng>::from_seed([1, 2, 3, 4]);
        let mut rng2 = rng1.clone();
        let mut rng3 = rng1.clone();

        // u32, u64, u32
        let mut a = [0; 16];
        a[..4].copy_from_slice(&rng1.next_u32().to_le_bytes());
        a[4..12].copy_from_slice(&rng1.next_u64().to_le_bytes());
        a[12..].copy_from_slice(&rng1.next_u32().to_le_bytes());

        // u32, u32, u64
        let mut b = [0; 16];
        b[..4].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[4..8].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[8..].copy_from_slice(&rng2.next_u64().to_le_bytes());
        assert_eq!(a, b);

        // u64, u32, u32
        let mut c = [0; 16];
        c[..8].copy_from_slice(&rng3.next_u64().to_le_bytes());
        c[8..12].copy_from_slice(&rng3.next_u32().to_le_bytes());
        c[12..].copy_from_slice(&rng3.next_u32().to_le_bytes());
        assert_eq!(a, c);
    }

    // Deterministic u64 block generator, analogous to DummyRng above.
    #[derive(Debug, Clone)]
    struct DummyRng64 {
        counter: u64,
    }

    impl BlockRngCore for DummyRng64 {
        type Item = u64;
        type Results = [u64; 8];

        fn generate(&mut self, results: &mut Self::Results) {
            for r in results {
                *r = self.counter;
                self.counter = self.counter.wrapping_add(2781463553396133981);
            }
        }
    }

    impl SeedableRng for DummyRng64 {
        type Seed = [u8; 8];

        fn from_seed(seed: Self::Seed) -> Self {
            DummyRng64 {
                counter: u64::from_le_bytes(seed),
            }
        }
    }

    // For a u64-based BlockRng64 the interleavings are NOT all byte-equal:
    // a next_u64 after an odd number of next_u32 calls abandons the pending
    // upper half, so only specific sub-ranges are expected to match.
    #[test]
    fn blockrng64_next_u32_vs_next_u64() {
        let mut rng1 = BlockRng64::<DummyRng64>::from_seed([1, 2, 3, 4, 5, 6, 7, 8]);
        let mut rng2 = rng1.clone();
        let mut rng3 = rng1.clone();

        // u32, u64, u32
        let mut a = [0; 16];
        a[..4].copy_from_slice(&rng1.next_u32().to_le_bytes());
        a[4..12].copy_from_slice(&rng1.next_u64().to_le_bytes());
        a[12..].copy_from_slice(&rng1.next_u32().to_le_bytes());

        // u32, u32, u64
        let mut b = [0; 16];
        b[..4].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[4..8].copy_from_slice(&rng2.next_u32().to_le_bytes());
        b[8..].copy_from_slice(&rng2.next_u64().to_le_bytes());
        // Streams diverge overall (the u64 after one u32 skipped a half
        // word), but the first u32 and the following u64 values agree.
        assert_ne!(a, b);
        assert_eq!(&a[..4], &b[..4]);
        assert_eq!(&a[4..12], &b[8..]);

        // u64, u32, u32 — the two u32 reads split one buffered u64, so this
        // matches the u32,u32,u64 stream exactly.
        let mut c = [0; 16];
        c[..8].copy_from_slice(&rng3.next_u64().to_le_bytes());
        c[8..12].copy_from_slice(&rng3.next_u32().to_le_bytes());
        c[12..].copy_from_slice(&rng3.next_u32().to_le_bytes());
        assert_eq!(b, c);
    }
}