use std::{
intrinsics::transmute,
io::{Read, Result},
mem::size_of,
ops::{Add, Mul, Rem, Sub},
ptr::addr_of,
};
use crate::{num_def::NumberSupport, zest_block::ZestBlock, zest_hash::zest4096};
// Notice: The seed of the PRNG also holds the counter, in the higher array indices.
// Keep your key/seed in the low bits, and the nonce after that if you wish.
// The top 16 bytes for the CSPRNG will be "Bytes are fun :)" in big endian,
// The next highest bytes will be counted over once the current 512 byte buffer
// has been read from.
// CSPRNG state: a seed/counter block, the current 512-byte hash output,
// and a cursor into that output. `Copy` means instances duplicate freely;
// iterating or reading from a copy does not advance the original.
#[derive(Clone, Copy)]
pub struct ZestRand {
    // Seed of the CSPRNG (THIS INCLUDES THE COUNTER) -- higher bits get changed first
    seed: ZestBlock,
    // Current output of the CSPRNG (in byte form), i.e. zest4096(seed)
    hash_output: [u8; 512],
    // Current byte of the CSPRNG (index into `hash_output`, kept in 0..512)
    byte_offset: usize,
}
impl ZestRand {
    // Create a new ZestRand instance. A `None` seed falls back to
    // `ZestBlock::default()`; the first 512-byte output block is hashed
    // immediately so the instance is ready to produce bytes.
    #[inline(always)]
    pub fn new(seed: Option<ZestBlock>) -> ZestRand {
        let mut initial = ZestRand {
            seed: seed.unwrap_or_default(),
            hash_output: [0; 512],
            byte_offset: 0,
        };
        initial.hash_output = zest4096(initial.seed).data_u8();
        initial
    }
    // (Re)seed the CSPRNG, and start at beginning of block + offset.
    // Doesn't check for identical seeds before rehashing.
    // The offset is wrapped into the 0..512 range of the output block.
    #[inline(always)]
    pub fn seed(&mut self, seed: ZestBlock, byte_offset: usize) {
        self.seed = seed;
        self.hash_output = zest4096(self.seed).data_u8();
        self.byte_offset = byte_offset & 0x1FF;
    }
    // Request current seed and offset in the CSPRNG
    pub fn getseed(&self) -> (ZestBlock, usize) {
        (self.seed, self.byte_offset)
    }
    // Counts to the next block (lower index ptr = higher place value)
    // and regenerates the 512-byte output buffer from the new seed.
    // (The old "sets the hash output to None" comment was stale: the
    // output is no longer optional, it is rehashed eagerly.)
    #[inline(always)]
    pub fn next_block(&mut self) {
        self.seed.increment_block();
        self.hash_output = zest4096(self.seed).data_u8();
    }
    // Get the current block by rehashing the seed.
    // Takes `&self`: nothing here mutates state (relaxed from `&mut self`,
    // which is backward compatible for all callers).
    #[inline(always)]
    pub fn get_block_rehash(&self) -> ZestBlock {
        zest4096(self.seed)
    }
    // Get the current block by copying the cached hashed output.
    // Work with it as if it were in big endian.
    // Also relaxed to `&self`: it only reads the cached buffer.
    #[inline(always)]
    pub fn get_block_data(&self) -> [u8; 512] {
        self.hash_output
    }
}
// Functions which output random data:
impl ZestRand {
    // Fetch the next byte, advancing the cursor and rolling over to a
    // freshly hashed block once the 512-byte buffer is exhausted.
    #[inline(always)]
    pub fn next_byte(&mut self) -> u8 {
        // `hash_output` is already `[u8; 512]`; the previous
        // `transmute(&self.hash_output)` to the identical type was a
        // no-op and needless `unsafe`, so it has been removed.
        let byte_val = self.hash_output[self.byte_offset & 0x1FF];
        self.byte_offset += 1;
        if self.byte_offset >= 0x200 {
            self.byte_offset = 0;
            self.next_block();
        }
        byte_val
    }
    // Fills the output array with random bytes --
    // useful for types with "from_be_bytes" and "from_le_bytes" options.
    #[inline(always)]
    pub fn next_bytes<const N: usize>(&mut self) -> [u8; N] {
        let mut output = [0; N];
        output.fill_with(|| self.next_byte());
        output
    }
    // Fill given type with random data.
    /// # Safety
    /// Do not use with objects containing pointers or references,
    /// as they are written over with random data, and can cause a seg fault.
    /// Do not use with enums, as not all values could map to a valid enum.
    /// Think before you use!
    #[inline(always)]
    pub unsafe fn fill_rand<A: Copy + Sized>(&mut self) -> A {
        let mut data = vec![];
        data.resize_with(size_of::<A>(), || self.next_byte());
        // A `Vec<u8>` only guarantees 1-byte alignment, so the old plain
        // dereference through `*const A` was UB for any `A` with a larger
        // alignment requirement. `read_unaligned` copies the bytes out
        // without an alignment assumption.
        (data.as_ptr() as *const A).read_unaligned()
    }
    // Use to generate random integers.
    #[inline(always)]
    pub fn next_num<T: NumberSupport + Copy>(&mut self) -> T {
        unsafe { self.fill_rand() }
    }
    // Function takes in most integer types, and the max and minimum value it could be.
    // Returns an unbiased result in that type via rejection sampling:
    // draws above the largest multiple-of-`range` threshold are discarded
    // so every residue is equally likely.
    // The maximum is ***non-inclusive***.
    // Precondition: `max` must be at least one more than `min`, otherwise
    // `range` is zero and the modulo below divides by zero.
    // NOTE(review): for signed types the `.abs()` fold and the ordered
    // comparison rely on `NumberSupport` semantics defined elsewhere in
    // the crate — the sampling logic is intentionally left byte-intact.
    #[inline(always)]
    pub fn bounded<
        A: NumberSupport
            + Copy
            + Sub<Output = A>
            + Add<Output = A>
            + Mul<Output = A>
            + Rem<Output = A>
            + PartialOrd
            + From<u8>,
    >(
        &mut self,
        min: A,
        max: A,
    ) -> A {
        // Catch the documented precondition violation early (debug only).
        debug_assert!(max > min, "ZestRand::bounded requires max > min");
        let max_val: A = A::MAX;
        let mut not_biased = self.next_num::<A>();
        let range = max - min;
        while not_biased > (max_val - (((max_val % range) + A::from(1)) % range)) {
            not_biased = self.next_num::<A>();
        }
        not_biased.abs() % range + min
    }
}
impl Default for ZestRand {
    #[inline(always)]
    fn default() -> Self {
        // Delegates to `new(None)`: the seed falls back to
        // `ZestBlock::default()` and the first output block is hashed
        // immediately, so the instance is ready to produce bytes.
        // (No `unsafe` is involved here; the old "Safety" note was
        // misleading.)
        Self::new(None)
    }
}
// Will always return Some. You can safely unwrap.
// Because ZestRand is Copy, constructs like `for b in rng` iterate over
// a copy of the state and do NOT advance the original instance.
impl Iterator for ZestRand {
    type Item = u8;
    // Yields the next CSPRNG byte; never returns None.
    #[inline(always)]
    fn next(&mut self) -> Option<Self::Item> {
        Some(self.next_byte())
    }
    // The stream never ends, so report the conventional infinite-iterator
    // hint instead of the default `(0, None)` — adaptors like `take`
    // can then reserve correctly.
    #[inline(always)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (usize::MAX, None)
    }
}
// `read` simply fills the requested buffer and always returns Ok, so
// unwrapping it is safe. `read_to_end` is NOT supported: the stream is
// endless, so it could never terminate.
impl Read for ZestRand {
    // Fills the entire buffer with random bytes; never fails.
    #[inline(always)]
    fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
        buf.fill_with(|| self.next_byte());
        Ok(buf.len())
    }
    // The stream is infinite, so `read_to_end` could never terminate and
    // would exhaust memory first. Report this through `Result` — a bare
    // `panic!()` violates the expectation that `Read` failures are
    // recoverable errors.
    #[inline(always)]
    fn read_to_end(&mut self, _: &mut Vec<u8>) -> Result<usize> {
        Err(std::io::Error::new(
            std::io::ErrorKind::Unsupported,
            "ZestRand is an endless stream; read_to_end would never finish",
        ))
    }
}
// Functions for dealing with slice types:
impl ZestRand {
    // Shuffle a slice in place using non-biased uniform indexing
    // (forward Fisher–Yates: each position swaps with a uniformly chosen
    // index at or after it).
    // The old `T: Sized + Copy` bound was unnecessary — `slice::swap`
    // works for any element type — so it is relaxed to plain `T`, which
    // is backward compatible for every existing caller.
    #[inline(always)]
    pub fn slice_shuffle<T>(&mut self, slice: &mut [T]) {
        for i in 0..slice.len() {
            let swap_index = self.bounded(i, slice.len());
            slice.swap(i, swap_index);
        }
    }
    // Fill a vector with a number of random objects
    /// # Safety
    /// Do not use with objects containing pointers or references,
    /// as they are written over with random data, and can cause a seg fault.
    /// Do not use with enums, as not all values could map to a valid enum.
    /// Think before you use!
    #[inline(always)]
    pub unsafe fn fill_vec_any<T: Sized + Copy>(&mut self, len: usize) -> Vec<T> {
        // `resize_with` reserves the full length up front, so this
        // allocates exactly once.
        let mut data: Vec<T> = vec![];
        data.resize_with(len, || self.fill_rand());
        data
    }
    // Fill a vector with random numbers.
    // Safe wrapper around fill_vec_any: `NumberSupport` types are plain
    // integers, for which any bit pattern is valid.
    #[inline(always)]
    pub fn fill_vec_num<T: NumberSupport + Copy>(&mut self, len: usize) -> Vec<T> {
        unsafe { self.fill_vec_any(len) }
    }
}