use std::convert::{TryFrom, TryInto};
use std::ops::Mul;
use binout::{AsIs, Serializer};
use bitm::{BitAccess, ceiling_div};
use crate::seeds::{to_io_error, Bits, TwoToPowerBitsStatic};
use crate::utils::{map32_to_32, map64_to_64};
/// Returns the group number assigned to the given `hash`,
/// in the range `0..level_size_groups`.
#[inline(always)]
pub fn group_nr(hash: u64, level_size_groups: usize) -> usize {
    // map64_to_64 presumably performs a fair (multiply-based) reduction of the
    // full 64-bit hash into the `0..level_size_groups` range — see crate::utils.
    let range = level_size_groups as u64;
    map64_to_64(hash, range) as usize
}
/// Scrambles the bits of `x` using a 32-bit xorshift-multiply finalizer.
///
/// The constants and shift amounts match the "low bias" mixer family
/// (two odd-constant multiplications interleaved with xor-shifts),
/// aiming for good avalanche behavior of every input bit.
#[inline(always)]
fn mix32(mut x: u32) -> u32 {
    x ^= x >> 16;
    x = x.wrapping_mul(0x21f0aaad);
    x ^= x >> 15;
    x = x.wrapping_mul(0xd35a2d97);
    x ^ (x >> 15)
}
/// Size of a single group (in bits, exposed as `u8` via `Into<u8>`).
///
/// Implementors also provide fast multiplication by a group index
/// (`Mul<usize>`) and fallible construction from a raw byte (`TryFrom<u8>`).
pub trait GroupSize: Sized + Mul<usize, Output=usize> + Copy + Into<u8> + TryFrom<u8, Error=&'static str> {
    /// Checks whether the value is supported; returns it unchanged on success.
    fn validate(&self) -> Result<Self, &'static str> { Ok(*self) }

    /// Maps a 32-bit `hash` to an index inside a group, i.e. to `0..group_size`.
    #[inline(always)]
    fn hash_to_group(&self, hash: u32) -> u8 {
        let size = Into::<u8>::into(*self) as u32;
        map32_to_32(hash, size) as u8
    }

    /// Maps `hash`, seeded with `group_seed`, to an index inside a group.
    #[inline(always)]
    fn in_group_index(&self, hash: u64, group_seed: u16) -> u8 {
        let seeded = (hash as u32) ^ (group_seed as u32);
        self.hash_to_group(mix32(seeded))
    }

    /// Returns the absolute bit index (group offset + in-group index)
    /// for the given `hash`, `group_seed` and `group` number.
    #[inline]
    fn bit_index_for_seed(&self, hash: u64, group_seed: u16, group: usize) -> usize {
        *self * group + self.in_group_index(hash, group_seed) as usize
    }

    /// Rounds `desired_total_size` (in bits) up to the nearest value divisible
    /// by both 64 (the segment size) and the group size, and returns the pair
    /// `(number of groups, number of 64-bit segments)` of the resulting level.
    fn level_size_groups_segments(&self, mut desired_total_size: usize) -> (usize, usize) {
        let group_size = Into::<u8>::into(*self) as usize;
        // First round up to a whole number of 64-bit segments:
        if desired_total_size % 64 != 0 {
            desired_total_size += 64 - desired_total_size % 64;
        }
        // Then grow by whole segments until the size is also a multiple of the group size:
        while desired_total_size % group_size != 0 {
            desired_total_size += 64;
        }
        (desired_total_size / group_size, desired_total_size / 64)
    }

    /// Copies the group with the given `group_index` from `src` to `dst`
    /// if the source group contains more set bits than the destination one;
    /// `callback` is invoked exactly when the copy takes place.
    #[inline(always)]
    fn copy_group_if_better<CB>(&self, dst: &mut [u64], src: &[u64], group_index: usize, callback: CB)
        where CB: FnOnce()
    {
        dst.conditionally_copy_fragment(src, |best, new| {
            let better = best.count_ones() < new.count_ones();
            if better { callback(); }
            better
        }, group_index, (*self).into())
    }

    /// Number of bytes appended to the output by `write` (always one).
    #[inline]
    fn write_size_bytes(&self) -> usize {
        std::mem::size_of::<u8>()
    }

    /// Serializes the group size as a single byte.
    fn write(&self, output: &mut dyn std::io::Write) -> std::io::Result<()> {
        let byte: u8 = (*self).into();
        AsIs::write(output, byte)
    }

    /// Deserializes the group size (a single byte) and validates it.
    fn read(input: &mut dyn std::io::Read) -> std::io::Result<Self> {
        let byte: u8 = AsIs::read(input)?;
        Self::try_from(byte).map_err(to_io_error)?
            .validate().map_err(to_io_error)
    }
}
/// Group size that is a power of two: 2^`log2size` bits per group.
#[derive(Copy, Clone)]
pub struct TwoToPowerBits {
    // Base-2 logarithm of the group size in bits; `new` enforces 0..=7.
    log2size: u8,
    // Low `log2size` bits set (i.e. 2^log2size - 1); used to reduce
    // a hash to an in-group index with a single AND.
    mask: u8
}
impl TwoToPowerBits {
    /// Returns a group size of 2^`log2size` bits.
    ///
    /// # Panics
    /// Panics if `log2size` exceeds 7 (i.e. if the group size would exceed 128 bits).
    pub fn new(log2size: u8) -> Self {
        assert!(log2size <= 7);
        let mask = (1u8 << log2size) - 1;
        Self { log2size, mask }
    }
}
impl Mul<usize> for TwoToPowerBits {
    type Output = usize;

    /// Multiplies `rhs` by the group size; since the size is a power
    /// of two, this is a single left shift.
    #[inline(always)] fn mul(self, rhs: usize) -> Self::Output {
        rhs << self.log2size
    }
}
impl Into<u8> for TwoToPowerBits {
#[inline(always)] fn into(self) -> u8 {
1<<self.log2size
}
}
impl TryFrom<u8> for TwoToPowerBits {
    type Error = &'static str;

    /// Accepts only group sizes that are powers of two, not larger than 128
    /// bits; stores the exponent.
    fn try_from(value: u8) -> Result<Self, Self::Error> {
        if !value.is_power_of_two() || value > 128 {
            return Err("group size must be the power of two, not greater than 128");
        }
        // For a power of two, trailing_zeros is exactly its base-2 logarithm.
        Ok(Self::new(value.trailing_zeros() as u8))
    }
}
impl GroupSize for TwoToPowerBits {
    /// For a power-of-two group size, the in-group index is simply
    /// the low `log2size` bits of the hash.
    #[inline(always)] fn hash_to_group(&self, hash: u32) -> u8 {
        (hash as u8) & self.mask
    }

    fn level_size_groups_segments(&self, desired_total_size: usize) -> (usize, usize) {
        if self.log2size > 6 {
            // One group spans several 64-bit segments:
            // round up to whole groups, then convert groups to segments.
            let groups = ceiling_div(desired_total_size, 1usize << self.log2size);
            (groups, groups << (self.log2size - 6))
        } else {
            // One 64-bit segment holds a whole number of groups:
            // round up to whole segments, then convert segments to groups.
            let segments = ceiling_div(desired_total_size, 64);
            (segments << (6 - self.log2size), segments)
        }
    }
}
impl GroupSize for Bits {
    /// `Bits` supports group sizes of at most 63 bits; rejects larger values.
    fn validate(&self) -> Result<Self, &'static str> {
        if self.0 <= 63 {
            Ok(*self)
        } else {
            // Error message typo fixed: "grater" -> "greater".
            Err("group sizes greater than 63 are not supported by Bits")
        }
    }
}
impl<const LOG2_BITS: u8> GroupSize for TwoToPowerBitsStatic<LOG2_BITS> {
    /// For a power-of-two group size, the in-group index is simply
    /// the low `LOG2_BITS` bits of the hash.
    #[inline(always)] fn hash_to_group(&self, hash: u32) -> u8 {
        (hash as u8) & Self::LOG2_MASK
    }

    fn level_size_groups_segments(&self, desired_total_size: usize) -> (usize, usize) {
        if LOG2_BITS > 6 {
            // One group spans several 64-bit segments:
            // round up to whole groups, then convert groups to segments.
            let groups = ceiling_div(desired_total_size, 1usize << LOG2_BITS);
            (groups, groups << (LOG2_BITS - 6))
        } else {
            // One 64-bit segment holds a whole number of groups:
            // round up to whole segments, then convert segments to groups.
            let segments = ceiling_div(desired_total_size, 64);
            (segments << (6 - LOG2_BITS), segments)
        }
    }

    /// Specialized (constant-folded) version of the default implementation:
    /// copies the group from `src` into `dst` only if it has strictly more
    /// set bits, invoking `callback` exactly when the copy happens.
    #[inline(always)] fn copy_group_if_better<CB>(&self, dst: &mut [u64], src: &[u64], group_index: usize, callback: CB)
        where CB: FnOnce()
    {
        // Locate the 64-bit word holding the group and its bit offset within it.
        let word = group_index / Self::VALUES_PER_64 as usize;
        let shift = Self::shift_for(group_index);
        let current = (dst[word] >> shift) & Self::MASK64;
        let candidate = (src[word] >> shift) & Self::MASK64;
        if current.count_ones() < candidate.count_ones() {
            callback();
            // Clear the old group bits and splice in the candidate.
            dst[word] = (dst[word] & !(Self::MASK64 << shift)) | (candidate << shift);
        }
    }
}