use entry::{
Entry, OccupiedEntry, OccupiedHashMapEntry, OccupiedVecEntry, VacantEntry, VacantHashMapEntry,
VacantVecEntry,
};
use key::{Key, PrimInt};
use sealed::sealed;
use std::collections::HashMap;
use std::fmt::Debug;
use std::hash::BuildHasher;
use std::num::NonZero;
use std::ops::AddAssign;
pub(crate) mod entry;
pub(crate) mod iterators;
pub(crate) mod key;
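/// A hybrid map for integer keys that splits storage between a fixed-size lookup
/// table and a `HashMap`. Keys in the range
/// `TABLE_MIN_VALUE..=TABLE_MIN_VALUE + TABLE_SIZE - 1` are stored in the table and
/// accessed by direct indexing (no hashing); all other keys fall back to the
/// `HashMap` with hasher `S`.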
#[derive(Debug, Clone)]
pub struct MuleMap<
K,
V,
S,
const TABLE_MIN_VALUE: i128 = 0,
const TABLE_SIZE: usize = { u8::MAX as usize + 1 },
> {
hash_map: HashMap<K, V, S>,
table: [Option<V>; TABLE_SIZE],
}
impl<K, V, S, const TABLE_MIN_VALUE: i128, const TABLE_SIZE: usize> Default
for MuleMap<K, V, S, TABLE_MIN_VALUE, TABLE_SIZE>
where
K: Key<TABLE_MIN_VALUE>,
S: Default + BuildHasher,
<K as TryFrom<i128>>::Error: Debug,
{
#[inline]
fn default() -> Self {
Self::new()
}
}
impl<K, V, S, const TABLE_MIN_VALUE: i128, const TABLE_SIZE: usize> PartialEq
for MuleMap<K, V, S, TABLE_MIN_VALUE, TABLE_SIZE>
where
K: Key<TABLE_MIN_VALUE>,
V: PartialEq,
S: BuildHasher,
{
#[inline]
fn eq(&self, other: &MuleMap<K, V, S, TABLE_MIN_VALUE, TABLE_SIZE>) -> bool {
self.hash_map == other.hash_map && self.table == other.table
}
}
impl<K, V, S, const TABLE_MIN_VALUE: i128, const TABLE_SIZE: usize> Eq
for MuleMap<K, V, S, TABLE_MIN_VALUE, TABLE_SIZE>
where
K: Key<TABLE_MIN_VALUE>,
V: Eq,
S: BuildHasher,
{
}
impl<K, V, S, const TABLE_MIN_VALUE: i128, const TABLE_SIZE: usize> std::ops::Index<K>
for MuleMap<K, V, S, TABLE_MIN_VALUE, TABLE_SIZE>
where
K: Key<TABLE_MIN_VALUE>,
S: BuildHasher,
{
type Output = V;
#[inline]
fn index(&self, key: K) -> &V {
self.get(key).expect("No entry found for key")
}
}
impl<'a, K, V, S, const TABLE_MIN_VALUE: i128, const TABLE_SIZE: usize> Extend<(K, &'a V)>
for MuleMap<K, V, S, TABLE_MIN_VALUE, TABLE_SIZE>
where
K: Key<TABLE_MIN_VALUE>,
S: Default + BuildHasher,
V: Copy,
{
#[inline]
fn extend<T: IntoIterator<Item = (K, &'a V)>>(&mut self, iter: T) {
for (key, val) in iter {
self.insert(key, *val);
}
}
}
impl<K, V, S, const TABLE_MIN_VALUE: i128, const TABLE_SIZE: usize> Extend<(K, V)>
for MuleMap<K, V, S, TABLE_MIN_VALUE, TABLE_SIZE>
where
K: Key<TABLE_MIN_VALUE>,
S: BuildHasher,
{
#[inline]
fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
for (key, val) in iter {
self.insert(key, val);
}
}
}
impl<K, V, S, const TABLE_MIN_VALUE: i128, const TABLE_SIZE: usize, const N: usize>
From<[(K, V); N]> for MuleMap<K, V, S, TABLE_MIN_VALUE, TABLE_SIZE>
where
K: Key<TABLE_MIN_VALUE>,
S: BuildHasher + Default,
<K as TryFrom<i128>>::Error: Debug,
{
#[inline]
fn from(arr: [(K, V); N]) -> Self {
let mut map = Self::default();
for (key, val) in arr {
map.insert(key, val);
}
map
}
}
impl<K, V, S, const TABLE_MIN_VALUE: i128, const TABLE_SIZE: usize> FromIterator<(K, V)>
for MuleMap<K, V, S, TABLE_MIN_VALUE, TABLE_SIZE>
where
K: Key<TABLE_MIN_VALUE>,
S: BuildHasher + Default,
<K as TryFrom<i128>>::Error: Debug,
{
#[inline]
fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
let mut map = Self::default();
map.extend(iter);
map
}
}
impl<K, V, S, const TABLE_MIN_VALUE: i128, const TABLE_SIZE: usize>
MuleMap<K, V, S, TABLE_MIN_VALUE, TABLE_SIZE>
where
K: Key<TABLE_MIN_VALUE>,
S: BuildHasher,
{
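    // Compile-time guards: the table may not exceed `i32::MAX + 1` slots, the top of
    // the table range must not overflow `i128`, and `isize` must be able to hold
    // `i32::MAX`.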
const STATIC_ASSERT_LIMIT_SIZE_TO_I32_MAX: () =
assert!((TABLE_SIZE as u128) <= i32::MAX as u128 + 1);
const STATIC_ASSERT_SIZE_FITS_I128: () = assert!(
TABLE_MIN_VALUE
.checked_add(const { TABLE_SIZE.saturating_sub(1) as i128 })
.is_some()
);
const STATIC_ASSERT_ISIZE_FITS_I32: () = assert!(isize::MAX as u128 >= i32::MAX as u128);
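    /// Returns `true` when `key` falls inside the lookup-table range
    /// `TABLE_MIN_VALUE..=TABLE_MIN_VALUE + TABLE_SIZE - 1` (always `false` for a
    /// zero-sized table).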
#[must_use]
#[inline]
pub(crate) fn use_lookup_table(key: K) -> bool {
if const { TABLE_SIZE == 0 } {
return false;
}
let promoted_sum = K::add_promoted(
K::i128_as_promoted(TABLE_MIN_VALUE),
K::usize_as_promoted(const { TABLE_SIZE.saturating_sub(1) }),
);
key <= promoted_sum && key >= K::i128_as_k(TABLE_MIN_VALUE)
}
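    /// Panics if the table range `TABLE_MIN_VALUE..=TABLE_MIN_VALUE + TABLE_SIZE - 1`
    /// cannot be represented by the key type `K`. Called by the constructors.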
#[inline]
pub(crate) fn check_bounds()
where
<K as TryFrom<i128>>::Error: Debug,
{
let () = Self::STATIC_ASSERT_LIMIT_SIZE_TO_I32_MAX;
let () = Self::STATIC_ASSERT_SIZE_FITS_I128;
let () = Self::STATIC_ASSERT_ISIZE_FITS_I32;
<i128 as TryInto<K>>::try_into(
TABLE_MIN_VALUE + const { TABLE_SIZE.saturating_sub(1) } as i128,
)
.unwrap_or_else(|_| {
            panic!(
                "TABLE_MIN_VALUE ({TABLE_MIN_VALUE:?}) + TABLE_SIZE ({TABLE_SIZE:?}) - 1 should fit into the key type, K"
            )
});
<i128 as TryInto<K>>::try_into(TABLE_MIN_VALUE)
.expect("TABLE_MIN_VALUE should fit into key type, K");
}
#[must_use]
#[inline]
pub fn new() -> Self
where
S: Default,
<K as TryFrom<i128>>::Error: Debug,
{
Self::with_capacity_and_hasher(0, S::default())
}
#[must_use]
#[inline]
pub fn with_capacity(capacity: usize) -> Self
where
S: Default,
<K as TryFrom<i128>>::Error: Debug,
{
Self::with_capacity_and_hasher(capacity, S::default())
}
#[must_use]
#[inline]
pub fn with_hasher(hash_builder: S) -> Self
where
<K as TryFrom<i128>>::Error: Debug,
{
Self::with_capacity_and_hasher(0, hash_builder)
}
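    /// Creates an empty `MuleMap` whose backing `HashMap` pre-allocates space for at
    /// least `capacity` elements, using `hash_builder` to hash keys.
    ///
    /// # Panics
    ///
    /// Panics if the table range `TABLE_MIN_VALUE..=TABLE_MIN_VALUE + TABLE_SIZE - 1`
    /// does not fit in the key type `K` (see `check_bounds`).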
#[must_use]
#[inline]
pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self
where
<K as TryFrom<i128>>::Error: Debug,
{
Self::check_bounds();
MuleMap::<K, V, S, TABLE_MIN_VALUE, TABLE_SIZE> {
hash_map: HashMap::with_capacity_and_hasher(capacity, hash_builder),
table: [const { None }; TABLE_SIZE],
}
}
#[must_use]
#[inline]
pub fn capacity(&self) -> usize {
self.hash_map.capacity()
}
#[inline]
pub fn clear(&mut self) {
self.hash_map.clear();
self.table = [const { None }; TABLE_SIZE];
}
#[must_use]
#[inline]
pub fn contains_key(&self, key: K) -> bool {
if Self::use_lookup_table(key) {
self.table[key.key_index()].is_some()
} else {
self.hash_map.contains_key(&key)
}
}
#[must_use]
#[inline]
pub fn get(&self, key: K) -> Option<&V> {
if Self::use_lookup_table(key) {
self.table[key.key_index()].as_ref()
} else {
self.hash_map.get(&key)
}
}
#[must_use]
#[inline]
    pub fn get_key_value(&self, key: K) -> Option<(K, &V)> {
        self.get(key).map(|value| (key, value))
    }
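    /// Returns mutable references to the values for `N` keys at once, with each
    /// reference resolved against either the lookup table or the `HashMap`.
    ///
    /// # Panics
    ///
    /// Panics if any two keys are equal.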
#[allow(clippy::reversed_empty_ranges)]
pub fn get_disjoint_mut<const N: usize>(&mut self, ks: [K; N]) -> [Option<&'_ mut V>; N]
where
K: Key<TABLE_MIN_VALUE>,
{
let mut key_indices = [const { 0..0 }; N];
let references: [&K; N] = std::array::from_fn(|i| &ks[i]);
let mut result = self.hash_map.get_disjoint_mut(references);
for (i, &key) in ks.iter().enumerate() {
if Self::use_lookup_table(key) {
#[allow(clippy::range_plus_one)]
{
key_indices[i] = key.key_index()..key.key_index() + 1;
}
}
}
for (i, range) in self
.table
.get_disjoint_mut(key_indices)
.expect("keys should not overlap, or be out of bounds")
.into_iter()
.enumerate()
{
for v in range {
if v.is_some() {
result[i] = v.as_mut();
}
}
}
result
}
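    /// Returns mutable references to the values for `N` keys at once without checking
    /// that the keys are disjoint.
    ///
    /// # Safety
    ///
    /// Calling this method with overlapping (duplicate) keys is undefined behavior.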
#[allow(clippy::reversed_empty_ranges)]
pub unsafe fn get_disjoint_unchecked_mut<const N: usize>(
&mut self,
ks: [K; N],
) -> [Option<&'_ mut V>; N]
where
K: Key<TABLE_MIN_VALUE>,
{
let mut key_indices = [const { 0..0 }; N];
let references: [&K; N] = std::array::from_fn(|i| &ks[i]);
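        // SAFETY: the caller guarantees the keys are disjoint, so no two HashMap
        // lookups can alias.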
let mut result = unsafe { self.hash_map.get_disjoint_unchecked_mut(references) };
for (i, &key) in ks.iter().enumerate() {
if Self::use_lookup_table(key) {
#[allow(clippy::range_plus_one)]
{
key_indices[i] = key.key_index()..key.key_index() + 1;
}
}
}
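        // SAFETY: every non-empty range comes from a distinct in-range key, so the
        // table indices are in bounds and non-overlapping.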
for (i, range) in unsafe {
self.table
.get_disjoint_unchecked_mut(key_indices)
.into_iter()
.enumerate()
} {
for v in range {
if v.is_some() {
result[i] = v.as_mut();
}
}
}
result
}
#[must_use]
#[inline]
pub fn get_mut(&mut self, key: K) -> Option<&mut V> {
if Self::use_lookup_table(key) {
self.table[key.key_index()].as_mut()
} else {
self.hash_map.get_mut(&key)
}
}
#[must_use]
#[inline]
pub fn hasher(&self) -> &S {
self.hash_map.hasher()
}
#[inline]
pub fn insert(&mut self, key: K, value: V) -> Option<V> {
if Self::use_lookup_table(key) {
self.table[key.key_index()].replace(value)
} else {
self.hash_map.insert(key, value)
}
}
#[must_use]
#[inline]
pub fn is_empty(&self) -> bool {
self.hash_map.is_empty() && !self.table.iter().any(Option::is_some)
}
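    /// Returns the number of stored entries. Note that this scans the fixed-size
    /// table, so it costs `O(TABLE_SIZE)` plus the `HashMap` length rather than `O(1)`.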
#[must_use]
#[inline]
pub fn len(&self) -> usize {
self.hash_map.len() + self.table.iter().filter(|&x| x.is_some()).count()
}
#[inline]
pub fn remove(&mut self, key: K) -> Option<V> {
if Self::use_lookup_table(key) {
self.table[key.key_index()].take()
} else {
self.hash_map.remove(&key)
}
}
#[inline]
pub fn remove_entry(&mut self, key: K) -> Option<(K, V)> {
if Self::use_lookup_table(key) {
            self.table[key.key_index()].take().map(|value| (key, value))
} else {
self.hash_map.remove_entry(&key)
}
}
#[inline]
pub fn reserve(&mut self, additional: usize) {
self.hash_map.reserve(additional);
}
#[inline]
pub fn shrink_to(&mut self, min_capacity: usize) {
self.hash_map.shrink_to(min_capacity);
}
#[inline]
pub fn shrink_to_fit(&mut self) {
self.hash_map.shrink_to_fit();
}
#[inline]
pub fn try_reserve(
&mut self,
additional: usize,
) -> Result<(), std::collections::TryReserveError> {
self.hash_map.try_reserve(additional)
}
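    /// Applies `f` to the value stored at `key` if present; otherwise inserts
    /// `default`.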
#[inline]
pub fn modify_or_insert<F>(&mut self, key: K, f: F, default: V)
where
F: FnOnce(&mut V),
{
if Self::use_lookup_table(key) {
let value = &mut self.table[key.key_index()];
match value {
Some(x) => f(x),
None => *value = Some(default),
}
} else {
self.hash_map.entry(key).and_modify(f).or_insert(default);
}
}
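    /// Increments the counter stored at `key` by one, starting from zero when the key
    /// is absent.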
#[inline]
pub fn bump_int(&mut self, key: K)
where
V: AddAssign<V> + num_traits::One + num_traits::Zero,
{
if Self::use_lookup_table(key) {
*self.table[key.key_index()].get_or_insert(V::zero()) += V::one();
} else {
self.hash_map
.entry(key)
.and_modify(|counter| *counter += V::one())
.or_insert(V::one());
}
}
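    /// Increments the `NonZero` counter stored at `key` by one. In the lookup table
    /// this writes through the `Option` niche (`None` is the all-zeros bit pattern),
    /// so bumping a vacant slot yields a count of one without an extra branch.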
#[inline]
pub fn bump_non_zero(&mut self, key: K)
where
V: NonZeroInt + bytemuck::PodInOption,
<V as NonZeroInt>::UnderlyingType: AddAssign<V::UnderlyingType>,
<V as NonZeroInt>::UnderlyingType: bytemuck::Pod + PrimInt,
{
use num_traits::One;
if Self::use_lookup_table(key) {
*bytemuck::cast_mut::<Option<V>, V::UnderlyingType>(
&mut self.table[key.key_index()],
) += V::UnderlyingType::one();
} else {
self.hash_map
.entry(key)
.and_modify(|counter| {
*counter = counter
.checked_add(V::UnderlyingType::one())
.expect("Addition should not overflow");
})
.or_insert(V::ONE);
}
}
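    /// Gets the entry for `key` for in-place manipulation, backed by either the
    /// lookup table or the `HashMap`.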
#[must_use]
#[inline]
pub fn entry(&mut self, key: K) -> Entry<'_, K, V> {
if Self::use_lookup_table(key) {
let value: &mut Option<V> = &mut self.table[key.key_index()];
match value {
Some(_) => {
Entry::<K, V>::Occupied(OccupiedEntry::Vec(OccupiedVecEntry { value, key }))
}
None => Entry::<K, V>::Vacant(VacantEntry::Vec(VacantVecEntry { value, key })),
}
} else {
match self.hash_map.entry(key) {
std::collections::hash_map::Entry::Occupied(base) => {
Entry::<K, V>::Occupied(OccupiedEntry::HashMap(OccupiedHashMapEntry { base }))
}
std::collections::hash_map::Entry::Vacant(base) => {
Entry::<K, V>::Vacant(VacantEntry::HashMap(VacantHashMapEntry { base }))
}
}
}
}
}
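/// Helper trait unifying the standard `NonZero*` integer types so that
/// `bump_non_zero` can manipulate them through their underlying primitive type.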
#[sealed]
#[doc(hidden)]
pub trait NonZeroInt {
type UnderlyingType;
const ONE: Self;
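    /// Adds `other` to the underlying integer through the `Option` niche (`None` is
    /// zero), so a sum that lands exactly on zero maps back to `None`; the addition
    /// itself uses the primitive `+=`.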
#[must_use]
#[inline]
fn checked_add(self, other: Self::UnderlyingType) -> Option<Self>
where
Self::UnderlyingType: bytemuck::Pod,
Self::UnderlyingType: AddAssign<Self::UnderlyingType>,
Self: bytemuck::PodInOption,
Self::UnderlyingType: PrimInt,
{
let mut result = Some(self);
let x = bytemuck::cast_mut::<Option<Self>, Self::UnderlyingType>(&mut result);
*x += other;
result
}
}
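// Implements `NonZeroInt` for all standard `NonZero*` integer types.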
macro_rules! impl_non_zero {
(non_zero=$non_zero_type:ty, underlying=$underlying_type:ty) => {
#[sealed]
impl NonZeroInt for $non_zero_type {
const ONE: $non_zero_type = const { NonZero::new(1).expect("1 is not 0") };
type UnderlyingType = $underlying_type;
}
};
}
impl_non_zero!(non_zero = std::num::NonZeroI8, underlying = i8);
impl_non_zero!(non_zero = std::num::NonZeroI16, underlying = i16);
impl_non_zero!(non_zero = std::num::NonZeroI32, underlying = i32);
impl_non_zero!(non_zero = std::num::NonZeroI64, underlying = i64);
impl_non_zero!(non_zero = std::num::NonZeroI128, underlying = i128);
impl_non_zero!(non_zero = std::num::NonZeroIsize, underlying = isize);
impl_non_zero!(non_zero = std::num::NonZeroU8, underlying = u8);
impl_non_zero!(non_zero = std::num::NonZeroU16, underlying = u16);
impl_non_zero!(non_zero = std::num::NonZeroU32, underlying = u32);
impl_non_zero!(non_zero = std::num::NonZeroU64, underlying = u64);
impl_non_zero!(non_zero = std::num::NonZeroU128, underlying = u128);
impl_non_zero!(non_zero = std::num::NonZeroUsize, underlying = usize);
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn use_lookup_table() {
type DefaultRange = MuleMap<i32, i32, fnv_rs::FnvBuildHasher, 0, { u8::MAX as usize + 1 }>;
type NegRange = MuleMap<i32, i32, fnv_rs::FnvBuildHasher, -100, { u8::MAX as usize + 1 }>;
type ZeroSize = MuleMap<i32, i32, fnv_rs::FnvBuildHasher, 0, { u8::MIN as usize }>;
assert!(DefaultRange::use_lookup_table(0));
assert!(!DefaultRange::use_lookup_table(-1));
assert!(DefaultRange::use_lookup_table(255));
assert!(!DefaultRange::use_lookup_table(256));
assert!(NegRange::use_lookup_table(-100));
assert!(!NegRange::use_lookup_table(-101));
assert!(NegRange::use_lookup_table(155));
assert!(!NegRange::use_lookup_table(156));
assert!(!ZeroSize::use_lookup_table(0));
assert!(!ZeroSize::use_lookup_table(1));
}
#[test]
fn check_table_range() {
type BadRange = MuleMap<u8, i32, fnv_rs::FnvBuildHasher, 0, { u8::MAX as usize + 2 }>;
type BadRange2 = MuleMap<u8, i32, fnv_rs::FnvBuildHasher, -1, { u8::MAX as usize }>;
type OkRange = MuleMap<u8, i32, fnv_rs::FnvBuildHasher, 0, { u8::MAX as usize + 1 }>;
type OkRange2 = MuleMap<u8, i32, fnv_rs::FnvBuildHasher, 1, { u8::MAX as usize }>;
type ZeroSize = MuleMap<u8, i32, fnv_rs::FnvBuildHasher, 0, { u8::MIN as usize }>;
assert!(test_utils::catch_unwind_silent(BadRange::new).is_err());
assert!(test_utils::catch_unwind_silent(BadRange2::new).is_err());
assert!(test_utils::catch_unwind_silent(OkRange::new).is_ok());
assert!(test_utils::catch_unwind_silent(OkRange2::new).is_ok());
assert!(test_utils::catch_unwind_silent(ZeroSize::new).is_ok());
}
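    // A minimal usage sketch (not part of the original test suite): exercises
    // insert/get/modify_or_insert/bump_int across both the lookup-table range and the
    // HashMap fallback, reusing the FnvBuildHasher from the tests above.
    #[test]
    fn basic_usage_sketch() {
        type Map = MuleMap<i32, u32, fnv_rs::FnvBuildHasher>;
        let mut map = Map::new();

        // Key 5 falls inside the default table range [0, 255]; key 1_000 does not.
        map.insert(5, 10);
        map.insert(1_000, 20);
        assert_eq!(map.get(5), Some(&10));
        assert_eq!(map.get(1_000), Some(&20));
        assert_eq!(map[5], 10);

        // `modify_or_insert` updates an existing value or inserts the default.
        map.modify_or_insert(5, |v| *v += 1, 0);
        map.modify_or_insert(6, |v| *v += 1, 0);
        assert_eq!(map.get(5), Some(&11));
        assert_eq!(map.get(6), Some(&0));

        // `bump_int` counts occurrences, starting from zero for absent keys.
        map.bump_int(7);
        map.bump_int(7);
        assert_eq!(map.get(7), Some(&2));

        assert_eq!(map.len(), 4);
        map.remove(5);
        assert!(!map.contains_key(5));
    }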
}