#![deny(missing_docs, missing_debug_implementations)]
pub mod capacity;
pub mod entry;
pub mod indices;
pub mod iter;
pub mod replacement;
pub use capacity::*;
pub use entry::*;
pub use indices::*;
pub use iter::*;
pub use replacement::*;
use std::borrow::Borrow;
use std::cmp::max;
use std::marker::PhantomData;
use std::mem;
/// A constant, compile-time cache capacity.
///
/// Implementors supply the fixed number of slots that an [`AssociativeCache`]
/// parameterized over them will hold. `CAPACITY` must be greater than zero;
/// `AssociativeCache::with_replacement_policy` asserts this.
pub trait Capacity {
    /// The total number of entry slots in the cache.
    const CAPACITY: usize;
}
/// Maps a cache key to the set of slot indices (ways) where its entry may
/// live.
///
/// Implementations must be deterministic — the same key must always yield the
/// same indices — and every yielded index must be less than `C::CAPACITY`
/// (the cache asserts both properties at its call sites).
pub trait Indices<K, C>
where
    K: ?Sized,
    C: Capacity,
{
    /// The iterator over a key's candidate slot indices.
    type Indices: ExactSizeIterator<Item = usize>;

    /// Get the candidate slot indices for `key`.
    fn indices(key: &K) -> Self::Indices;
}
/// A cache replacement policy: picks which occupied slot to evict when every
/// candidate slot for a new key is already taken.
pub trait Replacement<V, C: Capacity> {
    /// Choose the victim among the given `(index, value)` candidates.
    ///
    /// Must return one of the candidate indices; the cache debug-asserts
    /// this after the call.
    fn choose_for_replacement<'a>(
        &mut self,
        candidates: impl ExactSizeIterator<Item = (usize, &'a V)>,
    ) -> usize
    where
        V: 'a;

    /// Hook invoked on a cache hit (e.g. so an LRU policy can refresh
    /// recency state via interior mutability — note the `&self` receiver).
    /// The default implementation does nothing.
    fn on_hit(&self, value: &V) {
        let _ = value;
    }

    /// Hook for observing insertions. The default implementation does
    /// nothing.
    ///
    /// NOTE(review): `AssociativeCache::insert` in this file never calls this
    /// hook — confirm it is invoked by the `entry` API, otherwise it is dead.
    fn on_insert(&self, value: &V) {
        let _ = value;
    }
}
/// A fixed-capacity, N-way associative cache mapping `K` keys to `V` values.
///
/// Type parameters:
/// * `C` — the total capacity ([`Capacity`]).
/// * `I` — maps a key to the slots it may occupy ([`Indices`]).
/// * `R` — chooses which entry to evict on conflict ([`Replacement`]).
#[derive(Debug)]
pub struct AssociativeCache<K, V, C, I, R>
where
    C: Capacity,
    R: Replacement<V, C>,
{
    // Flat slot array of length exactly `C::CAPACITY`; `None` is an empty
    // slot.
    entries: Vec<Option<(K, V)>>,
    // Count of `Some` slots; invariant: `len <= C::CAPACITY`.
    len: usize,
    replacement_policy: R,
    // Zero-sized markers tying the otherwise-unused `C` and `I` type
    // parameters to the struct.
    _capacity: PhantomData<C>,
    _indices: PhantomData<I>,
}
impl<K, V, C, I, R> Default for AssociativeCache<K, V, C, I, R>
where
    C: Capacity,
    R: Default + Replacement<V, C>,
{
    /// Construct an empty cache whose policy is `R`'s default value.
    fn default() -> Self {
        Self::with_replacement_policy(Default::default())
    }
}
impl<K, V, C, I, R> AssociativeCache<K, V, C, I, R>
where
    C: Capacity,
    R: Replacement<V, C>,
{
    /// Construct an empty cache with the given replacement policy.
    ///
    /// # Panics
    ///
    /// Panics if `C::CAPACITY` is zero.
    pub fn with_replacement_policy(replacement_policy: R) -> Self {
        assert!(C::CAPACITY > 0);
        let mut entries = Vec::with_capacity(C::CAPACITY);
        for _ in 0..C::CAPACITY {
            entries.push(None);
        }
        AssociativeCache {
            entries,
            len: 0,
            replacement_policy,
            _capacity: PhantomData,
            _indices: PhantomData,
        }
    }

    /// Get a shared reference to this cache's replacement policy.
    #[inline]
    pub fn replacement_policy(&self) -> &R {
        &self.replacement_policy
    }

    /// Get an exclusive reference to this cache's replacement policy.
    #[inline]
    pub fn replacement_policy_mut(&mut self) -> &mut R {
        &mut self.replacement_policy
    }

    /// The cache's fixed capacity, i.e. `C::CAPACITY`.
    #[inline]
    pub fn capacity(&self) -> usize {
        // `entries` is sized once at construction and never grows or
        // shrinks; this assert checks that invariant.
        assert_eq!(self.entries.len(), C::CAPACITY);
        C::CAPACITY
    }

    /// The number of entries currently held in the cache.
    #[inline]
    pub fn len(&self) -> usize {
        debug_assert!(self.len <= self.capacity());
        self.len
    }

    /// Is the cache empty?
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Insert `key`/`value` into the cache, returning the entry that was
    /// evicted to make room, if any.
    ///
    /// Candidate slots for `key` are ranked as follows:
    /// 1. a slot already holding an equal key is overwritten and its old
    ///    `(key, value)` pair returned;
    /// 2. otherwise an empty slot is filled and `None` is returned;
    /// 3. otherwise every candidate is occupied by a different key, and the
    ///    replacement policy chooses the victim, whose pair is returned.
    ///
    /// NOTE(review): `Replacement::on_insert` is not invoked on any of these
    /// paths — confirm whether only the `entry` API is meant to call it.
    pub fn insert(&mut self, key: K, value: V) -> Option<(K, V)>
    where
        I: Indices<K, C>,
        K: PartialEq,
    {
        let capacity = self.capacity();

        // Ranking of non-eviction outcomes. The derived `Ord` orders by
        // declaration: `New(_) < Replace(_)`, and `None < Some(_)`, so
        // `max` prefers replacing an equal key over filling an empty slot,
        // and either over falling through to eviction. Both ordering facts
        // are sanity-checked by the asserts immediately below.
        #[derive(Ord, PartialOrd, Eq, PartialEq)]
        enum InsertionCandidate {
            New(usize),
            Replace(usize),
        }
        assert!(None < Some(InsertionCandidate::New(0)));
        assert!(InsertionCandidate::New(0) < InsertionCandidate::Replace(0));

        let mut best = None;
        for index in I::indices(&key) {
            assert!(
                index < capacity,
                "`Indices::indices` must always yield indices within the capacity"
            );
            match self.entries[index] {
                None => {
                    best = max(best, Some(InsertionCandidate::New(index)));
                }
                Some((ref k, _)) if *k == key => {
                    best = max(best, Some(InsertionCandidate::Replace(index)));
                }
                _ => continue,
            }
        }

        match best {
            None => {}
            Some(InsertionCandidate::New(index)) => {
                // Filled a previously-empty slot: the cache grows by one.
                self.entries[index] = Some((key, value));
                self.len += 1;
                return None;
            }
            Some(InsertionCandidate::Replace(index)) => {
                // Same key already present: swap the pair out; `len` is
                // unchanged.
                return mem::replace(&mut self.entries[index], Some((key, value)));
            }
        }

        // Every candidate slot holds a different key: consult the policy.
        // Destructure `self` so `replacement_policy` can be borrowed
        // mutably while `entries` is simultaneously borrowed shared by the
        // candidates iterator.
        let AssociativeCache {
            ref entries,
            ref mut replacement_policy,
            ..
        } = self;
        let candidates = I::indices(&key).map(|index| {
            assert!(
                index < capacity,
                "`I::indices` must always yield indices within the capacity"
            );
            // Safe to `expect`: the scan above saw every candidate slot
            // occupied, and `Indices` must be deterministic.
            let value = &entries[index]
                .as_ref()
                .expect(
                    "`Indices::indices` must always yield the same indices for the same entries",
                )
                .1;
            (index, value)
        });
        let index = replacement_policy.choose_for_replacement(candidates);
        debug_assert!(
            I::indices(&key).any(|i| i == index),
            "`ReplacementPolicy::choose_for_replacement` must return a candidate index"
        );
        assert!(index < capacity);
        assert!(self.entries[index].is_some());
        // Evict the victim; the occupied count is unchanged.
        mem::replace(&mut self.entries[index], Some((key, value)))
    }

    /// Look up `key`, returning a shared reference to its value on a hit.
    ///
    /// Hits are reported to the replacement policy via `on_hit`. The key may
    /// be any borrowed form `Q` of `K`, provided `I` can index `Q` too.
    #[inline]
    pub fn get<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        I: Indices<Q, C>,
        Q: ?Sized + PartialEq,
    {
        assert_eq!(self.entries.len(), C::CAPACITY);
        for index in I::indices(key) {
            assert!(
                index < self.entries.len(),
                "`Indices::indices` must always yield indices within the capacity"
            );
            match &self.entries[index] {
                Some((k, v)) if k.borrow() == key => {
                    self.replacement_policy.on_hit(v);
                    return Some(v);
                }
                _ => continue,
            }
        }
        None
    }

    /// Look up `key`, returning an exclusive reference to its value on a hit.
    ///
    /// Hits are reported to the replacement policy via `on_hit`.
    #[inline]
    pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        I: Indices<Q, C>,
        Q: ?Sized + PartialEq,
    {
        assert_eq!(self.entries.len(), C::CAPACITY);
        for index in I::indices(key) {
            assert!(
                index < C::CAPACITY,
                "`Indices::indices` must always yield indices within the capacity"
            );
            match &self.entries[index] {
                Some((k, _)) if k.borrow() == key => {
                    // Re-index and `unwrap` rather than binding `v` mutably
                    // in the match: returning a `&mut` borrow out of a loop
                    // iteration trips the borrow checker, so the slot is
                    // looked up a second time. The `unwrap` cannot fail —
                    // we just matched `Some` at this index.
                    let v = &mut self.entries[index].as_mut().unwrap().1;
                    self.replacement_policy.on_hit(v);
                    return Some(v);
                }
                _ => continue,
            }
        }
        None
    }

    /// Remove the entry for `key`, returning its value if it was present.
    #[inline]
    pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        I: Indices<Q, C>,
        Q: ?Sized + PartialEq,
    {
        assert_eq!(self.entries.len(), C::CAPACITY);
        for index in I::indices(key) {
            assert!(
                index < self.entries.len(),
                "`Indices::indices` must always yield indices within the capacity"
            );
            match &self.entries[index] {
                Some((k, _)) if k.borrow() == key => {
                    // Matched `Some`, so `take()` will yield a value and the
                    // decrement is balanced.
                    self.len -= 1;
                    return self.entries[index].take().map(|(_, v)| v);
                }
                _ => continue,
            }
        }
        None
    }

    /// Keep only the entries for which `f(&key, &mut value)` returns `true`,
    /// removing the rest.
    pub fn retain(&mut self, mut f: impl FnMut(&K, &mut V) -> bool) {
        for e in &mut self.entries {
            if let Some((k, v)) = e {
                if !f(k, v) {
                    *e = None;
                    self.len -= 1;
                }
            }
        }
    }

    /// Get the in-place entry for `key`, for occupied/vacant-style APIs.
    ///
    /// Resolution order mirrors `insert`: an occupied slot with an equal key
    /// wins; otherwise the last empty candidate slot seen; otherwise the
    /// replacement policy chooses an occupied slot to be replaced.
    ///
    /// NOTE(review): the vacant/replace paths do not touch `self.len` here —
    /// presumably the returned `Entry` adjusts it on actual insertion;
    /// confirm against the `entry` module.
    #[inline]
    pub fn entry<Q>(&mut self, key: &Q) -> Entry<'_, K, V, C, I, R>
    where
        K: Borrow<Q>,
        I: Indices<Q, C>,
        Q: ?Sized + PartialEq,
    {
        let capacity = self.capacity();
        let mut empty_index = None;
        for index in I::indices(key) {
            assert!(
                index < capacity,
                "`Indices::indices` must always yield indices within the capacity"
            );
            match &mut self.entries[index] {
                None => {
                    empty_index = Some(index);
                }
                Some((k, v)) if (*k).borrow() == key => {
                    self.replacement_policy.on_hit(v);
                    return Entry {
                        cache: self,
                        kind: EntryKind::Occupied,
                        index,
                    };
                }
                _ => continue,
            }
        }
        if let Some(index) = empty_index {
            return Entry {
                cache: self,
                kind: EntryKind::Vacant,
                index,
            };
        }

        // All candidates occupied by other keys: pick a victim slot, same
        // split-borrow dance as in `insert`.
        let AssociativeCache {
            ref entries,
            ref mut replacement_policy,
            ..
        } = self;
        let candidates = I::indices(key).map(|index| {
            assert!(
                index < capacity,
                "`I::indices` must always yield indices within the capacity"
            );
            let value = &entries[index]
                .as_ref()
                .expect(
                    "`Indices::indices` must always yield the same indices for the same entries",
                )
                .1;
            (index, value)
        });
        let index = replacement_policy.choose_for_replacement(candidates);
        Entry {
            cache: self,
            kind: EntryKind::Replace,
            index,
        }
    }

    /// Iterate over shared references to this cache's keys and values.
    #[inline]
    pub fn iter(&self) -> Iter<'_, K, V> {
        <&Self as IntoIterator>::into_iter(self)
    }

    /// Iterate over shared key references and exclusive value references.
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
        <&mut Self as IntoIterator>::into_iter(self)
    }

    /// Consume the cache, iterating over its owned keys and values.
    // Inherent forwarding method kept for call-site ergonomics; clippy's
    // lint is silenced because `IntoIterator` *is* also implemented.
    #[inline]
    #[allow(clippy::should_implement_trait)]
    pub fn into_iter(self) -> IntoIter<K, V> {
        <Self as IntoIterator>::into_iter(self)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // The policy accessors return (mutable) references to the policy the
    // cache was constructed with.
    #[test]
    fn replacement_policy() {
        let mut policy = RoundRobinReplacement::default();
        let mut cache = AssociativeCache::<usize, usize, Capacity4, HashDirectMapped,_>::with_replacement_policy(policy.clone());
        assert_eq!(cache.replacement_policy(), &policy);
        assert_eq!(cache.replacement_policy_mut(), &mut policy);
    }

    // `capacity()` reflects the `Capacity` type parameter exactly.
    #[test]
    fn capacity() {
        let cache = AssociativeCache::<
            usize,
            usize,
            Capacity2,
            HashDirectMapped,
            RoundRobinReplacement,
        >::default();
        assert_eq!(cache.capacity(), 2);
        let cache = AssociativeCache::<
            usize,
            usize,
            Capacity4,
            HashDirectMapped,
            RoundRobinReplacement,
        >::default();
        assert_eq!(cache.capacity(), 4);
        let cache = AssociativeCache::<
            usize,
            usize,
            Capacity8,
            HashDirectMapped,
            RoundRobinReplacement,
        >::default();
        assert_eq!(cache.capacity(), 8);
    }

    // `len` grows on fresh inserts and stays put when an existing key is
    // overwritten (the old pair comes back from `insert`).
    #[test]
    fn len() {
        let mut cache = AssociativeCache::<
            usize,
            usize,
            Capacity512,
            HashDirectMapped,
            RoundRobinReplacement,
        >::default();
        assert_eq!(cache.insert(1, 2), None);
        assert_eq!(cache.len(), 1);
        assert_eq!(cache.insert(3, 4), None);
        assert_eq!(cache.len(), 2);
        assert_eq!(cache.insert(5, 6), None);
        assert_eq!(cache.len(), 3);
        cache.insert(1, 7).unwrap();
        assert_eq!(cache.len(), 3);
        cache.insert(3, 8).unwrap();
        assert_eq!(cache.len(), 3);
        cache.insert(5, 9).unwrap();
        assert_eq!(cache.len(), 3);
    }

    // Two-way cache with capacity 4: once both ways of a set are full,
    // further inserts into that set evict per the round-robin policy. The
    // expected evictees pin down the policy/index interaction.
    #[test]
    fn insert() {
        let mut cache = AssociativeCache::<
            *mut u8,
            usize,
            Capacity4,
            PointerTwoWay,
            RoundRobinReplacement,
        >::default();
        assert_eq!(cache.insert(0 as *mut u8, 0), None);
        assert_eq!(cache.insert(1 as *mut u8, 1), None);
        assert_eq!(cache.insert(2 as *mut u8, 2), None);
        assert_eq!(cache.insert(3 as *mut u8, 3), None);
        assert_eq!(cache.insert(4 as *mut u8, 4), Some((2 as *mut u8, 2)));
        assert_eq!(cache.insert(6 as *mut u8, 6), Some((0 as *mut u8, 0)));
        assert_eq!(cache.insert(5 as *mut u8, 5), Some((3 as *mut u8, 3)));
        assert_eq!(cache.insert(7 as *mut u8, 7), Some((1 as *mut u8, 1)));
    }

    // Hits return the stored value; misses (including after an eviction)
    // return `None`.
    #[test]
    fn get() {
        let mut cache = AssociativeCache::<
            *mut u8,
            usize,
            Capacity4,
            PointerTwoWay,
            RoundRobinReplacement,
        >::default();
        cache.insert(0 as *mut _, 0);
        assert_eq!(cache.get(&(0 as *mut _)), Some(&0));
        assert_eq!(cache.get(&(1 as *mut _)), None);
        cache.insert(4 as *mut _, 4);
        assert_eq!(cache.get(&(0 as *mut _)), Some(&0));
        assert_eq!(cache.get(&(4 as *mut _)), Some(&4));
        assert_eq!(cache.get(&(1 as *mut _)), None);
        // 8 maps to the same set as 0 and 4; 4 gets evicted.
        assert_eq!(cache.insert(8 as *mut _, 8), Some((4 as *mut _, 4)));
        assert_eq!(cache.get(&(0 as *mut _)), Some(&0));
        assert_eq!(cache.get(&(8 as *mut _)), Some(&8));
        assert_eq!(cache.get(&(1 as *mut _)), None);
    }

    // Same scenario as `get`, through the mutable accessor.
    #[test]
    fn get_mut() {
        let mut cache = AssociativeCache::<
            *mut u8,
            usize,
            Capacity4,
            PointerTwoWay,
            RoundRobinReplacement,
        >::default();
        cache.insert(0 as *mut _, 0);
        assert_eq!(cache.get_mut(&(0 as *mut _)), Some(&mut 0));
        assert_eq!(cache.get_mut(&(1 as *mut _)), None);
        cache.insert(4 as *mut _, 4);
        assert_eq!(cache.get_mut(&(0 as *mut _)), Some(&mut 0));
        assert_eq!(cache.get_mut(&(4 as *mut _)), Some(&mut 4));
        assert_eq!(cache.get_mut(&(1 as *mut _)), None);
        assert_eq!(cache.insert(8 as *mut _, 8), Some((4 as *mut _, 4)));
        assert_eq!(cache.get_mut(&(0 as *mut _)), Some(&mut 0));
        assert_eq!(cache.get_mut(&(8 as *mut _)), Some(&mut 8));
        assert_eq!(cache.get_mut(&(1 as *mut _)), None);
    }

    // `remove` yields the value once and `None` on the second attempt.
    #[test]
    fn remove() {
        let mut cache = AssociativeCache::<
            *mut u8,
            usize,
            Capacity4,
            PointerTwoWay,
            RoundRobinReplacement,
        >::default();
        cache.insert(0 as *mut _, 0);
        cache.insert(4 as *mut _, 4);
        assert_eq!(cache.len(), 2);
        assert_eq!(cache.remove(&(4 as *mut _)), Some(4));
        assert_eq!(cache.remove(&(4 as *mut _)), None);
        assert_eq!(cache.remove(&(0 as *mut _)), Some(0));
        assert_eq!(cache.remove(&(0 as *mut _)), None);
    }

    // `retain` drops entries failing the predicate and updates `len`.
    #[test]
    fn retain() {
        let mut cache = AssociativeCache::<
            *mut u8,
            usize,
            Capacity4,
            PointerTwoWay,
            RoundRobinReplacement,
        >::default();
        cache.insert(0 as *mut _, 0);
        cache.insert(1 as *mut _, 1);
        cache.insert(2 as *mut _, 2);
        cache.insert(3 as *mut _, 3);
        assert_eq!(cache.len(), 4);
        cache.retain(|_, v| *v % 2 == 0);
        assert_eq!(cache.len(), 2);
        assert_eq!(cache.get(&(0 as *mut _)), Some(&0));
        assert_eq!(cache.get(&(1 as *mut _)), None);
        assert_eq!(cache.get(&(2 as *mut _)), Some(&2));
        assert_eq!(cache.get(&(3 as *mut _)), None);
    }

    // Exercises the vacant, occupied, and replace entry kinds on a
    // one-slot direct-mapped cache, including taking the about-to-be-
    // replaced pair out of the entry.
    #[test]
    fn entry() {
        let mut cache = AssociativeCache::<
            *mut u8,
            usize,
            Capacity1,
            PointerDirectMapped,
            RoundRobinReplacement,
        >::default();
        assert_eq!(
            cache
                .entry(&(0 as *mut _))
                .or_insert_with(|| 0 as *mut _, || 0),
            &mut 0
        );
        assert_eq!(cache.len(), 1);
        // Occupied entry: neither closure may run.
        assert_eq!(
            cache
                .entry(&(0 as *mut _))
                .or_insert_with(|| unreachable!(), || unreachable!()),
            &mut 0
        );
        assert_eq!(cache.len(), 1);
        let mut entry = cache.entry(&(1 as *mut _));
        assert_eq!(
            entry.take_entry_that_will_be_replaced(),
            Some((0 as *mut _, 0))
        );
        assert_eq!(entry.or_insert_with(|| 1 as *mut _, || 1), &mut 1);
        assert_eq!(cache.len(), 1);
    }

    // Shared iteration visits each live entry exactly once, in some order.
    #[test]
    fn iter() {
        let mut cache = AssociativeCache::<
            *mut u8,
            usize,
            Capacity4,
            PointerDirectMapped,
            RoundRobinReplacement,
        >::default();
        cache.insert(0 as *mut _, 0);
        cache.insert(1 as *mut _, 1);
        cache.insert(2 as *mut _, 2);
        cache.insert(3 as *mut _, 3);
        assert_eq!(cache.len(), 4);
        let mut seen = vec![false; 4];
        for (&k, &v) in &cache {
            assert!(!seen[v]);
            seen[v] = true;
            assert_eq!(k as usize, v);
        }
        assert!(seen.iter().all(|&b| b));
    }

    // Mutable iteration visits each entry once and persists in-place edits.
    #[test]
    fn iter_mut() {
        let mut cache = AssociativeCache::<
            *mut u8,
            usize,
            Capacity4,
            PointerDirectMapped,
            RoundRobinReplacement,
        >::default();
        cache.insert(0 as *mut _, 0);
        cache.insert(1 as *mut _, 1);
        cache.insert(2 as *mut _, 2);
        cache.insert(3 as *mut _, 3);
        assert_eq!(cache.len(), 4);
        let mut seen = vec![false; 4];
        for (&k, v) in &mut cache {
            assert!(!seen[*v]);
            seen[*v] = true;
            assert_eq!(k as usize, *v);
            *v += 1;
        }
        assert!(seen.iter().all(|&b| b));
        assert_eq!(cache.get(&(0 as *mut _)), Some(&1));
        assert_eq!(cache.get(&(1 as *mut _)), Some(&2));
        assert_eq!(cache.get(&(2 as *mut _)), Some(&3));
        assert_eq!(cache.get(&(3 as *mut _)), Some(&4));
    }

    // Consuming iteration yields each owned pair exactly once.
    #[test]
    fn into_iter() {
        let mut cache = AssociativeCache::<
            *mut u8,
            usize,
            Capacity4,
            PointerDirectMapped,
            RoundRobinReplacement,
        >::default();
        cache.insert(0 as *mut _, 0);
        cache.insert(1 as *mut _, 1);
        cache.insert(2 as *mut _, 2);
        cache.insert(3 as *mut _, 3);
        assert_eq!(cache.len(), 4);
        let mut seen = vec![false; 4];
        for (k, v) in cache {
            assert!(!seen[v]);
            seen[v] = true;
            assert_eq!(k as usize, v);
        }
        assert!(seen.iter().all(|&b| b));
    }
}