pub mod algorithms;
pub mod consistent;
pub mod weighted;
use super::Backend;
use std::collections::{BTreeSet, HashSet};
use std::sync::Arc;
use weighted::Weighted;
/// The interface to implement a backend selection mechanism over a set of [Backend]s.
pub trait BackendSelection: Sized {
    /// The iterator type produced by [Self::iter].
    type Iter;
    /// Extra configuration consumed when building the selector.
    type Config: Send + Sync;

    /// Build a selector from `backends` with the given `config`.
    ///
    /// The default implementation ignores the config and defers to [Self::build].
    fn build_with_config(backends: &BTreeSet<Backend>, _config: &Self::Config) -> Self {
        Self::build(backends)
    }

    /// Build a selector from the given set of backends.
    fn build(backends: &BTreeSet<Backend>) -> Self;

    /// Return an iterator of candidate backends for the given `key`.
    ///
    /// Takes `self: &Arc<Self>` — presumably so implementors can clone the Arc
    /// into the returned iterator; confirm against concrete implementations.
    fn iter(self: &Arc<Self>, key: &[u8]) -> Self::Iter
    where
        Self::Iter: BackendIter;
}
/// An iterator over candidate [Backend]s produced by a [BackendSelection].
pub trait BackendIter {
    /// Return the next candidate backend, or `None` when exhausted.
    fn next(&mut self) -> Option<&Backend>;
}
/// The interface for a selection algorithm that maps a key to a `u64` value.
pub trait SelectionAlgorithm {
    /// Create a new instance of the algorithm.
    fn new() -> Self;
    /// Produce a `u64` for the given `key`; how (hashing, rotation, randomness)
    /// is up to the implementation.
    fn next(&self, key: &[u8]) -> u64;
}
/// Weighted selection driven by FNV hashing of the key.
pub type FNVHash = Weighted<fnv::FnvHasher>;
/// Misspelled alias of [FNVHash] (same underlying type); hidden from docs —
/// presumably kept for backward compatibility with existing users.
#[doc(hidden)]
pub type FVNHash = Weighted<fnv::FnvHasher>;
/// Weighted random selection.
pub type Random = Weighted<algorithms::Random>;
/// Weighted round-robin selection.
pub type RoundRobin = Weighted<algorithms::RoundRobin>;
/// Ketama consistent-hashing selection.
pub type Consistent = consistent::KetamaHashing;
/// Wraps a [BackendIter] so that each distinct backend is yielded at most once,
/// giving up after a bounded number of items have been pulled from the inner
/// iterator.
pub struct UniqueIterator<I>
where
    I: BackendIter,
{
    // The underlying (possibly repeating) backend iterator.
    iter: I,
    // Hash keys of backends already yielded.
    seen: HashSet<u64>,
    // Upper bound on items examined from `iter`, not on items yielded.
    max_iterations: usize,
    // Number of items examined from `iter` so far.
    steps: usize,
}
impl<I> UniqueIterator<I>
where
    I: BackendIter,
{
    /// Wrap `iter` so that duplicate backends are filtered out, giving up
    /// after `max_iterations` items have been examined.
    pub fn new(iter: I, max_iterations: usize) -> Self {
        Self {
            iter,
            max_iterations,
            seen: HashSet::new(),
            steps: 0,
        }
    }

    /// Return the next not-yet-seen backend, cloned out of the inner iterator,
    /// or `None` once the inner iterator is exhausted or the iteration budget
    /// (`max_iterations` examined items) is spent.
    pub fn get_next(&mut self) -> Option<Backend> {
        while let Some(item) = self.iter.next() {
            if self.steps >= self.max_iterations {
                return None;
            }
            self.steps += 1;
            // `HashSet::insert` returns true only when the key was newly
            // inserted, so one hash lookup both tests and records membership
            // (the previous `contains` + `insert` pair did two lookups).
            if self.seen.insert(item.hash_key()) {
                return Some(item.clone());
            }
        }
        None
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// A fixed-sequence [BackendIter] used to drive [UniqueIterator] in tests.
    struct TestIter {
        seq: Vec<Backend>,
        idx: usize,
    }

    impl TestIter {
        fn new(input: &[&Backend]) -> Self {
            let seq: Vec<Backend> = input.iter().map(|b| (*b).clone()).collect();
            Self { seq, idx: 0 }
        }
    }

    impl BackendIter for TestIter {
        fn next(&mut self) -> Option<&Backend> {
            let current = self.seq.get(self.idx);
            self.idx += 1;
            current
        }
    }

    #[test]
    fn unique_iter_max_iterations_is_correct() {
        let b1 = Backend::new("1.1.1.1:80").unwrap();
        let b2 = Backend::new("1.0.0.1:80").unwrap();
        let b3 = Backend::new("1.0.0.255:80").unwrap();
        let items = [&b1, &b2, &b3];

        // A budget equal to the item count yields every backend exactly once.
        let mut all = UniqueIterator::new(TestIter::new(&items), 3);
        assert_eq!(all.get_next(), Some(b1.clone()));
        assert_eq!(all.get_next(), Some(b2.clone()));
        assert_eq!(all.get_next(), Some(b3.clone()));
        assert_eq!(all.get_next(), None);

        // A budget of one stops after a single examined item.
        let mut stop = UniqueIterator::new(TestIter::new(&items), 1);
        assert_eq!(stop.get_next(), Some(b1));
        assert_eq!(stop.get_next(), None);
    }

    #[test]
    fn unique_iter_duplicate_items_are_filtered() {
        let b1 = Backend::new("1.1.1.1:80").unwrap();
        let b2 = Backend::new("1.0.0.1:80").unwrap();
        let b3 = Backend::new("1.0.0.255:80").unwrap();
        let items = [&b1, &b1, &b2, &b2, &b2, &b3];

        // Repeats in the input sequence must not be yielded twice.
        let mut uniq = UniqueIterator::new(TestIter::new(&items), 10);
        assert_eq!(uniq.get_next(), Some(b1));
        assert_eq!(uniq.get_next(), Some(b2));
        assert_eq!(uniq.get_next(), Some(b3));
    }
}