pingora_load_balancing/selection/mod.rs

//! Backend selection interfaces and algorithms.

pub mod algorithms;
pub mod consistent;
pub mod weighted;

use super::Backend;
use std::collections::{BTreeSet, HashSet};
use std::sync::Arc;
use weighted::Weighted;

/// [BackendSelection] is the interface to implement backend selection mechanisms.
pub trait BackendSelection {
    /// The [BackendIter] returned from `iter()` below.
    type Iter;
    /// Build the selector from the given set of backends.
    fn build(backends: &BTreeSet<Backend>) -> Self;
    /// Select backends for the given key.
    ///
    /// The first item yielded by the returned [BackendIter] is the first-choice
    /// backend; callers keep iterating when earlier choices cannot be used.
    fn iter(self: &Arc<Self>, key: &[u8]) -> Self::Iter
    where
        Self::Iter: BackendIter;
}

/// An iterator over candidate backends; like [Iterator], but it allows self referencing.
pub trait BackendIter {
    /// Return `Some(&Backend)` while there are backends left to try.
    fn next(&mut self) -> Option<&Backend>;
}

/// The interface to implement a selection algorithm.
///
/// Any [std::hash::Hasher] + [Default] type can serve as one (e.g. [fnv::FnvHasher]).
pub trait SelectionAlgorithm {
    /// Create a new instance of the algorithm.
    fn new() -> Self;
    /// Map the key to a `u64`; the caller takes the result modulo the number
    /// of backends to obtain a usable index.
    fn next(&self, key: &[u8]) -> u64;
}

/// FNV hashing on weighted backends.
pub type FNVHash = Weighted<fnv::FnvHasher>;

/// Alias of [FNVHash] kept (hidden) for backwards compatibility.
#[doc(hidden)]
pub type FVNHash = Weighted<fnv::FnvHasher>;
/// Random selection on weighted backends.
pub type Random = Weighted<algorithms::Random>;
/// Round-robin selection on weighted backends.
pub type RoundRobin = Weighted<algorithms::RoundRobin>;
/// Consistent (Ketama) hashing on weighted backends.
pub type Consistent = consistent::KetamaHashing;

/// An iterator that wraps another [BackendIter] and yields each backend at most once.
///
/// It also stops after `max_iterations` steps, in case the wrapped iterator never ends.
pub struct UniqueIterator<I>
where
    I: BackendIter,
{
    iter: I,
    seen: HashSet<u64>,
    max_iterations: usize,
    steps: usize,
}

impl<I> UniqueIterator<I>
where
    I: BackendIter,
{
    /// Wrap `iter`, capping the total number of items examined at `max_iterations`.
    pub fn new(iter: I, max_iterations: usize) -> Self {
        Self {
            iter,
            max_iterations,
            seen: HashSet::new(),
            steps: 0,
        }
    }

    /// Return the next not-yet-seen backend, or `None` once the wrapped iterator
    /// is exhausted or `max_iterations` items have been examined.
    pub fn get_next(&mut self) -> Option<Backend> {
        while let Some(item) = self.iter.next() {
            if self.steps >= self.max_iterations {
                return None;
            }
            self.steps += 1;

            let hash_key = item.hash_key();
            if !self.seen.contains(&hash_key) {
                self.seen.insert(hash_key);
                return Some(item.clone());
            }
        }

        None
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    // Yields a fixed sequence of backends, in order, for driving UniqueIterator.
    struct TestIter {
        seq: Vec<Backend>,
        idx: usize,
    }
    impl TestIter {
        fn new(input: &[&Backend]) -> Self {
            Self {
                seq: input.iter().cloned().cloned().collect(),
                idx: 0,
            }
        }
    }
    impl BackendIter for TestIter {
        fn next(&mut self) -> Option<&Backend> {
            let idx = self.idx;
            self.idx += 1;
            self.seq.get(idx)
        }
    }
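
    // A minimal sketch of the SelectionAlgorithm contract, using a hypothetical
    // key-independent strategy (`FirstAlways` is not part of this crate): the
    // algorithm maps a key to a raw u64, and the caller reduces it modulo the
    // backend count to obtain an index.
    #[test]
    fn selection_algorithm_contract_sketch() {
        struct FirstAlways;
        impl SelectionAlgorithm for FirstAlways {
            fn new() -> Self {
                FirstAlways
            }
            fn next(&self, _key: &[u8]) -> u64 {
                // Ignore the key; always point at the first slot.
                0
            }
        }

        let algo = FirstAlways::new();
        let backend_count = 3usize;
        // The trait returns a raw u64; taking it modulo the number of backends
        // is the caller's responsibility.
        assert_eq!((algo.next(b"any-key") as usize) % backend_count, 0);
    }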

    #[test]
    fn unique_iter_max_iterations_is_correct() {
        let b1 = Backend::new("1.1.1.1:80").unwrap();
        let b2 = Backend::new("1.0.0.1:80").unwrap();
        let b3 = Backend::new("1.0.0.255:80").unwrap();
        let items = [&b1, &b2, &b3];

        let mut all = UniqueIterator::new(TestIter::new(&items), 3);
        assert_eq!(all.get_next(), Some(b1.clone()));
        assert_eq!(all.get_next(), Some(b2.clone()));
        assert_eq!(all.get_next(), Some(b3.clone()));
        assert_eq!(all.get_next(), None);

        let mut stop = UniqueIterator::new(TestIter::new(&items), 1);
        assert_eq!(stop.get_next(), Some(b1));
        assert_eq!(stop.get_next(), None);
    }

    #[test]
    fn unique_iter_duplicate_items_are_filtered() {
        let b1 = Backend::new("1.1.1.1:80").unwrap();
        let b2 = Backend::new("1.0.0.1:80").unwrap();
        let b3 = Backend::new("1.0.0.255:80").unwrap();
        let items = [&b1, &b1, &b2, &b2, &b2, &b3];

        let mut uniq = UniqueIterator::new(TestIter::new(&items), 10);
        assert_eq!(uniq.get_next(), Some(b1));
        assert_eq!(uniq.get_next(), Some(b2));
        assert_eq!(uniq.get_next(), Some(b3));
    }
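
    // A minimal end-to-end sketch tying the pieces together, assuming (as the
    // aliases above imply) that `Random` implements BackendSelection. A selector
    // is built from a backend set, its iterator is started for one key, and the
    // iterator is wrapped in UniqueIterator so retries never revisit a backend
    // and the loop stays bounded even if the inner iterator keeps yielding.
    #[test]
    fn selection_end_to_end_sketch() {
        let b1 = Backend::new("1.1.1.1:80").unwrap();
        let b2 = Backend::new("1.0.0.1:80").unwrap();
        let backends: BTreeSet<Backend> = [b1.clone(), b2.clone()].into_iter().collect();

        // `build` snapshots the backend set; `iter` starts a selection for one key.
        let selector = Arc::new(Random::build(&backends));
        let backend_iter = selector.iter(b"client-key");

        let mut unique = UniqueIterator::new(backend_iter, 16);
        let mut picked = 0;
        while let Some(backend) = unique.get_next() {
            assert!(backend == b1 || backend == b2);
            picked += 1;
        }
        // At most the two distinct backends can come out, however often the
        // wrapped iterator yields.
        assert!(picked <= 2);
    }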
}