stack_map/lib.rs

#[cfg(feature = "serde")]
mod serde;

use std::borrow::Borrow;
use std::fmt;
use std::mem::MaybeUninit;

/// `StackMap` is a constant-size, zero-allocation associative container
/// backed by an array. It can be used as a building block for various interesting
/// higher-level data structures.
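///
/// Keys are kept sorted, so lookups use a binary search and iteration
/// yields entries in key order. A brief usage sketch:
///
/// ```
/// let mut map = stack_map::StackMap::<u8, u8, 4>::default();
/// assert_eq!(map.insert(1, 10), None);
/// assert_eq!(map.insert(2, 20), None);
/// assert_eq!(map.get(&1), Some(&10));
/// assert_eq!(map.len(), 2);
/// ```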
pub struct StackMap<K: Ord, V, const FANOUT: usize> {
    len: usize,
    inner: [MaybeUninit<(K, V)>; FANOUT],
}

impl<K: Ord + fmt::Debug, V: fmt::Debug, const FANOUT: usize> fmt::Debug
    for StackMap<K, V, FANOUT>
{
    fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result {
        w.debug_struct(&format!("StackMap<{}>", FANOUT)).finish()?;
        w.debug_map()
            .entries(self.iter().map(|&(ref k, ref v)| (k, v)))
            .finish()
    }
}

impl<K: Ord + PartialEq, V: PartialEq, const FANOUT: usize> PartialEq for StackMap<K, V, FANOUT> {
    fn eq(&self, other: &Self) -> bool {
        self.len == other.len && {
            let self_iter = self.iter();
            let mut other_iter = other.iter();

            for self_kv in self_iter {
                if !Some(self_kv).eq(&other_iter.next()) {
                    return false;
                }
            }

            other_iter.next().is_none()
        }
    }
}

impl<K: Ord, V, const FANOUT: usize> Drop for StackMap<K, V, FANOUT> {
    fn drop(&mut self) {
        for i in 0..self.len() {
            let ptr = self.inner[i].as_mut_ptr();
            unsafe {
                std::ptr::drop_in_place(ptr);
            }
        }
    }
}

impl<K: Clone + Ord, V: Clone, const FANOUT: usize> Clone for StackMap<K, V, FANOUT> {
    fn clone(&self) -> Self {
        let mut inner: [MaybeUninit<(K, V)>; FANOUT] =
            core::array::from_fn(|_i| MaybeUninit::uninit());

        for (i, item) in self.iter().cloned().enumerate() {
            inner[i].write(item);
        }

        StackMap {
            inner,
            len: self.len,
        }
    }
}

impl<K: Ord, V, const FANOUT: usize> Default for StackMap<K, V, FANOUT> {
    #[inline]
    fn default() -> Self {
        StackMap::new()
    }
}

impl<K: Ord, V, const FANOUT: usize> StackMap<K, V, FANOUT> {
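    /// Creates an empty `StackMap`.
    ///
    /// # Examples
    /// ```
    /// let map = stack_map::StackMap::<u8, u8, 4>::new();
    /// assert!(map.is_empty());
    /// assert_eq!(map.len(), 0);
    /// ```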
    pub const fn new() -> Self {
        Self {
            inner: unsafe { MaybeUninit::<[MaybeUninit<_>; FANOUT]>::uninit().assume_init() },
            len: 0,
        }
    }

    fn binary_search<Q>(&self, key: &Q) -> Result<usize, usize>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.inner[..self.len()]
            .binary_search_by_key(&key, |slot| unsafe { slot.assume_init_ref().0.borrow() })
    }

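    /// Returns a reference to the value associated with the provided key,
    /// if it is present.
    ///
    /// # Examples
    /// ```
    /// let mut map = stack_map::StackMap::<u8, u8, 4>::default();
    /// map.insert(1, 10);
    /// assert_eq!(map.get(&1), Some(&10));
    /// assert_eq!(map.get(&2), None);
    /// ```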
    pub fn get<Q>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        if let Ok(index) = self.binary_search(key) {
            Some(unsafe { &self.inner.get_unchecked(index).assume_init_ref().1 })
        } else {
            None
        }
    }

    /// Inserts an item and returns the previous value, if one was present.
    ///
    /// # Panics
    ///
    /// This method panics if called with a new key when the `StackMap` is
    /// already full.
    ///
    /// Check that the `StackMap` is not full before calling this method:
    /// it is full when `self.is_full()` returns `true`, which happens when
    /// `self.len() == FANOUT`.
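    ///
    /// # Examples
    /// ```
    /// let mut map = stack_map::StackMap::<u8, u8, 4>::default();
    /// assert_eq!(map.insert(1, 10), None);
    /// // Inserting an existing key replaces the value and returns the old one.
    /// assert_eq!(map.insert(1, 11), Some(10));
    /// assert_eq!(map.get(&1), Some(&11));
    /// ```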
    pub fn insert(&mut self, key: K, value: V) -> Option<V> {
        match self.binary_search(&key) {
            Ok(index) => {
                let slot = unsafe { &mut self.inner.get_unchecked_mut(index).assume_init_mut().1 };
                Some(std::mem::replace(slot, value))
            }
            Err(index) => {
                assert!(self.len() < FANOUT);

                unsafe {
                    if index < self.len() {
                        let src = self.inner.get_unchecked(index).as_ptr();
                        let dst = self.inner.get_unchecked_mut(index + 1).as_mut_ptr();

                        std::ptr::copy(src, dst, self.len() - index);
                    }

                    self.len += 1;

                    self.inner.get_unchecked_mut(index).write((key, value));
                }
                None
            }
        }
    }

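    /// Removes the entry associated with the provided key, returning its
    /// value if it was present.
    ///
    /// # Examples
    /// ```
    /// let mut map = stack_map::StackMap::<u8, u8, 4>::default();
    /// map.insert(1, 10);
    /// assert_eq!(map.remove(&1), Some(10));
    /// assert_eq!(map.remove(&1), None);
    /// assert!(map.is_empty());
    /// ```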
    pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        if let Ok(index) = self.binary_search(key) {
            unsafe {
                let ret = std::ptr::read(self.inner.get_unchecked(index).as_ptr()).1;

                if index + 1 < self.len() {
                    let src = self.inner.get_unchecked(index + 1).as_ptr();
                    let dst = self.inner.get_unchecked_mut(index).as_mut_ptr();

                    // Shift only the initialized elements after `index` left
                    // by one to fill the gap left by the removed entry.
                    std::ptr::copy(src, dst, self.len() - index - 1);
                }

                self.len -= 1;

                Some(ret)
            }
        } else {
            None
        }
    }

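    /// Returns `true` if the `StackMap` contains an entry for the
    /// provided key.
    ///
    /// # Examples
    /// ```
    /// let mut map = stack_map::StackMap::<u8, u8, 4>::default();
    /// map.insert(1, 10);
    /// assert!(map.contains_key(&1));
    /// assert!(!map.contains_key(&2));
    /// ```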
    pub fn contains_key(&self, key: &K) -> bool {
        self.binary_search(key).is_ok()
    }

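    /// Returns a double-ended iterator over the key-value pairs, in
    /// ascending key order.
    ///
    /// # Examples
    /// ```
    /// let mut map = stack_map::StackMap::<u8, u8, 4>::default();
    /// map.insert(2, 20);
    /// map.insert(1, 10);
    ///
    /// let kvs: Vec<(u8, u8)> = map.iter().copied().collect();
    /// assert_eq!(kvs, vec![(1, 10), (2, 20)]);
    /// ```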
    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &(K, V)> {
        self.inner[..self.len()]
            .iter()
            .map(|slot| unsafe { slot.assume_init_ref() })
    }

    /// Splits this `StackMap` into two. `self` will retain
    /// all key-value pairs before the provided split index.
    /// Returns a new `StackMap` created out of all key-value pairs
    /// at or after the provided split index.
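    ///
    /// # Panics
    ///
    /// Panics if `split_idx` is not less than `self.len()`.
    ///
    /// # Examples
    /// ```
    /// let mut map = stack_map::StackMap::<u8, u8, 4>::default();
    /// map.insert(1, 10);
    /// map.insert(2, 20);
    /// map.insert(3, 30);
    ///
    /// let rhs = map.split_off(1);
    /// assert_eq!(map.len(), 1);
    /// assert_eq!(map.last(), Some(&(1, 10)));
    /// assert_eq!(rhs.len(), 2);
    /// assert_eq!(rhs.first(), Some(&(2, 20)));
    /// ```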
    pub fn split_off(&mut self, split_idx: usize) -> Self {
        assert!(split_idx < self.len());
        assert!(split_idx < FANOUT);

        let mut rhs = Self::default();

        for i in split_idx..self.len() {
            let src = self.inner[i].as_ptr();
            let dst = rhs.inner[i - split_idx].as_mut_ptr();
            unsafe {
                std::ptr::copy_nonoverlapping(src, dst, 1);
            }
        }

        rhs.len = self.len - split_idx;
        self.len = split_idx;

        rhs
    }

    /// Get a key-value pair based on its internal relative
    /// index in the backing array.
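    ///
    /// # Examples
    /// ```
    /// let mut map = stack_map::StackMap::<u8, u8, 4>::default();
    /// map.insert(1, 10);
    /// map.insert(2, 20);
    ///
    /// assert_eq!(map.get_index(0), Some(&(1, 10)));
    /// assert_eq!(map.get_index(1), Some(&(2, 20)));
    /// assert_eq!(map.get_index(2), None);
    /// ```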
    pub fn get_index(&self, index: usize) -> Option<&(K, V)> {
        if index < self.len() {
            Some(unsafe { self.inner.get_unchecked(index).assume_init_ref() })
        } else {
            None
        }
    }

    /// Get the key-value pair with the greatest key that is less than
    /// or equal to the provided key. Useful for any greatest lower
    /// bound operation, such as MVCC lookups where the key is suffixed
    /// by a version, or an internal b-tree index lookup where you are
    /// looking for the next node based on a node's low key.
    ///
    /// # Examples
    /// ```
    /// let mut map = stack_map::StackMap::<u8, u8, 64>::default();
    /// map.insert(1, 1);
    /// map.insert(2, 2);
    /// map.insert(3, 3);
    ///
    /// let lt = map.get_less_than_or_equal(&4).unwrap();
    /// let expected = &(3, 3);
    /// assert_eq!(expected, lt);
    ///
    /// let lt = map.get_less_than_or_equal(&3).unwrap();
    /// let expected = &(3, 3);
    /// assert_eq!(expected, lt);
    ///
    /// let lt = map.get_less_than_or_equal(&2).unwrap();
    /// let expected = &(2, 2);
    /// assert_eq!(expected, lt);
    ///
    /// let lt = map.get_less_than_or_equal(&1).unwrap();
    /// let expected = &(1, 1);
    /// assert_eq!(expected, lt);
    ///
    /// let lt = map.get_less_than_or_equal(&0);
    /// let expected = None;
    /// assert_eq!(expected, lt);
    /// ```
    pub fn get_less_than_or_equal<Q>(&self, key: &Q) -> Option<&(K, V)>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        // binary search for the greatest key that is <= the search key
        let index = match self.binary_search(key) {
            Ok(i) => i,
            Err(0) => return None,
            Err(i) => i - 1,
        };

        self.get_index(index)
    }

    /// Gets the key-value pair with the greatest key that is strictly
    /// less than the provided key.
    ///
    /// # Examples
    /// ```
    /// let mut map = stack_map::StackMap::<u8, u8, 64>::default();
    /// map.insert(1, 1);
    /// map.insert(2, 2);
    /// map.insert(3, 3);
    ///
    /// let lt = map.get_less_than(&4).unwrap();
    /// let expected = &(3, 3);
    /// assert_eq!(expected, lt);
    ///
    /// let lt = map.get_less_than(&3).unwrap();
    /// let expected = &(2, 2);
    /// assert_eq!(expected, lt);
    ///
    /// let lt = map.get_less_than(&2).unwrap();
    /// let expected = &(1, 1);
    /// assert_eq!(expected, lt);
    ///
    /// let lt = map.get_less_than(&1);
    /// let expected = None;
    /// assert_eq!(expected, lt);
    ///
    /// let lt = map.get_less_than(&0);
    /// let expected = None;
    /// assert_eq!(expected, lt);
    /// ```
    pub fn get_less_than<Q>(&self, key: &Q) -> Option<&(K, V)>
    where
        K: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        // binary search for the greatest key that is strictly < the search key
        let index = match self.binary_search(key) {
            Ok(0) | Err(0) => return None,
            Ok(i) => i - 1,
            Err(i) => i - 1,
        };

        self.get_index(index)
    }

    /// Returns the first kv pair in the `StackMap`, if one exists.
    ///
    /// # Examples
    /// ```
    /// let mut sm = stack_map::StackMap::<u8, u8, 3>::default();
    /// sm.insert(1, 1);
    /// sm.insert(2, 2);
    /// sm.insert(3, 3);
    ///
    /// let expected = Some(&(1, 1));
    /// let actual = sm.first();
    /// assert_eq!(expected, actual);
    /// ```
    pub fn first(&self) -> Option<&(K, V)> {
        self.get_index(0)
    }

    /// Returns the last kv pair in the `StackMap`, if one exists.
    ///
    /// # Examples
    /// ```
    /// let mut sm = stack_map::StackMap::<u8, u8, 3>::default();
    /// sm.insert(1, 1);
    /// sm.insert(2, 2);
    /// sm.insert(3, 3);
    ///
    /// let expected = Some(&(3, 3));
    /// let actual = sm.last();
    /// assert_eq!(expected, actual);
    /// ```
    pub fn last(&self) -> Option<&(K, V)> {
        if self.is_empty() {
            None
        } else {
            self.get_index(self.len - 1)
        }
    }

    /// Returns `true` if this `StackMap` is at its maximum capacity and
    /// unable to receive additional data.
    ///
    /// # Examples
    /// ```
    /// let mut sm = stack_map::StackMap::<u8, u8, 3>::default();
    /// sm.insert(1, 1);
    /// sm.insert(2, 2);
    /// sm.insert(3, 3);
    ///
    /// let expected = true;
    /// let actual = sm.is_full();
    /// assert_eq!(expected, actual);
    /// ```
    pub const fn is_full(&self) -> bool {
        self.len == FANOUT
    }

    pub const fn len(&self) -> usize {
        self.len
    }

    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }
}