1use core::borrow::Borrow;
9use core::cmp::Ordering;
10use heapless::Vec as HeaplessVec;
11
/// A key/value pair stored inline in a `HeaplessBTreeMap`.
///
/// NOTE: the comparison impls below compare by **key only** — two entries
/// with equal keys but different values are considered equal. This is what
/// lets the map binary-search its entry array by key.
#[derive(Debug, Clone)]
pub struct Entry<K, V>(pub K, pub V);
26
27impl<K: PartialEq, V> PartialEq for Entry<K, V> {
28 fn eq(&self, other: &Self) -> bool {
30 self.0.eq(&other.0)
31 }
32}
33
// Key-only equality is a total equivalence whenever `K: Eq`, so the marker
// impl is sound without any bound on `V`.
impl<K: Eq, V> Eq for Entry<K, V> {}
35
36impl<K: PartialOrd, V> PartialOrd for Entry<K, V> {
37 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
39 self.0.partial_cmp(&other.0)
40 }
41}
42
43impl<K: Ord, V> Ord for Entry<K, V> {
44 fn cmp(&self, other: &Self) -> Ordering {
46 self.0.cmp(&other.0)
47 }
48}
49
/// A fixed-capacity map with inline (heap-free) storage.
///
/// Despite the name, this is not a B-tree: entries live in a single
/// `heapless::Vec` kept sorted by key, giving O(log n) lookup via binary
/// search and O(n) insert/remove due to element shifting. Capacity `N` is
/// fixed at compile time; inserting a new key into a full map fails.
#[derive(Debug, Clone)]
pub struct HeaplessBTreeMap<K, V, const N: usize> {
    // Invariant: sorted ascending by key, keys unique.
    buf: HeaplessVec<Entry<K, V>, N>,
}
108
109impl<K: Ord, V, const N: usize> HeaplessBTreeMap<K, V, N> {
110 pub fn new() -> Self {
112 const {
113 assert!(
114 std::mem::size_of::<Self>() <= 16 * 1024,
115 "HeaplessBTreeMap is too large! The struct size exceeds the 16KB limit. Reduce N."
116 );
117 }
118 Self {
119 buf: HeaplessVec::new(),
120 }
121 }
122
123 pub fn len(&self) -> usize {
125 self.buf.len()
126 }
127
128 pub fn is_empty(&self) -> bool {
130 self.buf.is_empty()
131 }
132
133 pub fn is_full(&self) -> bool {
137 self.buf.is_full()
138 }
139
140 pub fn clear(&mut self) {
142 self.buf.clear();
143 }
144
145 pub fn insert(&mut self, key: K, value: V) -> Result<Option<V>, (K, V)> {
157 match self.buf.binary_search_by(|e| e.0.cmp(&key)) {
158 Ok(idx) => Ok(Some(core::mem::replace(&mut self.buf[idx].1, value))),
159 Err(idx) => {
160 if self.buf.is_full() {
161 Err((key, value))
162 } else {
163 self.buf.insert(idx, Entry(key, value)).ok().unwrap();
164 Ok(None)
165 }
166 }
167 }
168 }
169
170 pub fn get<Q>(&self, key: &Q) -> Option<&V>
175 where
176 K: Borrow<Q>,
177 Q: Ord + ?Sized,
178 {
179 match self.buf.binary_search_by(|e| e.0.borrow().cmp(key)) {
180 Ok(idx) => Some(&self.buf[idx].1),
181 Err(_) => None,
182 }
183 }
184
185 pub fn get_mut<Q>(&mut self, key: &Q) -> Option<&mut V>
190 where
191 K: Borrow<Q>,
192 Q: Ord + ?Sized,
193 {
194 match self.buf.binary_search_by(|e| e.0.borrow().cmp(key)) {
195 Ok(idx) => Some(&mut self.buf[idx].1),
196 Err(_) => None,
197 }
198 }
199
200 pub fn remove<Q>(&mut self, key: &Q) -> Option<V>
205 where
206 K: Borrow<Q>,
207 Q: Ord + ?Sized,
208 {
209 match self.buf.binary_search_by(|e| e.0.borrow().cmp(key)) {
210 Ok(idx) => Some(self.buf.remove(idx).1),
211 Err(_) => None,
212 }
213 }
214
215 pub fn iter(&self) -> core::slice::Iter<'_, Entry<K, V>> {
219 self.buf.iter()
220 }
221
222 pub fn into_vec(self) -> HeaplessVec<Entry<K, V>, N> {
227 self.buf
228 }
229}
230
231impl<K: Ord, V, const N: usize> Default for HeaplessBTreeMap<K, V, N> {
232 fn default() -> Self {
234 Self::new()
235 }
236}
237
// Consuming iteration: yields owned entries in ascending key order.
impl<K: Ord, V, const N: usize> IntoIterator for HeaplessBTreeMap<K, V, N> {
    type Item = Entry<K, V>;
    // NOTE(review): the trailing `usize` parameter presumably matches the
    // `LenT` generic of the pinned `heapless` version's `IntoIter` —
    // confirm against the crate version in Cargo.lock before bumping.
    type IntoIter = heapless::vec::IntoIter<Entry<K, V>, N, usize>;

    fn into_iter(self) -> Self::IntoIter {
        self.buf.into_iter()
    }
}
247
248impl<K: Ord, V: PartialEq, const N: usize> PartialEq for HeaplessBTreeMap<K, V, N> {
249 fn eq(&self, other: &Self) -> bool {
250 if self.len() != other.len() {
251 return false;
252 }
253 self.iter()
254 .zip(other.iter())
255 .all(|(a, b)| a.0 == b.0 && a.1 == b.1)
256 }
257}
258
// Sound: element-wise (key, value) equality is reflexive/total when both
// `K` and `V` are `Eq`.
impl<K: Ord, V: Eq, const N: usize> Eq for HeaplessBTreeMap<K, V, N> {}
260
261impl<K: Ord, V: PartialOrd, const N: usize> PartialOrd for HeaplessBTreeMap<K, V, N> {
262 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
263 self.iter()
264 .map(|e| (&e.0, &e.1))
265 .partial_cmp(other.iter().map(|e| (&e.0, &e.1)))
266 }
267}
268
269impl<K: Ord, V: Ord, const N: usize> Ord for HeaplessBTreeMap<K, V, N> {
270 fn cmp(&self, other: &Self) -> Ordering {
271 self.iter()
272 .map(|e| (&e.0, &e.1))
273 .cmp(other.iter().map(|e| (&e.0, &e.1)))
274 }
275}
276
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_heapless_btree_map_stack_ops_sorted_order() {
        let mut m: HeaplessBTreeMap<i32, i32, 4> = HeaplessBTreeMap::new();
        for (k, v) in [(3, 30), (1, 10), (2, 20)] {
            m.insert(k, v).unwrap();
        }
        // Keys come back ascending regardless of insertion order.
        let keys: Vec<_> = m.iter().map(|e| e.0).collect();
        assert_eq!(keys, vec![1, 2, 3]);
    }

    #[test]
    fn test_heapless_btree_map_stack_ops_update() {
        let mut m: HeaplessBTreeMap<i32, i32, 4> = HeaplessBTreeMap::new();
        // First insert is fresh; second replaces and returns the old value.
        assert_eq!(m.insert(1, 10), Ok(None));
        assert_eq!(m.insert(1, 20), Ok(Some(10)));
        assert_eq!(m.get(&1), Some(&20));
    }

    #[test]
    fn test_heapless_btree_map_stack_ops_remove() {
        let mut m: HeaplessBTreeMap<i32, i32, 4> = HeaplessBTreeMap::new();
        m.insert(1, 10).unwrap();
        m.insert(2, 20).unwrap();

        assert_eq!(m.remove(&1), Some(10));
        assert_eq!(m.len(), 1);
        assert_eq!(m.get(&1), None);
    }

    #[test]
    fn test_heapless_btree_map_stack_ops_full_returns_err() {
        let mut m: HeaplessBTreeMap<i32, i32, 2> = HeaplessBTreeMap::new();
        m.insert(1, 10).unwrap();
        m.insert(2, 20).unwrap();
        assert!(m.is_full());
        // A full map hands the rejected pair back to the caller.
        assert_eq!(m.insert(3, 30), Err((3, 30)));
    }

    #[test]
    fn test_heapless_btree_map_stack_ops_get_mut() {
        let mut m: HeaplessBTreeMap<i32, i32, 4> = HeaplessBTreeMap::new();
        m.insert(1, 10).unwrap();
        if let Some(slot) = m.get_mut(&1) {
            *slot = 99;
        }
        assert_eq!(m.get(&1), Some(&99));
    }

    #[test]
    fn test_heapless_btree_map_traits_clone_default() {
        let original: HeaplessBTreeMap<i32, i32, 4> = HeaplessBTreeMap::default();
        let mut copy = original.clone();
        copy.insert(1, 1).unwrap();
        // The clone is independent: mutating it leaves the original empty.
        assert_eq!(original.len(), 0);
        assert_eq!(copy.len(), 1);
    }

    #[test]
    fn test_heapless_btree_map_traits_entry_ord() {
        let e1 = Entry(1i32, 10i32);
        let e2 = Entry(1i32, 20i32);
        let e3 = Entry(2i32, 10i32);
        // Entries compare by key only: same key => equal despite values.
        assert_eq!(e1, e2);
        assert!(e1 < e3);
        assert_eq!(e1.cmp(&e2), Ordering::Equal);
        assert_eq!(e1.partial_cmp(&e3), Some(Ordering::Less));
    }
}
349
#[cfg(test)]
mod heapless_btree_map_coverage_tests {
    use super::*;

    #[test]
    fn test_is_empty_false() {
        let mut m: HeaplessBTreeMap<i32, i32, 2> = HeaplessBTreeMap::new();
        m.insert(1, 10).unwrap();
        assert!(!m.is_empty());
    }

    #[test]
    fn test_get_mut_missing() {
        let mut m: HeaplessBTreeMap<i32, i32, 2> = HeaplessBTreeMap::new();
        m.insert(1, 10).unwrap();
        assert_eq!(m.get_mut(&2), None);
    }

    #[test]
    fn test_into_vec() {
        let mut m: HeaplessBTreeMap<i32, i32, 4> = HeaplessBTreeMap::new();
        m.insert(2, 20).unwrap();
        m.insert(1, 10).unwrap();

        // into_vec hands back the underlying storage, still key-sorted.
        let entries = m.into_vec();
        assert_eq!(entries.len(), 2);
        assert_eq!(entries[0].0, 1);
        assert_eq!(entries[1].0, 2);
    }

    #[test]
    fn test_traits_partial_eq() {
        let mut m1: HeaplessBTreeMap<i32, i32, 4> = HeaplessBTreeMap::new();
        m1.insert(1, 10).unwrap();

        let mut m2: HeaplessBTreeMap<i32, i32, 4> = HeaplessBTreeMap::new();
        m2.insert(1, 10).unwrap();

        let mut m3: HeaplessBTreeMap<i32, i32, 4> = HeaplessBTreeMap::new();
        m3.insert(1, 10).unwrap();
        m3.insert(2, 20).unwrap();

        let mut m4: HeaplessBTreeMap<i32, i32, 4> = HeaplessBTreeMap::new();
        m4.insert(1, 99).unwrap();

        assert_eq!(m1, m2);
        // Differs by length.
        assert_ne!(m1, m3);
        // Same key, different value — map equality (unlike Entry's) sees it.
        assert_ne!(m1, m4);
    }

    #[test]
    fn test_traits_ord_partial_ord() {
        let mut m1: HeaplessBTreeMap<i32, i32, 4> = HeaplessBTreeMap::new();
        m1.insert(1, 10).unwrap();
        m1.insert(2, 20).unwrap();

        let mut m2: HeaplessBTreeMap<i32, i32, 4> = HeaplessBTreeMap::new();
        m2.insert(1, 10).unwrap();
        m2.insert(3, 30).unwrap();

        // Lexicographic over (key, value): (2, 20) < (3, 30).
        assert!(m1 < m2);
        assert_eq!(m1.cmp(&m2), Ordering::Less);
        assert_eq!(m1.partial_cmp(&m2), Some(Ordering::Less));

        assert_eq!(m1.cmp(&m1), Ordering::Equal);
    }
}