1use core::fmt::Debug;
2use core::mem::ManuallyDrop;
3use core::ops::Add;
4use core::ops::Mul;
5use core::ptr::NonNull;
6
7use allocated::AllocResult;
8use allocator_api2::alloc::Allocator;
9use generic_array::ArrayLength;
10use typenum::Prod;
11use typenum::Sum;
12use typenum::U1;
13use typenum::U2;
14
15use super::node::InsertOption;
16use super::node::InteriorNode;
17use super::node::MutNodeRef;
18use super::node::Node;
19use super::node::NodeEntry;
20use super::node::OccupiedNodeEntry;
21use super::node::VacantNodeEntry;
22use super::AllocatedBTreeMap;
23
/// A view into a single slot of an [`AllocatedBTreeMap`]: either
/// [`Entry::Vacant`] or [`Entry::Occupied`], analogous to
/// `std::collections::btree_map::Entry`.
///
/// The `typenum` bounds compute array lengths `2 * B` and `1 + 2 * B` —
/// presumably the per-node key and child capacities; confirm against the
/// node definitions.
#[allow(clippy::module_name_repetitions)]
pub enum Entry<'a, 's, A: Allocator, K: core::cmp::PartialOrd + core::fmt::Debug, V, B: ArrayLength>
where
    U2: Mul<B>,
    Prod<U2, B>: ArrayLength,
    U1: Add<Prod<U2, B>>,
    Sum<U1, Prod<U2, B>>: ArrayLength,
{
    /// No value is stored under the key yet.
    Vacant(VacantEntry<'a, 's, A, K, V, B>),
    /// A value is already stored under the key.
    Occupied(OccupiedEntry<'a, 's, A, K, V, B>),
}
41
/// A view into a vacant slot of an [`AllocatedBTreeMap`], obtained through
/// [`Entry`]. Inserting through it may split nodes and install a new root.
#[allow(clippy::module_name_repetitions)]
pub struct VacantEntry<
    'a,
    's,
    A: Allocator,
    K: core::cmp::PartialOrd + core::fmt::Debug,
    V,
    B: ArrayLength,
> where
    U2: Mul<B>,
    Prod<U2, B>: ArrayLength,
    U1: Add<Prod<U2, B>>,
    Sum<U1, Prod<U2, B>>: ArrayLength,
{
    // Allocator used for any node allocation an insert requires.
    alloc: &'a A,
    // Node-level cursor at the position where the key would be inserted.
    inner: VacantNodeEntry<'s, K, K, V, B, MutNodeRef<'s, K, V, B>>,
    // Back-pointer to the owning map; `insert` uses it to bump the length
    // and to install a replacement root after a split.
    map: NonNull<AllocatedBTreeMap<K, V, B>>,
}
62
/// A view into an occupied slot of an [`AllocatedBTreeMap`], obtained
/// through [`Entry`]. Supports in-place reads/writes and removal.
#[allow(clippy::module_name_repetitions)]
pub struct OccupiedEntry<
    'a,
    's,
    A: Allocator,
    K: core::cmp::PartialOrd + core::fmt::Debug,
    V,
    B: ArrayLength,
> where
    U2: Mul<B>,
    Prod<U2, B>: ArrayLength,
    U1: Add<Prod<U2, B>>,
    Sum<U1, Prod<U2, B>>: ArrayLength,
{
    // Allocator passed down to node-level removal (see `remove_entry`).
    alloc: &'a A,
    // Node-level cursor pointing at the occupied key/value pair.
    inner: OccupiedNodeEntry<'s, K, V, B, MutNodeRef<'s, K, V, B>>,
    // Back-pointer to the owning map; `remove_entry` uses it to decrement
    // the length and to collapse an emptied root.
    map: NonNull<AllocatedBTreeMap<K, V, B>>,
}
83
impl<'a, 's, A: Allocator, K: core::cmp::PartialOrd + core::fmt::Debug, V, B: ArrayLength>
    VacantEntry<'a, 's, A, K, V, B>
where
    U2: Mul<B>,
    Prod<U2, B>: ArrayLength,
    U1: Add<Prod<U2, B>>,
    Sum<U1, Prod<U2, B>>: ArrayLength,
{
    /// Returns a reference to the key that would be inserted.
    pub fn key(&self) -> &K {
        self.inner.key()
    }

    /// Consumes the entry and returns ownership of the key.
    pub fn into_key(self) -> K {
        self.inner.into_key()
    }

    /// Returns the neighboring key "below" the insertion point, if any.
    /// NOTE(review): semantics delegated to `VacantNodeEntry::key_below` —
    /// presumably the predecessor key; confirm there.
    #[must_use]
    pub fn key_below(&self) -> Option<&K> {
        self.inner.key_below()
    }

    /// Returns the neighboring key "above" the insertion point, if any.
    /// NOTE(review): presumably the successor key; confirm against
    /// `VacantNodeEntry::key_above`.
    #[must_use]
    pub fn key_above(&self) -> Option<&K> {
        self.inner.key_above()
    }

    /// Inserts `value` at this vacant position.
    ///
    /// On success the map length is incremented and `Ok(None)` is returned
    /// (there is never a previous value for a vacant entry — both arms
    /// below return `None`). If the insert splits all the way up to the
    /// root, a fresh interior root is allocated holding the separator
    /// `(k, v)` with the old root and the split-off node as children.
    ///
    /// # Errors
    /// Propagates allocation failure from the node-level insert or from
    /// allocating the replacement root.
    pub fn insert(mut self, value: V) -> AllocResult<Option<V>> {
        let alloc = self.alloc;
        let io = self.inner.insert_in(alloc, value)?;
        match io {
            InsertOption::NewKey(_) => {
                // Plain insert with no structural change: just bump the length.
                // SAFETY: `map` is dereferenced mutably; presumably it points
                // to the live map this entry was created from with no aliasing
                // references — TODO(review): confirm at construction sites.
                unsafe { self.map.as_mut() }.n += 1;
                Ok(None)
            }
            InsertOption::Split((k, v, n)) => {
                // The root itself split: build a new interior root over the
                // old root and the newly split-off node `n`.
                // SAFETY: same aliasing/liveness assumption as above.
                let map = unsafe { self.map.as_mut() };

                let root =
                    InteriorNode::root(alloc, k, v, map.root.take().unwrap(), n.into_inner())?;
                map.root
                    .replace(ManuallyDrop::new(Node::Interior(ManuallyDrop::into_inner(
                        root.into_inner(),
                    ))));
                map.n += 1;

                Ok(None)
            }
        }
    }
}
149
impl<'a, 's, A: Allocator, K: core::cmp::PartialOrd + core::fmt::Debug, V, B: ArrayLength>
    OccupiedEntry<'a, 's, A, K, V, B>
where
    U2: Mul<B>,
    Prod<U2, B>: ArrayLength,
    U1: Add<Prod<U2, B>>,
    Sum<U1, Prod<U2, B>>: ArrayLength,
{
    /// Assembles an `OccupiedEntry` from its parts.
    ///
    /// # Safety
    /// NOTE(review): `map` is later dereferenced mutably (see
    /// `remove_entry`), so the caller must guarantee it points to the live
    /// map that `inner` was obtained from, with no aliasing references for
    /// the entry's lifetime — confirm against call sites.
    pub(crate) unsafe fn new(
        alloc: &'a A,
        inner: OccupiedNodeEntry<'s, K, V, B, MutNodeRef<'s, K, V, B>>,
        map: NonNull<AllocatedBTreeMap<K, V, B>>,
    ) -> Self {
        OccupiedEntry { alloc, inner, map }
    }

    /// Returns a reference to the entry's key.
    #[must_use]
    pub fn key(&self) -> &K {
        self.inner.key()
    }

    /// Returns a shared reference to the entry's value.
    #[must_use]
    pub fn get(&self) -> &V {
        self.inner.get()
    }

    /// Returns a mutable reference to the entry's value, borrowed from the
    /// entry itself.
    pub fn get_mut(&mut self) -> &mut V {
        self.inner.get_mut()
    }

    /// Consumes the entry, returning a mutable reference to the value tied
    /// to the tree borrow `'s`.
    #[must_use]
    pub fn into_mut(self) -> &'s mut V {
        self.inner.into_mut()
    }

    /// Replaces the stored value with `value`, returning the old value.
    pub fn insert(&mut self, value: V) -> V {
        self.inner.insert(value)
    }

    /// Removes the entry, returning just the value.
    #[must_use]
    pub fn remove(self) -> V {
        self.remove_entry().1
    }

    /// Removes the entry, returning the owned `(key, value)` pair and
    /// decrementing the map length. If the node-level removal reports a
    /// possible underflow and the root is left with zero keys, an interior
    /// root is replaced by its child at index 0 (shrinking the tree height);
    /// a keyless leaf root is kept as-is.
    #[must_use]
    pub fn remove_entry(mut self) -> (K, V) {
        // SAFETY: contract delegated to `OccupiedNodeEntry::remove`;
        // presumably `self.alloc` must be the allocator the nodes were
        // allocated with — TODO(review): confirm.
        let (entry, check_underflow) = unsafe { self.inner.remove(self.alloc) };

        // SAFETY: `map` is assumed to point to the live owning map with no
        // aliasing references (see the note on `new`).
        unsafe { self.map.as_mut() }.n -= 1;

        if check_underflow {
            let map: &mut AllocatedBTreeMap<K, V, B> = unsafe { self.map.as_mut() };
            let root = map.root.as_mut().unwrap();
            if root.n_keys() == 0 {
                let old_root = map.root.take().unwrap();
                // Promote child 0 of an emptied interior root — presumably
                // its only remaining child at this point.
                let new_root = match ManuallyDrop::into_inner(old_root) {
                    Node::Interior(mut n) => n.take_child_at_in(self.alloc, 0),
                    Node::Leaf(n) => Node::Leaf(n),
                };
                map.root.replace(ManuallyDrop::new(new_root));
            }
        }

        entry
    }
}
229
230impl<'a, 's, A: Allocator, K: core::cmp::PartialOrd + Debug, V, B: ArrayLength>
231 Entry<'a, 's, A, K, V, B>
232where
233 U2: Mul<B>,
234 Prod<U2, B>: ArrayLength,
235 U1: Add<Prod<U2, B>>,
236 Sum<U1, Prod<U2, B>>: ArrayLength,
237{
238 pub(super) fn new(
239 alloc: &'a A,
240 inner: NodeEntry<'s, K, K, V, B, MutNodeRef<'s, K, V, B>>,
241 map: NonNull<AllocatedBTreeMap<K, V, B>>,
242 ) -> Self {
243 match inner {
244 NodeEntry::Vacant(inner) => Entry::Vacant(VacantEntry { alloc, inner, map }),
245 NodeEntry::Occupied(inner) => Entry::Occupied(OccupiedEntry { alloc, inner, map }),
246 }
247 }
248
249 #[must_use]
251 pub fn and_modify<F>(self, f: F) -> Self
252 where
253 F: FnOnce(&mut V),
254 {
255 match self {
256 Self::Occupied(mut o) => {
257 f(o.get_mut());
258 Self::Occupied(o)
259 }
260 Self::Vacant(v) => Self::Vacant(v),
261 }
262 }
263
264 #[inline]
266 pub fn key(&self) -> &K {
267 match self {
268 Self::Occupied(o) => o.key(),
269 Self::Vacant(v) => v.key(),
270 }
271 }
272
273 pub fn insert(self, v: V) -> AllocResult<Option<V>> {
280 match self {
281 Self::Occupied(mut o) => Ok(Some(o.insert(v))),
282 Self::Vacant(o) => {
283 o.insert(v)?;
284 Ok(None)
285 }
286 }
287 }
288
289 pub fn unwrap_occupied(self) -> OccupiedEntry<'a, 's, A, K, V, B> {
291 match self {
292 Self::Occupied(o) => o,
293 Self::Vacant(_) => panic!("Expected Occupied(_)"),
294 }
295 }
296}