1use std::cmp;
23use std::iter;
24use std::ops;
25use std::slice;
26use std::marker::PhantomData;
27
/// Types that convert losslessly to and from a dense `usize` index,
/// allowing their use as keys of [`IntMap`], [`IntSet`] and [`HeapData`].
pub trait AsIndex: Copy {
    /// Returns the dense index representing `self`.
    fn as_index(self) -> usize;
    /// Reconstructs a key from its dense index; inverse of `as_index`.
    fn from_index(index: usize) -> Self;
}
32
/// A dense map from keys convertible to small integers (via [`AsIndex`])
/// to values, backed by a `Vec` indexed with `key.as_index()`.
#[derive(Debug)]
pub struct IntMap<K: AsIndex, V> {
    // One slot per index in `0..map.len()`.
    map: Vec<V>,
    // Ties the key type to the map without storing a `K`; the `fn(K)`
    // wrapper avoids imposing ownership/auto-trait requirements on `K`.
    _marker: PhantomData<fn(K)>,
}
38
39impl<K: AsIndex, V> Default for IntMap<K, V> {
40 fn default() -> Self {
41 Self {
42 map: vec![],
43 _marker: PhantomData,
44 }
45 }
46}
47impl<K: AsIndex, V> Clone for IntMap<K, V>
48where
49 V: Clone,
50{
51 fn clone(&self) -> Self {
52 Self {
53 map: self.map.clone(),
54 _marker: PhantomData,
55 }
56 }
57}
58
59impl<K: AsIndex, V> IntMap<K, V> {
60 pub fn new() -> Self {
61 Self::default()
62 }
63 pub fn has(&self, k: K) -> bool {
64 k.as_index() < self.map.len()
65 }
66 pub fn reserve(&mut self, key: K, pad: V)
67 where
68 V: Clone,
69 {
70 let index = key.as_index();
71 if index >= self.map.len() {
72 self.map.resize(index + 1, pad);
73 }
74 }
75 pub fn reserve_default(&mut self, key: K)
76 where
77 V: Default,
78 {
79 let index = key.as_index();
80 if index >= self.map.len() {
81 let len = index + 1 - self.map.len();
83 self.map.extend((0..len).map(|_| V::default()));
84 }
85 }
86 pub fn insert(&mut self, key: K, val: V, pad: V)
87 where
88 V: Clone,
89 {
90 self.reserve(key, pad);
91 self[key] = val;
92 }
93 pub fn insert_default(&mut self, key: K, val: V)
94 where
95 V: Default,
96 {
97 self.reserve_default(key);
98 self[key] = val;
99 }
100 pub fn clear(&mut self) {
101 self.map.clear();
102 }
103 pub fn free(&mut self) {
104 self.map.clear();
105 self.map.shrink_to_fit();
106 }
107 pub fn iter(&self) -> IntMapIter<K, V> {
108 IntMapIter(self.map.iter().enumerate(), PhantomData)
109 }
110 pub fn iter_mut(&mut self) -> IntMapIterMut<K, V> {
111 IntMapIterMut(self.map.iter_mut().enumerate(), PhantomData)
112 }
113}
114
115impl<K: AsIndex, V> ops::Index<K> for IntMap<K, V> {
116 type Output = V;
117 fn index(&self, index: K) -> &Self::Output {
118 &self.map[index.as_index()]
119 }
120}
121impl<K: AsIndex, V> ops::IndexMut<K> for IntMap<K, V> {
122 fn index_mut(&mut self, index: K) -> &mut Self::Output {
123 &mut self.map[index.as_index()]
124 }
125}
126
/// Iterator over `(key, &value)` pairs of an [`IntMap`]; keys are
/// rebuilt from their dense positions via [`AsIndex::from_index`].
#[derive(Debug, Clone)]
pub struct IntMapIter<'a, K: AsIndex, V: 'a>(
    // Enumerated iterator over the backing vector; the enumeration
    // counter is the key's dense index.
    iter::Enumerate<slice::Iter<'a, V>>,
    PhantomData<fn(K)>,
);
132
133impl<'a, K: AsIndex, V: 'a> Iterator for IntMapIter<'a, K, V> {
134 type Item = (K, &'a V);
135 fn next(&mut self) -> Option<Self::Item> {
136 self.0.next().map(|(k, v)| (K::from_index(k), v))
137 }
138 fn size_hint(&self) -> (usize, Option<usize>) {
139 self.0.size_hint()
140 }
141}
142
/// Iterator over `(key, &mut value)` pairs of an [`IntMap`]; keys are
/// rebuilt from their dense positions via [`AsIndex::from_index`].
#[derive(Debug)]
pub struct IntMapIterMut<'a, K: AsIndex, V: 'a>(
    // Enumerated mutable iterator over the backing vector; the
    // enumeration counter is the key's dense index.
    iter::Enumerate<slice::IterMut<'a, V>>,
    PhantomData<fn(K)>,
);
148
149impl<'a, K: AsIndex, V: 'a> Iterator for IntMapIterMut<'a, K, V> {
150 type Item = (K, &'a mut V);
151 fn next(&mut self) -> Option<Self::Item> {
152 self.0.next().map(|(k, v)| (K::from_index(k), v))
153 }
154 fn size_hint(&self) -> (usize, Option<usize>) {
155 self.0.size_hint()
156 }
157}
158
/// A set of [`AsIndex`] keys with O(1) insert and membership test, and
/// iteration in insertion order.
#[derive(Debug)]
pub struct IntSet<K: AsIndex> {
    // in_set[k] is true iff `k` is currently a member.
    in_set: IntMap<K, bool>,
    // Members in insertion order; `insert` guarantees no duplicates.
    xs: Vec<K>,
}
164impl<K: AsIndex> Default for IntSet<K> {
165 fn default() -> Self {
166 Self {
167 in_set: IntMap::default(),
168 xs: vec![],
169 }
170 }
171}
172impl<K: AsIndex> Clone for IntSet<K> {
173 fn clone(&self) -> Self {
174 Self {
175 in_set: self.in_set.clone(),
176 xs: self.xs.clone(),
177 }
178 }
179}
180
181impl<K: AsIndex> IntSet<K> {
182 pub fn new() -> Self {
183 Self::default()
184 }
185 pub fn len(&self) -> usize {
186 self.xs.len()
187 }
188 pub fn clear(&mut self) {
189 for x in &mut self.in_set.map {
190 *x = false;
191 }
192 self.xs.clear()
193 }
194 pub fn as_slice(&self) -> &[K] {
195 &self.xs
196 }
197 pub fn insert(&mut self, k: K) {
198 self.in_set.reserve(k, false);
199 if !self.in_set[k] {
200 self.in_set[k] = true;
201 self.xs.push(k);
202 }
203 }
204 pub fn has(&self, k: K) -> bool {
205 self.in_set.has(k) && self.in_set[k]
206 }
207}
208impl<K: AsIndex> ops::Index<usize> for IntSet<K> {
209 type Output = K;
210 fn index(&self, index: usize) -> &Self::Output {
211 &self.xs[index]
212 }
213}
214
/// Backing storage for a binary heap of keys: the heap array plus a
/// reverse map from each key to its position in the array.
#[derive(Debug, Clone)]
pub struct HeapData<K: AsIndex> {
    // Implicit binary tree: children of slot i live at 2i+1 and 2i+2
    // (see `left_index`/`right_index` below).
    heap: Vec<K>,
    // indices[k] is k's slot in `heap`, or negative when k is absent.
    indices: IntMap<K, i32>,
}
220
221impl<K: AsIndex> Default for HeapData<K> {
222 fn default() -> Self {
223 Self {
224 heap: vec![],
225 indices: IntMap::new(),
226 }
227 }
228}
229
230impl<K: AsIndex> HeapData<K> {
231 pub fn new() -> Self {
232 Self::default()
233 }
234 pub fn len(&self) -> usize {
235 self.heap.len()
236 }
237 pub fn is_empty(&self) -> bool {
238 self.heap.is_empty()
239 }
240 pub fn in_heap(&self, k: K) -> bool {
241 self.indices.has(k) && self.indices[k] >= 0
242 }
243
244 pub fn promote<Comp: Comparator<K>>(&mut self, comp: Comp) -> Heap<K, Comp> {
245 Heap {
246 data: self,
247 comp: comp,
248 }
249 }
250}
251
252impl<K: AsIndex> ops::Index<usize> for HeapData<K> {
253 type Output = K;
254 fn index(&self, index: usize) -> &Self::Output {
255 &self.heap[index]
256 }
257}
258
259pub trait Comparator<T: ?Sized>: PartialComparator<T> {
260 fn cmp(&self, lhs: &T, rhs: &T) -> cmp::Ordering;
261 fn max(&self, lhs: T, rhs: T) -> T
262 where
263 T: Sized,
264 {
265 if self.ge(&rhs, &lhs) {
266 rhs
267 } else {
268 lhs
269 }
270 }
271 fn min(&self, lhs: T, rhs: T) -> T
272 where
273 T: Sized,
274 {
275 if self.le(&lhs, &rhs) {
276 lhs
277 } else {
278 rhs
279 }
280 }
281}
/// A partial ordering between values of `Lhs` and `Rhs` supplied by an
/// external comparator object; only `partial_cmp` must be implemented.
pub trait PartialComparator<Rhs: ?Sized, Lhs: ?Sized = Rhs> {
    /// Ordering of `lhs` relative to `rhs`, or `None` when incomparable.
    fn partial_cmp(&self, lhs: &Lhs, rhs: &Rhs) -> Option<cmp::Ordering>;

    /// True when `lhs` compares strictly less than `rhs`.
    fn lt(&self, lhs: &Lhs, rhs: &Rhs) -> bool {
        self.partial_cmp(lhs, rhs) == Some(cmp::Ordering::Less)
    }

    /// True when `lhs` compares less than or equal to `rhs`.
    fn le(&self, lhs: &Lhs, rhs: &Rhs) -> bool {
        matches!(
            self.partial_cmp(lhs, rhs),
            Some(cmp::Ordering::Less) | Some(cmp::Ordering::Equal)
        )
    }

    /// True when `lhs` compares strictly greater than `rhs`.
    fn gt(&self, lhs: &Lhs, rhs: &Rhs) -> bool {
        self.partial_cmp(lhs, rhs) == Some(cmp::Ordering::Greater)
    }

    /// True when `lhs` compares greater than or equal to `rhs`.
    fn ge(&self, lhs: &Lhs, rhs: &Rhs) -> bool {
        matches!(
            self.partial_cmp(lhs, rhs),
            Some(cmp::Ordering::Greater) | Some(cmp::Ordering::Equal)
        )
    }
}
309
/// A mutable borrow of [`HeapData`] paired with a comparator, exposing
/// binary-heap operations; the element smallest under `comp` sits at
/// the root.
#[derive(Debug)]
pub struct Heap<'a, K: AsIndex + 'a, Comp: Comparator<K>> {
    // Heap array and key->position index being operated on.
    data: &'a mut HeapData<K>,
    // Ordering used by all sift operations.
    comp: Comp,
}
315
316impl<'a, K: AsIndex + 'a, Comp: Comparator<K>> ops::Deref for Heap<'a, K, Comp> {
317 type Target = HeapData<K>;
318 fn deref(&self) -> &Self::Target {
319 &self.data
320 }
321}
322impl<'a, K: AsIndex + 'a, Comp: Comparator<K>> ops::DerefMut for Heap<'a, K, Comp> {
323 fn deref_mut(&mut self) -> &mut Self::Target {
324 &mut self.data
325 }
326}
327
impl<'a, K: AsIndex + 'a, Comp: Comparator<K>> Heap<'a, K, Comp> {
    /// Sift-up: moves the element at slot `i` toward the root until its
    /// parent no longer compares greater, keeping `indices` in sync for
    /// every element that shifts.
    fn percolate_up(&mut self, mut i: u32) {
        let x = self.heap[i as usize];
        let mut p = parent_index(i);

        // `i != 0` must be checked first: `parent_index(0)` wraps around.
        while i != 0 && self.comp.lt(&x, &self.heap[p as usize]) {
            // Pull the parent down into slot `i` and record its new slot.
            self.heap[i as usize] = self.heap[p as usize];
            let tmp = self.heap[p as usize];
            self.indices[tmp] = i as i32;
            i = p;
            p = parent_index(p);
        }
        // Drop `x` into its final slot.
        self.heap[i as usize] = x;
        self.indices[x] = i as i32;
    }

    /// Sift-down: moves the element at slot `i` toward the leaves,
    /// swapping with its smaller child while that child compares less.
    fn percolate_down(&mut self, mut i: u32) {
        let x = self.heap[i as usize];
        while (left_index(i) as usize) < self.heap.len() {
            // Pick the smaller child (the left one when only it exists,
            // or on ties).
            let child = if (right_index(i) as usize) < self.heap.len()
                && self.comp.lt(
                    &self.heap[right_index(i) as usize],
                    &self.heap[left_index(i) as usize],
                ) {
                right_index(i)
            } else {
                left_index(i)
            };
            if !self.comp.lt(&self.heap[child as usize], &x) {
                break;
            }
            // Move the child up and record its new slot.
            self.heap[i as usize] = self.heap[child as usize];
            let tmp = self.heap[i as usize];
            self.indices[tmp] = i as i32;
            i = child;
        }
        // Drop `x` into its final slot.
        self.heap[i as usize] = x;
        self.indices[x] = i as i32;
    }

    /// Restores heap order after `k`'s priority decreased, so `k` can
    /// only need to move toward the root. `k` must be in the heap
    /// (checked in debug builds).
    pub fn decrease(&mut self, k: K) {
        debug_assert!(self.in_heap(k));
        let k_index = self.indices[k];
        self.percolate_up(k_index as u32);
    }

    /// Restores heap order after `k`'s priority increased, so `k` can
    /// only need to move toward the leaves. `k` must be in the heap
    /// (checked in debug builds).
    pub fn increase(&mut self, k: K) {
        debug_assert!(self.in_heap(k));
        let k_index = self.indices[k];
        self.percolate_down(k_index as u32);
    }

    /// Inserts `k` if absent; otherwise re-establishes heap order for it
    /// by sifting in both directions (at most one direction moves it).
    pub fn update(&mut self, k: K) {
        if !self.in_heap(k) {
            self.insert(k);
        } else {
            let k_index = self.indices[k];
            self.percolate_up(k_index as u32);
            let k_index = self.indices[k];
            self.percolate_down(k_index as u32);
        }
    }

    /// Inserts `k` into the heap. `k` must not already be present
    /// (checked in debug builds).
    pub fn insert(&mut self, k: K) {
        // Make sure `indices` has a slot for `k`, padding new slots
        // with -1 ("absent").
        self.indices.reserve(k, -1);
        debug_assert!(!self.in_heap(k));

        // Append at the end, then sift up into position.
        self.indices[k] = self.heap.len() as i32;
        self.heap.push(k);
        let k_index = self.indices[k];
        self.percolate_up(k_index as u32);
    }

    /// Removes `k` from the heap. `k` must be present (checked in debug
    /// builds).
    pub fn remove(&mut self, k: K) {
        debug_assert!(self.in_heap(k));
        let k_pos = self.indices[k] as u32;
        self.indices[k] = -1;
        if (k_pos as usize) < self.heap.len() - 1 {
            // Fill the hole with the last element, then sift it down.
            self.heap[k_pos as usize] = *self.heap.last().unwrap();
            let tmp = self.heap[k_pos as usize];
            self.indices[tmp] = k_pos as i32;
            self.heap.pop().expect("cannot pop from empty heap");
            self.percolate_down(k_pos);
        } else {
            // `k` occupied the last slot: just drop it.
            self.heap.pop().expect("cannot pop from empty heap");
        }
    }

    /// Removes and returns the root (the minimum under `comp`). Panics
    /// when the heap is empty.
    pub fn remove_min(&mut self) -> K {
        let x = *self.heap.first().expect("heap is empty");
        let lastval = *self.heap.last().expect("heap is empty");
        // Move the last element to the root and mark the old root absent.
        self.heap[0] = lastval;
        self.indices[lastval] = 0;
        self.indices[x] = -1;
        self.heap.pop().expect("cannot pop from empty heap");
        // A 0- or 1-element heap is trivially ordered.
        if self.heap.len() > 1 {
            self.percolate_down(0);
        }
        x
    }

    /// Rebuilds the heap to contain exactly the keys in `ns` using
    /// bottom-up (Floyd) heapify. Every key in `ns` must already have a
    /// reserved slot in `indices` (checked in debug builds).
    pub fn build(&mut self, ns: &[K]) {
        {
            // Scoped reborrow of `data` so `indices` can be written while
            // `heap` is being read.
            let data = &mut self.data;
            for &x in &data.heap {
                data.indices[x] = -1;
            }
        }
        self.heap.clear();

        for (i, &x) in ns.iter().enumerate() {
            debug_assert!(self.indices.has(x));
            self.indices[x] = i as i32;
            self.heap.push(x);
        }

        // Sift down every internal node, last one first.
        let mut i = self.heap.len() as i32 / 2 - 1;
        while i >= 0 {
            self.percolate_down(i as u32);
            i -= 1;
        }
    }

    /// Empties the heap, marking every element absent; when `dispose` is
    /// true the heap array's allocation is also released.
    pub fn clear_dispose(&mut self, dispose: bool) {
        {
            // Scoped reborrow of `data` so `indices` can be written while
            // `heap` is being read.
            let data = &mut self.data;
            for &x in &data.heap {
                data.indices[x] = -1;
            }
        }
        self.heap.clear();
        if dispose {
            self.heap.shrink_to_fit();
        }
    }

    /// Empties the heap, keeping its allocation.
    pub fn clear(&mut self) {
        self.clear_dispose(false)
    }
}
470
/// Slot of the left child of heap slot `i`.
fn left_index(i: u32) -> u32 {
    2 * i + 1
}
/// Slot of the right child of heap slot `i`.
fn right_index(i: u32) -> u32 {
    2 * (i + 1)
}
/// Slot of the parent of heap slot `i`. For the root (`i == 0`) the
/// subtraction wraps; callers guard with `i != 0` before using the result.
fn parent_index(i: u32) -> u32 {
    i.wrapping_sub(1) / 2
}