use super::raw::{RawIntoParIter, RawParDrain, RawParIter};
use crate::hash_map::HashMap;
use crate::raw::{Allocator, Global};
use core::fmt;
use core::hash::{BuildHasher, Hash};
use core::marker::PhantomData;
use rayon::iter::plumbing::UnindexedConsumer;
use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelExtend, ParallelIterator};

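/// Parallel iterator over shared references to the entries of a map.
///
/// This iterator is created by the `par_iter` method on `HashMap`
/// (provided by rayon's `IntoParallelRefIterator` trait).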
pub struct ParIter<'a, K, V> {
    inner: RawParIter<(K, V)>,
    marker: PhantomData<(&'a K, &'a V)>,
}

impl<'a, K: Sync, V: Sync> ParallelIterator for ParIter<'a, K, V> {
    type Item = (&'a K, &'a V);

    #[cfg_attr(feature = "inline-more", inline)]
    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        self.inner
            .map(|x| unsafe {
                let r = x.as_ref();
                (&r.0, &r.1)
            })
            .drive_unindexed(consumer)
    }
}

impl<K, V> Clone for ParIter<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}

impl<K: fmt::Debug + Eq + Hash, V: fmt::Debug> fmt::Debug for ParIter<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let iter = unsafe { self.inner.iter() }.map(|x| unsafe {
            let r = x.as_ref();
            (&r.0, &r.1)
        });
        f.debug_list().entries(iter).finish()
    }
}

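/// Parallel iterator over shared references to the keys of a map.
///
/// This iterator is created by the `par_keys` method on `HashMap`.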
pub struct ParKeys<'a, K, V> {
    inner: RawParIter<(K, V)>,
    marker: PhantomData<(&'a K, &'a V)>,
}

impl<'a, K: Sync, V: Sync> ParallelIterator for ParKeys<'a, K, V> {
    type Item = &'a K;

    #[cfg_attr(feature = "inline-more", inline)]
    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        self.inner
            .map(|x| unsafe { &x.as_ref().0 })
            .drive_unindexed(consumer)
    }
}

impl<K, V> Clone for ParKeys<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}

impl<K: fmt::Debug + Eq + Hash, V> fmt::Debug for ParKeys<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let iter = unsafe { self.inner.iter() }.map(|x| unsafe { &x.as_ref().0 });
        f.debug_list().entries(iter).finish()
    }
}

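/// Parallel iterator over shared references to the values of a map.
///
/// This iterator is created by the `par_values` method on `HashMap`.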
pub struct ParValues<'a, K, V> {
    inner: RawParIter<(K, V)>,
    marker: PhantomData<(&'a K, &'a V)>,
}

impl<'a, K: Sync, V: Sync> ParallelIterator for ParValues<'a, K, V> {
    type Item = &'a V;

    #[cfg_attr(feature = "inline-more", inline)]
    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        self.inner
            .map(|x| unsafe { &x.as_ref().1 })
            .drive_unindexed(consumer)
    }
}

impl<K, V> Clone for ParValues<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}

impl<K: Eq + Hash, V: fmt::Debug> fmt::Debug for ParValues<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let iter = unsafe { self.inner.iter() }.map(|x| unsafe { &x.as_ref().1 });
        f.debug_list().entries(iter).finish()
    }
}

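/// Parallel iterator over mutable references to the entries of a map.
///
/// This iterator is created by the `par_iter_mut` method on `HashMap`
/// (provided by rayon's `IntoParallelRefMutIterator` trait).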
pub struct ParIterMut<'a, K, V> {
    inner: RawParIter<(K, V)>,
    marker: PhantomData<(&'a K, &'a mut V)>,
}

impl<'a, K: Sync, V: Send> ParallelIterator for ParIterMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);

    #[cfg_attr(feature = "inline-more", inline)]
    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        self.inner
            .map(|x| unsafe {
                let r = x.as_mut();
                (&r.0, &mut r.1)
            })
            .drive_unindexed(consumer)
    }
}

impl<K: fmt::Debug + Eq + Hash, V: fmt::Debug> fmt::Debug for ParIterMut<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        ParIter {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
        .fmt(f)
    }
}

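/// Parallel iterator over mutable references to the values of a map.
///
/// This iterator is created by the `par_values_mut` method on `HashMap`.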
pub struct ParValuesMut<'a, K, V> {
    inner: RawParIter<(K, V)>,
    marker: PhantomData<(&'a K, &'a mut V)>,
}

impl<'a, K: Sync, V: Send> ParallelIterator for ParValuesMut<'a, K, V> {
    type Item = &'a mut V;

    #[cfg_attr(feature = "inline-more", inline)]
    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        self.inner
            .map(|x| unsafe { &mut x.as_mut().1 })
            .drive_unindexed(consumer)
    }
}

impl<K: Eq + Hash, V: fmt::Debug> fmt::Debug for ParValuesMut<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        ParValues {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
        .fmt(f)
    }
}

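/// Parallel iterator over the entries of a map, consuming it.
///
/// This iterator is created by the `into_par_iter` method on `HashMap`
/// (provided by rayon's `IntoParallelIterator` trait).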
pub struct IntoParIter<K, V, A: Allocator = Global> {
    inner: RawIntoParIter<(K, V), A>,
}

impl<K: Send, V: Send, A: Allocator + Send> ParallelIterator for IntoParIter<K, V, A> {
    type Item = (K, V);

    #[cfg_attr(feature = "inline-more", inline)]
    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        self.inner.drive_unindexed(consumer)
    }
}

impl<K: fmt::Debug + Eq + Hash, V: fmt::Debug, A: Allocator> fmt::Debug for IntoParIter<K, V, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        ParIter {
            inner: unsafe { self.inner.par_iter() },
            marker: PhantomData,
        }
        .fmt(f)
    }
}

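/// Parallel draining iterator over the entries of a map.
///
/// This iterator is created by the `par_drain` method on `HashMap`.
/// It yields the entries by value and leaves the map empty, while
/// keeping its allocated memory for reuse.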
pub struct ParDrain<'a, K, V, A: Allocator = Global> {
    inner: RawParDrain<'a, (K, V), A>,
}

impl<K: Send, V: Send, A: Allocator + Sync> ParallelIterator for ParDrain<'_, K, V, A> {
    type Item = (K, V);

    #[cfg_attr(feature = "inline-more", inline)]
    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        self.inner.drive_unindexed(consumer)
    }
}

impl<K: fmt::Debug + Eq + Hash, V: fmt::Debug, A: Allocator> fmt::Debug for ParDrain<'_, K, V, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        ParIter {
            inner: unsafe { self.inner.par_iter() },
            marker: PhantomData,
        }
        .fmt(f)
    }
}

impl<K: Sync, V: Sync, S, A: Allocator> HashMap<K, V, S, A> {
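    /// Visits (potentially in parallel) immutably borrowed keys in an unspecified order.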
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn par_keys(&self) -> ParKeys<'_, K, V> {
        ParKeys {
            inner: unsafe { self.table.par_iter() },
            marker: PhantomData,
        }
    }

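    /// Visits (potentially in parallel) immutably borrowed values in an unspecified order.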
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn par_values(&self) -> ParValues<'_, K, V> {
        ParValues {
            inner: unsafe { self.table.par_iter() },
            marker: PhantomData,
        }
    }
}

impl<K: Send, V: Send, S, A: Allocator> HashMap<K, V, S, A> {
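    /// Visits (potentially in parallel) mutably borrowed values in an unspecified order.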
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn par_values_mut(&mut self) -> ParValuesMut<'_, K, V> {
        ParValuesMut {
            inner: unsafe { self.table.par_iter() },
            marker: PhantomData,
        }
    }

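    /// Consumes (potentially in parallel) all values in an unspecified order,
    /// while preserving the map's allocated memory for reuse.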
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn par_drain(&mut self) -> ParDrain<'_, K, V, A> {
        ParDrain {
            inner: self.table.par_drain(),
        }
    }
}

impl<K, V, S, A> HashMap<K, V, S, A>
where
    K: Eq + Hash + Sync,
    V: PartialEq + Sync,
    S: BuildHasher + Sync,
    A: Allocator + Sync,
{
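    /// Returns `true` if the map is equal to another,
    /// i.e. both maps contain the same keys mapped to the same values.
    ///
    /// This method runs in a potentially parallel fashion.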
    pub fn par_eq(&self, other: &Self) -> bool {
        self.len() == other.len()
            && self
                .into_par_iter()
                .all(|(key, value)| other.get(key).map_or(false, |v| *value == *v))
    }
}

impl<K: Send, V: Send, S, A: Allocator + Send> IntoParallelIterator for HashMap<K, V, S, A> {
    type Item = (K, V);
    type Iter = IntoParIter<K, V, A>;

    #[cfg_attr(feature = "inline-more", inline)]
    fn into_par_iter(self) -> Self::Iter {
        IntoParIter {
            inner: self.table.into_par_iter(),
        }
    }
}

impl<'a, K: Sync, V: Sync, S, A: Allocator> IntoParallelIterator for &'a HashMap<K, V, S, A> {
    type Item = (&'a K, &'a V);
    type Iter = ParIter<'a, K, V>;

    #[cfg_attr(feature = "inline-more", inline)]
    fn into_par_iter(self) -> Self::Iter {
        ParIter {
            inner: unsafe { self.table.par_iter() },
            marker: PhantomData,
        }
    }
}

impl<'a, K: Sync, V: Send, S, A: Allocator> IntoParallelIterator for &'a mut HashMap<K, V, S, A> {
    type Item = (&'a K, &'a mut V);
    type Iter = ParIterMut<'a, K, V>;

    #[cfg_attr(feature = "inline-more", inline)]
    fn into_par_iter(self) -> Self::Iter {
        ParIterMut {
            inner: unsafe { self.table.par_iter() },
            marker: PhantomData,
        }
    }
}

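/// Collect (key, value) pairs from a parallel iterator into a map.
/// If multiple pairs correspond to the same key, the ones produced
/// earlier in the parallel iterator are overwritten, just as with a
/// sequential iterator.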
impl<K, V, S> FromParallelIterator<(K, V)> for HashMap<K, V, S, Global>
where
    K: Eq + Hash + Send,
    V: Send,
    S: BuildHasher + Default,
{
    fn from_par_iter<P>(par_iter: P) -> Self
    where
        P: IntoParallelIterator<Item = (K, V)>,
    {
        let mut map = HashMap::default();
        map.par_extend(par_iter);
        map
    }
}

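/// Extend a hash map with items from a parallel iterator.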
impl<K, V, S, A> ParallelExtend<(K, V)> for HashMap<K, V, S, A>
where
    K: Eq + Hash + Send,
    V: Send,
    S: BuildHasher,
    A: Allocator,
{
    fn par_extend<I>(&mut self, par_iter: I)
    where
        I: IntoParallelIterator<Item = (K, V)>,
    {
        extend(self, par_iter);
    }
}

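/// Extend a hash map with copied items from a parallel iterator.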
impl<'a, K, V, S, A> ParallelExtend<(&'a K, &'a V)> for HashMap<K, V, S, A>
where
    K: Copy + Eq + Hash + Sync,
    V: Copy + Sync,
    S: BuildHasher,
    A: Allocator,
{
    fn par_extend<I>(&mut self, par_iter: I)
    where
        I: IntoParallelIterator<Item = (&'a K, &'a V)>,
    {
        extend(self, par_iter);
    }
}

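// Collect the parallel iterator into intermediate vectors first, then extend the
// map sequentially; there is no hash-map-specific parallel insertion here.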
fn extend<K, V, S, A, I>(map: &mut HashMap<K, V, S, A>, par_iter: I)
where
    K: Eq + Hash,
    S: BuildHasher,
    I: IntoParallelIterator,
    A: Allocator,
    HashMap<K, V, S, A>: Extend<I::Item>,
{
    let (list, len) = super::helpers::collect(par_iter);

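    // Keys may already be present or may appear multiple times in the iterator.
    // Reserve the entire length if the map is empty; otherwise reserve half the
    // length (rounded up), so the map will only resize twice in the worst case.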
    let reserve = if map.is_empty() { len } else { (len + 1) / 2 };
    map.reserve(reserve);
    for vec in list {
        map.extend(vec);
    }
}

#[cfg(test)]
mod test_par_map {
    use alloc::vec::Vec;
    use core::hash::{Hash, Hasher};
    use core::sync::atomic::{AtomicUsize, Ordering};

    use rayon::prelude::*;

    use crate::hash_map::HashMap;

    struct Droppable<'a> {
        k: usize,
        counter: &'a AtomicUsize,
    }

    impl Droppable<'_> {
        fn new(k: usize, counter: &AtomicUsize) -> Droppable<'_> {
            counter.fetch_add(1, Ordering::Relaxed);

            Droppable { k, counter }
        }
    }

    impl Drop for Droppable<'_> {
        fn drop(&mut self) {
            self.counter.fetch_sub(1, Ordering::Relaxed);
        }
    }

    impl Clone for Droppable<'_> {
        fn clone(&self) -> Self {
            Droppable::new(self.k, self.counter)
        }
    }

    impl Hash for Droppable<'_> {
        fn hash<H>(&self, state: &mut H)
        where
            H: Hasher,
        {
            self.k.hash(state);
        }
    }

    impl PartialEq for Droppable<'_> {
        fn eq(&self, other: &Self) -> bool {
            self.k == other.k
        }
    }

    impl Eq for Droppable<'_> {}

    #[test]
    fn test_into_iter_drops() {
        let key = AtomicUsize::new(0);
        let value = AtomicUsize::new(0);

        let hm = {
            let mut hm = HashMap::new();

            assert_eq!(key.load(Ordering::Relaxed), 0);
            assert_eq!(value.load(Ordering::Relaxed), 0);

            for i in 0..100 {
                let d1 = Droppable::new(i, &key);
                let d2 = Droppable::new(i + 100, &value);
                hm.insert(d1, d2);
            }

            assert_eq!(key.load(Ordering::Relaxed), 100);
            assert_eq!(value.load(Ordering::Relaxed), 100);

            hm
        };

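        // Cloning the map must not leak: the clone's entries are dropped again
        // right away, so the live counts stay at 100.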
        drop(hm.clone());

        assert_eq!(key.load(Ordering::Relaxed), 100);
        assert_eq!(value.load(Ordering::Relaxed), 100);

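        // Creating a parallel iterator from a clone and dropping it unconsumed
        // must also drop all of the clone's entries.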
        drop(hm.clone().into_par_iter());

        {
            assert_eq!(key.load(Ordering::Relaxed), 100);
            assert_eq!(value.load(Ordering::Relaxed), 100);

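            // Consume the map in parallel, keeping only the entries whose key
            // index is below 50; the rest are dropped during collection.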
            let _v: Vec<_> = hm.into_par_iter().filter(|(key, _)| key.k < 50).collect();

            assert_eq!(key.load(Ordering::Relaxed), 50);
            assert_eq!(value.load(Ordering::Relaxed), 50);
        };

        assert_eq!(key.load(Ordering::Relaxed), 0);
        assert_eq!(value.load(Ordering::Relaxed), 0);
    }

    #[test]
    fn test_drain_drops() {
        let key = AtomicUsize::new(0);
        let value = AtomicUsize::new(0);

        let mut hm = {
            let mut hm = HashMap::new();

            assert_eq!(key.load(Ordering::Relaxed), 0);
            assert_eq!(value.load(Ordering::Relaxed), 0);

            for i in 0..100 {
                let d1 = Droppable::new(i, &key);
                let d2 = Droppable::new(i + 100, &value);
                hm.insert(d1, d2);
            }

            assert_eq!(key.load(Ordering::Relaxed), 100);
            assert_eq!(value.load(Ordering::Relaxed), 100);

            hm
        };

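        // Cloning the map must not leak: the clone's entries are dropped again
        // right away, so the live counts stay at 100.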
        drop(hm.clone());

        assert_eq!(key.load(Ordering::Relaxed), 100);
        assert_eq!(value.load(Ordering::Relaxed), 100);

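        // Creating a parallel drain from a clone and dropping it unconsumed
        // must drop all of the clone's entries as well.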
        drop(hm.clone().par_drain());

        {
            assert_eq!(key.load(Ordering::Relaxed), 100);
            assert_eq!(value.load(Ordering::Relaxed), 100);

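            // Drain the map, keeping only the entries whose key index is below
            // 50; the rest are dropped as the drain is consumed.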
            let _v: Vec<_> = hm.drain().filter(|(key, _)| key.k < 50).collect();
            assert!(hm.is_empty());

            assert_eq!(key.load(Ordering::Relaxed), 50);
            assert_eq!(value.load(Ordering::Relaxed), 50);
        };

        assert_eq!(key.load(Ordering::Relaxed), 0);
        assert_eq!(value.load(Ordering::Relaxed), 0);
    }

    #[test]
    fn test_empty_iter() {
        let mut m: HashMap<isize, bool> = HashMap::new();
        assert_eq!(m.par_drain().count(), 0);
        assert_eq!(m.par_keys().count(), 0);
        assert_eq!(m.par_values().count(), 0);
        assert_eq!(m.par_values_mut().count(), 0);
        assert_eq!(m.par_iter().count(), 0);
        assert_eq!(m.par_iter_mut().count(), 0);
        assert_eq!(m.len(), 0);
        assert!(m.is_empty());
        assert_eq!(m.into_par_iter().count(), 0);
    }

    #[test]
    fn test_iterate() {
        let mut m = HashMap::with_capacity(4);
        for i in 0..32 {
            assert!(m.insert(i, i * 2).is_none());
        }
        assert_eq!(m.len(), 32);

        let observed = AtomicUsize::new(0);

        m.par_iter().for_each(|(k, v)| {
            assert_eq!(*v, *k * 2);
            observed.fetch_or(1 << *k, Ordering::Relaxed);
        });
        assert_eq!(observed.into_inner(), 0xFFFF_FFFF);
    }

    #[test]
    fn test_keys() {
        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
        let map: HashMap<_, _> = vec.into_par_iter().collect();
        let keys: Vec<_> = map.par_keys().cloned().collect();
        assert_eq!(keys.len(), 3);
        assert!(keys.contains(&1));
        assert!(keys.contains(&2));
        assert!(keys.contains(&3));
    }

    #[test]
    fn test_values() {
        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
        let map: HashMap<_, _> = vec.into_par_iter().collect();
        let values: Vec<_> = map.par_values().cloned().collect();
        assert_eq!(values.len(), 3);
        assert!(values.contains(&'a'));
        assert!(values.contains(&'b'));
        assert!(values.contains(&'c'));
    }

    #[test]
    fn test_values_mut() {
        let vec = vec![(1, 1), (2, 2), (3, 3)];
        let mut map: HashMap<_, _> = vec.into_par_iter().collect();
        map.par_values_mut().for_each(|value| *value *= 2);
        let values: Vec<_> = map.par_values().cloned().collect();
        assert_eq!(values.len(), 3);
        assert!(values.contains(&2));
        assert!(values.contains(&4));
        assert!(values.contains(&6));
    }

    #[test]
    fn test_eq() {
        let mut m1 = HashMap::new();
        m1.insert(1, 2);
        m1.insert(2, 3);
        m1.insert(3, 4);

        let mut m2 = HashMap::new();
        m2.insert(1, 2);
        m2.insert(2, 3);

        assert!(!m1.par_eq(&m2));

        m2.insert(3, 4);

        assert!(m1.par_eq(&m2));
    }

    #[test]
    fn test_from_iter() {
        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];

        let map: HashMap<_, _> = xs.par_iter().cloned().collect();

        for &(k, v) in &xs {
            assert_eq!(map.get(&k), Some(&v));
        }
    }

    #[test]
    fn test_extend_ref() {
        let mut a = HashMap::new();
        a.insert(1, "one");
        let mut b = HashMap::new();
        b.insert(2, "two");
        b.insert(3, "three");

        a.par_extend(&b);

        assert_eq!(a.len(), 3);
        assert_eq!(a[&1], "one");
        assert_eq!(a[&2], "two");
        assert_eq!(a[&3], "three");
    }
}