1use crate::alloc::alloc::{alloc, dealloc, handle_alloc_error};
2use crate::scopeguard::guard;
3use crate::CollectionAllocErr;
4use core::alloc::Layout;
5use core::hint;
6use core::iter::FusedIterator;
7use core::marker::PhantomData;
8use core::mem;
9use core::mem::ManuallyDrop;
10use core::ptr::NonNull;
11
// Select the control-group implementation: the SSE2 version on x86/x86_64
// targets that guarantee SSE2 (but not under Miri, which cannot execute the
// SIMD intrinsics), and the portable word-at-a-time version everywhere else.
cfg_if! {
    if #[cfg(all(
        target_feature = "sse2",
        any(target_arch = "x86", target_arch = "x86_64"),
        not(miri)
    ))] {
        mod sse2;
        use sse2 as imp;
    } else {
        #[path = "generic.rs"]
        mod generic;
        use generic as imp;
    }
}
34
35mod bitmask;
36
37use self::bitmask::BitMask;
38use self::imp::Group;
39
// Branch-prediction hints: on nightly use the real compiler intrinsics;
// on stable fall back to no-op shims with identical signatures so call
// sites can be written uniformly.
#[cfg(feature = "nightly")]
use core::intrinsics::{likely, unlikely};
#[cfg(not(feature = "nightly"))]
#[inline]
fn likely(b: bool) -> bool {
    b
}
#[cfg(not(feature = "nightly"))]
#[inline]
fn unlikely(b: bool) -> bool {
    b
}
54
/// Returns the element-count distance between two pointers into the same
/// array (nightly: delegates to the `offset_from` pointer method).
#[cfg(feature = "nightly")]
#[cfg_attr(feature = "inline-more", inline)]
unsafe fn offset_from<T>(to: *const T, from: *const T) -> usize {
    to.offset_from(from) as usize
}
/// Returns the element-count distance between two pointers into the same
/// array.
///
/// Stable fallback for `pointer::offset_from`, computed on the raw
/// addresses. `T` must not be zero-sized (size 0 would divide by zero);
/// callers such as `bucket_index` handle the ZST case separately.
#[cfg(not(feature = "nightly"))]
#[cfg_attr(feature = "inline-more", inline)]
unsafe fn offset_from<T>(to: *const T, from: *const T) -> usize {
    let byte_distance = (to as usize) - (from as usize);
    byte_distance / mem::size_of::<T>()
}
65
/// Whether allocation/capacity errors are reported to the caller
/// (`Fallible`, used by `try_reserve`) or abort the process / panic
/// (`Infallible`, used by the ordinary growing paths).
#[derive(Copy, Clone)]
enum Fallibility {
    Fallible,
    Infallible,
}
72
73impl Fallibility {
74 #[cfg_attr(feature = "inline-more", inline)]
76 fn capacity_overflow(self) -> CollectionAllocErr {
77 match self {
78 Fallibility::Fallible => CollectionAllocErr::CapacityOverflow,
79 Fallibility::Infallible => panic!("Hash table capacity overflow"),
80 }
81 }
82
83 #[cfg_attr(feature = "inline-more", inline)]
85 fn alloc_err(self, layout: Layout) -> CollectionAllocErr {
86 match self {
87 Fallibility::Fallible => CollectionAllocErr::AllocErr { layout },
88 Fallibility::Infallible => handle_alloc_error(layout),
89 }
90 }
91}
92
/// Control byte value for an empty bucket (all bits set; top bit marks it
/// as "special", low bit distinguishes it from DELETED).
const EMPTY: u8 = 0b1111_1111;

/// Control byte value for a deleted bucket (tombstone: special, low bit clear).
const DELETED: u8 = 0b1000_0000;
/// Checks whether a control byte represents a full bucket.
///
/// Full buckets store a 7-bit hash fragment, so their control byte is
/// always below 0x80 (top bit clear); EMPTY/DELETED have the top bit set.
#[inline]
fn is_full(ctrl: u8) -> bool {
    ctrl < 0x80
}
104
/// Checks whether a control byte represents a special state
/// (EMPTY or DELETED rather than a full bucket): the top bit is set.
#[inline]
fn is_special(ctrl: u8) -> bool {
    ctrl >= 0x80
}
110
111#[inline]
113fn special_is_empty(ctrl: u8) -> bool {
114 debug_assert!(is_special(ctrl));
115 ctrl & 0x01 != 0
116}
117
/// Primary hash function: selects the starting probe position.
/// Simply truncates the 64-bit hash to `usize`; callers mask the result
/// with `bucket_mask`, so only the low bits are ever used.
#[inline]
#[allow(clippy::cast_possible_truncation)]
fn h1(hash: u64) -> usize {
    hash as usize
}
125
/// Secondary hash function: the 7-bit fragment stored in a full bucket's
/// control byte.
///
/// Takes the top 7 bits of the *usable* part of the hash — since `h1` is
/// truncated to `usize`, only `min(size_of::<usize>, size_of::<u64>)` bytes
/// of the hash are meaningful on this platform.
#[inline]
#[allow(clippy::cast_possible_truncation)]
fn h2(hash: u64) -> u8 {
    let usable_bits = usize::min(mem::size_of::<usize>(), mem::size_of::<u64>()) * 8;
    ((hash >> (usable_bits - 7)) & 0x7f) as u8
}
137
/// Probe sequence over the groups of a table, based on triangular numbers:
/// the stride grows by one group width on every step, which (for
/// power-of-two bucket counts) cycles through all positions.
struct ProbeSeq {
    // Mask used to wrap positions: `buckets - 1`.
    bucket_mask: usize,
    // Position of the next group to probe.
    pos: usize,
    // Current stride; grows by `Group::WIDTH` each step.
    stride: usize,
}
152
impl Iterator for ProbeSeq {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<usize> {
        // The sequence should terminate (via the caller finding a slot)
        // before it has walked further than the table is long.
        debug_assert!(
            self.stride <= self.bucket_mask,
            "Went past end of probe sequence"
        );

        let result = self.pos;
        // Quadratic (triangular-number) probing: strides of WIDTH, 2*WIDTH,
        // 3*WIDTH, ... wrapped by the bucket mask.
        self.stride += Group::WIDTH;
        self.pos += self.stride;
        self.pos &= self.bucket_mask;
        Some(result)
    }
}
171
/// Returns the number of buckets (always a power of two) needed to hold
/// `cap` elements, or `None` if the intermediate arithmetic overflows.
#[cfg_attr(feature = "inline-more", inline)]
#[cfg_attr(target_os = "emscripten", inline(never))]
fn capacity_to_buckets(cap: usize) -> Option<usize> {
    // Small tables only need one spare slot (to guarantee at least one
    // EMPTY control byte); larger tables target a maximum 7/8 load factor.
    let required = match cap {
        0..=7 => cap + 1,
        _ => cap.checked_mul(8)? / 7,
    };

    Some(required.next_power_of_two())
}
196
/// Returns the maximum number of elements a table with the given bucket
/// mask (`buckets - 1`) can hold; the inverse of `capacity_to_buckets`.
#[cfg_attr(feature = "inline-more", inline)]
fn bucket_mask_to_capacity(bucket_mask: usize) -> usize {
    match bucket_mask {
        // Tables of up to 8 buckets keep exactly one slot free.
        mask @ 0..=7 => mask,
        // Larger tables enforce the 7/8 maximum load factor.
        mask => (mask + 1) / 8 * 7,
    }
}
210
/// Computes the combined allocation layout for a table with `buckets`
/// buckets: control bytes first, then the element array. Returns the
/// layout and the byte offset of the element array, or `None` on overflow.
#[cfg_attr(feature = "inline-more", inline)]
#[cfg(feature = "nightly")]
fn calculate_layout<T>(buckets: usize) -> Option<(Layout, usize)> {
    debug_assert!(buckets.is_power_of_two());

    // Layout of the element array.
    let data = Layout::array::<T>(buckets).ok()?;

    // One control byte per bucket plus Group::WIDTH trailing bytes (the
    // mirrored prefix, see set_ctrl) so group loads near the end stay in
    // bounds; aligned for aligned group loads.
    let ctrl = unsafe { Layout::from_size_align_unchecked(buckets + Group::WIDTH, Group::WIDTH) };

    ctrl.extend(data).ok()
}
235
/// Stable equivalent of the nightly `calculate_layout`: manually performs
/// the `Layout::extend` arithmetic (control bytes, padding, element array).
/// Returns the layout and the element-array offset, or `None` on overflow.
#[cfg_attr(feature = "inline-more", inline)]
#[cfg(not(feature = "nightly"))]
fn calculate_layout<T>(buckets: usize) -> Option<(Layout, usize)> {
    debug_assert!(buckets.is_power_of_two());

    // The whole allocation is aligned to the stricter of the element
    // alignment and the group width (control bytes start at offset 0).
    let data_align = usize::max(mem::align_of::<T>(), Group::WIDTH);
    // Round the end of the control bytes up to the data alignment.
    let data_offset = (buckets + Group::WIDTH).checked_add(data_align - 1)? & !(data_align - 1);
    let len = data_offset.checked_add(mem::size_of::<T>().checked_mul(buckets)?)?;

    Some((
        unsafe { Layout::from_size_align_unchecked(len, data_align) },
        data_offset,
    ))
}
253
/// A reference to a hash table bucket containing a `T`.
///
/// For zero-sized `T` the pointer field stores the bucket *index* rather
/// than a real address (see `from_base_index` / `as_ptr` / `add`).
pub struct Bucket<T> {
    ptr: *const T,
}

// NOTE(review): this makes `Bucket` unconditionally `Send`, regardless of
// `T`. Presumably needed for the parallel (rayon) adapters and sound only
// because buckets can only be reached through a `RawTable`, whose own Send
// impl does require `T: Send` — confirm against the rayon code.
unsafe impl<T> Send for Bucket<T> {}
267
268impl<T> Clone for Bucket<T> {
269 #[cfg_attr(feature = "inline-more", inline)]
270 fn clone(&self) -> Self {
271 Self { ptr: self.ptr }
272 }
273}
274
impl<T> Bucket<T> {
    /// Creates a bucket for slot `index` of the array starting at `base`.
    /// For zero-sized `T` the index itself is smuggled in the pointer field.
    #[cfg_attr(feature = "inline-more", inline)]
    unsafe fn from_base_index(base: *const T, index: usize) -> Self {
        let ptr = if mem::size_of::<T>() == 0 {
            index as *const T
        } else {
            base.add(index)
        };
        Self { ptr }
    }
    /// Returns a usable pointer to the element. For zero-sized `T` this is
    /// a dangling but well-aligned pointer (the stored index is not an
    /// address and must not be dereferenced as one).
    #[cfg_attr(feature = "inline-more", inline)]
    pub unsafe fn as_ptr(&self) -> *mut T {
        if mem::size_of::<T>() == 0 {
            mem::align_of::<T>() as *mut T
        } else {
            self.ptr as *mut T
        }
    }
    /// Returns the bucket `offset` slots after this one
    /// (plain index arithmetic in the zero-sized case).
    #[cfg_attr(feature = "inline-more", inline)]
    unsafe fn add(&self, offset: usize) -> Self {
        let ptr = if mem::size_of::<T>() == 0 {
            (self.ptr as usize + offset) as *const T
        } else {
            self.ptr.add(offset)
        };
        Self { ptr }
    }
    /// Drops the contained value in place; the slot's bytes are untouched.
    #[cfg_attr(feature = "inline-more", inline)]
    pub unsafe fn drop(&self) {
        self.as_ptr().drop_in_place();
    }
    /// Moves the value out of the bucket (caller must not use the slot's
    /// old contents afterwards).
    #[cfg_attr(feature = "inline-more", inline)]
    pub unsafe fn read(&self) -> T {
        self.as_ptr().read()
    }
    /// Writes a value into the bucket without dropping any previous value.
    #[cfg_attr(feature = "inline-more", inline)]
    pub unsafe fn write(&self, val: T) {
        self.as_ptr().write(val);
    }
    /// Returns a shared reference with a caller-chosen (unchecked) lifetime.
    #[cfg_attr(feature = "inline-more", inline)]
    pub unsafe fn as_ref<'a>(&self) -> &'a T {
        &*self.as_ptr()
    }
    /// Returns a mutable reference with a caller-chosen (unchecked) lifetime.
    #[cfg_attr(feature = "inline-more", inline)]
    pub unsafe fn as_mut<'a>(&self) -> &'a mut T {
        &mut *self.as_ptr()
    }
    /// Bitwise-copies the value from `other` into this bucket.
    #[cfg_attr(feature = "inline-more", inline)]
    pub unsafe fn copy_from_nonoverlapping(&self, other: &Self) {
        self.as_ptr().copy_from_nonoverlapping(other.as_ptr(), 1);
    }
}
328
/// A raw hash table with an unsafe API.
pub struct RawTable<T> {
    // Mask to get an index from a hash value: `buckets() - 1`.
    bucket_mask: usize,

    // Pointer to the array of control bytes.
    ctrl: NonNull<u8>,

    // Pointer to the array of buckets.
    data: NonNull<T>,

    // Number of elements that can be inserted before the table needs to grow.
    growth_left: usize,

    // Number of elements currently in the table.
    items: usize,

    // Tell dropck that we logically own instances of T.
    marker: PhantomData<T>,
}
350
impl<T> RawTable<T> {
    /// Creates a new empty hash table without allocating: the control
    /// pointer aims at a static all-EMPTY group so lookups work, and
    /// `bucket_mask == 0` marks this "empty singleton" state.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn new() -> Self {
        Self {
            data: NonNull::dangling(),
            ctrl: unsafe { NonNull::new_unchecked(Group::static_empty().as_ptr() as *mut u8) },
            bucket_mask: 0,
            items: 0,
            growth_left: 0,
            marker: PhantomData,
        }
    }

    /// Allocates a new table with uninitialized control bytes and buckets.
    /// `buckets` must be a power of two. The caller must initialize the
    /// control bytes before any lookup.
    #[cfg_attr(feature = "inline-more", inline)]
    unsafe fn new_uninitialized(
        buckets: usize,
        fallability: Fallibility,
    ) -> Result<Self, CollectionAllocErr> {
        debug_assert!(buckets.is_power_of_two());
        let (layout, data_offset) =
            calculate_layout::<T>(buckets).ok_or_else(|| fallability.capacity_overflow())?;
        let ctrl = NonNull::new(alloc(layout)).ok_or_else(|| fallability.alloc_err(layout))?;
        // The buckets live in the same allocation, after the control bytes.
        let data = NonNull::new_unchecked(ctrl.as_ptr().add(data_offset) as *mut T);
        Ok(Self {
            data,
            ctrl,
            bucket_mask: buckets - 1,
            items: 0,
            growth_left: bucket_mask_to_capacity(buckets - 1),
            marker: PhantomData,
        })
    }

    /// Attempts to allocate a table able to hold `capacity` elements,
    /// initializing all control bytes to EMPTY. Zero capacity yields the
    /// allocation-free empty singleton.
    fn try_with_capacity(
        capacity: usize,
        fallability: Fallibility,
    ) -> Result<Self, CollectionAllocErr> {
        if capacity == 0 {
            Ok(Self::new())
        } else {
            unsafe {
                let buckets =
                    capacity_to_buckets(capacity).ok_or_else(|| fallability.capacity_overflow())?;
                let result = Self::new_uninitialized(buckets, fallability)?;
                result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes());

                Ok(result)
            }
        }
    }

    /// Allocates a table able to hold `capacity` elements, aborting or
    /// panicking on failure (the error branch is statically unreachable).
    pub fn with_capacity(capacity: usize) -> Self {
        Self::try_with_capacity(capacity, Fallibility::Infallible)
            .unwrap_or_else(|_| unsafe { hint::unreachable_unchecked() })
    }

    /// Deallocates the table's backing memory without dropping elements.
    /// Must not be called on the empty singleton.
    #[cfg_attr(feature = "inline-more", inline)]
    unsafe fn free_buckets(&mut self) {
        // The layout computation succeeded when the table was allocated,
        // so it cannot fail now.
        let (layout, _) =
            calculate_layout::<T>(self.buckets()).unwrap_or_else(|| hint::unreachable_unchecked());
        dealloc(self.ctrl.as_ptr(), layout);
    }

    /// Returns the index of a bucket within this table. For zero-sized `T`
    /// the bucket's pointer field already *is* the index.
    #[cfg_attr(feature = "inline-more", inline)]
    pub unsafe fn bucket_index(&self, bucket: &Bucket<T>) -> usize {
        if mem::size_of::<T>() == 0 {
            bucket.ptr as usize
        } else {
            offset_from(bucket.ptr, self.data.as_ptr())
        }
    }

    /// Returns a pointer to the control byte at `index` (including the
    /// mirrored trailing bytes, hence the bound `num_ctrl_bytes`).
    #[cfg_attr(feature = "inline-more", inline)]
    unsafe fn ctrl(&self, index: usize) -> *mut u8 {
        debug_assert!(index < self.num_ctrl_bytes());
        self.ctrl.as_ptr().add(index)
    }

    /// Returns the bucket at `index`. Must not be called on the empty
    /// singleton (no real buckets exist there).
    #[cfg_attr(feature = "inline-more", inline)]
    pub unsafe fn bucket(&self, index: usize) -> Bucket<T> {
        debug_assert_ne!(self.bucket_mask, 0);
        debug_assert!(index < self.buckets());
        Bucket::from_base_index(self.data.as_ptr(), index)
    }

    /// Marks an element as removed without dropping its value, updating
    /// `items` and, when possible, handing the slot back to `growth_left`.
    #[cfg_attr(feature = "inline-more", inline)]
    pub unsafe fn erase_no_drop(&mut self, item: &Bucket<T>) {
        let index = self.bucket_index(item);
        debug_assert!(is_full(*self.ctrl(index)));
        let index_before = index.wrapping_sub(Group::WIDTH) & self.bucket_mask;
        let empty_before = Group::load(self.ctrl(index_before)).match_empty();
        let empty_after = Group::load(self.ctrl(index)).match_empty();

        // If this slot sits inside a run of at least Group::WIDTH non-empty
        // control bytes, some probe may have walked across it, so it must
        // become a DELETED tombstone. Otherwise any probe would already have
        // stopped at a nearby EMPTY byte, and the slot can safely revert to
        // EMPTY, giving the capacity back.
        let ctrl = if empty_before.leading_zeros() + empty_after.trailing_zeros() >= Group::WIDTH {
            DELETED
        } else {
            self.growth_left += 1;
            EMPTY
        };
        self.set_ctrl(index, ctrl);
        self.items -= 1;
    }

    /// Returns the probe sequence for `hash`, starting at the H1-derived
    /// position and advancing with triangular strides.
    #[cfg_attr(feature = "inline-more", inline)]
    fn probe_seq(&self, hash: u64) -> ProbeSeq {
        ProbeSeq {
            bucket_mask: self.bucket_mask,
            pos: h1(hash) & self.bucket_mask,
            stride: 0,
        }
    }

    /// Sets a control byte and also its mirrored copy, so that unaligned
    /// group loads that wrap past the end of the table see correct data.
    #[cfg_attr(feature = "inline-more", inline)]
    unsafe fn set_ctrl(&self, index: usize, ctrl: u8) {
        // Branch-free mirror index:
        // - if index >= Group::WIDTH (for tables at least one group wide),
        //   index2 == index (the write below is then harmless duplication);
        // - otherwise index2 == bucket_mask + 1 + index, i.e. the copy of
        //   the first group's bytes kept after the main control array.
        let index2 = ((index.wrapping_sub(Group::WIDTH)) & self.bucket_mask) + Group::WIDTH;

        *self.ctrl(index) = ctrl;
        *self.ctrl(index2) = ctrl;
    }

    /// Searches for an EMPTY or DELETED slot to insert an element with the
    /// given hash. There must be at least one such slot in the table.
    #[cfg_attr(feature = "inline-more", inline)]
    fn find_insert_slot(&self, hash: u64) -> usize {
        for pos in self.probe_seq(hash) {
            unsafe {
                let group = Group::load(self.ctrl(pos));
                if let Some(bit) = group.match_empty_or_deleted().lowest_set_bit() {
                    let result = (pos + bit) & self.bucket_mask;

                    // In tables smaller than the group width, the trailing
                    // mirrored control bytes can make the unaligned load
                    // match a slot that, after masking, is actually full.
                    // Fall back to an aligned scan of group 0, which must
                    // contain a free slot.
                    if unlikely(is_full(*self.ctrl(result))) {
                        debug_assert!(self.bucket_mask < Group::WIDTH);
                        debug_assert_ne!(pos, 0);
                        return Group::load_aligned(self.ctrl(0))
                            .match_empty_or_deleted()
                            .lowest_set_bit_nonzero();
                    } else {
                        return result;
                    }
                }
            }
        }

        // The table always keeps at least one EMPTY slot, so the probe
        // sequence cannot be exhausted.
        unreachable!();
    }

    /// Marks all buckets as EMPTY without dropping their contents;
    /// the allocation is retained.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn clear_no_drop(&mut self) {
        if !self.is_empty_singleton() {
            unsafe {
                self.ctrl(0).write_bytes(EMPTY, self.num_ctrl_bytes());
            }
        }
        self.items = 0;
        self.growth_left = bucket_mask_to_capacity(self.bucket_mask);
    }

    /// Removes all elements, dropping them. The guard guarantees the table
    /// is left empty even if an element's destructor panics.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn clear(&mut self) {
        let self_ = guard(self, |self_| self_.clear_no_drop());

        if mem::needs_drop::<T>() {
            unsafe {
                for item in self_.iter() {
                    item.drop();
                }
            }
        }
    }

    /// Shrinks the table to fit `max(self.len(), min_size)` elements.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn shrink_to(&mut self, min_size: usize, hasher: impl Fn(&T) -> u64) {
        // Never shrink below the current element count.
        let min_size = usize::max(self.items, min_size);
        if min_size == 0 {
            *self = Self::new();
            return;
        }

        let min_buckets = match capacity_to_buckets(min_size) {
            Some(buckets) => buckets,
            None => return,
        };

        // Only bother when the bucket count would actually go down.
        if min_buckets < self.buckets() {
            if self.items == 0 {
                // No elements to rehash: just replace with a fresh table.
                *self = Self::with_capacity(min_size)
            } else {
                self.resize(min_size, hasher, Fallibility::Infallible)
                    .unwrap_or_else(|_| unsafe { hint::unreachable_unchecked() });
            }
        }
    }

    /// Ensures at least `additional` more elements can be inserted,
    /// panicking/aborting on allocation failure.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) {
        if additional > self.growth_left {
            self.reserve_rehash(additional, hasher, Fallibility::Infallible)
                .unwrap_or_else(|_| unsafe { hint::unreachable_unchecked() });
        }
    }

    /// Fallible version of `reserve`: reports allocation errors instead of
    /// aborting.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn try_reserve(
        &mut self,
        additional: usize,
        hasher: impl Fn(&T) -> u64,
    ) -> Result<(), CollectionAllocErr> {
        if additional > self.growth_left {
            self.reserve_rehash(additional, hasher, Fallibility::Fallible)
        } else {
            Ok(())
        }
    }

    /// Out-of-line slow path for `reserve`/`try_reserve`: either clears
    /// tombstones by rehashing in place or grows the table.
    #[cold]
    #[inline(never)]
    fn reserve_rehash(
        &mut self,
        additional: usize,
        hasher: impl Fn(&T) -> u64,
        fallability: Fallibility,
    ) -> Result<(), CollectionAllocErr> {
        let new_items = self
            .items
            .checked_add(additional)
            .ok_or_else(|| fallability.capacity_overflow())?;

        let full_capacity = bucket_mask_to_capacity(self.bucket_mask);
        if new_items <= full_capacity / 2 {
            // The current allocation is large enough once tombstones are
            // reclaimed; rehash in place rather than reallocating.
            self.rehash_in_place(hasher);
            Ok(())
        } else {
            // Grow to at least one element past the current full capacity so
            // the bucket count actually doubles.
            self.resize(
                usize::max(new_items, full_capacity + 1),
                hasher,
                fallability,
            )
        }
    }

    /// Rehashes the table's contents in place, reclaiming DELETED slots.
    /// `hasher` must return the same hash each element was inserted with.
    fn rehash_in_place(&mut self, hasher: impl Fn(&T) -> u64) {
        unsafe {
            // Bulk-convert control bytes: FULL -> DELETED ("not yet moved"),
            // EMPTY/DELETED -> EMPTY.
            for i in (0..self.buckets()).step_by(Group::WIDTH) {
                let group = Group::load_aligned(self.ctrl(i));
                let group = group.convert_special_to_empty_and_full_to_deleted();
                group.store_aligned(self.ctrl(i));
            }

            // Re-establish the mirrored trailing control bytes (see set_ctrl).
            if self.buckets() < Group::WIDTH {
                self.ctrl(0)
                    .copy_to(self.ctrl(Group::WIDTH), self.buckets());
            } else {
                self.ctrl(0)
                    .copy_to(self.ctrl(self.buckets()), Group::WIDTH);
            }

            // If the hash function panics mid-rehash, drop every element
            // still marked DELETED (i.e. not yet relocated) so the table is
            // left in a consistent state.
            let mut guard = guard(self, |self_| {
                if mem::needs_drop::<T>() {
                    for i in 0..self_.buckets() {
                        if *self_.ctrl(i) == DELETED {
                            self_.set_ctrl(i, EMPTY);
                            self_.bucket(i).drop();
                            self_.items -= 1;
                        }
                    }
                }
                self_.growth_left = bucket_mask_to_capacity(self_.bucket_mask) - self_.items;
            });

            // Move each DELETED-marked element into its proper slot.
            'outer: for i in 0..guard.buckets() {
                if *guard.ctrl(i) != DELETED {
                    continue;
                }
                'inner: loop {
                    // Hash the current item.
                    let item = guard.bucket(i);
                    let hash = hasher(item.as_ref());

                    // Find where it wants to live now.
                    let new_i = guard.find_insert_slot(hash);

                    // If the old and new positions fall in the same probe
                    // group there is nothing to gain from moving the value;
                    // just restore its H2 control byte.
                    let probe_index = |pos: usize| {
                        (pos.wrapping_sub(guard.probe_seq(hash).pos) & guard.bucket_mask)
                            / Group::WIDTH
                    };
                    if likely(probe_index(i) == probe_index(new_i)) {
                        guard.set_ctrl(i, h2(hash));
                        continue 'outer;
                    }

                    // Claim the destination slot before moving data.
                    let prev_ctrl = *guard.ctrl(new_i);
                    guard.set_ctrl(new_i, h2(hash));

                    if prev_ctrl == EMPTY {
                        // Destination was free: move the element and clear
                        // the old slot.
                        guard.set_ctrl(i, EMPTY);
                        guard.bucket(new_i).copy_from_nonoverlapping(&item);
                        continue 'outer;
                    } else {
                        // Destination held another not-yet-moved element:
                        // swap and keep processing the displaced one.
                        debug_assert_eq!(prev_ctrl, DELETED);
                        mem::swap(guard.bucket(new_i).as_mut(), item.as_mut());
                        continue 'inner;
                    }
                }
            }

            guard.growth_left = bucket_mask_to_capacity(guard.bucket_mask) - guard.items;
            mem::forget(guard);
        }
    }

    /// Allocates a new table of the given capacity and moves all elements
    /// into it, then swaps it with `self`.
    fn resize(
        &mut self,
        capacity: usize,
        hasher: impl Fn(&T) -> u64,
        fallability: Fallibility,
    ) -> Result<(), CollectionAllocErr> {
        unsafe {
            debug_assert!(self.items <= capacity);

            let mut new_table = Self::try_with_capacity(capacity, fallability)?;
            new_table.growth_left -= self.items;
            new_table.items = self.items;

            // If `hasher` panics below, free the new table's memory. The
            // elements themselves are still owned by the old table (they are
            // only bitwise-copied, never removed from `self`), so they must
            // NOT be dropped here — hence ManuallyDrop.
            let mut new_table = guard(ManuallyDrop::new(new_table), |new_table| {
                if !new_table.is_empty_singleton() {
                    new_table.free_buckets();
                }
            });

            for item in self.iter() {
                let hash = hasher(item.as_ref());

                // The new table has no DELETED entries and guaranteed space,
                // so a bare find_insert_slot + copy suffices.
                let index = new_table.find_insert_slot(hash);
                new_table.set_ctrl(index, h2(hash));
                new_table.bucket(index).copy_from_nonoverlapping(&item);
            }

            // Swap so the old allocation is dropped (and freed) by the guard
            // machinery via `self`'s normal Drop when it goes out of scope at
            // the caller, while `self` now owns the new table.
            mem::swap(self, &mut new_table);

            Ok(())
        }
    }

    /// Inserts an element, growing the table first if necessary.
    /// Returns the bucket the value was written to.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> Bucket<T> {
        unsafe {
            let mut index = self.find_insert_slot(hash);

            // Growing is only needed when the chosen slot is EMPTY and no
            // growth budget remains; re-using a DELETED slot never changes
            // the load factor.
            let old_ctrl = *self.ctrl(index);
            if unlikely(self.growth_left == 0 && special_is_empty(old_ctrl)) {
                self.reserve(1, hasher);
                index = self.find_insert_slot(hash);
            }

            let bucket = self.bucket(index);
            // Consume growth budget only when filling a previously-EMPTY slot.
            self.growth_left -= special_is_empty(old_ctrl) as usize;
            self.set_ctrl(index, h2(hash));
            bucket.write(value);
            self.items += 1;
            bucket
        }
    }

    /// Inserts without checking capacity; the caller must have reserved
    /// space beforehand (rustc-internal fast path).
    #[cfg_attr(feature = "inline-more", inline)]
    #[cfg(feature = "rustc-internal-api")]
    pub fn insert_no_grow(&mut self, hash: u64, value: T) -> Bucket<T> {
        unsafe {
            let index = self.find_insert_slot(hash);
            let bucket = self.bucket(index);

            // Consume growth budget only when filling a previously-EMPTY slot.
            let old_ctrl = *self.ctrl(index);
            self.growth_left -= special_is_empty(old_ctrl) as usize;

            self.set_ctrl(index, h2(hash));
            bucket.write(value);
            self.items += 1;
            bucket
        }
    }

    /// Searches for an element matching `hash` whose value satisfies `eq`.
    #[inline]
    pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option<Bucket<T>> {
        unsafe {
            for pos in self.probe_seq(hash) {
                let group = Group::load(self.ctrl(pos));
                // Check every slot in the group whose H2 byte matches.
                for bit in group.match_byte(h2(hash)) {
                    let index = (pos + bit) & self.bucket_mask;
                    let bucket = self.bucket(index);
                    if likely(eq(bucket.as_ref())) {
                        return Some(bucket);
                    }
                }
                // An EMPTY byte in the group means the element cannot be
                // further along the probe sequence.
                if likely(group.match_empty().any_bit_set()) {
                    return None;
                }
            }
        }

        // The table always contains at least one EMPTY control byte, so the
        // probe loop must terminate through one of the returns above.
        unreachable!();
    }

    /// Returns how many elements the table can hold before growing.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn capacity(&self) -> usize {
        self.items + self.growth_left
    }

    /// Returns the number of elements in the table.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn len(&self) -> usize {
        self.items
    }

    /// Returns the number of buckets (always a power of two; 1 for the
    /// unallocated empty singleton).
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn buckets(&self) -> usize {
        self.bucket_mask + 1
    }

    /// Returns the number of control bytes, including the Group::WIDTH
    /// mirrored trailing bytes (see set_ctrl).
    #[cfg_attr(feature = "inline-more", inline)]
    fn num_ctrl_bytes(&self) -> usize {
        self.bucket_mask + 1 + Group::WIDTH
    }

    /// Whether this is the allocation-free table created by `new()`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn is_empty_singleton(&self) -> bool {
        self.bucket_mask == 0
    }

    /// Returns an iterator over the full buckets. Unsafe: the returned
    /// buckets are only valid while the table is neither moved nor mutated.
    #[cfg_attr(feature = "inline-more", inline)]
    pub unsafe fn iter(&self) -> RawIter<T> {
        let data = Bucket::from_base_index(self.data.as_ptr(), 0);
        RawIter {
            iter: RawIterRange::new(self.ctrl.as_ptr(), data, self.buckets()),
            items: self.items,
        }
    }

    /// Returns a draining iterator. The table is moved into the drain
    /// (leaving an empty table behind) and restored, emptied, on drop.
    #[cfg_attr(feature = "inline-more", inline)]
    pub unsafe fn drain(&mut self) -> RawDrain<'_, T> {
        RawDrain {
            iter: self.iter(),
            table: ManuallyDrop::new(mem::replace(self, Self::new())),
            orig_table: NonNull::from(self),
            marker: PhantomData,
        }
    }

    /// Relinquishes ownership of the backing allocation without dropping
    /// the elements; returns `None` for the unallocated empty singleton.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(crate) fn into_alloc(self) -> Option<(NonNull<u8>, Layout)> {
        let alloc = if self.is_empty_singleton() {
            None
        } else {
            // The layout succeeded at allocation time, so it cannot fail now.
            let (layout, _) = calculate_layout::<T>(self.buckets())
                .unwrap_or_else(|| unsafe { hint::unreachable_unchecked() });
            Some((self.ctrl.cast(), layout))
        };
        // Skip Drop: the caller now owns both elements and memory.
        mem::forget(self);
        alloc
    }
}
975
// SAFETY: the table owns its elements, so sending/sharing the table is
// equivalent to sending/sharing the elements themselves.
unsafe impl<T> Send for RawTable<T> where T: Send {}
unsafe impl<T> Sync for RawTable<T> where T: Sync {}
978
impl<T: Clone> Clone for RawTable<T> {
    fn clone(&self) -> Self {
        if self.is_empty_singleton() {
            Self::new()
        } else {
            unsafe {
                let mut new_table = ManuallyDrop::new(
                    // Infallible: the same bucket count produced a valid
                    // layout when `self` was allocated.
                    Self::new_uninitialized(self.buckets(), Fallibility::Infallible)
                        .unwrap_or_else(|_| hint::unreachable_unchecked()),
                );

                // If cloning an element panics, free the fresh table's
                // memory; clone_from_spec's own guard drops the elements
                // cloned so far before invoking this callback.
                new_table.clone_from_spec(self, |new_table| {
                    new_table.free_buckets();
                });

                ManuallyDrop::into_inner(new_table)
            }
        }
    }

    fn clone_from(&mut self, source: &Self) {
        if source.is_empty_singleton() {
            *self = Self::new();
        } else {
            unsafe {
                // First drop our own elements; the control bytes are about
                // to be overwritten wholesale by clone_from_spec.
                if mem::needs_drop::<T>() {
                    for item in self.iter() {
                        item.drop();
                    }
                }

                // If the geometries differ, swap in a fresh allocation of
                // the right size.
                if self.buckets() != source.buckets() {
                    if !self.is_empty_singleton() {
                        self.free_buckets();
                    }
                    // Raw write so the old (already-freed) table's Drop
                    // never runs.
                    (self as *mut Self).write(
                        Self::new_uninitialized(source.buckets(), Fallibility::Infallible)
                            .unwrap_or_else(|_| hint::unreachable_unchecked()),
                    );
                }

                self.clone_from_spec(source, |self_| {
                    // On panic, leave the table valid but empty.
                    self_.clear_no_drop()
                });
            }
        }
    }
}
1033
/// Internal hook so that (on nightly) cloning can be specialized: `T: Copy`
/// tables are cloned with a bulk memcpy instead of per-element `clone`.
/// `on_panic` performs caller-specific cleanup if an element clone panics.
trait RawTableClone {
    unsafe fn clone_from_spec(&mut self, source: &Self, on_panic: impl FnMut(&mut Self));
}
impl<T: Clone> RawTableClone for RawTable<T> {
    // Nightly: `default` so the `T: Copy` impl below can specialize it.
    #[cfg(feature = "nightly")]
    #[cfg_attr(feature = "inline-more", inline)]
    default unsafe fn clone_from_spec(&mut self, source: &Self, on_panic: impl FnMut(&mut Self)) {
        self.clone_from_impl(source, on_panic);
    }

    // Stable: no specialization, always the generic element-wise clone.
    #[cfg(not(feature = "nightly"))]
    #[cfg_attr(feature = "inline-more", inline)]
    unsafe fn clone_from_spec(&mut self, source: &Self, on_panic: impl FnMut(&mut Self)) {
        self.clone_from_impl(source, on_panic);
    }
}
#[cfg(feature = "nightly")]
impl<T: Copy> RawTableClone for RawTable<T> {
    // `T: Copy` cannot panic while cloning, so `_on_panic` is unused and
    // both arrays can be copied with straight memcpys.
    #[cfg_attr(feature = "inline-more", inline)]
    unsafe fn clone_from_spec(&mut self, source: &Self, _on_panic: impl FnMut(&mut Self)) {
        source
            .ctrl(0)
            .copy_to_nonoverlapping(self.ctrl(0), self.num_ctrl_bytes());
        source
            .data
            .as_ptr()
            .copy_to_nonoverlapping(self.data.as_ptr(), self.buckets());

        self.items = source.items;
        self.growth_left = source.growth_left;
    }
}
1067
impl<T: Clone> RawTable<T> {
    /// Common code for `clone` and `clone_from`: requires `self` to have
    /// the same bucket count as `source` and no live elements.
    #[cfg_attr(feature = "inline-more", inline)]
    unsafe fn clone_from_impl(&mut self, source: &Self, mut on_panic: impl FnMut(&mut Self)) {
        // Copy the control bytes (including the mirrored tail) verbatim;
        // the two tables share the same geometry.
        source
            .ctrl(0)
            .copy_to_nonoverlapping(self.ctrl(0), self.num_ctrl_bytes());

        // Cloning an element may panic: drop the elements written so far
        // (all full buckets up to and including the last written index),
        // then run the caller-supplied cleanup.
        let mut guard = guard((0, &mut *self), |(index, self_)| {
            if mem::needs_drop::<T>() {
                for i in 0..=*index {
                    if is_full(*self_.ctrl(i)) {
                        self_.bucket(i).drop();
                    }
                }
            }

            on_panic(self_);
        });

        for from in source.iter() {
            let index = source.bucket_index(&from);
            let to = guard.1.bucket(index);
            to.write(from.as_ref().clone());

            // Track progress for the unwind path above.
            guard.0 = index;
        }

        // Every element cloned successfully: disarm the guard.
        mem::forget(guard);

        self.items = source.items;
        self.growth_left = source.growth_left;
    }

    /// Variant of `clone_from` that can re-hash into the existing geometry
    /// when the bucket counts differ but capacity suffices, avoiding a
    /// reallocation.
    #[cfg(any(feature = "nightly", feature = "raw"))]
    pub fn clone_from_with_hasher(&mut self, source: &Self, hasher: impl Fn(&T) -> u64) {
        if self.buckets() != source.buckets()
            && bucket_mask_to_capacity(self.bucket_mask) >= source.len()
        {
            self.clear();

            // If a clone panics mid-way, leave the table empty.
            let guard_self = guard(&mut *self, |self_| {
                self_.clear();
            });

            unsafe {
                for item in source.iter() {
                    // This clone may panic (handled by guard_self above).
                    let item = item.as_ref().clone();
                    let hash = hasher(&item);

                    // Simplified insert: the table was just cleared, so
                    // there are no DELETED entries and space is guaranteed.
                    let index = guard_self.find_insert_slot(hash);
                    guard_self.set_ctrl(index, h2(hash));
                    guard_self.bucket(index).write(item);
                }
            }

            mem::forget(guard_self);

            self.items = source.items;
            self.growth_left -= source.items;
        } else {
            // Same geometry (or not enough capacity): fall back to the
            // ordinary byte-wise clone_from.
            self.clone_from(source);
        }
    }
}
1156
// Nightly: `#[may_dangle]` lets dropck accept tables whose elements borrow
// data that does not strictly outlive the table (we only drop, never use, T).
#[cfg(feature = "nightly")]
unsafe impl<#[may_dangle] T> Drop for RawTable<T> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn drop(&mut self) {
        if !self.is_empty_singleton() {
            unsafe {
                // Drop every live element, then release the allocation.
                if mem::needs_drop::<T>() {
                    for item in self.iter() {
                        item.drop();
                    }
                }
                self.free_buckets();
            }
        }
    }
}
// Stable fallback Drop: identical body, without the `#[may_dangle]` escape
// hatch available on nightly.
#[cfg(not(feature = "nightly"))]
impl<T> Drop for RawTable<T> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn drop(&mut self) {
        if !self.is_empty_singleton() {
            unsafe {
                // Drop every live element, then release the allocation.
                if mem::needs_drop::<T>() {
                    for item in self.iter() {
                        item.drop();
                    }
                }
                self.free_buckets();
            }
        }
    }
}
1189
impl<T> IntoIterator for RawTable<T> {
    type Item = T;
    type IntoIter = RawIntoIter<T>;

    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> RawIntoIter<T> {
        unsafe {
            // `into_alloc` forgets `self`, so from here on the RawIntoIter
            // is responsible for dropping the remaining elements and for
            // freeing the allocation.
            let iter = self.iter();
            let alloc = self.into_alloc();
            RawIntoIter {
                iter,
                alloc,
                marker: PhantomData,
            }
        }
    }
}
1207
/// Iterator over a sub-range of a table's full buckets, advancing one
/// aligned control-byte group at a time.
pub(crate) struct RawIterRange<T> {
    // Mask of full buckets in the currently loaded group; bits are cleared
    // as elements are yielded.
    current_group: BitMask,

    // Bucket for the first slot of the currently loaded group.
    data: Bucket<T>,

    // Pointer to the next group of control bytes; always group-aligned.
    next_ctrl: *const u8,

    // Pointer one past the last control byte of this range.
    end: *const u8,
}
1225
impl<T> RawIterRange<T> {
    /// Creates an iterator over `len` control bytes starting at `ctrl`,
    /// which must be group-aligned; `data` is the matching first bucket.
    #[cfg_attr(feature = "inline-more", inline)]
    unsafe fn new(ctrl: *const u8, data: Bucket<T>, len: usize) -> Self {
        debug_assert_ne!(len, 0);
        debug_assert_eq!(ctrl as usize % Group::WIDTH, 0);
        let end = ctrl.add(len);

        // Eagerly load the first group's full-bucket mask.
        let current_group = Group::load_aligned(ctrl).match_full();
        let next_ctrl = ctrl.add(Group::WIDTH);

        Self {
            current_group,
            data,
            next_ctrl,
            end,
        }
    }

    /// Splits the remaining range roughly in half (rounded to a group
    /// boundary) for parallel iteration; returns `None` for the second
    /// half when the range is a single group.
    #[cfg_attr(feature = "inline-more", inline)]
    #[cfg(feature = "rayon")]
    pub(crate) fn split(mut self) -> (Self, Option<RawIterRange<T>>) {
        unsafe {
            if self.end <= self.next_ctrl {
                // Nothing beyond the already-loaded group: cannot split.
                (self, None)
            } else {
                let len = offset_from(self.end, self.next_ctrl);
                debug_assert_eq!(len % Group::WIDTH, 0);

                // Midpoint rounded down to a whole number of groups.
                let mid = (len / 2) & !(Group::WIDTH - 1);

                let tail = Self::new(
                    self.next_ctrl.add(mid),
                    self.data.add(Group::WIDTH).add(mid),
                    len - mid,
                );
                debug_assert_eq!(self.data.add(Group::WIDTH).add(mid).ptr, tail.data.ptr);
                debug_assert_eq!(self.end, tail.end);
                self.end = self.next_ctrl.add(mid);
                debug_assert_eq!(self.end.add(Group::WIDTH), tail.next_ctrl);
                (self, Some(tail))
            }
        }
    }
}
1289
// NOTE(review): these impls are unconditional — no `T: Send`/`T: Sync`
// bound, unlike RawIntoIter/RawDrain below. Presumably the wrapping
// iterator types are expected to impose the real bounds; verify that
// RawIterRange is never exposed without such a wrapper.
unsafe impl<T> Send for RawIterRange<T> {}
unsafe impl<T> Sync for RawIterRange<T> {}
1294
1295impl<T> Clone for RawIterRange<T> {
1296 #[cfg_attr(feature = "inline-more", inline)]
1297 fn clone(&self) -> Self {
1298 Self {
1299 data: self.data.clone(),
1300 next_ctrl: self.next_ctrl,
1301 current_group: self.current_group,
1302 end: self.end,
1303 }
1304 }
1305}
1306
impl<T> Iterator for RawIterRange<T> {
    type Item = Bucket<T>;

    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<Bucket<T>> {
        unsafe {
            loop {
                // Yield the next full bucket of the current group, if any.
                if let Some(index) = self.current_group.lowest_set_bit() {
                    self.current_group = self.current_group.remove_lowest_bit();
                    return Some(self.data.add(index));
                }

                if self.next_ctrl >= self.end {
                    return None;
                }

                // Advance to the next group. The Group::WIDTH mirrored
                // control bytes kept past the table's end make this aligned
                // load in-bounds even for the final group.
                self.current_group = Group::load_aligned(self.next_ctrl).match_full();
                self.data = self.data.add(Group::WIDTH);
                self.next_ctrl = self.next_ctrl.add(Group::WIDTH);
            }
        }
    }

    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Upper bound: all control bytes not yet visited plus the group
        // that is already loaded.
        // NOTE(review): when `len < Group::WIDTH`, `next_ctrl` already lies
        // past `end` and this subtraction underflows — presumably callers
        // only rely on RawIter's exact override; confirm before using this
        // hint directly.
        (
            0,
            Some(unsafe { offset_from(self.end, self.next_ctrl) + Group::WIDTH }),
        )
    }
}
1344
// Once exhausted (`next_ctrl >= end` with an empty mask) `next` keeps
// returning `None`, so the iterator is fused.
impl<T> FusedIterator for RawIterRange<T> {}
1346
/// Iterator over the full buckets of a whole table; wraps `RawIterRange`
/// and additionally tracks the exact number of remaining elements.
pub struct RawIter<T> {
    pub(crate) iter: RawIterRange<T>,
    // Exact count of elements not yet yielded.
    items: usize,
}
1352
1353impl<T> Clone for RawIter<T> {
1354 #[cfg_attr(feature = "inline-more", inline)]
1355 fn clone(&self) -> Self {
1356 Self {
1357 iter: self.iter.clone(),
1358 items: self.items,
1359 }
1360 }
1361}
1362
1363impl<T> Iterator for RawIter<T> {
1364 type Item = Bucket<T>;
1365
1366 #[cfg_attr(feature = "inline-more", inline)]
1367 fn next(&mut self) -> Option<Bucket<T>> {
1368 if let Some(b) = self.iter.next() {
1369 self.items -= 1;
1370 Some(b)
1371 } else {
1372 debug_assert_eq!(self.items, 0);
1376 None
1377 }
1378 }
1379
1380 #[cfg_attr(feature = "inline-more", inline)]
1381 fn size_hint(&self) -> (usize, Option<usize>) {
1382 (self.items, Some(self.items))
1383 }
1384}
1385
// `size_hint` is exact (`items`), so ExactSizeIterator is sound; `next`
// keeps returning `None` after exhaustion, satisfying FusedIterator.
impl<T> ExactSizeIterator for RawIter<T> {}
impl<T> FusedIterator for RawIter<T> {}
1388
/// Owning iterator produced by `RawTable::into_iter`; drops any unconsumed
/// elements and frees the backing allocation when dropped.
pub struct RawIntoIter<T> {
    iter: RawIter<T>,
    // The backing allocation (pointer + layout), if any; freed on drop.
    alloc: Option<(NonNull<u8>, Layout)>,
    // Tell dropck that we own instances of T.
    marker: PhantomData<T>,
}
1395
impl<T> RawIntoIter<T> {
    /// Returns a by-reference iterator over the remaining elements.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn iter(&self) -> RawIter<T> {
        self.iter.clone()
    }
}
1402
// SAFETY: the iterator owns the remaining elements, so the usual
// element-bounded Send/Sync apply.
unsafe impl<T> Send for RawIntoIter<T> where T: Send {}
unsafe impl<T> Sync for RawIntoIter<T> where T: Sync {}
1405
// Nightly: `#[may_dangle]` for the same dropck reason as on RawTable.
#[cfg(feature = "nightly")]
unsafe impl<#[may_dangle] T> Drop for RawIntoIter<T> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn drop(&mut self) {
        unsafe {
            // Drop all remaining (unyielded) elements.
            if mem::needs_drop::<T>() {
                while let Some(item) = self.iter.next() {
                    item.drop();
                }
            }

            // Then free the table's memory, if it was ever allocated.
            if let Some((ptr, layout)) = self.alloc {
                dealloc(ptr.as_ptr(), layout);
            }
        }
    }
}
// Stable fallback Drop: identical body, without `#[may_dangle]`.
#[cfg(not(feature = "nightly"))]
impl<T> Drop for RawIntoIter<T> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn drop(&mut self) {
        unsafe {
            // Drop all remaining (unyielded) elements.
            if mem::needs_drop::<T>() {
                while let Some(item) = self.iter.next() {
                    item.drop();
                }
            }

            // Then free the table's memory, if it was ever allocated.
            if let Some((ptr, layout)) = self.alloc {
                dealloc(ptr.as_ptr(), layout);
            }
        }
    }
}
1444
1445impl<T> Iterator for RawIntoIter<T> {
1446 type Item = T;
1447
1448 #[cfg_attr(feature = "inline-more", inline)]
1449 fn next(&mut self) -> Option<T> {
1450 unsafe { Some(self.iter.next()?.read()) }
1451 }
1452
1453 #[cfg_attr(feature = "inline-more", inline)]
1454 fn size_hint(&self) -> (usize, Option<usize>) {
1455 self.iter.size_hint()
1456 }
1457}
1458
// Inherits the exact count and fused behaviour from the wrapped RawIter.
impl<T> ExactSizeIterator for RawIntoIter<T> {}
impl<T> FusedIterator for RawIntoIter<T> {}
1461
/// Draining iterator over a table: yields owned elements, and on drop
/// leaves the original table empty but with its allocation intact.
pub struct RawDrain<'a, T> {
    iter: RawIter<T>,

    // The table, moved out of its original location (which was left holding
    // an empty table by `drain`); restored there on drop. ManuallyDrop
    // prevents its Drop from also running and double-freeing.
    table: ManuallyDrop<RawTable<T>>,
    orig_table: NonNull<RawTable<T>>,

    // A `&'a mut RawTable<T>` field would make this type invariant over T;
    // the shared-reference PhantomData keeps it covariant.
    marker: PhantomData<&'a RawTable<T>>,
}
1476
impl<T> RawDrain<'_, T> {
    /// Returns a by-reference iterator over the not-yet-drained elements.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn iter(&self) -> RawIter<T> {
        self.iter.clone()
    }
}
1483
// SAFETY: the drain owns the remaining elements, so the usual
// element-bounded Send/Sync apply.
unsafe impl<T> Send for RawDrain<'_, T> where T: Send {}
unsafe impl<T> Sync for RawDrain<'_, T> where T: Sync {}
1486
impl<T> Drop for RawDrain<'_, T> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn drop(&mut self) {
        unsafe {
            // Drop all elements the caller did not consume.
            if mem::needs_drop::<T>() {
                while let Some(item) = self.iter.next() {
                    item.drop();
                }
            }

            // Reset control bytes so the table is empty but keeps its
            // allocation.
            self.table.clear_no_drop();

            // Move the now-empty table back to its original location,
            // undoing the `mem::replace` done by `drain`. Raw copy so the
            // placeholder empty table there is overwritten without running
            // its Drop, and `self.table` (ManuallyDrop) is not dropped.
            self.orig_table
                .as_ptr()
                .copy_from_nonoverlapping(&*self.table, 1);
        }
    }
}
1509
1510impl<T> Iterator for RawDrain<'_, T> {
1511 type Item = T;
1512
1513 #[cfg_attr(feature = "inline-more", inline)]
1514 fn next(&mut self) -> Option<T> {
1515 unsafe {
1516 let item = self.iter.next()?;
1517 Some(item.read())
1518 }
1519 }
1520
1521 #[cfg_attr(feature = "inline-more", inline)]
1522 fn size_hint(&self) -> (usize, Option<usize>) {
1523 self.iter.size_hint()
1524 }
1525}
1526
// Inherits the exact count and fused behaviour from the wrapped RawIter.
impl<T> ExactSizeIterator for RawDrain<'_, T> {}
impl<T> FusedIterator for RawDrain<'_, T> {}