#ifndef ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_
#define ABSL_CONTAINER_INTERNAL_RAW_HASH_SET_H_
#include <algorithm>
#include <cassert>
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <initializer_list>
#include <iterator>
#include <limits>
#include <memory>
#include <tuple>
#include <type_traits>
#include <utility>
#include "absl/base/attributes.h"
#include "absl/base/config.h"
#include "absl/base/internal/endian.h"
#include "absl/base/internal/raw_logging.h"
#include "absl/base/macros.h"
#include "absl/base/optimization.h"
#include "absl/base/options.h"
#include "absl/base/port.h"
#include "absl/base/prefetch.h"
#include "absl/container/internal/common.h"
#include "absl/container/internal/compressed_tuple.h"
#include "absl/container/internal/container_memory.h"
#include "absl/container/internal/hash_policy_traits.h"
#include "absl/container/internal/hashtable_debug_hooks.h"
#include "absl/container/internal/hashtablez_sampler.h"
#include "absl/memory/memory.h"
#include "absl/meta/type_traits.h"
#include "absl/numeric/bits.h"
#include "absl/utility/utility.h"
#ifdef ABSL_INTERNAL_HAVE_SSE2
#include <emmintrin.h>
#endif
#ifdef ABSL_INTERNAL_HAVE_SSSE3
#include <tmmintrin.h>
#endif
#ifdef _MSC_VER
#include <intrin.h>
#endif
#ifdef ABSL_INTERNAL_HAVE_ARM_NEON
#include <arm_neon.h>
#endif
namespace absl {
ABSL_NAMESPACE_BEGIN
namespace container_internal {
#ifdef ABSL_SWISSTABLE_ENABLE_GENERATIONS
#error ABSL_SWISSTABLE_ENABLE_GENERATIONS cannot be directly set
#elif (defined(ABSL_HAVE_ADDRESS_SANITIZER) || \
defined(ABSL_HAVE_HWADDRESS_SANITIZER) || \
defined(ABSL_HAVE_MEMORY_SANITIZER)) && \
!defined(NDEBUG_SANITIZER)
#define ABSL_SWISSTABLE_ENABLE_GENERATIONS
#endif
// Generation counter carried by tables and iterators (debug builds) to
// detect iterator use after a rehash or move.
using GenerationType = uint8_t;
// The generation value shared by all default-constructed/empty tables.
constexpr GenerationType SentinelEmptyGeneration() { return 0; }
// Advances a generation counter, skipping the sentinel value so that a live
// table's generation never compares equal to SentinelEmptyGeneration().
constexpr GenerationType NextGeneration(GenerationType generation) {
  GenerationType next = static_cast<GenerationType>(generation + 1);
  return next == SentinelEmptyGeneration()
             ? static_cast<GenerationType>(next + 1)
             : next;
}
#ifdef ABSL_SWISSTABLE_ENABLE_GENERATIONS
// Generations enabled: a generation byte is stored per table and validated
// by iterators to catch use-after-rehash/move bugs.
constexpr bool SwisstableGenerationsEnabled() { return true; }
constexpr size_t NumGenerationBytes() { return sizeof(GenerationType); }
#else
// Generations disabled: no generation byte is allocated and all related
// checks compile away.
constexpr bool SwisstableGenerationsEnabled() { return false; }
constexpr size_t NumGenerationBytes() { return 0; }
#endif
// Swaps two allocators. Selected when the allocator propagates on container
// swap, so a real swap is performed.
template <typename AllocType>
void SwapAlloc(AllocType& lhs, AllocType& rhs,
               std::true_type /* propagate_on_container_swap */) {
  // Two-step ADL swap: prefer a user-provided swap, fall back to std::swap.
  using std::swap;
  swap(lhs, rhs);
}
// Overload for allocators that do NOT propagate on container swap: nothing
// is exchanged. Swapping containers with unequal non-propagating allocators
// is UB, which we diagnose in debug builds.
template <typename AllocType>
void SwapAlloc(AllocType& lhs, AllocType& rhs,
               std::false_type /* propagate_on_container_swap */) {
  assert(lhs == rhs &&
         "It's UB to call swap with unequal non-propagating allocators.");
  // Keep the parameters "used" in NDEBUG builds.
  static_cast<void>(lhs);
  static_cast<void>(rhs);
}
// Copy-assigns the allocator when propagate_on_container_copy_assignment is
// true_type; the false_type overload is a deliberate no-op.
template <typename AllocType>
void CopyAlloc(AllocType& lhs, AllocType& rhs,
               std::true_type /* propagate_alloc */) {
  lhs = rhs;
}
template <typename AllocType>
void CopyAlloc(AllocType&, AllocType&, std::false_type /* propagate_alloc */) {}
// The state for a probe sequence of groups of `Width` control bytes.
//
// The sequence is quadratic ("triangular"): after the i-th step the offset is
//   hash + Width * (i^2 + i) / 2   (mod mask + 1)
// Because mask + 1 is a power of two, every group is visited exactly once
// before the sequence repeats.
template <size_t Width>
class probe_seq {
 public:
  // `hash` selects the starting group. `mask` must be 2^m - 1 (a valid
  // capacity), which makes `& mask_` equivalent to `% (mask + 1)`.
  probe_seq(size_t hash, size_t mask) : mask_(mask), offset_(hash & mask) {
    assert(((mask + 1) & mask) == 0 && "not a mask");
  }

  // Offset of the group currently being probed.
  size_t offset() const { return offset_; }
  // Offset of the i-th slot within the current group, wrapped around.
  size_t offset(size_t i) const { return (offset_ + i) & mask_; }

  // Advances to the next group in the quadratic probe sequence.
  void next() {
    index_ += Width;
    offset_ = (offset_ + index_) & mask_;
  }

  // Total distance probed so far; 0 while still on the first group.
  size_t index() const { return index_; }

 private:
  size_t mask_;
  size_t offset_;
  size_t index_ = 0;
};
// SFINAE helper: its operator() (declared, never defined) has a return type
// that is only well-formed when `Hash` is invocable with `PassedKey` and
// `Eq` is invocable with (`ContainerKey`, `PassedKey`).
template <class ContainerKey, class Hash, class Eq>
struct RequireUsableKey {
  template <class PassedKey, class... Args>
  std::pair<
      decltype(std::declval<const Hash&>()(std::declval<const PassedKey&>())),
      decltype(std::declval<const Eq&>()(std::declval<const ContainerKey&>(),
                                         std::declval<const PassedKey&>()))>*
  operator()(const PassedKey&, const Args&...) const;
};

// Detects whether `Policy::apply` can decompose `Ts...` into a key usable
// with `Hash`/`Eq`. Primary template: not decomposable.
template <class E, class Policy, class Hash, class Eq, class... Ts>
struct IsDecomposable : std::false_type {};

// Specialization selected when Policy::apply(RequireUsableKey{}, Ts...) is
// well-formed, i.e. the arguments decompose into a usable key.
template <class Policy, class Hash, class Eq, class... Ts>
struct IsDecomposable<
    absl::void_t<decltype(Policy::apply(
        RequireUsableKey<typename Policy::key_type, Hash, Eq>(),
        std::declval<Ts>()...))>,
    Policy, Hash, Eq, Ts...> : std::true_type {};

// Returns true if `T` is nothrow-swappable (considering ADL swap). The
// false_type overload is selected when `T` is not swappable at all.
template <class T>
constexpr bool IsNoThrowSwappable(std::true_type = {} /* is_swappable */) {
  using std::swap;
  return noexcept(swap(std::declval<T&>(), std::declval<T&>()));
}
template <class T>
constexpr bool IsNoThrowSwappable(std::false_type /* is_swappable */) {
  return false;
}

// Counts trailing zero bits. `x` must be non-zero; ABSL_ASSUME lets the
// compiler drop countr_zero's zero-input handling.
template <typename T>
uint32_t TrailingZeros(T x) {
  ABSL_ASSUME(x != 0);
  return static_cast<uint32_t>(countr_zero(x));
}

// 0x80 repeated in every byte: the per-byte MSBs of a 64-bit group word.
constexpr uint64_t kMsbs8Bytes = 0x8080808080808080ULL;
// An abstract bitmask, such as that emitted by a SIMD instruction.
//
// `SignificantBits` is the number of abstract bits in the mask and `Shift`
// is the log2 of the width, in real bits, of one abstract bit: Shift == 0
// for the SSE movemask representation (one real bit per abstract bit) and
// Shift == 3 for the 64-bit group-word representations (each abstract bit is
// the MSB of a byte).
template <class T, int SignificantBits, int Shift = 0>
class NonIterableBitMask {
 public:
  explicit NonIterableBitMask(T mask) : mask_(mask) {}

  // True iff any abstract bit is set.
  explicit operator bool() const { return this->mask_ != 0; }

  // Index of the lowest set abstract bit.
  uint32_t LowestBitSet() const {
    return container_internal::TrailingZeros(mask_) >> Shift;
  }

  // Index of the highest set abstract bit.
  uint32_t HighestBitSet() const {
    return static_cast<uint32_t>((bit_width(mask_) - 1) >> Shift);
  }

  // Number of trailing zero abstract bits.
  uint32_t TrailingZeros() const {
    return container_internal::TrailingZeros(mask_) >> Shift;
  }

  // Number of leading zero abstract bits. The representation may be wider
  // than SignificantBits real bits, so the unused high bits are shifted out
  // before counting.
  uint32_t LeadingZeros() const {
    constexpr int total_significant_bits = SignificantBits << Shift;
    constexpr int extra_bits = sizeof(T) * 8 - total_significant_bits;
    return static_cast<uint32_t>(
               countl_zero(static_cast<T>(mask_ << extra_bits))) >>
           Shift;
  }

  T mask_;
};
// A mask that can be iterated over to yield the indices of its set abstract
// bits (it is its own iterator type).
//
// When `Shift == 3`, an abstract bit is the MSB of a byte. If
// `NullifyBitsOnIteration` is true (used by the NEON implementation, whose
// byte compares set all 8 bits of a matching byte), the non-MSB bits are
// cleared lazily at each iteration step; otherwise only byte MSBs may ever
// be set, which the constructor asserts.
template <class T, int SignificantBits, int Shift = 0,
          bool NullifyBitsOnIteration = false>
class BitMask : public NonIterableBitMask<T, SignificantBits, Shift> {
  using Base = NonIterableBitMask<T, SignificantBits, Shift>;
  static_assert(std::is_unsigned<T>::value, "");
  static_assert(Shift == 0 || Shift == 3, "");
  static_assert(!NullifyBitsOnIteration || Shift == 3, "");

 public:
  explicit BitMask(T mask) : Base(mask) {
    if (Shift == 3 && !NullifyBitsOnIteration) {
      // Invariant: only the per-byte MSBs are set in this mode.
      assert(this->mask_ == (this->mask_ & kMsbs8Bytes));
    }
  }
  using value_type = int;
  using iterator = BitMask;
  using const_iterator = BitMask;

  // Clears the lowest set abstract bit (after normalizing to MSBs if the
  // representation requires it).
  BitMask& operator++() {
    if (Shift == 3 && NullifyBitsOnIteration) {
      this->mask_ &= kMsbs8Bytes;
    }
    // Classic clear-lowest-set-bit trick.
    this->mask_ &= (this->mask_ - 1);
    return *this;
  }

  // Index of the lowest set abstract bit.
  uint32_t operator*() const { return Base::LowestBitSet(); }

  BitMask begin() const { return *this; }
  BitMask end() const { return BitMask(0); }

 private:
  friend bool operator==(const BitMask& a, const BitMask& b) {
    return a.mask_ == b.mask_;
  }
  friend bool operator!=(const BitMask& a, const BitMask& b) {
    return a.mask_ != b.mask_;
  }
};
// The low 7 bits of an element's hash, stored in the control byte of a full
// slot.
using h2_t = uint8_t;

// The state of a control byte:
//   kEmpty:    the slot has never held an element
//   kDeleted:  the slot held an element that was erased (tombstone)
//   kSentinel: marks the end of the control array
//   0..127:    the slot is full; the value is the element's H2
// The exact numeric values are chosen to make the SIMD and bit-trick scans
// below efficient; the static_asserts document each requirement.
enum class ctrl_t : int8_t {
  kEmpty = -128, kDeleted = -2, kSentinel = -1, };
static_assert(
    (static_cast<int8_t>(ctrl_t::kEmpty) &
     static_cast<int8_t>(ctrl_t::kDeleted) &
     static_cast<int8_t>(ctrl_t::kSentinel) & 0x80) != 0,
    "Special markers need to have the MSB to make checking for them efficient");
static_assert(
    ctrl_t::kEmpty < ctrl_t::kSentinel && ctrl_t::kDeleted < ctrl_t::kSentinel,
    "ctrl_t::kEmpty and ctrl_t::kDeleted must be smaller than "
    "ctrl_t::kSentinel to make the SIMD test of IsEmptyOrDeleted() efficient");
static_assert(
    ctrl_t::kSentinel == static_cast<ctrl_t>(-1),
    "ctrl_t::kSentinel must be -1 to elide loading it from memory into SIMD "
    "registers (pcmpeqd xmm, xmm)");
static_assert(ctrl_t::kEmpty == static_cast<ctrl_t>(-128),
              "ctrl_t::kEmpty must be -128 to make the SIMD check for its "
              "existence efficient (psignb xmm, xmm)");
static_assert(
    (~static_cast<int8_t>(ctrl_t::kEmpty) &
     ~static_cast<int8_t>(ctrl_t::kDeleted) &
     static_cast<int8_t>(ctrl_t::kSentinel) & 0x7F) != 0,
    "ctrl_t::kEmpty and ctrl_t::kDeleted must share an unset bit that is not "
    "shared by ctrl_t::kSentinel to make the scalar test for "
    "MaskEmptyOrDeleted() efficient");
static_assert(ctrl_t::kDeleted == static_cast<ctrl_t>(-2),
              "ctrl_t::kDeleted must be -2 to make the implementation of "
              "ConvertSpecialToEmptyAndFullToDeleted efficient");
// Control bytes shared by all empty tables (defined in the .cc file).
// EmptyGroup() points 16 bytes into the array so that the preceding bytes
// occupy the positions a real table's pre-control header would.
// NOTE(review): the +16 offset must match the layout of kEmptyGroup in the
// .cc file -- confirm there before changing.
ABSL_DLL extern const ctrl_t kEmptyGroup[32];
inline ctrl_t* EmptyGroup() {
  // Casting away const is safe: no write is ever performed through the
  // returned pointer for an empty table.
  return const_cast<ctrl_t*>(kEmptyGroup + 16);
}

// Control bytes used by tables in small-object-optimized (SOO) mode
// (defined in the .cc file).
ABSL_DLL extern const ctrl_t kSooControl[17];
inline ctrl_t* SooControl() {
  return const_cast<ctrl_t*>(kSooControl);
}
// Whether `ctrl` is the shared SOO control array, i.e. the table is in SOO
// mode.
inline bool IsSooControl(const ctrl_t* ctrl) { return ctrl == SooControl(); }

// Returns a pointer to the generation byte shared by all empty tables.
GenerationType* EmptyGeneration();

// Whether `generation` holds the sentinel used by all empty tables.
inline bool IsEmptyGeneration(const GenerationType* generation) {
  return *generation == SentinelEmptyGeneration();
}

// Debug-only insertion-order randomization hook; defined out of line.
bool ShouldInsertBackwardsForDebug(size_t capacity, size_t hash,
                                   const ctrl_t* ctrl);
ABSL_ATTRIBUTE_ALWAYS_INLINE inline bool ShouldInsertBackwards(
    ABSL_ATTRIBUTE_UNUSED size_t capacity, ABSL_ATTRIBUTE_UNUSED size_t hash,
    ABSL_ATTRIBUTE_UNUSED const ctrl_t* ctrl) {
#if defined(NDEBUG)
  // Release builds always insert at the first candidate slot.
  return false;
#else
  return ShouldInsertBackwardsForDebug(capacity, hash, ctrl);
#endif
}

// Returns the offset within `mask` at which to insert: the lowest candidate
// in release builds, sometimes the highest in debug builds (see above) to
// shake out order-dependent code.
template <class Mask>
ABSL_ATTRIBUTE_ALWAYS_INLINE inline auto GetInsertionOffset(
    Mask mask, ABSL_ATTRIBUTE_UNUSED size_t capacity,
    ABSL_ATTRIBUTE_UNUSED size_t hash,
    ABSL_ATTRIBUTE_UNUSED const ctrl_t* ctrl) {
#if defined(NDEBUG)
  return mask.LowestBitSet();
#else
  return ShouldInsertBackwardsForDebug(capacity, hash, ctrl)
             ? mask.HighestBitSet()
             : mask.LowestBitSet();
#endif
}

// Mixes a per-table seed into H1. The low 12 bits of the control pointer
// are discarded because allocation alignment leaves them with little
// entropy.
inline size_t PerTableSalt(const ctrl_t* ctrl) {
  return reinterpret_cast<uintptr_t>(ctrl) >> 12;
}
// The upper bits of the hash, salted per table; selects the probe start.
inline size_t H1(size_t hash, const ctrl_t* ctrl) {
  return (hash >> 7) ^ PerTableSalt(ctrl);
}
// The low 7 bits of the hash; stored in the control byte of a full slot.
inline h2_t H2(size_t hash) { return hash & 0x7F; }

// Control-byte classification helpers.
inline bool IsEmpty(ctrl_t c) { return c == ctrl_t::kEmpty; }
// Full bytes are exactly the non-negative ones (specials all have the MSB).
inline bool IsFull(ctrl_t c) {
  return static_cast<std::underlying_type_t<ctrl_t>>(c) >= 0;
}
inline bool IsDeleted(ctrl_t c) { return c == ctrl_t::kDeleted; }
// kEmpty and kDeleted are the only values below kSentinel (-1).
inline bool IsEmptyOrDeleted(ctrl_t c) { return c < ctrl_t::kSentinel; }
#ifdef ABSL_INTERNAL_HAVE_SSE2
// Signed per-byte a > b comparison, returning 0xff in matching bytes.
//
// Works around a GCC issue where _mm_cmpgt_epi8 misbehaves when `char` is
// unsigned (-funsigned-char): the signed comparison is emulated via
// saturating subtraction -- (b - a) saturated is negative (MSB set) exactly
// when a > b. NOTE(review): confirm the workaround is still needed for the
// GCC versions this builds with before simplifying.
inline __m128i _mm_cmpgt_epi8_fixed(__m128i a, __m128i b) {
#if defined(__GNUC__) && !defined(__clang__)
  if (std::is_unsigned<char>::value) {
    const __m128i mask = _mm_set1_epi8(0x80);
    const __m128i diff = _mm_subs_epi8(b, a);
    return _mm_cmpeq_epi8(_mm_and_si128(diff, mask), mask);
  }
#endif
  return _mm_cmpgt_epi8(a, b);
}
// SSE2 group: operates on 16 control bytes at a time.
struct GroupSse2Impl {
  static constexpr size_t kWidth = 16;

  explicit GroupSse2Impl(const ctrl_t* pos) {
    // Unaligned load: `pos` need not be 16-byte aligned.
    ctrl = _mm_loadu_si128(reinterpret_cast<const __m128i*>(pos));
  }

  // Returns a bitmask of the control bytes equal to `hash`.
  BitMask<uint16_t, kWidth> Match(h2_t hash) const {
    auto match = _mm_set1_epi8(static_cast<char>(hash));
    // The zero-init + reassignment (rather than returning directly) looks
    // redundant; presumably a compiler-diagnostic workaround -- verify
    // before simplifying.
    BitMask<uint16_t, kWidth> result = BitMask<uint16_t, kWidth>(0);
    result = BitMask<uint16_t, kWidth>(
        static_cast<uint16_t>(_mm_movemask_epi8(_mm_cmpeq_epi8(match, ctrl))));
    return result;
  }

  // Returns a bitmask of the kEmpty bytes.
  NonIterableBitMask<uint16_t, kWidth> MaskEmpty() const {
#ifdef ABSL_INTERNAL_HAVE_SSSE3
    // psignb(ctrl, ctrl) leaves the MSB set only for kEmpty (-128), because
    // negating -128 overflows back to -128; all other bytes end up >= 0.
    return NonIterableBitMask<uint16_t, kWidth>(
        static_cast<uint16_t>(_mm_movemask_epi8(_mm_sign_epi8(ctrl, ctrl))));
#else
    auto match = _mm_set1_epi8(static_cast<char>(ctrl_t::kEmpty));
    return NonIterableBitMask<uint16_t, kWidth>(
        static_cast<uint16_t>(_mm_movemask_epi8(_mm_cmpeq_epi8(match, ctrl))));
#endif
  }

  // Returns a bitmask of the full bytes (MSB clear).
  BitMask<uint16_t, kWidth> MaskFull() const {
    return BitMask<uint16_t, kWidth>(
        static_cast<uint16_t>(_mm_movemask_epi8(ctrl) ^ 0xffff));
  }

  // Returns a bitmask of the non-full bytes (MSB set).
  auto MaskNonFull() const {
    return BitMask<uint16_t, kWidth>(
        static_cast<uint16_t>(_mm_movemask_epi8(ctrl)));
  }

  // Returns a bitmask of kEmpty or kDeleted bytes: exactly those strictly
  // less than kSentinel (-1).
  NonIterableBitMask<uint16_t, kWidth> MaskEmptyOrDeleted() const {
    auto special = _mm_set1_epi8(static_cast<char>(ctrl_t::kSentinel));
    return NonIterableBitMask<uint16_t, kWidth>(static_cast<uint16_t>(
        _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl))));
  }

  // Number of lowest-index empty-or-deleted bytes before the first full or
  // sentinel byte.
  uint32_t CountLeadingEmptyOrDeleted() const {
    auto special = _mm_set1_epi8(static_cast<char>(ctrl_t::kSentinel));
    // +1 makes the all-set mask (0xffff) yield 16 trailing zeros.
    return TrailingZeros(static_cast<uint32_t>(
        _mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)) + 1));
  }

  // Rewrites the group at `dst`: special bytes -> kEmpty, full -> kDeleted.
  void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
    auto msbs = _mm_set1_epi8(static_cast<char>(-128));
    auto x126 = _mm_set1_epi8(126);
#ifdef ABSL_INTERNAL_HAVE_SSSE3
    // pshufb zeroes lanes whose selector byte has the MSB set (specials) and
    // yields 126 for full lanes; OR-ing 0x80 gives -128 (kEmpty) or -2
    // (kDeleted).
    auto res = _mm_or_si128(_mm_shuffle_epi8(x126, ctrl), msbs);
#else
    auto zero = _mm_setzero_si128();
    auto special_mask = _mm_cmpgt_epi8_fixed(zero, ctrl);
    auto res = _mm_or_si128(msbs, _mm_andnot_si128(special_mask, x126));
#endif
    _mm_storeu_si128(reinterpret_cast<__m128i*>(dst), res);
  }

  __m128i ctrl;
};
#endif
#if defined(ABSL_INTERNAL_HAVE_ARM_NEON) && defined(ABSL_IS_LITTLE_ENDIAN)
// NEON group: operates on 8 control bytes at a time. Guarded to
// little-endian so the byte order matches the bit order of the 64-bit
// masks.
struct GroupAArch64Impl {
  static constexpr size_t kWidth = 8;

  explicit GroupAArch64Impl(const ctrl_t* pos) {
    ctrl = vld1_u8(reinterpret_cast<const uint8_t*>(pos));
  }

  // Returns a bitmask of the bytes equal to `hash`. NEON byte compares set
  // all 8 bits of a matching byte, hence NullifyBitsOnIteration below.
  auto Match(h2_t hash) const {
    uint8x8_t dup = vdup_n_u8(hash);
    auto mask = vceq_u8(ctrl, dup);
    return BitMask<uint64_t, kWidth, 3,
                   /*NullifyBitsOnIteration=*/true>(
        vget_lane_u64(vreinterpret_u64_u8(mask), 0));
  }

  // Returns a bitmask of the kEmpty bytes.
  NonIterableBitMask<uint64_t, kWidth, 3> MaskEmpty() const {
    uint64_t mask =
        vget_lane_u64(vreinterpret_u64_u8(vceq_s8(
                          vdup_n_s8(static_cast<int8_t>(ctrl_t::kEmpty)),
                          vreinterpret_s8_u8(ctrl))),
                      0);
    return NonIterableBitMask<uint64_t, kWidth, 3>(mask);
  }

  // Returns a bitmask of the full bytes (signed value >= 0).
  auto MaskFull() const {
    uint64_t mask = vget_lane_u64(
        vreinterpret_u64_u8(vcge_s8(vreinterpret_s8_u8(ctrl),
                                    vdup_n_s8(static_cast<int8_t>(0)))),
        0);
    return BitMask<uint64_t, kWidth, 3,
                   /*NullifyBitsOnIteration=*/true>(mask);
  }

  // Returns a bitmask of the non-full bytes (signed value < 0).
  auto MaskNonFull() const {
    uint64_t mask = vget_lane_u64(
        vreinterpret_u64_u8(vclt_s8(vreinterpret_s8_u8(ctrl),
                                    vdup_n_s8(static_cast<int8_t>(0)))),
        0);
    return BitMask<uint64_t, kWidth, 3,
                   /*NullifyBitsOnIteration=*/true>(mask);
  }

  // Returns a bitmask of the kEmpty or kDeleted bytes (< kSentinel).
  NonIterableBitMask<uint64_t, kWidth, 3> MaskEmptyOrDeleted() const {
    uint64_t mask =
        vget_lane_u64(vreinterpret_u64_u8(vcgt_s8(
                          vdup_n_s8(static_cast<int8_t>(ctrl_t::kSentinel)),
                          vreinterpret_s8_u8(ctrl))),
                      0);
    return NonIterableBitMask<uint64_t, kWidth, 3>(mask);
  }

  // Number of lowest-index bytes that are empty or deleted, i.e. before the
  // first full or sentinel byte.
  uint32_t CountLeadingEmptyOrDeleted() const {
    uint64_t mask =
        vget_lane_u64(vreinterpret_u64_u8(vcle_s8(
                          vdup_n_s8(static_cast<int8_t>(ctrl_t::kSentinel)),
                          vreinterpret_s8_u8(ctrl))),
                      0);
    // Full/sentinel bytes are all-ones in `mask`; its trailing zero bytes
    // are exactly the leading empty-or-deleted controls.
    return static_cast<uint32_t>(countr_zero(mask)) >> 3;
  }

  // Rewrites the group at `dst`: special bytes -> kEmpty, full -> kDeleted.
  void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
    uint64_t mask = vget_lane_u64(vreinterpret_u64_u8(ctrl), 0);
    constexpr uint64_t slsbs = 0x0202020202020202ULL;
    constexpr uint64_t midbs = 0x7e7e7e7e7e7e7e7eULL;
    // Per byte: x is 2 for special controls (MSB set), 0 for full ones, so
    // x + 0x7e is 0x80 (kEmpty) or 0x7e; OR-ing the MSBs makes the latter
    // 0xfe (kDeleted).
    auto x = slsbs & (mask >> 6);
    auto res = (x + midbs) | kMsbs8Bytes;
    little_endian::Store64(dst, res);
  }

  uint8x8_t ctrl;
};
#endif
// Portable group: treats 8 control bytes as a single uint64_t and uses SWAR
// bit tricks instead of SIMD.
struct GroupPortableImpl {
  static constexpr size_t kWidth = 8;

  explicit GroupPortableImpl(const ctrl_t* pos)
      : ctrl(little_endian::Load64(pos)) {}

  // Returns a bitmask (per-byte MSBs) of the bytes possibly equal to `hash`,
  // using the "determine if a word has a byte equal to n" trick from
  // https://graphics.stanford.edu/~seander/bithacks.html.
  // Caveat: false positives are possible, but only when the group also
  // contains a real match, never on special control bytes, and they are
  // resolved by the caller's subsequent key comparison.
  BitMask<uint64_t, kWidth, 3> Match(h2_t hash) const {
    constexpr uint64_t lsbs = 0x0101010101010101ULL;
    auto x = ctrl ^ (lsbs * hash);
    return BitMask<uint64_t, kWidth, 3>((x - lsbs) & ~x & kMsbs8Bytes);
  }

  // Returns a bitmask of the kEmpty bytes: the only control value with the
  // MSB set and bit 1 clear (kDeleted/kSentinel both have bit 1 set).
  NonIterableBitMask<uint64_t, kWidth, 3> MaskEmpty() const {
    return NonIterableBitMask<uint64_t, kWidth, 3>((ctrl & ~(ctrl << 6)) &
                                                   kMsbs8Bytes);
  }

  // Returns a bitmask of the full bytes (MSB clear).
  BitMask<uint64_t, kWidth, 3> MaskFull() const {
    return BitMask<uint64_t, kWidth, 3>((ctrl ^ kMsbs8Bytes) & kMsbs8Bytes);
  }

  // Returns a bitmask of the non-full bytes (MSB set).
  auto MaskNonFull() const {
    return BitMask<uint64_t, kWidth, 3>(ctrl & kMsbs8Bytes);
  }

  // Returns a bitmask of kEmpty or kDeleted bytes: MSB set and bit 0 clear
  // (kSentinel is the only special value with bit 0 set).
  NonIterableBitMask<uint64_t, kWidth, 3> MaskEmptyOrDeleted() const {
    return NonIterableBitMask<uint64_t, kWidth, 3>((ctrl & ~(ctrl << 7)) &
                                                   kMsbs8Bytes);
  }

  // Number of lowest-index bytes that are empty or deleted. Per byte, bit 0
  // of (ctrl | ~(ctrl >> 7)) is zero exactly for kEmpty/kDeleted bytes.
  uint32_t CountLeadingEmptyOrDeleted() const {
    constexpr uint64_t bits = 0x0101010101010101ULL;
    return static_cast<uint32_t>(countr_zero((ctrl | ~(ctrl >> 7)) & bits) >>
                                 3);
  }

  // Rewrites the group at `dst`: special bytes (MSB set) -> kEmpty (0x80),
  // full bytes -> kDeleted (0xfe), branchlessly from the MSBs alone.
  void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
    constexpr uint64_t lsbs = 0x0101010101010101ULL;
    auto x = ctrl & kMsbs8Bytes;
    auto res = (~x + (x >> 7)) & ~lsbs;
    little_endian::Store64(dst, res);
  }

  uint64_t ctrl;
};
#ifdef ABSL_INTERNAL_HAVE_SSE2
// SSE2 serves both probing and whole-table scans.
using Group = GroupSse2Impl;
using GroupFullEmptyOrDeleted = GroupSse2Impl;
#elif defined(ABSL_INTERNAL_HAVE_ARM_NEON) && defined(ABSL_IS_LITTLE_ENDIAN)
// NEON is used for probing, but the portable SWAR implementation is used
// for full/empty/deleted scans -- NEON lacks a movemask equivalent, so the
// portable code is presumably faster there; verify with benchmarks before
// changing.
using Group = GroupAArch64Impl;
using GroupFullEmptyOrDeleted = GroupPortableImpl;
#else
using Group = GroupPortableImpl;
using GroupFullEmptyOrDeleted = GroupPortableImpl;
#endif
// Denominator of the probability used by the rehash-for-bug-detection
// heuristics (`should_rehash_for_bug_detection_on_*`, defined out of line).
// NOTE(review): the callers live in the .cc file -- confirm usage there.
inline size_t RehashProbabilityConstant() { return size_t{16}; }
// When generations are enabled, tracks the outstanding reserve() state and
// owns the pointer to the table's generation byte, which is bumped to
// invalidate iterators.
class CommonFieldsGenerationInfoEnabled {
  // Sentinel stored in reserved_growth_ meaning "the reservation was just
  // exhausted": the next insert resets it to 0 and bumps the generation.
  static constexpr size_t kReservedGrowthJustRanOut =
      (std::numeric_limits<size_t>::max)();

 public:
  CommonFieldsGenerationInfoEnabled() = default;
  // Move leaves `that` in the empty-table state.
  CommonFieldsGenerationInfoEnabled(CommonFieldsGenerationInfoEnabled&& that)
      : reserved_growth_(that.reserved_growth_),
        reservation_size_(that.reservation_size_),
        generation_(that.generation_) {
    that.reserved_growth_ = 0;
    that.reservation_size_ = 0;
    that.generation_ = EmptyGeneration();
  }
  CommonFieldsGenerationInfoEnabled& operator=(
      CommonFieldsGenerationInfoEnabled&&) = default;

  // Whether to rehash probabilistically to surface iterator bugs; defined
  // out of line.
  bool should_rehash_for_bug_detection_on_insert(const ctrl_t* ctrl,
                                                 size_t capacity) const;
  bool should_rehash_for_bug_detection_on_move(const ctrl_t* ctrl,
                                               size_t capacity) const;

  // Inserts within an outstanding reserve() don't invalidate iterators, so
  // the generation is only incremented once the reservation runs out.
  void maybe_increment_generation_on_insert() {
    if (reserved_growth_ == kReservedGrowthJustRanOut) reserved_growth_ = 0;
    if (reserved_growth_ > 0) {
      if (--reserved_growth_ == 0) reserved_growth_ = kReservedGrowthJustRanOut;
    } else {
      increment_generation();
    }
  }
  // Invalidates all outstanding iterators into this table.
  void increment_generation() { *generation_ = NextGeneration(*generation_); }
  void reset_reserved_growth(size_t reservation, size_t size) {
    reserved_growth_ = reservation - size;
  }
  size_t reserved_growth() const { return reserved_growth_; }
  void set_reserved_growth(size_t r) { reserved_growth_ = r; }
  size_t reservation_size() const { return reservation_size_; }
  void set_reservation_size(size_t r) { reservation_size_ = r; }
  GenerationType generation() const { return *generation_; }
  void set_generation(GenerationType g) { *generation_ = g; }
  GenerationType* generation_ptr() const { return generation_; }
  void set_generation_ptr(GenerationType* g) { generation_ = g; }

 private:
  // Number of inserts remaining before the last reserve() is exhausted (or
  // the sentinel above).
  size_t reserved_growth_ = 0;
  // Argument of the last reserve() call.
  size_t reservation_size_ = 0;
  // Shared empty-table generation until a backing array is allocated.
  GenerationType* generation_ = EmptyGeneration();
};

// No-op stand-in used when generations are disabled; everything compiles
// away.
class CommonFieldsGenerationInfoDisabled {
 public:
  CommonFieldsGenerationInfoDisabled() = default;
  CommonFieldsGenerationInfoDisabled(CommonFieldsGenerationInfoDisabled&&) =
      default;
  CommonFieldsGenerationInfoDisabled& operator=(
      CommonFieldsGenerationInfoDisabled&&) = default;

  bool should_rehash_for_bug_detection_on_insert(const ctrl_t*, size_t) const {
    return false;
  }
  bool should_rehash_for_bug_detection_on_move(const ctrl_t*, size_t) const {
    return false;
  }
  void maybe_increment_generation_on_insert() {}
  void increment_generation() {}
  void reset_reserved_growth(size_t, size_t) {}
  size_t reserved_growth() const { return 0; }
  void set_reserved_growth(size_t) {}
  size_t reservation_size() const { return 0; }
  void set_reservation_size(size_t) {}
  GenerationType generation() const { return 0; }
  void set_generation(GenerationType) {}
  GenerationType* generation_ptr() const { return nullptr; }
  void set_generation_ptr(GenerationType*) {}
};
// Per-iterator generation state when generations are enabled: snapshots the
// table's generation at construction so later validity checks can detect a
// rehash/move.
class HashSetIteratorGenerationInfoEnabled {
 public:
  HashSetIteratorGenerationInfoEnabled() = default;
  explicit HashSetIteratorGenerationInfoEnabled(
      const GenerationType* generation_ptr)
      : generation_ptr_(generation_ptr), generation_(*generation_ptr) {}

  GenerationType generation() const { return generation_; }
  // Re-snapshots the table's current generation.
  void reset_generation() { generation_ = *generation_ptr_; }
  const GenerationType* generation_ptr() const { return generation_ptr_; }
  void set_generation_ptr(const GenerationType* ptr) { generation_ptr_ = ptr; }

 private:
  const GenerationType* generation_ptr_ = EmptyGeneration();
  GenerationType generation_ = *generation_ptr_;
};

// No-op stand-in used when generations are disabled.
class HashSetIteratorGenerationInfoDisabled {
 public:
  HashSetIteratorGenerationInfoDisabled() = default;
  explicit HashSetIteratorGenerationInfoDisabled(const GenerationType*) {}

  GenerationType generation() const { return 0; }
  void reset_generation() {}
  const GenerationType* generation_ptr() const { return nullptr; }
  void set_generation_ptr(const GenerationType*) {}
};

// Select the enabled or disabled flavor at compile time.
#ifdef ABSL_SWISSTABLE_ENABLE_GENERATIONS
using CommonFieldsGenerationInfo = CommonFieldsGenerationInfoEnabled;
using HashSetIteratorGenerationInfo = HashSetIteratorGenerationInfoEnabled;
#else
using CommonFieldsGenerationInfo = CommonFieldsGenerationInfoDisabled;
using HashSetIteratorGenerationInfo = HashSetIteratorGenerationInfoDisabled;
#endif
// Packs growth_left (how many more elements can be inserted before the
// table must grow) together with a "may contain deleted slots" flag.
//
// Encoding: the MSB (kDeletedBit) is set iff the table may contain kDeleted
// slots; the remaining bits (kGrowthLeftMask) hold growth_left. This lets
// several common predicates be answered with a single signed comparison.
class GrowthInfo {
 public:
  // Default-constructed value is intentionally uninitialized; this object
  // lives inside the table's backing array (just before the control bytes)
  // and is set up via InitGrowthLeftNoDeleted().
  GrowthInfo() = default;

  // (Re)initializes with `growth_left` and no deleted slots.
  void InitGrowthLeftNoDeleted(size_t growth_left) {
    growth_left_info_ = growth_left;
  }
  // A full slot became empty: one more insert's worth of growth available.
  void OverwriteFullAsEmpty() { ++growth_left_info_; }
  // An empty slot became full.
  void OverwriteEmptyAsFull() {
    assert(GetGrowthLeft() > 0);
    --growth_left_info_;
  }
  // `cnt` empty slots became full.
  void OverwriteManyEmptyAsFull(size_t cnt) {
    assert(GetGrowthLeft() >= cnt);
    growth_left_info_ -= cnt;
  }
  // A slot whose control byte was `ctrl` became full: growth is consumed
  // only if it was empty (reusing a tombstone doesn't consume growth).
  void OverwriteControlAsFull(ctrl_t ctrl) {
    assert(GetGrowthLeft() >= static_cast<size_t>(IsEmpty(ctrl)));
    growth_left_info_ -= static_cast<size_t>(IsEmpty(ctrl));
  }
  // A full slot became a tombstone.
  void OverwriteFullAsDeleted() { growth_left_info_ |= kDeletedBit; }
  // True iff there are no deleted slots AND growth_left > 0: one signed
  // comparison (MSB clear and value non-zero).
  bool HasNoDeletedAndGrowthLeft() const {
    return static_cast<std::make_signed_t<size_t>>(growth_left_info_) > 0;
  }
  // True iff growth_left == 0 and there are no deleted slots.
  bool HasNoGrowthLeftAndNoDeleted() const { return growth_left_info_ == 0; }
  // True iff there are no deleted slots (MSB clear).
  bool HasNoDeleted() const {
    return static_cast<std::make_signed_t<size_t>>(growth_left_info_) >= 0;
  }
  // Returns growth_left with the deleted bit masked off.
  size_t GetGrowthLeft() const { return growth_left_info_ & kGrowthLeftMask; }

 private:
  static constexpr size_t kGrowthLeftMask = ((~size_t{}) >> 1);
  static constexpr size_t kDeletedBit = ~kGrowthLeftMask;
  size_t growth_left_info_;
};

// GrowthInfo is stored in the backing array directly before the control
// bytes, so it must have exactly the size and alignment of size_t.
static_assert(sizeof(GrowthInfo) == sizeof(size_t), "");
static_assert(alignof(GrowthInfo) == alignof(size_t), "");
// Returns whether `n` is a valid capacity: a positive integer of the form
// 2^m - 1, so that `hash % (n + 1)` reduces to a bit-mask.
inline bool IsValidCapacity(size_t n) {
  if (n == 0) return false;
  // n is 2^m - 1 iff n + 1 is a power of two, i.e. (n + 1) & n == 0.
  return ((n + 1) & n) == 0;
}
// Number of control bytes mirrored past the sentinel so that a whole Group
// can be loaded starting at any slot index without bounds checks.
constexpr size_t NumClonedBytes() { return Group::kWidth - 1; }
// Total control bytes: one per slot, plus the sentinel, plus the clones.
constexpr size_t NumControlBytes(size_t capacity) {
  return capacity + 1 + NumClonedBytes();
}
// Byte offset of the control array within the backing allocation, which is
// laid out as:
//   [HashtablezInfoHandle (only if sampled)][GrowthInfo][control bytes]...
inline static size_t ControlOffset(bool has_infoz) {
  return (has_infoz ? sizeof(HashtablezInfoHandle) : 0) + sizeof(GrowthInfo);
}

// Computes the offsets of the pieces of a table's single backing allocation:
//   [infoz?][GrowthInfo][control bytes][generation?][padding][slots]
class RawHashSetLayout {
 public:
  explicit RawHashSetLayout(size_t capacity, size_t slot_align, bool has_infoz)
      : capacity_(capacity),
        control_offset_(ControlOffset(has_infoz)),
        generation_offset_(control_offset_ + NumControlBytes(capacity)),
        // Round up to slot_align: `~slot_align + 1` is -slot_align, so the
        // AND clears the low bits (slot_align must be a power of two).
        slot_offset_(
            (generation_offset_ + NumGenerationBytes() + slot_align - 1) &
            (~slot_align + 1)) {
    assert(IsValidCapacity(capacity));
  }

  size_t capacity() const { return capacity_; }
  size_t control_offset() const { return control_offset_; }
  size_t generation_offset() const { return generation_offset_; }
  size_t slot_offset() const { return slot_offset_; }
  // Total size in bytes of the backing allocation for `slot_size` slots.
  size_t alloc_size(size_t slot_size) const {
    return slot_offset_ + capacity_ * slot_size;
  }

 private:
  size_t capacity_;
  size_t control_offset_;
  size_t generation_offset_;
  size_t slot_offset_;
};

// Forward declaration; presumably befriended by raw_hash_set elsewhere to
// expose internals to the free-function helpers.
struct HashtableFreeFunctionsAccess;

// Number of elements a table in small-object-optimized (SOO) mode stores
// inline, without a heap allocation.
constexpr size_t SooCapacity() { return 1; }
// Constructor tag: create the table in empty SOO mode.
struct soo_tag_t {};
// Constructor tag: create the table in SOO mode already holding one element.
struct full_soo_tag_t {};
// GCC (but not Clang) emits spurious -Wmaybe-uninitialized/-Wuninitialized
// warnings when reading union members that are only conditionally
// initialized (see the unions below). These macros wrap such reads to
// locally suppress those diagnostics; on other compilers they are identity.
#if !defined(__clang__) && defined(__GNUC__)
#define ABSL_SWISSTABLE_IGNORE_UNINITIALIZED(x) \
  _Pragma("GCC diagnostic push")                                    \
  _Pragma("GCC diagnostic ignored \"-Wmaybe-uninitialized\"")       \
  _Pragma("GCC diagnostic ignored \"-Wuninitialized\"") x;          \
  _Pragma("GCC diagnostic pop")
#define ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(x) \
  ABSL_SWISSTABLE_IGNORE_UNINITIALIZED(return x)
#else
#define ABSL_SWISSTABLE_IGNORE_UNINITIALIZED(x) x
#define ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(x) return x
#endif
// A pointer that may be read while uninitialized (reads go through the
// warning-suppression macro above). Used for the slot-array pointer, which
// is not set for default-constructed tables.
union MaybeInitializedPtr {
  void* get() const { ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(p); }
  void set(void* ptr) { p = ptr; }

  void* p;
};

// The pointers a heap-backed table stores: control bytes and slot array
// (both pointing into the single backing allocation).
struct HeapPtrs {
  HeapPtrs() = default;
  explicit HeapPtrs(ctrl_t* c) : control(c) {}

  // Control bytes (EmptyGroup() for default-constructed tables).
  ctrl_t* control;
  // Slot array; may be uninitialized for default-constructed tables.
  MaybeInitializedPtr slot_array;
};

// Either the heap pointers of an allocated table or the inline element
// storage of an SOO table; which member is active depends on the table's
// mode. All reads go through the warning-suppression macros.
union HeapOrSoo {
  HeapOrSoo() = default;
  explicit HeapOrSoo(ctrl_t* c) : heap(c) {}

  ctrl_t*& control() {
    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(heap.control);
  }
  ctrl_t* control() const {
    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(heap.control);
  }
  MaybeInitializedPtr& slot_array() {
    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(heap.slot_array);
  }
  MaybeInitializedPtr slot_array() const {
    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(heap.slot_array);
  }
  void* get_soo_data() {
    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(soo_data);
  }
  const void* get_soo_data() const {
    ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(soo_data);
  }

  HeapPtrs heap;
  unsigned char soo_data[sizeof(HeapPtrs)];
};
// CommonFields holds the raw_hash_set state that does not depend on the
// template parameters: capacity, element count (packed with the sampling
// bit), and either the heap pointers or the inline SOO storage.
class CommonFields : public CommonFieldsGenerationInfo {
 public:
  // Default-constructed tables point at the shared empty control bytes.
  CommonFields() : capacity_(0), size_(0), heap_or_soo_(EmptyGroup()) {}
  // Empty table in SOO mode.
  explicit CommonFields(soo_tag_t) : capacity_(SooCapacity()), size_(0) {}
  // SOO-mode table already containing one element.
  explicit CommonFields(full_soo_tag_t)
      : capacity_(SooCapacity()), size_(size_t{1} << HasInfozShift()) {}

  // Not copyable (the table owns its backing allocation); movable.
  CommonFields(const CommonFields&) = delete;
  CommonFields& operator=(const CommonFields&) = delete;
  CommonFields(CommonFields&& that) = default;
  CommonFields& operator=(CommonFields&&) = default;

  template <bool kSooEnabled>
  static CommonFields CreateDefault() {
    return kSooEnabled ? CommonFields{soo_tag_t{}} : CommonFields{};
  }

  // Inline element storage used in SOO mode.
  const void* soo_data() const { return heap_or_soo_.get_soo_data(); }
  void* soo_data() { return heap_or_soo_.get_soo_data(); }

  HeapOrSoo heap_or_soo() const { return heap_or_soo_; }
  const HeapOrSoo& heap_or_soo_ref() const { return heap_or_soo_; }

  // The control bytes of the table.
  ctrl_t* control() const { return heap_or_soo_.control(); }
  void set_control(ctrl_t* c) { heap_or_soo_.control() = c; }

  // Start of the backing allocation: ControlOffset bytes before control()
  // (GrowthInfo, and the infoz handle when sampled, live there).
  void* backing_array_start() const {
    assert(reinterpret_cast<uintptr_t>(control()) % alignof(size_t) == 0);
    return control() - ControlOffset(has_infoz());
  }

  // The array of slots.
  void* slot_array() const { return heap_or_soo_.slot_array().get(); }
  MaybeInitializedPtr slots_union() const { return heap_or_soo_.slot_array(); }
  void set_slots(void* s) { heap_or_soo_.slot_array().set(s); }

  // Number of filled slots. `size_` stores the count shifted left by one,
  // with the has-infoz (sampling) bit in the LSB.
  size_t size() const { return size_ >> HasInfozShift(); }
  void set_size(size_t s) {
    // Preserve the has_infoz bit while replacing the count.
    size_ = (s << HasInfozShift()) | (size_ & HasInfozMask());
  }
  // SOO tables are never sampled, so size_ can be written directly.
  void set_empty_soo() {
    AssertInSooMode();
    size_ = 0;
  }
  void set_full_soo() {
    AssertInSooMode();
    size_ = size_t{1} << HasInfozShift();
  }
  void increment_size() {
    assert(size() < capacity());
    size_ += size_t{1} << HasInfozShift();
  }
  void decrement_size() {
    assert(size() > 0);
    size_ -= size_t{1} << HasInfozShift();
  }

  // Total number of slots.
  size_t capacity() const { return capacity_; }
  void set_capacity(size_t c) {
    assert(c == 0 || IsValidCapacity(c));
    capacity_ = c;
  }

  // Number of slots we can still fill without rehashing. GrowthInfo is
  // stored in the backing array immediately before the control bytes.
  size_t growth_left() const { return growth_info().GetGrowthLeft(); }
  GrowthInfo& growth_info() {
    auto* gl_ptr = reinterpret_cast<GrowthInfo*>(control()) - 1;
    assert(reinterpret_cast<uintptr_t>(gl_ptr) % alignof(GrowthInfo) == 0);
    return *gl_ptr;
  }
  GrowthInfo growth_info() const {
    return const_cast<CommonFields*>(this)->growth_info();
  }

  // Whether this table is sampled by hashtablez (rare; stored in size_'s
  // low bit).
  bool has_infoz() const {
    return ABSL_PREDICT_FALSE((size_ & HasInfozMask()) != 0);
  }
  void set_has_infoz(bool has_infoz) {
    size_ = (size() << HasInfozShift()) | static_cast<size_t>(has_infoz);
  }

  // The sampling handle, stored at the very start of the backing array.
  HashtablezInfoHandle infoz() {
    return has_infoz()
               ? *reinterpret_cast<HashtablezInfoHandle*>(backing_array_start())
               : HashtablezInfoHandle();
  }
  void set_infoz(HashtablezInfoHandle infoz) {
    assert(has_infoz());
    *reinterpret_cast<HashtablezInfoHandle*>(backing_array_start()) = infoz;
  }

  bool should_rehash_for_bug_detection_on_insert() const {
    return CommonFieldsGenerationInfo::
        should_rehash_for_bug_detection_on_insert(control(), capacity());
  }
  bool should_rehash_for_bug_detection_on_move() const {
    return CommonFieldsGenerationInfo::should_rehash_for_bug_detection_on_move(
        control(), capacity());
  }
  void reset_reserved_growth(size_t reservation) {
    CommonFieldsGenerationInfo::reset_reserved_growth(reservation, size());
  }

  // Total bytes of the backing allocation for the current capacity.
  size_t alloc_size(size_t slot_size, size_t slot_align) const {
    return RawHashSetLayout(capacity(), slot_align, has_infoz())
        .alloc_size(slot_size);
  }

  // Moves capacity/size/generation info, but not the heap/SOO payload.
  void move_non_heap_or_soo_fields(CommonFields& that) {
    static_cast<CommonFieldsGenerationInfo&>(*this) =
        std::move(static_cast<CommonFieldsGenerationInfo&>(that));
    capacity_ = that.capacity_;
    size_ = that.size_;
  }

  // Number of kDeleted control bytes (tombstones); debug/metrics helper.
  size_t TombstonesCount() const {
    return static_cast<size_t>(
        std::count(control(), control() + capacity(), ctrl_t::kDeleted));
  }

 private:
  // The has_infoz bit occupies the lowest bit of size_.
  static constexpr size_t HasInfozShift() { return 1; }
  static constexpr size_t HasInfozMask() {
    return (size_t{1} << HasInfozShift()) - 1;
  }

  // The direct size_ writes above are only valid in SOO mode.
  void AssertInSooMode() const {
    assert(capacity() == SooCapacity());
    assert(!has_infoz());
  }

  // Always 0, SooCapacity(), or 2^m - 1.
  size_t capacity_;
  // size() << 1 | has_infoz bit.
  size_t size_;
  // Heap pointers or inline SOO storage, depending on mode.
  HeapOrSoo heap_or_soo_;
};
template <class Policy, class Hash, class Eq, class Alloc>
class raw_hash_set;

// Returns the next valid capacity after `n`: capacities are 2^m - 1, so the
// successor of n is 2n + 1.
inline size_t NextCapacity(size_t n) {
  assert(IsValidCapacity(n) || n == 0);
  return n * 2 + 1;
}

// Converts deleted (and other special) control bytes to kEmpty and full
// bytes to kDeleted across the whole control array. Defined out of line;
// see its definition for exact sentinel handling.
void ConvertDeletedToEmptyAndFullToDeleted(ctrl_t* ctrl, size_t capacity);

// Rounds `n` up to the nearest valid capacity (2^m - 1); 0 maps to 1.
inline size_t NormalizeCapacity(size_t n) {
  return n ? ~size_t{} >> countl_zero(n) : 1;
}

// Maximum number of elements a table of the given capacity may hold before
// it must grow: capacity * 7/8. With 8-wide groups, capacity 7 is capped at
// 6 (capacity - capacity / 8 would be 7, leaving no empty slot in the only
// group).
inline size_t CapacityToGrowth(size_t capacity) {
  assert(IsValidCapacity(capacity));
  if (Group::kWidth == 8 && capacity == 7) {
    return 6;
  }
  return capacity - capacity / 8;
}

// From a desired `growth`, a lower bound on the necessary capacity (before
// normalization to 2^m - 1); the inverse of CapacityToGrowth.
inline size_t GrowthToLowerboundCapacity(size_t growth) {
  if (Group::kWidth == 8 && growth == 7) {
    return 8;
  }
  return growth + static_cast<size_t>((static_cast<int64_t>(growth) - 1) / 7);
}

// Picks an initial bucket count for range construction: honors an explicit
// bucket_count; sizes from std::distance for random-access iterators; and
// otherwise returns 0 (grow on demand).
template <class InputIter>
size_t SelectBucketCountForIterRange(InputIter first, InputIter last,
                                     size_t bucket_count) {
  if (bucket_count != 0) {
    return bucket_count;
  }
  using InputIterCategory =
      typename std::iterator_traits<InputIter>::iterator_category;
  if (std::is_base_of<std::random_access_iterator_tag,
                      InputIterCategory>::value) {
    return GrowthToLowerboundCapacity(
        static_cast<size_t>(std::distance(first, last)));
  }
  return 0;
}

// Whether extra debug checking is on: generations, hardened mode, or a
// !NDEBUG build.
constexpr bool SwisstableDebugEnabled() {
#if defined(ABSL_SWISSTABLE_ENABLE_GENERATIONS) || \
    ABSL_OPTION_HARDENED == 1 || !defined(NDEBUG)
  return true;
#else
  return false;
#endif
}
// Fails fatally (in debug/hardened builds) unless `ctrl` points at a full
// slot; called before operations such as erase or dereference that require
// a valid element. `operation` names the calling API for the message.
inline void AssertIsFull(const ctrl_t* ctrl, GenerationType generation,
                         const GenerationType* generation_ptr,
                         const char* operation) {
  if (!SwisstableDebugEnabled()) return;
  // A null control pointer means an end() iterator.
  if (ABSL_PREDICT_FALSE(ctrl == nullptr)) {
    ABSL_RAW_LOG(FATAL, "%s called on end() iterator.", operation);
  }
  if (ABSL_PREDICT_FALSE(ctrl == EmptyGroup())) {
    ABSL_RAW_LOG(FATAL, "%s called on default-constructed iterator.",
                 operation);
  }
  if (SwisstableGenerationsEnabled()) {
    // With generations we can distinguish a rehash/move from an erase.
    if (ABSL_PREDICT_FALSE(generation != *generation_ptr)) {
      ABSL_RAW_LOG(FATAL,
                   "%s called on invalid iterator. The table could have "
                   "rehashed or moved since this iterator was initialized.",
                   operation);
    }
    if (ABSL_PREDICT_FALSE(!IsFull(*ctrl))) {
      ABSL_RAW_LOG(
          FATAL,
          "%s called on invalid iterator. The element was likely erased.",
          operation);
    }
  } else {
    // Without generations, erase and rehash are indistinguishable here.
    if (ABSL_PREDICT_FALSE(!IsFull(*ctrl))) {
      ABSL_RAW_LOG(
          FATAL,
          "%s called on invalid iterator. The element might have been erased "
          "or the table might have rehashed. Consider running with "
          "--config=asan to diagnose rehashing issues.",
          operation);
    }
  }
}

// Weaker than AssertIsFull: iterators used only in comparisons may also be
// end() or default-constructed.
inline void AssertIsValidForComparison(const ctrl_t* ctrl,
                                       GenerationType generation,
                                       const GenerationType* generation_ptr) {
  if (!SwisstableDebugEnabled()) return;
  const bool ctrl_is_valid_for_comparison =
      ctrl == nullptr || ctrl == EmptyGroup() || IsFull(*ctrl);
  if (SwisstableGenerationsEnabled()) {
    if (ABSL_PREDICT_FALSE(generation != *generation_ptr)) {
      ABSL_RAW_LOG(FATAL,
                   "Invalid iterator comparison. The table could have rehashed "
                   "or moved since this iterator was initialized.");
    }
    if (ABSL_PREDICT_FALSE(!ctrl_is_valid_for_comparison)) {
      ABSL_RAW_LOG(
          FATAL, "Invalid iterator comparison. The element was likely erased.");
    }
  } else {
    ABSL_HARDENING_ASSERT(
        ctrl_is_valid_for_comparison &&
        "Invalid iterator comparison. The element might have been erased or "
        "the table might have rehashed. Consider running with --config=asan to "
        "diagnose rehashing issues.");
  }
}
// Best-effort check that two non-end iterators come from the same table.
// Heuristic: a table's control array and slot array live in one backing
// allocation with control before slots, so after ordering the two pairs the
// later control pointer must still precede both slot pointers and the slot
// order must agree with the control order.
// NOTE(review): heuristic used only for assertions; it can accept iterators
// from different tables under unlucky address layouts.
inline bool AreItersFromSameContainer(const ctrl_t* ctrl_a,
                                      const ctrl_t* ctrl_b,
                                      const void* const& slot_a,
                                      const void* const& slot_b) {
  // An end() iterator (null control) cannot be attributed to any table.
  if (ctrl_a == nullptr || ctrl_b == nullptr) return true;
  const bool a_is_soo = IsSooControl(ctrl_a);
  if (a_is_soo != IsSooControl(ctrl_b)) return false;
  // SOO tables share the same control array; compare inline slots instead.
  if (a_is_soo) return slot_a == slot_b;

  const void* low_slot = slot_a;
  const void* hi_slot = slot_b;
  if (ctrl_a > ctrl_b) {
    std::swap(ctrl_a, ctrl_b);
    std::swap(low_slot, hi_slot);
  }
  return ctrl_b < low_slot && low_slot <= hi_slot;
}

// Asserts that the two iterators may be compared: same table, or
// end()/default-constructed. Uses generations when enabled, otherwise the
// layout heuristic above.
inline void AssertSameContainer(const ctrl_t* ctrl_a, const ctrl_t* ctrl_b,
                                const void* const& slot_a,
                                const void* const& slot_b,
                                const GenerationType* generation_ptr_a,
                                const GenerationType* generation_ptr_b) {
  if (!SwisstableDebugEnabled()) return;
  const auto fail_if = [](bool is_invalid, const char* message) {
    if (ABSL_PREDICT_FALSE(is_invalid)) {
      ABSL_RAW_LOG(FATAL, "Invalid iterator comparison. %s", message);
    }
  };

  const bool a_is_default = ctrl_a == EmptyGroup();
  const bool b_is_default = ctrl_b == EmptyGroup();
  if (a_is_default && b_is_default) return;
  fail_if(a_is_default != b_is_default,
          "Comparing default-constructed hashtable iterator with a "
          "non-default-constructed hashtable iterator.");

  if (SwisstableGenerationsEnabled()) {
    // Matching generation pointers imply the same table.
    if (ABSL_PREDICT_TRUE(generation_ptr_a == generation_ptr_b)) return;
    // From here on the iterators are known to differ; the remaining checks
    // only pick the most specific failure message.
    const bool a_is_soo = IsSooControl(ctrl_a);
    const bool b_is_soo = IsSooControl(ctrl_b);
    fail_if(a_is_soo != b_is_soo || (a_is_soo && b_is_soo),
            "Comparing iterators from different hashtables.");

    const bool a_is_empty = IsEmptyGeneration(generation_ptr_a);
    const bool b_is_empty = IsEmptyGeneration(generation_ptr_b);
    fail_if(a_is_empty != b_is_empty,
            "Comparing an iterator from an empty hashtable with an iterator "
            "from a non-empty hashtable.");
    fail_if(a_is_empty && b_is_empty,
            "Comparing iterators from different empty hashtables.");

    const bool a_is_end = ctrl_a == nullptr;
    const bool b_is_end = ctrl_b == nullptr;
    fail_if(a_is_end || b_is_end,
            "Comparing iterator with an end() iterator from a different "
            "hashtable.");
    fail_if(true, "Comparing non-end() iterators from different hashtables.");
  } else {
    ABSL_HARDENING_ASSERT(
        AreItersFromSameContainer(ctrl_a, ctrl_b, slot_a, slot_b) &&
        "Invalid iterator comparison. The iterators may be from different "
        "containers or the container might have rehashed or moved. Consider "
        "running with --config=asan to diagnose issues.");
  }
}
// Result of a probe: the slot offset found and the number of probe steps
// taken to reach it (probe_length feeds hashtablez statistics, see
// RecordInsert calls below).
struct FindInfo {
  size_t offset;
  size_t probe_length;
};
// A table is "small" when its capacity is below Group::kWidth - 1, i.e. a
// single (portable) group load can cover the whole table.
inline bool is_small(size_t capacity) { return capacity < Group::kWidth - 1; }
// Whether all slots fit in a single probe group (capacity <= Group::kWidth).
inline bool is_single_group(size_t capacity) {
  return capacity <= Group::kWidth;
}
// Begins the probe sequence for `hash` over the given control array and
// capacity. H1 folds `ctrl` in, which serves as a per-table seed.
inline probe_seq<Group::kWidth> probe(const ctrl_t* ctrl, const size_t capacity,
                                      size_t hash) {
  return probe_seq<Group::kWidth>(H1(hash, ctrl), capacity);
}
// Convenience overload reading control/capacity out of CommonFields.
inline probe_seq<Group::kWidth> probe(const CommonFields& common, size_t hash) {
  return probe(common.control(), common.capacity(), hash);
}
// Probes for `hash` and returns the first empty-or-deleted slot along with
// the probe length. Asserts (in debug builds) if the table is full.
template <typename = void>
inline FindInfo find_first_non_full(const CommonFields& common, size_t hash) {
  auto seq = probe(common, hash);
  const ctrl_t* ctrl = common.control();
  // Fast path: the very first probed byte is available and the insertion
  // heuristic does not direct us elsewhere in the group.
  if (IsEmptyOrDeleted(ctrl[seq.offset()]) &&
      !ShouldInsertBackwards(common.capacity(), hash, ctrl)) {
    return {seq.offset(), 0};
  }
  // Otherwise scan one group per probe step until a group with an
  // empty/deleted byte is found.
  while (true) {
    GroupFullEmptyOrDeleted g{ctrl + seq.offset()};
    auto mask = g.MaskEmptyOrDeleted();
    if (mask) {
      return {
          seq.offset(GetInsertionOffset(mask, common.capacity(), hash, ctrl)),
          seq.index()};
    }
    seq.next();
    assert(seq.index() <= common.capacity() && "full table!");
  }
}
extern template FindInfo find_first_non_full(const CommonFields&, size_t);
FindInfo find_first_non_full_outofline(const CommonFields&, size_t);
// Recomputes growth_left as the growth allowed at the current capacity minus
// the current size; valid only when the table has no kDeleted slots.
inline void ResetGrowthLeft(CommonFields& common) {
  common.growth_info().InitGrowthLeftNoDeleted(
      CapacityToGrowth(common.capacity()) - common.size());
}
// Resets every control byte to kEmpty (including the cloned bytes past the
// sentinel), writes the sentinel at `capacity`, and poisons all slots for
// sanitizers.
inline void ResetCtrl(CommonFields& common, size_t slot_size) {
  const size_t capacity = common.capacity();
  ctrl_t* ctrl = common.control();
  std::memset(ctrl, static_cast<int8_t>(ctrl_t::kEmpty),
              capacity + 1 + NumClonedBytes());
  ctrl[capacity] = ctrl_t::kSentinel;
  SanitizerPoisonMemoryRegion(common.slot_array(), slot_size * capacity);
}
// Updates sanitizer poisoning for slot `i` to match its new control byte
// `h`: full slots become readable, everything else is poisoned.
inline void DoSanitizeOnSetCtrl(const CommonFields& c, size_t i, ctrl_t h,
                                size_t slot_size) {
  assert(i < c.capacity());
  const char* const slots_base = static_cast<const char*>(c.slot_array());
  const char* const slot_ptr = slots_base + i * slot_size;
  if (!IsFull(h)) {
    SanitizerPoisonMemoryRegion(slot_ptr, slot_size);
  } else {
    SanitizerUnpoisonMemoryRegion(slot_ptr, slot_size);
  }
}
// Sets ctrl[i] to `h` and mirrors the write into the cloned control bytes so
// that group loads which wrap around the end of the array observe it too.
inline void SetCtrl(const CommonFields& c, size_t i, ctrl_t h,
                    size_t slot_size) {
  DoSanitizeOnSetCtrl(c, i, h, slot_size);
  ctrl_t* ctrl = c.control();
  ctrl[i] = h;
  // Mirror index: capacities have the form 2^k - 1, so `& c.capacity()` acts
  // as a mask. For i < NumClonedBytes() this resolves to the cloned byte
  // past the sentinel; for larger i it collapses back to i itself.
  ctrl[((i - NumClonedBytes()) & c.capacity()) +
       (NumClonedBytes() & c.capacity())] = h;
}
// Overload taking the H2 hash fragment instead of a raw control byte.
inline void SetCtrl(const CommonFields& c, size_t i, h2_t h, size_t slot_size) {
  SetCtrl(c, i, static_cast<ctrl_t>(h), slot_size);
}
// Like SetCtrl, but specialized for tables occupying a single group, where
// the cloned control byte is always at i + capacity + 1.
inline void SetCtrlInSingleGroupTable(const CommonFields& c, size_t i, ctrl_t h,
                                      size_t slot_size) {
  assert(is_single_group(c.capacity()));
  DoSanitizeOnSetCtrl(c, i, h, slot_size);
  ctrl_t* ctrl = c.control();
  ctrl[i] = h;
  ctrl[i + c.capacity() + 1] = h;
}
// Overload taking the H2 hash fragment instead of a raw control byte.
inline void SetCtrlInSingleGroupTable(const CommonFields& c, size_t i, h2_t h,
                                      size_t slot_size) {
  SetCtrlInSingleGroupTable(c, i, static_cast<ctrl_t>(h), slot_size);
}
// Alignment required for the backing array: the slot alignment, but never
// below that of GrowthInfo.
constexpr size_t BackingArrayAlignment(size_t align_of_slot) {
  return align_of_slot > alignof(GrowthInfo) ? align_of_slot
                                             : alignof(GrowthInfo);
}
// Returns the address of the `slot`-th slot in an array of slots that are
// each `slot_size` bytes, starting at `slot_array`.
inline void* SlotAddress(void* slot_array, size_t slot, size_t slot_size) {
  char* const base = static_cast<char*>(slot_array);
  return base + slot * slot_size;
}
// Invokes `cb(const ctrl_t*, SlotType*)` for every full slot. The callback
// must not grow/shrink/rehash the table (asserted in debug builds).
template <class SlotType, class Callback>
ABSL_ATTRIBUTE_ALWAYS_INLINE inline void IterateOverFullSlots(
    const CommonFields& c, SlotType* slot, Callback cb) {
  const size_t cap = c.capacity();
  const ctrl_t* ctrl = c.control();
  if (is_small(cap)) {
    // Small tables: a single portable-group load starting at the sentinel
    // (ctrl + cap) reads the cloned control bytes, which mirror bytes
    // 0..cap-1. Mask bit i therefore corresponds to slot i - 1; the
    // --ctrl/--slot decrements compensate for that off-by-one.
    assert(cap <= GroupPortableImpl::kWidth &&
           "unexpectedly large small capacity");
    static_assert(Group::kWidth >= GroupPortableImpl::kWidth,
                  "unexpected group width");
    const auto mask = GroupPortableImpl(ctrl + cap).MaskFull();
    --ctrl;
    --slot;
    for (uint32_t i : mask) {
      cb(ctrl + i, slot + i);
    }
    return;
  }
  // Large tables: scan group by group and stop once all size() full slots
  // have been delivered.
  size_t remaining = c.size();
  ABSL_ATTRIBUTE_UNUSED const size_t original_size_for_assert = remaining;
  while (remaining != 0) {
    for (uint32_t i : GroupFullEmptyOrDeleted(ctrl).MaskFull()) {
      assert(IsFull(ctrl[i]) && "hash table was modified unexpectedly");
      cb(ctrl + i, slot + i);
      --remaining;
    }
    ctrl += Group::kWidth;
    slot += Group::kWidth;
    // Running past the sentinel with work remaining means the callback
    // mutated the table.
    assert((remaining == 0 || *(ctrl - 1) != ctrl_t::kSentinel) &&
           "hash table was modified unexpectedly");
  }
  assert(original_size_for_assert >= c.size() &&
         "hash table was modified unexpectedly");
}
// Returns true iff `CharAlloc` is exactly std::allocator<char>; hashtablez
// sampling is only attempted for tables using the default allocator.
template <typename CharAlloc>
constexpr bool ShouldSampleHashtablezInfo() {
  using DefaultCharAlloc = std::allocator<char>;
  return std::is_same<CharAlloc, DefaultCharAlloc>::value;
}
// Chooses the hashtablez sampling handle to use during a resize. A fresh
// sample is only attempted for the first real allocation (growing out of an
// empty SOO table, or out of capacity 0 when SOO is disabled); otherwise the
// table's existing handle is kept.
template <bool kSooEnabled>
HashtablezInfoHandle SampleHashtablezInfo(size_t sizeof_slot, size_t sizeof_key,
                                          size_t sizeof_value,
                                          size_t old_capacity, bool was_soo,
                                          HashtablezInfoHandle forced_infoz,
                                          CommonFields& c) {
  // A sample forced by the caller wins outright.
  if (forced_infoz.IsSampled()) return forced_infoz;
  // Growing from an empty SOO table: sample, reporting the SOO capacity.
  if (kSooEnabled && was_soo && c.size() == 0) {
    return Sample(sizeof_slot, sizeof_key, sizeof_value, SooCapacity());
  }
  // First allocation of a non-SOO table.
  if (!kSooEnabled && old_capacity == 0) {
    return Sample(sizeof_slot, sizeof_key, sizeof_value, 0);
  }
  return c.infoz();
}
// Bundles the state and steps of resizing a raw_hash_set's backing array:
// the pre-resize capacity/storage plus flags describing the old state (SOO,
// sampling), and the optimized "grow into a single group" code paths.
class HashSetResizeHelper {
 public:
  explicit HashSetResizeHelper(CommonFields& c, bool was_soo, bool had_soo_slot,
                               HashtablezInfoHandle forced_infoz)
      : old_capacity_(c.capacity()),
        had_infoz_(c.has_infoz()),
        was_soo_(was_soo),
        had_soo_slot_(had_soo_slot),
        forced_infoz_(forced_infoz) {}

  // Finds the first non-full slot for `hash` right after a resize from
  // `old_capacity`. On the single-group growth path the control bytes were
  // shuffled deterministically, so the slot can be computed without a full
  // probe.
  static FindInfo FindFirstNonFullAfterResize(const CommonFields& c,
                                              size_t old_capacity,
                                              size_t hash) {
    if (!IsGrowingIntoSingleGroupApplicable(old_capacity, c.capacity())) {
      return find_first_non_full(c, hash);
    }
    size_t offset = probe(c, hash).offset();
    // Use the probed offset only if it falls in the newly added range
    // (old_capacity + 1 .. 2 * old_capacity); otherwise fall back to
    // old_capacity / 2, which the shuffle is expected to leave empty
    // (asserted below).
    if (offset - (old_capacity + 1) >= old_capacity) {
      offset = old_capacity / 2;
    }
    assert(IsEmpty(c.control()[offset]));
    return FindInfo{offset, 0};
  }

  HeapOrSoo& old_heap_or_soo() { return old_heap_or_soo_; }
  void* old_soo_data() { return old_heap_or_soo_.get_soo_data(); }
  // Accessors for the pre-resize heap storage; invalid for SOO tables.
  ctrl_t* old_ctrl() const {
    assert(!was_soo_);
    return old_heap_or_soo_.control();
  }
  void* old_slots() const {
    assert(!was_soo_);
    return old_heap_or_soo_.slot_array().get();
  }
  size_t old_capacity() const { return old_capacity_; }

  // Index in the new backing array that receives the element previously
  // stored inline (SOO).
  static size_t SooSlotIndex() { return 1; }

  // Allocates and initializes the new backing array: control bytes, slots,
  // generation pointer, growth bookkeeping, and (optionally) hashtablez
  // info. Returns whether the single-group growth path was taken; on the
  // memcpy-transferable variant of that path the slots have already been
  // moved and the old array freed before returning.
  template <typename Alloc, size_t SizeOfSlot, bool TransferUsesMemcpy,
            bool SooEnabled, size_t AlignOfSlot>
  ABSL_ATTRIBUTE_NOINLINE bool InitializeSlots(CommonFields& c, Alloc alloc,
                                               ctrl_t soo_slot_h2,
                                               size_t key_size,
                                               size_t value_size) {
    assert(c.capacity());
    // Sampling is only attempted for the default allocator.
    HashtablezInfoHandle infoz =
        ShouldSampleHashtablezInfo<Alloc>()
            ? SampleHashtablezInfo<SooEnabled>(SizeOfSlot, key_size, value_size,
                                               old_capacity_, was_soo_,
                                               forced_infoz_, c)
            : HashtablezInfoHandle{};

    const bool has_infoz = infoz.IsSampled();
    RawHashSetLayout layout(c.capacity(), AlignOfSlot, has_infoz);
    char* mem = static_cast<char*>(Allocate<BackingArrayAlignment(AlignOfSlot)>(
        &alloc, layout.alloc_size(SizeOfSlot)));
    // Advance the generation so outstanding iterators can be diagnosed as
    // invalidated by this resize.
    const GenerationType old_generation = c.generation();
    c.set_generation_ptr(
        reinterpret_cast<GenerationType*>(mem + layout.generation_offset()));
    c.set_generation(NextGeneration(old_generation));
    c.set_control(reinterpret_cast<ctrl_t*>(mem + layout.control_offset()));
    c.set_slots(mem + layout.slot_offset());
    ResetGrowthLeft(c);

    const bool grow_single_group =
        IsGrowingIntoSingleGroupApplicable(old_capacity_, layout.capacity());
    if (SooEnabled && was_soo_ && grow_single_group) {
      // SOO -> single group: seed the control bytes; memcpy the SOO slot in
      // when the slot type allows it (otherwise the caller transfers it).
      InitControlBytesAfterSoo(c.control(), soo_slot_h2, layout.capacity());
      if (TransferUsesMemcpy && had_soo_slot_) {
        TransferSlotAfterSoo(c, SizeOfSlot);
      }
    } else if ((SooEnabled || old_capacity_ != 0) && grow_single_group) {
      if (TransferUsesMemcpy) {
        // Fast path: both control bytes and slots are shuffled by raw copy
        // and the old array is freed immediately.
        GrowSizeIntoSingleGroupTransferable(c, SizeOfSlot);
        DeallocateOld<AlignOfSlot>(alloc, SizeOfSlot);
      } else {
        // Only shuffle the control bytes here; the caller must transfer the
        // slots (GrowSizeIntoSingleGroup) and free the old array.
        GrowIntoSingleGroupShuffleControlBytes(c.control(), layout.capacity());
      }
    } else {
      ResetCtrl(c, SizeOfSlot);
    }

    c.set_has_infoz(has_infoz);
    if (has_infoz) {
      infoz.RecordStorageChanged(c.size(), layout.capacity());
      if ((SooEnabled && was_soo_) || grow_single_group || old_capacity_ == 0) {
        infoz.RecordRehash(0);
      }
      c.set_infoz(infoz);
    }
    return grow_single_group;
  }

  // Moves full slots from the old (small) table into the new single-group
  // table via the policy's transfer, mirroring the control-byte shuffle:
  // new index = old index XOR shuffle_bit.
  template <class PolicyTraits, class Alloc>
  void GrowSizeIntoSingleGroup(CommonFields& c, Alloc& alloc_ref) {
    assert(old_capacity_ < Group::kWidth / 2);
    assert(IsGrowingIntoSingleGroupApplicable(old_capacity_, c.capacity()));
    using slot_type = typename PolicyTraits::slot_type;
    assert(is_single_group(c.capacity()));

    auto* new_slots = static_cast<slot_type*>(c.slot_array());
    auto* old_slots_ptr = static_cast<slot_type*>(old_slots());

    size_t shuffle_bit = old_capacity_ / 2 + 1;
    for (size_t i = 0; i < old_capacity_; ++i) {
      if (IsFull(old_ctrl()[i])) {
        size_t new_i = i ^ shuffle_bit;
        SanitizerUnpoisonMemoryRegion(new_slots + new_i, sizeof(slot_type));
        PolicyTraits::transfer(&alloc_ref, new_slots + new_i,
                               old_slots_ptr + i);
      }
    }
    PoisonSingleGroupEmptySlots(c, sizeof(slot_type));
  }

  // Frees the old backing array (including its control bytes / metadata).
  template <size_t AlignOfSlot, class CharAlloc>
  void DeallocateOld(CharAlloc alloc_ref, size_t slot_size) {
    SanitizerUnpoisonMemoryRegion(old_slots(), slot_size * old_capacity_);
    auto layout = RawHashSetLayout(old_capacity_, AlignOfSlot, had_infoz_);
    Deallocate<BackingArrayAlignment(AlignOfSlot)>(
        &alloc_ref, old_ctrl() - layout.control_offset(),
        layout.alloc_size(slot_size));
  }

 private:
  // The single-group fast path applies only when actually growing into a
  // capacity that fits one group.
  static bool IsGrowingIntoSingleGroupApplicable(size_t old_capacity,
                                                 size_t new_capacity) {
    return is_single_group(new_capacity) && old_capacity < new_capacity;
  }

  // Declared here, defined out of line.
  void GrowSizeIntoSingleGroupTransferable(CommonFields& c, size_t slot_size);
  void TransferSlotAfterSoo(CommonFields& c, size_t slot_size);
  void GrowIntoSingleGroupShuffleControlBytes(ctrl_t* new_ctrl,
                                              size_t new_capacity) const;
  void InitControlBytesAfterSoo(ctrl_t* new_ctrl, ctrl_t h2,
                                size_t new_capacity);
  void GrowIntoSingleGroupShuffleTransferableSlots(void* new_slots,
                                                   size_t slot_size) const;

  // Poisons every non-full slot of the new table for sanitizers.
  void PoisonSingleGroupEmptySlots(CommonFields& c, size_t slot_size) const {
    for (size_t i = 0; i < c.capacity(); ++i) {
      if (!IsFull(c.control()[i])) {
        SanitizerPoisonMemoryRegion(SlotAddress(c.slot_array(), i, slot_size),
                                    slot_size);
      }
    }
  }

  HeapOrSoo old_heap_or_soo_;
  size_t old_capacity_;
  bool had_infoz_;
  bool was_soo_;
  bool had_soo_slot_;
  // Sampling handle forced by the caller, if any.
  HashtablezInfoHandle forced_infoz_;
};
// Bookkeeping shared by all insertion paths: bumps the stored size and,
// when generations are enabled, possibly the table generation.
inline void PrepareInsertCommon(CommonFields& common) {
  common.increment_size();
  common.maybe_increment_generation_on_insert();
}
size_t PrepareInsertAfterSoo(size_t hash, size_t slot_size,
CommonFields& common);
// Type-erased policy operations consumed by the out-of-line raw_hash_set
// helpers (ClearBackingArray, PrepareInsertNonSoo, ...).
struct PolicyFunctions {
  size_t slot_size;

  // Returns the (type-erased) hash functor stored in `common`.
  const void* (*hash_fn)(const CommonFields& common);
  // Hashes the element in `slot` using `hash_fn`.
  size_t (*hash_slot)(const void* hash_fn, void* slot);
  // Transfers the contents of `src_slot` into `dst_slot`.
  void (*transfer)(void* set, void* dst_slot, void* src_slot);
  // Frees the table's backing array.
  void (*dealloc)(CommonFields& common, const PolicyFunctions& policy);
  // Resizes the table to `new_capacity`.
  void (*resize)(CommonFields& common, size_t new_capacity,
                 HashtablezInfoHandle forced_infoz);
};
void ClearBackingArray(CommonFields& c, const PolicyFunctions& policy,
bool reuse, bool soo_enabled);
void EraseMetaOnly(CommonFields& c, size_t index, size_t slot_size);
// PolicyFunctions::dealloc implementation for tables that use
// std::allocator<char>: unpoisons the slots, unregisters sampling, and
// frees the whole backing array.
template <size_t AlignOfSlot>
ABSL_ATTRIBUTE_NOINLINE void DeallocateStandard(CommonFields& common,
                                                const PolicyFunctions& policy) {
  // Unpoison before handing the memory back to the allocator.
  SanitizerUnpoisonMemoryRegion(common.slot_array(),
                                policy.slot_size * common.capacity());
  std::allocator<char> alloc;
  common.infoz().Unregister();
  Deallocate<BackingArrayAlignment(AlignOfSlot)>(
      &alloc, common.backing_array_start(),
      common.alloc_size(policy.slot_size, AlignOfSlot));
}
// Type-erased slot transfer for trivially relocatable slots: a raw copy of
// SizeOfSlot bytes from `src` to `dst`. The first (set) parameter is unused;
// it exists to match PolicyFunctions::transfer's signature.
template <size_t SizeOfSlot>
ABSL_ATTRIBUTE_NOINLINE void TransferRelocatable(void*, void* dst, void* src) {
  // Use the std-qualified name: this header includes only <cstring>, which
  // is not guaranteed to declare ::memcpy, and the rest of this file already
  // uses the std:: spellings (e.g. std::memset in ResetCtrl).
  std::memcpy(dst, src, SizeOfSlot);
}
const void* GetHashRefForEmptyHasher(const CommonFields& common);
size_t PrepareInsertNonSoo(CommonFields& common, size_t hash, FindInfo target,
const PolicyFunctions& policy);
template <class Policy, class Hash, class Eq, class Alloc>
class raw_hash_set {
  using PolicyTraits = hash_policy_traits<Policy>;
  // Heterogeneous lookup (key_arg) is enabled only when both the hasher and
  // the equality functor are transparent.
  using KeyArgImpl =
      KeyArg<IsTransparent<Eq>::value && IsTransparent<Hash>::value>;

 public:
  // Standard container member types, mostly forwarded from the policy.
  using init_type = typename PolicyTraits::init_type;
  using key_type = typename PolicyTraits::key_type;
  using slot_type = typename PolicyTraits::slot_type;
  using allocator_type = Alloc;
  using size_type = size_t;
  using difference_type = ptrdiff_t;
  using hasher = Hash;
  using key_equal = Eq;
  using policy_type = Policy;
  using value_type = typename PolicyTraits::value_type;
  using reference = value_type&;
  using const_reference = const value_type&;
  using pointer = typename absl::allocator_traits<
      allocator_type>::template rebind_traits<value_type>::pointer;
  using const_pointer = typename absl::allocator_traits<
      allocator_type>::template rebind_traits<value_type>::const_pointer;

  // key_arg<K> resolves to K for transparent functors, else to key_type.
  template <class K>
  using key_arg = typename KeyArgImpl::template type<K, key_type>;
 private:
  // Whether small-object optimization (storing one slot inline in the table
  // object) is enabled: the policy must allow it and the slot must fit in,
  // and be no more aligned than, HeapOrSoo.
  constexpr static bool SooEnabled() {
    return PolicyTraits::soo_enabled() &&
           sizeof(slot_type) <= sizeof(HeapOrSoo) &&
           alignof(slot_type) <= alignof(HeapOrSoo);
  }
  // Whether `size` elements fit in the SOO storage.
  bool fits_in_soo(size_t size) const {
    return SooEnabled() && size <= SooCapacity();
  }
  // Whether the table is currently in SOO mode.
  bool is_soo() const { return fits_in_soo(capacity()); }
  bool is_full_soo() const { return is_soo() && !empty(); }

  // Declared but never defined: used only in unevaluated contexts to check
  // that Hash/Eq are applicable to key_type.
  auto KeyTypeCanBeHashed(const Hash& h, const key_type& k) -> decltype(h(k));
  auto KeyTypeCanBeEq(const Eq& eq, const key_type& k) -> decltype(eq(k, k));

  using AllocTraits = absl::allocator_traits<allocator_type>;
  using SlotAlloc = typename absl::allocator_traits<
      allocator_type>::template rebind_alloc<slot_type>;
  // Allocator rebound to char, used for the raw backing-array allocation.
  using CharAlloc =
      typename absl::allocator_traits<Alloc>::template rebind_alloc<char>;
  using SlotAllocTraits = typename absl::allocator_traits<
      allocator_type>::template rebind_traits<slot_type>;

  static_assert(std::is_lvalue_reference<reference>::value,
                "Policy::element() must return a reference");

  // True when T is, modulo cv/ref qualification, the element type itself.
  template <typename T>
  struct SameAsElementReference
      : std::is_same<typename std::remove_cv<
                         typename std::remove_reference<reference>::type>::type,
                     typename std::remove_cv<
                         typename std::remove_reference<T>::type>::type> {};

  // SFINAE guard: T is insertable if it converts to init_type or is already
  // the element type.
  template <class T>
  using RequiresInsertable = typename std::enable_if<
      absl::disjunction<std::is_convertible<T, init_type>,
                        SameAsElementReference<T>>::value,
      int>::type;

  // SFINAE guard used to keep the templated overloads from hijacking
  // init_type arguments.
  template <class T>
  using RequiresNotInit =
      typename std::enable_if<!std::is_same<T, init_type>::value, int>::type;

  template <class... Ts>
  using IsDecomposable = IsDecomposable<void, PolicyTraits, Hash, Eq, Ts...>;

 public:
  static_assert(std::is_same<pointer, value_type*>::value,
                "Allocators with custom pointer types are not supported");
  static_assert(std::is_same<const_pointer, const value_type*>::value,
                "Allocators with custom pointer types are not supported");
  // Forward iterator over the full slots of the table. Carries generation
  // info (when enabled) so invalidation can be diagnosed in debug builds.
  class iterator : private HashSetIteratorGenerationInfo {
    friend class raw_hash_set;
    friend struct HashtableFreeFunctionsAccess;

   public:
    using iterator_category = std::forward_iterator_tag;
    using value_type = typename raw_hash_set::value_type;
    // Sets with constant_iterators expose only const references.
    using reference =
        absl::conditional_t<PolicyTraits::constant_iterators::value,
                            const value_type&, value_type&>;
    using pointer = absl::remove_reference_t<reference>*;
    using difference_type = typename raw_hash_set::difference_type;

    iterator() {}

    // PRECONDITION (asserted): the iterator refers to a full slot.
    reference operator*() const {
      AssertIsFull(ctrl_, generation(), generation_ptr(), "operator*()");
      return unchecked_deref();
    }
    pointer operator->() const {
      AssertIsFull(ctrl_, generation(), generation_ptr(), "operator->");
      return &operator*();
    }
    // Advances to the next full slot; becomes end() when the sentinel
    // control byte is reached (end() is represented by null ctrl_).
    iterator& operator++() {
      AssertIsFull(ctrl_, generation(), generation_ptr(), "operator++");
      ++ctrl_;
      ++slot_;
      skip_empty_or_deleted();
      if (ABSL_PREDICT_FALSE(*ctrl_ == ctrl_t::kSentinel)) ctrl_ = nullptr;
      return *this;
    }
    iterator operator++(int) {
      auto tmp = *this;
      ++*this;
      return tmp;
    }

    friend bool operator==(const iterator& a, const iterator& b) {
      // Debug checks that both iterators are comparable and belong to the
      // same table; no-ops in release builds.
      AssertIsValidForComparison(a.ctrl_, a.generation(), a.generation_ptr());
      AssertIsValidForComparison(b.ctrl_, b.generation(), b.generation_ptr());
      AssertSameContainer(a.ctrl_, b.ctrl_, a.slot_, b.slot_,
                          a.generation_ptr(), b.generation_ptr());
      return a.ctrl_ == b.ctrl_;
    }
    friend bool operator!=(const iterator& a, const iterator& b) {
      return !(a == b);
    }

   private:
    iterator(ctrl_t* ctrl, slot_type* slot,
             const GenerationType* generation_ptr)
        : HashSetIteratorGenerationInfo(generation_ptr),
          ctrl_(ctrl),
          slot_(slot) {
      ABSL_ASSUME(ctrl != nullptr);
    }
    iterator(ctrl_t* ctrl, MaybeInitializedPtr slot,
             const GenerationType* generation_ptr)
        : HashSetIteratorGenerationInfo(generation_ptr),
          ctrl_(ctrl),
          slot_(to_slot(slot.get())) {
      ABSL_ASSUME(ctrl != nullptr);
    }
    // Constructs an end() iterator: ctrl_ is null, slot_ stays
    // uninitialized.
    explicit iterator(const GenerationType* generation_ptr)
        : HashSetIteratorGenerationInfo(generation_ptr), ctrl_(nullptr) {}

    // Skips runs of empty/deleted control bytes a whole group at a time.
    void skip_empty_or_deleted() {
      while (IsEmptyOrDeleted(*ctrl_)) {
        uint32_t shift =
            GroupFullEmptyOrDeleted{ctrl_}.CountLeadingEmptyOrDeleted();
        ctrl_ += shift;
        slot_ += shift;
      }
    }

    ctrl_t* control() const { return ctrl_; }
    slot_type* slot() const { return slot_; }

    // EmptyGroup() marks a default-constructed iterator.
    ctrl_t* ctrl_ = EmptyGroup();
    // Wrapped in an anonymous union so it may legitimately remain
    // uninitialized for default/end() iterators (see the generation-only
    // constructor above).
    union {
      slot_type* slot_;
    };

    // Equality without the debug assertions, for internal callers.
    bool unchecked_equals(const iterator& b) { return ctrl_ == b.control(); }
    // Dereference without the debug assertions.
    reference unchecked_deref() const { return PolicyTraits::element(slot_); }
  };
  // const_iterator wraps the mutable iterator and exposes const access.
  class const_iterator {
    friend class raw_hash_set;
    template <class Container, typename Enabler>
    friend struct absl::container_internal::hashtable_debug_internal::
        HashtableDebugAccess;

   public:
    using iterator_category = typename iterator::iterator_category;
    using value_type = typename raw_hash_set::value_type;
    using reference = typename raw_hash_set::const_reference;
    using pointer = typename raw_hash_set::const_pointer;
    using difference_type = typename raw_hash_set::difference_type;

    const_iterator() = default;
    // Implicit conversion from iterator, as the standard requires.
    const_iterator(iterator i) : inner_(std::move(i)) {}

    reference operator*() const { return *inner_; }
    pointer operator->() const { return inner_.operator->(); }
    const_iterator& operator++() {
      ++inner_;
      return *this;
    }
    const_iterator operator++(int) { return inner_++; }

    friend bool operator==(const const_iterator& a, const const_iterator& b) {
      return a.inner_ == b.inner_;
    }
    friend bool operator!=(const const_iterator& a, const const_iterator& b) {
      return !(a == b);
    }

   private:
    const_iterator(const ctrl_t* ctrl, const slot_type* slot,
                   const GenerationType* gen)
        : inner_(const_cast<ctrl_t*>(ctrl), const_cast<slot_type*>(slot), gen) {
    }
    ctrl_t* control() const { return inner_.control(); }
    slot_type* slot() const { return inner_.slot(); }

    // The wrapped mutable iterator.
    iterator inner_;

    bool unchecked_equals(const const_iterator& b) {
      return inner_.unchecked_equals(b.inner_);
    }
  };
  using node_type = node_handle<Policy, hash_policy_traits<Policy>, Alloc>;
  using insert_return_type = InsertReturnType<iterator, node_type>;

  // Default constructor; performs no allocation.
  raw_hash_set() noexcept(
      std::is_nothrow_default_constructible<hasher>::value &&
      std::is_nothrow_default_constructible<key_equal>::value &&
      std::is_nothrow_default_constructible<allocator_type>::value) {}
  // Constructs a table with room for at least `bucket_count` elements.
  ABSL_ATTRIBUTE_NOINLINE explicit raw_hash_set(
      size_t bucket_count, const hasher& hash = hasher(),
      const key_equal& eq = key_equal(),
      const allocator_type& alloc = allocator_type())
      : settings_(CommonFields::CreateDefault<SooEnabled()>(), hash, eq,
                  alloc) {
    // Only allocate when the request exceeds what the SOO buffer (or zero
    // capacity) already provides.
    if (bucket_count > (SooEnabled() ? SooCapacity() : 0)) {
      resize(NormalizeCapacity(bucket_count));
    }
  }
  // Delegating constructors covering the standard unordered-container
  // overload set (bucket count / hasher / allocator / iterator-range /
  // initializer-list combinations). All funnel into the primary
  // bucket_count constructor above.
  raw_hash_set(size_t bucket_count, const hasher& hash,
               const allocator_type& alloc)
      : raw_hash_set(bucket_count, hash, key_equal(), alloc) {}

  raw_hash_set(size_t bucket_count, const allocator_type& alloc)
      : raw_hash_set(bucket_count, hasher(), key_equal(), alloc) {}

  explicit raw_hash_set(const allocator_type& alloc)
      : raw_hash_set(0, hasher(), key_equal(), alloc) {}

  template <class InputIter>
  raw_hash_set(InputIter first, InputIter last, size_t bucket_count = 0,
               const hasher& hash = hasher(), const key_equal& eq = key_equal(),
               const allocator_type& alloc = allocator_type())
      : raw_hash_set(SelectBucketCountForIterRange(first, last, bucket_count),
                     hash, eq, alloc) {
    insert(first, last);
  }

  template <class InputIter>
  raw_hash_set(InputIter first, InputIter last, size_t bucket_count,
               const hasher& hash, const allocator_type& alloc)
      : raw_hash_set(first, last, bucket_count, hash, key_equal(), alloc) {}

  template <class InputIter>
  raw_hash_set(InputIter first, InputIter last, size_t bucket_count,
               const allocator_type& alloc)
      : raw_hash_set(first, last, bucket_count, hasher(), key_equal(), alloc) {}

  template <class InputIter>
  raw_hash_set(InputIter first, InputIter last, const allocator_type& alloc)
      : raw_hash_set(first, last, 0, hasher(), key_equal(), alloc) {}

  // Initializer-list constructors come in pairs: one constrained template
  // for insertable T, one non-template for init_type (so brace lists of
  // init_type prefer the non-template).
  template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
  raw_hash_set(std::initializer_list<T> init, size_t bucket_count = 0,
               const hasher& hash = hasher(), const key_equal& eq = key_equal(),
               const allocator_type& alloc = allocator_type())
      : raw_hash_set(init.begin(), init.end(), bucket_count, hash, eq, alloc) {}

  raw_hash_set(std::initializer_list<init_type> init, size_t bucket_count = 0,
               const hasher& hash = hasher(), const key_equal& eq = key_equal(),
               const allocator_type& alloc = allocator_type())
      : raw_hash_set(init.begin(), init.end(), bucket_count, hash, eq, alloc) {}

  template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
  raw_hash_set(std::initializer_list<T> init, size_t bucket_count,
               const hasher& hash, const allocator_type& alloc)
      : raw_hash_set(init, bucket_count, hash, key_equal(), alloc) {}

  raw_hash_set(std::initializer_list<init_type> init, size_t bucket_count,
               const hasher& hash, const allocator_type& alloc)
      : raw_hash_set(init, bucket_count, hash, key_equal(), alloc) {}

  template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
  raw_hash_set(std::initializer_list<T> init, size_t bucket_count,
               const allocator_type& alloc)
      : raw_hash_set(init, bucket_count, hasher(), key_equal(), alloc) {}

  raw_hash_set(std::initializer_list<init_type> init, size_t bucket_count,
               const allocator_type& alloc)
      : raw_hash_set(init, bucket_count, hasher(), key_equal(), alloc) {}

  template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
  raw_hash_set(std::initializer_list<T> init, const allocator_type& alloc)
      : raw_hash_set(init, 0, hasher(), key_equal(), alloc) {}

  raw_hash_set(std::initializer_list<init_type> init,
               const allocator_type& alloc)
      : raw_hash_set(init, 0, hasher(), key_equal(), alloc) {}
  raw_hash_set(const raw_hash_set& that)
      : raw_hash_set(that, AllocTraits::select_on_container_copy_construction(
                               that.alloc_ref())) {}

  // Allocator-extended copy constructor: pre-sizes to avoid rehashes and
  // inserts without per-element duplicate lookups (source keys are known to
  // be distinct).
  raw_hash_set(const raw_hash_set& that, const allocator_type& a)
      : raw_hash_set(GrowthToLowerboundCapacity(that.size()), that.hash_ref(),
                     that.eq_ref(), a) {
    const size_t size = that.size();
    if (size == 0) {
      return;
    }
    // SOO destination: copy the single element into the inline slot.
    if (fits_in_soo(size)) {
      assert(size == 1);
      common().set_full_soo();
      emplace_at(soo_iterator(), *that.begin());
      const HashtablezInfoHandle infoz = try_sample_soo();
      if (infoz.IsSampled()) resize_with_soo_infoz(infoz);
      return;
    }
    assert(!that.is_soo());
    const size_t cap = capacity();
    // Single-group destination: instead of hashing each element, step a
    // salt-derived odd stride over the slots (an odd stride modulo the
    // power-of-two cap + 1 visits every slot). Otherwise probe per element.
    size_t offset = cap;
    const size_t shift =
        is_single_group(cap) ? (PerTableSalt(control()) | 1) : 0;
    IterateOverFullSlots(
        that.common(), that.slot_array(),
        [&](const ctrl_t* that_ctrl,
            slot_type* that_slot) ABSL_ATTRIBUTE_ALWAYS_INLINE {
          if (shift == 0) {
            // Big tables: probe with the element's hash.
            const size_t hash = PolicyTraits::apply(
                HashElement{hash_ref()}, PolicyTraits::element(that_slot));
            FindInfo target = find_first_non_full_outofline(common(), hash);
            infoz().RecordInsert(hash, target.probe_length);
            offset = target.offset;
          } else {
            // Small tables: next slot in the stride sequence.
            offset = (offset + shift) & cap;
          }
          // Reuse the source's H2 control byte rather than re-hashing.
          const h2_t h2 = static_cast<h2_t>(*that_ctrl);
          assert( H2(PolicyTraits::apply(HashElement{hash_ref()},
                        PolicyTraits::element(that_slot))) == h2 &&
                 "hash function value changed unexpectedly during the copy");
          SetCtrl(common(), offset, h2, sizeof(slot_type));
          emplace_at(iterator_at(offset), PolicyTraits::element(that_slot));
          common().maybe_increment_generation_on_insert();
        });
    if (shift != 0) {
      // On the stride path no RecordInsert was issued; record storage once.
      infoz().RecordStorageChanged(size, cap);
    }
    common().set_size(size);
    growth_info().OverwriteManyEmptyAsFull(size);
  }
  // Move constructor. When the slot type is memcpy-transferable or the
  // source is not a full SOO table, the whole CommonFields is moved;
  // otherwise *this starts as full-SOO and the single slot is transferred
  // explicitly through the policy.
  ABSL_ATTRIBUTE_NOINLINE raw_hash_set(raw_hash_set&& that) noexcept(
      std::is_nothrow_copy_constructible<hasher>::value &&
      std::is_nothrow_copy_constructible<key_equal>::value &&
      std::is_nothrow_copy_constructible<allocator_type>::value)
      : settings_(PolicyTraits::transfer_uses_memcpy() || !that.is_full_soo()
                      ? std::move(that.common())
                      : CommonFields{full_soo_tag_t{}},
                  that.hash_ref(), that.eq_ref(), that.alloc_ref()) {
    if (!PolicyTraits::transfer_uses_memcpy() && that.is_full_soo()) {
      transfer(soo_slot(), that.soo_slot());
    }
    // Leave the source in a valid default-constructed state.
    that.common() = CommonFields::CreateDefault<SooEnabled()>();
    maybe_increment_generation_or_rehash_on_move();
  }

  // Allocator-extended move: steal the backing storage only when the
  // allocators compare equal; otherwise move element-by-element.
  raw_hash_set(raw_hash_set&& that, const allocator_type& a)
      : settings_(CommonFields::CreateDefault<SooEnabled()>(), that.hash_ref(),
                  that.eq_ref(), a) {
    if (a == that.alloc_ref()) {
      swap_common(that);
      maybe_increment_generation_or_rehash_on_move();
    } else {
      move_elements_allocs_unequal(std::move(that));
    }
  }
  // Copy assignment via copy-and-move: build a temporary with the correct
  // allocator, then move-assign it into *this.
  raw_hash_set& operator=(const raw_hash_set& that) {
    if (ABSL_PREDICT_FALSE(this == &that)) return *this;
    constexpr bool propagate_alloc =
        AllocTraits::propagate_on_container_copy_assignment::value;
    raw_hash_set tmp(that, propagate_alloc ? that.alloc_ref() : alloc_ref());
    return assign_impl<propagate_alloc>(std::move(tmp));
  }

  // Move assignment; dispatches on the allocator's move-propagation trait.
  raw_hash_set& operator=(raw_hash_set&& that) noexcept(
      absl::allocator_traits<allocator_type>::is_always_equal::value &&
      std::is_nothrow_move_assignable<hasher>::value &&
      std::is_nothrow_move_assignable<key_equal>::value) {
    return move_assign(
        std::move(that),
        typename AllocTraits::propagate_on_container_move_assignment());
  }

  ~raw_hash_set() { destructor_impl(); }
  iterator begin() ABSL_ATTRIBUTE_LIFETIME_BOUND {
    if (ABSL_PREDICT_FALSE(empty())) return end();
    if (is_soo()) return soo_iterator();
    iterator it = {control(), common().slots_union(),
                   common().generation_ptr()};
    // Position on the first full slot.
    it.skip_empty_or_deleted();
    assert(IsFull(*it.control()));
    return it;
  }
  // end() carries only the generation pointer; its ctrl_ is null.
  iterator end() ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return iterator(common().generation_ptr());
  }

  const_iterator begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return const_cast<raw_hash_set*>(this)->begin();
  }
  const_iterator end() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return iterator(common().generation_ptr());
  }
  const_iterator cbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return begin();
  }
  const_iterator cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND { return end(); }

  bool empty() const { return !size(); }
  size_t size() const { return common().size(); }
  size_t capacity() const {
    const size_t cap = common().capacity();
    // Teach the optimizer that SOO-enabled tables never report a capacity
    // below SooCapacity().
    ABSL_ATTRIBUTE_UNUSED static constexpr bool kEnabled = SooEnabled();
    ABSL_ATTRIBUTE_UNUSED static constexpr size_t kCapacity = SooCapacity();
    ABSL_ASSUME(!kEnabled || cap >= kCapacity);
    return cap;
  }
  size_t max_size() const { return (std::numeric_limits<size_t>::max)(); }
  // Removes all elements. Backing arrays below 128 slots are kept for
  // reuse; larger ones are deallocated.
  ABSL_ATTRIBUTE_REINITIALIZES void clear() {
    const size_t cap = capacity();
    if (cap == 0) {
      // Nothing allocated, nothing to do.
    } else if (is_soo()) {
      if (!empty()) destroy(soo_slot());
      common().set_empty_soo();
    } else {
      destroy_slots();
      ClearBackingArray(common(), GetPolicyFunctions(), cap < 128,
                        SooEnabled());
    }
    common().set_reserved_growth(0);
    common().set_reservation_size(0);
  }
  // insert() overload set; the value overloads all forward to emplace().
  template <class T, RequiresInsertable<T> = 0, class T2 = T,
            typename std::enable_if<IsDecomposable<T2>::value, int>::type = 0,
            T* = nullptr>
  std::pair<iterator, bool> insert(T&& value) ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return emplace(std::forward<T>(value));
  }

  template <
      class T, RequiresInsertable<const T&> = 0,
      typename std::enable_if<IsDecomposable<const T&>::value, int>::type = 0>
  std::pair<iterator, bool> insert(const T& value)
      ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return emplace(value);
  }

  std::pair<iterator, bool> insert(init_type&& value)
      ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return emplace(std::move(value));
  }

  // Hinted inserts: the hint iterator is accepted for API compatibility but
  // ignored.
  template <class T, RequiresInsertable<T> = 0, class T2 = T,
            typename std::enable_if<IsDecomposable<T2>::value, int>::type = 0,
            T* = nullptr>
  iterator insert(const_iterator, T&& value) ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return insert(std::forward<T>(value)).first;
  }

  template <
      class T, RequiresInsertable<const T&> = 0,
      typename std::enable_if<IsDecomposable<const T&>::value, int>::type = 0>
  iterator insert(const_iterator,
                  const T& value) ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return insert(value).first;
  }

  iterator insert(const_iterator,
                  init_type&& value) ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return insert(std::move(value)).first;
  }

  template <class InputIt>
  void insert(InputIt first, InputIt last) {
    for (; first != last; ++first) emplace(*first);
  }

  template <class T, RequiresNotInit<T> = 0, RequiresInsertable<const T&> = 0>
  void insert(std::initializer_list<T> ilist) {
    insert(ilist.begin(), ilist.end());
  }

  void insert(std::initializer_list<init_type> ilist) {
    insert(ilist.begin(), ilist.end());
  }

  // Node-handle insert: on success the node is emptied; on failure it is
  // handed back inside the insert_return_type.
  insert_return_type insert(node_type&& node) ABSL_ATTRIBUTE_LIFETIME_BOUND {
    if (!node) return {end(), false, node_type()};
    const auto& elem = PolicyTraits::element(CommonAccess::GetSlot(node));
    auto res = PolicyTraits::apply(
        InsertSlot<false>{*this, std::move(*CommonAccess::GetSlot(node))},
        elem);
    if (res.second) {
      CommonAccess::Reset(&node);
      return {res.first, true, node_type()};
    } else {
      return {res.first, false, std::move(node)};
    }
  }

  iterator insert(const_iterator,
                  node_type&& node) ABSL_ATTRIBUTE_LIFETIME_BOUND {
    auto res = insert(std::move(node));
    node = std::move(res.node);
    return res.position;
  }
  // Decomposable arguments: the key can be extracted without constructing a
  // full value, so nothing is constructed when the key already exists.
  template <class... Args, typename std::enable_if<
                               IsDecomposable<Args...>::value, int>::type = 0>
  std::pair<iterator, bool> emplace(Args&&... args)
      ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return PolicyTraits::apply(EmplaceDecomposable{*this},
                               std::forward<Args>(args)...);
  }

  // Non-decomposable arguments: construct the value into a raw stack slot
  // first, then hand it to InsertSlot for insertion by move.
  template <class... Args, typename std::enable_if<
                               !IsDecomposable<Args...>::value, int>::type = 0>
  std::pair<iterator, bool> emplace(Args&&... args)
      ABSL_ATTRIBUTE_LIFETIME_BOUND {
    alignas(slot_type) unsigned char raw[sizeof(slot_type)];
    slot_type* slot = to_slot(&raw);
    construct(slot, std::forward<Args>(args)...);
    const auto& elem = PolicyTraits::element(slot);
    return PolicyTraits::apply(InsertSlot<true>{*this, std::move(*slot)}, elem);
  }

  // The hint iterator is accepted for API compatibility but ignored.
  template <class... Args>
  iterator emplace_hint(const_iterator,
                        Args&&... args) ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return emplace(std::forward<Args>(args)...).first;
  }
  // Single-use callable handed to lazy_emplace's functor. Constructs the
  // pending slot exactly once; *slot_ is nulled afterwards so the caller
  // can assert construction happened.
  class constructor {
    friend class raw_hash_set;

   public:
    template <class... Args>
    void operator()(Args&&... args) const {
      assert(*slot_);
      PolicyTraits::construct(alloc_, *slot_, std::forward<Args>(args)...);
      *slot_ = nullptr;
    }

   private:
    constructor(allocator_type* a, slot_type** slot) : alloc_(a), slot_(slot) {}

    allocator_type* alloc_;
    slot_type** slot_;
  };
  // Finds `key` or, if absent, reserves a slot and invokes `f` with a
  // `constructor` that must be called exactly once to build the value.
  template <class K = key_type, class F>
  iterator lazy_emplace(const key_arg<K>& key,
                        F&& f) ABSL_ATTRIBUTE_LIFETIME_BOUND {
    auto res = find_or_prepare_insert(key);
    if (res.second) {
      slot_type* slot = res.first.slot();
      std::forward<F>(f)(constructor(&alloc_ref(), &slot));
      // The constructor callable nulls the slot pointer once invoked.
      assert(!slot);
    }
    return res.first;
  }

  // Erases by key; returns the number of elements removed (0 or 1).
  template <class K = key_type>
  size_type erase(const key_arg<K>& key) {
    auto it = find(key);
    if (it == end()) return 0;
    erase(it);
    return 1;
  }
// const_iterator overload forwards to the mutable-iterator overload.
void erase(const_iterator cit) { erase(cit.inner_); }
// Erases the element at `it`. The slot is destroyed here; a SOO (inline)
// table is then marked empty, while a heap table only updates control
// metadata (no rehash or shrink).
void erase(iterator it) {
AssertIsFull(it.control(), it.generation(), it.generation_ptr(), "erase()");
destroy(it.slot());
if (is_soo()) {
common().set_empty_soo();
} else {
erase_meta_only(it);
}
}
// Erases [first, last). Fast paths: empty range, SOO (destroy the one
// inline slot), and whole-table erase (bulk-destroy then reset the backing
// array). Otherwise erases one element at a time.
iterator erase(const_iterator first,
const_iterator last) ABSL_ATTRIBUTE_LIFETIME_BOUND {
if (empty()) return end();
if (first == last) return last.inner_;
if (is_soo()) {
destroy(soo_slot());
common().set_empty_soo();
return end();
}
if (first == begin() && last == end()) {
destroy_slots();
// NOTE(review): the `true` argument presumably asks ClearBackingArray to
// reuse the existing allocation (rehash() below passes false) — confirm
// against ClearBackingArray's signature.
ClearBackingArray(common(), GetPolicyFunctions(), true,
SooEnabled());
common().set_reserved_growth(common().reservation_size());
return end();
}
while (first != last) {
erase(first++);
}
return last.inner_;
}
// Moves elements out of `src` whose keys are not already present here;
// elements whose keys collide stay in `src` (standard node-merge
// semantics). Slot contents are transferred, never copied.
template <typename H, typename E>
void merge(raw_hash_set<Policy, H, E, Alloc>& src) { assert(this != &src);
// Returns true iff the slot's contents were moved into this table.
const auto insert_slot = [this](slot_type* src_slot) {
return PolicyTraits::apply(InsertSlot<false>{*this, std::move(*src_slot)},
PolicyTraits::element(src_slot))
.second;
};
if (src.is_soo()) {
if (src.empty()) return;
if (insert_slot(src.soo_slot())) src.common().set_empty_soo();
return;
}
for (auto it = src.begin(), e = src.end(); it != e;) {
// Capture the successor first: erase_meta_only invalidates `it`.
auto next = std::next(it);
if (insert_slot(it.slot())) src.erase_meta_only(it);
it = next;
}
}
// Rvalue overload: merge still leaves colliding elements behind, so it
// simply forwards to the lvalue overload.
template <typename H, typename E>
void merge(raw_hash_set<Policy, H, E, Alloc>&& src) {
merge(src);
}
// Removes the element at `position` and returns it as a node handle owning
// the slot's contents. A SOO table is marked empty; a heap table only
// clears control metadata.
node_type extract(const_iterator position) {
AssertIsFull(position.control(), position.inner_.generation(),
position.inner_.generation_ptr(), "extract()");
auto node = CommonAccess::Transfer<node_type>(alloc_ref(), position.slot());
if (is_soo()) {
common().set_empty_soo();
} else {
erase_meta_only(position);
}
return node;
}
// Extracts the element equal to `key` as a node handle, or an empty node
// handle if absent. The enable_if keeps this overload from competing with
// extract(const_iterator) when called with an iterator.
template <
    class K = key_type,
    typename std::enable_if<!std::is_same<K, iterator>::value, int>::type = 0>
node_type extract(const key_arg<K>& key) {
  auto pos = find(key);
  if (pos == end()) return node_type();
  return extract(const_iterator{pos});
}
// Swaps all state with `that`: table contents/metadata (swap_common), the
// hasher, the key-equality functor, and — subject to the allocator's
// propagate_on_container_swap trait — the allocator itself.
void swap(raw_hash_set& that) noexcept(
IsNoThrowSwappable<hasher>() && IsNoThrowSwappable<key_equal>() &&
IsNoThrowSwappable<allocator_type>(
typename AllocTraits::propagate_on_container_swap{})) {
using std::swap;
swap_common(that);
swap(hash_ref(), that.hash_ref());
swap(eq_ref(), that.eq_ref());
SwapAlloc(alloc_ref(), that.alloc_ref(),
typename AllocTraits::propagate_on_container_swap{});
}
// Rehashes so capacity is at least `n` (normalized). `n == 0` shrinks as
// much as possible: an empty heap table drops its backing array, and a
// table whose size fits inline moves back to the SOO representation —
// unless it is being sampled, since sampling requires a heap allocation.
void rehash(size_t n) {
const size_t cap = capacity();
if (n == 0) {
if (cap == 0 || is_soo()) return;
if (empty()) {
ClearBackingArray(common(), GetPolicyFunctions(), false,
SooEnabled());
return;
}
if (fits_in_soo(size())) {
// Sampled tables must stay on the heap; shrink to the smallest
// sampled (non-SOO) capacity instead of going inline.
if (infoz().IsSampled()) {
const size_t kInitialSampledCapacity = NextCapacity(SooCapacity());
if (capacity() > kInitialSampledCapacity) {
resize(kInitialSampledCapacity);
}
assert(infoz().IsSampled());
return;
}
// Move the single element through a stack slot: the backing array must
// be released before the SOO slot can be written, since they alias the
// same CommonFields storage.
alignas(slot_type) unsigned char slot_space[sizeof(slot_type)];
slot_type* tmp_slot = to_slot(slot_space);
transfer(tmp_slot, begin().slot());
ClearBackingArray(common(), GetPolicyFunctions(), false,
SooEnabled());
transfer(soo_slot(), tmp_slot);
common().set_full_soo();
return;
}
}
// bitor with the lower-bound capacity also handles n == 0 (pure shrink).
auto m = NormalizeCapacity(n | GrowthToLowerboundCapacity(size()));
// n == 0 rehashes unconditionally, matching the standard's contract.
if (n == 0 || m > cap) {
resize(m);
infoz().RecordReservation(n);
}
}
// Ensures `n` elements can be inserted without rehashing. Also records the
// reservation so growth bookkeeping (and sampling stats) reflect it.
void reserve(size_t n) {
const size_t max_size_before_growth =
is_soo() ? SooCapacity() : size() + growth_left();
if (n > max_size_before_growth) {
size_t m = GrowthToLowerboundCapacity(n);
resize(NormalizeCapacity(m));
infoz().RecordReservation(n);
}
common().reset_reserved_growth(n);
common().set_reservation_size(n);
}
// Returns 1 if an element equal to `key` exists, otherwise 0 (keys are
// unique in a raw_hash_set).
template <class K = key_type>
size_t count(const key_arg<K>& key) const {
  return static_cast<size_t>(find(key) != end());
}
// Prefetches the control bytes and slots at the start of `key`'s probe
// sequence. No-op when the element would be inline (SOO) or, with SOO
// disabled, when there is no backing array.
//
// BUG FIX: the closing brace of this function was on the same line as
// `#endif` ("#endif }"). Tokens after #endif are not part of the program —
// the preprocessor diagnoses and discards them — so the function body was
// left unterminated. The brace now sits on its own line.
template <class K = key_type>
void prefetch(const key_arg<K>& key) const {
  if (SooEnabled() ? is_soo() : capacity() == 0) return;
  (void)key;  // referenced only when ABSL_HAVE_PREFETCH is defined
#ifdef ABSL_HAVE_PREFETCH
  prefetch_heap_block();
  auto seq = probe(common(), hash_ref()(key));
  PrefetchToLocalCache(control() + seq.offset());
  PrefetchToLocalCache(slot_array() + seq.offset());
#endif
}
// Lookup with a caller-supplied hash (must equal hash_ref()(key)).
template <class K = key_type>
iterator find(const key_arg<K>& key,
size_t hash) ABSL_ATTRIBUTE_LIFETIME_BOUND {
AssertHashEqConsistent(key);
if (is_soo()) return find_soo(key);
return find_non_soo(key, hash);
}
// Lookup that computes the hash itself; prefetches the heap block before
// hashing to overlap the memory access with hash computation.
template <class K = key_type>
iterator find(const key_arg<K>& key) ABSL_ATTRIBUTE_LIFETIME_BOUND {
AssertHashEqConsistent(key);
if (is_soo()) return find_soo(key);
prefetch_heap_block();
return find_non_soo(key, hash_ref()(key));
}
// Const overloads delegate to the mutable overloads via const_cast; safe
// because they do not modify the table.
template <class K = key_type>
const_iterator find(const key_arg<K>& key,
size_t hash) const ABSL_ATTRIBUTE_LIFETIME_BOUND {
return const_cast<raw_hash_set*>(this)->find(key, hash);
}
template <class K = key_type>
const_iterator find(const key_arg<K>& key) const
ABSL_ATTRIBUTE_LIFETIME_BOUND {
return const_cast<raw_hash_set*>(this)->find(key);
}
// Uses unchecked_equals to skip the debug-iterator validity checks that a
// plain `!= end()` comparison would perform.
template <class K = key_type>
bool contains(const key_arg<K>& key) const {
return !find(key).unchecked_equals(end());
}
// Returns [it, it+1) when `key` is present, otherwise the empty range
// [end(), end()). At most one element can compare equal.
template <class K = key_type>
std::pair<iterator, iterator> equal_range(const key_arg<K>& key)
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  auto lower = find(key);
  auto upper = lower;
  if (lower != end()) ++upper;
  return {lower, upper};
}
// Const overload with identical semantics.
template <class K = key_type>
std::pair<const_iterator, const_iterator> equal_range(
    const key_arg<K>& key) const ABSL_ATTRIBUTE_LIFETIME_BOUND {
  auto lower = find(key);
  auto upper = lower;
  if (lower != end()) ++upper;
  return {lower, upper};
}
// Buckets are synonymous with slots in this design.
size_t bucket_count() const { return capacity(); }
// Computed in double for precision, then narrowed to float on return
// (matches the standard container signature).
float load_factor() const {
return capacity() ? static_cast<double>(size()) / capacity() : 0.0;
}
// Growth policy is fixed; the reported maximum is nominal.
float max_load_factor() const { return 1.0f; }
// Setter is a deliberate no-op, kept for std::unordered_* API parity.
void max_load_factor(float) {
}
hasher hash_function() const { return hash_ref(); }
key_equal key_eq() const { return eq_ref(); }
allocator_type get_allocator() const { return alloc_ref(); }
// Unordered element-wise equality. Iterates the side with the smaller
// capacity (cheaper scan) and looks each element up in the other side.
friend bool operator==(const raw_hash_set& a, const raw_hash_set& b) {
  if (a.size() != b.size()) return false;
  const raw_hash_set* scan_side = &a;
  const raw_hash_set* lookup_side = &b;
  if (scan_side->capacity() > lookup_side->capacity()) {
    std::swap(scan_side, lookup_side);
  }
  for (const value_type& elem : *scan_side) {
    auto found = PolicyTraits::apply(FindElement{*lookup_side}, elem);
    if (found == lookup_side->end()) return false;
    if (!(*found == elem)) return false;
  }
  return true;
}
friend bool operator!=(const raw_hash_set& a, const raw_hash_set& b) {
  return !(a == b);
}
// Order-insensitive hashing of the whole container: combines an unordered
// hash of the elements with the size.
template <typename H>
friend typename std::enable_if<H::template is_hashable<value_type>::value,
H>::type
AbslHashValue(H h, const raw_hash_set& s) {
return H::combine(H::combine_unordered(std::move(h), s.begin(), s.end()),
s.size());
}
// ADL swap, forwarding to the member swap (same noexcept).
friend void swap(raw_hash_set& a,
raw_hash_set& b) noexcept(noexcept(a.swap(b))) {
a.swap(b);
}
private:
template <class Container, typename Enabler>
friend struct absl::container_internal::hashtable_debug_internal::
HashtableDebugAccess;
friend struct absl::container_internal::HashtableFreeFunctionsAccess;
// Functors passed to PolicyTraits::apply, which decomposes an element into
// (key, remaining args) and invokes operator() on the pieces.
//
// Looks up `key` in table `s`; trailing args are ignored.
struct FindElement {
template <class K, class... Args>
const_iterator operator()(const K& key, Args&&...) const {
return s.find(key);
}
const raw_hash_set& s;
};
// Hashes the decomposed key with hasher `h`.
struct HashElement {
template <class K, class... Args>
size_t operator()(const K& key, Args&&...) const {
return h(key);
}
const hasher& h;
};
// Compares the decomposed key `lhs` against the captured `rhs` using `eq`.
template <class K1>
struct EqualElement {
template <class K2, class... Args>
bool operator()(const K2& lhs, Args&&...) const {
return eq(lhs, rhs);
}
const K1& rhs;
const key_equal& eq;
};
// Decomposed emplace: look up by key; on a miss, construct the element
// in the prepared slot from the remaining args.
struct EmplaceDecomposable {
template <class K, class... Args>
std::pair<iterator, bool> operator()(const K& key, Args&&... args) const {
auto res = s.find_or_prepare_insert(key);
if (res.second) {
s.emplace_at(res.first, std::forward<Args>(args)...);
}
return res;
}
raw_hash_set& s;
};
// Inserts an already-constructed slot: transfers it into the table on a
// miss; on a hit, destroys it iff do_destroy (true when the slot is a
// caller-owned temporary, false when the caller keeps ownership, e.g.
// node insert / merge).
template <bool do_destroy>
struct InsertSlot {
template <class K, class... Args>
std::pair<iterator, bool> operator()(const K& key, Args&&...) && {
auto res = s.find_or_prepare_insert(key);
if (res.second) {
s.transfer(res.first.slot(), &slot);
} else if (do_destroy) {
s.destroy(&slot);
}
return res;
}
raw_hash_set& s;
slot_type&& slot;
};
// Thin wrappers over the policy's slot lifecycle operations, binding this
// table's allocator.
template <typename... Args>
inline void construct(slot_type* slot, Args&&... args) {
PolicyTraits::construct(&alloc_ref(), slot, std::forward<Args>(args)...);
}
inline void destroy(slot_type* slot) {
PolicyTraits::destroy(&alloc_ref(), slot);
}
// Moves a slot's contents from `from` to `to`, leaving `from` destroyed.
inline void transfer(slot_type* to, slot_type* from) {
PolicyTraits::transfer(&alloc_ref(), to, from);
}
// Lookup in the SOO (inline, capacity-1) representation: at most one
// element exists, so compare against it directly.
template <class K = key_type>
iterator find_soo(const key_arg<K>& key) {
  assert(is_soo());
  if (empty()) return end();
  const bool matches = PolicyTraits::apply(
      EqualElement<K>{key, eq_ref()}, PolicyTraits::element(soo_slot()));
  return matches ? soo_iterator() : end();
}
// Heap-table lookup: probe group by group, comparing elements whose
// control byte matches H2(hash); an empty control byte in a group proves
// the key is absent and terminates the probe sequence.
template <class K = key_type>
iterator find_non_soo(const key_arg<K>& key, size_t hash) {
assert(!is_soo());
auto seq = probe(common(), hash);
const ctrl_t* ctrl = control();
while (true) {
Group g{ctrl + seq.offset()};
for (uint32_t i : g.Match(H2(hash))) {
if (ABSL_PREDICT_TRUE(PolicyTraits::apply(
EqualElement<K>{key, eq_ref()},
PolicyTraits::element(slot_array() + seq.offset(i)))))
return iterator_at(seq.offset(i));
}
if (ABSL_PREDICT_TRUE(g.MaskEmpty())) return end();
seq.next();
assert(seq.index() <= capacity() && "full table!");
}
}
// Possibly samples this (currently SOO) table for hashtablez statistics;
// returns an empty handle when not sampled.
inline HashtablezInfoHandle try_sample_soo() {
assert(is_soo());
if (!ShouldSampleHashtablezInfo<CharAlloc>()) return HashtablezInfoHandle{};
return Sample(sizeof(slot_type), sizeof(key_type), sizeof(value_type),
SooCapacity());
}
// Destroys all full slots of a heap table; skipped entirely when the
// policy's destroy is trivial.
inline void destroy_slots() {
assert(!is_soo());
if (PolicyTraits::template destroy_is_trivial<Alloc>()) return;
IterateOverFullSlots(
common(), slot_array(),
[&](const ctrl_t*, slot_type* slot)
ABSL_ATTRIBUTE_ALWAYS_INLINE { this->destroy(slot); });
}
// Releases the backing array (slots must already be destroyed) and
// unregisters from sampling. Unpoisons first so the deallocation itself
// is not flagged by ASan.
inline void dealloc() {
assert(capacity() != 0);
SanitizerUnpoisonMemoryRegion(slot_array(), sizeof(slot_type) * capacity());
infoz().Unregister();
Deallocate<BackingArrayAlignment(alignof(slot_type))>(
&alloc_ref(), common().backing_array_start(),
common().alloc_size(sizeof(slot_type), alignof(slot_type)));
}
// Destructor body: destroys the inline element (SOO) or all heap slots
// plus the backing array. A zero-capacity table owns nothing.
inline void destructor_impl() {
if (capacity() == 0) return;
if (is_soo()) {
if (!empty()) {
ABSL_SWISSTABLE_IGNORE_UNINITIALIZED(destroy(soo_slot()));
}
return;
}
destroy_slots();
dealloc();
}
// Clears the control metadata for `it` without touching the (already
// destroyed or transferred) slot contents. Heap tables only.
void erase_meta_only(const_iterator it) {
assert(!is_soo());
EraseMetaOnly(common(), static_cast<size_t>(it.control() - control()),
sizeof(slot_type));
}
// Hash of the element stored in `slot`, via policy decomposition.
size_t hash_of(slot_type* slot) const {
return PolicyTraits::apply(HashElement{hash_ref()},
PolicyTraits::element(slot));
}
// Resizes to `new_capacity` with no forced sampling handle.
void resize(size_t new_capacity) {
raw_hash_set::resize_impl(common(), new_capacity, HashtablezInfoHandle{});
}
// Moves a sampled SOO table onto the heap at the first non-SOO capacity,
// attaching the given (already sampled) info handle.
void resize_with_soo_infoz(HashtablezInfoHandle forced_infoz) {
assert(forced_infoz.IsSampled());
raw_hash_set::resize_impl(common(), NextCapacity(SooCapacity()),
forced_infoz);
}
// Core resize: allocates a new backing array of `new_capacity` and moves
// every element into it, handling SOO -> heap transitions. Static and
// NOINLINE, referenced by GetPolicyFunctions(); it recovers the set from
// `common`.
ABSL_ATTRIBUTE_NOINLINE static void resize_impl(
CommonFields& common, size_t new_capacity,
HashtablezInfoHandle forced_infoz) {
// NOTE(review): this cast assumes CommonFields is the first member of the
// set's storage (settings_ element 0), so &common aliases the set itself —
// confirm against the settings_ declaration.
raw_hash_set* set = reinterpret_cast<raw_hash_set*>(&common);
assert(IsValidCapacity(new_capacity));
assert(!set->fits_in_soo(new_capacity));
const bool was_soo = set->is_soo();
const bool had_soo_slot = was_soo && !set->empty();
// H2 of the inline element, needed to place its control byte in the new
// array; kEmpty when there is no inline element.
const ctrl_t soo_slot_h2 =
had_soo_slot ? static_cast<ctrl_t>(H2(set->hash_of(set->soo_slot())))
: ctrl_t::kEmpty;
HashSetResizeHelper resize_helper(common, was_soo, had_soo_slot,
forced_infoz);
// Preserve the old heap pointer / SOO bytes before `common` is
// repurposed for the new array. A non-memcpyable SOO element must be
// transferred via the helper's scratch buffer.
if (PolicyTraits::transfer_uses_memcpy() || !had_soo_slot) {
resize_helper.old_heap_or_soo() = common.heap_or_soo();
} else {
set->transfer(set->to_slot(resize_helper.old_soo_data()),
set->soo_slot());
}
common.set_capacity(new_capacity);
// InitializeSlots allocates the new array; it reports whether this is a
// single-group growth (which permits cheaper element placement below).
const bool grow_single_group =
resize_helper.InitializeSlots<CharAlloc, sizeof(slot_type),
PolicyTraits::transfer_uses_memcpy(),
SooEnabled(), alignof(slot_type)>(
common, CharAlloc(set->alloc_ref()), soo_slot_h2, sizeof(key_type),
sizeof(value_type));
// Nothing to move (and nothing old to free) when growing from empty.
if (!SooEnabled() && resize_helper.old_capacity() == 0) {
return;
}
assert(resize_helper.old_capacity() > 0);
if (was_soo && !had_soo_slot) return;
slot_type* new_slots = set->slot_array();
if (grow_single_group) {
if (PolicyTraits::transfer_uses_memcpy()) {
// InitializeSlots already memcpy'd the elements; nothing else to do
// (no old heap array needs freeing on this path).
return;
}
if (was_soo) {
set->transfer(new_slots + resize_helper.SooSlotIndex(),
to_slot(resize_helper.old_soo_data()));
return;
} else {
resize_helper.GrowSizeIntoSingleGroup<PolicyTraits>(common,
set->alloc_ref());
}
} else {
// General rehash: re-probe each element into the new array.
const auto insert_slot = [&](slot_type* slot) {
size_t hash = PolicyTraits::apply(HashElement{set->hash_ref()},
PolicyTraits::element(slot));
auto target = find_first_non_full(common, hash);
SetCtrl(common, target.offset, H2(hash), sizeof(slot_type));
set->transfer(new_slots + target.offset, slot);
return target.probe_length;
};
if (was_soo) {
insert_slot(to_slot(resize_helper.old_soo_data()));
return;
} else {
auto* old_slots = static_cast<slot_type*>(resize_helper.old_slots());
size_t total_probe_length = 0;
for (size_t i = 0; i != resize_helper.old_capacity(); ++i) {
if (IsFull(resize_helper.old_ctrl()[i])) {
total_probe_length += insert_slot(old_slots + i);
}
}
common.infoz().RecordRehash(total_probe_length);
}
}
// Reached only on the paths that still hold an old heap array.
resize_helper.DeallocateOld<alignof(slot_type)>(CharAlloc(set->alloc_ref()),
sizeof(slot_type));
}
// Reinterprets raw storage as a slot pointer.
static slot_type* to_slot(void* buf) { return static_cast<slot_type*>(buf); }
// Moves `rhs`'s CommonFields into `lhs`. When `rhs` holds a full SOO slot
// whose type cannot be memcpy'd, the inline element must be transferred
// explicitly after moving the non-storage fields.
static void move_common(bool that_is_full_soo, allocator_type& rhs_alloc,
CommonFields& lhs, CommonFields&& rhs) {
if (PolicyTraits::transfer_uses_memcpy() || !that_is_full_soo) {
lhs = std::move(rhs);
} else {
lhs.move_non_heap_or_soo_fields(rhs);
PolicyTraits::transfer(&rhs_alloc, to_slot(lhs.soo_data()),
to_slot(rhs.soo_data()));
}
}
// Swaps CommonFields with `that`. A plain member swap suffices for
// memcpy-transferable slots; otherwise a three-way move through a default
// CommonFields temporary keeps any full SOO elements valid.
void swap_common(raw_hash_set& that) {
using std::swap;
if (PolicyTraits::transfer_uses_memcpy()) {
swap(common(), that.common());
return;
}
CommonFields tmp = CommonFields::CreateDefault<SooEnabled()>();
const bool that_is_full_soo = that.is_full_soo();
move_common(that_is_full_soo, that.alloc_ref(), tmp,
std::move(that.common()));
move_common(is_full_soo(), alloc_ref(), that.common(), std::move(common()));
move_common(that_is_full_soo, that.alloc_ref(), common(), std::move(tmp));
}
// After a move, generations-enabled debug builds bump the generation (to
// invalidate outstanding iterators) and may rehash to shake out
// iterator-stability bugs. No-op for SOO/empty tables.
void maybe_increment_generation_or_rehash_on_move() {
if (!SwisstableGenerationsEnabled() || capacity() == 0 || is_soo()) {
return;
}
common().increment_generation();
if (!empty() && common().should_rehash_for_bug_detection_on_move()) {
resize(capacity());
}
}
// Move-assignment when the backing array can be adopted wholesale (either
// the allocator propagates or the allocators compare equal). `that` is
// left in the default (empty) state.
template <bool propagate_alloc>
raw_hash_set& assign_impl(raw_hash_set&& that) {
// Existing elements/storage must be released before adopting that's.
destructor_impl();
move_common(that.is_full_soo(), that.alloc_ref(), common(),
std::move(that.common()));
hash_ref() = that.hash_ref();
eq_ref() = that.eq_ref();
CopyAlloc(alloc_ref(), that.alloc_ref(),
std::integral_constant<bool, propagate_alloc>());
that.common() = CommonFields::CreateDefault<SooEnabled()>();
maybe_increment_generation_or_rehash_on_move();
return *this;
}
// Element-by-element move when allocators are unequal and do not
// propagate: each element is moved individually, then that's storage is
// freed with that's own allocator.
raw_hash_set& move_elements_allocs_unequal(raw_hash_set&& that) {
const size_t size = that.size();
if (size == 0) return *this;
reserve(size);
for (iterator it = that.begin(); it != that.end(); ++it) {
insert(std::move(PolicyTraits::element(it.slot())));
// insert() may have moved from the element; destroy the husk.
that.destroy(it.slot());
}
if (!that.is_soo()) that.dealloc();
that.common() = CommonFields::CreateDefault<SooEnabled()>();
maybe_increment_generation_or_rehash_on_move();
return *this;
}
// Tag dispatch on propagate_on_container_move_assignment.
raw_hash_set& move_assign(raw_hash_set&& that,
std::true_type ) {
return assign_impl<true>(std::move(that));
}
raw_hash_set& move_assign(raw_hash_set&& that,
std::false_type ) {
// Equal allocators can still adopt the storage without propagating.
if (alloc_ref() == that.alloc_ref()) {
return assign_impl<false>(std::move(that));
}
assert(this != &that);
destructor_impl();
hash_ref() = that.hash_ref();
eq_ref() = that.eq_ref();
return move_elements_allocs_unequal(std::move(that));
}
// Insert-position search for a SOO table. Empty: claim the inline slot
// (unless sampling forces a heap allocation). Occupied: either the key
// matches the inline element, or the table must grow to the heap first.
template <class K>
std::pair<iterator, bool> find_or_prepare_insert_soo(const K& key) {
if (empty()) {
const HashtablezInfoHandle infoz = try_sample_soo();
if (infoz.IsSampled()) {
// Sampled tables live on the heap; fall through to the heap insert.
resize_with_soo_infoz(infoz);
} else {
common().set_full_soo();
return {soo_iterator(), true};
}
} else if (PolicyTraits::apply(EqualElement<K>{key, eq_ref()},
PolicyTraits::element(soo_slot()))) {
return {soo_iterator(), false};
} else {
resize(NextCapacity(SooCapacity()));
}
// The table is now on the heap with (at most) one existing element whose
// key differs from `key`; reserve a slot for the new element.
const size_t index =
PrepareInsertAfterSoo(hash_ref()(key), sizeof(slot_type), common());
return {iterator_at(index), true};
}
// Insert-position search for a heap table: probe for an equal key; on the
// first group containing an empty byte the key is proven absent and an
// insertion slot is prepared.
template <class K>
std::pair<iterator, bool> find_or_prepare_insert_non_soo(const K& key) {
assert(!is_soo());
prefetch_heap_block();
auto hash = hash_ref()(key);
auto seq = probe(common(), hash);
const ctrl_t* ctrl = control();
while (true) {
Group g{ctrl + seq.offset()};
for (uint32_t i : g.Match(H2(hash))) {
if (ABSL_PREDICT_TRUE(PolicyTraits::apply(
EqualElement<K>{key, eq_ref()},
PolicyTraits::element(slot_array() + seq.offset(i)))))
return {iterator_at(seq.offset(i)), false};
}
auto mask_empty = g.MaskEmpty();
if (ABSL_PREDICT_TRUE(mask_empty)) {
size_t target = seq.offset(
GetInsertionOffset(mask_empty, capacity(), hash, control()));
return {iterator_at(PrepareInsertNonSoo(common(), hash,
FindInfo{target, seq.index()},
GetPolicyFunctions())),
true};
}
seq.next();
assert(seq.index() <= capacity() && "full table!");
}
}
protected:
// Debug-only check that hash and equality are mutually consistent for
// `key`: any stored element equal to `key` must hash identically. Also
// re-evaluates both functors to distinguish non-idempotent functors from
// genuinely inconsistent ones. Only scans small tables (capacity <= 16)
// to bound the cost.
template <class K>
void AssertHashEqConsistent(ABSL_ATTRIBUTE_UNUSED const K& key) {
#ifndef NDEBUG
if (empty()) return;
const size_t hash_of_arg = hash_ref()(key);
const auto assert_consistent = [&](const ctrl_t*, slot_type* slot) {
const value_type& element = PolicyTraits::element(slot);
const bool is_key_equal =
PolicyTraits::apply(EqualElement<K>{key, eq_ref()}, element);
if (!is_key_equal) return;
const size_t hash_of_slot =
PolicyTraits::apply(HashElement{hash_ref()}, element);
const bool is_hash_equal = hash_of_arg == hash_of_slot;
if (!is_hash_equal) {
// Re-run hash/eq so the failure message can pinpoint whether the
// functors are non-deterministic rather than inconsistent.
const size_t once_more_hash_arg = hash_ref()(key);
assert(hash_of_arg == once_more_hash_arg && "hash is not idempotent.");
const size_t once_more_hash_slot =
PolicyTraits::apply(HashElement{hash_ref()}, element);
assert(hash_of_slot == once_more_hash_slot &&
"hash is not idempotent.");
const bool once_more_eq =
PolicyTraits::apply(EqualElement<K>{key, eq_ref()}, element);
assert(is_key_equal == once_more_eq && "equality is not idempotent.");
}
assert((!is_key_equal || is_hash_equal) &&
"eq(k1, k2) must imply that hash(k1) == hash(k2). "
"hash/eq functors are inconsistent.");
};
if (is_soo()) {
assert_consistent( nullptr, soo_slot());
return;
}
// Limit the O(capacity) scan to small tables.
if (capacity() > 16) return;
IterateOverFullSlots(common(), slot_array(), assert_consistent);
#endif
}
// Dispatches to the SOO or heap insert-position search.
template <class K>
std::pair<iterator, bool> find_or_prepare_insert(const K& key) {
AssertHashEqConsistent(key);
if (is_soo()) return find_or_prepare_insert_soo(key);
return find_or_prepare_insert_non_soo(key);
}
// Constructs an element in the slot that `iter` points at; the slot must
// already be reserved (by find_or_prepare_insert). Debug-asserts the
// constructed value is found at the same position it was placed.
template <class... Args>
void emplace_at(iterator iter, Args&&... args) {
construct(iter.slot(), std::forward<Args>(args)...);
assert(PolicyTraits::apply(FindElement{*this}, *iter) == iter &&
"constructed value does not match the lookup key");
}
// Iterator over slot index `i` (heap tables).
iterator iterator_at(size_t i) ABSL_ATTRIBUTE_LIFETIME_BOUND {
return {control() + i, slot_array() + i, common().generation_ptr()};
}
const_iterator iterator_at(size_t i) const ABSL_ATTRIBUTE_LIFETIME_BOUND {
return const_cast<raw_hash_set*>(this)->iterator_at(i);
}
// Dereference without the debug validity checks.
reference unchecked_deref(iterator it) { return it.unchecked_deref(); }
private:
friend struct RawHashSetTestOnlyAccess;
// Remaining insertions possible before the next growth (heap tables only).
size_t growth_left() const {
assert(!is_soo());
return common().growth_left();
}
GrowthInfo& growth_info() {
assert(!is_soo());
return common().growth_info();
}
GrowthInfo growth_info() const {
assert(!is_soo());
return common().growth_info();
}
// Hints the control-byte array into cache ahead of probing.
void prefetch_heap_block() const {
assert(!is_soo());
#if ABSL_HAVE_BUILTIN(__builtin_prefetch) || defined(__GNUC__)
__builtin_prefetch(control(), 0, 1);
#endif
}
// Accessors into the CompressedTuple settings_:
// <0> CommonFields, <1> hasher, <2> key_equal, <3> allocator.
CommonFields& common() { return settings_.template get<0>(); }
const CommonFields& common() const { return settings_.template get<0>(); }
ctrl_t* control() const {
assert(!is_soo());
return common().control();
}
slot_type* slot_array() const {
assert(!is_soo());
return static_cast<slot_type*>(common().slot_array());
}
// The single inline slot of the SOO representation.
slot_type* soo_slot() {
assert(is_soo());
return static_cast<slot_type*>(common().soo_data());
}
const slot_type* soo_slot() const {
return const_cast<raw_hash_set*>(this)->soo_slot();
}
iterator soo_iterator() {
return {SooControl(), soo_slot(), common().generation_ptr()};
}
const_iterator soo_iterator() const {
return const_cast<raw_hash_set*>(this)->soo_iterator();
}
HashtablezInfoHandle infoz() {
assert(!is_soo());
return common().infoz();
}
hasher& hash_ref() { return settings_.template get<1>(); }
const hasher& hash_ref() const { return settings_.template get<1>(); }
key_equal& eq_ref() { return settings_.template get<2>(); }
const key_equal& eq_ref() const { return settings_.template get<2>(); }
allocator_type& alloc_ref() { return settings_.template get<3>(); }
const allocator_type& alloc_ref() const {
return settings_.template get<3>();
}
// Type-erased thunks referenced by GetPolicyFunctions(). Each recovers the
// raw_hash_set from its CommonFields.
// NOTE(review): the casts assume CommonFields is at offset 0 within the
// set's settings_ — confirm against the settings_ declaration.
static const void* get_hash_ref_fn(const CommonFields& common) {
auto* h = reinterpret_cast<const raw_hash_set*>(&common);
return &h->hash_ref();
}
static void transfer_slot_fn(void* set, void* dst, void* src) {
auto* h = static_cast<raw_hash_set*>(set);
h->transfer(static_cast<slot_type*>(dst), static_cast<slot_type*>(src));
}
// Deallocation thunk for non-standard allocators (mirrors dealloc()).
static void dealloc_fn(CommonFields& common, const PolicyFunctions&) {
auto* set = reinterpret_cast<raw_hash_set*>(&common);
SanitizerUnpoisonMemoryRegion(common.slot_array(),
sizeof(slot_type) * common.capacity());
common.infoz().Unregister();
Deallocate<BackingArrayAlignment(alignof(slot_type))>(
&set->alloc_ref(), common.backing_array_start(),
common.alloc_size(sizeof(slot_type), alignof(slot_type)));
}
// Returns the type-erased policy vtable shared by CommonFields-level
// helpers (ClearBackingArray, PrepareInsertNonSoo, ...). Selects cheaper
// shared implementations when the policy allows (empty hasher,
// memcpy-able transfer, standard allocator).
static const PolicyFunctions& GetPolicyFunctions() {
static constexpr PolicyFunctions value = {
sizeof(slot_type),
std::is_empty<hasher>::value ? &GetHashRefForEmptyHasher
: &raw_hash_set::get_hash_ref_fn,
PolicyTraits::template get_hash_slot_fn<hasher>(),
PolicyTraits::transfer_uses_memcpy()
? TransferRelocatable<sizeof(slot_type)>
: &raw_hash_set::transfer_slot_fn,
(std::is_same<SlotAlloc, std::allocator<slot_type>>::value
? &DeallocateStandard<alignof(slot_type)>
: &raw_hash_set::dealloc_fn),
&raw_hash_set::resize_impl,
};
return value;
}
// All state, EBO-compressed: CommonFields first (several static functions
// above rely on recovering the set from a CommonFields pointer), then the
// hasher, key-equality functor, and allocator.
absl::container_internal::CompressedTuple<CommonFields, hasher, key_equal,
allocator_type>
settings_{CommonFields::CreateDefault<SooEnabled()>(), hasher{},
key_equal{}, allocator_type{}};
};
// Befriended accessor that implements the free functions EraseIf/ForEach
// using the set's private internals (direct slot iteration).
struct HashtableFreeFunctionsAccess {
// Erases every element for which `pred` returns true; returns the number
// erased. The predicate must not mutate the container.
template <class Predicate, typename Set>
static typename Set::size_type EraseIf(Predicate& pred, Set* c) {
if (c->empty()) {
return 0;
}
if (c->is_soo()) {
auto it = c->soo_iterator();
if (!pred(*it)) {
assert(c->size() == 1 && "hash table was modified unexpectedly");
return 0;
}
c->destroy(it.slot());
c->common().set_empty_soo();
return 1;
}
ABSL_ATTRIBUTE_UNUSED const size_t original_size_for_assert = c->size();
size_t num_deleted = 0;
// Destroy matching slots in place and clear only control metadata, so
// iteration over the remaining full slots stays valid.
IterateOverFullSlots(
c->common(), c->slot_array(), [&](const ctrl_t* ctrl, auto* slot) {
if (pred(Set::PolicyTraits::element(slot))) {
c->destroy(slot);
EraseMetaOnly(c->common(), static_cast<size_t>(ctrl - c->control()),
sizeof(*slot));
++num_deleted;
}
});
assert(original_size_for_assert - num_deleted == c->size() &&
"hash table was modified unexpectedly");
return num_deleted;
}
// Invokes `cb` on every element. The callback must not mutate the
// container's structure.
template <class Callback, typename Set>
static void ForEach(Callback& cb, Set* c) {
if (c->empty()) {
return;
}
if (c->is_soo()) {
cb(*c->soo_iterator());
return;
}
// Preserve constness of the element reference as seen through begin().
using ElementTypeWithConstness = decltype(*c->begin());
IterateOverFullSlots(
c->common(), c->slot_array(), [&cb](const ctrl_t*, auto* slot) {
ElementTypeWithConstness& element = Set::PolicyTraits::element(slot);
cb(element);
});
}
};
// Public free-function entry points; forward to the befriended accessor.
template <typename P, typename H, typename E, typename A, typename Predicate>
typename raw_hash_set<P, H, E, A>::size_type EraseIf(
Predicate& pred, raw_hash_set<P, H, E, A>* c) {
return HashtableFreeFunctionsAccess::EraseIf(pred, c);
}
template <typename P, typename H, typename E, typename A, typename Callback>
void ForEach(Callback& cb, raw_hash_set<P, H, E, A>* c) {
return HashtableFreeFunctionsAccess::ForEach(cb, c);
}
template <typename P, typename H, typename E, typename A, typename Callback>
void ForEach(Callback& cb, const raw_hash_set<P, H, E, A>* c) {
return HashtableFreeFunctionsAccess::ForEach(cb, c);
}
namespace hashtable_debug_internal {
// Debug hooks specialization for swisstable sets.
template <typename Set>
struct HashtableDebugAccess<Set, absl::void_t<typename Set::raw_hash_set>> {
using Traits = typename Set::PolicyTraits;
using Slot = typename Traits::slot_type;
// Counts probe steps performed to locate `key` (0 for SOO tables, and
// also when the first comparison in the first group matches).
static size_t GetNumProbes(const Set& set,
const typename Set::key_type& key) {
if (set.is_soo()) return 0;
size_t num_probes = 0;
size_t hash = set.hash_ref()(key);
auto seq = probe(set.common(), hash);
const ctrl_t* ctrl = set.control();
while (true) {
container_internal::Group g{ctrl + seq.offset()};
for (uint32_t i : g.Match(container_internal::H2(hash))) {
if (Traits::apply(
typename Set::template EqualElement<typename Set::key_type>{
key, set.eq_ref()},
Traits::element(set.slot_array() + seq.offset(i))))
return num_probes;
++num_probes;
}
if (g.MaskEmpty()) return num_probes;
seq.next();
++num_probes;
}
}
// Estimates bytes allocated by the table: backing array (heap tables
// only) plus per-element space reported by the policy. A policy that
// cannot report a flat per-slot size (~size_t{}) is queried per element.
static size_t AllocatedByteSize(const Set& c) {
size_t capacity = c.capacity();
if (capacity == 0) return 0;
size_t m =
c.is_soo() ? 0 : c.common().alloc_size(sizeof(Slot), alignof(Slot));
size_t per_slot = Traits::space_used(static_cast<const Slot*>(nullptr));
if (per_slot != ~size_t{}) {
m += per_slot * c.size();
} else {
for (auto it = c.begin(); it != c.end(); ++it) {
m += Traits::space_used(it.slot());
}
}
return m;
}
};
} } ABSL_NAMESPACE_END
}
#undef ABSL_SWISSTABLE_ENABLE_GENERATIONS
#undef ABSL_SWISSTABLE_IGNORE_UNINITIALIZED
#undef ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN
#endif