mod cow;
mod newtypes;
mod reference;
#[cfg(doc)]
use crate::{
cpu::cpuset::CpuSet, memory::nodeset::NodeSet, object::TopologyObject, topology::Topology,
};
use crate::{
errors,
ffi::{self, PositiveInt},
};
use hwlocality_sys::hwloc_bitmap_s;
#[cfg(any(test, feature = "proptest"))]
use proptest::prelude::*;
#[allow(unused)]
#[cfg(test)]
use similar_asserts::assert_eq;
#[cfg(test)]
use std::cell::Cell;
#[cfg(any(test, feature = "proptest"))]
use std::collections::HashSet;
use std::{
borrow::Borrow,
cmp::Ordering,
ffi::{c_int, c_uint},
fmt::{self, Debug, Display, Formatter, Pointer},
hash::{self, Hash},
iter::FusedIterator,
ops::{
BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign, Bound, Deref, Not,
RangeBounds, Sub, SubAssign,
},
ptr::NonNull,
};
pub type BitmapIndex = PositiveInt;
pub use self::{
cow::BitmapCow,
newtypes::{BitmapKind, OwnedBitmap, SpecializedBitmap, SpecializedBitmapRef},
reference::BitmapRef,
};
/// Owned hwloc bitmap: a set of non-negative integer indices, possibly
/// infinite (e.g. "all indices from 4 onwards")
///
/// Wraps a heap-allocated `hwloc_bitmap_s`, which is freed on `Drop`.
/// `#[repr(transparent)]` guarantees layout compatibility with the raw
/// pointer, which `BitmapRef` relies on.
#[doc(alias = "hwloc_bitmap_t")]
#[doc(alias = "hwloc_const_bitmap_t")]
#[repr(transparent)]
pub struct Bitmap(NonNull<hwloc_bitmap_s>);
impl Bitmap {
/// Take ownership of an hwloc bitmap pointer, which may be null
///
/// # Safety
///
/// If non-null, `bitmap` must point to a valid hwloc bitmap that the
/// caller owns: the returned `Bitmap` will free it on `Drop`.
pub(crate) unsafe fn from_owned_raw_mut(bitmap: *mut hwloc_bitmap_s) -> Option<Self> {
// SAFETY: Validity and ownership are the caller's responsibility
NonNull::new(bitmap).map(|ptr| unsafe { Self::from_owned_nonnull(ptr) })
}
/// Take ownership of an hwloc bitmap pointer known to be non-null
///
/// # Safety
///
/// `bitmap` must point to a valid hwloc bitmap that the caller owns:
/// the returned `Bitmap` will free it on `Drop`.
pub(crate) unsafe fn from_owned_nonnull(bitmap: NonNull<hwloc_bitmap_s>) -> Self {
Self(bitmap)
}
/// Borrow an hwloc bitmap pointer, which may be null
///
/// # Safety
///
/// If non-null, `bitmap` must point to a valid hwloc bitmap that remains
/// valid and unmodified for the whole `'target` lifetime.
pub(crate) unsafe fn borrow_from_raw<'target>(
bitmap: *const hwloc_bitmap_s,
) -> Option<BitmapRef<'target, Self>> {
// SAFETY: BitmapRef only exposes shared access, so casting away const
//         is not observable; other preconditions forwarded to caller
unsafe { Self::borrow_from_raw_mut(bitmap.cast_mut()) }
}
/// Like `borrow_from_raw()`, but for mutable pointers
///
/// # Safety
///
/// Same contract as `Bitmap::borrow_from_raw()`
pub(crate) unsafe fn borrow_from_raw_mut<'target>(
bitmap: *mut hwloc_bitmap_s,
) -> Option<BitmapRef<'target, Self>> {
// SAFETY: Preconditions forwarded to the caller
NonNull::new(bitmap).map(|ptr| unsafe { Self::borrow_from_nonnull(ptr) })
}
/// Like `borrow_from_raw()`, but for pointers known to be non-null
///
/// # Safety
///
/// Same contract as `Bitmap::borrow_from_raw()`
pub(crate) unsafe fn borrow_from_nonnull<'target>(
bitmap: NonNull<hwloc_bitmap_s>,
) -> BitmapRef<'target, Self> {
// SAFETY: Preconditions forwarded to the caller
unsafe { BitmapRef::from_nonnull(bitmap) }
}
/// Raw const pointer to the inner hwloc bitmap, for FFI calls
pub(crate) fn as_ptr(&self) -> *const hwloc_bitmap_s {
self.0.as_ptr()
}
/// Raw mutable pointer to the inner hwloc bitmap, for FFI calls
/// (takes `&mut self` so Rust's aliasing discipline is preserved)
pub(crate) fn as_mut_ptr(&mut self) -> *mut hwloc_bitmap_s {
self.0.as_ptr()
}
/// Create a new, empty bitmap
///
/// # Panics
///
/// On allocation failure, like Rust allocators do
#[doc(alias = "hwloc_bitmap_alloc")]
pub fn new() -> Self {
// SAFETY: A freshly allocated bitmap is valid and owned by us
unsafe {
let ptr = errors::call_hwloc_ptr_mut("hwloc_bitmap_alloc", || {
hwlocality_sys::hwloc_bitmap_alloc()
})
.expect(MALLOC_FAIL_ONLY);
Self::from_owned_nonnull(ptr)
}
}
/// Create a new bitmap with all indices set
///
/// # Panics
///
/// On allocation failure, like Rust allocators do
#[doc(alias = "hwloc_bitmap_alloc_full")]
pub fn full() -> Self {
// SAFETY: A freshly allocated bitmap is valid and owned by us
unsafe {
let ptr = errors::call_hwloc_ptr_mut("hwloc_bitmap_alloc_full", || {
hwlocality_sys::hwloc_bitmap_alloc_full()
})
.expect(MALLOC_FAIL_ONLY);
Self::from_owned_nonnull(ptr)
}
}
/// Create a new bitmap whose set indices are exactly those of `range`
///
/// # Panics
///
/// If a bound of `range` is outside the 0..=c_int::MAX range that hwloc
/// accepts (see `set_range()`)
pub fn from_range<Idx>(range: impl RangeBounds<Idx>) -> Self
where
Idx: Copy + TryInto<BitmapIndex>,
<Idx as TryInto<BitmapIndex>>::Error: Debug,
{
let mut bitmap = Self::new();
bitmap.set_range(range);
bitmap
}
/// Overwrite this bitmap's contents with those of `other`
#[doc(alias = "hwloc_bitmap_copy")]
pub fn copy_from(&mut self, other: impl Deref<Target = Self>) {
// Non-generic inner fn: FFI glue is compiled once, not per `other` type
fn polymorphized(self_: &mut Bitmap, other: &Bitmap) {
// SAFETY: Both pointers designate valid hwloc bitmaps
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_copy", || unsafe {
hwlocality_sys::hwloc_bitmap_copy(self_.as_mut_ptr(), other.as_ptr())
})
.expect(MALLOC_FAIL_ONLY);
}
polymorphized(self, &other)
}
/// Unset all indices
#[doc(alias = "hwloc_bitmap_zero")]
pub fn clear(&mut self) {
// SAFETY: Valid bitmap pointer; this hwloc call reports no errors
unsafe { hwlocality_sys::hwloc_bitmap_zero(self.as_mut_ptr()) }
}
/// Set all indices
#[doc(alias = "hwloc_bitmap_fill")]
pub fn fill(&mut self) {
// SAFETY: Valid bitmap pointer; this hwloc call reports no errors
unsafe { hwlocality_sys::hwloc_bitmap_fill(self.as_mut_ptr()) }
}
/// Clear the bitmap, then set only index `idx`
///
/// # Panics
///
/// If `idx` is outside the 0..=c_int::MAX range that hwloc accepts
#[doc(alias = "hwloc_bitmap_only")]
pub fn set_only<Idx>(&mut self, idx: Idx)
where
Idx: TryInto<BitmapIndex>,
<Idx as TryInto<BitmapIndex>>::Error: Debug,
{
// Non-generic inner fn; index conversion failure becomes None -> panic
fn polymorphized(self_: &mut Bitmap, idx: Option<BitmapIndex>) {
let idx = idx.expect(BAD_INDEX);
// SAFETY: Valid bitmap pointer and in-range index
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_only", || unsafe {
hwlocality_sys::hwloc_bitmap_only(self_.as_mut_ptr(), idx.to_c_uint())
})
.expect(MALLOC_FAIL_ONLY);
}
polymorphized(self, idx.try_into().ok());
}
/// Fill the bitmap, then unset only index `idx`
///
/// # Panics
///
/// If `idx` is outside the 0..=c_int::MAX range that hwloc accepts
#[doc(alias = "hwloc_bitmap_allbut")]
pub fn set_all_but<Idx>(&mut self, idx: Idx)
where
Idx: TryInto<BitmapIndex>,
<Idx as TryInto<BitmapIndex>>::Error: Debug,
{
fn polymorphized(self_: &mut Bitmap, idx: Option<BitmapIndex>) {
let idx = idx.expect(BAD_INDEX);
// SAFETY: Valid bitmap pointer and in-range index
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_allbut", || unsafe {
hwlocality_sys::hwloc_bitmap_allbut(self_.as_mut_ptr(), idx.to_c_uint())
})
.expect(MALLOC_FAIL_ONLY);
}
polymorphized(self, idx.try_into().ok());
}
/// Set index `idx`
///
/// # Panics
///
/// If `idx` is outside the 0..=c_int::MAX range that hwloc accepts
#[doc(alias = "hwloc_bitmap_set")]
pub fn set<Idx>(&mut self, idx: Idx)
where
Idx: TryInto<BitmapIndex>,
<Idx as TryInto<BitmapIndex>>::Error: Debug,
{
fn polymorphized(self_: &mut Bitmap, idx: Option<BitmapIndex>) {
let idx = idx.expect(BAD_INDEX);
// SAFETY: Valid bitmap pointer and in-range index
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_set", || unsafe {
hwlocality_sys::hwloc_bitmap_set(self_.as_mut_ptr(), idx.to_c_uint())
})
.expect(MALLOC_FAIL_ONLY);
}
polymorphized(self, idx.try_into().ok());
}
/// Set all indices covered by `range`
///
/// # Panics
///
/// If a bound of `range` is outside the 0..=c_int::MAX range that hwloc
/// accepts
#[doc(alias = "hwloc_bitmap_set_range")]
pub fn set_range<Idx>(&mut self, range: impl RangeBounds<Idx>)
where
Idx: Copy + TryInto<BitmapIndex>,
<Idx as TryInto<BitmapIndex>>::Error: Debug,
{
// SAFETY: (begin, end) must be a valid hwloc (first, last) pair as
//         produced by Self::hwloc_range() (end == -1 means infinite)
unsafe fn polymorphized(self_: &mut Bitmap, (begin, end): (c_uint, c_int)) {
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_set_range", || unsafe {
hwlocality_sys::hwloc_bitmap_set_range(self_.as_mut_ptr(), begin, end)
})
.expect(MALLOC_FAIL_ONLY);
}
// SAFETY: hwloc_range() emits a valid hwloc range
unsafe { polymorphized(self, Self::hwloc_range(range)) };
}
/// Unset index `idx`
///
/// # Panics
///
/// If `idx` is outside the 0..=c_int::MAX range that hwloc accepts
#[doc(alias = "hwloc_bitmap_clr")]
pub fn unset<Idx>(&mut self, idx: Idx)
where
Idx: TryInto<BitmapIndex>,
<Idx as TryInto<BitmapIndex>>::Error: Debug,
{
fn polymorphized(self_: &mut Bitmap, idx: Option<BitmapIndex>) {
let idx = idx.expect(BAD_INDEX);
// SAFETY: Valid bitmap pointer and in-range index
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_clr", || unsafe {
hwlocality_sys::hwloc_bitmap_clr(self_.as_mut_ptr(), idx.to_c_uint())
})
.expect(MALLOC_FAIL_ONLY);
}
polymorphized(self, idx.try_into().ok());
}
/// Unset all indices covered by `range`
///
/// # Panics
///
/// If a bound of `range` is outside the 0..=c_int::MAX range that hwloc
/// accepts
#[doc(alias = "hwloc_bitmap_clr_range")]
pub fn unset_range<Idx>(&mut self, range: impl RangeBounds<Idx>)
where
Idx: Copy + TryInto<BitmapIndex>,
<Idx as TryInto<BitmapIndex>>::Error: Debug,
{
// SAFETY: (begin, end) must come from Self::hwloc_range()
unsafe fn polymorphized(self_: &mut Bitmap, (begin, end): (c_uint, c_int)) {
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_clr_range", || unsafe {
hwlocality_sys::hwloc_bitmap_clr_range(self_.as_mut_ptr(), begin, end)
})
.expect(MALLOC_FAIL_ONLY);
}
// SAFETY: hwloc_range() emits a valid hwloc range
unsafe { polymorphized(self, Self::hwloc_range(range)) };
}
/// Reduce the bitmap to at most one set index
/// (NOTE(review): presumably a no-op on empty bitmaps — confirm against
/// hwloc_bitmap_singlify docs; the tests below only exercise weight > 0)
#[doc(alias = "hwloc_bitmap_singlify")]
pub fn singlify(&mut self) {
// SAFETY: Valid bitmap pointer
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_singlify", || unsafe {
hwlocality_sys::hwloc_bitmap_singlify(self.as_mut_ptr())
})
.expect(MALLOC_FAIL_ONLY);
}
/// Truth that index `idx` is set
///
/// # Panics
///
/// If `idx` is outside the 0..=c_int::MAX range that hwloc accepts
#[doc(alias = "hwloc_bitmap_isset")]
pub fn is_set<Idx>(&self, idx: Idx) -> bool
where
Idx: TryInto<BitmapIndex>,
<Idx as TryInto<BitmapIndex>>::Error: Debug,
{
fn polymorphized(self_: &Bitmap, idx: Option<BitmapIndex>) -> bool {
let idx = idx.expect(BAD_INDEX);
// SAFETY: Valid bitmap pointer and in-range index
errors::call_hwloc_bool("hwloc_bitmap_isset", || unsafe {
hwlocality_sys::hwloc_bitmap_isset(self_.as_ptr(), idx.to_c_uint())
})
.expect(MALLOC_FAIL_ONLY)
}
polymorphized(self, idx.try_into().ok())
}
/// Truth that no index is set
#[doc(alias = "hwloc_bitmap_iszero")]
pub fn is_empty(&self) -> bool {
// SAFETY: Valid bitmap pointer
errors::call_hwloc_bool("hwloc_bitmap_iszero", || unsafe {
hwlocality_sys::hwloc_bitmap_iszero(self.as_ptr())
})
.expect(SHOULD_NOT_FAIL)
}
/// Truth that all indices are set
#[doc(alias = "hwloc_bitmap_isfull")]
pub fn is_full(&self) -> bool {
// SAFETY: Valid bitmap pointer
errors::call_hwloc_bool("hwloc_bitmap_isfull", || unsafe {
hwlocality_sys::hwloc_bitmap_isfull(self.as_ptr())
})
.expect(SHOULD_NOT_FAIL)
}
/// Lowest set index, or None if the bitmap is empty
#[doc(alias = "hwloc_bitmap_first")]
pub fn first_set(&self) -> Option<BitmapIndex> {
self.query_index(
"hwloc_bitmap_first",
// SAFETY: Valid bitmap pointer
|| unsafe { hwlocality_sys::hwloc_bitmap_first(self.as_ptr()) },
)
}
/// Iterate over set indices in increasing order
///
/// Beware: this iterator is infinite on infinitely-set bitmaps, which is
/// why test builds assert that such iteration was explicitly opted into
/// via `allow_infinite_iteration()`.
#[doc(alias = "hwloc_bitmap_foreach_begin")]
#[doc(alias = "hwloc_bitmap_foreach_end")]
#[doc(alias = "hwloc_bitmap_next")]
pub fn iter_set(&self) -> Iter<&Self> {
#[cfg(test)]
{
assert!(
ALLOW_INFINITE_ITERATION.get() || self.last_set().is_some() || self.is_empty(),
"Test code initiated infinite bitmap set index iteration without asserting support for it"
);
}
Iter::new(self, Self::next_set)
}
/// Highest set index, or None if the bitmap is empty or infinitely set
#[doc(alias = "hwloc_bitmap_last")]
pub fn last_set(&self) -> Option<BitmapIndex> {
self.query_index(
"hwloc_bitmap_last",
// SAFETY: Valid bitmap pointer
|| unsafe { hwlocality_sys::hwloc_bitmap_last(self.as_ptr()) },
)
}
/// Number of set indices, or None if infinitely many indices are set
#[doc(alias = "hwloc_bitmap_weight")]
pub fn weight(&self) -> Option<usize> {
let result = errors::call_hwloc_int_raw(
"hwloc_bitmap_weight",
// SAFETY: Valid bitmap pointer
|| unsafe { hwlocality_sys::hwloc_bitmap_weight(self.as_ptr()) },
-1,
)
.expect(SHOULD_NOT_FAIL);
// A negative result (infinitely many set indices) maps to None here
usize::try_from(result).ok()
}
/// Lowest unset index, or None if the bitmap is full
#[doc(alias = "hwloc_bitmap_first_unset")]
pub fn first_unset(&self) -> Option<BitmapIndex> {
self.query_index(
"hwloc_bitmap_first_unset",
// SAFETY: Valid bitmap pointer
|| unsafe { hwlocality_sys::hwloc_bitmap_first_unset(self.as_ptr()) },
)
}
/// Iterate over unset indices in increasing order
///
/// Beware: this iterator is infinite unless the bitmap is infinitely set,
/// hence the same test-build guard as in `iter_set()`.
#[doc(alias = "hwloc_bitmap_next_unset")]
pub fn iter_unset(&self) -> Iter<&Self> {
#[cfg(test)]
{
assert!(
ALLOW_INFINITE_ITERATION.get() || self.last_unset().is_some() || self.is_full(),
"Test code initiated infinite bitmap unset index iteration without asserting support for it"
);
}
Iter::new(self, Self::next_unset)
}
/// Highest unset index, or None if the bitmap is full or finitely set
#[doc(alias = "hwloc_bitmap_last_unset")]
pub fn last_unset(&self) -> Option<BitmapIndex> {
self.query_index(
"hwloc_bitmap_last_unset",
// SAFETY: Valid bitmap pointer
|| unsafe { hwlocality_sys::hwloc_bitmap_last_unset(self.as_ptr()) },
)
}
/// Complement the bitmap in place
/// (NOTE(review): passes the same bitmap as hwloc input and output —
/// presumably supported by hwloc_bitmap_not, confirm against its docs)
pub fn invert(&mut self) {
// SAFETY: Valid bitmap pointer
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_not", || unsafe {
hwlocality_sys::hwloc_bitmap_not(self.as_mut_ptr(), self.as_ptr())
})
.expect(MALLOC_FAIL_ONLY);
}
/// Truth that `self` and `rhs` have at least one set index in common
#[doc(alias = "hwloc_bitmap_intersects")]
pub fn intersects(&self, rhs: impl Deref<Target = Self>) -> bool {
fn polymorphized(self_: &Bitmap, rhs: &Bitmap) -> bool {
// SAFETY: Both pointers designate valid hwloc bitmaps
errors::call_hwloc_bool("hwloc_bitmap_intersects", || unsafe {
hwlocality_sys::hwloc_bitmap_intersects(self_.as_ptr(), rhs.as_ptr())
})
.expect(SHOULD_NOT_FAIL)
}
polymorphized(self, &rhs)
}
/// Truth that every set index of `inner` is also set in `self`
///
/// Note the operand swap: `self.includes(inner)` maps to the hwloc call
/// `hwloc_bitmap_isincluded(inner, self)`.
#[doc(alias = "hwloc_bitmap_isincluded")]
pub fn includes(&self, inner: impl Deref<Target = Self>) -> bool {
fn polymorphized(self_: &Bitmap, inner: &Bitmap) -> bool {
// SAFETY: Both pointers designate valid hwloc bitmaps
errors::call_hwloc_bool("hwloc_bitmap_isincluded", || unsafe {
hwlocality_sys::hwloc_bitmap_isincluded(inner.as_ptr(), self_.as_ptr())
})
.expect(SHOULD_NOT_FAIL)
}
polymorphized(self, &inner)
}
/// Translate a Rust range into hwloc's (first, last) convention, where
/// last == -1 encodes an unbounded (infinite) range
///
/// If an Excluded bound over/underflows during translation, the range is
/// logically empty, encoded as the canonical empty pair (1, 0).
///
/// # Panics
///
/// If a bound does not fit in the 0..=c_int::MAX range hwloc accepts
fn hwloc_range<Idx>(range: impl RangeBounds<Idx>) -> (c_uint, c_int)
where
Idx: Copy + TryInto<BitmapIndex>,
<Idx as TryInto<BitmapIndex>>::Error: Debug,
{
// Translation once bounds have been converted to BitmapIndex;
// returns None when an Excluded bound over/underflows (empty range)
fn translate_bounds_literal(
start: Option<Bound<BitmapIndex>>,
end: Option<Bound<BitmapIndex>>,
) -> Option<(c_uint, c_int)> {
let start = start.expect("Range start is out of the accepted 0..=c_int::MAX range");
let start = match start {
Bound::Unbounded => BitmapIndex::MIN,
Bound::Included(i) => i,
Bound::Excluded(i) => i.checked_add_signed(1)?,
};
let end = end.expect("Range end is out of the accepted 0..=c_int::MAX range");
let end = match end {
Bound::Unbounded => -1,
Bound::Included(i) => i.to_c_int(),
Bound::Excluded(i) => i.checked_add_signed(-1)?.to_c_int(),
};
Some((start.to_c_uint(), end))
}
// Idx -> BitmapIndex conversion; None means the bound is out of range
let convert_idx = |idx: &Idx| (*idx).try_into().ok();
let convert_bound = |bound: Bound<&Idx>| match bound {
Bound::Unbounded => Some(Bound::Unbounded),
Bound::Included(idx) => convert_idx(idx).map(Bound::Included),
Bound::Excluded(idx) => convert_idx(idx).map(Bound::Excluded),
};
translate_bounds_literal(
convert_bound(range.start_bound()),
convert_bound(range.end_bound()),
)
.unwrap_or((1, 0))
}
/// Run an hwloc index query, mapping its -1 "no such index" result to None
fn query_index(&self, api: &'static str, call: impl FnOnce() -> c_int) -> Option<BitmapIndex> {
let result = errors::call_hwloc_int_raw(api, call, -1).expect(SHOULD_NOT_FAIL);
BitmapIndex::try_from_c_int(result).ok()
}
/// Query the index that comes after `index` (None = start from scratch)
///
/// # Safety
///
/// `next_fn` must follow the calling convention of hwloc_bitmap_next():
/// take (bitmap, previous index or -1) and return the next index or -1
unsafe fn next(
&self,
api: &'static str,
next_fn: impl FnOnce(*const hwloc_bitmap_s, c_int) -> c_int,
index: Option<BitmapIndex>,
) -> Option<BitmapIndex> {
self.query_index(api, || {
next_fn(self.as_ptr(), index.map_or(-1, BitmapIndex::to_c_int))
})
}
/// Set index that comes after `index`, if any (None = lowest set index)
fn next_set(&self, index: Option<BitmapIndex>) -> Option<BitmapIndex> {
// SAFETY: hwloc_bitmap_next follows the expected calling convention
unsafe {
self.next(
"hwloc_bitmap_next",
|bitmap, prev| hwlocality_sys::hwloc_bitmap_next(bitmap, prev),
index,
)
}
}
/// Unset index that comes after `index`, if any (None = lowest unset index)
fn next_unset(&self, index: Option<BitmapIndex>) -> Option<BitmapIndex> {
// SAFETY: hwloc_bitmap_next_unset follows the expected calling convention
unsafe {
self.next(
"hwloc_bitmap_next_unset",
|bitmap, prev| hwlocality_sys::hwloc_bitmap_next_unset(bitmap, prev),
index,
)
}
}
}
/// Panic message when a user-provided index does not fit hwloc's c_int range
const BAD_INDEX: &str = "Bitmap index is out of the accepted 0..=c_int::MAX range";
/// Panic message for hwloc calls whose only failure mode is malloc failure
const MALLOC_FAIL_ONLY: &str =
"This operation should only fail on malloc failure, which is a panic in Rust";
/// Panic message for hwloc calls that are not expected to fail at all
const SHOULD_NOT_FAIL: &str = "This operation has no known failure mode";
#[cfg(test)]
thread_local! {
/// Test-only flag recording that iteration over infinite bitmap index
/// iterators is intentional (set via allow_infinite_iteration())
static ALLOW_INFINITE_ITERATION: Cell<bool> = const { Cell::new(false) };
}
/// Declare that `scope` knowingly iterates over potentially-infinite bitmap
/// index iterators (and bounds the iteration itself), disabling the
/// test-build infinite-iteration assertion for its duration.
///
/// Outside test builds this is a plain passthrough to `scope`.
///
/// # Panics
///
/// (test builds only) If calls to this function are nested
pub(crate) fn allow_infinite_iteration<R>(scope: impl FnOnce() -> R) -> R {
    // Drop guard that re-arms the assertion even if `scope` panics. This
    // matters because proptest catches panics and keeps running more test
    // cases on the same thread: without unwind safety, one panicking case
    // would leave the flag set, silently disabling the guard and tripping
    // the nesting assertion in later cases.
    #[cfg(test)]
    struct ResetOnDrop;
    #[cfg(test)]
    impl Drop for ResetOnDrop {
        fn drop(&mut self) {
            ALLOW_INFINITE_ITERATION.set(false);
        }
    }
    #[cfg(test)]
    let _reset = {
        assert!(
            !ALLOW_INFINITE_ITERATION.get(),
            "Don't nest allow_infinite_iteration()"
        );
        ALLOW_INFINITE_ITERATION.set(true);
        ResetOnDrop
    };
    scope()
}
#[cfg(any(test, feature = "proptest"))]
impl Arbitrary for Bitmap {
type Parameters = ();
// Fully spelled-out strategy type: Arbitrary requires a nameable
// associated Strategy type, so no `impl Strategy` shorthand here
type Strategy = prop::strategy::Map<
(
prop::strategy::TupleUnion<(
prop::strategy::WA<Just<HashSet<BitmapIndex>>>,
prop::strategy::WA<
prop::collection::HashSetStrategy<crate::strategies::BitmapIndexStrategy>,
>,
)>,
prop::bool::Any,
),
fn((HashSet<BitmapIndex>, bool)) -> Self,
>;
fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
use crate::strategies::bitmap_index;
use prop::collection::SizeRange;
// Weighted towards non-empty index sets, with empty still reachable
let index_set = prop_oneof![
1 => Just(HashSet::new()),
4 => prop::collection::hash_set(bitmap_index(), SizeRange::default()),
];
// Randomly invert so that infinitely-set bitmaps are also covered
(index_set, prop::bool::ANY).prop_map(|(set_indices, invert)| {
let mut result = set_indices.into_iter().collect::<Self>();
if invert {
result.invert();
}
result
})
}
}
impl<B: Borrow<Bitmap>> BitAnd<B> for &Bitmap {
    type Output = Bitmap;
    /// Intersection of two bitmaps, as a new bitmap
    #[doc(alias = "hwloc_bitmap_and")]
    fn bitand(self, rhs: B) -> Bitmap {
        // Non-generic core so the FFI glue is only compiled once
        fn and_impl(lhs: &Bitmap, rhs: &Bitmap) -> Bitmap {
            let mut output = Bitmap::new();
            // SAFETY: All three pointers designate valid hwloc bitmaps
            errors::call_hwloc_zero_or_minus1("hwloc_bitmap_and", || unsafe {
                hwlocality_sys::hwloc_bitmap_and(output.as_mut_ptr(), lhs.as_ptr(), rhs.as_ptr())
            })
            .expect(MALLOC_FAIL_ONLY);
            output
        }
        and_impl(self, rhs.borrow())
    }
}
impl<B: Borrow<Self>> BitAnd<B> for Bitmap {
type Output = Self;
fn bitand(mut self, rhs: B) -> Self {
self &= rhs.borrow();
self
}
}
impl<B: Borrow<Self>> BitAndAssign<B> for Bitmap {
/// In-place intersection with `rhs`
fn bitand_assign(&mut self, rhs: B) {
// Non-generic inner fn: FFI glue compiled once, not per rhs type
fn polymorphized(self_: &mut Bitmap, rhs: &Bitmap) {
// SAFETY: Valid bitmap pointers; self is both output and input
// (NOTE(review): aliased in/out presumed supported by hwloc
// bitmap ops, as invert() relies on the same — confirm)
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_and", || unsafe {
hwlocality_sys::hwloc_bitmap_and(self_.as_mut_ptr(), self_.as_ptr(), rhs.as_ptr())
})
.expect(MALLOC_FAIL_ONLY);
}
polymorphized(self, rhs.borrow());
}
}
impl<B: Borrow<Bitmap>> BitOr<B> for &Bitmap {
    type Output = Bitmap;
    /// Union of two bitmaps, as a new bitmap
    #[doc(alias = "hwloc_bitmap_or")]
    fn bitor(self, rhs: B) -> Bitmap {
        // Non-generic core so the FFI glue is only compiled once
        fn or_impl(lhs: &Bitmap, rhs: &Bitmap) -> Bitmap {
            let mut output = Bitmap::new();
            // SAFETY: All three pointers designate valid hwloc bitmaps
            errors::call_hwloc_zero_or_minus1("hwloc_bitmap_or", || unsafe {
                hwlocality_sys::hwloc_bitmap_or(output.as_mut_ptr(), lhs.as_ptr(), rhs.as_ptr())
            })
            .expect(MALLOC_FAIL_ONLY);
            output
        }
        or_impl(self, rhs.borrow())
    }
}
impl<B: Borrow<Self>> BitOr<B> for Bitmap {
type Output = Self;
fn bitor(mut self, rhs: B) -> Self {
self |= rhs.borrow();
self
}
}
impl<B: Borrow<Self>> BitOrAssign<B> for Bitmap {
/// In-place union with `rhs`
fn bitor_assign(&mut self, rhs: B) {
// Non-generic inner fn: FFI glue compiled once, not per rhs type
fn polymorphized(self_: &mut Bitmap, rhs: &Bitmap) {
// SAFETY: Valid bitmap pointers; self is both output and input
// (NOTE(review): aliased in/out presumed supported — confirm)
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_or", || unsafe {
hwlocality_sys::hwloc_bitmap_or(self_.as_mut_ptr(), self_.as_ptr(), rhs.as_ptr())
})
.expect(MALLOC_FAIL_ONLY);
}
polymorphized(self, rhs.borrow());
}
}
impl<B: Borrow<Bitmap>> BitXor<B> for &Bitmap {
    type Output = Bitmap;
    /// Symmetric difference of two bitmaps, as a new bitmap
    #[doc(alias = "hwloc_bitmap_xor")]
    fn bitxor(self, rhs: B) -> Bitmap {
        // Non-generic core so the FFI glue is only compiled once
        fn xor_impl(lhs: &Bitmap, rhs: &Bitmap) -> Bitmap {
            let mut output = Bitmap::new();
            // SAFETY: All three pointers designate valid hwloc bitmaps
            errors::call_hwloc_zero_or_minus1("hwloc_bitmap_xor", || unsafe {
                hwlocality_sys::hwloc_bitmap_xor(output.as_mut_ptr(), lhs.as_ptr(), rhs.as_ptr())
            })
            .expect(MALLOC_FAIL_ONLY);
            output
        }
        xor_impl(self, rhs.borrow())
    }
}
impl<B: Borrow<Self>> BitXor<B> for Bitmap {
type Output = Self;
fn bitxor(mut self, rhs: B) -> Self {
self ^= rhs.borrow();
self
}
}
impl<B: Borrow<Self>> BitXorAssign<B> for Bitmap {
/// In-place symmetric difference with `rhs`
fn bitxor_assign(&mut self, rhs: B) {
// Non-generic inner fn: FFI glue compiled once, not per rhs type
fn polymorphized(self_: &mut Bitmap, rhs: &Bitmap) {
// SAFETY: Valid bitmap pointers; self is both output and input
// (NOTE(review): aliased in/out presumed supported — confirm)
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_xor", || unsafe {
hwlocality_sys::hwloc_bitmap_xor(self_.as_mut_ptr(), self_.as_ptr(), rhs.as_ptr())
})
.expect(MALLOC_FAIL_ONLY);
}
polymorphized(self, rhs.borrow());
}
}
impl Clone for Bitmap {
/// Duplicate the bitmap via hwloc_bitmap_dup
///
/// # Panics
///
/// On allocation failure, like Rust allocators do
#[doc(alias = "hwloc_bitmap_dup")]
fn clone(&self) -> Self {
// SAFETY: self.as_ptr() is a valid bitmap, and the duplicate returned
//         by hwloc is owned by us
unsafe {
let ptr = errors::call_hwloc_ptr_mut("hwloc_bitmap_dup", || {
hwlocality_sys::hwloc_bitmap_dup(self.as_ptr())
})
.expect(MALLOC_FAIL_ONLY);
Self::from_owned_nonnull(ptr)
}
}
}
impl Debug for Bitmap {
    /// Debug output mirrors Display, which is the most readable
    /// rendition of a bitmap's contents
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        Display::fmt(self, f)
    }
}
impl Default for Bitmap {
fn default() -> Self {
Self::new()
}
}
impl Display for Bitmap {
/// Render the bitmap in hwloc's list format: "" for empty bitmaps,
/// "0-" for the full bitmap (a trailing "-" denotes an infinite range)
#[doc(alias = "hwloc_bitmap_list_snprintf")]
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
// SAFETY: write_snprintf provides a buffer/length pair matching the
//         snprintf-style contract of hwloc_bitmap_list_snprintf
unsafe {
ffi::write_snprintf(f, |buf, len| {
hwlocality_sys::hwloc_bitmap_list_snprintf(buf, len, self.as_ptr())
})
}
}
}
impl Drop for Bitmap {
/// Free the underlying hwloc bitmap
#[doc(alias = "hwloc_bitmap_free")]
fn drop(&mut self) {
// SAFETY: We own the bitmap and it will never be used again
unsafe { hwlocality_sys::hwloc_bitmap_free(self.as_mut_ptr()) }
}
}
// Equality via hwloc_bitmap_isequal is a full equivalence relation
impl Eq for Bitmap {}
impl<BI: Borrow<BitmapIndex>> Extend<BI> for Bitmap {
    /// Set every index yielded by the iterator
    fn extend<T: IntoIterator<Item = BI>>(&mut self, iter: T) {
        iter.into_iter().for_each(|idx| self.set(*idx.borrow()));
    }
}
impl<BI: Borrow<BitmapIndex>> From<BI> for Bitmap {
    /// Build a bitmap holding exactly one index
    fn from(value: BI) -> Self {
        let mut bitmap = Self::new();
        bitmap.set(*value.borrow());
        bitmap
    }
}
impl<BI: Borrow<BitmapIndex>> FromIterator<BI> for Bitmap {
    /// Collect a set of indices into a bitmap
    fn from_iter<I: IntoIterator<Item = BI>>(iter: I) -> Self {
        let mut result = Self::new();
        result.extend(iter);
        result
    }
}
impl Hash for Bitmap {
/// Hash the bitmap's contents such that equal bitmaps hash identically,
/// including infinitely-set bitmaps
///
/// The finite prefix up to the last "significant" index is hashed, then
/// a flag telling whether the infinite tail beyond it is set or unset.
fn hash<H: hash::Hasher>(&self, state: &mut H) {
// weight() is None iff infinitely many indices are set
let is_infinitely_set = self.weight().is_none();
// Last index at which set/unset status can still change; beyond it
// the bitmap is uniformly set or unset forever
let last_significant_index = self
.last_set()
.or_else(|| self.last_unset())
.unwrap_or(BitmapIndex::MIN);
// iter_set() may be infinite, but take_while bounds the traversal
allow_infinite_iteration(|| {
for index in self
.iter_set()
.take_while(|&idx| idx <= last_significant_index)
{
index.hash(state);
}
});
is_infinitely_set.hash(state);
}
}
/// Iterator over the set or unset indices of a [`Bitmap`], in increasing order
///
/// `B` is any borrow-of-Bitmap: owned `Bitmap`, `&Bitmap`, etc.
#[derive(Copy, Clone, Debug, Hash)]
pub struct Iter<B> {
/// Bitmap being iterated over
bitmap: B,
/// Previously yielded index, or None if iteration has not yielded one
prev: Option<BitmapIndex>,
/// Steps from one index to the next (None = start from the beginning)
next: fn(&Bitmap, Option<BitmapIndex>) -> Option<BitmapIndex>,
}
impl<B> Iter<B> {
    /// Start iterating `bitmap`, using `next` to step from one index to
    /// the following one (a `None` input meaning "from the beginning")
    fn new(bitmap: B, next: fn(&Bitmap, Option<BitmapIndex>) -> Option<BitmapIndex>) -> Self {
        Self {
            prev: None,
            bitmap,
            next,
        }
    }
}
impl<B: Borrow<Bitmap>> Iterator for Iter<B> {
    type Item = BitmapIndex;
    /// Yield the next index in increasing order, or None once exhausted
    fn next(&mut self) -> Option<BitmapIndex> {
        let found = (self.next)(self.bitmap.borrow(), self.prev);
        self.prev = found;
        if found.is_none() {
            // Bug fix: `prev == None` means both "not started yet" and
            // "just exhausted", so querying hwloc again after exhaustion
            // would restart from the first index and the iterator would
            // cycle, violating the FusedIterator impl below. Latch the
            // exhausted state by swapping in a stepper that always
            // reports the end of iteration.
            self.next = |_, _| None;
        }
        found
    }
}
// NOTE(review): the fused contract requires next() to keep returning None
// after exhaustion — verify that iteration cannot restart from prev == None
impl<B: Borrow<Bitmap>> FusedIterator for Iter<B> {}
impl IntoIterator for &Bitmap {
type Item = BitmapIndex;
type IntoIter = Iter<Self>;
/// Iterate over set indices, borrowing the bitmap
fn into_iter(self) -> Self::IntoIter {
// Test builds catch accidental iteration over infinite bitmaps
#[cfg(test)]
{
assert!(
ALLOW_INFINITE_ITERATION.get() || self.last_set().is_some() || self.is_empty(),
"Test code initiated infinite bitmap set index iteration without asserting support for it"
);
}
Iter::new(self, Bitmap::next_set)
}
}
impl IntoIterator for Bitmap {
type Item = BitmapIndex;
type IntoIter = Iter<Self>;
/// Iterate over set indices, consuming the bitmap
fn into_iter(self) -> Self::IntoIter {
// Test builds catch accidental iteration over infinite bitmaps
#[cfg(test)]
{
assert!(
ALLOW_INFINITE_ITERATION.get() || self.last_set().is_some() || self.is_empty(),
"Test code initiated infinite bitmap set index iteration without asserting support for it"
);
}
Iter::new(self, Self::next_set)
}
}
impl Not for &Bitmap {
    type Output = Bitmap;
    /// Complement of the bitmap, as a new bitmap
    #[doc(alias = "hwloc_bitmap_not")]
    fn not(self) -> Bitmap {
        let mut output = Bitmap::new();
        // SAFETY: Both pointers designate valid hwloc bitmaps
        errors::call_hwloc_zero_or_minus1("hwloc_bitmap_not", || unsafe {
            hwlocality_sys::hwloc_bitmap_not(output.as_mut_ptr(), self.as_ptr())
        })
        .expect(MALLOC_FAIL_ONLY);
        output
    }
}
impl Not for Bitmap {
    type Output = Self;
    /// Complement, reusing self's allocation via the in-place invert()
    fn not(mut self) -> Self {
        Self::invert(&mut self);
        self
    }
}
impl Ord for Bitmap {
/// Total order over bitmaps, as defined by hwloc_bitmap_compare
#[doc(alias = "hwloc_bitmap_compare")]
fn cmp(&self, other: &Self) -> Ordering {
// SAFETY: Both pointers designate valid hwloc bitmaps
let result = unsafe { hwlocality_sys::hwloc_bitmap_compare(self.as_ptr(), other.as_ptr()) };
// NOTE(review): assumes hwloc_bitmap_compare only ever returns
// -1/0/1, per its documentation — any other value is a panic
match result {
-1 => Ordering::Less,
0 => Ordering::Equal,
1 => Ordering::Greater,
_ => unreachable!("hwloc_bitmap_compare returned unexpected result {result}"),
}
}
}
impl<B: Borrow<Self>> PartialEq<B> for Bitmap {
    /// Content equality, as defined by hwloc_bitmap_isequal
    #[doc(alias = "hwloc_bitmap_isequal")]
    fn eq(&self, other: &B) -> bool {
        // Non-generic core so the FFI glue is only compiled once
        fn eq_impl(lhs: &Bitmap, rhs: &Bitmap) -> bool {
            // SAFETY: Both pointers designate valid hwloc bitmaps
            errors::call_hwloc_bool("hwloc_bitmap_isequal", || unsafe {
                hwlocality_sys::hwloc_bitmap_isequal(lhs.as_ptr(), rhs.as_ptr())
            })
            .expect(SHOULD_NOT_FAIL)
        }
        eq_impl(self, other.borrow())
    }
}
impl<B: Borrow<Self>> PartialOrd<B> for Bitmap {
    /// Delegates to the total order defined by Ord
    fn partial_cmp(&self, other: &B) -> Option<Ordering> {
        Some(Ord::cmp(self, other.borrow()))
    }
}
impl Pointer for Bitmap {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
<NonNull<hwloc_bitmap_s> as fmt::Pointer>::fmt(&self.0, f)
}
}
// SAFETY(review): Bitmap exclusively owns its hwloc bitmap, and the FFI
// calls above only touch the bitmaps passed as arguments, so moving it to
// another thread is presumed sound — confirm against hwloc thread-safety docs
unsafe impl Send for Bitmap {}
impl<B: Borrow<Bitmap>> Sub<B> for &Bitmap {
    type Output = Bitmap;
    /// Set difference (indices of self that are not in rhs), as a new bitmap
    #[doc(alias = "hwloc_bitmap_andnot")]
    fn sub(self, rhs: B) -> Bitmap {
        // Non-generic core so the FFI glue is only compiled once
        fn sub_impl(lhs: &Bitmap, rhs: &Bitmap) -> Bitmap {
            let mut output = Bitmap::new();
            // SAFETY: All three pointers designate valid hwloc bitmaps
            errors::call_hwloc_zero_or_minus1("hwloc_bitmap_andnot", || unsafe {
                hwlocality_sys::hwloc_bitmap_andnot(
                    output.as_mut_ptr(),
                    lhs.as_ptr(),
                    rhs.as_ptr(),
                )
            })
            .expect(MALLOC_FAIL_ONLY);
            output
        }
        sub_impl(self, rhs.borrow())
    }
}
impl<B: Borrow<Self>> Sub<B> for Bitmap {
type Output = Self;
fn sub(mut self, rhs: B) -> Self {
self -= rhs.borrow();
self
}
}
impl<B: Borrow<Self>> SubAssign<B> for Bitmap {
/// In-place set difference: unset every index that is set in `rhs`
fn sub_assign(&mut self, rhs: B) {
// Non-generic inner fn: FFI glue compiled once, not per rhs type
fn polymorphized(self_: &mut Bitmap, rhs: &Bitmap) {
// SAFETY: Valid bitmap pointers; self is both output and input
// (NOTE(review): aliased in/out presumed supported — confirm)
errors::call_hwloc_zero_or_minus1("hwloc_bitmap_andnot", || unsafe {
hwlocality_sys::hwloc_bitmap_andnot(
self_.as_mut_ptr(),
self_.as_ptr(),
rhs.as_ptr(),
)
})
.expect(MALLOC_FAIL_ONLY);
}
polymorphized(self, rhs.borrow());
}
}
// SAFETY(review): &Bitmap only exposes methods that pass const pointers to
// hwloc query functions, so concurrent shared access is presumed sound —
// confirm that hwloc bitmap queries do not mutate internal state
unsafe impl Sync for Bitmap {}
#[allow(clippy::cognitive_complexity, clippy::op_ref, clippy::too_many_lines)]
#[cfg(test)]
pub(crate) mod tests {
use super::reference::tests::{test_bitmap_ref_binops, test_bitmap_ref_unary};
use super::*;
use crate::strategies::bitmap_index;
use proptest::sample::SizeRange;
#[allow(unused)]
use similar_asserts::assert_eq;
use static_assertions::{
assert_eq_align, assert_eq_size, assert_impl_all, assert_not_impl_any, assert_type_eq_all,
};
use std::{
collections::hash_map::RandomState,
error::Error,
ffi::c_ulonglong,
fmt::{Binary, LowerExp, LowerHex, Octal, UpperExp, UpperHex, Write},
hash::BuildHasher,
io::{self, Read},
mem::ManuallyDrop,
ops::{RangeFrom, RangeInclusive},
panic::UnwindSafe,
ptr,
};
assert_eq_align!(Bitmap, NonNull<hwloc_bitmap_s>);
assert_eq_size!(Bitmap, NonNull<hwloc_bitmap_s>);
assert_impl_all!(Bitmap:
BitAnd<Bitmap>, BitAnd<&'static Bitmap>,
BitAndAssign<Bitmap>, BitAndAssign<&'static Bitmap>,
BitOr<Bitmap>, BitOr<&'static Bitmap>,
BitOrAssign<Bitmap>, BitOrAssign<&'static Bitmap>,
BitXor<Bitmap>, BitXor<&'static Bitmap>,
BitXorAssign<Bitmap>, BitXorAssign<&'static Bitmap>,
Clone, Debug, Default, Display,
Extend<BitmapIndex>, Extend<&'static BitmapIndex>,
From<BitmapIndex>, From<&'static BitmapIndex>,
FromIterator<BitmapIndex>, FromIterator<&'static BitmapIndex>,
Hash, IntoIterator<Item=BitmapIndex>, Not, Ord, OwnedBitmap,
PartialEq<&'static Bitmap>,
PartialOrd<&'static Bitmap>,
Pointer, Sized,
Sub<Bitmap>, Sub<&'static Bitmap>,
SubAssign<Bitmap>, SubAssign<&'static Bitmap>,
Sync, Unpin, UnwindSafe
);
assert_not_impl_any!(Bitmap:
Binary, Copy, Deref, Error, LowerExp, LowerHex, Octal, Read, UpperExp,
UpperHex, fmt::Write, io::Write
);
assert_impl_all!(&Bitmap:
BitAnd<Bitmap>, BitAnd<&'static Bitmap>,
BitOr<Bitmap>, BitOr<&'static Bitmap>,
BitXor<Bitmap>, BitXor<&'static Bitmap>,
IntoIterator<Item=BitmapIndex>,
Not<Output=Bitmap>,
Sub<Bitmap>, Sub<&'static Bitmap>,
);
assert_type_eq_all!(BitmapIndex, PositiveInt);
assert_impl_all!(Iter<Bitmap>:
Clone, Debug, FusedIterator<Item=BitmapIndex>, Hash, Sized, Sync, Unpin,
UnwindSafe
);
assert_not_impl_any!(Iter<Bitmap>:
Binary, Copy, Default, Deref, Display, LowerExp, LowerHex, Octal,
Pointer, Read, UpperExp, UpperHex, fmt::Write, io::Write
);
assert_impl_all!(Iter<&Bitmap>:
Copy, Debug, FusedIterator<Item=BitmapIndex>, Hash, Sized, Sync, Unpin,
UnwindSafe
);
assert_not_impl_any!(Iter<&Bitmap>:
Binary, Default, Deref, Display, LowerExp, LowerHex, Octal, Pointer,
Read, UpperExp, UpperHex, fmt::Write, io::Write
);
pub(crate) const INFINITE_EXPLORE_ITERS: usize = std::mem::size_of::<c_ulonglong>() * 8;
fn range_to_usize(range: &RangeInclusive<BitmapIndex>) -> RangeInclusive<usize> {
usize::from(*range.start())..=usize::from(*range.end())
}
fn split_infinite_bitmap(mut bitmap: Bitmap) -> (Bitmap, Option<RangeFrom<BitmapIndex>>) {
if bitmap.weight().is_none() {
bitmap.last_unset().map_or_else(
|| (Bitmap::new(), Some(BitmapIndex::MIN..)),
|last_unset| {
let infinite_part = last_unset.checked_add_signed(1).unwrap()..;
bitmap.unset_range(infinite_part.clone());
(bitmap, Some(infinite_part))
},
)
} else {
(bitmap, None)
}
}
fn test_basic_inplace(initial: &Bitmap, inverse: &Bitmap) -> Result<(), TestCaseError> {
prop_assert_eq!(format!("{:p}", *initial), format!("{:p}", initial.0));
let mut buf = initial.clone();
buf.clear();
prop_assert!(buf.is_empty());
buf.copy_from(initial);
buf.fill();
prop_assert!(buf.is_full());
buf.copy_from(initial);
buf.invert();
prop_assert_eq!(&buf, inverse);
if initial.weight().unwrap_or(usize::MAX) > 0 {
buf.copy_from(initial);
buf.singlify();
prop_assert_eq!(buf.weight(), Some(1));
}
Ok(())
}
fn test_indexing(
initial: &Bitmap,
index: BitmapIndex,
initially_set: bool,
) -> Result<(), TestCaseError> {
let single = Bitmap::from(index);
let single_hole = !&single;
let max_iters = initial
.weight()
.unwrap_or_else(|| usize::from(index) + INFINITE_EXPLORE_ITERS);
prop_assert_eq!(initial.is_set(index), initially_set);
let mut buf = initial.clone();
buf.set(index);
prop_assert_eq!(
buf.weight(),
initial.weight().map(|w| w + usize::from(!initially_set))
);
allow_infinite_iteration(|| {
for idx in std::iter::once(index).chain(initial.iter_set().take(max_iters)) {
prop_assert!(buf.is_set(idx));
}
Ok(())
})?;
buf.copy_from(initial);
buf.set_only(index);
prop_assert_eq!(&buf, &single);
buf.copy_from(initial);
buf.set_all_but(index);
prop_assert_eq!(&buf, &single_hole);
buf.copy_from(initial);
buf.unset(index);
prop_assert_eq!(
buf.weight(),
initial.weight().map(|w| w - usize::from(initially_set))
);
allow_infinite_iteration(|| {
for idx in initial.iter_set().take(max_iters) {
prop_assert_eq!(buf.is_set(idx), idx != index);
}
Ok(())
})
}
fn test_and_sub(b1: &Bitmap, b2: &Bitmap, and: &Bitmap) -> Result<(), TestCaseError> {
prop_assert_eq!(&(b1 & b2), and);
prop_assert_eq!(&(b1.clone() & b2), and);
let mut buf = b1.clone();
buf &= b2;
prop_assert_eq!(&buf, and);
let b1_andnot_b2 = b1 & !b2;
prop_assert_eq!(&(b1 - b2), &b1_andnot_b2);
prop_assert_eq!(&(b1.clone() - b2), &b1_andnot_b2);
buf.copy_from(b1);
buf -= b2;
prop_assert_eq!(&buf, &b1_andnot_b2);
let b2_andnot_b1 = b2 & !b1;
prop_assert_eq!(&(b2 - b1), &b2_andnot_b1);
prop_assert_eq!(&(b2.clone() - b1), &b2_andnot_b1);
buf.copy_from(b2);
buf -= b1;
prop_assert_eq!(buf, b2_andnot_b1);
Ok(())
}
#[test]
fn test_low_level_null() {
unsafe {
assert_eq!(Bitmap::from_owned_raw_mut(ptr::null_mut()), None);
assert_eq!(Bitmap::borrow_from_raw(ptr::null()), None);
assert_eq!(Bitmap::borrow_from_raw_mut(ptr::null_mut()), None);
}
}
fn test_low_level_nonnull(bitmap: Bitmap) -> Result<(), TestCaseError> {
test_bitmap_ref_unary(&bitmap, BitmapRef::from(&bitmap))?;
let bitmap = ManuallyDrop::new(bitmap);
let inner = bitmap.0;
prop_assert_eq!(unsafe { bitmap.inner() }, inner);
{
let bitmap = unsafe { Bitmap::from_owned_nonnull(inner) };
prop_assert_eq!(bitmap.0, inner);
std::mem::forget(bitmap);
}
{
let bitmap = unsafe { Bitmap::from_owned_raw_mut(inner.as_ptr()).unwrap() };
prop_assert_eq!(bitmap.0, inner);
std::mem::forget(bitmap);
}
let bitmap = ManuallyDrop::into_inner(bitmap);
{
let borrow = unsafe { Bitmap::borrow_from_nonnull(inner) };
prop_assert_eq!(borrow.0, inner);
test_bitmap_ref_unary(&bitmap, borrow)?;
}
{
let borrow = unsafe { Bitmap::borrow_from_raw(bitmap.as_ptr()).unwrap() };
prop_assert_eq!(borrow.0, inner);
test_bitmap_ref_unary(&bitmap, borrow)?;
}
let mut bitmap = bitmap;
{
let borrow = unsafe { Bitmap::borrow_from_raw_mut(bitmap.as_mut_ptr()).unwrap() };
prop_assert_eq!(borrow.0, inner);
test_bitmap_ref_unary(&bitmap, borrow)?;
}
Ok(())
}
#[allow(clippy::redundant_clone)]
#[test]
fn empty() -> Result<(), TestCaseError> {
let empty = Bitmap::new();
let mut empty2 = Bitmap::full();
empty2.unset_range::<BitmapIndex>(..);
let inverse = Bitmap::full();
let test_empty = |empty: &Bitmap| {
prop_assert_eq!(empty.first_set(), None);
prop_assert_eq!(empty.first_unset().map(usize::from), Some(0));
prop_assert!(empty.is_empty());
prop_assert!(!empty.is_full());
prop_assert_eq!(empty.into_iter().count(), 0);
prop_assert_eq!(empty.iter_set().count(), 0);
prop_assert_eq!(empty.last_set(), None);
prop_assert_eq!(empty.last_unset(), None);
prop_assert_eq!(empty.weight(), Some(0));
allow_infinite_iteration(|| {
for (expected, idx) in empty.iter_unset().enumerate().take(INFINITE_EXPLORE_ITERS) {
prop_assert_eq!(expected, usize::from(idx));
}
for (expected, idx) in empty
.clone()
.into_iter()
.enumerate()
.take(INFINITE_EXPLORE_ITERS)
{
prop_assert_eq!(expected, usize::from(idx));
}
Ok(())
})?;
prop_assert_eq!(format!("{empty:?}"), "");
prop_assert_eq!(format!("{empty}"), "");
prop_assert_eq!(&(!empty), &inverse);
prop_assert_eq!(&(!(empty.clone())), &inverse);
Ok(())
};
test_empty(&empty)?;
test_empty(&empty.clone())?;
test_empty(&empty2)?;
test_empty(&Bitmap::default())?;
test_basic_inplace(&empty, &inverse)?;
test_low_level_nonnull(empty)?;
Ok(())
}
pub(crate) fn index_range() -> impl Strategy<Value = RangeInclusive<BitmapIndex>> {
[bitmap_index(), bitmap_index()].prop_map(|[start, end]| start..=end)
}
pub(crate) fn index_vec() -> impl Strategy<Value = Vec<BitmapIndex>> {
prop::collection::vec(bitmap_index(), SizeRange::default())
}
fn index_vec_and_set() -> impl Strategy<Value = (Vec<BitmapIndex>, HashSet<BitmapIndex>)> {
index_vec().prop_map(|v| {
let s = v.iter().copied().collect::<HashSet<_>>();
(v, s)
})
}
proptest! {
#[test]
fn empty_extend((extra, extra_set) in index_vec_and_set()) {
let mut extended = Bitmap::new();
extended.extend(extra);
prop_assert_eq!(extended.weight(), Some(extra_set.len()));
for idx in extra_set {
prop_assert!(extended.is_set(idx));
}
}
#[test]
fn empty_op_index(index in bitmap_index()) {
test_indexing(&Bitmap::new(), index, false)?;
}
#[test]
fn empty_op_range(range in index_range()) {
let mut buf = Bitmap::new();
buf.set_range(range.clone());
prop_assert_eq!(&buf, &Bitmap::from_range(range.clone()));
buf.clear();
buf.unset_range(range);
prop_assert!(buf.is_empty());
}
#[test]
fn empty_op_bitmap(other: Bitmap) {
let empty = Bitmap::new();
prop_assert_eq!(empty.includes(&other), other.is_empty());
prop_assert!(other.includes(&empty));
prop_assert!(!empty.intersects(&other));
prop_assert_eq!(empty == other, other.is_empty());
if other.is_empty() {
let state = RandomState::new();
prop_assert_eq!(state.hash_one(&other), state.hash_one(&empty));
} else {
prop_assert!(empty < other);
}
test_and_sub(&empty, &other, &empty)?;
prop_assert_eq!(&(&empty | &other), &other);
prop_assert_eq!(&(empty.clone() | &other), &other);
let mut buf = Bitmap::new();
buf |= &other;
prop_assert_eq!(&buf, &other);
prop_assert_eq!(&(&empty ^ &other), &other);
prop_assert_eq!(&(empty.clone() ^ &other), &other);
buf.clear();
buf ^= &other;
prop_assert_eq!(&buf, &other);
test_bitmap_ref_binops(&empty, &other)?;
}
}
// Check properties of the full (all-indices-set, infinite) bitmap.
// The redundant clones are intentional: they exercise Clone-based code paths.
#[allow(clippy::redundant_clone)]
#[test]
fn full() -> Result<(), TestCaseError> {
// Three equivalent ways of building a full bitmap must behave identically
let full = Bitmap::full();
let full2 = Bitmap::from_range::<BitmapIndex>(..);
let mut full3 = Bitmap::new();
full3.set_range::<BitmapIndex>(..);
// The inverse of a full bitmap is the empty bitmap
let inverse = Bitmap::new();
let test_full = |full: &Bitmap| {
prop_assert_eq!(full.first_set().map(usize::from), Some(0));
prop_assert_eq!(full.first_unset(), None);
prop_assert!(!full.is_empty());
prop_assert!(full.is_full());
prop_assert_eq!(full.iter_unset().count(), 0);
// last_set()/weight() return None because the set index range is infinite
prop_assert_eq!(full.last_set(), None);
prop_assert_eq!(full.last_unset(), None);
prop_assert_eq!(full.weight(), None);
// Set-index iteration never terminates, so only probe a bounded prefix
allow_infinite_iteration(|| {
fn test_iter_set(
iter: impl Iterator<Item = BitmapIndex>,
) -> Result<(), TestCaseError> {
// Every consecutive index starting from 0 should be yielded in order
for (expected, idx) in iter.enumerate().take(INFINITE_EXPLORE_ITERS) {
prop_assert_eq!(expected, usize::from(idx));
}
Ok(())
}
// Exercise all three set-iteration entry points
test_iter_set(full.into_iter())?;
test_iter_set(full.clone().into_iter())?;
test_iter_set(full.iter_set())?;
Ok::<(), TestCaseError>(())
})?;
// hwloc renders an infinite trailing range as "start-"
prop_assert_eq!(format!("{full:?}"), "0-");
prop_assert_eq!(format!("{full}"), "0-");
prop_assert_eq!(&(!full), &inverse);
prop_assert_eq!(&(!(full.clone())), &inverse);
Ok(())
};
// Run the shared checks on every construction path (and a clone)
test_full(&full)?;
test_full(&full.clone())?;
test_full(&full2)?;
test_full(&full3)?;
test_basic_inplace(&full, &inverse)?;
test_low_level_nonnull(full)?;
Ok(())
}
// Property tests for full bitmaps, range-constructed bitmaps, and fully
// arbitrary bitmaps, including all binary operator combinations.
proptest! {
// Extending a full bitmap cannot change it
#[test]
fn full_extend(extra in index_vec()) {
let mut extended = Bitmap::full();
extended.extend(extra.iter().copied());
prop_assert!(extended.is_full());
}
// Every index is set in a full bitmap (expected result: true)
#[test]
fn full_op_index(index in bitmap_index()) {
test_indexing(&Bitmap::full(), index, true)?;
}
// Range set/unset on a full bitmap
#[test]
fn full_op_range(range in index_range()) {
// ranged_hole = everything EXCEPT the range
let mut ranged_hole = Bitmap::from_range(range.clone());
ranged_hole.invert();
let mut buf = Bitmap::full();
// Setting any range on a full bitmap is a no-op
buf.set_range(range.clone());
prop_assert!(buf.is_full());
buf.fill();
// Unsetting a range from full punches exactly that hole
buf.unset_range(range);
prop_assert_eq!(buf, ranged_hole);
}
// Binary operations between a full bitmap and an arbitrary bitmap
#[test]
fn full_op_bitmap(other: Bitmap) {
let full = Bitmap::full();
let not_other = !&other;
prop_assert!(full.includes(&other));
prop_assert_eq!(other.includes(&full), other.is_full());
prop_assert_eq!(full.intersects(&other), !other.is_empty());
prop_assert_eq!(full == other, other.is_full());
if other.is_full() {
// Eq implies Hash equivalence
let state = RandomState::new();
prop_assert_eq!(state.hash_one(&other), state.hash_one(&full));
}
// Full is the maximum element of the bitmap ordering
prop_assert_eq!(
full.cmp(&other),
if other.is_full() {
Ordering::Equal
} else {
Ordering::Greater
}
);
// AND with full is identity; SUB from full yields `other` back (checked by helper)
test_and_sub(&full, &other, &other)?;
// OR with full saturates
prop_assert!((&full | &other).is_full());
prop_assert!((full.clone() | &other).is_full());
let mut buf = Bitmap::full();
buf |= &other;
prop_assert!(buf.is_full());
// XOR with full is bitwise negation
prop_assert_eq!(&(&full ^ &other), &not_other);
prop_assert_eq!(&(full.clone() ^ &other), &not_other);
buf.fill();
buf ^= &other;
prop_assert_eq!(buf, not_other);
test_bitmap_ref_binops(&full, &other)?;
}
// Check a bitmap built via from_range against a manually derived model.
// Redundant clones intentionally exercise Clone-based code paths.
#[allow(clippy::redundant_clone)]
#[test]
fn from_range(range in index_range()) {
let ranged_bitmap = Bitmap::from_range(range.clone());
// Expected set indices, as a sorted Vec
let elems = range_to_usize(&range)
.map(|idx| BitmapIndex::try_from(idx).unwrap())
.collect::<Vec<_>>();
// First unset index: 0 unless the range starts at 0, in which case it's
// just past the last set index (None if that would overflow)
let first_unset = if elems.first() == Some(&BitmapIndex::MIN) {
elems
.last()
.copied()
.and_then(|item| item.checked_add_signed(1))
} else {
Some(BitmapIndex::MIN)
};
// First unset index at or after the set range
let unset_after_set = elems.last().map_or(Some(BitmapIndex::MIN), |last_set| {
last_set.checked_add_signed(1)
});
// Expected Display/Debug output: "", "N" or "first-last"
let display = if let (Some(first), Some(last)) = (elems.first(), elems.last()) {
if first == last {
format!("{first}")
} else {
format!("{first}-{last}")
}
} else {
String::new()
};
// Expected negation: everything before the range plus everything after it
let inverse = if let (Some(&first), Some(last)) = (elems.first(), elems.last()) {
let mut buf = Bitmap::from_range(..first);
if let Some(after_last) = last.checked_add_signed(1) {
buf.set_range(after_last..)
}
buf
} else {
Bitmap::full()
};
let test_ranged = |ranged_bitmap: &Bitmap| {
prop_assert_eq!(ranged_bitmap.first_set(), elems.first().copied());
prop_assert_eq!(ranged_bitmap.first_unset(), first_unset);
prop_assert_eq!(ranged_bitmap.is_empty(), elems.is_empty());
prop_assert!(!ranged_bitmap.is_full());
// All three set-iteration entry points must agree with the model
prop_assert_eq!(&ranged_bitmap.into_iter().collect::<Vec<_>>(), &elems);
prop_assert_eq!(&ranged_bitmap.clone().into_iter().collect::<Vec<_>>(), &elems);
prop_assert_eq!(&ranged_bitmap.iter_set().collect::<Vec<_>>(), &elems);
prop_assert_eq!(&ranged_bitmap.last_set(), &elems.last().copied());
// Unset indices extend to infinity, so there is no last unset index
prop_assert_eq!(ranged_bitmap.last_unset(), None);
prop_assert_eq!(ranged_bitmap.weight(), Some(elems.len()));
// Unset iteration never terminates, so only probe a bounded prefix
allow_infinite_iteration(|| {
let mut unset = ranged_bitmap.iter_unset();
// First, the unset indices below the range...
if let Some(first_set) = elems.first() {
for expected_unset in 0..usize::from(*first_set) {
prop_assert_eq!(unset.next().map(usize::from), Some(expected_unset));
}
}
// ...then the infinite tail of unset indices above it
let mut expected_unset =
std::iter::successors(unset_after_set, |unset| unset.checked_add_signed(1));
for unset_index in unset.take(INFINITE_EXPLORE_ITERS) {
prop_assert_eq!(unset_index, expected_unset.next().unwrap())
}
Ok(())
})?;
prop_assert_eq!(&format!("{ranged_bitmap:?}"), &display);
prop_assert_eq!(&format!("{ranged_bitmap}"), &display);
prop_assert_eq!(&(!ranged_bitmap), &inverse);
prop_assert_eq!(&(!(ranged_bitmap.clone())), &inverse);
Ok(())
};
test_ranged(&ranged_bitmap)?;
test_ranged(&ranged_bitmap.clone())?;
test_basic_inplace(&ranged_bitmap, &inverse)?;
test_low_level_nonnull(ranged_bitmap.clone())?;
// An exclusive lower bound plus re-setting the start index must equal the
// inclusive-range bitmap (index_range() yields nonempty RangeInclusive,
// so contains(start) holds — TODO confirm against index_range definition)
let mut exclude_left = Bitmap::from_range((
Bound::Excluded(*range.start()),
Bound::Included(*range.end()),
));
prop_assert!(!exclude_left.is_set(*range.start()));
if range.contains(range.start()) {
exclude_left.set(*range.start());
}
prop_assert_eq!(&exclude_left, &ranged_bitmap);
// Same check for an exclusive upper bound
let mut exclude_right = Bitmap::from_range(*range.start()..*range.end());
prop_assert!(!exclude_right.is_set(*range.end()));
if range.contains(range.end()) {
exclude_right.set(*range.end());
}
prop_assert_eq!(exclude_right, ranged_bitmap);
}
// Extending a ranged bitmap should set exactly range ∪ extra indices
#[test]
fn from_range_extend(
range in index_range(),
(extra, extra_set) in index_vec_and_set()
) {
let mut extended = Bitmap::from_range(range.clone());
extended.extend(extra);
// Model: deduplicated extras plus every index of the range
let mut predicted = extra_set;
for idx in usize::from(*range.start())..=usize::from(*range.end()) {
predicted.insert(idx.try_into().unwrap());
}
prop_assert_eq!(extended.weight(), Some(predicted.len()));
for idx in predicted {
prop_assert!(extended.is_set(idx));
}
}
// An index is set in a ranged bitmap iff the range contains it
#[test]
fn from_range_op_index(range in index_range(), index in bitmap_index()) {
test_indexing(
&Bitmap::from_range(range.clone()),
index,
range.contains(&index),
)?;
}
// Range set/unset on a ranged bitmap, checked via inclusion-exclusion
#[test]
fn from_range_op_range(
[range1, range2] in [index_range(), index_range()]
) {
let usized = range_to_usize(&range1);
let other_usized = range_to_usize(&range2);
let num_indices = |range: &RangeInclusive<usize>| range.clone().count();
// Size of the overlap between the two ranges
let num_common_indices = if usized.is_empty() || other_usized.is_empty() {
0
} else {
num_indices(
&(*usized.start().max(other_usized.start())
..=*usized.end().min(other_usized.end())),
)
};
let ranged_bitmap = Bitmap::from_range(range1);
let mut buf = ranged_bitmap.clone();
buf.set_range(range2.clone());
// |A ∪ B| = |A| + |B| - |A ∩ B|
prop_assert_eq!(
buf.weight().unwrap(),
num_indices(&usized) + num_indices(&other_usized) - num_common_indices
);
for idx in usized.clone().chain(other_usized.clone()) {
prop_assert!(buf.is_set(idx));
}
buf.copy_from(&ranged_bitmap);
buf.unset_range(range2);
// |A \ B| = |A| - |A ∩ B|
prop_assert_eq!(
buf.weight().unwrap(),
num_indices(&usized) - num_common_indices
);
for idx in usized {
prop_assert_eq!(buf.is_set(idx), !other_usized.contains(&idx));
}
}
// Binary operations between a ranged bitmap and an arbitrary bitmap
#[allow(clippy::similar_names)]
#[test]
fn from_range_op_bitmap(
range in index_range(),
other: Bitmap
) {
let ranged_bitmap = Bitmap::from_range(range.clone());
let usized = range_to_usize(&range);
// last_set().is_some() rules out infinite bitmaps, whose infinite
// iter_set() could otherwise never be exhaustively checked by all()
prop_assert_eq!(
ranged_bitmap.includes(&other),
other.is_empty()
|| (other.last_set().is_some() && other.iter_set().all(|idx| range.contains(&idx)))
);
prop_assert_eq!(
other.includes(&ranged_bitmap),
usized.clone().all(|idx| other.is_set(idx))
);
prop_assert_eq!(
ranged_bitmap.intersects(&other),
usized.clone().any(|idx| other.is_set(idx))
);
// Equal iff same weight and mutual inclusion (one direction suffices
// given equal weights)
prop_assert_eq!(
ranged_bitmap == other,
other.weight() == Some(usized.count()) && other.includes(&ranged_bitmap)
);
if ranged_bitmap == other {
// Eq implies Hash equivalence
let state = RandomState::new();
prop_assert_eq!(state.hash_one(&other), state.hash_one(&ranged_bitmap));
}
if ranged_bitmap.is_empty() {
// Empty compares below everything but itself
prop_assert_eq!(
ranged_bitmap.cmp(&other),
if other.is_empty() {
Ordering::Equal
} else {
Ordering::Less
}
);
} else {
// Sanity-check the ordering: highest differing index decides
match ranged_bitmap.cmp(&other) {
Ordering::Less => {
prop_assert!(
other.last_set().unwrap_or(BitmapIndex::MAX) > *range.end()
|| (other.includes(&ranged_bitmap)
&& other.first_set().unwrap_or(BitmapIndex::MIN) < *range.start())
)
}
Ordering::Equal => prop_assert_eq!(&ranged_bitmap, &other),
Ordering::Greater => prop_assert!(!other.includes(&ranged_bitmap)),
}
}
// Build the expected intersection: finite part filtered by the range,
// plus the overlap of `other`'s infinite tail with the range
let (other_finite, other_infinite) = split_infinite_bitmap(other.clone());
let mut ranged_and_other = other_finite
.iter_set()
.filter(|idx| range.contains(idx))
.collect::<Bitmap>();
if let Some(infinite) = &other_infinite {
if !ranged_bitmap.is_empty() {
ranged_and_other.set_range(infinite.start.max(*range.start())..=*range.end());
}
}
test_and_sub(&ranged_bitmap, &other, &ranged_and_other)?;
// OR: expected result is `other` with the range set on top
let mut ranged_or_other = other.clone();
ranged_or_other.set_range(range);
prop_assert_eq!(&(&ranged_bitmap | &other), &ranged_or_other);
prop_assert_eq!(&(ranged_bitmap.clone() | &other), &ranged_or_other);
let mut buf = ranged_bitmap.clone();
buf |= &other;
prop_assert_eq!(&buf, &ranged_or_other);
// XOR = OR minus AND
let ranged_xor_other = ranged_or_other - ranged_and_other;
prop_assert_eq!(&(&ranged_bitmap ^ &other), &ranged_xor_other);
prop_assert_eq!(&(ranged_bitmap.clone() ^ &other), &ranged_xor_other);
let mut buf = ranged_bitmap.clone();
buf ^= &other;
prop_assert_eq!(buf, ranged_xor_other);
test_bitmap_ref_binops(&ranged_bitmap, &other)?;
}
// FromIterator sets exactly the deduplicated input indices
#[test]
fn from_iterator((indices, indices_set) in index_vec_and_set()) {
let bitmap = indices.iter().copied().collect::<Bitmap>();
prop_assert_eq!(bitmap.weight(), Some(indices_set.len()));
for idx in indices_set {
prop_assert!(bitmap.is_set(idx));
}
}
// Exhaustive self-consistency checks on a fully arbitrary bitmap.
// Redundant clones intentionally exercise Clone-based code paths.
#[allow(clippy::redundant_clone)]
#[test]
fn arbitrary(bitmap: Bitmap) {
allow_infinite_iteration(|| {
prop_assert_eq!(bitmap.first_set(), bitmap.iter_set().next());
prop_assert_eq!(bitmap.first_unset(), bitmap.iter_unset().next());
Ok(())
})?;
prop_assert_eq!(bitmap.is_empty(), bitmap.first_set().is_none());
prop_assert_eq!(bitmap.is_full(), bitmap.first_unset().is_none());
// Walk the set and unset iterators in lockstep, checking that together
// they enumerate every index exactly once, in order. Along the way,
// rebuild the bitmap's expected inverse and Display string from the
// observed set-index "stripes", and return both for later comparison.
fn test_iter(
bitmap: &Bitmap,
iter_set: impl Iterator<Item = BitmapIndex>,
) -> Result<(Bitmap, String), TestCaseError> {
let mut iter_set = iter_set.peekable();
let mut iter_unset = bitmap.iter_unset().peekable();
// Next index that must be yielded by one of the two iterators
let mut next_index = BitmapIndex::MIN;
// Start of the current contiguous run of set indices, if inside one
let mut set_stripe_start = None;
let mut observed_weight = 0;
let mut observed_last_set = None;
let mut observed_last_unset = None;
let mut inverse = Bitmap::full();
let mut display = String::new();
// Loop while both iterators are live; exactly one is finite
while let (Some(next_set), Some(next_unset)) =
(iter_set.peek().copied(), iter_unset.peek().copied())
{
match next_set.cmp(&next_unset) {
Ordering::Less => {
// next_index is set: extend/open the current stripe
iter_set.next();
prop_assert_eq!(next_set, next_index);
observed_last_set = Some(next_set);
observed_weight += 1;
if set_stripe_start.is_none() {
set_stripe_start = Some(next_set);
}
}
Ordering::Greater => {
// next_index is unset: close the current stripe, if any,
// recording it in the inverse bitmap and display string
iter_unset.next();
prop_assert_eq!(next_unset, next_index);
observed_last_unset = Some(next_unset);
if let Some(first_set) = set_stripe_start {
let last_set = observed_last_set.unwrap();
inverse.unset_range(first_set..=last_set);
if !display.is_empty() {
write!(display, ",").unwrap();
}
write!(display, "{first_set}").unwrap();
if last_set != first_set {
write!(display, "-{last_set}").unwrap();
}
set_stripe_start = None;
}
}
Ordering::Equal => unreachable!("Next index can't be both set and unset"),
}
next_index = next_index.checked_add_signed(1).expect(
"Shouldn't overflow if we had both a next set & unset index before iterating",
);
}
// One iterator is exhausted; the other is infinite from here on
let mut infinite_iter: Box<dyn Iterator<Item = BitmapIndex>> =
match (iter_set.peek(), iter_unset.peek()) {
(Some(next_set), None) => {
// Infinitely many set indices: last_set/weight are None,
// and the final stripe extends to infinity ("start-")
prop_assert_eq!(bitmap.last_set(), None);
prop_assert_eq!(bitmap.last_unset(), observed_last_unset);
prop_assert_eq!(bitmap.weight(), None);
let stripe_start = set_stripe_start.unwrap_or(*next_set);
inverse.unset_range(stripe_start..);
if !display.is_empty() {
write!(display, ",").unwrap();
}
write!(display, "{stripe_start}-").unwrap();
Box::new(iter_set)
}
(None, Some(_unset)) => {
// Finitely many set indices: flush the last stripe, if any
prop_assert_eq!(bitmap.last_set(), observed_last_set);
prop_assert_eq!(bitmap.last_unset(), None);
prop_assert_eq!(bitmap.weight(), Some(observed_weight));
if let Some(first_set) = set_stripe_start {
let last_set = observed_last_set.unwrap();
inverse.unset_range(first_set..=last_set);
if !display.is_empty() {
write!(display, ",").unwrap();
}
write!(display, "{first_set}").unwrap();
if last_set != first_set {
write!(display, "-{last_set}").unwrap();
}
}
Box::new(iter_unset)
}
_ => unreachable!("At least one iterator is finite, they can't both be"),
};
// Probe a bounded prefix of the infinite tail: it must yield every
// consecutive index from next_index onwards
for _ in 0..INFINITE_EXPLORE_ITERS {
prop_assert_eq!(infinite_iter.next(), Some(next_index));
if let Some(index) = next_index.checked_add_signed(1) {
next_index = index;
} else {
break;
}
}
Ok((inverse, display))
}
// Run the lockstep walk through all three set-iteration entry points
let ((inverse, display), (inverse2, display2), (inverse3, display3)) =
allow_infinite_iteration(|| {
let tuple1 = test_iter(&bitmap, (&bitmap).into_iter())?;
let tuple2 = test_iter(&bitmap, bitmap.clone().into_iter())?;
let tuple3 = test_iter(&bitmap, bitmap.iter_set())?;
Ok::<_, TestCaseError>((tuple1, tuple2, tuple3))
})?;
prop_assert_eq!(&inverse, &inverse2);
prop_assert_eq!(&inverse, &inverse3);
prop_assert_eq!(&display, &display2);
prop_assert_eq!(&display, &display3);
// The reconstructed inverse/display must match the real Not/Debug/Display
prop_assert_eq!(&(!&bitmap), &inverse);
prop_assert_eq!(&(!bitmap.clone()), &inverse);
prop_assert_eq!(&format!("{bitmap:?}"), &display);
prop_assert_eq!(&format!("{bitmap}"), &display);
test_basic_inplace(&bitmap, &inverse)?;
test_low_level_nonnull(bitmap.clone())?;
// Clones must report identical queries
let clone = bitmap.clone();
prop_assert_eq!(clone.first_set(), bitmap.first_set());
prop_assert_eq!(clone.first_unset(), bitmap.first_unset());
prop_assert_eq!(clone.is_empty(), bitmap.is_empty());
prop_assert_eq!(clone.is_full(), bitmap.is_full());
prop_assert_eq!(clone.last_set(), bitmap.last_set());
prop_assert_eq!(clone.last_unset(), bitmap.last_unset());
prop_assert_eq!(clone.weight(), bitmap.weight());
// Check the clone's iterators against the finite/infinite decomposition
let (finite, infinite) = split_infinite_bitmap(bitmap);
#[allow(clippy::option_if_let_else)]
if let Some(infinite) = infinite {
let test_iter = |mut iter_set: Box<dyn Iterator<Item = BitmapIndex>>| {
let mut iter_unset = clone.iter_unset().fuse();
let infinite_start = usize::from(infinite.start);
// Below the infinite tail, each index comes from exactly one iterator
for idx in 0..infinite_start {
let next = if finite.is_set(idx) {
iter_set.next()
} else {
iter_unset.next()
};
prop_assert_eq!(next.map(usize::from), Some(idx));
}
// No unset indices remain; set indices continue forever
prop_assert_eq!(iter_unset.next(), None);
for idx in (infinite_start..).take(INFINITE_EXPLORE_ITERS) {
prop_assert_eq!(iter_set.next().map(usize::from), Some(idx));
}
Ok(())
};
allow_infinite_iteration(|| {
test_iter(Box::new((&clone).into_iter()))?;
test_iter(Box::new(clone.iter_set()))?;
Ok::<(), TestCaseError>(())
})?;
} else {
// Finite bitmap: set iteration round-trips via collect
prop_assert_eq!(&((&clone).into_iter().collect::<Bitmap>()), &finite);
prop_assert_eq!(&(clone.iter_set().collect::<Bitmap>()), &finite);
allow_infinite_iteration(|| {
// Enough iterations to pass the last set index, plus a probe of
// the infinite unset tail
let num_iters = usize::from(finite.last_set().unwrap_or(BitmapIndex::MIN)) + 1
- finite.weight().unwrap()
+ INFINITE_EXPLORE_ITERS;
let mut iterator = finite.iter_unset().zip(clone.iter_unset());
for _ in 0..num_iters {
let (expected, actual) = iterator.next().unwrap();
prop_assert_eq!(expected, actual);
}
Ok(())
})?;
}
prop_assert_eq!(&format!("{clone:?}"), &display);
prop_assert_eq!(&format!("{clone}"), &display);
prop_assert_eq!(!&clone, inverse);
}
// Extending any bitmap adds at most the deduplicated extras' worth of weight
#[test]
fn arbitrary_extend(
bitmap: Bitmap,
(extra, extra_set) in index_vec_and_set()
) {
let mut extended = bitmap.clone();
extended.extend(extra);
// Weight check only applies to finite bitmaps (weight() is None otherwise)
if let Some(bitmap_weight) = bitmap.weight() {
let extra_weight = extended
.weight()
.unwrap()
.checked_sub(bitmap_weight)
.expect("Extending a bitmap shouldn't reduce the weight");
// <= because some extras may already have been set
prop_assert!(extra_weight <= extra_set.len());
}
for idx in extra_set {
prop_assert!(extended.is_set(idx));
}
}
// Indexing an arbitrary bitmap agrees with is_set()
#[test]
fn arbitrary_op_index(
bitmap: Bitmap,
index in bitmap_index()
) {
test_indexing(&bitmap, index, bitmap.is_set(index))?;
}
// Range set/unset on an arbitrary bitmap: weight deltas bounded by range size
#[test]
fn arbitrary_op_range(
bitmap: Bitmap,
range in index_range()
) {
let range_usize = range_to_usize(&range);
let range_len = range_usize.clone().count();
let mut buf = bitmap.clone();
buf.set_range(range.clone());
if let Some(bitmap_weight) = bitmap.weight() {
let extra_weight = buf
.weight()
.unwrap()
.checked_sub(bitmap_weight)
.expect("Setting indices shouldn't reduce the weight");
prop_assert!(extra_weight <= range_len);
for idx in range_usize.clone() {
prop_assert!(buf.is_set(idx));
}
}
buf.copy_from(&bitmap);
buf.unset_range(range);
if let Some(bitmap_weight) = bitmap.weight() {
let lost_weight = bitmap_weight
.checked_sub(buf.weight().unwrap())
.expect("Clearing indices shouldn't increase the weight");
prop_assert!(lost_weight <= range_len);
for idx in range_usize {
prop_assert!(!buf.is_set(idx));
}
}
}
// Binary operations between two arbitrary bitmaps, modeled through their
// finite/infinite decompositions
#[allow(clippy::similar_names)]
#[test]
fn arbitrary_op_bitmap([bitmap, other]: [Bitmap; 2]) {
let (finite, infinite) = split_infinite_bitmap(bitmap.clone());
let (other_finite, other_infinite) = split_infinite_bitmap(other.clone());
// includes: finite part of `other` must be covered, and if both have
// infinite tails, the gap between their starts must be set in `bitmap`
prop_assert_eq!(
bitmap.includes(&other),
other_finite.iter_set().all(|idx| bitmap.is_set(idx))
&& match (&infinite, &other_infinite) {
(Some(infinite), Some(other_infinite)) => {
(usize::from(other_infinite.start)..usize::from(infinite.start))
.all(|idx| finite.is_set(idx))
}
(_, None) => true,
(None, Some(_)) => false,
}
);
// An infinite tail intersects a finite bitmap iff the tail starts at or
// before the finite bitmap's last set index
fn infinite_intersects_finite(infinite: &RangeFrom<BitmapIndex>, finite: &Bitmap) -> bool {
finite
.last_set().is_some_and(|last_set| infinite.start <= last_set)
}
prop_assert_eq!(
bitmap.intersects(&other),
finite.iter_set().any(|idx| other.is_set(idx))
|| match (&infinite, &other_infinite) {
(Some(_), Some(_)) => true,
(Some(infinite), None) => infinite_intersects_finite(infinite, &other_finite),
(None, Some(other_infinite)) =>
infinite_intersects_finite(other_infinite, &finite),
(None, None) => false,
}
);
// Equality is mutual inclusion
prop_assert_eq!(
bitmap == other,
bitmap.includes(&other) && other.includes(&bitmap)
);
if bitmap == other {
// Eq implies Hash equivalence
let state = RandomState::new();
prop_assert_eq!(state.hash_one(&other), state.hash_one(&bitmap));
}
// Reference model of Ord: compare from the highest potentially
// differing index downwards; above finite_end, both bitmaps agree
fn expected_cmp(bitmap: &Bitmap, reference: &Bitmap) -> Ordering {
let (finite, infinite) = split_infinite_bitmap(bitmap.clone());
let (ref_finite, ref_infinite) = split_infinite_bitmap(reference.clone());
let finite_end = match (infinite, ref_infinite) {
// An infinite bitmap always exceeds a finite one
(Some(_), None) => return Ordering::Greater,
(None, Some(_)) => return Ordering::Less,
(Some(infinite), Some(ref_infinite)) => infinite.start.max(ref_infinite.start),
(None, None) => finite
.last_set()
.unwrap_or(BitmapIndex::MIN)
.max(ref_finite.last_set().unwrap_or(BitmapIndex::MIN)),
};
for idx in (0..=usize::from(finite_end)).rev() {
match (bitmap.is_set(idx), reference.is_set(idx)) {
(true, false) => return Ordering::Greater,
(false, true) => return Ordering::Less,
_ => {},
}
}
Ordering::Equal
}
prop_assert_eq!(bitmap.cmp(&other), expected_cmp(&bitmap, &other));
// Expected intersection: finite overlap, plus the overlap of the two
// infinite tails, plus `other`'s set indices inside `bitmap`'s tail
let mut bitmap_and_other = finite
.iter_set()
.filter(|idx| other.is_set(*idx))
.collect::<Bitmap>();
match (&infinite, &other_infinite) {
(Some(infinite), Some(other_infinite)) => {
bitmap_and_other.set_range(infinite.start.max(other_infinite.start)..);
for idx in usize::from(infinite.start)..usize::from(other_infinite.start) {
if other.is_set(idx) {
bitmap_and_other.set(idx);
}
}
}
(Some(infinite), None) => {
let other_end = other_finite.last_set().unwrap_or(BitmapIndex::MIN);
for idx in usize::from(infinite.start)..=usize::from(other_end) {
if other.is_set(idx) {
bitmap_and_other.set(idx)
}
}
}
_ => {}
}
test_and_sub(&bitmap, &other, &bitmap_and_other)?;
// Expected union: both finite parts plus both infinite tails
let mut bitmap_or_other = finite;
for idx in &other_finite {
bitmap_or_other.set(idx);
}
if let Some(infinite) = infinite {
bitmap_or_other.set_range(infinite);
}
if let Some(other_infinite) = other_infinite {
bitmap_or_other.set_range(other_infinite);
}
prop_assert_eq!(&(&bitmap | &other), &bitmap_or_other);
prop_assert_eq!(&(bitmap.clone() | &other), &bitmap_or_other);
let mut buf = bitmap.clone();
buf |= &other;
prop_assert_eq!(&buf, &bitmap_or_other);
// XOR = OR minus AND
let bitmap_xor_other = bitmap_or_other - bitmap_and_other;
prop_assert_eq!(&(&bitmap ^ &other), &bitmap_xor_other);
prop_assert_eq!(&(bitmap.clone() ^ &other), &bitmap_xor_other);
buf.copy_from(&bitmap);
buf ^= &other;
prop_assert_eq!(buf, bitmap_xor_other);
test_bitmap_ref_binops(&bitmap, &other)?;
}
}
}