#![no_std]
extern crate alloc;
use alloc::{boxed::Box, vec, vec::Vec};
use core::cmp::max;
use core::fmt;
use core::hash;
use core::mem::{forget, replace, size_of};
use core::ops::{Index, Range};
use core::slice;
/// Creates a [`SmallBitVec`] containing the given bits.
///
/// `sbvec![val; n]` produces a vector of `n` bits all equal to `val`;
/// `sbvec![a, b, c]` collects the listed bits in order.
#[macro_export]
macro_rules! sbvec {
    ($elem:expr; $n:expr) => (
        $crate::SmallBitVec::from_elem($n, $elem)
    );
    ($($x:expr),*) => (
        [$($x),*].iter().cloned().collect::<$crate::SmallBitVec>()
    );
    ($($x:expr,)*) => (
        // Trailing-comma form forwards to the comma-free arm.
        sbvec![$($x),*]
    );
}
// A `debug_assert!($left <= $right)` usable inside `const fn`.
// Indexing the `[(); $right + 1]` array with `$left` fails (in const
// evaluation or at runtime) whenever `$left > $right`; in release builds
// the whole check is compiled out via `cfg(debug_assertions)`.
macro_rules! const_debug_assert_le {
    ($left: ident <= $right: expr) => {
        #[cfg(debug_assertions)]
        [(); $right + 1][$left];
    };
}
#[cfg(test)]
mod tests;
/// A bit vector that stores short contents inline in a single `usize`
/// and spills longer contents to a heap allocation.
///
/// `data` is either the inline bits plus a length-sentinel bit (when the
/// low `HEAP_FLAG` bit is clear) or a tagged pointer to a heap `Header`
/// (when the low bit is set).
pub struct SmallBitVec {
    data: usize,
}
/// Total number of bits in a `usize` word.
#[inline(always)]
const fn inline_bits() -> usize {
    size_of::<usize>() * 8
}
/// Maximum number of bits storable inline: one bit is reserved for the
/// heap flag and one for the length sentinel.
#[inline(always)]
const fn inline_capacity() -> usize {
    inline_bits() - 2
}
/// Shift amount of inline bit `n`: bit 0 lives at the most significant
/// position and later bits move toward the LSB.
#[inline(always)]
const fn inline_shift(n: usize) -> usize {
    const_debug_assert_le!(n <= inline_capacity());
    inline_bits() - 1 - n
}
/// Single-bit mask selecting inline bit `n`.
#[inline(always)]
const fn inline_index(n: usize) -> usize {
    1 << inline_shift(n)
}
/// Mask covering the first `n` inline bit positions (the top `n` bits of
/// the word). `n == 0` is special-cased because shifting a `usize` by
/// `inline_bits()` would be undefined.
#[inline(always)]
fn inline_ones(n: usize) -> usize {
    if n == 0 {
        0
    } else {
        !0 << (inline_bits() - n)
    }
}
/// Low tag bit of `SmallBitVec::data`: set when `data` is a heap pointer.
const HEAP_FLAG: usize = 1;
/// Unit of heap storage; one word of bits.
type Storage = usize;
/// Number of bits held by one `Storage` word.
#[inline(always)]
fn bits_per_storage() -> usize {
    size_of::<Storage>() * 8
}
/// Header placed at the front of every heap allocation, immediately
/// followed by `buffer_len` words of bit storage.
struct Header {
    // Number of live bits in the vector.
    len: Storage,
    // Number of `Storage` words in the buffer following the header.
    buffer_len: Storage,
}
impl Header {
    /// Allocates a header plus a buffer with room for at least `cap` bits,
    /// records `len` as the logical length, and fills the buffer with all
    /// ones (`val == true`) or all zeros. Returns a raw pointer to the
    /// allocation; ownership transfers to the caller, which must free it
    /// by rebuilding the `Vec` (as `Drop for SmallBitVec` does).
    fn new(cap: usize, len: usize, val: bool) -> *mut Header {
        let alloc_len = header_len() + buffer_len(cap);
        let init = if val { !0 } else { 0 };
        let v: Vec<Storage> = vec![init; alloc_len];
        // Record the capacity the allocator actually handed out, so the
        // Vec rebuilt at drop time has a matching capacity.
        let buffer_len = v.capacity() - header_len();
        let header_ptr = v.as_ptr() as *mut Header;
        // Leak the Vec; the allocation is now owned through `header_ptr`.
        forget(v);
        unsafe {
            (*header_ptr).len = len;
            (*header_ptr).buffer_len = buffer_len;
        }
        header_ptr
    }
}
/// Number of `Storage` words occupied by the `Header` itself.
#[inline(always)]
fn header_len() -> usize {
    size_of::<Header>() / size_of::<Storage>()
}
/// Number of `Storage` words needed to hold `cap` bits (rounded up).
#[inline(always)]
fn buffer_len(cap: usize) -> usize {
    (cap + bits_per_storage() - 1) / bits_per_storage()
}
/// Raw backing storage of a [`SmallBitVec`], as produced by
/// [`SmallBitVec::into_storage`].
pub enum InternalStorage {
    /// The tagged inline word (value bits plus length sentinel).
    Inline(usize),
    /// The entire heap allocation: header words followed by buffer words.
    Spilled(Box<[usize]>),
}
impl SmallBitVec {
    /// Creates a new, empty vector using inline storage.
    #[inline]
    pub const fn new() -> SmallBitVec {
        SmallBitVec {
            // Just the sentinel bit at position 0: length zero, heap flag clear.
            data: inline_index(0),
        }
    }
    /// Creates a vector of `len` bits, each set to `val`.
    #[inline]
    pub fn from_elem(len: usize, val: bool) -> SmallBitVec {
        if len <= inline_capacity() {
            return SmallBitVec {
                data: if val {
                    // Top `len` value bits set, plus the sentinel bit just
                    // below them: `inline_ones(len + 1)` covers both.
                    inline_ones(len + 1)
                } else {
                    // All value bits zero; only the sentinel marks the length.
                    inline_index(len)
                },
            };
        }
        // Too long for inline storage: spill to the heap.
        let header_ptr = Header::new(len, len, val);
        SmallBitVec {
            data: (header_ptr as usize) | HEAP_FLAG,
        }
    }
    /// Creates an empty vector with room for at least `cap` bits before
    /// reallocating. Capacities up to `inline_capacity()` stay inline.
    #[inline]
    pub fn with_capacity(cap: usize) -> SmallBitVec {
        if cap <= inline_capacity() {
            return SmallBitVec::new();
        }
        let header_ptr = Header::new(cap, 0, false);
        SmallBitVec {
            data: (header_ptr as usize) | HEAP_FLAG,
        }
    }
    /// Returns the number of bits in the vector.
    #[inline]
    pub fn len(&self) -> usize {
        if self.is_inline() {
            // The sentinel is the lowest set bit; every bit above it is a
            // value bit, so `trailing_zeros` locates the length.
            inline_bits() - self.data.trailing_zeros() as usize - 1
        } else {
            self.header().len
        }
    }
#[inline]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
    /// Returns the number of bits the vector can hold without reallocating.
    #[inline]
    pub fn capacity(&self) -> usize {
        if self.is_inline() {
            inline_capacity()
        } else {
            self.header().buffer_len * bits_per_storage()
        }
    }
#[inline]
pub fn get(&self, n: usize) -> Option<bool> {
if n < self.len() {
Some(unsafe { self.get_unchecked(n) })
} else {
None
}
}
#[inline]
pub fn last(&self) -> Option<bool> {
self.len()
.checked_sub(1)
.map(|n| unsafe { self.get_unchecked(n) })
}
    /// Returns the bit at index `n` without bounds checking.
    ///
    /// # Safety
    ///
    /// `n` must be less than `self.len()`.
    #[inline]
    pub unsafe fn get_unchecked(&self, n: usize) -> bool {
        if self.is_inline() {
            self.data & inline_index(n) != 0
        } else {
            // Heap layout: bit `n` is bit `n % bits_per_storage()` (from
            // the LSB) of word `n / bits_per_storage()`.
            let buffer = self.buffer();
            let i = n / bits_per_storage();
            let offset = n % bits_per_storage();
            *buffer.get_unchecked(i) & (1 << offset) != 0
        }
    }
    /// Sets the bit at index `n` to `val`.
    ///
    /// # Panics
    ///
    /// Panics if `n` is out of bounds.
    #[inline]
    pub fn set(&mut self, n: usize, val: bool) {
        assert!(n < self.len(), "Index {} out of bounds", n);
        unsafe {
            self.set_unchecked(n, val);
        }
    }
    /// Sets the bit at index `n` to `val` without bounds checking.
    ///
    /// # Safety
    ///
    /// `n` must be less than `self.len()`.
    #[inline]
    pub unsafe fn set_unchecked(&mut self, n: usize, val: bool) {
        if self.is_inline() {
            if val {
                self.data |= inline_index(n);
            } else {
                self.data &= !inline_index(n);
            }
        } else {
            let buffer = self.buffer_mut();
            let i = n / bits_per_storage();
            let offset = n % bits_per_storage();
            if val {
                *buffer.get_unchecked_mut(i) |= 1 << offset;
            } else {
                *buffer.get_unchecked_mut(i) &= !(1 << offset);
            }
        }
    }
    /// Appends a bit to the end of the vector, growing storage if needed.
    #[inline]
    pub fn push(&mut self, val: bool) {
        let idx = self.len();
        if idx == self.capacity() {
            self.reserve(1);
        }
        unsafe {
            // Extend the length first so `set_unchecked(idx, ..)` is in bounds.
            self.set_len(idx + 1);
            self.set_unchecked(idx, val);
        }
    }
#[inline]
pub fn pop(&mut self) -> Option<bool> {
self.len().checked_sub(1).map(|last| unsafe {
let val = self.get_unchecked(last);
self.set_len(last);
val
})
}
    /// Removes and returns the bit at `idx`, shifting all later bits down
    /// by one position.
    ///
    /// # Panics
    ///
    /// Panics if `idx` is out of bounds (via the `Index` impl).
    #[inline]
    pub fn remove(&mut self, idx: usize) -> bool {
        let len = self.len();
        let val = self[idx];
        if self.is_inline() {
            // `mask` covers position `idx` and everything below it (toward
            // the LSB): all later bits plus the sentinel. Shifting that
            // region left by one removes bit `idx` AND moves the sentinel
            // up, which decrements the length implicitly.
            let mask = !inline_ones(idx);
            let new_vals = (self.data & mask) << 1;
            self.data = (self.data & !mask) | (new_vals & mask);
        } else {
            let first = idx / bits_per_storage();
            let offset = idx % bits_per_storage();
            let count = buffer_len(len);
            {
                // In the word containing `idx`, shift the bits at or above
                // `offset` down by one; bits below `offset` are untouched.
                let buf = self.buffer_mut();
                let mask = !0 << offset;
                let new_vals = (buf[first] & mask) >> 1;
                buf[first] = (buf[first] & !mask) | (new_vals & mask);
            }
            // For each following word, carry its lowest bit into the top
            // of the previous word, then shift the word itself down.
            for i in (first + 1)..count {
                let bit_idx = i * bits_per_storage();
                unsafe {
                    let first_bit = self.get_unchecked(bit_idx);
                    self.set_unchecked(bit_idx - 1, first_bit);
                }
                self.buffer_mut()[i] >>= 1;
            }
            unsafe {
                self.set_len(len - 1);
            }
        }
        val
    }
#[inline]
pub fn clear(&mut self) {
unsafe {
self.set_len(0);
}
}
    /// Ensures capacity for at least `additional` more bits beyond the
    /// current length, growing geometrically.
    ///
    /// # Panics
    ///
    /// Panics with "capacity overflow" if the required capacity overflows
    /// `usize`.
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        let old_cap = self.capacity();
        let new_cap = self
            .len()
            .checked_add(additional)
            .expect("capacity overflow");
        if new_cap <= old_cap {
            return;
        }
        // Grow by at least 2x to amortize repeated pushes.
        let double_cap = old_cap.saturating_mul(2);
        self.reallocate(max(new_cap, double_cap));
    }
    /// Sets the logical length without initializing bit contents.
    ///
    /// For inline storage this moves the sentinel bit and zeroes every
    /// bit below it; heap storage just updates the header field.
    #[inline]
    unsafe fn set_len(&mut self, len: usize) {
        debug_assert!(len <= self.capacity());
        if self.is_inline() {
            let sentinel = inline_index(len);
            // Keep the value bits and the new sentinel; clear all bits
            // below the sentinel position.
            let mask = !(sentinel - 1);
            self.data |= sentinel;
            self.data &= mask;
        } else {
            self.header_mut().len = len;
        }
    }
#[inline]
pub fn iter(&self) -> Iter {
Iter {
vec: self,
range: 0..self.len(),
}
}
#[inline]
pub fn range(&self, range: Range<usize>) -> VecRange {
assert!(range.end <= self.len(), "range out of bounds");
VecRange { vec: &self, range }
}
    /// Returns `true` if every bit is `false` (vacuously true when empty).
    #[inline]
    pub fn all_false(&self) -> bool {
        let mut len = self.len();
        if len == 0 {
            return true;
        }
        if self.is_inline() {
            // Test only the value bits; the sentinel is outside the mask.
            let mask = inline_ones(len);
            self.data & mask == 0
        } else {
            for &storage in self.buffer() {
                if len >= bits_per_storage() {
                    // Fully occupied word: every bit must be clear.
                    if storage != 0 {
                        return false;
                    }
                    len -= bits_per_storage();
                } else {
                    // Final, partially occupied word: test only the low
                    // `len` bits; anything above them is stale storage.
                    let mask = (1 << len) - 1;
                    if storage & mask != 0 {
                        return false;
                    }
                    break;
                }
            }
            true
        }
    }
    /// Returns `true` if every bit is `true` (vacuously true when empty).
    #[inline]
    pub fn all_true(&self) -> bool {
        let mut len = self.len();
        if len == 0 {
            return true;
        }
        if self.is_inline() {
            // Compare only the value bits against an all-ones mask.
            let mask = inline_ones(len);
            self.data & mask == mask
        } else {
            for &storage in self.buffer() {
                if len >= bits_per_storage() {
                    // Fully occupied word: every bit must be set.
                    if storage != !0 {
                        return false;
                    }
                    len -= bits_per_storage();
                } else {
                    // Final, partially occupied word: only the low `len`
                    // bits must be set; stale bits above are ignored.
                    let mask = (1 << len) - 1;
                    if storage & mask != mask {
                        return false;
                    }
                    break;
                }
            }
            true
        }
    }
pub fn truncate(&mut self, len: usize) {
unsafe {
if len < self.len() {
self.set_len(len);
}
}
}
    /// Resizes the vector to `len` bits; any newly added bits are set to
    /// `value`.
    pub fn resize(&mut self, len: usize, value: bool) {
        let old_len = self.len();
        if len > old_len {
            unsafe {
                // Grow storage first, then extend the length and fill the
                // fresh (uninitialized) bits with `value`.
                self.reallocate(len);
                self.set_len(len);
                for i in old_len..len {
                    self.set(i, value);
                }
            }
        } else {
            self.truncate(len);
        }
    }
    /// Grows the backing storage to hold at least `cap` bits; never shrinks.
    fn reallocate(&mut self, cap: usize) {
        let old_cap = self.capacity();
        if cap <= old_cap {
            return;
        }
        assert!(self.len() <= cap);
        if self.is_heap() {
            let old_buffer_len = self.header().buffer_len;
            let new_buffer_len = buffer_len(cap);
            let old_alloc_len = header_len() + old_buffer_len;
            let new_alloc_len = header_len() + new_buffer_len;
            let old_ptr = self.header_raw() as *mut Storage;
            // Temporarily re-adopt the allocation as a Vec so it can grow.
            let mut v = unsafe { Vec::from_raw_parts(old_ptr, old_alloc_len, old_alloc_len) };
            v.resize(new_alloc_len, 0);
            // NOTE(review): this relies on shrink_to_fit making the Vec's
            // capacity equal to new_alloc_len, so the buffer_len written
            // below matches the allocation that Drop later reconstructs —
            // confirm against the allocator's guarantees.
            v.shrink_to_fit();
            self.data = v.as_ptr() as usize | HEAP_FLAG;
            // Leak the Vec; ownership returns to the tagged pointer.
            forget(v);
            self.header_mut().buffer_len = new_buffer_len;
        } else {
            // Inline -> heap spill: allocate, then copy each bit across.
            let old_self = replace(self, SmallBitVec::with_capacity(cap));
            unsafe {
                self.set_len(old_self.len());
                for i in 0..old_self.len() {
                    self.set_unchecked(i, old_self.get_unchecked(i));
                }
            }
        }
    }
#[inline]
pub fn heap_ptr(&self) -> Option<*const usize> {
if self.is_heap() {
Some((self.data & !HEAP_FLAG) as *const Storage)
} else {
None
}
}
    /// Consumes the vector and returns its raw backing storage.
    #[inline]
    pub fn into_storage(self) -> InternalStorage {
        if self.is_heap() {
            let alloc_len = header_len() + self.header().buffer_len;
            let ptr = self.header_raw() as *mut Storage;
            // Re-adopt the allocation as a boxed slice covering both the
            // header words and the buffer words.
            let slice = unsafe { Box::from_raw(slice::from_raw_parts_mut(ptr, alloc_len)) };
            // Skip Drop: ownership has moved into the boxed slice.
            forget(self);
            InternalStorage::Spilled(slice)
        } else {
            InternalStorage::Inline(self.data)
        }
    }
    /// Rebuilds a vector from storage produced by
    /// [`SmallBitVec::into_storage`].
    ///
    /// # Safety
    ///
    /// `storage` must have been produced by `into_storage`: an inline word
    /// must be well-formed (sentinel present, heap flag clear) and a
    /// spilled slice must begin with a valid header.
    pub unsafe fn from_storage(storage: InternalStorage) -> SmallBitVec {
        match storage {
            InternalStorage::Inline(data) => SmallBitVec { data },
            InternalStorage::Spilled(vs) => {
                let ptr = Box::into_raw(vs);
                SmallBitVec {
                    data: (ptr as *mut usize as usize) | HEAP_FLAG,
                }
            }
        }
    }
#[inline]
fn is_inline(&self) -> bool {
self.data & HEAP_FLAG == 0
}
#[inline]
fn is_heap(&self) -> bool {
!self.is_inline()
}
    /// Returns the untagged pointer to the heap header.
    ///
    /// # Panics
    ///
    /// Panics if the vector is inline.
    #[inline]
    fn header_raw(&self) -> *mut Header {
        assert!(self.is_heap());
        (self.data & !HEAP_FLAG) as *mut Header
    }
    /// Mutable view of the heap header. Must only be called on heap vectors.
    #[inline]
    fn header_mut(&mut self) -> &mut Header {
        // SAFETY: header_raw asserts heap storage; the pointer stays valid
        // for as long as the allocation is owned by `self`.
        unsafe { &mut *self.header_raw() }
    }
    /// Shared view of the heap header. Must only be called on heap vectors.
    #[inline]
    fn header(&self) -> &Header {
        // SAFETY: header_raw asserts heap storage; the pointer stays valid
        // for as long as the allocation is owned by `self`.
        unsafe { &*self.header_raw() }
    }
#[inline]
fn buffer_raw(&self) -> *mut [Storage] {
unsafe {
let header_ptr = self.header_raw();
let buffer_len = (*header_ptr).buffer_len;
let buffer_ptr = (header_ptr as *mut Storage)
.offset((size_of::<Header>() / size_of::<Storage>()) as isize);
slice::from_raw_parts_mut(buffer_ptr, buffer_len)
}
}
    /// Mutable view of the heap bit buffer. Must only be called on heap
    /// vectors.
    #[inline]
    fn buffer_mut(&mut self) -> &mut [Storage] {
        // SAFETY: buffer_raw yields a valid slice for heap vectors.
        unsafe { &mut *self.buffer_raw() }
    }
    /// Shared view of the heap bit buffer. Must only be called on heap
    /// vectors.
    #[inline]
    fn buffer(&self) -> &[Storage] {
        // SAFETY: buffer_raw yields a valid slice for heap vectors.
        unsafe { &*self.buffer_raw() }
    }
}
impl fmt::Debug for SmallBitVec {
    /// Formats the vector as a list of `0`/`1` entries.
    #[inline]
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let mut list = fmt.debug_list();
        for bit in self.iter() {
            list.entry(&(bit as u8));
        }
        list.finish()
    }
}
impl Default for SmallBitVec {
#[inline]
fn default() -> Self {
Self::new()
}
}
impl PartialEq for SmallBitVec {
    fn eq(&self, other: &Self) -> bool {
        // Inline vs inline: the tagged word encodes both the length (via
        // the sentinel) and the contents, so one comparison suffices.
        if self.is_inline() && other.is_inline() {
            return self.data == other.data;
        }
        let len = self.len();
        if len != other.len() {
            return false;
        }
        if self.is_heap() && other.is_heap() {
            // Compare whole words, masking the final partial word so that
            // stale bits past the logical length are ignored.
            let buf0 = self.buffer();
            let buf1 = other.buffer();
            let full_blocks = len / bits_per_storage();
            let remainder = len % bits_per_storage();
            if buf0[..full_blocks] != buf1[..full_blocks] {
                return false;
            }
            if remainder != 0 {
                let mask = (1 << remainder) - 1;
                if buf0[full_blocks] & mask != buf1[full_blocks] & mask {
                    return false;
                }
            }
            return true;
        }
        // Mixed inline/heap storage: fall back to bit-by-bit comparison.
        Iterator::eq(self.iter(), other.iter())
    }
}
impl Eq for SmallBitVec {}
impl Drop for SmallBitVec {
    fn drop(&mut self) {
        if self.is_heap() {
            unsafe {
                // Rebuild the Vec that originally owned the allocation;
                // the unnamed temporary is dropped at the end of the
                // statement, freeing the header + buffer memory.
                let header_ptr = self.header_raw();
                let alloc_ptr = header_ptr as *mut Storage;
                let alloc_len = header_len() + (*header_ptr).buffer_len;
                Vec::from_raw_parts(alloc_ptr, alloc_len, alloc_len);
            }
        }
    }
}
impl Clone for SmallBitVec {
    fn clone(&self) -> Self {
        if self.is_inline() {
            // The inline word is the entire state; copying it is a clone.
            return SmallBitVec { data: self.data };
        }
        // Heap: copy the whole allocation (header + buffer) into a new
        // Vec, then leak it behind a tagged pointer.
        let buffer_len = self.header().buffer_len;
        let alloc_len = header_len() + buffer_len;
        let ptr = self.header_raw() as *mut Storage;
        let raw_allocation = unsafe { slice::from_raw_parts(ptr, alloc_len) };
        let v = raw_allocation.to_vec();
        // NOTE(review): assumes `to_vec` yields capacity == alloc_len so the
        // copied header's buffer_len still matches the allocation that Drop
        // later reconstructs — confirm.
        let header_ptr = v.as_ptr() as *mut Header;
        forget(v);
        SmallBitVec {
            data: (header_ptr as usize) | HEAP_FLAG,
        }
    }
}
impl Index<usize> for SmallBitVec {
    type Output = bool;
    /// Returns a reference to a promoted `true`/`false` matching bit `i`.
    #[inline(always)]
    fn index(&self, i: usize) -> &bool {
        assert!(i < self.len(), "index out of range");
        match self.get(i).unwrap() {
            true => &true,
            false => &false,
        }
    }
}
#[cfg(feature = "malloc_size_of")]
impl malloc_size_of::MallocSizeOf for SmallBitVec {
    /// Reports the size of the heap allocation, if any; inline vectors
    /// contribute zero.
    fn size_of(&self, ops: &mut malloc_size_of::MallocSizeOfOps) -> usize {
        if let Some(ptr) = self.heap_ptr() {
            unsafe { ops.malloc_size_of(ptr) }
        } else {
            0
        }
    }
}
impl hash::Hash for SmallBitVec {
    /// Hashes the length followed by the bit contents, normalized so equal
    /// vectors hash equally whether stored inline or on the heap.
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        let len = self.len();
        len.hash(state);
        if self.is_inline() {
            // Inline bits are stored MSB-first; masking off the sentinel
            // and reversing matches the heap layout (bit n at LSB-offset n).
            (self.data & inline_ones(len)).reverse_bits().hash(state);
        } else {
            let full_blocks = len / bits_per_storage();
            let remainder = len % bits_per_storage();
            let buffer = self.buffer();
            if full_blocks != 0 {
                buffer[..full_blocks].hash(state);
            }
            if remainder != 0 {
                // Mask out stale bits past the logical length.
                let mask = (1 << remainder) - 1;
                (buffer[full_blocks] & mask).hash(state);
            }
        }
    }
}
impl Extend<bool> for SmallBitVec {
    /// Appends every bit yielded by `iter`, reserving space up front based
    /// on the iterator's size hint.
    #[inline]
    fn extend<I: IntoIterator<Item = bool>>(&mut self, iter: I) {
        let iter = iter.into_iter();
        let (min, _) = iter.size_hint();
        // The old `assert!(min <= usize::max_value(), "capacity overflow")`
        // was a tautology — a usize can never exceed usize::MAX — so it is
        // removed; `reserve` itself panics with "capacity overflow" when
        // the total length would actually overflow.
        self.reserve(min);
        for element in iter {
            self.push(element)
        }
    }
}
impl FromIterator<bool> for SmallBitVec {
    /// Collects bits into a fresh vector by delegating to `Extend`.
    #[inline]
    fn from_iter<I: IntoIterator<Item = bool>>(iter: I) -> Self {
        let mut bits = SmallBitVec::new();
        bits.extend(iter);
        bits
    }
}
impl IntoIterator for SmallBitVec {
    type Item = bool;
    type IntoIter = IntoIter;
    /// Converts the vector into a consuming bit iterator.
    #[inline]
    fn into_iter(self) -> IntoIter {
        // Compute the length before `self` moves into the iterator.
        let range = 0..self.len();
        IntoIter { vec: self, range }
    }
}
impl<'a> IntoIterator for &'a SmallBitVec {
type Item = bool;
type IntoIter = Iter<'a>;
#[inline]
fn into_iter(self) -> Iter<'a> {
self.iter()
}
}
/// Consuming iterator over the bits of a [`SmallBitVec`].
pub struct IntoIter {
    // Owned vector whose bits are yielded.
    vec: SmallBitVec,
    // Indices not yet yielded from either end.
    range: Range<usize>,
}
impl Iterator for IntoIter {
    type Item = bool;
    #[inline]
    fn next(&mut self) -> Option<bool> {
        let i = self.range.next()?;
        // SAFETY: `range` only ever holds in-bounds indices.
        Some(unsafe { self.vec.get_unchecked(i) })
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.range.size_hint()
    }
}
impl DoubleEndedIterator for IntoIter {
    #[inline]
    fn next_back(&mut self) -> Option<bool> {
        let i = self.range.next_back()?;
        // SAFETY: `range` only ever holds in-bounds indices.
        Some(unsafe { self.vec.get_unchecked(i) })
    }
}
impl ExactSizeIterator for IntoIter {}
/// Borrowing iterator over the bits of a [`SmallBitVec`].
pub struct Iter<'a> {
    // Vector whose bits are yielded.
    vec: &'a SmallBitVec,
    // Indices not yet yielded from either end.
    range: Range<usize>,
}
impl<'a> Default for Iter<'a> {
#[inline]
fn default() -> Self {
const EMPTY: &'static SmallBitVec = &SmallBitVec::new();
Self {
vec: EMPTY,
range: 0..0,
}
}
}
impl<'a> Iterator for Iter<'a> {
    type Item = bool;
    #[inline]
    fn next(&mut self) -> Option<bool> {
        let i = self.range.next()?;
        // SAFETY: `range` only ever holds in-bounds indices.
        Some(unsafe { self.vec.get_unchecked(i) })
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.range.size_hint()
    }
}
impl<'a> DoubleEndedIterator for Iter<'a> {
    #[inline]
    fn next_back(&mut self) -> Option<bool> {
        let i = self.range.next_back()?;
        // SAFETY: `range` only ever holds in-bounds indices.
        Some(unsafe { self.vec.get_unchecked(i) })
    }
}
impl<'a> ExactSizeIterator for Iter<'a> {}
/// A borrowed view of a contiguous range of bits in a [`SmallBitVec`],
/// created by [`SmallBitVec::range`].
#[derive(Debug, Clone)]
pub struct VecRange<'a> {
    // Underlying vector.
    vec: &'a SmallBitVec,
    // Bounds of this view within `vec`.
    range: Range<usize>,
}
impl<'a> VecRange<'a> {
    /// Returns an iterator over the bits inside this view.
    #[inline]
    pub fn iter(&self) -> Iter<'a> {
        let range = self.range.clone();
        Iter { vec: self.vec, range }
    }
}
impl<'a> Index<usize> for VecRange<'a> {
    type Output = bool;
    /// Indexes relative to the start of the view.
    ///
    /// # Panics
    ///
    /// Panics if `i` is outside the view.
    #[inline]
    fn index(&self, i: usize) -> &bool {
        // The old unchecked `i + self.range.start` could wrap in release
        // builds, pass the `< range.end` assertion with a bogus smaller
        // index, and silently return the wrong bit; `checked_add` turns
        // that into a panic instead.
        let vec_i = i
            .checked_add(self.range.start)
            .expect("index out of range");
        assert!(vec_i < self.range.end, "index out of range");
        &self.vec[vec_i]
    }
}