// grit_bitvec/const_proto_bitvec.rs

use std::ops::RangeFrom;

use crate::{
    BitProto,
    RawBitVec,
    CProtoBitVecDrain,
    CProtoBitVecIter,
    Range,
    ManuallyDrop
};

/// ## `CProtoBitVec`: "Constant Prototype Bitwise Vector"  
/// A `BitVec` where the bit-width and masking data ([`BitProto`]) are saved in a monomorphized constant for
/// each distinct value of `BIT_WIDTH`
/// 
/// This is a thin wrapper around [`RawBitVec`] that simply calls the underlying raw method and passes the associated
/// [`BitProto`] along with it. Unlike [`RawBitVec`], this is safe because it is impossible to ever use the wrong [`BitProto`]
/// 
/// ### Pros
/// - Simpler, safer API than [`RawBitVec`]
/// - Same stack size as [`RawBitVec`] and [`Vec`] (3 usize)
/// - Allows for constant-propagation optimizations
/// 
/// ### Cons
/// - Every separate value of `BIT_WIDTH` creates a distinct type with its own copy of all methods (larger binary)
/// - Cannot store [`CProtoBitVec`]s with different `BIT_WIDTH`s in the same homogeneous collection (`Array`, [`Vec`], [`HashMap`](std::collections::HashMap), etc.)
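/// 
/// ### Example
/// A minimal usage sketch, marked `ignore` rather than compiled as a doctest (it assumes
/// `CProtoBitVec` is re-exported at the crate root as `grit_bitvec::CProtoBitVec`):
/// ```ignore
/// use grit_bitvec::CProtoBitVec;
/// 
/// // Each element occupies 3 bits, so it can hold values 0..=7
/// let mut vec: CProtoBitVec<3> = CProtoBitVec::new();
/// vec.push(5).unwrap();
/// vec.push(2).unwrap();
/// assert_eq!(vec.len(), 2);
/// assert_eq!(vec.get(0).unwrap(), 5);
/// assert_eq!(vec.pop().unwrap(), 2);
/// ```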
pub struct CProtoBitVec<const BIT_WIDTH: usize>(pub(crate) RawBitVec);

impl<const BIT_WIDTH: usize> CProtoBitVec<BIT_WIDTH> {
    pub const PROTO: BitProto = BitProto::create(BIT_WIDTH);

    #[inline(always)]
    pub fn len(&self) -> usize {
        self.0.len
    }

    #[inline(always)]
    pub fn cap(&self) -> usize {
        unsafe {self.0.cap(Self::PROTO)}
    }

    #[inline(always)]
    pub fn free(&self) -> usize {
        unsafe {self.0.free(Self::PROTO)}
    }

    #[inline(always)]
    pub fn new() -> Self {
        Self(RawBitVec::new())
    }

    #[inline(always)]
    pub fn with_capacity(cap: usize) -> Self {
        Self(RawBitVec::with_capacity(Self::PROTO, cap))
    }

    #[inline(always)]
    pub fn grow_exact_for_additional_elements_if_needed(&mut self, extra_elements: usize) -> Result<(), String> {
        unsafe {self.0.grow_exact_for_additional_elements_if_needed(Self::PROTO, extra_elements)}
    }

    #[inline(always)]
    pub fn grow_exact_for_total_elements_if_needed(&mut self, total_elements: usize) -> Result<(), String> {
        unsafe {self.0.grow_exact_for_total_elements_if_needed(Self::PROTO, total_elements)}
    }

    #[inline(always)]
    pub fn grow_for_additional_elements_if_needed(&mut self, extra_elements: usize) -> Result<(), String> {
        unsafe {self.0.grow_for_additional_elements_if_needed(Self::PROTO, extra_elements)}
    }

    #[inline(always)]
    pub fn grow_for_total_elements_if_needed(&mut self, total_elements: usize) -> Result<(), String> {
        unsafe {self.0.grow_for_total_elements_if_needed(Self::PROTO, total_elements)}
    }

    #[inline(always)]
    pub fn clear(&mut self) {
        self.0.clear()
    }

    #[inline(always)]
    pub fn push(&mut self, val: usize) -> Result<(), String> {
        unsafe {self.0.push(Self::PROTO, val)}
    }

    #[inline(always)]
    pub unsafe fn push_unchecked(&mut self, val: usize) {
        self.0.push_unchecked(Self::PROTO, val)
    }

    #[inline(always)]
    pub fn pop(&mut self) -> Result<usize, String> {
        unsafe {self.0.pop(Self::PROTO)}
    }

    #[inline(always)]
    pub unsafe fn pop_unchecked(&mut self) -> usize {
        self.0.pop_unchecked(Self::PROTO)
    }

    #[inline(always)]
    pub fn insert(&mut self, idx: usize, val: usize) -> Result<(), String> {
        unsafe {self.0.insert(Self::PROTO, idx, val)}
    }

    #[inline(always)]
    pub unsafe fn insert_unchecked(&mut self, idx: usize, val: usize) {
        self.0.insert_unchecked(Self::PROTO, idx, val)
    }

    #[inline(always)]
    pub fn insert_bitvec(&mut self, insert_idx: usize, bitvec: Self) -> Result<(), String> {
        unsafe {self.0.insert_bitvec(Self::PROTO, insert_idx, bitvec.into_raw())}
    }

    #[inline(always)]
    pub unsafe fn insert_bitvec_unchecked(&mut self, insert_idx: usize, bitvec: Self) {
        self.0.insert_bitvec_unchecked(Self::PROTO, insert_idx, bitvec.into_raw())
    }

    #[inline]
    pub fn insert_iter<II, TO, ESI>(&mut self, insert_idx: usize, source: II) -> Result<(), String>
    where II: IntoIterator<Item = TO, IntoIter = ESI>, TO: ToOwned<Owned = usize>, ESI: ExactSizeIterator + Iterator<Item = TO> {
        unsafe {self.0.insert_iter(Self::PROTO, insert_idx, source)}
    }

    #[inline]
    pub unsafe fn insert_iter_unchecked<II, TO, ESI>(&mut self, insert_idx: usize, source: II)
    where II: IntoIterator<Item = TO, IntoIter = ESI>, TO: ToOwned<Owned = usize>, ESI: ExactSizeIterator + Iterator<Item = TO> {
        self.0.insert_iter_unchecked(Self::PROTO, insert_idx, source)
    }

    #[inline(always)]
    pub fn remove(&mut self, idx: usize) -> Result<usize, String> {
        unsafe {self.0.remove(Self::PROTO, idx)}
    }

    #[inline(always)]
    pub unsafe fn remove_unchecked(&mut self, idx: usize) -> usize {
        self.0.remove_unchecked(Self::PROTO, idx)
    }

    #[inline(always)]
    pub fn remove_range(&mut self, idx_range: Range<usize>) -> Result<Self, String> {
        match unsafe {self.0.remove_range(Self::PROTO, idx_range)} {
            Ok(raw) => Ok(Self(raw)),
            Err(e) => Err(e),
        }
    }

    #[inline(always)]
    pub unsafe fn remove_range_unchecked(&mut self, idx_range: Range<usize>) -> Self {
        Self(self.0.remove_range_unchecked(Self::PROTO, idx_range))
    }

    #[inline(always)]
    pub fn trim_range(&mut self, idx_range: RangeFrom<usize>) -> Result<Self, String> {
        match unsafe {self.0.trim_range(Self::PROTO, idx_range)} {
            Ok(raw) => Ok(Self(raw)),
            Err(e) => Err(e),
        }
    }

    #[inline(always)]
    pub unsafe fn trim_range_unchecked(&mut self, idx_range: RangeFrom<usize>) -> Self {
        Self(self.0.trim_range_unchecked(Self::PROTO, idx_range))
    }

    #[inline(always)]
    pub fn swap(&mut self, idx_a: usize, idx_b: usize) -> Result<(), String> {
        unsafe {self.0.swap(Self::PROTO, idx_a, idx_b)}
    }

    #[inline(always)]
    pub unsafe fn swap_unchecked(&mut self, idx_a: usize, idx_b: usize) {
        self.0.swap_unchecked(Self::PROTO, idx_a, idx_b)
    }

    #[inline(always)]
    pub fn swap_pop(&mut self, idx: usize) -> Result<usize, String> {
        unsafe {self.0.swap_pop(Self::PROTO, idx)}
    }

    #[inline(always)]
    pub unsafe fn swap_pop_unchecked(&mut self, idx: usize) -> usize {
        self.0.swap_pop_unchecked(Self::PROTO, idx)
    }

    #[inline(always)]
    pub fn trim_excess_capacity(&mut self, extra_capacity_to_keep: usize) -> Result<(), String> {
        unsafe {self.0.trim_excess_capacity(Self::PROTO, extra_capacity_to_keep)}
    }

    #[inline(always)]
    pub fn append_bitvec(&mut self, bitvec: Self) -> Result<(), String> {
        unsafe {self.0.append_bitvec(Self::PROTO, bitvec.into_raw())}
    }

    #[inline(always)]
    pub unsafe fn append_bitvec_unchecked(&mut self, bitvec: Self) {
        self.0.append_bitvec_unchecked(Self::PROTO, bitvec.into_raw())
    }

    #[inline(always)]
    pub fn append_iter<II, TO, ESI>(&mut self, source: II) -> Result<(), String>
    where II: IntoIterator<Item = TO, IntoIter = ESI>, TO: ToOwned<Owned = usize>, ESI: ExactSizeIterator + Iterator<Item = TO> {
        unsafe {self.0.append_iter(Self::PROTO, source)}
    }

    #[inline(always)]
    pub unsafe fn append_iter_unchecked<I, TO>(&mut self, iter: I)
    where I: Iterator<Item = TO> + ExactSizeIterator, TO: ToOwned<Owned = usize> {
        self.0.append_iter_unchecked(Self::PROTO, iter)
    }

    #[inline(always)]
    pub fn get(&self, idx: usize) -> Result<usize, String> {
        unsafe {self.0.get(Self::PROTO, idx)}
    }

    #[inline(always)]
    pub unsafe fn get_unchecked(&self, idx: usize) -> usize {
        self.0.get_unchecked(Self::PROTO, idx)
    }

    #[inline(always)]
    pub fn replace(&mut self, idx: usize, val: usize) -> Result<usize, String> {
        unsafe {self.0.replace(Self::PROTO, idx, val)}
    }

    #[inline(always)]
    pub unsafe fn replace_unchecked(&mut self, idx: usize, val: usize) -> usize {
        self.0.replace_unchecked(Self::PROTO, idx, val)
    }

    #[inline(always)]
    pub fn set(&mut self, idx: usize, val: usize) -> Result<(), String> {
        unsafe {self.0.set(Self::PROTO, idx, val)}
    }

    #[inline(always)]
    pub unsafe fn set_unchecked(&mut self, idx: usize, val: usize) {
        self.0.set_unchecked(Self::PROTO, idx, val)
    }

    #[inline]
    pub fn discard_from_end(&mut self, count: usize) {
        self.0.discard_from_end(count)
    }

    #[inline(always)]
    pub fn drain<'vec>(&'vec mut self) -> CProtoBitVecDrain<'vec, BIT_WIDTH> {
        CProtoBitVecDrain(self.0.drain())
    }

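    /// Decomposes this `CProtoBitVec` into its underlying [`RawBitVec`] without running `Drop`.
    ///
    /// Wrapping `self` in `ManuallyDrop` keeps its destructor from running, so ownership of the
    /// allocation is handed to the returned [`RawBitVec`] exactly once (no double-free).
    ///
    /// ### Safety
    /// The returned [`RawBitVec`] is no longer tied to `Self::PROTO`; the caller must only ever
    /// use it with the matching [`BitProto`], which is presumably why this method is `unsafe`.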
    #[inline(always)]
    pub unsafe fn into_raw(self) -> RawBitVec {
        let nodrop_self = ManuallyDrop::new(self);
        RawBitVec {
            ptr: nodrop_self.0.ptr,
            len: nodrop_self.0.len,
            true_cap: nodrop_self.0.true_cap
        }
    }
}

impl<const BIT_WIDTH: usize> IntoIterator for CProtoBitVec<BIT_WIDTH> {
    type Item = usize;

    type IntoIter = CProtoBitVecIter<BIT_WIDTH>;

    #[inline(always)]
    fn into_iter(self) -> Self::IntoIter {
        CProtoBitVecIter(unsafe {self.into_raw().into_iter()})
    }
}

impl<const BIT_WIDTH: usize> Drop for CProtoBitVec<BIT_WIDTH> {
    #[inline(always)]
    fn drop(&mut self) {/* RawBitVec::drop() will take care of the allocation */}
}