grit_bitvec/local_proto_bitvec.rs

use std::ops::RangeFrom;

use crate::{
    BitProto,
    RawBitVec,
    LProtoBitVecIter,
    LProtoBitVecDrain,
    Range,
    ManuallyDrop
};

/// ## `LProtoBitVec`: "Local Prototype Bitwise Vector"
/// A `BitVec` where the bit-width and masking data ([`BitProto`]) is saved directly in the struct.
///
/// This is a thin wrapper around [`RawBitVec`] that simply calls the underlying raw method and passes the associated
/// [`BitProto`] along with it. Unlike [`RawBitVec`], this is safe because it is impossible to ever use the wrong [`BitProto`].
///
/// ### Pros
/// - Simpler, safer API than [`RawBitVec`]
/// - No monomorphization (smaller binary than [`CProtoBitVec`](crate::const_proto_bitvec::CProtoBitVec))
/// - Can store [`LProtoBitVec`]s in a homogeneous collection (`Array`, [`Vec`], [`HashMap`](std::collections::HashMap), etc.)
/// - *Possibly* better cache locality compared to [`SProtoBitVec`](crate::SProtoBitVec) (no pointer to heap)
///
/// ### Cons
/// - A full copy of a [`BitProto`] is stored in every [`LProtoBitVec`] (6 `usize` total)
/// - No constant-propagation optimizations
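///
/// ### Example
/// A minimal usage sketch. The `BitProto::create(3)` constructor shown below is only an
/// assumed placeholder for however the crate actually builds a [`BitProto`], so the block
/// is not compiled as a doctest.
/// ```ignore
/// // Assumed constructor: a prototype describing 3-bit elements (values 0..=7).
/// let proto = BitProto::create(3);
/// let mut bits = LProtoBitVec::new(proto);
///
/// // Checked operations return Result<_, String> rather than panicking.
/// bits.push(5).unwrap();
/// bits.push(7).unwrap();
/// assert_eq!(bits.len(), 2);
/// assert_eq!(bits.get(0).unwrap(), 5);
/// assert_eq!(bits.pop().unwrap(), 7);
///
/// // Because each LProtoBitVec carries its own BitProto, vectors with different
/// // bit widths can share one homogeneous collection.
/// let mixed: Vec<LProtoBitVec> = vec![
///     LProtoBitVec::new(BitProto::create(3)),
///     LProtoBitVec::new(BitProto::create(5)),
/// ];
/// ```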
pub struct LProtoBitVec {
    pub(crate) proto: BitProto,
    pub(crate) vec: RawBitVec
}

impl LProtoBitVec {
    #[inline(always)]
    pub fn len(&self) -> usize {
        self.vec.len
    }

    #[inline(always)]
    pub fn cap(&self) -> usize {
        unsafe { self.vec.cap(self.proto) }
    }

    #[inline(always)]
    pub fn free(&self) -> usize {
        unsafe { self.vec.free(self.proto) }
    }

    #[inline(always)]
    pub fn new(proto: BitProto) -> Self {
        Self { proto, vec: RawBitVec::new() }
    }

    #[inline(always)]
    pub fn with_capacity(proto: BitProto, cap: usize) -> Self {
        Self { proto, vec: RawBitVec::with_capacity(proto, cap) }
    }

    #[inline(always)]
    pub fn grow_exact_for_additional_elements_if_needed(&mut self, extra_elements: usize) -> Result<(), String> {
        unsafe { self.vec.grow_exact_for_additional_elements_if_needed(self.proto, extra_elements) }
    }

    #[inline(always)]
    pub fn grow_exact_for_total_elements_if_needed(&mut self, total_elements: usize) -> Result<(), String> {
        unsafe { self.vec.grow_exact_for_total_elements_if_needed(self.proto, total_elements) }
    }

    #[inline(always)]
    pub fn grow_for_additional_elements_if_needed(&mut self, extra_elements: usize) -> Result<(), String> {
        unsafe { self.vec.grow_for_additional_elements_if_needed(self.proto, extra_elements) }
    }

    #[inline(always)]
    pub fn grow_for_total_elements_if_needed(&mut self, total_elements: usize) -> Result<(), String> {
        unsafe { self.vec.grow_for_total_elements_if_needed(self.proto, total_elements) }
    }

    #[inline(always)]
    pub fn clear(&mut self) {
        self.vec.clear()
    }

    #[inline(always)]
    pub fn push(&mut self, val: usize) -> Result<(), String> {
        unsafe { self.vec.push(self.proto, val) }
    }

    #[inline(always)]
    pub unsafe fn push_unchecked(&mut self, val: usize) {
        self.vec.push_unchecked(self.proto, val)
    }

    #[inline(always)]
    pub fn pop(&mut self) -> Result<usize, String> {
        unsafe { self.vec.pop(self.proto) }
    }

    #[inline(always)]
    pub unsafe fn pop_unchecked(&mut self) -> usize {
        self.vec.pop_unchecked(self.proto)
    }

    #[inline(always)]
    pub fn insert(&mut self, idx: usize, val: usize) -> Result<(), String> {
        unsafe { self.vec.insert(self.proto, idx, val) }
    }

    #[inline(always)]
    pub unsafe fn insert_unchecked(&mut self, idx: usize, val: usize) {
        self.vec.insert_unchecked(self.proto, idx, val)
    }

    #[inline(always)]
    pub fn insert_bitvec(&mut self, insert_idx: usize, bitvec: Self) -> Result<(), String> {
        unsafe { self.vec.insert_bitvec(self.proto, insert_idx, bitvec.into_raw()) }
    }

    #[inline(always)]
    pub unsafe fn insert_bitvec_unchecked(&mut self, insert_idx: usize, bitvec: Self) {
        self.vec.insert_bitvec_unchecked(self.proto, insert_idx, bitvec.into_raw())
    }

    #[inline]
    pub fn insert_iter<II, TO, ESI>(&mut self, insert_idx: usize, source: II) -> Result<(), String>
    where II: IntoIterator<Item = TO, IntoIter = ESI>, TO: ToOwned<Owned = usize>, ESI: ExactSizeIterator + Iterator<Item = TO> {
        unsafe { self.vec.insert_iter(self.proto, insert_idx, source) }
    }

    #[inline]
    pub unsafe fn insert_iter_unchecked<II, TO, ESI>(&mut self, insert_idx: usize, source: II)
    where II: IntoIterator<Item = TO, IntoIter = ESI>, TO: ToOwned<Owned = usize>, ESI: ExactSizeIterator + Iterator<Item = TO> {
        self.vec.insert_iter_unchecked(self.proto, insert_idx, source)
    }

    #[inline(always)]
    pub fn remove(&mut self, idx: usize) -> Result<usize, String> {
        unsafe { self.vec.remove(self.proto, idx) }
    }

    #[inline(always)]
    pub unsafe fn remove_unchecked(&mut self, idx: usize) -> usize {
        self.vec.remove_unchecked(self.proto, idx)
    }

    #[inline(always)]
    pub fn remove_range(&mut self, idx_range: Range<usize>) -> Result<Self, String> {
        match unsafe { self.vec.remove_range(self.proto, idx_range) } {
            Ok(raw) => Ok(Self {
                proto: self.proto,
                vec: raw
            }),
            Err(e) => Err(e),
        }
    }

    #[inline(always)]
    pub unsafe fn remove_range_unchecked(&mut self, idx_range: Range<usize>) -> Self {
        Self {
            proto: self.proto,
            vec: self.vec.remove_range_unchecked(self.proto, idx_range)
        }
    }

    #[inline(always)]
    pub fn trim_range(&mut self, idx_range: RangeFrom<usize>) -> Result<Self, String> {
        match unsafe { self.vec.trim_range(self.proto, idx_range) } {
            Ok(raw) => Ok(Self {
                proto: self.proto,
                vec: raw
            }),
            Err(e) => Err(e),
        }
    }

    #[inline(always)]
    pub unsafe fn trim_range_unchecked(&mut self, idx_range: RangeFrom<usize>) -> Self {
        Self {
            proto: self.proto,
            vec: self.vec.trim_range_unchecked(self.proto, idx_range)
        }
    }

    #[inline(always)]
    pub fn swap(&mut self, idx_a: usize, idx_b: usize) -> Result<(), String> {
        unsafe { self.vec.swap(self.proto, idx_a, idx_b) }
    }

    #[inline(always)]
    pub unsafe fn swap_unchecked(&mut self, idx_a: usize, idx_b: usize) {
        self.vec.swap_unchecked(self.proto, idx_a, idx_b)
    }

    #[inline(always)]
    pub fn swap_pop(&mut self, idx: usize) -> Result<usize, String> {
        unsafe { self.vec.swap_pop(self.proto, idx) }
    }

    #[inline(always)]
    pub unsafe fn swap_pop_unchecked(&mut self, idx: usize) -> usize {
        self.vec.swap_pop_unchecked(self.proto, idx)
    }

    #[inline(always)]
    pub fn trim_excess_capacity(&mut self, extra_capacity_to_keep: usize) -> Result<(), String> {
        unsafe { self.vec.trim_excess_capacity(self.proto, extra_capacity_to_keep) }
    }

    #[inline(always)]
    pub fn append_bitvec(&mut self, bitvec: Self) -> Result<(), String> {
        unsafe { self.vec.append_bitvec(self.proto, bitvec.into_raw()) }
    }

    #[inline(always)]
    pub unsafe fn append_bitvec_unchecked(&mut self, bitvec: Self) {
        self.vec.append_bitvec_unchecked(self.proto, bitvec.into_raw())
    }

    #[inline(always)]
    pub fn append_iter<II, TO, ESI>(&mut self, source: II) -> Result<(), String>
    where II: IntoIterator<Item = TO, IntoIter = ESI>, TO: ToOwned<Owned = usize>, ESI: ExactSizeIterator + Iterator<Item = TO> {
        unsafe { self.vec.append_iter(self.proto, source) }
    }

    #[inline(always)]
    pub unsafe fn append_iter_unchecked<I, TO>(&mut self, iter: I)
    where I: Iterator<Item = TO> + ExactSizeIterator, TO: ToOwned<Owned = usize> {
        self.vec.append_iter_unchecked(self.proto, iter)
    }

    #[inline(always)]
    pub fn get(&self, idx: usize) -> Result<usize, String> {
        unsafe { self.vec.get(self.proto, idx) }
    }

    #[inline(always)]
    pub unsafe fn get_unchecked(&self, idx: usize) -> usize {
        self.vec.get_unchecked(self.proto, idx)
    }

    #[inline(always)]
    pub fn replace(&mut self, idx: usize, val: usize) -> Result<usize, String> {
        unsafe { self.vec.replace(self.proto, idx, val) }
    }

    #[inline(always)]
    pub unsafe fn replace_unchecked(&mut self, idx: usize, val: usize) -> usize {
        self.vec.replace_unchecked(self.proto, idx, val)
    }

    #[inline(always)]
    pub fn set(&mut self, idx: usize, val: usize) -> Result<(), String> {
        unsafe { self.vec.set(self.proto, idx, val) }
    }

    #[inline(always)]
    pub unsafe fn set_unchecked(&mut self, idx: usize, val: usize) {
        self.vec.set_unchecked(self.proto, idx, val)
    }

    #[inline]
    pub fn discard_from_end(&mut self, count: usize) {
        self.vec.discard_from_end(count)
    }

    #[inline(always)]
    pub fn drain<'vec>(&'vec mut self) -> LProtoBitVecDrain<'vec> {
        LProtoBitVecDrain {
            proto: self.proto,
            drain: self.vec.drain()
        }
    }

    #[inline(always)]
    pub unsafe fn into_raw(self) -> RawBitVec {
        let nodrop_self = ManuallyDrop::new(self);
        RawBitVec {
            ptr: nodrop_self.vec.ptr,
            len: nodrop_self.vec.len,
            true_cap: nodrop_self.vec.true_cap
        }
    }
}

impl IntoIterator for LProtoBitVec {
    type Item = usize;

    type IntoIter = LProtoBitVecIter;

    #[inline(always)]
    fn into_iter(self) -> Self::IntoIter {
        LProtoBitVecIter {
            proto: self.proto,
            iter: unsafe { self.into_raw().into_iter() }
        }
    }
}

impl Drop for LProtoBitVec {
    #[inline(always)]
    fn drop(&mut self) { /* RawBitVec::drop() will take care of the deallocation */ }
}