use std::{iter, ops, slice, vec};

use super::{Block, BITS};
use self::BlockStore::{Stack, Heap};

// A `Vec<Block>` is three words wide (192 bits on 64-bit targets), so a stack
// buffer of `192 / BITS` blocks keeps the `Stack` variant no larger than the
// `Heap` variant; the `size` test below checks this.
const SIZE: usize = 192 / BITS;

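/// Block storage that starts as a fixed-size array on the stack and spills to
/// a heap-allocated `Vec<Block>` once it needs more than `SIZE` blocks.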
#[derive(Clone, Debug)]
pub enum BlockStore {
    Stack([Block; SIZE]),
    Heap(Vec<Block>),
}

impl BlockStore {
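    /// Creates an empty store: a zeroed, stack-allocated array of `SIZE` blocks.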
    pub fn new() -> Self {
        Stack([0; SIZE])
    }

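    /// Creates a store with room for at least `cap` blocks, staying on the
    /// stack whenever `cap` fits in `SIZE` blocks.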
    pub fn with_capacity(cap: usize) -> Self {
        if cap <= SIZE {
            Stack([0; SIZE])
        } else {
            Heap(Vec::with_capacity(cap))
        }
    }

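    /// Clears the contents, keeping the heap allocation if there is one and
    /// zeroing the stack array otherwise.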
    pub fn clear(&mut self) {
        if let Heap(ref mut vec) = *self {
            vec.clear()
        } else {
            *self = Stack([0; SIZE])
        }
    }

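    /// Returns the number of blocks the store can hold without reallocating.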
    pub fn capacity(&self) -> usize {
        if let Heap(ref vec) = *self {
            vec.capacity()
        } else {
            SIZE
        }
    }

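    /// Ensures the store can hold at least `cap` blocks in total, moving it to
    /// the heap if `cap` exceeds the stack capacity.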
    pub fn reserve(&mut self, cap: usize) {
        if SIZE < cap {
            let vec = match *self {
                Stack(ref arr) => {
                    let mut vec = Vec::with_capacity(cap);
                    vec.extend(arr);
                    vec
                }
                Heap(ref mut vec) => {
                    // `cap` is a total capacity, but `Vec::reserve` takes an
                    // additional amount, so only request the difference.
                    vec.reserve(cap.saturating_sub(vec.len()));
                    return;
                }
            };
            *self = Heap(vec);
        }
    }

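    /// Drops trailing zero blocks and, if what remains fits on the stack,
    /// moves the store back into the stack array; otherwise shrinks the heap
    /// allocation.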
    pub fn shrink_to_fit(&mut self) {
        let arr = match *self {
            Stack(_) => return,
            Heap(ref mut vec) => {
                // Trailing zero blocks carry no information, so pop them off
                // before deciding whether the rest fits on the stack.
                while let Some(&0) = vec.last() {
                    vec.pop();
                }
                if vec.len() <= SIZE {
                    let mut arr = [0; SIZE];
                    for i in 0..vec.len() {
                        arr[i] = vec[i];
                    }
                    arr
                } else {
                    vec.shrink_to_fit();
                    return;
                }
            }
        };
        *self = Stack(arr);
    }

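    /// Drains the blocks from `idx` onwards, zeroing them in the stack case
    /// and removing them in the heap case.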
    pub fn drain(&mut self, idx: usize) -> Drain {
        match *self {
            Stack(ref mut data) => {
                assert!(idx <= SIZE);
                Drain::Stack {
                    data,
                    idx: idx as u8,
                }
            }
            Heap(ref mut vec) => Drain::Heap(vec.drain(idx..)),
        }
    }

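    /// Resizes the store to `new_len` blocks, zeroing any blocks past the new
    /// length and spilling to the heap when `new_len` exceeds `SIZE`.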
    pub fn resize(&mut self, new_len: usize) {
        let vec = match *self {
            Stack(ref mut arr) => {
                if new_len <= SIZE {
                    // Still fits on the stack: zero only the blocks beyond the
                    // new length so no stale data is left behind, and keep the
                    // blocks before it intact.
                    for block in &mut arr[new_len..] {
                        *block = 0;
                    }
                    return;
                } else {
                    let mut vec = Vec::with_capacity(new_len);
                    vec.extend(&*arr);
                    vec.resize(new_len, 0);
                    vec
                }
            }
            Heap(ref mut vec) => {
                vec.resize(new_len, 0);
                return;
            }
        };
        *self = Heap(vec);
    }

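    /// Returns an iterator over the blocks by value.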
    pub fn iter(&self) -> Iter {
        Iter { inner: ops::Deref::deref(self).iter() }
    }

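    /// Returns an iterator over mutable references to the blocks.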
    pub fn iter_mut(&mut self) -> slice::IterMut<Block> {
        ops::DerefMut::deref_mut(self).iter_mut()
    }
}

impl Default for BlockStore {
    fn default() -> Self {
        BlockStore::new()
    }
}

impl Extend<Block> for BlockStore {
    fn extend<I>(&mut self, iter: I)
        where I: IntoIterator<Item = Block>
    {
        let iter = iter.into_iter();
        // A stack store is always "full" at SIZE blocks, so extending it
        // spills to the heap with the existing blocks as a prefix.
        let arr = match *self {
            Stack(arr) => arr,
            Heap(ref mut vec) => return vec.extend(iter),
        };
        let mut vec = Vec::with_capacity(SIZE.saturating_add(iter.size_hint().0));
        vec.extend(&arr);
        vec.extend(iter);
        *self = Heap(vec);
    }
}

impl ops::Deref for BlockStore {
    type Target = [Block];

    fn deref(&self) -> &Self::Target {
        match *self {
            Stack(ref arr) => arr,
            Heap(ref vec) => vec,
        }
    }
}

impl ops::DerefMut for BlockStore {
    fn deref_mut(&mut self) -> &mut Self::Target {
        match *self {
            Stack(ref mut arr) => arr,
            Heap(ref mut vec) => vec,
        }
    }
}

impl<'a> iter::FromIterator<&'a Block> for BlockStore {
    fn from_iter<I>(iter: I) -> Self
        where I: IntoIterator<Item = &'a Block>
    {
        iter.into_iter().cloned().collect()
    }
}

impl iter::FromIterator<Block> for BlockStore {
    fn from_iter<I>(iter: I) -> Self
        where I: IntoIterator<Item = Block>
    {
        let mut iter = iter.into_iter();
        if iter.size_hint().0 < SIZE {
            // The iterator may fit on the stack: fill the array and only move
            // to the heap if more items turn up.
            let mut arr = [0; SIZE];
            for i in 0..SIZE {
                if let Some(block) = iter.next() {
                    arr[i] = block;
                } else {
                    return Stack(arr);
                }
            }
            if let Some(block) = iter.next() {
                let mut vec = Vec::with_capacity((SIZE + 1).saturating_add(iter.size_hint().0));
                vec.extend(&arr);
                vec.push(block);
                vec.extend(iter);
                Heap(vec)
            } else {
                Stack(arr)
            }
        } else {
            // At least SIZE blocks are coming, so go straight to the heap.
            Heap(iter.collect())
        }
    }
}

impl<'a> IntoIterator for &'a BlockStore {
    type Item = Block;
    type IntoIter = Iter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl IntoIterator for BlockStore {
    type Item = Block;
    type IntoIter = IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            kind: match self {
                Stack(data) => IntoIterKind::Stack { data, idx: 0 },
                Heap(vec) => IntoIterKind::Heap(vec.into_iter()),
            },
        }
    }
}

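/// Borrowing iterator over the blocks of a `BlockStore`, yielding them by value.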
#[derive(Clone, Debug)]
pub struct Iter<'a> {
    inner: slice::Iter<'a, Block>,
}

impl<'a> Iterator for Iter<'a> {
    type Item = Block;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().cloned()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<'a> ExactSizeIterator for Iter<'a> {
    fn len(&self) -> usize {
        self.inner.len()
    }
}

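/// Owning iterator returned by `BlockStore::into_iter`.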
#[derive(Clone, Debug)]
pub struct IntoIter {
    kind: IntoIterKind,
}

#[derive(Clone, Debug)]
enum IntoIterKind {
    Stack { data: [Block; SIZE], idx: u8 },
    Heap(vec::IntoIter<Block>),
}

impl Iterator for IntoIter {
    type Item = Block;

    fn next(&mut self) -> Option<Self::Item> {
        match self.kind {
            IntoIterKind::Stack {
                ref data,
                ref mut idx,
            } => {
                // `idx` is a `u8`; the `size` test asserts that SIZE fits.
                if *idx as usize == SIZE {
                    None
                } else {
                    let ret = data[*idx as usize];
                    *idx += 1;
                    Some(ret)
                }
            }
            IntoIterKind::Heap(ref mut vec) => vec.next(),
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        match self.kind {
            IntoIterKind::Stack { idx, .. } => (SIZE - idx as usize, Some(SIZE - idx as usize)),
            IntoIterKind::Heap(ref vec) => vec.size_hint(),
        }
    }
}

impl ExactSizeIterator for IntoIter {
    fn len(&self) -> usize {
        match self.kind {
            IntoIterKind::Stack { idx, .. } => SIZE - idx as usize,
            IntoIterKind::Heap(ref vec) => vec.len(),
        }
    }
}

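/// Draining iterator returned by `BlockStore::drain`, yielding the blocks from
/// the drain point onwards.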
#[derive(Debug)]
pub enum Drain<'a> {
    Stack {
        data: &'a mut [Block; SIZE],
        idx: u8,
    },
    Heap(vec::Drain<'a, Block>),
}

impl<'a> Iterator for Drain<'a> {
    type Item = Block;

    fn next(&mut self) -> Option<Self::Item> {
        match *self {
            Drain::Stack {
                ref mut data,
                ref mut idx,
            } => {
                if *idx as usize == SIZE {
                    None
                } else {
                    // Yield the block and zero it in place, since the stack
                    // array keeps its fixed length.
                    let ret = data[*idx as usize];
                    data[*idx as usize] = 0;
                    *idx += 1;
                    Some(ret)
                }
            }
            Drain::Heap(ref mut vec) => vec.next(),
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        match *self {
            Drain::Stack { idx, .. } => (SIZE - idx as usize, Some(SIZE - idx as usize)),
            Drain::Heap(ref vec) => vec.size_hint(),
        }
    }
}

impl<'a> ExactSizeIterator for Drain<'a> {
    fn len(&self) -> usize {
        match *self {
            Drain::Stack { idx, .. } => SIZE - idx as usize,
            Drain::Heap(ref vec) => vec.len(),
        }
    }
}

#[test]
fn size() {
    use std::{mem, u8};

    // The stack buffer must not make `BlockStore` bigger than its heap
    // variant, and SIZE must fit in the `u8` indices used by the iterators.
    assert_eq!(mem::size_of::<[Block; SIZE]>(),
               mem::size_of::<Vec<Block>>());
    assert!(SIZE <= u8::MAX as usize);
}
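
// Illustrative check of the stack-to-heap spill in `extend` (an added example,
// not part of the original test suite): pushing more than the inline capacity
// switches the store to the `Heap` variant, keeping the `SIZE` stack blocks as
// a prefix of the new vector.
#[test]
fn extend_spills_to_heap() {
    let mut store = BlockStore::new();
    assert_eq!(store.capacity(), SIZE);

    // Push SIZE + 1 blocks; they cannot all share the stack array, so the
    // store must switch to the `Heap` variant.
    store.extend(iter::repeat(1).take(SIZE + 1));
    match store {
        Heap(ref vec) => assert_eq!(vec.len(), 2 * SIZE + 1),
        Stack(_) => panic!("expected the store to spill to the heap"),
    }
}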