//! This library provides `SliceQueue`, an optimized queue for working efficiently with (byte-)slices.
//! It allows you to
//!  - efficiently push an arbitrary amount of elements by either consuming them or by cloning them
//!    from a slice (if the type supports the `Clone` trait)
//!  - efficiently pop an arbitrary amount of elements from the front
//!  - access the underlying buffer directly by either using `peek*` methods or by using
//!    (range-)indices
//!  - dereference the `SliceQueue<T>` like it's a `Vec<T>` (which usually results in a slice)
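//!
//! A quick usage sketch (the crate name `slice_queue` is assumed here):
//!
//! ```
//! use slice_queue::SliceQueue;
//!
//! // Create a queue and append some bytes by cloning them from a slice
//! let mut sq: SliceQueue<u8> = SliceQueue::new();
//! sq.push_from(b"Testolope");
//!
//! // Pop the first four bytes from the front
//! let front = sq.pop_n(4).unwrap();
//! assert_eq!(front, b"Test".to_vec());
//!
//! // The remaining bytes stay in the queue and can be inspected in place
//! assert_eq!(sq.len(), 5);
//! assert_eq!(sq.peek(), Some(&b'o'));
//! assert_eq!(&sq[1..3], &b"lo"[..]);
//! ```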

use std::{
	usize, fmt::{ Debug, Formatter, Result as FmtResult },
	ops::{
		Index, IndexMut,
		Range, RangeFrom, RangeTo, RangeInclusive, RangeToInclusive, RangeBounds, Bound,
		Deref, DerefMut
	}
};
#[cfg(feature = "fast_unsafe_code")]
use std::{ ptr, mem };

#[derive(Default)]
pub struct SliceQueue<T> {
	backing: Vec<T>
}
impl<T> SliceQueue<T> {
	/// Creates a new `SliceQueue`
	///
	/// Returns _the new `SliceQueue`_
	pub fn new() -> Self {
		SliceQueue{ backing: Vec::new() }
	}
	/// Creates a new `SliceQueue` with a preallocated capacity `n`
	///
	/// Parameters:
	///  - `n`: The capacity to preallocate
	///
	/// Returns _the new `SliceQueue`_
	pub fn with_capacity(n: usize) -> Self {
		SliceQueue{ backing: Vec::with_capacity(n) }
	}
	
	
	/// The amount of elements stored
	///
	/// Returns _the amount of elements stored in `self`_
	pub fn len(&self) -> usize {
		self.backing.len()
	}
	/// Checks if there are __no__ elements stored
	///
	/// Returns either _`true`_ if `self` is empty or _`false`_ otherwise
	pub fn is_empty(&self) -> bool {
		self.backing.is_empty()
	}
	/// Returns the allocated capacity
	///
	/// Returns _the allocated capacity of `self`_
	pub fn capacity(&self) -> usize {
		self.backing.capacity()
	}
	/// Reserves an additional amount of memory to push `additional_element_count` elements without
	/// reallocating
	///
	/// Parameters:
	///  - `additional_element_count`: The amount of elements that we should be able to append
	///    without reallocating
	pub fn reserve(&mut self, additional_element_count: usize) {
		self.backing.reserve(additional_element_count)
	}
	/// Shrinks the allocated capacity if less than half of it is used. This roughly mirrors `Vec`'s
	/// allocation strategy.
	pub fn shrink_opportunistic(&mut self) {
		// Compute the half capacity
		let half_capacity = if self.capacity() == 0 { 0 }
			else { self.capacity() / 2 };
		
		// Shrink the backing if more than 4 elements are stored but at most half of the capacity is used
		if self.len() > 4 && self.len() <= half_capacity { self.backing.shrink_to_fit() }
	}
	/// Shrinks the allocated capacity as much as possible
	pub fn shrink_to_fit(&mut self) {
		self.backing.shrink_to_fit()
	}
	
	
	/// Consumes the first element and returns it
	///
	/// Returns either _`Some(element)`_ if there was an element to consume or _`None`_ otherwise
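	///
	/// A rough sketch of the expected behaviour (assuming the crate name `slice_queue`):
	///
	/// ```
	/// # use slice_queue::SliceQueue;
	/// let mut sq = SliceQueue::from(vec![7u8, 8]);
	/// assert_eq!(sq.pop(), Some(7));
	/// assert_eq!(sq.pop(), Some(8));
	/// assert_eq!(sq.pop(), None);
	/// ```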
	pub fn pop(&mut self) -> Option<T> {
		match self.is_empty() {
			true => None,
			false => {
				let element = self.backing.remove(0);
				self.shrink_opportunistic();
				Some(element)
			}
		}
	}
	/// Consumes the first `n` elements and returns them
	///
	/// Parameters:
	///  - `n`: The amount of elements to consume
	///
	/// Returns either _`Some(elements)`_ if there were enough elements to consume or _`None`_
	/// otherwise
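	///
	/// A rough sketch (assuming the crate name `slice_queue`):
	///
	/// ```
	/// # use slice_queue::SliceQueue;
	/// let mut sq = SliceQueue::from(vec![1u8, 2, 3, 4]);
	/// assert_eq!(sq.pop_n(2), Some(vec![1u8, 2]));
	/// assert_eq!(sq.pop_n(5), None);
	/// assert_eq!(sq.len(), 2);
	/// ```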
	pub fn pop_n(&mut self, n: usize) -> Option<Vec<T>> {
		if self.len() < n { return None }
		
		// Copy elements into a new vector
		#[cfg(feature = "fast_unsafe_code")]
		let elements = unsafe {
			// Create target vector
			let mut elements = Vec::with_capacity(n);
			let remaining = self.len() - n;
			
			// Copy stored elements to the new vector and the remaining elements to the front
			ptr::copy_nonoverlapping(self.backing.as_ptr(), elements.as_mut_ptr(), n);
			ptr::copy(self.backing[n..].as_ptr(), self.backing.as_mut_ptr(), remaining);
			
			// Adjust the lengths
			elements.set_len(n);
			self.backing.set_len(remaining);
			
			elements
		};
		#[cfg(not(feature = "fast_unsafe_code"))]
		let elements = /* safe */ {
			// Drain `n` elements and collect them
			self.backing.drain(..n).collect()
		};
		
		self.shrink_opportunistic();
		Some(elements)
	}
	/// Consumes the first `dst.len()` elements and moves them into `dst`
	///
	/// __Warning: This function panics if there are not enough elements stored to fill `dst`
	/// completely__
	///
	/// Parameters:
	///  - `dst`: The target to move the elements into
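	///
	/// A rough sketch (assuming the crate name `slice_queue`):
	///
	/// ```
	/// # use slice_queue::SliceQueue;
	/// let mut sq = SliceQueue::from(vec![1u8, 2, 3, 4]);
	/// let mut buf = [0u8; 2];
	/// sq.pop_into(&mut buf);
	/// assert_eq!(buf, [1u8, 2]);
	/// assert_eq!(sq.len(), 2);
	/// ```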
	pub fn pop_into(&mut self, dst: &mut[T]) {
		assert!(self.len() >= dst.len(), "`dst` is larger than `self`");
		
		// Copy raw data
		let to_move = dst.len();
		#[cfg(feature = "fast_unsafe_code")]
		unsafe {
			// Replace the elements in dst
			Self::replace_n(self.backing.as_ptr(), dst.as_mut_ptr(), to_move);
			
			// Move the remaining stored elements to the front and adjust length
			let remaining = self.len() - to_move;
			ptr::copy(self.backing[to_move..].as_ptr(), self.backing.as_mut_ptr(), remaining);
			self.backing.set_len(remaining);
		}
		#[cfg(not(feature = "fast_unsafe_code"))]
		/* safe */ {
			// Move `to_move` elements into `dst`
			let (mut src, dst) = (self.backing.drain(..to_move), dst.iter_mut());
			dst.for_each(|t| *t = src.next().unwrap());
		}
		
		self.shrink_opportunistic();
	}
	
	
	/// Discards the first `n` elements
	///
	/// __Warning: This function panics if there are less than `n` elements stored in `self`__
	///
	/// Parameters:
	///  - `n`: The amount of elements to discard
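	///
	/// A rough sketch (assuming the crate name `slice_queue`):
	///
	/// ```
	/// # use slice_queue::SliceQueue;
	/// let mut sq = SliceQueue::from(vec!['a', 'b', 'c']);
	/// sq.discard_n(2);
	/// assert_eq!(sq.len(), 1);
	/// assert_eq!(sq.peek(), Some(&'c'));
	/// ```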
	pub fn discard_n(&mut self, n: usize) {
		assert!(self.len() >= n, "`n` is larger than the amount of elements in `self`");
		
		// Drop `n` elements and copy the remaining elements to the front
		#[cfg(feature = "fast_unsafe_code")]
		unsafe {
			// Drop the first `n` elements in place (they are discarded, not moved)
			if mem::needs_drop::<T>() {
				(0..n).for_each(|i| self.backing.as_mut_ptr().add(i).drop_in_place());
			}
			
			// Move the remaining stored elements to the front and adjust the length
			let remaining = self.len() - n;
			ptr::copy(self.backing.as_ptr().add(n), self.backing.as_mut_ptr(), remaining);
			self.backing.set_len(remaining);
		}
		#[cfg(not(feature = "fast_unsafe_code"))]
		/* safe */ {
			// Drain `n` elements from the front
			self.backing.drain(..n);
		}
		self.shrink_opportunistic();
	}
	
	
	/// Returns a reference to the first element
	///
	/// Returns either _`Some(element_reference)`_ if there is an element to reference or _`None`_
	/// otherwise
	pub fn peek(&self) -> Option<&T> {
		self.backing.first()
	}
	/// Returns a mutable reference to the first element
	///
	/// Returns either _`Some(element_reference)`_ if there is an element to reference or _`None`_
	/// otherwise
	pub fn peek_mut(&mut self) -> Option<&mut T> {
		self.backing.first_mut()
	}
	/// Returns a reference to the first `n` elements
	///
	/// Parameters:
	///  - `n`: The amount of elements to reference
	///
	/// Returns either _`Some(element_references)`_ if there are enough elements to reference or
	/// _`None`_ otherwise
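	///
	/// A rough sketch (assuming the crate name `slice_queue`):
	///
	/// ```
	/// # use slice_queue::SliceQueue;
	/// let sq = SliceQueue::from(vec![1u8, 2, 3]);
	/// assert_eq!(sq.peek_n(2), Some(&[1u8, 2][..]));
	/// assert_eq!(sq.peek_n(4), None);
	/// ```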
	pub fn peek_n(&self, n: usize) -> Option<&[T]> {
		if self.len() < n { None }
			else { Some(&self.backing[..n]) }
	}
	/// Returns a mutable reference to the first `n` elements
	///
	/// Parameters:
	///  - `n`: The amount of elements to reference
	///
	/// Returns either _`Some(element_references)`_ if there are enough elements to reference or
	/// _`None`_ otherwise
	pub fn peek_n_mut(&mut self, n: usize) -> Option<&mut[T]> {
		if self.len() < n { None }
			else { Some(&mut self.backing[..n]) }
	}
	
	
	/// Appends `element` at the end
	///
	/// Parameters:
	///  - `element`: The element to append at the end
	pub fn push(&mut self, element: T) {
		self.backing.push(element)
	}
	/// Appends all elements in `n` at the end by consuming the vector
	///
	/// Parameters:
	///  - `n`: A vector with the elements to append at the end
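	///
	/// A rough sketch (assuming the crate name `slice_queue`):
	///
	/// ```
	/// # use slice_queue::SliceQueue;
	/// let mut sq = SliceQueue::from(vec![1u8, 2]);
	/// sq.push_n(vec![3u8, 4]);
	/// assert_eq!(sq.len(), 4);
	/// assert_eq!(sq.peek_n(4), Some(&[1u8, 2, 3, 4][..]));
	/// ```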
	pub fn push_n(&mut self, mut n: Vec<T>) {
		self.backing.append(&mut n);
	}
	/// Clones and appends all elements in `src` at the end
	///
	/// Parameters:
	///  - `src`: A slice containing the elements to clone and append
	pub fn push_from(&mut self, src: &[T]) where T: Clone {
		self.backing.extend_from_slice(src)
	}
	
	
	/// A private helper function to translate `RangeBounds` into ranges relative to `self`
	///
	/// __Warning: This function panics if an inclusive end bound is `usize::MAX` (so that the
	/// corresponding exclusive bound would overflow `usize`)__
	///
	/// Parameters:
	///  - `bounds`: The `RangeBounds` to translate
	///
	/// Returns _the translated range_
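	///
	/// A rough sketch of how these bounds surface through the public `Index` implementations
	/// (assuming the crate name `slice_queue`):
	///
	/// ```
	/// # use slice_queue::SliceQueue;
	/// let sq = SliceQueue::from(vec![0u8, 1, 2, 3, 4]);
	/// assert_eq!(&sq[1..3], &[1u8, 2][..]);
	/// assert_eq!(&sq[..=2], &[0u8, 1, 2][..]);
	/// assert_eq!(&sq[3..], &[3u8, 4][..]);
	/// ```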
	fn range_from_bounds(&self, bounds: impl RangeBounds<usize>) -> Range<usize> {
		let start_included = match bounds.start_bound() {
			Bound::Included(b) => *b,
			Bound::Excluded(_) => unreachable!(),
			Bound::Unbounded => 0
		};
		let end_excluded = match bounds.end_bound() {
			Bound::Included(b) => if *b < usize::MAX { *b + 1 }
					else { panic!("Inclusive end bound `usize::MAX` would overflow `usize`") },
			Bound::Excluded(b) => *b,
			Bound::Unbounded => self.backing.len()
		};
		start_included..end_excluded
	}
	/// A private helper that copies `n` elements from `src` to `dst`. The elements in `dst` are
	/// dropped if necessary.
	///
	/// __Warning: Because this function operates on raw memory, YOU must take care of stuff like
	/// memory-safety, ownership and ref-counts of the copied elements etc.__
	///
	/// Parameters:
	///  - `src`: A pointer to the source elements
	///  - `dst`: A pointer to the destination
	///  - `n`: The amount of elements to copy
	#[cfg(feature = "fast_unsafe_code")]
	unsafe fn replace_n(src: *const T, dst: *mut T, n: usize) {
		// Drop elements in dst if necessary
		if mem::needs_drop::<T>() {
			let mut ptr = dst;
			(0..n).for_each(|_| {
				ptr.drop_in_place();
				ptr = ptr.offset(1);
			})
		}
		// Copy src to dst
		ptr::copy(src, dst, n);
	}
}
impl<T: Debug> Debug for SliceQueue<T> {
	fn fmt(&self, f: &mut Formatter) -> FmtResult {
		Debug::fmt(&**self, f)
	}
}
impl<T> From<Vec<T>> for SliceQueue<T> {
	fn from(vec: Vec<T>) -> Self {
		SliceQueue { backing: vec }
	}
}
impl<T> Clone for SliceQueue<T> where T: Clone {
	fn clone(&self) -> Self {
		SliceQueue { backing: self.backing.clone() }
	}
}

macro_rules! impl_range_index {
	($b:ty) => {
		impl<T> Index<$b> for SliceQueue<T> {
			type Output = [T];
			fn index(&self, bounds: $b) -> &[T] {
				&self.backing[self.range_from_bounds(bounds)]
			}
		}
		impl<T> IndexMut<$b> for SliceQueue<T> {
			fn index_mut(&mut self, bounds: $b) -> &mut [T] {
				let range = self.range_from_bounds(bounds);
				&mut self.backing[range]
			}
		}
	};
}
impl_range_index!(Range<usize>);
impl_range_index!(RangeFrom<usize>);
impl_range_index!(RangeTo<usize>);
impl_range_index!(RangeInclusive<usize>);
impl_range_index!(RangeToInclusive<usize>);

impl<T> Index<usize> for SliceQueue<T> {
	type Output = T;
	fn index(&self, i: usize) -> &T {
		&self.backing[i]
	}
}
impl<T> IndexMut<usize> for SliceQueue<T> {
	fn index_mut(&mut self, i: usize) -> &mut T {
		&mut self.backing[i]
	}
}

impl<T> Deref for SliceQueue<T> {
	type Target = <Vec<T> as Deref>::Target;
	fn deref(&self) -> &Self::Target {
		self.backing.deref()
	}
}
impl<T> DerefMut for SliceQueue<T> {
	fn deref_mut(&mut self) -> &mut Self::Target {
		self.backing.deref_mut()
	}
}


#[cfg(test)]
mod tests {
	include!("tests.rs");
}