use std::alloc::{Layout, alloc, dealloc, handle_alloc_error};
use std::cmp::{Ordering, max};
use std::collections::VecDeque;
use std::fmt::Debug;
use std::hash::{Hash, Hasher};
use std::iter::{FromIterator, FusedIterator, IntoIterator};
use std::ops::{Index, IndexMut};
use std::{slice, ptr};
/// A growable FIFO ring buffer backed by a single heap allocation whose
/// capacity is always a power of two, so positions can be reduced with a
/// bit-mask instead of `%`.
pub struct RingBuffer<T> {
    // Raw pointer to the backing allocation: `capacity` slots, of which only
    // the `len` slots starting at `tail` (modulo capacity) are initialized.
    memory: *mut T,
    // Number of live elements currently stored.
    len: usize,
    // Size of the allocation in elements; always a power of two >= 2.
    capacity: usize,
    // Slot where the next `push` writes (one past the newest element).
    head: usize,
    // Slot of the oldest element, i.e. where the next `pop` reads.
    tail: usize
}
impl<T> RingBuffer<T> {
/// Creates an empty buffer with the minimum capacity (2).
pub fn new() -> RingBuffer<T> {
    RingBuffer::<T>::with_capacity(0)
}
/// Creates an empty buffer whose capacity is `capacity` rounded up to a
/// power of two (minimum 2), enabling mask-based index arithmetic.
///
/// NOTE(review): the allocation is hard-coded to 64-byte alignment
/// (presumably cache-line alignment); this is unsound if
/// `align_of::<T>() > 64` — confirm no over-aligned `T` is used. A
/// zero-sized `T` also produces a zero-size `alloc` request, which the
/// allocator contract forbids — confirm ZSTs are never stored.
pub fn with_capacity(capacity: usize) -> RingBuffer<T> {
    let capacity = max(capacity.next_power_of_two(), 2);
    let layout = Layout::from_size_align(
        capacity * std::mem::size_of::<T>(), 64).unwrap();
    let ptr = unsafe { alloc(layout) };
    if ptr.is_null() {
        handle_alloc_error(layout);
    }
    RingBuffer {
        memory: ptr as *mut T,
        len: 0,
        capacity,
        head: 0,
        tail: 0
    }
}
/// Returns the live elements as two slices in pop order (oldest first).
///
/// When the data wraps past the end of the allocation the first slice is
/// `tail..capacity` and the second is `0..head`; otherwise everything is
/// contiguous and the second slice is empty.
pub fn as_slices(&self) -> (&[T], &[T]) {
    // Bug fix: an empty buffer can sit at head == tail == 0 (right after
    // `new` or `clear`). The `head == 0` special case below would then
    // fabricate a capacity-length slice over uninitialized memory, which
    // is undefined behavior — so handle empty explicitly first.
    if self.is_empty() {
        return (&[], &[]);
    }
    unsafe {
        if self.wraps_around() {
            (slice::from_raw_parts(
                self.memory.add(self.tail), self.capacity - self.tail),
            slice::from_raw_parts(self.memory, self.head))
        } else {
            // head == 0 with live elements means the run ends exactly at
            // the end of the allocation, so treat it as "up to capacity".
            let head_or_capacity = if self.head == 0 {
                self.capacity
            } else {
                self.head
            };
            (slice::from_raw_parts(
                self.memory.add(self.tail), head_or_capacity - self.tail), &[])
        }
    }
}
/// Returns the newest element (relative index 0), or `None` when empty.
#[inline]
pub fn back(&self) -> Option<&T> {
    self.get_relative(0)
}
/// Mutable counterpart of `back`.
#[inline]
pub fn back_mut(&mut self) -> Option<&mut T> {
    self.get_relative_mut(0)
}
/// Number of slots in the backing allocation (always a power of two).
#[inline]
pub fn capacity(&self) -> usize {
    self.capacity
}
/// Drops every element and rewinds the cursors to a pristine state.
#[inline]
pub fn clear(&mut self) {
    // Popping drops each element in oldest-to-newest order.
    while self.pop().is_some() {}
    self.len = 0;
    self.head = 0;
    self.tail = 0;
}
/// Returns `true` when `x` equals any live element.
#[inline]
pub fn contains(&self, x: &T) -> bool
where T: PartialEq<T>
{
    let (older, newer) = self.as_slices();
    older.contains(x) || newer.contains(x)
}
/// Returns the oldest element (the one `pop` would remove next).
#[inline]
pub fn front(&self) -> Option<&T> {
    match self.len {
        0 => None,
        n => self.get_relative(n - 1),
    }
}
/// Mutable counterpart of `front`.
#[inline]
pub fn front_mut(&mut self) -> Option<&mut T> {
    match self.len {
        0 => None,
        n => self.get_relative_mut(n - 1),
    }
}
/// Looks up an element by the "absolute" index that `push` returned for it.
///
/// The index is reduced modulo the capacity before use, so values at or
/// above the capacity (which `push` produces after the buffer wraps)
/// still resolve to the correct slot.
#[inline]
pub fn get_absolute(&self, index: usize) -> Option<&T> {
    if self.is_index_in_range(index) {
        unsafe {
            Some(&*self.memory.add(index & self.mask()))
        }
    } else {
        None
    }
}
/// Mutable counterpart of `get_absolute`.
pub fn get_absolute_mut(&mut self, index: usize) -> Option<&mut T> {
    if self.is_index_in_range(index) {
        unsafe {
            Some(&mut *self.memory.add(index & self.mask()))
        }
    } else {
        None
    }
}
/// Unchecked variant of `get_absolute`.
///
/// # Safety
///
/// `index & self.mask()` must address a slot holding a live element
/// (i.e. `is_index_in_range(index)` must hold); otherwise this creates a
/// reference to uninitialized memory.
#[inline]
pub unsafe fn get_absolute_unchecked(&self, index: usize) -> &T {
    &*self.memory.add(index & self.mask())
}
/// Unchecked variant of `get_absolute_mut`.
///
/// # Safety
///
/// Same contract as `get_absolute_unchecked`.
#[inline]
pub unsafe fn get_absolute_mut_unchecked(&mut self, index: usize) -> &mut T {
    &mut *self.memory.add(index & self.mask())
}
/// Returns the element `index` steps behind the newest one
/// (relative index 0 is the most recently pushed element).
pub fn get_relative(&self, index: usize) -> Option<&T> {
    if index >= self.len {
        return None;
    }
    // SAFETY: index < len, so the computed slot holds a live element.
    unsafe { Some(&*self.get_relative_pointer(index)) }
}
/// Mutable counterpart of `get_relative`.
pub fn get_relative_mut(&mut self, index: usize) -> Option<&mut T> {
    if index >= self.len {
        return None;
    }
    // SAFETY: index < len, so the computed slot holds a live element.
    unsafe { Some(&mut *self.get_relative_pointer(index)) }
}
/// Returns `true` when no live elements are stored.
#[inline]
pub fn is_empty(&self) -> bool {
    self.len == 0
}
/// Returns `true` when every slot holds a live element.
#[inline]
pub fn is_full(&self) -> bool {
    self.len == self.capacity
}
/// Iterates the live elements from oldest to newest.
///
/// NOTE(review): the iterator is built over a slice spanning the whole
/// capacity, including slots that hold no initialized element. The
/// iterator only ever reads live slots, but materializing a `&[T]` over
/// uninitialized memory is technically undefined behavior — consider
/// rebuilding `Iter` on raw pointers.
#[inline]
pub fn iter(&self) -> Iter<T> {
    Iter {
        buffer: unsafe { slice::from_raw_parts(self.memory, self.capacity) },
        len: self.len,
        index: self.tail
    }
}
/// Mutable counterpart of `iter`; same soundness caveat applies.
#[inline]
pub fn iter_mut(&mut self) -> IterMut<T> {
    IterMut {
        buffer: unsafe { slice::from_raw_parts_mut(self.memory, self.capacity) },
        len: self.len,
        index: self.tail
    }
}
/// Number of live elements.
#[inline]
pub fn len(&self) -> usize {
    self.len
}
/// Removes and returns the oldest element, or `None` when empty.
pub fn pop(&mut self) -> Option<T> {
    if self.is_empty() {
        return None;
    }
    let slot = self.tail;
    self.tail = (slot + 1) & self.mask();
    self.len -= 1;
    // SAFETY: `slot` held the oldest live element; ownership of the value
    // moves to the caller and the slot is now considered uninitialized.
    unsafe { Some(self.memory.add(slot).read()) }
}
/// Appends `new_element`, doubling the capacity first when full.
///
/// Returns an "absolute" index usable with `get_absolute` / `Index`.
/// When the live region wraps around the end of the allocation,
/// `capacity` is added to the raw write position — presumably so returned
/// indices keep increasing across a wrap (the unit test relies on these
/// exact values); `get_absolute` masks the extra offset off again.
///
/// NOTE(review): `self.capacity << 1` can overflow for enormous
/// capacities — confirm that is out of scope.
pub fn push(&mut self, new_element: T) -> usize {
    if self.is_full() {
        self.expand(self.capacity << 1);
    }
    let return_index = self.head;
    self.head = (self.head + 1) & self.mask();
    self.len += 1;
    unsafe {
        self.memory.add(return_index).write(new_element);
    }
    return_index + (self.capacity * self.wraps_around() as usize)
}
/// Grows the buffer so at least `additional` more elements fit without
/// reallocating. Panics on capacity overflow.
pub fn reserve(&mut self, additional: usize) {
    let required = self.len
        .checked_add(additional)
        .and_then(usize::checked_next_power_of_two)
        .expect("capacity overflow");
    if required > self.capacity {
        self.expand(required);
    }
}
/// Shrinks the allocation to the larger of `min_capacity` and `len`,
/// rounded up to a power of two (minimum 2). Elements keep their absolute
/// index modulo the new capacity, so indices handed out by `push` remain
/// valid after masking.
pub fn shrink_to(&mut self, min_capacity: usize) {
    let new_capacity = max(2, max(
        min_capacity.next_power_of_two(), self.len.next_power_of_two()));
    if new_capacity >= self.capacity {
        return;
    }
    unsafe {
        let layout = Layout::from_size_align(
            new_capacity * std::mem::size_of::<T>(), 64).unwrap();
        let new_ptr: *mut T = alloc(layout) as *mut T;
        if new_ptr.is_null() {
            handle_alloc_error(layout);
        }
        let new_mask = new_capacity - 1;
        // Bug fix: the previous slice-wise copies read past the old
        // allocation (`memory.add(capacity)`) and could write a contiguous
        // run across the new-capacity boundary when the live region
        // straddled it. Moving element-by-element to `pos & new_mask` is
        // correct in every case: the mapping is injective because
        // len <= new_capacity and live positions are consecutive modulo
        // the old capacity.
        for offset in 0..self.len {
            let old_pos = (self.tail + offset) & self.mask();
            self.memory.add(old_pos).copy_to(new_ptr.add(old_pos & new_mask), 1);
        }
        dealloc(self.memory as *mut u8, Layout::from_size_align(
            self.capacity * std::mem::size_of::<T>(), 64).unwrap());
        self.memory = new_ptr;
    }
    self.capacity = new_capacity;
    // Reduce the cursors into the new, smaller index space.
    self.head &= self.mask();
    self.tail &= self.mask();
}
/// Shrinks the allocation as far as the current length allows.
pub fn shrink_to_fit(&mut self) {
    self.shrink_to(0);
}
pub fn swap_relative(&mut self, i: usize, j: usize) {
assert!((i < self.len) & (j < self.len),
"At least one index is out of bounds, i is {}, j is {}, length of buffer is {}",
i, j, self.len);
unsafe {
self.get_relative_pointer(i).swap(self.get_relative_pointer(j));
}
}
/// Swaps the elements at absolute indices `i` and `j` (as returned by
/// `push`). Panics when either index does not refer to a live element.
pub fn swap_absolute(&mut self, i: usize, j: usize) {
    assert!(self.is_index_in_range(i) & self.is_index_in_range(j),
        "At least one index is out of bounds, i is {}, j is {}, buffer is from {} to {}",
        i, j, self.tail, self.head);
    // Bug fix: `push` hands out absolute indices that can exceed the
    // capacity (it adds `capacity` once the buffer wraps), and
    // `is_index_in_range` masks before checking — so the indices must be
    // masked here as well, otherwise the raw `add` runs past the
    // allocation. `get_absolute` already masks; this now matches it.
    unsafe {
        self.memory.add(i & self.mask()).swap(self.memory.add(j & self.mask()));
    }
}
/// Pops (and drops) the oldest elements until at most `len` remain.
/// A `len` of `self.len` or more is a no-op.
pub fn truncate(&mut self, len: usize) {
    while self.len > len {
        self.pop();
    }
}
/// Tests whether an absolute index (as returned by `push`) refers to a
/// live element. The index is first reduced modulo the capacity.
#[inline]
fn is_index_in_range(&self, index: usize) -> bool {
    if self.wraps_around() {
        // Live slots are [tail, capacity) plus [0, head). Measuring the
        // masked index's wrapping distance from `head` maps exactly the
        // dead slots to values below `capacity - len`.
        ((index & self.mask()).wrapping_sub(self.head) >= (self.capacity - self.len))
    } else {
        // Live slots are the contiguous run [tail, tail + len).
        ((index & self.mask()).wrapping_sub(self.tail) < self.len)
    }
}
/// Reallocates to `new_capacity` (a strictly larger power of two) and
/// moves the live elements across, preserving their positions modulo the
/// old capacity.
fn expand(&mut self, new_capacity: usize) {
    unsafe {
        let layout = Layout::from_size_align(
            new_capacity * std::mem::size_of::<T>(), 64).unwrap();
        let new_ptr: *mut T = alloc(layout) as *mut T;
        if new_ptr.is_null() {
            handle_alloc_error(layout);
        }
        if self.wraps_around() {
            // Older run keeps its position; the wrapped newer run is
            // appended right after the old capacity so the data becomes
            // contiguous, and `head` moves past it accordingly.
            self.memory.add(self.tail).copy_to(
                new_ptr.add(self.tail), self.capacity - self.tail);
            self.memory.copy_to(new_ptr.add(self.capacity), self.head);
            self.head += self.capacity;
        } else if !self.is_empty() {
            self.memory.add(self.tail).copy_to(
                new_ptr.add(self.tail), self.len);
            if self.head == 0 {
                // The run ended exactly at the old capacity; in the larger
                // buffer that write position is simply `old capacity`.
                self.head = self.capacity;
            }
        }
        dealloc(self.memory as *mut u8, Layout::from_size_align(
            self.capacity * std::mem::size_of::<T>(), 64).unwrap());
        self.memory = new_ptr;
    }
    self.capacity = new_capacity;
}
/// Pointer to the element `index` steps behind the newest one
/// (relative index 0 == most recently pushed, stored at `head - 1`).
#[inline]
unsafe fn get_relative_pointer(&self, index: usize) -> *mut T {
    self.memory.add(self.head.wrapping_sub(index + 1) & self.mask())
}
/// Bit-mask equivalent of `% capacity` (capacity is a power of two).
#[inline]
fn mask(&self) -> usize {
    self.capacity - 1
}
/// True when the live region crosses the end of the allocation, i.e. it
/// occupies [tail, capacity) plus [0, head). A full buffer with
/// head == tail != 0 also counts as wrapping.
#[inline]
fn wraps_around(&self) -> bool {
    !self.is_empty() & (self.head != 0) & (self.head <= self.tail)
}
}
impl<T> Clone for RingBuffer<T>
where T: Clone {
fn clone(&self) -> RingBuffer<T> {
self.iter().cloned().collect()
}
}
impl<T> Debug for RingBuffer<T>
where T: Debug {
    /// Formats the live elements, oldest first, as a debug list.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut list = f.debug_list();
        for element in self {
            list.entry(element);
        }
        list.finish()
    }
}
impl<T> Default for RingBuffer<T> {
    /// Equivalent to `RingBuffer::new()`.
    #[inline]
    fn default() -> RingBuffer<T> {
        RingBuffer::new()
    }
}
impl<T> Drop for RingBuffer<T> {
fn drop(&mut self) {
if !self.is_empty() {
let mut index = self.head;
while index != self.tail {
index = index.wrapping_sub(1) & self.mask();
unsafe {
ptr::drop_in_place(self.memory.add(index));
}
}
}
}
}
// Equality is total whenever the element type's is.
impl<T> Eq for RingBuffer<T> where T: Eq {}
impl<T> Extend<T> for RingBuffer<T> {
    /// Pushes every item produced by `iter`, growing as required.
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        iter.into_iter().for_each(|item| {
            self.push(item);
        });
    }
}
impl<'a, T> Extend<&'a T> for RingBuffer<T>
where T: 'a + Copy {
    /// Copies every referenced item into the buffer.
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().copied());
    }
}
impl<T> From<Vec<T>> for RingBuffer<T> {
    /// Moves every element of `other` into a buffer pre-sized for them.
    fn from(other: Vec<T>) -> Self {
        let mut buffer = RingBuffer::with_capacity(other.len());
        buffer.extend(other);
        buffer
    }
}
impl<T> Hash for RingBuffer<T>
where T: Hash {
    /// Hashes the length, the internal cursors and the element sequence —
    /// matching the positional notion of equality used by `PartialEq`
    /// (equal buffers therefore hash equally).
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.len.hash(state);
        self.head.hash(state);
        self.tail.hash(state);
        let (older, newer) = self.as_slices();
        Hash::hash_slice(older, state);
        Hash::hash_slice(newer, state);
    }
}
impl<T> Into<VecDeque<T>> for RingBuffer<T> {
fn into(self) -> VecDeque<T> {
let mut vec = VecDeque::<T>::with_capacity(self.len());
for elem in self.into_iter() {
vec.push_back(elem);
}
vec
}
}
impl<T> FromIterator<T> for RingBuffer<T> {
    /// Builds a buffer pre-sized from the iterator's lower size bound.
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> RingBuffer<T> {
        let iterator = iter.into_iter();
        let (lower_bound, _) = iterator.size_hint();
        let mut ring_buffer = RingBuffer::<T>::with_capacity(lower_bound);
        ring_buffer.extend(iterator);
        ring_buffer
    }
}
impl<T> Index<usize> for RingBuffer<T> {
    type Output = T;
    /// Indexes by absolute position (see `get_absolute`); panics when the
    /// slot does not hold a live element.
    #[inline]
    fn index(&self, index: usize) -> &T {
        self.get_absolute(index).expect("Out of bounds access")
    }
}
impl<T> IndexMut<usize> for RingBuffer<T> {
    /// Mutable counterpart of `Index`.
    #[inline]
    fn index_mut(&mut self, index: usize) -> &mut T {
        self.get_absolute_mut(index).expect("Out of bounds access")
    }
}
impl<T> IntoIterator for RingBuffer<T> {
    type Item = T;
    type IntoIter = OwnedIter<T>;
    /// Consumes the buffer into an iterator that pops oldest-first.
    fn into_iter(self) -> Self::IntoIter {
        OwnedIter { inner: self }
    }
}
impl<'a, T> IntoIterator for &'a RingBuffer<T> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a, T> IntoIterator for &'a mut RingBuffer<T> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
impl<T> Ord for RingBuffer<T>
where T: Ord {
    /// Lexicographic comparison, oldest element first.
    fn cmp(&self, other: &RingBuffer<T>) -> Ordering {
        Iterator::cmp(self.iter(), other.iter())
    }
}
impl<T> PartialEq<RingBuffer<T>> for RingBuffer<T>
where T: PartialEq {
    /// Positional equality: two buffers compare equal only when their
    /// element count, internal cursors AND element sequences all match
    /// (absolute indices are part of this type's API, so layout matters).
    fn eq(&self, other: &RingBuffer<T>) -> bool {
        self.len == other.len
            && self.tail == other.tail
            && self.head == other.head
            && self.iter().zip(other.iter()).all(|(a, b)| a == b)
    }
}
impl<T> PartialOrd<RingBuffer<T>> for RingBuffer<T>
where T: PartialOrd {
    /// Lexicographic partial comparison, oldest element first.
    fn partial_cmp(&self, other: &RingBuffer<T>) -> Option<Ordering> {
        Iterator::partial_cmp(self.iter(), other.iter())
    }
}
/// Borrowing iterator over a `RingBuffer`, yielding elements oldest-first.
pub struct Iter<'a, T: 'a> {
    buffer: &'a [T],
    len: usize,
    index: usize
}
impl<T> Iter<'_, T> {
    /// Splits the remaining elements into (contiguous run, wrapped rest).
    fn as_slices(&self) -> (&[T], &[T]) {
        let end = self.index + self.len;
        let capacity = self.buffer.len();
        if end > capacity {
            (&self.buffer[self.index..], &self.buffer[..end - capacity])
        } else {
            (&self.buffer[self.index..end], &[])
        }
    }
}
impl<T> Clone for Iter<'_, T> {
    fn clone(&self) -> Self {
        // Every field is `Copy`, so duplicate them wholesale.
        Iter { ..*self }
    }
}
impl<T> Debug for Iter<'_, T>
where T: Debug {
    /// Shows the remaining elements as two slices (run, wrapped rest).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let (front, back) = self.as_slices();
        f.debug_tuple("Iter").field(&front).field(&back).finish()
    }
}
impl<'a, T> Iterator for Iter<'a, T>
where T: 'a {
    type Item = &'a T;
    /// Yields the next element (oldest first), wrapping the cursor with a
    /// power-of-two mask (the backing slice spans the full capacity).
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.len == 0 {
            return None;
        }
        let at = self.index;
        self.index = (at + 1) & (self.buffer.len() - 1);
        self.len -= 1;
        // SAFETY: `at` is always within the backing slice.
        unsafe { Some(self.buffer.get_unchecked(at)) }
    }
    /// The remaining count is known exactly.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.len;
        (remaining, Some(remaining))
    }
}
impl<T> ExactSizeIterator for Iter<'_, T> {}
impl<T> FusedIterator for Iter<'_, T> {}
/// Mutably borrowing iterator over a `RingBuffer`, oldest-first.
pub struct IterMut<'a, T: 'a> {
    buffer: &'a mut [T],
    len: usize,
    index: usize
}
impl<T> IterMut<'_, T> {
    /// Splits the remaining elements into (contiguous run, wrapped rest).
    fn as_slices(&self) -> (&[T], &[T]) {
        let end = self.index + self.len;
        let capacity = self.buffer.len();
        if end > capacity {
            (&self.buffer[self.index..], &self.buffer[..end - capacity])
        } else {
            (&self.buffer[self.index..end], &[])
        }
    }
}
impl<T> Debug for IterMut<'_, T>
where T: Debug {
    /// Shows the remaining elements as two slices (run, wrapped rest).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let (front, back) = self.as_slices();
        f.debug_tuple("IterMut").field(&front).field(&back).finish()
    }
}
impl<'a, T> Iterator for IterMut<'a, T>
where T: 'a {
    type Item = &'a mut T;
    /// Yields a mutable reference to the next element (oldest first),
    /// wrapping the cursor with a power-of-two mask.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.len > 0 {
            let current_index = self.index;
            self.index = (self.index + 1) & (self.buffer.len() - 1);
            self.len -= 1;
            unsafe {
                // SAFETY(review): the raw-pointer round-trip detaches the
                // `&mut` borrow from the iterator so it can live for 'a.
                // This is sound only if each of the `len` visited indices
                // is distinct — which holds as long as `len` never exceeds
                // the buffer length; confirm every constructor of
                // `IterMut` upholds that invariant.
                let elem = self.buffer.get_unchecked_mut(current_index);
                Some(&mut *(elem as *mut T))
            }
        } else {
            None
        }
    }
    /// The remaining count is known exactly.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.len, Some(self.len))
    }
}
impl<T> ExactSizeIterator for IterMut<'_, T> {}
impl<T> FusedIterator for IterMut<'_, T> {}
/// Owning iterator that drains a `RingBuffer` oldest-first.
#[derive(Clone)]
pub struct OwnedIter<T> {
    inner: RingBuffer<T>
}
impl<T> Debug for OwnedIter<T>
where T: Debug {
    /// Shows the remaining elements as two slices (run, wrapped rest).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let (front, back) = self.inner.as_slices();
        f.debug_tuple("OwnedIter").field(&front).field(&back).finish()
    }
}
impl<T> Iterator for OwnedIter<T> {
    type Item = T;
    /// Hands out ownership of the oldest remaining element.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.pop()
    }
    /// The remaining count is known exactly.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.inner.len;
        (remaining, Some(remaining))
    }
}
impl<T> ExactSizeIterator for OwnedIter<T> {}
impl<T> FusedIterator for OwnedIter<T> {}
#[cfg(test)]
mod tests {
    use crate::RingBuffer;
    /// End-to-end exercise of push/pop, relative and absolute indexing,
    /// growth (`expand`) and shrinking (`shrink_to_fit`), checking the
    /// exact absolute indices `push` hands out across reallocations.
    #[test]
    fn access_and_reallocations() {
        let mut buffer = RingBuffer::<usize>::with_capacity(2);
        let mut indices = Vec::with_capacity(8);
        assert_eq!(buffer.capacity, 2, "Initialised with too much capacity");
        // Fill past capacity twice: 2 -> 4 -> 8.
        for i in 1..=8 {
            indices.push(buffer.push(i));
        }
        assert_eq!(indices.as_slice(), &[0, 1, 2, 3, 4, 5, 6, 7][..]);
        assert_eq!(buffer.capacity, 8);
        assert_eq!(buffer.as_slices(), (&[1, 2, 3, 4, 5, 6, 7, 8][..], &[][..]));
        // Relative index 0 is the newest element; absolute indices resolve
        // via masking.
        for i in 0..8 {
            assert_eq!(buffer.get_relative(i), Some(&(8 - i)));
            assert_eq!(buffer.get_absolute(indices[i]), Some(&(i + 1)));
        }
        indices.clear();
        // Pop the four oldest, then push four more so the data wraps;
        // wrapped pushes return indices offset by the capacity.
        for i in 1..=4 {
            indices.push(i + 3);
            assert_eq!(buffer.pop(), Some(i));
        }
        for i in 9..=12 {
            indices.push(buffer.push(i));
        }
        assert_eq!(indices.as_slice(), &[4, 5, 6, 7, 8, 9, 10, 11][..]);
        assert_eq!(buffer.capacity, 8);
        assert_eq!(buffer.as_slices(), (&[5, 6, 7, 8][..], &[9, 10, 11, 12][..]));
        // Growing a wrapped buffer makes the data contiguous again.
        for i in 13..=16 {
            indices.push(buffer.push(i));
        }
        assert_eq!(indices.as_slice(), &[4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15][..]);
        assert_eq!(buffer.capacity, 16);
        assert_eq!(buffer.as_slices(), (&[5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16][..], &[][..]));
        for i in 0..12 {
            assert_eq!(buffer.get_relative(i), Some(&(16 - i)));
            assert_eq!(buffer.get_absolute(indices[i]), Some(&(i + 5)));
        }
        indices.clear();
        // Drop to 8 live elements and shrink; absolute indices shift with
        // the new, smaller mask (expected values tracked manually).
        for i in 5..=8 {
            indices.push(i + 3);
            assert_eq!(buffer.pop(), Some(i));
        }
        for i in 12..=15 {
            indices.push(i);
        }
        buffer.shrink_to_fit();
        assert_eq!(indices.as_slice(), &[8, 9, 10, 11, 12, 13, 14, 15][..]);
        assert_eq!(buffer.capacity, 8);
        assert_eq!(buffer.as_slices(), (&[9, 10, 11, 12, 13, 14, 15, 16][..], &[][..]));
        indices.clear();
        // Shrink a wrapped buffer down to 4 slots.
        for i in 9..=14 {
            assert_eq!(buffer.pop(), Some(i));
        }
        indices.push(14);
        indices.push(15);
        for i in 17..=18 {
            indices.push(buffer.push(i));
        }
        buffer.shrink_to_fit();
        assert_eq!(indices.as_slice(), &[14, 15, 8, 9][..]);
        assert_eq!(buffer.capacity, 4);
        assert_eq!(buffer.as_slices(), (&[15, 16][..], &[17, 18][..]));
        for i in 0..4 {
            assert_eq!(buffer.get_relative(i), Some(&(18 - i)));
            assert_eq!(buffer.get_absolute(indices[i]), Some(&(i + 15)));
        }
    }
}