use core::ops::{Index, IndexMut};
#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
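/// The core trait shared by the ring buffers in this crate: a fixed-capacity,
/// wrapping buffer with queue-style operations (`push`/`dequeue`), indexed
/// access from either end, and iteration from front (oldest) to back (newest).
///
/// # Safety
///
/// Implementing this trait is `unsafe`: the provided methods and the iterator
/// types in this module call the hidden `ptr_*` methods through raw pointers
/// and rely on them to report lengths and element addresses consistent with
/// the buffer's actual contents.
///
/// A minimal usage sketch, written against the trait alone since no concrete
/// implementation is defined in this module (hence `ignore`):
///
/// ```ignore
/// fn sum_and_rotate<RB: RingBuffer<u32>>(rb: &mut RB) -> u32 {
///     let total = rb.iter().sum();
///     // Move the oldest element to the back; the length stays the same.
///     if let Some(front) = rb.dequeue() {
///         rb.push(front);
///     }
///     total
/// }
/// ```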
pub unsafe trait RingBuffer<T>:
Sized + IntoIterator<Item = T> + Extend<T> + Index<usize, Output = T> + IndexMut<usize>
{
/// Returns the number of elements currently stored in the ring buffer.
fn len(&self) -> usize {
unsafe { Self::ptr_len(self) }
}
/// Raw-pointer form of `len`, used internally by the iterator types.
#[doc(hidden)]
unsafe fn ptr_len(rb: *const Self) -> usize;
/// Returns `true` if the ring buffer contains no elements.
#[inline]
fn is_empty(&self) -> bool {
self.len() == 0
}
/// Returns `true` if the ring buffer holds `capacity()` elements.
#[inline]
fn is_full(&self) -> bool {
self.len() == self.capacity()
}
/// Returns the number of elements the ring buffer can hold.
fn capacity(&self) -> usize {
unsafe { Self::ptr_capacity(self) }
}
/// Returns the size of the underlying backing buffer, which may differ from
/// `capacity()` depending on the implementation.
fn buffer_size(&self) -> usize {
unsafe { Self::ptr_buffer_size(self) }
}
#[doc(hidden)]
unsafe fn ptr_capacity(rb: *const Self) -> usize;
#[doc(hidden)]
unsafe fn ptr_buffer_size(rb: *const Self) -> usize;
/// Pushes `value` onto the back of the ring buffer.
fn push(&mut self, value: T);
/// Alias for [`Self::push`], provided for queue-style naming.
fn enqueue(&mut self, value: T) {
self.push(value);
}
/// Removes and returns the element at the front (the oldest element),
/// or `None` if the buffer is empty.
fn dequeue(&mut self) -> Option<T>;
/// Removes the front element without returning it; does nothing if the buffer is empty.
#[inline]
fn skip(&mut self) {
let _ = self.dequeue();
}
/// Returns an iterator that dequeues elements from the front until the buffer is empty.
fn drain(&mut self) -> RingBufferDrainingIterator<T, Self> {
RingBufferDrainingIterator::new(self)
}
/// Fills the buffer to capacity with values produced by repeatedly calling `f`.
fn fill_with<F: FnMut() -> T>(&mut self, f: F);
/// Fills the buffer to capacity with `T::default()`.
fn fill_default(&mut self)
where
T: Default,
{
self.fill_with(Default::default);
}
/// Fills the buffer to capacity with clones of `value`.
fn fill(&mut self, value: T)
where
T: Clone,
{
self.fill_with(|| value.clone());
}
/// Removes and drops all elements, resetting the buffer to its empty state.
fn clear(&mut self);
/// Returns a reference to the element at `index`. Negative indices count
/// backwards from the most recently pushed element, so `-1` is the back.
/// Returns `None` if the buffer is empty.
fn get_signed(&self, index: isize) -> Option<&T>;
/// Returns a reference to the element at `index`, counted forwards from the
/// front (the oldest element). Returns `None` if the buffer is empty.
fn get(&self, index: usize) -> Option<&T>;
/// Mutable counterpart of [`Self::get_signed`].
#[inline]
fn get_mut_signed(&mut self, index: isize) -> Option<&mut T> {
unsafe { Self::ptr_get_mut_signed(self, index).map(|i| &mut *i) }
}
/// Mutable counterpart of [`Self::get`].
#[inline]
fn get_mut(&mut self, index: usize) -> Option<&mut T> {
unsafe { Self::ptr_get_mut(self, index).map(|i| &mut *i) }
}
#[doc(hidden)]
unsafe fn ptr_get_mut(rb: *mut Self, index: usize) -> Option<*mut T>;
#[doc(hidden)]
unsafe fn ptr_get_mut_signed(rb: *mut Self, index: isize) -> Option<*mut T>;
/// Returns a reference to the front element, if any; alias for [`Self::front`].
#[inline]
fn peek(&self) -> Option<&T> {
self.front()
}
/// Returns a reference to the oldest element in the buffer, if any.
#[inline]
fn front(&self) -> Option<&T> {
self.get(0)
}
/// Returns a mutable reference to the oldest element in the buffer, if any.
#[inline]
fn front_mut(&mut self) -> Option<&mut T> {
self.get_mut(0)
}
/// Returns a reference to the most recently pushed element, if any.
#[inline]
fn back(&self) -> Option<&T> {
self.get_signed(-1)
}
/// Returns a mutable reference to the most recently pushed element, if any.
#[inline]
fn back_mut(&mut self) -> Option<&mut T> {
self.get_mut_signed(-1)
}
/// Returns an iterator over mutable references, from front to back.
#[inline]
fn iter_mut(&mut self) -> RingBufferMutIterator<T, Self> {
RingBufferMutIterator::new(self)
}
/// Returns an iterator over shared references, from front to back.
#[inline]
fn iter(&self) -> RingBufferIterator<T, Self> {
RingBufferIterator::new(self)
}
/// Collects the contents into a `Vec`, front to back, cloning each element.
#[cfg(feature = "alloc")]
fn to_vec(&self) -> Vec<T>
where
T: Clone,
{
self.iter().cloned().collect()
}
/// Returns `true` if the buffer contains an element equal to `elem`.
fn contains(&self, elem: &T) -> bool
where
T: PartialEq,
{
self.iter().any(|i| i == elem)
}
}
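/// Iterator types returned by [`RingBuffer::iter`], [`RingBuffer::iter_mut`],
/// [`RingBuffer::drain`], and by-value iteration.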
mod iter {
use crate::RingBuffer;
use core::iter::FusedIterator;
use core::marker::PhantomData;
use core::ptr::NonNull;
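/// Immutable iterator over a ring buffer, yielding `&T` from front to back.
/// The length is captured once at construction.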
pub struct RingBufferIterator<'rb, T, RB: RingBuffer<T>> {
obj: &'rb RB,
len: usize,
index: usize,
phantom: PhantomData<T>,
}
impl<'rb, T, RB: RingBuffer<T>> RingBufferIterator<'rb, T, RB> {
#[inline]
pub fn new(obj: &'rb RB) -> Self {
Self {
obj,
len: obj.len(),
index: 0,
phantom: PhantomData,
}
}
}
impl<'rb, T: 'rb, RB: RingBuffer<T>> Iterator for RingBufferIterator<'rb, T, RB> {
type Item = &'rb T;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
if self.index < self.len {
let res = self.obj.get(self.index);
self.index += 1;
res
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
// The elements not yet yielded lie in [index, len), so this stays exact
// even after partial iteration, as required by `ExactSizeIterator`.
let remaining = self.len - self.index;
(remaining, Some(remaining))
}
}
impl<'rb, T: 'rb, RB: RingBuffer<T>> FusedIterator for RingBufferIterator<'rb, T, RB> {}
impl<'rb, T: 'rb, RB: RingBuffer<T>> ExactSizeIterator for RingBufferIterator<'rb, T, RB> {}
impl<'rb, T: 'rb, RB: RingBuffer<T>> DoubleEndedIterator for RingBufferIterator<'rb, T, RB> {
#[inline]
fn next_back(&mut self) -> Option<Self::Item> {
if self.len > 0 && self.index < self.len {
let res = self.obj.get(self.len - 1);
self.len -= 1;
res
} else {
None
}
}
}
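/// Mutable iterator over a ring buffer, yielding `&mut T` from front to back.
/// The buffer is held as a raw `NonNull` pointer rather than `&'rb mut RB` so
/// that `next` can hand out `&'rb mut T` references that are not tied to a
/// borrow of the iterator itself; the `PhantomData` keeps the `'rb` lifetime
/// and mutability in the type.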
pub struct RingBufferMutIterator<'rb, T, RB: RingBuffer<T>> {
obj: NonNull<RB>,
index: usize,
len: usize,
phantom: PhantomData<&'rb mut T>,
}
impl<'rb, T, RB: RingBuffer<T>> RingBufferMutIterator<'rb, T, RB> {
pub fn new(obj: &'rb mut RB) -> Self {
Self {
len: obj.len(),
obj: NonNull::from(obj),
index: 0,
phantom: PhantomData,
}
}
}
impl<'rb, T: 'rb, RB: RingBuffer<T> + 'rb> FusedIterator for RingBufferMutIterator<'rb, T, RB> {}
impl<'rb, T: 'rb, RB: RingBuffer<T> + 'rb> ExactSizeIterator for RingBufferMutIterator<'rb, T, RB> {}
impl<'rb, T: 'rb, RB: RingBuffer<T> + 'rb> DoubleEndedIterator
for RingBufferMutIterator<'rb, T, RB>
{
#[inline]
fn next_back(&mut self) -> Option<Self::Item> {
if self.len > 0 && self.index < self.len {
self.len -= 1;
let res = unsafe { RB::ptr_get_mut(self.obj.as_ptr(), self.len) };
res.map(|i| unsafe { &mut *i })
} else {
None
}
}
}
impl<'rb, T, RB: RingBuffer<T> + 'rb> Iterator for RingBufferMutIterator<'rb, T, RB> {
type Item = &'rb mut T;
fn next(&mut self) -> Option<Self::Item> {
if self.index < self.len {
let res = unsafe { RB::ptr_get_mut(self.obj.as_ptr(), self.index) };
self.index += 1;
res.map(|i| unsafe { &mut *i })
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
// Same reasoning as the shared-reference iterator: [index, len) is what remains.
let remaining = self.len - self.index;
(remaining, Some(remaining))
}
}
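/// Draining iterator returned by [`RingBuffer::drain`]: each call to `next`
/// dequeues the front element, so consuming the iterator empties the buffer.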
pub struct RingBufferDrainingIterator<'rb, T, RB: RingBuffer<T>> {
obj: &'rb mut RB,
phantom: PhantomData<T>,
}
impl<'rb, T, RB: RingBuffer<T>> RingBufferDrainingIterator<'rb, T, RB> {
#[inline]
pub fn new(obj: &'rb mut RB) -> Self {
Self {
obj,
phantom: PhantomData,
}
}
}
impl<'rb, T, RB: RingBuffer<T>> Iterator for RingBufferDrainingIterator<'rb, T, RB> {
type Item = T;
fn next(&mut self) -> Option<T> {
self.obj.dequeue()
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.obj.len(), Some(self.obj.len()))
}
}
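/// Owning iterator that consumes a ring buffer and yields its elements by
/// value, front to back, by repeatedly dequeueing.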
pub struct RingBufferIntoIterator<T, RB: RingBuffer<T>> {
obj: RB,
phantom: PhantomData<T>,
}
impl<T, RB: RingBuffer<T>> RingBufferIntoIterator<T, RB> {
#[inline]
pub fn new(obj: RB) -> Self {
Self {
obj,
phantom: PhantomData,
}
}
}
impl<T, RB: RingBuffer<T>> Iterator for RingBufferIntoIterator<T, RB> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
self.obj.dequeue()
}
fn size_hint(&self) -> (usize, Option<usize>) {
(self.obj.len(), Some(self.obj.len()))
}
}
}
pub use iter::{
RingBufferDrainingIterator, RingBufferIntoIterator, RingBufferIterator, RingBufferMutIterator,
};
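/// Implements `RingBuffer::ptr_len` for a concrete buffer type in terms of two
/// monotonically increasing indices: the number of stored elements is
/// `writeptr - readptr`.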
macro_rules! impl_ringbuffer {
($readptr: ident, $writeptr: ident) => {
#[inline]
unsafe fn ptr_len(rb: *const Self) -> usize {
(*rb).$writeptr - (*rb).$readptr
}
};
}
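/// Implements the index-based accessors (`get`, `get_signed`, `ptr_get_mut`,
/// `ptr_get_mut_signed`) and `clear` for a concrete buffer type. Negative
/// indices are first normalized by adding the current length; the result is
/// wrapped with `rem_euclid` and offset from the read pointer, and `$mask`
/// then maps that logical position onto a slot of the backing buffer (for
/// example, modulo `buffer_size()`).
///
/// A hypothetical invocation inside a `RingBuffer` impl (the field and helper
/// names here are illustrative, not defined in this module):
///
/// ```ignore
/// impl_ringbuffer!(readptr, writeptr);
/// impl_ringbuffer_ext!(get_unchecked, get_unchecked_mut, readptr, writeptr, mask_modulo);
/// ```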
macro_rules! impl_ringbuffer_ext {
($get_unchecked: ident, $get_unchecked_mut: ident, $readptr: ident, $writeptr: ident, $mask: expr) => {
#[inline]
fn get_signed(&self, index: isize) -> Option<&T> {
use core::ops::Not;
self.is_empty().not().then(move || {
let index_from_readptr = if index >= 0 {
index
} else {
self.len() as isize + index
};
let normalized_index =
self.$readptr as isize + index_from_readptr.rem_euclid(self.len() as isize);
unsafe {
$get_unchecked(self, $mask(self.buffer_size(), normalized_index as usize))
}
})
}
#[inline]
fn get(&self, index: usize) -> Option<&T> {
use core::ops::Not;
self.is_empty().not().then(move || {
let normalized_index = self.$readptr + index.rem_euclid(self.len());
unsafe {
$get_unchecked(self, $mask(self.buffer_size(), normalized_index))
}
})
}
#[inline]
#[doc(hidden)]
unsafe fn ptr_get_mut_signed(rb: *mut Self, index: isize) -> Option<*mut T> {
(Self::ptr_len(rb) != 0).then(move || {
let index_from_readptr = if index >= 0 {
index
} else {
Self::ptr_len(rb) as isize + index
};
let normalized_index = (*rb).$readptr as isize
+ index_from_readptr.rem_euclid(Self::ptr_len(rb) as isize);
unsafe {
$get_unchecked_mut(
rb,
$mask(Self::ptr_buffer_size(rb), normalized_index as usize),
)
}
})
}
#[inline]
#[doc(hidden)]
unsafe fn ptr_get_mut(rb: *mut Self, index: usize) -> Option<*mut T> {
(Self::ptr_len(rb) != 0).then(move || {
let normalized_index = (*rb).$readptr + index.rem_euclid(Self::ptr_len(rb));
unsafe {
$get_unchecked_mut(rb, $mask(Self::ptr_buffer_size(rb), normalized_index))
}
})
}
#[inline]
fn clear(&mut self) {
// Dequeue (and drop) every remaining element before resetting the indices.
for i in self.drain() {
drop(i);
}
self.$readptr = 0;
self.$writeptr = 0;
}
};
}