mod buffer;
use crate::buffer::Buffer;
use std::fmt;
use std::mem::MaybeUninit;
use std::ops::{Deref, DerefMut};
use std::ptr;
use std::slice;
/// A double-ended byte buffer: one contiguous allocation with spare room on
/// both sides of an "active" region, so bytes can be appended at either end
/// without shifting the contents on every push.
///
/// Invariant maintained by all methods: `offset + len <= buffer.cap`, and the
/// bytes in `[offset, offset + len)` are initialized.
pub struct BytesDeque {
// Backing allocation; `buffer.cap` is the total capacity in bytes.
buffer: Buffer<u8>,
// Number of initialized ("active") bytes, starting at `offset`.
len: usize,
// Index of the first active byte within the backing allocation.
offset: usize,
}
impl BytesDeque {
    /// Creates an empty deque backed by `capacity` bytes of storage.
    ///
    /// The active region starts at the midpoint of the allocation so that
    /// both front and back pushes have room before a reallocation is needed.
    /// A capacity of `0` allocates nothing (see [`is_dangling`](Self::is_dangling)).
    pub fn with_capacity(capacity: usize) -> BytesDeque {
        let mut buffer = Buffer::new();
        if capacity > 0 {
            buffer.realloc(capacity);
        }
        BytesDeque {
            buffer,
            len: 0,
            offset: capacity / 2,
        }
    }

    /// Total number of bytes in the backing allocation.
    pub fn capacity(&self) -> usize {
        self.buffer.cap
    }

    /// `true` while no storage has been allocated yet (capacity is zero).
    pub fn is_dangling(&self) -> bool {
        self.buffer.cap == 0
    }
}
impl BytesDeque {
    /// Raw pointer to the start of the backing allocation (NOT the active
    /// region; the active bytes begin `offset` bytes in).
    pub fn as_ptr(&self) -> *const u8 {
        self.buffer.ptr.as_ptr()
    }

    /// Mutable raw pointer to the start of the backing allocation.
    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.buffer.ptr.as_ptr()
    }

    /// The active (initialized) bytes, i.e. `[offset, offset + len)`.
    pub fn slice(&self) -> &[u8] {
        // SAFETY: the insertion/removal methods maintain the invariant
        // `offset + len <= capacity`, and they initialize this range before
        // extending `len` over it.
        unsafe { slice::from_raw_parts(self.as_ptr().add(self.offset), self.len) }
    }

    /// Mutable view of the active bytes.
    pub fn slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: same invariant as `slice`; `&mut self` guarantees exclusivity.
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr().add(self.offset), self.len) }
    }

    /// Spare bytes in front of the active region: `[0, offset)`. Possibly
    /// uninitialized, hence exposed as `MaybeUninit<u8>`.
    pub fn front_slice(&self) -> &[MaybeUninit<u8>] {
        // SAFETY: `offset <= capacity`, and uninitialized bytes are only ever
        // exposed behind `MaybeUninit`.
        unsafe { slice::from_raw_parts(self.as_ptr() as *const MaybeUninit<u8>, self.offset) }
    }

    /// Mutable spare bytes in front of the active region.
    pub fn front_slice_mut(&mut self) -> &mut [MaybeUninit<u8>] {
        // SAFETY: same bounds as `front_slice`; `&mut self` guarantees exclusivity.
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr() as *mut MaybeUninit<u8>, self.offset) }
    }

    /// Spare bytes behind the active region: `[offset + len, capacity)`.
    /// Possibly uninitialized, hence exposed as `MaybeUninit<u8>`.
    pub fn back_slice(&self) -> &[MaybeUninit<u8>] {
        // SAFETY: `offset + len <= capacity`, so both the start pointer and the
        // length stay within the allocation.
        unsafe {
            slice::from_raw_parts(
                self.as_ptr().add(self.offset + self.len) as *const MaybeUninit<u8>,
                self.capacity() - self.offset - self.len,
            )
        }
    }

    /// Mutable spare bytes behind the active region.
    pub fn back_slice_mut(&mut self) -> &mut [MaybeUninit<u8>] {
        // SAFETY: same bounds as `back_slice`; `&mut self` guarantees exclusivity.
        unsafe {
            slice::from_raw_parts_mut(
                self.as_mut_ptr().add(self.offset + self.len) as *mut MaybeUninit<u8>,
                self.capacity() - self.offset - self.len,
            )
        }
    }
}
impl BytesDeque {
    /// Capacity used for the first allocation of a previously dangling deque;
    /// the active region is placed at its midpoint.
    const DEFAULT_CAPACITY: usize = 8192;

    /// Sets the start of the active region without moving any bytes.
    ///
    /// # Safety
    /// `new_offset + self.len` must not exceed the capacity, and the bytes in
    /// `[new_offset, new_offset + len)` must be initialized.
    pub unsafe fn set_offset(&mut self, new_offset: usize) {
        self.offset = new_offset;
    }

    /// Sets the length of the active region without writing any bytes.
    ///
    /// # Safety
    /// `self.offset + new_len` must not exceed the capacity, and the bytes in
    /// `[offset, offset + new_len)` must be initialized.
    pub unsafe fn set_len(&mut self, new_len: usize) {
        self.len = new_len;
    }

    /// Resizes the backing allocation to `new_capacity`, truncating the
    /// active region (from the back) if it no longer fits.
    pub fn realloc(&mut self, new_capacity: usize) {
        if new_capacity <= self.offset {
            // The whole active region lies past the new end: discard it and
            // clamp the offset so the invariants still hold.
            self.offset = new_capacity;
            self.len = 0;
        } else if new_capacity < self.offset + self.len {
            // Only the tail sticks out past the new end: truncate it.
            self.len -= self.offset + self.len - new_capacity;
        }
        self.buffer.realloc(new_capacity);
    }

    // Performs the deferred first allocation for a dangling deque, centering
    // the (empty) active region.
    fn ensure_allocated(&mut self) {
        if self.capacity() == 0 {
            self.realloc(Self::DEFAULT_CAPACITY);
            self.offset = Self::DEFAULT_CAPACITY / 2;
        }
    }

    /// Guarantees at least `requested` spare bytes behind the active region,
    /// allocating or growing as needed.
    pub fn ensure_back_capacity(&mut self, requested: usize) {
        self.ensure_allocated();
        if requested > self.back_slice().len() {
            // Grow by a multiple of the current capacity large enough to cover
            // `requested` regardless of where the active region sits:
            // n*cap > cap + requested >= offset + len + requested.
            let n = 2 + requested / self.capacity();
            self.realloc(n * self.capacity());
        }
    }

    /// Guarantees at least `requested` spare bytes in front of the active
    /// region, allocating, growing, and moving the active bytes towards the
    /// back as needed.
    pub fn ensure_front_capacity(&mut self, requested: usize) {
        self.ensure_allocated();
        if requested > self.front_slice().len() {
            let old_capacity = self.capacity();
            let n = 2 + requested / old_capacity;
            self.realloc(n * old_capacity);
            // SAFETY: the new offset lies at least one old capacity past the
            // old active region's end (offset + len <= old_capacity), so the
            // ranges cannot overlap, and the destination ends within the
            // freshly grown allocation. The new front gap, offset +
            // (n - 1) * old_capacity, is > requested by choice of n.
            unsafe {
                self.move_active_nonoverlapping(self.offset + (n - 1) * old_capacity);
            }
        }
    }

    /// Moves the active bytes so they start at `new_offset`. The old and new
    /// ranges may overlap.
    ///
    /// # Panics
    /// Panics if `new_offset + self.len` exceeds the capacity. (Without this
    /// check a safe caller could trigger an out-of-bounds copy.)
    pub fn move_active(&mut self, new_offset: usize) {
        assert!(
            new_offset + self.len <= self.capacity(),
            "move_active: destination range out of bounds"
        );
        // SAFETY: source is the active region; destination was bounds-checked
        // above; `ptr::copy` tolerates overlap.
        unsafe {
            ptr::copy(
                self.slice().as_ptr(),
                self.as_mut_ptr().add(new_offset),
                self.len,
            );
        }
        self.offset = new_offset;
    }

    /// Like [`move_active`](Self::move_active) but uses a non-overlapping copy.
    ///
    /// # Safety
    /// `new_offset + self.len` must not exceed the capacity, and
    /// `[new_offset, new_offset + len)` must not overlap the current active
    /// region.
    pub unsafe fn move_active_nonoverlapping(&mut self, new_offset: usize) {
        debug_assert!(new_offset + self.len <= self.capacity());
        // SAFETY: bounds and non-overlap are the caller's contract (above).
        unsafe {
            ptr::copy_nonoverlapping(
                self.slice().as_ptr(),
                self.as_mut_ptr().add(new_offset),
                self.len,
            );
        }
        self.offset = new_offset;
    }
}
impl BytesDeque {
    /// Appends all of `src` after the current active bytes.
    pub fn extend(&mut self, src: &[u8]) {
        let count = src.len();
        self.ensure_back_capacity(count);
        let end = self.offset + self.len;
        // SAFETY: `ensure_back_capacity` guaranteed `count` writable bytes at
        // `end`, and `src` cannot alias `self` while `self` is borrowed mutably.
        unsafe {
            ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr().add(end), count);
        }
        self.len += count;
    }

    /// Appends a single byte at the back.
    pub fn push_back(&mut self, byte: u8) {
        self.ensure_back_capacity(1);
        let end = self.offset + self.len;
        // SAFETY: one spare byte at `end` was just guaranteed.
        unsafe {
            self.as_mut_ptr().add(end).write(byte);
        }
        self.len += 1;
    }

    /// Prepends all of `src` before the current active bytes.
    pub fn extend_front(&mut self, src: &[u8]) {
        let count = src.len();
        self.ensure_front_capacity(count);
        let start = self.offset - count;
        // SAFETY: `ensure_front_capacity` guaranteed `count` writable bytes
        // ending at `offset`.
        unsafe {
            ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr().add(start), count);
        }
        self.offset = start;
        self.len += count;
    }

    /// Prepends a single byte at the front.
    pub fn push_front(&mut self, byte: u8) {
        self.ensure_front_capacity(1);
        let start = self.offset - 1;
        // SAFETY: one spare byte just before `offset` was just guaranteed.
        unsafe {
            self.as_mut_ptr().add(start).write(byte);
        }
        self.offset = start;
        self.len += 1;
    }

    /// Drops up to `n` bytes from the back of the active region; removing
    /// more than is present simply empties the deque.
    pub fn remove_back(&mut self, n: usize) {
        self.len = self.len.saturating_sub(n);
    }

    /// Drops up to `n` bytes from the front of the active region; removing
    /// more than is present simply empties the deque.
    pub fn remove_front(&mut self, n: usize) {
        let n = n.min(self.len);
        self.offset += n;
        self.len -= n;
    }
}
impl Deref for BytesDeque {
    type Target = [u8];

    /// Derefs to the active byte range, so `&BytesDeque` coerces to `&[u8]`.
    fn deref(&self) -> &[u8] {
        self.slice()
    }
}
impl DerefMut for BytesDeque {
    /// Mutable counterpart of `deref`: the active byte range.
    fn deref_mut(&mut self) -> &mut [u8] {
        self.slice_mut()
    }
}
impl PartialEq<&[u8]> for BytesDeque {
fn eq(&self, other: &&[u8]) -> bool {
self.slice() == *other
}
}
impl fmt::Debug for BytesDeque {
    /// Formats the deque showing the active bytes plus the internal
    /// `len`/`offset` bookkeeping.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // `debug_struct` produces consistent `field: value, ` separators; the
        // hand-rolled format string was missing the comma after `active`.
        f.debug_struct("BytesDeque")
            .field("active", &self.slice())
            .field("len", &self.len)
            .field("offset", &self.offset)
            .finish()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE: comparisons use `&b"..."[..]` (an explicit `&[u8]`) because the
    // deque implements `PartialEq<&[u8]>`; a bare byte-string literal is a
    // `&[u8; N]` and does not coerce through `==`.

    #[test]
    fn test_with_capacity() {
        let d = BytesDeque::with_capacity(10);
        assert_eq!(d.capacity(), 10);
    }

    #[test]
    fn test_with_capacity_dangling() {
        let d = BytesDeque::with_capacity(0);
        assert!(d.is_dangling());
        assert_eq!(d, &b""[..]);
    }

    #[test]
    fn test_extend_empty() {
        let mut d = BytesDeque::with_capacity(0);
        assert!(d.is_dangling());
        d.extend(b"world");
        assert!(!d.is_dangling());
        d.push_front(b' ');
        d.extend_front(b"Hello");
        d.push_back(b'!');
        assert_eq!(d, &b"Hello world!"[..]);
        d.remove_back(2);
        assert_eq!(d, &b"Hello worl"[..]);
        d.remove_front(2);
        assert_eq!(d, &b"llo worl"[..]);
        // Removing more than is present empties the deque instead of panicking.
        d.remove_front(100);
        assert_eq!(d, &b""[..]);
    }

    #[test]
    fn test_extend_nonempty_01() {
        let mut d = BytesDeque::with_capacity(1);
        assert!(!d.is_dangling());
        d.extend(b"world");
        d.push_front(b' ');
        d.extend_front(b"Hello");
        d.push_back(b'!');
        assert_eq!(d, &b"Hello world!"[..]);
        d.remove_back(2);
        assert_eq!(d, &b"Hello worl"[..]);
        d.remove_front(2);
        assert_eq!(d, &b"llo worl"[..]);
        d.remove_back(100);
        assert_eq!(d, &b""[..]);
    }

    #[test]
    fn test_extend_nonempty_02() {
        let mut d = BytesDeque::with_capacity(10);
        assert!(!d.is_dangling());
        d.extend_front(b"son");
        assert_eq!(d, &b"son"[..]);
        d.extend_front(b"hello ");
        assert_eq!(d, &b"hello son"[..]);
    }

    #[test]
    fn test_realloc_truncates_back() {
        let mut d = BytesDeque::with_capacity(10);
        d.extend(b"abc"); // active region is [5, 8)
        d.realloc(6); // cuts off the last two active bytes
        assert_eq!(d, &b"a"[..]);
        assert_eq!(d.capacity(), 6);
    }

    #[test]
    fn test_realloc_below_offset_clears() {
        let mut d = BytesDeque::with_capacity(10);
        d.extend(b"abc"); // active region is [5, 8)
        d.realloc(4); // new capacity ends before the active region starts
        assert_eq!(d, &b""[..]);
        assert_eq!(d.capacity(), 4);
    }

    #[test]
    fn test_move_active() {
        let mut d = BytesDeque::with_capacity(8);
        d.extend(b"ab"); // active region is [4, 6)
        d.move_active(0);
        assert_eq!(d, &b"ab"[..]);
        d.move_active(6);
        assert_eq!(d, &b"ab"[..]);
    }
}