use heap_data::{TaggedPtr, HeapAllocation};
use std::fmt::{self, Write};
use std::hash;
use std::iter::FromIterator;
use std::io;
use std::mem;
use std::ops::{Deref, DerefMut};
use std::ptr;
use std::slice;
use u32_to_usize;
use usize_to_u32;
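/// A growable byte buffer.
///
/// Small contents are stored inline in the struct itself; larger contents live in a
/// reference-counted heap allocation (`HeapAllocation`) that clones can share.
///
/// Illustrative example (the crate name `zbuf` is an assumption; adjust the path to this crate):
///
/// ```ignore
/// use zbuf::BytesBuf;
///
/// let mut buf = BytesBuf::new();
/// buf.push_slice(b"hello ");
/// buf.push_slice(b"world");
/// assert_eq!(&*buf, b"hello world");
/// ```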
#[derive(Clone)]
pub struct BytesBuf(Inner);
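// `Inner` has two representations, distinguished by the tag carried in `ptr`:
//
// * Heap-allocated: `ptr` points to a (possibly shared) `HeapAllocation`, and `start`
//   and `len` delimit this buffer's view into that allocation's bytes.
// * Inline: the bytes of the struct itself (except the tag byte) store the data, and the
//   length is packed into `ptr`'s metadata bits; `start` and `len` are then unused.
//
// The field order differs by endianness so that the tag byte (the low-order byte of the
// pointer-sized word, as implied by `INLINE_DATA_OFFSET_BYTES` below) always sits at one
// edge of the struct, leaving a contiguous run of `INLINE_CAPACITY` bytes for inline data.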
#[cfg(target_endian = "little")]
#[derive(Clone)]
#[repr(C)]
struct Inner {
ptr: TaggedPtr,
start: u32,
len: u32,
}
#[cfg(target_endian = "big")]
#[derive(Clone)]
#[repr(C)]
struct Inner {
start: u32,
len: u32,
ptr: TaggedPtr,
}
#[cfg(target_endian = "little")]
const INLINE_DATA_OFFSET_BYTES: isize = 1;
#[cfg(target_endian = "big")]
const INLINE_DATA_OFFSET_BYTES: isize = 0;
const INLINE_LENGTH_MASK: usize = 0b_1111_1100;
const INLINE_LENGTH_OFFSET_BITS: usize = 2;
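// The inline length is packed into bits 2..8 of the tagged pointer's metadata word;
// the two low bits are left for the `TaggedPtr` tag itself.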
fn inline_length(metadata: usize) -> usize {
(metadata & INLINE_LENGTH_MASK) >> INLINE_LENGTH_OFFSET_BITS
}
fn set_inline_length(metadata: usize, new_len: usize) -> usize {
debug_assert!(new_len <= INLINE_CAPACITY);
let without_len = metadata & !INLINE_LENGTH_MASK;
let with_new_len = without_len | (new_len << INLINE_LENGTH_OFFSET_BITS);
with_new_len
}
#[cfg(target_pointer_width = "32")]
const SIZE_OF_INNER: usize = 4 + 4 + 4;
#[cfg(target_pointer_width = "64")]
const SIZE_OF_INNER: usize = 8 + 4 + 4;
// Compile-time size check: the transmute only compiles if `Inner` is exactly
// `SIZE_OF_INNER` bytes, which `INLINE_CAPACITY` below relies on.
#[allow(dead_code)]
unsafe fn static_assert(x: Inner) {
mem::transmute::<Inner, [u8; SIZE_OF_INNER]>(x);
}
const INLINE_CAPACITY: usize = SIZE_OF_INNER - 1;
impl BytesBuf {
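/// Returns a new, empty buffer using the inline representation.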
#[inline]
pub fn new() -> Self {
let metadata = 0; // inline representation, length 0
BytesBuf(Inner {
ptr: TaggedPtr::new_inline_data(metadata),
start: 0,
len: 0,
})
}
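/// Returns an empty buffer that can hold at least `capacity` bytes without reallocating.
/// Capacities up to `INLINE_CAPACITY` use the inline representation.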
#[inline]
pub fn with_capacity(capacity: usize) -> Self {
if capacity <= INLINE_CAPACITY {
Self::new()
} else {
BytesBuf(Inner {
ptr: TaggedPtr::allocate(capacity),
start: 0,
len: 0,
})
}
}
#[inline]
fn as_allocated(&self) -> Result<&HeapAllocation, usize> {
self.0.ptr.as_allocated()
}
#[inline]
pub fn len(&self) -> usize {
match self.as_allocated() {
Ok(_) => u32_to_usize(self.0.len),
Err(metadata) => inline_length(metadata),
}
}
#[inline]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
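/// Returns the initialized data followed by a raw slice over the uninitialized remainder
/// of the buffer's capacity. If the heap allocation is shared, the data is first copied
/// into a freshly owned buffer so that writing to the tail is sound.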
fn data_and_uninitialized_tail(&mut self) -> (&mut [u8], *mut [u8]) {
if self.0.ptr.is_shared_allocation() {
*self = {
let slice: &[u8] = self;
Self::from(slice)
}
}
if let Ok(metadata) = self.0.ptr.get_inline_data() {
let len = inline_length(metadata);
let struct_ptr: *mut Inner = &mut self.0;
unsafe {
let data_ptr = (struct_ptr as *mut u8).offset(INLINE_DATA_OFFSET_BYTES);
let inline = slice::from_raw_parts_mut(data_ptr, INLINE_CAPACITY);
let (initialized, tail) = inline.split_at_mut(len);
return (initialized, tail)
}
}
let heap_allocation = self.0.ptr.as_owned_allocated_mut()
.expect("expected owned allocation");
let start = u32_to_usize(self.0.start);
let len = u32_to_usize(self.0.len);
let data = heap_allocation.data_mut();
unsafe {
let (initialized, tail) = (*data)[start..].split_at_mut(len);
return (initialized, tail)
}
}
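/// Returns how many bytes this buffer can hold without reallocating.
/// For a buffer that shares its heap allocation with other clones this is just `len()`,
/// since the shared storage cannot be written to in place.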
#[inline]
pub fn capacity(&self) -> usize {
if let Ok(heap_allocation) = self.as_allocated() {
let capacity = if heap_allocation.is_owned() {
heap_allocation.data_capacity().checked_sub(self.0.start)
.expect("data_capacity < start ??")
} else {
self.0.len
};
u32_to_usize(capacity)
} else {
INLINE_CAPACITY
}
}
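/// Removes the first `bytes` bytes of the buffer.
///
/// For heap-allocated buffers this only advances `start`; no bytes are copied.
/// Panics if `bytes` is greater than the current length.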
pub fn pop_front(&mut self, bytes: usize) {
if let Ok(_) = self.as_allocated() {
let bytes = usize_to_u32(bytes);
match self.0.len.checked_sub(bytes) {
None => panic!("tried to pop {} bytes, only {} are available", bytes, self.0.len),
Some(new_len) => {
self.0.len = new_len;
self.0.start = self.0.start.checked_add(bytes).expect("overflow");
}
}
} else {
*self = Self::from(&self[bytes..])
}
}
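/// Removes the last `bytes` bytes of the buffer.
///
/// Panics if `bytes` is greater than the current length.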
pub fn pop_back(&mut self, bytes: usize) {
let len = self.len();
match len.checked_sub(bytes) {
None => panic!("tried to pop {} bytes, only {} are available", bytes, len),
Some(new_len) => self.truncate(new_len)
}
}
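/// Splits the buffer at index `at`: `self` keeps the bytes before `at`
/// and the returned buffer holds the bytes from `at` onward.
///
/// For heap-allocated buffers the two halves share the same allocation.
/// Panics if `at` is out of bounds.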
pub fn split_off(&mut self, at: usize) -> BytesBuf {
let mut tail;
if let Ok(_) = self.as_allocated() {
let _: &[u8] = &self[at..]; // bounds check before converting `at` to u32
let at = usize_to_u32(at);
tail = self.clone();
tail.0.start += at;
tail.0.len -= at;
} else {
tail = Self::from(&self[at..])
}
self.truncate(at);
tail
}
#[inline]
pub fn clear(&mut self) {
self.truncate(0)
}
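/// Shortens the buffer to `new_len` bytes. Does nothing if `new_len >= len()`.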
pub fn truncate(&mut self, new_len: usize) {
if new_len < self.len() {
unsafe {
self.set_len(new_len)
}
}
}
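/// Safety: callers must ensure that the first `new_len` bytes of the buffer
/// are initialized and that `new_len` does not exceed its capacity.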
unsafe fn set_len(&mut self, new_len: usize) {
match self.as_allocated() {
Ok(_) => {
self.0.len = usize_to_u32(new_len)
}
Err(metadata) => {
self.0.ptr = TaggedPtr::new_inline_data(set_inline_length(metadata, new_len))
}
}
}
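/// Ensures the buffer can hold at least `additional` more bytes without reallocating,
/// copying the existing contents into a new buffer if necessary.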
pub fn reserve(&mut self, additional: usize) {
let new_capacity = self.len().checked_add(additional).expect("overflow");
if new_capacity > self.capacity() {
let mut copy = Self::with_capacity(new_capacity);
unsafe {
copy.write_to_uninitialized_tail(|uninit| copy_into_prefix(self, uninit))
}
*self = copy
}
}
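/// Calls `f` with the uninitialized remainder of the buffer's capacity and then grows
/// the length by the number of bytes `f` reports having written.
///
/// Safety: the slice passed to `f` may contain uninitialized memory, so `f` must not
/// read from it before writing, and must return no more than the number of bytes it
/// actually initialized, counted from the start of the slice.
///
/// A sketch of intended use (illustrative only):
///
/// ```ignore
/// buf.reserve(4);
/// unsafe {
///     buf.write_to_uninitialized_tail(|tail| {
///         tail[0] = b'!';
///         1 // one byte of the tail was initialized
///     })
/// }
/// ```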
pub unsafe fn write_to_uninitialized_tail<F>(&mut self, f: F)
where F: FnOnce(&mut [u8]) -> usize {
let (_, tail) = self.data_and_uninitialized_tail();
let written = f(&mut *tail);
let new_len = self.len().checked_add(written).expect("overflow");
assert!(written <= (*tail).len());
self.set_len(new_len)
}
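/// Safe variant of `write_to_uninitialized_tail`: the spare capacity is zeroed
/// before `f` sees it, so reading from the slice is fine.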
pub fn write_to_zeroed_tail<F>(&mut self, f: F)
where F: FnOnce(&mut [u8]) -> usize {
unsafe {
self.write_to_uninitialized_tail(|tail| {
ptr::write_bytes(tail.as_mut_ptr(), 0, tail.len());
f(tail)
})
}
}
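/// Performs a single `read` from `reader` into the buffer's spare capacity and grows
/// the length by the number of bytes read.
///
/// Safety: like `write_to_uninitialized_tail`, this hands potentially uninitialized
/// memory to `Read::read`, which is only sound for readers that never read from the
/// buffer they are given.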
pub unsafe fn read_into_uninitialized_tail_from<R>(&mut self, mut reader: R)
-> io::Result<usize>
where R: io::Read {
let mut result = Ok(0);
self.write_to_uninitialized_tail(|tail| {
let r = reader.read(tail);
let written = match r {
Ok(bytes) => bytes,
Err(_) => 0,
};
result = r;
written
});
result
}
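/// Appends a copy of `slice` to the end of the buffer, reserving capacity as needed.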
pub fn push_slice(&mut self, slice: &[u8]) {
self.reserve(slice.len());
unsafe {
self.write_to_uninitialized_tail(|uninit| copy_into_prefix(slice, uninit))
}
}
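/// Appends the contents of another buffer.
///
/// If `other` immediately follows `self` within the same shared heap allocation,
/// this only extends `self`'s length; otherwise the bytes are copied.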
pub fn push_buf(&mut self, other: &BytesBuf) {
if self.is_empty() {
*self = other.clone();
return
}
fn raw<T>(x: &T) -> *const T { x }
if let (Ok(a), Ok(b)) = (self.as_allocated().map(raw), other.as_allocated().map(raw)) {
if ptr::eq(a, b) {
if (self.0.start + self.0.len) == other.0.start {
self.0.len += other.0.len;
return
}
}
}
self.push_slice(other)
}
}
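/// Copies `source` into the beginning of `dest` and returns the number of bytes copied.
/// Panics (via slicing) if `dest` is shorter than `source`.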
#[inline]
unsafe fn copy_into_prefix(source: &[u8], dest: *mut [u8]) -> usize {
let len = source.len();
(*dest)[..len].copy_from_slice(source);
len
}
impl Deref for BytesBuf {
type Target = [u8];
fn deref(&self) -> &[u8] {
match self.as_allocated() {
Ok(heap_allocation) => {
let start = u32_to_usize(self.0.start);
let len = u32_to_usize(self.0.len);
unsafe {
&(*heap_allocation.data())[start..][..len]
}
}
Err(metadata) => {
let len = inline_length(metadata);
let struct_ptr: *const Inner = &self.0;
let struct_ptr = struct_ptr as *const u8;
unsafe {
let data_ptr = struct_ptr.offset(INLINE_DATA_OFFSET_BYTES);
slice::from_raw_parts(data_ptr, len)
}
}
}
}
}
impl DerefMut for BytesBuf {
fn deref_mut(&mut self) -> &mut [u8] {
let (data, _) = self.data_and_uninitialized_tail();
data
}
}
impl AsRef<[u8]> for BytesBuf {
#[inline]
fn as_ref(&self) -> &[u8] {
self
}
}
impl AsMut<[u8]> for BytesBuf {
#[inline]
fn as_mut(&mut self) -> &mut [u8] {
self
}
}
impl<'a> From<&'a [u8]> for BytesBuf {
#[inline]
fn from(slice: &'a [u8]) -> Self {
let mut buf = Self::new();
buf.push_slice(slice);
buf
}
}
impl<'a, 'b> From<&'a &'b [u8]> for BytesBuf {
#[inline]
fn from(slice: &'a &'b [u8]) -> Self {
let mut buf = Self::new();
buf.push_slice(slice);
buf
}
}
impl fmt::Debug for BytesBuf {
#[inline]
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("b\"")?;
for &byte in &**self {
if let b' '..=b'~' = byte {
formatter.write_char(byte as char)?
} else {
write!(formatter, "\\x{:02X}", byte)?
}
}
formatter.write_char('"')
}
}
impl hash::Hash for BytesBuf {
#[inline]
fn hash<H>(&self, hasher: &mut H) where H: hash::Hasher {
<[u8]>::hash(self, hasher)
}
}
impl Default for BytesBuf {
#[inline]
fn default() -> Self {
Self::new()
}
}
impl Eq for BytesBuf {}
impl<T: AsRef<[u8]>> PartialEq<T> for BytesBuf {
#[inline]
fn eq(&self, other: &T) -> bool {
<[u8]>::eq(self, other.as_ref())
}
}
impl Ord for BytesBuf {
#[inline]
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
<[u8]>::cmp(self, other)
}
}
impl<T: AsRef<[u8]>> PartialOrd<T> for BytesBuf {
#[inline]
fn partial_cmp(&self, other: &T) -> Option<::std::cmp::Ordering> {
<[u8]>::partial_cmp(self, other.as_ref())
}
}
impl<'a> Extend<&'a [u8]> for BytesBuf {
#[inline]
fn extend<I>(&mut self, iter: I) where I: IntoIterator<Item=&'a [u8]> {
for item in iter {
self.push_slice(item)
}
}
}
impl<'a> FromIterator<&'a [u8]> for BytesBuf {
#[inline]
fn from_iter<I>(iter: I) -> Self where I: IntoIterator<Item=&'a [u8]> {
let mut buf = Self::new();
buf.extend(iter);
buf
}
}
impl<'a> Extend<&'a BytesBuf> for BytesBuf {
#[inline]
fn extend<I>(&mut self, iter: I) where I: IntoIterator<Item=&'a BytesBuf> {
for item in iter {
self.push_buf(item)
}
}
}
impl<'a> FromIterator<&'a BytesBuf> for BytesBuf {
#[inline]
fn from_iter<I>(iter: I) -> Self where I: IntoIterator<Item=&'a BytesBuf> {
let mut buf = Self::new();
buf.extend(iter);
buf
}
}
impl Extend<BytesBuf> for BytesBuf {
#[inline]
fn extend<I>(&mut self, iter: I) where I: IntoIterator<Item=BytesBuf> {
for item in iter {
self.push_buf(&item)
}
}
}
impl FromIterator<BytesBuf> for BytesBuf {
#[inline]
fn from_iter<I>(iter: I) -> Self where I: IntoIterator<Item=BytesBuf> {
let mut buf = Self::new();
buf.extend(iter);
buf
}
}
impl io::Write for BytesBuf {
#[inline]
fn write(&mut self, slice: &[u8]) -> io::Result<usize> {
self.push_slice(slice);
Ok(slice.len())
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}