use std::borrow::{Borrow, BorrowMut};
use std::iter::{FromIterator, Iterator};
use std::ops::{Deref, DerefMut, RangeBounds};
use std::sync::atomic::Ordering::{Acquire, Relaxed, Release};
use std::sync::atomic::{self, AtomicUsize};
use std::{cmp, fmt, hash, mem, ptr, ptr::NonNull, slice, usize};
use crate::bytes::pool::{PoolId, PoolRef};
use crate::bytes::{buf::IntoIter, buf::UninitSlice, debug, Buf, BufMut};
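/// A cheaply cloneable, reference-counted, contiguous slice of memory.
///
/// Clones share the underlying storage and only adjust a reference count.
/// Buffers of up to `INLINE_CAP` bytes are stored inline in the handle
/// itself, and `&'static [u8]` slices are referenced without allocating.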
pub struct Bytes {
inner: Inner,
}
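/// A mutable, growable byte buffer backed by a pool-aware allocation.
///
/// `BytesMut` supports in-place mutation and growth (`reserve`, `put_slice`,
/// `resize`) and can be converted into an immutable `Bytes` with `freeze`.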
pub struct BytesMut {
inner: Inner,
}
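/// A mutable byte buffer whose handle is a single pointer.
///
/// Length, capacity, offset and reference count live in a `SharedVec` header
/// placed at the front of the allocation. A `BytesVec` can be frozen into
/// `Bytes` or temporarily exposed as `BytesMut` via `with_bytes_mut`.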
pub struct BytesVec {
inner: InnerVec,
}
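// Internal representation shared by `Bytes` and `BytesMut`.
//
// The two low bits of `arc` encode the buffer kind (see the `KIND_*`
// constants below). For the inline kind the payload is stored directly in
// the bytes of this struct, which is why the field order differs between
// little- and big-endian targets: the tag/length byte must stay outside the
// `INLINE_CAP` bytes used for inline data.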
#[cfg(target_endian = "little")]
#[repr(C)]
struct Inner {
arc: NonNull<Shared>,
ptr: *mut u8,
len: usize,
cap: usize,
}
#[cfg(target_endian = "big")]
#[repr(C)]
struct Inner {
ptr: *mut u8,
len: usize,
cap: usize,
arc: NonNull<Shared>,
}
struct Shared {
vec: Vec<u8>,
ref_count: AtomicUsize,
pool: PoolRef,
}
struct SharedVec {
cap: usize,
len: u32,
offset: u32,
ref_count: AtomicUsize,
pool: PoolRef,
}
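// The two least significant bits of `Inner::arc` act as a tag selecting the
// representation: a ref-counted `Shared` vec (ARC), data stored inline in
// the handle (INLINE), a `&'static [u8]` (STATIC), or a `SharedVec`
// allocation (VEC). For VEC the tag is folded into the pointer value and
// stripped again with `KIND_UNMASK`.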
const KIND_ARC: usize = 0b00;
const KIND_INLINE: usize = 0b01;
const KIND_STATIC: usize = 0b10;
const KIND_VEC: usize = 0b11;
const KIND_MASK: usize = 0b11;
const KIND_UNMASK: usize = !KIND_MASK;
const MIN_NON_ZERO_CAP: usize = 64;
const SHARED_VEC_SIZE: usize = mem::size_of::<SharedVec>();
const INLINE_LEN_MASK: usize = 0b1111_1100;
const INLINE_LEN_OFFSET: usize = 2;
#[cfg(target_endian = "little")]
const INLINE_DATA_OFFSET: isize = 2;
#[cfg(target_endian = "big")]
const INLINE_DATA_OFFSET: isize = 0;
#[cfg(target_pointer_width = "64")]
const INLINE_CAP: usize = 4 * 8 - 2;
#[cfg(target_pointer_width = "32")]
const INLINE_CAP: usize = 4 * 4 - 2;
impl Bytes {
#[inline]
pub const fn new() -> Bytes {
Bytes {
inner: Inner::empty_inline(),
}
}
#[inline]
pub const fn from_static(bytes: &'static [u8]) -> Bytes {
Bytes {
inner: Inner::from_static(bytes),
}
}
#[inline]
pub fn len(&self) -> usize {
self.inner.len()
}
#[inline]
pub fn is_empty(&self) -> bool {
self.inner.is_empty()
}
pub fn is_inline(&self) -> bool {
self.inner.is_inline()
}
pub fn copy_from_slice(data: &[u8]) -> Self {
Self::copy_from_slice_in(data, PoolId::DEFAULT)
}
pub fn copy_from_slice_in<T>(data: &[u8], pool: T) -> Self
where
PoolRef: From<T>,
{
if data.len() <= INLINE_CAP {
Bytes {
inner: Inner::from_slice_inline(data),
}
} else {
Bytes {
inner: Inner::from_slice(data.len(), data, pool.into()),
}
}
}
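/// Returns a slice of self for the provided range.
///
/// Short ranges (up to `INLINE_CAP` bytes) are copied into an inline handle;
/// longer ranges share the underlying storage and bump the reference count.
///
/// # Panics
///
/// Panics if the range is out of bounds.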
pub fn slice(&self, range: impl RangeBounds<usize>) -> Bytes {
use std::ops::Bound;
let len = self.len();
let begin = match range.start_bound() {
Bound::Included(&n) => n,
Bound::Excluded(&n) => n + 1,
Bound::Unbounded => 0,
};
let end = match range.end_bound() {
Bound::Included(&n) => n + 1,
Bound::Excluded(&n) => n,
Bound::Unbounded => len,
};
assert!(begin <= end);
assert!(end <= len);
if end - begin <= INLINE_CAP {
Bytes {
inner: Inner::from_slice_inline(&self[begin..end]),
}
} else {
let mut ret = self.clone();
unsafe {
ret.inner.set_end(end);
ret.inner.set_start(begin);
}
ret
}
}
pub fn slice_ref(&self, subset: &[u8]) -> Bytes {
let bytes_p = self.as_ptr() as usize;
let bytes_len = self.len();
let sub_p = subset.as_ptr() as usize;
let sub_len = subset.len();
assert!(sub_p >= bytes_p);
assert!(sub_p + sub_len <= bytes_p + bytes_len);
let sub_offset = sub_p - bytes_p;
self.slice(sub_offset..(sub_offset + sub_len))
}
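/// Splits the bytes into two at the given index.
///
/// Afterwards `self` contains elements `[0, at)` and the returned `Bytes`
/// contains elements `[at, len)`. No data is copied unless the returned part
/// is short enough to be stored inline.
///
/// # Panics
///
/// Panics if `at > len`.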
pub fn split_off(&mut self, at: usize) -> Bytes {
assert!(at <= self.len());
if at == self.len() {
return Bytes::new();
}
if at == 0 {
mem::replace(self, Bytes::new())
} else {
Bytes {
inner: self.inner.split_off(at, true),
}
}
}
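/// Splits the bytes into two at the given index.
///
/// Afterwards `self` contains elements `[at, len)` and the returned `Bytes`
/// contains elements `[0, at)`.
///
/// # Panics
///
/// Panics if `at > len`.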
pub fn split_to(&mut self, at: usize) -> Bytes {
assert!(at <= self.len());
if at == self.len() {
return mem::replace(self, Bytes::new());
}
if at == 0 {
Bytes::new()
} else {
Bytes {
inner: self.inner.split_to(at, true),
}
}
}
#[inline]
pub fn truncate(&mut self, len: usize) {
self.inner.truncate(len, true);
}
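/// Reduces memory usage of a buffer that is much larger than its contents.
///
/// Contents short enough to fit inline are copied into the handle; heap
/// buffers with at least 64 spare bytes are re-allocated to a tighter
/// allocation from the same pool. Inline and static buffers are left as-is.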
#[inline]
pub fn trimdown(&mut self) {
let kind = self.inner.kind();
if !(kind == KIND_INLINE || kind == KIND_STATIC) {
if self.inner.len() <= INLINE_CAP {
*self = Bytes {
inner: Inner::from_slice_inline(self),
};
} else if self.inner.capacity() - self.inner.len() >= 64 {
*self = Bytes {
inner: Inner::from_slice(self.len(), self, self.inner.pool()),
}
}
}
}
#[inline]
pub fn clear(&mut self) {
self.inner = Inner::empty_inline();
}
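/// Attempts to convert into a `BytesMut` without copying.
///
/// Succeeds for inline buffers and for heap buffers whose storage is not
/// shared with any other handle; static buffers and shared buffers are
/// returned unchanged in the `Err` variant.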
pub fn try_mut(self) -> Result<BytesMut, Bytes> {
if self.inner.is_mut_safe() {
Ok(BytesMut { inner: self.inner })
} else {
Err(self)
}
}
pub fn iter(&'_ self) -> std::slice::Iter<'_, u8> {
self.chunk().iter()
}
}
impl Buf for Bytes {
#[inline]
fn remaining(&self) -> usize {
self.len()
}
#[inline]
fn chunk(&self) -> &[u8] {
self.inner.as_ref()
}
#[inline]
fn advance(&mut self, cnt: usize) {
assert!(
cnt <= self.inner.as_ref().len(),
"cannot advance past `remaining`"
);
unsafe {
self.inner.set_start(cnt);
}
}
}
impl bytes::buf::Buf for Bytes {
#[inline]
fn remaining(&self) -> usize {
self.len()
}
#[inline]
fn chunk(&self) -> &[u8] {
self.inner.as_ref()
}
#[inline]
fn advance(&mut self, cnt: usize) {
assert!(
cnt <= self.inner.as_ref().len(),
"cannot advance past `remaining`"
);
unsafe {
self.inner.set_start(cnt);
}
}
}
impl Clone for Bytes {
fn clone(&self) -> Bytes {
Bytes {
inner: unsafe { self.inner.shallow_clone() },
}
}
}
impl AsRef<[u8]> for Bytes {
#[inline]
fn as_ref(&self) -> &[u8] {
self.inner.as_ref()
}
}
impl Deref for Bytes {
type Target = [u8];
#[inline]
fn deref(&self) -> &[u8] {
self.inner.as_ref()
}
}
impl From<BytesMut> for Bytes {
fn from(src: BytesMut) -> Bytes {
src.freeze()
}
}
impl From<Vec<u8>> for Bytes {
fn from(src: Vec<u8>) -> Bytes {
if src.is_empty() {
Bytes::new()
} else if src.len() <= INLINE_CAP {
Bytes {
inner: Inner::from_slice_inline(&src),
}
} else {
BytesMut::from(src).freeze()
}
}
}
impl From<String> for Bytes {
fn from(src: String) -> Bytes {
if src.is_empty() {
Bytes::new()
} else if src.len() <= INLINE_CAP {
Bytes {
inner: Inner::from_slice_inline(src.as_bytes()),
}
} else {
BytesMut::from(src).freeze()
}
}
}
impl From<&'static [u8]> for Bytes {
fn from(src: &'static [u8]) -> Bytes {
Bytes::from_static(src)
}
}
impl From<&'static str> for Bytes {
fn from(src: &'static str) -> Bytes {
Bytes::from_static(src.as_bytes())
}
}
impl FromIterator<u8> for Bytes {
fn from_iter<T: IntoIterator<Item = u8>>(into_iter: T) -> Self {
BytesMut::from_iter(into_iter).freeze()
}
}
impl<'a> FromIterator<&'a u8> for Bytes {
fn from_iter<T: IntoIterator<Item = &'a u8>>(into_iter: T) -> Self {
BytesMut::from_iter(into_iter).freeze()
}
}
impl Eq for Bytes {}
impl PartialEq for Bytes {
fn eq(&self, other: &Bytes) -> bool {
self.inner.as_ref() == other.inner.as_ref()
}
}
impl PartialOrd for Bytes {
fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
self.inner.as_ref().partial_cmp(other.inner.as_ref())
}
}
impl Ord for Bytes {
fn cmp(&self, other: &Bytes) -> cmp::Ordering {
self.inner.as_ref().cmp(other.inner.as_ref())
}
}
impl Default for Bytes {
#[inline]
fn default() -> Bytes {
Bytes::new()
}
}
impl fmt::Debug for Bytes {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&debug::BsDebug(self.inner.as_ref()), fmt)
}
}
impl hash::Hash for Bytes {
fn hash<H>(&self, state: &mut H)
where
H: hash::Hasher,
{
let s: &[u8] = self.as_ref();
s.hash(state);
}
}
impl Borrow<[u8]> for Bytes {
fn borrow(&self) -> &[u8] {
self.as_ref()
}
}
impl IntoIterator for Bytes {
type Item = u8;
type IntoIter = IntoIter<Bytes>;
fn into_iter(self) -> Self::IntoIter {
IntoIter::new(self)
}
}
impl<'a> IntoIterator for &'a Bytes {
type Item = &'a u8;
type IntoIter = std::slice::Iter<'a, u8>;
fn into_iter(self) -> Self::IntoIter {
self.as_ref().iter()
}
}
impl BytesMut {
#[inline]
pub fn with_capacity(capacity: usize) -> BytesMut {
Self::with_capacity_in(capacity, PoolId::DEFAULT.pool_ref())
}
#[inline]
pub fn with_capacity_in<T>(capacity: usize, pool: T) -> BytesMut
where
PoolRef: From<T>,
{
BytesMut {
inner: Inner::with_capacity(capacity, pool.into()),
}
}
pub fn copy_from_slice<T: AsRef<[u8]>>(src: T) -> Self {
Self::copy_from_slice_in(src, PoolId::DEFAULT)
}
pub fn copy_from_slice_in<T, U>(src: T, pool: U) -> Self
where
T: AsRef<[u8]>,
PoolRef: From<U>,
{
let s = src.as_ref();
BytesMut {
inner: Inner::from_slice(s.len(), s, pool.into()),
}
}
#[inline]
pub fn from_vec<T>(src: Vec<u8>, pool: T) -> BytesMut
where
PoolRef: From<T>,
{
BytesMut {
inner: Inner::from_vec(src, pool.into()),
}
}
#[inline]
pub fn new() -> BytesMut {
BytesMut::with_capacity(MIN_NON_ZERO_CAP)
}
#[inline]
pub fn len(&self) -> usize {
self.inner.len()
}
#[inline]
pub fn is_empty(&self) -> bool {
self.inner.is_empty()
}
#[inline]
pub fn capacity(&self) -> usize {
self.inner.capacity()
}
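/// Converts `self` into an immutable `Bytes`.
///
/// No data is copied unless the contents are short enough to be stored
/// inline in the returned handle.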
#[inline]
pub fn freeze(self) -> Bytes {
if self.inner.len() <= INLINE_CAP {
Bytes {
inner: self.inner.to_inline(),
}
} else {
Bytes { inner: self.inner }
}
}
pub fn split_off(&mut self, at: usize) -> BytesMut {
BytesMut {
inner: self.inner.split_off(at, false),
}
}
pub fn split(&mut self) -> BytesMut {
let len = self.len();
self.split_to(len)
}
pub fn split_to(&mut self, at: usize) -> BytesMut {
assert!(at <= self.len());
BytesMut {
inner: self.inner.split_to(at, false),
}
}
pub fn truncate(&mut self, len: usize) {
self.inner.truncate(len, false);
}
pub fn clear(&mut self) {
self.truncate(0);
}
#[inline]
pub fn resize(&mut self, new_len: usize, value: u8) {
self.inner.resize(new_len, value);
}
#[inline]
#[allow(clippy::missing_safety_doc)]
pub unsafe fn set_len(&mut self, len: usize) {
self.inner.set_len(len)
}
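/// Reserves capacity for at least `additional` more bytes.
///
/// If the current allocation is uniquely owned and large enough, the
/// contents are shifted to its front and the allocation is reused; otherwise
/// a new allocation is obtained from the buffer's pool.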
#[inline]
pub fn reserve(&mut self, additional: usize) {
let len = self.len();
let rem = self.capacity() - len;
if additional <= rem {
return;
}
self.inner.reserve_inner(additional);
}
#[inline]
pub fn extend_from_slice(&mut self, extend: &[u8]) {
self.put_slice(extend);
}
#[inline]
pub fn iter(&'_ self) -> std::slice::Iter<'_, u8> {
self.chunk().iter()
}
pub(crate) fn move_to_pool(&mut self, pool: PoolRef) {
self.inner.move_to_pool(pool);
}
}
impl Buf for BytesMut {
#[inline]
fn remaining(&self) -> usize {
self.len()
}
#[inline]
fn chunk(&self) -> &[u8] {
self.inner.as_ref()
}
#[inline]
fn advance(&mut self, cnt: usize) {
assert!(
cnt <= self.inner.as_ref().len(),
"cannot advance past `remaining`"
);
unsafe {
self.inner.set_start(cnt);
}
}
}
impl BufMut for BytesMut {
#[inline]
fn remaining_mut(&self) -> usize {
self.capacity() - self.len()
}
#[inline]
unsafe fn advance_mut(&mut self, cnt: usize) {
let new_len = self.len() + cnt;
self.inner.set_len(new_len);
}
#[inline]
fn chunk_mut(&mut self) -> &mut UninitSlice {
let len = self.len();
unsafe {
let ptr = &mut self.inner.as_raw()[len..];
UninitSlice::from_raw_parts_mut(ptr.as_mut_ptr(), self.capacity() - len)
}
}
#[inline]
fn put_slice(&mut self, src: &[u8]) {
let len = src.len();
self.reserve(len);
unsafe {
ptr::copy_nonoverlapping(
src.as_ptr(),
self.chunk_mut().as_mut_ptr() as *mut u8,
len,
);
self.advance_mut(len);
}
}
#[inline]
fn put_u8(&mut self, n: u8) {
self.reserve(1);
self.inner.put_u8(n);
}
#[inline]
fn put_i8(&mut self, n: i8) {
self.reserve(1);
self.put_u8(n as u8);
}
}
impl AsRef<[u8]> for BytesMut {
#[inline]
fn as_ref(&self) -> &[u8] {
self.inner.as_ref()
}
}
impl AsMut<[u8]> for BytesMut {
#[inline]
fn as_mut(&mut self) -> &mut [u8] {
self.inner.as_mut()
}
}
impl Deref for BytesMut {
type Target = [u8];
#[inline]
fn deref(&self) -> &[u8] {
self.as_ref()
}
}
impl DerefMut for BytesMut {
#[inline]
fn deref_mut(&mut self) -> &mut [u8] {
self.inner.as_mut()
}
}
impl From<Vec<u8>> for BytesMut {
#[inline]
fn from(src: Vec<u8>) -> BytesMut {
BytesMut::from_vec(src, PoolId::DEFAULT.pool_ref())
}
}
impl From<String> for BytesMut {
#[inline]
fn from(src: String) -> BytesMut {
BytesMut::from_vec(src.into_bytes(), PoolId::DEFAULT.pool_ref())
}
}
impl<'a> From<&'a [u8]> for BytesMut {
fn from(src: &'a [u8]) -> BytesMut {
let len = src.len();
if len == 0 {
BytesMut::new()
} else {
BytesMut::copy_from_slice_in(src, PoolId::DEFAULT.pool_ref())
}
}
}
impl<'a> From<&'a str> for BytesMut {
#[inline]
fn from(src: &'a str) -> BytesMut {
BytesMut::from(src.as_bytes())
}
}
impl From<Bytes> for BytesMut {
#[inline]
fn from(src: Bytes) -> BytesMut {
src.try_mut()
.unwrap_or_else(|src| BytesMut::copy_from_slice_in(&src[..], src.inner.pool()))
}
}
impl Eq for BytesMut {}
impl PartialEq for BytesMut {
#[inline]
fn eq(&self, other: &BytesMut) -> bool {
self.inner.as_ref() == other.inner.as_ref()
}
}
impl Default for BytesMut {
#[inline]
fn default() -> BytesMut {
BytesMut::new()
}
}
impl Borrow<[u8]> for BytesMut {
#[inline]
fn borrow(&self) -> &[u8] {
self.as_ref()
}
}
impl BorrowMut<[u8]> for BytesMut {
#[inline]
fn borrow_mut(&mut self) -> &mut [u8] {
self.as_mut()
}
}
impl fmt::Debug for BytesMut {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&debug::BsDebug(self.inner.as_ref()), fmt)
}
}
impl fmt::Write for BytesMut {
#[inline]
fn write_str(&mut self, s: &str) -> fmt::Result {
if self.remaining_mut() >= s.len() {
self.put_slice(s.as_bytes());
Ok(())
} else {
Err(fmt::Error)
}
}
#[inline]
fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> fmt::Result {
fmt::write(self, args)
}
}
impl Clone for BytesMut {
#[inline]
fn clone(&self) -> BytesMut {
BytesMut {
inner: unsafe { self.inner.shallow_clone() },
}
}
}
impl IntoIterator for BytesMut {
type Item = u8;
type IntoIter = IntoIter<BytesMut>;
fn into_iter(self) -> Self::IntoIter {
IntoIter::new(self)
}
}
impl<'a> IntoIterator for &'a BytesMut {
type Item = &'a u8;
type IntoIter = std::slice::Iter<'a, u8>;
fn into_iter(self) -> Self::IntoIter {
self.as_ref().iter()
}
}
impl FromIterator<u8> for BytesMut {
fn from_iter<T: IntoIterator<Item = u8>>(into_iter: T) -> Self {
let iter = into_iter.into_iter();
let (min, maybe_max) = iter.size_hint();
let mut out = BytesMut::with_capacity(maybe_max.unwrap_or(min));
for i in iter {
out.reserve(1);
out.put_u8(i);
}
out
}
}
impl<'a> FromIterator<&'a u8> for BytesMut {
fn from_iter<T: IntoIterator<Item = &'a u8>>(into_iter: T) -> Self {
into_iter.into_iter().copied().collect::<BytesMut>()
}
}
impl Extend<u8> for BytesMut {
fn extend<T>(&mut self, iter: T)
where
T: IntoIterator<Item = u8>,
{
let iter = iter.into_iter();
let (lower, _) = iter.size_hint();
self.reserve(lower);
for b in iter {
self.put_u8(b);
}
}
}
impl<'a> Extend<&'a u8> for BytesMut {
fn extend<T>(&mut self, iter: T)
where
T: IntoIterator<Item = &'a u8>,
{
self.extend(iter.into_iter().copied())
}
}
impl BytesVec {
#[inline]
pub fn with_capacity(capacity: usize) -> BytesVec {
Self::with_capacity_in(capacity, PoolId::DEFAULT.pool_ref())
}
#[inline]
pub fn with_capacity_in<T>(capacity: usize, pool: T) -> BytesVec
where
PoolRef: From<T>,
{
BytesVec {
inner: InnerVec::with_capacity(capacity, pool.into()),
}
}
pub fn copy_from_slice<T: AsRef<[u8]>>(src: T) -> Self {
Self::copy_from_slice_in(src, PoolId::DEFAULT)
}
pub fn copy_from_slice_in<T, U>(src: T, pool: U) -> Self
where
T: AsRef<[u8]>,
PoolRef: From<U>,
{
let s = src.as_ref();
BytesVec {
inner: InnerVec::from_slice(s.len(), s, pool.into()),
}
}
#[inline]
pub fn new() -> BytesVec {
BytesVec::with_capacity(MIN_NON_ZERO_CAP)
}
#[inline]
pub fn len(&self) -> usize {
self.inner.len()
}
#[inline]
pub fn is_empty(&self) -> bool {
self.inner.len() == 0
}
#[inline]
pub fn capacity(&self) -> usize {
self.inner.capacity()
}
#[inline]
pub fn freeze(self) -> Bytes {
Bytes {
inner: self.inner.into_inner(),
}
}
pub fn split(&mut self) -> BytesMut {
self.split_to(self.len())
}
pub fn split_to(&mut self, at: usize) -> BytesMut {
assert!(at <= self.len());
BytesMut {
inner: self.inner.split_to(at, false),
}
}
pub fn truncate(&mut self, len: usize) {
self.inner.truncate(len);
}
pub fn clear(&mut self) {
self.truncate(0);
}
#[inline]
pub fn resize(&mut self, new_len: usize, value: u8) {
self.inner.resize(new_len, value);
}
#[inline]
#[allow(clippy::missing_safety_doc)]
pub unsafe fn set_len(&mut self, len: usize) {
self.inner.set_len(len)
}
#[inline]
pub fn reserve(&mut self, additional: usize) {
let len = self.len();
let rem = self.capacity() - len;
if additional <= rem {
return;
}
self.inner.reserve_inner(additional);
}
#[inline]
pub fn extend_from_slice(&mut self, extend: &[u8]) {
self.put_slice(extend);
}
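/// Temporarily exposes the contents as a `BytesMut`, runs the closure, and
/// installs the buffer the closure leaves behind back into `self`. The
/// contents are copied only when the resulting buffer no longer spans a
/// whole `SharedVec` allocation.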
#[inline]
pub fn with_bytes_mut<F, R>(&mut self, f: F) -> R
where
F: FnOnce(&mut BytesMut) -> R,
{
self.inner.with_bytes_mut(f)
}
#[inline]
pub fn iter(&'_ self) -> std::slice::Iter<'_, u8> {
self.chunk().iter()
}
pub(crate) fn move_to_pool(&mut self, pool: PoolRef) {
self.inner.move_to_pool(pool);
}
}
impl Buf for BytesVec {
#[inline]
fn remaining(&self) -> usize {
self.len()
}
#[inline]
fn chunk(&self) -> &[u8] {
self.inner.as_ref()
}
#[inline]
fn advance(&mut self, cnt: usize) {
assert!(
cnt <= self.inner.as_ref().len(),
"cannot advance past `remaining`"
);
unsafe {
self.inner.set_start(cnt as u32);
}
}
}
impl BufMut for BytesVec {
#[inline]
fn remaining_mut(&self) -> usize {
self.capacity() - self.len()
}
#[inline]
unsafe fn advance_mut(&mut self, cnt: usize) {
let new_len = self.len() + cnt;
self.inner.set_len(new_len);
}
#[inline]
fn chunk_mut(&mut self) -> &mut UninitSlice {
let len = self.len();
unsafe {
let ptr = &mut self.inner.as_raw()[len..];
UninitSlice::from_raw_parts_mut(ptr.as_mut_ptr(), self.capacity() - len)
}
}
#[inline]
fn put_slice(&mut self, src: &[u8]) {
let len = src.len();
self.reserve(len);
unsafe {
ptr::copy_nonoverlapping(
src.as_ptr(),
self.chunk_mut().as_mut_ptr() as *mut u8,
len,
);
self.advance_mut(len);
}
}
#[inline]
fn put_u8(&mut self, n: u8) {
self.reserve(1);
self.inner.put_u8(n);
}
#[inline]
fn put_i8(&mut self, n: i8) {
self.reserve(1);
self.put_u8(n as u8);
}
}
impl AsRef<[u8]> for BytesVec {
#[inline]
fn as_ref(&self) -> &[u8] {
self.inner.as_ref()
}
}
impl AsMut<[u8]> for BytesVec {
#[inline]
fn as_mut(&mut self) -> &mut [u8] {
self.inner.as_mut()
}
}
impl Deref for BytesVec {
type Target = [u8];
#[inline]
fn deref(&self) -> &[u8] {
self.as_ref()
}
}
impl DerefMut for BytesVec {
#[inline]
fn deref_mut(&mut self) -> &mut [u8] {
self.inner.as_mut()
}
}
impl Eq for BytesVec {}
impl PartialEq for BytesVec {
#[inline]
fn eq(&self, other: &BytesVec) -> bool {
self.inner.as_ref() == other.inner.as_ref()
}
}
impl Default for BytesVec {
#[inline]
fn default() -> BytesVec {
BytesVec::new()
}
}
impl Borrow<[u8]> for BytesVec {
#[inline]
fn borrow(&self) -> &[u8] {
self.as_ref()
}
}
impl BorrowMut<[u8]> for BytesVec {
#[inline]
fn borrow_mut(&mut self) -> &mut [u8] {
self.as_mut()
}
}
impl fmt::Debug for BytesVec {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&debug::BsDebug(self.inner.as_ref()), fmt)
}
}
impl fmt::Write for BytesVec {
#[inline]
fn write_str(&mut self, s: &str) -> fmt::Result {
if self.remaining_mut() >= s.len() {
self.put_slice(s.as_bytes());
Ok(())
} else {
Err(fmt::Error)
}
}
#[inline]
fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> fmt::Result {
fmt::write(self, args)
}
}
impl IntoIterator for BytesVec {
type Item = u8;
type IntoIter = IntoIter<BytesVec>;
fn into_iter(self) -> Self::IntoIter {
IntoIter::new(self)
}
}
impl<'a> IntoIterator for &'a BytesVec {
type Item = &'a u8;
type IntoIter = std::slice::Iter<'a, u8>;
fn into_iter(self) -> Self::IntoIter {
self.as_ref().iter()
}
}
impl FromIterator<u8> for BytesVec {
fn from_iter<T: IntoIterator<Item = u8>>(into_iter: T) -> Self {
let iter = into_iter.into_iter();
let (min, maybe_max) = iter.size_hint();
let mut out = BytesVec::with_capacity(maybe_max.unwrap_or(min));
for i in iter {
out.reserve(1);
out.put_u8(i);
}
out
}
}
impl<'a> FromIterator<&'a u8> for BytesVec {
fn from_iter<T: IntoIterator<Item = &'a u8>>(into_iter: T) -> Self {
into_iter.into_iter().copied().collect::<BytesVec>()
}
}
impl Extend<u8> for BytesVec {
fn extend<T>(&mut self, iter: T)
where
T: IntoIterator<Item = u8>,
{
let iter = iter.into_iter();
let (lower, _) = iter.size_hint();
self.reserve(lower);
for b in iter {
self.put_u8(b);
}
}
}
impl<'a> Extend<&'a u8> for BytesVec {
fn extend<T>(&mut self, iter: T)
where
T: IntoIterator<Item = &'a u8>,
{
self.extend(iter.into_iter().copied())
}
}
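// Single-pointer representation used by `BytesVec`: the allocation begins
// with a `SharedVec` header (capacity, length, offset, reference count and
// pool) followed immediately by the payload bytes.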
struct InnerVec(NonNull<SharedVec>);
impl InnerVec {
#[inline]
fn with_capacity(capacity: usize, pool: PoolRef) -> InnerVec {
Self::from_slice(capacity, &[], pool)
}
#[inline]
fn from_slice(cap: usize, src: &[u8], pool: PoolRef) -> InnerVec {
let mut vec_cap = (cap / SHARED_VEC_SIZE) + 1;
if cap % SHARED_VEC_SIZE != 0 {
vec_cap += 1;
}
let mut vec = Vec::<SharedVec>::with_capacity(vec_cap);
unsafe {
let len = src.len() as u32;
let cap = vec.capacity() * SHARED_VEC_SIZE;
let shared_ptr = vec.as_mut_ptr();
mem::forget(vec);
pool.acquire(cap);
let ptr = shared_ptr.add(1) as *mut u8;
if !src.is_empty() {
ptr::copy_nonoverlapping(src.as_ptr(), ptr, src.len());
}
ptr::write(
shared_ptr,
SharedVec {
len,
cap,
pool,
ref_count: AtomicUsize::new(1),
offset: SHARED_VEC_SIZE as u32,
},
);
InnerVec(NonNull::new_unchecked(shared_ptr))
}
}
#[inline]
fn move_to_pool(&mut self, pool: PoolRef) {
unsafe {
let inner = self.as_inner();
if pool != inner.pool {
pool.acquire(inner.cap);
let pool = mem::replace(&mut inner.pool, pool);
pool.release(inner.cap);
}
}
}
#[inline]
fn as_ref(&self) -> &[u8] {
unsafe { slice::from_raw_parts(self.as_ptr(), self.len()) }
}
#[inline]
fn as_mut(&mut self) -> &mut [u8] {
unsafe { slice::from_raw_parts_mut(self.as_ptr(), self.len()) }
}
#[inline]
unsafe fn as_raw(&mut self) -> &mut [u8] {
slice::from_raw_parts_mut(self.as_ptr(), self.capacity())
}
#[inline]
unsafe fn as_ptr(&self) -> *mut u8 {
(self.0.as_ptr() as *mut u8).add((*self.0.as_ptr()).offset as usize)
}
#[inline]
unsafe fn as_inner(&mut self) -> &mut SharedVec {
self.0.as_mut()
}
#[inline]
fn put_u8(&mut self, n: u8) {
unsafe {
let inner = self.as_inner();
let len = inner.len as usize;
assert!(len < (inner.cap - inner.offset as usize));
inner.len += 1;
*self.as_ptr().add(len) = n;
}
}
#[inline]
fn len(&self) -> usize {
unsafe { (*self.0.as_ptr()).len as usize }
}
#[inline]
unsafe fn set_len(&mut self, len: usize) {
let inner = self.as_inner();
assert!(len <= (inner.cap - inner.offset as usize) && len < u32::MAX as usize);
inner.len = len as u32;
}
#[inline]
fn capacity(&self) -> usize {
unsafe { (*self.0.as_ptr()).cap - (*self.0.as_ptr()).offset as usize }
}
fn into_inner(mut self) -> Inner {
unsafe {
let ptr = self.as_ptr();
if self.len() <= INLINE_CAP {
Inner::from_ptr_inline(ptr, self.len())
} else {
let inner = self.as_inner();
let inner = Inner {
ptr,
len: inner.len as usize,
cap: inner.cap - inner.offset as usize,
arc: NonNull::new_unchecked(
(self.0.as_ptr() as usize ^ KIND_VEC) as *mut Shared,
),
};
mem::forget(self);
inner
}
}
}
fn with_bytes_mut<F, R>(&mut self, f: F) -> R
where
F: FnOnce(&mut BytesMut) -> R,
{
unsafe {
let ptr = self.as_ptr();
let inner = self.as_inner();
let inner = Inner {
ptr,
len: inner.len as usize,
cap: inner.cap - inner.offset as usize,
arc: NonNull::new_unchecked(
(self.0.as_ptr() as usize ^ KIND_VEC) as *mut Shared,
),
};
let mut buf = BytesMut { inner };
let result = f(&mut buf);
let kind = buf.inner.kind();
let new_inner =
if kind == KIND_INLINE || kind == KIND_STATIC || kind == KIND_ARC {
InnerVec::from_slice(
buf.inner.capacity(),
buf.inner.as_ref(),
buf.inner.pool(),
)
} else if kind == KIND_VEC {
let ptr = buf.inner.shared_vec();
let offset = buf.inner.ptr as usize - ptr as usize;
if buf.inner.cap < (*ptr).cap - offset {
InnerVec::from_slice(
buf.inner.capacity(),
buf.inner.as_ref(),
buf.inner.pool(),
)
} else {
(*ptr).len = buf.len() as u32;
(*ptr).offset = offset as u32;
let inner = InnerVec(NonNull::new_unchecked(ptr));
mem::forget(buf);
inner
}
} else {
panic!()
};
let old = mem::replace(self, new_inner);
mem::forget(old);
result
}
}
fn split_to(&mut self, at: usize, create_inline: bool) -> Inner {
unsafe {
let ptr = self.as_ptr();
let other = if create_inline && at <= INLINE_CAP {
Inner::from_ptr_inline(ptr, at)
} else {
let inner = self.as_inner();
let old_size = inner.ref_count.fetch_add(1, Relaxed);
if old_size == usize::MAX {
abort();
}
Inner {
ptr,
len: at,
cap: at,
arc: NonNull::new_unchecked(
(self.0.as_ptr() as usize ^ KIND_VEC) as *mut Shared,
),
}
};
self.set_start(at as u32);
other
}
}
fn truncate(&mut self, len: usize) {
unsafe {
if len <= self.len() {
self.set_len(len);
}
}
}
fn resize(&mut self, new_len: usize, value: u8) {
let len = self.len();
if new_len > len {
let additional = new_len - len;
self.reserve(additional);
unsafe {
let dst = self.as_raw()[len..].as_mut_ptr();
ptr::write_bytes(dst, value, additional);
self.set_len(new_len);
}
} else {
self.truncate(new_len);
}
}
#[inline]
fn reserve(&mut self, additional: usize) {
let len = self.len();
let rem = self.capacity() - len;
if additional <= rem {
return;
}
self.reserve_inner(additional)
}
#[inline]
fn reserve_inner(&mut self, additional: usize) {
let len = self.len();
let new_cap = len + additional;
unsafe {
let inner = self.as_inner();
let vec_cap = inner.cap - SHARED_VEC_SIZE;
if inner.is_unique() && vec_cap >= new_cap {
let offset = inner.offset;
inner.offset = SHARED_VEC_SIZE as u32;
let src = (self.0.as_ptr() as *mut u8).add(offset as usize);
let dst = (self.0.as_ptr() as *mut u8).add(SHARED_VEC_SIZE);
ptr::copy(src, dst, len);
} else {
let pool = inner.pool;
*self = InnerVec::from_slice(new_cap, self.as_ref(), pool);
}
}
}
unsafe fn set_start(&mut self, start: u32) {
if start == 0 {
return;
}
let inner = self.as_inner();
assert!(start <= inner.cap as u32);
inner.offset += start;
if inner.len >= start {
inner.len -= start;
} else {
inner.len = 0;
}
}
}
impl Drop for InnerVec {
fn drop(&mut self) {
release_shared_vec(self.0.as_ptr());
}
}
impl Inner {
#[inline]
const fn from_static(bytes: &'static [u8]) -> Inner {
let ptr = bytes.as_ptr() as *mut u8;
Inner {
arc: unsafe { NonNull::new_unchecked(KIND_STATIC as *mut Shared) },
ptr,
len: bytes.len(),
cap: bytes.len(),
}
}
#[inline]
const fn empty_inline() -> Inner {
Inner {
arc: unsafe { NonNull::new_unchecked(KIND_INLINE as *mut Shared) },
ptr: 0 as *mut u8,
len: 0,
cap: 0,
}
}
#[inline]
fn from_vec(mut vec: Vec<u8>, pool: PoolRef) -> Inner {
let len = vec.len();
let cap = vec.capacity();
let ptr = vec.as_mut_ptr();
pool.acquire(cap);
let shared = Box::into_raw(Box::new(Shared {
vec,
pool,
ref_count: AtomicUsize::new(1),
}));
debug_assert!(0 == (shared as usize & KIND_MASK));
Inner {
ptr,
len,
cap,
arc: unsafe { NonNull::new_unchecked(shared) },
}
}
#[inline]
fn with_capacity(capacity: usize, pool: PoolRef) -> Inner {
Inner::from_slice(capacity, &[], pool)
}
#[inline]
fn from_slice(cap: usize, src: &[u8], pool: PoolRef) -> Inner {
let mut vec_cap = (cap / SHARED_VEC_SIZE) + 1;
if cap % SHARED_VEC_SIZE != 0 {
vec_cap += 1;
}
let mut vec = Vec::<SharedVec>::with_capacity(vec_cap);
unsafe {
let len = src.len();
let full_cap = vec.capacity() * SHARED_VEC_SIZE;
let cap = full_cap - SHARED_VEC_SIZE;
let shared_ptr = vec.as_mut_ptr();
mem::forget(vec);
pool.acquire(full_cap);
let ptr = shared_ptr.add(1) as *mut u8;
ptr::copy_nonoverlapping(src.as_ptr(), ptr, src.len());
ptr::write(
shared_ptr,
SharedVec {
pool,
cap: full_cap,
ref_count: AtomicUsize::new(1),
len: 0,
offset: 0,
},
);
Inner {
len,
cap,
ptr,
arc: NonNull::new_unchecked(
(shared_ptr as usize ^ KIND_VEC) as *mut Shared,
),
}
}
}
#[inline]
fn from_slice_inline(src: &[u8]) -> Inner {
unsafe { Inner::from_ptr_inline(src.as_ptr(), src.len()) }
}
#[inline]
unsafe fn from_ptr_inline(src: *const u8, len: usize) -> Inner {
#[allow(invalid_value, clippy::uninit_assumed_init)]
let mut inner: Inner = mem::MaybeUninit::uninit().assume_init();
inner.arc = NonNull::new_unchecked(KIND_INLINE as *mut Shared);
let dst = inner.inline_ptr();
ptr::copy(src, dst, len);
inner.set_inline_len(len);
inner
}
#[inline]
fn pool(&self) -> PoolRef {
let kind = self.kind();
if kind == KIND_VEC {
unsafe { (*self.shared_vec()).pool }
} else if kind == KIND_ARC {
unsafe { (*self.arc.as_ptr()).pool }
} else {
PoolId::DEFAULT.pool_ref()
}
}
#[inline]
fn move_to_pool(&mut self, pool: PoolRef) {
let kind = self.kind();
if kind == KIND_VEC {
let vec = self.shared_vec();
unsafe {
let cap = (*vec).cap;
pool.acquire(cap);
let pool = mem::replace(&mut (*vec).pool, pool);
pool.release(cap);
}
} else if kind == KIND_ARC {
let arc = self.arc.as_ptr();
unsafe {
let cap = (*arc).vec.capacity();
pool.acquire(cap);
let pool = mem::replace(&mut (*arc).pool, pool);
pool.release(cap);
}
}
}
#[inline]
fn as_ref(&self) -> &[u8] {
unsafe {
if self.is_inline() {
slice::from_raw_parts(self.inline_ptr(), self.inline_len())
} else {
slice::from_raw_parts(self.ptr, self.len)
}
}
}
#[inline]
fn as_mut(&mut self) -> &mut [u8] {
debug_assert!(!self.is_static());
unsafe {
if self.is_inline() {
slice::from_raw_parts_mut(self.inline_ptr(), self.inline_len())
} else {
slice::from_raw_parts_mut(self.ptr, self.len)
}
}
}
#[inline]
unsafe fn as_raw(&mut self) -> &mut [u8] {
debug_assert!(!self.is_static());
if self.is_inline() {
slice::from_raw_parts_mut(self.inline_ptr(), INLINE_CAP)
} else {
slice::from_raw_parts_mut(self.ptr, self.cap)
}
}
#[inline]
unsafe fn as_ptr(&mut self) -> *mut u8 {
if self.is_inline() {
self.inline_ptr()
} else {
self.ptr
}
}
#[inline]
fn put_u8(&mut self, n: u8) {
if self.is_inline() {
let len = self.inline_len();
assert!(len < INLINE_CAP);
unsafe {
*self.inline_ptr().add(len) = n;
}
self.set_inline_len(len + 1);
} else {
assert!(self.len < self.cap);
unsafe {
*self.ptr.add(self.len) = n;
}
self.len += 1;
}
}
#[inline]
fn len(&self) -> usize {
if self.is_inline() {
self.inline_len()
} else {
self.len
}
}
#[inline]
unsafe fn inline_ptr(&self) -> *mut u8 {
(self as *const Inner as *mut Inner as *mut u8).offset(INLINE_DATA_OFFSET)
}
#[inline]
fn to_inline(&self) -> Inner {
unsafe {
#[allow(invalid_value, clippy::uninit_assumed_init)]
let mut inner: Inner = mem::MaybeUninit::uninit().assume_init();
inner.arc = NonNull::new_unchecked(KIND_INLINE as *mut Shared);
let len = self.len();
inner.as_raw()[..len].copy_from_slice(self.as_ref());
inner.set_inline_len(len);
inner
}
}
#[inline]
fn inline_len(&self) -> usize {
(self.arc.as_ptr() as usize & INLINE_LEN_MASK) >> INLINE_LEN_OFFSET
}
#[inline]
fn set_inline_len(&mut self, len: usize) {
debug_assert!(len <= INLINE_CAP);
self.arc = unsafe {
NonNull::new_unchecked(
((self.arc.as_ptr() as usize & !INLINE_LEN_MASK)
| (len << INLINE_LEN_OFFSET)) as _,
)
};
}
#[inline]
unsafe fn set_len(&mut self, len: usize) {
if self.is_inline() {
assert!(len <= INLINE_CAP);
self.set_inline_len(len);
} else {
assert!(len <= self.cap);
self.len = len;
}
}
#[inline]
fn is_empty(&self) -> bool {
self.len() == 0
}
#[inline]
fn capacity(&self) -> usize {
if self.is_inline() {
INLINE_CAP
} else {
self.cap
}
}
fn split_off(&mut self, at: usize, create_inline: bool) -> Inner {
let other = unsafe {
if create_inline && self.len() - at <= INLINE_CAP {
Inner::from_ptr_inline(self.as_ptr().add(at), self.len() - at)
} else {
let mut other = self.shallow_clone();
other.set_start(at);
other
}
};
unsafe {
if create_inline && at <= INLINE_CAP {
*self = Inner::from_ptr_inline(self.as_ptr(), at);
} else {
self.set_end(at);
}
}
other
}
fn split_to(&mut self, at: usize, create_inline: bool) -> Inner {
let other = unsafe {
if create_inline && at <= INLINE_CAP {
Inner::from_ptr_inline(self.as_ptr(), at)
} else {
let mut other = self.shallow_clone();
other.set_end(at);
other
}
};
unsafe {
if create_inline && self.len() - at <= INLINE_CAP {
*self = Inner::from_ptr_inline(self.as_ptr().add(at), self.len() - at);
} else {
self.set_start(at);
}
}
other
}
fn truncate(&mut self, len: usize, create_inline: bool) {
unsafe {
if len <= self.len() {
if create_inline && len < INLINE_CAP {
*self = Inner::from_ptr_inline(self.as_ptr(), len);
} else {
self.set_len(len);
}
}
}
}
fn resize(&mut self, new_len: usize, value: u8) {
let len = self.len();
if new_len > len {
let additional = new_len - len;
self.reserve(additional);
unsafe {
let dst = self.as_raw()[len..].as_mut_ptr();
ptr::write_bytes(dst, value, additional);
self.set_len(new_len);
}
} else {
self.truncate(new_len, false);
}
}
unsafe fn set_start(&mut self, start: usize) {
if start == 0 {
return;
}
let kind = self.kind();
if kind == KIND_INLINE {
assert!(start <= INLINE_CAP);
let len = self.inline_len();
if len <= start {
self.set_inline_len(0);
} else {
let new_len = len - start;
let dst = self.inline_ptr();
let src = (dst as *const u8).add(start);
ptr::copy(src, dst, new_len);
self.set_inline_len(new_len);
}
} else {
assert!(start <= self.cap);
self.ptr = self.ptr.add(start);
if self.len >= start {
self.len -= start;
} else {
self.len = 0;
}
self.cap -= start;
}
}
unsafe fn set_end(&mut self, end: usize) {
if self.is_inline() {
assert!(end <= INLINE_CAP);
let new_len = cmp::min(self.inline_len(), end);
self.set_inline_len(new_len);
} else {
assert!(end <= self.cap);
self.cap = end;
self.len = cmp::min(self.len, end);
}
}
fn is_mut_safe(&self) -> bool {
let kind = self.kind();
if kind == KIND_INLINE {
true
} else if kind == KIND_STATIC {
false
} else if kind == KIND_VEC {
unsafe { (*self.shared_vec()).is_unique() }
} else {
unsafe { (*self.arc.as_ptr()).is_unique() }
}
}
unsafe fn shallow_clone(&self) -> Inner {
if self.is_inline_or_static() {
let mut inner: mem::MaybeUninit<Inner> = mem::MaybeUninit::uninit();
ptr::copy_nonoverlapping(self, inner.as_mut_ptr(), 1);
inner.assume_init()
} else {
self.shallow_clone_sync()
}
}
#[cold]
unsafe fn shallow_clone_sync(&self) -> Inner {
let arc: *mut Shared = self.arc.as_ptr();
let kind = arc as usize & KIND_MASK;
if kind == KIND_ARC {
let old_size = (*arc).ref_count.fetch_add(1, Relaxed);
if old_size == usize::MAX {
abort();
}
Inner {
arc: NonNull::new_unchecked(arc),
..*self
}
} else {
assert!(kind == KIND_VEC);
let vec_arc = (arc as usize & KIND_UNMASK) as *mut SharedVec;
let old_size = (*vec_arc).ref_count.fetch_add(1, Relaxed);
if old_size == usize::MAX {
abort();
}
Inner {
arc: NonNull::new_unchecked(arc),
..*self
}
}
}
#[inline]
fn reserve(&mut self, additional: usize) {
let len = self.len();
let rem = self.capacity() - len;
if additional <= rem {
return;
}
self.reserve_inner(additional)
}
#[inline]
fn reserve_inner(&mut self, additional: usize) {
let len = self.len();
let kind = self.kind();
if kind == KIND_INLINE {
let new_cap = len + additional;
*self = Inner::from_slice(new_cap, self.as_ref(), PoolId::DEFAULT.pool_ref());
return;
}
let new_cap = len + additional;
if kind == KIND_VEC {
let vec = self.shared_vec();
unsafe {
let vec_cap = (*vec).cap - SHARED_VEC_SIZE;
if (*vec).is_unique() && vec_cap >= new_cap {
let ptr = (vec as *mut u8).add(SHARED_VEC_SIZE);
ptr::copy(self.ptr, ptr, len);
self.ptr = ptr;
self.cap = vec_cap;
} else {
*self = Inner::from_slice(new_cap, self.as_ref(), (*vec).pool);
}
}
} else {
debug_assert!(kind == KIND_ARC);
let arc = self.arc.as_ptr();
unsafe {
if (*arc).is_unique() {
let v = &mut (*arc).vec;
if v.capacity() >= new_cap {
let ptr = v.as_mut_ptr();
ptr::copy(self.ptr, ptr, len);
self.ptr = ptr;
self.cap = v.capacity();
return;
}
}
*self = Inner::from_slice(new_cap, self.as_ref(), (*arc).pool);
}
}
}
#[inline]
fn is_inline(&self) -> bool {
self.kind() == KIND_INLINE
}
#[inline]
fn is_inline_or_static(&self) -> bool {
let kind = self.kind();
kind == KIND_INLINE || kind == KIND_STATIC
}
#[inline]
fn is_static(&self) -> bool {
matches!(self.kind(), KIND_STATIC)
}
#[inline]
fn shared_vec(&self) -> *mut SharedVec {
((self.arc.as_ptr() as usize) & KIND_UNMASK) as *mut SharedVec
}
#[inline]
fn kind(&self) -> usize {
#[cfg(target_endian = "little")]
#[inline]
fn imp(arc: *mut Shared) -> usize {
(arc as usize) & KIND_MASK
}
#[cfg(target_endian = "big")]
#[inline]
fn imp(arc: *mut Shared) -> usize {
// Read the tag from the pointer's own bit pattern; never dereference it,
// since for the inline and static kinds `arc` is not a valid pointer.
unsafe {
let p: *const usize = &arc as *const *mut Shared as *const usize;
*p & KIND_MASK
}
}
imp(self.arc.as_ptr())
}
}
impl Drop for Inner {
fn drop(&mut self) {
let kind = self.kind();
if kind == KIND_VEC {
release_shared_vec(self.shared_vec());
} else if kind == KIND_ARC {
release_shared(self.arc.as_ptr());
}
}
}
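// Drop path for `Shared`-backed buffers: the last owner (detected via a
// `Release` decrement followed by an `Acquire` fence, as in `Arc`) frees the
// allocation and returns its capacity to the pool.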
fn release_shared(ptr: *mut Shared) {
unsafe {
if (*ptr).ref_count.fetch_sub(1, Release) != 1 {
return;
}
atomic::fence(Acquire);
let arc = Box::from_raw(ptr);
arc.pool.release(arc.vec.capacity());
}
}
fn release_shared_vec(ptr: *mut SharedVec) {
unsafe {
if (*ptr).ref_count.fetch_sub(1, Release) != 1 {
return;
}
atomic::fence(Acquire);
let cap = (*ptr).cap;
(*ptr).pool.release(cap);
ptr::drop_in_place(ptr);
Vec::<u8>::from_raw_parts(ptr as *mut u8, 0, cap);
}
}
impl Shared {
fn is_unique(&self) -> bool {
self.ref_count.load(Acquire) == 1
}
}
impl SharedVec {
fn is_unique(&self) -> bool {
self.ref_count.load(Acquire) == 1
}
}
unsafe impl Send for Inner {}
unsafe impl Sync for Inner {}
impl PartialEq<[u8]> for BytesMut {
fn eq(&self, other: &[u8]) -> bool {
&**self == other
}
}
impl PartialEq<BytesMut> for [u8] {
fn eq(&self, other: &BytesMut) -> bool {
*other == *self
}
}
impl PartialEq<str> for BytesMut {
fn eq(&self, other: &str) -> bool {
&**self == other.as_bytes()
}
}
impl PartialEq<BytesMut> for str {
fn eq(&self, other: &BytesMut) -> bool {
*other == *self
}
}
impl PartialEq<Vec<u8>> for BytesMut {
fn eq(&self, other: &Vec<u8>) -> bool {
*self == other[..]
}
}
impl PartialEq<BytesMut> for Vec<u8> {
fn eq(&self, other: &BytesMut) -> bool {
*other == *self
}
}
impl PartialEq<String> for BytesMut {
fn eq(&self, other: &String) -> bool {
*self == other[..]
}
}
impl PartialEq<BytesMut> for String {
fn eq(&self, other: &BytesMut) -> bool {
*other == *self
}
}
impl<'a, T: ?Sized> PartialEq<&'a T> for BytesMut
where
BytesMut: PartialEq<T>,
{
fn eq(&self, other: &&'a T) -> bool {
*self == **other
}
}
impl PartialEq<BytesMut> for &[u8] {
fn eq(&self, other: &BytesMut) -> bool {
*other == *self
}
}
impl PartialEq<BytesMut> for &str {
fn eq(&self, other: &BytesMut) -> bool {
*other == *self
}
}
impl PartialEq<[u8]> for Bytes {
fn eq(&self, other: &[u8]) -> bool {
self.inner.as_ref() == other
}
}
impl PartialOrd<[u8]> for Bytes {
fn partial_cmp(&self, other: &[u8]) -> Option<cmp::Ordering> {
self.inner.as_ref().partial_cmp(other)
}
}
impl PartialEq<Bytes> for [u8] {
fn eq(&self, other: &Bytes) -> bool {
*other == *self
}
}
impl PartialOrd<Bytes> for [u8] {
fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
other.partial_cmp(self)
}
}
impl PartialEq<str> for Bytes {
fn eq(&self, other: &str) -> bool {
self.inner.as_ref() == other.as_bytes()
}
}
impl PartialOrd<str> for Bytes {
fn partial_cmp(&self, other: &str) -> Option<cmp::Ordering> {
self.inner.as_ref().partial_cmp(other.as_bytes())
}
}
impl PartialEq<Bytes> for str {
fn eq(&self, other: &Bytes) -> bool {
*other == *self
}
}
impl PartialOrd<Bytes> for str {
fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
other.partial_cmp(self)
}
}
impl PartialEq<Vec<u8>> for Bytes {
fn eq(&self, other: &Vec<u8>) -> bool {
*self == other[..]
}
}
impl PartialOrd<Vec<u8>> for Bytes {
fn partial_cmp(&self, other: &Vec<u8>) -> Option<cmp::Ordering> {
self.inner.as_ref().partial_cmp(&other[..])
}
}
impl PartialEq<Bytes> for Vec<u8> {
fn eq(&self, other: &Bytes) -> bool {
*other == *self
}
}
impl PartialOrd<Bytes> for Vec<u8> {
fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
other.partial_cmp(self)
}
}
impl PartialEq<String> for Bytes {
fn eq(&self, other: &String) -> bool {
*self == other[..]
}
}
impl PartialOrd<String> for Bytes {
fn partial_cmp(&self, other: &String) -> Option<cmp::Ordering> {
self.inner.as_ref().partial_cmp(other.as_bytes())
}
}
impl PartialEq<Bytes> for String {
fn eq(&self, other: &Bytes) -> bool {
*other == *self
}
}
impl PartialOrd<Bytes> for String {
fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
other.partial_cmp(self)
}
}
impl PartialEq<Bytes> for &[u8] {
fn eq(&self, other: &Bytes) -> bool {
*other == *self
}
}
impl PartialOrd<Bytes> for &[u8] {
fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
other.partial_cmp(self)
}
}
impl PartialEq<Bytes> for &str {
fn eq(&self, other: &Bytes) -> bool {
*other == *self
}
}
impl PartialOrd<Bytes> for &str {
fn partial_cmp(&self, other: &Bytes) -> Option<cmp::Ordering> {
other.partial_cmp(self)
}
}
impl<'a, T: ?Sized> PartialEq<&'a T> for Bytes
where
Bytes: PartialEq<T>,
{
fn eq(&self, other: &&'a T) -> bool {
*self == **other
}
}
impl From<BytesVec> for Bytes {
fn from(b: BytesVec) -> Self {
b.freeze()
}
}
impl<'a, T: ?Sized> PartialOrd<&'a T> for Bytes
where
Bytes: PartialOrd<T>,
{
fn partial_cmp(&self, other: &&'a T) -> Option<cmp::Ordering> {
self.partial_cmp(&**other)
}
}
impl PartialEq<BytesMut> for Bytes {
fn eq(&self, other: &BytesMut) -> bool {
other[..] == self[..]
}
}
impl PartialEq<BytesVec> for Bytes {
fn eq(&self, other: &BytesVec) -> bool {
other[..] == self[..]
}
}
impl PartialEq<Bytes> for BytesVec {
fn eq(&self, other: &Bytes) -> bool {
other[..] == self[..]
}
}
impl PartialEq<Bytes> for BytesMut {
fn eq(&self, other: &Bytes) -> bool {
other[..] == self[..]
}
}
impl PartialEq<BytesMut> for BytesVec {
fn eq(&self, other: &BytesMut) -> bool {
other[..] == self[..]
}
}
impl PartialEq<BytesVec> for BytesMut {
fn eq(&self, other: &BytesVec) -> bool {
other[..] == self[..]
}
}
impl PartialEq<[u8]> for BytesVec {
fn eq(&self, other: &[u8]) -> bool {
&**self == other
}
}
impl PartialEq<BytesVec> for [u8] {
fn eq(&self, other: &BytesVec) -> bool {
*other == *self
}
}
impl PartialEq<str> for BytesVec {
fn eq(&self, other: &str) -> bool {
&**self == other.as_bytes()
}
}
impl PartialEq<BytesVec> for str {
fn eq(&self, other: &BytesVec) -> bool {
*other == *self
}
}
impl PartialEq<Vec<u8>> for BytesVec {
fn eq(&self, other: &Vec<u8>) -> bool {
*self == other[..]
}
}
impl PartialEq<BytesVec> for Vec<u8> {
fn eq(&self, other: &BytesVec) -> bool {
*other == *self
}
}
impl PartialEq<String> for BytesVec {
fn eq(&self, other: &String) -> bool {
*self == other[..]
}
}
impl PartialEq<BytesVec> for String {
fn eq(&self, other: &BytesVec) -> bool {
*other == *self
}
}
impl<'a, T: ?Sized> PartialEq<&'a T> for BytesVec
where
BytesVec: PartialEq<T>,
{
fn eq(&self, other: &&'a T) -> bool {
*self == **other
}
}
impl PartialEq<BytesVec> for &[u8] {
fn eq(&self, other: &BytesVec) -> bool {
*other == *self
}
}
impl PartialEq<BytesVec> for &str {
fn eq(&self, other: &BytesVec) -> bool {
*other == *self
}
}
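// Used on reference-count overflow: panicking while the `Abort` guard is
// alive triggers a second panic from its `Drop` impl, which aborts the
// process.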
struct Abort;
impl Drop for Abort {
fn drop(&mut self) {
panic!();
}
}
#[inline(never)]
#[cold]
fn abort() {
let _a = Abort;
panic!();
}
#[cfg(test)]
mod tests {
use std::convert::TryFrom;
use super::*;
const LONG: &[u8] =
b"mary had a little lamb, little lamb, little lamb, little lamb, little lamb, little lamb \
mary had a little lamb, little lamb, little lamb, little lamb, little lamb, little lamb \
mary had a little lamb, little lamb, little lamb, little lamb, little lamb, little lamb";
#[test]
fn trimdown() {
let mut b = Bytes::from(LONG.to_vec());
assert_eq!(b.inner.capacity(), 263);
unsafe { b.inner.set_len(68) };
assert_eq!(b.len(), 68);
assert_eq!(b.inner.capacity(), 263);
b.trimdown();
assert_eq!(b.inner.capacity(), 96);
unsafe { b.inner.set_len(16) };
b.trimdown();
assert!(b.is_inline());
}
#[test]
fn bytes() {
let mut b = Bytes::from(LONG.to_vec());
b.clear();
assert!(b.is_inline());
assert!(b.is_empty());
assert!(b.len() == 0);
let b = Bytes::from(BytesMut::from(LONG));
assert_eq!(b, LONG);
let b = BytesMut::try_from(b).unwrap();
assert_eq!(b, LONG);
}
#[test]
fn bytes_vec() {
let bv = BytesVec::copy_from_slice(&LONG[..]);
assert_eq!(bv.capacity(), mem::size_of::<SharedVec>() * 9);
assert_eq!(bv.len(), 263);
assert_eq!(bv.as_ref().len(), 263);
assert_eq!(bv.as_ref(), &LONG[..]);
let mut bv = BytesVec::copy_from_slice(&b"hello"[..]);
assert_eq!(bv.capacity(), mem::size_of::<SharedVec>());
assert_eq!(bv.len(), 5);
assert_eq!(bv.as_ref().len(), 5);
assert_eq!(bv.as_ref()[0], b"h"[0]);
bv.put_u8(b" "[0]);
assert_eq!(bv.as_ref(), &b"hello "[..]);
bv.put("world");
assert_eq!(bv, "hello world");
let b = Bytes::from(bv);
assert_eq!(b, "hello world");
let mut b = BytesMut::try_from(b).unwrap();
b.put(".");
assert_eq!(b, "hello world.");
}
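// The two tests below are small usage sketches added for illustration; they
// are not part of the original test suite. They rely only on the API defined
// in this file and assume the default pool (`PoolId::DEFAULT`), like the
// tests above.
#[test]
fn split_freeze_slice_sketch() {
let mut buf = BytesMut::with_capacity(64);
buf.put_slice(b"hello world");
// `split_to` keeps the tail in `buf` and returns the head.
let head = buf.split_to(5);
assert_eq!(head, "hello");
assert_eq!(buf, " world");
// Freezing a short buffer produces an inline `Bytes`.
let b = buf.freeze();
assert!(b.is_inline());
assert_eq!(b, " world");
// Slicing a short range copies into another inline handle.
assert_eq!(b.slice(1..6), "world");
}
#[test]
fn bytes_vec_with_bytes_mut_sketch() {
let mut bv = BytesVec::copy_from_slice(&b"hello"[..]);
// Mutate through a temporary `BytesMut` view; the closure's buffer is
// installed back into `bv` when it returns.
bv.with_bytes_mut(|buf| buf.extend_from_slice(b" world"));
assert_eq!(bv, "hello world");
}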
}