use crate::alloc::alloc::{self, Layout, LayoutError};
use std::sync::atomic::Ordering::{Acquire, Relaxed, Release};
use std::sync::atomic::{self, AtomicU32};
use std::{cmp, mem, num::NonZeroUsize, ptr, ptr::NonNull, slice};
use crate::{info::Info, info::Kind};
#[cfg(target_endian = "little")]
#[repr(C)]
pub(crate) struct Storage {
    // Tag word first on little-endian: the low byte of `offset` (kind tag
    // in bits 0..2 plus inline length in bits 2..8) sits at struct byte 0,
    // so inline data starts at byte 1 (`INLINE_DATA_OFFSET == 1`).
    offset: NonZeroUsize,
    // Heap/static variants: pointer to the first stored byte.
    ptr: *mut u8,
    // Heap/static variants: number of stored bytes.
    len: usize,
}
#[cfg(target_endian = "big")]
#[repr(C)]
pub(crate) struct Storage {
    // Tag word last on big-endian: the least-significant byte of `offset`
    // (kind tag + inline length) is then the struct's final byte, so
    // inline data starts at byte 0 (`INLINE_DATA_OFFSET == 0`).
    len: usize,
    ptr: *mut u8,
    offset: NonZeroUsize,
}
/// Header written at the front of every refcounted heap buffer; the data
/// bytes follow immediately after it (see `METADATA_SIZE`).
#[derive(Debug)]
struct SharedVec {
    // Byte offset of the data start, measured from the allocation start
    // (initially METADATA_SIZE; grows as the buffer is advanced).
    offset: u32,
    // Number of initialized data bytes.
    len: u32,
    // Usable data bytes from `offset` onward (len + remaining).
    capacity: u32,
    // Writable bytes past `len`.
    remaining: u32,
    // Arc-style shared ownership count.
    ref_count: AtomicU32,
}
/// Mutable handle to a `SharedVec` allocation (unique owner until
/// `freeze`/`split_to` share the refcount).
pub(crate) struct StorageVec(NonNull<SharedVec>);
// Kind tags stored in the low two bits of `Storage::offset`.
const KIND_VEC: usize = 0b01;
const KIND_INLINE: usize = 0b10;
const KIND_STATIC: usize = 0b11;
const KIND_MASK: usize = 0b11;
// Number of low bits reserved for the kind tag.
const KIND_OFFSET_BITS: usize = 2;
// Size of the `SharedVec` header preceding the data bytes.
pub const METADATA_SIZE: usize = mem::size_of::<SharedVec>();
const METADATA_SIZE_U32: u32 = METADATA_SIZE as u32;
pub(crate) const MIN_CAPACITY: usize = 128 - crate::METADATA_SIZE;
// For inline storage, bits 2..8 of `offset` hold the length.
const INLINE_LEN_MASK: usize = 0b1111_1100;
// Inline data skips the tag byte: on little-endian `offset` is the first
// field and its low (tag) byte is struct byte 0; on big-endian the tag
// byte ends up as the struct's last byte, so data starts at byte 0.
#[cfg(target_endian = "little")]
const INLINE_DATA_OFFSET: isize = 1;
#[cfg(target_endian = "big")]
const INLINE_DATA_OFFSET: isize = 0;
// Inline capacity: the whole three-word struct minus the one tag byte.
#[cfg(target_pointer_width = "64")]
pub(crate) const INLINE_CAP: usize = 3 * 8 - 1;
#[cfg(target_pointer_width = "32")]
pub(crate) const INLINE_CAP: usize = 3 * 4 - 1;
// Tag-only `offset` values for empty inline and static storages.
const PTR_INLINE: NonZeroUsize = NonZeroUsize::new(KIND_INLINE).unwrap();
const PTR_STATIC: NonZeroUsize = NonZeroUsize::new(KIND_STATIC).unwrap();
// `offset` word for a freshly created vec storage: data begins right
// after the header, tagged KIND_VEC.
// NOTE(review): "DEFAUILT" is a typo, but the name is referenced
// elsewhere in this file so it is kept as-is here.
const DEFAUILT_OFFSET: NonZeroUsize =
    NonZeroUsize::new((METADATA_SIZE << KIND_OFFSET_BITS) ^ KIND_VEC).unwrap();
impl Storage {
    /// Returns an empty inline storage; no heap allocation is performed.
    #[inline]
    pub(crate) const fn empty() -> Storage {
        Storage {
            ptr: ptr::null_mut(),
            len: 0,
            offset: PTR_INLINE,
        }
    }

    /// Wraps a `'static` slice without copying. `offset` carries only the
    /// `KIND_STATIC` tag; `ptr`/`len` point straight at the slice.
    #[inline]
    pub(crate) const fn from_static(bytes: &'static [u8]) -> Storage {
        let ptr = bytes.as_ptr().cast_mut();
        Storage {
            ptr,
            len: bytes.len(),
            offset: PTR_STATIC,
        }
    }

    /// Copies `src` into a new storage: inline when it fits in
    /// `INLINE_CAP` bytes, otherwise into a refcounted heap buffer.
    #[inline]
    pub(crate) fn from_slice(src: &[u8]) -> Storage {
        if src.len() <= INLINE_CAP {
            // SAFETY: `src` is valid for `src.len()` reads and fits inline.
            unsafe { Storage::from_ptr_inline(src.as_ptr(), src.len()) }
        } else {
            Storage::from_slice_with_capacity(src.len(), src)
        }
    }

    /// Copies `src` into a freshly allocated shared vec with room for at
    /// least `cap` bytes.
    #[inline]
    fn from_slice_with_capacity(cap: usize, src: &[u8]) -> Storage {
        unsafe {
            let shared = SharedVec::create(cap, src);
            Storage {
                len: src.len(),
                // Data begins immediately after the `SharedVec` header.
                ptr: shared.as_ptr().add(1).cast::<u8>(),
                offset: DEFAUILT_OFFSET,
            }
        }
    }

    /// Builds an inline storage by copying `len` bytes from `src` into a
    /// fresh struct's own bytes.
    ///
    /// # Safety
    /// `src` must be valid for `len` reads and `len <= INLINE_CAP`.
    /// `src` may point into an existing `Storage` (see `split_to`,
    /// `truncate`); the copy targets the new stack value, so `ptr::copy`
    /// is safe either way.
    #[inline]
    unsafe fn from_ptr_inline(src: *const u8, len: usize) -> Storage {
        let mut st = Storage {
            ptr: ptr::null_mut(),
            len: 0,
            offset: PTR_INLINE,
        };
        let dst = st.inline_ptr();
        ptr::copy(src, dst, len);
        st.set_inline_len(len);
        st
    }

    /// Views the stored bytes as a slice, for any storage kind.
    #[inline]
    pub(crate) fn as_ref(&self) -> &[u8] {
        unsafe {
            if self.kind() == KIND_INLINE {
                slice::from_raw_parts(self.inline_ptr_ro(), self.inline_len())
            } else {
                slice::from_raw_parts(self.ptr, self.len)
            }
        }
    }

    /// Pointer to the first stored byte, for any storage kind.
    #[inline]
    unsafe fn as_ptr(&mut self) -> *mut u8 {
        unsafe {
            if self.kind() == KIND_INLINE {
                self.inline_ptr()
            } else {
                self.ptr
            }
        }
    }

    /// Number of stored bytes.
    #[inline]
    pub(crate) fn len(&self) -> usize {
        if self.kind() == KIND_INLINE {
            self.inline_len()
        } else {
            self.len
        }
    }

    /// Mutable pointer to the inline data region inside `self`.
    #[inline]
    unsafe fn inline_ptr(&mut self) -> *mut u8 {
        (ptr::from_mut::<Storage>(self).cast::<u8>()).offset(INLINE_DATA_OFFSET)
    }

    /// Read-only pointer to the inline data region inside `self`.
    #[inline]
    unsafe fn inline_ptr_ro(&self) -> *const u8 {
        ptr::from_ref::<Storage>(self)
            .cast::<u8>()
            .offset(INLINE_DATA_OFFSET)
    }

    /// Inline length, stored in bits 2..8 of `offset`.
    #[inline]
    fn inline_len(&self) -> usize {
        (self.offset.get() & INLINE_LEN_MASK) >> KIND_OFFSET_BITS
    }

    #[inline]
    pub(crate) fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Capacity of the storage: the shared vec's capacity, `INLINE_CAP`
    /// for inline, or the current length for static slices.
    #[inline]
    pub(crate) fn capacity(&self) -> usize {
        let kind = self.kind();
        match kind {
            KIND_VEC => unsafe { (*self.shared_vec()).capacity() },
            KIND_INLINE => INLINE_CAP,
            _ => self.len,
        }
    }

    /// Splits the storage at `at`, returning the `at..` tail and keeping
    /// `..at` in `self`. With `create_inline`, sides small enough become
    /// independent inline copies instead of sharing the allocation.
    pub(crate) fn split_off(&mut self, at: usize, create_inline: bool) -> Storage {
        let other = unsafe {
            if create_inline && self.len() - at <= INLINE_CAP {
                Storage::from_ptr_inline(self.as_ptr().add(at), self.len() - at)
            } else {
                let mut other = self.shallow_clone();
                other.set_start(at);
                other
            }
        };
        unsafe {
            if create_inline && at <= INLINE_CAP {
                *self = Storage::from_ptr_inline(self.as_ptr(), at);
            } else {
                self.set_end(at);
            }
        }
        other
    }

    /// Splits off the `..at` head (inline when it fits), advancing `self`
    /// past it.
    pub(crate) fn split_to(&mut self, at: usize) -> Storage {
        let other = unsafe {
            if at <= INLINE_CAP {
                Storage::from_ptr_inline(self.as_ptr(), at)
            } else {
                let mut other = self.shallow_clone();
                other.set_end(at);
                other
            }
        };
        unsafe {
            self.set_start(at);
        }
        other
    }

    /// Shortens the storage to `len` bytes; a no-op when `len` exceeds
    /// the current length.
    pub(crate) fn truncate(&mut self, len: usize, create_inline: bool) {
        unsafe {
            if len <= self.len() {
                // NOTE(review): this uses `len < INLINE_CAP` where the
                // split methods use `<=`, so `len == INLINE_CAP` stays
                // non-inline here. Kept as-is to preserve behavior.
                if create_inline && len < INLINE_CAP {
                    *self = Storage::from_ptr_inline(self.as_ptr(), len);
                } else {
                    self.set_len(len);
                }
            }
        }
    }

    /// Reduces memory usage: small contents become inline; otherwise, if
    /// at least 64 spare bytes exist, reallocates to an exact-fit buffer.
    pub(crate) fn trimdown(&mut self) {
        let kind = self.kind();
        if !(kind == KIND_INLINE || kind == KIND_STATIC) {
            if self.len() <= INLINE_CAP {
                *self = unsafe { Storage::from_ptr_inline(self.as_ptr(), self.len()) };
            } else if self.capacity() - self.len() >= 64 {
                *self = Storage::from_slice_with_capacity(self.len(), self.as_ref());
            }
        }
    }

    /// Sets the length.
    ///
    /// # Safety
    /// For vec storage, bytes up to `len` must be initialized; `len` may
    /// not exceed the capacity (vec) or current length (static).
    #[inline]
    pub(crate) unsafe fn set_len(&mut self, len: usize) {
        let kind = self.kind();
        match kind {
            KIND_VEC => {
                assert!(len <= self.capacity());
                self.len = len;
            }
            KIND_INLINE => self.set_inline_len(len),
            _ => {
                assert!(len <= self.len);
                self.len = len;
            }
        }
    }

    /// Writes `len` into the inline length bits of `offset`, preserving
    /// the kind tag.
    #[inline]
    fn set_inline_len(&mut self, len: usize) {
        debug_assert!(len <= INLINE_CAP);
        // SAFETY: the kind bits are preserved and are non-zero, so the
        // resulting value is non-zero.
        self.offset = unsafe {
            NonZeroUsize::new_unchecked(
                self.offset.get() & !INLINE_LEN_MASK | (len << KIND_OFFSET_BITS),
            )
        };
    }

    /// Advances the view past the first `start` bytes.
    ///
    /// # Safety
    /// `start` must lie within the underlying allocation.
    pub(crate) unsafe fn set_start(&mut self, start: usize) {
        if start == 0 {
            return;
        }
        match self.kind() {
            KIND_VEC => {
                // `offset` (above the tag bits) is the data offset from
                // the SharedVec header; rebuild ptr and tag word.
                let shared = self.shared_vec();
                let offset = (self.offset.get() >> KIND_OFFSET_BITS) + start;
                self.ptr = (shared.cast::<u8>()).add(offset);
                if self.len >= start {
                    self.len -= start;
                } else {
                    self.len = 0;
                }
                self.offset =
                    NonZeroUsize::new_unchecked((offset << KIND_OFFSET_BITS) ^ KIND_VEC);
            }
            KIND_INLINE => {
                assert!(start <= INLINE_CAP);
                let len = self.inline_len();
                if len <= start {
                    self.set_inline_len(0);
                } else {
                    // Shift the surviving bytes to the front (overlapping
                    // copy, hence `ptr::copy`).
                    let new_len = len - start;
                    let dst = self.inline_ptr();
                    let src = (dst.cast_const()).add(start);
                    ptr::copy(src, dst, new_len);
                    self.set_inline_len(new_len);
                }
            }
            _ => {
                assert!(start <= self.len);
                self.len -= start;
                self.ptr = self.ptr.add(start);
            }
        }
    }

    /// Clamps the view to the first `end` bytes.
    ///
    /// # Safety
    /// `end` must lie within the underlying allocation.
    pub(crate) unsafe fn set_end(&mut self, end: usize) {
        match self.kind() {
            KIND_VEC => {
                self.len = cmp::min(self.len, end);
            }
            KIND_INLINE => {
                assert!(end <= INLINE_CAP);
                let new_len = cmp::min(self.inline_len(), end);
                self.set_inline_len(new_len);
            }
            _ => {
                assert!(end <= self.len);
                self.len = end;
            }
        }
    }

    /// Cheap clone: inline/static storage is bit-copied; vec storage
    /// bumps the shared refcount (aborting on overflow, as `Arc` does).
    #[inline]
    unsafe fn shallow_clone(&self) -> Storage {
        let kind = self.kind();
        if kind == KIND_INLINE || kind == KIND_STATIC {
            let mut inner: mem::MaybeUninit<Storage> = mem::MaybeUninit::uninit();
            ptr::copy_nonoverlapping(self, inner.as_mut_ptr(), 1);
            inner.assume_init()
        } else {
            let shared = self.shared_vec();
            let ref_cnt = (*shared).ref_count.fetch_add(1, Relaxed);
            if ref_cnt == u32::MAX {
                abort();
            }
            Storage { ..*self }
        }
    }

    #[inline]
    pub(crate) fn is_inline(&self) -> bool {
        self.kind() == KIND_INLINE
    }

    /// Recovers the `SharedVec` header pointer from `ptr` and the data
    /// offset encoded in `offset` (valid for KIND_VEC only).
    #[inline]
    fn shared_vec(&self) -> *mut SharedVec {
        let offset = self.offset.get() >> KIND_OFFSET_BITS;
        #[allow(clippy::cast_ptr_alignment)]
        unsafe {
            self.ptr.sub(offset).cast::<SharedVec>()
        }
    }

    /// Returns the kind tag (`KIND_VEC` / `KIND_INLINE` / `KIND_STATIC`).
    #[inline]
    fn kind(&self) -> usize {
        // The tag occupies the low `KIND_OFFSET_BITS` bits of the integer
        // *value* of `offset` (every writer uses value-level bit ops), and
        // masking an integer is endianness-independent.
        //
        // Fixed: the previous big-endian variant cast the offset value to
        // a `*const u8` and dereferenced it, i.e. it treated the tag word
        // as a memory address — undefined behavior on big-endian targets.
        self.offset.get() & KIND_MASK
    }

    /// Debug/introspection snapshot of the underlying allocation.
    pub(crate) fn info(&self) -> Info {
        let kind = self.kind();
        let (id, refs, capacity) = unsafe {
            if kind == KIND_VEC {
                let ptr = self.shared_vec();
                (
                    ptr as usize,
                    (*ptr).ref_count.load(Relaxed),
                    (*ptr).offset as usize
                        + (*ptr).len as usize
                        + (*ptr).remaining as usize,
                )
            } else {
                (0, 0, 0)
            }
        };
        Info {
            id,
            refs,
            capacity,
            kind: Kind::from_raw(kind),
        }
    }
}
// SAFETY: the vec variant shares its buffer through an atomic refcount and
// exposes bytes only via `&self` reads; inline/static variants carry plain
// data. NOTE(review): all mutation paths require `&mut self` — confirm no
// interior mutability is added later.
unsafe impl Send for Storage {}
unsafe impl Sync for Storage {}
impl Clone for Storage {
fn clone(&self) -> Storage {
unsafe { self.shallow_clone() }
}
}
impl Drop for Storage {
    fn drop(&mut self) {
        // Inline and static variants own no allocation — nothing to do.
        if self.kind() != KIND_VEC {
            return;
        }
        // Vec-backed: drop this reference; frees when it was the last one.
        release_shared_vec(self.shared_vec());
    }
}
impl StorageVec {
    /// Allocates an empty buffer with room for `capacity` bytes.
    pub(crate) fn with_capacity(capacity: usize) -> StorageVec {
        StorageVec(SharedVec::create(capacity, &[]))
    }

    /// Allocates a buffer with room for `capacity` bytes and copies `src`
    /// into its start.
    pub(crate) fn from_slice(capacity: usize, src: &[u8]) -> StorageVec {
        StorageVec(SharedVec::create(capacity, src))
    }

    /// Initialized bytes as a shared slice.
    pub(crate) fn as_ref(&self) -> &[u8] {
        unsafe { slice::from_raw_parts(self.as_ptr(), self.len()) }
    }

    /// Initialized bytes as a mutable slice.
    pub(crate) fn as_mut(&mut self) -> &mut [u8] {
        unsafe { slice::from_raw_parts_mut(self.as_ptr(), self.len()) }
    }

    /// Whole usable buffer, including the tail past `len()`.
    ///
    /// # Safety
    /// Bytes past `len()` may be uninitialized; callers must write them
    /// before reading.
    pub(crate) unsafe fn as_raw(&mut self) -> &mut [u8] {
        slice::from_raw_parts_mut(self.as_ptr(), self.capacity())
    }

    /// Pointer to the first data byte: `offset` bytes past the header.
    pub(crate) unsafe fn as_ptr(&self) -> *mut u8 {
        (self.0.as_ptr().cast::<u8>()).add((*self.0.as_ptr()).offset as usize)
    }

    /// Mutable access to the shared header.
    unsafe fn as_inner(&mut self) -> &mut SharedVec {
        self.0.as_mut()
    }

    /// Appends a single byte.
    ///
    /// NOTE(review): assumes at least one byte of remaining capacity
    /// (`remaining -= 1` would wrap otherwise); callers reserve first.
    pub(crate) fn put_u8(&mut self, n: u8) {
        let len = self.len();
        unsafe {
            let inner = self.as_inner();
            inner.len += 1;
            inner.remaining -= 1;
            *self.as_ptr().add(len) = n;
        }
    }

    pub(crate) fn len(&self) -> usize {
        unsafe { (*self.0.as_ptr()).len as usize }
    }

    pub(crate) fn capacity(&self) -> usize {
        unsafe { (*self.0.as_ptr()).capacity as usize }
    }

    pub(crate) fn remaining(&self) -> usize {
        unsafe { (*self.0.as_ptr()).remaining as usize }
    }

    /// Converts into an immutable `Storage`. Small contents are copied
    /// inline (the allocation is then released when `self` drops); larger
    /// contents hand the refcounted allocation over without copying.
    pub(crate) fn freeze(self) -> Storage {
        unsafe {
            if self.len() <= INLINE_CAP {
                Storage::from_ptr_inline(self.as_ptr(), self.len())
            } else {
                let inner = self.0.as_ref();
                let offset = inner.offset as usize;
                let inner = Storage {
                    ptr: (self.0.as_ptr().cast::<u8>()).add(offset),
                    len: self.len(),
                    // Encode the data offset plus the KIND_VEC tag.
                    offset: NonZeroUsize::new_unchecked(
                        (offset << KIND_OFFSET_BITS) ^ KIND_VEC,
                    ),
                };
                // The refcount is transferred to the returned Storage;
                // skip this handle's Drop.
                mem::forget(self);
                inner
            }
        }
    }

    /// Splits off the first `at` bytes as an immutable `Storage`,
    /// advancing `self` past them.
    pub(crate) fn split_to(&mut self, at: usize) -> Storage {
        unsafe {
            let ptr = self.as_ptr();
            let other = if at <= INLINE_CAP {
                // Small head: independent inline copy, no sharing.
                Storage::from_ptr_inline(ptr, at)
            } else {
                // Shared head: bump the refcount for the new view,
                // aborting on overflow (same policy as Arc).
                let inner = self.as_inner();
                let ref_cnt = inner.ref_count.fetch_add(1, Relaxed);
                if ref_cnt == u32::MAX {
                    abort();
                }
                let offset = inner.offset as usize;
                Storage {
                    ptr: (self.0.as_ptr().cast::<u8>()).add(offset),
                    len: at,
                    offset: NonZeroUsize::new_unchecked(
                        (offset << KIND_OFFSET_BITS) ^ KIND_VEC,
                    ),
                }
            };
            self.set_start(at as u32);
            other
        }
    }

    /// Shortens to `len` bytes. Truncating a uniquely owned, advanced
    /// buffer to zero additionally rewinds `offset` to reclaim the full
    /// data area.
    pub(crate) fn truncate(&mut self, len: usize) {
        unsafe {
            if len == 0 {
                let inner = self.as_inner();
                if inner.is_unique() && inner.offset != METADATA_SIZE_U32 {
                    // offset + capacity is the total data area size.
                    let cap = (inner.offset as usize) + inner.capacity as usize;
                    inner.len = 0;
                    inner.offset = METADATA_SIZE_U32;
                    inner.capacity = (cap - METADATA_SIZE) as u32;
                    inner.remaining = inner.capacity;
                    return;
                }
            }
            if len < self.len() {
                self.set_len(len);
            }
        }
    }

    /// Resizes to `new_len`, filling any newly exposed bytes with `value`.
    pub(crate) fn resize(&mut self, new_len: usize, value: u8) {
        let len = self.len();
        if new_len > len {
            let additional = new_len - len;
            self.reserve(additional);
            unsafe {
                let dst = self.as_raw()[len..].as_mut_ptr();
                ptr::write_bytes(dst, value, additional);
                self.set_len(new_len);
            }
        } else {
            self.truncate(new_len);
        }
    }

    /// Replaces the buffer with a fresh allocation of `capacity` bytes,
    /// copying the current contents.
    #[inline]
    pub(crate) fn reserve_capacity(&mut self, capacity: usize) {
        *self = StorageVec(SharedVec::create(capacity, self.as_ref()));
    }

    /// Ensures at least `additional` writable bytes remain.
    #[inline]
    pub(crate) fn reserve(&mut self, additional: usize) {
        if additional <= self.remaining() {
            return;
        }
        self.reserve_inner(additional);
    }

    /// Slow path of `reserve`: first tries to reclaim the advanced prefix
    /// of a uniquely owned buffer via an in-place move; otherwise
    /// reallocates.
    fn reserve_inner(&mut self, additional: usize) {
        unsafe {
            let inner = self.as_inner();
            let len = inner.len as usize;
            let new_cap = len + additional;
            if inner.is_unique() {
                // Total allocation size = advanced prefix + capacity.
                let capacity = (inner.offset as usize) + (inner.capacity as usize);
                if capacity >= (new_cap + METADATA_SIZE) {
                    let offset = inner.offset;
                    inner.offset = METADATA_SIZE_U32;
                    inner.remaining = (capacity - len - METADATA_SIZE) as u32;
                    inner.capacity = inner.len + inner.remaining;
                    if len != 0 {
                        // Possibly overlapping move of the live bytes back
                        // to the front of the data area.
                        let ptr = self.0.as_ptr().cast::<u8>();
                        ptr::copy(ptr.add(offset as usize), ptr.add(METADATA_SIZE), len);
                    }
                    return;
                }
            }
            *self = StorageVec(SharedVec::create(new_cap, self.as_ref()));
        }
    }

    /// Sets the initialized length.
    ///
    /// # Safety
    /// Bytes up to `len` must have been written.
    #[inline]
    pub(crate) unsafe fn set_len(&mut self, len: usize) {
        let inner = self.0.as_mut();
        assert!(len as u32 <= inner.capacity);
        inner.len = len as u32;
        inner.remaining = inner.capacity - (len as u32);
    }

    /// Advances the data start by `start` bytes, shrinking `len` and
    /// `capacity` accordingly (`remaining` is preserved when `start` is
    /// within the initialized bytes).
    pub(crate) unsafe fn set_start(&mut self, start: u32) {
        if start != 0 {
            let inner = self.as_inner();
            assert!(
                start <= inner.capacity,
                "Cannot set start position offset:{} len:{} cap:{} remaining:{} new-len:{start}",
                inner.offset,
                inner.len,
                inner.capacity,
                inner.remaining,
            );
            inner.offset += start;
            if inner.len > start {
                inner.len -= start;
            } else {
                inner.len = 0;
            }
            inner.remaining = inner.capacity - inner.len - start;
            inner.capacity = inner.remaining + inner.len;
        }
    }
}
// SAFETY: the underlying allocation is released through an atomic refcount;
// mutation requires `&mut self`. NOTE(review): confirm no `&self` method
// hands out aliasing mutable access.
unsafe impl Send for StorageVec {}
unsafe impl Sync for StorageVec {}
impl Drop for StorageVec {
    /// Drops this handle's reference; the allocation is freed once the
    /// last reference (including frozen/split `Storage` views) is gone.
    fn drop(&mut self) {
        release_shared_vec(self.0.as_ptr());
    }
}
impl SharedVec {
    /// Allocates a shared buffer with room for at least `cap` data bytes
    /// and copies `src` into its start.
    ///
    /// Callers must pass `cap >= src.len()` — otherwise the header's
    /// `remaining` field (`capacity - len`) would underflow. The debug
    /// assertion makes that contract explicit without changing release
    /// behavior.
    fn create(cap: usize, src: &[u8]) -> NonNull<SharedVec> {
        debug_assert!(
            src.len() <= cap,
            "SharedVec::create: src len {} exceeds requested capacity {}",
            src.len(),
            cap
        );
        let ptr = Self::alloc_with_capacity(cap, src.len() as u32);
        unsafe {
            // Data bytes live immediately after the metadata header.
            let dst = ptr.add(METADATA_SIZE);
            let sl = slice::from_raw_parts_mut(dst, src.len());
            sl.copy_from_slice(src);
            #[allow(clippy::cast_ptr_alignment)]
            NonNull::new_unchecked(ptr.cast::<SharedVec>())
        }
    }

    /// Allocates header + data area and writes the initial header.
    ///
    /// NOTE(review): sizes are stored as `u32`; an allocation whose data
    /// area exceeds `u32::MAX` bytes would silently truncate `capacity`.
    /// The debug assertion flags that case in debug builds.
    fn alloc_with_capacity(cap: usize, len: u32) -> *mut u8 {
        let layout = shared_vec_layout(cap).unwrap();
        debug_assert!(
            layout.size() - METADATA_SIZE <= u32::MAX as usize,
            "SharedVec capacity overflows u32"
        );
        unsafe {
            let ptr = alloc::alloc(layout);
            if ptr.is_null() {
                alloc::handle_alloc_error(layout);
            }
            // The usable capacity may exceed `cap` due to layout padding.
            let capacity = (layout.size() - METADATA_SIZE) as u32;
            #[allow(clippy::cast_ptr_alignment)]
            ptr::write(
                ptr.cast::<SharedVec>(),
                SharedVec {
                    len,
                    capacity,
                    remaining: capacity - len,
                    offset: METADATA_SIZE_U32,
                    ref_count: AtomicU32::new(1),
                },
            );
            ptr
        }
    }

    /// `true` when this allocation has exactly one owner.
    fn is_unique(&self) -> bool {
        self.ref_count.load(Acquire) == 1
    }

    /// Usable data capacity in bytes.
    fn capacity(&self) -> usize {
        self.capacity as usize
    }
}
/// Decrements the refcount and frees the allocation when this was the
/// last reference.
fn release_shared_vec(ptr: *mut SharedVec) {
    unsafe {
        // Release on the decrement plus an Acquire fence before freeing:
        // the standard Arc drop protocol, ensuring all writes by other
        // owners happen-before the deallocation.
        if (*ptr).ref_count.fetch_sub(1, Release) != 1 {
            return;
        }
        atomic::fence(Acquire);
        // offset + capacity stays equal to the originally allocated layout
        // size (set_start / reserve_inner / truncate all preserve the sum),
        // so the dealloc layout matches the alloc layout.
        let cap = (*ptr).offset as usize + (*ptr).capacity as usize;
        ptr::drop_in_place(ptr);
        let layout = shared_vec_layout(cap - METADATA_SIZE).unwrap();
        alloc::dealloc(ptr.cast(), layout);
    }
}
/// Memory layout for a `SharedVec` header followed by `cap` data bytes.
/// `?` is not available in a `const fn`, hence the explicit matches.
const fn shared_vec_layout(cap: usize) -> Result<Layout, LayoutError> {
    // The data area is raw bytes: size `cap`, alignment 1 (u8's alignment).
    let data = match Layout::from_size_align(cap, 1) {
        Ok(layout) => layout,
        Err(err) => return Err(err),
    };
    // Header first (padded to its own alignment), data bytes after it.
    match Layout::new::<SharedVec>().pad_to_align().extend(data) {
        Ok((layout, _data_offset)) => Ok(layout),
        Err(err) => Err(err),
    }
}
/// Guard whose destructor panics; dropping it while another panic is
/// already unwinding produces a double panic, which aborts the process
/// (see `abort` below).
struct Abort;
impl Drop for Abort {
    fn drop(&mut self) {
        panic!();
    }
}
impl Kind {
    /// Maps a raw kind tag (the low bits of `Storage::offset`) to `Kind`.
    fn from_raw(n: usize) -> Kind {
        match n {
            KIND_INLINE => Kind::Inline,
            KIND_STATIC => Kind::Static,
            // KIND_VEC (and any unexpected value) falls through to Vec.
            _ => Kind::Vec,
        }
    }
}
/// Aborts the process without depending on `std::process::abort`:
/// the explicit `panic!` starts unwinding, then `_a`'s destructor panics
/// again, and a panic-while-panicking aborts.
#[inline(never)]
#[cold]
fn abort() {
    let _a = Abort;
    panic!();
}
#[cfg(test)]
mod tests {
    use crate::*;

    // 263 bytes — long enough to force heap (non-inline) storage.
    // Note: `\` at end of line in a byte-string skips the newline and
    // following indentation, so the pieces concatenate directly.
    const LONG: &[u8] =
        b"mary had a little lamb, little lamb, little lamb, little lamb, little lamb, little lamb \
      mary had a little lamb, little lamb, little lamb, little lamb, little lamb, little lamb \
      mary had a little lamb, little lamb, little lamb, little lamb, little lamb, little lamb";

    #[test]
    fn trimdown() {
        let mut b = Bytes::from(LONG.to_vec());
        assert_eq!(b.storage.capacity(), 263);
        unsafe { b.storage.set_len(68) };
        assert_eq!(b.len(), 68);
        assert_eq!(&b[..], &LONG[..68]);
        assert_eq!(b.storage.capacity(), 263);
        // >= 64 spare bytes: trimdown reallocates to an exact fit.
        b.trimdown();
        assert_eq!(&b[..], &LONG[..68]);
        assert_eq!(b.storage.capacity(), 68);
        unsafe { b.storage.set_len(16) };
        assert_eq!(&b[..], &LONG[..16]);
        // 16 bytes fit inline: trimdown drops the heap allocation.
        b.trimdown();
        assert!(b.is_inline());
    }

    #[test]
    #[allow(clippy::unnecessary_fallible_conversions)]
    fn bytes_mut() {
        let bv = BytesMut::copy_from_slice(LONG);
        assert_eq!(bv.capacity(), 263);
        assert_eq!(bv.len(), 263);
        assert_eq!(bv.as_ref().len(), 263);
        assert_eq!(bv.as_ref(), LONG);
        assert_eq!(&bv[..], LONG);
        let sl: &[u8] = &[];
        let bv = BytesMut::copy_from_slice(sl);
        assert_eq!(bv.capacity(), 0);
        assert_eq!(bv.len(), 0);
        assert_eq!(bv.as_ref().len(), 0);
        assert_eq!(bv.as_ref(), sl);
        assert_eq!(&bv[..], sl);
        let mut bv = BytesMut::copy_from_slice(&b"hello"[..]);
        assert_eq!(bv.capacity(), 5);
        bv.reserve_capacity(128);
        assert_eq!(bv.capacity(), 128);
        assert_eq!(bv.len(), 5);
        assert_eq!(bv.as_ref(), &b"hello"[..]);
        let mut bv = BytesMut::copy_from_slice(&b"hello"[..]);
        assert_eq!(bv.capacity(), 5);
        assert_eq!(bv.len(), 5);
        assert_eq!(bv.as_ref().len(), 5);
        assert_eq!(bv.as_ref()[0], b"h"[0]);
        assert_eq!(bv.remaining_mut(), 0);
        bv.reserve(1);
        assert_eq!(bv.remaining_mut(), 1);
        bv.put_u8(b" "[0]);
        assert_eq!(bv.as_ref(), &b"hello "[..]);
        assert_eq!(bv.remaining_mut(), 0);
        bv.reserve(5);
        assert_eq!(bv.remaining_mut(), 5);
        bv.put("world");
        assert_eq!(bv, "hello world");
        bv.advance_to(6);
        assert_eq!(bv, "world");
        assert_eq!(bv.remaining_mut(), 0);
        let bv = BytesMut::copy_from_slice(&b"hello world"[..]);
        let b = Bytes::from(bv);
        assert_eq!(b, "hello world");
        // Reserving after consuming a uniquely owned buffer should reuse
        // the same allocation (in-place reclaim, not a realloc).
        let mut bv = BytesMut::with_capacity(0);
        bv.extend_from_slice(b"hello world.");
        bv.extend_from_slice(b"hello world.");
        bv.extend_from_slice(b"hello world.");
        bv.extend_from_slice(b"hello world.");
        let p1 = unsafe { bv.storage.as_ptr() as usize };
        bv.advance(48);
        assert!(bv.is_empty());
        assert_eq!(bv.capacity(), 0);
        bv.reserve(48);
        assert!(bv.is_empty());
        assert_eq!(bv.capacity(), 48);
        let p2 = unsafe { bv.storage.as_ptr() as usize };
        assert!(p1 == p2);
    }
}