use std::{
alloc,
borrow::{Borrow, BorrowMut},
cmp::Ordering,
fmt,
fs::File,
io::{self, Error as IoError, ErrorKind, Read},
mem,
ops::{Deref, DerefMut, Index, IndexMut},
os::fd::AsRawFd,
ptr::NonNull,
slice,
};
use libc::c_void;
use rkyv::{
Archive, Archived, Serialize,
ser::{ScratchSpace, Serializer},
vec::ArchivedVec,
};
use rkyv::{ArchiveUnsized, Fallible, RelPtr, vec::VecResolver};
pub mod slab;
use self::slab::{acquire_allocation, layout_for, recycle_or_dealloc};
/// A growable byte buffer, analogous to `Vec<u8>`, whose backing memory is
/// obtained from the `slab` module (see [`FBuf::ALIGNMENT`]); presumably the
/// slab guarantees `ALIGNMENT`-byte alignment — confirm in `slab`.
pub struct FBuf {
// Pointer to the allocation; dangling when `cap == 0`.
ptr: NonNull<u8>,
// Usable bytes in the allocation; 0 means "no allocation".
cap: usize,
// Initialized bytes; invariant: `len <= cap`.
len: usize,
}
impl Drop for FBuf {
    /// Returns the allocation to the slab (or frees it); a zero capacity
    /// means `ptr` is dangling and nothing was ever allocated.
    #[inline]
    fn drop(&mut self) {
        if self.cap == 0 {
            return;
        }
        recycle_or_dealloc(self.ptr, self.cap);
    }
}
impl FBuf {
/// Alignment, in bytes, of every non-empty `FBuf` allocation.
pub const ALIGNMENT: usize = 512;
/// Largest requestable capacity; leaves headroom so the size rounded up
/// to `ALIGNMENT` still fits in `isize` (= `isize::MAX - 511`).
pub const MAX_CAPACITY: usize = isize::MAX as usize - (Self::ALIGNMENT - 1);
/// Creates an empty buffer without allocating.
#[inline]
pub fn new() -> Self {
FBuf {
ptr: NonNull::dangling(),
cap: 0,
len: 0,
}
}
#[inline]
pub fn with_capacity(capacity: usize) -> Self {
if capacity == 0 {
Self::new()
} else {
assert!(
capacity <= Self::MAX_CAPACITY,
"`capacity` cannot exceed isize::MAX - 15"
);
let (ptr, capacity) = acquire_allocation(capacity);
Self {
ptr,
cap: capacity,
len: 0,
}
}
}
/// Layout describing the current allocation; delegates to the slab's
/// layout computation. Only meaningful when `cap > 0`.
#[inline]
fn layout(&self) -> alloc::Layout {
layout_for(self.cap)
}
/// Truncates the buffer to length 0 without touching the allocation.
#[inline]
pub fn clear(&mut self) {
self.len = 0;
}
/// Reallocates the buffer to exactly `new_cap` bytes (the slab may still
/// grant more on the fresh-allocation path).
///
/// # Safety
/// `new_cap` must not exceed [`Self::MAX_CAPACITY`] and must be at least
/// `self.len`; both are only checked with `debug_assert!`.
#[inline]
pub unsafe fn change_capacity(&mut self, new_cap: usize) {
unsafe {
debug_assert!(new_cap <= Self::MAX_CAPACITY);
debug_assert!(new_cap >= self.len);
if new_cap > 0 {
let (new_ptr, new_cap) = if self.cap > 0 {
// Resize the existing allocation; `std::alloc::realloc`
// preserves the alignment of the passed-in layout.
let new_ptr = alloc::realloc(self.ptr.as_ptr(), self.layout(), new_cap);
if new_ptr.is_null() {
alloc::handle_alloc_error(layout_for(new_cap));
}
(NonNull::new_unchecked(new_ptr), new_cap)
} else {
// No existing allocation: take a fresh one from the slab.
acquire_allocation(new_cap)
};
self.ptr = new_ptr;
self.cap = new_cap;
} else if self.cap > 0 {
// Shrinking to zero: release the allocation and return to the
// dangling-pointer empty state.
recycle_or_dealloc(self.ptr, self.cap);
self.ptr = NonNull::dangling();
self.cap = 0;
}
}
}
#[inline]
pub fn shrink_to_fit(&mut self) {
if self.cap != self.len {
unsafe { self.change_capacity(self.len) };
}
}
/// Raw mutable pointer to the buffer's bytes (dangling when `cap == 0`).
#[inline]
pub fn as_mut_ptr(&mut self) -> *mut u8 {
self.ptr.as_ptr()
}
/// The initialized bytes as a mutable slice.
#[inline]
pub fn as_mut_slice(&mut self) -> &mut [u8] {
// SAFETY: the first `len` bytes are always initialized.
unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
}
/// Raw const pointer to the buffer's bytes (dangling when `cap == 0`).
#[inline]
pub fn as_ptr(&self) -> *const u8 {
self.ptr.as_ptr()
}
/// The initialized bytes as a shared slice.
#[inline]
pub fn as_slice(&self) -> &[u8] {
// SAFETY: the first `len` bytes are always initialized.
unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
}
/// Number of bytes the buffer can hold without reallocating.
#[inline]
pub fn capacity(&self) -> usize {
self.cap
}
/// Ensures capacity for at least `additional` more bytes beyond `len`.
#[inline]
pub fn reserve(&mut self, additional: usize) {
// `cap >= len` is an invariant, so this subtraction cannot actually
// wrap; `wrapping_sub` merely skips the overflow check.
let remaining = self.cap.wrapping_sub(self.len);
if additional > remaining {
self.do_reserve(additional);
}
}
/// Out-of-line slow path of [`Self::reserve`], kept `#[cold]` so the
/// common no-grow path stays small.
#[cold]
fn do_reserve(&mut self, additional: usize) {
let new_cap = self
.len
.checked_add(additional)
.expect("cannot reserve a larger FBuf");
// SAFETY: the fast path established `additional > cap - len`, so
// `new_cap > cap`, satisfying `grow_capacity_to`'s precondition.
unsafe { self.grow_capacity_to(new_cap) };
}
/// Grows capacity to at least `new_cap`, rounding up to the next power of
/// two, or clamping to [`Self::MAX_CAPACITY`] when rounding would overflow.
///
/// # Safety
/// `new_cap` must be strictly greater than the current capacity (only
/// checked with `debug_assert!`).
///
/// # Panics
/// Panics if `new_cap` exceeds [`Self::MAX_CAPACITY`].
#[inline]
pub unsafe fn grow_capacity_to(&mut self, new_cap: usize) {
unsafe {
debug_assert!(new_cap > self.cap);
// Above half of (isize::MAX + 1), `next_power_of_two` would exceed
// the addressable limit, so clamp to MAX_CAPACITY instead.
let new_cap = if new_cap > (isize::MAX as usize + 1) >> 1 {
assert!(
new_cap <= Self::MAX_CAPACITY,
"cannot reserve a larger FBuf"
);
Self::MAX_CAPACITY
} else {
new_cap.next_power_of_two()
};
self.change_capacity(new_cap);
}
}
/// Resizes to `new_len`, filling any newly exposed bytes with `value`;
/// shrinking only adjusts the length.
#[inline]
pub fn resize(&mut self, new_len: usize, value: u8) {
if new_len > self.len {
let additional = new_len - self.len;
self.reserve(additional);
// SAFETY: `reserve` guaranteed `additional` spare bytes past `len`;
// initialize them before they become visible via the new length.
unsafe {
core::ptr::write_bytes(self.ptr.as_ptr().add(self.len), value, additional);
}
}
// SAFETY: either `new_len <= len` (truncation) or the bytes up to
// `new_len` were just initialized above.
unsafe {
self.set_len(new_len);
}
}
/// Returns `true` when the buffer holds no bytes.
#[inline]
pub fn is_empty(&self) -> bool {
self.len == 0
}
/// Number of initialized bytes in the buffer.
#[inline]
pub fn len(&self) -> usize {
self.len
}
/// Appends all bytes of `other` to the buffer, growing as needed.
#[inline]
pub fn extend_from_slice(&mut self, other: &[u8]) {
if !other.is_empty() {
self.reserve(other.len());
// SAFETY: `reserve` made room for `other.len()` bytes past `len`,
// and `other` cannot alias the spare capacity we write into.
unsafe {
core::ptr::copy_nonoverlapping(
other.as_ptr(),
self.as_mut_ptr().add(self.len()),
other.len(),
);
}
self.len += other.len();
}
}
#[inline]
pub fn pop(&mut self) -> Option<u8> {
if self.len == 0 {
None
} else {
let result = self[self.len - 1];
self.len -= 1;
Some(result)
}
}
/// Appends a single byte, growing the buffer when it is full.
#[inline]
pub fn push(&mut self, value: u8) {
if self.len == self.cap {
self.reserve_for_push();
}
// SAFETY: capacity now exceeds `len`, so the write is in bounds.
unsafe {
self.as_mut_ptr().add(self.len).write(value);
self.len += 1;
}
}
/// Out-of-line grow path for [`Self::push`].
#[cold]
fn reserve_for_push(&mut self) {
// `len == cap` at the call site, so `len + 1 > cap` satisfies
// `grow_capacity_to`'s precondition; `len + 1` cannot overflow since
// `len <= MAX_CAPACITY < usize::MAX`.
let new_cap = self.len + 1;
unsafe { self.grow_capacity_to(new_cap) };
}
/// Reserves exactly `additional` bytes beyond `len`, without the
/// power-of-two rounding done by [`Self::reserve`].
///
/// # Panics
/// Panics if the resulting capacity would exceed [`Self::MAX_CAPACITY`]
/// or overflow `usize`.
#[inline]
pub fn reserve_exact(&mut self, additional: usize) {
let new_cap = self
.len
.checked_add(additional)
.expect("cannot reserve a larger FBuf");
if new_cap > self.cap {
assert!(
new_cap <= Self::MAX_CAPACITY,
"cannot reserve a larger FBuf"
);
// SAFETY: `new_cap` was just validated against MAX_CAPACITY and is
// >= `len` by construction.
unsafe { self.change_capacity(new_cap) };
}
}
/// Sets the length directly, without initializing any bytes.
///
/// # Safety
/// The first `new_len` bytes must be initialized, and `new_len` must not
/// exceed the capacity (only checked with `debug_assert!`).
#[inline]
pub unsafe fn set_len(&mut self, new_len: usize) {
debug_assert!(new_len <= self.capacity());
self.len = new_len;
}
#[inline]
pub fn into_boxed_slice(self) -> Box<[u8]> {
self.into_vec().into_boxed_slice()
}
pub fn from_slice(slice: &[u8]) -> Self {
let mut fbuf = FBuf::new();
fbuf.extend_from_slice(slice);
fbuf
}
#[inline]
pub fn into_vec(self) -> Vec<u8> {
Vec::from(self.as_ref())
}
/// Reads from `r` until EOF, appending everything to the buffer, and
/// returns the number of bytes appended.
///
/// Mirrors the standard library's `read_to_end` strategy: read straight
/// into spare capacity (zeroed first so the `&mut [u8]` handed to `read`
/// never exposes uninitialized memory), and when the starting capacity
/// fills up, probe with a small stack buffer before paying to grow.
///
/// # Errors
/// Returns any non-`Interrupted` I/O error from `r`.
pub fn extend_from_reader<R: Read + ?Sized>(&mut self, r: &mut R) -> io::Result<usize> {
let start_len = self.len();
let start_cap = self.capacity();
// Count of bytes past `len` already zeroed by a previous iteration,
// so they need not be zeroed again.
let mut initialized = 0;
loop {
if self.len() == self.capacity() {
self.reserve(32);
}
let read_buf_start = unsafe { self.as_mut_ptr().add(self.len) };
let read_buf_len = self.capacity() - self.len();
// Zero only the not-yet-initialized tail of the spare capacity.
unsafe {
core::ptr::write_bytes(
read_buf_start.add(initialized),
0,
read_buf_len - initialized,
);
}
let read_buf = unsafe { core::slice::from_raw_parts_mut(read_buf_start, read_buf_len) };
match r.read(read_buf) {
Ok(read) => {
// SAFETY: `read <= read_buf_len`, and the whole read buffer
// was zero-initialized above.
unsafe {
self.set_len(self.len() + read);
}
// The unread remainder of the buffer stays zeroed.
initialized = read_buf_len - read;
if read == 0 {
return Ok(self.len() - start_len);
}
}
Err(e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) => return Err(e),
}
// The pre-existing capacity is exactly full: check for EOF with a
// small probe read before committing to a reallocation.
if self.len() == self.capacity() && self.capacity() == start_cap {
let mut probe = [0u8; 32];
loop {
match r.read(&mut probe) {
Ok(0) => return Ok(self.len() - start_len),
Ok(n) => {
self.extend_from_slice(&probe[..n]);
break;
}
Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) => return Err(e),
}
}
}
}
}
/// Appends exactly `len` bytes read from `file` at byte `offset`, using
/// positional reads (`pread`) so the file's cursor is left untouched.
///
/// # Errors
/// Returns `UnexpectedEof` if the file ends before `len` bytes are read,
/// or the OS error from a failed `pread`; `Interrupted` is retried.
pub fn read_exact_at(
&mut self,
file: &File,
mut offset: u64,
mut len: usize,
) -> Result<(), IoError> {
self.reserve(len);
while len > 0 {
// SAFETY: `reserve(len)` guaranteed at least `len` spare bytes past
// `self.len`, and `pread` writes at most `len` bytes there.
let retval = unsafe {
libc::pread(
file.as_raw_fd(),
self.as_mut_ptr().add(self.len) as *mut c_void,
len,
// NOTE(review): `offset as i64` wraps for offsets above
// i64::MAX — presumably unreachable for real files; confirm.
offset as i64,
)
};
match retval.cmp(&0) {
// Zero bytes read: EOF before `len` was satisfied.
Ordering::Equal => return Err(ErrorKind::UnexpectedEof.into()),
Ordering::Less => {
let error = IoError::last_os_error();
if error.kind() != ErrorKind::Interrupted {
return Err(error);
}
}
// Partial reads are fine: advance and keep looping.
Ordering::Greater => {
self.len += retval as usize;
len -= retval as usize;
offset += retval as u64;
}
}
}
Ok(())
}
}
impl From<FBuf> for Vec<u8> {
    /// Copies the buffer's bytes into a new `Vec`, delegating to
    /// [`FBuf::into_vec`].
    #[inline]
    fn from(aligned: FBuf) -> Self {
        aligned.into_vec()
    }
}
// Standard slice-view conversions so `FBuf` can be used anywhere a byte
// slice (shared or mutable) is accepted.
impl AsMut<[u8]> for FBuf {
#[inline]
fn as_mut(&mut self) -> &mut [u8] {
self.as_mut_slice()
}
}
impl AsRef<[u8]> for FBuf {
#[inline]
fn as_ref(&self) -> &[u8] {
self.as_slice()
}
}
impl Borrow<[u8]> for FBuf {
#[inline]
fn borrow(&self) -> &[u8] {
self.as_slice()
}
}
impl BorrowMut<[u8]> for FBuf {
#[inline]
fn borrow_mut(&mut self) -> &mut [u8] {
self.as_mut_slice()
}
}
impl Clone for FBuf {
    /// Makes an independent copy with capacity sized to the current length.
    #[inline]
    fn clone(&self) -> Self {
        let mut result = FBuf::with_capacity(self.len);
        // SAFETY: `result` has capacity >= `self.len`, the two allocations
        // cannot overlap, and we mark exactly the copied bytes initialized.
        unsafe {
            core::ptr::copy_nonoverlapping(self.as_ptr(), result.as_mut_ptr(), self.len);
            result.set_len(self.len);
        }
        result
    }
}
impl fmt::Debug for FBuf {
// Formats as a byte slice, e.g. `[1, 2, 3]`.
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.as_slice().fmt(f)
}
}
impl Default for FBuf {
// An empty buffer; does not allocate.
#[inline]
fn default() -> Self {
Self::new()
}
}
// Deref to `[u8]` gives `FBuf` all of the slice API for free.
impl Deref for FBuf {
type Target = [u8];
#[inline]
fn deref(&self) -> &Self::Target {
self.as_slice()
}
}
impl DerefMut for FBuf {
#[inline]
fn deref_mut(&mut self) -> &mut Self::Target {
self.as_mut_slice()
}
}
// Indexing with anything a slice accepts (usize, ranges, ...).
impl<I: slice::SliceIndex<[u8]>> Index<I> for FBuf {
type Output = <I as slice::SliceIndex<[u8]>>::Output;
#[inline]
fn index(&self, index: I) -> &Self::Output {
&self.as_slice()[index]
}
}
impl<I: slice::SliceIndex<[u8]>> IndexMut<I> for FBuf {
#[inline]
fn index_mut(&mut self, index: I) -> &mut Self::Output {
&mut self.as_mut_slice()[index]
}
}
impl io::Write for FBuf {
    /// Appends `buf` in full; an in-memory sink never fails or writes short.
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.extend_from_slice(buf);
        Ok(buf.len())
    }
    /// Appends every slice in `bufs`, reserving the combined size up front
    /// so at most one reallocation occurs.
    #[inline]
    fn write_vectored(&mut self, bufs: &[io::IoSlice<'_>]) -> io::Result<usize> {
        let total: usize = bufs.iter().map(|b| b.len()).sum();
        self.reserve(total);
        for buf in bufs {
            self.extend_from_slice(buf);
        }
        Ok(total)
    }
    /// Writes land directly in memory, so flushing is a no-op.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
    /// Equivalent to [`Self::write`], which already consumes all of `buf`.
    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        self.extend_from_slice(buf);
        Ok(())
    }
}
// Byte-wise comparisons, provided only for the test suite; keeping them
// `#[cfg(test)]` avoids committing to them as public API.
#[cfg(test)]
impl Eq for FBuf {}
#[cfg(test)]
impl PartialEq<Self> for FBuf {
fn eq(&self, other: &Self) -> bool {
self.as_slice() == other.as_slice()
}
}
#[cfg(test)]
impl PartialOrd for FBuf {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.as_slice().partial_cmp(other.as_slice())
}
}
impl Archive for FBuf {
// Archives exactly like a `Vec<u8>`, so consumers of the archived form
// need not know about `FBuf` at all.
type Archived = ArchivedVec<u8>;
type Resolver = VecResolver;
#[inline]
unsafe fn resolve(&self, pos: usize, resolver: Self::Resolver, out: *mut Self::Archived) {
unsafe {
ArchivedVec::resolve_from_slice(self.as_slice(), pos, resolver, out);
}
}
}
impl<S: ScratchSpace + Serializer + ?Sized> Serialize<S> for FBuf {
#[inline]
fn serialize(&self, serializer: &mut S) -> Result<Self::Resolver, S::Error> {
// Pad the output so the archived bytes start on an ALIGNMENT boundary
// — presumably so the archive preserves the buffer's alignment
// guarantee when read back; confirm with the deserialization path.
serializer.align(Self::ALIGNMENT)?;
ArchivedVec::<Archived<u8>>::serialize_from_slice(self.as_slice(), serializer)
}
}
// SAFETY: `FBuf` exclusively owns its allocation and has no interior
// mutability, so moving it to, or sharing `&FBuf` across, other threads is
// sound for the same reasons it is for `Vec<u8>` (the raw pointer field is
// what blocks the auto-impls).
unsafe impl Send for FBuf {}
unsafe impl Sync for FBuf {}
impl Unpin for FBuf {}
/// An rkyv serializer that appends to an [`FBuf`] (or anything that borrows
/// one), optionally enforcing a maximum output size.
#[derive(Debug)]
pub struct FBufSerializer<A> {
pub inner: A,
// Maximum size in bytes the underlying buffer may grow to;
// `usize::MAX` means unlimited.
limit: usize,
}
/// Error returned when a write would push the serializer's buffer past its
/// configured byte limit.
#[derive(Debug)]
pub struct LimitExceeded;
impl<A: Borrow<FBuf>> FBufSerializer<A> {
    /// Wraps `inner` with no byte limit.
    #[inline]
    pub fn new(inner: A) -> Self {
        Self {
            inner,
            limit: usize::MAX,
        }
    }
    /// Returns a serializer that fails with [`LimitExceeded`] once the
    /// buffer would exceed `limit` bytes.
    pub fn with_limit(self, limit: usize) -> Self {
        Self {
            inner: self.inner,
            limit,
        }
    }
    /// Consumes the serializer, returning the underlying buffer.
    #[inline]
    pub fn into_inner(self) -> A {
        self.inner
    }
}
// Note: deliberately bounded only on `A: Default` (not `Borrow<FBuf>`), so
// it cannot delegate to `Self::new`; the struct literal is intentional.
impl<A: Default> Default for FBufSerializer<A> {
#[inline]
fn default() -> Self {
Self {
inner: A::default(),
limit: usize::MAX,
}
}
}
impl<A> Fallible for FBufSerializer<A> {
// The only failure mode of this serializer is exceeding the byte limit.
type Error = LimitExceeded;
}
impl<A: Borrow<FBuf> + BorrowMut<FBuf>> Serializer for FBufSerializer<A> {
/// Current write position = number of bytes already in the buffer.
#[inline]
fn pos(&self) -> usize {
self.inner.borrow().len()
}
/// Appends `bytes`, failing with [`LimitExceeded`] rather than growing
/// the buffer past the configured limit.
#[inline]
fn write(&mut self, bytes: &[u8]) -> Result<(), Self::Error> {
let vec = self.inner.borrow_mut();
if vec.len() + bytes.len() > self.limit {
Err(LimitExceeded)
} else {
vec.extend_from_slice(bytes);
Ok(())
}
}
/// Reserves space for one `T::Archived`, zeroes it, and resolves `value`
/// directly into the buffer, returning its position.
///
/// # Safety
/// The caller must ensure the current position is suitably aligned for
/// `T::Archived`; this is only checked with `debug_assert_eq!`.
#[inline]
unsafe fn resolve_aligned<T: Archive + ?Sized>(
&mut self,
value: &T,
resolver: T::Resolver,
) -> Result<usize, Self::Error> {
unsafe {
let pos = self.pos();
debug_assert_eq!(pos & (mem::align_of::<T::Archived>() - 1), 0);
let vec = self.inner.borrow_mut();
let additional = mem::size_of::<T::Archived>();
if vec.len() + additional > self.limit {
return Err(LimitExceeded);
}
vec.reserve(additional);
vec.set_len(vec.len() + additional);
let ptr = vec.as_mut_ptr().add(pos).cast::<T::Archived>();
// Zero the slot first so any padding bytes inside `T::Archived`
// are deterministic, then let rkyv fill in the real fields.
ptr.write_bytes(0, 1);
value.resolve(pos, resolver, ptr);
Ok(pos)
}
}
/// Like `resolve_aligned`, but writes the `RelPtr` for an unsized value
/// whose payload was already serialized at position `to`.
///
/// # Safety
/// The caller must ensure the current position is suitably aligned for
/// `RelPtr<T::Archived>`; this is only checked with `debug_assert_eq!`.
#[inline]
unsafe fn resolve_unsized_aligned<T: ArchiveUnsized + ?Sized>(
&mut self,
value: &T,
to: usize,
metadata_resolver: T::MetadataResolver,
) -> Result<usize, Self::Error> {
unsafe {
let from = self.pos();
debug_assert_eq!(from & (mem::align_of::<RelPtr<T::Archived>>() - 1), 0);
let vec = self.inner.borrow_mut();
let additional = mem::size_of::<RelPtr<T::Archived>>();
if vec.len() + additional > self.limit {
return Err(LimitExceeded);
}
vec.reserve(additional);
vec.set_len(vec.len() + additional);
let ptr = vec.as_mut_ptr().add(from).cast::<RelPtr<T::Archived>>();
// Zero then resolve, as in `resolve_aligned`.
ptr.write_bytes(0, 1);
value.resolve_unsized(from, to, metadata_resolver, ptr);
Ok(from)
}
}
}