use std::{
borrow::{Borrow, BorrowMut},
cmp,
convert::TryFrom,
error,
fmt::{self, Debug, Formatter},
hash,
io,
mem,
ops,
sync::atomic::{
self,
AtomicUsize,
Ordering::{AcqRel, Acquire, Relaxed, Release},
},
};
// `Inner::accessors` packs two counts into a single word: the number of
// active `DivBufMut` writers in the high half, and the number of active
// `DivBuf` readers in the low half.
#[cfg(target_pointer_width = "64")]
const WRITER_SHIFT: usize = 32;
#[cfg(target_pointer_width = "64")]
const READER_MASK: usize = 0xFFFF_FFFF;
#[cfg(target_pointer_width = "32")]
const WRITER_SHIFT: usize = 16;
#[cfg(target_pointer_width = "32")]
const READER_MASK: usize = 0xFFFF;
// The increment used to register/release one writer in the packed count.
const ONE_WRITER: usize = 1 << WRITER_SHIFT;
/// Error returned when a view cannot be created because of conflicting
/// access to the shared buffer.  Wraps a static description string.
#[derive(Clone, Copy, Debug)]
pub struct Error(&'static str);

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The payload is already a plain message; emit it verbatim.
        f.write_str(self.0)
    }
}

impl error::Error for Error {}
/// Iterator that yields successive read-only chunks of a `DivBuf`,
/// each at most `chunksize` bytes long.
#[derive(Debug)]
pub struct Chunks {
    // Remaining, not-yet-yielded portion of the buffer.
    db: DivBuf,
    // Maximum size of each yielded chunk.
    chunksize: usize,
}

impl Chunks {
    /// Wrap `db` so it is handed out in chunks of at most `chunksize` bytes.
    fn new(db: DivBuf, chunksize: usize) -> Self {
        Self { db, chunksize }
    }
}
impl Iterator for Chunks {
type Item = DivBuf;
fn next(&mut self) -> Option<DivBuf> {
if self.db.is_empty() {
None
} else {
let size = cmp::min(self.chunksize, self.db.len());
Some(self.db.split_to(size))
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let mut c = self.db.len() / self.chunksize;
if self.db.len() % self.chunksize != 0 {
c += 1;
}
(c, Some(c))
}
}
/// Iterator that yields successive writable chunks of a `DivBufMut`,
/// each at most `chunksize` bytes long.
#[derive(Debug)]
pub struct ChunksMut {
    // Remaining, not-yet-yielded portion of the buffer.
    db: DivBufMut,
    // Maximum size of each yielded chunk.
    chunksize: usize,
}

impl ChunksMut {
    /// Wrap `db` so it is handed out in chunks of at most `chunksize` bytes.
    fn new(db: DivBufMut, chunksize: usize) -> Self {
        Self { db, chunksize }
    }
}
impl Iterator for ChunksMut {
type Item = DivBufMut;
fn next(&mut self) -> Option<DivBufMut> {
if self.db.is_empty() {
None
} else {
let size = cmp::min(self.chunksize, self.db.len());
Some(self.db.split_to(size))
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let mut c = self.db.len() / self.chunksize;
if self.db.len() % self.chunksize != 0 {
c += 1;
}
(c, Some(c))
}
}
/// Heap-allocated state shared by every handle to one buffer.
#[derive(Debug)]
struct Inner {
    // The actual byte storage.
    vec: Vec<u8>,
    // Packed count of active views: `DivBufMut` writers in the high half
    // (see WRITER_SHIFT) and `DivBuf` readers in the low half (READER_MASK).
    accessors: AtomicUsize,
    // Count of `DivBufShared` and `DivBufInaccessible` handles.
    sharers: AtomicUsize,
}
/// Owning handle to a shared buffer; hands out read-only (`DivBuf`) and
/// writable (`DivBufMut`) views.
pub struct DivBufShared {
    // Raw pointer to the reference-counted shared state.  Freed by whichever
    // handle's Drop observes both counters at zero.
    inner: *mut Inner,
}
/// Read-only view of a window into a shared buffer.
#[derive(Debug)]
pub struct DivBuf {
    inner: *mut Inner,
    // Offset of this view's window within `inner.vec`.
    begin: usize,
    // Length of this view's window, in bytes.
    len: usize,
}
/// Writable view of a window into a shared buffer.
#[derive(Debug)]
pub struct DivBufMut {
    inner: *mut Inner,
    // Offset of this view's window within `inner.vec`.
    begin: usize,
    // Length of this view's window, in bytes.
    len: usize,
}
/// Handle that keeps the shared storage alive (counted as a sharer) without
/// granting access; it can later be upgraded via `try_const`/`try_mut`.
#[derive(Debug)]
pub struct DivBufInaccessible {
    inner: *mut Inner,
    // Offset of the remembered window within `inner.vec`.
    begin: usize,
    // Length of the remembered window, in bytes.
    len: usize,
}
impl DivBufShared {
    /// Returns the capacity of the underlying storage.
    pub fn capacity(&self) -> usize {
        let inner = unsafe { &*self.inner };
        inner.vec.capacity()
    }
    /// Returns true if the underlying storage holds no bytes.
    pub fn is_empty(&self) -> bool {
        let inner = unsafe { &*self.inner };
        inner.vec.is_empty()
    }
    /// Returns the length of the underlying storage, in bytes.
    pub fn len(&self) -> usize {
        let inner = unsafe { &*self.inner };
        inner.vec.len()
    }
    /// Deprecated alias for [`DivBufShared::try_const`].
    #[deprecated(since = "0.3.1", note = "use try_const instead")]
    #[doc(hidden)]
    pub fn r#try(&self) -> Result<DivBuf, Error> {
        self.try_const()
    }
    /// Try to create a read-only `DivBuf` covering the whole buffer.
    ///
    /// # Errors
    /// Fails if any `DivBufMut` is currently active.
    pub fn try_const(&self) -> Result<DivBuf, Error> {
        let inner = unsafe { &*self.inner };
        // Optimistically register one reader, then back out if any writer
        // (high half of the packed counter) was active.
        if inner.accessors.fetch_add(1, Acquire) >> WRITER_SHIFT != 0 {
            inner.accessors.fetch_sub(1, Relaxed);
            Err(Error("Cannot create a DivBuf when DivBufMuts are active"))
        } else {
            let l = inner.vec.len();
            Ok(DivBuf {
                inner: self.inner,
                begin: 0,
                len: l,
            })
        }
    }
    /// Try to create a unique writable `DivBufMut` covering the whole buffer.
    ///
    /// # Errors
    /// Fails if any other `DivBuf` or `DivBufMut` is active.
    pub fn try_mut(&self) -> Result<DivBufMut, Error> {
        let inner = unsafe { &*self.inner };
        // Succeeds only when there are no readers and no writers at all:
        // the packed count must go exactly 0 -> ONE_WRITER.
        if inner
            .accessors
            .compare_exchange(0, ONE_WRITER, AcqRel, Acquire)
            .is_ok()
        {
            let l = inner.vec.len();
            Ok(DivBufMut {
                inner: self.inner,
                begin: 0,
                len: l,
            })
        } else {
            Err(Error(
                "Cannot create a new DivBufMut when other DivBufs or \
                 DivBufMuts are active",
            ))
        }
    }
    /// Create a buffer of `capacity` bytes whose contents are uninitialized.
    ///
    /// The `Vec`'s length is set without writing the bytes, so reading them
    /// before initializing is undefined behavior — which is why this is
    /// gated behind the "experimental" feature.
    #[cfg(any(feature = "experimental", docsrs))]
    #[cfg_attr(docsrs, doc(cfg(feature = "experimental")))]
    #[allow(clippy::uninit_vec)] // intentional: see doc comment above
    pub fn uninitialized(capacity: usize) -> Self {
        let mut v = Vec::<u8>::with_capacity(capacity);
        unsafe { v.set_len(capacity) };
        Self::from(v)
    }
    /// Create an empty buffer with at least `capacity` bytes reserved.
    pub fn with_capacity(capacity: usize) -> Self {
        Self::from(Vec::with_capacity(capacity))
    }
}
impl Debug for DivBufShared {
    /// Manual impl: the struct only holds a raw pointer, so show the
    /// pointed-to `Inner` instead of the address.
    fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {
        write!(f, "DivBufShared {{ inner: {:?} }}", unsafe { &*self.inner })
    }
}
impl Drop for DivBufShared {
    fn drop(&mut self) {
        let inner = unsafe { &*self.inner };
        // Free the shared state only if this was the last sharer AND no
        // accessors remain; otherwise the last remaining handle's Drop is
        // responsible for freeing it.
        if inner.sharers.fetch_sub(1, Release) == 1
            && inner.accessors.load(Relaxed) == 0
        {
            // Synchronize with the Release decrements performed by other
            // handles before deallocating.
            atomic::fence(Acquire);
            unsafe {
                drop(Box::from_raw(self.inner));
            }
        }
    }
}
impl<'a> From<&'a [u8]> for DivBufShared {
fn from(src: &'a [u8]) -> DivBufShared {
DivBufShared::from(src.to_vec())
}
}
impl From<Vec<u8>> for DivBufShared {
    /// Take ownership of `src` as the shared storage.
    fn from(src: Vec<u8>) -> DivBufShared {
        // A new buffer starts with no accessors and exactly one sharer:
        // the handle being returned.
        let boxed = Box::new(Inner {
            vec: src,
            accessors: AtomicUsize::new(0),
            sharers: AtomicUsize::new(1),
        });
        DivBufShared {
            inner: Box::into_raw(boxed),
        }
    }
}
impl TryFrom<DivBufShared> for Vec<u8> {
    type Error = DivBufShared;
    /// Recover the underlying `Vec<u8>`, consuming the handle.
    ///
    /// # Errors
    /// Succeeds only when `buf` is the sole sharer and no accessors exist;
    /// otherwise `buf` is returned unchanged as the error.
    fn try_from(buf: DivBufShared) -> Result<Self, Self::Error> {
        let inner = unsafe { &*buf.inner };
        if inner.sharers.load(Acquire) == 1
            && inner.accessors.load(Acquire) == 0
        {
            atomic::fence(Acquire);
            // We hold the only handle, so reclaiming the Box is sound.
            // Forget `buf` so its Drop doesn't decrement/free a second time.
            let mut inner_box = unsafe { Box::from_raw(buf.inner) };
            mem::forget(buf);
            Ok(mem::take(&mut inner_box.vec))
        } else {
            Err(buf)
        }
    }
}
// SAFETY: access to the shared `Inner` is coordinated through its atomic
// counters, so handles may be moved and shared across threads — TODO(review):
// confirm no non-atomic state is reachable without holding an accessor slot.
unsafe impl Send for DivBufShared {}
unsafe impl Sync for DivBufShared {}
impl DivBuf {
    /// Create a `DivBufInaccessible` handle over the same window.
    ///
    /// The new handle is counted as a sharer (like `DivBufShared`), not as
    /// an accessor, so it keeps the storage alive without blocking writers.
    pub fn clone_inaccessible(&self) -> DivBufInaccessible {
        let inner = unsafe { &*self.inner };
        let old = inner.sharers.fetch_add(1, Acquire);
        debug_assert!(old > 0);
        DivBufInaccessible {
            inner: self.inner,
            begin: self.begin,
            len: self.len,
        }
    }
    /// Consume this buffer into an iterator of `size`-byte chunks
    /// (the final chunk may be shorter).
    ///
    /// # Panics
    /// Panics if `size` is zero.
    pub fn into_chunks(self, size: usize) -> Chunks {
        assert!(size != 0);
        Chunks::new(self, size)
    }
    /// Returns true if this view is zero bytes long.
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
    /// Length of this view, in bytes.
    pub fn len(&self) -> usize {
        self.len
    }
    /// Return a new `DivBuf` covering `begin..end` of this view.
    ///
    /// # Panics
    /// Panics if the range is inverted or extends past this view.
    pub fn slice(&self, begin: usize, end: usize) -> DivBuf {
        assert!(begin <= end);
        assert!(end <= self.len);
        let inner = unsafe { &*self.inner };
        // Register one more reader in the packed accessor count; a reader
        // (ourselves) is already registered, per the debug_assert.
        let old_accessors = inner.accessors.fetch_add(1, Relaxed);
        debug_assert!(old_accessors & READER_MASK > 0);
        DivBuf {
            inner: self.inner,
            begin: self.begin + begin,
            len: end - begin,
        }
    }
    /// Return a new `DivBuf` covering `begin..` of this view.
    pub fn slice_from(&self, begin: usize) -> DivBuf {
        self.slice(begin, self.len())
    }
    /// Return a new `DivBuf` covering `..end` of this view.
    pub fn slice_to(&self, end: usize) -> DivBuf {
        self.slice(0, end)
    }
    /// Split this view at `at`, keeping the left half in `self` and
    /// returning the right half.
    ///
    /// # Panics
    /// Panics if `at` exceeds this view's length.
    pub fn split_off(&mut self, at: usize) -> DivBuf {
        assert!(at <= self.len, "Can't split past the end");
        let inner = unsafe { &*self.inner };
        // The returned half is a new reader; register it.
        let old_accessors = inner.accessors.fetch_add(1, Relaxed);
        debug_assert!(old_accessors & READER_MASK > 0);
        let right_half = DivBuf {
            inner: self.inner,
            begin: self.begin + at,
            len: self.len - at,
        };
        self.len = at;
        right_half
    }
    /// Split this view at `at`, keeping the right half in `self` and
    /// returning the left half.
    ///
    /// # Panics
    /// Panics if `at` exceeds this view's length.
    pub fn split_to(&mut self, at: usize) -> DivBuf {
        assert!(at <= self.len, "Can't split past the end");
        let inner = unsafe { &*self.inner };
        // The returned half is a new reader; register it.
        let old_accessors = inner.accessors.fetch_add(1, Relaxed);
        debug_assert!(old_accessors & READER_MASK > 0);
        let left_half = DivBuf {
            inner: self.inner,
            begin: self.begin,
            len: at,
        };
        self.begin += at;
        self.len -= at;
        left_half
    }
    /// Try to upgrade to a writable `DivBufMut`, consuming `self`.
    ///
    /// # Errors
    /// Succeeds only when this is the one and only accessor; on failure the
    /// original `DivBuf` is returned as the error.
    pub fn try_mut(self) -> Result<DivBufMut, DivBuf> {
        let inner = unsafe { &*self.inner };
        // Atomically swap "exactly one reader" for "exactly one writer".
        if inner
            .accessors
            .compare_exchange(1, ONE_WRITER, AcqRel, Acquire)
            .is_ok()
        {
            let mutable_self = Ok(DivBufMut {
                inner: self.inner,
                begin: self.begin,
                len: self.len,
            });
            // The CAS already consumed our reader registration; skip Drop
            // so it isn't decremented a second time.
            mem::forget(self);
            mutable_self
        } else {
            Err(self)
        }
    }
    /// Absorb `other` into this view if it shares the same storage and is
    /// directly contiguous; otherwise return it unchanged as the error.
    pub fn unsplit(&mut self, other: DivBuf) -> Result<(), DivBuf> {
        if self.inner != other.inner || (self.begin + self.len) != other.begin {
            Err(other)
        } else {
            // `other` is dropped here, releasing its reader registration.
            self.len += other.len;
            Ok(())
        }
    }
}
impl AsRef<[u8]> for DivBuf {
    /// Expose only this view's window of the shared storage.
    fn as_ref(&self) -> &[u8] {
        let inner = unsafe { &*self.inner };
        &inner.vec[self.begin..self.begin + self.len]
    }
}
impl Borrow<[u8]> for DivBuf {
    /// Borrow the bytes of this view's window.
    ///
    /// BUG FIX: this previously returned the WHOLE backing `vec`, not the
    /// `begin..begin + len` window.  That disagreed with `AsRef`/`Deref` and
    /// violated the `Borrow` contract, which requires `Eq`, `Ord` and `Hash`
    /// (all of which operate on the window via `as_ref`) to be consistent
    /// between the owned value and the borrowed form.
    fn borrow(&self) -> &[u8] {
        self.as_ref()
    }
}
impl hash::Hash for DivBuf {
    /// Hash exactly the bytes visible through this view.
    fn hash<H>(&self, state: &mut H)
    where
        H: hash::Hasher,
    {
        let bytes: &[u8] = self.as_ref();
        bytes.hash(state);
    }
}
impl ops::Deref for DivBuf {
    type Target = [u8];

    /// Dereference to this view's window of the shared storage.
    fn deref(&self) -> &[u8] {
        let inner = unsafe { &*self.inner };
        &inner.vec[self.begin..self.begin + self.len]
    }
}
impl Clone for DivBuf {
    /// A clone is simply a full-length sub-slice of the same window
    /// (which registers another reader on the shared storage).
    fn clone(&self) -> DivBuf {
        self.slice(0, self.len)
    }
}
impl Drop for DivBuf {
    fn drop(&mut self) {
        let inner = unsafe { &*self.inner };
        // Release this reader's slot.  Free the shared state only if we were
        // the last accessor AND no sharers remain; otherwise the last
        // remaining handle's Drop frees it.
        if inner.accessors.fetch_sub(1, Release) == 1
            && inner.sharers.load(Relaxed) == 0
        {
            // Synchronize with other handles' Release decrements before
            // deallocating.
            atomic::fence(Acquire);
            unsafe {
                drop(Box::from_raw(self.inner));
            }
        }
    }
}
impl Eq for DivBuf {}
impl From<DivBufMut> for DivBuf {
fn from(src: DivBufMut) -> DivBuf {
src.freeze()
}
}
impl Ord for DivBuf {
fn cmp(&self, other: &DivBuf) -> cmp::Ordering {
self.as_ref().cmp(other.as_ref())
}
}
impl PartialEq for DivBuf {
fn eq(&self, other: &DivBuf) -> bool {
self.as_ref() == other.as_ref()
}
}
impl PartialEq<[u8]> for DivBuf {
fn eq(&self, other: &[u8]) -> bool {
self.as_ref() == other
}
}
impl PartialOrd for DivBuf {
fn partial_cmp(&self, other: &DivBuf) -> Option<cmp::Ordering> {
Some(self.cmp(other))
}
}
// SAFETY: a DivBuf only reads the shared storage, and its lifetime is
// tracked by the atomic accessor count — TODO(review): confirm no unsynced
// mutation is reachable while readers are registered.
unsafe impl Send for DivBuf {}
unsafe impl Sync for DivBuf {}
impl DivBufMut {
    /// Create a `DivBufInaccessible` handle over the same window.
    ///
    /// The new handle is counted as a sharer, not as an accessor, so it
    /// keeps the storage alive without itself granting access.
    pub fn clone_inaccessible(&self) -> DivBufInaccessible {
        let inner = unsafe { &*self.inner };
        let old = inner.sharers.fetch_add(1, Acquire);
        debug_assert!(old > 0);
        DivBufInaccessible {
            inner: self.inner,
            begin: self.begin,
            len: self.len,
        }
    }
    // Append `iter`'s bytes to the backing vec and grow this view by the
    // number of bytes actually appended.  Callers must ensure this view is
    // terminal (ends at the vec's end) before calling.
    fn extend_unchecked<'a, T>(&mut self, iter: T)
    where
        T: IntoIterator<Item = &'a u8>,
    {
        let inner = unsafe { &mut *self.inner };
        let oldlen = inner.vec.len();
        inner.vec.extend(iter);
        self.len += inner.vec.len() - oldlen;
    }
    /// Convert this writable view into a read-only `DivBuf`.
    pub fn freeze(self) -> DivBuf {
        let inner = unsafe { &*self.inner };
        // Register a reader now; our writer registration is released when
        // `self` is dropped at the end of this function.
        let old_accessors = inner.accessors.fetch_add(1, Relaxed);
        debug_assert!(old_accessors >> WRITER_SHIFT > 0);
        DivBuf {
            inner: self.inner,
            begin: self.begin,
            len: self.len,
        }
    }
    /// Consume this buffer into an iterator of `size`-byte writable chunks
    /// (the final chunk may be shorter).
    ///
    /// # Panics
    /// Panics if `size` is zero.
    pub fn into_chunks(self, size: usize) -> ChunksMut {
        assert!(size != 0);
        ChunksMut::new(self, size)
    }
    /// Returns true if this view is zero bytes long.
    pub fn is_empty(&self) -> bool {
        self.len == 0
    }
    // True iff this view's window ends exactly at the end of the backing
    // vec, i.e. it is safe to grow/shrink the vec through this view.
    fn is_terminal(&self) -> bool {
        let inner = unsafe { &*self.inner };
        let oldlen = inner.vec.len();
        self.begin + self.len == oldlen
    }
    /// Length of this view, in bytes.
    pub fn len(&self) -> usize {
        self.len
    }
    /// Reserve capacity for at least `additional` more bytes.
    ///
    /// # Panics
    /// Panics if this view is not terminal.
    pub fn reserve(&mut self, additional: usize) {
        assert!(
            self.is_terminal(),
            "Can't reserve from the middle of a buffer"
        );
        let inner = unsafe { &mut *self.inner };
        inner.vec.reserve(additional)
    }
    /// Split this view at `at`, keeping the left half in `self` and
    /// returning the right half.
    ///
    /// # Panics
    /// Panics if `at` exceeds this view's length.
    pub fn split_off(&mut self, at: usize) -> DivBufMut {
        assert!(at <= self.len, "Can't split past the end");
        let inner = unsafe { &*self.inner };
        // The returned half is an additional writer; register it in the
        // high half of the packed count.
        let old_accessors = inner.accessors.fetch_add(ONE_WRITER, Relaxed);
        debug_assert!(old_accessors >> WRITER_SHIFT > 0);
        let right_half = DivBufMut {
            inner: self.inner,
            begin: self.begin + at,
            len: self.len - at,
        };
        self.len = at;
        right_half
    }
    /// Split this view at `at`, keeping the right half in `self` and
    /// returning the left half.
    ///
    /// # Panics
    /// Panics if `at` exceeds this view's length.
    pub fn split_to(&mut self, at: usize) -> DivBufMut {
        assert!(at <= self.len, "Can't split past the end");
        let inner = unsafe { &*self.inner };
        // The returned half is an additional writer; register it.
        let old_accessors = inner.accessors.fetch_add(ONE_WRITER, Relaxed);
        debug_assert!(old_accessors >> WRITER_SHIFT > 0);
        let left_half = DivBufMut {
            inner: self.inner,
            begin: self.begin,
            len: at,
        };
        self.begin += at;
        self.len -= at;
        left_half
    }
    /// Append `iter`'s bytes to this view.
    ///
    /// # Errors
    /// Fails unless this view is terminal (growing from the middle would
    /// clobber other views).
    pub fn try_extend<'a, T>(&mut self, iter: T) -> Result<(), Error>
    where
        T: IntoIterator<Item = &'a u8>,
    {
        if self.is_terminal() {
            self.extend_unchecked(iter);
            Ok(())
        } else {
            Err(Error("Can't extend into the middle of a buffer"))
        }
    }
    /// Resize this view to `new_len` bytes, filling new space with `value`.
    ///
    /// # Errors
    /// Fails unless this view is terminal.
    pub fn try_resize(
        &mut self,
        new_len: usize,
        value: u8,
    ) -> Result<(), Error> {
        if self.is_terminal() {
            let inner = unsafe { &mut *self.inner };
            // The backing vec is resized to this view's offset plus the
            // requested view length.
            inner.vec.resize(new_len + self.begin, value);
            self.len = new_len;
            Ok(())
        } else {
            Err(Error("Can't resize from a non-terminal buffer"))
        }
    }
    /// Truncate this view to at most `len` bytes.
    ///
    /// # Errors
    /// Fails unless this view is terminal.
    pub fn try_truncate(&mut self, len: usize) -> Result<(), Error> {
        if self.is_terminal() {
            let inner = unsafe { &mut *self.inner };
            inner.vec.truncate(self.begin + len);
            self.len = cmp::min(self.len, len);
            Ok(())
        } else {
            Err(Error("Can't truncate a non-terminal DivBufMut"))
        }
    }
    /// Absorb `other` into this view if it shares the same storage and is
    /// directly contiguous; otherwise return it unchanged as the error.
    pub fn unsplit(&mut self, other: DivBufMut) -> Result<(), DivBufMut> {
        if self.inner != other.inner || (self.begin + self.len) != other.begin {
            Err(other)
        } else {
            // `other` is dropped here, releasing its writer registration.
            self.len += other.len;
            Ok(())
        }
    }
}
impl AsRef<[u8]> for DivBufMut {
    /// Expose only this view's window of the shared storage.
    fn as_ref(&self) -> &[u8] {
        let inner = unsafe { &*self.inner };
        &inner.vec[self.begin..self.begin + self.len]
    }
}
impl Borrow<[u8]> for DivBufMut {
    /// Borrow the bytes of this view's window.
    ///
    /// BUG FIX: this previously returned the WHOLE backing `vec`, not the
    /// `begin..begin + len` window.  That disagreed with `AsRef`/`Deref` and
    /// violated the `Borrow` contract, which requires `Eq`, `Ord` and `Hash`
    /// (all of which operate on the window via `as_ref`) to be consistent
    /// between the owned value and the borrowed form.
    fn borrow(&self) -> &[u8] {
        self.as_ref()
    }
}
impl BorrowMut<[u8]> for DivBufMut {
    /// Mutably borrow the bytes of this view's window.
    ///
    /// BUG FIX: this previously returned the WHOLE backing `vec`.  After a
    /// `split_off`/`split_to`, each `DivBufMut` owns only its own window, so
    /// handing out the full vec would let two writers alias (and mutate)
    /// each other's regions.  Restrict the borrow to `begin..begin + len`,
    /// consistent with `DerefMut`.
    fn borrow_mut(&mut self) -> &mut [u8] {
        let inner = unsafe { &mut *self.inner };
        &mut inner.vec[self.begin..(self.begin + self.len)]
    }
}
impl ops::Deref for DivBufMut {
    type Target = [u8];

    /// Dereference to this view's window of the shared storage.
    fn deref(&self) -> &[u8] {
        let inner = unsafe { &*self.inner };
        &inner.vec[self.begin..self.begin + self.len]
    }
}
impl ops::DerefMut for DivBufMut {
    /// Mutably dereference to this view's window of the shared storage.
    fn deref_mut(&mut self) -> &mut [u8] {
        let inner = unsafe { &mut *self.inner };
        &mut inner.vec[self.begin..self.begin + self.len]
    }
}
impl Drop for DivBufMut {
    fn drop(&mut self) {
        let inner = unsafe { &*self.inner };
        // Release this writer's slot (one ONE_WRITER increment in the high
        // half of the packed count).  Free the shared state only if we were
        // the last accessor AND no sharers remain.
        if inner.accessors.fetch_sub(ONE_WRITER, Release) == ONE_WRITER
            && inner.sharers.load(Relaxed) == 0
        {
            // Synchronize with other handles' Release decrements before
            // deallocating.
            atomic::fence(Acquire);
            unsafe {
                drop(Box::from_raw(self.inner));
            }
        }
    }
}
impl<'a> Extend<&'a u8> for DivBufMut {
    /// Append bytes to this view.
    ///
    /// # Panics
    /// Panics if this view is not terminal; use `try_extend` for the
    /// fallible variant.
    fn extend<T>(&mut self, iter: T)
    where
        T: IntoIterator<Item = &'a u8>,
    {
        if !self.is_terminal() {
            panic!("Can't extend into the middle of a buffer");
        }
        self.extend_unchecked(iter);
    }
}
impl hash::Hash for DivBufMut {
    /// Hash exactly the bytes visible through this view.
    fn hash<H>(&self, state: &mut H)
    where
        H: hash::Hasher,
    {
        let bytes: &[u8] = self.as_ref();
        bytes.hash(state);
    }
}
impl Eq for DivBufMut {}
impl Ord for DivBufMut {
fn cmp(&self, other: &DivBufMut) -> cmp::Ordering {
self.as_ref().cmp(other.as_ref())
}
}
impl PartialEq for DivBufMut {
fn eq(&self, other: &DivBufMut) -> bool {
self.as_ref() == other.as_ref()
}
}
impl PartialEq<[u8]> for DivBufMut {
fn eq(&self, other: &[u8]) -> bool {
self.as_ref() == other
}
}
impl PartialOrd for DivBufMut {
fn partial_cmp(&self, other: &DivBufMut) -> Option<cmp::Ordering> {
Some(self.cmp(other))
}
}
// SAFETY: a DivBufMut holds exclusive write access to its window, tracked by
// the atomic writer count — TODO(review): confirm split windows never
// overlap, so concurrent DivBufMuts touch disjoint bytes.
unsafe impl Send for DivBufMut {}
unsafe impl Sync for DivBufMut {}
impl io::Write for DivBufMut {
    /// Append `buf`; the whole slice is written or nothing is.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        match self.try_extend(buf) {
            Ok(()) => Ok(buf.len()),
            Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)),
        }
    }
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        match self.try_extend(buf) {
            Ok(()) => Ok(()),
            Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)),
        }
    }
    /// Nothing is buffered, so flushing is a no-op.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
impl DivBufInaccessible {
    /// Try to re-open this handle's window as a read-only `DivBuf`.
    ///
    /// # Errors
    /// Fails if any `DivBufMut` is currently active.
    pub fn try_const(&self) -> Result<DivBuf, Error> {
        let inner = unsafe { &*self.inner };
        // Optimistically register a reader, then back out if any writer
        // (high half of the packed counter) was active.
        if inner.accessors.fetch_add(1, Acquire) >> WRITER_SHIFT != 0 {
            inner.accessors.fetch_sub(1, Relaxed);
            Err(Error("Cannot create a DivBuf when DivBufMuts are active"))
        } else {
            Ok(DivBuf {
                inner: self.inner,
                begin: self.begin,
                len: self.len,
            })
        }
    }
    /// Try to re-open this handle's window as a writable `DivBufMut`.
    ///
    /// # Errors
    /// Fails if any other accessor (reader or writer) is active.
    pub fn try_mut(&self) -> Result<DivBufMut, Error> {
        let inner = unsafe { &*self.inner };
        // Succeeds only when the packed count goes exactly 0 -> ONE_WRITER.
        if inner
            .accessors
            .compare_exchange(0, ONE_WRITER, AcqRel, Acquire)
            .is_ok()
        {
            Ok(DivBufMut {
                inner: self.inner,
                begin: self.begin,
                len: self.len,
            })
        } else {
            Err(Error("Cannot upgrade when DivBufMuts are active"))
        }
    }
}
impl Clone for DivBufInaccessible {
    /// Cloning registers one more sharer on the shared storage.
    fn clone(&self) -> Self {
        let inner = unsafe { &*self.inner };
        let old = inner.sharers.fetch_add(1, Acquire);
        // A sharer (ourselves) must already exist.
        debug_assert!(old > 0);
        DivBufInaccessible {
            inner: self.inner,
            begin: self.begin,
            len: self.len,
        }
    }
}
impl Drop for DivBufInaccessible {
    fn drop(&mut self) {
        let inner = unsafe { &*self.inner };
        // Release this sharer's slot.  Free the shared state only if we were
        // the last sharer AND no accessors remain; otherwise the last
        // remaining handle's Drop frees it.
        if inner.sharers.fetch_sub(1, Release) == 1
            && inner.accessors.load(Relaxed) == 0
        {
            // Synchronize with other handles' Release decrements before
            // deallocating.
            atomic::fence(Acquire);
            unsafe {
                drop(Box::from_raw(self.inner));
            }
        }
    }
}
// SAFETY: a DivBufInaccessible grants no data access at all; it only touches
// the atomic counters, so crossing threads is safe.
unsafe impl Send for DivBufInaccessible {}
unsafe impl Sync for DivBufInaccessible {}