//! An atomically reference-counted shared pointer without weak-reference
//! support, plus several companion types: `UniqueArc` (a uniquely owned,
//! mutable `Arc`), `ThinArc` (a one-word handle to a header-plus-slice
//! allocation), `ArcBorrow` (a borrowed reference that can recover an
//! owning `Arc`), and `ArcUnion` (a pointer-tagged union of two `Arc`
//! types). Static (never-deallocated) allocations are supported via a
//! sentinel refcount, and constructor/destructor logging hooks into
//! Gecko's leak checker when the `gecko_refcount_logging` feature is on.
#![allow(missing_docs)]
#[cfg(feature = "servo")]
extern crate serde;
extern crate stable_deref_trait;
#[cfg(feature = "servo")]
use serde::{Deserialize, Serialize};
use stable_deref_trait::{CloneStableDeref, StableDeref};
use std::alloc::{self, Layout};
use std::borrow;
use std::cmp::Ordering;
use std::convert::From;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::iter::{ExactSizeIterator, Iterator};
use std::marker::PhantomData;
use std::mem::{self, align_of, size_of};
use std::ops::{Deref, DerefMut};
use std::os::raw::c_void;
use std::process;
use std::ptr;
use std::slice;
use std::sync::atomic;
use std::sync::atomic::Ordering::{Acquire, Relaxed, Release};
use std::{isize, usize};
/// A soft limit on the number of references that may be made to an `Arc`:
/// once a clone observes a count above this value, the process aborts.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;
/// Sentinel refcount marking an `Arc` as a static allocation that must
/// never be deallocated (and whose count is never modified).
const STATIC_REFCOUNT: usize = usize::MAX;
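/// An atomically reference-counted shared pointer, analogous to
/// `std::sync::Arc` but without weak-reference support. `#[repr(C)]` and
/// the `NonNull` field guarantee the handle is represented as a single
/// non-null pointer to its `ArcInner<T>` allocation.
///
/// A minimal usage sketch; the `servo_arc` crate name is an assumption
/// (hence `ignore`), everything else is the API defined below:
///
/// ```ignore
/// use servo_arc::Arc;
///
/// let a = Arc::new(vec![1, 2, 3]);
/// let b = a.clone(); // bumps the shared count; no deep copy
/// assert!(Arc::ptr_eq(&a, &b));
/// assert_eq!(b.len(), 3);
/// ```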
#[repr(C)]
pub struct Arc<T: ?Sized> {
p: ptr::NonNull<ArcInner<T>>,
phantom: PhantomData<T>,
}
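/// An `Arc` that is known to be uniquely owned, so it additionally grants
/// mutable access (`DerefMut`) until it is frozen into an ordinary `Arc`
/// with `shareable`.
///
/// A minimal sketch (the `servo_arc` crate name is an assumption):
///
/// ```ignore
/// use servo_arc::UniqueArc;
///
/// let mut u = UniqueArc::new(5);
/// *u += 1; // mutable access while ownership is unique
/// let shared = u.shareable(); // now a plain Arc<i32>
/// assert_eq!(*shared, 6);
/// ```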
pub struct UniqueArc<T: ?Sized>(Arc<T>);
impl<T> UniqueArc<T> {
#[inline]
pub fn new(data: T) -> Self {
UniqueArc(Arc::new(data))
}
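/// Construct an arc whose payload is left uninitialized; initialize it
/// through `DerefMut` and then convert with `UniqueArc::assume_init`.
///
/// A minimal sketch (the `servo_arc` crate name is an assumption):
///
/// ```ignore
/// use servo_arc::UniqueArc;
///
/// let mut u = UniqueArc::<u32>::new_uninit();
/// *u = std::mem::MaybeUninit::new(42);
/// let initialized = unsafe { UniqueArc::assume_init(u) };
/// assert_eq!(*initialized, 42);
/// ```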
#[inline]
pub fn new_uninit() -> UniqueArc<mem::MaybeUninit<T>> {
unsafe {
// Allocate space for the inner value and initialize only the
// refcount; the payload stays uninitialized.
let layout = Layout::new::<ArcInner<mem::MaybeUninit<T>>>();
let ptr = alloc::alloc(layout);
let mut p = ptr::NonNull::new(ptr)
.unwrap_or_else(|| alloc::handle_alloc_error(layout))
.cast::<ArcInner<mem::MaybeUninit<T>>>();
ptr::write(&mut p.as_mut().count, atomic::AtomicUsize::new(1));
#[cfg(feature = "gecko_refcount_logging")]
{
NS_LogCtor(p.as_ptr() as *mut _, b"ServoArc\0".as_ptr() as *const _, 8);
}
UniqueArc(Arc {
p,
phantom: PhantomData,
})
}
}
/// Convert to a shareable `Arc<T>` once we're done mutating it.
#[inline]
pub fn shareable(self) -> Arc<T> {
self.0
}
}
impl<T> UniqueArc<mem::MaybeUninit<T>> {
/// Convert to an initialized `UniqueArc<T>`.
///
/// # Safety
///
/// The caller must guarantee the inner value has been fully initialized;
/// `ManuallyDrop` keeps the old handle from running its destructor while
/// the pointer is re-typed.
#[inline]
pub unsafe fn assume_init(this: Self) -> UniqueArc<T> {
UniqueArc(Arc {
p: mem::ManuallyDrop::new(this).0.p.cast(),
phantom: PhantomData,
})
}
}
impl<T> Deref for UniqueArc<T> {
type Target = T;
fn deref(&self) -> &T {
&*self.0
}
}
impl<T> DerefMut for UniqueArc<T> {
fn deref_mut(&mut self) -> &mut T {
// Safety: this handle is always the unique owner of the allocation,
// so handing out `&mut` cannot alias another reference.
unsafe { &mut (*self.0.ptr()).data }
}
}
unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
/// The heap allocation backing an `Arc<T>`: the refcount followed by the
/// data. `#[repr(C)]` fixes this layout, which `data_offset` and
/// `thin_to_thick` rely on.
#[repr(C)]
struct ArcInner<T: ?Sized> {
count: atomic::AtomicUsize,
data: T,
}
unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}
/// Computes the offset of the data field within `ArcInner<T>`: the size of
/// the refcount header rounded up to `T`'s alignment.
fn data_offset<T>() -> usize {
let size = size_of::<ArcInner<()>>();
let align = align_of::<T>();
// Round `size` up to the next multiple of `align`:
// (size + align - 1) & !(align - 1).
size.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1)
}
impl<T> Arc<T> {
/// Construct an atomically reference-counted wrapper for `data`, with the
/// count initialized to one.
#[inline]
pub fn new(data: T) -> Self {
let ptr = Box::into_raw(Box::new(ArcInner {
count: atomic::AtomicUsize::new(1),
data,
}));
#[cfg(feature = "gecko_refcount_logging")]
unsafe {
NS_LogCtor(ptr as *mut _, b"ServoArc\0".as_ptr() as *const _, 8);
}
unsafe {
Arc {
p: ptr::NonNull::new_unchecked(ptr),
phantom: PhantomData,
}
}
}
/// Construct an intentionally-leaked arc: its "destructor" is logged up
/// front so refcount-logging leak checkers will not report it.
#[inline]
pub fn new_leaked(data: T) -> Self {
let arc = Self::new(data);
arc.mark_as_intentionally_leaked();
arc
}
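/// Convert the `Arc` to a raw pointer to `T`, suitable for FFI. The strong
/// count is left untouched, so the allocation is leaked unless the pointer
/// is later reclaimed with `from_raw`.
///
/// A round-trip sketch (the `servo_arc` crate name is an assumption):
///
/// ```ignore
/// use servo_arc::Arc;
///
/// let raw = Arc::into_raw(Arc::new(7u32));
/// let back = unsafe { Arc::from_raw(raw) }; // reclaims the same allocation
/// assert_eq!(*back, 7);
/// ```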
#[inline]
pub fn into_raw(this: Self) -> *const T {
let ptr = unsafe { &((*this.ptr()).data) as *const _ };
mem::forget(this);
ptr
}
/// Reconstruct an `Arc` from a pointer previously returned by `into_raw`.
///
/// # Safety
///
/// `ptr` must have come from `into_raw`, and each such pointer may be
/// reclaimed at most once.
#[inline]
pub unsafe fn from_raw(ptr: *const T) -> Self {
// Step back from the data field to the start of the ArcInner.
let ptr = (ptr as *const u8).sub(data_offset::<T>());
Arc {
p: ptr::NonNull::new_unchecked(ptr as *mut ArcInner<T>),
phantom: PhantomData,
}
}
/// Like `from_raw`, but also bumps the reference count, so the caller's
/// raw pointer remains valid.
#[inline]
pub unsafe fn from_raw_addrefed(ptr: *const T) -> Self {
let arc = Self::from_raw(ptr);
mem::forget(arc.clone());
arc
}
/// Create a static `Arc<T>` (one that is never deallocated) using the
/// caller-provided allocator. The count is set to `STATIC_REFCOUNT`, which
/// `clone` and `drop` treat as "do not touch".
///
/// # Safety
///
/// `alloc` must return memory valid for writes of
/// `Layout::new::<ArcInner<T>>()` that is never freed.
#[inline]
pub unsafe fn new_static<F>(alloc: F, data: T) -> Arc<T>
where
F: FnOnce(Layout) -> *mut u8,
{
let ptr = alloc(Layout::new::<ArcInner<T>>()) as *mut ArcInner<T>;
let x = ArcInner {
count: atomic::AtomicUsize::new(STATIC_REFCOUNT),
data,
};
ptr::write(ptr, x);
Arc {
p: ptr::NonNull::new_unchecked(ptr),
phantom: PhantomData,
}
}
/// Produce a transient `ArcBorrow` of this `Arc`, which exposes the value
/// without touching the reference count.
#[inline]
pub fn borrow_arc<'a>(&'a self) -> ArcBorrow<'a, T> {
ArcBorrow(&**self)
}
/// Returns the address of the heap allocation, or null if this `Arc` is
/// static (and thus not heap-allocated).
pub fn heap_ptr(&self) -> *const c_void {
if self.inner().count.load(Relaxed) == STATIC_REFCOUNT {
ptr::null()
} else {
self.p.as_ptr() as *const ArcInner<T> as *const c_void
}
}
}
impl<T: ?Sized> Arc<T> {
#[inline]
fn inner(&self) -> &ArcInner<T> {
// Safety: the inner allocation outlives every Arc that points at it.
unsafe { &*self.ptr() }
}
// Report a destructor to the refcount logger (a no-op unless the
// `gecko_refcount_logging` feature is enabled).
#[inline(always)]
fn record_drop(&self) {
#[cfg(feature = "gecko_refcount_logging")]
unsafe {
NS_LogDtor(self.ptr() as *mut _, b"ServoArc\0".as_ptr() as *const _, 8);
}
}
/// Marks this `Arc` as intentionally leaked for the purposes of refcount
/// logging, by reporting its "destructor" now so leak checkers stay quiet.
#[inline(always)]
pub fn mark_as_intentionally_leaked(&self) {
self.record_drop();
}
// The cold deallocation path; `#[inline(never)]` keeps it out of the
// inlined clone/drop fast paths.
#[inline(never)]
unsafe fn drop_slow(&mut self) {
self.record_drop();
let _ = Box::from_raw(self.ptr());
}
/// Pointer equality: do `this` and `other` share an allocation?
#[inline]
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
this.ptr() == other.ptr()
}
fn ptr(&self) -> *mut ArcInner<T> {
self.p.as_ptr()
}
}
#[cfg(feature = "gecko_refcount_logging")]
extern "C" {
fn NS_LogCtor(
aPtr: *mut std::os::raw::c_void,
aTypeName: *const std::os::raw::c_char,
aSize: u32,
);
fn NS_LogDtor(
aPtr: *mut std::os::raw::c_void,
aTypeName: *const std::os::raw::c_char,
aSize: u32,
);
}
impl<T: ?Sized> Clone for Arc<T> {
#[inline]
fn clone(&self) -> Self {
// Static arcs never adjust their count. For the rest, a Relaxed
// increment suffices (as in std's Arc): a new reference can only be
// produced from an existing one, which already keeps the allocation
// alive.
if self.inner().count.load(Relaxed) != STATIC_REFCOUNT {
let old_size = self.inner().count.fetch_add(1, Relaxed);
// Guard against refcount overflow (e.g. via mem::forget loops):
// abort rather than risk a use-after-free.
if old_size > MAX_REFCOUNT {
process::abort();
}
}
unsafe {
Arc {
p: ptr::NonNull::new_unchecked(self.ptr()),
phantom: PhantomData,
}
}
}
}
impl<T: ?Sized> Deref for Arc<T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
&self.inner().data
}
}
impl<T: Clone> Arc<T> {
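/// Makes a mutable reference to the inner value, cloning it first
/// (copy-on-write) if the `Arc` is shared.
///
/// A minimal sketch (the `servo_arc` crate name is an assumption):
///
/// ```ignore
/// use servo_arc::Arc;
///
/// let mut x = Arc::new(10);
/// let y = x.clone();
/// *Arc::make_mut(&mut x) += 1; // x was shared: clones, then mutates
/// assert_eq!((*x, *y), (11, 10));
/// ```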
#[inline]
pub fn make_mut(this: &mut Self) -> &mut T {
if !this.is_unique() {
// Another handle exists, so clone the data first (copy-on-write).
*this = Arc::new((**this).clone());
}
unsafe {
// Safety: `this` is now guaranteed unique, so a `&mut` to the data
// cannot alias.
&mut (*this.ptr()).data
}
}
}
impl<T: ?Sized> Arc<T> {
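/// Returns a mutable reference to the inner value if this `Arc` is the
/// unique owner, and `None` otherwise.
///
/// A minimal sketch (the `servo_arc` crate name is an assumption):
///
/// ```ignore
/// use servo_arc::Arc;
///
/// let mut x = Arc::new(3);
/// *Arc::get_mut(&mut x).unwrap() = 4; // unique, so we get Some
/// let y = x.clone();
/// assert!(Arc::get_mut(&mut x).is_none()); // shared now
/// ```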
#[inline]
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
if this.is_unique() {
unsafe {
Some(&mut (*this.ptr()).data)
}
} else {
None
}
}
/// Whether this `Arc` is a static (never-deallocated) allocation.
#[inline]
pub fn is_static(&self) -> bool {
self.inner().count.load(Relaxed) == STATIC_REFCOUNT
}
/// Whether this is the sole reference to the allocation. The Acquire
/// load synchronizes with the Release decrements performed by other
/// threads' drops, as in `std::sync::Arc::get_mut`.
#[inline]
pub fn is_unique(&self) -> bool {
self.inner().count.load(Acquire) == 1
}
}
impl<T: ?Sized> Drop for Arc<T> {
#[inline]
fn drop(&mut self) {
// Static arcs are never deallocated, and their count is never touched.
if self.is_static() {
return;
}
// Release publishes our writes to whichever thread ends up freeing the
// allocation; if we weren't the last reference, we're done.
if self.inner().count.fetch_sub(1, Release) != 1 {
return;
}
// This Acquire load pairs with the Release decrements above, so all
// writes to the data happen-before the deallocation below (the same
// fence dance as std::sync::Arc).
self.inner().count.load(Acquire);
unsafe {
self.drop_slow();
}
}
}
impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
fn eq(&self, other: &Arc<T>) -> bool {
Self::ptr_eq(self, other) || *(*self) == *(*other)
}
fn ne(&self, other: &Arc<T>) -> bool {
!Self::ptr_eq(self, other) && *(*self) != *(*other)
}
}
impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
fn lt(&self, other: &Arc<T>) -> bool {
*(*self) < *(*other)
}
fn le(&self, other: &Arc<T>) -> bool {
*(*self) <= *(*other)
}
fn gt(&self, other: &Arc<T>) -> bool {
*(*self) > *(*other)
}
fn ge(&self, other: &Arc<T>) -> bool {
*(*self) >= *(*other)
}
}
impl<T: ?Sized + Ord> Ord for Arc<T> {
fn cmp(&self, other: &Arc<T>) -> Ordering {
(**self).cmp(&**other)
}
}
impl<T: ?Sized + Eq> Eq for Arc<T> {}
impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
impl<T: ?Sized> fmt::Pointer for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Pointer::fmt(&self.ptr(), f)
}
}
impl<T: Default> Default for Arc<T> {
fn default() -> Arc<T> {
Arc::new(Default::default())
}
}
impl<T: ?Sized + Hash> Hash for Arc<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state)
}
}
impl<T> From<T> for Arc<T> {
#[inline]
fn from(t: T) -> Self {
Arc::new(t)
}
}
impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
#[inline]
fn borrow(&self) -> &T {
&**self
}
}
impl<T: ?Sized> AsRef<T> for Arc<T> {
#[inline]
fn as_ref(&self) -> &T {
&**self
}
}
unsafe impl<T: ?Sized> StableDeref for Arc<T> {}
unsafe impl<T: ?Sized> CloneStableDeref for Arc<T> {}
#[cfg(feature = "servo")]
impl<'de, T: Deserialize<'de>> Deserialize<'de> for Arc<T> {
fn deserialize<D>(deserializer: D) -> Result<Arc<T>, D::Error>
where
D: ::serde::de::Deserializer<'de>,
{
T::deserialize(deserializer).map(Arc::new)
}
}
#[cfg(feature = "servo")]
impl<T: Serialize> Serialize for Arc<T> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: ::serde::ser::Serializer,
{
(**self).serialize(serializer)
}
}
/// A header and a dynamically sized slice stored inline in a single
/// allocation. `#[repr(C)]` keeps the header first, so the slice payload
/// sits at a predictable offset.
#[derive(Debug, Eq, PartialEq, PartialOrd)]
#[repr(C)]
pub struct HeaderSlice<H, T: ?Sized> {
pub header: H,
pub slice: T,
}
/// `ceil(dividend / divisor)` in integer arithmetic.
#[inline(always)]
fn divide_rounding_up(dividend: usize, divisor: usize) -> usize {
(dividend + divisor - 1) / divisor
}
impl<H, T> Arc<HeaderSlice<H, [T]>> {
/// Create an `Arc<HeaderSlice<H, [T]>>` in a single allocation obtained
/// from the caller-provided allocator, populating the slice from `items`.
#[inline]
fn from_header_and_iter_alloc<F, I>(
alloc: F,
header: H,
mut items: I,
num_items: usize,
is_static: bool,
) -> Self
where
F: FnOnce(Layout) -> *mut u8,
I: Iterator<Item = T>,
{
assert_ne!(size_of::<T>(), 0, "Need to think about ZST");
let inner_align = align_of::<ArcInner<HeaderSlice<H, [T; 0]>>>();
debug_assert!(inner_align >= align_of::<T>());
// Compute the size of the real ArcInner by synthesizing a fat reference
// with the right slice length and asking `size_of_val`. The fake data
// pointer only needs to be suitably aligned, never dereferenced for data.
let size = {
let fake_slice_ptr = inner_align as *const T;
let fake_slice = unsafe { slice::from_raw_parts(fake_slice_ptr, num_items) };
let fake_ptr = fake_slice as *const [T] as *const ArcInner<HeaderSlice<H, [T]>>;
let fake_ref: &ArcInner<HeaderSlice<H, [T]>> = unsafe { &*fake_ptr };
mem::size_of_val(fake_ref)
};
let ptr: *mut ArcInner<HeaderSlice<H, [T]>>;
unsafe {
// Round the alignment up to word or u64 granularity, so the buffer
// can be modeled as a slice of words (see `allocate_buffer`).
let layout = if inner_align <= align_of::<usize>() {
Layout::from_size_align_unchecked(size, align_of::<usize>())
} else if inner_align <= align_of::<u64>() {
Layout::from_size_align_unchecked(size, align_of::<u64>())
} else {
panic!("Over-aligned type not handled");
};
let buffer = alloc(layout);
// Synthesize the fat pointer to the new allocation the same way the
// size was computed above.
let fake_slice: &mut [T] = slice::from_raw_parts_mut(buffer as *mut T, num_items);
ptr = fake_slice as *mut [T] as *mut ArcInner<HeaderSlice<H, [T]>>;
// The allocation is uninitialized, so write the refcount, the header,
// and then the items one field at a time.
let count = if is_static {
atomic::AtomicUsize::new(STATIC_REFCOUNT)
} else {
atomic::AtomicUsize::new(1)
};
ptr::write(&mut ((*ptr).count), count);
ptr::write(&mut ((*ptr).data.header), header);
if num_items != 0 {
let mut current: *mut T = &mut (*ptr).data.slice[0];
for _ in 0..num_items {
ptr::write(
current,
items
.next()
.expect("ExactSizeIterator over-reported length"),
);
current = current.offset(1);
}
// We should have filled the buffer exactly, modulo trailing padding
// smaller than the alignment.
debug_assert!(
(buffer.add(size) as usize - current as *mut u8 as usize) < inner_align
);
}
assert!(
items.next().is_none(),
"ExactSizeIterator under-reported length"
);
}
#[cfg(feature = "gecko_refcount_logging")]
unsafe {
if !is_static {
NS_LogCtor(ptr as *mut _, b"ServoArc\0".as_ptr() as *const _, 8);
}
}
// Return the fat Arc.
assert_eq!(
size_of::<Self>(),
size_of::<usize>() * 2,
"The Arc will be fat"
);
unsafe {
Arc {
p: ptr::NonNull::new_unchecked(ptr),
phantom: PhantomData,
}
}
}
/// Creates an `Arc` for a `HeaderSlice` using the given header and an
/// iterator that yields exactly `num_items` items; useful when the
/// iterator is not an `ExactSizeIterator`.
#[inline]
pub fn from_header_and_iter_with_size<I>(header: H, items: I, num_items: usize) -> Self
where
I: Iterator<Item = T>,
{
Arc::from_header_and_iter_alloc(
|layout| {
// Pick the word type whose alignment matches the layout chosen
// in `from_header_and_iter_alloc`.
let align = layout.align();
unsafe {
if align == mem::align_of::<usize>() {
Self::allocate_buffer::<usize>(layout.size())
} else {
assert_eq!(align, mem::align_of::<u64>());
Self::allocate_buffer::<u64>(layout.size())
}
}
},
header,
items,
num_items,
false,
)
}
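/// Creates an `Arc` for a `HeaderSlice` using the given header and an
/// `ExactSizeIterator` for the slice.
///
/// A minimal sketch (the `servo_arc` crate name is an assumption):
///
/// ```ignore
/// use servo_arc::{Arc, HeaderSlice};
///
/// let a: Arc<HeaderSlice<&str, [u32]>> =
///     Arc::from_header_and_iter("hdr", 0..4u32);
/// assert_eq!(a.header, "hdr");
/// assert_eq!(a.slice.len(), 4);
/// ```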
#[inline]
pub fn from_header_and_iter<I>(header: H, items: I) -> Self
where
I: Iterator<Item = T> + ExactSizeIterator,
{
let len = items.len();
Self::from_header_and_iter_with_size(header, items, len)
}
// Allocate `size` bytes rounded up to whole words of `W` as a boxed slice,
// where `W` is a word type matching the alignment requested by the layout.
#[inline]
unsafe fn allocate_buffer<W>(size: usize) -> *mut u8 {
let words_to_allocate = divide_rounding_up(size, mem::size_of::<W>());
let mut vec = Vec::<W>::with_capacity(words_to_allocate);
vec.set_len(words_to_allocate);
Box::into_raw(vec.into_boxed_slice()) as *mut W as *mut u8
}
}
/// A header accompanied by the length of its slice. `length` is private so
/// it always matches the real slice length, which `ThinArc` depends on.
#[derive(Debug, Eq, PartialEq, PartialOrd)]
#[repr(C)]
pub struct HeaderWithLength<H> {
pub header: H,
length: usize,
}
impl<H> HeaderWithLength<H> {
/// Creates a new `HeaderWithLength`.
pub fn new(header: H, length: usize) -> Self {
HeaderWithLength { header, length }
}
}
type HeaderSliceWithLength<H, T> = HeaderSlice<HeaderWithLength<H>, T>;
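/// A "thin" `Arc` to a header-plus-slice allocation: the slice length lives
/// inside the allocation (via `HeaderWithLength`), so the handle itself is
/// a single word instead of a fat pointer.
///
/// A minimal sketch (the `servo_arc` crate name is an assumption):
///
/// ```ignore
/// use servo_arc::ThinArc;
///
/// let a = ThinArc::from_header_and_iter(17u32, vec![1u8, 2, 3].into_iter());
/// assert_eq!(a.header.header, 17);
/// assert_eq!(a.slice, [1, 2, 3]);
/// ```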
#[repr(C)]
pub struct ThinArc<H, T> {
ptr: ptr::NonNull<ArcInner<HeaderSliceWithLength<H, [T; 0]>>>,
phantom: PhantomData<(H, T)>,
}
impl<H: fmt::Debug, T: fmt::Debug> fmt::Debug for ThinArc<H, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self.deref(), f)
}
}
unsafe impl<H: Sync + Send, T: Sync + Send> Send for ThinArc<H, T> {}
unsafe impl<H: Sync + Send, T: Sync + Send> Sync for ThinArc<H, T> {}
// Reconstruct the fat pointer from a thin one by reading the slice length
// out of the header stored in the allocation.
fn thin_to_thick<H, T>(
thin: *mut ArcInner<HeaderSliceWithLength<H, [T; 0]>>,
) -> *mut ArcInner<HeaderSliceWithLength<H, [T]>> {
let len = unsafe { (*thin).data.header.length };
// Synthesize a fat pointer with the right length metadata; only the
// metadata of this "slice" is meaningful, it is never read as data.
let fake_slice: *mut [T] = unsafe { slice::from_raw_parts_mut(thin as *mut T, len) };
fake_slice as *mut ArcInner<HeaderSliceWithLength<H, [T]>>
}
impl<H, T> ThinArc<H, T> {
/// Temporarily converts `self` into a bonafide `Arc` and exposes it to the
/// provided callback. `ManuallyDrop` ensures the transient `Arc` never runs
/// `Drop`, so the reference count is left untouched.
#[inline]
pub fn with_arc<F, U>(&self, f: F) -> U
where
F: FnOnce(&Arc<HeaderSliceWithLength<H, [T]>>) -> U,
{
let transient = unsafe {
mem::ManuallyDrop::new(Arc {
p: ptr::NonNull::new_unchecked(thin_to_thick(self.ptr.as_ptr())),
phantom: PhantomData,
})
};
f(&transient)
}
/// Creates a `ThinArc` for a `HeaderSlice` using the given header and an
/// `ExactSizeIterator` for the slice.
pub fn from_header_and_iter<I>(header: H, items: I) -> Self
where
I: Iterator<Item = T> + ExactSizeIterator,
{
let header = HeaderWithLength::new(header, items.len());
Arc::into_thin(Arc::from_header_and_iter(header, items))
}
/// Like `from_header_and_iter`, but placing the allocation in caller-
/// provided static memory; see `Arc::new_static` for the contract on
/// `alloc`.
pub unsafe fn static_from_header_and_iter<F, I>(alloc: F, header: H, items: I) -> Self
where
F: FnOnce(Layout) -> *mut u8,
I: Iterator<Item = T> + ExactSizeIterator,
{
let len = items.len();
let header = HeaderWithLength::new(header, len);
Arc::into_thin(Arc::from_header_and_iter_alloc(
alloc, header, items, len, true,
))
}
/// Returns the address of the backing allocation.
#[inline]
pub fn ptr(&self) -> *const c_void {
self.ptr.as_ptr() as *const c_void
}
/// Returns the address of the heap allocation, or null if this `ThinArc`
/// is static.
#[inline]
pub fn heap_ptr(&self) -> *const c_void {
let is_static =
ThinArc::with_arc(self, |a| a.inner().count.load(Relaxed) == STATIC_REFCOUNT);
if is_static {
ptr::null()
} else {
self.ptr()
}
}
}
impl<H, T> Deref for ThinArc<H, T> {
type Target = HeaderSliceWithLength<H, [T]>;
#[inline]
fn deref(&self) -> &Self::Target {
unsafe { &(*thin_to_thick(self.ptr.as_ptr())).data }
}
}
impl<H, T> Clone for ThinArc<H, T> {
#[inline]
fn clone(&self) -> Self {
// Clone through a transient fat Arc, then re-thin the result.
ThinArc::with_arc(self, |a| Arc::into_thin(a.clone()))
}
}
impl<H, T> Drop for ThinArc<H, T> {
#[inline]
fn drop(&mut self) {
// Convert back into a fat Arc and let its Drop do the bookkeeping.
let _ = Arc::from_thin(ThinArc {
ptr: self.ptr,
phantom: PhantomData,
});
}
}
impl<H, T> Arc<HeaderSliceWithLength<H, [T]>> {
/// Converts an `Arc` into a `ThinArc`. This consumes the `Arc`, so the
/// refcount is not modified.
#[inline]
pub fn into_thin(a: Self) -> ThinArc<H, T> {
assert_eq!(
a.header.length,
a.slice.len(),
"Length needs to be correct for ThinArc to work"
);
let fat_ptr: *mut ArcInner<HeaderSliceWithLength<H, [T]>> = a.ptr();
mem::forget(a);
// Discard the length metadata from the fat pointer; it can be
// recovered from the header (see `thin_to_thick`).
let thin_ptr = fat_ptr as *mut [usize] as *mut usize;
ThinArc {
ptr: unsafe {
ptr::NonNull::new_unchecked(
thin_ptr as *mut ArcInner<HeaderSliceWithLength<H, [T; 0]>>,
)
},
phantom: PhantomData,
}
}
/// Converts a `ThinArc` into an `Arc`. This consumes the `ThinArc`, so
/// the refcount is not modified.
#[inline]
pub fn from_thin(a: ThinArc<H, T>) -> Self {
let ptr = thin_to_thick(a.ptr.as_ptr());
mem::forget(a);
unsafe {
Arc {
p: ptr::NonNull::new_unchecked(ptr),
phantom: PhantomData,
}
}
}
}
impl<H, T> UniqueArc<HeaderSliceWithLength<H, [T]>> {
/// Like `Arc::from_header_and_iter`, but yielding a uniquely-owned handle.
#[inline]
pub fn from_header_and_iter<I>(header: HeaderWithLength<H>, items: I) -> Self
where
I: Iterator<Item = T> + ExactSizeIterator,
{
Self(Arc::from_header_and_iter(header, items))
}
/// Like `Arc::from_header_and_iter_with_size`, but yielding a
/// uniquely-owned handle.
#[inline]
pub fn from_header_and_iter_with_size<I>(
header: HeaderWithLength<H>,
items: I,
num_items: usize,
) -> Self
where
I: Iterator<Item = T>,
{
Self(Arc::from_header_and_iter_with_size(
header, items, num_items,
))
}
/// Mutable access to the header (sound because ownership is unique).
pub fn header_mut(&mut self) -> &mut H {
unsafe { &mut (*self.0.ptr()).data.header.header }
}
/// Mutable access to the slice (sound because ownership is unique).
pub fn data_mut(&mut self) -> &mut [T] {
unsafe { &mut (*self.0.ptr()).data.slice }
}
/// Freeze into a shareable `ThinArc`.
pub fn shareable_thin(self) -> ThinArc<H, T> {
Arc::into_thin(self.0)
}
}
impl<H: PartialEq, T: PartialEq> PartialEq for ThinArc<H, T> {
#[inline]
fn eq(&self, other: &ThinArc<H, T>) -> bool {
ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| *a == *b))
}
}
impl<H: Eq, T: Eq> Eq for ThinArc<H, T> {}
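/// A borrow of an `Arc`'s contents that remembers the value is refcounted:
/// it can be passed around without touching the count, and an owning `Arc`
/// can be recovered with `clone_arc`.
///
/// A minimal sketch (the `servo_arc` crate name is an assumption):
///
/// ```ignore
/// use servo_arc::Arc;
///
/// let a = Arc::new(String::from("hi"));
/// let b = a.borrow_arc(); // free: no refcount traffic
/// assert_eq!(*b, "hi");
/// let owned = b.clone_arc(); // bumps the count
/// assert!(Arc::ptr_eq(&a, &owned));
/// ```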
#[derive(Debug, Eq, PartialEq)]
pub struct ArcBorrow<'a, T: 'a>(&'a T);
impl<'a, T> Copy for ArcBorrow<'a, T> {}
impl<'a, T> Clone for ArcBorrow<'a, T> {
#[inline]
fn clone(&self) -> Self {
*self
}
}
impl<'a, T> ArcBorrow<'a, T> {
/// Clone the underlying `Arc`, bumping the refcount: `from_raw`
/// reconstructs the owning handle, and forgetting its clone accounts for
/// the reference we hand back.
#[inline]
pub fn clone_arc(&self) -> Arc<T> {
let arc = unsafe { Arc::from_raw(self.0) };
mem::forget(arc.clone());
arc
}
/// Wrap a plain reference as an `ArcBorrow`.
///
/// # Safety
///
/// `r` must point at the data field of a live `ArcInner<T>` (i.e. it must
/// have come from an `Arc`), or `clone_arc`/`with_arc` will walk off into
/// an unrelated allocation.
#[inline]
pub unsafe fn from_ref(r: &'a T) -> Self {
ArcBorrow(r)
}
/// Pointer equality: do the two borrows refer to the same allocation?
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
this.0 as *const T == other.0 as *const T
}
/// Temporarily converts `self` into a bonafide `Arc` and exposes it to the
/// callback without touching the refcount (`ManuallyDrop` suppresses the
/// transient's destructor).
#[inline]
pub fn with_arc<F, U>(&self, f: F) -> U
where
F: FnOnce(&Arc<T>) -> U,
T: 'static,
{
let transient = unsafe { mem::ManuallyDrop::new(Arc::from_raw(self.0)) };
f(&transient)
}
/// Similar to `Deref`, but with the lifetime of the borrow rather than of
/// `self`.
#[inline]
pub fn get(&self) -> &'a T {
self.0
}
}
impl<'a, T> Deref for ArcBorrow<'a, T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
self.0
}
}
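/// A tagged union that can hold either an `Arc<A>` or an `Arc<B>` in a
/// single pointer-sized field, using the low bit of the pointer as the
/// discriminant (so the pointees must be at least 2-byte aligned).
///
/// A minimal sketch (the `servo_arc` crate name is an assumption):
///
/// ```ignore
/// use servo_arc::{Arc, ArcUnion, ArcUnionBorrow};
///
/// let u: ArcUnion<u32, String> = ArcUnion::from_first(Arc::new(1));
/// assert!(u.is_first());
/// match u.borrow() {
///     ArcUnionBorrow::First(x) => assert_eq!(*x, 1),
///     ArcUnionBorrow::Second(_) => unreachable!(),
/// }
/// ```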
pub struct ArcUnion<A, B> {
p: ptr::NonNull<()>,
phantom_a: PhantomData<A>,
phantom_b: PhantomData<B>,
}
unsafe impl<A: Sync + Send, B: Send + Sync> Send for ArcUnion<A, B> {}
unsafe impl<A: Sync + Send, B: Send + Sync> Sync for ArcUnion<A, B> {}
impl<A: PartialEq, B: PartialEq> PartialEq for ArcUnion<A, B> {
fn eq(&self, other: &Self) -> bool {
use crate::ArcUnionBorrow::*;
match (self.borrow(), other.borrow()) {
(First(x), First(y)) => x == y,
(Second(x), Second(y)) => x == y,
(_, _) => false,
}
}
}
/// A borrow of one arm of an `ArcUnion`.
#[derive(Debug)]
pub enum ArcUnionBorrow<'a, A: 'a, B: 'a> {
First(ArcBorrow<'a, A>),
Second(ArcBorrow<'a, B>),
}
impl<A, B> ArcUnion<A, B> {
unsafe fn new(ptr: *mut ()) -> Self {
ArcUnion {
p: ptr::NonNull::new_unchecked(ptr),
phantom_a: PhantomData,
phantom_b: PhantomData,
}
}
/// Returns true if the two values are pointer-equal.
#[inline]
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
this.p == other.p
}
/// Returns the raw tagged pointer value.
#[inline]
pub fn ptr(&self) -> ptr::NonNull<()> {
self.p
}
/// Returns an enum of borrows, dispatching on the tag bit.
#[inline]
pub fn borrow(&self) -> ArcUnionBorrow<A, B> {
if self.is_first() {
let ptr = self.p.as_ptr() as *const A;
let borrow = unsafe { ArcBorrow::from_ref(&*ptr) };
ArcUnionBorrow::First(borrow)
} else {
// Mask off the tag bit to recover the data pointer.
let ptr = ((self.p.as_ptr() as usize) & !0x1) as *const B;
let borrow = unsafe { ArcBorrow::from_ref(&*ptr) };
ArcUnionBorrow::Second(borrow)
}
}
/// Creates an `ArcUnion` from an instance of the first type.
pub fn from_first(other: Arc<A>) -> Self {
unsafe { Self::new(Arc::into_raw(other) as *mut _) }
}
/// Creates an `ArcUnion` from an instance of the second type, setting the
/// low tag bit.
pub fn from_second(other: Arc<B>) -> Self {
unsafe { Self::new(((Arc::into_raw(other) as usize) | 0x1) as *mut _) }
}
/// Returns true if this `ArcUnion` contains the first type.
pub fn is_first(&self) -> bool {
self.p.as_ptr() as usize & 0x1 == 0
}
/// Returns true if this `ArcUnion` contains the second type.
pub fn is_second(&self) -> bool {
!self.is_first()
}
/// Returns a borrow of the first type if applicable, else `None`.
pub fn as_first(&self) -> Option<ArcBorrow<A>> {
match self.borrow() {
ArcUnionBorrow::First(x) => Some(x),
ArcUnionBorrow::Second(_) => None,
}
}
/// Returns a borrow of the second type if applicable, else `None`.
pub fn as_second(&self) -> Option<ArcBorrow<B>> {
match self.borrow() {
ArcUnionBorrow::First(_) => None,
ArcUnionBorrow::Second(x) => Some(x),
}
}
}
impl<A, B> Clone for ArcUnion<A, B> {
fn clone(&self) -> Self {
match self.borrow() {
ArcUnionBorrow::First(x) => ArcUnion::from_first(x.clone_arc()),
ArcUnionBorrow::Second(x) => ArcUnion::from_second(x.clone_arc()),
}
}
}
impl<A, B> Drop for ArcUnion<A, B> {
fn drop(&mut self) {
// Reconstruct the owning Arc for whichever arm is live and let its
// destructor do the decrement.
match self.borrow() {
ArcUnionBorrow::First(x) => unsafe {
let _ = Arc::from_raw(&*x);
},
ArcUnionBorrow::Second(x) => unsafe {
let _ = Arc::from_raw(&*x);
},
}
}
}
impl<A: fmt::Debug, B: fmt::Debug> fmt::Debug for ArcUnion<A, B> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.borrow(), f)
}
}
#[cfg(test)]
mod tests {
use super::{Arc, HeaderWithLength, ThinArc};
use std::clone::Clone;
use std::ops::Drop;
use std::sync::atomic;
use std::sync::atomic::Ordering::{Acquire, SeqCst};
#[derive(PartialEq)]
struct Canary(*mut atomic::AtomicUsize);
impl Drop for Canary {
fn drop(&mut self) {
unsafe {
(*self.0).fetch_add(1, SeqCst);
}
}
}
#[test]
fn empty_thin() {
let header = HeaderWithLength::new(100u32, 0);
let x = Arc::from_header_and_iter(header, std::iter::empty::<i32>());
let y = Arc::into_thin(x.clone());
assert_eq!(y.header.header, 100);
assert!(y.slice.is_empty());
assert_eq!(x.header.header, 100);
assert!(x.slice.is_empty());
}
#[test]
fn thin_assert_padding() {
#[derive(Clone, Default)]
#[repr(C)]
struct Padded {
i: u16,
}
let header = HeaderWithLength::new(0i32, 2);
let items = vec![Padded { i: 0xdead }, Padded { i: 0xbeef }];
let a = ThinArc::from_header_and_iter(header, items.into_iter());
assert_eq!(a.slice.len(), 2);
assert_eq!(a.slice[0].i, 0xdead);
assert_eq!(a.slice[1].i, 0xbeef);
}
#[test]
fn slices_and_thin() {
let mut canary = atomic::AtomicUsize::new(0);
let c = Canary(&mut canary as *mut atomic::AtomicUsize);
let v = vec![5, 6];
let header = HeaderWithLength::new(c, v.len());
{
let x = Arc::into_thin(Arc::from_header_and_iter(header, v.into_iter()));
let y = ThinArc::with_arc(&x, |q| q.clone());
let _ = y.clone();
let _ = x == x;
Arc::from_thin(x.clone());
}
assert_eq!(canary.load(Acquire), 1);
}
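// A small runnable check of the copy-on-write behavior of `Arc::make_mut`
// and of uniqueness tracking, exercising only APIs defined above.
#[test]
fn make_mut_copy_on_write() {
let mut x = Arc::new(10);
let y = x.clone();
assert!(!x.is_unique());
*Arc::make_mut(&mut x) += 1; // shared: clones, then mutates the copy
assert!(x.is_unique() && y.is_unique());
assert_eq!(*x, 11);
assert_eq!(*y, 10);
*Arc::make_mut(&mut x) += 1; // unique: mutates in place
assert_eq!(*x, 12);
}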
}