use shared_ptr::Shared;
use std::cell::Cell;
use std::mem;
use std::slice;
use u32_to_usize;
use usize_to_u32;
// Low bit of the pointer word used to mark "inline data" values.
// HeapAllocation pointers are asserted to be at least 2-byte aligned
// (see TaggedPtr::allocate), so a set low bit can never collide with
// a real allocation address.
const TAG: usize = 1;
const TAG_MASK: usize = 1;
/// Either a pointer to a refcounted `HeapAllocation` (low bit clear)
/// or arbitrary inline data with the low bit set (see `new_inline_data`).
pub struct TaggedPtr(Shared<HeapAllocation>);
impl TaggedPtr {
    /// Allocate a fresh heap block with room for `requested_data_capacity`
    /// bytes of payload and return a pointer to it (refcount starts at 1).
    ///
    /// Panics if the allocator hands back a tagged (odd) or null address,
    /// since either would break the inline-data encoding.
    pub fn allocate(requested_data_capacity: usize) -> Self {
        let raw = HeapAllocation::allocate(requested_data_capacity);
        assert!(((raw as usize) & TAG_MASK) == 0);
        assert!(!raw.is_null());
        // SAFETY: just asserted non-null above.
        unsafe { TaggedPtr(Shared::new(raw)) }
    }

    /// Store `data` directly in the pointer word, marked with the tag bit.
    /// Bits of `data` that overlap the tag are not recoverable as-is;
    /// callers are expected to pass values shifted past the tag.
    #[inline]
    pub fn new_inline_data(data: usize) -> Self {
        let tagged = (data | TAG) as *mut HeapAllocation;
        // SAFETY: the tag bit is set, so the word is never zero/null.
        unsafe { TaggedPtr(Shared::new(tagged)) }
    }

    /// Decode the pointer word: `Ok` with the real pointer when untagged,
    /// `Err` with the raw bits (tag included) when it holds inline data.
    #[inline]
    fn as_valid_ptr(&self) -> Result<&Shared<HeapAllocation>, usize> {
        let bits = self.0.as_ptr() as usize;
        match bits & TAG_MASK {
            0 => Ok(&self.0),
            _ => Err(bits),
        }
    }

    /// `Ok(bits)` (tag still set) when this holds inline data,
    /// `Err(())` when it points at a heap allocation.
    #[inline]
    pub fn get_inline_data(&self) -> Result<usize, ()> {
        match self.as_valid_ptr() {
            Err(bits) => Ok(bits),
            Ok(_) => Err(()),
        }
    }

    /// Borrow the heap allocation header, or return the raw inline bits.
    #[inline]
    pub fn as_allocated(&self) -> Result<&HeapAllocation, usize> {
        let ptr = self.as_valid_ptr()?;
        // SAFETY: untagged pointers always come from a live allocation
        // whose refcount this TaggedPtr participates in.
        Ok(unsafe { ptr.as_ref() })
    }

    /// Mutable access to the allocation, granted only when this is the
    /// sole owner (refcount == 1); `None` for inline data or shared blocks.
    #[inline]
    pub fn as_owned_allocated_mut(&mut self) -> Option<&mut HeapAllocation> {
        if self.as_valid_ptr().is_err() {
            return None;
        }
        // SAFETY: pointer is valid (checked above); handing out `&mut`
        // is sound only because we verified the refcount is exactly 1.
        unsafe {
            if self.0.as_ref().is_owned() {
                Some(self.0.as_mut())
            } else {
                None
            }
        }
    }

    /// True only for a heap allocation whose refcount exceeds 1.
    /// Inline data is never "shared" in this sense.
    #[inline]
    pub fn is_shared_allocation(&self) -> bool {
        self.as_allocated()
            .map(|allocation| !allocation.is_owned())
            .unwrap_or(false)
    }
}
impl Clone for TaggedPtr {
    /// Cloning a heap-backed pointer shares the allocation, so the
    /// refcount is bumped; inline data is just copied bit-for-bit.
    #[inline]
    fn clone(&self) -> Self {
        match self.as_allocated() {
            Ok(allocation) => allocation.increment_refcount(),
            Err(_inline_bits) => {}
        }
        TaggedPtr(self.0)
    }
}
impl Drop for TaggedPtr {
    /// Releases one reference to a heap-backed allocation; the block is
    /// freed when the count reaches zero. Dropping inline data is a no-op.
    #[inline]
    fn drop(&mut self) {
        if let Ok(heap_allocation) = self.as_allocated() {
            let new_refcount = heap_allocation.decrement_refcount();
            if new_refcount == 0 {
                // NOTE: `data_capacity` is read from the header while the
                // borrow is still live, in the same expression that frees
                // it — do not reorder this read after the deallocation.
                unsafe {
                    HeapAllocation::deallocate(self.0.as_ptr(), heap_allocation.data_capacity)
                }
            }
        }
    }
}
/// Header of a refcounted heap block. `#[repr(C)]` pins the field order
/// so that `data` (a zero-sized trailing array) marks exactly where the
/// payload bytes begin; the payload is carved out of the same allocation
/// immediately after this header (see `HeapAllocation::allocate`).
#[repr(C)] pub struct HeapAllocation {
// Number of live TaggedPtr handles to this block; starts at 1.
refcount: Cell<u32>,
// Payload size in bytes actually available after the header.
data_capacity: u32,
// Zero-sized marker: `data.as_ptr()` is the start of the payload.
data: [u8; 0], }
impl HeapAllocation {
/// Allocate header + at least `requested_data_capacity` payload bytes.
///
/// The allocation is obtained by leaking a `Vec<HeapAllocation>`:
/// `deallocate` must reconstruct a Vec with the *same* capacity, so the
/// arithmetic here and there must stay in lockstep.
fn allocate(requested_data_capacity: usize) -> *mut Self {
let header_size = mem::size_of::<HeapAllocation>();
let bytes = header_size.checked_add(requested_data_capacity).expect("overflow");
// Round the whole block up to a power of two so growth reuses sizes.
// NOTE(review): on `checked_next_power_of_two` overflow this keeps the
// un-rounded `bytes`, which may not be a multiple of `header_size` —
// `deallocate`'s floor division would then disagree with the ceil
// below. In practice `Vec::with_capacity` aborts long before that size.
let bytes = bytes.checked_next_power_of_two().unwrap_or(bytes);
let actual_data_capacity = usize_to_u32(bytes - header_size);
// Ceil division: capacity in units of `HeapAllocation`.
let vec_capacity = 1 + ((bytes - 1) / header_size);
let mut vec = Vec::<HeapAllocation>::with_capacity(vec_capacity);
debug_assert_eq!(vec.capacity(), vec_capacity);
vec.push(HeapAllocation {
refcount: Cell::new(1),
data_capacity: actual_data_capacity,
data: [],
});
// Leak the Vec; ownership of the allocation moves to the raw pointer.
let ptr = vec.as_mut_ptr();
mem::forget(vec);
ptr
}
#[inline]
fn increment_refcount(&self) {
// Panics on u32 overflow rather than wrapping into use-after-free.
self.refcount.set(self.refcount.get().checked_add(1).expect("refcount overflow"))
}
/// Decrement and return the new count; the caller frees at zero.
#[inline]
fn decrement_refcount(&self) -> u32 {
let new_count = self.refcount.get().checked_sub(1).expect("refcount underflow");
self.refcount.set(new_count);
new_count
}
/// Rebuild the leaked Vec so its Drop frees the block.
///
/// Safety: `ptr`/`data_capacity` must come from a matching `allocate`
/// call, and the block must not be referenced afterwards.
#[inline(never)]
#[cold]
unsafe fn deallocate(ptr: *mut HeapAllocation, data_capacity: u32) {
let header_size = mem::size_of::<HeapAllocation>();
let allocated_bytes = header_size + u32_to_usize(data_capacity);
// Floor division matches allocate's ceil because `allocated_bytes`
// is a power of two >= header_size (see allocate).
let vec_capacity = allocated_bytes / header_size;
let vec_length = 1;
let vec = Vec::<HeapAllocation>::from_raw_parts(ptr, vec_length, vec_capacity);
mem::drop(vec);
}
/// A block with exactly one reference may be mutated in place.
#[inline]
pub fn is_owned(&self) -> bool {
self.refcount.get() == 1
}
#[inline]
pub fn data_capacity(&self) -> u32 {
self.data_capacity
}
/// Raw view of the payload bytes that follow this header.
#[inline]
pub fn data(&self) -> *const [u8] {
unsafe {
slice::from_raw_parts(self.data.as_ptr(), u32_to_usize(self.data_capacity))
}
}
/// Mutable raw view of the payload; callers must check ownership first.
#[inline]
pub fn data_mut(&mut self) -> *mut [u8] {
unsafe {
slice::from_raw_parts_mut(self.data.as_mut_ptr(), u32_to_usize(self.data_capacity))
}
}
}