#![deny(clippy::missing_safety_doc)]
#![deny(clippy::undocumented_unsafe_blocks)]
use std::{
alloc::{Layout, handle_alloc_error},
ptr::NonNull,
sync::{Arc, Mutex},
};
use crate::value::{VTable, vtable::DropFn};
pub mod ffi {
    use crate::value::RotoOption;

    use super::ErasedList;

    // The element type is erased on this side of the FFI boundary; the
    // unit type stands in for it in pointer signatures.
    type T = ();

    /// FFI entry point: copy the element at `idx` out of `this` into `out`,
    /// encoded as a `RotoOption`.
    ///
    /// The option is written as a one-byte discriminant followed by the
    /// payload at the element's alignment (see `next_multiple_of` below).
    /// In the found case the discriminant is first written as `1` and only
    /// flipped to `0` after the payload has been produced, so a panicking
    /// clone cannot leave `out` tagged as holding a valid value.
    /// NOTE(review): this assumes `RotoOption`'s discriminants are
    /// `0` = value present and `1` = no value — confirm against the
    /// `RotoOption` declaration.
    ///
    /// # Safety
    ///
    /// `out` must be non-null, suitably aligned, and valid for writes of a
    /// full `RotoOption` of this list's element type (discriminant byte,
    /// padding, payload).
    pub unsafe fn list_get(
        out: *mut RotoOption<T>,
        this: ErasedList,
        idx: u64,
    ) {
        // A u64 index that does not fit in usize can never be in bounds.
        let idx = idx.try_into().ok();
        match idx.and_then(|idx| this.get(idx)) {
            Some(src) => {
                // Tag as "no value" first so a panic below leaves `out`
                // in a consistent state.
                // SAFETY: caller guarantees `out` is valid for writes.
                unsafe { out.cast::<u8>().write(1) };
                // NOTE(review): `src` was produced under a lock that
                // `this.get` has already released; a concurrent push could
                // reallocate the buffer and invalidate `src` before the
                // copy below — verify the threading model, or obtain the
                // pointer while holding this lock.
                let raw = this.0.lock().unwrap();
                let size = raw.vtable.size();
                let alignment = raw.vtable.align();
                // Payload begins after the 1-byte discriminant, rounded up
                // to the element's alignment.
                let offset = 1usize.next_multiple_of(alignment);
                // SAFETY: caller guarantees `out` has room for the payload
                // at this offset.
                let dst = unsafe { out.byte_add(offset) };
                match raw.vtable.clone_fn {
                    Some(clone_fn) => {
                        // SAFETY: `src` points at a live element; `dst` is
                        // writable payload space for the same type.
                        unsafe { (clone_fn)(dst.cast::<()>(), src.as_ptr()) }
                    }
                    None => {
                        // No clone function: the element is plain bytes,
                        // so a bitwise copy suffices.
                        // SAFETY: both pointers are valid for `size` bytes
                        // and cannot overlap (`dst` is caller-owned memory).
                        unsafe {
                            std::ptr::copy_nonoverlapping(
                                src.cast::<u8>().as_ptr(),
                                dst.cast::<u8>(),
                                size,
                            )
                        };
                    }
                }
                // Payload initialized; flip the tag to "value present".
                // SAFETY: caller guarantees `out` is valid for writes.
                unsafe { out.cast::<u8>().write(0) };
            }
            None => {
                // Index out of bounds (or did not fit in usize): tag as
                // "no value"; the payload bytes stay uninitialized.
                // SAFETY: caller guarantees `out` is valid for writes.
                unsafe { out.cast::<u8>().write(1) };
            }
        }
    }
}
pub mod boundary {
    use std::{
        alloc::Layout, marker::PhantomData, mem::ManuallyDrop, ptr::NonNull,
    };

    use crate::{
        Value,
        runtime::extern_clone,
        value::{
            VTable,
            vtable::{CloneFn, DropFn},
        },
    };

    use super::ErasedList;

    /// Typed wrapper over the type-erased [`ErasedList`].
    ///
    /// Elements are stored in their `T::Transformed` representation; the
    /// vtable built in [`List::new`] captures that type's size, alignment,
    /// clone function and (when needed) drop function. `repr(transparent)`
    /// lets this cross the FFI boundary as a plain `ErasedList`.
    #[repr(transparent)]
    pub struct List<T: Value> {
        inner: ErasedList,
        _phantom: PhantomData<T>,
    }

    impl<T: Value> Clone for List<T> {
        /// Cheap handle clone: shares the underlying storage (clones the
        /// inner `Arc`); elements are not copied. Implemented by hand so
        /// `T` itself does not need to be `Clone`.
        fn clone(&self) -> Self {
            Self {
                inner: self.inner.clone(),
                _phantom: self._phantom,
            }
        }
    }

    impl<T: Value> List<T> {
        /// New empty list with a vtable describing `T::Transformed`.
        pub fn new() -> Self {
            // Type-erased destructor shim: recover the element type and
            // run its `Drop` in place.
            unsafe extern "C" fn drop<T>(x: NonNull<()>) {
                let x = x.cast::<T>();
                // SAFETY: the vtable only applies this to a live,
                // initialized `T` owned by the list.
                unsafe {
                    std::ptr::drop_in_place(x.as_ptr());
                }
            }
            // Type-erased clone shim: clone the `T` behind `from` into the
            // (uninitialized) slot at `to`.
            unsafe extern "C" fn clone<T: Clone>(
                to: *mut (),
                from: *const (),
            ) {
                // SAFETY: the vtable only pairs a live source with a
                // writable destination of the same type.
                unsafe { extern_clone::<T>(from, to) };
            }
            // Skip the destructor entirely for trivially-droppable types.
            let drop_fn = std::mem::needs_drop::<T::Transformed>()
                .then_some(drop::<T::Transformed> as DropFn);
            let clone_fn = Some(clone::<T::Transformed> as CloneFn);
            let layout = Layout::new::<T::Transformed>();
            Self {
                inner: ErasedList::new(VTable::new(
                    layout.size(),
                    layout.align(),
                    clone_fn,
                    drop_fn,
                )),
                _phantom: PhantomData,
            }
        }

        /// Append `elem`, converting it to its stored representation.
        pub fn push(&self, elem: T) {
            let elem = T::transform(elem);
            // Ownership moves into the list via a bitwise copy, so the
            // local must not run its own destructor.
            let mut elem = ManuallyDrop::new(elem);
            let elem_ptr = NonNull::from_mut(&mut elem).cast::<()>();
            // SAFETY: `elem_ptr` points to a live `T::Transformed`
            // (matching the vtable built in `new`) that we relinquish;
            // `ManuallyDrop` guarantees it is not dropped here.
            unsafe { self.inner.push(elem_ptr) };
        }

        /// Clone out the element at `idx`, or `None` when out of bounds.
        pub fn get(&self, idx: usize) -> Option<T> {
            let ptr = self.inner.get(idx)?;
            // SAFETY: the pointer comes from our own storage, whose
            // elements are `T::Transformed`.
            // NOTE(review): the lock guarding the storage is released by
            // the time we read through `ptr`; a concurrent push could
            // reallocate and invalidate it — confirm the intended
            // threading model.
            let transformed =
                unsafe { ptr.cast::<T::Transformed>().as_ref() };
            Some(T::untransform(transformed.clone()))
        }

        /// New list holding clones of `self`'s elements followed by clones
        /// of `other`'s elements.
        pub fn concat(&self, other: &Self) -> Self {
            // SAFETY: both lists were built by `List::<T>::new`, so their
            // vtables describe the same element type.
            let inner = unsafe { self.inner.concat(&other.inner) };
            Self {
                inner,
                _phantom: PhantomData,
            }
        }

        /// Swap elements `i` and `j`; out-of-range indices are a no-op.
        pub fn swap(&self, i: usize, j: usize) {
            self.inner.swap(i, j)
        }

        /// Number of elements.
        pub fn len(&self) -> usize {
            self.inner.len()
        }

        /// Number of element slots currently allocated.
        pub fn capacity(&self) -> usize {
            self.inner.capacity()
        }

        /// `true` when the list holds no elements.
        pub fn is_empty(&self) -> bool {
            self.inner.is_empty()
        }
    }

    impl<T: Value> Default for List<T> {
        fn default() -> Self {
            Self::new()
        }
    }

    impl<T: Clone + Value> List<T> {
        /// Copy the whole list out as a `Vec`, cloning and un-transforming
        /// each element.
        pub fn to_vec(&self) -> Vec<T> {
            let guard = self.inner.0.lock().unwrap();
            // SAFETY: the guard is held for the entire read; the first
            // `len` slots hold initialized `T::Transformed` values. For
            // zero-sized elements the dangling-but-aligned pointer is
            // valid for slices of any length.
            let slice = unsafe {
                std::slice::from_raw_parts(
                    guard.ptr.cast::<T::Transformed>().as_ptr(),
                    guard.len,
                )
            };
            slice
                .iter()
                .map(|elem| T::untransform(elem.clone()))
                .collect()
        }
    }
}
// The element type is erased at this layer; the unit type stands in for
// it in all pointer signatures below.
type T = ();

/// Shared, type-erased list: a reference-counted handle around the
/// mutex-protected [`RawList`] storage. Cloning the handle shares the
/// storage rather than copying elements.
#[derive(Clone)]
pub(crate) struct ErasedList(Arc<Mutex<RawList>>);
impl ErasedList {
    /// Create an empty list whose elements are described by `vtable`.
    pub fn new(vtable: VTable) -> Self {
        Self(Arc::new(Mutex::new(RawList::new(vtable))))
    }

    /// Append the element behind `elem_ptr` by bitwise copy.
    ///
    /// # Safety
    ///
    /// `elem_ptr` must point to a live value of the element type described
    /// by this list's vtable; ownership of that value moves into the list,
    /// so the caller must not use or drop it afterwards.
    pub unsafe fn push(&self, elem_ptr: NonNull<T>) {
        let mut storage = self.0.lock().unwrap();
        // SAFETY: forwarded directly from this function's own contract.
        unsafe { storage.push(elem_ptr) };
    }

    /// Build a new list holding clones of `self`'s elements followed by
    /// clones of `other`'s elements.
    ///
    /// # Safety
    ///
    /// Both lists must hold elements of the same type (matching vtables).
    pub unsafe fn concat(&self, other: &Self) -> Self {
        let lhs = self.0.lock().unwrap();
        let result = Self::new(lhs.vtable.clone());
        let mut dst = result.0.lock().unwrap();
        // SAFETY: `dst` was just created from `lhs`'s own vtable.
        unsafe { dst.extend(&lhs) };
        // Release `lhs` before touching `other`: `self` and `other` may be
        // the very same list, and holding both guards would deadlock.
        drop(lhs);
        let rhs = other.0.lock().unwrap();
        // SAFETY: the caller guarantees `other` shares our element type.
        unsafe { dst.extend(&rhs) };
        drop(rhs);
        drop(dst);
        result
    }

    /// Pointer to the element at `idx`, or `None` when out of bounds.
    pub fn get(&self, idx: usize) -> Option<NonNull<T>> {
        let storage = self.0.lock().unwrap();
        storage.get(idx)
    }

    /// Swap elements `i` and `j`; out-of-range indices are a no-op.
    pub fn swap(&self, i: usize, j: usize) {
        let storage = self.0.lock().unwrap();
        storage.swap(i, j)
    }

    /// Number of elements.
    pub fn len(&self) -> usize {
        let storage = self.0.lock().unwrap();
        storage.len()
    }

    /// Number of element slots currently allocated.
    pub fn capacity(&self) -> usize {
        let storage = self.0.lock().unwrap();
        storage.capacity()
    }

    /// `true` when the list holds no elements.
    pub fn is_empty(&self) -> bool {
        let storage = self.0.lock().unwrap();
        storage.is_empty()
    }
}
/// The type-erased backing storage: a manually managed array whose element
/// layout, clone behavior and drop behavior come from `vtable`.
struct RawList {
    // Start of the allocation. Dangling (but aligned to the element's
    // alignment) when nothing has been allocated or the element type is
    // zero-sized.
    ptr: NonNull<T>,
    // Number of initialized elements.
    len: usize,
    // Number of element slots allocated; `usize::MAX` for zero-sized
    // element types, which need no storage.
    capacity: usize,
    // Size, alignment, clone fn and drop fn of the erased element type.
    vtable: VTable,
}
// SAFETY: `RawList` exclusively owns its allocation, and the raw pointer
// is only ever dereferenced while the surrounding `Mutex` in `ErasedList`
// is held. NOTE(review): nothing here enforces that the *element* type is
// `Send`/`Sync`; soundness relies on the callers (`boundary::List`) only
// erasing thread-safe element types — confirm that invariant.
unsafe impl Send for RawList {}
// SAFETY: see the `Send` impl above; `&RawList` exposes no interior
// mutability outside the enclosing `Mutex`.
unsafe impl Sync for RawList {}
impl Drop for RawList {
    /// Drop all elements (only when the vtable says they need dropping),
    /// then free the backing allocation.
    fn drop(&mut self) {
        match self.vtable.drop_fn {
            Some(drop_fn) => {
                // SAFETY: the first `len` slots hold initialized elements
                // of the vtable's element type.
                unsafe { self.drop_elements_with(drop_fn) };
            }
            None => {
                // Trivially-droppable elements: just clear the length.
                self.forget_elements();
            }
        }
        // SAFETY: all elements are dropped or forgotten above, and the
        // allocation (if any) matches `vtable.layout()` × `capacity`.
        unsafe { self.dealloc() };
    }
}
impl RawList {
    /// Empty list for `vtable`'s element type; allocates nothing yet.
    fn new(vtable: VTable) -> Self {
        Self::with_capacity(0, vtable)
    }

    /// Abandon all elements without running destructors.
    ///
    /// Only used when the vtable carries no drop function, i.e. the
    /// element type is trivially droppable, so nothing leaks.
    fn forget_elements(&mut self) {
        self.len = 0;
    }

    /// Run `drop_fn` on every initialized element, leaving the list empty.
    ///
    /// `self.len` is cleared *before* the loop so that a panicking
    /// destructor cannot lead to the remaining elements being dropped a
    /// second time later.
    unsafe fn drop_elements_with(&mut self, drop_fn: DropFn) {
        let len = self.len;
        self.len = 0;
        for i in 0..len {
            let offset = self.offset_of(i);
            // SAFETY: `i < len <= capacity`, so the offset stays inside
            // the allocation (and is 0 for zero-sized elements).
            let non_null = unsafe { self.ptr.byte_add(offset) };
            // SAFETY: slot `i` holds an initialized element of the
            // vtable's type; the caller supplied its drop function.
            unsafe { (drop_fn)(non_null) }
        }
    }

    /// Storage with room for at least `capacity` elements.
    ///
    /// Zero-sized element types never allocate: they get a dangling (but
    /// correctly aligned) pointer and effectively unlimited capacity.
    fn with_capacity(capacity: usize, vtable: VTable) -> Self {
        // SAFETY: alignments are never zero, so the dangling pointer
        // built from `vtable.align()` is non-null.
        let ptr = unsafe {
            NonNull::new_unchecked(std::ptr::without_provenance_mut(
                vtable.align(),
            ))
        };
        if vtable.size() == 0 {
            return Self {
                ptr,
                len: 0,
                capacity: usize::MAX,
                vtable,
            };
        }
        let mut this = Self {
            ptr,
            len: 0,
            capacity: 0,
            vtable,
        };
        this.reserve(capacity);
        this
    }

    /// The live heap allocation, or `None` when nothing has been
    /// allocated (zero-sized elements, or capacity 0).
    fn current_memory(&self) -> Option<NonNull<T>> {
        if self.vtable.size() == 0 || self.capacity == 0 {
            None
        } else {
            Some(self.ptr)
        }
    }

    /// Append the element behind `elem_ptr` by bitwise copy.
    ///
    /// Ownership of the element moves into the list: the caller must not
    /// use or drop the source value afterwards, and `elem_ptr` must point
    /// to a live value of the vtable's element type.
    unsafe fn push(&mut self, elem_ptr: NonNull<T>) {
        if self.vtable.size() > 0 {
            self.reserve(1);
            let offset = self.offset_of(self.len);
            let src = elem_ptr.cast::<u8>().as_ptr();
            let dst = self.ptr.cast::<u8>().as_ptr();
            // SAFETY: `reserve` above guaranteed room for slot `self.len`.
            let dst = unsafe { dst.byte_add(offset) };
            let size = self.vtable.size();
            // SAFETY: `src` is valid for `size` reads (caller contract),
            // `dst` for `size` writes, and they cannot overlap because
            // `dst` lies inside our private allocation.
            unsafe { std::ptr::copy_nonoverlapping(src, dst, size) };
        }
        // Zero-sized elements carry no bytes; only the count changes.
        self.len += 1;
    }

    /// Append clones (or bitwise copies) of all of `other`'s elements.
    ///
    /// Caller must guarantee `other` holds elements of the same type as
    /// `self` (matching vtables).
    unsafe fn extend(&mut self, other: &Self) {
        // Panic guard: if a clone panics partway through, the clones made
        // so far sit *past* `self.len` and would never be seen by
        // `RawList::drop` — this guard drops them during unwinding.
        struct DropGuard {
            ptr: NonNull<()>,
            vtable: VTable,
            // Index of the first freshly-cloned slot.
            offset: usize,
            // Number of clones completed so far.
            len: usize,
        }
        impl Drop for DropGuard {
            fn drop(&mut self) {
                let Some(drop_fn) = self.vtable.drop_fn else {
                    return;
                };
                for i in 0..self.len {
                    let offset = self.vtable.size() * (self.offset + i);
                    // SAFETY: exactly `self.len` slots starting at
                    // `self.offset` were fully cloned before the panic.
                    let src = unsafe { self.ptr.byte_add(offset) };
                    unsafe { (drop_fn)(src) }
                }
            }
        }
        // Zero-sized elements: nothing to store, but cloning may still
        // have side effects (e.g. reference counts), so run the clone
        // function once per element.
        if self.vtable.size() == 0 {
            if let Some(clone_fn) = self.vtable.clone_fn {
                let mut drop_guard = DropGuard {
                    ptr: self.ptr,
                    vtable: self.vtable.clone(),
                    offset: self.len,
                    len: 0,
                };
                for _ in 0..other.len {
                    // SAFETY: zero-sized accesses through aligned dangling
                    // pointers are valid.
                    unsafe {
                        (clone_fn)(self.ptr.as_ptr(), other.ptr.as_ptr())
                    };
                    drop_guard.len += 1;
                }
                // All clones succeeded; disarm the panic guard.
                std::mem::forget(drop_guard);
            }
            self.len += other.len;
            return;
        }
        if other.len == 0 {
            return;
        }
        self.reserve(other.len);
        if let Some(clone_fn) = self.vtable.clone_fn {
            let mut drop_guard = DropGuard {
                ptr: self.ptr,
                vtable: self.vtable.clone(),
                offset: self.len,
                len: 0,
            };
            for i in 0..other.len {
                let src_offset = other.offset_of(i);
                // SAFETY: `i < other.len`, so the source slot holds a
                // live element.
                let src = unsafe { other.ptr.byte_add(src_offset) };
                let dst_offset = self.offset_of(self.len + i);
                // SAFETY: `reserve` above guaranteed room for `other.len`
                // additional slots.
                let dst = unsafe { self.ptr.byte_add(dst_offset) };
                // SAFETY: clones from a live source into uninitialized
                // destination space of the same type.
                unsafe { (clone_fn)(dst.as_ptr(), src.as_ptr()) }
                drop_guard.len += 1;
            }
            // All clones succeeded; disarm the panic guard.
            std::mem::forget(drop_guard);
        } else {
            // No clone function: plain-old-data elements can be copied
            // wholesale in one memcpy.
            let src = other.ptr;
            let dst_offset = self.offset_of(self.len);
            // SAFETY: `reserve` above guaranteed room at `self.len`.
            let dst = unsafe { self.ptr.byte_add(dst_offset) };
            let size = other.offset_of(other.len);
            // SAFETY: both ranges span `size` valid bytes; `dst` lies in
            // `self`'s allocation and `src` in `other`'s, and the
            // `&mut self`/`&other` borrows keep those distinct.
            unsafe {
                std::ptr::copy_nonoverlapping(
                    src.cast::<u8>().as_ptr(),
                    dst.cast::<u8>().as_ptr(),
                    size,
                )
            };
        }
        self.len += other.len;
    }

    /// Ensure capacity for `added` more elements beyond the current
    /// length, growing amortized via `compute_capacity`.
    fn reserve(&mut self, added: usize) {
        // Zero-sized element types never need storage.
        if self.vtable.size() == 0 {
            return;
        }
        // Panic on `usize` overflow rather than allocating too little.
        let new_required = self.len.checked_add(added).unwrap();
        let new_capacity = compute_capacity(self.vtable.size(), new_required);
        if new_capacity > self.capacity {
            if let Some(ptr) = self.current_memory() {
                // SAFETY: `ptr` is our live allocation of `capacity`
                // slots with exactly this element layout.
                let new_ptr = unsafe {
                    realloc_array(
                        ptr,
                        self.vtable.layout(),
                        self.capacity,
                        new_capacity,
                    )
                };
                self.ptr = new_ptr;
            } else {
                // First allocation.
                // SAFETY: element size and `new_capacity` are both
                // nonzero on this path.
                let new_ptr = unsafe {
                    alloc_array(self.vtable.layout(), new_capacity)
                };
                self.ptr = new_ptr;
            }
            self.capacity = new_capacity;
        }
    }

    /// Pointer to element `idx`, or `None` when out of bounds.
    fn get(&self, idx: usize) -> Option<NonNull<T>> {
        if idx >= self.len {
            return None;
        }
        let offset = self.offset_of(idx);
        // SAFETY: `idx < len`, so the offset stays inside the allocation.
        let ptr = unsafe { self.ptr.byte_add(offset) };
        Some(ptr)
    }

    /// Swap the bytes of elements `i` and `j`.
    ///
    /// Out-of-bounds indices (and `i == j`) are silently ignored.
    /// NOTE(review): this takes `&self` yet mutates element bytes; that
    /// is only sound because call sites reach it through the enclosing
    /// `Mutex` — confirm no lock-free path can call it concurrently.
    fn swap(&self, i: usize, j: usize) {
        if i >= self.len || j >= self.len {
            return;
        }
        if i == j {
            return;
        }
        let i = self.offset_of(i);
        let j = self.offset_of(j);
        // SAFETY: both indices were bounds-checked above.
        let ptr_i = unsafe { self.ptr.byte_add(i) };
        let ptr_j = unsafe { self.ptr.byte_add(j) };
        // SAFETY: two distinct in-bounds slots of `size` bytes each; the
        // `i != j` check above rules out overlap.
        unsafe {
            std::ptr::swap_nonoverlapping(
                ptr_i.cast::<u8>().as_ptr(),
                ptr_j.cast::<u8>().as_ptr(),
                self.vtable.size(),
            )
        };
    }

    /// Free the backing allocation. Elements must already have been
    /// dropped or forgotten by the caller.
    unsafe fn dealloc(&mut self) {
        if self.vtable.size() == 0 {
            return;
        }
        if let Some(ptr) = self.current_memory() {
            // SAFETY: `ptr` was obtained from `alloc_array` /
            // `realloc_array` with this element layout and `capacity`.
            unsafe {
                dealloc_array(ptr, self.vtable.layout(), self.capacity)
            };
        }
    }

    /// Byte offset of element slot `n` from the start of the allocation.
    fn offset_of(&self, n: usize) -> usize {
        self.vtable.size() * n
    }

    /// `true` when the list holds no elements.
    fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Number of initialized elements.
    fn len(&self) -> usize {
        self.len
    }

    /// Number of allocated element slots.
    fn capacity(&self) -> usize {
        self.capacity
    }
}
/// Choose an allocation capacity (in elements) for a list that must hold
/// at least `required` elements of `size` bytes each.
///
/// Returns the next power of two of `required`, but never less than a
/// size-dependent floor: smaller elements get a larger floor so the first
/// few pushes don't each trigger a reallocation. A `required` of zero
/// means no storage at all.
fn compute_capacity(size: usize, required: usize) -> usize {
    if required == 0 {
        return 0;
    }
    // Floor chosen by element size: tiny elements are cheap to
    // over-allocate, huge ones are not.
    let floor = match size {
        1 => 8,
        0 | 2..=1024 => 4,
        _ => 1,
    };
    // Panics (via unwrap) if the power-of-two rounding overflows.
    let rounded = required.checked_next_power_of_two().unwrap();
    rounded.max(floor)
}
/// Allocate uninitialized storage for `n` elements of `elem_layout`.
///
/// Diverges via `handle_alloc_error` when the allocator fails. Must not
/// be called with a zero-sized layout or `n == 0` (checked in debug
/// builds): the global allocator does not support zero-sized requests.
unsafe fn alloc_array(elem_layout: Layout, n: usize) -> NonNull<T> {
    debug_assert!(elem_layout.size() > 0);
    debug_assert!(n > 0);
    let layout = array_layout(elem_layout, n);
    // SAFETY: `layout` has nonzero size (both factors asserted above).
    let ptr = unsafe { std::alloc::alloc(layout) };
    let ptr = ptr.cast::<T>();
    let Some(ptr) = NonNull::new(ptr) else {
        // Allocation failure: report and abort per the global handler.
        handle_alloc_error(layout);
    };
    ptr
}
/// Grow (or shrink) an allocation from `old_n` to `new_n` elements of
/// `elem_layout`, preserving the existing contents.
///
/// Diverges via `handle_alloc_error` on failure. `ptr` must have been
/// returned by `alloc_array`/`realloc_array` for exactly `old_n` elements
/// of this same layout; both counts must be nonzero (debug-checked), as
/// `realloc` supports neither zero-sized old nor new allocations.
unsafe fn realloc_array(
    ptr: NonNull<T>,
    elem_layout: Layout,
    old_n: usize,
    new_n: usize,
) -> NonNull<T> {
    debug_assert!(elem_layout.size() > 0);
    debug_assert!(old_n > 0);
    debug_assert!(new_n > 0);
    let old_layout = array_layout(elem_layout, old_n);
    let new_layout = array_layout(elem_layout, new_n);
    // SAFETY: `ptr` was allocated with `old_layout` by this allocator
    // (caller contract), and `new_layout.size()` is nonzero. The element
    // alignment — and hence the layout alignment — is unchanged, as
    // `realloc` requires.
    let ptr = unsafe {
        std::alloc::realloc(
            ptr.cast::<u8>().as_ptr(),
            old_layout,
            new_layout.size(),
        )
    };
    let ptr = ptr.cast::<T>();
    let Some(ptr) = NonNull::new(ptr) else {
        // Reallocation failure: the old block is still valid, but we
        // abort per the global handler.
        handle_alloc_error(new_layout);
    };
    ptr
}
/// Free an allocation of `n` elements of `elem_layout`.
///
/// `ptr` must have been returned by `alloc_array`/`realloc_array` for
/// exactly these parameters; layout size and `n` must be nonzero
/// (debug-checked) so the computed layout matches the allocation.
unsafe fn dealloc_array(ptr: NonNull<T>, elem_layout: Layout, n: usize) {
    debug_assert!(elem_layout.size() > 0);
    debug_assert!(n > 0);
    let layout = array_layout(elem_layout, n);
    // SAFETY: `ptr` was allocated with exactly this layout (caller
    // contract).
    unsafe { std::alloc::dealloc(ptr.cast::<u8>().as_ptr(), layout) };
}
/// Layout of an array of `n` elements with layout `elem_layout`.
///
/// `Layout::size()` of a Rust type already includes trailing padding, so
/// the array size is simply `size * n` at the element's alignment.
///
/// # Panics
///
/// Panics when `size * n` overflows `usize` or exceeds the maximum size
/// `Layout` accepts. The previous unchecked multiply was only guarded by
/// a `debug_assert!` and could silently wrap in release builds, making
/// the allocator hand back a too-small block; `checked_mul` turns that
/// into a panic, mirroring `Layout::array`'s overflow behavior.
fn array_layout(elem_layout: Layout, n: usize) -> Layout {
    debug_assert!(elem_layout.size() > 0);
    debug_assert!(n > 0);
    let element_size = elem_layout.size();
    let align = elem_layout.align();
    let array_size = element_size
        .checked_mul(n)
        .expect("array size overflows usize");
    Layout::from_size_align(array_size, align).unwrap()
}
#[cfg(test)]
mod tests {
    use std::sync::{Arc, atomic::AtomicUsize};

    use crate::Val;

    use super::boundary::List;

    #[test]
    fn empty_list() {
        let l = List::<u64>::new();
        assert_eq!(l.to_vec(), Vec::new());
    }

    #[test]
    fn push_and_get() {
        let l = List::<u64>::new();
        l.push(10);
        l.push(20);
        l.push(30);
        assert_eq!(Some(10), l.get(0));
        assert_eq!(Some(20), l.get(1));
        assert_eq!(Some(30), l.get(2));
        // One past the end must be None, not a read out of bounds.
        assert_eq!(None, l.get(3));
    }

    #[test]
    fn list_of_strings() {
        // Exercises the clone-function path with a refcounted element
        // type (`Arc<str>` is not plain old data).
        let l = List::<Arc<str>>::new();
        l.push("hello".into());
        l.push("world".into());
        // Self-concat: also checks that `concat` handles the same list on
        // both sides without deadlocking on its own mutex.
        let l = l.concat(&l);
        let l = l.concat(&l);
        assert_eq!(l.len(), 8);
        assert_eq!(
            l.to_vec(),
            vec![
                "hello".into(),
                "world".into(),
                "hello".into(),
                "world".into(),
                "hello".into(),
                "world".into(),
                "hello".into(),
                "world".into(),
            ]
        );
    }

    #[test]
    fn zero_sized_refcount() {
        use std::sync::atomic::Ordering::Relaxed;
        // Starts at 1 to account for the `counter` local created below.
        static COUNT: AtomicUsize = AtomicUsize::new(1);
        // Zero-sized type whose clone/drop maintain a global live count:
        // proves the list still runs clone and drop functions for ZST
        // elements even though they occupy no storage.
        struct Counter;
        impl Clone for Counter {
            fn clone(&self) -> Self {
                COUNT.fetch_add(1, Relaxed);
                Self
            }
        }
        impl Drop for Counter {
            fn drop(&mut self) {
                COUNT.fetch_sub(1, Relaxed);
            }
        }
        let counter = Counter;
        assert_eq!(std::mem::size_of::<Val<Counter>>(), 0);
        let list_one = List::<Val<Counter>>::new();
        list_one.push(Val(counter.clone()));
        assert_eq!(COUNT.load(Relaxed), 2);
        // Each concat clones every element, doubling the live count.
        let list_two = list_one.concat(&list_one);
        assert_eq!(COUNT.load(Relaxed), 4);
        let list_three = list_two.concat(&list_two);
        assert_eq!(COUNT.load(Relaxed), 8);
        // Dropping each list must drop exactly its own elements.
        drop(list_one);
        assert_eq!(COUNT.load(Relaxed), 7);
        drop(list_two);
        assert_eq!(COUNT.load(Relaxed), 5);
        drop(list_three);
        assert_eq!(COUNT.load(Relaxed), 1);
        drop(counter);
    }

    #[test]
    fn swap() {
        let list = List::<i32>::new();
        list.push(1);
        list.push(2);
        list.push(3);
        list.push(4);
        list.swap(1, 2);
        list.swap(0, 3);
        assert_eq!(list.to_vec(), vec![4, 3, 2, 1])
    }
}