use alloc::boxed::Box;
use alloc::string::String;
use core::{
alloc::Layout,
ffi::c_void,
fmt,
sync::atomic::{AtomicUsize, Ordering as AtomicOrdering},
};
use azul_css::AzString;
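/// Type-erased destructor invoked on the payload when the last copy of a
/// `RefAny` is dropped.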
pub type RefAnyDestructorType = extern "C" fn(*mut c_void);
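/// Heap-allocated control block shared by every copy of a `RefAny`: it owns
/// the type-erased payload pointer plus the atomic copy and borrow counters.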
#[derive(Debug)]
#[repr(C)]
pub struct RefCountInner {
pub _internal_ptr: *const c_void,
pub num_copies: AtomicUsize,
pub num_refs: AtomicUsize,
pub num_mutable_refs: AtomicUsize,
pub _internal_len: usize,
pub _internal_layout_size: usize,
pub _internal_layout_align: usize,
pub type_id: u64,
pub type_name: AzString,
pub custom_destructor: RefAnyDestructorType,
pub serialize_fn: usize,
pub deserialize_fn: usize,
}
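/// Shared handle to a `RefCountInner`; cloning bumps `num_copies`, and
/// dropping the last handle destroys and frees the payload.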
#[derive(Hash, PartialEq, PartialOrd, Ord, Eq)]
#[repr(C)]
pub struct RefCount {
pub ptr: *const RefCountInner,
pub run_destructor: bool,
}
impl fmt::Debug for RefCount {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.downcast().fmt(f)
}
}
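// Cloning a `RefCount` only bumps the shared `num_copies` counter; the
// payload behind `_internal_ptr` is never duplicated.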
impl Clone for RefCount {
fn clone(&self) -> Self {
if !self.ptr.is_null() {
unsafe {
(*self.ptr).num_copies.fetch_add(1, AtomicOrdering::SeqCst);
}
}
Self {
ptr: self.ptr,
run_destructor: true,
}
}
}
impl Drop for RefCount {
fn drop(&mut self) {
if !self.run_destructor || self.ptr.is_null() {
return;
}
self.run_destructor = false;
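        // `fetch_sub` returns the previous value: 1 means this was the last
        // live copy, which is responsible for destroying the payload.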
let current_copies = unsafe {
(*self.ptr).num_copies.fetch_sub(1, AtomicOrdering::SeqCst)
};
if current_copies != 1 {
return;
}
let sharing_info = unsafe { Box::from_raw(self.ptr as *mut RefCountInner) };
let sharing_info = *sharing_info;
let data_ptr = sharing_info._internal_ptr;
if sharing_info._internal_len == 0
|| sharing_info._internal_layout_size == 0
|| data_ptr.is_null()
{
            // Zero-sized payload: there is no allocation to free, but the
            // destructor still runs (e.g. for a ZST with a `Drop` impl).
            // Hand it a dangling-but-valid stack address.
            let dummy: [u8; 0] = [];
            (sharing_info.custom_destructor)(dummy.as_ptr() as *mut c_void);
} else {
let layout = unsafe {
Layout::from_size_align_unchecked(
sharing_info._internal_layout_size,
sharing_info._internal_layout_align,
)
};
(sharing_info.custom_destructor)(data_ptr as *mut c_void);
unsafe {
alloc::alloc::dealloc(data_ptr as *mut u8, layout);
}
}
}
}
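/// Plain-data snapshot of a `RefCountInner`, taken by
/// `RefCount::debug_get_refcount_copied` for logging and debugging.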
#[derive(Debug, Clone)]
pub struct RefCountInnerDebug {
pub num_copies: usize,
pub num_refs: usize,
pub num_mutable_refs: usize,
pub _internal_len: usize,
pub _internal_layout_size: usize,
pub _internal_layout_align: usize,
pub type_id: u64,
pub type_name: AzString,
pub custom_destructor: usize,
pub serialize_fn: usize,
pub deserialize_fn: usize,
}
impl RefCount {
fn new(ref_count: RefCountInner) -> Self {
RefCount {
ptr: Box::into_raw(Box::new(ref_count)),
run_destructor: true,
}
}
fn downcast(&self) -> &RefCountInner {
if self.ptr.is_null() {
panic!("[RefCount::downcast] FATAL: self.ptr is null!");
}
unsafe { &*self.ptr }
}
pub fn debug_get_refcount_copied(&self) -> RefCountInnerDebug {
let dc = self.downcast();
RefCountInnerDebug {
num_copies: dc.num_copies.load(AtomicOrdering::SeqCst),
num_refs: dc.num_refs.load(AtomicOrdering::SeqCst),
num_mutable_refs: dc.num_mutable_refs.load(AtomicOrdering::SeqCst),
_internal_len: dc._internal_len,
_internal_layout_size: dc._internal_layout_size,
_internal_layout_align: dc._internal_layout_align,
type_id: dc.type_id,
type_name: dc.type_name.clone(),
custom_destructor: dc.custom_destructor as usize,
serialize_fn: dc.serialize_fn,
deserialize_fn: dc.deserialize_fn,
}
}
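    /// Advisory check: `true` if no exclusive borrow is currently active.
    /// `downcast_ref` / `downcast_mut` perform their own atomic acquire, so
    /// this is only a snapshot.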
pub fn can_be_shared(&self) -> bool {
self.downcast()
.num_mutable_refs
.load(AtomicOrdering::SeqCst)
== 0
}
pub fn can_be_shared_mut(&self) -> bool {
let info = self.downcast();
info.num_mutable_refs.load(AtomicOrdering::SeqCst) == 0
&& info.num_refs.load(AtomicOrdering::SeqCst) == 0
}
pub fn increase_ref(&self) {
self.downcast()
.num_refs
.fetch_add(1, AtomicOrdering::SeqCst);
}
pub fn decrease_ref(&self) {
self.downcast()
.num_refs
.fetch_sub(1, AtomicOrdering::SeqCst);
}
pub fn increase_refmut(&self) {
self.downcast()
.num_mutable_refs
.fetch_add(1, AtomicOrdering::SeqCst);
}
pub fn decrease_refmut(&self) {
self.downcast()
.num_mutable_refs
.fetch_sub(1, AtomicOrdering::SeqCst);
}
}
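/// RAII guard for a shared borrow of a `RefAny` payload; releases the borrow
/// (`num_refs`) when dropped.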
#[derive(Debug)]
#[repr(C)]
pub struct Ref<'a, T> {
ptr: &'a T,
sharing_info: RefCount,
}
impl<'a, T> Drop for Ref<'a, T> {
fn drop(&mut self) {
self.sharing_info.decrease_ref();
}
}
impl<'a, T> core::ops::Deref for Ref<'a, T> {
type Target = T;
fn deref(&self) -> &Self::Target {
self.ptr
}
}
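/// RAII guard for an exclusive borrow of a `RefAny` payload; releases the
/// borrow (`num_mutable_refs`) when dropped.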
#[derive(Debug)]
#[repr(C)]
pub struct RefMut<'a, T> {
ptr: &'a mut T,
sharing_info: RefCount,
}
impl<'a, T> Drop for RefMut<'a, T> {
fn drop(&mut self) {
self.sharing_info.decrease_refmut();
}
}
impl<'a, T> core::ops::Deref for RefMut<'a, T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&*self.ptr
}
}
impl<'a, T> core::ops::DerefMut for RefMut<'a, T> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.ptr
}
}
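/// Type-erased, reference-counted value with runtime borrow checking;
/// `#[repr(C)]` so it can cross the FFI boundary. Conceptually similar to an
/// `Arc<RefCell<dyn Any>>`.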
#[derive(Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
#[repr(C)]
pub struct RefAny {
pub sharing_info: RefCount,
pub instance_id: u64,
}
impl_option!(
RefAny,
OptionRefAny,
copy = false,
[Debug, Hash, Clone, PartialEq, PartialOrd, Ord, Eq]
);
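// SAFETY assumption: all counter updates on the shared control block are
// atomic, so handles may move across threads; callers must still uphold the
// runtime borrow rules (and keep `set_serialize_fn` / `set_deserialize_fn`
// calls exclusive, since those are plain writes).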
unsafe impl Send for RefAny {}
unsafe impl Sync for RefAny {}
impl RefAny {
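    /// Moves `value` into a type-erased heap allocation. The bytes are copied
    /// out and the original is `mem::forget`-ten, so `T`'s destructor runs
    /// exactly once, later, through `default_custom_destructor::<T>`.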
pub fn new<T: 'static>(value: T) -> Self {
extern "C" fn default_custom_destructor<U: 'static>(ptr: *mut c_void) {
use core::{mem, ptr};
unsafe {
let mut stack_mem = mem::MaybeUninit::<U>::uninit();
ptr::copy_nonoverlapping(
ptr as *const U,
stack_mem.as_mut_ptr(),
1, );
let stack_mem = stack_mem.assume_init();
mem::drop(stack_mem); }
}
let type_name = ::core::any::type_name::<T>();
let type_id = Self::get_type_id_static::<T>();
let st = AzString::from_const_str(type_name);
        let s = Self::new_c(
            (&value as *const T) as *const c_void,
            ::core::mem::size_of::<T>(),
            ::core::mem::align_of::<T>(),
            type_id,
            st,
            default_custom_destructor::<T>,
            0,
            0,
        );
        // `new_c` copied the bytes of `value`; forget the original so its
        // destructor does not run a second time.
        ::core::mem::forget(value);
        s
    }
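    /// Raw constructor used by `new` and by the C API: copies `len` bytes
    /// from `ptr` into a fresh allocation. `serialize_fn` / `deserialize_fn`
    /// are optional function pointers stored as `usize` (0 means "none").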
pub fn new_c(
ptr: *const c_void,
len: usize,
align: usize,
type_id: u64,
type_name: AzString,
custom_destructor: RefAnyDestructorType,
serialize_fn: usize,
deserialize_fn: usize,
) -> Self {
use core::ptr;
if len > 0 && ptr.is_null() {
panic!(
"RefAny::new_c: NULL pointer passed for non-ZST type (size={}). \
This would cause undefined behavior. Type: {:?}",
len,
type_name.as_str()
);
}
let (_internal_ptr, layout) = if len == 0 {
let _dummy: [u8; 0] = [];
(ptr::null_mut(), Layout::for_value(&_dummy))
} else {
let layout = Layout::from_size_align(len, align).expect("Failed to create layout");
let heap_struct_as_bytes = unsafe { alloc::alloc::alloc(layout) };
if heap_struct_as_bytes.is_null() {
alloc::alloc::handle_alloc_error(layout);
}
unsafe { ptr::copy_nonoverlapping(ptr as *const u8, heap_struct_as_bytes, len) };
(heap_struct_as_bytes, layout)
};
let ref_count_inner = RefCountInner {
_internal_ptr: _internal_ptr as *const c_void,
            num_copies: AtomicUsize::new(1),
            num_refs: AtomicUsize::new(0),
            num_mutable_refs: AtomicUsize::new(0),
            _internal_len: len,
_internal_layout_size: layout.size(),
_internal_layout_align: layout.align(),
type_id,
type_name,
custom_destructor,
serialize_fn,
deserialize_fn,
};
let sharing_info = RefCount::new(ref_count_inner);
Self {
sharing_info,
            instance_id: 0,
        }
}
pub fn get_data_ptr(&self) -> *const c_void {
self.sharing_info.downcast()._internal_ptr
}
    pub fn has_no_copies(&self) -> bool {
        let inner = self.sharing_info.downcast();
        inner.num_copies.load(AtomicOrdering::SeqCst) == 1
            && inner.num_refs.load(AtomicOrdering::SeqCst) == 0
            && inner.num_mutable_refs.load(AtomicOrdering::SeqCst) == 0
    }
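    /// Runtime-checked shared borrow: returns `None` on type mismatch, on a
    /// null payload, or while an exclusive borrow is active.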
#[inline]
    pub fn downcast_ref<'a, U: 'static>(&'a mut self) -> Option<Ref<'a, U>> {
        if self.get_type_id() != Self::get_type_id_static::<U>() {
            return None;
        }
        // Register the shared borrow first, then verify that no exclusive
        // borrow is active; checking before registering would be racy
        // between clones on different threads.
        self.sharing_info.increase_ref();
        let inner = self.sharing_info.downcast();
        if inner.num_mutable_refs.load(AtomicOrdering::SeqCst) != 0 {
            self.sharing_info.decrease_ref();
            return None;
        }
        let data_ptr = inner._internal_ptr;
        if data_ptr.is_null() {
            self.sharing_info.decrease_ref();
            return None;
        }
        Some(Ref {
            ptr: unsafe { &*(data_ptr as *const U) },
            sharing_info: self.sharing_info.clone(),
        })
    }
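    /// Runtime-checked exclusive borrow: returns `None` on type mismatch, on
    /// a null payload, or while any other borrow is active.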
#[inline]
    pub fn downcast_mut<'a, U: 'static>(&'a mut self) -> Option<RefMut<'a, U>> {
        if self.get_type_id() != Self::get_type_id_static::<U>() {
            return None;
        }
        let inner = self.sharing_info.downcast();
        // Claim the exclusive slot first, then verify that no shared borrow
        // exists (the same acquire-then-verify pattern as `replace_contents`);
        // a plain check-then-increment would be racy between clones.
        if inner
            .num_mutable_refs
            .compare_exchange(0, 1, AtomicOrdering::SeqCst, AtomicOrdering::SeqCst)
            .is_err()
        {
            return None;
        }
        if inner.num_refs.load(AtomicOrdering::SeqCst) != 0 {
            inner.num_mutable_refs.store(0, AtomicOrdering::SeqCst);
            return None;
        }
        let data_ptr = inner._internal_ptr;
        if data_ptr.is_null() {
            inner.num_mutable_refs.store(0, AtomicOrdering::SeqCst);
            return None;
        }
        Some(RefMut {
            ptr: unsafe { &mut *(data_ptr as *mut U) },
            sharing_info: self.sharing_info.clone(),
        })
    }
    #[inline]
    fn get_type_id_static<T: 'static>() -> u64 {
        use core::{any::TypeId, mem};

        // `TypeId` is opaque and its size is compiler-dependent; fold up to
        // the first 8 of its bytes into a `u64`. The result is stable within
        // one build, but not guaranteed collision-free across all types.
        let t_id = TypeId::of::<T>();
        let struct_as_bytes = unsafe {
            core::slice::from_raw_parts(
                (&t_id as *const TypeId) as *const u8,
                mem::size_of::<TypeId>(),
            )
        };
        struct_as_bytes
            .iter()
            .take(8)
            .enumerate()
            .fold(0u64, |acc, (byte_pos, byte)| {
                acc | ((*byte as u64) << (byte_pos * 8))
            })
    }
pub fn is_type(&self, type_id: u64) -> bool {
self.sharing_info.downcast().type_id == type_id
}
pub fn get_type_id(&self) -> u64 {
self.sharing_info.downcast().type_id
}
pub fn get_type_name(&self) -> AzString {
self.sharing_info.downcast().type_name.clone()
}
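    /// Returns the number of live copies (clones) of this `RefAny`, not the
    /// number of active borrows.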
pub fn get_ref_count(&self) -> usize {
self.sharing_info
.downcast()
.num_copies
.load(AtomicOrdering::SeqCst)
}
pub fn get_serialize_fn(&self) -> usize {
self.sharing_info.downcast().serialize_fn
}
pub fn get_deserialize_fn(&self) -> usize {
self.sharing_info.downcast().deserialize_fn
}
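    /// Stores a serialization function pointer. Note: this is a plain
    /// (non-atomic) write to the shared control block, so no clone may access
    /// the field concurrently.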
pub fn set_serialize_fn(&mut self, serialize_fn: usize) {
let inner = self.sharing_info.ptr as *mut RefCountInner;
unsafe {
(*inner).serialize_fn = serialize_fn;
}
}
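    /// Stores a deserialization function pointer; same non-atomic caveat as
    /// `set_serialize_fn`.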
pub fn set_deserialize_fn(&mut self, deserialize_fn: usize) {
let inner = self.sharing_info.ptr as *mut RefCountInner;
unsafe {
(*inner).deserialize_fn = deserialize_fn;
}
}
pub fn can_serialize(&self) -> bool {
self.get_serialize_fn() != 0
}
pub fn can_deserialize(&self) -> bool {
self.get_deserialize_fn() != 0
}
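    /// Replaces the payload seen by `self` and all of its clones in place.
    /// Returns `false` (and changes nothing) if any borrow is active or if
    /// `new_value` is not the sole owner of its payload.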
pub fn replace_contents(&mut self, new_value: RefAny) -> bool {
use core::ptr;
let inner = self.sharing_info.ptr as *mut RefCountInner;
let inner_ref = self.sharing_info.downcast();
        // Claim the exclusive slot as a lock for the duration of the swap.
        let mutable_lock_result = inner_ref.num_mutable_refs.compare_exchange(
            0,
            1,
            AtomicOrdering::SeqCst,
            AtomicOrdering::SeqCst,
        );
        if mutable_lock_result.is_err() {
            return false;
        }
        if inner_ref.num_refs.load(AtomicOrdering::SeqCst) != 0 {
            inner_ref.num_mutable_refs.store(0, AtomicOrdering::SeqCst);
            return false;
        }
        // The bytes of `new_value` are moved into `self` below, so `new_value`
        // must be the sole owner of its payload; otherwise the surviving
        // copies would later drop the same value again (double free). This
        // also rejects replacing a `RefAny` with a clone of itself.
        if !new_value.has_no_copies() {
            inner_ref.num_mutable_refs.store(0, AtomicOrdering::SeqCst);
            return false;
        }
unsafe {
let old_ptr = (*inner)._internal_ptr;
let old_len = (*inner)._internal_len;
let old_layout_size = (*inner)._internal_layout_size;
let old_layout_align = (*inner)._internal_layout_align;
let old_destructor = (*inner).custom_destructor;
            if old_len > 0 && !old_ptr.is_null() {
                old_destructor(old_ptr as *mut c_void);
            } else {
                // Zero-sized payload: still run the destructor (mirroring
                // `RefCount::drop`) in case the ZST has a `Drop` impl.
                let dummy: [u8; 0] = [];
                old_destructor(dummy.as_ptr() as *mut c_void);
            }
if old_layout_size > 0 && !old_ptr.is_null() {
let old_layout = Layout::from_size_align_unchecked(old_layout_size, old_layout_align);
alloc::alloc::dealloc(old_ptr as *mut u8, old_layout);
}
let new_inner = new_value.sharing_info.downcast();
let new_ptr = new_inner._internal_ptr;
let new_len = new_inner._internal_len;
let new_layout_size = new_inner._internal_layout_size;
let new_layout_align = new_inner._internal_layout_align;
let allocated_ptr = if new_len == 0 {
ptr::null_mut()
} else {
let new_layout = Layout::from_size_align(new_len, new_layout_align)
.expect("Failed to create layout");
let heap_ptr = alloc::alloc::alloc(new_layout);
if heap_ptr.is_null() {
alloc::alloc::handle_alloc_error(new_layout);
}
ptr::copy_nonoverlapping(
new_ptr as *const u8,
heap_ptr,
new_len,
);
heap_ptr
};
(*inner)._internal_ptr = allocated_ptr as *const c_void;
(*inner)._internal_len = new_len;
(*inner)._internal_layout_size = new_layout_size;
(*inner)._internal_layout_align = new_layout_align;
(*inner).type_id = new_inner.type_id;
(*inner).type_name = new_inner.type_name.clone();
(*inner).custom_destructor = new_inner.custom_destructor;
(*inner).serialize_fn = new_inner.serialize_fn;
(*inner).deserialize_fn = new_inner.deserialize_fn;
}
        self.sharing_info
            .downcast()
            .num_mutable_refs
            .store(0, AtomicOrdering::SeqCst);
        // The payload bytes now live in `self`; free `new_value`'s control
        // block and its original allocation manually, without running the
        // destructor again (that would double-drop the moved-out value).
        // `mem::forget` alone would leak both.
        unsafe {
            let nv_inner = Box::from_raw(new_value.sharing_info.ptr as *mut RefCountInner);
            if nv_inner._internal_layout_size > 0 && !nv_inner._internal_ptr.is_null() {
                let nv_layout = Layout::from_size_align_unchecked(
                    nv_inner._internal_layout_size,
                    nv_inner._internal_layout_align,
                );
                alloc::alloc::dealloc(nv_inner._internal_ptr as *mut u8, nv_layout);
            }
        }
        core::mem::forget(new_value);
        true
    }
}
impl Clone for RefAny {
    fn clone(&self) -> Self {
        let inner = self.sharing_info.downcast();
        inner.num_copies.fetch_add(1, AtomicOrdering::SeqCst);
        // Best-effort id: under concurrent cloning two clones may observe the
        // same counter value, so `instance_id` is advisory, not unique.
        let new_instance_id = inner.num_copies.load(AtomicOrdering::SeqCst) as u64;
        Self {
            sharing_info: RefCount {
                ptr: self.sharing_info.ptr,
                run_destructor: true,
            },
            instance_id: new_instance_id,
        }
    }
}
impl Drop for RefAny {
    fn drop(&mut self) {
        // Intentionally empty: the field drop glue runs `RefCount::drop` on
        // `sharing_info` afterwards, which decrements `num_copies` and frees
        // the payload once the last copy is gone.
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[derive(Debug, Clone, PartialEq)]
struct TestStruct {
value: i32,
name: String,
}
#[derive(Debug, Clone, PartialEq)]
struct NestedStruct {
inner: TestStruct,
data: Vec<u8>,
}
#[test]
fn test_refany_basic_create_and_downcast() {
let test_val = TestStruct {
value: 42,
name: "test".to_string(),
};
let mut refany = RefAny::new(test_val.clone());
let borrowed = refany
.downcast_ref::<TestStruct>()
.expect("Should downcast successfully");
assert_eq!(borrowed.value, 42);
assert_eq!(borrowed.name, "test");
drop(borrowed);
{
let mut borrowed_mut = refany
.downcast_mut::<TestStruct>()
.expect("Should downcast mutably");
borrowed_mut.value = 100;
borrowed_mut.name = "modified".to_string();
}
let borrowed = refany
.downcast_ref::<TestStruct>()
.expect("Should downcast after mutation");
assert_eq!(borrowed.value, 100);
assert_eq!(borrowed.name, "modified");
}
#[test]
fn test_refany_clone_and_sharing() {
let test_val = TestStruct {
value: 42,
name: "test".to_string(),
};
let mut refany1 = RefAny::new(test_val);
let mut refany2 = refany1.clone();
let mut refany3 = refany1.clone();
let borrowed1 = refany1
.downcast_ref::<TestStruct>()
.expect("Should downcast ref1");
assert_eq!(borrowed1.value, 42);
drop(borrowed1);
let borrowed2 = refany2
.downcast_ref::<TestStruct>()
.expect("Should downcast ref2");
assert_eq!(borrowed2.value, 42);
drop(borrowed2);
{
let mut borrowed_mut = refany3
.downcast_mut::<TestStruct>()
.expect("Should downcast mut");
borrowed_mut.value = 200;
}
let borrowed1 = refany1
.downcast_ref::<TestStruct>()
.expect("Should see mutation from ref1");
assert_eq!(borrowed1.value, 200);
drop(borrowed1);
let borrowed2 = refany2
.downcast_ref::<TestStruct>()
.expect("Should see mutation from ref2");
assert_eq!(borrowed2.value, 200);
}
#[test]
fn test_refany_borrow_checking() {
let test_val = TestStruct {
value: 42,
name: "test".to_string(),
};
let mut refany = RefAny::new(test_val);
{
let borrowed1 = refany
.downcast_ref::<TestStruct>()
.expect("First immutable borrow");
assert_eq!(borrowed1.value, 42);
assert_eq!(borrowed1.name, "test");
}
{
let mut borrowed_mut = refany
.downcast_mut::<TestStruct>()
.expect("Mutable borrow should work");
borrowed_mut.value = 100;
borrowed_mut.name = "modified".to_string();
}
{
let borrowed = refany
.downcast_ref::<TestStruct>()
.expect("Should be able to borrow again");
assert_eq!(borrowed.value, 100);
assert_eq!(borrowed.name, "modified");
}
}
#[test]
fn test_refany_type_safety() {
let test_val = TestStruct {
value: 42,
name: "test".to_string(),
};
let mut refany = RefAny::new(test_val);
assert!(
refany.downcast_ref::<i32>().is_none(),
"Should not allow downcasting to wrong type"
);
assert!(
refany.downcast_mut::<String>().is_none(),
"Should not allow mutable downcasting to wrong type"
);
let borrowed = refany
.downcast_ref::<TestStruct>()
.expect("Correct type should work");
assert_eq!(borrowed.value, 42);
}
#[test]
fn test_refany_zero_sized_type() {
#[derive(Debug, Clone, PartialEq)]
struct ZeroSized;
let refany = RefAny::new(ZeroSized);
let _cloned = refany.clone();
}
#[test]
fn test_refany_with_vec() {
let test_val = vec![1, 2, 3, 4, 5];
let mut refany = RefAny::new(test_val);
{
let mut borrowed_mut = refany
.downcast_mut::<Vec<i32>>()
.expect("Should downcast vec");
borrowed_mut.push(6);
borrowed_mut.push(7);
}
let borrowed = refany
.downcast_ref::<Vec<i32>>()
.expect("Should downcast vec");
assert_eq!(&**borrowed, &[1, 2, 3, 4, 5, 6, 7]);
}
#[test]
fn test_refany_nested_struct() {
let nested = NestedStruct {
inner: TestStruct {
value: 42,
name: "inner".to_string(),
},
data: vec![1, 2, 3],
};
let mut refany = RefAny::new(nested);
{
let mut borrowed_mut = refany
.downcast_mut::<NestedStruct>()
.expect("Should downcast nested");
borrowed_mut.inner.value = 100;
borrowed_mut.data.push(4);
}
let borrowed = refany
.downcast_ref::<NestedStruct>()
.expect("Should downcast nested");
assert_eq!(borrowed.inner.value, 100);
assert_eq!(&borrowed.data, &[1, 2, 3, 4]);
}
#[test]
fn test_refany_drop_order() {
use std::sync::{Arc, Mutex};
let drop_counter = Arc::new(Mutex::new(0));
struct DropTracker {
counter: Arc<Mutex<i32>>,
}
impl Drop for DropTracker {
fn drop(&mut self) {
*self.counter.lock().unwrap() += 1;
}
}
{
let tracker = DropTracker {
counter: drop_counter.clone(),
};
let refany1 = RefAny::new(tracker);
let refany2 = refany1.clone();
let refany3 = refany1.clone();
assert_eq!(*drop_counter.lock().unwrap(), 0, "Should not drop yet");
drop(refany1);
assert_eq!(
*drop_counter.lock().unwrap(),
0,
"Should not drop after first clone dropped"
);
drop(refany2);
assert_eq!(
*drop_counter.lock().unwrap(),
0,
"Should not drop after second clone dropped"
);
drop(refany3);
assert_eq!(
*drop_counter.lock().unwrap(),
1,
"Should drop after last clone dropped"
);
}
}
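    // Additional sketch (not part of the original suite): `replace_contents`
    // should swap the payload in place so existing clones observe the new
    // value through the shared control block.
    #[test]
    fn test_refany_replace_contents() {
        let mut refany = RefAny::new(TestStruct {
            value: 1,
            name: "old".to_string(),
        });
        let mut clone = refany.clone();
        let new_value = RefAny::new(TestStruct {
            value: 2,
            name: "new".to_string(),
        });
        assert!(refany.replace_contents(new_value));
        let borrowed = clone
            .downcast_ref::<TestStruct>()
            .expect("clone should see the replaced payload");
        assert_eq!(borrowed.value, 2);
        assert_eq!(borrowed.name, "new");
    }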
#[test]
fn test_refany_callback_simulation() {
#[derive(Clone)]
struct CallbackData {
counter: i32,
}
let data = CallbackData { counter: 0 };
let mut refany = RefAny::new(data);
{
let mut borrowed = refany
.downcast_mut::<CallbackData>()
.expect("Should downcast in callback");
borrowed.counter += 1;
}
let borrowed = refany
.downcast_ref::<CallbackData>()
.expect("Should read after callback");
assert_eq!(borrowed.counter, 1);
}
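    // Additional sketch (not part of the original suite): the runtime borrow
    // checker should reject an exclusive borrow while a shared guard is
    // alive, and allow it again once the guard is dropped.
    #[test]
    fn test_refany_mut_blocked_by_active_ref() {
        let mut refany = RefAny::new(TestStruct {
            value: 1,
            name: "guard".to_string(),
        });
        let mut refany2 = refany.clone();
        let borrowed = refany
            .downcast_ref::<TestStruct>()
            .expect("shared borrow should succeed");
        assert!(
            refany2.downcast_mut::<TestStruct>().is_none(),
            "exclusive borrow must fail while a shared guard is alive"
        );
        drop(borrowed);
        assert!(
            refany2.downcast_mut::<TestStruct>().is_some(),
            "exclusive borrow should succeed after the guard is dropped"
        );
    }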
}