use once_cell::sync::Lazy;
use std::alloc::{GlobalAlloc, Layout, System};
use std::fmt;
use std::fmt::Write;
use std::ops::Deref;
use std::sync::Mutex;
use weak_list::AllocHashSet;
use weak_list::AllocMem;
use weak_list::WeakList;
use weak_list::WeakListHashSet;
/// A `GlobalAlloc` wrapper that, on allocation failure, evicts entries from a
/// global weak list (see the `GlobalAlloc` impl below) and retries.
///
/// `Clone` is derived, so it is only available when `A: Clone`.
#[derive(Clone)]
pub struct WeakAlloc<A> {
    // The inner allocator every alloc/dealloc is forwarded to.
    alloc: A,
}
// Global registry of evictable entries, shared by all `WeakAlloc` instances.
static WEAK_LIST: Lazy<Mutex<WeakList<WeakListHashSet>>> =
    Lazy::new(|| Mutex::new(WeakList::new()));
// Pre-allocated spare hashset handed to WEAK_LIST so it can grow its internal
// hashset without allocating while its lock is held (see `give` below).
// NOTE(review): starts at capacity 1; it is grown on demand after first use.
static BIGGER_HASHSET: Lazy<Mutex<AllocHashSet>> =
    Lazy::new(|| Mutex::new(AllocHashSet::with_capacity(1)));
impl<A> WeakAlloc<A> {
pub const fn new(alloc: A) -> Self {
Self { alloc }
}
}
/// A weak handle to a value stored in the global weak list, paired with the
/// allocator it was created through so `upgrade` can hand back an `ArcRef`
/// carrying the same allocator.
pub struct WeakRef<T: ?Sized, A: 'static + Clone + GlobalAlloc = System> {
    // Underlying weak handle from the `weak_list` crate.
    weak: weak_list::WeakRef<T>,
    // Allocator handle propagated into the `ArcRef` produced by `upgrade`.
    alloc: WeakAlloc<A>,
}
impl<T, A: GlobalAlloc + Clone> Clone for WeakRef<T, A> {
fn clone(&self) -> Self {
Self {
weak: self.weak.clone(),
alloc: self.alloc.clone(),
}
}
}
impl<T: ?Sized + fmt::Debug, A: 'static + Clone + GlobalAlloc> fmt::Debug for WeakRef<T, A> {
    /// Always prints the placeholder `(Weak)`: the target may be gone, so the
    /// inner value is never formatted.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("(Weak)")
    }
}
impl<T: Send + Sync + 'static, A: 'static + Clone + GlobalAlloc> WeakRef<T, A> {
pub fn upgrade(&self) -> Option<ArcRef<T, A>> {
self.alloc.upgrade(self)
}
}
/// A strong handle to a value in the global weak list; while it lives, the
/// entry cannot be evicted by the allocator. Carries the allocator handle so
/// `downgrade` can produce a matching `WeakRef`.
pub struct ArcRef<T: ?Sized, A: 'static + Clone + GlobalAlloc = System> {
    // Underlying strong handle from the `weak_list` crate.
    arc: weak_list::ArcRef<T>,
    // Allocator handle propagated into `WeakRef`s produced by `downgrade`.
    alloc: WeakAlloc<A>,
}
impl<T, A: GlobalAlloc + Clone> Clone for ArcRef<T, A> {
fn clone(&self) -> Self {
Self {
arc: self.arc.clone(),
alloc: self.alloc.clone(),
}
}
}
impl<T: ?Sized, A: GlobalAlloc + Clone> AsRef<T> for ArcRef<T, A> {
fn as_ref(&self) -> &T {
&**self
}
}
impl<T: ?Sized, A: GlobalAlloc + Clone> Deref for ArcRef<T, A> {
    type Target = T;
    /// Borrows the referenced value.
    // `&self.arc` relies on deref coercion through `weak_list::ArcRef<T>`
    // to yield `&T` — presumably a single `Deref` step; confirm in `weak_list`.
    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.arc
    }
}
impl<T, A: GlobalAlloc + Clone> ArcRef<T, A> {
    /// Returns a mutable borrow of the value when `this` is the only strong
    /// handle, mirroring `Arc::get_mut`. Associated fn to avoid shadowing
    /// methods of `T` reachable through `Deref`.
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        let inner = &mut this.arc;
        weak_list::ArcRef::get_mut(inner)
    }
    /// Creates a `WeakRef` to the same entry, carrying the same allocator.
    pub fn downgrade(this: &Self) -> WeakRef<T, A> {
        let weak = weak_list::ArcRef::downgrade(&this.arc);
        let alloc = this.alloc.clone();
        WeakRef { weak, alloc }
    }
}
impl<A> WeakAlloc<A>
where
    A: GlobalAlloc + Clone,
{
    /// Stores `element` in the global weak list and returns a weak handle to
    /// it. The entry may be evicted at any time by a failing allocation (see
    /// the `GlobalAlloc` impl), so callers must `upgrade` before use.
    pub fn give<T: Send + Sync + 'static>(&self, element: T) -> WeakRef<T, A> {
        // Node memory built up front so the push below needs no allocation.
        let alloc_mem = AllocMem::default();
        // Lock order: BIGGER_HASHSET before WEAK_LIST — matched by
        // `give_and_upgrade`; keep it consistent to avoid deadlock.
        let mut big_hs = BIGGER_HASHSET.lock().unwrap();
        let big_hs_cap = big_hs.capacity();
        let big_hs_opt = Some(&mut *big_hs);
        let mut lock = WEAK_LIST.lock().unwrap();
        // Offer our pre-allocated spare hashset so the list can grow without
        // allocating while its lock is held.
        lock.realloc_hashset_if_needed_no_alloc(big_hs_opt);
        let weak = lock.push_front_no_alloc(element, alloc_mem);
        drop(lock);
        // If the list consumed the spare (capacity dropped), refill it — at
        // double the size — now that the list lock is released; presumably
        // `allocate_capacity` may allocate, which must not happen under the
        // WEAK_LIST lock. TODO(review): confirm against `weak_list` docs.
        let new_hs_cap = big_hs.capacity();
        if new_hs_cap < big_hs_cap {
            big_hs.allocate_capacity(big_hs_cap * 2);
        }
        WeakRef {
            weak,
            alloc: self.clone(),
        }
    }
    /// Like `give`, but also upgrades immediately — while still holding the
    /// list lock, so the fresh entry cannot be evicted in between — and
    /// returns the strong handle.
    pub fn give_and_upgrade<T: Send + Sync + 'static>(&self, element: T) -> ArcRef<T, A> {
        // Same no-alloc-under-lock dance as `give`; see comments there.
        let alloc_mem = AllocMem::default();
        let mut big_hs = BIGGER_HASHSET.lock().unwrap();
        let big_hs_cap = big_hs.capacity();
        let big_hs_opt = Some(&mut *big_hs);
        let mut lock = WEAK_LIST.lock().unwrap();
        lock.realloc_hashset_if_needed_no_alloc(big_hs_opt);
        let weak = lock.push_front_no_alloc(element, alloc_mem);
        // Upgrade cannot fail here: the entry was just inserted and the list
        // lock is still held, hence the `unwrap`.
        let arc = weak.upgrade_quietly().unwrap();
        drop(lock);
        let new_hs_cap = big_hs.capacity();
        if new_hs_cap < big_hs_cap {
            big_hs.allocate_capacity(big_hs_cap * 2);
        }
        ArcRef {
            arc,
            alloc: self.clone(),
        }
    }
    /// Promotes `w` to a strong handle, or returns `None` if the entry has
    /// been evicted from the list.
    pub fn upgrade<T: Send + Sync + 'static>(&self, w: &WeakRef<T, A>) -> Option<ArcRef<T, A>> {
        let mut wl = WEAK_LIST.lock().unwrap();
        w.weak.upgrade(&mut wl).map(|arc| ArcRef {
            arc,
            alloc: self.clone(),
        })
    }
    /// Drops every evictable entry from the global weak list.
    pub fn clear(&self) {
        let mut wl = WEAK_LIST.lock().unwrap();
        wl.clear();
    }
    /// Allocates directly through the inner allocator, bypassing the
    /// eviction-retry loop of the `GlobalAlloc` impl.
    ///
    /// # Safety
    /// Same contract as [`GlobalAlloc::alloc`]: `layout` must have non-zero
    /// size, and the result must be deallocated with a matching layout.
    pub unsafe fn weak_alloc(&self, layout: Layout) -> *mut u8 {
        self.alloc.alloc(layout)
    }
    /// Borrows the wrapped inner allocator.
    pub fn inner(&self) -> &A {
        &self.alloc
    }
}
unsafe impl<A> GlobalAlloc for WeakAlloc<A>
where
    A: GlobalAlloc,
{
    /// Forwards to the inner allocator; on failure, evicts the
    /// least-recently-used weak-list entry and retries until the allocation
    /// succeeds or the list is empty (then records the failing layout and
    /// returns null, as `GlobalAlloc` permits).
    // NOTE(review): if this type is installed as the global allocator,
    // locking WEAK_LIST / dropping entries here may itself allocate or
    // deallocate — confirm `weak_list` is reentrancy-safe in that setup.
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut ret = self.alloc.alloc(layout);
        loop {
            if !ret.is_null() {
                break;
            }
            // Allocation failed: free the LRU cached entry to reclaim memory.
            // The lock guard is released before the drop/retry below.
            let some_arc = WEAK_LIST.lock().unwrap().pop_lru();
            if let Some(arc) = some_arc {
                drop(arc);
            } else {
                // Nothing left to evict — count the failure per layout and
                // give up with null.
                instrument::increase_null_ptr_layout_counter(layout);
                return ret;
            }
            ret = self.alloc.alloc(layout);
        }
        ret
    }
    /// Deallocation is forwarded unchanged to the inner allocator.
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.alloc.dealloc(ptr, layout);
    }
}
/// Instrumentation for allocation failures: a tiny per-`Layout` histogram of
/// how often the allocator had to return a null pointer.
pub mod instrument {
    use super::*;
    /// Fixed-size histogram: up to `NUM_NULL_PTR_LAYOUT` distinct layouts are
    /// tracked individually; everything past that is lumped into `other`.
    struct SmallHist {
        v: Vec<(Layout, u32)>,
        other: u32,
    }
    const NUM_NULL_PTR_LAYOUT: usize = 4;
    static NULL_PTR_LAYOUT_COUNTER: Lazy<Mutex<SmallHist>> = Lazy::new(|| {
        Mutex::new(SmallHist {
            // Pre-size to the cap so pushes below never reallocate — this may
            // run inside a failing allocator path.
            v: Vec::with_capacity(NUM_NULL_PTR_LAYOUT),
            other: 0,
        })
    });
    /// Records one null-pointer allocation failure for `layout`.
    pub fn increase_null_ptr_layout_counter(layout: Layout) {
        let mut guard = NULL_PTR_LAYOUT_COUNTER.lock().unwrap();
        let hist = &mut *guard;
        if let Some((_, ctr)) = hist.v.iter_mut().find(|(l, _)| *l == layout) {
            // Layout already tracked: bump its counter.
            *ctr += 1;
        } else if hist.v.len() < NUM_NULL_PTR_LAYOUT {
            // Still room for a new per-layout slot.
            hist.v.push((layout, 1));
        } else {
            // Histogram full: fold into the overflow bucket.
            hist.other += 1;
        }
    }
    /// Renders the recorded failures as a human-readable warning, or an empty
    /// string when nothing has been recorded.
    ///
    /// The lock is held for the whole dump so the emptiness check and the
    /// formatted output cannot disagree (the previous version dropped and
    /// re-acquired the guard, leaving a race window between the two).
    pub fn dump_null_ptr_layout_counters() -> String {
        let guard = NULL_PTR_LAYOUT_COUNTER.lock().unwrap();
        if guard.v.is_empty() {
            return String::new();
        }
        let mut buf = String::with_capacity(1024);
        write!(
            buf,
            "Warn: the following layouts caused allocator to return null pointer: "
        )
        .unwrap();
        writeln!(buf, "null_ptr layouts: {:?}", guard.v).unwrap();
        if guard.other != 0 {
            writeln!(buf, "number of other null_ptrs: {}", guard.other).unwrap();
        }
        buf
    }
}