use memkit::{MkAllocator, MkConfig};
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
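
// Deferred free should only queue the destructor; it must not run until
// reclaim() is called on the allocator.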
#[test]
fn test_allocator_deferred_free() {
    let alloc = MkAllocator::new(MkConfig::default());

    // Flag flipped by the Drop impl so the test can observe when the
    // destructor actually runs.
    struct MyData {
        dropped: Arc<AtomicBool>,
    }

    impl Drop for MyData {
        fn drop(&mut self) {
            self.dropped.store(true, Ordering::SeqCst);
        }
    }

    let dropped = Arc::new(AtomicBool::new(false));
    let box1 = alloc
        .heap_box(MyData {
            dropped: Arc::clone(&dropped),
        })
        .unwrap();
    let ptr = box1.into_raw();

    unsafe {
        // Destructor callback handed to the allocator: it runs the value's
        // Drop impl in place when the allocation is eventually reclaimed.
        unsafe fn drop_my_data(p: *mut MyData) {
            std::ptr::drop_in_place(p);
        }
        alloc.deferred_free(ptr, Some(drop_my_data));
    }

    // deferred_free only queues the allocation, so the destructor must not
    // have run yet.
    assert!(!dropped.load(Ordering::SeqCst));

    // reclaim() drains the deferred-free queue and reports how many
    // allocations it released.
    let count = alloc.reclaim();
    assert_eq!(count, 1);
    assert!(dropped.load(Ordering::SeqCst));
}
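
// The "cross-thread" aspect is exercised via the Send bound rather than by
// spawning a thread: the allocator must be Send before its deferred-free
// queue can be shared across threads.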
#[test]
fn test_deferred_free_cross_thread() {
    // Compile-time check that MkAllocator is Send.
    fn assert_send<T: Send>(_: &T) {}

    let alloc = MkAllocator::new(MkConfig::default());
    assert_send(&alloc);

    struct DropTracker {
        dropped: Arc<AtomicBool>,
    }

    impl Drop for DropTracker {
        fn drop(&mut self) {
            self.dropped.store(true, Ordering::SeqCst);
        }
    }

    let dropped = Arc::new(AtomicBool::new(false));

    // Obtain the raw pointer in an inner scope so only the raw pointer,
    // not the owning box, is visible afterwards.
    let ptr = {
        let box1 = alloc
            .heap_box(DropTracker {
                dropped: Arc::clone(&dropped),
            })
            .unwrap();
        box1.into_raw()
    };

    unsafe {
        unsafe fn drop_tracker(p: *mut DropTracker) {
            std::ptr::drop_in_place(p);
        }
        alloc.deferred_free(ptr, Some(drop_tracker));
    }

    // The destructor is deferred until reclaim() runs.
    assert!(!dropped.load(Ordering::SeqCst));
    alloc.reclaim();
    assert!(dropped.load(Ordering::SeqCst));
}