#![allow(dead_code)]
#![deny(missing_docs)]
#![feature(allocator_api)]
use core::cmp;
use core::ptr;
use std::alloc::{AllocError, Layout};
use std::path::Path;
use std::ptr::NonNull;
use std::sync::{Arc, Mutex};
use sys::System;
mod dlmalloc;
mod sys;
pub use memmap2::Advice;
/// Low-level system-memory interface consumed by the dlmalloc core.
///
/// Implementations hand out page-granular regions (e.g. the file-backed
/// mapping provided by [`System`]). The method set mirrors dlmalloc's
/// system-allocator contract.
///
/// # Safety
///
/// Implementations must return valid regions of at least the requested
/// size and must uphold the ownership transfers implied by `free`,
/// `free_part` and `remap`; the allocator core relies on these guarantees
/// for memory safety.
pub unsafe trait SystemAllocator: Send {
    /// Allocates at least `size` bytes. Returns the pointer, the actual
    /// size of the region, and implementation-defined flags (later passed
    /// to `can_release_part`). A null pointer signals failure.
    fn alloc(&self, size: usize) -> (*mut u8, usize, u32);
    /// Resizes the region at `ptr` from `oldsize` to `newsize`; when
    /// `can_move` is true the region may be relocated. Returns the
    /// (possibly new) pointer, or null on failure.
    fn remap(&self, ptr: *mut u8, oldsize: usize, newsize: usize, can_move: bool) -> *mut u8;
    /// Releases the tail of the region at `ptr`, shrinking it from
    /// `oldsize` to `newsize`. Returns `true` on success.
    fn free_part(&self, ptr: *mut u8, oldsize: usize, newsize: usize) -> bool;
    /// Releases the entire region at `ptr`. Returns `true` on success.
    fn free(&self, ptr: *mut u8, size: usize) -> bool;
    /// Whether `free_part` may be used on a region allocated with `flags`.
    fn can_release_part(&self, flags: u32) -> bool;
    /// Whether freshly allocated regions are already zero-filled.
    fn allocates_zeros(&self) -> bool;
    /// The allocation granularity (page size) of this system.
    fn page_size(&self) -> usize;
}
/// A thread-safe dlmalloc allocator whose backing memory lives on disk.
///
/// Cloning is cheap: all clones share the same underlying allocator state
/// through an `Arc<Mutex<..>>`.
#[derive(Clone)]
pub struct DiskDlmalloc(Arc<Mutex<dlmalloc::Dlmalloc<System>>>);
impl DiskDlmalloc {
    /// Creates an allocator backed by the file at `file_path` with
    /// `total_size` bytes of backing storage.
    ///
    /// `mem_advise` is an optional advice hint applied to the underlying
    /// memory mapping.
    pub fn new<P: AsRef<Path>>(
        file_path: P,
        total_size: usize,
        mem_advise: Option<Advice>,
    ) -> DiskDlmalloc {
        let system = System::new(file_path, total_size, mem_advise);
        let state = dlmalloc::Dlmalloc::new(system);
        DiskDlmalloc(Arc::new(Mutex::new(state)))
    }
}
impl DiskDlmalloc {
    /// Allocates `size` bytes with alignment `align`; returns null on
    /// failure.
    ///
    /// # Safety
    ///
    /// A non-null result must eventually be released through
    /// [`Self::free`] (or [`Self::realloc`]) with the same size and
    /// alignment.
    #[inline]
    pub unsafe fn malloc(&self, size: usize, align: usize) -> *mut u8 {
        let mut state = self.0.lock().unwrap();
        if align <= state.malloc_alignment() {
            state.malloc(size)
        } else {
            state.memalign(align, size)
        }
    }

    /// Allocates `size` zeroed bytes with alignment `align`; returns null
    /// on failure.
    ///
    /// # Safety
    ///
    /// Same contract as [`Self::malloc`].
    #[inline]
    pub unsafe fn calloc(&self, size: usize, align: usize) -> *mut u8 {
        let ptr = self.malloc(size, align);
        let state = self.0.lock().unwrap();
        if !ptr.is_null() {
            // The explicit clear is skipped when the underlying system
            // already hands this chunk out zero-filled.
            if state.calloc_must_clear(ptr) {
                ptr::write_bytes(ptr, 0, size);
            }
        }
        ptr
    }

    /// Releases an allocation previously obtained from this allocator.
    ///
    /// # Safety
    ///
    /// `ptr` must denote a live allocation from this allocator and `size`
    /// must match the size it was allocated with.
    #[inline]
    pub unsafe fn free(&self, ptr: *mut u8, size: usize, align: usize) {
        // Alignment plays no role in dlmalloc's free; the parameter exists
        // for API symmetry with `malloc`.
        let _ = align;
        let mut state = self.0.lock().unwrap();
        state.validate_size(ptr, size);
        state.free(ptr)
    }

    /// Resizes the allocation at `ptr` to `new_size` bytes, preserving its
    /// contents up to the smaller of the two sizes. Returns null on
    /// failure, in which case the original allocation is left untouched.
    ///
    /// # Safety
    ///
    /// `ptr`, `old_size` and `old_align` must describe a live allocation
    /// from this allocator.
    #[inline]
    pub unsafe fn realloc(
        &self,
        ptr: *mut u8,
        old_size: usize,
        old_align: usize,
        new_size: usize,
    ) -> *mut u8 {
        let mut state = self.0.lock().unwrap();
        state.validate_size(ptr, old_size);
        if old_align <= state.malloc_alignment() {
            return state.realloc(ptr, new_size);
        }
        // Over-aligned blocks cannot go through dlmalloc's realloc:
        // allocate a fresh aligned block, copy, then release the old one.
        // The guard must be dropped first because `malloc`/`free` re-lock.
        drop(state);
        let fresh = self.malloc(new_size, old_align);
        if fresh.is_null() {
            return fresh;
        }
        let copy_len = cmp::min(old_size, new_size);
        ptr::copy_nonoverlapping(ptr, fresh, copy_len);
        self.free(ptr, old_size, old_align);
        fresh
    }

    /// Returns unused memory to the system where possible, keeping at most
    /// `pad` bytes of slack. Returns `true` if anything was released.
    ///
    /// # Safety
    ///
    /// NOTE(review): marked `unsafe` to match the inner dlmalloc API; no
    /// additional caller contract is visible here.
    pub unsafe fn trim(&self, pad: usize) -> bool {
        self.0.lock().unwrap().trim(pad)
    }
}
unsafe impl std::alloc::Allocator for DiskDlmalloc {
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
let size = layout.size();
let align = layout.align();
let mut me = self.0.lock().unwrap();
let ptr = if align <= me.malloc_alignment() {
unsafe { me.malloc(size) }
} else {
unsafe { me.memalign(align, size) }
};
if ptr.is_null() {
Err(AllocError)
} else {
unsafe {
Ok(NonNull::slice_from_raw_parts(
NonNull::new_unchecked(ptr),
size,
))
}
}
}
fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
let size = layout.size();
let align = layout.align();
let mut me = self.0.lock().unwrap();
let ptr = if align <= me.malloc_alignment() {
unsafe { me.malloc(size) }
} else {
unsafe { me.memalign(align, size) }
};
if ptr.is_null() {
return Err(AllocError);
}
unsafe {
if me.calloc_must_clear(ptr) {
ptr::write_bytes(ptr, 0, size);
}
}
unsafe {
Ok(NonNull::slice_from_raw_parts(
NonNull::new_unchecked(ptr),
size,
))
}
}
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() == 0 {
return;
}
let mut me = self.0.lock().unwrap();
me.validate_size(ptr.as_ptr(), layout.size());
me.free(ptr.as_ptr());
}
unsafe fn grow(&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout)
-> Result<NonNull<[u8]>, AllocError> {
let old_size = old_layout.size();
let old_align = old_layout.align();
let new_size = new_layout.size();
let new_align = new_layout.align();
let mut me = self.0.lock().unwrap();
me.validate_size(ptr.as_ptr(), old_size);
if old_align <= me.malloc_alignment() && new_align <= me.malloc_alignment() {
let new_ptr = me.realloc(ptr.as_ptr(), new_size);
if new_ptr.is_null() {
return Err(AllocError);
}
Ok(NonNull::slice_from_raw_parts(
NonNull::new_unchecked(new_ptr),
new_size,
))
} else {
drop(me);
let res_ptr = self.malloc(new_size, new_align);
if res_ptr.is_null() {
return Err(AllocError);
}
ptr::copy_nonoverlapping(ptr.as_ptr(), res_ptr, core::cmp::min(old_size, new_size));
self.free(ptr.as_ptr(), old_size, old_align);
Ok(NonNull::slice_from_raw_parts(
NonNull::new_unchecked(res_ptr),
new_size,
))
}
}
unsafe fn grow_zeroed(&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout)
-> Result<NonNull<[u8]>, AllocError> {
let old_size = old_layout.size();
let old_align = old_layout.align();
let new_size = new_layout.size();
let new_align = new_layout.align();
let mut me = self.0.lock().unwrap();
me.validate_size(ptr.as_ptr(), old_size);
if old_align <= me.malloc_alignment() && new_align <= me.malloc_alignment() {
let new_ptr = me.realloc(ptr.as_ptr(), new_size);
if new_ptr.is_null() {
return Err(AllocError);
}
if new_ptr == ptr.as_ptr() && new_size > old_size {
ptr::write_bytes(new_ptr.add(old_size), 0, new_size - old_size);
} else if new_ptr != ptr.as_ptr() && new_size > old_size && me.calloc_must_clear(new_ptr) {
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr, old_size);
ptr::write_bytes(new_ptr.add(old_size), 0, new_size - old_size);
}
Ok(NonNull::slice_from_raw_parts(
NonNull::new_unchecked(new_ptr),
new_size,
))
} else {
drop(me);
let res_ptr = self.malloc(new_size, new_align);
if res_ptr.is_null() {
return Err(AllocError);
}
ptr::copy_nonoverlapping(ptr.as_ptr(), res_ptr, core::cmp::min(old_size, new_size));
if new_size > old_size {
ptr::write_bytes(res_ptr.add(old_size), 0, new_size - old_size);
}
self.free(ptr.as_ptr(), old_size, old_align);
Ok(NonNull::slice_from_raw_parts(
NonNull::new_unchecked(res_ptr),
new_size,
))
}
}
unsafe fn shrink(&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout)
-> Result<NonNull<[u8]>, AllocError> {
let old_size = old_layout.size();
let old_align = old_layout.align();
let new_size = new_layout.size();
let new_align = new_layout.align();
let mut me = self.0.lock().unwrap();
me.validate_size(ptr.as_ptr(), old_size);
if old_align <= me.malloc_alignment() && new_align <= me.malloc_alignment() {
let new_ptr = me.realloc(ptr.as_ptr(), new_size);
if new_ptr.is_null() {
return Err(AllocError);
}
Ok(NonNull::slice_from_raw_parts(
NonNull::new_unchecked(new_ptr),
new_size,
))
} else {
drop(me);
let res_ptr = self.malloc(new_size, new_align);
if res_ptr.is_null() {
return Err(AllocError);
}
ptr::copy_nonoverlapping(ptr.as_ptr(), res_ptr, core::cmp::min(old_size, new_size));
self.free(ptr.as_ptr(), old_size, old_align);
Ok(NonNull::slice_from_raw_parts(
NonNull::new_unchecked(res_ptr),
new_size,
))
}
}
fn by_ref(&self) -> &Self {
self
}
}