#![allow(dead_code)]
#![no_std]
#![deny(missing_docs)]
#![cfg_attr(target_arch = "wasm64", feature(simd_wasm64))]
use core::cmp;
use core::ptr;
use sys::System;
#[cfg(feature = "global")]
pub use self::global::{enable_alloc_after_fork, GlobalDlmalloc};
mod dlmalloc;
#[cfg(feature = "global")]
mod global;
/// An interface through which the allocator obtains and releases memory
/// from the underlying system.
///
/// # Safety
///
/// Implementations must return valid memory regions of at least the
/// requested size from `alloc`/`remap`, and must only report success from
/// `free`/`free_part` when the memory was actually released; the allocator
/// trusts these results when managing its heap.
pub unsafe trait Allocator: Send {
/// Requests at least `size` bytes from the system. Returns the base
/// pointer, the number of bytes actually allocated, and flags that are
/// later passed back to [`Allocator::can_release_part`].
fn alloc(&self, size: usize) -> (*mut u8, usize, u32);
/// Resizes the allocation at `ptr` from `oldsize` to `newsize` bytes.
/// `can_move` indicates whether the implementation may relocate the
/// region. Returns the (possibly moved) pointer, or null on failure.
fn remap(&self, ptr: *mut u8, oldsize: usize, newsize: usize, can_move: bool) -> *mut u8;
/// Releases the tail of the allocation at `ptr`, shrinking it from
/// `oldsize` to `newsize` bytes. Returns `true` on success.
fn free_part(&self, ptr: *mut u8, oldsize: usize, newsize: usize) -> bool;
/// Releases the entire `size`-byte allocation at `ptr` back to the
/// system. Returns `true` on success.
fn free(&self, ptr: *mut u8, size: usize) -> bool;
/// Reports whether part of an allocation carrying `flags` (as returned
/// by [`Allocator::alloc`]) can be released with `free_part`.
fn can_release_part(&self, flags: u32) -> bool;
/// Reports whether memory returned by `alloc` is already zeroed
/// (letting `calloc` skip the explicit clear).
fn allocates_zeros(&self) -> bool;
/// Returns the system page size, in bytes.
fn page_size(&self) -> usize;
}
pub struct Dlmalloc<A = System>(dlmalloc::Dlmalloc<A>);
// Select the platform-specific `sys` module providing the default
// `System` allocator. Falls back to a stub implementation on targets
// with no supported system memory interface.
cfg_if::cfg_if! {
if #[cfg(target_family = "wasm")] {
#[path = "wasm.rs"]
mod sys;
} else if #[cfg(target_os = "windows")] {
#[path = "windows.rs"]
mod sys;
} else if #[cfg(target_os = "xous")] {
#[path = "xous.rs"]
mod sys;
} else if #[cfg(any(target_os = "linux", target_os = "macos"))] {
#[path = "unix.rs"]
mod sys;
} else {
// Unknown platform: dummy.rs presumably reports allocation failure
// for every request — confirm against dummy.rs.
#[path = "dummy.rs"]
mod sys;
}
}
impl Dlmalloc<System> {
pub const fn new() -> Dlmalloc<System> {
Dlmalloc(dlmalloc::Dlmalloc::new(System::new()))
}
}
impl<A> Dlmalloc<A> {
    /// Creates a new allocator that obtains system memory from the
    /// caller-supplied `sys_allocator`.
    pub const fn new_with_allocator(sys_allocator: A) -> Dlmalloc<A> {
        let inner = dlmalloc::Dlmalloc::new(sys_allocator);
        Dlmalloc(inner)
    }
}
impl<A: Allocator> Dlmalloc<A> {
    /// Allocates `size` bytes of memory with the requested `align`ment.
    ///
    /// Returns null on failure. Alignments no larger than the allocator's
    /// default are served by the plain `malloc` path; larger ones go
    /// through `memalign`.
    ///
    /// # Safety
    ///
    /// The heap must be in a consistent state; the returned memory is
    /// uninitialized.
    #[inline]
    pub unsafe fn malloc(&mut self, size: usize, align: usize) -> *mut u8 {
        if align > self.0.malloc_alignment() {
            self.0.memalign(align, size)
        } else {
            self.0.malloc(size)
        }
    }

    /// Allocates `size` bytes with the requested `align`ment, returning
    /// zeroed memory (or null on failure).
    ///
    /// # Safety
    ///
    /// Same contract as [`Dlmalloc::malloc`].
    #[inline]
    pub unsafe fn calloc(&mut self, size: usize, align: usize) -> *mut u8 {
        let ptr = self.malloc(size, align);
        // Skip the explicit clear when the system already hands out
        // zeroed pages (as reported via `calloc_must_clear`).
        let needs_clear = !ptr.is_null() && self.0.calloc_must_clear(ptr);
        if needs_clear {
            ptr::write_bytes(ptr, 0, size);
        }
        ptr
    }

    /// Deallocates the block at `ptr` previously returned by this
    /// allocator with the given `size` and `align`.
    ///
    /// # Safety
    ///
    /// `ptr` must have been allocated by this allocator with the same
    /// `size`/`align`, and must not be used afterwards.
    #[inline]
    pub unsafe fn free(&mut self, ptr: *mut u8, size: usize, align: usize) {
        // Alignment does not affect deallocation.
        let _ = align;
        // Debug-style check that `size` matches the allocation.
        self.0.validate_size(ptr, size);
        self.0.free(ptr)
    }

    /// Resizes the allocation at `ptr` from `old_size` to `new_size`
    /// bytes, preserving `old_align`. Returns the (possibly moved) new
    /// pointer, or null on failure, in which case the original block is
    /// left untouched.
    ///
    /// # Safety
    ///
    /// `ptr` must have been allocated by this allocator with
    /// `old_size`/`old_align`; on success it must not be used afterwards.
    #[inline]
    pub unsafe fn realloc(
        &mut self,
        ptr: *mut u8,
        old_size: usize,
        old_align: usize,
        new_size: usize,
    ) -> *mut u8 {
        self.0.validate_size(ptr, old_size);
        if old_align > self.0.malloc_alignment() {
            // Over-aligned blocks: the inner realloc only preserves the
            // default alignment, so allocate-copy-free instead.
            let new_ptr = self.malloc(new_size, old_align);
            if !new_ptr.is_null() {
                ptr::copy_nonoverlapping(ptr, new_ptr, cmp::min(old_size, new_size));
                self.free(ptr, old_size, old_align);
            }
            new_ptr
        } else {
            self.0.realloc(ptr, new_size)
        }
    }

    /// Asks the allocator to return unused memory to the system, keeping
    /// at least `pad` bytes of slack. Returns whether any memory was
    /// released.
    ///
    /// # Safety
    ///
    /// The heap must be in a consistent state.
    pub unsafe fn trim(&mut self, pad: usize) -> bool {
        self.0.trim(pad)
    }

    /// Tears down this allocator, releasing its memory back to the
    /// system. Returns the value reported by the inner allocator's
    /// teardown — presumably the number of bytes released; confirm
    /// against dlmalloc.rs.
    ///
    /// # Safety
    ///
    /// No pointers previously returned by this allocator may be used
    /// after it is destroyed.
    pub unsafe fn destroy(self) -> usize {
        self.0.destroy()
    }

    /// Returns a shared reference to the underlying system allocator.
    pub fn allocator(&self) -> &A {
        self.0.allocator()
    }

    /// Returns an exclusive reference to the underlying system allocator.
    pub fn allocator_mut(&mut self) -> &mut A {
        self.0.allocator_mut()
    }
}