#![allow(unused)]
use core::cmp;
use core::mem;
use core::ptr;
use crate::common::{align_down, align_up};
use crate::dlassert;
use crate::dlverbose;
use crate::dlverbose_no_flush;
use dlverbose::{DL_CHECKS, DL_DEBUG, DL_VERBOSE, VERBOSE_DEL};
use sys;
extern crate static_assertions;
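// Basic layout constants. Every chunk address and size is kept aligned to
// MALIGN (two machine words); headers are built out of plain words, as the
// static asserts below verify.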
const PTR_SIZE: usize = mem::size_of::<usize>();
const MALIGN: usize = 2 * PTR_SIZE;
const CHUNK_SIZE: usize = mem::size_of::<Chunk>();
const SEG_SIZE: usize = mem::size_of::<Segment>();
const CHUNK_MEM_OFFSET: usize = 2 * PTR_SIZE;
const TREE_NODE_SIZE: usize = mem::size_of::<TreeChunk>();
const MIN_CHUNK_SIZE: usize = mem::size_of::<Chunk>();
const MIN_MEM_SIZE: usize = MIN_CHUNK_SIZE - PTR_SIZE;
const SEG_INFO_SIZE: usize = CHUNK_MEM_OFFSET + SEG_SIZE + PTR_SIZE;
const DEFAULT_GRANULARITY: usize = 64 * 1024;
static_assertions::const_assert!(2 * MALIGN == CHUNK_SIZE);
static_assertions::const_assert!(3 * PTR_SIZE == SEG_SIZE);
static_assertions::const_assert!(MIN_CHUNK_SIZE % MALIGN == 0);
static_assertions::const_assert!(6 * PTR_SIZE == SEG_INFO_SIZE);
static_assertions::const_assert!(SEG_INFO_SIZE % MALIGN == 0);
static_assertions::const_assert!(DEFAULT_GRANULARITY % MALIGN == 0);
static_assertions::const_assert!(DEFAULT_GRANULARITY >= MIN_CHUNK_SIZE + SEG_INFO_SIZE);
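// Flag bits kept in the low bits of `Chunk::head`; they never collide with
// the size because MALIGN > FLAG_BITS (asserted below). PINUSE means "the
// previous chunk is in use", CINUSE "this chunk is in use". A segment's
// border pseudo-chunk stores all flag bits and a zero size.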
const PINUSE: usize = 1 << 0;
const CINUSE: usize = 1 << 1;
const FLAG4: usize = 1 << 2;
const INUSE: usize = PINUSE | CINUSE;
const FLAG_BITS: usize = PINUSE | CINUSE | FLAG4;
const BORDER_CHUNK_HEAD: usize = FLAG_BITS;
static_assertions::const_assert!(MALIGN > FLAG_BITS);
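// Bin configuration: 32 small bins spaced 1 << SMALLBIN_SHIFT bytes apart,
// and 32 tree bins for chunks of at least 1 << TREEBIN_SHIFT bytes.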
const NSMALLBINS: usize = 32;
const NTREEBINS: usize = 32;
const SMALLBIN_SHIFT: usize = 3;
const TREEBIN_SHIFT: usize = 8;
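// Cell sizes of the static buffer, packed as 4-bit nibbles in units of
// MALIGN, least significant nibble first: 0x86611 encodes cells of 1, 1, 6,
// 6 and 8 * MALIGN bytes. The consts below unpack this encoding:
// SBUFF_IDX_MAX is the number of cells, SBUFF_MAX the largest cell size,
// SBUFF_SIZE the total buffer size and SBUFF_IDX_OFFSETS the per-cell
// offsets (packed the same way).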
const SBUFF_IDX_SIZES: usize = 0x86611;
const SBUFF_IDX_MAX: usize = {
let mut sizes = SBUFF_IDX_SIZES;
let mut idx = 0;
while sizes != 0 {
idx += 1;
sizes >>= 4;
}
idx
};
static_assertions::const_assert!(SBUFF_IDX_MAX < 8);
const SBUFF_MAX: usize = { (SBUFF_IDX_SIZES >> (4 * (SBUFF_IDX_MAX - 1))) * MALIGN };
const SBUFF_SIZE: usize = {
let mut sizes = SBUFF_IDX_SIZES;
let mut res = 0;
while sizes != 0 {
res += sizes & 0xF;
sizes >>= 4;
}
res * MALIGN
};
const SBUFF_IDX_OFFSETS: usize = {
let mut sizes = SBUFF_IDX_SIZES;
let mut offset = 0;
let mut res = 0;
let mut idx = 0;
while sizes != 0 {
res += offset << idx;
offset += sizes & 0xF;
idx += 4;
sizes >>= 4;
}
res
};
const SBUFF_MAX_OFFSET: usize = {
let mut offset = 0;
let mut idx = 0;
while idx < SBUFF_IDX_MAX - 1 {
offset += (SBUFF_IDX_SIZES >> (4 * idx)) & 0xF;
idx += 1;
}
offset
};
static_assertions::const_assert!(SBUFF_MAX_OFFSET < 0x10);
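// Boundary-tag chunk header. `prev_chunk_size` is valid only while the
// previous chunk is free (!PINUSE); `prev`/`next` link the chunk into a free
// bin and are meaningful only while the chunk itself is free. User memory
// starts CHUNK_MEM_OFFSET bytes in and overlaps `prev`/`next`.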
#[repr(C)]
struct Chunk {
prev_chunk_size: usize,
head: usize,
prev: *mut Chunk,
next: *mut Chunk,
}
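// Free-chunk node of the large bins: a bitwise trie keyed by chunk size,
// with same-sized chunks linked through the embedded chunk's `prev`/`next`.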
#[repr(C)]
struct TreeChunk {
chunk: Chunk,
child: [*mut TreeChunk; 2],
parent: *mut TreeChunk,
index: u32,
}
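// Descriptor of one contiguous region obtained from the system; it lives
// in place at the end of its own segment (see `set_segment_info`).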
#[repr(C)]
#[derive(Clone, Copy)]
struct Segment {
base: *mut u8,
size: usize,
next: *mut Segment,
}
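// Allocator state. `sbuff` is a small static buffer serving tiny requests
// before the first segment exists (`sbuff_mask` tracks busy cells);
// `smallmap`/`treemap` are bitmaps of non-empty bins; `dv` is the
// "designated victim" chunk preferred for small requests; `top` is the
// distinguished topmost free chunk.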
#[repr(align(16))]
#[repr(C)]
pub struct Dlmalloc {
sbuff: [u8; SBUFF_SIZE],
sbuff_mask: u8,
smallmap: u32,
treemap: u32,
smallbins: [*mut Chunk; (NSMALLBINS + 1) * 2],
treebins: [*mut TreeChunk; NTREEBINS],
dvsize: usize,
topsize: usize,
dv: *mut Chunk,
top: *mut Chunk,
seg: *mut Segment,
preinstallation_is_done: bool,
least_addr: *mut u8,
}
unsafe impl Send for Dlmalloc {}
pub const DLMALLOC_INIT: Dlmalloc = Dlmalloc {
smallmap: 0,
treemap: 0,
smallbins: [0 as *mut _; (NSMALLBINS + 1) * 2],
treebins: [0 as *mut _; NTREEBINS],
dvsize: 0,
topsize: 0,
dv: 0 as *mut _,
top: 0 as *mut _,
seg: 0 as *mut _,
sbuff: [0; SBUFF_SIZE],
sbuff_mask: 0,
preinstallation_is_done: false,
least_addr: 0 as *mut _,
};
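// Minimal usage sketch (assumes the `sys` module is wired up for the target;
// the static below is hypothetical and not part of this file):
//
//     static mut ALLOC: Dlmalloc = DLMALLOC_INIT;
//     let mem = unsafe { ALLOC.malloc(64) };
//     unsafe { ALLOC.free(mem) };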
fn to_pinuse(prev_in_use: bool) -> usize {
if prev_in_use {
PINUSE
} else {
0
}
}
fn to_cinuse(curr_in_use: bool) -> usize {
if curr_in_use {
CINUSE
} else {
0
}
}
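// Shift that brings the size bit selecting the left/right child at the root
// of the given tree bin into the topmost bit position.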
fn leftshift_for_tree_index(idx: u32) -> u32 {
let x = idx as usize;
if x == NTREEBINS - 1 {
0
} else {
(PTR_SIZE * 8 - 1 - ((x >> 1) + TREEBIN_SHIFT - 2)) as u32
}
}
impl Dlmalloc {
pub fn malloc_alignment(&self) -> usize {
MALIGN
}
fn min_large_chunk_size(&self) -> usize {
1 << TREEBIN_SHIFT
}
fn max_request(&self) -> usize {
let min_sys_alloc_space =
((!0 - (DEFAULT_GRANULARITY + SEG_INFO_SIZE + MALIGN) + 1) & !MALIGN) - PTR_SIZE + 1;
cmp::min((!MIN_CHUNK_SIZE + 1) << 2, min_sys_alloc_space)
}
fn max_chunk_size(&self) -> usize {
self.max_request() + PTR_SIZE
}
fn small_index(&self, chunk_size: usize) -> u32 {
(chunk_size >> SMALLBIN_SHIFT) as u32
}
fn small_index2size(&self, idx: u32) -> usize {
(idx as usize) << SMALLBIN_SHIFT
}
fn is_chunk_small(&self, chunk_size: usize) -> bool {
chunk_size >> SMALLBIN_SHIFT < NSMALLBINS
}
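    // Convert a user request into a chunk size: one word of header overhead,
    // rounded up to MALIGN and never below MIN_CHUNK_SIZE.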
fn mem_to_chunk_size(&self, mem_req_size: usize) -> usize {
if mem_req_size <= MIN_MEM_SIZE {
MIN_CHUNK_SIZE
} else {
align_up(mem_req_size + PTR_SIZE, MALIGN)
}
}
unsafe fn smallmap_is_marked(&self, idx: u32) -> bool {
self.smallmap & (1 << idx) != 0
}
unsafe fn mark_smallmap(&mut self, idx: u32) {
self.smallmap |= 1 << idx;
}
unsafe fn clear_smallmap(&mut self, idx: u32) {
self.smallmap &= !(1 << idx);
}
unsafe fn treemap_is_marked(&self, idx: u32) -> bool {
self.treemap & (1 << idx) != 0
}
unsafe fn mark_treemap(&mut self, idx: u32) {
self.treemap |= 1 << idx;
}
unsafe fn clear_treemap(&mut self, idx: u32) {
self.treemap &= !(1 << idx);
}
unsafe fn set_top_or_dv(
&mut self,
chunk: *mut Chunk,
new_val: *mut Chunk,
new_size: usize,
) -> bool {
if chunk == self.top {
self.top = new_val;
self.topsize = new_size;
} else if chunk == self.dv {
self.dv = new_val;
self.dvsize = new_size;
} else {
return false;
}
true
}
unsafe fn sbuff_idx_to_size(idx: usize) -> usize {
dlassert!(idx < SBUFF_IDX_MAX);
((SBUFF_IDX_SIZES >> (4 * idx)) & 0xF) * MALIGN
}
unsafe fn sbuff_idx_to_offset(idx: usize) -> usize {
dlassert!(idx < SBUFF_IDX_MAX);
((SBUFF_IDX_OFFSETS >> (4 * idx)) & 0xF) * MALIGN
}
unsafe fn sbuff_offset_to_idx(offset: usize) -> usize {
for idx in 0..SBUFF_IDX_MAX {
if offset == Dlmalloc::sbuff_idx_to_offset(idx) {
return idx;
}
}
SBUFF_IDX_MAX
}
unsafe fn sbuff_size_to_idx(&self, size: usize) -> usize {
if size > SBUFF_MAX {
return SBUFF_IDX_MAX;
}
for idx in 0..SBUFF_IDX_MAX {
if self.sbuff_mask & (1 << idx) != 0 {
continue;
}
if size <= Dlmalloc::sbuff_idx_to_size(idx) {
return idx;
}
}
SBUFF_IDX_MAX
}
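    // Register the preinstalled range [mem_begin, mem_end) as the first
    // segment; called at most once, before any system allocation.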
pub unsafe fn init_preinstalled_memory(&mut self, mem_begin: usize, mem_end: usize) {
dlassert!(!self.preinstallation_is_done);
self.preinstallation_is_done = true;
dlverbose!(
"DL INIT PREINSALLED MEMORY: mem_begin = {:#x}, mem_end = {:#x}",
mem_begin,
mem_end,
);
dlassert!(self.seg.is_null());
dlassert!(mem_end % DEFAULT_GRANULARITY == 0);
let mem_begin = align_up(mem_begin, MALIGN);
let size = mem_end - mem_begin;
if size == 0 {
return;
}
let req_chunk = self.append_mem_in_alloc_ctx(mem_begin as *mut u8, size, 0u32);
dlassert!(req_chunk as usize == mem_begin);
dlassert!(Chunk::size(req_chunk) == size - SEG_INFO_SIZE);
dlassert!(self.top == req_chunk);
}
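    // Find and unlink a chunk of at least `chunk_size` bytes: for small sizes
    // try the exact or neighbouring small bin, then the remaining small bins
    // (splitting the remainder into `dv`) or the tree bins; for large sizes
    // the tree bins; finally fall back to `dv` and `top`. Returns null when
    // nothing fits.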
unsafe fn malloc_chunk_by_size(&mut self, chunk_size: usize) -> *mut Chunk {
if self.is_chunk_small(chunk_size) {
let mut idx = self.small_index(chunk_size);
let smallbits = self.smallmap >> idx;
if smallbits & 0b11 != 0 {
idx += !smallbits & 1;
let head_chunk = self.smallbin_at(idx);
let chunk = self.unlink_last_small_chunk(head_chunk, idx);
let smallsize = self.small_index2size(idx);
(*chunk).head = smallsize | PINUSE | CINUSE;
(*Chunk::next(chunk)).head |= PINUSE;
dlverbose!("MALLOC: use small chunk[{:?}, {:x}]", chunk, smallsize);
return chunk;
}
if chunk_size > self.dvsize {
if smallbits != 0 {
let bins_idx = (smallbits << idx).trailing_zeros();
let head_chunk = self.smallbin_at(bins_idx);
let chunk = self.unlink_last_small_chunk(head_chunk, bins_idx);
let smallsize = self.small_index2size(bins_idx);
let remainder_size = smallsize - chunk_size;
if remainder_size >= MALIGN {
(*chunk).head = chunk_size | PINUSE | CINUSE;
let remainder = Chunk::plus_offset(chunk, chunk_size);
(*remainder).head = remainder_size | PINUSE;
Chunk::set_next_chunk_prev_size(remainder, remainder_size);
if remainder_size >= MIN_CHUNK_SIZE {
self.replace_dv(remainder, remainder_size);
}
} else {
dlassert!(remainder_size == 0);
(*chunk).head = smallsize | PINUSE | CINUSE;
(*Chunk::next(chunk)).head |= PINUSE;
}
dlverbose!(
"MALLOC: use small chunk[{:?}, {:x}]",
chunk,
Chunk::size(chunk)
);
return chunk;
} else if self.treemap != 0 {
let mem = self.tmalloc_small(chunk_size);
if !mem.is_null() {
let chunk = Chunk::from_mem(mem);
dlverbose!(
"MALLOC: ret small-tree chunk[{:?}, {:x}]",
chunk,
Chunk::size(chunk)
);
return chunk;
}
}
}
} else if chunk_size < self.max_chunk_size() {
if self.treemap != 0 {
let mem = self.tmalloc_large(chunk_size);
if !mem.is_null() {
let chunk = Chunk::from_mem(mem);
dlverbose!(
"MALLOC: ret big chunk[{:?}, {:x}]",
chunk,
Chunk::size(chunk)
);
return chunk;
}
}
} else {
return ptr::null_mut();
}
if chunk_size <= self.dvsize {
dlverbose!("MALLOC: use dv chunk[{:?}, {:x}]", self.dv, self.dvsize);
let chunk = self.dv;
self.crop_chunk(self.dv, self.dv, chunk_size, true);
return chunk;
}
if chunk_size <= self.topsize {
dlverbose!(
"MALLOC: use top chunk[{:?}, 0x{:x}]",
self.top,
self.topsize
);
let chunk = self.top;
self.crop_chunk(self.top, self.top, chunk_size, true);
self.check_top_chunk(self.top);
return chunk;
}
ptr::null_mut()
}
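    // Debug-only helpers: zero an allocation's tail, checksum and hex-dump
    // memory for the verbose log.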
unsafe fn debug_zero_tail(ptr: *mut u8, req_size: usize, size: usize) {
for i in req_size..size {
*(ptr.add(i)) = 0;
}
}
unsafe fn debug_mem_sum(ptr: *mut u8, size: usize) -> u64 {
let mut x: u64 = 0;
for i in 0..size {
x += *(ptr.add(i)) as u64;
}
x
}
unsafe fn debug_print_mem(ptr: *mut u8, size: usize) {
for i in 0..size {
let x = *(ptr.add(i));
dlverbose_no_flush!("{:02X}", x);
}
dlverbose!("");
}
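    // Allocation backend. Before the first segment exists, tiny requests are
    // served from the static buffer; otherwise the request is rounded up to a
    // chunk size, looked up in the bins and, failing that, handed to
    // `sys_alloc`.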
unsafe fn malloc_internal(&mut self, size: usize, can_use_sbuff: bool) -> *mut u8 {
if can_use_sbuff && self.seg.is_null() {
let sbuff = &mut self.sbuff as *mut u8;
dlassert!(sbuff as usize % MALIGN == 0);
let idx = self.sbuff_size_to_idx(size);
if idx < SBUFF_IDX_MAX {
dlverbose!("DL MALLOC: use sbuff cell {}", idx);
                self.sbuff_mask |= 1 << idx;
return sbuff.add(Dlmalloc::sbuff_idx_to_offset(idx));
}
}
let chunk_size = self.mem_to_chunk_size(size);
let chunk = self.malloc_chunk_by_size(chunk_size);
if chunk.is_null() {
return self.sys_alloc(chunk_size);
}
let mem = Chunk::to_mem(chunk);
self.check_malloced_mem(mem, size);
mem
}
pub unsafe fn malloc(&mut self, size: usize) -> *mut u8 {
dlverbose!("{}", VERBOSE_DEL);
dlverbose!("MALLOC CALL: size = 0x{:x}", size);
self.print_segments();
self.check_malloc_state();
let mem = self.malloc_internal(size, true);
dlverbose!("MALLOC: result mem {:?}", mem);
mem
}
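    // Obtain more memory. The first call claims the preinstalled memory;
    // afterwards whole granules are allocated from the system and appended as
    // segments, and `top` is re-elected among the free chunks at segment ends
    // if it was lost. The request chunk is returned cropped and in use.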
unsafe fn sys_alloc(&mut self, size: usize) -> *mut u8 {
dlverbose!("DL SYS ALLOC: size = 0x{:x}", size);
if size >= self.max_chunk_size() {
return ptr::null_mut();
}
let mut req_chunk = if !self.preinstallation_is_done {
dlassert!(self.seg.is_null());
let (mem_addr, mem_size) = sys::get_preinstalled_memory();
self.init_preinstalled_memory(mem_addr, mem_addr + mem_size);
if self.topsize >= size {
self.top
} else {
ptr::null_mut()
}
} else {
ptr::null_mut()
};
if req_chunk.is_null() {
let aligned_size = align_up(size + SEG_INFO_SIZE + MALIGN, DEFAULT_GRANULARITY);
let (alloced_base, alloced_size, flags) = sys::alloc(aligned_size);
dlverbose!(
"DL SYS ALLOC: new mem {:?} 0x{:x}",
alloced_base,
alloced_size
);
if alloced_base.is_null() {
return alloced_base;
}
req_chunk = self.append_mem_in_alloc_ctx(alloced_base, alloced_size, flags);
}
if self.top.is_null() {
dlassert!(self.topsize == 0);
let mut seg = self.seg;
while !seg.is_null() {
let chunk = (*seg).info_chunk();
seg = (*seg).next;
if Chunk::pinuse(chunk) {
continue;
}
let chunk = Chunk::prev(chunk);
if chunk == self.dv {
continue;
}
let size = Chunk::size(chunk);
if size < MIN_CHUNK_SIZE || size <= self.topsize {
continue;
}
self.top = chunk;
self.topsize = size;
}
if !self.top.is_null() {
self.unlink_chunk(self.top);
}
self.check_top_chunk(self.top);
}
dlassert!(Chunk::size(req_chunk) >= size);
if self.crop_chunk(req_chunk, req_chunk, size, false) {
let next_chunk = Chunk::next(req_chunk);
self.free_chunk(next_chunk);
}
Chunk::to_mem(req_chunk)
}
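    // Link a freshly obtained region into the segment list, merging it with
    // any existing segment that ends or starts exactly at its borders.
    // Returns the resulting free chunk covering the new memory.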
unsafe fn append_mem_in_alloc_ctx(
&mut self,
alloced_base: *mut u8,
alloced_size: usize,
flags: u32,
) -> *mut Chunk {
let mut req_chunk;
if self.seg.is_null() {
dlverbose!("DL APPEND: it's a newest mem");
self.update_least_addr(alloced_base);
self.add_segment(alloced_base, alloced_size, flags);
self.init_small_bins();
self.check_top_chunk(self.top);
req_chunk = self.top;
} else {
self.update_least_addr(alloced_base);
self.add_segment(alloced_base, alloced_size, flags);
req_chunk = alloced_base as *mut Chunk;
let mut prev_seg = ptr::null_mut();
let mut seg = self.seg;
while !seg.is_null() && alloced_base != (*seg).end() {
prev_seg = seg;
seg = (*seg).next;
}
if !seg.is_null() {
dlverbose!(
"DL APPEND: find seg before [{:?}, {:?}, 0x{:x}]",
(*seg).base,
(*seg).end(),
(*seg).size
);
if prev_seg.is_null() {
dlassert!((*self.seg).next == seg);
(*self.seg).next = (*seg).next;
} else {
(*prev_seg).next = (*seg).next;
}
req_chunk = self.merge_segments(seg.as_mut().unwrap(), self.seg.as_mut().unwrap());
}
let mut prev_seg = ptr::null_mut();
let mut seg = self.seg;
while !seg.is_null() && (*seg).base != alloced_base.add(alloced_size) {
prev_seg = seg;
seg = (*seg).next;
}
if !seg.is_null() {
dlverbose!(
"DL APPEND: find seg after [{:?}, {:?}, 0x{:x}]",
(*seg).base,
(*seg).end(),
(*seg).size
);
let next_seg = (*self.seg).next;
req_chunk = self.merge_segments(self.seg.as_mut().unwrap(), seg.as_mut().unwrap());
self.seg = next_seg;
}
}
req_chunk
}
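    // Reallocation cases, in order: a static-buffer cell (reuse or copy out),
    // the chunk already fits (shrink in place), the next chunk is free and
    // large enough (grow in place), otherwise malloc + copy + free.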
pub unsafe fn realloc(&mut self, old_mem: *mut u8, req_size: usize) -> *mut u8 {
dlverbose!("{}", VERBOSE_DEL);
dlverbose!(
"DL REALLOC CALL: old_mem={:?} req_size=0x{:x}",
old_mem,
req_size
);
self.check_malloc_state();
if req_size >= self.max_request() {
return ptr::null_mut();
}
let sbuff = &mut self.sbuff as *mut u8;
        if old_mem >= sbuff && old_mem < sbuff.add(SBUFF_SIZE) {
let offset = old_mem as usize - sbuff as usize;
let idx = Dlmalloc::sbuff_offset_to_idx(offset);
let size = Dlmalloc::sbuff_idx_to_size(idx);
dlassert!(idx < SBUFF_IDX_MAX);
dlverbose!("DL REALLOC: in sbuff idx={} size=0x{:x}", idx, size);
if size >= req_size {
dlverbose!("DL REALLOC: use old sbuff cell");
return old_mem;
}
self.sbuff_mask &= !(1 << idx);
let new_mem = self.malloc_internal(req_size, true);
dlverbose!("DL REALLOC: new mem {:?}", new_mem);
ptr::copy_nonoverlapping(old_mem, new_mem, size);
return new_mem;
}
let req_chunk_size = self.mem_to_chunk_size(req_size);
let old_chunk = Chunk::from_mem(old_mem);
let old_chunk_size = Chunk::size(old_chunk);
let old_mem_size = old_chunk_size - PTR_SIZE;
let mut chunk = old_chunk;
let mut chunk_size = old_chunk_size;
dlverbose!(
"REALLOC: oldmem={:?} old_mem_size=0x{:x} req_size=0x{:x}",
old_mem,
old_mem_size,
req_size
);
dlassert!(Chunk::cinuse(chunk));
dlassert!(chunk != self.top && chunk != self.dv);
if req_chunk_size == chunk_size {
old_mem
} else if req_chunk_size < chunk_size {
self.crop_chunk(chunk, chunk, req_chunk_size, false);
let next_chunk = Chunk::next(chunk);
dlassert!(!Chunk::cinuse(next_chunk));
self.extend_free_chunk(next_chunk, false);
self.free_chunk(next_chunk);
old_mem
} else if self.get_extended_up_chunk_size(chunk) >= req_chunk_size {
let next_chunk = Chunk::next(chunk);
dlassert!(!Chunk::cinuse(next_chunk));
let chunk_size = Chunk::size(chunk);
let next_chunk_size = Chunk::size(next_chunk);
let prev_in_use = if Chunk::pinuse(chunk) { PINUSE } else { 0 };
dlverbose!(
"REALLOC: use after chunk[{:?}, 0x{:x}] {}",
next_chunk,
next_chunk_size,
self.is_top_or_dv(next_chunk)
);
if next_chunk != self.top && next_chunk != self.dv {
self.unlink_chunk(next_chunk);
}
let mut remainder_size = chunk_size + next_chunk_size - req_chunk_size;
dlassert!(remainder_size % MALIGN == 0);
let mut remainder_chunk;
if remainder_size > 0 {
remainder_chunk = Chunk::minus_offset(Chunk::next(next_chunk), remainder_size);
(*remainder_chunk).head = remainder_size | PINUSE;
(*Chunk::next(remainder_chunk)).prev_chunk_size = remainder_size;
dlassert!(!Chunk::pinuse(Chunk::next(remainder_chunk)));
} else {
remainder_chunk = ptr::null_mut();
}
let chunk_size = chunk_size + next_chunk_size - remainder_size;
if remainder_size < MIN_CHUNK_SIZE {
remainder_chunk = ptr::null_mut();
remainder_size = 0;
}
if next_chunk == self.top {
self.top = remainder_chunk;
self.topsize = remainder_size;
} else if next_chunk == self.dv {
self.dv = remainder_chunk;
self.dvsize = remainder_size;
} else if remainder_size >= MIN_CHUNK_SIZE {
self.insert_chunk(remainder_chunk, remainder_size);
}
(*chunk).head = chunk_size | CINUSE | prev_in_use;
(*Chunk::next(chunk)).head |= PINUSE;
self.check_malloc_state();
old_mem
} else {
let new_mem = self.malloc_internal(req_size, false);
if new_mem.is_null() {
return new_mem;
}
let new_chunk = Chunk::from_mem(new_mem);
let new_mem_size = Chunk::size(new_chunk) - PTR_SIZE;
dlassert!(new_mem_size >= old_mem_size);
dlverbose!(
"REALLOC: copy data from [{:?}, 0x{:x?}] to [{:?}, 0x{:x?}]",
old_mem,
old_mem_size,
new_mem,
new_mem_size
);
ptr::copy_nonoverlapping(old_mem, new_mem, old_mem_size);
chunk = self.extend_free_chunk(chunk, false);
self.free_chunk(chunk);
self.check_malloc_state();
new_mem
}
}
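    // Carve the in-use chunk [new_chunk_pos, new_chunk_pos + new_chunk_size)
    // out of `chunk`, splitting off before/after remainders (inserted into
    // bins only when `can_insert` is set). Returns true when a free remainder
    // is left after the cropped chunk.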
unsafe fn crop_chunk(
&mut self,
mut chunk: *mut Chunk,
new_chunk_pos: *mut Chunk,
new_chunk_size: usize,
can_insert: bool,
) -> bool {
let mut chunk_size = Chunk::size(chunk);
dlverbose!(
"DL CROP: original chunk [{:?}, {:x?}], to new [{:?}, {:x?}]",
chunk,
chunk_size,
new_chunk_pos,
new_chunk_size
);
dlassert!(new_chunk_size % MALIGN == 0);
dlassert!(MIN_CHUNK_SIZE <= new_chunk_size);
dlassert!(new_chunk_pos as usize % MALIGN == 0);
dlassert!(new_chunk_pos >= chunk);
dlassert!(
Chunk::plus_offset(chunk, chunk_size)
>= Chunk::plus_offset(new_chunk_pos, new_chunk_size)
);
let mut prev_in_use = if Chunk::pinuse(chunk) { PINUSE } else { 0 };
if new_chunk_pos != chunk {
let remainder_size = new_chunk_pos as usize - chunk as usize;
let remainder = chunk;
dlassert!(remainder_size >= MALIGN);
chunk_size -= remainder_size;
(*remainder).head = remainder_size | prev_in_use;
self.set_top_or_dv(chunk, new_chunk_pos, chunk_size);
if can_insert && remainder_size >= MIN_CHUNK_SIZE {
dlassert!(Chunk::pinuse(remainder));
self.insert_chunk(remainder, remainder_size);
}
dlverbose!("CROP: before rem [{:?}, {:x?}]", remainder, remainder_size);
chunk = new_chunk_pos;
(*chunk).prev_chunk_size = remainder_size;
prev_in_use = 0;
}
dlassert!(new_chunk_pos == chunk);
dlassert!(chunk_size >= new_chunk_size);
let has_after_rem;
let next_chunk = Chunk::plus_offset(chunk, chunk_size);
if chunk_size >= new_chunk_size + MALIGN {
let mut remainder_size = chunk_size - new_chunk_size;
let mut remainder = Chunk::plus_offset(chunk, new_chunk_size);
dlverbose!("CROP: after rem [{:?}, {:x?}]", remainder, remainder_size);
(*remainder).head = remainder_size | PINUSE;
(*Chunk::next(remainder)).head &= !PINUSE;
(*Chunk::next(remainder)).prev_chunk_size = remainder_size;
if remainder_size < MIN_CHUNK_SIZE {
dlassert!(remainder_size == MALIGN);
remainder_size = 0;
remainder = ptr::null_mut();
}
if self.set_top_or_dv(chunk, remainder, remainder_size) {
dlassert!(Chunk::cinuse(next_chunk));
} else if can_insert && remainder_size >= MIN_CHUNK_SIZE {
dlassert!(Chunk::cinuse(next_chunk));
self.insert_chunk(remainder, remainder_size);
}
chunk_size = new_chunk_size;
has_after_rem = true;
} else {
dlassert!(chunk_size == new_chunk_size);
(*next_chunk).head |= PINUSE;
(*next_chunk).prev_chunk_size = chunk_size;
has_after_rem = false;
}
dlassert!(chunk == new_chunk_pos);
dlassert!(chunk_size == new_chunk_size);
(*chunk).head = chunk_size | prev_in_use | CINUSE;
self.set_top_or_dv(chunk, ptr::null_mut(), 0);
dlverbose!("CROP: cropped chunk [{:?}, {:x?}]", chunk, chunk_size);
has_after_rem
}
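    // Over-allocate by `alignment`, then crop the result at the first aligned
    // address inside it, returning both remainders to the free structures.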
pub unsafe fn memalign(&mut self, mut alignment: usize, req_size: usize) -> *mut u8 {
dlverbose!("{}", VERBOSE_DEL);
dlverbose!("MEMALIGN: align={:x?}, size={:x?}", alignment, req_size);
self.check_malloc_state();
dlassert!(alignment >= MIN_CHUNK_SIZE);
if req_size >= self.max_request() - alignment {
return ptr::null_mut();
}
let req_chunk_size = self.mem_to_chunk_size(req_size);
let size_to_alloc = req_chunk_size + alignment;
let mut mem = self.malloc_internal(size_to_alloc, false);
if mem.is_null() {
return mem;
}
let mut chunk = Chunk::from_mem(mem);
dlverbose!("MEMALIGN: chunk[{:?}, {:x?}]", chunk, Chunk::size(chunk));
dlassert!(Chunk::pinuse(chunk) && Chunk::cinuse(chunk));
mem = align_up(mem as usize, alignment) as *mut u8;
let aligned_chunk = Chunk::from_mem(mem);
if self.crop_chunk(chunk, aligned_chunk, req_chunk_size, false) {
self.extend_free_chunk(Chunk::next(aligned_chunk), true);
}
if chunk != aligned_chunk && Chunk::size(chunk) >= MIN_CHUNK_SIZE {
self.insert_chunk(chunk, Chunk::size(chunk));
}
self.check_cinuse_chunk(aligned_chunk);
self.check_malloc_state();
mem
}
unsafe fn init_top(&mut self, chunk: *mut Chunk, chunk_size: usize) {
dlassert!(chunk as usize % MALIGN == 0);
dlassert!(Chunk::to_mem(chunk) as usize % MALIGN == 0);
self.top = chunk;
self.topsize = chunk_size;
(*self.top).head = chunk_size | PINUSE;
}
unsafe fn init_small_bins(&mut self) {
for i in 0..NSMALLBINS as u32 {
let bin = self.smallbin_at(i);
(*bin).next = bin;
(*bin).prev = bin;
}
}
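    // Merge two adjacent segments (`seg1` directly below `seg2`): seg1's info
    // block becomes an ordinary chunk and is coalesced with its neighbours.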
unsafe fn merge_segments(&mut self, seg1: &mut Segment, seg2: &mut Segment) -> *mut Chunk {
dlassert!(seg1.end() == seg2.base);
dlassert!(seg1.size % MALIGN == 0);
dlassert!(seg2.size % MALIGN == 0);
dlassert!(seg1.base as usize % MALIGN == 0);
dlassert!(seg2.base as usize % MALIGN == 0);
seg2.size += seg1.size;
seg2.base = seg1.base;
let seg1_info_chunk = seg1.info_chunk();
let seg2_base_chunk = seg2.base_chunk();
let seg2_info_chunk = seg2.info_chunk();
Chunk::change_size(seg1_info_chunk, SEG_INFO_SIZE);
if !Chunk::pinuse(seg1_info_chunk) {
let prev = Chunk::prev(seg1_info_chunk);
if prev == self.top && Chunk::next(seg2_base_chunk) != seg2_info_chunk {
self.top = ptr::null_mut();
self.topsize = 0;
self.insert_chunk(prev, Chunk::size(prev));
}
}
let chunk = self.extend_free_chunk(seg1_info_chunk, false);
self.check_top_chunk(self.top);
chunk
}
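    // Write a segment's trailer at the end of [seg_base, seg_base + seg_size):
    // an in-use info chunk whose payload is the `Segment` record, followed by
    // a border pseudo-chunk marking the segment end.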
unsafe fn set_segment_info(
&mut self,
seg_base: *mut u8,
seg_size: usize,
prev_in_use: usize,
) -> *mut Segment {
let seg_end = seg_base.add(seg_size);
let seg_chunk = seg_end.sub(SEG_INFO_SIZE) as *mut Chunk;
let seg_info = Chunk::plus_offset(seg_chunk, 2 * PTR_SIZE) as *mut Segment;
let border_chunk = Chunk::plus_offset(seg_chunk, 4 * PTR_SIZE);
dlassert!(seg_end as usize % MALIGN == 0);
dlassert!(seg_chunk as usize % MALIGN == 0);
dlassert!(seg_info as usize % MALIGN == 0);
dlassert!(border_chunk as usize % MALIGN == 0);
dlassert!(border_chunk as *mut u8 == seg_end.sub(2 * PTR_SIZE));
dlverbose!("ALLOC: add seg, info chunk {:?}", seg_chunk);
(*seg_chunk).head = (4 * PTR_SIZE) | prev_in_use | CINUSE;
(*seg_info).base = seg_base;
(*seg_info).size = seg_size;
(*border_chunk).head = BORDER_CHUNK_HEAD;
seg_info
}
unsafe fn add_segment(&mut self, tbase: *mut u8, tsize: usize, flags: u32) {
dlassert!(tbase as usize % MALIGN == 0);
dlassert!(tsize % MALIGN == 0);
let seg = self.set_segment_info(tbase, tsize, 0);
(*seg).next = self.seg;
if !self.top.is_null() {
(*Chunk::next(self.top)).prev_chunk_size = self.topsize;
self.insert_chunk(self.top, self.topsize);
}
let size = tsize - SEG_INFO_SIZE;
self.init_top(tbase as *mut Chunk, size);
(*Chunk::next(self.top)).prev_chunk_size = self.topsize;
self.seg = seg;
dlverbose!(
"SYS_ALLOC: add seg, top[{:?}, 0x{:x}]",
self.top,
self.topsize
);
self.check_top_chunk(self.top);
}
unsafe fn segment_holding(&self, ptr: *mut u8) -> *mut Segment {
let mut sp = self.seg;
while !sp.is_null() {
if (*sp).base <= ptr && ptr < Segment::top(sp) {
return sp;
}
sp = (*sp).next;
}
ptr::null_mut()
}
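    // Serve a small request from the tree bins: take the lowest non-empty
    // bin, walk its leftmost path for the tightest fit and make the remainder
    // the new `dv`.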
unsafe fn tmalloc_small(&mut self, size: usize) -> *mut u8 {
let first_one_idx = self.treemap.trailing_zeros();
let first_tree_chunk = *self.treebin_at(first_one_idx);
let mut tree_chunk = first_tree_chunk;
let mut best_tree_chunk = first_tree_chunk;
let mut remainder_size = Chunk::size(TreeChunk::chunk(tree_chunk)) - size;
loop {
self.check_any_chunk(TreeChunk::chunk(tree_chunk));
tree_chunk = TreeChunk::leftmost_child(tree_chunk);
if tree_chunk.is_null() {
break;
}
let diff = Chunk::size(TreeChunk::chunk(tree_chunk)) - size;
if diff < remainder_size {
remainder_size = diff;
best_tree_chunk = tree_chunk;
}
}
let chunk = TreeChunk::chunk(best_tree_chunk);
dlassert!(Chunk::size(chunk) == remainder_size + size);
self.unlink_large_chunk(best_tree_chunk);
if remainder_size < MALIGN {
dlassert!(remainder_size == 0);
(*chunk).head = size | PINUSE | CINUSE;
(*Chunk::next(chunk)).head |= PINUSE;
} else {
(*chunk).head = size | PINUSE | CINUSE;
let remainder = Chunk::next(chunk);
(*remainder).head = remainder_size | PINUSE;
Chunk::set_next_chunk_prev_size(remainder, remainder_size);
if remainder_size >= MIN_CHUNK_SIZE {
self.replace_dv(remainder, remainder_size);
}
}
Chunk::to_mem(chunk)
}
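    // Best-fit search for a large request: descend the size trie of the
    // request's bin, fall back to the leftmost chunk of the next non-empty
    // bin, and give up in favour of `dv` when `dv` fits at least as tightly.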
unsafe fn tmalloc_large(&mut self, size: usize) -> *mut u8 {
let mut v = ptr::null_mut();
let mut rsize = !size + 1;
let idx = self.compute_tree_index(size);
let mut t = *self.treebin_at(idx);
if !t.is_null() {
let mut sizebits = size << leftshift_for_tree_index(idx);
let mut rst = ptr::null_mut();
loop {
let csize = Chunk::size(TreeChunk::chunk(t));
if csize >= size && csize - size < rsize {
v = t;
rsize = csize - size;
if rsize == 0 {
break;
}
}
let rt = (*t).child[1];
t = (*t).child[(sizebits >> (PTR_SIZE * 8 - 1)) & 1];
if !rt.is_null() && rt != t {
rst = rt;
}
if t.is_null() {
t = rst;
break;
}
sizebits <<= 1;
}
}
if t.is_null() && v.is_null() {
            let leftbits = self.treemap & ((u32::MAX << idx) << 1);
if leftbits != 0 {
let idx = leftbits.trailing_zeros();
t = *self.treebin_at(idx);
}
}
while !t.is_null() {
let csize = Chunk::size(TreeChunk::chunk(t));
if csize >= size && csize - size < rsize {
rsize = csize - size;
v = t;
}
t = TreeChunk::leftmost_child(t);
}
if v.is_null() || (self.dvsize >= size && rsize >= (self.dvsize - size)) {
return ptr::null_mut();
}
let vc = TreeChunk::chunk(v);
let r = Chunk::plus_offset(vc, size);
dlassert!(Chunk::size(vc) == rsize + size);
self.unlink_large_chunk(v);
if rsize < MALIGN {
dlassert!(rsize == 0);
(*vc).head = size | CINUSE | PINUSE;
(*Chunk::next(vc)).head |= PINUSE;
} else {
(*vc).head = size | CINUSE | PINUSE;
(*r).head = rsize | PINUSE;
Chunk::set_next_chunk_prev_size(r, rsize);
if rsize >= MIN_CHUNK_SIZE {
self.insert_chunk(r, rsize);
}
}
Chunk::to_mem(vc)
}
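    // Head of small bin `idx`. Each bin owns two adjacent words of
    // `smallbins`; the returned pointer is offset so that `(*bin).prev` and
    // `(*bin).next` land exactly on those words. The fake chunk's other
    // fields alias neighbouring bins and must never be written.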
unsafe fn smallbin_at(&mut self, idx: u32) -> *mut Chunk {
let idx = (idx * 2) as usize;
dlassert!(idx < self.smallbins.len());
let smallbins_ptr = &self.smallbins as *const *mut Chunk;
smallbins_ptr.add(idx) as *mut Chunk
}
unsafe fn treebin_at(&mut self, idx: u32) -> *mut *mut TreeChunk {
dlassert!((idx as usize) < self.treebins.len());
&mut *self.treebins.get_unchecked_mut(idx as usize)
}
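    // Map a chunk size to one of the NTREEBINS tree bins: two bins per power
    // of two at and above 1 << TREEBIN_SHIFT.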
fn compute_tree_index(&self, size: usize) -> u32 {
let x = size >> TREEBIN_SHIFT;
if x == 0 {
0
} else if x > 0xffff {
NTREEBINS as u32 - 1
} else {
let k = mem::size_of_val(&x) * 8 - 1 - (x.leading_zeros() as usize);
((k << 1) + (size >> (k + TREEBIN_SHIFT - 1) & 1)) as u32
}
}
unsafe fn unlink_last_small_chunk(&mut self, head: *mut Chunk, idx: u32) -> *mut Chunk {
let chunk = (*head).prev;
let new_first_chunk = (*chunk).prev;
dlassert!(chunk != head);
dlassert!(chunk != new_first_chunk);
dlassert!(Chunk::size(chunk) == self.small_index2size(idx));
if head == new_first_chunk {
self.clear_smallmap(idx);
} else {
(*new_first_chunk).next = head;
(*head).prev = new_first_chunk;
}
chunk
}
unsafe fn replace_dv(&mut self, chunk: *mut Chunk, size: usize) {
let dv_size = self.dvsize;
dlassert!(self.is_chunk_small(dv_size));
dlassert!(size >= MIN_CHUNK_SIZE);
if dv_size != 0 {
self.insert_chunk(self.dv, dv_size);
}
self.dvsize = size;
self.dv = chunk;
}
unsafe fn insert_chunk(&mut self, chunk: *mut Chunk, size: usize) {
dlverbose!("ALLOC: insert [{:?}, {:?}]", chunk, Chunk::next(chunk));
dlassert!(size == Chunk::size(chunk));
dlassert!(size >= MIN_CHUNK_SIZE);
dlassert!(!Chunk::cinuse(chunk));
if self.is_chunk_small(size) {
self.insert_small_chunk(chunk, size);
} else {
self.insert_large_chunk(chunk as *mut TreeChunk, size);
}
}
unsafe fn insert_small_chunk(&mut self, chunk: *mut Chunk, size: usize) {
let idx = self.small_index(size);
let head = self.smallbin_at(idx);
let mut f = head;
dlassert!(size >= MIN_CHUNK_SIZE);
if !self.smallmap_is_marked(idx) {
self.mark_smallmap(idx);
} else {
f = (*head).prev;
}
(*head).prev = chunk;
(*f).next = chunk;
(*chunk).prev = f;
(*chunk).next = head;
}
unsafe fn insert_large_chunk(&mut self, chunk: *mut TreeChunk, size: usize) {
let idx = self.compute_tree_index(size);
let h = self.treebin_at(idx);
(*chunk).index = idx;
(*chunk).child[0] = ptr::null_mut();
(*chunk).child[1] = ptr::null_mut();
let chunkc = TreeChunk::chunk(chunk);
if !self.treemap_is_marked(idx) {
self.mark_treemap(idx);
*h = chunk;
            (*chunk).parent = h as *mut TreeChunk;
            (*chunkc).next = chunkc;
(*chunkc).prev = chunkc;
} else {
let mut t = *h;
let mut k = size << leftshift_for_tree_index(idx);
loop {
if Chunk::size(TreeChunk::chunk(t)) != size {
let c = &mut (*t).child[(k >> (PTR_SIZE * 8 - 1)) & 1];
k <<= 1;
if !c.is_null() {
t = *c;
} else {
*c = chunk;
(*chunk).parent = t;
(*chunkc).next = chunkc;
(*chunkc).prev = chunkc;
break;
}
} else {
let tc = TreeChunk::chunk(t);
let f = (*tc).prev;
(*f).next = chunkc;
(*tc).prev = chunkc;
(*chunkc).prev = f;
(*chunkc).next = tc;
(*chunk).parent = ptr::null_mut();
break;
}
}
}
}
unsafe fn unlink_chunk(&mut self, chunk: *mut Chunk) {
let size = Chunk::size(chunk);
if size < MIN_CHUNK_SIZE {
dlassert!(size == MALIGN);
} else if self.is_chunk_small(size) {
self.unlink_small_chunk(chunk)
} else {
self.unlink_large_chunk(chunk as *mut TreeChunk);
}
}
unsafe fn unlink_small_chunk(&mut self, chunk: *mut Chunk) {
let size = Chunk::size(chunk);
dlverbose!(
"ALLOC: unlink small chunk[{:?}, {:?}, 0x{:x}]",
chunk,
Chunk::next(chunk),
size
);
let prev_chunk = (*chunk).prev;
let next_chunk = (*chunk).next;
let idx = self.small_index(size);
dlassert!(chunk != next_chunk);
dlassert!(chunk != prev_chunk);
dlassert!(Chunk::size(chunk) == self.small_index2size(idx));
if next_chunk == prev_chunk {
self.clear_smallmap(idx);
} else {
(*prev_chunk).next = next_chunk;
(*next_chunk).prev = prev_chunk;
}
}
unsafe fn unlink_large_chunk(&mut self, chunk: *mut TreeChunk) {
let size = Chunk::size(TreeChunk::chunk(chunk));
dlverbose!(
"ALLOC: unlink large chunk[{:?}, {:?}, 0x{:x}]",
chunk,
TreeChunk::next(chunk),
size
);
let parent = (*chunk).parent;
let mut r;
if TreeChunk::next(chunk) != chunk {
let f = TreeChunk::prev(chunk);
r = TreeChunk::next(chunk);
(*f).chunk.next = TreeChunk::chunk(r);
(*r).chunk.prev = TreeChunk::chunk(f);
} else {
let mut rp = &mut (*chunk).child[1];
if rp.is_null() {
rp = &mut (*chunk).child[0];
}
r = *rp;
if !rp.is_null() {
loop {
let mut cp = &mut (**rp).child[1];
if cp.is_null() {
cp = &mut (**rp).child[0];
}
if cp.is_null() {
break;
}
rp = cp;
}
r = *rp;
*rp = ptr::null_mut();
}
}
if parent.is_null() {
return;
}
let h = self.treebin_at((*chunk).index);
if chunk == *h {
*h = r;
if r.is_null() {
self.clear_treemap((*chunk).index);
}
} else if (*parent).child[0] == chunk {
(*parent).child[0] = r;
} else {
(*parent).child[1] = r;
}
if !r.is_null() {
(*r).parent = parent;
let c0 = (*chunk).child[0];
if !c0.is_null() {
(*r).child[0] = c0;
(*c0).parent = r;
}
let c1 = (*chunk).child[1];
if !c1.is_null() {
(*r).child[1] = c1;
(*c1).parent = r;
}
}
}
unsafe fn get_extended_up_chunk_size(&mut self, chunk: *mut Chunk) -> usize {
let next_chunk = Chunk::next(chunk);
if !Chunk::cinuse(next_chunk) {
Chunk::size(chunk) + Chunk::size(next_chunk)
} else {
Chunk::size(chunk)
}
}
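    // Coalesce `chunk` with its free neighbours, absorbing or updating
    // `top`/`dv` as needed, and mark the result free. The merged chunk is
    // inserted into a bin only when `can_insert` is set.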
unsafe fn extend_free_chunk(&mut self, mut chunk: *mut Chunk, can_insert: bool) -> *mut Chunk {
dlverbose!("DL EXTEND: chunk[{:?}, {:#x}]", chunk, Chunk::size(chunk));
dlassert!(Chunk::size(chunk) >= MALIGN);
if !Chunk::pinuse(chunk) {
let curr_chunk_size = Chunk::size(chunk);
let prev_chunk = Chunk::prev(chunk);
let prev_chunk_size = Chunk::size(prev_chunk);
dlassert!(Chunk::pinuse(prev_chunk));
dlverbose!(
"extend: add before chunk[{:?}, 0x{:x}] {}",
prev_chunk,
prev_chunk_size,
self.is_top_or_dv(prev_chunk)
);
if prev_chunk == self.top {
self.topsize += Chunk::size(chunk);
} else if prev_chunk == self.dv {
self.dvsize += Chunk::size(chunk);
} else {
self.unlink_chunk(prev_chunk);
}
chunk = prev_chunk;
(*chunk).head = (curr_chunk_size + prev_chunk_size) | PINUSE;
}
let next_chunk = Chunk::next(chunk);
if !Chunk::cinuse(next_chunk) {
dlverbose!(
"extend: add after chunk[{:?}, 0x{:x}] {}",
next_chunk,
Chunk::size(next_chunk),
self.is_top_or_dv(next_chunk)
);
dlassert!(chunk != self.top);
if next_chunk == self.top {
self.top = chunk;
self.topsize += Chunk::size(chunk);
if chunk == self.dv {
self.dv = ptr::null_mut();
self.dvsize = 0;
}
(*chunk).head = self.topsize | PINUSE;
} else if next_chunk == self.dv {
self.dv = chunk;
self.dvsize += Chunk::size(chunk);
(*chunk).head = self.dvsize | PINUSE;
} else {
self.unlink_chunk(next_chunk);
(*chunk).head = (Chunk::size(chunk) + Chunk::size(next_chunk)) | PINUSE;
if chunk == self.dv {
self.dvsize = Chunk::size(chunk);
} else if can_insert {
self.insert_chunk(chunk, Chunk::size(chunk));
}
}
(*Chunk::next(chunk)).prev_chunk_size = Chunk::size(chunk);
} else {
(*chunk).head &= !CINUSE;
(*next_chunk).head &= !PINUSE;
(*next_chunk).prev_chunk_size = Chunk::size(chunk);
if can_insert
&& chunk != self.top
&& chunk != self.dv
&& Chunk::size(chunk) >= MIN_CHUNK_SIZE
{
self.insert_chunk(chunk, Chunk::size(chunk));
}
}
chunk
}
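    // Dispose of a free chunk. Small chunks simply go to a bin; a run large
    // enough to cover whole granules is returned to the system via
    // `sys::free`, splitting the holding segment around the released range.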
unsafe fn free_chunk(&mut self, chunk: *mut Chunk) {
dlassert!(Chunk::pinuse(chunk));
dlassert!(Chunk::cinuse(Chunk::next(chunk)));
dlverbose!("DL FREE: chunk[{:?}, {:?}]", chunk, Chunk::next(chunk));
let chunk_size = Chunk::size(chunk);
dlassert!(chunk_size >= MALIGN);
if chunk_size + SEG_INFO_SIZE < DEFAULT_GRANULARITY {
if chunk != self.top && chunk != self.dv && chunk_size >= MIN_CHUNK_SIZE {
self.insert_chunk(chunk, chunk_size);
}
return;
}
let mut mem_to_free = chunk as *mut u8;
let mut mem_to_free_end = mem_to_free.add(chunk_size);
let mut prev_seg = ptr::null_mut() as *mut Segment;
let mut seg = self.seg;
while !seg.is_null() {
if Segment::holds(seg, mem_to_free) {
break;
}
prev_seg = seg;
seg = (*seg).next;
}
dlassert!(!seg.is_null());
dlassert!((*seg).size > chunk_size);
let seg_base = (*seg).base;
let seg_end = (*seg).base.add((*seg).size);
dlassert!(mem_to_free_end < seg_end);
dlassert!(seg_base as usize % MALIGN == 0);
dlverbose!("DL FREE: holding seg[{:?}, {:?}]", seg_base, seg_end);
dlverbose!("DL FREE: prev seg = {:?}", prev_seg);
let next_chunk = Chunk::next(chunk);
if mem_to_free != seg_base {
dlassert!(Chunk::pinuse(chunk));
dlassert!(mem_to_free as usize - seg_base as usize >= MIN_CHUNK_SIZE);
mem_to_free = mem_to_free.add(PTR_SIZE);
mem_to_free = mem_to_free.add(SEG_INFO_SIZE);
}
        let after_seg_size = seg_end as usize - mem_to_free_end as usize;
if after_seg_size > SEG_INFO_SIZE {
dlassert!(after_seg_size >= SEG_INFO_SIZE + MIN_CHUNK_SIZE);
} else {
dlassert!(after_seg_size == SEG_INFO_SIZE);
mem_to_free_end = seg_end;
}
dlverbose!(
"DL FREE: mem can be freed [{:?}, {:?}]",
mem_to_free,
mem_to_free_end
);
let mem_to_free_size = mem_to_free_end as usize - mem_to_free as usize;
let (mem_to_free, mem_to_free_size) = sys::get_free_borders(mem_to_free, mem_to_free_size);
let mem_to_free_end = mem_to_free.add(mem_to_free_size);
dlverbose!(
"DL FREE: mem can be freed by system [{:?}, {:?}]",
mem_to_free,
mem_to_free_end,
);
if mem_to_free_size == 0 {
if chunk != self.top && chunk != self.dv {
self.insert_chunk(chunk, chunk_size);
}
return;
}
let before_seg_size = mem_to_free as usize - seg_base as usize;
let after_seg_size = seg_end as usize - mem_to_free_end as usize;
let mut crop_chunk;
let mut crop_chunk_size;
if before_seg_size != 0 {
crop_chunk = mem_to_free.sub(SEG_INFO_SIZE) as *mut Chunk;
dlassert!((crop_chunk as usize - chunk as usize) >= MALIGN);
crop_chunk_size = mem_to_free_size + SEG_INFO_SIZE;
} else {
crop_chunk = mem_to_free as *mut Chunk;
crop_chunk_size = mem_to_free_size;
}
if after_seg_size == 0 {
dlassert!(mem_to_free_end == seg_end);
dlassert!(Chunk::next(chunk) as *mut u8 == seg_end.sub(SEG_INFO_SIZE));
crop_chunk_size -= SEG_INFO_SIZE;
}
self.crop_chunk(chunk, crop_chunk, crop_chunk_size, true);
let next_seg = (*seg).next;
let after_rem_pinuse = to_pinuse(after_seg_size > 0 && Chunk::pinuse((*seg).info_chunk()));
let success = sys::free(mem_to_free, mem_to_free_size);
dlassert!(success);
if before_seg_size != 0 {
let before_seg_info = self.set_segment_info(seg_base, before_seg_size, 0);
dlverbose!(
"DL FREE: before seg [{:?}, {:?}]",
(*before_seg_info).base,
Segment::top(before_seg_info)
);
if prev_seg.is_null() {
dlassert!(seg == self.seg);
self.seg = before_seg_info;
} else {
(*prev_seg).next = before_seg_info;
}
prev_seg = before_seg_info;
}
if after_seg_size != 0 {
let after_seg_info =
self.set_segment_info(mem_to_free_end, after_seg_size, after_rem_pinuse);
dlverbose!(
"DL FREE: after seg [{:?}, {:?}]",
(*after_seg_info).base,
Segment::top(after_seg_info)
);
if prev_seg.is_null() {
dlassert!(seg == self.seg);
self.seg = after_seg_info;
} else {
(*prev_seg).next = after_seg_info;
}
prev_seg = after_seg_info;
}
if prev_seg.is_null() {
dlassert!(seg == self.seg);
if next_seg.is_null() {
self.seg = ptr::null_mut();
} else {
self.seg = next_seg;
}
} else {
(*prev_seg).next = next_seg;
}
}
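    // Public free: a pointer into the static buffer just clears its cell bit;
    // a heap pointer is coalesced with free neighbours and passed on to
    // `free_chunk`.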
pub unsafe fn free(&mut self, mem: *mut u8) {
dlverbose!("{}", VERBOSE_DEL);
dlverbose!("DL FREE CALL: mem={:?}", mem);
self.print_segments();
self.check_malloc_state();
let sbuff = &mut self.sbuff as *mut u8;
        if mem >= sbuff && mem < sbuff.add(SBUFF_SIZE) {
let offset = mem as usize - sbuff as usize;
let idx = Dlmalloc::sbuff_offset_to_idx(offset);
dlassert!(idx < SBUFF_IDX_MAX);
self.sbuff_mask &= !(1 << idx);
dlverbose!(
"DL FREE: is in sbuff cell {}, sbuff_mask={:x}",
idx,
self.sbuff_mask
);
return;
}
let chunk = Chunk::from_mem(mem);
let chunk_size = Chunk::size(chunk);
dlassert!(chunk_size >= MIN_CHUNK_SIZE);
let chunk = self.extend_free_chunk(chunk, false);
dlverbose!(
"ALLOC FREE: extended chunk[{:?}, 0x{:x}] {}",
chunk,
Chunk::size(chunk),
self.is_top_or_dv(chunk)
);
self.free_chunk(chunk);
self.print_segments();
self.check_malloc_state();
}
unsafe fn is_top_or_dv(&self, chunk: *mut Chunk) -> &'static str {
if chunk == self.top {
"is top"
} else if chunk == self.dv {
"is dv"
} else if Chunk::size(chunk) == MALIGN {
"is half chunk"
} else {
"is chunk"
}
}
pub unsafe fn get_alloced_mem_size(&self) -> usize {
let mut size: usize = 0;
let mut seg = self.seg;
while !seg.is_null() {
let mut chunk = (*seg).base as *mut Chunk;
let last_chunk = Segment::top(seg).sub(SEG_INFO_SIZE);
while (chunk as *mut u8) < last_chunk {
if Chunk::cinuse(chunk) {
size += Chunk::size(chunk);
}
chunk = Chunk::next(chunk);
}
dlassert!(chunk as *mut u8 == last_chunk);
seg = (*seg).next;
}
size
}
unsafe fn print_segments(&mut self) {
if !DL_VERBOSE {
return;
}
for i in 0..SBUFF_IDX_MAX {
let size = Dlmalloc::sbuff_idx_to_size(i);
if self.sbuff_mask & (1 << i) == 0 {
dlverbose!("[{}, -]", size);
} else {
dlverbose!(
"[{}, {}]",
size,
Dlmalloc::debug_mem_sum(
self.sbuff
.as_mut_ptr()
.add(Dlmalloc::sbuff_idx_to_offset(i)),
size
)
);
}
}
let mut i = 0;
let mut seg = self.seg;
while !seg.is_null() {
i += 1;
dlverbose!(
"+++++++ SEG{} {:?} [{:?}, {:?}]",
i,
seg,
(*seg).base,
Segment::top(seg)
);
let mut chunk = (*seg).base as *mut Chunk;
let last_chunk = Segment::top(seg).sub(SEG_INFO_SIZE);
while (chunk as *mut u8) < last_chunk {
dlverbose!(
"chunk [{:?}, {:?}]{}{} {}, sum = {}",
chunk,
Chunk::next(chunk),
if Chunk::cinuse(chunk) { "c" } else { "" },
if Chunk::pinuse(chunk) { "p" } else { "" },
self.is_top_or_dv(chunk),
if Chunk::cinuse(chunk) {
Dlmalloc::debug_mem_sum(Chunk::to_mem(chunk), Chunk::size(chunk) - PTR_SIZE)
} else {
0
}
);
chunk = Chunk::next(chunk);
}
dlverbose!(
"info [{:?}, {:?}]{}{}",
chunk,
Chunk::next(chunk),
if Chunk::cinuse(chunk) { "c" } else { "" },
if Chunk::pinuse(chunk) { "p" } else { "" }
);
seg = (*seg).next;
}
dlverbose!(r"\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/");
}
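    // The debug-only code below compiles to no-ops unless DL_DEBUG /
    // DL_CHECKS are enabled.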
unsafe fn update_least_addr(&mut self, addr: *mut u8) {
if !DL_DEBUG {
return;
}
if self.least_addr.is_null() || addr < self.least_addr {
self.least_addr = addr;
}
}
unsafe fn check_any_chunk(&self, p: *mut Chunk) {
if !DL_DEBUG {
return;
}
dlassert!(!p.is_null());
dlassert!(p as usize % MALIGN == 0);
dlassert!(Chunk::size(p) % MALIGN == 0);
dlassert!(Chunk::to_mem(p) as usize % MALIGN == 0);
dlassert!(p as *mut u8 >= self.least_addr);
if !DL_CHECKS {
return;
}
let mut seg = self.seg;
while !seg.is_null() {
let mut chunk = (*seg).base as *mut Chunk;
let last_chunk = Segment::top(seg).sub(SEG_INFO_SIZE);
while (chunk as *mut u8) < last_chunk {
dlassert!(!(chunk > p && chunk < Chunk::next(p)));
dlassert!(!(p > chunk && p < Chunk::next(chunk)));
chunk = Chunk::next(chunk);
}
seg = (*seg).next;
}
}
unsafe fn check_top_chunk(&self, p: *mut Chunk) {
if !DL_DEBUG {
return;
}
if self.top.is_null() {
dlassert!(self.topsize == 0);
return;
}
self.check_any_chunk(p);
if !DL_CHECKS {
return;
}
let sp = self.segment_holding(p as *mut u8);
let sz = Chunk::size(p);
dlassert!(!sp.is_null());
dlassert!(sz == self.topsize);
dlassert!(sz != 0);
dlassert!(sz == (*sp).base as usize + (*sp).size - p as usize - SEG_INFO_SIZE);
dlassert!(Chunk::pinuse(p));
dlassert!(!Chunk::pinuse(Chunk::plus_offset(p, sz)));
}
unsafe fn check_malloced_mem(&self, mem: *mut u8, req_size: usize) {
if !DL_DEBUG {
return;
}
if mem.is_null() {
return;
}
let p = Chunk::from_mem(mem);
let sz = Chunk::size(p);
self.check_cinuse_chunk(p);
dlassert!(sz >= MIN_CHUNK_SIZE);
dlassert!(sz >= req_size + PTR_SIZE);
}
unsafe fn check_cinuse_chunk(&self, p: *mut Chunk) {
self.check_any_chunk(p);
dlassert!(Chunk::cinuse(p));
dlassert!(Chunk::pinuse(Chunk::next(p)));
dlassert!(Chunk::pinuse(p) || Chunk::next(Chunk::prev(p)) == p);
}
unsafe fn check_free_chunk(&self, p: *mut Chunk) {
if !DL_DEBUG {
return;
}
self.check_any_chunk(p);
let sz = Chunk::size(p);
let next = Chunk::plus_offset(p, sz);
dlassert!(!Chunk::cinuse(p) && Chunk::pinuse(p));
dlassert!(!Chunk::pinuse(Chunk::next(p)));
dlassert!((*next).prev_chunk_size == sz);
dlassert!(Chunk::cinuse(next));
if p != self.dv && p != self.top {
if sz >= MIN_CHUNK_SIZE {
dlassert!((*(*p).next).prev == p);
dlassert!((*(*p).prev).next == p);
} else {
dlassert!(sz == MALIGN);
}
}
}
unsafe fn check_malloc_state(&mut self) {
if !DL_CHECKS {
return;
}
for i in 0..NSMALLBINS {
self.check_smallbin(i as u32);
}
for i in 0..NTREEBINS {
self.check_treebin(i as u32);
}
if self.dvsize != 0 {
self.check_any_chunk(self.dv);
dlassert!(self.dvsize == Chunk::size(self.dv));
dlassert!(self.dvsize >= MIN_CHUNK_SIZE);
dlassert!(Chunk::pinuse(self.dv));
dlassert!(!Chunk::cinuse(self.dv));
dlassert!(!self.bin_find(self.dv));
}
if !self.top.is_null() {
self.check_top_chunk(self.top);
dlassert!(self.topsize > 0);
dlassert!(!self.bin_find(self.top));
}
let mut seg = self.seg;
while !seg.is_null() {
let mut chunk = (*seg).base as *mut Chunk;
let last_chunk = Segment::top(seg).sub(SEG_INFO_SIZE);
while (chunk as *mut u8) < last_chunk {
if chunk != self.top && chunk != self.dv {
dlassert!(self.top < chunk || self.top >= Chunk::next(chunk));
dlassert!(self.dv < chunk || self.dv >= Chunk::next(chunk));
dlassert!(
Chunk::cinuse(chunk)
|| Chunk::size(chunk) == MALIGN
|| self.bin_find(chunk)
);
}
dlassert!(
Chunk::cinuse(chunk)
|| Chunk::size(chunk) < 2 * DEFAULT_GRANULARITY + SEG_INFO_SIZE
);
dlassert!(Chunk::pinuse(chunk) || !Chunk::cinuse(Chunk::prev(chunk)));
let next = Chunk::next(chunk);
dlassert!(Chunk::pinuse(next) == Chunk::cinuse(chunk));
dlassert!(Chunk::cinuse(chunk) || Chunk::cinuse(next));
dlassert!(
Chunk::pinuse(chunk)
|| Chunk::size(Chunk::prev(chunk)) == (*chunk).prev_chunk_size
);
chunk = next;
}
dlassert!(chunk as *mut u8 == last_chunk);
dlassert!(Chunk::size(chunk) == SEG_INFO_SIZE - 2 * PTR_SIZE);
seg = (*seg).next;
}
}
unsafe fn check_smallbin(&mut self, idx: u32) {
if !DL_CHECKS {
return;
}
let head_chunk = self.smallbin_at(idx);
let mut bin_chunk = (*head_chunk).next;
let idx_bin_is_empty = self.smallmap & (1 << idx) == 0;
if bin_chunk == head_chunk {
dlassert!(idx_bin_is_empty);
} else if !idx_bin_is_empty {
while bin_chunk != head_chunk {
self.check_free_chunk(bin_chunk);
let bin_size = Chunk::size(bin_chunk);
dlassert!(self.small_index(bin_size) == idx);
dlassert!(
(*bin_chunk).next == head_chunk || Chunk::size((*bin_chunk).next) == bin_size
);
let next_mem_chunk = Chunk::next(bin_chunk);
if !(*next_mem_chunk).is_border_chunk() {
self.check_cinuse_chunk(next_mem_chunk);
}
bin_chunk = (*bin_chunk).next;
}
}
}
unsafe fn check_treebin(&mut self, idx: u32) {
if !DL_CHECKS {
return;
}
let tb = self.treebin_at(idx);
let t = *tb;
let empty = self.treemap & (1 << idx) == 0;
if t.is_null() {
dlassert!(empty);
}
if !empty {
self.check_tree(t);
}
}
unsafe fn check_tree(&mut self, t: *mut TreeChunk) {
if !DL_CHECKS {
return;
}
let tc = TreeChunk::chunk(t);
let tindex = (*t).index;
let tsize = Chunk::size(tc);
let idx = self.compute_tree_index(tsize);
dlassert!(tindex == idx);
dlassert!(tsize >= self.min_large_chunk_size());
dlassert!(tsize >= self.min_size_for_tree_index(idx));
dlassert!(idx == NTREEBINS as u32 - 1 || tsize < self.min_size_for_tree_index(idx + 1));
let mut u = t;
let mut head = ptr::null_mut::<TreeChunk>();
loop {
let uc = TreeChunk::chunk(u);
self.check_any_chunk(uc);
dlassert!((*u).index == tindex);
dlassert!(Chunk::size(uc) == tsize);
dlassert!(!Chunk::cinuse(uc) && Chunk::pinuse(uc));
dlassert!(!Chunk::pinuse(Chunk::next(uc)));
dlassert!((*(*uc).next).prev == uc);
dlassert!((*(*uc).prev).next == uc);
let left = (*u).child[0];
let right = (*u).child[1];
if (*u).parent.is_null() {
dlassert!(left.is_null());
dlassert!(right.is_null());
} else {
dlassert!(head.is_null());
head = u;
dlassert!((*u).parent != u);
dlassert!(
(*(*u).parent).child[0] == u
|| (*(*u).parent).child[1] == u
|| *((*u).parent as *mut *mut TreeChunk) == u
);
if !left.is_null() {
dlassert!((*left).parent == u);
dlassert!(left != u);
self.check_tree(left);
}
if !right.is_null() {
dlassert!((*right).parent == u);
dlassert!(right != u);
self.check_tree(right);
}
if !left.is_null() && !right.is_null() {
dlassert!(
Chunk::size(TreeChunk::chunk(left)) < Chunk::size(TreeChunk::chunk(right))
);
}
}
u = TreeChunk::prev(u);
if u == t {
break;
}
}
dlassert!(!head.is_null());
}
fn min_size_for_tree_index(&self, idx: u32) -> usize {
let idx = idx as usize;
(1 << ((idx >> 1) + TREEBIN_SHIFT)) | ((idx & 1) << ((idx >> 1) + TREEBIN_SHIFT - 1))
}
unsafe fn bin_find(&mut self, chunk: *mut Chunk) -> bool {
let size = Chunk::size(chunk);
if self.is_chunk_small(size) {
let sidx = self.small_index(size);
let b = self.smallbin_at(sidx);
if !self.smallmap_is_marked(sidx) {
return false;
}
let mut p = b;
loop {
if p == chunk {
return true;
}
p = (*p).prev;
if p == b {
return false;
}
}
} else {
let tidx = self.compute_tree_index(size);
if !self.treemap_is_marked(tidx) {
return false;
}
let mut t = *self.treebin_at(tidx);
let mut sizebits = size << leftshift_for_tree_index(tidx);
while !t.is_null() && Chunk::size(TreeChunk::chunk(t)) != size {
t = (*t).child[(sizebits >> (PTR_SIZE * 8 - 1)) & 1];
sizebits <<= 1;
}
if t.is_null() {
return false;
}
let mut u = t;
let chunk = chunk as *mut TreeChunk;
loop {
if u == chunk {
return true;
}
u = TreeChunk::prev(u);
if u == t {
return false;
}
}
}
}
}
impl Chunk {
fn is_border_chunk(&self) -> bool {
self.head == BORDER_CHUNK_HEAD
}
unsafe fn size(me: *mut Chunk) -> usize {
(*me).head & !FLAG_BITS
}
unsafe fn next(me: *mut Chunk) -> *mut Chunk {
Chunk::plus_offset(me, Chunk::size(me))
}
unsafe fn prev(me: *mut Chunk) -> *mut Chunk {
dlassert!(!Chunk::pinuse(me));
Chunk::minus_offset(me, (*me).prev_chunk_size)
}
unsafe fn cinuse(me: *mut Chunk) -> bool {
(*me).head & CINUSE != 0
}
unsafe fn pinuse(me: *mut Chunk) -> bool {
(*me).head & PINUSE != 0
}
unsafe fn set_next_chunk_prev_size(me: *mut Chunk, size: usize) {
(*Chunk::next(me)).prev_chunk_size = size;
}
unsafe fn change_size(me: *mut Chunk, size: usize) {
(*me).head = size | ((*me).head & FLAG_BITS);
}
unsafe fn plus_offset(me: *mut Chunk, offset: usize) -> *mut Chunk {
(me as *mut u8).add(offset) as *mut Chunk
}
unsafe fn minus_offset(me: *mut Chunk, offset: usize) -> *mut Chunk {
(me as *mut u8).sub(offset) as *mut Chunk
}
unsafe fn to_mem(me: *mut Chunk) -> *mut u8 {
(me as *mut u8).add(CHUNK_MEM_OFFSET)
}
unsafe fn from_mem(mem: *mut u8) -> *mut Chunk {
mem.sub(CHUNK_MEM_OFFSET) as *mut Chunk
}
}
impl TreeChunk {
unsafe fn leftmost_child(me: *mut TreeChunk) -> *mut TreeChunk {
if (*me).child[0].is_null() {
(*me).child[1]
} else {
(*me).child[0]
}
}
unsafe fn chunk(me: *mut TreeChunk) -> *mut Chunk {
&mut (*me).chunk
}
unsafe fn next(me: *mut TreeChunk) -> *mut TreeChunk {
(*TreeChunk::chunk(me)).next as *mut TreeChunk
}
unsafe fn prev(me: *mut TreeChunk) -> *mut TreeChunk {
(*TreeChunk::chunk(me)).prev as *mut TreeChunk
}
}
impl Segment {
unsafe fn holds(seg: *mut Segment, addr: *mut u8) -> bool {
(*seg).base <= addr && addr < Segment::top(seg)
}
unsafe fn top(seg: *mut Segment) -> *mut u8 {
(*seg).base.add((*seg).size)
}
pub unsafe fn end(&self) -> *mut u8 {
self.base.add(self.size)
}
pub unsafe fn info_chunk(&self) -> *mut Chunk {
self.end().sub(SEG_INFO_SIZE) as *mut Chunk
}
pub unsafe fn base_chunk(&self) -> *mut Chunk {
self.base as *mut Chunk
}
}
#[cfg(test)]
mod tests {
use super::*;
unsafe fn setup_treemap(a: &mut Dlmalloc) {
let large_request_size = NSMALLBINS * (1 << SMALLBIN_SHIFT);
assert!(!a.is_chunk_small(large_request_size));
let large_request1 = a.malloc(large_request_size);
assert_ne!(large_request1, ptr::null_mut());
let large_request2 = a.malloc(large_request_size);
assert_ne!(large_request2, ptr::null_mut());
a.free(large_request1);
assert_ne!(a.treemap, 0);
}
#[test]
fn treemap_alloc_overflow_minimal() {
let mut a = DLMALLOC_INIT;
unsafe {
setup_treemap(&mut a);
let min_idx31_size = (0xc000 << TREEBIN_SHIFT) - PTR_SIZE + 1;
assert_ne!(a.malloc(min_idx31_size), ptr::null_mut());
}
}
#[test]
fn treemap_alloc_max() {
let mut a = DLMALLOC_INIT;
unsafe {
setup_treemap(&mut a);
let max_request_size = a.max_request() - 1;
assert_eq!(a.malloc(max_request_size), ptr::null_mut());
}
}
}