use core::alloc::Layout;
use super::*;
#[test]
fn alloc_no_memory() {
    // An allocator that was never given a memory region must refuse
    // every allocation request by returning a null pointer.
    let mut guard = AllocatorInitGuard::empty();
    let layout = Layout::from_size_align(1, 1).unwrap();
    let allocated = unsafe { guard.allocator.alloc(layout) };
    assert!(allocated.is_null())
}
#[test]
fn alloc_not_enough_space_returns_null() {
    const MEM_SIZE: usize = USIZE_SIZE * 17;
    let mut guard = AllocatorInitGuard::empty();
    guard.init(MEM_SIZE);

    // The largest possible allocation is the whole region minus one chunk
    // header; requesting one byte more than that must fail.
    let too_big = (MEM_SIZE - HEADER_SIZE) + 1;
    let layout = Layout::from_size_align(too_big, 1).unwrap();
    let allocated = unsafe { guard.allocator.alloc(layout) };
    assert!(allocated.is_null())
}
#[test]
fn alloc_perfect_fit() {
    const MEM_SIZE: usize = USIZE_SIZE * 17;
    let mut guard = AllocatorInitGuard::empty();
    guard.init(MEM_SIZE);
    let addr = guard.addr();

    // Request exactly the whole region minus the chunk header, so the
    // allocation consumes the entire heap with no leftover free chunk.
    let perfect_fit = MEM_SIZE - HEADER_SIZE;
    let allocated = unsafe {
        guard
            .allocator
            .alloc(Layout::from_size_align(perfect_fit, 1).unwrap())
    };

    // The returned pointer is the content address, right after the header.
    assert_eq!(allocated as usize, addr + HEADER_SIZE);

    // The chunk at the start of the heap must now be marked used.
    let chunk_header = unsafe {
        match Chunk::from_addr(addr) {
            ChunkRef::Used(used) => used,
            ChunkRef::Free(_) => panic!("allocated chunk is marked as free"),
        }
    };
    // Use `assert!` for boolean conditions (clippy: bool_assert_comparison).
    assert!(chunk_header.0.prev_in_use());
    assert_eq!(chunk_header.0.size(), perfect_fit);
}
#[test]
fn alloc_aligned_with_end_padding_not_large_enough_to_fit_chunk() {
    const MEM_SIZE: usize = USIZE_SIZE * 17;
    let mut guard = AllocatorInitGuard::empty();
    guard.init(MEM_SIZE);
    let addr = guard.addr();
    let perfect_fit = MEM_SIZE - HEADER_SIZE;

    // Leave some trailing space, but less than a minimal free chunk needs;
    // the allocator cannot split it off, so it should be absorbed into the
    // allocated chunk instead.
    let size_with_minimal_end_padding = perfect_fit - USIZE_SIZE;
    let allocated = unsafe {
        guard
            .allocator
            .alloc(Layout::from_size_align(size_with_minimal_end_padding, 1).unwrap())
    };

    // The returned pointer is the content address, right after the header.
    assert_eq!(allocated as usize, addr + HEADER_SIZE);

    let chunk_header = unsafe {
        match Chunk::from_addr(addr) {
            ChunkRef::Used(used) => used,
            ChunkRef::Free(_) => panic!("allocated chunk is marked as free"),
        }
    };
    // Use `assert!` for boolean conditions (clippy: bool_assert_comparison).
    assert!(chunk_header.0.prev_in_use());
    // The chunk absorbed the unusable end padding, so its size is the full
    // `perfect_fit`, not the smaller requested size.
    assert_eq!(chunk_header.0.size(), perfect_fit);
}
#[test]
fn alloc_aligned_end_padding() {
    const MEM_SIZE: usize = USIZE_SIZE * 17;
    let mut guard = AllocatorInitGuard::empty();
    guard.init(MEM_SIZE);
    let addr = guard.addr();
    let perfect_fit = MEM_SIZE - HEADER_SIZE;

    // Leave exactly enough trailing space for a minimal free chunk, so the
    // allocator should split it off as an end-padding free chunk.
    let size_with_large_enough_end_padding = perfect_fit - MIN_FREE_CHUNK_SIZE_INCLUDING_HEADER;
    let allocated = unsafe {
        guard
            .allocator
            .alloc(Layout::from_size_align(size_with_large_enough_end_padding, 1).unwrap())
    };
    assert_eq!(allocated as usize, addr + HEADER_SIZE);

    // The allocated chunk keeps only the requested size.
    let chunk_header = unsafe {
        match Chunk::from_addr(addr) {
            ChunkRef::Used(used) => used,
            ChunkRef::Free(_) => panic!("allocated chunk is marked as free"),
        }
    };
    // Use `assert!` for boolean conditions (clippy: bool_assert_comparison).
    assert!(chunk_header.0.prev_in_use());
    assert_eq!(chunk_header.0.size(), size_with_large_enough_end_padding);

    // The split-off end padding directly follows the allocated content and
    // must be a free chunk.
    let end_padding_chunk = unsafe {
        match Chunk::from_addr(allocated as usize + size_with_large_enough_end_padding) {
            ChunkRef::Used(_) => panic!("end padding chunk is marked as used"),
            ChunkRef::Free(free) => free,
        }
    };
    // Its predecessor (the allocated chunk) is in use.
    assert!(end_padding_chunk.header.prev_in_use());
    assert_eq!(
        end_padding_chunk.header.size(),
        MIN_FREE_CHUNK_SIZE_INCLUDING_HEADER - HEADER_SIZE
    );

    // The end-padding chunk must be the only free chunk in its bin, and its
    // postfix size field must agree with its header size.
    assert_only_1_free_chunk_in_bin(
        &mut guard,
        end_padding_chunk as *mut _ as usize,
        end_padding_chunk.header.size(),
    );
    assert_eq!(
        *end_padding_chunk.postfix_size(),
        MIN_FREE_CHUNK_SIZE_INCLUDING_HEADER - HEADER_SIZE
    );
}
#[test]
fn alloc_unaligned_not_enough_space_returns_null() {
    const MEM_SIZE: usize = 128;
    let mut guard = AllocatorInitGuard::empty();
    guard.init_with_alignment(MEM_SIZE, MEM_SIZE);

    // With this alignment, at most half of the region can hold the content;
    // requesting one byte beyond that cannot be satisfied.
    let alignment = MEM_SIZE >> 1;
    let too_big = (MEM_SIZE >> 1) + 1;
    let layout = Layout::from_size_align(too_big, alignment).unwrap();
    let allocated = unsafe { guard.allocator.alloc(layout) };
    assert!(allocated.is_null())
}
#[test]
fn alloc_unaligned_no_end_padding() {
    const MEM_SIZE: usize = 128;
    let mut guard = AllocatorInitGuard::empty();
    guard.init_with_alignment(MEM_SIZE, MEM_SIZE);
    let addr = guard.addr();

    // Align to half the region so the allocator must insert a start padding
    // free chunk; the remaining half then fits the request exactly, so no
    // end padding is created.
    let alignment = MEM_SIZE >> 1;
    let perfect_fit = MEM_SIZE >> 1;
    let allocated = unsafe {
        guard
            .allocator
            .alloc(Layout::from_size_align(perfect_fit, alignment).unwrap())
    };

    // The content must land on the aligned address inside the region.
    let aligned_content_addr = addr + perfect_fit;
    assert_eq!(allocated as usize, aligned_content_addr);

    // Everything before the allocated chunk's header becomes a free start
    // padding chunk.
    let start_padding_chunk = unsafe {
        match Chunk::from_addr(addr) {
            ChunkRef::Used(_) => panic!("start padding chunk is marked as used"),
            ChunkRef::Free(free) => free,
        }
    };
    // Use `assert!` for boolean conditions (clippy: bool_assert_comparison).
    assert!(start_padding_chunk.header.prev_in_use());
    let content_chunk_addr = aligned_content_addr - HEADER_SIZE;
    let start_padding_chunk_size = content_chunk_addr - addr - HEADER_SIZE;
    assert_eq!(start_padding_chunk.header.size(), start_padding_chunk_size);

    // The start padding chunk is alone in its freelist, so it has no
    // forward link, and its back pointer must point at the `fd` field of
    // the smallbin sub-bin that matches its size and alignment.
    assert_eq!(start_padding_chunk.fd, None);
    let smallbin_index = unsafe {
        SmallBins::<TEST_SMALLBINS_AMOUNT, TEST_ALIGNMENT_SUB_BINS_AMOUNT>::smallbin_index(
            start_padding_chunk.size(),
        )
        .unwrap()
    };
    let alignment_index = unsafe {
        SmallBins::<TEST_SMALLBINS_AMOUNT, TEST_ALIGNMENT_SUB_BINS_AMOUNT>::alignment_index_of_chunk_content_addr(
            start_padding_chunk.content_addr(),
        )
    };
    let ptr_to_fd_of_bin = &mut guard.allocator.smallbins.small_bins[smallbin_index]
        .alignment_sub_bins[alignment_index]
        .fd as *mut _;
    assert_eq!(start_padding_chunk.ptr_to_fd_of_bk, ptr_to_fd_of_bin);

    // The allocated chunk records the size of the free chunk before it and
    // keeps the full requested size.
    let allocated_chunk = unsafe {
        match Chunk::from_addr(allocated as usize - HEADER_SIZE) {
            ChunkRef::Used(used) => used,
            ChunkRef::Free(_) => panic!("allocated chunk is marked as free"),
        }
    };
    assert_eq!(
        allocated_chunk.prev_size_if_free(),
        Some(start_padding_chunk_size)
    );
    assert_eq!(allocated_chunk.0.size(), perfect_fit);
}
#[test]
fn alloc_unaligned_end_padding() {
    const MEM_SIZE: usize = 128;
    let mut guard = AllocatorInitGuard::empty();
    guard.init_with_alignment(MEM_SIZE, MEM_SIZE);
    let heap_start = guard.addr();

    // Align to half the region so the allocator must insert start padding,
    // and request slightly less than fits so it also splits off end padding.
    let alignment = MEM_SIZE >> 1;
    let exact_fit = MEM_SIZE >> 1;
    let requested = exact_fit - MIN_FREE_CHUNK_SIZE_INCLUDING_HEADER;
    let layout = Layout::from_size_align(requested, alignment).unwrap();
    let allocated = unsafe { guard.allocator.alloc(layout) };

    // The content must land on the aligned address inside the region.
    let aligned_content_addr = heap_start + exact_fit;
    assert_eq!(allocated as usize, aligned_content_addr);

    // The start padding free chunk occupies everything before the header of
    // the allocated chunk, and is alone in its bin.
    let allocated_header_addr = aligned_content_addr - HEADER_SIZE;
    let start_padding_size = allocated_header_addr - heap_start - HEADER_SIZE;
    assert_only_1_free_chunk_in_bin(&mut guard, heap_start, start_padding_size);

    // The end padding free chunk sits right after the allocated content,
    // also alone in its bin.
    assert_only_1_free_chunk_in_bin(
        &mut guard,
        allocated as usize + requested,
        MIN_FREE_CHUNK_SIZE_INCLUDING_HEADER - HEADER_SIZE,
    );

    // The allocated chunk records the size of the free chunk before it and
    // keeps only the requested size.
    let allocated_chunk = unsafe {
        match Chunk::from_addr(allocated as usize - HEADER_SIZE) {
            ChunkRef::Used(used) => used,
            ChunkRef::Free(_) => panic!("allocated chunk is marked as free"),
        }
    };
    assert_eq!(allocated_chunk.prev_size_if_free(), Some(start_padding_size));
    assert_eq!(allocated_chunk.0.size(), requested);
}