#![no_std]
#![cfg_attr(feature = "allocator-api", feature(allocator_api))]
#![warn(clippy::dbg_macro)]
#![warn(clippy::print_stderr)]
#![warn(clippy::print_stdout)]
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]
#![warn(unreachable_pub)]
#![warn(unused_crate_dependencies)]
#![warn(unused_macro_rules)]
#![warn(unused_qualifications)]
#![doc(test(attr(deny(warnings))))]
mod arena;
mod iter;
mod ptr;
mod segment;
mod sizing;
mod usage;
#[cfg(test)]
mod tests;
#[cfg(feature = "lazy")]
pub mod lazy;
use crate::arena::Arena;
use crate::iter::ArenaChunks;
use crate::iter::Chunk;
use crate::ptr::SegmentDataPtr;
use crate::segment::Segment;
use core::alloc::GlobalAlloc;
use core::alloc::Layout;
use core::cmp;
use core::ptr::NonNull;
use spin::Mutex;
#[cfg(feature = "allocator-api")]
use core::alloc::AllocError;
#[cfg(feature = "allocator-api")]
use core::alloc::Allocator;
pub use crate::sizing::Sizing;
pub use crate::sizing::Sizing128KiB;
pub use crate::sizing::Sizing16MiB;
pub use crate::sizing::Sizing4KiB;
pub use crate::usage::Usage;
/// A [`Teaspoon`] allocator using the [`Sizing128KiB`] sizing strategy.
pub type Teaspoon128KiB<'a> = Teaspoon<'a, Sizing128KiB>;
/// A [`Teaspoon`] allocator using the [`Sizing16MiB`] sizing strategy.
pub type Teaspoon16MiB<'a> = Teaspoon<'a, Sizing16MiB>;
/// A [`Teaspoon`] allocator using the [`Sizing4KiB`] sizing strategy.
pub type Teaspoon4KiB<'a> = Teaspoon<'a, Sizing4KiB>;
/// A memory allocator that serves allocations out of a caller-provided
/// memory area.
///
/// All allocator state is kept behind a [`spin::Mutex`], so a `Teaspoon`
/// can be shared between threads; it implements `GlobalAlloc`
/// unconditionally and `Allocator` when the `allocator-api` feature is
/// enabled.
#[derive(Debug)]
pub struct Teaspoon<'a, S: Sizing> {
    // The mutex serializes every allocator operation on the inner state.
    inner: Mutex<TeaspoonInner<'a, S>>,
}
impl<'a, S: Sizing> Teaspoon<'a, S> {
    /// Creates a new allocator that manages the memory area backing `slice`.
    #[inline]
    #[must_use]
    pub fn from_slice(slice: &'a mut [u8]) -> Self {
        Self {
            inner: Mutex::new(TeaspoonInner::from_slice(slice)),
        }
    }

    /// Creates a new allocator from a raw slice pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null and must point to a memory area of
    /// `ptr.len()` bytes that is valid for reads and writes, and not
    /// accessed through any other pointer, for the lifetime `'a`.
    #[inline]
    #[must_use]
    pub unsafe fn from_ptr(ptr: *mut [u8]) -> Self {
        Self::from_ptr_size(ptr.cast(), ptr.len())
    }

    /// Creates a new allocator from a raw pointer and a size in bytes.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null (a null pointer panics inside
    /// `TeaspoonInner::from_ptr_size`) and must point to a memory area of
    /// `size` bytes that is valid for reads and writes, and not accessed
    /// through any other pointer, for the lifetime `'a`.
    #[inline]
    #[must_use]
    pub unsafe fn from_ptr_size(ptr: *mut u8, size: usize) -> Self {
        Self {
            inner: Mutex::new(TeaspoonInner::from_ptr_size(ptr, size)),
        }
    }

    /// Creates a new allocator from a memory address and a size in bytes.
    ///
    /// # Safety
    ///
    /// Same requirements as [`Teaspoon::from_ptr_size`]: `addr` must be a
    /// non-null address of a `size`-byte memory area valid for reads and
    /// writes, and exclusively owned, for the lifetime `'a`.
    #[inline]
    #[must_use]
    pub unsafe fn from_addr_size(addr: usize, size: usize) -> Self {
        Self::from_ptr_size(addr as *mut u8, size)
    }

    /// Returns usage statistics for the underlying memory area.
    ///
    /// Briefly locks the internal mutex to take a consistent snapshot.
    #[inline]
    #[must_use]
    pub fn usage(&self) -> Usage {
        self.inner.lock().usage()
    }
}
impl<'a, S: Sizing> From<&'a mut [u8]> for Teaspoon<'a, S> {
#[inline]
fn from(slice: &'a mut [u8]) -> Self {
Self::from_slice(slice)
}
}
impl<'a, S: Sizing, const N: usize> From<&'a mut [u8; N]> for Teaspoon<'a, S> {
    /// Builds an allocator from a fixed-size byte array by viewing it as
    /// a mutable slice.
    #[inline]
    fn from(array: &'a mut [u8; N]) -> Self {
        // Unsized coercion `&mut [u8; N] -> &mut [u8]`.
        let slice: &'a mut [u8] = array;
        Self::from(slice)
    }
}
#[cfg(feature = "allocator-api")]
// Delegates every operation to the mutex-protected inner allocator,
// translating its `Option` results into `Result<_, AllocError>`.
unsafe impl<'a, S: Sizing> Allocator for Teaspoon<'a, S> {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        match self.inner.lock().allocate(layout) {
            Some(data) => Ok(data),
            None => Err(AllocError),
        }
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        let mut inner = self.inner.lock();
        inner.deallocate(SegmentDataPtr::new(ptr), layout)
    }

    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        let mut inner = self.inner.lock();
        match inner.grow(SegmentDataPtr::new(ptr), old_layout, new_layout) {
            Some(data) => Ok(data),
            None => Err(AllocError),
        }
    }

    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        let mut inner = self.inner.lock();
        match inner.shrink(SegmentDataPtr::new(ptr), old_layout, new_layout) {
            Some(data) => Ok(data),
            None => Err(AllocError),
        }
    }
}
// Delegates to the mutex-protected inner allocator, using a null pointer
// to signal allocation failure as the `GlobalAlloc` contract requires.
unsafe impl<'a, S: Sizing> GlobalAlloc for Teaspoon<'a, S> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        match self.inner.lock().allocate(layout) {
            Some(data) => data.cast().as_ptr(),
            None => core::ptr::null_mut(),
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        // SAFETY: per the `GlobalAlloc` contract, `ptr` was returned by a
        // successful `alloc`/`realloc` and is therefore non-null.
        let data_ptr = SegmentDataPtr::new(NonNull::new_unchecked(ptr));
        self.inner.lock().deallocate(data_ptr, layout)
    }

    unsafe fn realloc(&self, ptr: *mut u8, old_layout: Layout, new_size: usize) -> *mut u8 {
        // SAFETY: per the `GlobalAlloc` contract, `ptr` is non-null and
        // the caller guarantees `new_size` forms a valid layout with the
        // old alignment.
        let data_ptr = SegmentDataPtr::new(NonNull::new_unchecked(ptr));
        let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());
        match self.inner.lock().resize(data_ptr, old_layout, new_layout) {
            Some(data) => data.cast().as_ptr(),
            None => core::ptr::null_mut(),
        }
    }
}
/// The actual allocator state; [`Teaspoon`] wraps this in a mutex, so all
/// methods here may take `&mut self` without further synchronization.
#[repr(transparent)]
#[derive(Debug)]
struct TeaspoonInner<'a, S: Sizing> {
    // The arena owns the memory area and the head/tail pointers of the
    // doubly-linked list of allocated segments inside it.
    arena: Arena<'a, S>,
}
impl<'a, S: Sizing> TeaspoonInner<'a, S> {
    /// Builds the allocator state from a mutable byte slice.
    #[inline]
    #[must_use]
    fn from_slice(slice: &'a mut [u8]) -> Self {
        Self {
            arena: Arena::from(slice),
        }
    }

    /// Builds the allocator state from a raw pointer and a size in bytes.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a memory area of `size` bytes that is valid
    /// for reads and writes, and exclusively owned, for the lifetime `'a`.
    ///
    /// # Panics
    ///
    /// Panics if `ptr` is null.
    #[inline]
    #[must_use]
    unsafe fn from_ptr_size(ptr: *mut u8, size: usize) -> Self {
        let ptr = NonNull::new(ptr).expect("expected non-null pointer");
        let slice = NonNull::slice_from_raw_parts(ptr, size);
        let arena = Arena::new(slice);
        Self { arena }
    }

    /// Allocates a block of memory fitting `layout`, or returns `None`
    /// when no large-enough gap exists in the arena.
    fn allocate(&mut self, layout: Layout) -> Option<NonNull<[u8]>> {
        if layout.size() == 0 {
            // Zero-sized allocations get a well-aligned dangling pointer
            // and never touch the arena. `layout.align()` is a non-zero
            // power of two, so the pointer is never null.
            let dangling = unsafe { NonNull::new_unchecked(layout.align() as *mut u8) };
            return Some(NonNull::slice_from_raw_parts(dangling, 0));
        }
        // Empty arena takes a dedicated path; otherwise try the fast
        // append-after-tail path first and fall back to scanning the
        // whole arena for a gap.
        let data = match self.arena.head() {
            None => self.allocate_first(layout),
            Some(_) => self
                .allocate_tail(layout)
                .or_else(|| self.allocate_anywhere(layout)),
        };
        if let Some(data) = data {
            debug_assert!(
                data.len() >= layout.size(),
                "allocation returned fewer bytes than requested"
            );
            debug_assert!(
                data.cast::<u8>().align_offset(layout.align()) == 0,
                "allocation returned data with wrong alignment"
            );
        }
        data
    }

    /// Allocates the very first segment in an empty arena and makes it
    /// both head and tail of the segment list.
    fn allocate_first(&mut self, layout: Layout) -> Option<NonNull<[u8]>> {
        debug_assert!(
            layout.size() > 0,
            "`layout.size()` must be greater than zero"
        );
        debug_assert!(
            self.arena.head().is_none(),
            "arena is expected to be empty, but has a head pointer"
        );
        debug_assert!(
            self.arena.tail().is_none(),
            "arena is expected to be empty, but has a tail pointer"
        );
        // The arena is empty, so the whole usable range is free space.
        let segment = unsafe { Segment::new_in(self.arena, self.arena.usable(), layout)? };
        segment.write();
        self.arena.set_head(Some(segment.ptr()));
        self.arena.set_tail(Some(segment.ptr()));
        Some(segment.data(layout))
    }

    /// Tries to allocate a new segment in the free space trailing the
    /// current tail segment (the fast path for a non-empty arena).
    fn allocate_tail(&mut self, layout: Layout) -> Option<NonNull<[u8]>> {
        debug_assert!(
            layout.size() > 0,
            "`layout.size()` must be greater than zero"
        );
        debug_assert!(
            self.arena.head().is_some(),
            "arena is expected to be non-empty, but does not have a head pointer"
        );
        debug_assert!(
            self.arena.tail().is_some(),
            "arena is expected to be non-empty, but does not have a tail pointer"
        );
        // SAFETY: the tail pointer is `Some` per the assertion above.
        let mut tail_segment =
            unsafe { Segment::read(self.arena, self.arena.tail().unwrap_unchecked()) };
        let mut new_segment =
            unsafe { Segment::new_in(self.arena, tail_segment.trailing(), layout)? };
        // Link the new segment after the old tail and make it the tail.
        Segment::connect(&mut tail_segment, &mut new_segment);
        self.arena.set_tail(Some(new_segment.ptr()));
        Some(new_segment.data(layout))
    }

    /// Scans the arena for any unused gap (before, between, or after the
    /// existing segments) large enough to hold `layout`, and splices a
    /// new segment into the list there.
    fn allocate_anywhere(&mut self, layout: Layout) -> Option<NonNull<[u8]>> {
        debug_assert!(
            layout.size() > 0,
            "`layout.size()` must be greater than zero"
        );
        let mut iter = ArenaChunks::new(self.arena);
        // Most recently seen used segment; the new segment links after
        // it, or becomes the head when no used segment precedes the gap.
        let mut prev_segment: Option<Segment<'a, S>> = None;
        while let Some(chunk) = iter.next() {
            match chunk {
                Chunk::Used(segment) => {
                    prev_segment = Some(segment);
                }
                Chunk::Unused(unused) => {
                    if let Some(mut new_segment) =
                        unsafe { Segment::new_in(self.arena, unused, layout) }
                    {
                        // The chunk right after the gap, if it is a used
                        // segment, must be linked after the new segment;
                        // otherwise the new segment becomes the tail.
                        let next_segment = match iter.next() {
                            Some(Chunk::Used(segment)) => Some(segment),
                            _ => None,
                        };
                        match prev_segment {
                            None => self.arena.set_head(Some(new_segment.ptr())),
                            Some(mut prev_segment) => {
                                Segment::connect(&mut prev_segment, &mut new_segment)
                            }
                        }
                        match next_segment {
                            None => self.arena.set_tail(Some(new_segment.ptr())),
                            Some(mut next_segment) => {
                                Segment::connect(&mut new_segment, &mut next_segment)
                            }
                        }
                        return Some(new_segment.data(layout));
                    }
                }
            }
        }
        // No gap in the arena was large enough for `layout`.
        None
    }

    /// Deallocates a block previously returned by [`Self::allocate`].
    fn deallocate(&mut self, data_ptr: SegmentDataPtr<S>, layout: Layout) {
        if layout.size() == 0 {
            // Zero-sized allocations hand out a dangling pointer that
            // belongs to no segment; there is nothing to free.
            return;
        }
        let segment = unsafe { Segment::read(self.arena, data_ptr.to_header_ptr()) };
        self.remove_segment(segment)
    }

    /// Unlinks `segment` from the segment list, fixing up the arena's
    /// head/tail pointers when the segment sits at either end.
    fn remove_segment(&mut self, segment: Segment<'a, S>) {
        debug_assert!(
            self.arena.head().is_some(),
            "arena is expected to be non-empty, but does not have a head pointer"
        );
        debug_assert!(
            self.arena.tail().is_some(),
            "arena is expected to be non-empty, but does not have a tail pointer"
        );
        if segment.prev_ptr().is_none() {
            // Removing the head: its successor (or `None`) becomes head.
            self.arena.set_head(segment.next_ptr());
        }
        if segment.next_ptr().is_none() {
            // Removing the tail: its predecessor (or `None`) becomes tail.
            self.arena.set_tail(segment.prev_ptr());
        }
        segment.disconnect();
    }

    /// Grows an allocation to `new_layout`; thin wrapper over
    /// [`Self::resize`] that checks the `Allocator::grow` precondition.
    #[cfg(feature = "allocator-api")]
    fn grow(
        &mut self,
        data_ptr: SegmentDataPtr<S>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Option<NonNull<[u8]>> {
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout` must be bigger than or equal to `old_layout`"
        );
        self.resize(data_ptr, old_layout, new_layout)
    }

    /// Shrinks an allocation to `new_layout`; thin wrapper over
    /// [`Self::resize`] that checks the `Allocator::shrink` precondition.
    #[cfg(feature = "allocator-api")]
    fn shrink(
        &mut self,
        data_ptr: SegmentDataPtr<S>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Option<NonNull<[u8]>> {
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout` must be smaller than or equal to `old_layout`"
        );
        self.resize(data_ptr, old_layout, new_layout)
    }

    /// Resizes an existing allocation to fit `new_layout`, in place when
    /// possible, copying `min(old, new)` bytes of payload.
    fn resize(
        &mut self,
        data_ptr: SegmentDataPtr<S>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Option<NonNull<[u8]>> {
        if old_layout.size() == 0 || new_layout.size() == 0 {
            // Either side being zero-sized degenerates into a plain
            // free + alloc; both handle the zero-sized case themselves.
            self.deallocate(data_ptr, old_layout);
            return self.allocate(new_layout);
        }
        debug_assert!(
            self.arena.head().is_some(),
            "arena is expected to be non-empty, but does not have a head pointer"
        );
        debug_assert!(
            self.arena.tail().is_some(),
            "arena is expected to be non-empty, but does not have a tail pointer"
        );
        let copy_size = cmp::min(old_layout.size(), new_layout.size());
        let old_segment = unsafe { Segment::read(self.arena, data_ptr.to_header_ptr()) };
        let old_data = old_segment.data(old_layout);
        // First try to rebuild the segment in the space the old segment
        // already has available, avoiding a move when possible.
        match unsafe { Segment::new_in(self.arena, old_segment.available(), new_layout) } {
            None => {
                // Does not fit in place: allocate elsewhere, copy the
                // payload, then release the old segment. If allocation
                // fails the old block is left untouched, as required by
                // the grow/shrink/realloc contracts.
                let new_data = self.allocate(new_layout)?;
                unsafe {
                    // The freshly allocated block cannot overlap the old
                    // one, so the non-overlapping copy is sound here.
                    core::ptr::copy_nonoverlapping(
                        old_data.cast::<u8>().as_ptr(),
                        new_data.cast::<u8>().as_ptr(),
                        copy_size,
                    )
                };
                self.remove_segment(old_segment);
                Some(new_data)
            }
            Some(mut new_segment) => {
                let new_data = new_segment.data(new_layout);
                unsafe {
                    // In-place rebuild: the old and new payload ranges
                    // may overlap, so use the overlap-safe copy.
                    core::ptr::copy(
                        old_data.cast::<u8>().as_ptr(),
                        new_data.cast::<u8>().as_ptr(),
                        copy_size,
                    )
                };
                // Splice the rebuilt segment into the list in the old
                // segment's place, updating head/tail at the ends.
                match old_segment.prev() {
                    None => self.arena.set_head(Some(new_segment.ptr())),
                    Some(mut prev) => Segment::connect(&mut prev, &mut new_segment),
                }
                match old_segment.next() {
                    None => self.arena.set_tail(Some(new_segment.ptr())),
                    Some(mut next) => Segment::connect(&mut new_segment, &mut next),
                }
                new_segment.write();
                Some(new_data)
            }
        }
    }

    /// Returns usage statistics computed from the arena.
    #[inline]
    #[must_use]
    fn usage(&self) -> Usage {
        Usage::get(self.arena)
    }
}