use crate::{
util::{get_bitfields, set_bitfields},
Allocator, Boxed, Error, GenericAlloc, NullAlloc, PoolAlloc, Result,
};
use core::alloc::{GlobalAlloc, Layout};
use core::cell::UnsafeCell;
use core::mem::{size_of, ManuallyDrop};
use core::ptr::{self, NonNull};
/// Growable arena ("memory pool") allocator backed by allocator `A`.
/// Interior mutability (`UnsafeCell`) lets allocation work through `&self`.
#[repr(transparent)]
pub struct Pool<A: Allocator = PoolAlloc>(UnsafeCell<Inner<A>>);
// SAFETY: the pool exclusively owns every page it links; moving it to another
// thread is sound when the backing allocator is `Send`. There is deliberately
// no `Sync` impl: `&Pool` mutates state through the `UnsafeCell`.
unsafe impl<A: Allocator + Send> Send for Pool<A> {}
unsafe impl<A: Allocator> crate::Pool for Pool<A> {}
/// Point-in-time usage statistics of a pool.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct Stat {
    /// Layout (size and alignment) used for every regular page.
    pub layout: Layout,
    /// Total bytes obtained from the backing allocator (pages + large blocks).
    pub size: usize,
    /// Number of pages currently linked into the pool.
    pub page_cnt: usize,
    /// Bytes handed out, including internal `Page`/`Large` headers.
    pub alloc: usize,
    /// Total bytes of externally donated page buffers (see `add_page_buf`).
    pub external_size: usize,
}
/// Pool state kept behind the `UnsafeCell`.
#[repr(C)]
struct Inner<A: Allocator> {
    /// Page currently preferred for regular allocations.
    current: Option<NonNull<Page>>,
    stat: Stat,
    /// Head of the sublist scanned for tiny internal allocations
    /// (`Page`/`Large` headers).
    small: Option<NonNull<Page>>,
    /// Singly linked list of oversized (non-page) allocations.
    large: Option<NonNull<Large>>,
    /// Head of the full page list; walked on release/leak.
    first: Option<NonNull<Page>>,
    /// Backing allocator; wrapped so it can be moved out on reset and
    /// dropped manually in `Inner::drop`.
    alloc: ManuallyDrop<A>,
}
/// Per-page header; lives either at the start of the page buffer itself or
/// inside another page (allocated via `alloc_small`).
#[repr(C)]
struct Page {
    next: Option<NonNull<Page>>,
    /// The page's backing memory.
    buf: NonNull<[u8]>,
    /// Packed bitfield: allocation cursor, "external buffer" flag, and a
    /// saturating fail counter (see the `OFF_*`/`EXTERN_*`/`FAIL_*` consts).
    pos: usize,
}
/// Header tracking one oversized allocation so it can be freed on release.
#[repr(C)]
struct Large {
    next: Option<NonNull<Large>>,
    buf: NonNull<u8>,
    /// Layout the block was allocated with; required to free it correctly.
    layout: Layout,
}
/// A pool boxed inside its own first page (no separate handle allocation);
/// `NullAlloc` because the storage is owned by the pool itself.
pub type BoxedPool<A = PoolAlloc> = Boxed<'static, Pool<A>, NullAlloc>;
impl Pool<PoolAlloc> {
    /// Creates a pool with the default `PoolAlloc` backing allocator.
    /// `page_size == 0` selects the default page size (4096).
    pub fn new(page_size: usize) -> Self {
        Self::new_in(PoolAlloc, page_size)
    }
    /// Like `new`, but the pool handle lives inside its own first page.
    pub fn new_boxed(page_size: usize) -> Result<BoxedPool> {
        Self::new_boxed_in(PoolAlloc, page_size)
    }
}
impl<A: Allocator> Pool<A> {
    /// Releases every allocation owned by `boxed` and rebuilds the pool
    /// inside its own first page, reusing that page's memory. All other
    /// pages and large blocks are returned to the backing allocator.
    pub fn reset_boxed(boxed: Boxed<'_, Self, NullAlloc>) -> BoxedPool<A> {
        let layout = boxed.get_inner().stat.layout;
        // Move the backing allocator out. `Boxed::leak` below prevents
        // `Inner::drop` from ever running, so `alloc` is dropped exactly
        // once (inside the rebuilt pool).
        let alloc = unsafe { ManuallyDrop::take(&mut (*boxed.0.get()).alloc) };
        let buf = unsafe { &mut *Boxed::leak(boxed).0 .0.get() }.leak_first();
        Self::boxed_from_page(alloc, buf, layout)
    }
    /// Consumes the pool, frees all of its pages and large blocks, and
    /// returns a fresh empty pool with the same page size and allocator.
    pub fn reset(self) -> Self {
        let page_size = self.get_inner().stat.layout.size();
        // Suppress `Inner::drop`: after `ManuallyDrop::take` moves `alloc`
        // out below, the drop impl would call `ManuallyDrop::drop` on the
        // same slot again — a double drop of `A`. Release manually instead.
        let this = ManuallyDrop::new(self);
        let inner = unsafe { &mut *this.0.get() };
        inner.release();
        let alloc = unsafe { ManuallyDrop::take(&mut inner.alloc) };
        Self::new_in(alloc, page_size)
    }
}
impl<A: Allocator> Pool<A> {
    /// Re-owns a pool previously leaked to a raw pointer.
    ///
    /// # Safety
    /// `raw` must point to a live pool created by `new_boxed_in` (i.e. one
    /// that lives inside its own first page, whose layout is `stat.layout`)
    /// and must not be used again afterwards.
    pub unsafe fn from_raw(raw: *mut Self) -> BoxedPool<A> {
        let layout = (*raw).get_inner().stat.layout;
        Boxed::from_with(NonNull::new_unchecked(raw), layout, &NullAlloc)
    }
    /// Returns a copy of the pool's current statistics.
    pub fn stat(&self) -> Stat {
        self.get_inner().stat
    }
    /// Creates an empty pool that lazily allocates pages of roughly
    /// `page_size` bytes from `alloc` (0 selects the default page size).
    pub fn new_in(alloc: A, page_size: usize) -> Self {
        Self(UnsafeCell::new(Inner {
            current: None,
            stat: Stat {
                // A page must at least hold two embedded `Page` headers.
                layout: Self::get_page_layout(page_size, size_of::<Page>() * 2),
                size: 0,
                page_cnt: 0,
                alloc: 0,
                external_size: 0,
            },
            small: None,
            large: None,
            first: None,
            alloc: ManuallyDrop::new(alloc),
        }))
    }
    /// Donates an externally owned buffer to the pool, splitting it into
    /// chunks no larger than `PAGE_MAX` (the cursor bitfield's range). The
    /// buffer is never returned to the backing allocator.
    ///
    /// NOTE(review): the pool hands out writable memory carved from `buf`,
    /// so the caller must guarantee exclusive access despite the shared
    /// `&'static [u8]` signature — TODO confirm this contract.
    pub fn add_page_buf(&self, mut buf: &'static [u8]) {
        let inner = unsafe { &mut *self.0.get() };
        while buf.len() > PAGE_MAX {
            inner.add_page_buf(&buf[0..PAGE_MAX]);
            buf = &buf[PAGE_MAX..];
        }
        if !buf.is_empty() {
            inner.add_page_buf(buf);
        }
    }
    /// Creates a pool that lives inside its own first page, so the handle
    /// itself needs no separate allocation.
    pub fn new_boxed_in<'a>(alloc: A, page_size: usize) -> Result<Boxed<'a, Self, NullAlloc>> {
        let layout = Self::get_page_layout(page_size, size_of::<Page>() * 2 + size_of::<Self>());
        let buf = unsafe { alloc.allocate(layout)? };
        Ok(Pool::boxed_from_page(alloc, buf, layout))
    }
    /// Builds a pool in place at the start of `buf`: a `Page` header first,
    /// then the `Pool` itself; user allocations start right after both.
    fn boxed_from_page<'a>(
        alloc: A,
        buf: NonNull<[u8]>,
        layout: Layout,
    ) -> Boxed<'a, Self, NullAlloc> {
        let page = buf.cast::<Page>().as_ptr();
        let pool = unsafe { page.add(1) } as *mut Self;
        let offset = size_of::<Self>() + size_of::<Page>();
        unsafe {
            // Field-by-field init: the memory is uninitialized, so no
            // reference to a whole `Page`/`Pool` may be formed yet.
            ptr::addr_of_mut!((*page).next).write(None);
            ptr::addr_of_mut!((*page).buf).write(buf);
            ptr::addr_of_mut!((*page).pos).write(offset);
        }
        let page = unsafe { NonNull::new_unchecked(page) };
        let inner = Inner::<A> {
            current: Some(page),
            small: Some(page),
            first: Some(page),
            large: None,
            stat: Stat {
                layout,
                size: buf.len(),
                page_cnt: 1,
                alloc: offset,
                external_size: 0,
            },
            alloc: ManuallyDrop::new(alloc),
        };
        unsafe {
            ptr::addr_of_mut!((*pool).0).write(UnsafeCell::new(inner));
        }
        let this = unsafe { NonNull::new_unchecked(pool) };
        // `NullAlloc`: the storage belongs to the pool itself, so the box
        // must not free it on drop.
        unsafe { Boxed::from_with(this, layout, &NullAlloc) }
    }
    /// Page layout: size clamped to `[min, PAGE_MAX]`, alignment the
    /// smallest power of two covering the size (capped at 4096).
    const fn get_page_layout(page_size: usize, min: usize) -> Layout {
        let size = Self::get_page_size(page_size, min);
        let align = Self::get_page_align(size);
        unsafe { Layout::from_size_align_unchecked(size, align) }
    }
    const fn get_page_size(page_size: usize, min: usize) -> usize {
        // 0 selects the default page size. The default is clamped to `min`
        // as well, so a page can always hold its mandatory headers even if
        // `min` ever exceeds 4096 (the old code returned 4096 outright).
        let page_size = if page_size == 0 { 4096 } else { page_size };
        if page_size > min {
            if page_size < PAGE_MAX {
                page_size
            } else {
                PAGE_MAX
            }
        } else {
            min
        }
    }
    const fn get_page_align(page_size: usize) -> usize {
        if page_size >= 4096 {
            4096
        } else if (page_size & (page_size - 1)) == 0 {
            // Already a power of two.
            page_size
        } else {
            // Round up to the next power of two; `page_size < 4096` here, so
            // propagating the top bit through 15 positions is sufficient.
            let mut page_size = page_size | (page_size >> 1);
            page_size |= page_size >> 2;
            page_size |= page_size >> 4;
            page_size |= page_size >> 8;
            page_size + 1
        }
    }
    fn get_inner(&self) -> &Inner<A> {
        unsafe { &*self.0.get() }
    }
    /// A request is "normal" (served from a page) only when both its size
    /// and its alignment are strictly below the page layout's; everything
    /// else takes the dedicated large-allocation path.
    fn is_normal(&self, layout: Layout) -> bool {
        self.get_inner().stat.layout.size() > layout.size()
            && self.get_inner().stat.layout.align() > layout.align()
    }
    /// Allocates a buffer and runs constructor `f` on it; on `Err` the
    /// allocation is rolled back (page cursor rewound or large block freed).
    fn alloc_buf<F>(&self, layout: Layout, f: F) -> Result<NonNull<[u8]>>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<()>,
    {
        let inner = unsafe { &mut *self.0.get() };
        if self.is_normal(layout) {
            inner.alloc_normal(layout, |ptr| f(ptr).map(|_| ptr))
        } else {
            inner.alloc_large(layout, |ptr| f(ptr).map(|_| ptr), |_| {})
        }
    }
    /// Arena semantics: individual frees are a no-op; memory is reclaimed
    /// only when the pool is reset or dropped.
    fn free_buf(&self, _ptr: NonNull<[u8]>, _layout: Layout) {}
}
// SAFETY: delegates to the inherent implementations; `free_buf` is a no-op
// (arena semantics), so returned pointers stay valid for the pool's lifetime.
unsafe impl<A: Allocator> Allocator for Pool<A> {
    unsafe fn alloc_buf<F>(&self, layout: Layout, f: F) -> Result<NonNull<[u8]>>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<()>,
    {
        Pool::alloc_buf(self, layout, f)
    }
    unsafe fn free_buf(&self, ptr: NonNull<[u8]>, layout: Layout) {
        Pool::free_buf(self, ptr, layout)
    }
}
unsafe impl<A: Allocator> GlobalAlloc for Pool<A> {
    /// `GlobalAlloc` adapter: a failed pool allocation becomes a null
    /// pointer, as the trait contract requires.
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        Allocator::alloc_buf(self, layout, |_| Ok(()))
            .map(|buf| buf.cast::<u8>().as_ptr())
            .unwrap_or(ptr::null_mut())
    }
    /// Deallocation is forwarded to the generic adapter.
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        GenericAlloc::dealloc(self, ptr, layout)
    }
}
impl<A: Allocator> Inner<A> {
    /// Writes a fresh `Page` header into `page` covering buffer `buf`, with
    /// the allocation cursor starting at `pos`.
    fn init_page<'a>(page: NonNull<[u8]>, buf: NonNull<[u8]>, pos: usize) -> &'a mut Page {
        let mut page = page.cast::<Page>();
        let page_node = unsafe { page.as_mut() };
        page_node.init(buf, pos);
        page_node
    }
    /// Allocates a new page from the backing allocator. The `Page` header is
    /// preferably placed in an existing page (`alloc_small`); otherwise it is
    /// embedded at the start of the new buffer itself.
    fn alloc_page(&mut self) -> Option<NonNull<Page>> {
        if let Ok(buf) = unsafe { self.alloc.allocate(self.stat.layout) } {
            self.stat.size += self.stat.layout.size();
            self.stat.page_cnt += 1;
            let layout = Layout::new::<Page>();
            if let Ok(page) = self.alloc_small(layout) {
                Some(NonNull::from(Self::init_page(page, buf, 0)))
            } else {
                // Header embedded in the page: account for it explicitly
                // (the `alloc_small` path accounts inside `alloc_buf`).
                self.stat.alloc += layout.size();
                Some(NonNull::from(Self::init_page(buf, buf, layout.size())))
            }
        } else {
            None
        }
    }
    /// Registers an externally owned buffer as a page. External pages are
    /// flagged so `release_page` never hands them to the backing allocator.
    fn add_page_buf(&mut self, buf: &'static [u8]) {
        let layout = Layout::new::<Page>();
        let buf = NonNull::from(buf);
        let page;
        if let Ok(page_buf) = self.alloc_small(layout) {
            // BUGFIX: this branch must also mark the page external — `buf`
            // was not allocated by `self.alloc`, so freeing it with
            // `stat.layout` in `release_page` would be unsound.
            page = Self::init_page(page_buf, buf, 0).set_external();
        } else if buf.len() > layout.size() {
            self.stat.alloc += layout.size();
            page = Self::init_page(buf, buf, layout.size()).set_external();
        } else {
            // Too small to even hold its own header: silently ignored.
            return;
        }
        self.stat.external_size += buf.len();
        if let Some(mut current) = self.current {
            let current = unsafe { current.as_mut() };
            // Splice right after the current page so it is tried soon.
            page.next = current.next;
            current.next = Some(NonNull::from(page));
        } else {
            self.current = Some(NonNull::from(page));
            self.first = self.current;
            self.small = self.current;
        }
    }
    /// Allocates a tiny internal header from the `small` sublist; never
    /// allocates a new page (returns `Err` instead).
    fn alloc_small(&mut self, layout: Layout) -> Result<NonNull<[u8]>> {
        if let Some(first) = self.small {
            self.alloc_buf(
                first,
                layout,
                Ok,
                |_| None,
                |pool, page| pool.update_small(page),
            )
        } else {
            Err(Error::default())
        }
    }
    /// Advances the `small` head past a page that failed to fit a request.
    fn update_small(&mut self, page: &Page) {
        if ptr::eq(self.small.unwrap().as_ptr(), page) {
            self.small = page.next;
        }
    }
    /// Allocates from the current page, walking/extending the page list as
    /// needed; `f` is the constructor callback.
    fn alloc_normal<F>(&mut self, layout: Layout, f: F) -> Result<NonNull<[u8]>>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<NonNull<[u8]>>,
    {
        if let Some(current) = self.current {
            self.alloc_buf(
                current,
                layout,
                f,
                |pool| pool.alloc_page(),
                |pool, page| pool.update_current(page),
            )
        } else if let Some(page) = self.alloc_page() {
            // First allocation ever: the fresh page seeds all three lists.
            self.current = Some(page);
            self.first = self.current;
            self.small = self.current;
            self.alloc_buf(
                page,
                layout,
                f,
                |pool| pool.alloc_page(),
                |pool, page| pool.update_current(page),
            )
        } else {
            Err(Error::last())
        }
    }
    /// Retires the current page when it is nearly full (cannot even fit
    /// another `Page` header) or has failed too many times.
    fn update_current(&mut self, page: &Page) {
        if ptr::eq(self.current.unwrap().as_ptr(), page)
            && (page.get_pos() + size_of::<Page>() > self.stat.layout.size()
                || page.get_fail() == FAIL_MAX)
        {
            self.current = page.next;
        }
    }
    /// Allocates an oversized block directly from the backing allocator and
    /// tracks it in the `large` list. `dtor` undoes `f`'s side effects if
    /// tracking-node allocation fails after `f` succeeded.
    fn alloc_large<F1, F2>(&mut self, layout: Layout, f: F1, dtor: F2) -> Result<NonNull<[u8]>>
    where
        F1: FnOnce(NonNull<[u8]>) -> Result<NonNull<[u8]>>,
        F2: FnOnce(NonNull<[u8]>),
    {
        let large = unsafe { self.alloc.allocate(layout)? };
        match f(large) {
            Ok(_) => {}
            Err(error) => {
                unsafe { self.alloc.free_buf(large, layout) };
                return Err(error);
            }
        }
        let node = match self.alloc_large_node() {
            Ok(node) => node,
            Err(error) => {
                dtor(large);
                unsafe { self.alloc.free_buf(large, layout) };
                return Err(error);
            }
        };
        self.init_large(large, node, layout);
        self.stat.size += layout.size();
        self.stat.alloc += layout.size();
        Ok(large)
    }
    /// Writes a `Large` header into `node` and links it at the list head.
    fn init_large(&mut self, large: NonNull<[u8]>, node: NonNull<[u8]>, layout: Layout) {
        let node = node.cast::<Large>();
        let large_node = node.as_ptr();
        unsafe {
            ptr::addr_of_mut!((*large_node).buf).write(large.cast::<u8>());
            ptr::addr_of_mut!((*large_node).next).write(self.large);
            ptr::addr_of_mut!((*large_node).layout).write(layout);
        }
        self.large = Some(node);
    }
    /// Gets storage for a `Large` header: small sublist first, then the
    /// regular page path.
    fn alloc_large_node(&mut self) -> Result<NonNull<[u8]>> {
        if let Ok(node) = self.alloc_small(Layout::new::<Large>()) {
            Ok(node)
        } else {
            self.alloc_normal(Layout::new::<Large>(), Ok)
        }
    }
    /// Generic page-walk allocator: tries `page`, follows `next` links,
    /// appends pages via `on_new_page`, and reports full pages to `on_fail`
    /// (which may advance a list head).
    fn alloc_buf<F1, F2, F3>(
        &mut self,
        mut page: NonNull<Page>,
        layout: Layout,
        f: F1,
        mut on_new_page: F2,
        mut on_fail: F3,
    ) -> Result<NonNull<[u8]>>
    where
        F1: FnOnce(NonNull<[u8]>) -> Result<NonNull<[u8]>>,
        F2: FnMut(&mut Self) -> Option<NonNull<Page>>,
        F3: FnMut(&mut Self, &Page),
    {
        loop {
            let page_node = unsafe { page.as_mut() };
            match page_node.alloc_prepare(self.stat.layout.size(), layout) {
                Some(pos) => {
                    return page_node.alloc(pos, layout.size(), f).map(|ptr| {
                        self.stat.alloc += layout.size();
                        ptr
                    });
                }
                None => {
                    if let Some(next) = page_node.next {
                        page = next;
                    } else if let Some(next) = on_new_page(self) {
                        page_node.next = Some(next);
                        page = next;
                    } else {
                        return Err(Error::default());
                    }
                    on_fail(self, page_node);
                }
            }
        }
    }
    /// Detaches the first page (kept by the caller) and releases everything
    /// else; used by `reset_boxed` to reuse the page holding the pool.
    fn leak_first(&mut self) -> NonNull<[u8]> {
        let page = unsafe { self.first.unwrap().as_mut() };
        self.first = page.next;
        self.release();
        page.buf
    }
    /// Frees all large blocks and pages and zeroes the counters.
    fn release(&mut self) {
        self.stat.page_cnt = 0;
        self.stat.size = 0;
        self.stat.alloc = 0;
        self.release_large();
        self.release_page();
    }
    fn release_large(&mut self) {
        while let Some(mut large) = self.large {
            let large_node = unsafe { large.as_mut() };
            if large_node.layout.size() > 0 {
                unsafe {
                    self.alloc
                        .dealloc(large_node.buf.as_ptr(), large_node.layout)
                };
            }
            self.large = large_node.next;
        }
    }
    fn release_page(&mut self) {
        // Reverse the list first: `Page` headers may live inside earlier
        // pages, so pages must be freed back-to-front to avoid reading a
        // header out of already-freed memory.
        let mut local = None;
        while let Some(mut page) = self.first {
            let page_node = unsafe { page.as_mut() };
            self.first = page_node.next;
            page_node.next = local;
            local = Some(page_node.into());
        }
        while let Some(mut page) = local {
            let page_node = unsafe { page.as_mut() };
            local = page_node.next;
            // Externally donated buffers are never freed by the pool.
            if !page_node.get_external() {
                unsafe { self.alloc.free_buf(page_node.buf, self.stat.layout) };
            }
        }
    }
}
impl<A: Allocator> Drop for Inner<A> {
    fn drop(&mut self) {
        // Free large blocks and pages first; they are deallocated through
        // `self.alloc`, which must therefore outlive them.
        self.release();
        // SAFETY: `alloc` is never touched again after this point.
        unsafe { ManuallyDrop::drop(&mut self.alloc) };
    }
}
impl Page {
    /// Resets the header: no successor, covering `buf`, cursor at `pos`
    /// (assigning `pos` directly also clears the fail counter and the
    /// external flag).
    fn init(&mut self, buf: NonNull<[u8]>, pos: usize) {
        self.next = None;
        self.buf = buf;
        self.pos = pos;
    }
    /// Saturating count of allocations that did not fit in this page.
    fn get_fail(&self) -> usize {
        get_bitfields::<FAIL_OFF, FAIL_BITS>(self.pos)
    }
    fn set_fail(&mut self, cnt: usize) -> &mut Self {
        self.pos = set_bitfields::<FAIL_OFF, FAIL_BITS>(self.pos, cnt);
        self
    }
    /// Current allocation cursor (byte offset into `buf`).
    fn get_pos(&self) -> usize {
        get_bitfields::<OFF_OFF, OFF_BITS>(self.pos)
    }
    fn set_pos(&mut self, pos: usize) -> &mut Self {
        self.pos = set_bitfields::<OFF_OFF, OFF_BITS>(self.pos, pos);
        self
    }
    /// Marks the buffer as externally owned (never freed by the pool).
    fn set_external(&mut self) -> &mut Self {
        self.pos = set_bitfields::<EXTERN_OFF, EXTERN_BITS>(self.pos, 1);
        self
    }
    fn get_external(&self) -> bool {
        get_bitfields::<EXTERN_OFF, EXTERN_BITS>(self.pos) > 0
    }
    /// Returns the aligned offset at which `layout` fits in this page, or
    /// bumps the fail counter and returns `None`.
    fn alloc_prepare(&mut self, page_size: usize, layout: Layout) -> Option<usize> {
        let size = layout.size();
        let align = layout.align();
        let pos = (self.get_pos() + align - 1) & !(align - 1);
        // BUGFIX: cap capacity at the real buffer length. Externally
        // registered pages (see `add_page_buf`) may be smaller than the
        // pool's page layout; checking only against `page_size` let the
        // slice indexing in `alloc` run past `buf` and panic. Also guards
        // the `cap - size` subtraction against underflow.
        let cap = if self.buf.len() < page_size {
            self.buf.len()
        } else {
            page_size
        };
        if cap >= size && cap - size >= pos {
            Some(pos)
        } else {
            let fail = self.get_fail();
            if fail < FAIL_MAX {
                self.set_fail(fail + 1);
            }
            None
        }
    }
    /// Carves `size` bytes at `pos`, advances the cursor, and runs ctor `f`;
    /// if `f` fails the cursor is rewound, but only when nothing else was
    /// allocated from this page in the meantime (nested pool allocations
    /// inside `f` are legal — see `test_alloc_in_ctor`).
    fn alloc<F>(&mut self, pos: usize, size: usize, f: F) -> Result<NonNull<[u8]>>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<NonNull<[u8]>>,
    {
        let slice = unsafe { self.buf.as_mut() };
        let ptr = NonNull::from(&mut slice[pos..pos + size]);
        let cur = self.get_pos();
        self.set_pos(pos + size);
        match f(ptr) {
            Ok(buf) => Ok(buf),
            Err(error) => {
                if self.get_pos() == pos + size {
                    self.set_pos(cur);
                }
                Err(error)
            }
        }
    }
}
// `Page::pos` bitfield layout, low bits to high: allocation cursor
// (`OFF_*`), external-buffer flag (`EXTERN_*`), saturating fail counter
// (`FAIL_*`). Together they fill exactly one `usize`.
const OFF_OFF: usize = 0;
const OFF_BITS: usize = usize::BITS as usize - 3;
const EXTERN_OFF: usize = OFF_BITS + OFF_OFF;
const EXTERN_BITS: usize = 1;
const FAIL_OFF: usize = EXTERN_BITS + EXTERN_OFF;
const FAIL_BITS: usize = 2;
/// A page is retired from `current` after this many failed fits.
const FAIL_MAX: usize = (1_usize << FAIL_BITS) - 1;
/// Largest representable page size (limited by the cursor bitfield).
const PAGE_MAX: usize = (1_usize << OFF_BITS) - 1;
#[cfg(test)]
mod test {
    extern crate std;
    use super::{Large, Page};
    use crate::{Boxed, Error, MemPool, PoolAlloc, Result};
    use core::alloc::Layout;
    use core::mem::{align_of, size_of, size_of_val};
    struct Foo {
        val: i32,
    }
    // Sequential small allocations are placed back to back in one page.
    #[test]
    fn test_t() {
        let pool = MemPool::new(0);
        let foo = Boxed::new_in(&pool, Foo { val: 100 }).unwrap();
        assert_eq!(foo.val, 100);
        let bar = Boxed::new_in(&pool, Foo { val: 101 }).unwrap();
        assert_eq!(foo.val, 100);
        assert_eq!(bar.val, 101);
        assert_eq!(
            size_of::<Foo>(),
            &bar.val as *const _ as *const u8 as usize - &foo.val as *const _ as *const u8 as usize
        );
        let stat = pool.stat();
        assert_eq!(stat.size, stat.layout.size());
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.alloc, size_of::<Page>() + size_of::<Foo>() * 2);
    }
    // An oversized request goes through the large path and is accounted in
    // both `size` and `alloc`, plus one `Large` tracking node.
    #[test]
    fn test_large() {
        let pool = MemPool::new(0);
        let foo = Boxed::uninit_slice_in::<Foo>(&pool, 2000);
        assert!(foo.is_ok());
        let stat = pool.stat();
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.size, stat.layout.size() + 2000 * size_of::<Foo>());
        assert_eq!(
            stat.alloc,
            size_of::<Page>() + size_of::<Large>() + 2000 * size_of::<Foo>()
        );
    }
    // A small slice still fits the normal page path.
    #[test]
    fn test_t_array() {
        let pool = MemPool::new(0);
        let foo = Boxed::new_slice_then_in(&pool, 10, |_| Ok(Foo { val: 0 })).unwrap();
        foo.iter().for_each(|obj| assert_eq!(0, obj.val));
        let stat = pool.stat();
        assert_eq!(stat.size, stat.layout.size());
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.alloc, size_of::<Page>() + size_of::<Foo>() * 10);
    }
    // A failing constructor rolls the page cursor back (no net allocation).
    #[test]
    fn test_t_error() {
        let pool = MemPool::new(0);
        let foo = Boxed::new_in(&pool, Foo { val: 100 }).unwrap();
        assert_eq!(foo.val, 100);
        let bar = Boxed::new_then_in::<Foo, _>(&pool, || Err(Error::default()));
        assert!(bar.is_err());
        assert_eq!(foo.val, 100);
        let stat = pool.stat();
        assert_eq!(stat.size, stat.layout.size());
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.alloc, size_of::<Page>() + size_of::<Foo>());
    }
    // A failing constructor on the large path frees the large block again.
    #[test]
    fn test_large_error() {
        let pool = MemPool::new(0);
        let foo = Boxed::new_in(&pool, Foo { val: 100 }).unwrap();
        assert_eq!(foo.val, 100);
        let bar = Boxed::new_slice_then_in::<Foo, _>(&pool, 100, |_| Err(Error::default()));
        assert!(bar.is_err());
        assert_eq!(foo.val, 100);
        let stat = pool.stat();
        assert_eq!(stat.size, stat.layout.size());
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.alloc, size_of::<Page>() + size_of::<Foo>());
    }
    // Mid-slice constructor failure drops the already-built elements and
    // rolls the allocation back.
    #[test]
    fn test_array_error() {
        let pool = MemPool::new(0);
        struct Foo;
        static mut CNT: usize = 0;
        impl Drop for Foo {
            fn drop(&mut self) {
                unsafe {
                    CNT += 1;
                }
            }
        }
        unsafe { CNT = 0 };
        let array = Boxed::new_slice_then_in(&pool, 10, |n| {
            if n < 9 {
                Ok(Foo)
            } else {
                Err(Error::default())
            }
        });
        assert!(array.is_err());
        unsafe {
            // Exactly the 9 successfully constructed elements were dropped.
            assert_eq!(CNT, 9);
        }
        let stat = pool.stat();
        assert_eq!(stat.size, stat.layout.size());
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.alloc, size_of::<Page>());
    }
    // A boxed pool lives inside its own first page; the first user
    // allocation lands directly after the pool struct.
    #[test]
    fn test_new_boxed() {
        let boxed = MemPool::new_boxed(0);
        assert!(boxed.is_ok());
        let boxed = boxed.unwrap();
        let stat = boxed.stat();
        assert_eq!(stat.size, stat.layout.size());
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.alloc, size_of::<Page>() + size_of_val(&*boxed));
        let foo = Boxed::new_in(&*boxed, Foo { val: 1 });
        assert!(foo.is_ok());
        let foo = foo.unwrap();
        assert_eq!(foo.val, 1);
        assert_eq!(
            size_of_val(&*boxed),
            foo.as_ref() as *const _ as *const u8 as usize
                - boxed.as_ref() as *const _ as *const u8 as usize
        );
        let stat = boxed.stat();
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(
            stat.alloc,
            size_of::<Page>() + size_of_val(&*boxed) + size_of::<Foo>()
        );
        assert_eq!(stat.size, stat.layout.size());
    }
    // Requests that don't fit the current page spill to new pages, and
    // partially-used earlier pages are revisited for smaller requests.
    #[test]
    fn test_page_fail() {
        let boxed = MemPool::new_boxed(1024);
        assert!(boxed.is_ok());
        let boxed = boxed.unwrap();
        let len1 = 1024 - core::mem::size_of_val(&*boxed);
        let val1 =
            Boxed::new_buf_in(&*boxed, Layout::from_size_align(len1 + 1, 1).unwrap()).unwrap();
        let stat = boxed.stat();
        assert_eq!(stat.page_cnt, 2);
        let _val = Boxed::new_buf_in(&*boxed, Layout::from_size_align(len1 + 1, 1).unwrap());
        let stat = boxed.stat();
        assert_eq!(stat.page_cnt, 3);
        let val = Boxed::new_in(&*boxed, 1_u8).unwrap();
        assert_eq!(
            boxed.as_ptr() as *const u8 as usize
                + core::mem::size_of_val(&*boxed)
                + core::mem::size_of::<Page>() * 2,
            val.as_ptr() as *const u8 as usize
        );
        let len2 = len1 - core::mem::size_of::<Page>() * 2 - 1;
        let _val = Boxed::new_buf_in(&*boxed, Layout::from_size_align(len2 + 1, 1).unwrap());
        let stat = boxed.stat();
        assert_eq!(stat.page_cnt, 4);
        let val = Boxed::new_in(&*boxed, 1_u8).unwrap();
        assert_eq!(
            val1.as_ptr() as *const u8 as usize + len1 + 1,
            val.as_ptr() as *const u8 as usize
        );
    }
    // The allocation cursor is rounded up to the request's alignment.
    #[test]
    fn test_aligned() {
        let boxed = MemPool::new_boxed(1024).unwrap();
        let val_u8 = Boxed::new_in(&*boxed, 1_u8).unwrap();
        let val_u64 = Boxed::new_in(&*boxed, 1_u64).unwrap();
        assert_eq!(
            boxed.as_ptr() as *const u8 as usize + core::mem::size_of_val(&*boxed),
            val_u8.as_ptr() as *const u8 as usize
        );
        assert_eq!(
            val_u8.as_ptr() as *const u8 as usize + 8,
            val_u64.as_ptr() as *const u8 as usize
        );
        let stat = boxed.stat();
        assert_eq!(
            stat.alloc,
            core::mem::size_of_val(&*boxed) + core::mem::size_of::<Page>() + 8 + 1
        );
    }
    // Resetting a boxed pool reuses the first page: addresses repeat.
    #[test]
    fn test_reset_boxed() {
        let mut pool = MemPool::new_boxed(0).unwrap();
        let addr1;
        {
            let val_u32 = Boxed::new_slice_then_in(&*pool, 100, |_| Ok(0_u32)).unwrap();
            addr1 = val_u32.as_ptr() as *const u8 as usize;
        }
        pool = MemPool::reset_boxed(pool);
        {
            let val_u32 = Boxed::new_slice_then_in(&*pool, 100, |_| Ok(0_u32)).unwrap();
            assert_eq!(addr1, val_u32.as_ptr() as *const u8 as usize);
            let _ = Boxed::new_slice_then_in(&*pool, 1000, |_| Ok(0_u32)).unwrap();
            let stat = pool.stat();
            assert_eq!(stat.page_cnt, 2);
        }
        pool = MemPool::reset_boxed(pool);
        let val_u32 = Boxed::uninit_slice_in::<u32>(&*pool, 100).unwrap();
        assert_eq!(addr1, val_u32.as_ptr() as *const u8 as usize);
    }
    // Nested allocations inside a constructor are legal; a failed outer
    // constructor must not rewind past memory the inner one claimed.
    #[test]
    fn test_alloc_in_ctor() {
        struct Foo<'a> {
            val1: i32,
            val2: crate::Boxed<'a, i32, MemPool<PoolAlloc>>,
        }
        let pool = MemPool::new(0);
        let stat = pool.stat();
        assert_eq!(stat.alloc, 0);
        let foo = Boxed::new_then_in(&pool, || {
            Ok(Foo {
                val1: 99,
                val2: Boxed::new_in(&pool, 100)?,
            })
        });
        assert!(foo.is_ok());
        let foo = foo.unwrap();
        assert_eq!(foo.val1, 99);
        assert_eq!(*foo.val2, 100);
        let stat = pool.stat();
        assert_eq!(
            stat.alloc,
            size_of::<Page>() + size_of::<Foo>() + size_of::<i32>()
        );
        {
            let err1 = Boxed::new_then_in(&pool, || {
                Ok(Foo {
                    val1: 0,
                    val2: Boxed::new_then_in(&pool, || -> Result<i32> { Err(Error::default()) })?,
                })
            });
            assert!(err1.is_err());
        }
        let stat = pool.stat();
        assert_eq!(
            stat.alloc,
            size_of::<Page>() + size_of::<Foo>() + size_of::<i32>()
        );
        let bar = Boxed::new_then_in(&pool, || {
            Ok(Foo {
                val1: 0,
                val2: Boxed::new_in(&pool, 100_i32)?,
            })
        })
        .unwrap();
        assert_eq!(
            &foo.val1 as *const _ as *const u8 as usize + size_of::<Foo>() + align_of::<Foo>(),
            &bar.val1 as *const _ as *const u8 as usize
        );
        core::mem::drop(bar);
        core::mem::drop(foo);
        core::mem::drop(pool);
    }
}