use crate::{
    util::{get_bitfields, set_bitfields},
    Allocator, Boxed, Error, GenericAlloc, NullAlloc, PoolAlloc, Result,
};
use core::alloc::{GlobalAlloc, Layout};
use core::cell::UnsafeCell;
use core::mem::{size_of, ManuallyDrop};
use core::ptr::{self, NonNull};
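
/// An arena-style pool allocator. Allocations are bump-allocated from a
/// linked list of pages obtained from the backing allocator `A`; requests
/// that do not fit a page are served as individually tracked "large" blocks.
/// Individual frees are no-ops: memory is reclaimed only when the pool is
/// reset or dropped.
///
/// A minimal usage sketch (not compiled as a doctest; see the tests at the
/// bottom of this file for complete examples):
///
/// ```ignore
/// let pool = Pool::new(0); // 0 selects the default 4096-byte page size
/// let value = Boxed::new_in(&pool, 42_i32).unwrap();
/// assert_eq!(*value, 42);
/// ```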
#[repr(transparent)]
pub struct Pool<A: Allocator = PoolAlloc>(UnsafeCell<Inner<A>>);

unsafe impl<A: Allocator + Send> Send for Pool<A> {}
unsafe impl<A: Allocator> crate::Pool for Pool<A> {}
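
/// A snapshot of pool statistics as returned by [`Pool::stat`]: `layout` is
/// the layout used for each pool page, `size` the total bytes currently owned
/// by the pool (pages plus large blocks), `page_cnt` the number of pages,
/// `alloc` the bytes consumed so far (including `Page` and `Large` headers),
/// and `external_size` the bytes contributed through [`Pool::add_page_buf`].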
#[derive(Copy, Clone)]
#[repr(C)]
pub struct Stat {
    pub layout: Layout,
    pub size: usize,
    pub page_cnt: usize,
    pub alloc: usize,
    pub external_size: usize,
}

#[repr(C)]
struct Inner<A: Allocator> {
    current: Option<NonNull<Page>>,
    stat: Stat,
    small: Option<NonNull<Page>>,
    large: Option<NonNull<Large>>,
    first: Option<NonNull<Page>>,
    alloc: ManuallyDrop<A>,
}
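
/// Header for a single pool page. `buf` is the page's backing buffer, `next`
/// links pages into a singly linked list, and `pos` is a packed word holding
/// the current bump offset, an "external buffer" flag, and a small failure
/// counter (see the `OFF_*`, `EXTERN_*` and `FAIL_*` constants below).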
#[repr(C)]
struct Page {
    next: Option<NonNull<Page>>,
    buf: NonNull<[u8]>,
    pos: usize,
}
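
/// Header for an oversized allocation obtained directly from the backing
/// allocator. The headers themselves are small and are placed inside normal
/// pages; they form a singly linked list so the blocks can be released
/// together when the pool is reset or dropped.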
#[repr(C)]
struct Large {
    next: Option<NonNull<Large>>,
    buf: NonNull<u8>,
    layout: Layout,
}
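
/// A pool whose `Pool` value is stored inside its own first page (see
/// [`Pool::new_boxed`]); `NullAlloc` is used because the box itself owns no
/// separate allocation.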
pub type BoxedPool<A = PoolAlloc> = Boxed<'static, Pool<A>, NullAlloc>;

impl Pool<PoolAlloc> {
    pub fn new(page_size: usize) -> Self {
        Self::new_in(PoolAlloc, page_size)
    }
    pub fn new_boxed(page_size: usize) -> Result<BoxedPool> {
        Self::new_boxed_in(PoolAlloc, page_size)
    }
}

impl<A: Allocator> Pool<A> {
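    /// Rebuilds a boxed pool in place: large blocks and all pages except the
    /// first are returned to the backing allocator, and the first page is
    /// reused for the fresh pool, so early allocations land at the same
    /// addresses again (see `test_reset_boxed`).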
    pub fn reset_boxed(boxed: Boxed<'_, Self, NullAlloc>) -> BoxedPool<A> {
        let layout = boxed.get_inner().stat.layout;
        let alloc = unsafe { ManuallyDrop::take(&mut (*boxed.0.get()).alloc) };
        let buf = unsafe { &mut *Boxed::leak(boxed).0 .0.get() }.leak_first();
        Self::boxed_from_page(alloc, buf, layout)
    }

    pub fn reset(self) -> Self {
        let page_size = self.get_inner().stat.layout.size();
        // Release the old pages, move the allocator out, and skip Drop so the
        // allocator is not dropped a second time by Inner::drop.
        unsafe { &mut *self.0.get() }.release();
        let alloc = unsafe { ManuallyDrop::take(&mut (*self.0.get()).alloc) };
        core::mem::forget(self);
        Self::new_in(alloc, page_size)
    }
}

impl<A: Allocator> Pool<A> {
    pub unsafe fn from_raw(raw: *mut Self) -> BoxedPool<A> {
        let layout = (*raw).get_inner().stat.layout;
        Boxed::from_with(NonNull::new_unchecked(raw), layout, &NullAlloc)
    }

    pub fn stat(&self) -> Stat {
        self.get_inner().stat
    }

    pub fn new_in(alloc: A, page_size: usize) -> Self {
        Self(UnsafeCell::new(Inner {
            current: None,
            stat: Stat {
                layout: Self::get_page_layout(page_size, size_of::<Page>() * 2),
                size: 0,
                page_cnt: 0,
                alloc: 0,
                external_size: 0,
            },
            small: None,
            large: None,
            first: None,
            alloc: ManuallyDrop::new(alloc),
        }))
    }
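
    /// Donates a caller-provided `'static` buffer to the pool as one or more
    /// extra pages; buffers longer than `PAGE_MAX` are split into `PAGE_MAX`
    /// chunks. Donated pages are marked external and are never passed back to
    /// the backing allocator.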
    pub fn add_page_buf(&self, mut buf: &'static [u8]) {
        let inner = unsafe { &mut *self.0.get() };
        while buf.len() > PAGE_MAX {
            inner.add_page_buf(&buf[0..PAGE_MAX]);
            buf = &buf[PAGE_MAX..];
        }
        if !buf.is_empty() {
            inner.add_page_buf(buf);
        }
    }

    pub fn new_boxed_in<'a>(alloc: A, page_size: usize) -> Result<Boxed<'a, Self, NullAlloc>> {
        let layout = Self::get_page_layout(page_size, size_of::<Page>() * 2 + size_of::<Self>());
        let buf = unsafe { alloc.allocate(layout)? };
        Ok(Pool::boxed_from_page(alloc, buf, layout))
    }

    fn boxed_from_page<'a>(
        alloc: A,
        buf: NonNull<[u8]>,
        layout: Layout,
    ) -> Boxed<'a, Self, NullAlloc> {
        // The first page is laid out as [Page header][Pool value][allocations...],
        // so the pool itself lives inside the buffer it manages.
        let page = buf.cast::<Page>().as_ptr();
        let pool = unsafe { page.add(1) } as *mut Self;
        let offset = size_of::<Self>() + size_of::<Page>();

        unsafe {
            ptr::addr_of_mut!((*page).next).write(None);
            ptr::addr_of_mut!((*page).buf).write(buf);
            ptr::addr_of_mut!((*page).pos).write(offset);
        }
        let page = unsafe { NonNull::new_unchecked(page) };

        let inner = Inner::<A> {
            current: Some(page),
            small: Some(page),
            first: Some(page),
            large: None,
            stat: Stat {
                layout,
                size: buf.len(),
                page_cnt: 1,
                alloc: offset,
                external_size: 0,
            },
            alloc: ManuallyDrop::new(alloc),
        };
        unsafe {
            ptr::addr_of_mut!((*pool).0).write(UnsafeCell::new(inner));
        }

        let this = unsafe { NonNull::new_unchecked(pool) };
        unsafe { Boxed::from_with(this, layout, &NullAlloc) }
    }

    const fn get_page_layout(page_size: usize, min: usize) -> Layout {
        let size = Self::get_page_size(page_size, min);
        let align = Self::get_page_align(size);
        unsafe { Layout::from_size_align_unchecked(size, align) }
    }
    const fn get_page_size(page_size: usize, min: usize) -> usize {
        if page_size == 0 {
            4096
        } else if page_size > min {
            if page_size < PAGE_MAX {
                page_size
            } else {
                PAGE_MAX
            }
        } else {
            min
        }
    }
    const fn get_page_align(page_size: usize) -> usize {
        if page_size >= 4096 {
            4096
        } else if (page_size & (page_size - 1)) == 0 {
            page_size
        } else {
            let mut page_size = page_size | (page_size >> 1);
            page_size |= page_size >> 2;
            page_size |= page_size >> 4;
            page_size |= page_size >> 8;
            page_size + 1
        }
    }

    fn get_inner(&self) -> &Inner<A> {
        unsafe { &*self.0.get() }
    }

    fn is_normal(&self, layout: Layout) -> bool {
        self.get_inner().stat.layout.size() > layout.size()
            && self.get_inner().stat.layout.align() > layout.align()
    }

    fn alloc_buf<F>(&self, layout: Layout, f: F) -> Result<NonNull<[u8]>>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<()>,
    {
        let inner = unsafe { &mut *self.0.get() };
        if self.is_normal(layout) {
            inner.alloc_normal(layout, |ptr| f(ptr).map(|_| ptr))
        } else {
            inner.alloc_large(layout, |ptr| f(ptr).map(|_| ptr), |_| {})
        }
    }

    fn free_buf(&self, _ptr: NonNull<[u8]>, _layout: Layout) {}
}

unsafe impl<A: Allocator> Allocator for Pool<A> {
    unsafe fn alloc_buf<F>(&self, layout: Layout, f: F) -> Result<NonNull<[u8]>>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<()>,
    {
        Pool::alloc_buf(self, layout, f)
    }
    unsafe fn free_buf(&self, ptr: NonNull<[u8]>, layout: Layout) {
        Pool::free_buf(self, ptr, layout)
    }
}

unsafe impl<A: Allocator> GlobalAlloc for Pool<A> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        match Allocator::alloc_buf(self, layout, |_| Ok(())) {
            Ok(ptr) => ptr.cast::<u8>().as_ptr(),
            Err(_) => ptr::null_mut(),
        }
    }
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        GenericAlloc::dealloc(self, ptr, layout)
    }
}

impl<A: Allocator> Inner<A> {
    fn init_page<'a>(page: NonNull<[u8]>, buf: NonNull<[u8]>, pos: usize) -> &'a mut Page {
        let mut page = page.cast::<Page>();
        let page_node = unsafe { page.as_mut() };
        page_node.init(buf, pos);
        page_node
    }

    fn alloc_page(&mut self) -> Option<NonNull<Page>> {
        if let Ok(buf) = unsafe { self.alloc.allocate(self.stat.layout) } {
            self.stat.size += self.stat.layout.size();
            self.stat.page_cnt += 1;
            let layout = Layout::new::<Page>();
            if let Ok(page) = self.alloc_small(layout) {
                Some(NonNull::from(Self::init_page(page, buf, 0)))
            } else {
                self.stat.alloc += layout.size();
                Some(NonNull::from(Self::init_page(buf, buf, layout.size())))
            }
        } else {
            None
        }
    }

    fn add_page_buf(&mut self, buf: &'static [u8]) {
        let layout = Layout::new::<Page>();
        let buf = NonNull::from(buf);
        let page;
        if let Ok(page_buf) = self.alloc_small(layout) {
            // Mark the page as external so release_page() never hands the
            // caller-provided buffer back to the backing allocator.
            page = Self::init_page(page_buf, buf, 0).set_external();
        } else if buf.len() > layout.size() {
            self.stat.alloc += layout.size();
            page = Self::init_page(buf, buf, layout.size()).set_external();
        } else {
            return;
        }

        self.stat.external_size += buf.len();
        if let Some(mut current) = self.current {
            let current = unsafe { current.as_mut() };
            page.next = current.next;
            current.next = Some(NonNull::from(page));
        } else {
            self.current = Some(NonNull::from(page));
            self.first = self.current;
            self.small = self.current;
        }
    }

    fn alloc_small(&mut self, layout: Layout) -> Result<NonNull<[u8]>> {
        if let Some(first) = self.small {
            self.alloc_buf(
                first,
                layout,
                Ok,
                |_| None,
                |pool, page| pool.update_small(page),
            )
        } else {
            Err(Error::default())
        }
    }

    fn update_small(&mut self, page: &Page) {
        if ptr::eq(self.small.unwrap().as_ptr(), page) {
            self.small = page.next;
        }
    }

    fn alloc_normal<F>(&mut self, layout: Layout, f: F) -> Result<NonNull<[u8]>>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<NonNull<[u8]>>,
    {
        if let Some(current) = self.current {
            self.alloc_buf(
                current,
                layout,
                f,
                |pool| pool.alloc_page(),
                |pool, page| pool.update_current(page),
            )
        } else if let Some(page) = self.alloc_page() {
            self.current = Some(page);
            self.first = self.current;
            self.small = self.current;
            self.alloc_buf(
                page,
                layout,
                f,
                |pool| pool.alloc_page(),
                |pool, page| pool.update_current(page),
            )
        } else {
            Err(Error::last())
        }
    }

    fn update_current(&mut self, page: &Page) {
        if ptr::eq(self.current.unwrap().as_ptr(), page)
            && (page.get_pos() + size_of::<Page>() > self.stat.layout.size()
                || page.get_fail() == FAIL_MAX)
        {
            self.current = page.next;
        }
    }

    fn alloc_large<F1, F2>(&mut self, layout: Layout, f: F1, dtor: F2) -> Result<NonNull<[u8]>>
    where
        F1: FnOnce(NonNull<[u8]>) -> Result<NonNull<[u8]>>,
        F2: FnOnce(NonNull<[u8]>),
    {
        let large = unsafe { self.alloc.allocate(layout)? };
        match f(large) {
            Ok(_) => {}
            Err(error) => {
                unsafe { self.alloc.free_buf(large, layout) };
                return Err(error);
            }
        }

        let node = match self.alloc_large_node() {
            Ok(node) => node,
            Err(error) => {
                dtor(large);
                unsafe { self.alloc.free_buf(large, layout) };
                return Err(error);
            }
        };
        self.init_large(large, node, layout);
        self.stat.size += layout.size();
        self.stat.alloc += layout.size();
        Ok(large)
    }

    fn init_large(&mut self, large: NonNull<[u8]>, node: NonNull<[u8]>, layout: Layout) {
        let node = node.cast::<Large>();
        let large_node = node.as_ptr();
        unsafe {
            ptr::addr_of_mut!((*large_node).buf).write(large.cast::<u8>());
            ptr::addr_of_mut!((*large_node).next).write(self.large);
            ptr::addr_of_mut!((*large_node).layout).write(layout);
        }
        self.large = Some(node);
    }

    fn alloc_large_node(&mut self) -> Result<NonNull<[u8]>> {
        if let Ok(node) = self.alloc_small(Layout::new::<Large>()) {
            Ok(node)
        } else {
            self.alloc_normal(Layout::new::<Large>(), Ok)
        }
    }

    fn alloc_buf<F1, F2, F3>(
        &mut self,
        mut page: NonNull<Page>,
        layout: Layout,
        f: F1,
        mut on_new_page: F2,
        mut on_fail: F3,
    ) -> Result<NonNull<[u8]>>
    where
        F1: FnOnce(NonNull<[u8]>) -> Result<NonNull<[u8]>>,
        F2: FnMut(&mut Self) -> Option<NonNull<Page>>,
        F3: FnMut(&mut Self, &Page),
    {
        // Walk the page list starting at `page`. Each page that cannot satisfy
        // the request is reported through `on_fail` (so the caller can retire
        // it); when the list is exhausted, `on_new_page` may grow it.
        loop {
            let page_node = unsafe { page.as_mut() };
            match page_node.alloc_prepare(self.stat.layout.size(), layout) {
                Some(pos) => {
                    return page_node.alloc(pos, layout.size(), f).map(|ptr| {
                        self.stat.alloc += layout.size();
                        ptr
                    });
                }
                None => {
                    if let Some(next) = page_node.next {
                        page = next;
                    } else if let Some(next) = on_new_page(self) {
                        page_node.next = Some(next);
                        page = next;
                    } else {
                        return Err(Error::default());
                    }
                    on_fail(self, page_node);
                }
            }
        }
    }

    fn leak_first(&mut self) -> NonNull<[u8]> {
        let page = unsafe { self.first.unwrap().as_mut() };
        self.first = page.next;
        self.release();
        page.buf
    }

    fn release(&mut self) {
        self.stat.page_cnt = 0;
        self.stat.size = 0;
        self.stat.alloc = 0;
        self.release_large();
        self.release_page();
    }

    fn release_large(&mut self) {
        while let Some(mut large) = self.large {
            let large_node = unsafe { large.as_mut() };
            if large_node.layout.size() > 0 {
                unsafe {
                    self.alloc
                        .dealloc(large_node.buf.as_ptr(), large_node.layout)
                };
            }
            self.large = large_node.next;
        }
    }
    fn release_page(&mut self) {
        // Reverse the page list first: Page headers for later pages may live
        // inside earlier pages' buffers, so buffers are freed back-to-front,
        // keeping each header alive while it is read.
        let mut local = None;
        while let Some(mut page) = self.first {
            let page_node = unsafe { page.as_mut() };
            self.first = page_node.next;
            page_node.next = local;
            local = Some(page_node.into());
        }

        while let Some(mut page) = local {
            let page_node = unsafe { page.as_mut() };
            local = page_node.next;
            if !page_node.get_external() {
                unsafe { self.alloc.free_buf(page_node.buf, self.stat.layout) };
            }
        }
    }
}

impl<A: Allocator> Drop for Inner<A> {
    fn drop(&mut self) {
        self.release();
        unsafe { ManuallyDrop::drop(&mut self.alloc) };
    }
}

impl Page {
    fn init(&mut self, buf: NonNull<[u8]>, pos: usize) {
        self.next = None;
        self.buf = buf;
        self.pos = pos;
    }
    fn get_fail(&self) -> usize {
        get_bitfields::<FAIL_OFF, FAIL_BITS>(self.pos)
    }
    fn set_fail(&mut self, cnt: usize) -> &mut Self {
        self.pos = set_bitfields::<FAIL_OFF, FAIL_BITS>(self.pos, cnt);
        self
    }
    fn get_pos(&self) -> usize {
        get_bitfields::<OFF_OFF, OFF_BITS>(self.pos)
    }
    fn set_pos(&mut self, pos: usize) -> &mut Self {
        self.pos = set_bitfields::<OFF_OFF, OFF_BITS>(self.pos, pos);
        self
    }
    fn set_external(&mut self) -> &mut Self {
        self.pos = set_bitfields::<EXTERN_OFF, EXTERN_BITS>(self.pos, 1);
        self
    }
    fn get_external(&self) -> bool {
        get_bitfields::<EXTERN_OFF, EXTERN_BITS>(self.pos) > 0
    }

    fn alloc_prepare(&mut self, page_size: usize, layout: Layout) -> Option<usize> {
        let size = layout.size();
        let align = layout.align();
        // Round the current offset up to the requested alignment.
        let pos = (self.get_pos() + align - 1) & !(align - 1);
        if page_size - size >= pos {
            Some(pos)
        } else {
            let fail = self.get_fail();
            if fail < FAIL_MAX {
                self.set_fail(fail + 1);
            }
            None
        }
    }

    fn alloc<F>(&mut self, pos: usize, size: usize, f: F) -> Result<NonNull<[u8]>>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<NonNull<[u8]>>,
    {
        let slice = unsafe { self.buf.as_mut() };
        let ptr = NonNull::from(&mut slice[pos..pos + size]);
        let cur = self.get_pos();
        self.set_pos(pos + size);
        match f(ptr) {
            Ok(buf) => Ok(buf),
            Err(error) => {
                // Roll the bump offset back only if the failed constructor did
                // not allocate from this page itself in the meantime.
                if self.get_pos() == pos + size {
                    self.set_pos(cur);
                }
                Err(error)
            }
        }
    }
}
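
// Bit layout of `Page::pos`: the low `usize::BITS - 3` bits store the bump
// offset into the page, the next bit flags externally provided buffers, and
// the top two bits count allocation failures on the page (capped at
// `FAIL_MAX`). `PAGE_MAX` is therefore the largest representable page size.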
const OFF_OFF: usize = 0;
const OFF_BITS: usize = usize::BITS as usize - 3;
const EXTERN_OFF: usize = OFF_BITS + OFF_OFF;
const EXTERN_BITS: usize = 1;
const FAIL_OFF: usize = EXTERN_BITS + EXTERN_OFF;
const FAIL_BITS: usize = 2;
const FAIL_MAX: usize = (1_usize << FAIL_BITS) - 1;
const PAGE_MAX: usize = (1_usize << OFF_BITS) - 1;

#[cfg(test)]
mod test {
    extern crate std;
    use super::{Large, Page};
    use crate::{Boxed, Error, MemPool, PoolAlloc, Result};
    use core::alloc::Layout;
    use core::mem::{align_of, size_of, size_of_val};

    struct Foo {
        val: i32,
    }

    #[test]
    fn test_t() {
        let pool = MemPool::new(0);
        let foo = Boxed::new_in(&pool, Foo { val: 100 }).unwrap();
        assert_eq!(foo.val, 100);
        let bar = Boxed::new_in(&pool, Foo { val: 101 }).unwrap();
        assert_eq!(foo.val, 100);
        assert_eq!(bar.val, 101);
        assert_eq!(
            size_of::<Foo>(),
            &bar.val as *const _ as *const u8 as usize - &foo.val as *const _ as *const u8 as usize
        );
        let stat = pool.stat();
        assert_eq!(stat.size, stat.layout.size());
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.alloc, size_of::<Page>() + size_of::<Foo>() * 2);
    }

    #[test]
    fn test_large() {
        let pool = MemPool::new(0);
        let foo = Boxed::uninit_slice_in::<Foo>(&pool, 2000);
        assert!(foo.is_ok());
        let stat = pool.stat();
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.size, stat.layout.size() + 2000 * size_of::<Foo>());
        assert_eq!(
            stat.alloc,
            size_of::<Page>() + size_of::<Large>() + 2000 * size_of::<Foo>()
        );
    }

    #[test]
    fn test_t_array() {
        let pool = MemPool::new(0);
        let foo = Boxed::new_slice_then_in(&pool, 10, |_| Ok(Foo { val: 0 })).unwrap();
        foo.iter().for_each(|obj| assert_eq!(0, obj.val));
        let stat = pool.stat();
        assert_eq!(stat.size, stat.layout.size());
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.alloc, size_of::<Page>() + size_of::<Foo>() * 10);
    }

    #[test]
    fn test_t_error() {
        let pool = MemPool::new(0);
        let foo = Boxed::new_in(&pool, Foo { val: 100 }).unwrap();
        assert_eq!(foo.val, 100);
        let bar = Boxed::new_then_in::<Foo, _>(&pool, || Err(Error::default()));
        assert!(bar.is_err());
        assert_eq!(foo.val, 100);
        let stat = pool.stat();
        assert_eq!(stat.size, stat.layout.size());
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.alloc, size_of::<Page>() + size_of::<Foo>());
    }

    #[test]
    fn test_large_error() {
        let pool = MemPool::new(0);
        let foo = Boxed::new_in(&pool, Foo { val: 100 }).unwrap();
        assert_eq!(foo.val, 100);
        let bar = Boxed::new_slice_then_in::<Foo, _>(&pool, 100, |_| Err(Error::default()));
        assert!(bar.is_err());
        assert_eq!(foo.val, 100);
        let stat = pool.stat();
        assert_eq!(stat.size, stat.layout.size());
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.alloc, size_of::<Page>() + size_of::<Foo>());
    }

    #[test]
    fn test_array_error() {
        let pool = MemPool::new(0);
        struct Foo;
        static mut CNT: usize = 0;
        impl Drop for Foo {
            fn drop(&mut self) {
                unsafe {
                    CNT += 1;
                }
            }
        }
        unsafe { CNT = 0 };
        let array = Boxed::new_slice_then_in(&pool, 10, |n| {
            if n < 9 {
                Ok(Foo)
            } else {
                Err(Error::default())
            }
        });
        assert!(array.is_err());
        unsafe {
            assert_eq!(CNT, 9);
        }
        let stat = pool.stat();
        assert_eq!(stat.size, stat.layout.size());
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.alloc, size_of::<Page>());
    }

    #[test]
    fn test_new_boxed() {
        let boxed = MemPool::new_boxed(0);
        assert!(boxed.is_ok());
        let boxed = boxed.unwrap();
        let stat = boxed.stat();
        assert_eq!(stat.size, stat.layout.size());
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(stat.alloc, size_of::<Page>() + size_of_val(&*boxed));

        let foo = Boxed::new_in(&*boxed, Foo { val: 1 });
        assert!(foo.is_ok());
        let foo = foo.unwrap();
        assert_eq!(foo.val, 1);

        assert_eq!(
            size_of_val(&*boxed),
            foo.as_ref() as *const _ as *const u8 as usize
                - boxed.as_ref() as *const _ as *const u8 as usize
        );

        let stat = boxed.stat();
        assert_eq!(stat.page_cnt, 1);
        assert_eq!(
            stat.alloc,
            size_of::<Page>() + size_of_val(&*boxed) + size_of::<Foo>()
        );
        assert_eq!(stat.size, stat.layout.size());
    }

    #[test]
    fn test_page_fail() {
        let boxed = MemPool::new_boxed(1024);
        assert!(boxed.is_ok());
        let boxed = boxed.unwrap();

        let len1 = 1024 - core::mem::size_of_val(&*boxed);
        let val1 =
            Boxed::new_buf_in(&*boxed, Layout::from_size_align(len1 + 1, 1).unwrap()).unwrap();
        let stat = boxed.stat();
        assert_eq!(stat.page_cnt, 2);

        let _val = Boxed::new_buf_in(&*boxed, Layout::from_size_align(len1 + 1, 1).unwrap());
        let stat = boxed.stat();
        assert_eq!(stat.page_cnt, 3);

        let val = Boxed::new_in(&*boxed, 1_u8).unwrap();
        assert_eq!(
            boxed.as_ptr() as *const u8 as usize
                + core::mem::size_of_val(&*boxed)
                + core::mem::size_of::<Page>() * 2,
            val.as_ptr() as *const u8 as usize
        );

        let len2 = len1 - core::mem::size_of::<Page>() * 2 - 1;
        let _val = Boxed::new_buf_in(&*boxed, Layout::from_size_align(len2 + 1, 1).unwrap());
        let stat = boxed.stat();
        assert_eq!(stat.page_cnt, 4);

        let val = Boxed::new_in(&*boxed, 1_u8).unwrap();
        assert_eq!(
            val1.as_ptr() as *const u8 as usize + len1 + 1,
            val.as_ptr() as *const u8 as usize
        );
    }

    #[test]
    fn test_aligned() {
        let boxed = MemPool::new_boxed(1024).unwrap();
        let val_u8 = Boxed::new_in(&*boxed, 1_u8).unwrap();
        let val_u64 = Boxed::new_in(&*boxed, 1_u64).unwrap();
        assert_eq!(
            boxed.as_ptr() as *const u8 as usize + core::mem::size_of_val(&*boxed),
            val_u8.as_ptr() as *const u8 as usize
        );
        assert_eq!(
            val_u8.as_ptr() as *const u8 as usize + 8,
            val_u64.as_ptr() as *const u8 as usize
        );
        let stat = boxed.stat();
        assert_eq!(
            stat.alloc,
            core::mem::size_of_val(&*boxed) + core::mem::size_of::<Page>() + 8 + 1
        );
    }

    #[test]
    fn test_reset_boxed() {
        let mut pool = MemPool::new_boxed(0).unwrap();
        let addr1;
        {
            let val_u32 = Boxed::new_slice_then_in(&*pool, 100, |_| Ok(0_u32)).unwrap();
            addr1 = val_u32.as_ptr() as *const u8 as usize;
        }
        pool = MemPool::reset_boxed(pool);
        {
            let val_u32 = Boxed::new_slice_then_in(&*pool, 100, |_| Ok(0_u32)).unwrap();
            assert_eq!(addr1, val_u32.as_ptr() as *const u8 as usize);
            let _ = Boxed::new_slice_then_in(&*pool, 1000, |_| Ok(0_u32)).unwrap();
            let stat = pool.stat();
            assert_eq!(stat.page_cnt, 2);
        }
        pool = MemPool::reset_boxed(pool);
        let val_u32 = Boxed::uninit_slice_in::<u32>(&*pool, 100).unwrap();
        assert_eq!(addr1, val_u32.as_ptr() as *const u8 as usize);
    }

    #[test]
    fn test_alloc_in_ctor() {
        struct Foo<'a> {
            val1: i32,
            val2: crate::Boxed<'a, i32, MemPool<PoolAlloc>>,
        }

        let pool = MemPool::new(0);
        let stat = pool.stat();
        assert_eq!(stat.alloc, 0);

        let foo = Boxed::new_then_in(&pool, || {
            Ok(Foo {
                val1: 99,
                val2: Boxed::new_in(&pool, 100)?,
            })
        });
        assert!(foo.is_ok());
        let foo = foo.unwrap();

        assert_eq!(foo.val1, 99);
        assert_eq!(*foo.val2, 100);

        let stat = pool.stat();
        assert_eq!(
            stat.alloc,
            size_of::<Page>() + size_of::<Foo>() + size_of::<i32>()
        );

        {
            let err1 = Boxed::new_then_in(&pool, || {
                Ok(Foo {
                    val1: 0,
                    val2: Boxed::new_then_in(&pool, || -> Result<i32> { Err(Error::default()) })?,
                })
            });
            assert!(err1.is_err());
        }

        let stat = pool.stat();
        assert_eq!(
            stat.alloc,
            size_of::<Page>() + size_of::<Foo>() + size_of::<i32>()
        );

        let bar = Boxed::new_then_in(&pool, || {
            Ok(Foo {
                val1: 0,
                val2: Boxed::new_in(&pool, 100_i32)?,
            })
        })
        .unwrap();
        assert_eq!(
            &foo.val1 as *const _ as *const u8 as usize + size_of::<Foo>() + align_of::<Foo>(),
            &bar.val1 as *const _ as *const u8 as usize
        );

        core::mem::drop(bar);
        core::mem::drop(foo);
        core::mem::drop(pool);
    }
838}