1use std::cell::Cell;
2use std::marker::PhantomData;
3use std::mem::MaybeUninit;
4use std::ptr::NonNull;
5use std::rc::Rc;
6
7use crate::block::Block;
8use crate::common::{Pointer, BLOCK_PER_PAGE};
9use crate::page::pool::{drop_page, PagePool};
10use crate::ArenaRc;
11
/// An owned, uniquely-referenced allocation living inside a pool page.
///
/// Created with the block's reference counter set to 1; dropping the box
/// resets the counter to 0 and releases the block via `Block::drop_block`.
pub struct PoolBox<T> {
    // Pointer to the pool block holding both the value and its counter.
    block: NonNull<Block<T>>,
    // `PhantomData<*mut ()>` makes the type !Send and !Sync
    // (this pool is single-threaded).
    _marker: PhantomData<*mut ()>,
}
38
39impl<T> PoolBox<T> {
40 fn new(mut block: NonNull<Block<T>>) -> PoolBox<T> {
41 let counter_mut = unsafe { block.as_mut() }.counter.get_mut();
43
44 debug_assert!(
47 *counter_mut == 0,
48 "PoolBox: Counter not zero {}",
49 counter_mut
50 );
51 *counter_mut = 1;
52 PoolBox {
53 block,
54 _marker: PhantomData,
55 }
56 }
57}
58
59impl<T> std::ops::Deref for PoolBox<T> {
60 type Target = T;
61 fn deref(&self) -> &T {
62 unsafe { &*self.block.as_ref().value.get() }
63 }
64}
65
66impl<T> std::ops::DerefMut for PoolBox<T> {
67 fn deref_mut(&mut self) -> &mut T {
68 unsafe { &mut *self.block.as_ref().value.get() }
69 }
70}
71
72impl<T> Drop for PoolBox<T> {
76 fn drop(&mut self) {
77 let counter_mut = unsafe { self.block.as_mut() }.counter.get_mut();
79 assert!(
83 *counter_mut == 1,
84 "PoolBox: Counter != 1 on drop {}",
85 counter_mut
86 );
87 *counter_mut = 0;
88
89 Block::drop_block(self.block)
90 }
91}
92
/// A single-threaded object pool allocating `T` values from pages of blocks.
pub struct Pool<T: Sized> {
    // Head of the free-page list. Shared through `Rc` with the pages
    // (passed to `PagePool::make_list`) — presumably so a page can re-link
    // itself when one of its blocks is released; verify in `page::pool`.
    free: Rc<Pointer<PagePool<T>>>,
    // Head of the list of every page owned by this pool.
    page_list: Pointer<PagePool<T>>,
    // Number of pages currently allocated (kept in sync by `stats`' debug_assert).
    npages: Cell<usize>,
    // `*mut ()` keeps the pool !Send/!Sync.
    _marker: PhantomData<*mut ()>,
}
107
impl<T: Sized> Pool<T> {
    /// Creates a pool with a single page of blocks (the minimum capacity).
    pub fn new() -> Pool<T> {
        Self::with_capacity(1)
    }
124
125 pub fn with_capacity(cap: usize) -> Pool<T> {
141 let npages = ((cap.max(1) - 1) / BLOCK_PER_PAGE) + 1;
142 let free = Rc::new(Cell::new(std::ptr::null_mut()));
143
144 let (mut first, _) = PagePool::make_list(npages, &free);
145 let first_ref = unsafe { first.as_mut() };
146
147 free.set(first_ref);
148
149 Pool {
150 npages: Cell::new(npages),
151 free,
152 page_list: Cell::new(first_ref),
153 _marker: PhantomData,
154 }
155 }
156
    /// Allocates a new batch of pages and splices it in front of both the
    /// free list and the page list.
    fn alloc_new_page(&self) -> NonNull<PagePool<T>> {
        let len = self.npages.get();

        // Geometric growth: allocate as many pages as currently exist,
        // clamped to [1, 900_000] pages per batch.
        let to_allocate = len.max(1).min(900_000);

        let (first, mut last) = PagePool::make_list(to_allocate, &self.free);

        // Link the tail of the new batch to the current list heads, so the
        // batch becomes a prefix of both lists.
        let last_ref = unsafe { last.as_mut() };
        last_ref.next_free.set(self.free.get());
        last_ref.next.set(self.page_list.get());

        // Only after the batch is fully linked do the heads move to it.
        let first_ptr = first.as_ptr();
        self.free.set(first_ptr);
        self.page_list.set(first_ptr);

        self.npages.set(len + to_allocate);

        first
    }
176
    /// Returns a free block, growing the pool when every page in the free
    /// list is exhausted.
    fn find_place(&self) -> NonNull<Block<T>> {
        loop {
            // Walk the free-page list until some page yields a block.
            while let Some(page) = unsafe { self.free.get().as_mut() } {
                if let Some(block) = page.acquire_free_block() {
                    return block;
                }

                let next = page.next_free.get();

                // Page is full: unlink it from the free list and clear the
                // flag — presumably so the page re-inserts itself when a
                // block is released (see `in_free_list` users in page::pool).
                self.free.set(next);
                page.in_free_list = false;
            }
            // No free block anywhere: allocate more pages and retry.
            self.alloc_new_page();
        }
    }
192
193 pub fn alloc(&self, value: T) -> PoolBox<T> {
208 let block = self.find_place();
209
210 unsafe {
211 let ptr = block.as_ref().value.get();
212 ptr.write(value);
213 }
214
215 PoolBox::new(block)
216 }
217
218 pub fn alloc_with<F>(&self, initializer: F) -> PoolBox<T>
269 where
270 F: Fn(&mut MaybeUninit<T>) -> &T,
271 {
272 let block = self.find_place();
273 let result = PoolBox::new(block);
274
275 unsafe {
276 let ptr = block.as_ref().value.get();
277 let reference = initializer(&mut *(ptr as *mut std::mem::MaybeUninit<T>));
278 debug_assert_eq!(
279 ptr as *const T, reference as *const T,
280 "`initializer` must return a reference of its parameter"
281 );
282 }
283
284 result
285 }
286
287 pub fn alloc_rc(&self, value: T) -> ArenaRc<T> {
302 let block = self.find_place();
303
304 unsafe {
305 let ptr = block.as_ref().value.get();
306 ptr.write(value);
307 }
308
309 ArenaRc::new(block)
310 }
311
312 pub fn alloc_rc_with<F>(&self, initializer: F) -> ArenaRc<T>
363 where
364 F: Fn(&mut MaybeUninit<T>) -> &T,
365 {
366 let block = self.find_place();
367 let result = ArenaRc::new(block);
368
369 unsafe {
370 let ptr = block.as_ref().value.get();
371 let reference = initializer(&mut *(ptr as *mut std::mem::MaybeUninit<T>));
372 debug_assert_eq!(
373 ptr as *const T, reference as *const T,
374 "`initializer` must return a reference of its parameter"
375 );
376 }
377
378 result
379 }
380
381 pub fn stats(&self) -> (usize, usize) {
396 let mut next = self.page_list.get();
397 let mut used = 0;
398 let mut npages = 0;
399
400 while let Some(next_ref) = unsafe { next.as_mut() } {
401 let next_next = next_ref.next.get();
402
403 let bitfield = next_ref.bitfield;
404 let zeros = bitfield.count_zeros() as usize;
405 used += zeros;
406 next = next_next;
407
408 npages += 1;
409 }
410
411 debug_assert!(npages == self.npages.get());
412
413 let free = (npages * BLOCK_PER_PAGE) - used;
414
415 (used, free)
416 }
417
418 #[cfg(target_pointer_width = "64")]
419 #[cfg(test)]
420 pub(crate) fn size_lists(&self) -> (usize, usize) {
421 let mut next = self.page_list.get();
422 let mut size = 0;
423 while let Some(next_ref) = unsafe { next.as_mut() } {
424 next = next_ref.next.get();
425 size += 1;
426 }
427
428 let mut next = self.free.get();
429 let mut free = 0;
430 while let Some(next_ref) = unsafe { next.as_mut() } {
431 next = next_ref.next_free.get();
432 free += 1;
433 }
434
435 (size, free)
436 }
437
438 pub fn shrink_to_fit(&mut self) {
470 let mut current: &Pointer<PagePool<T>> = &self.free;
471
472 let mut to_drop = vec![];
473
474 while let Some(current_value) = unsafe { current.get().as_mut() } {
475 let next = ¤t_value.next_free;
476 let next_value = next.get();
477
478 if current_value.bitfield == !0 {
479 current.set(next_value);
480 to_drop.push(current_value as *const _ as *mut PagePool<T>);
481 } else {
482 current = next;
483 }
484 }
485
486 let mut current: &Pointer<PagePool<T>> = &self.page_list;
487
488 while let Some(current_value) = unsafe { current.get().as_mut() } {
491 let next = ¤t_value.next;
492 let next_value = next.get();
493
494 if to_drop.contains(&(current_value as *const _ as *mut PagePool<T>)) {
495 current.set(next_value);
496 } else {
497 current = next;
498 }
499 }
500
501 self.npages.set(self.npages.get() - to_drop.len());
502
503 for page in to_drop.iter().rev() {
504 drop_page(*page)
505 }
506 }
507
508 #[allow(dead_code)]
509 #[cfg(test)]
510 pub(crate) fn display_list(&self) {
511 let mut full = vec![];
512
513 let mut next = self.page_list.get();
514 while let Some(next_ref) = unsafe { next.as_mut() } {
515 full.push(next);
516 next = next_ref.next.get();
517 }
518
519 let mut list_free = vec![];
520
521 let mut next = self.page_list.get();
522 while let Some(next_ref) = unsafe { next.as_mut() } {
523 list_free.push(next);
524 next = next_ref.next_free.get();
525 }
526
527 println!("FULL {} {:#?}", full.len(), full);
528 println!("FREE {} {:#?}", list_free.len(), list_free);
529 }
530}
531
532impl<T> Default for Pool<T> {
533 fn default() -> Self {
534 Pool::new()
535 }
536}
537
538impl<T> Drop for Pool<T> {
539 fn drop(&mut self) {
540 let mut next = self.page_list.get();
541
542 while let Some(next_ref) = unsafe { next.as_mut() } {
543 let next_next = next_ref.next.get();
544 drop_page(next);
545 next = next_next;
546 }
547 }
548}
549
550impl<T> std::fmt::Debug for Pool<T> {
551 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
552 struct Page {
553 free: usize,
554 used: usize,
555 }
556
557 impl std::fmt::Debug for Page {
558 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
559 write!(f, "Page {{ free: {} used: {} }}", self.free, self.used)
560 }
561 }
562
563 let npages = self.npages.get();
564
565 let mut vec = Vec::with_capacity(npages);
566
567 let mut next = self.page_list.get();
568
569 while let Some(next_ref) = unsafe { next.as_mut() } {
570 let used = next_ref.bitfield.count_zeros() as usize;
571 vec.push(Page {
572 used,
573 free: BLOCK_PER_PAGE - used,
574 });
575
576 next = next_ref.next.get();
577 }
578
579 let blocks_used: usize = vec.iter().map(|p| p.used).sum();
580 let blocks_free: usize = vec.iter().map(|p| p.free).sum();
581
582 f.debug_struct("Pool")
583 .field("blocks_free", &blocks_free)
584 .field("blocks_used", &blocks_used)
585 .field("npages", &npages)
586 .field("pages", &vec)
587 .finish()
588 }
589}
590
// Dead placeholder; intentionally empty and never called.
#[allow(dead_code)]
fn arena_fail() {} #[cfg(test)]
mod tests {
    use super::Pool;
    use std::mem::MaybeUninit;
    use std::ptr;

    // A capacity of 1000 rounds up to 16 pages (63 blocks each = 1008
    // slots); shrinking an untouched pool releases every page.
    #[cfg(target_pointer_width = "64")]
    #[test]
    fn arena_shrink() {
        let mut arena = Pool::<usize>::with_capacity(1000);
        assert_eq!(arena.stats(), (0, 1008));
        arena.shrink_to_fit();
        assert_eq!(arena.stats(), (0, 0));
    }

    // Shrinking must keep any page that still holds live allocations;
    // stats() tracks (used, free) through allocs, drops and shrinks.
    #[cfg(target_pointer_width = "64")]
    #[test]
    fn arena_shrink2() {
        let mut arena = Pool::<usize>::with_capacity(1000);

        println!("A");
        let _a = arena.alloc(1);
        arena.shrink_to_fit();
        assert_eq!(arena.stats(), (1, 62));

        println!("A1");
        let _a = arena.alloc(1);
        arena.shrink_to_fit();
        assert_eq!(arena.stats(), (2, 61));

        println!("A2");
        let mut values = Vec::with_capacity(64);
        for _ in 0..64 {
            values.push(arena.alloc(1));
        }

        println!("A3");
        assert_eq!(arena.stats(), (66, 60));
        println!("A32");
        arena.shrink_to_fit();
        println!("A33");
        assert_eq!(arena.stats(), (66, 60));

        println!("A4");
        std::mem::drop(values);

        println!("A5");
        assert_eq!(arena.stats(), (2, 124));
        println!("A6");
        arena.shrink_to_fit();
        println!("A7");
        assert_eq!(arena.stats(), (2, 61));
    }

    // Exercises size_lists() (page-list length vs free-list length)
    // through a long sequence of allocations, partial drops and shrinks.
    #[cfg(target_pointer_width = "64")]
    #[test]
    fn arena_size() {
        let mut arena = Pool::<usize>::with_capacity(1000);

        assert_eq!(arena.size_lists(), (16, 16));
        let a = arena.alloc(1);
        assert_eq!(arena.size_lists(), (16, 16));

        let mut values = Vec::with_capacity(539);
        for _ in 0..539 {
            values.push(arena.alloc(1));
        }
        assert_eq!(arena.size_lists(), (16, 8));

        arena.shrink_to_fit();

        assert_eq!(arena.size_lists(), (9, 1));

        values.truncate(503);
        arena.shrink_to_fit();

        assert_eq!(arena.size_lists(), (8, 0));

        std::mem::drop(a);
        for _ in 0..62 {
            values.remove(0);
        }

        assert_eq!(arena.size_lists(), (8, 1));

        arena.shrink_to_fit();

        assert_eq!(arena.size_lists(), (7, 0));

        values.clear();

        assert_eq!(arena.size_lists(), (7, 7));

        arena.shrink_to_fit();

        assert_eq!(arena.size_lists(), (0, 0));

        {
            let _a = arena.alloc(1);
            println!("LA3",);
            assert_eq!(arena.size_lists(), (1, 1));

            println!("{:?}", arena);
            arena.display_list();
        }

        assert_eq!(arena.size_lists(), (1, 1));
        arena.shrink_to_fit();
        assert_eq!(arena.size_lists(), (0, 0));

        let mut values = Vec::with_capacity(126);
        for _ in 0..126 {
            values.push(arena.alloc(1));
        }
        assert_eq!(arena.size_lists(), (2, 1));

        values.remove(0);
        assert_eq!(arena.size_lists(), (2, 2));

        values.push(arena.alloc(1));
        assert_eq!(arena.size_lists(), (2, 2));
    }

    // alloc_with / alloc_rc_with succeed when the initializer writes the
    // slot and returns a reference derived from it.
    #[test]
    fn alloc_with_initializer() {
        struct MyData {
            a: usize,
        }

        // Copies `source` into the uninitialized slot and returns a
        // reference into that slot, as the alloc_with contract requires.
        fn initialize_data<'d>(uninit: &'d mut MaybeUninit<MyData>, source: &MyData) -> &'d MyData {
            unsafe {
                let ptr = uninit.as_mut_ptr();
                ptr::copy(source, ptr, 1);
                &*ptr
            }
        }

        let arena = Pool::<MyData>::new();

        let source = MyData { a: 101 };
        let data = arena.alloc_with(|uninit| initialize_data(uninit, &source));
        assert!(data.a == 101);

        let source = MyData { a: 102 };
        let data = arena.alloc_rc_with(|uninit| initialize_data(uninit, &source));
        assert!(data.a == 102);
    }

    // Returning a reference NOT derived from the slot must trip the
    // address debug_assert inside alloc_with.
    #[test]
    #[should_panic]
    fn alloc_with_panic() {
        let arena = Pool::<usize>::new();
        const SOURCE: usize = 10;

        let _ = arena.alloc_with(|_| &SOURCE);
    } #[test]
    #[should_panic]
    fn alloc_rc_with_panic() {
        let arena = Pool::<usize>::new();
        const SOURCE: usize = 10;

        let _ = arena.alloc_rc_with(|_| &SOURCE);
    } #[test]
    fn alloc_fns() {
        let arena = Pool::<usize>::new();

        use std::ptr;

        let a = arena.alloc_with(|place| unsafe {
            ptr::copy(&101, place.as_mut_ptr(), 1);
            &*place.as_mut_ptr()
        });
        assert!(*a == 101);

        let a = arena.alloc_rc_with(|place| unsafe {
            ptr::copy(&102, place.as_mut_ptr(), 1);
            &*place.as_mut_ptr()
        });
        assert!(*a == 102);

        let a = arena.alloc(103);
        assert!(*a == 103);

        let a = arena.alloc_rc(104);
        assert!(*a == 104);
    }

    // Handles must remain valid and readable after the arena itself has
    // been dropped.
    #[test]
    fn drop_arena_with_valid_allocated() {
        let (a, b, c, d) = {
            let arena = Pool::<usize>::new();

            use std::ptr;

            let a = arena.alloc_with(|place| unsafe {
                ptr::copy(&101, place.as_mut_ptr(), 1);
                &*place.as_mut_ptr()
            });
            let b = arena.alloc_rc_with(|place| unsafe {
                ptr::copy(&102, place.as_mut_ptr(), 1);
                &*place.as_mut_ptr()
            });
            let c = arena.alloc(103);
            let d = arena.alloc_rc(104);

            (a, b, c, d)
        };

        assert_eq!((*a, *b, *c, *d), (101, 102, 103, 104))
    }

    // Dropping a hand-built block with a bogus page pointer (all bits set)
    // is expected to panic.
    #[test]
    #[should_panic]
    #[cfg(target_pointer_width = "64")]
    fn invalid_block() {
        use std::cell::UnsafeCell;
        use std::ptr::NonNull;
        use std::sync::atomic::AtomicUsize;

        let mut block = super::Block {
            value: UnsafeCell::new(1),
            counter: AtomicUsize::new(1),
            page: crate::block::PageTaggedPtr { data: !0 },
        };

        super::Block::drop_block(NonNull::from(&mut block));
    } }