// comet/global_allocator.rs
use crate::allocator::normal::NormalAllocator;
use crate::allocator::overflow::OverflowAllocator;
use crate::allocator::Allocator;
use crate::block::SweepResult;
use crate::globals::LINE_SIZE;
use crate::heap::Heap;
use crate::internal::block_list::BlockList;
use crate::large_space::PreciseAllocation;
use crate::Config;
use crate::{
    block::Block, block_allocator::BlockAllocator, internal::space_bitmap::SpaceBitmap,
    large_space::LargeObjectSpace,
};

use std::ptr::null_mut;

/// Rounds `x` up to the nearest multiple of `y`; `y` must be a power of two.
pub const fn round_up(x: usize, y: usize) -> usize {
    (x + (y - 1)) & !(y - 1)
}
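
// Illustrative only (not in the original source): a minimal sketch of how
// `round_up` behaves. It assumes the alignment argument is a power of two, as
// the bit trick above requires; the concrete values are arbitrary examples.
#[cfg(test)]
mod round_up_sketch {
    use super::round_up;

    #[test]
    fn rounds_up_to_power_of_two_multiples() {
        assert_eq!(round_up(0, 16), 0);
        assert_eq!(round_up(1, 16), 16);
        assert_eq!(round_up(16, 16), 16);
        assert_eq!(round_up(17, 16), 32);
    }
}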

/// Shared allocation state for the heap: the block allocator with its live,
/// mark, and line bitmaps, the normal and overflow allocators that serve
/// requests from those blocks, and the large object space.
pub struct GlobalAllocator {
    pub(crate) block_allocator: Box<BlockAllocator>,
    pub(crate) large_space: LargeObjectSpace,
    pub(crate) live_bitmap: SpaceBitmap<8>,
    pub(crate) mark_bitmap: SpaceBitmap<8>,
    pub(crate) line_bitmap: SpaceBitmap<LINE_SIZE>,
    pub(crate) normal_allocator: NormalAllocator,
    pub(crate) overflow_allocator: OverflowAllocator,
}

impl GlobalAllocator {
    pub fn new(config: &Config) -> Self {
        let block_allocator = Box::new(BlockAllocator::new(config.heap_size));

        let mut global = Self {
            live_bitmap: SpaceBitmap::create(
                "live-bitmap",
                block_allocator.mmap.aligned(),
                block_allocator.size(),
            ),
            mark_bitmap: SpaceBitmap::create(
                "mark-bitmap",
                block_allocator.mmap.aligned(),
                block_allocator.size(),
            ),
            line_bitmap: SpaceBitmap::create(
                "line-bitmap",
                block_allocator.mmap.aligned(),
                block_allocator.size(),
            ),
            block_allocator,
            normal_allocator: NormalAllocator::new(null_mut(), null_mut()),
            overflow_allocator: OverflowAllocator::new(null_mut(), null_mut()),
            large_space: LargeObjectSpace::new(),
        };
        // The allocators are built with null block-allocator pointers above and
        // patched here; the `Box` keeps the `BlockAllocator` at a stable address,
        // so these raw pointers stay valid when `global` is moved out of `new`.
        global.normal_allocator.block_allocator = &mut *global.block_allocator;
        global.overflow_allocator.block_allocator = &mut *global.block_allocator;
        global
    }

    /// Allocates `size` bytes in the large object space and returns the cell
    /// pointer together with the actual size of its `PreciseAllocation` cell.
    pub fn large_allocation(&mut self, size: usize) -> (*mut u8, usize) {
        let cell = self.large_space.allocate(size);

        (cell.cast(), unsafe {
            (*PreciseAllocation::from_cell(cell)).cell_size()
        })
    }

    /// Lets the large object space prepare its allocations for an eden or full
    /// marking pass.
    pub fn prepare_for_marking(&mut self, eden: bool) {
        self.large_space.prepare_for_marking(eden);
    }

    /// Gathers every block currently held by the normal and overflow
    /// allocators, clears their mark and line bitmaps, flips the mark state of
    /// all large allocations, and returns the block list for a later sweep.
    pub fn begin_marking(&mut self) -> BlockList {
        let mut blocks = BlockList::new();
        self.normal_allocator.get_all_blocks(&mut blocks);
        self.overflow_allocator.get_all_blocks(&mut blocks);
        blocks.for_each(|block| unsafe {
            self.mark_bitmap
                .clear_range(block as _, (*block).end() as _);
            self.line_bitmap.clear_range(block as _, (*block).end());
        });
        for alloc in self.large_space.allocations.iter() {
            unsafe {
                (**alloc).flip();
            }
        }
        blocks
    }

    pub(crate) fn release_memory(&mut self) {}

    /// Sweeps the large object space and then every block in `block_list`:
    /// blocks with no surviving objects are returned to the block allocator,
    /// while partially live blocks are queued for reuse by the normal
    /// allocator.
    pub(crate) fn sweep(&mut self, mut block_list: BlockList) {
        self.large_space.sweep();
        unsafe {
            while !block_list.is_empty() {
                let block = block_list.pop();

                match (*block).sweep::<true>(
                    &self.mark_bitmap,
                    &self.live_bitmap,
                    &self.line_bitmap,
                ) {
                    SweepResult::Empty => {
                        self.block_allocator.return_block(block);
                    }
                    SweepResult::Reuse => {
                        self.normal_allocator.recyclable_blocks.push(block);
                    }
                }
            }
        }
    }

    /// Fetches a fresh block from the block allocator and initializes its
    /// header to point back at the owning `heap`.
    pub fn acquire_block(&self, heap: *const Heap) -> *mut Block {
        let block = self.block_allocator.get_block();
        unsafe {
            (*block).init(heap as _);
        }

        block
    }
}
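
// Illustrative only (not in the original source): a hedged sketch of the order
// in which a collection cycle is expected to drive the methods above. Tracing
// itself (setting bits in `mark_bitmap`) happens elsewhere and is only noted in
// a comment; `collection_cycle_sketch` is a hypothetical name, not an API of
// this crate.
#[allow(dead_code)]
fn collection_cycle_sketch(global: &mut GlobalAllocator, eden: bool) {
    // Let the large object space prepare for an eden or full collection.
    global.prepare_for_marking(eden);
    // Gather every block owned by the bump allocators, clearing their mark and
    // line bitmaps, and flip the mark state of large allocations.
    let blocks = global.begin_marking();
    // ... the collector traces roots here and sets bits in `mark_bitmap` ...
    // Sweep the same block list: empty blocks return to the block allocator,
    // partially live ones become recyclable.
    global.sweep(blocks);
    global.release_memory();
}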