moonpool_explorer/coverage.rs

/// Size of the coverage map in bytes (8 * 1024 = 8192 addressable bits).
pub const COVERAGE_MAP_SIZE: usize = 1024;

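/// Coverage bitmap backed by a raw pointer to a `COVERAGE_MAP_SIZE`-byte
/// buffer; each bit marks one coverage index as hit.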
pub struct CoverageBitmap {
    ptr: *mut u8,
}

impl CoverageBitmap {
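    /// Wraps a raw coverage buffer.
    ///
    /// # Safety
    ///
    /// `ptr` must point to at least `COVERAGE_MAP_SIZE` bytes of valid,
    /// writable memory that outlives the returned `CoverageBitmap`.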
    pub unsafe fn new(ptr: *mut u8) -> Self {
        Self { ptr }
    }

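    /// Sets the bit for `index`, wrapping indices past the map's capacity of
    /// `COVERAGE_MAP_SIZE * 8` bits.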
    pub fn set_bit(&self, index: usize) {
        let bit_index = index % (COVERAGE_MAP_SIZE * 8);
        let byte = bit_index / 8;
        let bit = bit_index % 8;
        unsafe {
            *self.ptr.add(byte) |= 1 << bit;
        }
    }

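    /// Zeroes the entire map.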
    pub fn clear(&self) {
        unsafe {
            std::ptr::write_bytes(self.ptr, 0, COVERAGE_MAP_SIZE);
        }
    }

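    /// Returns a read-only pointer to the underlying buffer.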
    pub fn as_ptr(&self) -> *const u8 {
        self.ptr
    }
}

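/// Accumulated coverage across runs: bits merged in from individual
/// `CoverageBitmap`s, backed by its own `COVERAGE_MAP_SIZE`-byte buffer.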
pub struct ExploredMap {
    ptr: *mut u8,
}

impl ExploredMap {
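    /// Wraps a raw buffer used to accumulate coverage.
    ///
    /// # Safety
    ///
    /// `ptr` must point to at least `COVERAGE_MAP_SIZE` bytes of valid,
    /// writable memory that outlives the returned `ExploredMap`.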
    pub unsafe fn new(ptr: *mut u8) -> Self {
        Self { ptr }
    }

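    /// ORs every byte of `other` into this map.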
    pub fn merge_from(&self, other: &CoverageBitmap) {
        unsafe {
            for i in 0..COVERAGE_MAP_SIZE {
                *self.ptr.add(i) |= *other.as_ptr().add(i);
            }
        }
    }

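    /// Counts how many bits are set across the whole map.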
    pub fn count_bits_set(&self) -> u32 {
        let mut count: u32 = 0;
        unsafe {
            for i in 0..COVERAGE_MAP_SIZE {
                count += (*self.ptr.add(i)).count_ones();
            }
        }
        count
    }

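    /// Returns `true` if `other` has any bit set that this map has not seen yet.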
    pub fn has_new_bits(&self, other: &CoverageBitmap) -> bool {
        unsafe {
            for i in 0..COVERAGE_MAP_SIZE {
                let explored = *self.ptr.add(i);
                let child = *other.as_ptr().add(i);
                if (child & !explored) != 0 {
                    return true;
                }
            }
        }
        false
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::shared_mem;

    #[test]
    fn test_set_bit_and_check() {
        let ptr = shared_mem::alloc_shared(COVERAGE_MAP_SIZE).expect("alloc failed");
        let explored_ptr = shared_mem::alloc_shared(COVERAGE_MAP_SIZE).expect("alloc failed");
        let bm = unsafe { CoverageBitmap::new(ptr) };
        let vm = unsafe { ExploredMap::new(explored_ptr) };

        assert!(!vm.has_new_bits(&bm));

        bm.set_bit(42);
        assert!(vm.has_new_bits(&bm));

        vm.merge_from(&bm);
        assert!(!vm.has_new_bits(&bm));

        unsafe {
            shared_mem::free_shared(ptr, COVERAGE_MAP_SIZE);
            shared_mem::free_shared(explored_ptr, COVERAGE_MAP_SIZE);
        }
    }

    #[test]
    fn test_clear() {
        let ptr = shared_mem::alloc_shared(COVERAGE_MAP_SIZE).expect("alloc failed");
        let bm = unsafe { CoverageBitmap::new(ptr) };

        bm.set_bit(0);
        bm.set_bit(100);
        bm.set_bit(8000);

        bm.clear();

        unsafe {
            for i in 0..COVERAGE_MAP_SIZE {
                assert_eq!(*ptr.add(i), 0);
            }
            shared_mem::free_shared(ptr, COVERAGE_MAP_SIZE);
        }
    }

    #[test]
    fn test_merge_accumulates() {
        let bm1_ptr = shared_mem::alloc_shared(COVERAGE_MAP_SIZE).expect("alloc failed");
        let bm2_ptr = shared_mem::alloc_shared(COVERAGE_MAP_SIZE).expect("alloc failed");
        let vm_ptr = shared_mem::alloc_shared(COVERAGE_MAP_SIZE).expect("alloc failed");

        let bm1 = unsafe { CoverageBitmap::new(bm1_ptr) };
        let bm2 = unsafe { CoverageBitmap::new(bm2_ptr) };
        let vm = unsafe { ExploredMap::new(vm_ptr) };

        bm1.set_bit(10);
        bm2.set_bit(20);

        vm.merge_from(&bm1);
        assert!(vm.has_new_bits(&bm2));

        vm.merge_from(&bm2);
        assert!(!vm.has_new_bits(&bm1));
        assert!(!vm.has_new_bits(&bm2));

        unsafe {
            shared_mem::free_shared(bm1_ptr, COVERAGE_MAP_SIZE);
            shared_mem::free_shared(bm2_ptr, COVERAGE_MAP_SIZE);
            shared_mem::free_shared(vm_ptr, COVERAGE_MAP_SIZE);
        }
    }

    #[test]
    fn test_count_bits_set() {
        let vm_ptr = shared_mem::alloc_shared(COVERAGE_MAP_SIZE).expect("alloc failed");
        let bm_ptr = shared_mem::alloc_shared(COVERAGE_MAP_SIZE).expect("alloc failed");
        let vm = unsafe { ExploredMap::new(vm_ptr) };
        let bm = unsafe { CoverageBitmap::new(bm_ptr) };

        assert_eq!(vm.count_bits_set(), 0);

        bm.set_bit(0);
        bm.set_bit(42);
        bm.set_bit(8000);
        vm.merge_from(&bm);

        assert_eq!(vm.count_bits_set(), 3);

        unsafe {
            shared_mem::free_shared(vm_ptr, COVERAGE_MAP_SIZE);
            shared_mem::free_shared(bm_ptr, COVERAGE_MAP_SIZE);
        }
    }
}