use crate::util::metadata::vo_bit;
use crate::util::Address;
use crate::util::ObjectReference;
use crate::vm::ObjectModel;
use crate::vm::VMBinding;
use std::marker::PhantomData;

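/// Iterate over objects in an address range by linearly scanning the valid-object (VO) bits.
/// The range is expected to lie in address space where VO bits are maintained.
/// `ATOMIC_LOAD_VO_BIT` selects between atomic and non-atomic loads of the VO bit; the
/// non-atomic variant is only sound when no other thread may be mutating the VO bits.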
pub struct ObjectIterator<VM: VMBinding, S: LinearScanObjectSize, const ATOMIC_LOAD_VO_BIT: bool> {
    start: Address,
    end: Address,
    cursor: Address,
    _p: PhantomData<(VM, S)>,
}

impl<VM: VMBinding, S: LinearScanObjectSize, const ATOMIC_LOAD_VO_BIT: bool>
    ObjectIterator<VM, S, ATOMIC_LOAD_VO_BIT>
{
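    /// Create an iterator for the address range `[start, end)`. Both addresses must be
    /// word-aligned, and `start` must be strictly less than `end`.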
    pub fn new(start: Address, end: Address) -> Self {
        debug_assert!(start < end);
        debug_assert!(
            start.is_aligned_to(ObjectReference::ALIGNMENT),
            "start is not word-aligned: {start}"
        );
        debug_assert!(
            end.is_aligned_to(ObjectReference::ALIGNMENT),
            "end is not word-aligned: {end}"
        );
        ObjectIterator {
            start,
            end,
            cursor: start,
            _p: PhantomData,
        }
    }
}

impl<VM: VMBinding, S: LinearScanObjectSize, const ATOMIC_LOAD_VO_BIT: bool> std::iter::Iterator
    for ObjectIterator<VM, S, ATOMIC_LOAD_VO_BIT>
{
    type Item = ObjectReference;

    fn next(&mut self) -> Option<<Self as Iterator>::Item> {
        while self.cursor < self.end {
            // Check whether the VO bit is set for the cursor address, using either an
            // atomic or a non-atomic load depending on the const parameter.
            let is_object = if ATOMIC_LOAD_VO_BIT {
                vo_bit::is_vo_bit_set_for_addr(self.cursor)
            } else {
                unsafe { vo_bit::is_vo_bit_set_unsafe(self.cursor) }
            };

            if let Some(object) = is_object {
                // Found an object: skip past it and yield it.
                self.cursor += S::size(object);
                return Some(object);
            } else {
                // No object at this address: advance by the minimum alignment and keep scanning.
                self.cursor += VM::MIN_ALIGNMENT;
            }
        }

        None
    }
}

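/// Describes how to determine the size of an object during linear scanning, so the
/// iterator knows how far to advance the cursor past each object it finds.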
pub trait LinearScanObjectSize {
    /// Return the size of the given object in bytes.
    fn size(object: ObjectReference) -> usize;
}

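/// The default object size strategy: ask the VM's object model for the current object size.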
pub struct DefaultObjectSize<VM: VMBinding>(PhantomData<VM>);

impl<VM: VMBinding> LinearScanObjectSize for DefaultObjectSize<VM> {
    fn size(object: ObjectReference) -> usize {
        VM::VMObjectModel::get_current_size(object)
    }
}

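/// A `Region` is a contiguous, power-of-two-sized and aligned chunk of address space, such as
/// a page or a block. Implementors only need to provide `LOG_BYTES`, `from_aligned_address`
/// and `start`; the remaining methods have default implementations in terms of these.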
pub trait Region: Copy + PartialEq + PartialOrd {
    /// log2 of the region size in bytes.
    const LOG_BYTES: usize;
    /// The region size in bytes.
    const BYTES: usize = 1 << Self::LOG_BYTES;

    /// Create a region from an address that is aligned to the region boundary.
    fn from_aligned_address(address: Address) -> Self;
    /// The start address of the region.
    fn start(&self) -> Address;

    /// Create a region from an arbitrary address by aligning it down to the region boundary.
    fn from_unaligned_address(address: Address) -> Self {
        Self::from_aligned_address(Self::align(address))
    }

    /// Align an address down to the region boundary.
    fn align(address: Address) -> Address {
        address.align_down(Self::BYTES)
    }
    /// Check if an address is aligned to the region boundary.
    fn is_aligned(address: Address) -> bool {
        address.is_aligned_to(Self::BYTES)
    }

    /// The end address of the region (exclusive).
    fn end(&self) -> Address {
        self.start() + Self::BYTES
    }
    /// The next contiguous region.
    fn next(&self) -> Self {
        self.next_nth(1)
    }
    /// The region that is `n` regions after this one.
    fn next_nth(&self, n: usize) -> Self {
        debug_assert!(self.start().as_usize() < usize::MAX - (n << Self::LOG_BYTES));
        Self::from_aligned_address(self.start() + (n << Self::LOG_BYTES))
    }
    /// The region that contains the given object.
    fn containing(object: ObjectReference) -> Self {
        Self::from_unaligned_address(object.to_raw_address())
    }
    /// Check if the given address falls within this region.
    fn includes_address(&self, addr: Address) -> bool {
        Self::align(addr) == self.start()
    }
}

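/// Iterate over contiguous regions from a start region (inclusive) to an end region (exclusive).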
pub struct RegionIterator<R: Region> {
    current: R,
    end: R,
}

impl<R: Region> RegionIterator<R> {
    pub fn new(start: R, end: R) -> Self {
        Self {
            current: start,
            end,
        }
    }
}

impl<R: Region> Iterator for RegionIterator<R> {
    type Item = R;

    fn next(&mut self) -> Option<R> {
        if self.current < self.end {
            let ret = self.current;
            self.current = self.current.next();
            Some(ret)
        } else {
            None
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::util::constants::LOG_BYTES_IN_PAGE;

    const PAGE_SIZE: usize = 1 << LOG_BYTES_IN_PAGE;

    #[derive(Copy, Clone, Debug, PartialEq, PartialOrd)]
    struct Page(Address);

    impl Region for Page {
        const LOG_BYTES: usize = LOG_BYTES_IN_PAGE as usize;

        fn from_aligned_address(address: Address) -> Self {
            debug_assert!(address.is_aligned_to(Self::BYTES));
            Self(address)
        }

        fn start(&self) -> Address {
            self.0
        }
    }

    #[test]
    fn test_region_methods() {
        let addr4k = unsafe { Address::from_usize(PAGE_SIZE) };
        let addr4k1 = unsafe { Address::from_usize(PAGE_SIZE + 1) };

        debug_assert_eq!(Page::align(addr4k), addr4k);
        debug_assert_eq!(Page::align(addr4k1), addr4k);
        debug_assert!(Page::is_aligned(addr4k));
        debug_assert!(!Page::is_aligned(addr4k1));

        let page = Page::from_aligned_address(addr4k);
        debug_assert_eq!(page.start(), addr4k);
        debug_assert_eq!(page.end(), addr4k + PAGE_SIZE);
        debug_assert_eq!(page.next().start(), addr4k + PAGE_SIZE);
        debug_assert_eq!(page.next_nth(1).start(), addr4k + PAGE_SIZE);
        debug_assert_eq!(page.next_nth(2).start(), addr4k + 2 * PAGE_SIZE);
    }

    #[test]
    fn test_region_iterator_normal() {
        let addr4k = unsafe { Address::from_usize(PAGE_SIZE) };
        let page = Page::from_aligned_address(addr4k);
        let end_page = page.next_nth(5);

        let mut results = vec![];
        let iter = RegionIterator::new(page, end_page);
        for p in iter {
            results.push(p);
        }
        debug_assert_eq!(
            results,
            vec![
                page,
                page.next_nth(1),
                page.next_nth(2),
                page.next_nth(3),
                page.next_nth(4)
            ]
        );
    }

    #[test]
    fn test_region_iterator_same_start_end() {
        let addr4k = unsafe { Address::from_usize(PAGE_SIZE) };
        let page = Page::from_aligned_address(addr4k);

        let mut results = vec![];
        let iter = RegionIterator::new(page, page);
        for p in iter {
            results.push(p);
        }
        debug_assert_eq!(results, vec![]);
    }

    #[test]
    fn test_region_iterator_smaller_end() {
        let addr4k = unsafe { Address::from_usize(PAGE_SIZE) };
        let page = Page::from_aligned_address(addr4k);
        let end = Page::from_aligned_address(Address::ZERO);

        let mut results = vec![];
        let iter = RegionIterator::new(page, end);
        for p in iter {
            results.push(p);
        }
        debug_assert_eq!(results, vec![]);
    }
}