// contiguous_mem/tracker.rs
#![doc(hidden)]

use core::{alloc::Layout, cmp::Ordering};

#[cfg(feature = "no_std")]
use crate::types::{vec, Vec};
use crate::{error::ContiguousMemoryError, range::ByteRange};

/// Tracks which parts of a fixed-size contiguous memory region are unused.
///
/// Free space is kept as a list of non-overlapping [`ByteRange`]s; the
/// visible methods maintain them in ascending order of start offset.
#[derive(Clone)]
pub struct AllocationTracker {
    // Total size (in bytes) of the tracked region.
    size: usize,
    // Currently unused byte ranges, non-overlapping, sorted by start offset.
    unused: Vec<ByteRange>,
}
16
17impl AllocationTracker {
18 pub fn new(size: usize) -> Self {
20 AllocationTracker {
21 size,
22 unused: vec![ByteRange(0, size)],
23 }
24 }
25
26 pub fn len(&self) -> usize {
28 self.size
29 }
30
31 pub fn is_empty(&self) -> bool {
33 self.unused.is_empty()
34 }
35
36 pub fn whole_range(&self) -> ByteRange {
38 ByteRange(0, self.size)
39 }
40
41 pub fn resize(&mut self, new_size: usize) -> Result<(), ContiguousMemoryError> {
46 match new_size.cmp(&self.size) {
47 Ordering::Equal => {}
48 Ordering::Less => {
49 let last = self
50 .unused
51 .last_mut()
52 .ok_or(ContiguousMemoryError::Unshrinkable {
53 required_size: self.size,
54 })?;
55
56 let reduction = self.size - new_size;
57 if last.len() < reduction {
58 return Err(ContiguousMemoryError::Unshrinkable {
59 required_size: self.size - last.len(),
60 });
61 }
62 last.1 -= reduction;
63 self.size = new_size;
64 }
65 Ordering::Greater => {
66 match self.unused.last() {
67 Some(it) => {
68 if it.1 == self.size {
71 let last = self
72 .unused
73 .last_mut()
74 .expect("free byte ranges isn't empty");
75 last.1 = new_size;
76 } else {
77 self.unused.push(ByteRange(self.size, new_size));
78 }
79 }
80 None => {
81 self.unused.push(ByteRange(self.size, new_size));
82 }
83 }
84 self.size = new_size;
85 }
86 }
87 Ok(())
88 }
89
90 pub fn shrink_to_fit(&mut self) -> Option<usize> {
95 match self.unused.last() {
96 Some(it) if it.1 == self.size => {
97 let last = self.unused.pop().expect("free byte ranges isn't empty");
98 self.size -= last.len();
99 Some(self.size)
100 }
101 _ => None,
102 }
103 }
104
105 pub fn peek_next(&self, layout: Layout) -> Option<ByteRange> {
111 if layout.size() > self.size {
112 return None;
113 }
114
115 let available = self.unused.iter().find(|it| {
116 it.len() >= layout.size() && it.aligned(layout.align()).len() >= layout.size()
117 })?;
118
119 let usable = available.aligned(layout.align()).cap_size(layout.size());
120
121 Some(usable)
122 }
123
124 pub fn take(&mut self, region: ByteRange) -> Result<(), ContiguousMemoryError> {
132 if self.whole_range().contains(region) {
133 return Err(ContiguousMemoryError::NotContained);
134 }
135
136 let (i, found) = self
137 .unused
138 .iter()
139 .enumerate()
140 .find(|(_, it)| it.contains(region))
141 .ok_or(ContiguousMemoryError::AlreadyUsed)?;
142
143 let (left, right) = found.difference_unchecked(region);
144
145 if !left.is_empty() {
146 self.unused[i] = left;
147 if !right.is_empty() {
148 self.unused.insert(i + 1, right);
149 }
150 } else if !right.is_empty() {
151 self.unused[i] = right;
152 } else {
153 self.unused.remove(i);
154 }
155
156 Ok(())
157 }
158
159 pub fn take_next(
166 &mut self,
167 base_address: usize,
168 layout: Layout,
169 ) -> Result<ByteRange, ContiguousMemoryError> {
170 if layout.size() > self.size {
171 return Err(ContiguousMemoryError::NoStorageLeft);
172 }
173
174 let (i, available) = self
175 .unused
176 .iter()
177 .enumerate()
178 .find(|(_, it)| {
179 if it.len() < layout.size() {
180 return false;
181 }
182
183 let aligned = it
184 .offset(base_address)
185 .aligned(layout.align())
186 .cap_end(base_address + self.len());
187
188 aligned.len() >= layout.size()
189 })
190 .ok_or(ContiguousMemoryError::NoStorageLeft)?;
191
192 let taken = available.aligned(layout.align()).cap_size(layout.size());
193
194 let (left, right) = available.difference_unchecked(taken);
195
196 if !left.is_empty() {
197 self.unused[i] = left;
198 if !right.is_empty() {
199 self.unused.insert(i + 1, right);
200 }
201 } else if !right.is_empty() {
202 self.unused[i] = right;
203 } else {
204 self.unused.remove(i);
205 }
206
207 Ok(taken)
208 }
209
210 pub fn release(&mut self, region: ByteRange) -> Result<(), ContiguousMemoryError> {
214 if !self.whole_range().contains(region) {
215 return Err(ContiguousMemoryError::NotContained);
216 }
217
218 if let Some(found) = self
219 .unused
220 .iter_mut()
221 .find(|it| region.1 == it.0 || it.1 == region.0 || it.contains(region))
222 {
223 if found.contains(region) {
224 return Err(ContiguousMemoryError::DoubleFree);
225 }
226 found.merge_in_unchecked(region);
227 } else if let Some((i, _)) = self.unused.iter().enumerate().find(|it| it.0 > region.0) {
228 self.unused.insert(i, region);
229 } else {
230 self.unused.push(region);
231 }
232
233 Ok(())
234 }
235}
236
// Manual `Debug` impl, only compiled when the crate's "debug" feature is
// enabled; mirrors the derived form (struct name plus both fields).
#[cfg(feature = "debug")]
impl core::fmt::Debug for AllocationTracker {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("AllocationTracker")
            .field("size", &self.size)
            .field("unused", &self.unused)
            .finish()
    }
}
246
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_new_allocation_tracker() {
        let tracker = AllocationTracker::new(1024);
        assert_eq!(tracker.len(), 1024);
        // A fresh tracker still has its whole region free.
        // (Idiom fix: `assert!(!x)` instead of `assert_eq!(x, false)`,
        // per clippy::bool_assert_comparison.)
        assert!(!tracker.is_empty());
        assert_eq!(tracker.whole_range(), ByteRange(0, 1024));
    }

    #[test]
    fn test_resize_allocation_tracker() {
        let mut tracker = AllocationTracker::new(1024);

        // Shrinking consumes the free tail...
        tracker.resize(512).unwrap();
        assert_eq!(tracker.len(), 512);

        // ...and growing extends it again.
        tracker.resize(2048).unwrap();
        assert_eq!(tracker.len(), 2048);
    }

    #[test]
    fn test_take_and_release_allocation_tracker() {
        let mut tracker = AllocationTracker::new(1024);

        let range = tracker
            .take_next(0, Layout::from_size_align(32, 8).unwrap())
            .unwrap();
        assert_eq!(range, ByteRange(0, 32));

        tracker
            .release(range)
            .expect("expected AllocationTracker to have the provided range marked as taken");
        assert!(!tracker.is_empty());
    }

    #[test]
    fn test_peek_next_allocation_tracker() {
        let tracker = AllocationTracker::new(1024);

        // Peeking must not claim the range; `tracker` stays immutable here.
        let layout = Layout::from_size_align(64, 8).unwrap();
        let range = tracker.peek_next(layout).unwrap();
        assert_eq!(range, ByteRange(0, 64));
    }

    #[test]
    fn test_take_next_allocation_tracker() {
        let mut tracker = AllocationTracker::new(1024);

        let layout = Layout::from_size_align(128, 8).unwrap();
        let range = tracker.take_next(0, layout).unwrap();
        assert_eq!(range, ByteRange(0, 128));
    }
}