#![forbid(unsafe_code)]
#![warn(clippy::pedantic)]

use std::collections::{BTreeMap, BTreeSet};
use std::num::NonZeroU32;

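/// A contiguous range handed out by [`ChunkedRangeAlloc::alloc`].
///
/// Pass the value back to [`ChunkedRangeAlloc::free`] exactly once to release
/// the range; freeing it twice panics.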
#[must_use]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Allocation {
    /// Index of the chunk this allocation lives in.
    pub chunk_index: u32,
    /// Offset of the range within its chunk.
    pub offset: u32,
    /// Length of the range.
    pub len: NonZeroU32,
}

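/// A range allocator that manages space in fixed-size chunks.
///
/// Free ranges are tracked both per chunk (keyed by offset, for merging on
/// free) and in one global ordered set (keyed by length first, for best-fit
/// lookup on allocation). When no existing free range fits, a new chunk is
/// opened.
///
/// # Examples
///
/// A minimal round trip. The block is marked `ignore` because the crate path
/// in the `use` line is an assumption, not taken from this file:
///
/// ```ignore
/// use chunked_range_alloc::ChunkedRangeAlloc; // hypothetical crate path
/// use std::num::NonZeroU32;
///
/// let mut alloc = ChunkedRangeAlloc::new(NonZeroU32::new(1024).unwrap());
///
/// // The first allocation opens chunk 0 and starts at offset 0.
/// let a = alloc.alloc(NonZeroU32::new(100).unwrap(), NonZeroU32::new(4).unwrap());
/// assert_eq!((a.chunk_index, a.offset), (0, 0));
///
/// // Freeing the only allocation leaves its chunk completely free.
/// assert!(alloc.free(a));
/// ```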
pub struct ChunkedRangeAlloc {
    /// Size of every chunk.
    chunk_size: NonZeroU32,
    /// All free ranges across all chunks, ordered by length first.
    free_ranges: BTreeSet<FreeRange>,
    /// Per-chunk free-range bookkeeping, indexed by chunk.
    chunks: Vec<Chunk>,
    /// Number of chunks holding at least one allocation.
    active_chunks: usize,
    /// Combined size of all live allocations.
    allocated_memory: u64,
}

/// Per-chunk bookkeeping of free space.
#[derive(Default)]
struct Chunk {
    /// Free ranges within this chunk, keyed by starting offset.
    free_offsets: BTreeMap<u32, NonZeroU32>,
}

impl Chunk {
    /// Returns the free range starting at or before `offset`, if any.
    #[inline]
    fn previous_free_range(&self, offset: u32) -> Option<(u32, NonZeroU32)> {
        self.free_offsets
            .range(..=offset)
            .next_back()
            .map(|(&off, &len)| (off, len))
    }

    /// Returns the free range starting at or after `offset`, if any.
    #[inline]
    fn next_free_range(&self, offset: u32) -> Option<(u32, NonZeroU32)> {
        self.free_offsets
            .range(offset..)
            .next()
            .map(|(&off, &len)| (off, len))
    }
}

/// A free range, ordered by `len` first so that a `BTreeSet` range scan
/// visits the shortest candidates before longer ones (best fit).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct FreeRange {
    /// Length of the range. Must remain the first field: the derived `Ord`
    /// compares fields in declaration order.
    len: NonZeroU32,
    /// Index of the chunk containing the range.
    chunk: u32,
    /// Starting offset of the range within its chunk.
    offset: u32,
}

/// A free range plus the aligned sub-range chosen for an allocation.
struct AlignedFreeRange {
    free_range: FreeRange,
    /// End of the whole free range.
    end: u32,
    /// Aligned start of the allocation inside the free range.
    aligned_offset: u32,
    /// End of the allocation (`aligned_offset + size`).
    aligned_end: u32,
}

impl ChunkedRangeAlloc {
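    /// Creates an empty allocator whose chunks each hold `chunk_size` units.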
    #[must_use]
    #[inline]
    pub fn new(chunk_size: NonZeroU32) -> Self {
        ChunkedRangeAlloc {
            chunk_size,
            free_ranges: BTreeSet::new(),
            chunks: Vec::new(),
            active_chunks: 0,
            allocated_memory: 0,
        }
    }

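    /// Rebuilds an allocator from a set of live allocations, for example
    /// after deserializing them.
    ///
    /// Returns `None` if the allocations are inconsistent: out of bounds,
    /// overlapping, or referencing more chunks than can be indexed.
    ///
    /// # Examples
    ///
    /// A sketch of restoring state. The block is marked `ignore` because the
    /// crate path in the `use` line is an assumption, not taken from this
    /// file:
    ///
    /// ```ignore
    /// use chunked_range_alloc::ChunkedRangeAlloc; // hypothetical crate path
    /// use std::num::NonZeroU32;
    ///
    /// let chunk_size = NonZeroU32::new(128).unwrap();
    /// let mut original = ChunkedRangeAlloc::new(chunk_size);
    /// let live = original.alloc(NonZeroU32::new(48).unwrap(), NonZeroU32::new(8).unwrap());
    ///
    /// // The rebuilt allocator accounts for the live allocation.
    /// let rebuilt = ChunkedRangeAlloc::from_allocations(chunk_size, [&live]).unwrap();
    /// assert_eq!(rebuilt.allocated_memory(), original.allocated_memory());
    /// ```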
    #[must_use]
    #[inline]
    pub fn from_allocations<'a>(
        chunk_size: NonZeroU32,
        allocations: impl IntoIterator<Item = &'a Allocation>,
    ) -> Option<Self> {
        let mut a = Self::new(chunk_size);

        for allocation in allocations {
            let chunk_index = usize::try_from(allocation.chunk_index).ok()?;

            // Grow the chunk list until the referenced chunk exists; each
            // new chunk starts as a single fully free range.
            let chunk = loop {
                if let Some(chunk) = a.chunks.get(chunk_index) {
                    break chunk;
                }

                let chunk = u32::try_from(a.chunks.len()).ok()?;

                a.chunks.push(Chunk::default());

                a.insert_free_range(FreeRange {
                    len: chunk_size,
                    chunk,
                    offset: 0,
                });
            };

            let (free_offset, free_len) = chunk.previous_free_range(allocation.offset)?;

            let free_end = free_offset + free_len.get();
            let allocation_end = allocation.offset.checked_add(allocation.len.get())?;

            // The allocation must lie entirely within one free range.
            if !(free_offset <= allocation.offset && free_end >= allocation_end) {
                return None;
            }

            a.alloc_from(&AlignedFreeRange {
                free_range: FreeRange {
                    len: free_len,
                    chunk: allocation.chunk_index,
                    offset: free_offset,
                },
                end: free_end,
                aligned_offset: allocation.offset,
                aligned_end: allocation_end,
            });
        }

        Some(a)
    }

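    /// Returns the size of each chunk.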
    #[must_use]
    #[inline]
    pub fn chunk_size(&self) -> NonZeroU32 {
        self.chunk_size
    }

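    /// Allocates `size` units at a multiple of `align`, preferring the
    /// smallest existing free range that fits (best fit) and opening a new
    /// chunk when none does.
    ///
    /// # Panics
    ///
    /// Panics if `size` exceeds the chunk size, if `align` is not a power of
    /// two, or if the chunk count no longer fits in a `u32`.
    ///
    /// # Examples
    ///
    /// Alignment rounds the offset up. The block is marked `ignore` because
    /// the crate path in the `use` line is an assumption:
    ///
    /// ```ignore
    /// use chunked_range_alloc::ChunkedRangeAlloc; // hypothetical crate path
    /// use std::num::NonZeroU32;
    ///
    /// let mut alloc = ChunkedRangeAlloc::new(NonZeroU32::new(256).unwrap());
    /// let first = alloc.alloc(NonZeroU32::new(10).unwrap(), NonZeroU32::new(1).unwrap());
    /// assert_eq!(first.offset, 0);
    ///
    /// // Free space begins at offset 10; a 16-aligned request is rounded up
    /// // to offset 16, leaving the 6-unit gap free.
    /// let second = alloc.alloc(NonZeroU32::new(16).unwrap(), NonZeroU32::new(16).unwrap());
    /// assert_eq!(second.offset, 16);
    /// ```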
    #[inline]
    pub fn alloc(&mut self, size: NonZeroU32, align: NonZeroU32) -> Allocation {
        if let Some(aligned_free_range) = self.find_free_range(size, align) {
            let allocation = Allocation {
                chunk_index: aligned_free_range.free_range.chunk,
                offset: aligned_free_range.aligned_offset,
                len: size,
            };

            self.alloc_from(&aligned_free_range);

            return allocation;
        }

        // No existing free range fits: open a new chunk and allocate from
        // its start. Offset 0 satisfies any power-of-two alignment.
        let len = size;
        let size = size.get();

        let chunk = u32::try_from(self.chunks.len()).expect("too many chunks");

        let tail_len = self
            .chunk_size
            .get()
            .checked_sub(size)
            .expect("size exceeds chunk_size");

        self.chunks.push(Chunk::default());

        // Whatever remains after the allocation becomes a free range.
        if let Some(len) = NonZeroU32::new(tail_len) {
            self.insert_free_range(FreeRange {
                len,
                chunk,
                offset: size,
            });
        }

        self.active_chunks += 1;
        self.allocated_memory += u64::from(size);

        Allocation {
            chunk_index: chunk,
            offset: 0,
            len,
        }
    }

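    /// Frees an allocation, merging it with any adjacent free ranges, and
    /// returns `true` if its chunk is now completely free.
    ///
    /// # Panics
    ///
    /// Panics if the allocation does not belong to this allocator or has
    /// already been freed.
    ///
    /// # Examples
    ///
    /// A sketch of the return value. The block is marked `ignore` because
    /// the crate path in the `use` line is an assumption:
    ///
    /// ```ignore
    /// use chunked_range_alloc::ChunkedRangeAlloc; // hypothetical crate path
    /// use std::num::NonZeroU32;
    ///
    /// let mut alloc = ChunkedRangeAlloc::new(NonZeroU32::new(64).unwrap());
    /// let one = NonZeroU32::new(1).unwrap();
    /// let x = alloc.alloc(NonZeroU32::new(32).unwrap(), one);
    /// let y = alloc.alloc(NonZeroU32::new(32).unwrap(), one);
    ///
    /// assert!(!alloc.free(x)); // the chunk still holds `y`
    /// assert!(alloc.free(y)); // the chunk is now completely free
    /// ```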
    #[must_use]
    #[inline]
    pub fn free(&mut self, allocation: Allocation) -> bool {
        let chunk_index = usize::try_from(allocation.chunk_index).expect("invalid chunk index");
        let chunk = self.chunks.get(chunk_index).expect("invalid chunk index");

        let previous_free_range = chunk.previous_free_range(allocation.offset);
        let next_free_range = chunk.next_free_range(allocation.offset);

        let mut free_range = FreeRange {
            len: allocation.len,
            chunk: allocation.chunk_index,
            offset: allocation.offset,
        };

        // Coalesce with the free range that ends where the allocation starts.
        if let Some((offset, len)) = previous_free_range {
            let previous_end = offset + len.get();

            assert!(previous_end <= free_range.offset, "double free");

            if previous_end == free_range.offset {
                free_range.offset = offset;
                free_range.len = free_range
                    .len
                    .checked_add(len.get())
                    .expect("invalid allocation");

                self.remove_free_range(FreeRange {
                    len,
                    chunk: free_range.chunk,
                    offset,
                });
            }
        }

        // Coalesce with the free range that starts where the allocation ends.
        if let Some((offset, len)) = next_free_range {
            let end = free_range.offset + free_range.len.get();

            assert!(end <= offset, "double free");

            if end == offset {
                free_range.len = free_range
                    .len
                    .checked_add(len.get())
                    .expect("invalid allocation");

                self.remove_free_range(FreeRange {
                    len,
                    chunk: free_range.chunk,
                    offset,
                });
            }
        }

        self.insert_free_range(free_range);

        self.allocated_memory -= u64::from(allocation.len.get());

        // The chunk is completely free when the merged range spans all of it.
        let free_chunk = free_range.len >= self.chunk_size;
        self.active_chunks -= usize::from(free_chunk);

        free_chunk
    }

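    /// Returns the total number of chunks ever created, free or not.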
    #[must_use]
    #[inline]
    pub fn chunk_count(&self) -> usize {
        self.chunks.len()
    }

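    /// Returns the number of chunks currently holding at least one allocation.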
    #[must_use]
    #[inline]
    pub fn active_chunks(&self) -> usize {
        self.active_chunks
    }

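    /// Returns the number of chunks that are completely free.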
    #[must_use]
    #[inline]
    pub fn unused_chunks(&self) -> usize {
        self.chunk_count() - self.active_chunks
    }

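    /// Returns the combined capacity of all chunks, free or not.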
    #[must_use]
    #[inline]
    pub fn capacity(&self) -> u64 {
        u64::try_from(self.chunk_count()).unwrap_or(u64::MAX) * u64::from(self.chunk_size.get())
    }

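    /// Returns the combined size of all live allocations.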
    #[must_use]
    #[inline]
    pub fn allocated_memory(&self) -> u64 {
        self.allocated_memory
    }

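    /// Returns how much of [`Self::capacity`] is not currently allocated.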
    #[must_use]
    #[inline]
    pub fn remaining(&self) -> u64 {
        self.capacity() - self.allocated_memory
    }

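    /// Returns the combined size of all active chunks.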
    #[must_use]
    #[inline]
    pub fn used_memory(&self) -> u64 {
        u64::try_from(self.active_chunks).unwrap_or(u64::MAX) * u64::from(self.chunk_size.get())
    }

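    /// Returns the unallocated space inside active chunks, i.e.
    /// [`Self::used_memory`] minus [`Self::allocated_memory`].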
    #[must_use]
    #[inline]
    pub fn unused_memory(&self) -> u64 {
        self.used_memory() - self.allocated_memory
    }

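    /// Carves `aligned_offset..aligned_end` out of `free_range`, keeping any
    /// leftover space before and after it as new free ranges.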
    #[inline]
    fn alloc_from(&mut self, aligned_free_range: &AlignedFreeRange) {
        let &AlignedFreeRange {
            free_range,
            end,
            aligned_offset,
            aligned_end,
        } = aligned_free_range;

        self.remove_free_range(free_range);

        // Alignment padding before the allocation stays free.
        if let Some(len) = NonZeroU32::new(aligned_offset - free_range.offset) {
            self.insert_free_range(FreeRange {
                len,
                chunk: free_range.chunk,
                offset: free_range.offset,
            });
        }

        // Leftover space after the allocation stays free.
        if let Some(len) = NonZeroU32::new(end - aligned_end) {
            self.insert_free_range(FreeRange {
                len,
                chunk: free_range.chunk,
                offset: aligned_end,
            });
        }

        // A range spanning the whole chunk means the chunk was idle until now.
        let free_chunk = free_range.len >= self.chunk_size;
        self.active_chunks += usize::from(free_chunk);

        self.allocated_memory += u64::from(aligned_end - aligned_offset);
    }

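    /// Finds the smallest free range that can hold `size` units at the
    /// requested power-of-two alignment.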
    #[inline]
    fn find_free_range(&self, size: NonZeroU32, align: NonZeroU32) -> Option<AlignedFreeRange> {
        assert!(size <= self.chunk_size);
        assert!(align.is_power_of_two());

        let align = align.get();

        // `FreeRange` orders by `len` first, so starting the scan here skips
        // every range that is too short even before alignment is applied.
        let start = FreeRange {
            len: size,
            chunk: 0,
            offset: 0,
        };

        let size = size.get();

        self.free_ranges
            .range(start..)
            .copied()
            .find_map(|free_range| {
                let end = free_range.offset + free_range.len.get();

                // Returning `None` skips this range and tries the next one.
                let aligned_offset = free_range.offset.checked_next_multiple_of(align)?;
                let aligned_end = aligned_offset.checked_add(size)?;

                // Alignment padding may push the allocation past the range.
                if aligned_end > end {
                    return None;
                }

                Some(AlignedFreeRange {
                    free_range,
                    end,
                    aligned_offset,
                    aligned_end,
                })
            })
    }

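    /// Records a free range in the global set and in its chunk's offset map,
    /// asserting that it was not already present.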
    #[inline]
    fn insert_free_range(&mut self, free_range: FreeRange) {
        let chunk_index = usize::try_from(free_range.chunk).expect("invalid chunk index");
        let chunk = self
            .chunks
            .get_mut(chunk_index)
            .expect("invalid chunk index");

        let inserted = self.free_ranges.insert(free_range);
        assert!(inserted);

        let last = chunk.free_offsets.insert(free_range.offset, free_range.len);
        assert!(last.is_none());
    }

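    /// Removes a free range from the global set and its chunk's offset map,
    /// asserting that it was present in both.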
    #[inline]
    fn remove_free_range(&mut self, free_range: FreeRange) {
        let chunk_index = usize::try_from(free_range.chunk).expect("invalid chunk index");
        let chunk = self
            .chunks
            .get_mut(chunk_index)
            .expect("invalid chunk index");

        let removed = self.free_ranges.remove(&free_range);
        assert!(removed);

        let removed = chunk.free_offsets.remove(&free_range.offset);
        assert_eq!(removed, Some(free_range.len));
    }
}