Skip to main content

range_alloc/
lib.rs

1//! A generic range allocator for managing sub-ranges within a larger range.
2//!
3//! This crate provides [`RangeAllocator`], which hands out non-overlapping
4//! `Range<T>` values from a pool. It uses a best-fit strategy to reduce
5//! fragmentation and automatically merges adjacent free ranges on deallocation.
6//!
7//! # Example
8//!
9//! ```
10//! use range_alloc::RangeAllocator;
11//!
12//! let mut alloc = RangeAllocator::new(0..1024);
13//!
14//! // Allocate two regions.
15//! let a = alloc.allocate_range(256).unwrap(); // 0..256
16//! let b = alloc.allocate_range(128).unwrap(); // 256..384
17//!
18//! // Free the first region so it can be reused.
19//! alloc.free_range(a);
20//! ```
21//!
22//! # Minimum Supported Rust Version
23//!
//! This crate is known to build with Rust 1.31 and may work with earlier
//! versions. The MSRV will only be raised in a breaking release.
26
27use std::{
28    fmt::Debug,
29    iter::Sum,
30    ops::{Add, AddAssign, Range, Rem, Sub},
31};
32
/// A best-fit range allocator over a generic index type `T`.
///
/// `RangeAllocator` manages a single contiguous range and hands out
/// non-overlapping sub-ranges on request. Freed ranges are automatically
/// merged with their neighbors.
///
/// # Example
///
/// ```
/// use range_alloc::RangeAllocator;
///
/// let mut alloc = RangeAllocator::new(0..100);
/// let r = alloc.allocate_range(10).unwrap();
/// assert_eq!(r, 0..10);
/// alloc.free_range(r);
/// ```
#[derive(Debug)]
pub struct RangeAllocator<T> {
    /// The range this allocator covers. Its end may move forward via
    /// `grow_to`; allocations and frees must stay within it.
    initial_range: Range<T>,
    /// A Vec of ranges in this heap which are unused.
    /// Must be ordered with ascending range start to permit short circuiting allocation.
    /// No two ranges in this vec may overlap; touching neighbors are
    /// merged on free, and no stored range is empty.
    free_ranges: Vec<Range<T>>,
}
58
/// The error returned when an allocation cannot be satisfied.
///
/// Carries the total free space available at the time of the failed
/// request. Because that space may be split across non-contiguous
/// ranges, it can equal or exceed the requested length.
#[derive(Clone, Debug, PartialEq)]
pub struct RangeAllocationError<T> {
    /// The total length of all free ranges combined. When this is
    /// greater than or equal to the requested length, the allocation
    /// failed due to fragmentation rather than insufficient space.
    pub fragmented_free_length: T,
}
70
71impl<T> RangeAllocator<T>
72where
73    T: Clone + Copy + Add<Output = T> + AddAssign + Sub<Output = T> + Eq + PartialOrd + Debug,
74{
75    /// Creates a new allocator that manages the given range.
76    ///
77    /// The entire range starts as free and available for allocation.
78    ///
79    /// # Example
80    ///
81    /// ```
82    /// use range_alloc::RangeAllocator;
83    ///
84    /// let alloc = RangeAllocator::new(0u32..1024);
85    /// assert!(alloc.is_empty());
86    /// ```
87    pub fn new(range: Range<T>) -> Self {
88        RangeAllocator {
89            initial_range: range.clone(),
90            free_ranges: vec![range],
91        }
92    }
93
    /// Returns the full range this allocator was created with
    /// (including any extensions from [`grow_to`](Self::grow_to)).
    ///
    /// This is the union of all free and allocated space.
    pub fn initial_range(&self) -> &Range<T> {
        &self.initial_range
    }
99
100    /// Extends the allocator's range to a new end value.
101    ///
102    /// The newly added region (`old_end..new_end`) becomes available for
103    /// allocation. If the last free range is adjacent to the old end, it
104    /// is extended in place rather than creating a new entry.
105    ///
106    /// # Example
107    ///
108    /// ```
109    /// use range_alloc::RangeAllocator;
110    ///
111    /// let mut alloc = RangeAllocator::new(0..10);
112    /// alloc.allocate_range(10).unwrap();
113    /// // Out of space -- grow the pool.
114    /// alloc.grow_to(20);
115    /// let r = alloc.allocate_range(5).unwrap();
116    /// assert_eq!(r, 10..15);
117    /// ```
118    pub fn grow_to(&mut self, new_end: T) {
119        let initial_range_end = self.initial_range.end;
120        if let Some(last_range) = self
121            .free_ranges
122            .last_mut()
123            .filter(|last_range| last_range.end == initial_range_end)
124        {
125            last_range.end = new_end;
126        } else {
127            self.free_ranges.push(self.initial_range.end..new_end);
128        }
129
130        self.initial_range.end = new_end;
131    }
132
    /// Shared worker for [`allocate_range`](Self::allocate_range) and
    /// [`allocate_range_aligned`](Self::allocate_range_aligned).
    ///
    /// `align_start` maps a free range's start to the first acceptable
    /// start position at or after it (the identity function for the
    /// unaligned case). The scan tracks the free range with the smallest
    /// usable span that still fits `length` (best fit), stopping early on
    /// a perfect fit, then splits the chosen range around the allocation.
    ///
    /// # Panics
    ///
    /// Panics if `length` is zero.
    fn allocate_range_impl(
        &mut self,
        length: T,
        align_start: impl Fn(T) -> T,
    ) -> Result<Range<T>, RangeAllocationError<T>> {
        // Zero-length guard: `x + x == x` only holds for zero.
        assert_ne!(length + length, length);

        // This is actually correct. With the trait bound as it is, we have
        // no way to summon a value of 0 directly, so we make one by subtracting
        // something from itself. Once the trait bound can be changed, this can
        // be fixed.
        #[allow(clippy::eq_op)]
        let mut fragmented_free_length = length - length;
        // (index into free_ranges, aligned start within that range)
        let mut best_fit: Option<(usize, T)> = None;

        for (index, range) in self.free_ranges.iter().cloned().enumerate() {
            let range_length = range.end - range.start;
            fragmented_free_length += range_length;

            let aligned_start = align_start(range.start);

            // Alignment padding may push the start past the range entirely.
            if aligned_start >= range.end {
                continue;
            }
            let usable_length = range.end - aligned_start;
            if usable_length < length {
                continue;
            } else if usable_length == length {
                // Found a perfect fit, so stop looking.
                best_fit = Some((index, aligned_start));
                break;
            }
            best_fit = Some(match best_fit {
                Some((best_index, best_aligned_start)) => {
                    // Find best fit for this allocation to reduce memory fragmentation.
                    let best_usable = self.free_ranges[best_index].end - best_aligned_start;
                    if usable_length < best_usable {
                        (index, aligned_start)
                    } else {
                        (best_index, best_aligned_start)
                    }
                }
                None => (index, aligned_start),
            });
        }

        match best_fit {
            Some((index, aligned_start)) => {
                let range = self.free_ranges[index].clone();
                let alloc_end = aligned_start + length;

                // Any free space left before/after the allocation stays free.
                let has_prefix = aligned_start > range.start;
                let has_suffix = alloc_end < range.end;

                match (has_prefix, has_suffix) {
                    (false, false) => {
                        // The free range is fully consumed.
                        self.free_ranges.remove(index);
                    }
                    (false, true) => {
                        self.free_ranges[index].start = alloc_end;
                    }
                    (true, false) => {
                        self.free_ranges[index].end = aligned_start;
                    }
                    (true, true) => {
                        // The allocation splits the free range in two.
                        self.free_ranges[index].end = aligned_start;
                        self.free_ranges.insert(index + 1, alloc_end..range.end);
                    }
                }

                Ok(aligned_start..alloc_end)
            }
            None => Err(RangeAllocationError {
                fragmented_free_length,
            }),
        }
    }
210
211    /// Allocates a sub-range of the given `length`.
212    ///
213    /// Uses a best-fit strategy: the smallest free range that can satisfy
214    /// the request is chosen to minimise fragmentation.
215    ///
216    /// # Panics
217    ///
218    /// Panics if `length` is zero.
219    ///
220    /// # Example
221    ///
222    /// ```
223    /// use range_alloc::RangeAllocator;
224    ///
225    /// let mut alloc = RangeAllocator::new(0..100);
226    /// let a = alloc.allocate_range(30).unwrap();
227    /// let b = alloc.allocate_range(20).unwrap();
228    /// assert_eq!(a, 0..30);
229    /// assert_eq!(b, 30..50);
230    /// ```
231    pub fn allocate_range(&mut self, length: T) -> Result<Range<T>, RangeAllocationError<T>> {
232        self.allocate_range_impl(length, |start| start)
233    }
234
235    /// Allocates a sub-range of the given `length` whose start is aligned
236    /// to a multiple of `alignment`.
237    ///
238    /// Any space before the aligned start within a free range is kept free
239    /// and available for future allocations -- no space is wasted on
240    /// alignment padding.
241    ///
242    /// Uses the same best-fit strategy as [`allocate_range`](Self::allocate_range).
243    ///
244    /// # Panics
245    ///
246    /// Panics if `length` or `alignment` is zero.
247    ///
248    /// # Example
249    ///
250    /// ```
251    /// use range_alloc::RangeAllocator;
252    ///
253    /// let mut alloc = RangeAllocator::new(0..256);
254    /// // Offset the free region so the next aligned start is not at 0.
255    /// alloc.allocate_range(1).unwrap(); // 0..1
256    ///
257    /// // Allocate 16 units starting at the next multiple of 16.
258    /// let r = alloc.allocate_range_aligned(16, 16).unwrap();
259    /// assert_eq!(r, 16..32);
260    ///
261    /// // The gap (1..16) is still free and usable.
262    /// let small = alloc.allocate_range(15).unwrap();
263    /// assert_eq!(small, 1..16);
264    /// ```
265    pub fn allocate_range_aligned(
266        &mut self,
267        length: T,
268        alignment: T,
269    ) -> Result<Range<T>, RangeAllocationError<T>>
270    where
271        T: Rem<Output = T>,
272    {
273        assert_ne!(alignment + alignment, alignment);
274        self.allocate_range_impl(length, |start| {
275            let padding = (alignment - start % alignment) % alignment;
276            start + padding
277        })
278    }
279
280    /// Returns a previously allocated range to the free pool.
281    ///
282    /// Adjacent free ranges are automatically merged to reduce
283    /// fragmentation.
284    ///
285    /// # Panics
286    ///
287    /// Panics if `range` is outside the allocator's initial range, is
288    /// empty, or overlaps with an already-free range.
289    ///
290    /// # Example
291    ///
292    /// ```
293    /// use range_alloc::RangeAllocator;
294    ///
295    /// let mut alloc = RangeAllocator::new(0..10);
296    /// let r = alloc.allocate_range(10).unwrap();
297    /// alloc.free_range(r);
298    /// assert!(alloc.is_empty());
299    /// ```
300    pub fn free_range(&mut self, range: Range<T>) {
301        assert!(self.initial_range.start <= range.start && range.end <= self.initial_range.end);
302        assert!(range.start < range.end);
303
304        // Get insertion position.
305        let i = self
306            .free_ranges
307            .iter()
308            .position(|r| r.start > range.start)
309            .unwrap_or(self.free_ranges.len());
310
311        // Try merging with neighboring ranges in the free list.
312        // Before: |left|-(range)-|right|
313        if i > 0 && range.start == self.free_ranges[i - 1].end {
314            // Merge with |left|.
315            self.free_ranges[i - 1].end =
316                if i < self.free_ranges.len() && range.end == self.free_ranges[i].start {
317                    // Check for possible merge with |left| and |right|.
318                    let right = self.free_ranges.remove(i);
319                    right.end
320                } else {
321                    range.end
322                };
323
324            return;
325        } else if i < self.free_ranges.len() && range.end == self.free_ranges[i].start {
326            // Merge with |right|.
327            self.free_ranges[i].start = if i > 0 && range.start == self.free_ranges[i - 1].end {
328                // Check for possible merge with |left| and |right|.
329                let left = self.free_ranges.remove(i - 1);
330                left.start
331            } else {
332                range.start
333            };
334
335            return;
336        }
337
338        // Debug checks
339        assert!(
340            (i == 0 || self.free_ranges[i - 1].end < range.start)
341                && (i >= self.free_ranges.len() || range.end < self.free_ranges[i].start)
342        );
343
344        self.free_ranges.insert(i, range);
345    }
346
347    /// Returns an iterator over all currently allocated (non-free) ranges.
348    ///
349    /// The ranges are yielded in ascending order.
350    ///
351    /// # Example
352    ///
353    /// ```
354    /// use range_alloc::RangeAllocator;
355    ///
356    /// let mut alloc = RangeAllocator::new(0..30);
357    /// alloc.allocate_range(10).unwrap(); // 0..10
358    /// alloc.allocate_range(10).unwrap(); // 10..20
359    ///
360    /// // Adjacent allocations appear as a single contiguous range.
361    /// let allocated: Vec<_> = alloc.allocated_ranges().collect();
362    /// assert_eq!(allocated, vec![0..20]);
363    /// ```
364    pub fn allocated_ranges(&self) -> impl Iterator<Item = Range<T>> + '_ {
365        let first = match self.free_ranges.first() {
366            Some(Range { ref start, .. }) if *start > self.initial_range.start => {
367                Some(self.initial_range.start..*start)
368            }
369            None => Some(self.initial_range.clone()),
370            _ => None,
371        };
372
373        let last = match self.free_ranges.last() {
374            Some(Range { end, .. }) if *end < self.initial_range.end => {
375                Some(*end..self.initial_range.end)
376            }
377            _ => None,
378        };
379
380        let mid = self
381            .free_ranges
382            .iter()
383            .zip(self.free_ranges.iter().skip(1))
384            .map(|(ra, rb)| ra.end..rb.start);
385
386        first.into_iter().chain(mid).chain(last)
387    }
388
389    /// Frees all allocations, restoring the allocator to its initial state.
390    ///
391    /// # Example
392    ///
393    /// ```
394    /// use range_alloc::RangeAllocator;
395    ///
396    /// let mut alloc = RangeAllocator::new(0..10);
397    /// alloc.allocate_range(10).unwrap();
398    /// alloc.reset();
399    /// assert!(alloc.is_empty());
400    /// ```
401    pub fn reset(&mut self) {
402        self.free_ranges.clear();
403        self.free_ranges.push(self.initial_range.clone());
404    }
405
406    /// Returns `true` if nothing is currently allocated.
407    pub fn is_empty(&self) -> bool {
408        self.free_ranges.len() == 1 && self.free_ranges[0] == self.initial_range
409    }
410}
411
412impl<T: Copy + Sub<Output = T> + Sum> RangeAllocator<T> {
413    /// Returns the total length of all free ranges combined.
414    ///
415    /// This may be spread across multiple non-contiguous ranges, so an
416    /// allocation of this size is not guaranteed to succeed.
417    ///
418    /// # Example
419    ///
420    /// ```
421    /// use range_alloc::RangeAllocator;
422    ///
423    /// let mut alloc = RangeAllocator::new(0..100);
424    /// alloc.allocate_range(30).unwrap();
425    /// assert_eq!(alloc.total_available(), 70);
426    /// ```
427    pub fn total_available(&self) -> T {
428        self.free_ranges
429            .iter()
430            .map(|range| range.end - range.start)
431            .sum()
432    }
433}
434
435#[cfg(test)]
436mod tests {
437    use super::*;
438
439    #[test]
440    fn test_basic_allocation() {
441        let mut alloc = RangeAllocator::new(0..10);
442        // Test if an allocation works
443        assert_eq!(alloc.allocate_range(4), Ok(0..4));
444        assert!(alloc.allocated_ranges().eq(std::iter::once(0..4)));
445        // Free the prior allocation
446        alloc.free_range(0..4);
447        // Make sure the free actually worked
448        assert_eq!(alloc.free_ranges, vec![0..10]);
449        assert!(alloc.allocated_ranges().eq(std::iter::empty()));
450    }
451
452    #[test]
453    fn test_out_of_space() {
454        let mut alloc = RangeAllocator::new(0..10);
455        // Test if the allocator runs out of space correctly
456        assert_eq!(alloc.allocate_range(10), Ok(0..10));
457        assert!(alloc.allocated_ranges().eq(std::iter::once(0..10)));
458        assert!(alloc.allocate_range(4).is_err());
459        alloc.free_range(0..10);
460    }
461
462    #[test]
463    fn test_grow() {
464        let mut alloc = RangeAllocator::new(0..11);
465        // Test if the allocator runs out of space correctly
466        assert_eq!(alloc.allocate_range(10), Ok(0..10));
467        assert!(alloc.allocated_ranges().eq(std::iter::once(0..10)));
468        assert!(alloc.allocate_range(4).is_err());
469        alloc.grow_to(20);
470        assert_eq!(alloc.allocate_range(4), Ok(10..14));
471        alloc.free_range(0..14);
472    }
473
474    #[test]
475    fn test_grow_with_hole_at_start() {
476        let mut alloc = RangeAllocator::new(0..6);
477
478        assert_eq!(alloc.allocate_range(3), Ok(0..3));
479        assert_eq!(alloc.allocate_range(3), Ok(3..6));
480        alloc.free_range(0..3);
481
482        alloc.grow_to(9);
483        assert_eq!(alloc.allocated_ranges().collect::<Vec<_>>(), [3..6]);
484    }
485    #[test]
486    fn test_grow_with_hole_in_middle() {
487        let mut alloc = RangeAllocator::new(0..6);
488
489        assert_eq!(alloc.allocate_range(2), Ok(0..2));
490        assert_eq!(alloc.allocate_range(2), Ok(2..4));
491        assert_eq!(alloc.allocate_range(2), Ok(4..6));
492        alloc.free_range(2..4);
493
494        alloc.grow_to(9);
495        assert_eq!(alloc.allocated_ranges().collect::<Vec<_>>(), [0..2, 4..6]);
496    }
497
498    #[test]
499    fn test_dont_use_block_that_is_too_small() {
500        let mut alloc = RangeAllocator::new(0..10);
501        // Allocate three blocks then free the middle one and check for correct state
502        assert_eq!(alloc.allocate_range(3), Ok(0..3));
503        assert_eq!(alloc.allocate_range(3), Ok(3..6));
504        assert_eq!(alloc.allocate_range(3), Ok(6..9));
505        alloc.free_range(3..6);
506        assert_eq!(alloc.free_ranges, vec![3..6, 9..10]);
507        assert_eq!(
508            alloc.allocated_ranges().collect::<Vec<Range<i32>>>(),
509            vec![0..3, 6..9]
510        );
511        // Now request space that the middle block can fill, but the end one can't.
512        assert_eq!(alloc.allocate_range(3), Ok(3..6));
513    }
514
515    #[test]
516    fn test_free_blocks_in_middle() {
517        let mut alloc = RangeAllocator::new(0..100);
518        // Allocate many blocks then free every other block.
519        assert_eq!(alloc.allocate_range(10), Ok(0..10));
520        assert_eq!(alloc.allocate_range(10), Ok(10..20));
521        assert_eq!(alloc.allocate_range(10), Ok(20..30));
522        assert_eq!(alloc.allocate_range(10), Ok(30..40));
523        assert_eq!(alloc.allocate_range(10), Ok(40..50));
524        assert_eq!(alloc.allocate_range(10), Ok(50..60));
525        assert_eq!(alloc.allocate_range(10), Ok(60..70));
526        assert_eq!(alloc.allocate_range(10), Ok(70..80));
527        assert_eq!(alloc.allocate_range(10), Ok(80..90));
528        assert_eq!(alloc.allocate_range(10), Ok(90..100));
529        assert_eq!(alloc.free_ranges, vec![]);
530        assert!(alloc.allocated_ranges().eq(std::iter::once(0..100)));
531        alloc.free_range(10..20);
532        alloc.free_range(30..40);
533        alloc.free_range(50..60);
534        alloc.free_range(70..80);
535        alloc.free_range(90..100);
536        // Check that the right blocks were freed.
537        assert_eq!(
538            alloc.free_ranges,
539            vec![10..20, 30..40, 50..60, 70..80, 90..100]
540        );
541        assert_eq!(
542            alloc.allocated_ranges().collect::<Vec<Range<i32>>>(),
543            vec![0..10, 20..30, 40..50, 60..70, 80..90]
544        );
545        // Fragment the memory on purpose a bit.
546        assert_eq!(alloc.allocate_range(6), Ok(10..16));
547        assert_eq!(alloc.allocate_range(6), Ok(30..36));
548        assert_eq!(alloc.allocate_range(6), Ok(50..56));
549        assert_eq!(alloc.allocate_range(6), Ok(70..76));
550        assert_eq!(alloc.allocate_range(6), Ok(90..96));
551        // Check for fragmentation.
552        assert_eq!(
553            alloc.free_ranges,
554            vec![16..20, 36..40, 56..60, 76..80, 96..100]
555        );
556        assert_eq!(
557            alloc.allocated_ranges().collect::<Vec<Range<i32>>>(),
558            vec![0..16, 20..36, 40..56, 60..76, 80..96]
559        );
560        // Fill up the fragmentation
561        assert_eq!(alloc.allocate_range(4), Ok(16..20));
562        assert_eq!(alloc.allocate_range(4), Ok(36..40));
563        assert_eq!(alloc.allocate_range(4), Ok(56..60));
564        assert_eq!(alloc.allocate_range(4), Ok(76..80));
565        assert_eq!(alloc.allocate_range(4), Ok(96..100));
566        // Check that nothing is free.
567        assert_eq!(alloc.free_ranges, vec![]);
568        assert!(alloc.allocated_ranges().eq(std::iter::once(0..100)));
569    }
570
571    #[test]
572    fn test_ignore_block_if_another_fits_better() {
573        let mut alloc = RangeAllocator::new(0..10);
574        // Allocate blocks such that the only free spaces available are 3..6 and 9..10
575        // in order to prepare for the next test.
576        assert_eq!(alloc.allocate_range(3), Ok(0..3));
577        assert_eq!(alloc.allocate_range(3), Ok(3..6));
578        assert_eq!(alloc.allocate_range(3), Ok(6..9));
579        alloc.free_range(3..6);
580        assert_eq!(alloc.free_ranges, vec![3..6, 9..10]);
581        assert_eq!(
582            alloc.allocated_ranges().collect::<Vec<Range<i32>>>(),
583            vec![0..3, 6..9]
584        );
585        // Now request space that can be filled by 3..6 but should be filled by 9..10
586        // because 9..10 is a perfect fit.
587        assert_eq!(alloc.allocate_range(1), Ok(9..10));
588    }
589
590    #[test]
591    fn test_merge_neighbors() {
592        let mut alloc = RangeAllocator::new(0..9);
593        assert_eq!(alloc.allocate_range(3), Ok(0..3));
594        assert_eq!(alloc.allocate_range(3), Ok(3..6));
595        assert_eq!(alloc.allocate_range(3), Ok(6..9));
596        alloc.free_range(0..3);
597        alloc.free_range(6..9);
598        alloc.free_range(3..6);
599        assert_eq!(alloc.free_ranges, vec![0..9]);
600        assert!(alloc.allocated_ranges().eq(std::iter::empty()));
601    }
602
603    #[test]
604    fn test_aligned_already_aligned() {
605        let mut alloc = RangeAllocator::new(0..20);
606        // Start is already aligned to 4, no padding needed.
607        assert_eq!(alloc.allocate_range_aligned(4, 4), Ok(0..4));
608        assert_eq!(alloc.free_ranges, vec![4..20]);
609    }
610
611    #[test]
612    fn test_aligned_with_padding() {
613        let mut alloc = RangeAllocator::new(0..20);
614        // Occupy 1 byte to offset the free range start.
615        assert_eq!(alloc.allocate_range(1), Ok(0..1));
616        // Free range is now 1..20. Alignment 4 rounds up to 4.
617        assert_eq!(alloc.allocate_range_aligned(4, 4), Ok(4..8));
618        // Prefix 1..4 and suffix 8..20 remain free.
619        assert_eq!(alloc.free_ranges, vec![1..4, 8..20]);
620    }
621
622    #[test]
623    fn test_aligned_prefix_is_reusable() {
624        let mut alloc = RangeAllocator::new(0..20);
625        assert_eq!(alloc.allocate_range(1), Ok(0..1));
626        assert_eq!(alloc.allocate_range_aligned(4, 4), Ok(4..8));
627        // The prefix 1..4 should be usable for a smaller allocation.
628        assert_eq!(alloc.allocate_range(3), Ok(1..4));
629        assert_eq!(alloc.free_ranges, vec![8..20]);
630    }
631
632    #[test]
633    fn test_aligned_no_fit() {
634        let mut alloc = RangeAllocator::new(0..5);
635        assert_eq!(alloc.allocate_range(1), Ok(0..1));
636        // Free range is 1..5. Alignment 4 rounds to 4, usable = 5-4 = 1 < 4.
637        assert!(alloc.allocate_range_aligned(4, 4).is_err());
638    }
639
640    #[test]
641    fn test_aligned_exact_fit_after_padding() {
642        let mut alloc = RangeAllocator::new(0..8);
643        assert_eq!(alloc.allocate_range(1), Ok(0..1));
644        // Free range is 1..8. Alignment 4 rounds to 4, usable = 8-4 = 4 == 4.
645        assert_eq!(alloc.allocate_range_aligned(4, 4), Ok(4..8));
646        // Prefix 1..4 remains, suffix consumed entirely.
647        assert_eq!(alloc.free_ranges, vec![1..4]);
648    }
649
650    #[test]
651    fn test_aligned_best_fit() {
652        let mut alloc = RangeAllocator::new(0..32);
653        // Create two gaps with different usable sizes after alignment.
654        assert_eq!(alloc.allocate_range(4), Ok(0..4));
655        assert_eq!(alloc.allocate_range(4), Ok(4..8));
656        assert_eq!(alloc.allocate_range(4), Ok(8..12));
657        assert_eq!(alloc.allocate_range(4), Ok(12..16));
658        assert_eq!(alloc.allocate_range(4), Ok(16..20));
659        assert_eq!(alloc.allocate_range(12), Ok(20..32));
660
661        // Free two ranges: 4..8 (already aligned to 4, usable 4) and
662        // 12..20 (already aligned to 4, usable 8).
663        alloc.free_range(4..8);
664        alloc.free_range(12..20);
665        assert_eq!(alloc.free_ranges, vec![4..8, 12..20]);
666
667        // Allocate 4 aligned to 4. Both fit, but 4..8 is the tighter fit.
668        assert_eq!(alloc.allocate_range_aligned(4, 4), Ok(4..8));
669    }
670
671    #[test]
672    fn test_aligned_non_power_of_two() {
673        let mut alloc = RangeAllocator::new(0..20);
674        assert_eq!(alloc.allocate_range(1), Ok(0..1));
675        // Free range: 1..20. Alignment 3 rounds 1 up to 3.
676        assert_eq!(alloc.allocate_range_aligned(2, 3), Ok(3..5));
677        assert_eq!(alloc.free_ranges, vec![1..3, 5..20]);
678    }
679
680    #[test]
681    fn test_aligned_multiple_allocations() {
682        let mut alloc = RangeAllocator::new(0..32);
683        assert_eq!(alloc.allocate_range_aligned(4, 8), Ok(0..4));
684        // Free: 4..32. Next align-8 start is 8.
685        assert_eq!(alloc.allocate_range_aligned(4, 8), Ok(8..12));
686        // Free: 4..8, 12..32. Next align-8 start in 12..32 is 16.
687        assert_eq!(alloc.allocate_range_aligned(4, 8), Ok(16..20));
688        // Free: 4..8, 12..16, 20..32.
689        assert_eq!(alloc.free_ranges, vec![4..8, 12..16, 20..32]);
690    }
691
    #[test]
    fn test_aligned_allocation_then_free_merges() {
        let mut alloc = RangeAllocator::new(0..16);
        assert_eq!(alloc.allocate_range(1), Ok(0..1));
        assert_eq!(alloc.allocate_range_aligned(4, 4), Ok(4..8));
        // Free: 1..4, 8..16
        // Free the aligned range; it is adjacent to both free neighbors,
        // so both merges must fire.
        alloc.free_range(4..8);
        // 1..4 and 4..8 merge into 1..8, then 1..8 and 8..16 merge into 1..16.
        assert_eq!(alloc.free_ranges, vec![1..16]);
    }
703
704    #[test]
705    fn test_allocate_range_delegates_correctly() {
706        // Verify allocate_range still behaves identically to the original.
707        let mut alloc = RangeAllocator::new(0..10);
708        assert_eq!(alloc.allocate_range(4), Ok(0..4));
709        assert_eq!(alloc.allocate_range(3), Ok(4..7));
710        assert_eq!(alloc.free_ranges, vec![7..10]);
711        alloc.free_range(0..4);
712        assert_eq!(alloc.free_ranges, vec![0..4, 7..10]);
713    }
714}