1#![no_std]
4#![deny(warnings, unstable_features, missing_docs)]
5
6mod avl;
7mod bitmap;
8mod linked_list;
9
10pub use avl::AvlBuddy;
11pub use bitmap::UsizeBuddy;
12pub use linked_list::LinkedListBuddy;
13
14use core::{alloc::Layout, fmt, num::NonZeroUsize, ptr::NonNull};
15
/// Common behavior of one line (one order class) in the buddy system.
pub trait BuddyLine {
    /// An empty collection; used as the initial value of every line.
    const EMPTY: Self;

    /// Bytes of metadata this collection stores *inside* the managed
    /// memory itself; `0` when the collection keeps no intrusive state.
    const INTRUSIVE_META_SIZE: usize = 0;

    /// Initializes the line for blocks of `2^order` bytes.
    ///
    /// `base` is the start of the managed region shifted right by `order`
    /// (see `BuddyAllocator::init`, which passes `base >> o`).
    #[inline]
    fn init(&mut self, _order: usize, _base: usize) {}

    /// Removes the block at `idx` from the collection and reports whether
    /// it was present. Collections that cannot look up by index keep this
    /// default `unimplemented!` body.
    #[inline]
    fn take(&mut self, _idx: usize) -> bool {
        unimplemented!()
    }
}
34
/// Collection for the largest (top) order: its blocks have no buddy to
/// merge with, so `put` never reports a parent.
pub trait OligarchyCollection: BuddyLine {
    /// Takes `count` contiguous blocks whose first index is aligned to
    /// `2^align_order` blocks; returns the first index, or `None` when no
    /// suitable run is available.
    fn take_any(&mut self, align_order: usize, count: usize) -> Option<usize>;

    /// Puts the block at `idx` into the collection.
    fn put(&mut self, idx: usize);
}
45
/// Collection for one intermediate order, where a freed block can merge
/// with its buddy into a block of the next order.
pub trait BuddyCollection: BuddyLine {
    /// Takes any block whose index is aligned to `2^align_order`;
    /// returns its index, or `None` when the line is empty.
    fn take_any(&mut self, align_order: usize) -> Option<usize>;

    /// Puts the block at `idx` into the collection. If its buddy was
    /// already present, both are removed and the parent index in the next
    /// order is returned; otherwise returns `None`.
    fn put(&mut self, idx: usize) -> Option<usize>;
}
59
/// Error returned when an allocation request cannot be satisfied.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[repr(transparent)]
pub struct BuddyError;
64
/// A buddy-system allocator with `N` buddy lines plus one "oligarchy"
/// line for the largest blocks.
pub struct BuddyAllocator<const N: usize, O: OligarchyCollection, B: BuddyCollection> {
    /// Collection for the top order; its blocks never merge further.
    oligarchy: O,

    /// One collection per order, from `min_order` upward.
    buddies: [B; N],

    /// log2 of the smallest manageable block size in bytes.
    min_order: usize,

    /// Bytes currently free for allocation (always `<= capacity`).
    free: usize,

    /// Total bytes currently under management.
    capacity: usize,
}
85
impl<const N: usize, O: OligarchyCollection, B: BuddyCollection> BuddyAllocator<N, O, B> {
    /// Number of buddy layers; the oligarchy sits above layer `N - 1`.
    const MAX_LAYER: usize = N;
    /// Smallest order whose blocks can hold the oligarchy's intrusive metadata.
    const O_MIN_ORDER: usize = O::INTRUSIVE_META_SIZE.next_power_of_two().trailing_zeros() as _;
    /// Smallest order whose blocks can hold a buddy line's intrusive metadata.
    const B_MIN_ORDER: usize = B::INTRUSIVE_META_SIZE.next_power_of_two().trailing_zeros() as _;

    /// Creates an empty allocator that manages no memory yet; call
    /// [`Self::init`] and then [`Self::transfer`] before allocating.
    #[inline]
    pub const fn new() -> Self {
        Self {
            oligarchy: O::EMPTY,
            buddies: [B::EMPTY; N],
            min_order: 0,
            free: 0,
            capacity: 0,
        }
    }
}
106
impl<const N: usize, O: OligarchyCollection, B: BuddyCollection> Default
    for BuddyAllocator<N, O, B>
{
    /// Equivalent to [`BuddyAllocator::new`].
    fn default() -> Self {
        Self::new()
    }
}
114
115impl<const N: usize, O: OligarchyCollection, B: BuddyCollection> BuddyAllocator<N, O, B> {
116 #[inline]
118 pub fn capacity(&self) -> usize {
119 self.capacity
120 }
121
122 #[inline]
124 pub fn free(&self) -> usize {
125 self.free
126 }
127
128 #[inline]
130 const fn max_order(&self) -> usize {
131 self.min_order + Self::MAX_LAYER
132 }
133
134 #[inline]
138 pub fn init<T>(&mut self, min_order: usize, base: NonNull<T>) {
139 assert_eq!(
140 0, self.capacity,
141 "init is not allowed after any transfering"
142 );
143
144 self.min_order = min_order;
145 let max_order = self.max_order();
146
147 assert!(Self::O_MIN_ORDER <= max_order);
148 assert!(Self::B_MIN_ORDER <= min_order);
149
150 let base = base.as_ptr() as usize;
151 self.buddies.iter_mut().enumerate().for_each(|(i, c)| {
152 let o = self.min_order + i;
153 c.init(o, base >> o)
154 });
155 self.oligarchy.init(max_order, base >> max_order);
156 }
157
158 #[inline]
167 pub unsafe fn transfer<T>(&mut self, ptr: NonNull<T>, size: usize) {
168 self.capacity += size;
169 self.deallocate(ptr, size)
170 }
171
172 #[inline]
174 pub fn snatch<T>(
175 &mut self,
176 align_order: usize,
177 size: NonZeroUsize,
178 ) -> Result<(NonNull<T>, usize), BuddyError> {
179 let ans = self.allocate(align_order, size);
180 if let Ok((_, size)) = ans {
181 self.capacity -= size;
182 }
183 ans
184 }
185
186 #[inline]
188 pub fn allocate_type<T>(&mut self) -> Result<(NonNull<T>, usize), BuddyError> {
189 self.allocate_layout(Layout::new::<T>())
190 }
191
192 #[inline]
194 pub fn allocate_layout<T>(
195 &mut self,
196 layout: Layout,
197 ) -> Result<(NonNull<T>, usize), BuddyError> {
198 #[inline]
199 const fn allocated<T, U>(ptr: *mut T, size: usize) -> (NonNull<U>, usize) {
200 (unsafe { NonNull::new_unchecked(ptr) }.cast(), size)
201 }
202
203 if let Some(size) = NonZeroUsize::new(layout.size()) {
204 self.allocate(layout.align().trailing_zeros() as _, size)
205 } else {
206 Ok(allocated(self, 0))
207 }
208 }
209
210 pub fn allocate<T>(
214 &mut self,
215 align_order: usize,
216 size: NonZeroUsize,
217 ) -> Result<(NonNull<T>, usize), BuddyError> {
218 let max_order = self.max_order();
219 #[inline]
220 const fn allocated<T, U>(ptr: *mut T, size: usize) -> (NonNull<U>, usize) {
221 (unsafe { NonNull::new_unchecked(ptr) }.cast(), size)
222 }
223
224 let page_mask = (1usize << self.min_order) - 1;
226 let ans_size = (size.get() + page_mask) & !page_mask;
227 let size_order = nonzero(ans_size.next_power_of_two()).trailing_zeros() as usize;
229 let (ptr, alloc_size) = if size_order >= max_order {
231 let count = ((ans_size >> (max_order - 1)) + 1) >> 1;
233 let align_offset = align_order.saturating_sub(max_order);
234 match self.oligarchy.take_any(align_offset, count) {
235 Some(idx) => (idx << max_order, count << max_order),
236 None => Err(BuddyError)?,
237 }
238 } else {
239 let layer0 = size_order - self.min_order;
241 let mut layer = layer0;
242 let mut idx = loop {
243 if layer == Self::MAX_LAYER {
245 let align_offset = align_order.saturating_sub(max_order);
246 match self.oligarchy.take_any(align_offset, 1) {
247 Some(idx) => break idx,
248 None => Err(BuddyError)?,
249 }
250 }
251 let align_offset = align_order.saturating_sub(self.min_order + layer);
253 match self.buddies[layer].take_any(align_offset) {
254 Some(idx) => break idx,
255 None => layer += 1,
256 }
257 };
258 assert!(self.buddies[layer0..layer].iter_mut().rev().all(|b| {
260 idx <<= 1;
261 b.put(idx + 1).is_none()
262 }));
263 (idx << size_order, 1 << size_order)
265 };
266 self.free -= alloc_size;
267 if alloc_size > ans_size {
269 self.deallocate(
270 unsafe { NonNull::new_unchecked((ptr + ans_size) as *mut u8) },
271 alloc_size - ans_size,
272 );
273 }
274 Ok(allocated(ptr as *mut (), ans_size))
275 }
276
277 pub unsafe fn deallocate_layout<T>(&mut self, ptr: NonNull<T>, layout: Layout) {
284 debug_assert!((1 << (ptr.as_ptr() as usize).trailing_zeros()) >= layout.align());
285
286 let mask = (1 << self.min_order) - 1;
287 self.deallocate(ptr, (layout.size() + mask) & !mask)
288 }
289
290 pub fn deallocate<T>(&mut self, ptr: NonNull<T>, size: usize) {
296 debug_assert!(
297 size.trailing_zeros() as usize >= self.min_order,
298 "size must align to minium order"
299 );
300
301 let max_order = self.max_order();
302
303 let mut ptr = ptr.as_ptr() as usize;
304 let end = ptr + size;
305 while ptr < end {
306 let len = nonzero(end - ptr);
308 let order_ptr = nonzero(ptr).trailing_zeros();
310 let order_len = usize::BITS - len.leading_zeros() - 1;
312 let order = order_ptr.min(order_len) as usize;
314 if order >= max_order {
316 let idx = ptr >> max_order;
318 let count = len.get() >> max_order;
320 ptr += count << max_order;
322 (idx..).take(count).for_each(|idx| self.oligarchy.put(idx));
324 } else {
325 let mut idx = ptr >> order;
327 ptr += 1 << order;
329 for layer in (order - self.min_order).. {
331 if layer == Self::MAX_LAYER {
333 self.oligarchy.put(idx);
334 break;
335 }
336 match self.buddies[layer].put(idx) {
338 Some(parent) => idx = parent,
339 None => break,
340 }
341 }
342 }
343 }
344 self.free += size;
345 assert!(
346 self.free <= self.capacity,
347 "something wrong with the free bytes, it is larger than the capacity: {} > {}",
348 self.free,
349 self.capacity
350 );
351 }
352}
353
354impl<const N: usize, O: OligarchyCollection + fmt::Debug, B: BuddyCollection + fmt::Debug>
355 fmt::Debug for BuddyAllocator<N, O, B>
356{
357 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
358 writeln!(f, "BuddyAllocator@{:#018x}", self as *const _ as usize)?;
359 writeln!(f, "---------------------------------")?;
360 for (i, line) in self.buddies.iter().enumerate() {
361 writeln!(f, "{:>2}> {line:?}", self.min_order + i)?;
362 }
363 writeln!(f, "{:>2}> {:?}", self.max_order(), self.oligarchy)
364 }
365}
366
/// Wraps `val` in a [`NonZeroUsize`] without a runtime check in release
/// builds. Callers must guarantee `val != 0`; this is verified in debug
/// builds.
#[inline]
const fn nonzero(val: usize) -> NonZeroUsize {
    debug_assert!(val != 0);
    // SAFETY: callers guarantee `val` is non-zero (debug-checked above).
    unsafe { NonZeroUsize::new_unchecked(val) }
}
371
/// Converts between raw addresses and block indices for a single order
/// (the wrapped value is log2 of the block size in bytes).
struct Order(usize);

impl Order {
    /// Wraps an order value.
    #[inline]
    const fn new(order: usize) -> Self {
        Self(order)
    }

    /// Maps a block index to a pointer; `None` when the index is 0,
    /// since that would be the null address.
    #[inline]
    fn idx_to_ptr<T>(&self, index: usize) -> Option<NonNull<T>> {
        let addr = index << self.0;
        NonNull::new(addr as *mut T)
    }

    /// Maps a pointer back to the index of the block containing it.
    #[inline]
    fn ptr_to_idx<T>(&self, pointer: NonNull<T>) -> usize {
        let addr = pointer.as_ptr() as usize;
        addr >> self.0
    }
}
393
#[cfg(test)]
mod tests {
    // The crate is `no_std`, but the test harness always links `std`;
    // declare it so the mutex below can be named.
    extern crate std;

    use super::*;
    use crate::{LinkedListBuddy, UsizeBuddy};
    use std::sync::{Mutex, MutexGuard};

    type TestAllocator<const N: usize> = BuddyAllocator<N, UsizeBuddy, LinkedListBuddy>;

    /// One page-aligned 4 KiB chunk of the shared test arena.
    #[repr(C, align(4096))]
    #[derive(Clone, Copy)]
    struct TestPage([u8; 4096]);

    /// Shared arena handed to the allocators under test.
    static mut TEST_MEMORY: [TestPage; 16] = [TestPage([0; 4096]); 16];

    /// Serializes every test that lets an allocator manage `TEST_MEMORY`.
    /// The default test runner is multi-threaded; without this lock two
    /// allocators could write intrusive metadata into the same memory
    /// concurrently, which is a data race.
    static TEST_MEMORY_LOCK: Mutex<()> = Mutex::new(());

    /// Locks the shared arena, ignoring poison left by a failed test.
    fn lock_test_memory() -> MutexGuard<'static, ()> {
        TEST_MEMORY_LOCK.lock().unwrap_or_else(|e| e.into_inner())
    }

    /// Base pointer of the shared arena.
    fn test_memory_ptr() -> NonNull<u8> {
        NonNull::new(core::ptr::addr_of_mut!(TEST_MEMORY).cast::<u8>()).unwrap()
    }

    /// Size of the shared arena in bytes.
    const fn test_memory_len() -> usize {
        core::mem::size_of::<[TestPage; 16]>()
    }

    /// Builds an allocator owning the whole shared arena.
    /// The caller must hold the arena lock.
    fn arena_allocator<const N: usize>() -> TestAllocator<N> {
        let mut allocator: TestAllocator<N> = BuddyAllocator::new();
        allocator.init(12, test_memory_ptr());
        unsafe { allocator.transfer(test_memory_ptr(), test_memory_len()) };
        allocator
    }

    #[test]
    fn test_allocator_new() {
        let allocator: TestAllocator<4> = BuddyAllocator::new();
        assert_eq!(allocator.capacity(), 0);
        assert_eq!(allocator.free(), 0);
        assert_eq!(allocator.min_order, 0);
    }

    #[test]
    fn test_allocator_default() {
        let allocator: TestAllocator<4> = BuddyAllocator::default();
        assert_eq!(allocator.capacity(), 0);
        assert_eq!(allocator.free(), 0);
    }

    #[test]
    fn test_allocator_init() {
        let _guard = lock_test_memory();
        let mut allocator: TestAllocator<4> = BuddyAllocator::new();

        allocator.init(12, test_memory_ptr());
        assert_eq!(allocator.min_order, 12);
        assert_eq!(allocator.capacity(), 0);
        assert_eq!(allocator.free(), 0);
    }

    #[test]
    fn test_allocator_transfer() {
        let _guard = lock_test_memory();
        let allocator: TestAllocator<4> = arena_allocator();

        assert_eq!(allocator.capacity(), test_memory_len());
        assert_eq!(allocator.free(), test_memory_len());
    }

    #[test]
    fn test_allocator_allocate_deallocate_basic() {
        let _guard = lock_test_memory();
        let mut allocator: TestAllocator<8> = arena_allocator();
        let initial_free = allocator.free();

        let size = NonZeroUsize::new(4096).unwrap();
        let (alloc_ptr, alloc_size) = allocator.allocate::<u8>(0, size).unwrap();

        assert!(alloc_size >= 4096);
        assert!(allocator.free() < initial_free);

        allocator.deallocate(alloc_ptr, alloc_size);
        assert_eq!(allocator.free(), initial_free);
    }

    #[test]
    fn test_allocator_allocate_type() {
        let _guard = lock_test_memory();
        let mut allocator: TestAllocator<8> = arena_allocator();
        let initial_free = allocator.free();

        let (alloc_ptr, alloc_size) = allocator.allocate_type::<usize>().unwrap();
        assert!(alloc_size >= core::mem::size_of::<usize>());

        allocator.deallocate(alloc_ptr, alloc_size);
        assert_eq!(allocator.free(), initial_free);
    }

    #[test]
    fn test_allocator_allocate_layout() {
        let _guard = lock_test_memory();
        let mut allocator: TestAllocator<8> = arena_allocator();
        let initial_free = allocator.free();

        let layout = Layout::from_size_align(64, 8).unwrap();
        let (alloc_ptr, alloc_size) = allocator.allocate_layout::<u8>(layout).unwrap();
        assert!(alloc_size >= 64);

        unsafe { allocator.deallocate_layout(alloc_ptr, layout) };
        assert_eq!(allocator.free(), initial_free);
    }

    #[test]
    fn test_allocator_multiple_allocations() {
        let _guard = lock_test_memory();
        let mut allocator: TestAllocator<8> = arena_allocator();
        let initial_free = allocator.free();

        let size1 = NonZeroUsize::new(1024).unwrap();
        let size2 = NonZeroUsize::new(2048).unwrap();
        let size3 = NonZeroUsize::new(4096).unwrap();

        let (ptr1, size1) = allocator.allocate::<u8>(0, size1).unwrap();
        let (ptr2, size2) = allocator.allocate::<u8>(0, size2).unwrap();
        let (ptr3, size3) = allocator.allocate::<u8>(0, size3).unwrap();

        // Free out of order to exercise buddy merging.
        allocator.deallocate(ptr2, size2);
        allocator.deallocate(ptr1, size1);
        allocator.deallocate(ptr3, size3);

        assert_eq!(allocator.free(), initial_free);
    }

    #[test]
    fn test_allocator_snatch() {
        let _guard = lock_test_memory();
        let mut allocator: TestAllocator<8> = arena_allocator();
        let initial_capacity = allocator.capacity();
        let initial_free = allocator.free();

        let size = NonZeroUsize::new(4096).unwrap();
        let (_alloc_ptr, alloc_size) = allocator.snatch::<u8>(0, size).unwrap();

        // Snatched memory leaves the pool: capacity shrinks with it.
        assert!(allocator.capacity() < initial_capacity);
        assert_eq!(allocator.capacity(), initial_capacity - alloc_size);
        assert_eq!(allocator.free(), initial_free - alloc_size);
    }

    #[test]
    fn test_allocator_allocate_failure() {
        let _guard = lock_test_memory();
        let mut allocator: TestAllocator<8> = arena_allocator();

        // Request more than the whole arena: must fail.
        let huge_size = NonZeroUsize::new(test_memory_len() * 2).unwrap();
        assert!(allocator.allocate::<u8>(0, huge_size).is_err());
    }

    #[test]
    fn test_allocator_zero_size_allocation() {
        let _guard = lock_test_memory();
        let mut allocator: TestAllocator<8> = arena_allocator();

        let layout = Layout::from_size_align(0, 1).unwrap();
        let (_alloc_ptr, alloc_size) = allocator.allocate_layout::<u8>(layout).unwrap();
        assert_eq!(alloc_size, 0);
    }

    #[test]
    fn test_max_order() {
        let _guard = lock_test_memory();
        let mut allocator: TestAllocator<4> = BuddyAllocator::new();

        allocator.init(3, test_memory_ptr());

        // max_order = min_order + N = 3 + 4.
        assert_eq!(allocator.max_order(), 7);
    }

    #[test]
    fn test_order_idx_to_ptr() {
        let order = Order::new(12);

        // Index 0 maps to the null address, so no pointer exists.
        assert!(order.idx_to_ptr::<u8>(0).is_none());
        for idx in 1..4 {
            let ptr = order.idx_to_ptr::<u8>(idx).unwrap();
            assert_eq!(ptr.as_ptr() as usize, idx << 12);
        }
    }

    #[test]
    fn test_order_ptr_to_idx() {
        let order = Order::new(12);

        let ptr = NonNull::new(4096 as *mut u8).unwrap();
        assert_eq!(order.ptr_to_idx(ptr), 1);

        let ptr = NonNull::new(8192 as *mut u8).unwrap();
        assert_eq!(order.ptr_to_idx(ptr), 2);
    }

    #[test]
    fn test_order_roundtrip() {
        let order = Order::new(12);

        for i in [1, 5, 100, 1000] {
            let ptr = order.idx_to_ptr::<u8>(i).unwrap();
            assert_eq!(order.ptr_to_idx(ptr), i);
        }
    }

    #[test]
    fn test_transfer_low_address_crossing_boundary() {
        #[repr(C, align(4096))]
        struct Buf {
            _data: [u8; 128 * 1024],
        }
        // Private to this test, so the arena lock is not needed.
        static mut BUF: Buf = Buf {
            _data: [0; 128 * 1024],
        };

        type A = BuddyAllocator<32, UsizeBuddy, LinkedListBuddy>;
        let mut alloc = A::new();

        let ptr = NonNull::new(core::ptr::addr_of_mut!(BUF).cast::<u8>()).unwrap();
        let len = 128 * 1024;

        alloc.init(3, ptr);
        unsafe { alloc.transfer(ptr, len) };

        let size = NonZeroUsize::new(4096).unwrap();
        let (p, s) = alloc.allocate::<u8>(0, size).unwrap();
        assert!(s >= 4096);
        alloc.deallocate(p, s);
    }
}