//! phala_allocator/lib.rs — a `GlobalAlloc` wrapper that tracks current,
//! short-term spike, and all-time peak heap usage.
1#![no_std]
2use core::alloc::{GlobalAlloc, Layout};
3use core::sync::atomic::{AtomicUsize, Ordering};
4
/// A [`GlobalAlloc`] wrapper that tracks heap usage of the inner allocator.
///
/// All counters are updated atomically, so the wrapper is usable as a
/// global allocator from multiple threads.
pub struct StatSizeAllocator<T> {
    // The wrapped allocator that performs the real (de)allocations.
    inner: T,
    // Bytes currently allocated (sum of live allocation sizes).
    current: AtomicUsize,
    // High-water mark of `current` since the last `stats()` call;
    // reset to 0 each time `stats()` is taken.
    spike: AtomicUsize,
    // All-time high-water mark, folded from `spike` on each `stats()` call.
    peak: AtomicUsize,
}
11
/// A snapshot of allocator statistics, as returned by
/// `StatSizeAllocator::stats()`.
#[derive(Debug)]
pub struct Stats {
    /// The current heap usage of the allocator.
    pub current: usize,
    /// The peak heap usage of the allocator in a short-term duration.
    pub spike: usize,
    /// The peak heap usage of the allocator.
    pub peak: usize,
}
21
22impl<T> StatSizeAllocator<T> {
23    pub const fn new(inner: T) -> Self {
24        Self {
25            inner,
26            current: AtomicUsize::new(0),
27            spike: AtomicUsize::new(0),
28            peak: AtomicUsize::new(0),
29        }
30    }
31
32    pub fn stats(&self) -> Stats {
33        let spike = self.spike.swap(0, Ordering::Relaxed);
34        let current_peak = self.peak.load(Ordering::Relaxed);
35        let peak = current_peak.max(spike);
36        self.peak.store(peak, Ordering::Relaxed);
37        Stats {
38            current: self.current.load(Ordering::Relaxed),
39            spike,
40            peak,
41        }
42    }
43}
44
45impl<T: GlobalAlloc> StatSizeAllocator<T> {
46    fn add_alloced_size(&self, size: usize) {
47        let prev = self.current.fetch_add(size, Ordering::SeqCst);
48        let total_size = prev + size;
49        let mut peak = self.spike.load(Ordering::SeqCst);
50        loop {
51            if total_size <= peak {
52                break;
53            }
54            match self.spike.compare_exchange(
55                peak,
56                total_size,
57                Ordering::Acquire,
58                Ordering::Relaxed,
59            ) {
60                Err(new) => {
61                    peak = new;
62                    continue;
63                }
64                Ok(_) => {
65                    break;
66                }
67            }
68        }
69    }
70}
71
72unsafe impl<T: GlobalAlloc> GlobalAlloc for StatSizeAllocator<T> {
73    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
74        self.add_alloced_size(layout.size());
75        self.inner.alloc(layout)
76    }
77
78    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
79        self.current.fetch_sub(layout.size(), Ordering::SeqCst);
80        self.inner.dealloc(ptr, layout)
81    }
82
83    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
84        self.add_alloced_size(layout.size());
85        self.inner.alloc_zeroed(layout)
86    }
87
88    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
89        use core::cmp::Ordering::*;
90        match new_size.cmp(&layout.size()) {
91            Less => {
92                let difference = layout.size() - new_size;
93                self.current.fetch_sub(difference, Ordering::SeqCst);
94            }
95            Greater => {
96                let difference = new_size - layout.size();
97                self.add_alloced_size(difference);
98            }
99            Equal => (),
100        }
101        self.inner.realloc(ptr, layout, new_size)
102    }
103}