// miden_sdk_alloc/lib.rs
1#![no_std]
2
3extern crate alloc;
4
5use alloc::alloc::{GlobalAlloc, Layout};
6use core::{
7 ptr::null_mut,
8 sync::atomic::{AtomicPtr, Ordering},
9};
10
/// We assume the Wasm page size (64 KiB) for purposes of initializing the heap.
#[cfg(target_family = "wasm")]
const PAGE_SIZE: usize = 1 << 16;

/// We require all allocations to be minimally word-aligned, i.e. 32-byte alignment.
const MIN_ALIGN: usize = 32;

/// The linear memory heap must not spill over into the region reserved for procedure
/// locals, which begins at 2^30 in Miden's address space.
///
/// NOTE(review): the division by 4 (yielding 2^28) presumably translates between
/// Miden's word-addressable space and the byte addresses used here — confirm
/// against the Miden memory-layout documentation.
const HEAP_END: *mut u8 = (1usize << 28) as *mut u8;
21
/// A very simple allocator for Miden SDK-based programs.
///
/// This allocator never frees memory: it only bumps the heap top upward until no
/// space remains for further allocations.
pub struct BumpAlloc {
    /// The address at which the available heap begins
    top: AtomicPtr<u8>,
}
30
31impl Default for BumpAlloc {
32 fn default() -> Self {
33 Self::new()
34 }
35}
36
37impl BumpAlloc {
38 /// Create a new instance of this allocator
39 ///
40 /// NOTE: Only one instance of this allocator should ever be used at a time, as it is
41 /// allocating from the global heap, not from memory reserved for itself.
42 pub const fn new() -> Self {
43 Self {
44 top: AtomicPtr::new(null_mut()),
45 }
46 }
47
48 /// Initialize the allocator, if it has not yet been initialized
49 #[cfg(target_family = "wasm")]
50 fn maybe_init(&self) {
51 let top = self.top.load(Ordering::Relaxed);
52 if top.is_null() {
53 let base = unsafe { heap_base() };
54 let size = core::arch::wasm32::memory_size(0);
55 self.top.store(unsafe { base.byte_add(size * PAGE_SIZE) }, Ordering::Relaxed);
56 }
57 // TODO: Once treeify issue is fixed, switch to this implementation
58 /*
59 let _ = self.top.fetch_update(Ordering::Relaxed, Ordering::Relaxed, |top| {
60 if top.is_null() {
61 let base = unsafe { heap_base() };
62 let size = core::arch::wasm32::memory_size(0);
63 Some(unsafe { base.byte_add(size * PAGE_SIZE) })
64 } else {
65 None
66 }
67 });
68 */
69 }
70
71 #[cfg(not(target_family = "wasm"))]
72 fn maybe_init(&self) {}
73}
74
75unsafe impl GlobalAlloc for BumpAlloc {
76 unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
77 // Force allocations to be at minimally word-aligned. This is wasteful of memory, but
78 // we don't need to be particularly conservative with memory anyway, as most, if not all,
79 // Miden programs will be relatively short-lived. This makes interop at the Rust/Miden
80 // call boundary less expensive, as we can typically pass pointers directly to Miden,
81 // whereas without this alignment guarantee, we would have to set up temporary buffers for
82 // Miden code to write to, and then copy out of that buffer to whatever Rust type, e.g.
83 // `Vec`, we actually want.
84 //
85 // NOTE: This cannot fail, because we're always meeting minimum alignment requirements
86 let layout = layout
87 .align_to(core::cmp::max(layout.align(), MIN_ALIGN))
88 .unwrap()
89 .pad_to_align();
90 let size = layout.size();
91 let align = layout.align();
92
93 self.maybe_init();
94
95 let top = self.top.load(Ordering::Relaxed);
96 let available = HEAP_END.byte_offset_from(top) as usize;
97 if available >= size {
98 self.top.store(top.byte_add(size), Ordering::Relaxed);
99 unsafe { top.byte_offset(align as isize) }
100 } else {
101 null_mut()
102 }
103
104 // TODO: Once treeify issue is fixed, switch to this implementation
105 /*
106 match self.top.fetch_update(Ordering::Relaxed, Ordering::Relaxed, |top| {
107 let available = HEAP_END.byte_offset_from(top) as usize;
108 if available < size {
109 None
110 } else {
111 Some(top.byte_add(size))
112 }
113 }) {
114 Ok(prev_top) => {
115 unsafe { prev_top.byte_offset(align as isize) }
116 }
117 Err(_) => null_mut(),
118 }
119 */
120 }
121
122 unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {}
123}
124
/// Host intrinsics provided by the Miden runtime under the `intrinsics::mem` module.
#[cfg(target_family = "wasm")]
#[link(wasm_import_module = "intrinsics::mem")]
extern "C" {
    /// Returns the base address of the linear-memory heap.
    fn heap_base() -> *mut u8;
}