tasm_lib/memory/dyn_malloc.rs

use std::collections::HashMap;

use num_traits::Zero;
use rand::prelude::*;
use triton_vm::memory_layout::MemoryRegion;
use triton_vm::prelude::*;

use crate::empty_stack;
use crate::prelude::*;
use crate::snippet_bencher::BenchmarkCase;
use crate::traits::function::Function;
use crate::traits::function::FunctionInitialState;

/// The location of the dynamic allocator state in memory.
///
/// See the [memory convention][super] for details.
pub const DYN_MALLOC_ADDRESS: BFieldElement = BFieldElement::new(BFieldElement::MAX);

/// The index of the first page that can be dynamically allocated.
pub const DYN_MALLOC_FIRST_PAGE: u64 = 1;

/// The number of pages that can be dynamically allocated.
pub const NUM_ALLOCATABLE_PAGES: u64 = (1 << 31) - 1;

/// The size of one dynamically allocated page.
pub const DYN_MALLOC_PAGE_SIZE: u64 = 1 << 32;

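/// The address of the first page that can be dynamically allocated.
///
/// A page with index `n` starts at address `n * DYN_MALLOC_PAGE_SIZE`. As purely
/// illustrative arithmetic: page 1 starts at 2^32, page 2 at 2 * 2^32, and so on.
/// The allocator's own state lives outside this region, at [`DYN_MALLOC_ADDRESS`],
/// i.e., at address -1.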
pub const DYN_MALLOC_FIRST_ADDRESS: BFieldElement =
    BFieldElement::new(DYN_MALLOC_FIRST_PAGE * DYN_MALLOC_PAGE_SIZE);

/// Return a pointer to the next free page of memory and update the dynamic
/// allocator's state accordingly.
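///
/// A minimal sketch of the expected behavior, exercised through the Rust shadow
/// (illustrative only, not a doctest). It assumes an initially empty memory, in
/// which case the allocator initializes itself to [`DYN_MALLOC_FIRST_PAGE`]:
///
/// ```ignore
/// let mut stack = empty_stack();
/// let mut memory = HashMap::new();
/// DynMalloc.rust_shadow(&mut stack, &mut memory);
///
/// // the returned pointer is the start of the first allocatable page …
/// assert_eq!(Some(&DYN_MALLOC_FIRST_ADDRESS), stack.last());
/// // … and the allocator state now points to the next free page
/// assert_eq!(Some(&bfe!(DYN_MALLOC_FIRST_PAGE + 1)), memory.get(&DYN_MALLOC_ADDRESS));
/// ```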
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
pub struct DynMalloc;

impl DynMalloc {
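    /// The memory region from which pages are dynamically allocated: it starts at
    /// [`DYN_MALLOC_FIRST_ADDRESS`] and spans all [`NUM_ALLOCATABLE_PAGES`] pages
    /// of [`DYN_MALLOC_PAGE_SIZE`] addresses each.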
    pub fn memory_region() -> MemoryRegion {
        MemoryRegion::new(
            DYN_MALLOC_FIRST_ADDRESS,
            (NUM_ALLOCATABLE_PAGES * DYN_MALLOC_PAGE_SIZE)
                .try_into()
                .unwrap(),
        )
    }
}

impl BasicSnippet for DynMalloc {
    fn inputs(&self) -> Vec<(DataType, String)> {
        vec![]
    }

    fn outputs(&self) -> Vec<(DataType, String)> {
        vec![(DataType::Bfe, "*addr".to_string())]
    }

    fn entrypoint(&self) -> String {
        "tasmlib_memory_dyn_malloc".to_string()
    }

    fn code(&self, _: &mut Library) -> Vec<LabelledInstruction> {
        let entrypoint = self.entrypoint();
        let dyn_malloc_init = format!("{entrypoint}_initialize");

        triton_asm! {
        // BEFORE: _
        // AFTER:  _ *addr
        {entrypoint}:
            push {DYN_MALLOC_ADDRESS}       // _ *dyn_malloc_state
            read_mem 1 pop 1                // _ page_idx

            dup 0 push 0 eq                 // _ page_idx (page_idx == 0)
            skiz call {dyn_malloc_init}     // _ page_idx

            // Verify that the allocation lies inside the allowed region
            push {NUM_ALLOCATABLE_PAGES}
            dup 1
            lt                              // _ page_idx (page_idx < NUM_ALLOCATABLE_PAGES)
            assert error_id 70

            // update dynamic allocator state
            dup 0                           // _ page_idx page_idx
            push 1                          // _ page_idx page_idx 1
            add                             // _ page_idx next_page_idx
            push {DYN_MALLOC_ADDRESS}       // _ page_idx next_page_idx *dyn_malloc_state
            write_mem 1 pop 1               // _ page_idx

            // translate page number to address
            push {DYN_MALLOC_PAGE_SIZE}     // _ page_idx page_size
            mul                             // _ *free_page
            return

        // BEFORE: _ 0
        // AFTER:  _ DYN_MALLOC_FIRST_PAGE
        {dyn_malloc_init}:
            pop 1
            push {DYN_MALLOC_FIRST_PAGE}
            return
        }
    }
}

impl Function for DynMalloc {
    fn rust_shadow(
        &self,
        stack: &mut Vec<BFieldElement>,
        memory: &mut HashMap<BFieldElement, BFieldElement>,
    ) {
        let mut page_idx = memory.get(&DYN_MALLOC_ADDRESS).copied().unwrap_or_default();
        if page_idx.is_zero() {
            page_idx = DYN_MALLOC_FIRST_PAGE.into();
        }
        let page_idx = page_idx;

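        // This mirrors `assert error_id 70` in the tasm code above.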
        assert!(
            page_idx.value() < NUM_ALLOCATABLE_PAGES,
            "All allocations must happen inside dyn malloc's region"
        );

        let next_page_idx = page_idx + bfe!(1);

        memory.insert(DYN_MALLOC_ADDRESS, next_page_idx);

        let page_address = page_idx * BFieldElement::new(DYN_MALLOC_PAGE_SIZE);
        stack.push(page_address);
    }

    fn pseudorandom_initial_state(
        &self,
        seed: [u8; 32],
        _: Option<BenchmarkCase>,
    ) -> FunctionInitialState {
        let mut rng = StdRng::from_seed(seed);

        let stack = empty_stack();

        let mut memory = HashMap::new();
        let page_number = rng.random_range(0..1_u64 << 31);
        memory.insert(DYN_MALLOC_ADDRESS, page_number.into());

        FunctionInitialState { stack, memory }
    }

    fn corner_case_initial_states(&self) -> Vec<FunctionInitialState> {
        let empty_vm_state = {
            let stack = self.init_stack_for_isolated_run();
            let memory = HashMap::new();

            FunctionInitialState { stack, memory }
        };

        let one_page_has_been_allocated = {
            let stack = self.init_stack_for_isolated_run();
            let memory: HashMap<_, _> = [(DYN_MALLOC_ADDRESS, bfe!(1))].into_iter().collect();

            FunctionInitialState { stack, memory }
        };

        let second_to_last_page_has_been_allocated = {
            let stack = self.init_stack_for_isolated_run();
            let memory: HashMap<_, _> = [(DYN_MALLOC_ADDRESS, bfe!(NUM_ALLOCATABLE_PAGES - 1))]
                .into_iter()
                .collect();

            FunctionInitialState { stack, memory }
        };

        let third_to_last_page_has_been_allocated = {
            let stack = self.init_stack_for_isolated_run();
            let memory: HashMap<_, _> = [(DYN_MALLOC_ADDRESS, bfe!(NUM_ALLOCATABLE_PAGES - 2))]
                .into_iter()
                .collect();

            FunctionInitialState { stack, memory }
        };

        vec![
            empty_vm_state,
            one_page_has_been_allocated,
            second_to_last_page_has_been_allocated,
            third_to_last_page_has_been_allocated,
        ]
    }
}

#[cfg(test)]
pub mod tests {
    use std::collections::HashMap;

    use super::*;
    use crate::test_helpers::negative_test;
    use crate::test_prelude::*;

    #[test]
    fn expected_address_chosen_for_dyn_malloc() {
        assert_eq!(bfe!(-1), DYN_MALLOC_ADDRESS);
    }

    #[test]
    fn rust_shadow() {
        ShadowedFunction::new(DynMalloc).test();
    }
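
    /// A small sanity sketch that only exercises the Rust shadow: two consecutive
    /// allocations return pointers exactly one page apart, starting at
    /// [`DYN_MALLOC_FIRST_ADDRESS`].
    #[test]
    fn consecutive_allocations_are_one_page_apart() {
        let mut stack = empty_stack();
        let mut memory = HashMap::new();

        DynMalloc.rust_shadow(&mut stack, &mut memory);
        DynMalloc.rust_shadow(&mut stack, &mut memory);

        let second = stack.pop().unwrap();
        let first = stack.pop().unwrap();
        assert_eq!(DYN_MALLOC_FIRST_ADDRESS, first);
        assert_eq!(first + bfe!(DYN_MALLOC_PAGE_SIZE), second);
    }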

    #[test]
    fn disallow_allocation_outside_of_region_unit_test() {
        fn negative_prop_disallow_allocation_outside_of_region(memory_page_index: BFieldElement) {
            let snippet = DynMalloc;
            let init_stack = snippet.init_stack_for_isolated_run();
            let shadowed_snippet = ShadowedFunction::new(snippet);

            let memory_filled_dyn_malloc: HashMap<_, _> = [(DYN_MALLOC_ADDRESS, memory_page_index)]
                .into_iter()
                .collect();
            let init_state =
                InitVmState::with_stack_and_memory(init_stack, memory_filled_dyn_malloc);

            test_assertion_failure(&shadowed_snippet, init_state, &[70]);
        }

        // The last page (or beyond) has already been allocated; the next call must fail.
        negative_prop_disallow_allocation_outside_of_region(bfe!(NUM_ALLOCATABLE_PAGES));
        negative_prop_disallow_allocation_outside_of_region(bfe!(NUM_ALLOCATABLE_PAGES + 1));
        negative_prop_disallow_allocation_outside_of_region(bfe!(NUM_ALLOCATABLE_PAGES + 2));
        negative_prop_disallow_allocation_outside_of_region(bfe!(u32::MAX - 1));
        negative_prop_disallow_allocation_outside_of_region(bfe!(u32::MAX));
    }

    #[proptest]
    fn disallow_allocation_if_page_counter_is_not_a_u32(
        #[strategy(arb())]
        #[filter(#address.value() > u64::from(u32::MAX))]
        address: BFieldElement,
    ) {
        fn negative_prop_disallow_allocation_with_non_u32_page_counter(
            mem_page_index: BFieldElement,
        ) {
            let snippet = DynMalloc;
            let init_stack = snippet.init_stack_for_isolated_run();
            let shadowed_snippet = ShadowedFunction::new(snippet);

            let memory_filled_dyn_malloc: HashMap<_, _> =
                [(DYN_MALLOC_ADDRESS, mem_page_index)].into_iter().collect();
            let init_state =
                InitVmState::with_stack_and_memory(init_stack, memory_filled_dyn_malloc);
            let expected_err =
                InstructionError::OpStackError(OpStackError::FailedU32Conversion(mem_page_index));
            negative_test(&shadowed_snippet, init_state, &[expected_err]);
        }

        negative_prop_disallow_allocation_with_non_u32_page_counter(address);
    }

    #[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
    struct MultipleDynMallocCalls {
        num_calls: usize,
    }

    impl BasicSnippet for MultipleDynMallocCalls {
        fn inputs(&self) -> Vec<(DataType, String)> {
            vec![]
        }

        fn outputs(&self) -> Vec<(DataType, String)> {
            vec![(DataType::Bfe, "*addr".to_string()); self.num_calls]
        }

        fn entrypoint(&self) -> String {
            "tasmlib_memory_dyn_malloc_multiple_calls".to_string()
        }

        fn code(&self, lib: &mut Library) -> Vec<LabelledInstruction> {
            let dyn_malloc = lib.import(Box::new(DynMalloc));

            let single_call = triton_asm!(call { dyn_malloc });
            let multiple_calls = vec![single_call; self.num_calls].concat();

            triton_asm!( {self.entrypoint()}: {&multiple_calls} return )
        }
    }

    impl Function for MultipleDynMallocCalls {
        fn rust_shadow(
            &self,
            stack: &mut Vec<BFieldElement>,
            memory: &mut HashMap<BFieldElement, BFieldElement>,
        ) {
            for _ in 0..self.num_calls {
                DynMalloc.rust_shadow(stack, memory);
            }
        }

        fn pseudorandom_initial_state(
            &self,
            seed: [u8; 32],
            _: Option<BenchmarkCase>,
        ) -> FunctionInitialState {
            DynMalloc.pseudorandom_initial_state(seed, None)
        }
    }

    #[proptest(cases = 20)]
    fn dynamic_allocator_can_be_called_multiple_times(
        #[strategy(0_usize..1_000)] num_calls: usize,
    ) {
        let multiple_calls = MultipleDynMallocCalls { num_calls };
        ShadowedFunction::new(multiple_calls).test();
    }
}

#[cfg(test)]
mod benches {
    use super::*;
    use crate::test_prelude::*;

    #[test]
    fn benchmark() {
        ShadowedFunction::new(DynMalloc).bench();
    }
}