sp1_recursion_core/runtime/memory.rs

use std::{
    cell::UnsafeCell,
    mem::{self, MaybeUninit},
};

use p3_field::PrimeField64;

use crate::{air::Block, Address};

/// A single memory entry, consisting of one block of field elements.
#[derive(Debug, Clone, Default, Copy)]
pub struct MemoryEntry<F> {
    pub val: Block<F>,
}

/// `UnsafeCell`, but `Sync`.
///
/// A replication of the standard library type `SyncUnsafeCell`, still unstable as of Rust 1.81.0.
#[derive(Debug, Default)]
#[repr(transparent)]
struct SyncUnsafeCell<T: ?Sized>(UnsafeCell<T>);

unsafe impl<T: ?Sized + Sync> Sync for SyncUnsafeCell<T> {}

/// A buffer of possibly uninitialized memory entries that can be shared across threads.
#[derive(Debug, Default)]
pub struct MemVec<F>(Vec<SyncUnsafeCell<MaybeUninit<MemoryEntry<F>>>>);

impl<F: PrimeField64> MemVec<F> {
    /// Creates a memory consisting of `capacity` uninitialized entries.
    pub fn with_capacity(capacity: usize) -> Self {
        // SAFETY: SyncUnsafeCell is a `repr(transparent)` newtype of `UnsafeCell`, which has
        // the same representation as its inner type.
        Self(unsafe {
            mem::transmute::<
                Vec<MaybeUninit<MemoryEntry<F>>>,
                Vec<SyncUnsafeCell<MaybeUninit<MemoryEntry<F>>>>,
            >(vec![MaybeUninit::uninit(); capacity])
        })
    }

    /// Reads the memory entry at `addr`, panicking if it is out of bounds.
    pub fn mr(&mut self, addr: Address<F>) -> &MemoryEntry<F> {
        // SAFETY: We have exclusive access to the memory, so no data races can occur.
        unsafe { self.mr_unchecked(addr) }
    }

    /// # Safety
    /// This should be called precisely when memory is to be read according to a happens-before
    /// relation corresponding to the documented invariants of [`crate::RecursionProgram`].
    /// This guarantees the absence of any data races.
    pub unsafe fn mr_unchecked(&self, addr: Address<F>) -> &MemoryEntry<F> {
        match self.0.get(addr.as_usize()).map(|c| unsafe {
            // SAFETY: The pointer is dereferenceable. It has already been written to due to the
            // happens-before relation (in `mw_unchecked`), so no mutable/unique reference can
            // exist. The immutable/shared reference returned remains valid for the (elided)
            // lifetime of `&self`, since it refers to memory directly owned by `self`.
            &*c.0.get()
        }) {
            Some(entry) => unsafe {
                // SAFETY: It has already been written to, so the value is valid. The reference
                // obeys both lifetime and aliasing rules, as discussed above.
                entry.assume_init_ref()
            },
            None => panic!(
                "expected address {} to be less than length {}",
                addr.as_usize(),
                self.0.len()
            ),
        }
    }

    /// Writes `val` to the memory entry at `addr`, panicking if it is out of bounds.
    pub fn mw(&mut self, addr: Address<F>, val: Block<F>) {
        // SAFETY: We have exclusive access to the memory, so no data races can occur.
        // Leaks may occur if the same address is written to twice, unless `F` is trivially
        // destructible (which is the case if it is merely a number).
        unsafe { self.mw_unchecked(addr, val) }
    }

    /// # Safety
    /// This should be called precisely when memory is to be written according to a happens-before
    /// relation corresponding to the documented invariants of [`crate::RecursionProgram`].
    /// This guarantees the absence of any data races.
    pub unsafe fn mw_unchecked(&self, addr: Address<F>, val: Block<F>) {
        match self.0.get(addr.as_usize()).map(|c| unsafe {
            // SAFETY: The pointer is dereferenceable. There are no other aliases to the data
            // because of the happens-before relation (no other `mw_unchecked` can be invoked on
            // the same address, and this call happens-before any `mr_unchecked`).
            // The mutable/unique reference is dropped below, so it does not escape with
            // an invalid lifetime.
            &mut *c.0.get()
        }) {
            // This does not leak memory because the address is written to exactly once.
            // Leaking is memory-safe in Rust, so this isn't technically a "SAFETY" comment.
            Some(entry) => drop(entry.write(MemoryEntry { val })),
            None => panic!(
                "expected address {} to be less than length {}",
                addr.as_usize(),
                self.0.len()
            ),
        }
    }
}
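
// A minimal usage sketch of the write-once, write-before-read discipline described above.
// Assumptions (not established by this file): `p3_baby_bear::BabyBear` is available as a
// concrete `PrimeField64` implementation, element constructors come from
// `p3_field::AbstractField`, `Address` and `Block` are public newtypes over a field element
// and `[F; 4]` respectively, and `Block` implements `PartialEq` and `Debug`. Adjust the
// constructors if the actual definitions differ.
#[cfg(test)]
mod usage_sketch {
    use p3_baby_bear::BabyBear;
    use p3_field::AbstractField;

    use super::MemVec;
    use crate::{air::Block, Address};

    type F = BabyBear;

    #[test]
    fn safe_write_then_read() {
        let mut mem = MemVec::<F>::with_capacity(16);

        // The safe wrappers take `&mut self`, so exclusive access rules out data races,
        // and the read is issued only after the write.
        mem.mw(Address(F::from_canonical_u32(3)), Block([F::one(); 4]));
        assert_eq!(mem.mr(Address(F::from_canonical_u32(3))).val, Block([F::one(); 4]));
    }

    #[test]
    fn unchecked_write_then_read() {
        let mem = MemVec::<F>::with_capacity(4);

        // SAFETY (for this sketch only): on a single thread the write trivially happens-before
        // the read, and the address is written exactly once, mirroring the discipline that the
        // `RecursionProgram` invariants establish across threads.
        unsafe {
            mem.mw_unchecked(Address(F::zero()), Block([F::two(); 4]));
            assert_eq!(mem.mr_unchecked(Address(F::zero())).val, Block([F::two(); 4]));
        }
    }
}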