revm_interpreter/interpreter/shared_memory.rs

use super::MemoryTr;
use core::{
    cell::{Ref, RefCell, RefMut},
    cmp::min,
    fmt,
    ops::Range,
};
use primitives::{hex, B256, U256};
use std::{rc::Rc, vec::Vec};

/// A sequential memory shared between calls, which uses
/// a `Vec` for internal representation.
/// A [SharedMemory] instance should always be obtained using
/// the `new` associated function to ensure memory safety.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The underlying buffer.
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// This context's memory checkpoint into the shared buffer.
    /// Invariant: always in bounds of `buffer`.
    my_checkpoint: usize,
    /// The child's checkpoint, i.e. the buffer length to truncate to when the child context is freed.
    child_checkpoint: Option<usize>,
    /// Memory limit. See [`Cfg`](context_interface::Cfg).
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}

impl fmt::Debug for SharedMemory {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("SharedMemory")
            .field("current_len", &self.len())
            .field("context_memory", &hex::encode(&*self.context_memory()))
            .finish_non_exhaustive()
    }
}

impl Default for SharedMemory {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl MemoryTr for SharedMemory {
    fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        self.set_data(memory_offset, data_offset, len, data);
    }

    fn set(&mut self, memory_offset: usize, data: &[u8]) {
        self.set(memory_offset, data);
    }

    fn size(&self) -> usize {
        self.len()
    }

    fn copy(&mut self, destination: usize, source: usize, len: usize) {
        self.copy(destination, source, len);
    }

    fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        self.slice_range(range)
    }

    fn local_memory_offset(&self) -> usize {
        self.my_checkpoint
    }

    fn set_data_from_global(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
    }

    /// Returns a byte slice of the global memory buffer for the given range.
    ///
    /// # Safety
    ///
    /// In debug builds this panics on out of bounds; in release builds it will silently fail.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer().borrow(); // Borrow the inner Vec<u8>
        Ref::map(buffer, |b| match b.get(range.clone()) {
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
        })
    }

    fn resize(&mut self, new_size: usize) -> bool {
        self.resize(new_size);
        true
    }
}

impl SharedMemory {
    /// Creates a new memory instance that can be shared between calls.
    ///
    /// The default initial capacity is 4KiB.
    #[inline]
    pub fn new() -> Self {
        Self::with_capacity(4 * 1024) // from evmone
    }

    /// Creates a new invalid memory instance.
    #[inline]
    pub fn invalid() -> Self {
        Self {
            buffer: None,
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: 0,
        }
    }

    /// Creates a new memory instance with a given shared buffer.
    pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
        Self {
            buffer: Some(buffer),
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

    /// Creates a new memory instance that can be shared between calls with the given `capacity`.
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

    /// Creates a new memory instance that can be shared between calls,
    /// with `memory_limit` as upper bound for allocation size.
    ///
    /// The default initial capacity is 4KiB.
    #[cfg(feature = "memory_limit")]
    #[inline]
    pub fn new_with_memory_limit(memory_limit: u64) -> Self {
        Self {
            memory_limit,
            ..Self::new()
        }
    }

    /// Returns a reference to the shared buffer.
    ///
    /// Must not be called on an instance created with [`Self::invalid`].
    #[inline]
    fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
        debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::invalid");
        unsafe { self.buffer.as_ref().unwrap_unchecked() }
    }

    /// Returns `true` if the `new_size` for the current context memory will
    /// make the shared buffer length exceed the `memory_limit`.
    #[cfg(feature = "memory_limit")]
    #[inline]
    pub fn limit_reached(&self, new_size: usize) -> bool {
        self.my_checkpoint.saturating_add(new_size) as u64 > self.memory_limit
    }

    /// Prepares the shared memory for a new child context.
    ///
    /// # Panics
    ///
    /// Panics if this function was already called without freeing child context.
    #[inline]
    pub fn new_child_context(&mut self) -> SharedMemory {
        if self.child_checkpoint.is_some() {
            panic!("new_child_context was already called without freeing child context");
        }
        let new_checkpoint = self.buffer().borrow().len();
        self.child_checkpoint = Some(new_checkpoint);
        SharedMemory {
            buffer: Some(self.buffer().clone()),
            my_checkpoint: new_checkpoint,
            // The new context has no child of its own yet; its checkpoint equals our `child_checkpoint`.
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: self.memory_limit,
        }
    }

    /// Prepares the shared memory for returning from a child context. Does nothing if there is no child context.
    #[inline]
    pub fn free_child_context(&mut self) {
        let Some(child_checkpoint) = self.child_checkpoint.take() else {
            return;
        };
        // SAFETY: `child_checkpoint` is never greater than the current buffer length,
        // and shrinking the length of an initialized `Vec<u8>` is sound.
        unsafe {
            self.buffer().borrow_mut().set_len(child_checkpoint);
        }
    }

    /// Returns the length of the current memory range.
    #[inline]
    pub fn len(&self) -> usize {
        self.buffer().borrow().len() - self.my_checkpoint
    }

    /// Returns `true` if the current memory range is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Resizes the memory in-place so that `len` is equal to `new_size`.
    #[inline]
    pub fn resize(&mut self, new_size: usize) {
        self.buffer()
            .borrow_mut()
            .resize(self.my_checkpoint + new_size, 0);
    }

    /// Returns a byte slice of the memory region at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
        self.slice_range(offset..offset + size)
    }

    /// Returns a byte slice of the memory region for the given range.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer().borrow(); // Borrow the inner Vec<u8>
        Ref::map(buffer, |b| {
            match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
                Some(slice) => slice,
                None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
            }
        })
    }

    /// Returns a byte slice of the global memory buffer for the given range.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer().borrow(); // Borrow the inner Vec<u8>
        Ref::map(buffer, |b| match b.get(range.clone()) {
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: {range:?}; len: {}", self.len()),
        })
    }

    /// Returns a mutable byte slice of the memory region at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
        let buffer = self.buffer().borrow_mut(); // Borrow the inner Vec<u8> mutably
        RefMut::map(buffer, |b| {
            match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
                Some(slice) => slice,
                None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
            }
        })
    }

    /// Returns the byte at the given offset.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    pub fn get_byte(&self, offset: usize) -> u8 {
        self.slice_len(offset, 1)[0]
    }

    /// Returns the 32-byte word at the given offset as a [`B256`].
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    pub fn get_word(&self, offset: usize) -> B256 {
        (*self.slice_len(offset, 32)).try_into().unwrap()
    }

    /// Returns the 32-byte word at the given offset as a [`U256`].
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    pub fn get_u256(&self, offset: usize) -> U256 {
        self.get_word(offset).into()
    }

    /// Sets the given `byte` at the given `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_byte(&mut self, offset: usize, byte: u8) {
        self.set(offset, &[byte]);
    }

    /// Sets the given 32-byte `value` at the given `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_word(&mut self, offset: usize, value: &B256) {
        self.set(offset, &value[..]);
    }

    /// Sets the given U256 `value` at the given `offset`.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_u256(&mut self, offset: usize, value: U256) {
        self.set(offset, &value.to_be_bytes::<32>());
    }

    /// Sets the memory region at the given `offset` to the given `value`.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set(&mut self, offset: usize, value: &[u8]) {
        if !value.is_empty() {
            self.slice_mut(offset, value.len()).copy_from_slice(value);
        }
    }

    /// Sets memory from `data`. The memory region `memory_offset..memory_offset + len` is expected
    /// to be valid, but bounds checks are performed on `data`/`data_offset`/`len`, and any part of
    /// the region not covered by `data` is zeroed.
    ///
    /// # Panics
    ///
    /// Panics if memory is out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        let mut dst = self.context_memory_mut();
        unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
    }

    /// Sets data from the global memory into the local (current context) memory. If the global
    /// `data_range` is shorter than `len`, the remainder is zeroed.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn global_to_local_set_data(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        let mut buffer = self.buffer().borrow_mut(); // Borrow the inner Vec<u8> mutably
        let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
        let src = if data_range.is_empty() {
            &mut []
        } else {
            src.get_mut(data_range).unwrap()
        };
        unsafe { set_data(dst, src, memory_offset, data_offset, len) };
    }

    /// Copies elements from one part of the memory to another part of itself.
    ///
    /// # Panics
    ///
    /// Panics on out of bounds.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
        self.context_memory_mut().copy_within(src..src + len, dst);
    }

    /// Returns a reference to the memory of the current context, the active memory.
    #[inline]
    pub fn context_memory(&self) -> Ref<'_, [u8]> {
        let buffer = self.buffer().borrow();
        Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
            Some(slice) => slice,
            None => debug_unreachable!("Context memory should be always valid"),
        })
    }

    /// Returns a mutable reference to the memory of the current context.
    #[inline]
    pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
        let buffer = self.buffer().borrow_mut(); // Borrow the inner Vec<u8> mutably
        RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
            Some(slice) => slice,
            None => debug_unreachable!("Context memory should be always valid"),
        })
    }
}

/// Copies data from src to dst taking into account the offsets and len.
///
/// If src does not have enough data, it nullifies the rest of dst that is not copied.
///
/// # Safety
///
/// Assumes that dst has enough space to copy the data.
/// Assumes that src has enough data to copy.
/// Assumes that dst_offset and src_offset are in bounds.
/// Assumes that dst and src are valid.
/// Assumes that dst and src do not overlap.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    if src_offset >= src.len() {
        // Nullify all memory slots
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    let src_end = min(src_offset + len, src.len());
    let src_len = src_end - src_offset;
    debug_assert!(src_offset < src.len() && src_end <= src.len());
    let data = unsafe { src.get_unchecked(src_offset..src_end) };
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
            .copy_from_slice(data)
    };

    // Nullify rest of memory slots
    // SAFETY: Memory is assumed to be valid, and it is commented where this assumption is made.
    unsafe {
        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
            .fill(0)
    };
}

/// Returns the number of 32-byte words needed to hold the provided number of bytes,
/// i.e. it rounds the byte count up to the nearest word.
#[inline]
pub const fn num_words(len: usize) -> usize {
    len.saturating_add(31) / 32
}

#[cfg(test)]
mod tests {
    use super::*;

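    // A minimal extra test (not from the upstream suite) sketching the byte-, word- and
    // U256-level accessors; it only relies on the items brought in by `use super::*`.
    #[test]
    fn set_get_roundtrip() {
        let mut sm = SharedMemory::new();
        sm.resize(64);

        sm.set_byte(0, 0xAA);
        assert_eq!(sm.get_byte(0), 0xAA);

        let value = U256::from(0xDEADBEEFu64);
        sm.set_u256(32, value);
        assert_eq!(sm.get_u256(32), value);
        assert_eq!(sm.get_word(32), B256::from(value));
    }
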
    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }
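
    // A minimal extra test (not from the upstream suite) sketching the documented `set_data`
    // zero-fill behavior: destination bytes not covered by the source are written as zeros.
    #[test]
    fn set_data_zero_fills_missing_source() {
        let mut sm = SharedMemory::new();
        sm.resize(32);

        // Only 4 source bytes are available, so the remaining 28 bytes are zeroed.
        sm.set_data(0, 0, 32, &[1, 2, 3, 4]);
        assert_eq!(&sm.slice_len(0, 4)[..], &[1_u8, 2, 3, 4][..]);
        assert_eq!(&sm.slice_len(4, 28)[..], &[0_u8; 28][..]);

        // A data offset past the end of the source zero-fills the whole region.
        sm.set_data(0, 8, 32, &[1, 2, 3, 4]);
        assert_eq!(&sm.slice_len(0, 32)[..], &[0_u8; 32][..]);
    }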

    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer().borrow().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        unsafe { sm1.buffer().borrow_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        assert_eq!(sm2.buffer().borrow().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer().borrow_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer().borrow().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer().borrow_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer().borrow().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Free contexts
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer().borrow().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer().borrow().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer().borrow().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }
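
    // A minimal extra test (not from the upstream suite) sketching `limit_reached`: the check is
    // against the shared buffer length, i.e. the context checkpoint plus the requested size.
    #[cfg(feature = "memory_limit")]
    #[test]
    fn limit_reached_accounts_for_checkpoint() {
        let mut sm = SharedMemory::new_with_memory_limit(64);
        sm.resize(32);

        let child = sm.new_child_context();
        assert_eq!(child.my_checkpoint, 32);
        assert!(!child.limit_reached(32));
        assert!(child.limit_reached(33));
    }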

    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer().borrow().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer().borrow().get(0..32), Some(&[0_u8; 32] as &[u8]));

        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer().borrow().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(
            sm2.buffer().borrow().get(32..128),
            Some(&[0_u8; 96] as &[u8])
        );

        sm1.free_child_context();
        assert_eq!(sm1.buffer().borrow().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer().borrow().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
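
    // A minimal extra test (not from the upstream suite) sketching `copy` within one context and
    // `global_to_local_set_data`, which reads from the parent region via a global range.
    #[test]
    fn copy_and_global_to_local_set_data() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        sm1.set(0, &[7_u8; 8]);

        // Copy the first 8 bytes to offset 16 within the same context.
        sm1.copy(16, 0, 8);
        assert_eq!(&sm1.slice_len(16, 8)[..], &[7_u8; 8][..]);

        // A child context pulls the parent's bytes 0..8 into its own offset 0,
        // zero-filling the rest of the requested 16-byte destination region.
        let mut sm2 = sm1.new_child_context();
        sm2.resize(32);
        sm2.global_to_local_set_data(0, 0, 16, 0..8);
        assert_eq!(&sm2.slice_len(0, 8)[..], &[7_u8; 8][..]);
        assert_eq!(&sm2.slice_len(8, 8)[..], &[0_u8; 8][..]);

        sm1.free_child_context();
        assert_eq!(sm1.len(), 32);
    }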
}