// revm_interpreter/interpreter/shared_memory.rs

1use super::MemoryTr;
2use core::{
3    cell::{Ref, RefCell, RefMut},
4    cmp::min,
5    fmt,
6    ops::Range,
7};
8use primitives::{hex, B256, U256};
9use std::{rc::Rc, vec::Vec};
10
/// Debug-checked borrowing helpers for [`RefCell`].
///
/// A failed borrow is routed through `debug_unreachable!` — presumably a panic
/// in debug builds and an unreachable hint in release builds; confirm against
/// the macro's definition elsewhere in the crate.
trait RefcellExt<T> {
    /// Immutably borrows the cell; a conflicting live borrow is treated as a bug.
    fn dbg_borrow(&self) -> Ref<'_, T>;
    /// Mutably borrows the cell; a conflicting live borrow is treated as a bug.
    fn dbg_borrow_mut(&self) -> RefMut<'_, T>;
}
15
impl<T> RefcellExt<T> for RefCell<T> {
    #[inline]
    fn dbg_borrow(&self) -> Ref<'_, T> {
        // `try_borrow` only fails if a mutable borrow is live; within this
        // module that would be an internal invariant violation.
        match self.try_borrow() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }

    #[inline]
    fn dbg_borrow_mut(&self) -> RefMut<'_, T> {
        // Same reasoning as `dbg_borrow`: any live borrow here is a bug.
        match self.try_borrow_mut() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }
}
33
/// A sequential memory shared between calls, which uses
/// a `Vec` for internal representation.
/// A [SharedMemory] instance should always be obtained using
/// the `new` static method to ensure memory safety.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The underlying buffer, shared between call contexts via `Rc`.
    /// `None` only for instances created by [`SharedMemory::invalid`];
    /// every accessor assumes it is `Some`.
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// Offset into `buffer` at which this context's memory begins.
    /// Invariant: always in bounds of the buffer.
    my_checkpoint: usize,
    /// Child checkpoint that we need to free context to.
    /// `Some` while a child context created by `new_child_context` is live.
    child_checkpoint: Option<usize>,
    /// Memory limit. See [`Cfg`](context_interface::Cfg).
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}
52
53impl fmt::Debug for SharedMemory {
54    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
55        f.debug_struct("SharedMemory")
56            .field("current_len", &self.len())
57            .field("context_memory", &hex::encode(&*self.context_memory()))
58            .finish_non_exhaustive()
59    }
60}
61
impl Default for SharedMemory {
    /// Delegates to [`SharedMemory::new`] (4KiB preallocated shared buffer).
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
68
69impl MemoryTr for SharedMemory {
70    fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
71        self.set_data(memory_offset, data_offset, len, data);
72    }
73
74    fn set(&mut self, memory_offset: usize, data: &[u8]) {
75        self.set(memory_offset, data);
76    }
77
78    fn size(&self) -> usize {
79        self.len()
80    }
81
82    fn copy(&mut self, destination: usize, source: usize, len: usize) {
83        self.copy(destination, source, len);
84    }
85
86    fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
87        self.slice_range(range)
88    }
89
90    fn local_memory_offset(&self) -> usize {
91        self.my_checkpoint
92    }
93
94    fn set_data_from_global(
95        &mut self,
96        memory_offset: usize,
97        data_offset: usize,
98        len: usize,
99        data_range: Range<usize>,
100    ) {
101        self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
102    }
103
104    /// Returns a byte slice of the memory region at the given offset.
105    ///
106    /// # Panics
107    ///
108    /// Panics on out of bounds access in debug builds only.
109    ///
110    /// # Safety
111    ///
112    /// In release builds, calling this method with an out-of-bounds range triggers undefined
113    /// behavior. Callers must ensure that the range is within the bounds of the buffer.
114    #[inline]
115    #[cfg_attr(debug_assertions, track_caller)]
116    fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
117        let buffer = self.buffer_ref();
118        Ref::map(buffer, |b| match b.get(range) {
119            Some(slice) => slice,
120            None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
121        })
122    }
123
124    fn resize(&mut self, new_size: usize) -> bool {
125        self.resize(new_size);
126        true
127    }
128
129    /// Returns `true` if the `new_size` for the current context memory will
130    /// make the shared buffer length exceed the `memory_limit`.
131    #[cfg(feature = "memory_limit")]
132    #[inline]
133    fn limit_reached(&self, offset: usize, len: usize) -> bool {
134        self.my_checkpoint
135            .saturating_add(offset)
136            .saturating_add(len) as u64
137            > self.memory_limit
138    }
139}
140
141impl SharedMemory {
142    /// Creates a new memory instance that can be shared between calls.
143    ///
144    /// The default initial capacity is 4KiB.
145    #[inline]
146    pub fn new() -> Self {
147        Self::with_capacity(4 * 1024) // from evmone
148    }
149
150    /// Creates a new invalid memory instance.
151    #[inline]
152    pub fn invalid() -> Self {
153        Self {
154            buffer: None,
155            my_checkpoint: 0,
156            child_checkpoint: None,
157            #[cfg(feature = "memory_limit")]
158            memory_limit: 0,
159        }
160    }
161
162    /// Creates a new memory instance with a given shared buffer.
163    pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
164        Self {
165            buffer: Some(buffer),
166            my_checkpoint: 0,
167            child_checkpoint: None,
168            #[cfg(feature = "memory_limit")]
169            memory_limit: u64::MAX,
170        }
171    }
172
173    /// Creates a new memory instance that can be shared between calls with the given `capacity`.
174    #[inline]
175    pub fn with_capacity(capacity: usize) -> Self {
176        Self {
177            buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
178            my_checkpoint: 0,
179            child_checkpoint: None,
180            #[cfg(feature = "memory_limit")]
181            memory_limit: u64::MAX,
182        }
183    }
184
185    /// Creates a new memory instance that can be shared between calls,
186    /// with `memory_limit` as upper bound for allocation size.
187    ///
188    /// The default initial capacity is 4KiB.
189    #[cfg(feature = "memory_limit")]
190    #[inline]
191    pub fn new_with_memory_limit(memory_limit: u64) -> Self {
192        Self {
193            memory_limit,
194            ..Self::new()
195        }
196    }
197
198    /// Sets the memory limit in bytes.
199    #[inline]
200    pub fn set_memory_limit(&mut self, limit: u64) {
201        #[cfg(feature = "memory_limit")]
202        {
203            self.memory_limit = limit;
204        }
205        // for clippy.
206        let _ = limit;
207    }
208
209    #[inline]
210    fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
211        debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::empty");
212        unsafe { self.buffer.as_ref().unwrap_unchecked() }
213    }
214
215    #[inline]
216    fn buffer_ref(&self) -> Ref<'_, Vec<u8>> {
217        self.buffer().dbg_borrow()
218    }
219
220    #[inline]
221    fn buffer_ref_mut(&self) -> RefMut<'_, Vec<u8>> {
222        self.buffer().dbg_borrow_mut()
223    }
224
225    /// Prepares the shared memory for a new child context.
226    ///
227    /// # Panics
228    ///
229    /// Panics if this function was already called without freeing child context.
230    #[inline]
231    pub fn new_child_context(&mut self) -> SharedMemory {
232        if self.child_checkpoint.is_some() {
233            panic!("new_child_context was already called without freeing child context");
234        }
235        let new_checkpoint = self.full_len();
236        self.child_checkpoint = Some(new_checkpoint);
237        SharedMemory {
238            buffer: Some(self.buffer().clone()),
239            my_checkpoint: new_checkpoint,
240            // child_checkpoint is same as my_checkpoint
241            child_checkpoint: None,
242            #[cfg(feature = "memory_limit")]
243            memory_limit: self.memory_limit,
244        }
245    }
246
247    /// Prepares the shared memory for returning from child context. Do nothing if there is no child context.
248    #[inline]
249    pub fn free_child_context(&mut self) {
250        let Some(child_checkpoint) = self.child_checkpoint.take() else {
251            return;
252        };
253        unsafe {
254            self.buffer_ref_mut().set_len(child_checkpoint);
255        }
256    }
257
258    /// Returns the length of the current memory range.
259    #[inline]
260    pub fn len(&self) -> usize {
261        self.full_len() - self.my_checkpoint
262    }
263
264    fn full_len(&self) -> usize {
265        self.buffer_ref().len()
266    }
267
268    /// Returns `true` if the current memory range is empty.
269    #[inline]
270    pub fn is_empty(&self) -> bool {
271        self.len() == 0
272    }
273
274    /// Resizes the memory in-place so that `len` is equal to `new_len`.
275    #[inline]
276    pub fn resize(&mut self, new_size: usize) {
277        self.buffer()
278            .dbg_borrow_mut()
279            .resize(self.my_checkpoint + new_size, 0);
280    }
281
282    /// Returns a byte slice of the memory region at the given offset.
283    ///
284    /// # Panics
285    ///
286    /// Panics on out of bounds.
287    #[inline]
288    #[cfg_attr(debug_assertions, track_caller)]
289    pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
290        self.slice_range(offset..offset + size)
291    }
292
293    /// Returns a byte slice of the memory region at the given offset.
294    ///
295    /// # Panics
296    ///
297    /// Panics on out of bounds access in debug builds only.
298    ///
299    /// # Safety
300    ///
301    /// In release builds, calling this method with an out-of-bounds range triggers undefined
302    /// behavior. Callers must ensure that the range is within the bounds of the memory (i.e.,
303    /// `range.end <= self.len()`).
304    #[inline]
305    #[cfg_attr(debug_assertions, track_caller)]
306    pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
307        let buffer = self.buffer_ref();
308        Ref::map(buffer, |b| {
309            match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
310                Some(slice) => slice,
311                None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
312            }
313        })
314    }
315
316    /// Returns a byte slice of the memory region at the given offset.
317    ///
318    /// # Panics
319    ///
320    /// Panics on out of bounds access in debug builds only.
321    ///
322    /// # Safety
323    ///
324    /// In release builds, calling this method with an out-of-bounds range triggers undefined
325    /// behavior. Callers must ensure that the range is within the bounds of the buffer.
326    #[inline]
327    #[cfg_attr(debug_assertions, track_caller)]
328    pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
329        let buffer = self.buffer_ref();
330        Ref::map(buffer, |b| match b.get(range) {
331            Some(slice) => slice,
332            None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
333        })
334    }
335
336    /// Returns a byte slice of the memory region at the given offset.
337    ///
338    /// # Panics
339    ///
340    /// Panics on out of bounds access in debug builds only.
341    ///
342    /// # Safety
343    ///
344    /// In release builds, calling this method with out-of-bounds parameters triggers undefined
345    /// behavior. Callers must ensure that `offset + size` does not exceed the length of the
346    /// memory.
347    #[inline]
348    #[cfg_attr(debug_assertions, track_caller)]
349    pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
350        let buffer = self.buffer_ref_mut();
351        RefMut::map(buffer, |b| {
352            match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
353                Some(slice) => slice,
354                None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
355            }
356        })
357    }
358
359    /// Returns the byte at the given offset.
360    ///
361    /// # Panics
362    ///
363    /// Panics on out of bounds.
364    #[inline]
365    pub fn get_byte(&self, offset: usize) -> u8 {
366        self.slice_len(offset, 1)[0]
367    }
368
369    /// Returns a 32-byte slice of the memory region at the given offset.
370    ///
371    /// # Panics
372    ///
373    /// Panics on out of bounds.
374    #[inline]
375    pub fn get_word(&self, offset: usize) -> B256 {
376        (*self.slice_len(offset, 32)).try_into().unwrap()
377    }
378
379    /// Returns a U256 of the memory region at the given offset.
380    ///
381    /// # Panics
382    ///
383    /// Panics on out of bounds.
384    #[inline]
385    pub fn get_u256(&self, offset: usize) -> U256 {
386        self.get_word(offset).into()
387    }
388
389    /// Sets the `byte` at the given `index`.
390    ///
391    /// # Panics
392    ///
393    /// Panics on out of bounds.
394    #[inline]
395    #[cfg_attr(debug_assertions, track_caller)]
396    pub fn set_byte(&mut self, offset: usize, byte: u8) {
397        self.set(offset, &[byte]);
398    }
399
400    /// Sets the given 32-byte `value` to the memory region at the given `offset`.
401    ///
402    /// # Panics
403    ///
404    /// Panics on out of bounds.
405    #[inline]
406    #[cfg_attr(debug_assertions, track_caller)]
407    pub fn set_word(&mut self, offset: usize, value: &B256) {
408        self.set(offset, &value[..]);
409    }
410
411    /// Sets the given U256 `value` to the memory region at the given `offset`.
412    ///
413    /// # Panics
414    ///
415    /// Panics on out of bounds.
416    #[inline]
417    #[cfg_attr(debug_assertions, track_caller)]
418    pub fn set_u256(&mut self, offset: usize, value: U256) {
419        self.set(offset, &value.to_be_bytes::<32>());
420    }
421
422    /// Set memory region at given `offset`.
423    ///
424    /// # Panics
425    ///
426    /// Panics on out of bounds.
427    #[inline]
428    #[cfg_attr(debug_assertions, track_caller)]
429    pub fn set(&mut self, offset: usize, value: &[u8]) {
430        if !value.is_empty() {
431            self.slice_mut(offset, value.len()).copy_from_slice(value);
432        }
433    }
434
435    /// Set memory from data. Our memory offset+len is expected to be correct but we
436    /// are doing bound checks on data/data_offeset/len and zeroing parts that is not copied.
437    ///
438    /// # Panics
439    ///
440    /// Panics if memory is out of bounds.
441    #[inline]
442    #[cfg_attr(debug_assertions, track_caller)]
443    pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
444        let mut dst = self.context_memory_mut();
445        unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
446    }
447
448    /// Set data from global memory to local memory. If global range is smaller than len, zeroes the rest.
449    #[inline]
450    #[cfg_attr(debug_assertions, track_caller)]
451    pub fn global_to_local_set_data(
452        &mut self,
453        memory_offset: usize,
454        data_offset: usize,
455        len: usize,
456        data_range: Range<usize>,
457    ) {
458        let mut buffer = self.buffer_ref_mut();
459        let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
460        let src = if data_range.is_empty() {
461            &mut []
462        } else {
463            src.get_mut(data_range).unwrap()
464        };
465        unsafe { set_data(dst, src, memory_offset, data_offset, len) };
466    }
467
468    /// Copies elements from one part of the memory to another part of itself.
469    ///
470    /// # Panics
471    ///
472    /// Panics on out of bounds.
473    #[inline]
474    #[cfg_attr(debug_assertions, track_caller)]
475    pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
476        self.context_memory_mut().copy_within(src..src + len, dst);
477    }
478
479    /// Returns a reference to the memory of the current context, the active memory.
480    ///
481    /// # Panics
482    ///
483    /// Panics if the checkpoint is invalid in debug builds only.
484    ///
485    /// # Safety
486    ///
487    /// In release builds, calling this method with an invalid checkpoint triggers undefined
488    /// behavior. The checkpoint must be within the bounds of the buffer.
489    #[inline]
490    pub fn context_memory(&self) -> Ref<'_, [u8]> {
491        let buffer = self.buffer_ref();
492        Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
493            Some(slice) => slice,
494            None => debug_unreachable!("Context memory should be always valid"),
495        })
496    }
497
498    /// Returns a mutable reference to the memory of the current context.
499    ///
500    /// # Panics
501    ///
502    /// Panics if the checkpoint is invalid in debug builds only.
503    ///
504    /// # Safety
505    ///
506    /// In release builds, calling this method with an invalid checkpoint triggers undefined
507    /// behavior. The checkpoint must be within the bounds of the buffer.
508    #[inline]
509    pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
510        let buffer = self.buffer_ref_mut();
511        RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
512            Some(slice) => slice,
513            None => debug_unreachable!("Context memory should be always valid"),
514        })
515    }
516}
517
/// Copies data from src to dst taking into account the offsets and len.
///
/// If src does not have enough data, it nullifies the rest of dst that is not copied.
///
/// # Safety
///
/// `dst` must be large enough that `dst_offset + len` is in bounds: the
/// copy/zero-fill paths use `get_unchecked_mut`, so an undersized `dst` is
/// undefined behavior (only the "src exhausted" path bound-checks `dst` and
/// panics instead).
///
/// `src`, `src_offset`, and `len` need no such guarantee: the source side is
/// bound-checked here and any shortfall in `src` is zero-filled in `dst`.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    // Nothing to copy or zero.
    if len == 0 {
        return;
    }
    if src_offset >= src.len() {
        // Source exhausted before the copy starts: nullify all `len` slots.
        // Note this path bound-checks `dst` (panics on OOB), unlike below.
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    // Clamp the copy to the available source bytes.
    let src_end = min(src_offset + len, src.len());
    let src_len = src_end - src_offset;
    debug_assert!(src_offset < src.len() && src_end <= src.len());
    // SAFETY: `src_offset < src.len()` and `src_end <= src.len()` hold per the
    // clamping above (also debug-asserted).
    let data = unsafe { src.get_unchecked(src_offset..src_end) };
    // SAFETY: caller guarantees `dst_offset + len <= dst.len()` and `src_len <= len`.
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
            .copy_from_slice(data)
    };

    // Nullify rest of memory slots
    // SAFETY: Memory is assumed to be valid, and it is commented where this assumption is made.
    unsafe {
        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
            .fill(0)
    };
}
554
/// Returns the number of 32-byte EVM words needed to hold `len` bytes,
/// i.e. `len` rounded up to a whole number of words.
///
/// Near `usize::MAX` the rounding saturates rather than overflowing, so
/// `num_words(usize::MAX) == usize::MAX / 32`.
#[inline]
pub const fn num_words(len: usize) -> usize {
    match len.checked_add(31) {
        Some(padded) => padded / 32,
        // Overflow: mirrors the saturating behavior of `len.saturating_add(31) / 32`.
        None => usize::MAX / 32,
    }
}
561
562/// Performs EVM memory resize.
563#[inline]
564#[must_use]
565pub fn resize_memory<Memory: MemoryTr>(
566    gas: &mut crate::Gas,
567    memory: &mut Memory,
568    offset: usize,
569    len: usize,
570) -> bool {
571    let new_num_words = num_words(offset.saturating_add(len));
572    if new_num_words > gas.memory().words_num {
573        resize_memory_cold(gas, memory, new_num_words)
574    } else {
575        true
576    }
577}
578
// Slow path of `resize_memory`: charge the expansion cost, then grow memory.
// Kept out-of-line so the common no-growth path stays small.
#[cold]
#[inline(never)]
fn resize_memory_cold<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    new_num_words: usize,
) -> bool {
    // SAFETY: presumably `record_new_len` only returns `None` when the length
    // does not grow, and the caller (`resize_memory`) invokes this only when
    // `new_num_words > words_num` — TODO(review): confirm against the memory
    // gas implementation.
    let cost = unsafe {
        gas.memory_mut()
            .record_new_len(new_num_words)
            .unwrap_unchecked()
    };
    // Out of gas: report failure without touching memory.
    if !gas.record_cost(cost) {
        return false;
    }
    // Charge succeeded; grow to a whole number of 32-byte words.
    memory.resize(new_num_words * 32);
    true
}
597
#[cfg(test)]
mod tests {
    use super::*;

    // Word count rounds up, and saturates at usize::MAX instead of overflowing.
    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }

    // Child contexts start at the parent's buffer end; freeing a child
    // truncates the shared buffer back to the parent's checkpoint.
    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer_ref().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        // SAFETY(test): `new` reserves 4KiB of capacity, so raising the length
        // stays within the allocation; the exposed bytes are never read here.
        unsafe { sm1.buffer_ref_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        assert_eq!(sm2.buffer_ref().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        // SAFETY(test): still within the 4KiB capacity; contents unread.
        unsafe { sm2.buffer_ref_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer_ref().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        // SAFETY(test): still within the 4KiB capacity; contents unread.
        unsafe { sm3.buffer_ref_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer_ref().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Free contexts
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer_ref().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer_ref().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    // Resize zero-fills new space and only affects the resizing context's region.
    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));

        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer_ref().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer_ref().get(32..128), Some(&[0_u8; 96] as &[u8]));

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}