1use super::MemoryTr;
2use core::{
3 cell::{Ref, RefCell, RefMut},
4 cmp::min,
5 fmt,
6 ops::Range,
7};
8use primitives::{hex, B256, U256};
9use std::{rc::Rc, vec::Vec};
10
/// Debug-checked borrow helpers for [`RefCell`].
///
/// A failed borrow is routed through `debug_unreachable!`: it reports the
/// borrow error in debug builds, while release builds treat the failure
/// branch as unreachable. NOTE(review): soundness therefore relies on callers
/// never holding conflicting borrows — confirm `debug_unreachable!`'s release
/// expansion before assuming this is checked at runtime.
trait RefcellExt<T> {
    /// Immutably borrows the cell, assuming no outstanding mutable borrow.
    fn dbg_borrow(&self) -> Ref<'_, T>;
    /// Mutably borrows the cell, assuming no outstanding borrow.
    fn dbg_borrow_mut(&self) -> RefMut<'_, T>;
}
15
impl<T> RefcellExt<T> for RefCell<T> {
    #[inline]
    fn dbg_borrow(&self) -> Ref<'_, T> {
        match self.try_borrow() {
            Ok(b) => b,
            // Debug builds panic with the `BorrowError`; release builds
            // assume this branch cannot happen.
            Err(e) => debug_unreachable!("{e}"),
        }
    }

    #[inline]
    fn dbg_borrow_mut(&self) -> RefMut<'_, T> {
        match self.try_borrow_mut() {
            Ok(b) => b,
            // Same contract as `dbg_borrow`, for the mutable borrow.
            Err(e) => debug_unreachable!("{e}"),
        }
    }
}
33
/// Memory buffer shared between call contexts.
///
/// Each context owns a window of the single backing buffer starting at its
/// checkpoint; child contexts append after the parent's data and are freed by
/// truncating back to the recorded checkpoint.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// Backing buffer shared by all contexts via `Rc<RefCell<_>>`.
    /// `None` only for instances built by `invalid()`, which must not be
    /// used for any memory operation.
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// Byte offset into the buffer where this context's memory begins.
    my_checkpoint: usize,
    /// Byte offset where the active child context begins, if one exists.
    child_checkpoint: Option<usize>,
    /// Maximum allowed total buffer size in bytes.
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}
52
53impl fmt::Debug for SharedMemory {
54 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
55 f.debug_struct("SharedMemory")
56 .field("current_len", &self.len())
57 .field("context_memory", &hex::encode(&*self.context_memory()))
58 .finish_non_exhaustive()
59 }
60}
61
impl Default for SharedMemory {
    /// Equivalent to [`SharedMemory::new`].
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
68
69impl MemoryTr for SharedMemory {
70 fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
71 self.set_data(memory_offset, data_offset, len, data);
72 }
73
74 fn set(&mut self, memory_offset: usize, data: &[u8]) {
75 self.set(memory_offset, data);
76 }
77
78 fn size(&self) -> usize {
79 self.len()
80 }
81
82 fn copy(&mut self, destination: usize, source: usize, len: usize) {
83 self.copy(destination, source, len);
84 }
85
86 fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
87 self.slice_range(range)
88 }
89
90 fn local_memory_offset(&self) -> usize {
91 self.my_checkpoint
92 }
93
94 fn set_data_from_global(
95 &mut self,
96 memory_offset: usize,
97 data_offset: usize,
98 len: usize,
99 data_range: Range<usize>,
100 ) {
101 self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
102 }
103
104 #[inline]
115 #[cfg_attr(debug_assertions, track_caller)]
116 fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
117 let buffer = self.buffer_ref();
118 Ref::map(buffer, |b| match b.get(range) {
119 Some(slice) => slice,
120 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
121 })
122 }
123
124 fn resize(&mut self, new_size: usize) -> bool {
125 self.resize(new_size);
126 true
127 }
128
129 #[cfg(feature = "memory_limit")]
132 #[inline]
133 fn limit_reached(&self, offset: usize, len: usize) -> bool {
134 self.my_checkpoint
135 .saturating_add(offset)
136 .saturating_add(len) as u64
137 > self.memory_limit
138 }
139}
140
141impl SharedMemory {
142 #[inline]
146 pub fn new() -> Self {
147 Self::with_capacity(4 * 1024) }
149
150 #[inline]
152 pub fn invalid() -> Self {
153 Self {
154 buffer: None,
155 my_checkpoint: 0,
156 child_checkpoint: None,
157 #[cfg(feature = "memory_limit")]
158 memory_limit: 0,
159 }
160 }
161
162 pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
164 Self {
165 buffer: Some(buffer),
166 my_checkpoint: 0,
167 child_checkpoint: None,
168 #[cfg(feature = "memory_limit")]
169 memory_limit: u64::MAX,
170 }
171 }
172
173 #[inline]
175 pub fn with_capacity(capacity: usize) -> Self {
176 Self {
177 buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
178 my_checkpoint: 0,
179 child_checkpoint: None,
180 #[cfg(feature = "memory_limit")]
181 memory_limit: u64::MAX,
182 }
183 }
184
185 #[cfg(feature = "memory_limit")]
190 #[inline]
191 pub fn new_with_memory_limit(memory_limit: u64) -> Self {
192 Self {
193 memory_limit,
194 ..Self::new()
195 }
196 }
197
198 #[inline]
199 fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
200 debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::empty");
201 unsafe { self.buffer.as_ref().unwrap_unchecked() }
202 }
203
204 #[inline]
205 fn buffer_ref(&self) -> Ref<'_, Vec<u8>> {
206 self.buffer().dbg_borrow()
207 }
208
209 #[inline]
210 fn buffer_ref_mut(&self) -> RefMut<'_, Vec<u8>> {
211 self.buffer().dbg_borrow_mut()
212 }
213
214 #[inline]
220 pub fn new_child_context(&mut self) -> SharedMemory {
221 if self.child_checkpoint.is_some() {
222 panic!("new_child_context was already called without freeing child context");
223 }
224 let new_checkpoint = self.full_len();
225 self.child_checkpoint = Some(new_checkpoint);
226 SharedMemory {
227 buffer: Some(self.buffer().clone()),
228 my_checkpoint: new_checkpoint,
229 child_checkpoint: None,
231 #[cfg(feature = "memory_limit")]
232 memory_limit: self.memory_limit,
233 }
234 }
235
236 #[inline]
238 pub fn free_child_context(&mut self) {
239 let Some(child_checkpoint) = self.child_checkpoint.take() else {
240 return;
241 };
242 unsafe {
243 self.buffer_ref_mut().set_len(child_checkpoint);
244 }
245 }
246
247 #[inline]
249 pub fn len(&self) -> usize {
250 self.full_len() - self.my_checkpoint
251 }
252
253 fn full_len(&self) -> usize {
254 self.buffer_ref().len()
255 }
256
257 #[inline]
259 pub fn is_empty(&self) -> bool {
260 self.len() == 0
261 }
262
263 #[inline]
265 pub fn resize(&mut self, new_size: usize) {
266 self.buffer()
267 .dbg_borrow_mut()
268 .resize(self.my_checkpoint + new_size, 0);
269 }
270
271 #[inline]
277 #[cfg_attr(debug_assertions, track_caller)]
278 pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
279 self.slice_range(offset..offset + size)
280 }
281
282 #[inline]
294 #[cfg_attr(debug_assertions, track_caller)]
295 pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
296 let buffer = self.buffer_ref();
297 Ref::map(buffer, |b| {
298 match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
299 Some(slice) => slice,
300 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
301 }
302 })
303 }
304
305 #[inline]
316 #[cfg_attr(debug_assertions, track_caller)]
317 pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
318 let buffer = self.buffer_ref();
319 Ref::map(buffer, |b| match b.get(range) {
320 Some(slice) => slice,
321 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
322 })
323 }
324
325 #[inline]
337 #[cfg_attr(debug_assertions, track_caller)]
338 pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
339 let buffer = self.buffer_ref_mut();
340 RefMut::map(buffer, |b| {
341 match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
342 Some(slice) => slice,
343 None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
344 }
345 })
346 }
347
348 #[inline]
354 pub fn get_byte(&self, offset: usize) -> u8 {
355 self.slice_len(offset, 1)[0]
356 }
357
358 #[inline]
364 pub fn get_word(&self, offset: usize) -> B256 {
365 (*self.slice_len(offset, 32)).try_into().unwrap()
366 }
367
368 #[inline]
374 pub fn get_u256(&self, offset: usize) -> U256 {
375 self.get_word(offset).into()
376 }
377
378 #[inline]
384 #[cfg_attr(debug_assertions, track_caller)]
385 pub fn set_byte(&mut self, offset: usize, byte: u8) {
386 self.set(offset, &[byte]);
387 }
388
389 #[inline]
395 #[cfg_attr(debug_assertions, track_caller)]
396 pub fn set_word(&mut self, offset: usize, value: &B256) {
397 self.set(offset, &value[..]);
398 }
399
400 #[inline]
406 #[cfg_attr(debug_assertions, track_caller)]
407 pub fn set_u256(&mut self, offset: usize, value: U256) {
408 self.set(offset, &value.to_be_bytes::<32>());
409 }
410
411 #[inline]
417 #[cfg_attr(debug_assertions, track_caller)]
418 pub fn set(&mut self, offset: usize, value: &[u8]) {
419 if !value.is_empty() {
420 self.slice_mut(offset, value.len()).copy_from_slice(value);
421 }
422 }
423
424 #[inline]
431 #[cfg_attr(debug_assertions, track_caller)]
432 pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
433 let mut dst = self.context_memory_mut();
434 unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
435 }
436
437 #[inline]
439 #[cfg_attr(debug_assertions, track_caller)]
440 pub fn global_to_local_set_data(
441 &mut self,
442 memory_offset: usize,
443 data_offset: usize,
444 len: usize,
445 data_range: Range<usize>,
446 ) {
447 let mut buffer = self.buffer_ref_mut();
448 let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
449 let src = if data_range.is_empty() {
450 &mut []
451 } else {
452 src.get_mut(data_range).unwrap()
453 };
454 unsafe { set_data(dst, src, memory_offset, data_offset, len) };
455 }
456
457 #[inline]
463 #[cfg_attr(debug_assertions, track_caller)]
464 pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
465 self.context_memory_mut().copy_within(src..src + len, dst);
466 }
467
468 #[inline]
479 pub fn context_memory(&self) -> Ref<'_, [u8]> {
480 let buffer = self.buffer_ref();
481 Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
482 Some(slice) => slice,
483 None => debug_unreachable!("Context memory should be always valid"),
484 })
485 }
486
487 #[inline]
498 pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
499 let buffer = self.buffer_ref_mut();
500 RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
501 Some(slice) => slice,
502 None => debug_unreachable!("Context memory should be always valid"),
503 })
504 }
505}
506
/// Copies `len` bytes from `src[src_offset..]` into `dst[dst_offset..]`,
/// zero-filling any part of the destination span the source cannot cover.
///
/// # Safety
///
/// The caller must guarantee `dst_offset + len <= dst.len()`. The zero-only
/// branch checks this via `unwrap`, but the copy and tail-fill paths use
/// unchecked slicing on `dst`.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    if len == 0 {
        return;
    }
    // Source offset past the end: nothing to copy, zero the whole span.
    if src_offset >= src.len() {
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    // Clamp the copy to the bytes actually available in `src`.
    let src_end = min(src_offset + len, src.len());
    let src_len = src_end - src_offset;
    debug_assert!(src_offset < src.len() && src_end <= src.len());
    // SAFETY: `src_offset < src.len()` and `src_end <= src.len()` hold here
    // (checked above / by `min`).
    let data = unsafe { src.get_unchecked(src_offset..src_end) };
    // SAFETY: caller guarantees `dst_offset + len <= dst.len()` and
    // `src_len <= len`.
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
            .copy_from_slice(data)
    };

    // Zero-fill the tail that the (clamped) source did not cover.
    // SAFETY: same caller-provided bound as above.
    unsafe {
        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
            .fill(0)
    };
}
543
/// Returns the number of 32-byte words needed to hold `len` bytes, rounding
/// up, and saturating at `usize::MAX / 32` when the rounding would overflow.
#[inline]
pub const fn num_words(len: usize) -> usize {
    match len.checked_add(31) {
        Some(padded) => padded / 32,
        // `len` is within 31 of `usize::MAX`: behave like `saturating_add`.
        None => usize::MAX / 32,
    }
}
550
551#[inline]
553#[must_use]
554pub fn resize_memory<Memory: MemoryTr>(
555 gas: &mut crate::Gas,
556 memory: &mut Memory,
557 offset: usize,
558 len: usize,
559) -> bool {
560 let new_num_words = num_words(offset.saturating_add(len));
561 if new_num_words > gas.memory().words_num {
562 resize_memory_cold(gas, memory, new_num_words)
563 } else {
564 true
565 }
566}
567
/// Slow path of `resize_memory`: records the new word count, charges the
/// expansion cost, and grows the memory on success. Returns `false` if the
/// cost could not be paid (memory is left unchanged in that case).
#[cold]
#[inline(never)]
fn resize_memory_cold<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    new_num_words: usize,
) -> bool {
    // SAFETY(review): assumes `record_new_len` cannot fail when called with a
    // word count larger than the current one (caller checked
    // `new_num_words > words_num`) — confirm against the gas-memory invariants.
    let cost = unsafe {
        gas.memory_mut()
            .record_new_len(new_num_words)
            .unwrap_unchecked()
    };
    if !gas.record_cost(cost) {
        return false;
    }
    memory.resize(new_num_words * 32);
    true
}
586
#[cfg(test)]
mod tests {
    use super::*;

    // Word-count rounding, including the saturating behavior at usize::MAX.
    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }

    // Checkpoint bookkeeping across nested child contexts: each child starts
    // at the buffer's end, and freeing truncates back to that checkpoint.
    // `set_len` is used to fake growth without zero-initializing (the bytes
    // are never read here).
    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer_ref().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        unsafe { sm1.buffer_ref_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        assert_eq!(sm2.buffer_ref().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer_ref_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer_ref().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer_ref_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer_ref().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Free in reverse order; each free truncates to the child's start.
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer_ref().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer_ref().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    // Resizing zero-fills the new bytes and is relative to each context's
    // checkpoint; freeing the child restores the parent's view.
    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));

        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer_ref().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer_ref().get(32..128), Some(&[0_u8; 96] as &[u8]));

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}
670}