1use super::MemoryTr;
2use core::{
3 cell::{Ref, RefCell, RefMut},
4 cmp::min,
5 fmt,
6 ops::Range,
7};
8use primitives::{hex, B256, U256};
9use std::{rc::Rc, vec::Vec};
10
/// Convenience extension for [`RefCell`] whose borrow methods route borrow
/// failures through `debug_unreachable!` instead of returning a `Result`.
trait RefcellExt<T> {
    /// Immutably borrows the cell; a failed borrow is treated as unreachable.
    fn dbg_borrow(&self) -> Ref<'_, T>;
    /// Mutably borrows the cell; a failed borrow is treated as unreachable.
    fn dbg_borrow_mut(&self) -> RefMut<'_, T>;
}
15
impl<T> RefcellExt<T> for RefCell<T> {
    #[inline]
    fn dbg_borrow(&self) -> Ref<'_, T> {
        match self.try_borrow() {
            Ok(b) => b,
            // NOTE(review): `debug_unreachable!` is a project macro not visible in
            // this file; presumably it panics in debug builds and is
            // unreachable-unchecked in release — confirm against its definition.
            Err(e) => debug_unreachable!("{e}"),
        }
    }

    #[inline]
    fn dbg_borrow_mut(&self) -> RefMut<'_, T> {
        match self.try_borrow_mut() {
            Ok(b) => b,
            Err(e) => debug_unreachable!("{e}"),
        }
    }
}
33
/// Memory buffer shared between call contexts.
///
/// Each instance views a suffix of a single reference-counted byte buffer,
/// starting at its own checkpoint; child call frames stack further views on
/// top of the same buffer (see `new_child_context`/`free_child_context`).
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The whole backing buffer; `None` only for instances made by `invalid()`.
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// Buffer-absolute offset where this context's memory begins.
    my_checkpoint: usize,
    /// Buffer-absolute offset where the active child context begins, if any.
    child_checkpoint: Option<usize>,
    /// Upper bound in bytes on total buffer usage, checked by `limit_reached`.
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}
52
53impl fmt::Debug for SharedMemory {
54 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
55 f.debug_struct("SharedMemory")
56 .field("current_len", &self.len())
57 .field("context_memory", &hex::encode(&*self.context_memory()))
58 .finish_non_exhaustive()
59 }
60}
61
62impl Default for SharedMemory {
63 #[inline]
64 fn default() -> Self {
65 Self::new()
66 }
67}
68
impl MemoryTr for SharedMemory {
    /// Delegates to the inherent [`SharedMemory::set_data`] (inherent methods
    /// take precedence over trait methods, so this does not recurse).
    fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        self.set_data(memory_offset, data_offset, len, data);
    }

    /// Delegates to the inherent [`SharedMemory::set`].
    fn set(&mut self, memory_offset: usize, data: &[u8]) {
        self.set(memory_offset, data);
    }

    /// Size of this context's memory, not of the whole shared buffer.
    fn size(&self) -> usize {
        self.len()
    }

    /// Delegates to the inherent [`SharedMemory::copy`].
    fn copy(&mut self, destination: usize, source: usize, len: usize) {
        self.copy(destination, source, len);
    }

    /// Context-relative slice; see [`SharedMemory::slice_range`].
    fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        self.slice_range(range)
    }

    /// Buffer-absolute offset where this context's memory begins.
    fn local_memory_offset(&self) -> usize {
        self.my_checkpoint
    }

    /// Copies from a buffer-absolute source range into this context's memory;
    /// see [`SharedMemory::global_to_local_set_data`].
    fn set_data_from_global(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
    }

    /// Slice for a buffer-absolute range, ignoring this context's checkpoint.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(range) {
            Some(slice) => slice,
            // NOTE(review): "range" in this message is a literal, not an
            // interpolated value (same pattern as the inherent slice methods).
            None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
        })
    }

    /// Resizes this context's memory; always reports success.
    fn resize(&mut self, new_size: usize) -> bool {
        self.resize(new_size);
        true
    }

    /// Returns `true` when `my_checkpoint + offset + len` (saturating, so no
    /// overflow) would exceed the configured memory limit.
    #[cfg(feature = "memory_limit")]
    #[inline]
    fn limit_reached(&self, offset: usize, len: usize) -> bool {
        self.my_checkpoint
            .saturating_add(offset)
            .saturating_add(len) as u64
            > self.memory_limit
    }
}
140
impl SharedMemory {
    /// Creates a new memory instance whose buffer reserves 4 KiB up front.
    #[inline]
    pub fn new() -> Self {
        Self::with_capacity(4 * 1024) }

    /// Creates an instance with no backing buffer.
    ///
    /// Any operation that touches the buffer will trip the debug assertion in
    /// [`Self::buffer`]; use this only as a placeholder.
    #[inline]
    pub fn invalid() -> Self {
        Self {
            buffer: None,
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: 0,
        }
    }

    /// Creates a new memory instance that reuses the given shared buffer,
    /// starting at checkpoint 0.
    pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
        Self {
            buffer: Some(buffer),
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

    /// Creates a new memory instance whose buffer reserves `capacity` bytes.
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
            my_checkpoint: 0,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: u64::MAX,
        }
    }

    /// Creates a new memory instance (same as [`Self::new`]) with an upper
    /// byte limit enforced by `limit_reached`.
    #[cfg(feature = "memory_limit")]
    #[inline]
    pub fn new_with_memory_limit(memory_limit: u64) -> Self {
        Self {
            memory_limit,
            ..Self::new()
        }
    }

    /// Sets the memory limit; a no-op unless the `memory_limit` feature is on.
    #[inline]
    pub fn set_memory_limit(&mut self, limit: u64) {
        #[cfg(feature = "memory_limit")]
        {
            self.memory_limit = limit;
        }
        // Silences the unused-variable warning when `memory_limit` is disabled.
        let _ = limit;
    }

    /// Returns the backing buffer.
    ///
    /// Debug-asserts that this instance was not created by [`Self::invalid`].
    #[inline]
    fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
        // NOTE(review): the message says `SharedMemory::empty` but the
        // buffer-less constructor is named `invalid` — likely a stale name.
        debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::empty");
        // SAFETY: every constructor except `invalid` stores `Some`; using an
        // invalid instance is a caller bug caught by the assertion above in
        // debug builds.
        unsafe { self.buffer.as_ref().unwrap_unchecked() }
    }

    /// Immutably borrows the whole backing buffer.
    #[inline]
    fn buffer_ref(&self) -> Ref<'_, Vec<u8>> {
        self.buffer().dbg_borrow()
    }

    /// Mutably borrows the whole backing buffer.
    #[inline]
    fn buffer_ref_mut(&self) -> RefMut<'_, Vec<u8>> {
        self.buffer().dbg_borrow_mut()
    }

    /// Creates a child (callee) memory context sharing the same buffer,
    /// starting at the current end of the buffer.
    ///
    /// # Panics
    ///
    /// Panics if a child context is already active; it must be released with
    /// [`Self::free_child_context`] first.
    #[inline]
    pub fn new_child_context(&mut self) -> SharedMemory {
        if self.child_checkpoint.is_some() {
            panic!("new_child_context was already called without freeing child context");
        }
        let new_checkpoint = self.full_len();
        self.child_checkpoint = Some(new_checkpoint);
        SharedMemory {
            buffer: Some(self.buffer().clone()),
            my_checkpoint: new_checkpoint,
            child_checkpoint: None,
            #[cfg(feature = "memory_limit")]
            memory_limit: self.memory_limit,
        }
    }

    /// Releases the active child context by truncating the buffer back to the
    /// child's checkpoint. A no-op when no child context is active.
    #[inline]
    pub fn free_child_context(&mut self) {
        let Some(child_checkpoint) = self.child_checkpoint.take() else {
            return;
        };
        // SAFETY: `child_checkpoint` was the buffer's length when the child
        // was created, so this only ever shrinks the length; `u8` elements
        // need no drop.
        unsafe {
            self.buffer_ref_mut().set_len(child_checkpoint);
        }
    }

    /// Length of this context's memory (bytes past `my_checkpoint`).
    #[inline]
    pub fn len(&self) -> usize {
        self.full_len() - self.my_checkpoint
    }

    /// Length of the whole shared buffer, across all contexts.
    fn full_len(&self) -> usize {
        self.buffer_ref().len()
    }

    /// Returns `true` if this context's memory is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Resizes this context's memory to `new_size` bytes, zero-filling growth.
    #[inline]
    pub fn resize(&mut self, new_size: usize) {
        self.buffer()
            .dbg_borrow_mut()
            .resize(self.my_checkpoint + new_size, 0);
    }

    /// Slice of this context's memory: `size` bytes starting at `offset`.
    ///
    /// Out-of-bounds access is treated as unreachable (debug-checked).
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
        self.slice_range(offset..offset + size)
    }

    /// Slice of this context's memory for a context-relative range.
    ///
    /// Out-of-bounds access is treated as unreachable (debug-checked).
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| {
            // Translate the context-relative range to a buffer-absolute one.
            match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
                Some(slice) => slice,
                None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
            }
        })
    }

    /// Slice for a buffer-absolute range, ignoring this context's checkpoint.
    ///
    /// Out-of-bounds access is treated as unreachable (debug-checked).
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(range) {
            Some(slice) => slice,
            None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
        })
    }

    /// Mutable slice of this context's memory: `size` bytes at `offset`.
    ///
    /// Out-of-bounds access is treated as unreachable (debug-checked).
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
        let buffer = self.buffer_ref_mut();
        RefMut::map(buffer, |b| {
            match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
                Some(slice) => slice,
                None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
            }
        })
    }

    /// Returns the byte at `offset` in this context's memory.
    #[inline]
    pub fn get_byte(&self, offset: usize) -> u8 {
        self.slice_len(offset, 1)[0]
    }

    /// Returns the 32-byte word starting at `offset`.
    #[inline]
    pub fn get_word(&self, offset: usize) -> B256 {
        (*self.slice_len(offset, 32)).try_into().unwrap()
    }

    /// Returns the 32-byte word at `offset` converted to a `U256`.
    #[inline]
    pub fn get_u256(&self, offset: usize) -> U256 {
        self.get_word(offset).into()
    }

    /// Writes a single byte at `offset`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_byte(&mut self, offset: usize, byte: u8) {
        self.set(offset, &[byte]);
    }

    /// Writes a 32-byte word at `offset`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_word(&mut self, offset: usize, value: &B256) {
        self.set(offset, &value[..]);
    }

    /// Writes `value` as a 32-byte big-endian word at `offset`.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_u256(&mut self, offset: usize, value: U256) {
        self.set(offset, &value.to_be_bytes::<32>());
    }

    /// Copies `value` into this context's memory at `offset`; empty slices
    /// are a no-op.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set(&mut self, offset: usize, value: &[u8]) {
        if !value.is_empty() {
            self.slice_mut(offset, value.len()).copy_from_slice(value);
        }
    }

    /// Copies `len` bytes of `data` (starting at `data_offset`) into this
    /// context's memory at `memory_offset`, zero-padding where `data` runs
    /// out; see [`set_data`] for the exact padding semantics.
    ///
    /// Memory is assumed to already cover `memory_offset + len` (i.e. it was
    /// resized beforehand) — TODO confirm all callers uphold this.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
        let mut dst = self.context_memory_mut();
        // SAFETY: relies on `dst` being at least `memory_offset + len` bytes
        // long, per the assumption documented above.
        unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
    }

    /// Like [`Self::set_data`], but the source is a buffer-absolute range in
    /// the parent portion of the shared buffer (below `my_checkpoint`).
    ///
    /// # Panics
    ///
    /// Panics if a non-empty `data_range` does not lie within the parent
    /// portion of the buffer.
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn global_to_local_set_data(
        &mut self,
        memory_offset: usize,
        data_offset: usize,
        len: usize,
        data_range: Range<usize>,
    ) {
        let mut buffer = self.buffer_ref_mut();
        // Split so the source (parent memory) and destination (this context)
        // can be borrowed mutably at the same time without aliasing.
        let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
        let src = if data_range.is_empty() {
            &mut []
        } else {
            src.get_mut(data_range).unwrap()
        };
        // SAFETY: relies on `dst` being at least `memory_offset + len` bytes
        // long, i.e. on memory having been resized beforehand.
        unsafe { set_data(dst, src, memory_offset, data_offset, len) };
    }

    /// Copies `len` bytes within this context's memory from `src` to `dst`;
    /// the ranges may overlap (`copy_within` semantics).
    #[inline]
    #[cfg_attr(debug_assertions, track_caller)]
    pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
        self.context_memory_mut().copy_within(src..src + len, dst);
    }

    /// Borrows this context's entire memory as an immutable slice.
    #[inline]
    pub fn context_memory(&self) -> Ref<'_, [u8]> {
        let buffer = self.buffer_ref();
        Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
            Some(slice) => slice,
            None => debug_unreachable!("Context memory should be always valid"),
        })
    }

    /// Borrows this context's entire memory as a mutable slice.
    #[inline]
    pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
        let buffer = self.buffer_ref_mut();
        RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
            Some(slice) => slice,
            None => debug_unreachable!("Context memory should be always valid"),
        })
    }
}
517
/// Copies `len` bytes from `src[src_offset..]` into `dst[dst_offset..]`,
/// zero-filling every destination byte that `src` cannot provide.
///
/// # Safety
///
/// `dst` must be at least `dst_offset + len` bytes long. Only the `src` side
/// is bounds-handled gracefully: a fully out-of-range `src_offset` zero-fills
/// the whole destination range, and a partially covered range is zero-padded.
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    if len == 0 {
        return;
    }
    // Source entirely out of range: the missing bytes are treated as zeros.
    if src_offset >= src.len() {
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }
    let src_end = min(src_offset + len, src.len());
    let src_len = src_end - src_offset;
    debug_assert!(src_offset < src.len() && src_end <= src.len());
    // SAFETY: `src_offset < src.len()` was checked above and `src_end` is
    // clamped to `src.len()` by `min`.
    let data = unsafe { src.get_unchecked(src_offset..src_end) };
    // SAFETY: caller guarantees `dst_offset + len <= dst.len()` and
    // `src_len <= len`, so this subrange is in bounds.
    unsafe {
        dst.get_unchecked_mut(dst_offset..dst_offset + src_len)
            .copy_from_slice(data)
    };

    // Zero-pad the tail that `src` could not cover.
    // SAFETY: caller guarantees `dst_offset + len <= dst.len()`.
    unsafe {
        dst.get_unchecked_mut(dst_offset + src_len..dst_offset + len)
            .fill(0)
    };
}
554
/// Number of 32-byte EVM words required to hold `len` bytes, rounding up.
///
/// Uses a saturating add so `len` values near `usize::MAX` do not wrap
/// (`num_words(usize::MAX)` is `usize::MAX / 32`).
#[inline]
pub const fn num_words(len: usize) -> usize {
    // `x >> 5` is `x / 32` for unsigned integers.
    len.saturating_add(31) >> 5
}
561
562#[inline]
564#[must_use]
565pub fn resize_memory<Memory: MemoryTr>(
566 gas: &mut crate::Gas,
567 memory: &mut Memory,
568 offset: usize,
569 len: usize,
570) -> bool {
571 let new_num_words = num_words(offset.saturating_add(len));
572 if new_num_words > gas.memory().words_num {
573 resize_memory_cold(gas, memory, new_num_words)
574 } else {
575 true
576 }
577}
578
/// Slow path of [`resize_memory`]: records the new word count, charges the
/// incremental gas cost, and grows the memory to `new_num_words * 32` bytes.
#[cold]
#[inline(never)]
fn resize_memory_cold<Memory: MemoryTr>(
    gas: &mut crate::Gas,
    memory: &mut Memory,
    new_num_words: usize,
) -> bool {
    // SAFETY: presumably `record_new_len` only fails when the new length does
    // not exceed the previously recorded one, which the caller's
    // `new_num_words > words_num` check rules out — TODO confirm against
    // `Gas::record_new_len`'s contract.
    let cost = unsafe {
        gas.memory_mut()
            .record_new_len(new_num_words)
            .unwrap_unchecked()
    };
    if !gas.record_cost(cost) {
        return false;
    }
    memory.resize(new_num_words * 32);
    true
}
597
#[cfg(test)]
mod tests {
    use super::*;

    /// Word-count rounding, including the saturating behavior at `usize::MAX`.
    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }

    /// Checkpoints nest on child creation and unwind on `free_child_context`.
    ///
    /// The `set_len` calls stay within the 4 KiB capacity reserved by
    /// `SharedMemory::new`; only lengths and checkpoints are inspected, the
    /// (uninitialized) bytes themselves are never read.
    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer_ref().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        // Simulate the root context using 32 bytes of memory.
        unsafe { sm1.buffer_ref_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        // The child starts at the parent's end and sees zero length.
        assert_eq!(sm2.buffer_ref().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer_ref_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer_ref().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer_ref_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer_ref().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Freeing restores each parent's view; sm4 grew nothing, so sm3's
        // buffer length stays at 128.
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer_ref().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer_ref().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    /// `resize` is context-relative and zero-fills; freeing a child truncates
    /// the shared buffer back to the parent's view.
    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));

        // Child resize of 96 bytes lands after the parent's 32 bytes.
        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        assert_eq!(sm2.buffer_ref().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(sm2.buffer_ref().get(32..128), Some(&[0_u8; 96] as &[u8]));

        sm1.free_child_context();
        assert_eq!(sm1.buffer_ref().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer_ref().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}
681}