1use super::MemoryTr;
2use core::{
3 cell::{Ref, RefCell, RefMut},
4 cmp::min,
5 fmt,
6 ops::Range,
7};
8use primitives::{hex, B256, U256};
9use std::{rc::Rc, vec::Vec};
10
/// A sequential memory buffer shared between call contexts.
///
/// Each context owns the tail of the buffer starting at its `my_checkpoint`;
/// child contexts are appended past the parent's current length and freed by
/// truncating back to the recorded checkpoint.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct SharedMemory {
    /// The underlying buffer, shared via `Rc` with parent/child contexts.
    /// `None` only for placeholder instances created by `invalid()`.
    buffer: Option<Rc<RefCell<Vec<u8>>>>,
    /// Offset into `buffer` where this context's memory begins.
    my_checkpoint: usize,
    /// Offset where the currently active child context begins, if
    /// `new_child_context` was called and not yet freed.
    child_checkpoint: Option<usize>,
    /// Upper bound (in bytes) on total buffer size; checked by `limit_reached`.
    #[cfg(feature = "memory_limit")]
    memory_limit: u64,
}
29
30impl fmt::Debug for SharedMemory {
31 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
32 f.debug_struct("SharedMemory")
33 .field("current_len", &self.len())
34 .field("context_memory", &hex::encode(&*self.context_memory()))
35 .finish_non_exhaustive()
36 }
37}
38
39impl Default for SharedMemory {
40 #[inline]
41 fn default() -> Self {
42 Self::new()
43 }
44}
45
46impl MemoryTr for SharedMemory {
47 fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
48 self.set_data(memory_offset, data_offset, len, data);
49 }
50
51 fn set(&mut self, memory_offset: usize, data: &[u8]) {
52 self.set(memory_offset, data);
53 }
54
55 fn size(&self) -> usize {
56 self.len()
57 }
58
59 fn copy(&mut self, destination: usize, source: usize, len: usize) {
60 self.copy(destination, source, len);
61 }
62
63 fn slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
64 self.slice_range(range)
65 }
66
67 fn local_memory_offset(&self) -> usize {
68 self.my_checkpoint
69 }
70
71 fn set_data_from_global(
72 &mut self,
73 memory_offset: usize,
74 data_offset: usize,
75 len: usize,
76 data_range: Range<usize>,
77 ) {
78 self.global_to_local_set_data(memory_offset, data_offset, len, data_range);
79 }
80
81 #[inline]
87 #[cfg_attr(debug_assertions, track_caller)]
88 fn global_slice(&self, range: Range<usize>) -> Ref<'_, [u8]> {
89 let buffer = self.buffer().borrow(); Ref::map(buffer, |b| match b.get(range) {
91 Some(slice) => slice,
92 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
93 })
94 }
95
96 fn resize(&mut self, new_size: usize) -> bool {
97 self.resize(new_size);
98 true
99 }
100}
101
102impl SharedMemory {
103 #[inline]
107 pub fn new() -> Self {
108 Self::with_capacity(4 * 1024) }
110
111 #[inline]
113 pub fn invalid() -> Self {
114 Self {
115 buffer: None,
116 my_checkpoint: 0,
117 child_checkpoint: None,
118 #[cfg(feature = "memory_limit")]
119 memory_limit: 0,
120 }
121 }
122
123 pub fn new_with_buffer(buffer: Rc<RefCell<Vec<u8>>>) -> Self {
125 Self {
126 buffer: Some(buffer),
127 my_checkpoint: 0,
128 child_checkpoint: None,
129 #[cfg(feature = "memory_limit")]
130 memory_limit: u64::MAX,
131 }
132 }
133
134 #[inline]
136 pub fn with_capacity(capacity: usize) -> Self {
137 Self {
138 buffer: Some(Rc::new(RefCell::new(Vec::with_capacity(capacity)))),
139 my_checkpoint: 0,
140 child_checkpoint: None,
141 #[cfg(feature = "memory_limit")]
142 memory_limit: u64::MAX,
143 }
144 }
145
146 #[cfg(feature = "memory_limit")]
151 #[inline]
152 pub fn new_with_memory_limit(memory_limit: u64) -> Self {
153 Self {
154 memory_limit,
155 ..Self::new()
156 }
157 }
158
159 #[inline]
160 fn buffer(&self) -> &Rc<RefCell<Vec<u8>>> {
161 debug_assert!(self.buffer.is_some(), "cannot use SharedMemory::empty");
162 unsafe { self.buffer.as_ref().unwrap_unchecked() }
163 }
164
165 #[cfg(feature = "memory_limit")]
168 #[inline]
169 pub fn limit_reached(&self, new_size: usize) -> bool {
170 self.my_checkpoint.saturating_add(new_size) as u64 > self.memory_limit
171 }
172
173 #[inline]
179 pub fn new_child_context(&mut self) -> SharedMemory {
180 if self.child_checkpoint.is_some() {
181 panic!("new_child_context was already called without freeing child context");
182 }
183 let new_checkpoint = self.buffer().borrow().len();
184 self.child_checkpoint = Some(new_checkpoint);
185 SharedMemory {
186 buffer: Some(self.buffer().clone()),
187 my_checkpoint: new_checkpoint,
188 child_checkpoint: None,
190 #[cfg(feature = "memory_limit")]
191 memory_limit: self.memory_limit,
192 }
193 }
194
195 #[inline]
197 pub fn free_child_context(&mut self) {
198 let Some(child_checkpoint) = self.child_checkpoint.take() else {
199 return;
200 };
201 unsafe {
202 self.buffer().borrow_mut().set_len(child_checkpoint);
203 }
204 }
205
206 #[inline]
208 pub fn len(&self) -> usize {
209 self.buffer().borrow().len() - self.my_checkpoint
210 }
211
212 #[inline]
214 pub fn is_empty(&self) -> bool {
215 self.len() == 0
216 }
217
218 #[inline]
220 pub fn resize(&mut self, new_size: usize) {
221 self.buffer()
222 .borrow_mut()
223 .resize(self.my_checkpoint + new_size, 0);
224 }
225
226 #[inline]
232 #[cfg_attr(debug_assertions, track_caller)]
233 pub fn slice_len(&self, offset: usize, size: usize) -> Ref<'_, [u8]> {
234 self.slice_range(offset..offset + size)
235 }
236
237 #[inline]
243 #[cfg_attr(debug_assertions, track_caller)]
244 pub fn slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
245 let buffer = self.buffer().borrow(); Ref::map(buffer, |b| {
247 match b.get(range.start + self.my_checkpoint..range.end + self.my_checkpoint) {
248 Some(slice) => slice,
249 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
250 }
251 })
252 }
253
254 #[inline]
260 #[cfg_attr(debug_assertions, track_caller)]
261 pub fn global_slice_range(&self, range: Range<usize>) -> Ref<'_, [u8]> {
262 let buffer = self.buffer().borrow(); Ref::map(buffer, |b| match b.get(range) {
264 Some(slice) => slice,
265 None => debug_unreachable!("slice OOB: range; len: {}", self.len()),
266 })
267 }
268
269 #[inline]
275 #[cfg_attr(debug_assertions, track_caller)]
276 pub fn slice_mut(&mut self, offset: usize, size: usize) -> RefMut<'_, [u8]> {
277 let buffer = self.buffer().borrow_mut(); RefMut::map(buffer, |b| {
279 match b.get_mut(self.my_checkpoint + offset..self.my_checkpoint + offset + size) {
280 Some(slice) => slice,
281 None => debug_unreachable!("slice OOB: {offset}..{}", offset + size),
282 }
283 })
284 }
285
286 #[inline]
292 pub fn get_byte(&self, offset: usize) -> u8 {
293 self.slice_len(offset, 1)[0]
294 }
295
296 #[inline]
302 pub fn get_word(&self, offset: usize) -> B256 {
303 (*self.slice_len(offset, 32)).try_into().unwrap()
304 }
305
306 #[inline]
312 pub fn get_u256(&self, offset: usize) -> U256 {
313 self.get_word(offset).into()
314 }
315
316 #[inline]
322 #[cfg_attr(debug_assertions, track_caller)]
323 pub fn set_byte(&mut self, offset: usize, byte: u8) {
324 self.set(offset, &[byte]);
325 }
326
327 #[inline]
333 #[cfg_attr(debug_assertions, track_caller)]
334 pub fn set_word(&mut self, offset: usize, value: &B256) {
335 self.set(offset, &value[..]);
336 }
337
338 #[inline]
344 #[cfg_attr(debug_assertions, track_caller)]
345 pub fn set_u256(&mut self, offset: usize, value: U256) {
346 self.set(offset, &value.to_be_bytes::<32>());
347 }
348
349 #[inline]
355 #[cfg_attr(debug_assertions, track_caller)]
356 pub fn set(&mut self, offset: usize, value: &[u8]) {
357 if !value.is_empty() {
358 self.slice_mut(offset, value.len()).copy_from_slice(value);
359 }
360 }
361
362 #[inline]
369 #[cfg_attr(debug_assertions, track_caller)]
370 pub fn set_data(&mut self, memory_offset: usize, data_offset: usize, len: usize, data: &[u8]) {
371 let mut dst = self.context_memory_mut();
372 unsafe { set_data(dst.as_mut(), data, memory_offset, data_offset, len) };
373 }
374
375 #[inline]
377 #[cfg_attr(debug_assertions, track_caller)]
378 pub fn global_to_local_set_data(
379 &mut self,
380 memory_offset: usize,
381 data_offset: usize,
382 len: usize,
383 data_range: Range<usize>,
384 ) {
385 let mut buffer = self.buffer().borrow_mut(); let (src, dst) = buffer.split_at_mut(self.my_checkpoint);
387 let src = if data_range.is_empty() {
388 &mut []
389 } else {
390 src.get_mut(data_range).unwrap()
391 };
392 unsafe { set_data(dst, src, memory_offset, data_offset, len) };
393 }
394
395 #[inline]
401 #[cfg_attr(debug_assertions, track_caller)]
402 pub fn copy(&mut self, dst: usize, src: usize, len: usize) {
403 self.context_memory_mut().copy_within(src..src + len, dst);
404 }
405
406 #[inline]
408 pub fn context_memory(&self) -> Ref<'_, [u8]> {
409 let buffer = self.buffer().borrow();
410 Ref::map(buffer, |b| match b.get(self.my_checkpoint..) {
411 Some(slice) => slice,
412 None => debug_unreachable!("Context memory should be always valid"),
413 })
414 }
415
416 #[inline]
418 pub fn context_memory_mut(&mut self) -> RefMut<'_, [u8]> {
419 let buffer = self.buffer().borrow_mut(); RefMut::map(buffer, |b| match b.get_mut(self.my_checkpoint..) {
421 Some(slice) => slice,
422 None => debug_unreachable!("Context memory should be always valid"),
423 })
424 }
425}
426
/// Copies up to `len` bytes from `src[src_offset..]` into
/// `dst[dst_offset..dst_offset + len]`, zero-filling whatever part of the
/// destination window the source cannot cover.
///
/// # Safety
///
/// The caller must guarantee that `dst_offset + len` does not exceed
/// `dst.len()` (the zero-fill-only path checks this and panics; the copy path
/// uses unchecked indexing).
unsafe fn set_data(dst: &mut [u8], src: &[u8], dst_offset: usize, src_offset: usize, len: usize) {
    // Source starts past the end: nothing to copy, zero the whole window.
    if src_offset >= src.len() {
        dst.get_mut(dst_offset..dst_offset + len).unwrap().fill(0);
        return;
    }

    // Number of bytes actually available to copy from the source.
    let copy_len = min(len, src.len() - src_offset);
    debug_assert!(src_offset + copy_len <= src.len());

    // SAFETY: caller guarantees `dst_offset + len <= dst.len()`.
    let window = unsafe { dst.get_unchecked_mut(dst_offset..dst_offset + len) };
    let (copy_part, zero_part) = window.split_at_mut(copy_len);

    // SAFETY: `src_offset < src.len()` (checked above) and
    // `src_offset + copy_len <= src.len()` by construction of `copy_len`.
    copy_part.copy_from_slice(unsafe { src.get_unchecked(src_offset..src_offset + copy_len) });
    zero_part.fill(0);
}
460
/// Returns the number of 32-byte words needed to hold `len` bytes, rounding up.
///
/// The addition saturates, so `len` values within 31 of `usize::MAX` yield
/// `usize::MAX / 32` rather than overflowing.
#[inline]
pub const fn num_words(len: usize) -> usize {
    match len.checked_add(31) {
        Some(padded) => padded / 32,
        // Saturated sum would be `usize::MAX`, whose word count is MAX / 32.
        None => usize::MAX / 32,
    }
}
467
#[cfg(test)]
mod tests {
    use super::*;

    /// `num_words` rounds byte lengths up to 32-byte words and saturates
    /// (rather than overflowing) near `usize::MAX`.
    #[test]
    fn test_num_words() {
        assert_eq!(num_words(0), 0);
        assert_eq!(num_words(1), 1);
        assert_eq!(num_words(31), 1);
        assert_eq!(num_words(32), 1);
        assert_eq!(num_words(33), 2);
        assert_eq!(num_words(63), 2);
        assert_eq!(num_words(64), 2);
        assert_eq!(num_words(65), 3);
        assert_eq!(num_words(usize::MAX), usize::MAX / 32);
    }

    /// Exercises parent/child checkpointing: each child starts at the parent's
    /// current buffer length, and `free_child_context` truncates the shared
    /// buffer back to that checkpoint.
    #[test]
    fn new_free_child_context() {
        let mut sm1 = SharedMemory::new();

        assert_eq!(sm1.buffer().borrow().len(), 0);
        assert_eq!(sm1.my_checkpoint, 0);

        // Grow the buffer without initializing contents; only lengths matter here.
        unsafe { sm1.buffer().borrow_mut().set_len(32) };
        assert_eq!(sm1.len(), 32);
        let mut sm2 = sm1.new_child_context();

        assert_eq!(sm2.buffer().borrow().len(), 32);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 0);

        unsafe { sm2.buffer().borrow_mut().set_len(96) };
        assert_eq!(sm2.len(), 64);
        let mut sm3 = sm2.new_child_context();

        assert_eq!(sm3.buffer().borrow().len(), 96);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 0);

        unsafe { sm3.buffer().borrow_mut().set_len(128) };
        let sm4 = sm3.new_child_context();
        assert_eq!(sm4.buffer().borrow().len(), 128);
        assert_eq!(sm4.my_checkpoint, 128);
        assert_eq!(sm4.len(), 0);

        // Dropping a child does NOT free it; the parent must call
        // `free_child_context`, which truncates back to the checkpoint.
        drop(sm4);
        sm3.free_child_context();
        assert_eq!(sm3.buffer().borrow().len(), 128);
        assert_eq!(sm3.my_checkpoint, 96);
        assert_eq!(sm3.len(), 32);

        sm2.free_child_context();
        assert_eq!(sm2.buffer().borrow().len(), 96);
        assert_eq!(sm2.my_checkpoint, 32);
        assert_eq!(sm2.len(), 64);

        sm1.free_child_context();
        assert_eq!(sm1.buffer().borrow().len(), 32);
        assert_eq!(sm1.my_checkpoint, 0);
        assert_eq!(sm1.len(), 32);
    }

    /// `resize` is context-relative (offset by `my_checkpoint`) and
    /// zero-fills the newly added bytes.
    #[test]
    fn resize() {
        let mut sm1 = SharedMemory::new();
        sm1.resize(32);
        assert_eq!(sm1.buffer().borrow().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer().borrow().get(0..32), Some(&[0_u8; 32] as &[u8]));

        let mut sm2 = sm1.new_child_context();
        sm2.resize(96);
        // Child length 96 sits on top of the parent's 32 bytes: total 128.
        assert_eq!(sm2.buffer().borrow().len(), 128);
        assert_eq!(sm2.len(), 96);
        assert_eq!(
            sm2.buffer().borrow().get(32..128),
            Some(&[0_u8; 96] as &[u8])
        );

        sm1.free_child_context();
        assert_eq!(sm1.buffer().borrow().len(), 32);
        assert_eq!(sm1.len(), 32);
        assert_eq!(sm1.buffer().borrow().get(0..32), Some(&[0_u8; 32] as &[u8]));
    }
}
554}