1use crate::{
2 memory_units::{Bytes, Pages, RoundUpTo},
3 value::LittleEndianConvert,
4 Error,
5};
6use alloc::{rc::Rc, string::ToString, vec::Vec};
7use casper_wasm::elements::ResizableLimits;
8use core::{
9 cell::{Cell, Ref, RefCell, RefMut},
10 cmp,
11 fmt,
12 ops::Range,
13};
14
15#[cfg(all(feature = "virtual_memory", target_pointer_width = "64"))]
16#[path = "mmap_bytebuf.rs"]
17mod bytebuf;
18
19#[cfg(not(all(feature = "virtual_memory", target_pointer_width = "64")))]
20#[path = "vec_bytebuf.rs"]
21mod bytebuf;
22
23use self::bytebuf::ByteBuf;
24
/// Size of a single WebAssembly linear-memory page: 64 KiB.
pub const LINEAR_MEMORY_PAGE_SIZE: Bytes = Bytes(65536);
31
/// Reference-counted handle to a [`MemoryInstance`].
///
/// Cloning a `MemoryRef` is cheap (an `Rc` count bump); all clones share the
/// same underlying linear memory.
#[derive(Clone, Debug)]
pub struct MemoryRef(Rc<MemoryInstance>);
40
41impl ::core::ops::Deref for MemoryRef {
42 type Target = MemoryInstance;
43 fn deref(&self) -> &MemoryInstance {
44 &self.0
45 }
46}
47
/// A WebAssembly linear-memory instance backed by a resizable byte buffer.
pub struct MemoryInstance {
    /// Limits (initial/maximum page counts) as declared for this memory.
    limits: ResizableLimits,
    /// Backing byte storage; mmap- or `Vec`-based depending on the
    /// `virtual_memory` feature and pointer width (see the `bytebuf` cfgs).
    buffer: RefCell<ByteBuf>,
    /// Initial size of the memory, in pages.
    initial: Pages,
    /// Current buffer size, in bytes. NOTE(review): this cell is written in
    /// `new`/`grow`, but `current_size()` reads `buffer.borrow().len()`
    /// instead — confirm whether this cache is still read anywhere.
    current_size: Cell<usize>,
    /// Optional maximum size of the memory, in pages.
    maximum: Option<Pages>,
}
70
impl fmt::Debug for MemoryInstance {
    // Manual impl: reports the buffer's length rather than dumping its
    // contents. The `current_size` cell is not part of the output.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("MemoryInstance")
            .field("limits", &self.limits)
            .field("buffer.len", &self.buffer.borrow().len())
            .field("maximum", &self.maximum)
            .field("initial", &self.initial)
            .finish()
    }
}
81
/// A byte region inside a linear memory that has already been bounds-checked
/// (constructed only by `checked_region`/`checked_region_pair`).
struct CheckedRegion {
    // Start of the region, in bytes from the beginning of the memory.
    offset: usize,
    // Length of the region, in bytes.
    size: usize,
}
86
87impl CheckedRegion {
88 fn range(&self) -> Range<usize> {
89 self.offset..self.offset + self.size
90 }
91
92 fn intersects(&self, other: &Self) -> bool {
93 let low = cmp::max(self.offset, other.offset);
94 let high = cmp::min(self.offset + self.size, other.offset + other.size);
95
96 low < high
97 }
98}
99
100impl MemoryInstance {
101 pub fn alloc(initial: Pages, maximum: Option<Pages>) -> Result<MemoryRef, Error> {
121 {
122 let initial_u32: u32 = initial.0.try_into().map_err(|_| {
123 Error::Memory(format!("initial ({}) can't be coerced to u32", initial.0))
124 })?;
125 let maximum_u32: Option<u32> = maximum
126 .map(|maximum_pages| {
127 maximum_pages.0.try_into().map_err(|_| {
128 Error::Memory(format!(
129 "maximum ({}) can't be coerced to u32",
130 maximum_pages.0
131 ))
132 })
133 })
134 .transpose()?;
135 validation::validate_memory(initial_u32, maximum_u32).map_err(Error::Memory)?;
136 }
137
138 let memory = MemoryInstance::new(initial, maximum)?;
139 Ok(MemoryRef(Rc::new(memory)))
140 }
141
142 fn new(initial: Pages, maximum: Option<Pages>) -> Result<Self, Error> {
144 let limits = ResizableLimits::new(initial.0 as u32, maximum.map(|p| p.0 as u32));
145
146 let initial_size: Bytes = initial.into();
147 Ok(MemoryInstance {
148 limits,
149 buffer: RefCell::new(ByteBuf::new(initial_size.0).map_err(Error::Memory)?),
150 initial,
151 current_size: Cell::new(initial_size.0),
152 maximum,
153 })
154 }
155
    /// Returns the declared resizable limits of this memory.
    pub(crate) fn limits(&self) -> &ResizableLimits {
        &self.limits
    }
160
    /// Returns the initial size of this memory, in pages.
    pub fn initial(&self) -> Pages {
        self.initial
    }
165
    /// Returns the maximum size of this memory in pages, if one was declared.
    pub fn maximum(&self) -> Option<Pages> {
        self.maximum
    }
173
174 pub fn current_size(&self) -> Pages {
194 Bytes(self.buffer.borrow().len()).round_up_to()
195 }
196
197 pub fn get_value<T: LittleEndianConvert>(&self, offset: u32) -> Result<T, Error> {
199 let mut bytes = <<T as LittleEndianConvert>::Bytes as Default>::default();
200 self.get_into(offset, bytes.as_mut())?;
201 let value = T::from_le_bytes(bytes);
202 Ok(value)
203 }
204
205 #[deprecated(since = "0.10.0", note = "use get_into or get_value method instead")]
212 pub fn get(&self, offset: u32, size: usize) -> Result<Vec<u8>, Error> {
213 let mut buffer = self.buffer.borrow_mut();
214 let region = self.checked_region(&mut buffer, offset as usize, size)?;
215
216 Ok(buffer.as_slice_mut()[region.range()].to_vec())
217 }
218
219 pub fn get_into(&self, offset: u32, target: &mut [u8]) -> Result<(), Error> {
225 let mut buffer = self.buffer.borrow_mut();
226 let region = self.checked_region(&mut buffer, offset as usize, target.len())?;
227
228 target.copy_from_slice(&buffer.as_slice_mut()[region.range()]);
229
230 Ok(())
231 }
232
233 pub fn set(&self, offset: u32, value: &[u8]) -> Result<(), Error> {
235 let mut buffer = self.buffer.borrow_mut();
236 let range = self
237 .checked_region(&mut buffer, offset as usize, value.len())?
238 .range();
239
240 buffer.as_slice_mut()[range].copy_from_slice(value);
241
242 Ok(())
243 }
244
245 pub fn set_value<T: LittleEndianConvert>(&self, offset: u32, value: T) -> Result<(), Error> {
247 let bytes = T::into_le_bytes(value);
248 self.set(offset, bytes.as_ref())?;
249 Ok(())
250 }
251
252 pub fn grow(&self, additional: Pages) -> Result<Pages, Error> {
259 let size_before_grow: Pages = self.current_size();
260
261 if additional == Pages(0) {
262 return Ok(size_before_grow);
263 }
264 if additional > Pages(65536) {
265 return Err(Error::Memory(
266 "Trying to grow memory by more than 65536 pages".to_string(),
267 ));
268 }
269
270 let new_size: Pages = size_before_grow + additional;
271 let maximum = self
272 .maximum
273 .unwrap_or(Pages(validation::LINEAR_MEMORY_MAX_PAGES as usize));
274 if new_size > maximum {
275 return Err(Error::Memory(format!(
276 "Trying to grow memory by {} pages when already have {}",
277 additional.0, size_before_grow.0,
278 )));
279 }
280
281 let new_buffer_length: Bytes = new_size.into();
282 self.buffer
283 .borrow_mut()
284 .realloc(new_buffer_length.0)
285 .map_err(Error::Memory)?;
286
287 self.current_size.set(new_buffer_length.0);
288
289 Ok(size_before_grow)
290 }
291
292 fn checked_region(
293 &self,
294 buffer: &mut ByteBuf,
295 offset: usize,
296 size: usize,
297 ) -> Result<CheckedRegion, Error> {
298 let end = offset.checked_add(size).ok_or_else(|| {
299 Error::Memory(format!(
300 "trying to access memory block of size {} from offset {}",
301 size, offset
302 ))
303 })?;
304
305 if end > buffer.len() {
306 return Err(Error::Memory(format!(
307 "trying to access region [{}..{}] in memory [0..{}]",
308 offset,
309 end,
310 buffer.len()
311 )));
312 }
313
314 Ok(CheckedRegion { offset, size })
315 }
316
317 fn checked_region_pair(
318 &self,
319 buffer: &mut ByteBuf,
320 offset1: usize,
321 size1: usize,
322 offset2: usize,
323 size2: usize,
324 ) -> Result<(CheckedRegion, CheckedRegion), Error> {
325 let end1 = offset1.checked_add(size1).ok_or_else(|| {
326 Error::Memory(format!(
327 "trying to access memory block of size {} from offset {}",
328 size1, offset1
329 ))
330 })?;
331
332 let end2 = offset2.checked_add(size2).ok_or_else(|| {
333 Error::Memory(format!(
334 "trying to access memory block of size {} from offset {}",
335 size2, offset2
336 ))
337 })?;
338
339 if end1 > buffer.len() {
340 return Err(Error::Memory(format!(
341 "trying to access region [{}..{}] in memory [0..{}]",
342 offset1,
343 end1,
344 buffer.len()
345 )));
346 }
347
348 if end2 > buffer.len() {
349 return Err(Error::Memory(format!(
350 "trying to access region [{}..{}] in memory [0..{}]",
351 offset2,
352 end2,
353 buffer.len()
354 )));
355 }
356
357 Ok((
358 CheckedRegion {
359 offset: offset1,
360 size: size1,
361 },
362 CheckedRegion {
363 offset: offset2,
364 size: size2,
365 },
366 ))
367 }
368
369 pub fn copy(&self, src_offset: usize, dst_offset: usize, len: usize) -> Result<(), Error> {
377 let mut buffer = self.buffer.borrow_mut();
378
379 let (read_region, write_region) =
380 self.checked_region_pair(&mut buffer, src_offset, len, dst_offset, len)?;
381
382 unsafe {
383 ::core::ptr::copy(
384 buffer.as_slice()[read_region.range()].as_ptr(),
385 buffer.as_slice_mut()[write_region.range()].as_mut_ptr(),
386 len,
387 )
388 }
389
390 Ok(())
391 }
392
393 pub fn copy_nonoverlapping(
405 &self,
406 src_offset: usize,
407 dst_offset: usize,
408 len: usize,
409 ) -> Result<(), Error> {
410 let mut buffer = self.buffer.borrow_mut();
411
412 let (read_region, write_region) =
413 self.checked_region_pair(&mut buffer, src_offset, len, dst_offset, len)?;
414
415 if read_region.intersects(&write_region) {
416 return Err(Error::Memory(
417 "non-overlapping copy is used for overlapping regions".to_string(),
418 ));
419 }
420
421 unsafe {
422 ::core::ptr::copy_nonoverlapping(
423 buffer.as_slice()[read_region.range()].as_ptr(),
424 buffer.as_slice_mut()[write_region.range()].as_mut_ptr(),
425 len,
426 )
427 }
428
429 Ok(())
430 }
431
432 pub fn transfer(
436 src: &MemoryRef,
437 src_offset: usize,
438 dst: &MemoryRef,
439 dst_offset: usize,
440 len: usize,
441 ) -> Result<(), Error> {
442 if Rc::ptr_eq(&src.0, &dst.0) {
443 return src.copy(src_offset, dst_offset, len);
446 }
447
448 let mut src_buffer = src.buffer.borrow_mut();
451 let mut dst_buffer = dst.buffer.borrow_mut();
452
453 let src_range = src
454 .checked_region(&mut src_buffer, src_offset, len)?
455 .range();
456 let dst_range = dst
457 .checked_region(&mut dst_buffer, dst_offset, len)?
458 .range();
459
460 dst_buffer.as_slice_mut()[dst_range].copy_from_slice(&src_buffer.as_slice()[src_range]);
461
462 Ok(())
463 }
464
465 pub fn clear(&self, offset: usize, new_val: u8, len: usize) -> Result<(), Error> {
473 let mut buffer = self.buffer.borrow_mut();
474
475 let range = self.checked_region(&mut buffer, offset, len)?.range();
476
477 for val in &mut buffer.as_slice_mut()[range] {
478 *val = new_val
479 }
480 Ok(())
481 }
482
    /// Fills `len` bytes starting at `offset` with zeroes.
    ///
    /// # Errors
    ///
    /// Returns `Error::Memory` if the region is out of bounds.
    pub fn zero(&self, offset: usize, len: usize) -> Result<(), Error> {
        self.clear(offset, 0, len)
    }
491
    /// Erases the backing buffer via `ByteBuf::erase` (see the selected
    /// `bytebuf` backend for the exact reset semantics).
    ///
    /// # Errors
    ///
    /// Returns `Error::Memory` if the underlying buffer reports a failure.
    pub fn erase(&self) -> Result<(), Error> {
        self.buffer.borrow_mut().erase().map_err(Error::Memory)
    }
498
    /// Runs `f` with a shared view of the entire memory as a byte slice.
    ///
    /// # Panics
    ///
    /// Panics if the memory is already mutably borrowed — e.g. when `f`
    /// re-enters this memory through another handle (see the
    /// `zero_copy_panics_on_nested_access` test).
    pub fn with_direct_access<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
        let buf = self.buffer.borrow();
        f(buf.as_slice())
    }
512
    /// Runs `f` with an exclusive, mutable view of the entire memory as a
    /// byte slice.
    ///
    /// # Panics
    ///
    /// Panics if the memory is already borrowed (shared or mutable), e.g.
    /// when `f` re-enters this memory through another handle.
    pub fn with_direct_access_mut<R, F: FnOnce(&mut [u8]) -> R>(&self, f: F) -> R {
        let mut buf = self.buffer.borrow_mut();
        f(buf.as_slice_mut())
    }
527
    /// Returns a guard giving shared direct access to the memory contents.
    ///
    /// The guard keeps a `Ref` borrow of the buffer alive for its whole
    /// lifetime, so acquiring mutable access while it exists will panic.
    pub fn direct_access(&self) -> impl AsRef<[u8]> + '_ {
        // Wrapper that owns the `Ref` and exposes the bytes through `AsRef`.
        struct Buffer<'a>(Ref<'a, ByteBuf>);
        impl<'a> AsRef<[u8]> for Buffer<'a> {
            fn as_ref(&self) -> &[u8] {
                self.0.as_slice()
            }
        }

        Buffer(self.buffer.borrow())
    }
547
    /// Returns a guard giving exclusive, mutable direct access to the memory
    /// contents.
    ///
    /// The guard keeps a `RefMut` borrow of the buffer alive for its whole
    /// lifetime, so any other access while it exists will panic.
    pub fn direct_access_mut(&self) -> impl AsMut<[u8]> + '_ {
        // Wrapper that owns the `RefMut` and exposes the bytes via `AsMut`.
        struct Buffer<'a>(RefMut<'a, ByteBuf>);
        impl<'a> AsMut<[u8]> for Buffer<'a> {
            fn as_mut(&mut self) -> &mut [u8] {
                self.0.as_slice_mut()
            }
        }

        Buffer(self.buffer.borrow_mut())
    }
568}
569
// Unit tests covering allocation limits, bounds checking, overlapping and
// non-overlapping copies, cross-instance transfers, and the zero-copy
// direct-access APIs.
#[cfg(test)]
mod tests {

    use super::{MemoryInstance, MemoryRef, LINEAR_MEMORY_PAGE_SIZE};
    use crate::{memory_units::Pages, Error};
    use alloc::rc::Rc;

    #[test]
    fn alloc() {
        // Each fixture is (initial pages, maximum pages, expected success).
        let mut fixtures = vec![
            (0, None, true),
            (0, Some(0), true),
            (1, None, true),
            (1, Some(1), true),
            (0, Some(1), true),
            (1, Some(0), false),
        ];

        // Allocations at the 4 GiB scale are only attempted on 64-bit hosts.
        #[cfg(target_pointer_width = "64")]
        fixtures.extend(&[
            (65536, Some(65536), true),
            (65536, Some(0), false),
            (65536, None, true),
        ]);

        for (index, &(initial, maybe_max, expected_ok)) in fixtures.iter().enumerate() {
            let initial: Pages = Pages(initial);
            let maximum: Option<Pages> = maybe_max.map(Pages);
            let result = MemoryInstance::alloc(initial, maximum);
            if result.is_ok() != expected_ok {
                panic!(
                    "unexpected error at {}, initial={:?}, max={:?}, expected={}, result={:?}",
                    index, initial, maybe_max, expected_ok, result,
                );
            }
        }
    }

    #[test]
    fn ensure_page_size() {
        use crate::memory_units::ByteSize;
        assert_eq!(LINEAR_MEMORY_PAGE_SIZE, Pages::BYTE_SIZE);
    }

    // Builds a one-page memory (maximum one page) pre-filled with
    // `initial_content` starting at offset 0.
    fn create_memory(initial_content: &[u8]) -> MemoryInstance {
        let mem = MemoryInstance::new(Pages(1), Some(Pages(1))).unwrap();
        mem.set(0, initial_content)
            .expect("Successful initialize the memory");
        mem
    }

    // Reads `size` bytes at `offset` into a fresh `Vec`, panicking on error.
    fn get_into_vec(mem: &MemoryInstance, offset: u32, size: usize) -> Vec<u8> {
        let mut buffer = vec![0x00; size];
        mem.get_into(offset, &mut buffer[..])
            .unwrap_or_else(|error| {
                panic!(
                    "failed to retrieve data from linear memory at offset {} with size {}: {}",
                    offset, size, error
                )
            });
        buffer
    }

    #[test]
    fn copy_overlaps_1() {
        let mem = create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
        mem.copy(0, 4, 6).expect("Successfully copy the elements");
        let result = get_into_vec(&mem, 0, 10);
        assert_eq!(result, &[0, 1, 2, 3, 0, 1, 2, 3, 4, 5]);
    }

    #[test]
    fn copy_overlaps_2() {
        let mem = create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
        mem.copy(4, 0, 6).expect("Successfully copy the elements");
        let result = get_into_vec(&mem, 0, 10);
        assert_eq!(result, &[4, 5, 6, 7, 8, 9, 6, 7, 8, 9]);
    }

    #[test]
    fn copy_nonoverlapping() {
        let mem = create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
        mem.copy_nonoverlapping(0, 10, 10)
            .expect("Successfully copy the elements");
        let result = get_into_vec(&mem, 10, 10);
        assert_eq!(result, &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn copy_nonoverlapping_overlaps_1() {
        let mem = create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
        let result = mem.copy_nonoverlapping(0, 4, 6);
        match result {
            Err(Error::Memory(_)) => {}
            _ => panic!("Expected Error::Memory(_) result, but got {:?}", result),
        }
    }

    #[test]
    fn copy_nonoverlapping_overlaps_2() {
        let mem = create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
        let result = mem.copy_nonoverlapping(4, 0, 6);
        match result {
            Err(Error::Memory(_)) => {}
            _ => panic!("Expected Error::Memory(_), but got {:?}", result),
        }
    }

    #[test]
    fn transfer_works() {
        let src = MemoryRef(Rc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
        let dst = MemoryRef(Rc::new(create_memory(&[
            10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
        ])));

        MemoryInstance::transfer(&src, 4, &dst, 0, 3).unwrap();

        assert_eq!(get_into_vec(&src, 0, 10), &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
        assert_eq!(
            get_into_vec(&dst, 0, 10),
            &[4, 5, 6, 13, 14, 15, 16, 17, 18, 19]
        );
    }

    #[test]
    fn transfer_still_works_with_same_memory() {
        let src = MemoryRef(Rc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));

        MemoryInstance::transfer(&src, 4, &src, 0, 3).unwrap();

        assert_eq!(get_into_vec(&src, 0, 10), &[4, 5, 6, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn transfer_oob_with_same_memory_errors() {
        let src = MemoryRef(Rc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
        assert!(MemoryInstance::transfer(&src, 65535, &src, 0, 3).is_err());

        // A failed transfer must leave the memory untouched.
        assert_eq!(get_into_vec(&src, 0, 10), &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn transfer_oob_errors() {
        let src = MemoryRef(Rc::new(create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9])));
        let dst = MemoryRef(Rc::new(create_memory(&[
            10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
        ])));

        assert!(MemoryInstance::transfer(&src, 65535, &dst, 0, 3).is_err());

        // A failed transfer must leave both memories untouched.
        assert_eq!(get_into_vec(&src, 0, 10), &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
        assert_eq!(
            get_into_vec(&dst, 0, 10),
            &[10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
        );
    }

    #[test]
    fn clear() {
        let mem = create_memory(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
        mem.clear(0, 0x4A, 10)
            .expect("To successfully clear the memory");
        let result = get_into_vec(&mem, 0, 10);
        assert_eq!(result, &[0x4A; 10]);
    }

    #[test]
    fn get_into() {
        let mem = MemoryInstance::new(Pages(1), None).unwrap();
        mem.set(6, &[13, 17, 129])
            .expect("memory set should not fail");

        let mut data = [0u8; 2];
        mem.get_into(7, &mut data[..])
            .expect("get_into should not fail");

        assert_eq!(data, [17, 129]);
    }

    #[test]
    fn zero_copy() {
        let mem = MemoryInstance::alloc(Pages(1), None).unwrap();
        mem.set(100, &[0]).expect("memory set should not fail");
        mem.with_direct_access_mut(|buf| {
            assert_eq!(
                buf.len(),
                65536,
                "the buffer length is expected to be 1 page long"
            );
            buf[..10].copy_from_slice(&[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
        });
        mem.with_direct_access(|buf| {
            assert_eq!(
                buf.len(),
                65536,
                "the buffer length is expected to be 1 page long"
            );
            assert_eq!(&buf[..10], &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
        });
    }

    // Re-entering the memory while a direct-access borrow is held must
    // panic (RefCell double-borrow).
    #[should_panic]
    #[test]
    fn zero_copy_panics_on_nested_access() {
        let mem = MemoryInstance::alloc(Pages(1), None).unwrap();
        let mem_inner = mem.clone();
        mem.with_direct_access(move |_| {
            let _ = mem_inner.set(0, &[11, 12, 13]);
        });
    }
}