use crate::{Error, Memory, MAX_POSSIBLE_ALLOCATION, MAX_WASM_PAGES, PAGE_SIZE};
use sp_wasm_interface_common::{Pointer, WordSize};
use std::{
    cmp::{max, min},
    mem,
    ops::{Index, IndexMut, Range},
};

/// The minimal alignment guaranteed by this allocator.
///
/// All pointers returned by `allocate` are aligned to a multiple of this value.
const ALIGNMENT: u32 = 8;

/// The size, in bytes, of the header that precedes every allocation.
const HEADER_SIZE: u32 = 8;

/// A convenience wrapper for creating an [`Error::Other`].
fn error(msg: &'static str) -> Error {
    Error::Other(msg)
}

const LOG_TARGET: &str = "wasm-heap";

/// The number of distinct allocation orders (size classes) supported by the
/// allocator. Order `o` covers blocks of `MIN_POSSIBLE_ALLOCATION << o` bytes,
/// so the orders range from 8 bytes up to [`MAX_POSSIBLE_ALLOCATION`].
const N_ORDERS: usize = 23;
/// The smallest allocation size supported by the allocator, in bytes.
const MIN_POSSIBLE_ALLOCATION: u32 = 8;

/// The size class (order) of an allocation.
///
/// An allocation of order `o` occupies a block of `MIN_POSSIBLE_ALLOCATION << o`
/// bytes, so requested sizes are rounded up to the next power of two. Blocks of
/// the same order share a free list and are never split or merged.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct Order(u32);

impl Order {
    /// Creates an order from a raw value, checking that it is in range.
    fn from_raw(order: u32) -> Result<Self, Error> {
        if order < N_ORDERS as u32 {
            Ok(Self(order))
        } else {
            Err(error("invalid order"))
        }
    }

    /// Computes the order required to fit an allocation of `size` bytes.
    fn from_size(size: u32) -> Result<Self, Error> {
        let clamped_size = if size > MAX_POSSIBLE_ALLOCATION {
            log::warn!(target: LOG_TARGET, "going to fail due to allocating {:?}", size);
            return Err(Error::RequestedAllocationTooLarge)
        } else if size < MIN_POSSIBLE_ALLOCATION {
            MIN_POSSIBLE_ALLOCATION
        } else {
            size
        };

        // Round the clamped size up to the next power of two and take its
        // base-2 logarithm relative to the minimum allocation size.
        let power_of_two_size = clamped_size.next_power_of_two();
        let order = power_of_two_size.trailing_zeros() - MIN_POSSIBLE_ALLOCATION.trailing_zeros();

        Ok(Self(order))
    }

    /// The size, in bytes, of a block of this order.
    fn size(&self) -> u32 {
        MIN_POSSIBLE_ALLOCATION << self.0
    }

    /// Returns the raw order value.
    fn into_raw(self) -> u32 {
        self.0
    }
}

/// The special link value that denotes the end of a free list.
const NIL_MARKER: u32 = u32::MAX;

/// A link in an intrusive free list kept inside the headers of free blocks.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Link {
    /// Nil, denotes that there is no next element.
    Nil,
    /// Link to the next free block, as a pointer to its header.
    Ptr(u32),
}

impl Link {
    /// Decodes a `Link` from its raw `u32` representation.
    fn from_raw(raw: u32) -> Self {
        if raw != NIL_MARKER {
            Self::Ptr(raw)
        } else {
            Self::Nil
        }
    }

    /// Encodes this `Link` into its raw `u32` representation.
    fn into_raw(self) -> u32 {
        match self {
            Self::Nil => NIL_MARKER,
            Self::Ptr(ptr) => ptr,
        }
    }
}

/// The header that precedes every block managed by the allocator.
///
/// A header is encoded as a single little-endian `u64`: bit 32 is the
/// "occupied" flag, and the lower 32 bits carry either the order of the block
/// (when occupied) or the link to the next free block (when free).
#[derive(Clone, Debug, PartialEq, Eq)]
enum Header {
    /// A free block, linking to the next free block of the same order.
    Free(Link),
    /// An occupied block, remembering its order so the size can be recovered
    /// on deallocation.
    Occupied(Order),
}

impl Header {
    /// Reads a header from `memory` at the given location.
    fn read_from(memory: &impl Memory, header_ptr: u32) -> Result<Self, Error> {
        let raw_header = memory.read_le_u64(header_ptr)?;

        // Bit 32 is the "occupied" flag; the lower 32 bits are the payload.
        let occupied = raw_header & 0x00000001_00000000 != 0;
        let header_data = raw_header as u32;

        Ok(if occupied {
            Self::Occupied(Order::from_raw(header_data)?)
        } else {
            Self::Free(Link::from_raw(header_data))
        })
    }

    /// Writes this header into `memory` at the given location.
    fn write_into(&self, memory: &mut impl Memory, header_ptr: u32) -> Result<(), Error> {
        let (header_data, occupied_mask) = match *self {
            Self::Occupied(order) => (order.into_raw(), 0x00000001_00000000),
            Self::Free(link) => (link.into_raw(), 0x00000000_00000000),
        };
        let raw_header = header_data as u64 | occupied_mask;
        memory.write_le_u64(header_ptr, raw_header)?;
        Ok(())
    }

    /// Returns the order if this header is occupied.
    fn into_occupied(self) -> Option<Order> {
        match self {
            Self::Occupied(order) => Some(order),
            _ => None,
        }
    }

    /// Returns the link if this header is free.
    fn into_free(self) -> Option<Link> {
        match self {
            Self::Free(link) => Some(link),
            _ => None,
        }
    }
}

/// The free lists of the allocator, one head per order.
struct FreeLists {
    heads: [Link; N_ORDERS],
}

impl FreeLists {
    /// Creates free lists with every head set to [`Link::Nil`].
    fn new() -> Self {
        Self { heads: [Link::Nil; N_ORDERS] }
    }

    /// Replaces the head of the free list for `order` with `new`, returning
    /// the previous head.
    fn replace(&mut self, order: Order, new: Link) -> Link {
        let prev = self[order];
        self[order] = new;
        prev
    }
}

impl Index<Order> for FreeLists {
    type Output = Link;
    fn index(&self, index: Order) -> &Link {
        &self.heads[index.0 as usize]
    }
}

impl IndexMut<Order> for FreeLists {
    fn index_mut(&mut self, index: Order) -> &mut Link {
        &mut self.heads[index.0 as usize]
    }
}

/// Allocation statistics gathered over the lifetime of the allocator.
#[derive(Clone, Debug, Default)]
#[non_exhaustive]
pub struct AllocationStats {
    /// The number of bytes currently allocated, including headers.
    pub bytes_allocated: u32,

    /// The peak value of `bytes_allocated`.
    pub bytes_allocated_peak: u32,

    /// The sum of the sizes of all allocations ever made, including headers.
    pub bytes_allocated_sum: u128,

    /// The amount of address space used by the allocator, in bytes, measured
    /// as the distance from the heap base to the current bumper.
    pub address_space_used: u32,
}

/// Converts a size in bytes into the number of wasm pages needed to hold it,
/// rounding up. Returns `None` if the page count overflows a `u32`.
fn pages_from_size(size: u64) -> Option<u32> {
    u32::try_from((size + PAGE_SIZE as u64 - 1) / PAGE_SIZE as u64).ok()
}

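/// An allocator over a wasm linear memory that combines a bump allocator with
/// per-order free lists: fresh blocks are carved off a bump pointer, and freed
/// blocks are pushed onto the free list of their order for later reuse.
///
/// A minimal usage sketch (illustrative; `mem` stands for any [`Memory`]
/// implementation and `heap_base` for the start of the heap):
///
/// ```ignore
/// let mut heap = FreeingBumpHeapAllocator::new(heap_base);
/// let ptr = heap.allocate(&mut mem, 128)?;
/// heap.deallocate(&mut mem, ptr)?;
/// ```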
pub struct FreeingBumpHeapAllocator {
    original_heap_base: u32,
    bumper: u32,
    free_lists: FreeLists,
    poisoned: bool,
    last_observed_memory_size: u64,
    stats: AllocationStats,
}

impl Drop for FreeingBumpHeapAllocator {
    fn drop(&mut self) {
        log::debug!(target: LOG_TARGET, "allocator dropped: {:?}", self.stats)
    }
}

impl FreeingBumpHeapAllocator {
    /// Creates a new allocation heap which follows a freeing-bump strategy.
    ///
    /// `heap_base` is the lowest offset the allocator may use; it is rounded
    /// up to the next multiple of [`ALIGNMENT`].
    pub fn new(heap_base: u32) -> Self {
        let aligned_heap_base = (heap_base + ALIGNMENT - 1) / ALIGNMENT * ALIGNMENT;

        FreeingBumpHeapAllocator {
            original_heap_base: aligned_heap_base,
            bumper: aligned_heap_base,
            free_lists: FreeLists::new(),
            poisoned: false,
            last_observed_memory_size: 0,
            stats: AllocationStats::default(),
        }
    }

    /// Allocates `size` bytes and returns a pointer to the start of the block.
    ///
    /// The requested size is rounded up to the next power of two, at least
    /// `MIN_POSSIBLE_ALLOCATION` and at most `MAX_POSSIBLE_ALLOCATION` bytes.
    /// A block from the matching free list is reused if one is available;
    /// otherwise a fresh block is carved off the bump pointer, growing the
    /// memory if necessary. Any failure poisons the allocator, causing all
    /// subsequent calls to fail.
    pub fn allocate(
        &mut self,
        mem: &mut impl Memory,
        size: WordSize,
    ) -> Result<Pointer<u8>, Error> {
        if self.poisoned {
            return Err(error("the allocator has been poisoned"))
        }

        let bomb = PoisonBomb { poisoned: &mut self.poisoned };

        Self::observe_memory_size(&mut self.last_observed_memory_size, mem)?;
        let order = Order::from_size(size)?;

        let header_ptr: u32 = match self.free_lists[order] {
            Link::Ptr(header_ptr) => {
                assert!(
                    u64::from(header_ptr + order.size() + HEADER_SIZE) <= mem.size(),
                    "Pointer is looked up in list of free entries, into which
                    only valid values are inserted; qed"
                );

                // Unlink the block: its successor becomes the new list head.
                let next_free = Header::read_from(mem, header_ptr)?
                    .into_free()
                    .ok_or_else(|| error("free list points to an occupied header"))?;
                self.free_lists[order] = next_free;

                header_ptr
            },
            Link::Nil => {
                // No free block of this order: take a fresh one off the bumper.
                Self::bump(&mut self.bumper, order.size() + HEADER_SIZE, mem)?
            },
        };

        Header::Occupied(order).write_into(mem, header_ptr)?;

        self.stats.bytes_allocated += order.size() + HEADER_SIZE;
        self.stats.bytes_allocated_sum += u128::from(order.size() + HEADER_SIZE);
        self.stats.bytes_allocated_peak =
            max(self.stats.bytes_allocated_peak, self.stats.bytes_allocated);
        self.stats.address_space_used = self.bumper - self.original_heap_base;

        log::trace!(target: LOG_TARGET, "after allocation: {:?}", self.stats);

        bomb.disarm();
        Ok(Pointer::new(header_ptr + HEADER_SIZE))
    }

    /// Deallocates the block pointed to by `ptr`, pushing it onto the free
    /// list of its order for reuse by later allocations of the same size.
    ///
    /// `ptr` must have been returned by a previous call to `allocate`. Any
    /// failure poisons the allocator, causing all subsequent calls to fail.
    pub fn deallocate(&mut self, mem: &mut impl Memory, ptr: Pointer<u8>) -> Result<(), Error> {
        if self.poisoned {
            return Err(error("the allocator has been poisoned"))
        }

        let bomb = PoisonBomb { poisoned: &mut self.poisoned };

        Self::observe_memory_size(&mut self.last_observed_memory_size, mem)?;

        let header_ptr = u32::from(ptr)
            .checked_sub(HEADER_SIZE)
            .ok_or_else(|| error("Invalid pointer for deallocation"))?;

        let order = Header::read_from(mem, header_ptr)?
            .into_occupied()
            .ok_or_else(|| error("the allocation points to an empty header"))?;

        // Put the freed block at the head of its free list, linking it to the
        // previous head.
        let prev_head = self.free_lists.replace(order, Link::Ptr(header_ptr));
        Header::Free(prev_head).write_into(mem, header_ptr)?;

        self.stats.bytes_allocated = self
            .stats
            .bytes_allocated
            .checked_sub(order.size() + HEADER_SIZE)
            .ok_or_else(|| error("underflow of the currently allocated bytes count"))?;

        log::trace!(target: LOG_TARGET, "after deallocation: {:?}", self.stats);

        bomb.disarm();
        Ok(())
    }

    /// Returns a snapshot of the allocation statistics.
    pub fn stats(&self) -> AllocationStats {
        self.stats.clone()
    }

    /// Bump-allocates `size` bytes, growing the memory if the bumper would
    /// pass its current end, and returns the previous bumper value.
    fn bump(bumper: &mut u32, size: u32, memory: &mut impl Memory) -> Result<u32, Error> {
        let required_size = u64::from(*bumper) + u64::from(size);

        if required_size > memory.size() {
            let required_pages =
                pages_from_size(required_size).ok_or_else(|| Error::AllocatorOutOfSpace)?;

            let current_pages = memory.pages();
            let max_pages = memory.max_pages().unwrap_or(MAX_WASM_PAGES);
            debug_assert!(
                current_pages < required_pages,
                "current pages {current_pages} < required pages {required_pages}"
            );

            if current_pages >= max_pages {
                log::debug!(
                    target: LOG_TARGET,
                    "Wasm pages ({current_pages}) are already at the maximum.",
                );

                return Err(Error::AllocatorOutOfSpace)
            } else if required_pages > max_pages {
                log::debug!(
                    target: LOG_TARGET,
                    "Failed to grow memory from {current_pages} pages to at least \
                    {required_pages} pages due to the maximum limit of {max_pages} pages",
                );
                return Err(Error::AllocatorOutOfSpace)
            }

            // Ideally double the current number of pages (capped at the
            // maximum), but grow at least enough to satisfy the request.
            let next_pages = min(current_pages * 2, max_pages);
            let next_pages = max(next_pages, required_pages);

            if memory.grow(next_pages - current_pages).is_err() {
                log::error!(
                    target: LOG_TARGET,
                    "Failed to grow memory from {current_pages} pages to {next_pages} pages",
                );

                return Err(Error::AllocatorOutOfSpace)
            }

            debug_assert_eq!(memory.pages(), next_pages, "Number of pages should have increased!");
        }

        let res = *bumper;
        *bumper += size;
        Ok(res)
    }

    /// Checks that the memory has not shrunk since the last observation; the
    /// allocator relies on the memory only ever growing.
    fn observe_memory_size(
        last_observed_memory_size: &mut u64,
        mem: &mut impl Memory,
    ) -> Result<(), Error> {
        if mem.size() < *last_observed_memory_size {
            return Err(Error::MemoryShrinked)
        }
        *last_observed_memory_size = mem.size();
        Ok(())
    }
}

/// Convenience helpers for reading and writing little-endian `u64` values in
/// a wasm [`Memory`].
trait MemoryExt: Memory {
    /// Reads a `u64` in little-endian format starting at `ptr`.
    fn read_le_u64(&self, ptr: u32) -> Result<u64, Error> {
        self.with_access(|memory| {
            let range =
                heap_range(ptr, 8, memory.len()).ok_or_else(|| error("read out of heap bounds"))?;
            let bytes = memory[range]
                .try_into()
                .expect("[u8] slice of length 8 must be convertible to [u8; 8]");
            Ok(u64::from_le_bytes(bytes))
        })
    }

    /// Writes a `u64` in little-endian format starting at `ptr`.
    fn write_le_u64(&mut self, ptr: u32, val: u64) -> Result<(), Error> {
        self.with_access_mut(|memory| {
            let range = heap_range(ptr, 8, memory.len())
                .ok_or_else(|| error("write out of heap bounds"))?;
            let bytes = val.to_le_bytes();
            memory[range].copy_from_slice(&bytes[..]);
            Ok(())
        })
    }

    /// The size of the memory in bytes.
    fn size(&self) -> u64 {
        debug_assert!(self.pages() <= MAX_WASM_PAGES);

        self.pages() as u64 * PAGE_SIZE as u64
    }
}

impl<T: Memory> MemoryExt for T {}

/// Returns the range `offset..offset + length` if it lies fully within the heap.
fn heap_range(offset: u32, length: u32, heap_len: usize) -> Option<Range<usize>> {
    let start = offset as usize;
    let end = offset.checked_add(length)? as usize;
    if end <= heap_len {
        Some(start..end)
    } else {
        None
    }
}

/// A guard that poisons the allocator when dropped, unless it is explicitly
/// disarmed. Operations arm it on entry and disarm it only on success, so any
/// early error return leaves the allocator poisoned.
struct PoisonBomb<'a> {
    poisoned: &'a mut bool,
}

impl<'a> PoisonBomb<'a> {
    fn disarm(self) {
        mem::forget(self)
    }
}

impl<'a> Drop for PoisonBomb<'a> {
    fn drop(&mut self) {
        *self.poisoned = true;
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    fn to_pointer(address: u32) -> Pointer<u8> {
        Pointer::new(address)
    }

    #[derive(Debug)]
    struct MemoryInstance {
        data: Vec<u8>,
        max_wasm_pages: u32,
    }

    impl MemoryInstance {
        fn with_pages(pages: u32) -> Self {
            Self { data: vec![0; (pages * PAGE_SIZE) as usize], max_wasm_pages: MAX_WASM_PAGES }
        }

        fn set_max_wasm_pages(&mut self, max_pages: u32) {
            self.max_wasm_pages = max_pages;
        }
    }

    impl Memory for MemoryInstance {
        fn with_access<R>(&self, run: impl FnOnce(&[u8]) -> R) -> R {
            run(&self.data)
        }

        fn with_access_mut<R>(&mut self, run: impl FnOnce(&mut [u8]) -> R) -> R {
            run(&mut self.data)
        }

        fn pages(&self) -> u32 {
            pages_from_size(self.data.len() as u64).unwrap()
        }

        fn max_pages(&self) -> Option<u32> {
            Some(self.max_wasm_pages)
        }

        fn grow(&mut self, pages: u32) -> Result<(), ()> {
            if self.pages() + pages > self.max_wasm_pages {
                Err(())
            } else {
                self.data.resize(((self.pages() + pages) * PAGE_SIZE) as usize, 0);
                Ok(())
            }
        }
    }

    #[test]
    fn test_pages_from_size() {
        assert_eq!(pages_from_size(0).unwrap(), 0);
        assert_eq!(pages_from_size(1).unwrap(), 1);
        assert_eq!(pages_from_size(65536).unwrap(), 1);
        assert_eq!(pages_from_size(65536 + 1).unwrap(), 2);
        assert_eq!(pages_from_size(2 * 65536).unwrap(), 2);
        assert_eq!(pages_from_size(2 * 65536 + 1).unwrap(), 3);
    }

    #[test]
    fn should_allocate_properly() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr = heap.allocate(&mut mem, 1).unwrap();

        assert_eq!(ptr, to_pointer(HEADER_SIZE));
    }

    #[test]
    fn should_always_align_pointers_to_multiples_of_8() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(13);

        let ptr = heap.allocate(&mut mem, 1).unwrap();

        assert_eq!(ptr, to_pointer(24));
    }

    #[test]
    fn should_increment_pointers_properly() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr1 = heap.allocate(&mut mem, 1).unwrap();
        let ptr2 = heap.allocate(&mut mem, 9).unwrap();
        let ptr3 = heap.allocate(&mut mem, 1).unwrap();

        assert_eq!(ptr1, to_pointer(HEADER_SIZE));
        assert_eq!(ptr2, to_pointer(24));
        assert_eq!(ptr3, to_pointer(24 + 16 + HEADER_SIZE));
    }

    #[test]
    fn should_free_properly() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);
        let ptr1 = heap.allocate(&mut mem, 1).unwrap();
        assert_eq!(ptr1, to_pointer(HEADER_SIZE));

        let ptr2 = heap.allocate(&mut mem, 1).unwrap();
        assert_eq!(ptr2, to_pointer(24));

        heap.deallocate(&mut mem, ptr2).unwrap();

        assert_eq!(heap.free_lists.heads[0], Link::Ptr(u32::from(ptr2) - HEADER_SIZE));
    }
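
    // An added illustrative test (not part of the original suite): freeing the
    // same pointer twice is detected, because the second call finds a header
    // that is already marked as free.
    #[test]
    fn should_reject_double_free() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr = heap.allocate(&mut mem, 8).unwrap();
        heap.deallocate(&mut mem, ptr).unwrap();

        assert!(heap.deallocate(&mut mem, ptr).is_err());
        // The failed call poisons the allocator against further use.
        assert!(heap.poisoned);
    }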

    #[test]
    fn should_deallocate_and_reallocate_properly() {
        let mut mem = MemoryInstance::with_pages(1);
        let padded_offset = 16;
        let mut heap = FreeingBumpHeapAllocator::new(13);

        let ptr1 = heap.allocate(&mut mem, 1).unwrap();
        assert_eq!(ptr1, to_pointer(padded_offset + HEADER_SIZE));

        let ptr2 = heap.allocate(&mut mem, 9).unwrap();
        assert_eq!(ptr2, to_pointer(padded_offset + 16 + HEADER_SIZE));

        heap.deallocate(&mut mem, ptr2).unwrap();
        let ptr3 = heap.allocate(&mut mem, 9).unwrap();

        assert_eq!(ptr3, to_pointer(padded_offset + 16 + HEADER_SIZE));
        assert_eq!(heap.free_lists.heads, [Link::Nil; N_ORDERS]);
    }

    #[test]
    fn should_build_linked_list_of_free_areas_properly() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr1 = heap.allocate(&mut mem, 8).unwrap();
        let ptr2 = heap.allocate(&mut mem, 8).unwrap();
        let ptr3 = heap.allocate(&mut mem, 8).unwrap();

        heap.deallocate(&mut mem, ptr1).unwrap();
        heap.deallocate(&mut mem, ptr2).unwrap();
        heap.deallocate(&mut mem, ptr3).unwrap();

        assert_eq!(heap.free_lists.heads[0], Link::Ptr(u32::from(ptr3) - HEADER_SIZE));

        let ptr4 = heap.allocate(&mut mem, 8).unwrap();
        assert_eq!(ptr4, ptr3);

        assert_eq!(heap.free_lists.heads[0], Link::Ptr(u32::from(ptr2) - HEADER_SIZE));
    }

    #[test]
    fn should_not_allocate_if_too_large() {
        let mut mem = MemoryInstance::with_pages(1);
        mem.set_max_wasm_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(13);

        let ptr = heap.allocate(&mut mem, PAGE_SIZE - 13);

        assert_eq!(Error::AllocatorOutOfSpace, ptr.unwrap_err());
    }

    #[test]
    fn should_not_allocate_if_full() {
        let mut mem = MemoryInstance::with_pages(1);
        mem.set_max_wasm_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);
        let ptr1 = heap.allocate(&mut mem, (PAGE_SIZE / 2) - HEADER_SIZE).unwrap();
        assert_eq!(ptr1, to_pointer(HEADER_SIZE));

        let ptr2 = heap.allocate(&mut mem, PAGE_SIZE / 2);

        match ptr2.unwrap_err() {
            Error::AllocatorOutOfSpace => {},
            e => panic!("Expected allocator out of space error, got: {:?}", e),
        }
    }
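
    // An added illustrative test (not part of the original suite): a freed
    // block satisfies a later allocation of the same order even when the
    // bumper has no room left to grow.
    #[test]
    fn should_reuse_freed_block_when_full() {
        let mut mem = MemoryInstance::with_pages(1);
        mem.set_max_wasm_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr1 = heap.allocate(&mut mem, (PAGE_SIZE / 2) - HEADER_SIZE).unwrap();
        heap.deallocate(&mut mem, ptr1).unwrap();

        // The request is served from the free list, so no growth is needed.
        assert_eq!(heap.allocate(&mut mem, (PAGE_SIZE / 2) - HEADER_SIZE).unwrap(), ptr1);
    }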

    #[test]
    fn should_allocate_max_possible_allocation_size() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr = heap.allocate(&mut mem, MAX_POSSIBLE_ALLOCATION).unwrap();

        assert_eq!(ptr, to_pointer(HEADER_SIZE));
    }

    #[test]
    fn should_not_allocate_if_requested_size_too_large() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr = heap.allocate(&mut mem, MAX_POSSIBLE_ALLOCATION + 1);

        assert_eq!(Error::RequestedAllocationTooLarge, ptr.unwrap_err());
    }

    #[test]
    fn should_return_error_when_bumper_greater_than_heap_size() {
        let mut mem = MemoryInstance::with_pages(1);
        mem.set_max_wasm_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let mut ptrs = Vec::new();
        for _ in 0..(PAGE_SIZE as usize / 40) {
            ptrs.push(heap.allocate(&mut mem, 32).expect("Allocate 32 byte"));
        }

        assert_eq!(heap.stats.bytes_allocated, PAGE_SIZE - 16);
        assert_eq!(heap.bumper, PAGE_SIZE - 16);

        ptrs.into_iter()
            .for_each(|ptr| heap.deallocate(&mut mem, ptr).expect("Deallocate 32 byte"));

        assert_eq!(heap.stats.bytes_allocated, 0);
        assert_eq!(heap.stats.bytes_allocated_peak, PAGE_SIZE - 16);
        assert_eq!(heap.bumper, PAGE_SIZE - 16);

        heap.allocate(&mut mem, 8).expect("Allocate 8 byte");

        assert_eq!(heap.bumper as u64, mem.size());
        let ptr = heap.allocate(&mut mem, 8);

        assert_eq!(Error::AllocatorOutOfSpace, ptr.unwrap_err());
    }

    #[test]
    fn should_include_prefixes_in_total_heap_size() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(1);

        heap.allocate(&mut mem, 9).unwrap();

        assert_eq!(heap.stats.bytes_allocated, HEADER_SIZE + 16);
    }

    #[test]
    fn should_calculate_total_heap_size_to_zero() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(13);

        let ptr = heap.allocate(&mut mem, 42).unwrap();
        assert_eq!(ptr, to_pointer(16 + HEADER_SIZE));
        heap.deallocate(&mut mem, ptr).unwrap();

        assert_eq!(heap.stats.bytes_allocated, 0);
    }

    #[test]
    fn should_calculate_total_size_of_zero() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(19);

        for _ in 1..10 {
            let ptr = heap.allocate(&mut mem, 42).unwrap();
            heap.deallocate(&mut mem, ptr).unwrap();
        }

        assert_eq!(heap.stats.bytes_allocated, 0);
    }

    #[test]
    fn should_read_and_write_u64_correctly() {
        let mut mem = MemoryInstance::with_pages(1);

        mem.write_le_u64(40, 4480113).unwrap();

        let value = MemoryExt::read_le_u64(&mem, 40).unwrap();
        assert_eq!(value, 4480113);
    }
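
    // An added illustrative test (not part of the original suite): heap_range
    // rejects accesses that would run past the end of the heap or whose end
    // offset overflows.
    #[test]
    fn heap_range_bounds_checking() {
        assert_eq!(heap_range(0, 8, 16), Some(0..8));
        assert_eq!(heap_range(8, 8, 16), Some(8..16));
        assert_eq!(heap_range(9, 8, 16), None); // would overrun the heap
        assert_eq!(heap_range(u32::MAX, 8, 16), None); // offset + length overflows
    }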

    #[test]
    fn should_get_item_size_from_order() {
        let raw_order = 0;

        let item_size = Order::from_raw(raw_order).unwrap().size();

        assert_eq!(item_size, 8);
    }

    #[test]
    fn should_get_max_item_size_from_index() {
        let raw_order = 22;

        let item_size = Order::from_raw(raw_order).unwrap().size();

        assert_eq!(item_size, MAX_POSSIBLE_ALLOCATION);
    }
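
    // An added illustrative test (not part of the original suite): requested
    // sizes are clamped to MIN_POSSIBLE_ALLOCATION and then rounded up to the
    // next power of two before being mapped to an order.
    #[test]
    fn should_map_sizes_to_orders() {
        assert_eq!(Order::from_size(1).unwrap().size(), 8); // clamped up to the minimum
        assert_eq!(Order::from_size(8).unwrap().size(), 8); // already a power of two
        assert_eq!(Order::from_size(9).unwrap().size(), 16); // rounded up
        assert_eq!(
            Order::from_size(MAX_POSSIBLE_ALLOCATION).unwrap().size(),
            MAX_POSSIBLE_ALLOCATION
        );
        assert!(Order::from_size(MAX_POSSIBLE_ALLOCATION + 1).is_err());
    }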

    #[test]
    fn deallocate_needs_to_maintain_linked_list() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptrs = (0..4).map(|_| heap.allocate(&mut mem, 8).unwrap()).collect::<Vec<_>>();
        ptrs.iter().rev().for_each(|ptr| heap.deallocate(&mut mem, *ptr).unwrap());

        let new_ptrs = (0..4).map(|_| heap.allocate(&mut mem, 8).unwrap()).collect::<Vec<_>>();
        assert_eq!(ptrs, new_ptrs);
    }
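
    // An added illustrative test (not part of the original suite): free blocks
    // are kept in per-order lists and are never split, so a freed 16-byte
    // block is not reused for an 8-byte request.
    #[test]
    fn freed_blocks_are_not_split_across_orders() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr = heap.allocate(&mut mem, 16).unwrap(); // order 1
        heap.deallocate(&mut mem, ptr).unwrap();

        // The 8-byte request takes fresh space from the bumper...
        let small = heap.allocate(&mut mem, 8).unwrap();
        assert_ne!(small, ptr);
        // ...while a new 16-byte request reuses the freed block.
        assert_eq!(heap.allocate(&mut mem, 16).unwrap(), ptr);
    }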

    #[test]
    fn header_read_write() {
        let roundtrip = |header: Header| {
            let mut memory = MemoryInstance::with_pages(1);
            header.write_into(&mut memory, 0).unwrap();

            let read_header = Header::read_from(&memory, 0).unwrap();
            assert_eq!(header, read_header);
        };

        roundtrip(Header::Occupied(Order(0)));
        roundtrip(Header::Occupied(Order(1)));
        roundtrip(Header::Free(Link::Nil));
        roundtrip(Header::Free(Link::Ptr(0)));
        roundtrip(Header::Free(Link::Ptr(4)));
    }
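
    // An added illustrative test (not part of the original suite): bit 32 of
    // the raw little-endian header is the "occupied" flag, while the lower 32
    // bits carry the order (occupied) or the next-free link (free).
    #[test]
    fn header_bit_layout() {
        let mut memory = MemoryInstance::with_pages(1);

        Header::Occupied(Order(3)).write_into(&mut memory, 0).unwrap();
        assert_eq!(memory.read_le_u64(0).unwrap(), 0x00000001_00000003);

        Header::Free(Link::Nil).write_into(&mut memory, 0).unwrap();
        assert_eq!(memory.read_le_u64(0).unwrap(), u64::from(NIL_MARKER));
    }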

    #[test]
    fn poison_oom() {
        let mut mem = MemoryInstance::with_pages(1);
        mem.set_max_wasm_pages(1);

        let mut heap = FreeingBumpHeapAllocator::new(0);

        let alloc_ptr = heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();
        assert_eq!(Error::AllocatorOutOfSpace, heap.allocate(&mut mem, PAGE_SIZE).unwrap_err());

        assert!(heap.poisoned);
        assert!(heap.deallocate(&mut mem, alloc_ptr).is_err());
    }

    #[test]
    fn test_n_orders() {
        assert_eq!(
            MIN_POSSIBLE_ALLOCATION * 2u32.pow(N_ORDERS as u32 - 1),
            MAX_POSSIBLE_ALLOCATION
        );
    }

    #[test]
    fn accepts_growing_memory() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();
        heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();

        mem.grow(1).unwrap();

        heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();
    }

    #[test]
    fn doesnt_accept_shrinking_memory() {
        let mut mem = MemoryInstance::with_pages(2);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap();

        mem.data.truncate(PAGE_SIZE as usize);

        match heap.allocate(&mut mem, PAGE_SIZE / 2).unwrap_err() {
            Error::MemoryShrinked => (),
            _ => panic!(),
        }
    }

    #[test]
    fn should_grow_memory_when_running_out_of_memory() {
        let mut mem = MemoryInstance::with_pages(1);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        assert_eq!(1, mem.pages());

        heap.allocate(&mut mem, PAGE_SIZE * 2).unwrap();

        assert_eq!(3, mem.pages());
    }
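
    // An added illustrative test (not part of the original suite): when the
    // allocator grows the memory it at least doubles the current number of
    // pages (capped at the maximum) rather than growing one page at a time.
    #[test]
    fn should_double_pages_when_growing() {
        let mut mem = MemoryInstance::with_pages(4);
        let mut heap = FreeingBumpHeapAllocator::new(0);

        // The request needs just over 4 pages, but capacity jumps to 8.
        heap.allocate(&mut mem, PAGE_SIZE * 4).unwrap();
        assert_eq!(8, mem.pages());
    }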
}