use crate::Error;
use tetcore_std::{mem, convert::{TryFrom, TryInto}, ops::{Range, Index, IndexMut}};
use tetcore_wasm_interface::{Pointer, WordSize};

/// All pointers returned by `allocate` are aligned to this boundary, in bytes.
const ALIGNMENT: u32 = 8;

/// Every allocation is prefixed by a header of this many bytes.
const HEADER_SIZE: u32 = 8;

fn error(msg: &'static str) -> Error {
    Error::Other(msg)
}

macro_rules! trace {
    ( $( $args:expr ),+ ) => {
        tetcore_std::if_std! {
            log::trace!(target: "wasm-heap", $( $args ),+);
        }
    }
}

/// The number of distinct size classes (orders) the allocator supports.
const N_ORDERS: usize = 22;

/// The maximum request the allocator will serve: 2^24 bytes (16 MiB).
const MAX_POSSIBLE_ALLOCATION: u32 = 16777216;

/// The smallest block handed out; smaller requests are rounded up to this size.
const MIN_POSSIBLE_ALLOCATION: u32 = 8;

/// A size class: order `n` corresponds to blocks of `MIN_POSSIBLE_ALLOCATION << n` bytes.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct Order(u32);

impl Order {
    /// Create an `Order` from a raw value, failing if it is out of range.
    fn from_raw(order: u32) -> Result<Self, Error> {
        if order < N_ORDERS as u32 {
            Ok(Self(order))
        } else {
            Err(error("invalid order"))
        }
    }

    /// Compute the order for a requested size: clamp it into the supported
    /// range, round up to the next power of two, and take the exponent
    /// relative to `MIN_POSSIBLE_ALLOCATION`.
    fn from_size(size: u32) -> Result<Self, Error> {
        let clamped_size = if size > MAX_POSSIBLE_ALLOCATION {
            return Err(Error::RequestedAllocationTooLarge);
        } else if size < MIN_POSSIBLE_ALLOCATION {
            MIN_POSSIBLE_ALLOCATION
        } else {
            size
        };

        let power_of_two_size = clamped_size.next_power_of_two();
        let order = power_of_two_size.trailing_zeros() - MIN_POSSIBLE_ALLOCATION.trailing_zeros();

        Ok(Self(order))
    }

    /// The block size, in bytes, of this order.
    fn size(&self) -> u32 {
        MIN_POSSIBLE_ALLOCATION << self.0
    }

    fn into_raw(self) -> u32 {
        self.0
    }
}

/// The sentinel stored in a free block's header when there is no next free block.
const NIL_MARKER: u32 = u32::max_value();

/// A link in the linked list of free blocks of a given order.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Link {
    /// The end of the list.
    Nil,
    /// The next free block's header starts at this offset.
    Ptr(u32),
}

impl Link {
    fn from_raw(raw: u32) -> Self {
        if raw != NIL_MARKER {
            Self::Ptr(raw)
        } else {
            Self::Nil
        }
    }

    fn into_raw(self) -> u32 {
        match self {
            Self::Nil => NIL_MARKER,
            Self::Ptr(ptr) => ptr,
        }
    }
}

#[derive(Clone, Debug, PartialEq, Eq)]
enum Header {
    /// A free block holding the link to the next free block of the same order.
    Free(Link),
    /// An occupied block of the given order.
    Occupied(Order),
}

impl Header {
    /// Read a header from `memory` at the given offset.
    ///
    /// Bit 32 of the raw header is the occupied flag; the lower 32 bits carry
    /// either the order (occupied) or the free-list link (free).
    fn read_from<M: Memory + ?Sized>(memory: &M, header_ptr: u32) -> Result<Self, Error> {
        let raw_header = memory.read_le_u64(header_ptr)?;

        let occupied = raw_header & 0x00000001_00000000 != 0;
        let header_data = raw_header as u32;

        Ok(if occupied {
            Self::Occupied(Order::from_raw(header_data)?)
        } else {
            Self::Free(Link::from_raw(header_data))
        })
    }

    /// Write this header into `memory` at the given offset.
    fn write_into<M: Memory + ?Sized>(&self, memory: &mut M, header_ptr: u32) -> Result<(), Error> {
        let (header_data, occupied_mask) = match *self {
            Self::Occupied(order) => (order.into_raw(), 0x00000001_00000000),
            Self::Free(link) => (link.into_raw(), 0x00000000_00000000),
        };
        let raw_header = header_data as u64 | occupied_mask;
        memory.write_le_u64(header_ptr, raw_header)?;
        Ok(())
    }

    /// The order of the block, if this header marks it occupied.
    fn into_occupied(self) -> Option<Order> {
        match self {
            Self::Occupied(order) => Some(order),
            _ => None,
        }
    }

    /// The link to the next free block, if this header marks the block free.
    fn into_free(self) -> Option<Link> {
        match self {
            Self::Free(link) => Some(link),
            _ => None,
        }
    }
}

/// The per-order heads of the free-block linked lists.
struct FreeLists {
    heads: [Link; N_ORDERS],
}

impl FreeLists {
    /// Create free lists with every order empty.
    fn new() -> Self {
        Self {
            heads: [Link::Nil; N_ORDERS]
        }
    }

    /// Replace the head of the list for `order` with `new`, returning the previous head.
    fn replace(&mut self, order: Order, new: Link) -> Link {
        let prev = self[order];
        self[order] = new;
        prev
    }
}

impl Index<Order> for FreeLists {
    type Output = Link;
    fn index(&self, index: Order) -> &Link {
        &self.heads[index.0 as usize]
    }
}

impl IndexMut<Order> for FreeLists {
    fn index_mut(&mut self, index: Order) -> &mut Link {
        &mut self.heads[index.0 as usize]
    }
}

/// A bump allocator that also keeps per-order free lists so freed blocks can be reused.
pub struct FreeingBumpHeapAllocator {
    /// The offset of the first byte that has never been handed out.
    bumper: u32,
    /// Heads of the free-block lists, one per order.
    free_lists: FreeLists,
    /// Total bytes currently allocated, headers included.
    total_size: u32,
    /// Set when an operation fails partway through; all further calls are refused.
    poisoned: bool,
}

impl FreeingBumpHeapAllocator {
    /// Create a new allocator serving requests from the memory above `heap_base`.
    pub fn new(heap_base: u32) -> Self {
        // Round the heap base up to the next multiple of ALIGNMENT.
        let aligned_heap_base = (heap_base + ALIGNMENT - 1) / ALIGNMENT * ALIGNMENT;

        FreeingBumpHeapAllocator {
            bumper: aligned_heap_base,
            free_lists: FreeLists::new(),
            total_size: 0,
            poisoned: false,
        }
    }

    /// Allocate `size` bytes in `mem`, returning a pointer just past the block's header.
    pub fn allocate<M: Memory + ?Sized>(
        &mut self,
        mem: &mut M,
        size: WordSize,
    ) -> Result<Pointer<u8>, Error> {
        if self.poisoned {
            return Err(error("the allocator has been poisoned"))
        }

        let bomb = PoisonBomb { poisoned: &mut self.poisoned };
        let order = Order::from_size(size)?;

        // Prefer reusing a freed block of the right order; otherwise bump.
        let header_ptr: u32 = match self.free_lists[order] {
            Link::Ptr(header_ptr) => {
                assert!(
                    header_ptr + order.size() + HEADER_SIZE <= mem.size(),
                    "Pointer is looked up in list of free entries, into which \
                    only valid values are inserted; qed"
                );

                // Pop the block off the head of its free list.
                let next_free = Header::read_from(mem, header_ptr)?
                    .into_free()
                    .ok_or_else(|| error("free list points to an occupied header"))?;
                self.free_lists[order] = next_free;

                header_ptr
            }
            Link::Nil => {
                // Nothing free of this order: carve a fresh block off the bump region.
                Self::bump(&mut self.bumper, order.size() + HEADER_SIZE, mem.size())?
            }
        };

        Header::Occupied(order).write_into(mem, header_ptr)?;

        self.total_size += order.size() + HEADER_SIZE;
        trace!("Heap size is {} bytes after allocation", self.total_size);

        bomb.disarm();
        Ok(Pointer::new(header_ptr + HEADER_SIZE))
    }

    /// Free the allocation behind `ptr`, pushing the block onto its order's free list.
    pub fn deallocate<M: Memory + ?Sized>(&mut self, mem: &mut M, ptr: Pointer<u8>) -> Result<(), Error> {
        if self.poisoned {
            return Err(error("the allocator has been poisoned"))
        }

        let bomb = PoisonBomb { poisoned: &mut self.poisoned };

        let header_ptr = u32::from(ptr)
            .checked_sub(HEADER_SIZE)
            .ok_or_else(|| error("Invalid pointer for deallocation"))?;

        let order = Header::read_from(mem, header_ptr)?
            .into_occupied()
            .ok_or_else(|| error("the allocation points to an empty header"))?;

        // Push the block onto the head of its free list.
        let prev_head = self.free_lists.replace(order, Link::Ptr(header_ptr));
        Header::Free(prev_head).write_into(mem, header_ptr)?;

        self.total_size = self
            .total_size
            .checked_sub(order.size() + HEADER_SIZE)
            .ok_or_else(|| error("Unable to subtract from total heap size without overflow"))?;
        trace!("Heap size is {} bytes after deallocation", self.total_size);

        bomb.disarm();
        Ok(())
    }

    /// Advance the bump pointer by `size` bytes, failing if the heap is exhausted.
    fn bump(bumper: &mut u32, size: u32, heap_end: u32) -> Result<u32, Error> {
        // Use checked arithmetic so a bumper near u32::MAX cannot wrap around.
        let new_bumper = bumper
            .checked_add(size)
            .ok_or(Error::AllocatorOutOfSpace)?;
        if new_bumper > heap_end {
            return Err(Error::AllocatorOutOfSpace);
        }

        let res = *bumper;
        *bumper = new_bumper;
        Ok(res)
    }
}

/// An abstraction over the linear memory the allocator manages.
pub trait Memory {
    /// Read a little-endian u64 from the heap at `ptr`.
    fn read_le_u64(&self, ptr: u32) -> Result<u64, Error>;
    /// Write a little-endian u64 to the heap at `ptr`.
    fn write_le_u64(&mut self, ptr: u32, val: u64) -> Result<(), Error>;
    /// The size of the memory, in bytes.
    fn size(&self) -> u32;
}

impl Memory for [u8] {
    fn read_le_u64(&self, ptr: u32) -> Result<u64, Error> {
        let range =
            heap_range(ptr, 8, self.len()).ok_or_else(|| error("read out of heap bounds"))?;
        let bytes = self[range]
            .try_into()
            .expect("[u8] slice of length 8 must be convertible to [u8; 8]");
        Ok(u64::from_le_bytes(bytes))
    }
    fn write_le_u64(&mut self, ptr: u32, val: u64) -> Result<(), Error> {
        let range =
            heap_range(ptr, 8, self.len()).ok_or_else(|| error("write out of heap bounds"))?;
        let bytes = val.to_le_bytes();
        self[range].copy_from_slice(&bytes[..]);
        Ok(())
    }
    fn size(&self) -> u32 {
        u32::try_from(self.len()).expect("size of Wasm linear memory is <2^32; qed")
    }
}

/// Bounds-check `offset..offset + length` against the heap, returning the usable range.
fn heap_range(offset: u32, length: u32, heap_len: usize) -> Option<Range<usize>> {
    let start = offset as usize;
    let end = offset.checked_add(length)? as usize;
    if end <= heap_len {
        Some(start..end)
    } else {
        None
    }
}

/// A drop guard that poisons the allocator unless explicitly disarmed, so that
/// an early `?` return marks the allocator's state as untrustworthy.
struct PoisonBomb<'a> {
    poisoned: &'a mut bool,
}

impl<'a> PoisonBomb<'a> {
    fn disarm(self) {
        mem::forget(self)
    }
}

impl<'a> Drop for PoisonBomb<'a> {
    fn drop(&mut self) {
        *self.poisoned = true;
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    const PAGE_SIZE: u32 = 65536;

    fn to_pointer(address: u32) -> Pointer<u8> {
        Pointer::new(address)
    }

    #[test]
    fn should_allocate_properly() {
        let mut mem = [0u8; PAGE_SIZE as usize];
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr = heap.allocate(&mut mem[..], 1).unwrap();

        // The returned pointer skips the 8-byte header at the start of the heap.
        assert_eq!(ptr, to_pointer(HEADER_SIZE));
    }

    #[test]
    fn should_always_align_pointers_to_multiples_of_8() {
        let mut mem = [0u8; PAGE_SIZE as usize];
        let mut heap = FreeingBumpHeapAllocator::new(13);

        let ptr = heap.allocate(&mut mem[..], 1).unwrap();

        // Heap base 13 rounds up to 16; the 8-byte header pushes the pointer to 24.
        assert_eq!(ptr, to_pointer(24));
    }
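
    // An added companion check (not in the original suite): exercise the
    // base-alignment step of `new` directly via the private `bumper` field,
    // which this child test module can access.
    #[test]
    fn should_align_heap_base_in_new() {
        // 13 rounds up to 16, the next multiple of ALIGNMENT (8).
        let heap = FreeingBumpHeapAllocator::new(13);
        assert_eq!(heap.bumper, 16);

        // An already-aligned base is left unchanged.
        let heap = FreeingBumpHeapAllocator::new(16);
        assert_eq!(heap.bumper, 16);
    }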

    #[test]
    fn should_increment_pointers_properly() {
        let mut mem = [0u8; PAGE_SIZE as usize];
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr1 = heap.allocate(&mut mem[..], 1).unwrap();
        let ptr2 = heap.allocate(&mut mem[..], 9).unwrap();
        let ptr3 = heap.allocate(&mut mem[..], 1).unwrap();

        // The first allocation sits right after its header.
        assert_eq!(ptr1, to_pointer(HEADER_SIZE));
        // The second starts past the first 8-byte block and both headers.
        assert_eq!(ptr2, to_pointer(24));
        // The third starts past the second block, which rounded up to 16 bytes.
        assert_eq!(ptr3, to_pointer(24 + 16 + HEADER_SIZE));
    }

    #[test]
    fn should_free_properly() {
        let mut mem = [0u8; PAGE_SIZE as usize];
        let mut heap = FreeingBumpHeapAllocator::new(0);
        let ptr1 = heap.allocate(&mut mem[..], 1).unwrap();
        assert_eq!(ptr1, to_pointer(HEADER_SIZE));

        let ptr2 = heap.allocate(&mut mem[..], 1).unwrap();
        assert_eq!(ptr2, to_pointer(24));

        heap.deallocate(&mut mem[..], ptr2).unwrap();

        // The freed block's header becomes the head of the order-0 free list.
        assert_eq!(heap.free_lists.heads[0], Link::Ptr(u32::from(ptr2) - HEADER_SIZE));
    }

    #[test]
    fn should_deallocate_and_reallocate_properly() {
        let mut mem = [0u8; PAGE_SIZE as usize];
        let padded_offset = 16;
        let mut heap = FreeingBumpHeapAllocator::new(13);

        let ptr1 = heap.allocate(&mut mem[..], 1).unwrap();
        assert_eq!(ptr1, to_pointer(padded_offset + HEADER_SIZE));

        let ptr2 = heap.allocate(&mut mem[..], 9).unwrap();
        assert_eq!(ptr2, to_pointer(padded_offset + 16 + HEADER_SIZE));

        heap.deallocate(&mut mem[..], ptr2).unwrap();
        let ptr3 = heap.allocate(&mut mem[..], 9).unwrap();

        // The freed block is reused, so all free lists end up empty again.
        assert_eq!(ptr3, to_pointer(padded_offset + 16 + HEADER_SIZE));
        assert_eq!(heap.free_lists.heads, [Link::Nil; N_ORDERS]);
    }

    #[test]
    fn should_build_linked_list_of_free_areas_properly() {
        let mut mem = [0u8; PAGE_SIZE as usize];
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr1 = heap.allocate(&mut mem[..], 8).unwrap();
        let ptr2 = heap.allocate(&mut mem[..], 8).unwrap();
        let ptr3 = heap.allocate(&mut mem[..], 8).unwrap();

        heap.deallocate(&mut mem[..], ptr1).unwrap();
        heap.deallocate(&mut mem[..], ptr2).unwrap();
        heap.deallocate(&mut mem[..], ptr3).unwrap();

        // The most recently freed block sits at the head of the list.
        assert_eq!(heap.free_lists.heads[0], Link::Ptr(u32::from(ptr3) - HEADER_SIZE));

        let ptr4 = heap.allocate(&mut mem[..], 8).unwrap();
        assert_eq!(ptr4, ptr3);

        // Allocating popped the head, exposing the next link in the list.
        assert_eq!(heap.free_lists.heads[0], Link::Ptr(u32::from(ptr2) - HEADER_SIZE));
    }

    #[test]
    fn should_not_allocate_if_too_large() {
        let mut mem = [0u8; PAGE_SIZE as usize];
        let mut heap = FreeingBumpHeapAllocator::new(13);

        let ptr = heap.allocate(&mut mem[..], PAGE_SIZE - 13);

        match ptr.unwrap_err() {
            Error::AllocatorOutOfSpace => {},
            e => panic!("Expected allocator out of space error, got: {:?}", e),
        }
    }

    #[test]
    fn should_not_allocate_if_full() {
        let mut mem = [0u8; PAGE_SIZE as usize];
        let mut heap = FreeingBumpHeapAllocator::new(0);
        let ptr1 = heap.allocate(&mut mem[..], (PAGE_SIZE / 2) - HEADER_SIZE).unwrap();
        assert_eq!(ptr1, to_pointer(HEADER_SIZE));

        // The first allocation and its header took half the heap, so there is
        // no room left for another half-page block plus its header.
        let ptr2 = heap.allocate(&mut mem[..], PAGE_SIZE / 2);

        match ptr2.unwrap_err() {
            Error::AllocatorOutOfSpace => {},
            e => panic!("Expected allocator out of space error, got: {:?}", e),
        }
    }

    #[test]
    fn should_allocate_max_possible_allocation_size() {
        let mut mem = vec![0u8; (MAX_POSSIBLE_ALLOCATION + PAGE_SIZE) as usize];
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr = heap.allocate(&mut mem[..], MAX_POSSIBLE_ALLOCATION).unwrap();

        assert_eq!(ptr, to_pointer(HEADER_SIZE));
    }

    #[test]
    fn should_not_allocate_if_requested_size_too_large() {
        let mut mem = [0u8; PAGE_SIZE as usize];
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr = heap.allocate(&mut mem[..], MAX_POSSIBLE_ALLOCATION + 1);

        match ptr.unwrap_err() {
            Error::RequestedAllocationTooLarge => {},
            e => panic!("Expected allocation size too large error, got: {:?}", e),
        }
    }

    #[test]
    fn should_return_error_when_bumper_greater_than_heap_size() {
        let mut mem = [0u8; 64];
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptr1 = heap.allocate(&mut mem[..], 32).unwrap();
        assert_eq!(ptr1, to_pointer(HEADER_SIZE));
        heap.deallocate(&mut mem[..], ptr1).expect("failed freeing ptr1");
        assert_eq!(heap.total_size, 0);
        assert_eq!(heap.bumper, 40);

        let ptr2 = heap.allocate(&mut mem[..], 16).unwrap();
        assert_eq!(ptr2, to_pointer(48));
        heap.deallocate(&mut mem[..], ptr2).expect("failed freeing ptr2");
        assert_eq!(heap.total_size, 0);
        assert_eq!(heap.bumper, 64);

        // Deallocation never lowers the bumper, and both freed blocks are of
        // larger orders, so this order-0 request must bump past the end of
        // the heap and fail even though the heap is logically empty.
        let ptr = heap.allocate(&mut mem[..], 8);

        match ptr.unwrap_err() {
            Error::AllocatorOutOfSpace => {},
            e => panic!("Expected allocator out of space error, got: {:?}", e),
        }
    }

    #[test]
    fn should_include_prefixes_in_total_heap_size() {
        let mut mem = [0u8; PAGE_SIZE as usize];
        let mut heap = FreeingBumpHeapAllocator::new(1);

        // A 9-byte request rounds up to a 16-byte block plus the header.
        heap.allocate(&mut mem[..], 9).unwrap();

        assert_eq!(heap.total_size, HEADER_SIZE + 16);
    }

    #[test]
    fn should_calculate_total_heap_size_to_zero() {
        let mut mem = [0u8; PAGE_SIZE as usize];
        let mut heap = FreeingBumpHeapAllocator::new(13);

        let ptr = heap.allocate(&mut mem[..], 42).unwrap();
        assert_eq!(ptr, to_pointer(16 + HEADER_SIZE));
        heap.deallocate(&mut mem[..], ptr).unwrap();

        assert_eq!(heap.total_size, 0);
    }

    #[test]
    fn should_calculate_total_size_of_zero() {
        let mut mem = [0u8; PAGE_SIZE as usize];
        let mut heap = FreeingBumpHeapAllocator::new(19);

        for _ in 1..10 {
            let ptr = heap.allocate(&mut mem[..], 42).unwrap();
            heap.deallocate(&mut mem[..], ptr).unwrap();
        }

        assert_eq!(heap.total_size, 0);
    }

    #[test]
    fn should_read_and_write_u64_correctly() {
        let mut mem = [0u8; PAGE_SIZE as usize];

        Memory::write_le_u64(mem.as_mut(), 40, 4480113).unwrap();

        let value = Memory::read_le_u64(mem.as_ref(), 40).unwrap();
        assert_eq!(value, 4480113);
    }
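
    // An added companion check (not in the original suite): an 8-byte access
    // whose final byte would fall past the end of the heap must be rejected
    // by the `heap_range` bounds check.
    #[test]
    fn should_reject_out_of_bounds_access() {
        let mut mem = [0u8; 64];

        // Offset 56 is the last spot where a u64 still fits in a 64-byte heap.
        assert!(Memory::write_le_u64(mem.as_mut(), 56, 1).is_ok());
        assert!(Memory::write_le_u64(mem.as_mut(), 57, 1).is_err());
        assert!(Memory::read_le_u64(mem.as_ref(), 57).is_err());
    }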

    #[test]
    fn should_get_item_size_from_order() {
        let raw_order = 0;

        let item_size = Order::from_raw(raw_order).unwrap().size();

        assert_eq!(item_size, 8);
    }
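
    // An added companion check (not in the original suite): `Order::from_size`
    // clamps small requests to MIN_POSSIBLE_ALLOCATION and rounds everything
    // else up to the next power of two, rejecting oversized requests.
    #[test]
    fn should_round_sizes_up_to_next_power_of_two() {
        // 1 byte clamps to the 8-byte minimum (order 0).
        assert_eq!(Order::from_size(1).unwrap().size(), 8);
        // 9 bytes rounds up to 16 bytes (order 1).
        assert_eq!(Order::from_size(9).unwrap().size(), 16);
        // Exact powers of two map to themselves.
        assert_eq!(Order::from_size(16).unwrap().size(), 16);
        // Anything beyond the maximum is rejected outright.
        assert!(Order::from_size(MAX_POSSIBLE_ALLOCATION + 1).is_err());
    }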

    #[test]
    fn should_get_max_item_size_from_index() {
        let raw_order = 21;

        let item_size = Order::from_raw(raw_order).unwrap().size();

        assert_eq!(item_size, MAX_POSSIBLE_ALLOCATION);
    }

    #[test]
    fn deallocate_needs_to_maintain_linked_list() {
        let mut mem = [0u8; 8 * 2 * 4 + ALIGNMENT as usize];
        let mut heap = FreeingBumpHeapAllocator::new(0);

        let ptrs = (0..4).map(|_| heap.allocate(&mut mem[..], 8).unwrap()).collect::<Vec<_>>();
        ptrs.into_iter().for_each(|ptr| heap.deallocate(&mut mem[..], ptr).unwrap());

        let _ = (0..4).map(|_| heap.allocate(&mut mem[..], 8).unwrap()).collect::<Vec<_>>();
    }

    #[test]
    fn header_read_write() {
        let roundtrip = |header: Header| {
            let mut memory = [0u8; 32];
            header.write_into(memory.as_mut(), 0).unwrap();

            let read_header = Header::read_from(memory.as_ref(), 0).unwrap();
            assert_eq!(header, read_header);
        };

        roundtrip(Header::Occupied(Order(0)));
        roundtrip(Header::Occupied(Order(1)));
        roundtrip(Header::Free(Link::Nil));
        roundtrip(Header::Free(Link::Ptr(0)));
        roundtrip(Header::Free(Link::Ptr(4)));
    }

    #[test]
    fn poison_oom() {
        // A 32-byte heap fits two 8-byte allocations plus headers, but not a third.
        let mut mem = [0u8; 32];
        let mut heap = FreeingBumpHeapAllocator::new(0);

        assert!(heap.allocate(mem.as_mut(), 8).is_ok());
        let alloc_ptr = heap.allocate(mem.as_mut(), 8).unwrap();
        assert!(heap.allocate(mem.as_mut(), 8).is_err());

        // The failed allocation poisons the allocator, so deallocation is refused too.
        assert!(heap.poisoned);
        assert!(heap.deallocate(mem.as_mut(), alloc_ptr).is_err());
    }
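
    // An added companion check (not in the original suite): `Link` round-trips
    // through its raw u32 encoding, with NIL_MARKER reserved for `Nil`.
    #[test]
    fn link_raw_roundtrip() {
        assert_eq!(Link::from_raw(NIL_MARKER), Link::Nil);
        assert_eq!(Link::Nil.into_raw(), NIL_MARKER);
        assert_eq!(Link::from_raw(42), Link::Ptr(42));
        assert_eq!(Link::Ptr(42).into_raw(), 42);
    }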
}