composable_allocators/limited_up_to.rs

use crate::base::*;
use core::alloc::{self, AllocError, Allocator};
use core::cmp::min;
use core::ptr::NonNull;

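/// An adaptor that serves only allocation requests fitting within a fixed
/// limit `layout` (both in size and in alignment), delegating them to the
/// underlying allocator `base`. Requests exceeding the limit fail with
/// `AllocError`, and every block handed out is reported as at most
/// `layout.size()` bytes long, even if `base` actually returned more.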
pub struct LimitedUpTo<A: Allocator> {
    layout: alloc::Layout,
    base: A,
}

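// `NonUnwinding` comes from the crate's `base` module, presumably marking
// allocators whose methods never unwind. The adaptor only adds layout
// comparisons on top of `A`, so the marker is simply forwarded whenever the
// base allocator provides it.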
unsafe impl<A: NonUnwinding> NonUnwinding for LimitedUpTo<A> { }

impl<A: Allocator> LimitedUpTo<A> {
    pub const fn new(layout: alloc::Layout, base: A) -> Self {
        LimitedUpTo { layout, base }
    }

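    /// A layout is "managed" when both its size and its alignment are within
    /// the limit layout; only such requests are passed on to `base`.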
    fn manages(&self, layout: alloc::Layout) -> bool {
        layout.size() <= self.layout.size() &&
        layout.align() <= self.layout.align()
    }
}

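// `Fallbackable` (also from the crate's `base` module) presumably lets a
// fallback combinator chain two allocators. Ownership of a block is decided
// purely by its layout, which is sound because `allows_fallback` refuses
// fallback for exactly the layouts this allocator manages: a block with a
// managed layout can only have come from here, never from the fallback.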
unsafe impl<A: Allocator> Fallbackable for LimitedUpTo<A> {
    unsafe fn has_allocated(&self, _ptr: NonNull<u8>, layout: alloc::Layout) -> bool {
        self.manages(layout)
    }

    fn allows_fallback(&self, layout: alloc::Layout) -> bool {
        !self.manages(layout)
    }
}

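// Each method below follows the same pattern: refuse layouts that exceed the
// limit, otherwise delegate to `base` and clamp the length of the returned
// block to `self.layout.size()`. The base allocator is allowed to
// over-allocate, but the adaptor never reports a block larger than its limit,
// presumably so that `has_allocated` stays consistent with the blocks it
// actually hands out.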
unsafe impl<A: Allocator> Allocator for LimitedUpTo<A> {
    fn allocate(&self, layout: alloc::Layout) -> Result<NonNull<[u8]>, AllocError> {
        if self.manages(layout) {
            if let Ok(block) = self.base.allocate(layout) {
                let len = min(block.len(), self.layout.size());
                Ok(unsafe { NonNull::slice_from_raw_parts(NonNull::new_unchecked(block.as_mut_ptr()), len) })
            } else {
                Err(AllocError)
            }
        } else {
            Err(AllocError)
        }
    }

    fn allocate_zeroed(&self, layout: alloc::Layout) -> Result<NonNull<[u8]>, AllocError> {
        if self.manages(layout) {
            if let Ok(block) = self.base.allocate_zeroed(layout) {
                let len = min(block.len(), self.layout.size());
                Ok(unsafe { NonNull::slice_from_raw_parts(NonNull::new_unchecked(block.as_mut_ptr()), len) })
            } else {
                Err(AllocError)
            }
        } else {
            Err(AllocError)
        }
    }

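    // Deallocation is forwarded unconditionally: any pointer this allocator
    // is asked to free was necessarily obtained from `base` in the first
    // place, so no layout check is needed here.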
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: alloc::Layout) {
        self.base.deallocate(ptr, layout);
    }

    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        if self.manages(new_layout) {
            if let Ok(block) = self.base.grow(ptr, old_layout, new_layout) {
                let len = min(block.len(), self.layout.size());
                Ok(NonNull::slice_from_raw_parts(NonNull::new_unchecked(block.as_mut_ptr()), len))
            } else {
                Err(AllocError)
            }
        } else {
            Err(AllocError)
        }
    }

    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        if self.manages(new_layout) {
            if let Ok(block) = self.base.grow_zeroed(ptr, old_layout, new_layout) {
                let len = min(block.len(), self.layout.size());
                Ok(NonNull::slice_from_raw_parts(NonNull::new_unchecked(block.as_mut_ptr()), len))
            } else {
                Err(AllocError)
            }
        } else {
            Err(AllocError)
        }
    }

    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        if self.manages(new_layout) {
            if let Ok(block) = self.base.shrink(ptr, old_layout, new_layout) {
                let len = min(block.len(), self.layout.size());
                Ok(NonNull::slice_from_raw_parts(NonNull::new_unchecked(block.as_mut_ptr()), len))
            } else {
                Err(AllocError)
            }
        } else {
            Err(AllocError)
        }
    }
}
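
// --- Usage sketch (not part of the original file) ---
// A minimal illustration of how the adaptor is composed: wrap an arbitrary
// base allocator so that only requests of at most 64 bytes with at most
// 8-byte alignment reach it; anything larger fails with `AllocError` (and,
// via `allows_fallback`, could be routed elsewhere by a fallback combinator).
// The helper name and the 64/8 limit are illustrative choices, not crate API.
#[allow(dead_code)]
fn limit_to_small_blocks<A: Allocator>(base: A) -> LimitedUpTo<A> {
    // `from_size_align(64, 8)` cannot fail: 8 is a power of two and 64 does
    // not overflow when rounded up to a multiple of the alignment.
    let limit = alloc::Layout::from_size_align(64, 8).unwrap();
    LimitedUpTo::new(limit, base)
}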