composable_allocators/fallbacked.rs

use crate::base::*;
use core::alloc::{self, AllocError, Allocator};
use core::ptr::{self, NonNull};
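
/// Pairs a primary allocator `A` with a `Fallback` allocator: every request goes to
/// the primary first and is retried on the fallback only when the primary fails and
/// permits falling back for the requested layout.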
pub struct Fallbacked<A: Fallbackable, Fallback: Allocator>(pub A, pub Fallback);
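
// The pair is `NonUnwinding` whenever both the primary and the fallback are
// themselves `NonUnwinding`.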
unsafe impl<
    A: NonUnwinding + Fallbackable,
    Fallback: NonUnwinding
> NonUnwinding for Fallbacked<A, Fallback> { }
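
// A `Fallbacked` pair is itself `Fallbackable`, so it can act as the primary of
// another `Fallbacked`: it owns a block if either half owns it, and it allows a
// further fallback only when both halves do.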
unsafe impl<A: Fallbackable, Fallback: Fallbackable> Fallbackable for Fallbacked<A, Fallback> {
    unsafe fn has_allocated(&self, ptr: NonNull<u8>, layout: alloc::Layout) -> bool {
        self.0.has_allocated(ptr, layout) || self.1.has_allocated(ptr, layout)
    }

    fn allows_fallback(&self, layout: alloc::Layout) -> bool {
        self.0.allows_fallback(layout) && self.1.allows_fallback(layout)
    }
}
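
// Every allocation request is tried on the primary `self.0`; only when it fails
// and `allows_fallback` permits it for the layout is the request forwarded to `self.1`.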
unsafe impl<A: Fallbackable, Fallback: Allocator> Allocator for Fallbacked<A, Fallback> {
    fn allocate(&self, layout: alloc::Layout) -> Result<NonNull<[u8]>, AllocError> {
        if let Ok(block) = self.0.allocate(layout) {
            Ok(block)
        } else if self.0.allows_fallback(layout) {
            self.1.allocate(layout)
        } else {
            Err(AllocError)
        }
    }

    fn allocate_zeroed(&self, layout: alloc::Layout) -> Result<NonNull<[u8]>, AllocError> {
        if let Ok(block) = self.0.allocate_zeroed(layout) {
            Ok(block)
        } else if self.0.allows_fallback(layout) {
            self.1.allocate_zeroed(layout)
        } else {
            Err(AllocError)
        }
    }
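
    // `has_allocated` decides which of the two allocators owns the block and
    // therefore which one must free it.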
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: alloc::Layout) {
        if self.0.has_allocated(ptr, layout) {
            self.0.deallocate(ptr, layout);
        } else {
            self.1.deallocate(ptr, layout);
        }
    }
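
    // Growing a block owned by the primary is first attempted in the primary itself;
    // if that fails and fallback is allowed for the new layout, the block is moved:
    // allocate from the fallback, copy the old contents, then free the original.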
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        if self.0.has_allocated(ptr, old_layout) {
            if let Ok(block) = self.0.grow(ptr, old_layout, new_layout) {
                Ok(block)
            } else if self.0.allows_fallback(new_layout) {
                if let Ok(block) = self.1.allocate(new_layout) {
                    ptr::copy_nonoverlapping(ptr.as_ptr(), block.as_mut_ptr(), old_layout.size());
                    self.0.deallocate(ptr, old_layout);
                    Ok(block)
                } else {
                    Err(AllocError)
                }
            } else {
                Err(AllocError)
            }
        } else {
            self.1.grow(ptr, old_layout, new_layout)
        }
    }
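
    // Same as `grow`, except the replacement block is requested zeroed, so the bytes
    // beyond the copied prefix are already zero.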
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        if self.0.has_allocated(ptr, old_layout) {
            if let Ok(block) = self.0.grow_zeroed(ptr, old_layout, new_layout) {
                Ok(block)
            } else if self.0.allows_fallback(new_layout) {
                if let Ok(block) = self.1.allocate_zeroed(new_layout) {
                    ptr::copy_nonoverlapping(ptr.as_ptr(), block.as_mut_ptr(), old_layout.size());
                    self.0.deallocate(ptr, old_layout);
                    Ok(block)
                } else {
                    Err(AllocError)
                }
            } else {
                Err(AllocError)
            }
        } else {
            self.1.grow_zeroed(ptr, old_layout, new_layout)
        }
    }
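
    // Mirror of `grow`: when the block moves to the fallback, only
    // `new_layout.size()` bytes are copied, because the new block is the smaller one.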
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: alloc::Layout,
        new_layout: alloc::Layout
    ) -> Result<NonNull<[u8]>, AllocError> {
        if self.0.has_allocated(ptr, old_layout) {
            if let Ok(block) = self.0.shrink(ptr, old_layout, new_layout) {
                Ok(block)
            } else if self.0.allows_fallback(new_layout) {
                if let Ok(block) = self.1.allocate(new_layout) {
                    ptr::copy_nonoverlapping(ptr.as_ptr(), block.as_mut_ptr(), new_layout.size());
                    self.0.deallocate(ptr, old_layout);
                    Ok(block)
                } else {
                    Err(AllocError)
                }
            } else {
                Err(AllocError)
            }
        } else {
            self.1.shrink(ptr, old_layout, new_layout)
        }
    }
}
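
For orientation, here is a minimal usage sketch. The `Fallbackable` trait lives in `crate::base` and is not shown in this file, so the sketch assumes it is an unsafe subtrait of `Allocator` exposing exactly the two methods used above; the `SmallOnly` type, the import paths, and the `main` wiring are invented for illustration and are not the crate's actual API. It also relies on the nightly `allocator_api` feature that the `Allocator` trait requires.

#![feature(allocator_api)]

// Assumed import paths; the crate's real module layout may differ.
use composable_allocators::base::Fallbackable;
use composable_allocators::Fallbacked;
use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;

// Toy primary allocator: serves requests of at most 64 bytes from the global heap
// and reports anything larger as a failure, so `Fallbacked` retries those on the
// fallback.
struct SmallOnly;

unsafe impl Allocator for SmallOnly {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        if layout.size() <= 64 { Global.allocate(layout) } else { Err(AllocError) }
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        Global.deallocate(ptr, layout)
    }
}

// Assumed trait shape: ownership is decided purely by size here, which matches what
// `allocate` above accepts; a real allocator would track its own memory instead.
unsafe impl Fallbackable for SmallOnly {
    unsafe fn has_allocated(&self, _ptr: NonNull<u8>, layout: Layout) -> bool {
        layout.size() <= 64
    }

    fn allows_fallback(&self, _layout: Layout) -> bool { true }
}

fn main() {
    let alloc = Fallbacked(SmallOnly, Global);
    // 16 bytes fits the primary; 4096 bytes fails there and falls back to `Global`.
    let small: Vec<u8, _> = Vec::with_capacity_in(16, &alloc);
    let large: Vec<u8, _> = Vec::with_capacity_in(4096, &alloc);
    assert!(small.capacity() >= 16 && large.capacity() >= 4096);
}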