use super::os::Void;
use super::HeaderOfHeader;
use super::MEM_MAP;
use intrusive_collections::intrusive_adapter;
use intrusive_collections::Bound::Included as IntrusiveIncluded;
use intrusive_collections::{KeyAdapter, RBTreeLink, UnsafeRef};
use std::alloc::{AllocError, Allocator as StdAllocator, Layout};
use std::mem::ManuallyDrop;
use std::ops;
use std::ptr::{copy_nonoverlapping, NonNull};

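/// A node of the free-space index. Each free region of the arena begins
/// with one of these headers and is linked into two intrusive red-black
/// trees: one ordered by address, one ordered by (size, address).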
pub struct FreeBlock {
    by_size_address: RBTreeLink,
    by_address: RBTreeLink,
    pub size: usize,
    _padding: usize,
}
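/// Smallest unit of allocation: every block must be able to hold a
/// `FreeBlock` header once it is returned to the free lists.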
pub const ALLOCATION_QUANTUM: usize = std::mem::size_of::<FreeBlock>();
impl FreeBlock {
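    /// Write a fresh `FreeBlock` header at `fbr` and register it in both
    /// index trees. The caller must ensure `fbr` points at `size` bytes of
    /// unused arena memory.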
    pub(crate) unsafe fn initialize(
        h: &mut HeaderOfHeader,
        fbr: *const FreeBlock,
        size: usize,
    ) {
        let fbptr = fbr as *mut ManuallyDrop<FreeBlock>;
        let fbref = fbptr.as_mut().unwrap();
        *fbref = ManuallyDrop::new(FreeBlock {
            by_address: RBTreeLink::new(),
            by_size_address: RBTreeLink::new(),
            size,
            _padding: 0,
        });
        h.by_address.insert(UnsafeRef::from_raw(fbr));
        h.by_size_address.insert(UnsafeRef::from_raw(fbr));
    }
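    /// Unlink the free block at `fbr` from both index trees; the memory
    /// itself is left untouched.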
    pub(crate) unsafe fn finalize(
        h: &mut HeaderOfHeader,
        fbr: *const FreeBlock,
    ) {
        h.by_address.cursor_mut_from_ptr(fbr).remove();
        h.by_size_address.cursor_mut_from_ptr(fbr).remove();
    }
}
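// Adapters mapping tree links back to their containing `FreeBlock`.
// The by-address tree serves coalescing; the (size, address) tree
// serves best-fit lookup.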
intrusive_adapter!(pub ByAddressAdapter = UnsafeRef<FreeBlock>:
                            FreeBlock { by_address: RBTreeLink });
impl<'a> KeyAdapter<'a> for ByAddressAdapter {
    type Key = *const FreeBlock;
    fn get_key(&self, x: &'a FreeBlock) -> Self::Key {
        x as *const FreeBlock
    }
}
intrusive_adapter!(pub BySizeAddressAdapter = UnsafeRef<FreeBlock>:
                            FreeBlock { by_size_address: RBTreeLink });
impl<'a> KeyAdapter<'a> for BySizeAddressAdapter {
    type Key = (usize, *const FreeBlock);
    fn get_key(&self, x: &'a FreeBlock) -> Self::Key {
        (x.size, x as *const FreeBlock)
    }
}

#[derive(Clone)]
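/// Allocator over a memory-mapped arena whose header starts at `address`.
///
/// A minimal usage sketch (not a doc-test; it assumes a nightly toolchain
/// with `allocator_api` and an `addr` obtained from an arena that is
/// already mapped writable in the current thread):
///
/// ```ignore
/// let a = new_allocator(addr);
/// let mut v: Vec<u8, Allocator> = Vec::new_in(a);
/// v.push(42);
/// ```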
pub struct Allocator {
    address: usize,
}

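/// Create an [`Allocator`] for the arena rooted at `address`.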
pub fn new_allocator(address: usize) -> Allocator {
    Allocator { address }
}

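// Panic unless `address` is the start of an arena registered in this
// thread's MEM_MAP, i.e. the current thread is allowed to write to it.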
fn check_mem_map_exists(address: usize) {
    let a = address as *const Void;
    assert!(MEM_MAP.with_borrow(|mb| {
        let c = mb.upper_bound(ops::Bound::Included(&a));
        if let Some((l, _)) = c.peek_prev() {
            *l == a
        } else { false }
    }), "tmfalloc::Allocator has been used in a non-writing thread");
}

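// All sizes handed to the free-list code below are rounded up to a
// multiple of ALLOCATION_QUANTUM, so any block can later be reused as a
// `FreeBlock` header.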
unsafe impl StdAllocator for Allocator {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        let s = layout.size() + layout.padding_needed_for(ALLOCATION_QUANTUM);
        check_mem_map_exists(self.address);
        let h = unsafe { (self.address as *mut HeaderOfHeader).as_mut() };
        allocate(s, h.unwrap())
    }
    unsafe fn deallocate(&self, p: NonNull<u8>, layout: Layout) {
        let s = layout.size() + layout.padding_needed_for(ALLOCATION_QUANTUM);
        check_mem_map_exists(self.address);
        assert!(s % ALLOCATION_QUANTUM == 0);
        let h = unsafe { (self.address as *mut HeaderOfHeader).as_mut() };
        let p = p.as_ptr() as *const FreeBlock;
        deallocate(s, p, h.unwrap())
    }
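    // Growing first tries to extend the allocation in place by consuming a
    // free block that directly follows it; otherwise it falls back to
    // allocate-copy-free.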
    unsafe fn grow(
        &self,
        p: NonNull<u8>,
        old: Layout,
        new: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        check_mem_map_exists(self.address);
        let os = old.size() + old.padding_needed_for(ALLOCATION_QUANTUM);
        let ns = new.size() + new.padding_needed_for(ALLOCATION_QUANTUM);
        if os == ns {
            return Ok(NonNull::slice_from_raw_parts(p, ns));
        }
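        // Is there a free block starting exactly at the end of this
        // allocation, large enough to cover the requested growth?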
        let h = (self.address as *mut HeaderOfHeader).as_mut().unwrap();
        let fbp = p.as_ptr() as *const FreeBlock;
        let cu = h.by_address.lower_bound(IntrusiveIncluded(&fbp));
        if let Some(u) = cu.get() {
            let ufb = u as *const FreeBlock;
            let us = os + (*ufb).size;
            if fbp.byte_add(os) == ufb && us >= ns {
                FreeBlock::finalize(h, ufb);
                if us > ns {
                    FreeBlock::initialize(h, ufb.byte_add(ns - os), us - ns);
                }
                return Ok(NonNull::slice_from_raw_parts(p, ns));
            }
        }
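        // No adjacent free space: move the allocation instead.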
        let r = allocate(ns, h)?;
        copy_nonoverlapping(p.as_ptr(), r.as_mut_ptr(), old.size());
        deallocate(os, fbp, h);
        Ok(r)
    }
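    // Shrinking releases the tail of the allocation back to the free
    // lists, merging it with a directly following free block if any.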
    unsafe fn shrink(
        &self,
        p: NonNull<u8>,
        old: Layout,
        new: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        check_mem_map_exists(self.address);
        let os = old.size() + old.padding_needed_for(ALLOCATION_QUANTUM);
        let ns = new.size() + new.padding_needed_for(ALLOCATION_QUANTUM);
        if os == ns {
            // Rounded sizes are equal: there is no tail to release.
            return Ok(NonNull::slice_from_raw_parts(p, ns));
        }
        let h = (self.address as *mut HeaderOfHeader).as_mut().unwrap();
        let fbp = p.as_ptr() as *const FreeBlock;
        assert!(ns >= ALLOCATION_QUANTUM);
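        // If a free block starts right at the end of the old allocation,
        // fold it into the block we are about to free.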
        let cu = h.by_address.lower_bound(IntrusiveIncluded(&fbp));
        let new_block_size = if let Some(u) = cu.get() {
            let ufb = u as *const FreeBlock;
            if fbp.byte_add(os) == ufb {
                FreeBlock::finalize(h, ufb);
                os - ns + (*ufb).size
            } else {
                os - ns
            }
        } else {
            os - ns
        };
        assert!(new_block_size >= ALLOCATION_QUANTUM);
        FreeBlock::initialize(h, fbp.byte_add(ns), new_block_size);
        Ok(NonNull::slice_from_raw_parts(p, ns))
    }
}

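/// Best-fit allocation: take the smallest free block of size at least `s`
/// (ties broken by lowest address) and return its tail to the free lists
/// if anything is left over.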
fn allocate(
    s: usize,
    h: &mut HeaderOfHeader,
) -> Result<NonNull<[u8]>, AllocError> {
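    // Smallest (size, address) pair with size >= s; null is the lowest
    // possible address, so ties prefer lower addresses.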
    let c = h
        .by_size_address
        .lower_bound(IntrusiveIncluded(&(s, std::ptr::null::<FreeBlock>())));
    match c.get() {
        None => Err(AllocError),
        Some(r) => {
            let p = r as *const FreeBlock;
            let rs = r.size;
            unsafe { FreeBlock::finalize(h, p) };
            if rs > s {
                let n = unsafe { p.byte_add(s) } as *mut FreeBlock;
                unsafe { FreeBlock::initialize(h, n, rs - s) };
            }
            Ok(NonNull::slice_from_raw_parts(
                NonNull::new(p as *mut u8).unwrap(),
                s,
            ))
        }
    }
}

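/// Return `s` bytes at `p` to the free lists, coalescing with the
/// neighboring free blocks on either side when they directly abut `p`.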
unsafe fn deallocate(s: usize, p: *const FreeBlock, h: &mut HeaderOfHeader) {
    let cu = h.by_address.lower_bound(IntrusiveIncluded(&p));
    let cl = h.by_address.upper_bound(IntrusiveIncluded(&p));
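    // ln: the free block just below `p`, if its end touches `p`.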
    let ln = match cl.get() {
        None => None,
        Some(l) => {
            if (l as *const FreeBlock).byte_add(l.size) < p {
                None
            } else {
                Some(l as *const FreeBlock)
            }
        }
    };
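    // un: the free block just above `p`, if it starts at `p + s`.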
    let un = match cu.get() {
        None => None,
        Some(u) => {
            if p.byte_add(s) < u as *const FreeBlock {
                None
            } else {
                Some((u as *const FreeBlock, u.size))
            }
        }
    };
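    // Four cases: no neighbors, upper only, lower only, or both. A lower
    // neighbor absorbs the freed bytes (and the upper block, if present);
    // its size changes, so it must be re-inserted into the size tree.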
    match ln {
        None => match un {
            None => FreeBlock::initialize(h, p, s),
            Some((up, us)) => {
                FreeBlock::finalize(h, up);
                FreeBlock::initialize(h, p, s + us);
            }
        },
        Some(lp) => {
            let lr = lp.cast_mut().as_mut().unwrap();
            h.by_size_address.cursor_mut_from_ptr(lp).remove();
            match un {
                None => lr.size += s,
                Some((up, us)) => {
                    FreeBlock::finalize(h, up);
                    lr.size += s + us;
                }
            };
            h.by_size_address.insert(unsafe { UnsafeRef::from_raw(lp) });
        }
    }
}