mod abort;
mod layout;

pub use self::{
    abort::AbortAlloc,
    layout::{LayoutErr, NonZeroLayout},
};
pub use core::alloc::GlobalAlloc;
use core::{
    cmp,
    fmt,
    num::NonZeroUsize,
    ptr::{self, NonNull},
};
pub use liballoc::alloc::{alloc, alloc_zeroed, dealloc, realloc};
#[cfg(feature = "std")]
use std::alloc::System;

/// Error indicating that a capacity or layout computation overflowed.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct CapacityOverflow;

impl From<core::alloc::LayoutErr> for CapacityOverflow {
    #[inline]
    #[must_use]
    fn from(_: core::alloc::LayoutErr) -> Self {
        Self
    }
}

impl From<LayoutErr> for CapacityOverflow {
    #[inline]
    #[must_use]
    fn from(_: LayoutErr) -> Self {
        Self
    }
}
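
// Illustrative sketch (test-only) of the conversions above: fallible layout
// arithmetic can be bubbled up with `?` into a `CapacityOverflow` error. The
// helper below is hypothetical and exists only to demonstrate the `From` impls.
#[cfg(test)]
mod capacity_overflow_sketch {
    use super::*;

    // Hypothetical helper: compute a byte layout, mapping any layout error to
    // `CapacityOverflow` via the `From<core::alloc::LayoutErr>` impl above.
    fn bytes_layout(size: usize) -> Result<core::alloc::Layout, CapacityOverflow> {
        Ok(core::alloc::Layout::from_size_align(size, 8)?)
    }

    #[test]
    fn overflowing_size_becomes_capacity_overflow() {
        // `usize::max_value()` cannot be rounded up to an 8-byte boundary, so the
        // layout computation fails and `?` converts the error.
        assert_eq!(bytes_layout(usize::max_value()), Err(CapacityOverflow));
        assert!(bytes_layout(64).is_ok());
    }
}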

/// A builder that can reconstruct an allocator reference from a pointer it
/// previously handed out; the counterpart of `DeallocRef::get_build_alloc`.
pub trait BuildAllocRef: Sized {
    type Ref: DeallocRef<BuildAlloc = Self>;

    #[must_use]
    /// # Safety
    ///
    /// * `ptr` must denote a block of memory currently allocated via this allocator
    /// * `layout` must *fit* that block of memory
    /// * the alignment of the `layout` must match the alignment used to allocate that block of
    ///   memory
    unsafe fn build_alloc_ref(
        &mut self,
        ptr: NonNull<u8>,
        layout: Option<NonZeroLayout>,
    ) -> Self::Ref;
}

/// An allocator reference that can release memory it previously allocated.
pub trait DeallocRef: Sized {
    type BuildAlloc: BuildAllocRef<Ref = Self>;

    fn get_build_alloc(&mut self) -> Self::BuildAlloc;

    /// # Safety
    ///
    /// * `ptr` must denote a block of memory currently allocated via this allocator
    /// * `layout` must *fit* that block of memory
    /// * the alignment of the `layout` must match the alignment used to allocate that block of
    ///   memory
    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: NonZeroLayout);
}
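
// Illustrative sketch (test-only): `DeallocRef` and `BuildAllocRef` are meant to
// round-trip. An allocator hands out its builder via `get_build_alloc`, and the
// builder can later rebuild an allocator reference for a pointer that allocator
// returned. For a stateless allocator such as `Global` (defined below) both sides
// are the same zero-sized type, so the round-trip carries no state.
#[cfg(test)]
mod build_alloc_sketch {
    use super::*;

    #[test]
    fn global_hands_out_its_builder() {
        let mut a = Global;
        let _builder: <Global as DeallocRef>::BuildAlloc = a.get_build_alloc();
        // `_builder.build_alloc_ref(ptr, Some(layout))` would then rebuild an
        // allocator reference for a block previously returned by `a.alloc(layout)`.
    }
}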

/// An allocator reference that can allocate new blocks of memory.
pub trait AllocRef: DeallocRef {
    type Error;

    fn alloc(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error>;

    fn alloc_zeroed(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
        let size = layout.size();
        let p = self.alloc(layout)?;
        unsafe {
            ptr::write_bytes(p.as_ptr(), 0, size.get());
        }
        Ok(p)
    }

    /// Returns bounds on the usable size of an allocation made with `layout`.
    ///
    /// The returned pair `(min, max)` means that any size in `min..=max` (with the
    /// same alignment) *fits* the allocated block. This blanket default reports no
    /// slack: exactly `layout.size()`.
    fn usable_size(&self, layout: NonZeroLayout) -> (usize, usize) {
        (layout.size().get(), layout.size().get())
    }

    /// # Safety
    ///
    /// * `ptr` must be currently allocated via this allocator
    /// * `layout` must *fit* the `ptr` (see above); note the `new_size` argument need not fit it
    /// * `new_size` must not be less than `layout.size()`
    unsafe fn grow_in_place(
        &mut self,
        ptr: NonNull<u8>,
        layout: NonZeroLayout,
        new_size: NonZeroUsize,
    ) -> bool {
        let _ = ptr; // this default implementation doesn't care about the actual address.
        debug_assert!(new_size.get() >= layout.size().get());
        let (_l, u) = self.usable_size(layout);
        // _l <= layout.size()              [guaranteed by usable_size()]
        //       layout.size() <= new_size  [required by this method]
        new_size.get() <= u
    }

    /// # Safety
    ///
    /// * `ptr` must be currently allocated via this allocator
    /// * `layout` must *fit* the `ptr` (see above); note the `new_size` argument need not fit it
    /// * `new_size` must not be greater than `layout.size()` (and must be greater than zero)
    unsafe fn shrink_in_place(
        &mut self,
        ptr: NonNull<u8>,
        layout: NonZeroLayout,
        new_size: NonZeroUsize,
    ) -> bool {
        let _ = ptr; // this default implementation doesn't care about the actual address.
        debug_assert!(new_size.get() <= layout.size().get());
        let (l, _u) = self.usable_size(layout);
        //             layout.size() <= _u  [guaranteed by usable_size()]
        // new_size <= layout.size()        [required by this method]
        l <= new_size.get()
    }
}

/// An allocator reference that can resize (reallocate) existing blocks.
pub trait ReallocRef: AllocRef {
    /// # Safety
    ///
    /// * `ptr` must be currently allocated via this allocator,
    /// * `old_layout` must *fit* the `ptr` (see above). (The `new_layout` argument
    ///   need not fit it.)
    /// * `new_layout.size()`, when rounded up to the nearest multiple of
    ///   `new_layout.align()`, must not overflow (i.e., the rounded value must be
    ///   less than `usize::MAX`).
    ///
    /// (Extension subtraits might provide more specific bounds on
    /// behavior, e.g., guarantee a sentinel address or a null pointer
    /// in response to a zero-size allocation request.)
    ///
    /// # Errors
    ///
    /// Returns `Err` only if the new layout does not meet the allocator's size
    /// and alignment constraints, or if reallocation otherwise fails.
    ///
    /// Implementations are encouraged to return `Err` on memory
    /// exhaustion rather than panicking or aborting, but this is not
    /// a strict requirement. (Specifically: it is *legal* to
    /// implement this trait atop an underlying native allocation
    /// library that aborts on memory exhaustion.)
    unsafe fn realloc(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: NonZeroLayout,
        new_layout: NonZeroLayout,
    ) -> Result<NonNull<u8>, Self::Error> {
        let old_size = old_layout.size();
        let new_size = new_layout.size();

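        // With matching alignment, first try to resize in place: grow when the new
        // size is strictly larger, shrink when it is strictly smaller. If neither
        // applies (the sizes are equal) or the in-place attempt fails, fall back to
        // allocate, copy, and deallocate below.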
        if old_layout.align() == new_layout.align()
            && ((new_size > old_size && self.grow_in_place(ptr, old_layout, new_size))
                || (new_size < old_size && self.shrink_in_place(ptr, old_layout, new_size)))
        {
            return Ok(ptr);
        }

        alloc_copy_dealloc(self, ptr, old_layout, new_layout)
    }
}

/// The `AllocErr` error indicates an allocation failure that may be due to
/// resource exhaustion or to the given input arguments being incompatible with
/// this allocator.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct AllocErr;

impl fmt::Display for AllocErr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("memory allocation failed")
    }
}

/// The global memory allocator.
///
/// This type implements the allocation traits by forwarding calls to the
/// allocator registered with the `#[global_allocator]` attribute, if there is
/// one, or to the `std` crate’s default allocator otherwise.
#[derive(Copy, Clone, Default, Debug)]
pub struct Global;

// Implements `BuildAllocRef` for stateless (zero-sized) allocator types, for which
// the builder and the allocator reference are the same ZST.
macro_rules! impl_buildalloc_alloc_zst {
    ($ty:tt) => {
        impl BuildAllocRef for $ty {
            type Ref = Self;

            unsafe fn build_alloc_ref(
                &mut self,
                _ptr: NonNull<u8>,
                _layout: Option<NonZeroLayout>,
            ) -> Self::Ref {
                Self
            }
        }
    };
}

impl_buildalloc_alloc_zst!(Global);
#[cfg(feature = "std")]
impl_buildalloc_alloc_zst!(System);

impl DeallocRef for Global {
    type BuildAlloc = Self;

    fn get_build_alloc(&mut self) -> Self::BuildAlloc {
        Self
    }

    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: NonZeroLayout) {
        dealloc(ptr.as_ptr(), layout.into())
    }
}

impl AllocRef for Global {
    type Error = AllocErr;

    fn alloc(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
        unsafe { NonNull::new(alloc(layout.into())).ok_or(AllocErr) }
    }

    fn alloc_zeroed(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
        unsafe { NonNull::new(alloc_zeroed(layout.into())).ok_or(AllocErr) }
    }
}

impl ReallocRef for Global {
    unsafe fn realloc(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: NonZeroLayout,
        new_layout: NonZeroLayout,
    ) -> Result<NonNull<u8>, Self::Error> {
        // FIXME: Remove the `else` branch. It is needed because std's `realloc`
        // only takes a new size and keeps the old alignment, so alignment changes
        // must go through allocate-copy-deallocate.
        if old_layout.align() == new_layout.align() {
            NonNull::new(realloc(
                ptr.as_ptr(),
                old_layout.into(),
                new_layout.size().get(),
            ))
            .ok_or(AllocErr)
        } else {
            alloc_copy_dealloc(self, ptr, old_layout, new_layout)
        }
    }
}
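
// Usage sketch (test-only) for `Global` through the traits above: allocate, grow
// via `realloc`, then free. The one assumption here is how a `NonZeroLayout` is
// constructed; this sketch goes through a `TryFrom<core::alloc::Layout>`
// conversion (suggested by the `LayoutErr` conversions at the top of this file)
// and should be adjusted to the real constructor exposed by `layout.rs`.
#[cfg(test)]
mod global_usage_sketch {
    use super::*;
    use core::convert::TryFrom;

    // Assumed constructor; see the note above.
    fn layout(size: usize, align: usize) -> NonZeroLayout {
        let layout = core::alloc::Layout::from_size_align(size, align).unwrap();
        NonZeroLayout::try_from(layout).ok().expect("layout must be non-zero")
    }

    #[test]
    fn alloc_grow_dealloc() {
        let small = layout(16, 8);
        let large = layout(32, 8);

        let mut a = Global;
        let ptr = a.alloc(small).expect("allocation failed");
        unsafe {
            // Fill the 16 bytes we own, then grow the block. The alignments match,
            // so `Global::realloc` forwards to the global `realloc` directly.
            core::ptr::write_bytes(ptr.as_ptr(), 0xAB, small.size().get());
            let grown = a.realloc(ptr, small, large).expect("reallocation failed");
            a.dealloc(grown, large);
        }
    }
}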

#[cfg(feature = "std")]
impl DeallocRef for System {
    type BuildAlloc = Self;

    fn get_build_alloc(&mut self) -> Self::BuildAlloc {
        Self
    }

    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: NonZeroLayout) {
        GlobalAlloc::dealloc(self, ptr.as_ptr(), layout.into())
    }
}

#[cfg(feature = "std")]
impl AllocRef for System {
    type Error = AllocErr;

    fn alloc(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
        unsafe { NonNull::new(GlobalAlloc::alloc(self, layout.into())).ok_or(AllocErr) }
    }

    fn alloc_zeroed(&mut self, layout: NonZeroLayout) -> Result<NonNull<u8>, Self::Error> {
        unsafe { NonNull::new(GlobalAlloc::alloc_zeroed(self, layout.into())).ok_or(AllocErr) }
    }
}

#[cfg(feature = "std")]
impl ReallocRef for System {
    // FIXME: Remove the `else` branch. It is needed because `GlobalAlloc::realloc`
    // only takes a new size and keeps the old alignment, so alignment changes must
    // go through allocate-copy-deallocate.
    unsafe fn realloc(
        &mut self,
        ptr: NonNull<u8>,
        old_layout: NonZeroLayout,
        new_layout: NonZeroLayout,
    ) -> Result<NonNull<u8>, Self::Error> {
        if old_layout.align() == new_layout.align() {
            NonNull::new(GlobalAlloc::realloc(
                self,
                ptr.as_ptr(),
                old_layout.into(),
                new_layout.size().get(),
            ))
            .ok_or(AllocErr)
        } else {
            alloc_copy_dealloc(self, ptr, old_layout, new_layout)
        }
    }
}

#[inline]
unsafe fn alloc_copy_dealloc<A: ReallocRef>(
    alloc: &mut A,
    ptr: NonNull<u8>,
    old_layout: NonZeroLayout,
    new_layout: NonZeroLayout,
) -> Result<NonNull<u8>, A::Error> {
    let result = alloc.alloc(new_layout);

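    // On success, move the old contents (up to the smaller of the two sizes) into
    // the new block and release the old one; on failure the old allocation is left
    // untouched and the error is returned as-is.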
    if let Ok(new_ptr) = result {
        ptr::copy_nonoverlapping(
            ptr.as_ptr(),
            new_ptr.as_ptr(),
            cmp::min(old_layout.size().get(), new_layout.size().get()),
        );
        alloc.dealloc(ptr, old_layout);
    }
    result
}