//! hipool 0.3.4
//!
//! Rust memory pool — allocation traits and reference allocator
//! implementations.
use crate::{stdlib, Error, Result};
use core::alloc::{GlobalAlloc, Layout};
use core::mem::{needs_drop, MaybeUninit};
use core::ptr::{self, NonNull};
use core::slice;

/// # Safety
/// Marker trait used together with [`Allocator`] to indicate that
/// `free_buf` may be called from a different thread than the one that
/// allocated. Pool implementations defer releasing resource memory until
/// the pool's own lifetime ends, so they also support this marker; whether
/// resources allocated from such a pool are `Send`/`Sync` is then decided
/// by the resource type alone, independent of the pool.
pub unsafe trait Pool {}

/// # Safety
/// Memory allocated here must be guaranteed to have a lifetime strictly
/// shorter than `Self`. Prefer the `Boxed` constructors, which uphold
/// this guarantee.
pub unsafe trait Allocator {
    /// # Safety
    /// The caller guarantees the allocated memory's lifetime is shorter than
    /// `self`.
    /// `f`: initialization callback; if initialization fails, the buffer is
    /// released automatically. In a pool scenario this prevents failed
    /// initializations from leaking large amounts of memory, because a pool
    /// otherwise frees everything only at the very end, in one batch.
    unsafe fn alloc_buf<F>(&self, layout: Layout, f: F) -> Result<NonNull<[u8]>>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<()>;

    /// # Safety
    /// `ptr` must be an address returned by `alloc_buf`, and `layout` must be
    /// exactly the layout that was passed to that `alloc_buf` call.
    unsafe fn free_buf(&self, ptr: NonNull<[u8]>, layout: Layout);
}

/// # Safety
/// Provides the common, concrete-type-aware memory allocation operations on
/// top of raw buffer allocation.
pub unsafe trait GenericAlloc {
    /// # Safety
    /// The caller guarantees the allocated memory's lifetime is shorter than
    /// `self`.
    unsafe fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>>;

    /// # Safety
    /// `ptr` must be an address obtained from `allocate`/`alloc_buf`.
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout);

    /// # Safety
    /// The caller guarantees the allocated memory's lifetime is shorter than
    /// `self`.
    unsafe fn init<T>(&self, val: T) -> Result<NonNull<T>>;

    /// # Safety
    /// The caller guarantees the allocated memory's lifetime is shorter than
    /// `self`.
    /// Initializing through a closure exists so that, in a pool scenario, the
    /// allocation order of multiple nested objects can be controlled: if
    /// `T`'s members also allocate from the same pool, `alloc_then`
    /// guarantees `T` is allocated before its members, whereas `init`
    /// necessarily allocates the members first and `T` last. The two orders
    /// can affect access patterns — and therefore performance — in different
    /// scenarios.
    unsafe fn alloc_then<T, F>(&self, f: F) -> Result<NonNull<T>>
    where
        F: FnOnce() -> Result<T>;

    /// # Safety
    /// The caller guarantees the allocated memory's lifetime is shorter than
    /// `self`.
    unsafe fn init_slice<T: Clone>(&self, len: usize, val: T) -> Result<NonNull<[T]>>;

    /// # Safety
    /// The caller guarantees the allocated memory's lifetime is shorter than
    /// `self`.
    unsafe fn alloc_slice_then<T, F>(&self, len: usize, f: F) -> Result<NonNull<[T]>>
    where
        F: FnMut(usize) -> Result<T>;

    /// # Safety
    /// The caller guarantees the allocated memory's lifetime is shorter than
    /// `self`.
    unsafe fn zeroed<T>(&self) -> Result<NonNull<MaybeUninit<T>>>;

    /// # Safety
    /// The caller guarantees the allocated memory's lifetime is shorter than
    /// `self`.
    unsafe fn zeroed_slice<T>(&self, len: usize) -> Result<NonNull<[MaybeUninit<T>]>>;

    /// # Safety
    /// The caller guarantees the allocated memory's lifetime is shorter than
    /// `self`.
    unsafe fn uninit<T>(&self) -> Result<NonNull<MaybeUninit<T>>>;

    /// # Safety
    /// The caller guarantees the allocated memory's lifetime is shorter than
    /// `self`.
    unsafe fn uninit_slice<T>(&self, len: usize) -> Result<NonNull<[MaybeUninit<T>]>>;

    /// # Safety
    /// `ptr` must be memory obtained from `allocate`; the layout derived from
    /// the type `T` must match the one used at allocation time.
    unsafe fn release<T>(&self, ptr: NonNull<T>);

    /// # Safety
    /// `ptr` must be memory obtained from `allocate`; `layout` must match the
    /// one used at allocation time.
    unsafe fn release_with<T: ?Sized>(&self, ptr: NonNull<T>, layout: Layout);

    /// # Safety
    /// `ptr` must be memory obtained from `allocate`; the layout derived from
    /// the element type and slice length must match the one used at
    /// allocation time.
    unsafe fn release_slice<T>(&self, ptr: NonNull<[T]>);

    /// # Safety
    /// `ptr` must be memory obtained from `allocate`; `layout` must match the
    /// one used at allocation time.
    unsafe fn release_slice_with<T>(&self, ptr: NonNull<[T]>, layout: Layout);
}

// Blanket implementation: any `Allocator` automatically provides the typed
// `GenericAlloc` convenience API.
unsafe impl<A: ?Sized + Allocator> GenericAlloc for A {
    unsafe fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>> {
        // Raw allocation with a no-op initializer.
        self.alloc_buf(layout, |_| Ok(()))
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        // Rebuild the fat (pointer, len) slice pointer that `free_buf`
        // expects from the thin pointer plus the layout's size.
        let slice = slice::from_raw_parts(ptr, layout.size());
        self.free_buf(NonNull::from(slice), layout)
    }

    unsafe fn init<T>(&self, val: T) -> Result<NonNull<T>> {
        // Allocate room for one `T` and move `val` into it. The write
        // closure is infallible here, so `alloc_buf`'s error-cleanup path is
        // never taken.
        let layout = Layout::new::<T>();
        self.alloc_buf(layout, |ptr| {
            unsafe { ptr.cast::<T>().as_ptr().write(val) };
            Ok(())
        })
        .map(|ptr| ptr.cast::<T>())
    }

    unsafe fn alloc_then<T, F>(&self, f: F) -> Result<NonNull<T>>
    where
        F: FnOnce() -> Result<T>,
    {
        // Allocate first, then run the producer. If `f()` fails, the `?`
        // makes the closure return `Err` and `alloc_buf` frees the buffer.
        let layout = Layout::new::<T>();
        self.alloc_buf(layout, |ptr| {
            unsafe { ptr.cast::<T>().as_ptr().write(f()?) }
            Ok(())
        })
        .map(|ptr| ptr.cast::<T>())
    }

    unsafe fn init_slice<T: Clone>(&self, len: usize, val: T) -> Result<NonNull<[T]>> {
        // An overflowing `len * size_of::<T>()` is reported as an invalid
        // argument rather than panicking.
        let layout = Layout::array::<T>(len).map_err(|_| Error::inval())?;
        self.alloc_buf(layout, |ptr| {
            let ptr = ptr.cast::<T>().as_ptr();
            // NOTE(review): if `val.clone()` panics part-way through, the
            // elements already written are not dropped here — confirm this is
            // acceptable for the pool's deferred-release model.
            for offset in 0..len {
                unsafe { ptr.add(offset).write(val.clone()) };
            }
            Ok(())
        })
        .map(|ptr| {
            // Reattach the length to form the fat slice pointer.
            let ptr = ptr.cast::<T>().as_ptr();
            NonNull::from(unsafe { slice::from_raw_parts(ptr, len) })
        })
    }

    unsafe fn alloc_slice_then<T, F>(&self, len: usize, mut f: F) -> Result<NonNull<[T]>>
    where
        F: FnMut(usize) -> Result<T>,
    {
        let layout = Layout::array::<T>(len).map_err(|_| Error::inval())?;
        self.alloc_buf(layout, |ptr| {
            let ptr = ptr.cast::<T>().as_ptr();
            for offset in 0..len {
                // If producing element `offset` fails, drop the elements
                // already constructed (in reverse order) before bubbling the
                // error up; `alloc_buf` then frees the buffer itself.
                let val = f(offset).map_err(|e| {
                    if needs_drop::<T>() {
                        for n in (0..offset).rev() {
                            unsafe { ptr.add(n).drop_in_place() };
                        }
                    }
                    e
                })?;
                unsafe { ptr.add(offset).write(val) };
            }
            Ok(())
        })
        .map(|ptr| {
            let ptr = ptr.cast::<T>().as_ptr();
            NonNull::from(unsafe { slice::from_raw_parts(ptr, len) })
        })
    }

    unsafe fn zeroed<T>(&self) -> Result<NonNull<MaybeUninit<T>>> {
        // Write a zero-initialized `MaybeUninit<T>`; its layout is identical
        // to `T`'s, so the allocation size matches `Layout::new::<T>()`.
        self.alloc_then::<_, _>(|| Ok(MaybeUninit::<T>::zeroed()))
            .map(|ptr| ptr.cast::<MaybeUninit<T>>())
    }

    unsafe fn zeroed_slice<T>(&self, len: usize) -> Result<NonNull<[MaybeUninit<T>]>> {
        // Zero every element through the fallible-slice path, then rebuild
        // the fat pointer with the requested length.
        self.alloc_slice_then::<_, _>(len, |_| Ok(MaybeUninit::<T>::zeroed()))
            .map(|ptr| {
                let ptr = ptr.cast::<MaybeUninit<T>>().as_ptr();
                NonNull::from(unsafe { slice::from_raw_parts(ptr, len) })
            })
    }

    unsafe fn uninit<T>(&self) -> Result<NonNull<MaybeUninit<T>>> {
        // Reserve space only; the caller is responsible for initialization.
        self.alloc_then::<_, _>(|| Ok(MaybeUninit::<T>::uninit()))
            .map(|ptr| ptr.cast::<MaybeUninit<T>>())
    }

    unsafe fn uninit_slice<T>(&self, len: usize) -> Result<NonNull<[MaybeUninit<T>]>> {
        // Reserve space for `len` elements without initializing any of them.
        self.alloc_slice_then::<_, _>(len, |_| Ok(MaybeUninit::<T>::uninit()))
            .map(|ptr| {
                let ptr = ptr.cast::<MaybeUninit<T>>().as_ptr();
                NonNull::from(unsafe { slice::from_raw_parts(ptr, len) })
            })
    }

    unsafe fn release<T>(&self, ptr: NonNull<T>) {
        // Frees the memory only — `T`'s destructor is NOT run here.
        // The layout is recomputed from the static type and must match the
        // one used at allocation.
        let layout = Layout::new::<T>();
        let ptr = unsafe { slice::from_raw_parts(ptr.cast::<u8>().as_ptr(), layout.size()) };
        self.free_buf(NonNull::from(ptr), layout);
    }

    unsafe fn release_with<T: ?Sized>(&self, ptr: NonNull<T>, layout: Layout) {
        // Caller-supplied layout variant for unsized / type-erased pointers;
        // memory only, no destructor.
        let ptr = unsafe { slice::from_raw_parts(ptr.cast::<u8>().as_ptr(), layout.size()) };
        self.free_buf(NonNull::from(ptr), layout);
    }

    unsafe fn release_slice<T>(&self, ptr: NonNull<[T]>) {
        // The length stored in the fat pointer recovers the array layout.
        // `unwrap` cannot fail for a layout that was successfully allocated.
        let layout = Layout::array::<T>(unsafe { ptr.as_ref() }.len()).unwrap();
        let ptr = unsafe { slice::from_raw_parts(ptr.cast::<u8>().as_ptr(), layout.size()) };
        self.free_buf(NonNull::from(ptr), layout);
    }

    unsafe fn release_slice_with<T>(&self, ptr: NonNull<[T]>, layout: Layout) {
        // Caller-supplied layout variant; memory only, no destructors.
        let ptr = unsafe { slice::from_raw_parts(ptr.cast::<u8>().as_ptr(), layout.size()) };
        self.free_buf(NonNull::from(ptr), layout);
    }
}

/// Default allocator backed by the C-style aligned allocation routines in
/// `stdlib`; usable both as a `GlobalAlloc` and as an `Allocator`.
///
/// Zero-sized and stateless: copies are free and interchangeable.
#[derive(Copy, Clone, Debug, Default, Hash, PartialEq, PartialOrd, Ord, Eq)]
pub struct PoolAlloc;

// SAFETY: `PoolAlloc` is a zero-sized, stateless handle over the process
// allocator, so sharing or sending it across threads is sound. The explicit
// impls are redundant for a fieldless struct (the auto impls would apply)
// but document the intent.
unsafe impl Send for PoolAlloc {}
unsafe impl Sync for PoolAlloc {}
// SAFETY: `free_buf` goes straight to the global deallocator, which may be
// called from any thread.
unsafe impl Pool for PoolAlloc {}

/// Route the standard `GlobalAlloc` interface to the C-style aligned
/// allocation helpers in `stdlib`.
unsafe impl GlobalAlloc for PoolAlloc {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        // NOTE(review): C11 `aligned_alloc` requires `size` to be a multiple
        // of `align`; confirm `stdlib::aligned_alloc` lifts that restriction
        // or that callers never pass a non-multiple size.
        let align = layout.align();
        let size = layout.size();
        stdlib::aligned_alloc(align, size)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) {
        // Releasing aligned memory needs only the pointer, not the layout.
        stdlib::aligned_free(ptr);
    }
}

unsafe impl Allocator for PoolAlloc {
    unsafe fn alloc_buf<F>(&self, layout: Layout, f: F) -> Result<NonNull<[u8]>>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<()>,
    {
        let ptr = GlobalAlloc::alloc(self, layout);
        if !ptr.is_null() {
            let ptr = NonNull::from(slice::from_raw_parts(ptr, layout.size()));
            f(ptr).map_err(|e| {
                GlobalAlloc::dealloc(self, ptr.cast::<u8>().as_ptr(), layout);
                e
            })?;
            Ok(ptr)
        } else {
            Err(Error::last())
        }
    }
    unsafe fn free_buf(&self, ptr: NonNull<[u8]>, layout: Layout) {
        GlobalAlloc::dealloc(self, ptr.cast::<u8>().as_ptr(), layout)
    }
}

/// An allocator that always fails: `alloc` returns null and `alloc_buf`
/// returns an error, while deallocation is a no-op. Useful as a placeholder
/// where no allocation must ever occur.
#[derive(Copy, Clone, Debug, Default, Hash, PartialEq, PartialOrd, Ord, Eq)]
pub struct NullAlloc;

// SAFETY: `NullAlloc` is a zero-sized type with no state at all, so sharing
// or sending it across threads is trivially sound (the auto impls would
// apply anyway; these make the intent explicit).
unsafe impl Send for NullAlloc {}
unsafe impl Sync for NullAlloc {}
// SAFETY: `free_buf` is a no-op, so calling it from any thread is fine.
unsafe impl Pool for NullAlloc {}

/// `GlobalAlloc` facade for `NullAlloc`: every request fails.
unsafe impl GlobalAlloc for NullAlloc {
    unsafe fn alloc(&self, _request: Layout) -> *mut u8 {
        // Report failure unconditionally.
        ptr::null_mut()
    }

    unsafe fn dealloc(&self, _block: *mut u8, _request: Layout) {
        // Nothing was ever handed out, so there is nothing to reclaim.
    }
}

/// `Allocator` facade for `NullAlloc`: every allocation is refused.
unsafe impl Allocator for NullAlloc {
    unsafe fn alloc_buf<F>(&self, _request: Layout, _init: F) -> Result<NonNull<[u8]>>
    where
        F: FnOnce(NonNull<[u8]>) -> Result<()>,
    {
        // Fail before the initializer ever runs.
        Err(Error::default())
    }

    unsafe fn free_buf(&self, _buf: NonNull<[u8]>, _request: Layout) {
        // No-op: this allocator never hands out memory.
    }
}