mod heap;

pub use self::heap::Heap;

use std::{mem, ptr};
use std::sync::atomic::{self, AtomicUsize, Ordering};

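/// Allocates `len` bytes on the default Rust heap allocator and returns a
/// ref-counted `MemRef` to the new region. A minimal usage sketch (marked
/// `ignore` since it assumes `Heap` records the requested `len` in the
/// header):
///
/// ```ignore
/// let mut mem = heap(1024);
/// assert_eq!(1024, mem.bytes().len());
/// mem.bytes_mut()[0] = 42;
/// ```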
pub fn heap(len: usize) -> MemRef {
    Heap.allocate(len)
}

/// Allocates memory to be used by `Buf`s or `Bytes`. Allows allocating
/// memory using strategies other than the default Rust heap allocator,
/// and does not require that an allocation be contiguous in memory.
///
/// For example, an alternate allocator could use a slab of 4kb chunks of
/// memory and return as many chunks as needed to satisfy the length
/// requirement.
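///
/// A minimal sketch of a custom implementation (`SlabAllocator` is
/// hypothetical, not part of this module). Any implementation must write a
/// `Mem` header at the start of each allocation, since `MemRef` reads its
/// metadata from there, and must outlive the memory it hands out:
///
/// ```ignore
/// struct SlabAllocator;
///
/// impl Allocator for SlabAllocator {
///     fn allocate(&self, len: usize) -> MemRef {
///         // Reserve size_of::<Mem>() + len bytes from the slab, write
///         // `Mem::new(len, self as *const Allocator)` at the front,
///         // then hand the header pointer to `MemRef::new`.
///         unimplemented!()
///     }
///
///     fn deallocate(&self, mem: *mut Mem) {
///         // Return the chunk(s) backing `mem` to the slab.
///     }
/// }
/// ```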
pub trait Allocator: Sync + Send {

    /// Allocate `len` bytes of memory. The returned memory may or may not
    /// be contiguous.
    fn allocate(&self, len: usize) -> MemRef;

    /// Deallocate a chunk of memory previously returned by `allocate`.
    fn deallocate(&self, mem: *mut Mem);
}

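/// A ref-counted handle into memory laid out as `[Mem header | user data]`.
/// The stored pointer addresses the first byte of user data; the header is
/// recovered by stepping back `size_of::<Mem>()` bytes.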
pub struct MemRef {
    ptr: *mut u8,
}

impl MemRef {
    /// Wraps a pointer to an initialized `Mem` header; the resulting ref
    /// points just past the header, at the first byte of user data.
    pub fn new(mem: *mut Mem) -> MemRef {
        let ptr = mem as *mut u8;

        unsafe {
            MemRef {
                ptr: ptr.offset(mem::size_of::<Mem>() as isize),
            }
        }
    }

    #[inline]
    pub fn none() -> MemRef {
        MemRef { ptr: ptr::null_mut() }
    }

    #[inline]
    pub fn is_none(&self) -> bool {
        self.ptr.is_null()
    }

    #[inline]
    pub fn ptr(&self) -> *mut u8 {
        self.ptr
    }

    #[inline]
    pub fn bytes(&self) -> &[u8] {
        use std::slice;
        unsafe {
            slice::from_raw_parts(self.ptr(), self.mem().len)
        }
    }

    #[inline]
    pub fn bytes_mut(&mut self) -> &mut [u8] {
        use std::slice;
        unsafe {
            slice::from_raw_parts_mut(self.ptr(), self.mem().len)
        }
    }

    #[inline]
    fn mem_ptr(&self) -> *mut Mem {
        unsafe {
            self.ptr.offset(-(mem::size_of::<Mem>() as isize)) as *mut Mem
        }
    }

    #[inline]
    fn mem(&self) -> &Mem {
        // A reborrow of the raw pointer is equivalent to, and clearer
        // than, transmuting it into a reference.
        unsafe { &*self.mem_ptr() }
    }
}

impl Clone for MemRef {
    #[inline]
    fn clone(&self) -> MemRef {
        self.mem().refs.fetch_add(1, Ordering::Relaxed);
        MemRef { ptr: self.ptr }
    }
}

impl Drop for MemRef {
    fn drop(&mut self) {
        // Guard against a `MemRef::none()` handle
        if self.ptr.is_null() { return; }

        // Decrement the ref count. `Release` ensures that all prior writes
        // through this handle happen-before the deallocation below.
        if 1 == self.mem().refs.fetch_sub(1, Ordering::Release) {
            // Last ref dropped; synchronize with the other decrements
            // before freeing the memory (the same protocol `Arc` uses).
            atomic::fence(Ordering::Acquire);

            unsafe {
                let alloc: &Allocator = &*self.mem().allocator;
                alloc.deallocate(self.mem_ptr());
            }
        }
    }
}

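// The ref count is atomic, so handles may be moved and shared across
// threads; the module relies on callers not to race on `bytes_mut`.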
unsafe impl Send for MemRef { }
unsafe impl Sync for MemRef { }

/// Header that must prefix any memory handed out by an `Allocator`; it
/// records the owning allocator, the atomic ref count, and the length of
/// the user data that follows.
pub struct Mem {
    // TODO: It should be possible to reduce the size of this struct
    allocator: *const Allocator,
    refs: AtomicUsize,
    len: usize,
}

impl Mem {
    /// Creates a header for `len` bytes of user data owned by `allocator`,
    /// starting with a ref count of one.
    pub fn new(len: usize, allocator: *const Allocator) -> Mem {
        Mem {
            allocator: allocator,
            refs: AtomicUsize::new(1),
            len: len,
        }
    }
}
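
#[cfg(test)]
mod test {
    use super::*;

    // A minimal smoke-test sketch of the ref-counting lifecycle. It
    // assumes `Heap::allocate` records the requested `len` in the `Mem`
    // header (the `heap` submodule is not shown in this file).
    #[test]
    fn heap_alloc_clone_drop() {
        let mut mem = heap(64);
        assert!(!mem.is_none());
        assert_eq!(64, mem.bytes().len());

        mem.bytes_mut()[0] = 42;
        assert_eq!(42, mem.bytes()[0]);

        // Cloning bumps the ref count; the memory is freed exactly once,
        // when the last handle is dropped.
        let dup = mem.clone();
        drop(mem);
        assert_eq!(42, dup.bytes()[0]);
    }
}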