use core::cmp;
use core::marker::PhantomData;
use core::mem;
use core::ops::Drop;
use core::ptr::{self, NonNull};
use core::slice;
use allocator::{Alloc, Layout};
use boxed::Box;
use allocator::CollectionAllocErr;
use allocator::CollectionAllocErr::*;
/// A low-level buffer abstraction: an owned pointer + capacity + allocator,
/// with growth/shrink logic, but no knowledge of which elements are
/// initialized. Higher-level containers (Vec-like types) build on this.
#[allow(missing_debug_implementations)]
pub struct RawVec<T, A: Alloc> {
    // Pointer to the buffer; never null — dangling when nothing is allocated.
    ptr: NonNull<T>,
    // Signals that this type logically owns `T` values (PhantomData<T>
    // ownership marker), even though only a raw pointer is stored.
    marker: PhantomData<T>,
    // Capacity in elements. For zero-sized `T` this is set to `!0`
    // (see `new_in`); otherwise it is the real allocated capacity.
    cap: usize,
    // The allocator every (de/re)allocation goes through.
    a: A,
}
impl<T, A: Alloc> RawVec<T, A> {
    /// Creates a `RawVec` backed by allocator `a` without allocating.
    ///
    /// For zero-sized `T` the capacity is set to `!0` (usize::MAX) — a ZST
    /// buffer never needs real memory; otherwise capacity starts at 0 and
    /// the pointer is dangling until the first allocation.
    pub fn new_in(a: A) -> Self {
        let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
        RawVec {
            ptr: NonNull::dangling(),
            marker: PhantomData,
            cap,
            a,
        }
    }

    /// Allocates room for exactly `cap` elements in `a`, uninitialized.
    ///
    /// Panics on capacity overflow; calls `a.oom` on allocation failure.
    #[inline]
    pub fn with_capacity_in(cap: usize, a: A) -> Self {
        RawVec::allocate_in(cap, false, a)
    }

    /// Allocates room for exactly `cap` elements in `a`, zero-filled.
    #[inline]
    pub fn with_capacity_zeroed_in(cap: usize, a: A) -> Self {
        RawVec::allocate_in(cap, true, a)
    }

    /// Shared implementation behind the `with_capacity*` constructors.
    ///
    /// # Panics
    /// Panics with "capacity overflow" if `cap * size_of::<T>()` overflows
    /// or trips `alloc_guard`; diverges via `a.oom` if the allocator fails.
    fn allocate_in(cap: usize, zeroed: bool, mut a: A) -> Self {
        unsafe {
            let elem_size = mem::size_of::<T>();
            // Total byte size, with explicit overflow and max-size checks
            // performed *before* touching the allocator.
            let alloc_size = cap.checked_mul(elem_size).expect("capacity overflow");
            alloc_guard(alloc_size).expect("capacity overflow");
            // Zero-byte requests (cap == 0 or ZST) never hit the allocator:
            // use the alignment as a well-aligned dangling pointer value.
            let ptr = if alloc_size == 0 {
                mem::align_of::<T>() as *mut u8
            } else {
                let align = mem::align_of::<T>();
                let result = if zeroed {
                    a.alloc_zeroed(Layout::from_size_align(alloc_size, align).unwrap())
                } else {
                    a.alloc(Layout::from_size_align(alloc_size, align).unwrap())
                };
                match result {
                    Ok(ptr) => ptr,
                    // Allocation failure is fatal here (oom diverges).
                    Err(err) => a.oom(err),
                }
            };
            RawVec {
                // Non-null by construction: either the allocator's pointer
                // or the (non-zero) alignment value.
                ptr: NonNull::new_unchecked(ptr as *mut _),
                marker: PhantomData,
                cap,
                a,
            }
        }
    }
}
impl<T, A: Alloc> RawVec<T, A> {
    /// Reconstitutes a `RawVec` from its raw components.
    ///
    /// # Safety
    /// `ptr` must be non-null and must denote a buffer of `cap` elements
    /// previously obtained from allocator `a` (or a dangling, well-aligned
    /// pointer paired with a zero allocation size), since `Drop` will hand
    /// exactly that layout back to `a`.
    pub unsafe fn from_raw_parts_in(ptr: *mut T, cap: usize, a: A) -> Self {
        // Caller guarantees non-null, so this conversion is sound.
        let ptr = NonNull::new_unchecked(ptr);
        RawVec { a, cap, marker: PhantomData, ptr }
    }
}
impl<T, A: Alloc> RawVec<T, A> {
    /// Converts a boxed slice into a `RawVec`, reusing its allocation.
    ///
    /// The box's length becomes the capacity.
    /// NOTE(review): this is only sound if the box's allocation is exactly
    /// `len` elements (no excess capacity) — presumably guaranteed by this
    /// crate's `Box`; confirm against its docs.
    pub fn from_box(mut slice: Box<[T], A>) -> Self {
        unsafe {
            // Bitwise-copy the allocator out of the box. From here on the
            // allocator is logically owned twice, which is why the box must
            // be forgotten (not dropped) below.
            let a = ptr::read(&slice.a);
            let result = RawVec::from_raw_parts_in(slice.as_mut_ptr(), slice.len(), a);
            // Prevent the box's Drop from freeing the buffer and dropping
            // the allocator a second time.
            mem::forget(slice);
            result
        }
    }
}
impl<T, A: Alloc> RawVec<T, A> {
    /// Raw pointer to the start of the buffer.
    ///
    /// Never null; dangling (but aligned) when nothing has been allocated.
    pub fn ptr(&self) -> *mut T {
        self.ptr.as_ptr()
    }

    /// Capacity in elements.
    ///
    /// Reported as `!0` (usize::MAX) for zero-sized `T`, since a ZST buffer
    /// never needs to grow.
    #[inline(always)]
    pub fn cap(&self) -> usize {
        if mem::size_of::<T>() == 0 {
            !0
        } else {
            self.cap
        }
    }

    /// Shared reference to the underlying allocator.
    pub fn alloc(&self) -> &A {
        &self.a
    }

    /// Mutable reference to the underlying allocator.
    pub fn alloc_mut(&mut self) -> &mut A {
        &mut self.a
    }

    /// Layout of the current allocation, or `None` when `cap == 0`
    /// (nothing has been allocated yet).
    ///
    /// NOTE(review): for zero-sized `T` the stored `cap` is `!0`, so this
    /// returns `Some` with a size-0 layout; callers below either check
    /// `size_of::<T>() != 0` first or tolerate that — verify before adding
    /// new callers.
    fn current_layout(&self) -> Option<Layout> {
        if self.cap == 0 {
            None
        } else {
            unsafe {
                let align = mem::align_of::<T>();
                // Unchecked multiply: an allocation of exactly this size
                // already exists, so the product was validated when made.
                let size = mem::size_of::<T>() * self.cap;
                Some(Layout::from_size_align_unchecked(size, align))
            }
        }
    }

    /// Doubles the capacity (or picks a small initial capacity when empty),
    /// diverging via `a.oom` on allocation failure.
    ///
    /// # Panics
    /// Panics with "capacity overflow" for zero-sized `T`, or if the new
    /// byte size trips `alloc_guard`.
    #[inline(never)]
    #[cold]
    pub fn double(&mut self) {
        unsafe {
            let elem_size = mem::size_of::<T>();
            // A ZST RawVec already reports unlimited capacity, so asking to
            // double it is a caller bug.
            assert!(elem_size != 0, "capacity overflow");
            let (new_cap, uniq) = match self.current_layout() {
                Some(cur) => {
                    // Existing allocation: grow to 2x via realloc.
                    // NOTE(review): `2 * self.cap` and the size multiply are
                    // unchecked; presumably cap is bounded by previous
                    // alloc_guard checks — confirm for 64-bit targets.
                    let new_cap = 2 * self.cap;
                    let new_size = new_cap * elem_size;
                    let new_layout = Layout::from_size_align_unchecked(new_size, cur.align());
                    // Validate size before handing it to the allocator.
                    alloc_guard(new_size).expect("capacity overflow");
                    let ptr_res = self.a.realloc(self.ptr.as_ptr() as *mut u8,
                                                 cur,
                                                 new_layout);
                    match ptr_res {
                        Ok(ptr) => (new_cap, NonNull::new_unchecked(ptr as *mut T)),
                        Err(e) => self.a.oom(e),
                    }
                }
                None => {
                    // First allocation: skip the 1 -> 2 -> 4 ramp and start
                    // at 4 elements (1 if a single element is very large).
                    let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
                    match self.a.alloc_array::<T>(new_cap) {
                        Ok(ptr) => (new_cap, ptr.into()),
                        Err(e) => self.a.oom(e),
                    }
                }
            };
            self.ptr = uniq;
            self.cap = new_cap;
        }
    }

    /// Attempts to double the capacity *in place* (no move, no copy).
    ///
    /// Returns `false` if nothing was ever allocated or the allocator
    /// cannot extend the block; returns `true` and updates `cap` on
    /// success. Panics with "capacity overflow" for zero-sized `T` (whose
    /// `current_layout` is `Some`) or if the new size trips `alloc_guard`.
    #[inline(never)]
    #[cold]
    pub fn double_in_place(&mut self) -> bool {
        unsafe {
            let elem_size = mem::size_of::<T>();
            // No allocation yet: nothing to grow in place.
            let old_layout = match self.current_layout() {
                Some(layout) => layout,
                None => return false,
            };
            assert!(elem_size != 0, "capacity overflow");
            // Same unchecked doubling rationale as in `double`.
            let new_cap = 2 * self.cap;
            let new_size = new_cap * elem_size;
            alloc_guard(new_size).expect("capacity overflow");
            let ptr = self.ptr() as *mut _;
            let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());
            match self.a.grow_in_place(ptr, old_layout, new_layout) {
                Ok(_) => {
                    // The block was extended where it is; only cap changes.
                    self.cap = new_cap;
                    true
                }
                Err(_) => {
                    false
                }
            }
        }
    }

    /// Ensures capacity for exactly `used_cap + needed_extra_cap` elements,
    /// reporting failure as a `Result` instead of panicking/aborting.
    ///
    /// Does nothing when the current capacity already suffices.
    pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize)
        -> Result<(), CollectionAllocErr> {
        unsafe {
            // NOTE(review): wrapping_sub assumes the caller's invariant
            // `used_cap <= cap()` (so the subtraction cannot wrap) — confirm.
            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
                return Ok(());
            }
            // Exact new capacity and its layout; arithmetic overflow maps
            // to CapacityOverflow instead of panicking.
            let new_cap = used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?;
            let new_layout = Layout::array::<T>(new_cap).ok_or(CapacityOverflow)?;
            alloc_guard(new_layout.size())?;
            let res = match self.current_layout() {
                // Grow the existing block; fresh allocation otherwise.
                Some(layout) => {
                    let old_ptr = self.ptr.as_ptr() as *mut u8;
                    self.a.realloc(old_ptr, layout, new_layout)
                }
                None => self.a.alloc(new_layout),
            };
            // `?` propagates the allocator error; state is only updated on
            // success, leaving the RawVec untouched on failure.
            self.ptr = NonNull::new_unchecked(res? as *mut T);
            self.cap = new_cap;
            Ok(())
        }
    }

    /// Panicking/aborting wrapper around `try_reserve_exact`.
    pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
        match self.try_reserve_exact(used_cap, needed_extra_cap) {
            Err(CapacityOverflow) => panic!("capacity overflow"),
            Err(AllocErr(e)) => self.a.oom(e),
            Ok(()) => { }
        }
    }

    /// Growth policy: the larger of double the current capacity and the
    /// exactly-required capacity, giving amortized O(1) growth.
    fn amortized_new_size(&self, used_cap: usize, needed_extra_cap: usize)
        -> Result<usize, CollectionAllocErr> {
        let required_cap = used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?;
        // NOTE(review): `self.cap * 2` is unchecked; presumably cap is kept
        // small enough by the alloc_guard checks on every growth — confirm.
        let double_cap = self.cap * 2;
        Ok(cmp::max(double_cap, required_cap))
    }

    /// Ensures capacity for at least `used_cap + needed_extra_cap` elements
    /// using the amortized growth policy, reporting failure as a `Result`.
    pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize)
        -> Result<(), CollectionAllocErr> {
        unsafe {
            // Same caller invariant as try_reserve_exact (see NOTE there).
            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
                return Ok(());
            }
            // Over-allocate per the amortized policy, then validate layout.
            let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)?;
            let new_layout = Layout::array::<T>(new_cap).ok_or(CapacityOverflow)?;
            alloc_guard(new_layout.size())?;
            let res = match self.current_layout() {
                Some(layout) => {
                    let old_ptr = self.ptr.as_ptr() as *mut u8;
                    self.a.realloc(old_ptr, layout, new_layout)
                }
                None => self.a.alloc(new_layout),
            };
            // State only changes on success (see try_reserve_exact).
            self.ptr = NonNull::new_unchecked(res? as *mut T);
            self.cap = new_cap;
            Ok(())
        }
    }

    /// Panicking/aborting wrapper around `try_reserve`.
    pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
        match self.try_reserve(used_cap, needed_extra_cap) {
            Err(CapacityOverflow) => panic!("capacity overflow"),
            Err(AllocErr(e)) => self.a.oom(e),
            Ok(()) => { }
        }
    }

    /// Attempts an amortized grow *in place*.
    ///
    /// Returns `false` when nothing was ever allocated, when the capacity
    /// already suffices (nothing to do), or when the allocator cannot
    /// extend the block where it is; the caller must then fall back to a
    /// moving reserve. Returns `true` and updates `cap` on success.
    pub fn reserve_in_place(&mut self, used_cap: usize, needed_extra_cap: usize) -> bool {
        unsafe {
            let old_layout = match self.current_layout() {
                Some(layout) => layout,
                None => return false,
            };
            // Already big enough: report false (no in-place growth done).
            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
                return false;
            }
            let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)
                .expect("capacity overflow");
            let ptr = self.ptr() as *mut _;
            // NOTE(review): builds the layout via `repeat` here, while
            // try_reserve uses `Layout::array` — presumably equivalent for
            // arrays of `T`; confirm against the Layout API.
            let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
            alloc_guard(new_layout.size()).expect("capacity overflow");
            match self.a.grow_in_place(ptr, old_layout, new_layout) {
                Ok(_) => {
                    self.cap = new_cap;
                    true
                }
                Err(_) => {
                    false
                }
            }
        }
    }

    /// Shrinks the buffer to hold exactly `amount` elements.
    ///
    /// # Panics
    /// Panics if `amount` exceeds the current capacity (non-ZST case).
    pub fn shrink_to_fit(&mut self, amount: usize) {
        let elem_size = mem::size_of::<T>();
        // ZSTs own no memory: just record the requested capacity.
        if elem_size == 0 {
            self.cap = amount;
            return;
        }
        assert!(self.cap >= amount, "Tried to shrink to a larger capacity");
        if amount == 0 {
            unsafe {
                // Bitwise-copy the allocator out, free the buffer with the
                // original, then overwrite `self` via ptr::write (which
                // does NOT run Drop on the old value) — so the buffer is
                // freed exactly once and the allocator moves into the new
                // empty RawVec.
                let a = ptr::read(&self.a as *const A);
                self.dealloc_buffer();
                ptr::write(self, RawVec::new_in(a));
            }
        } else if self.cap != amount {
            unsafe {
                // Ask the allocator to shrink the block to the new size
                // (realloc may move it).
                let old_size = elem_size * self.cap;
                let new_size = elem_size * amount;
                let align = mem::align_of::<T>();
                let old_layout = Layout::from_size_align_unchecked(old_size, align);
                let new_layout = Layout::from_size_align_unchecked(new_size, align);
                match self.a.realloc(self.ptr.as_ptr() as *mut u8,
                                     old_layout,
                                     new_layout) {
                    Ok(p) => self.ptr = NonNull::new_unchecked(p as *mut T),
                    Err(err) => self.a.oom(err),
                }
            }
            self.cap = amount;
        }
    }
}
impl<T, A: Alloc> RawVec<T, A> {
    /// Converts the entire buffer into a boxed slice of length `self.cap`.
    ///
    /// # Safety
    /// All `self.cap` elements must be initialized, since the resulting box
    /// will drop them. NOTE(review): for zero-sized `T` the stored cap is
    /// `!0`, which would produce a usize::MAX-length slice — presumably
    /// callers avoid this for ZSTs; confirm.
    pub unsafe fn into_box(self) -> Box<[T], A> {
        let slice = slice::from_raw_parts_mut(self.ptr(), self.cap);
        // Bitwise-copy the allocator out of `self`; ownership transfers to
        // the box, so `self` must be forgotten below, not dropped.
        let a = ptr::read(&self.a);
        let output: Box<[T], A> = Box::from_raw_in(slice, a);
        // Prevent `self`'s Drop from freeing the buffer now owned by the box.
        mem::forget(self);
        output
    }
}
impl<T, A: Alloc> RawVec<T, A> {
    /// Frees the backing allocation, if any, without touching the elements.
    ///
    /// # Safety
    /// The caller must ensure the buffer is not used (or freed again)
    /// afterwards; contained elements are *not* dropped here.
    pub unsafe fn dealloc_buffer(&mut self) {
        // Zero-sized types never allocate, so there is nothing to free.
        if mem::size_of::<T>() == 0 {
            return;
        }
        // `current_layout()` is `None` exactly when no allocation was made.
        if let Some(layout) = self.current_layout() {
            self.a.dealloc(self.ptr() as *mut u8, layout);
        }
    }
}
impl<T, A: Alloc> Drop for RawVec<T, A> {
    /// Frees the memory owned by the `RawVec`; the contained elements are
    /// *not* dropped — the owning container is responsible for that first.
    fn drop(&mut self) {
        // SAFETY: `self` uniquely owns the buffer and is never used again
        // after drop, so a single deallocation here is sound.
        unsafe {
            self.dealloc_buffer();
        }
    }
}
/// Rejects allocation sizes that could not possibly be satisfied.
///
/// Only targets where `usize` is narrower than 64 bits perform the check;
/// there, any size above `isize::MAX` bytes is reported as
/// `CapacityOverflow`. On 64-bit targets the check is skipped entirely.
#[inline]
fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> {
    let fits = mem::size_of::<usize>() >= 8 || alloc_size <= ::core::isize::MAX as usize;
    if fits {
        Ok(())
    } else {
        Err(CapacityOverflow)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Re-export the allocator crate under the module name the included
    // test helpers refer to.
    mod allocator_api {
        pub use ::allocator::*;
    }
    // Pulls in test-only helpers — presumably `MyHeap` and the
    // default-allocator `RawVec::new` used below come from dummy.rs; they
    // are not visible in this file.
    include!("dummy.rs");

    // Verifies that RawVec actually routes allocations through the custom
    // allocator it was constructed with.
    #[test]
    fn allocator_param() {
        use allocator::{Alloc, AllocErr};
        // An allocator with a byte budget ("fuel") that fails once spent.
        struct BoundedAlloc { fuel: usize }
        unsafe impl Alloc for BoundedAlloc {
            unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
                let size = layout.size();
                if size > self.fuel {
                    return Err(AllocErr::Unsupported { details: "fuel exhausted" });
                }
                match MyHeap.alloc(layout) {
                    // Only charge fuel for allocations that succeed.
                    ok @ Ok(_) => { self.fuel -= size; ok }
                    err @ Err(_) => err,
                }
            }
            unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
                MyHeap.dealloc(ptr, layout)
            }
        }
        let a = BoundedAlloc { fuel: 500 };
        let mut v: RawVec<u8, _> = RawVec::with_capacity_in(50, a);
        // 50 bytes charged by with_capacity_in.
        assert_eq!(v.a.fuel, 450);
        // reserve grows to max(2*50, 50+150) = 200 bytes, charged anew
        // (dealloc does not refund fuel in this test allocator).
        v.reserve(50, 150); assert_eq!(v.a.fuel, 250);
    }

    // Verifies the amortized growth policy: exact on first reserve, then
    // at least doubling afterwards.
    #[test]
    fn reserve_does_not_overallocate() {
        {
            // First reserve on an empty vec allocates exactly the request.
            let mut v: RawVec<u32> = RawVec::new();
            v.reserve(0, 9);
            assert_eq!(9, v.cap());
        }
        {
            let mut v: RawVec<u32> = RawVec::new();
            v.reserve(0, 7);
            assert_eq!(7, v.cap());
            // A large follow-up request beats doubling: exact again.
            v.reserve(7, 90);
            assert_eq!(97, v.cap());
        }
        {
            let mut v: RawVec<u32> = RawVec::new();
            v.reserve(0, 12);
            assert_eq!(12, v.cap());
            // A small follow-up request triggers (at least) doubling.
            v.reserve(12, 3);
            assert!(v.cap() >= 12 + 12 / 2);
        }
    }
}