use core::cmp;
use core::marker::PhantomData;
use core::mem;
use core::ops::Drop;
use core::ptr::{self, NonNull};
use core::slice;
use alloc::{Alloc, Layout, oom};
use alloc::CollectionAllocErr;
use alloc::CollectionAllocErr::*;
use boxed::Box;
#[cfg(not(feature = "nonnull_cast"))]
use ::NonNullCast;
/// A low-level abstraction over a heap-allocated buffer of `T`s: a pointer,
/// a capacity, and the allocator `A` that owns the allocation. It does not
/// track how many elements are initialized; callers (e.g. a `Vec`-like type)
/// are responsible for that and for dropping the contents.
#[allow(missing_debug_implementations)]
pub struct RawVec<T, A: Alloc> {
    // Pointer to the buffer; dangling (no allocation exists) when `cap == 0`
    // or when `T` is zero-sized.
    ptr: NonNull<T>,
    // Marks logical ownership of `T` values for drop-check purposes.
    marker: PhantomData<T>,
    // Capacity in elements; set to the `!0` sentinel for zero-sized `T`.
    cap: usize,
    // The allocator used for every (de/re)allocation of this buffer.
    a: A,
}
impl<T, A: Alloc> RawVec<T, A> {
    /// Creates an empty `RawVec` using `a` as its allocator.
    ///
    /// Performs no allocation: the pointer is dangling and the capacity is 0
    /// — or `!0` (`usize::MAX`) when `T` is zero-sized, since ZSTs never need
    /// real storage.
    pub fn new_in(a: A) -> Self {
        RawVec {
            ptr: NonNull::dangling(),
            marker: PhantomData,
            // Branchless select: picks index 1 (== !0) when T is zero-sized.
            cap: [0, !0][(mem::size_of::<T>() == 0) as usize],
            a,
        }
    }

    /// Creates a `RawVec` with room for exactly `cap` uninitialized elements,
    /// allocated from `a`. Panics on capacity overflow; aborts (`oom`) if the
    /// allocator fails.
    #[inline]
    pub fn with_capacity_in(cap: usize, a: A) -> Self {
        RawVec::allocate_in(cap, false, a)
    }

    /// Same as `with_capacity_in`, but the allocated memory is zeroed.
    #[inline]
    pub fn with_capacity_zeroed_in(cap: usize, a: A) -> Self {
        RawVec::allocate_in(cap, true, a)
    }

    /// Shared constructor backing the two `with_capacity*` entry points.
    ///
    /// Panics via `capacity_overflow` if `cap * size_of::<T>()` overflows or
    /// trips `alloc_guard`; aborts via `oom` if the allocator reports failure.
    /// A zero-byte request (cap == 0, or zero-sized `T`) performs no
    /// allocation and stores a dangling pointer.
    fn allocate_in(cap: usize, zeroed: bool, mut a: A) -> Self {
        unsafe {
            let elem_size = mem::size_of::<T>();
            // checked_mul guards the element-count -> byte-size conversion;
            // alloc_guard additionally rejects sizes above isize::MAX on
            // targets where usize is smaller than 64 bits.
            let alloc_size = cap.checked_mul(elem_size).unwrap_or_else(|| capacity_overflow());
            alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow());
            let ptr = if alloc_size == 0 {
                NonNull::<T>::dangling()
            } else {
                let align = mem::align_of::<T>();
                let layout = Layout::from_size_align(alloc_size, align).unwrap();
                let result = if zeroed {
                    a.alloc_zeroed(layout)
                } else {
                    a.alloc(layout)
                };
                match result {
                    Ok(ptr) => ptr.cast(),
                    Err(_) => oom(layout),
                }
            };
            RawVec {
                ptr: ptr.into(),
                marker: PhantomData,
                cap,
                a,
            }
        }
    }
}
impl<T, A: Alloc + Default> RawVec<T, A> {
    /// Creates an empty `RawVec` backed by the allocator's default value.
    /// No memory is allocated.
    pub fn new() -> Self {
        RawVec::new_in(A::default())
    }

    /// Allocates room for exactly `cap` uninitialized elements using the
    /// default allocator. Panics on overflow; aborts on allocation failure.
    #[inline]
    pub fn with_capacity(cap: usize) -> Self {
        Self::allocate_in(cap, false, A::default())
    }

    /// Allocates room for exactly `cap` zeroed elements using the default
    /// allocator. Panics on overflow; aborts on allocation failure.
    #[inline]
    pub fn with_capacity_zeroed(cap: usize) -> Self {
        Self::allocate_in(cap, true, A::default())
    }
}
impl<T, A: Alloc> RawVec<T, A> {
    /// Reconstitutes a `RawVec` from raw components.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null, and must point to a block with capacity `cap`
    /// that was allocated by `a` (or be a dangling pointer paired with a
    /// state that never deallocates: `cap == 0` or zero-sized `T`), because
    /// this `RawVec` will deallocate it through `a` on drop.
    pub unsafe fn from_raw_parts_in(ptr: *mut T, cap: usize, a: A) -> Self {
        RawVec {
            ptr: NonNull::new_unchecked(ptr),
            marker: PhantomData,
            cap,
            a,
        }
    }
}
impl<T, A: Alloc> RawVec<T, A> {
    /// Converts a boxed slice into a `RawVec` whose capacity equals the slice
    /// length, taking over its buffer and allocator without copying.
    pub fn from_box(mut slice: Box<[T], A>) -> Self {
        unsafe {
            // Move the allocator out with a raw read; the `mem::forget`
            // below keeps the Box from also dropping it (or freeing the
            // buffer we just took ownership of).
            let a = ptr::read(&slice.a);
            let result = RawVec::from_raw_parts_in(slice.as_mut_ptr(), slice.len(), a);
            mem::forget(slice);
            result
        }
    }
}
impl<T, A: Alloc> RawVec<T, A> {
    /// Raw pointer to the start of the buffer. May be dangling when no
    /// allocation exists (`cap == 0` or zero-sized `T`).
    pub fn ptr(&self) -> *mut T {
        self.ptr.as_ptr()
    }

    /// Capacity in elements. For zero-sized `T` this is always `usize::MAX`,
    /// since ZSTs need no storage.
    #[inline(always)]
    pub fn cap(&self) -> usize {
        if mem::size_of::<T>() == 0 {
            !0
        } else {
            self.cap
        }
    }

    /// Shared access to the allocator.
    pub fn alloc(&self) -> &A {
        &self.a
    }

    /// Exclusive access to the allocator.
    pub fn alloc_mut(&mut self) -> &mut A {
        &mut self.a
    }

    /// Layout of the current allocation, or `None` when `cap == 0`
    /// (nothing has been allocated yet).
    fn current_layout(&self) -> Option<Layout> {
        if self.cap == 0 {
            None
        } else {
            unsafe {
                // Sound: this size/align pair was validated when the buffer
                // was originally allocated.
                let align = mem::align_of::<T>();
                let size = mem::size_of::<T>() * self.cap;
                Some(Layout::from_size_align_unchecked(size, align))
            }
        }
    }

    /// Doubles the capacity — the amortized growth path for push-style
    /// workloads. Panics with "capacity overflow" for zero-sized `T` or if
    /// the new byte size trips `alloc_guard`; aborts on allocation failure.
    #[inline(never)]
    #[cold]
    pub fn double(&mut self) {
        unsafe {
            let elem_size = mem::size_of::<T>();
            // ZSTs already report infinite capacity; asking to double one
            // means the caller overflowed that, which is unrecoverable.
            assert!(elem_size != 0, "capacity overflow");
            let (new_cap, uniq) = match self.current_layout() {
                Some(cur) => {
                    let new_cap = 2 * self.cap;
                    // Oversized requests are rejected by alloc_guard before
                    // we hand them to the allocator.
                    let new_size = new_cap * elem_size;
                    alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(),
                                                 cur,
                                                 new_size);
                    match ptr_res {
                        Ok(ptr) => (new_cap, ptr.cast().into()),
                        Err(_) => oom(Layout::from_size_align_unchecked(new_size, cur.align())),
                    }
                }
                None => {
                    // First allocation: start at 4 elements, but only 1 for
                    // very large element types to avoid huge initial blocks.
                    let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
                    match self.a.alloc_array::<T>(new_cap) {
                        Ok(ptr) => (new_cap, ptr.into()),
                        Err(_) => oom(Layout::array::<T>(new_cap).unwrap()),
                    }
                }
            };
            self.ptr = uniq;
            self.cap = new_cap;
        }
    }

    /// Attempts to double the capacity strictly in place (without moving the
    /// buffer). Returns `false` when nothing is allocated yet or the
    /// allocator cannot extend the block; on success updates `cap` and
    /// returns `true`. Panics for zero-sized `T` or on size overflow.
    #[inline(never)]
    #[cold]
    pub fn double_in_place(&mut self) -> bool {
        unsafe {
            let elem_size = mem::size_of::<T>();
            let old_layout = match self.current_layout() {
                Some(layout) => layout,
                // No allocation exists, so there is nothing to grow in place.
                None => return false, };
            assert!(elem_size != 0, "capacity overflow");
            let new_cap = 2 * self.cap;
            let new_size = new_cap * elem_size;
            alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
            match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size) {
                Ok(_) => {
                    // Same pointer, larger block: only bookkeeping changes.
                    self.cap = new_cap;
                    true
                }
                Err(_) => {
                    false
                }
            }
        }
    }

    /// Fallible version of `reserve_exact`: reports errors instead of
    /// panicking/aborting.
    pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize)
        -> Result<(), CollectionAllocErr> {
        self.reserve_internal(used_cap, needed_extra_cap, Fallible, Exact)
    }

    /// Ensures capacity for exactly `used_cap + needed_extra_cap` elements.
    /// Panics on capacity overflow. In `Infallible` mode, allocation failure
    /// aborts inside `reserve_internal`, so `AllocErr` can never reach here.
    pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
        match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Exact) {
            Err(CapacityOverflow) => capacity_overflow(),
            Err(AllocErr) => unreachable!(),
            Ok(()) => { /* yay */ }
        }
    }

    /// Amortized growth policy: the larger of double the current capacity
    /// and the minimum required capacity. Errors if the required capacity
    /// itself overflows `usize`.
    fn amortized_new_size(&self, used_cap: usize, needed_extra_cap: usize)
        -> Result<usize, CollectionAllocErr> {
        let required_cap = used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?;
        let double_cap = self.cap * 2;
        Ok(cmp::max(double_cap, required_cap))
    }

    /// Fallible version of `reserve` (amortized growth).
    pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize)
        -> Result<(), CollectionAllocErr> {
        self.reserve_internal(used_cap, needed_extra_cap, Fallible, Amortized)
    }

    /// Ensures space for at least `needed_extra_cap` more elements beyond
    /// `used_cap`, growing by the amortized policy (at least doubling).
    /// Panics on capacity overflow; aborts on allocation failure.
    pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
        match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Amortized) {
            Err(CapacityOverflow) => capacity_overflow(),
            Err(AllocErr) => unreachable!(),
            Ok(()) => { /* yay */ }
        }
    }

    /// Attempts an amortized grow strictly in place. Returns `true` only if
    /// the allocator extended the existing block; `false` when nothing is
    /// allocated yet, when the existing capacity already suffices, or when
    /// the allocator declines. Panics on capacity overflow.
    pub fn reserve_in_place(&mut self, used_cap: usize, needed_extra_cap: usize) -> bool {
        unsafe {
            let old_layout = match self.current_layout() {
                Some(layout) => layout,
                None => return false,
            };
            // wrapping_sub tolerates used_cap > cap() (reads as "no room").
            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
                return false;
            }
            let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)
                .unwrap_or_else(|_| capacity_overflow());
            // Layout::repeat also yields the element stride; only the layout
            // (field .0) is needed here.
            let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
            alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
            match self.a.grow_in_place(
                NonNull::from(self.ptr).cast(), old_layout, new_layout.size(),
            ) {
                Ok(_) => {
                    self.cap = new_cap;
                    true
                }
                Err(_) => {
                    false
                }
            }
        }
    }

    /// Shrinks the allocation down to exactly `amount` elements.
    ///
    /// For zero-sized `T` only the bookkeeping field changes. Shrinking to 0
    /// frees the buffer entirely and resets to the unallocated state.
    /// Panics if `amount` exceeds the current capacity; aborts if the
    /// allocator fails to shrink in place.
    pub fn shrink_to_fit(&mut self, amount: usize) {
        let elem_size = mem::size_of::<T>();
        if elem_size == 0 {
            self.cap = amount;
            return;
        }
        assert!(self.cap >= amount, "Tried to shrink to a larger capacity");
        if amount == 0 {
            unsafe {
                // Move the allocator out, free the buffer, then rebuild
                // `self` in place as a fresh empty RawVec. The raw
                // read/write pair avoids double-dropping the allocator.
                let a = ptr::read(&self.a as *const A);
                self.dealloc_buffer();
                ptr::write(self, RawVec::new_in(a));
            }
        } else if self.cap != amount {
            unsafe {
                let old_size = elem_size * self.cap;
                let new_size = elem_size * amount;
                let align = mem::align_of::<T>();
                let old_layout = Layout::from_size_align_unchecked(old_size, align);
                match self.a.realloc(NonNull::from(self.ptr).cast(),
                                     old_layout,
                                     new_size) {
                    Ok(p) => self.ptr = p.cast().into(),
                    Err(_) => oom(Layout::from_size_align_unchecked(new_size, align)),
                }
            }
            self.cap = amount;
        }
    }
}
/// Whether an allocation failure is reported to the caller or aborts.
enum Fallibility {
    /// Return `Err(AllocErr)` to the caller on allocation failure.
    Fallible,
    /// Abort the process via `oom` on allocation failure.
    Infallible,
}
use self::Fallibility::*;
/// How much capacity a reserve call asks for.
enum ReserveStrategy {
    /// Grow to exactly the requested capacity.
    Exact,
    /// Grow to at least double the current capacity (amortized growth).
    Amortized,
}
use self::ReserveStrategy::*;
impl<T, A: Alloc> RawVec<T, A> {
    /// Common implementation behind all four reserve entry points.
    ///
    /// Computes the target capacity per `strategy`, performs the
    /// (re)allocation, and — in `Infallible` mode — turns allocator failure
    /// into an `oom` abort, so only `CapacityOverflow` can escape to
    /// infallible callers.
    fn reserve_internal(
        &mut self,
        used_cap: usize,
        needed_extra_cap: usize,
        fallibility: Fallibility,
        strategy: ReserveStrategy,
    ) -> Result<(), CollectionAllocErr> {
        unsafe {
            use alloc::AllocErr;
            // Fast path: already enough room. wrapping_sub keeps this sound
            // (no panic) even if the caller passes used_cap > cap().
            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
                return Ok(());
            }
            let new_cap = match strategy {
                Exact => used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?,
                Amortized => self.amortized_new_size(used_cap, needed_extra_cap)?,
            };
            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
            alloc_guard(new_layout.size())?;
            let res = match self.current_layout() {
                Some(layout) => {
                    // The element type never changes, so neither does the
                    // required alignment.
                    debug_assert!(new_layout.align() == layout.align());
                    self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size())
                }
                None => self.a.alloc(new_layout),
            };
            // Infallible callers abort here instead of ever seeing AllocErr.
            match (&res, fallibility) {
                (Err(AllocErr), Infallible) => oom(new_layout),
                _ => {}
            }
            self.ptr = res?.cast().into();
            self.cap = new_cap;
            Ok(())
        }
    }
}
impl<T, A: Alloc> RawVec<T, A> {
    /// Converts the buffer into a boxed slice covering the full capacity,
    /// transferring ownership of both buffer and allocator.
    ///
    /// # Safety
    ///
    /// All `cap` elements must be initialized, since the resulting boxed
    /// slice (and its eventual drop) treats every slot as a live `T`.
    pub unsafe fn into_box(self) -> Box<[T], A> {
        let slice = slice::from_raw_parts_mut(self.ptr(), self.cap);
        let a = ptr::read(&self.a);
        let output: Box<[T], A> = Box::from_raw_in(slice, a);
        // The Box now owns buffer and allocator; skip our own Drop.
        mem::forget(self);
        output
    }
}
impl<T, A: Alloc> RawVec<T, A> {
    /// Frees the underlying buffer (if one exists) without dropping any
    /// elements.
    ///
    /// # Safety
    ///
    /// The caller must ensure the contents have already been dropped (or
    /// need no drop), and must not use the buffer afterwards.
    pub unsafe fn dealloc_buffer(&mut self) {
        let elem_size = mem::size_of::<T>();
        // Zero-sized types were never actually allocated; cap == 0 means no
        // allocation either (current_layout returns None).
        if elem_size != 0 {
            if let Some(layout) = self.current_layout() {
                self.a.dealloc(NonNull::from(self.ptr).cast(), layout);
            }
        }
    }
}
impl<T, A: Alloc> Drop for RawVec<T, A> {
    /// Frees the buffer. Elements are NOT dropped — that remains the
    /// responsibility of whatever type owns this RawVec.
    fn drop(&mut self) {
        unsafe { self.dealloc_buffer(); }
    }
}
/// Guards against allocation requests that overflow what the address space
/// can represent: on targets where `usize` is narrower than 64 bits, any
/// request above `isize::MAX` bytes is rejected as a capacity overflow.
/// On 64-bit targets every size passes — real allocations of such magnitude
/// fail in the allocator itself long before this limit matters.
#[inline]
fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> {
    let within_limit =
        mem::size_of::<usize>() >= 8 || alloc_size <= ::core::isize::MAX as usize;
    if within_limit { Ok(()) } else { Err(CapacityOverflow) }
}
/// Central panic for any capacity computation that overflows.
/// Diverges (`-> !`), so callers can use it inside `unwrap_or_else`
/// regardless of the expected value type.
fn capacity_overflow() -> ! {
    panic!("capacity overflow")
}
#[cfg(test)]
mod tests {
    use super::*;
    // Re-export the allocator API under the module name the included
    // dummy allocator source expects.
    mod allocator_api {
        pub use ::alloc::*;
    }
    // Pulls in `MyHeap`, the backing allocator used by the tests below.
    include!("../dummy.rs");

    #[test]
    fn allocator_param() {
        use alloc::AllocErr;
        // Allocator wrapper that fails once a byte budget ("fuel") is
        // exhausted, letting the test observe exactly how many bytes RawVec
        // requests and that the custom allocator is actually threaded through.
        struct BoundedAlloc { fuel: usize }
        unsafe impl Alloc for BoundedAlloc {
            unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
                let size = layout.size();
                if size > self.fuel {
                    return Err(AllocErr);
                }
                match MyHeap.alloc(layout) {
                    // Only deduct fuel for allocations that succeeded.
                    ok @ Ok(_) => { self.fuel -= size; ok }
                    err @ Err(_) => err,
                }
            }
            unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
                MyHeap.dealloc(ptr, layout)
            }
        }
        let a = BoundedAlloc { fuel: 500 };
        let mut v: RawVec<u8, _> = RawVec::with_capacity_in(50, a);
        // Initial allocation of 50 bytes consumed 50 units of fuel.
        assert_eq!(v.a.fuel, 450);
        // Growing to 200 elements allocates a fresh 200-byte block.
        v.reserve(50, 150); assert_eq!(v.a.fuel, 250);
    }

    #[test]
    fn reserve_does_not_overallocate() {
        {
            let mut v: RawVec<u32, _> = RawVec::new_in(MyHeap);
            // First reserve on an empty RawVec allocates exactly the request.
            v.reserve(0, 9);
            assert_eq!(9, v.cap());
        }
        {
            let mut v: RawVec<u32, _> = RawVec::new_in(MyHeap);
            v.reserve(0, 7);
            assert_eq!(7, v.cap());
            // 97 exceeds double the current capacity, so the exact required
            // amount wins over the doubling policy.
            v.reserve(7, 90);
            assert_eq!(97, v.cap());
        }
        {
            let mut v: RawVec<u32, _> = RawVec::new_in(MyHeap);
            v.reserve(0, 12);
            assert_eq!(12, v.cap());
            // A small additional request triggers amortized growth; only a
            // lower bound is pinned so the policy may evolve.
            v.reserve(12, 3);
            assert!(v.cap() >= 12 + 12 / 2);
        }
    }
}