use std::mem::{
MaybeUninit,
ManuallyDrop,
};
use std::marker::{Send, Sync, PhantomData};
use std::ops::Drop;
use std::slice;
// Borrowed, caller-provided scratch storage: `buf_ptr` points at the start of
// an external `[MaybeUninit<T>]` slice and `fill_ptr` counts how many leading
// slots are initialized. `repr(C)` keeps `fill_ptr` at offset 0 so it shares
// its offset with `HeapBuffer::_fill_ptr` when punned through `Internal`.
#[repr(C)]
#[derive(Debug)]
struct StackBuffer<T>
{
// number of initialized elements; doubles as the union discriminant
// (fill_ptr >= stack capacity means the heap variant is active)
fill_ptr: usize,
// raw pointer into the caller's slice; its lifetime is tracked by AVec::_stack
buf_ptr: *mut MaybeUninit<T>,
}
// Hand-written rather than derived so that no spurious `T: Clone` bound is
// attached. `StackBuffer` is plain-old-data (a counter plus a raw pointer),
// so cloning is a bitwise copy — delegate to the unconditional `Copy` impl.
impl<T> Clone for StackBuffer<T>
{
fn clone(&self) -> Self {
*self
}
}
// Manual (non-derived) Copy avoids a spurious `T: Copy` bound; being `Copy`
// is what lets `StackBuffer` sit in a union field without `ManuallyDrop`.
impl<T> Copy for StackBuffer<T>{}
// Heap-backed storage used after the stack slice fills up. `repr(C)` pins
// `_fill_ptr` at offset 0 so it aliases `StackBuffer::fill_ptr` through the
// `Internal` union; it is set to the stack capacity on promotion, which makes
// `AVec::is_allocated` report true no matter which union view is read.
#[repr(C)]
#[derive(Debug, Clone)]
struct HeapBuffer<T>
{
// _fill_ptr: discriminant mirror (== stack capacity); buf: the live elements
_fill_ptr: usize, buf: Vec<T>,
}
// Untagged union of the two storage modes. No explicit tag is stored: both
// variants begin with a `usize` at offset 0 (repr(C)), and `AVec` compares
// that shared field against its stack capacity to decide which view is live
// (see `AVec::is_allocated`). `ManuallyDrop` is required because `HeapBuffer`
// is not `Copy`; `AVec::drop` releases it explicitly when heap mode is active.
#[repr(C)]
union Internal<T>
{
stack: StackBuffer<T>,
heap: ManuallyDrop<HeapBuffer<T>>,
}
// A small-vector: elements live in a caller-provided `[MaybeUninit<T>]`
// scratch slice until it fills up, after which they are moved into a heap
// `Vec<T>` and all further growth happens there.
pub struct AVec<'a, T>
{
// capacity of the borrowed stack slice; shared fill counter >= stack_sz
// means the heap variant of `inner` is the active one
stack_sz: usize,
inner: Internal<T>,
// ties the lifetime 'a of the borrowed slice to this value even though
// only a raw pointer to it is actually stored
_stack: PhantomData<&'a mut [MaybeUninit<T>]>,
}
// SAFETY: AVec uniquely owns its T values (in the borrowed stack slice or in
// the heap Vec); moving it to another thread moves those values with it, which
// is sound exactly when `T: Send`. The bound is required — an unconditional
// impl would let non-Send payloads (e.g. `Rc<T>`) cross threads.
unsafe impl<'a, T: Send> Send for AVec<'a, T>{}
// SAFETY: a `&AVec` only grants shared access to the stored T values, which is
// sound exactly when `T: Sync`. The bound is required — an unconditional impl
// would let threads share references to non-Sync payloads (e.g. `Cell<T>`).
unsafe impl<'a, T: Sync> Sync for AVec<'a, T>{}
// Releases whichever union variant is currently active; `is_allocated` is the
// discriminant test, so it must be consulted before touching either field.
impl<'a, T> Drop for AVec<'a, T>
{
fn drop(&mut self) {
if self.is_allocated() {
// Heap mode: the union wrapped HeapBuffer in ManuallyDrop, so its
// destructor (freeing the Vec and dropping its elements) must be
// run by hand here.
unsafe {
ManuallyDrop::drop(&mut self.inner.heap);
}
} else {
// Stack mode: only the first fill_ptr() slots of the borrowed
// MaybeUninit slice are initialized — drop exactly that prefix.
// needs_drop lets trivially-droppable T skip the walk entirely.
if std::mem::needs_drop::<T>() {
unsafe {
std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(self.inner.stack.buf_ptr as *mut T, self.fill_ptr()));
}
}
}
}
}
impl<'a, T> AVec<'a, T>
{
/// Reads the `usize` shared by both union variants at offset 0 (repr(C)):
/// the element count in stack mode, or `_fill_ptr == stack_sz` in heap mode.
fn fill_ptr(&self) -> usize
{
// SAFETY: both union variants begin with a usize at the same offset,
// so this read is valid whichever variant is active.
unsafe {
self.inner.stack.fill_ptr
}
}
/// Returns true once the elements have been promoted to the heap.
pub fn is_allocated(&self) -> bool
{
// Heap mode stores stack_sz in the shared first field, so this is an
// exact discriminant test, not merely a capacity check.
self.fill_ptr() >= self.stack_sz
}
/// Creates a vector backed by the caller-provided scratch slice. Elements
/// are stored there until it fills up, then moved into a heap `Vec`.
pub fn new(stack: &'a mut [MaybeUninit<T>]) -> Self
{
let (buf_ptr, stack_sz) = (stack.as_mut_ptr(), stack.len());
if stack_sz == 0 {
// BUG FIX: a zero-capacity slice can never hold an element, and a
// stack-variant value with fill_ptr == stack_sz == 0 would satisfy
// is_allocated() while `inner.stack` is the active variant — the
// first push/drop would then reinterpret uninitialized bytes as a
// HeapBuffer (undefined behavior). Start directly on the heap.
return Self {
stack_sz,
inner: Internal {
heap: ManuallyDrop::new(HeapBuffer {
// 0 >= stack_sz (== 0), so is_allocated() stays true.
_fill_ptr: 0,
buf: Vec::new(),
}),
},
_stack: PhantomData,
};
}
Self {
stack_sz,
inner: Internal {
stack: StackBuffer {
fill_ptr: 0,
buf_ptr,
}
},
_stack: PhantomData
}
}
/// Moves the initialized stack prefix into a heap `Vec` and switches the
/// union to the heap variant. Must only be called in stack mode.
fn move_to_heap(&mut self)
{
// SAFETY: in stack mode the first fill_ptr() slots are initialized,
// and each element is read (moved) out exactly once. The vacated slots
// are never dropped afterwards because the variant is replaced below.
let buf: Vec<T> = unsafe {
slice::from_raw_parts(self.inner.stack.buf_ptr as *const MaybeUninit<T>, self.fill_ptr()).iter().map(|x| x.as_ptr().read()).collect()
};
// `_fill_ptr = stack_sz` keeps is_allocated() true from now on. The old
// stack variant is Copy, so overwriting the union runs no destructor.
self.inner = Internal {
heap: ManuallyDrop::new(HeapBuffer {
_fill_ptr: self.stack_sz,
buf,
}),
};
}
/// Appends an element, promoting the storage to the heap the moment the
/// stack slice becomes full.
pub fn push(&mut self, item: T)
{
if self.is_allocated()
{
// SAFETY: is_allocated() proves the heap variant is active.
unsafe {
(*self.inner.heap).buf.push(item)
}
} else {
// SAFETY: the stack variant is active and fill_ptr < stack_sz, so
// the write is in bounds; MaybeUninit<T> has no drop glue, so plain
// assignment over an uninitialized slot is sound.
unsafe {
let ptr = self.inner.stack.fill_ptr;
*self.inner.stack.buf_ptr.add(ptr) = MaybeUninit::new(item);
self.inner.stack.fill_ptr += 1;
// Promote eagerly once full so the stack variant can never be
// the active one while is_allocated() reports heap mode.
if self.is_allocated() {
self.move_to_heap();
}
}
}
}
/// Number of elements currently stored.
pub fn len(&self) -> usize
{
if self.is_allocated()
{
// SAFETY: is_allocated() proves the heap variant is active.
unsafe {
self.inner.heap.buf.len()
}
} else {
self.fill_ptr()
}
}
/// Returns true when the vector holds no elements.
pub fn is_empty(&self) -> bool
{
self.len() == 0
}
}