use core::{
mem::{self, MaybeUninit},
ptr::{self, NonNull},
};
use super::*;
/// Storage strategy for the value behind an [`Owned`] or [`RefMut`] handle.
#[derive(Debug)]
enum Kind<T> {
/// The value is kept in a slot carried by the handle itself (used for types with drop glue).
Slot(MaybeUninit<T>),
/// The value lives behind a pointer into the arena allocation.
Inline(NonNull<T>),
/// `T` is a zero-sized type, so only a dangling pointer is kept.
Dangling(NonNull<T>),
}
impl<T> Default for Kind<T> {
// The default is the placeholder left behind by `mem::take` in `RefMut::to_owned`.
fn default() -> Self {
if mem::size_of::<T>() == 0 {
Kind::Dangling(NonNull::dangling())
} else if mem::needs_drop::<T>() {
Kind::Slot(MaybeUninit::uninit())
} else {
Kind::Inline(NonNull::dangling())
}
}
}
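/// An owned handle to a value of type `T` allocated in an arena.
///
/// The handle keeps its own allocator and, on drop, drops the value (when `T`
/// needs it) and returns the allocation to the arena, unless it was detached.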
#[derive(Debug)]
#[must_use = "The `T` is uninitialized, and must be initialized by `write` before it is used, if `T` is not zero sized type."]
pub struct Owned<T, A: Allocator> {
kind: Kind<T>,
arena: A,
detached: bool,
pub(super) allocated: Meta,
}
// `Owned` stores or points to a `T`, so crossing threads also requires `T: Send`/`T: Sync`.
unsafe impl<T: Send, A: Allocator + Send> Send for Owned<T, A> {}
unsafe impl<T: Sync, A: Allocator + Sync> Sync for Owned<T, A> {}
impl<T, A: Allocator> crate::Buffer for Owned<T, A> {
#[inline]
fn capacity(&self) -> usize {
self.allocated.ptr_size as usize
}
#[inline]
fn offset(&self) -> usize {
self.allocated.ptr_offset as usize
}
#[inline]
fn buffer_capacity(&self) -> usize {
self.allocated.memory_size as usize
}
#[inline]
fn buffer_offset(&self) -> usize {
self.allocated.memory_offset as usize
}
#[inline]
unsafe fn detach(&mut self) {
self.detached = true;
}
#[cfg(all(feature = "memmap", not(target_family = "wasm")))]
fn flush(&self) -> std::io::Result<()> {
self.arena.flush_range(
self.allocated.ptr_offset as usize,
self.allocated.ptr_size as usize,
)
}
#[cfg(all(feature = "memmap", not(target_family = "wasm")))]
fn flush_async(&self) -> std::io::Result<()> {
self.arena.flush_async_range(
self.allocated.ptr_offset as usize,
self.allocated.ptr_size as usize,
)
}
}
impl<T, A: Allocator> Owned<T, A> {
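/// Writes `value` into the underlying storage. For zero-sized types there is
/// nothing to store, so `value` is dropped immediately.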
#[inline]
pub fn write(&mut self, value: T) {
match &mut self.kind {
Kind::Slot(slot) => unsafe {
slot.as_mut_ptr().write(value);
},
Kind::Inline(ptr) => unsafe {
ptr.as_ptr().write(value);
},
Kind::Dangling(_) => {}
}
}
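/// Returns a shared reference to the value.
///
/// # Safety
/// - The value must have been initialized via [`write`](Self::write) first,
///   unless `T` is a zero-sized type.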
pub unsafe fn as_ref(&self) -> &T {
match &self.kind {
Kind::Slot(slot) => slot.as_ptr().as_ref().unwrap(),
Kind::Inline(ptr) => ptr.as_ref(),
Kind::Dangling(val) => val.as_ref(),
}
}
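/// Returns a mutable reference to the value.
///
/// # Safety
/// - The value must have been initialized via [`write`](Self::write) first,
///   unless `T` is a zero-sized type.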
pub unsafe fn as_mut(&mut self) -> &mut T {
match &mut self.kind {
Kind::Slot(slot) => slot.as_mut_ptr().as_mut().unwrap(),
Kind::Inline(ptr) => ptr.as_mut(),
Kind::Dangling(val) => val.as_mut(),
}
}
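/// Returns a raw pointer to the value, which may still be uninitialized.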
pub fn as_mut_ptr(&mut self) -> NonNull<T> {
match &mut self.kind {
Kind::Slot(slot) => {
if slot.as_ptr().is_null() {
NonNull::dangling()
} else {
unsafe { NonNull::new_unchecked(slot.as_mut_ptr()) }
}
}
Kind::Inline(ptr) => *ptr,
Kind::Dangling(val) => *val,
}
}
}
impl<T, A: Allocator> Drop for Owned<T, A> {
fn drop(&mut self) {
match &mut self.kind {
Kind::Slot(slot) => {
// The value lives in the handle's slot: drop it in place, then return the
// backing allocation to the arena, unless the handle was detached.
if !self.detached {
unsafe {
if mem::needs_drop::<T>() {
let ptr = slot.as_mut_ptr();
if !ptr.is_null() {
ptr::drop_in_place(ptr);
}
}
}
unsafe {
self
.arena
.dealloc(self.allocated.memory_offset, self.allocated.memory_size);
}
}
}
Kind::Inline(_) => {
// `T` has no drop glue here, so only the arena allocation needs to be released.
if !self.detached {
unsafe {
self
.arena
.dealloc(self.allocated.memory_offset, self.allocated.memory_size);
}
}
}
// Zero-sized types own no memory and need no cleanup.
Kind::Dangling(_) => {}
}
}
}
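/// A mutable handle to a value of type `T` allocated in an arena, borrowing the
/// allocator for `'a`.
///
/// On drop the value is dropped (when `T` needs it) and the allocation is
/// returned to the arena, unless the handle was detached or converted into an
/// [`Owned`] via `to_owned`.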
#[derive(Debug)]
#[must_use = "The `T` is uninitialized, and must be initialized by `write` before it is used, if `T` is not zero sized type."]
pub struct RefMut<'a, T, A: Allocator> {
kind: Kind<T>,
arena: &'a A,
detached: bool,
pub(super) allocated: Meta,
}
impl<T, A: Allocator> crate::Buffer for RefMut<'_, T, A> {
#[inline]
fn capacity(&self) -> usize {
self.allocated.ptr_size as usize
}
#[inline]
fn offset(&self) -> usize {
self.allocated.ptr_offset as usize
}
#[inline]
fn buffer_capacity(&self) -> usize {
self.allocated.memory_size as usize
}
#[inline]
fn buffer_offset(&self) -> usize {
self.allocated.memory_offset as usize
}
#[inline]
unsafe fn detach(&mut self) {
self.detached = true;
}
#[cfg(all(feature = "memmap", not(target_family = "wasm")))]
fn flush(&self) -> std::io::Result<()> {
self.arena.flush_range(
self.allocated.ptr_offset as usize,
self.allocated.ptr_size as usize,
)
}
#[cfg(all(feature = "memmap", not(target_family = "wasm")))]
fn flush_async(&self) -> std::io::Result<()> {
self.arena.flush_async_range(
self.allocated.ptr_offset as usize,
self.allocated.ptr_size as usize,
)
}
}
impl<'a, T, A: Allocator> RefMut<'a, T, A> {
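/// Writes `value` into the underlying storage. For zero-sized types there is
/// nothing to store, so `value` is dropped immediately.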
#[inline]
pub fn write(&mut self, value: T) {
match &mut self.kind {
Kind::Slot(slot) => unsafe {
slot.as_mut_ptr().write(value);
},
Kind::Inline(ptr) => unsafe {
ptr.as_ptr().write(value);
},
Kind::Dangling(_) => {}
}
}
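/// Returns a shared reference to the value.
///
/// # Safety
/// - The value must have been initialized via [`write`](Self::write) first,
///   unless `T` is a zero-sized type.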
pub unsafe fn as_ref(&self) -> &T {
match &self.kind {
Kind::Slot(slot) => slot.as_ptr().as_ref().unwrap(),
Kind::Inline(ptr) => ptr.as_ref(),
Kind::Dangling(val) => val.as_ref(),
}
}
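/// Returns a mutable reference to the value.
///
/// # Safety
/// - The value must have been initialized via [`write`](Self::write) first,
///   unless `T` is a zero-sized type.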
pub unsafe fn as_mut(&mut self) -> &mut T {
match &mut self.kind {
Kind::Slot(slot) => slot.as_mut_ptr().as_mut().unwrap(),
Kind::Inline(ptr) => ptr.as_mut(),
Kind::Dangling(val) => val.as_mut(),
}
}
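/// Returns a raw pointer to the value, which may still be uninitialized.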
pub fn as_mut_ptr(&mut self) -> NonNull<T> {
match &mut self.kind {
Kind::Slot(slot) => {
if slot.as_ptr().is_null() {
NonNull::dangling()
} else {
unsafe { NonNull::new_unchecked(slot.as_mut_ptr()) }
}
}
Kind::Inline(ptr) => *ptr,
Kind::Dangling(val) => *val,
}
}
#[inline]
pub(super) fn new(slot: MaybeUninit<T>, allocated: Meta, arena: &'a A) -> Self {
Self {
kind: Kind::Slot(slot),
arena,
detached: false,
allocated,
}
}
#[inline]
pub(super) fn new_inline(value: NonNull<T>, allocated: Meta, arena: &'a A) -> Self {
Self {
kind: Kind::Inline(value),
arena,
detached: false,
allocated,
}
}
#[inline]
pub(super) fn new_zst(arena: &'a A) -> Self {
Self {
kind: Kind::Dangling(NonNull::dangling()),
allocated: Meta::null(arena.raw_ptr() as _),
arena,
detached: false,
}
}
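/// Converts this `RefMut` into an [`Owned`] handle by cloning the allocator and
/// detaching `self`, so that only the returned `Owned` releases the allocation.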
#[allow(clippy::wrong_self_convention)]
#[inline]
pub(super) fn to_owned(&mut self) -> Owned<T, A> {
self.detached = true;
Owned {
arena: self.arena.clone(),
kind: mem::take(&mut self.kind),
detached: false,
allocated: self.allocated,
}
}
}
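// Dropping a `RefMut` mirrors `Owned::drop`: drop the value in place if `T`
// needs it and return the allocation to the arena, unless the handle was detached.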
impl<T, A: Allocator> Drop for RefMut<'_, T, A> {
fn drop(&mut self) {
match &mut self.kind {
Kind::Slot(slot) => {
if !self.detached {
unsafe {
if mem::needs_drop::<T>() {
let ptr = slot.as_mut_ptr();
if !ptr.is_null() {
ptr::drop_in_place(ptr);
}
}
}
unsafe {
self
.arena
.dealloc(self.allocated.memory_offset, self.allocated.memory_size);
}
}
}
Kind::Inline(_) => {
if !self.detached {
unsafe {
self
.arena
.dealloc(self.allocated.memory_offset, self.allocated.memory_size);
}
}
}
Kind::Dangling(_) => {}
}
}
}