#![no_std]
use core::mem::ManuallyDrop;
use core::task::{RawWaker, RawWakerVTable, Waker};
use core::{
borrow::{Borrow, BorrowMut},
cell::UnsafeCell,
mem::MaybeUninit,
ops::{Deref, DerefMut},
pin::Pin,
sync::atomic::{AtomicBool, AtomicUsize, Ordering},
};
/// A minimal owning pointer mirroring `alloc::boxed::Box`, backed by a raw
/// pointer into a static `BoxStore` slot rather than a heap allocator.
pub struct Box<T: ?Sized>(*mut T);
impl<T> Box<T> {
    /// Unsupported: this crate has no global allocator; obtain a `Box`
    /// through `BoxStore::alloc` instead. Always panics.
    #[inline]
    pub fn new(_: T) -> Self {
        unimplemented!()
    }
    /// Unsupported: allocate via `BoxStore::alloc` and use
    /// [`Box::into_pin`]. Always panics.
    #[inline]
    pub fn pin(_: T) -> Pin<Self> {
        unimplemented!()
    }
}
impl<T: ?Sized> Box<T> {
    /// Rebuilds a `Box` from a pointer previously produced by
    /// [`Box::into_raw`].
    ///
    /// # Safety
    ///
    /// `raw` must point to a live, initialized `T`, and ownership transfers
    /// to the returned `Box`; calling this twice on the same pointer creates
    /// aliasing owners.
    #[inline]
    pub const unsafe fn from_raw(raw: *mut T) -> Self {
        Box(raw)
    }
    /// Consumes the `Box`, returning its raw pointer without dropping `T`.
    #[inline]
    pub const fn into_raw(b: Self) -> *mut T {
        b.0
    }
    /// Consumes the `Box` and returns a mutable reference with a
    /// caller-chosen lifetime; the value is never dropped afterwards.
    #[inline]
    pub fn leak<'a>(b: Self) -> &'a mut T {
        // SAFETY: the `Box` owned the pointee exclusively; ownership is
        // converted into a unique borrow that is never released.
        unsafe { &mut *b.0 }
    }
    /// Wraps the `Box` in `Pin`.
    #[inline]
    pub fn into_pin(boxed: Self) -> Pin<Self> {
        // SAFETY: sound for the same reason as `alloc::boxed::Box`: this
        // crate's `Box` API never moves the pointee out of its slot.
        unsafe { Pin::new_unchecked(boxed) }
    }
}
/// Forwards `Future` through the box, mirroring `alloc::boxed::Box`'s impl.
/// The `Unpin` bound on `F` is what makes the `Pin::new` below safe.
impl<F: ?Sized + core::future::Future + Unpin> core::future::Future for Box<F> {
    type Output = F::Output;
    fn poll(mut self: Pin<&mut Self>, cx: &mut core::task::Context<'_>) -> core::task::Poll<Self::Output> {
        // `&mut *self` goes `Pin<&mut Box<F>>` -> `&mut Box<F>` -> (deref
        // coercion) `&mut F`; `Pin::new` is allowed because `F: Unpin`.
        F::poll(Pin::new(&mut *self), cx)
    }
}
impl<T: ?Sized> Borrow<T> for Box<T> {
    /// Borrows the boxed value via `Deref`.
    #[inline]
    fn borrow(&self) -> &T {
        &**self
    }
}
impl<T: ?Sized> BorrowMut<T> for Box<T> {
    /// Mutably borrows the boxed value via `DerefMut`.
    #[inline]
    fn borrow_mut(&mut self) -> &mut T {
        &mut **self
    }
}
impl<T: ?Sized> AsRef<T> for Box<T> {
    /// Returns a shared reference to the boxed value.
    #[inline]
    fn as_ref(&self) -> &T {
        &**self
    }
}
impl<T: ?Sized> AsMut<T> for Box<T> {
    /// Returns a mutable reference to the boxed value.
    #[inline]
    fn as_mut(&mut self) -> &mut T {
        &mut **self
    }
}
impl<T: ?Sized> Deref for Box<T> {
    type Target = T;
    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: the pointer came from `BoxStore::alloc` (or from
        // `Box::from_raw`, whose contract requires validity), and the `Box`
        // uniquely owns the pointee while alive.
        unsafe { &*self.0 }
    }
}
impl<T: ?Sized> DerefMut for Box<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: same validity invariant as `Deref`, and `&mut self`
        // guarantees the borrow is unique.
        unsafe { &mut *self.0 }
    }
}
/// A single-slot, statically allocatable backing store for [`Box<T>`].
/// The `AtomicBool` records whether the slot is currently handed out.
pub struct BoxStore<T>(UnsafeCell<MaybeUninit<T>>, AtomicBool);
// SAFETY: the atomic flag guarantees at most one live `Box` into the slot.
// NOTE(review): this impl is unconditional; sharing the store across threads
// effectively sends a `T` to another thread via `alloc`/`dealloc`, so a
// `T: Send` bound is likely required for soundness — confirm intended usage.
unsafe impl<T> Sync for BoxStore<T> {}
impl<T> BoxStore<T> {
    /// Creates an empty store; `const`, so usable in `static` initializers.
    #[inline]
    pub const fn new() -> Self {
        Self(
            UnsafeCell::new(MaybeUninit::uninit()),
            AtomicBool::new(false),
        )
    }
    /// Moves `value` into the slot and returns an owning [`Box`], or `None`
    /// if the slot is already occupied.
    #[inline]
    pub fn alloc(&'static self, value: T) -> Option<Box<T>> {
        // `fetch_or(true)` atomically claims the slot; a prior value of
        // `true` means another caller already holds it.
        if self.1.fetch_or(true, Ordering::SeqCst) {
            None
        } else {
            unsafe {
                // SAFETY: winning the flag grants exclusive access to the
                // slot until `dealloc` publishes it as free again.
                let maybe_uninit = &mut *self.0.get();
                let pointer = maybe_uninit.write(value);
                Some(Box::from_raw(pointer))
            }
        }
    }
    /// Drops the boxed value and releases the slot. Returns `Err(())` (and
    /// leaks the value) if `ptr` does not belong to this store — we cannot
    /// release another store's slot from here.
    #[inline]
    pub fn dealloc(&'static self, ptr: Box<T>) -> Result<(), ()> {
        unsafe {
            let ptr = Box::into_raw(ptr);
            if (*self.0.get()).as_mut_ptr() == ptr {
                // Drop the payload first, then publish the slot as free.
                core::ptr::drop_in_place(ptr);
                self.1.store(false, Ordering::SeqCst);
                Ok(())
            } else {
                Err(())
            }
        }
    }
}
impl<T> Drop for BoxStore<T> {
    /// Drops the stored value if the slot is still occupied when the store
    /// itself is dropped.
    fn drop(&mut self) {
        if self.1.load(Ordering::SeqCst) {
            // BUG FIX: `self.0.get()` is `*mut MaybeUninit<T>`, and
            // `MaybeUninit` has no drop glue, so `drop_in_place` on it was a
            // no-op that leaked the stored `T`. Drop the initialized value
            // through `as_mut_ptr()` instead.
            // SAFETY: the flag being set means the slot holds an initialized
            // `T`, and `&mut self` gives exclusive access.
            unsafe { core::ptr::drop_in_place((*self.0.get()).as_mut_ptr()) };
        }
    }
}
/// A single-slot static backing store for [`Arc<T>`]: holds the refcounted
/// `ArcInner<T>` plus an occupancy flag.
pub struct ArcStore<T>(UnsafeCell<MaybeUninit<ArcInner<T>>>, AtomicBool);
// SAFETY: the atomic flag serializes slot claim/release.
// NOTE(review): `Arc` hands out `&T` on multiple threads, so this likely
// needs `T: Send + Sync` bounds to be sound — confirm intended usage.
unsafe impl<T> Sync for ArcStore<T> {}
impl<T> ArcStore<T> {
    /// Creates an empty store; `const`, so usable in `static` initializers.
    #[inline]
    pub const fn new() -> Self {
        Self(
            UnsafeCell::new(MaybeUninit::uninit()),
            AtomicBool::new(false),
        )
    }
    /// Moves `value` into the slot with a strong count of 1 and returns an
    /// [`Arc`], or `None` if the slot is already occupied.
    #[inline]
    pub fn alloc(&'static self, value: T) -> Option<Arc<T>> {
        // Atomically claim the slot; a prior `true` means it was taken.
        if self.1.fetch_or(true, Ordering::SeqCst) {
            None
        } else {
            unsafe {
                // SAFETY: winning the flag grants exclusive access for
                // initialization of the slot.
                let maybe_uninit = &mut *self.0.get();
                let pointer = maybe_uninit.write(ArcInner {
                    count: AtomicUsize::new(1),
                    data: UnsafeCell::new(value),
                });
                Some(Arc(pointer))
            }
        }
    }
    /// Releases one reference. If `ptr` is the last reference into this
    /// store, drops the payload and frees the slot (`Ok`); otherwise just
    /// decrements the count and returns `Err(())`.
    #[inline]
    pub fn dealloc(&'static self, ptr: Arc<T>) -> Result<(), ()> {
        unsafe {
            let count = Arc::count(&ptr);
            let ptr = Arc::into_raw(ptr);
            let ptr: *const ArcInner<T> = ptr.cast();
            // `count == 1` means the consumed handle was the sole owner, so
            // no other thread can clone or read it concurrently.
            if count == 1 && ptr == (*self.0.get()).as_ptr() {
                // SAFETY: sole owner of this store's slot. Drop only the
                // payload (the `AtomicUsize` needs no drop glue), then
                // publish the slot as free.
                core::ptr::drop_in_place((*ptr).data.get());
                self.1.store(false, Ordering::SeqCst);
                Ok(())
            } else {
                // Not the last reference, or a foreign pointer: give back
                // the reference we consumed via `into_raw`.
                Arc::<T>::decrement_count(ptr.cast());
                Err(())
            }
        }
    }
}
impl<T: core::fmt::Debug + ?Sized> core::fmt::Debug for Box<T> {
    /// Formats the boxed value transparently, like `alloc::boxed::Box`.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        (**self).fmt(f)
    }
}
impl<T> Drop for ArcStore<T> {
    /// Drops the stored `ArcInner` (and thus the payload) if the slot is
    /// still occupied when the store itself is dropped.
    fn drop(&mut self) {
        if self.1.load(Ordering::SeqCst) {
            // BUG FIX: `self.0.get()` is `*mut MaybeUninit<ArcInner<T>>`,
            // and `MaybeUninit` has no drop glue, so `drop_in_place` on it
            // was a no-op that leaked the payload. Drop the initialized
            // `ArcInner<T>` through `as_mut_ptr()` instead.
            // SAFETY: the flag being set means the slot is initialized, and
            // `&mut self` gives exclusive access.
            unsafe { core::ptr::drop_in_place((*self.0.get()).as_mut_ptr()) };
        }
    }
}
/// Record layout behind [`Arc`]: a strong reference count plus the payload.
struct ArcInner<T: ?Sized> {
    // Number of live `Arc` handles pointing at this record.
    count: AtomicUsize,
    // Payload; `UnsafeCell` because references are derived from a shared
    // `*const ArcInner` in `Deref`/`leak`.
    data: UnsafeCell<T>,
}
/// A minimal atomically reference-counted pointer mirroring
/// `alloc::sync::Arc`, backed by a static `ArcStore` slot.
pub struct Arc<T: ?Sized>(*const ArcInner<T>);
impl<T: ?Sized> Arc<T> {
#[inline]
#[must_use]
pub fn count(this: &Self) -> usize {
unsafe { (*this.0).count.load(Ordering::SeqCst) }
}
#[inline]
#[must_use]
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
this.0 == other.0
}
#[inline]
pub fn leak<'a>(this: Self) -> &'a T {
unsafe { &*(*this.0).data.get() }
}
}
impl<T> Arc<T> {
    /// Unsupported: allocate through `ArcStore::alloc` instead. Always
    /// panics.
    #[inline]
    pub fn new(_: T) -> Self {
        unimplemented!()
    }
    /// Increments the strong count of the record behind `ptr`.
    ///
    /// # Safety
    ///
    /// `ptr` must have come from [`Arc::into_raw`] and the count must stay
    /// at least 1 for the duration of the call.
    #[inline]
    pub unsafe fn increment_count(ptr: *const ()) {
        // The clone bumps the count; `ManuallyDrop` prevents both temporary
        // handles from decrementing it again on scope exit.
        let arc = ManuallyDrop::new(Arc::<T>::from_raw(ptr));
        let _arc_clone: ManuallyDrop<_> = arc.clone();
    }
    /// Decrements the strong count of the record behind `ptr`, consuming
    /// one reference.
    ///
    /// # Safety
    ///
    /// `ptr` must have come from [`Arc::into_raw`].
    #[inline]
    pub unsafe fn decrement_count(ptr: *const ()) {
        core::mem::drop(Arc::<T>::from_raw(ptr));
    }
    /// Rebuilds an `Arc` from a pointer produced by [`Arc::into_raw`].
    ///
    /// # Safety
    ///
    /// Note that unlike `alloc::sync::Arc`, `ptr` is the type-erased
    /// `ArcInner` pointer (not a payload pointer) and must originate from
    /// [`Arc::into_raw`].
    #[inline]
    pub unsafe fn from_raw(ptr: *const ()) -> Self {
        Self(ptr.cast())
    }
    /// Consumes the `Arc` without decrementing the count and returns its
    /// type-erased `ArcInner` pointer.
    #[inline]
    pub fn into_raw(this: Self) -> *const () {
        let ptr = this.0;
        // Forget so `Drop` does not release the reference we hand out.
        core::mem::forget(this);
        ptr.cast()
    }
}
impl<T: ?Sized> Borrow<T> for Arc<T> {
    /// Borrows the shared value via `Deref`.
    #[inline]
    fn borrow(&self) -> &T {
        &**self
    }
}
impl<T: ?Sized> AsRef<T> for Arc<T> {
    /// Returns a shared reference to the value.
    #[inline]
    fn as_ref(&self) -> &T {
        &**self
    }
}
impl<T: ?Sized> Deref for Arc<T> {
    type Target = T;
    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: a live `Arc` keeps its `ArcInner` initialized; the payload
        // is only dropped in `ArcStore::dealloc` once the count shows this
        // was the last handle.
        unsafe { &*(*self.0).data.get() }
    }
}
impl<T: ?Sized> Clone for Arc<T> {
    /// Creates another handle to the same value, bumping the strong count.
    #[inline]
    fn clone(&self) -> Arc<T> {
        // Guard against refcount overflow, like `alloc::sync::Arc`.
        let previous = unsafe { (*self.0).count.fetch_add(1, Ordering::Relaxed) };
        if previous > isize::MAX as usize {
            panic!();
        }
        Self(self.0)
    }
}
impl<T: ?Sized + core::fmt::Debug> core::fmt::Debug for Arc<T> {
    /// Formats the shared value transparently, like `alloc::sync::Arc`.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        (**self).fmt(f)
    }
}
impl<T: ?Sized> Drop for Arc<T> {
    /// Decrements the strong count. Payload and slot are reclaimed only via
    /// `ArcStore::dealloc`, never here.
    #[inline]
    fn drop(&mut self) {
        // NOTE(review): `alloc::sync::Arc` pairs its Release decrement with
        // an Acquire fence before dropping the payload; here the intended
        // pairing appears to be the SeqCst load in `Arc::count` used by
        // `ArcStore::dealloc` — confirm the ordering argument.
        unsafe { (*self.0).count.fetch_sub(1, Ordering::Release) };
    }
}
/// A `no_std` analogue of `alloc::task::Wake`: implementors can be turned
/// into a [`Waker`] through the `From<Arc<W>>` impls below.
pub trait Wake {
    /// Wakes the task, consuming one `Arc` handle.
    fn wake(this: Arc<Self>);
    /// Wakes the task without consuming the handle; defaults to cloning and
    /// delegating to [`Wake::wake`].
    fn wake_by_ref(this: &Arc<Self>) {
        Self::wake(this.clone());
    }
}
impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for Waker {
    /// Builds a `Waker` whose vtable clones, wakes, and drops through
    /// `Arc<W>` and the [`Wake`] trait.
    fn from(waker: Arc<W>) -> Waker {
        // SAFETY: `raw_waker` constructs a data pointer / vtable pair that
        // upholds the `RawWaker` contract (thread-safe clone/wake/drop).
        unsafe { Waker::from_raw(raw_waker(waker)) }
    }
}
impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for RawWaker {
    /// Builds the raw (unowned) form of the waker; see [`raw_waker`].
    fn from(waker: Arc<W>) -> RawWaker {
        raw_waker(waker)
    }
}
/// Builds the `RawWaker` for an `Arc<W>`, mirroring `alloc::task`'s
/// `Wake`-based waker. The data pointer is the type-erased `ArcInner<W>`
/// pointer from [`Arc::into_raw`]; each vtable entry rebuilds the `Arc`
/// from it with the appropriate ownership discipline.
#[inline(always)]
fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker {
    // Clone: bump the refcount and hand back an identical `RawWaker`.
    unsafe fn clone_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) -> RawWaker {
        Arc::<W>::increment_count(waker);
        RawWaker::new(
            waker as *const (),
            &RawWakerVTable::new(
                clone_waker::<W>,
                wake::<W>,
                wake_by_ref::<W>,
                drop_waker::<W>,
            ),
        )
    }
    // Wake-by-value: reconstruct the `Arc`, consuming this reference.
    unsafe fn wake<W: Wake + Send + Sync + 'static>(waker: *const ()) {
        let waker = Arc::<W>::from_raw(waker);
        <W as Wake>::wake(waker);
    }
    // Wake-by-ref: borrow the `Arc` without consuming the reference;
    // `ManuallyDrop` stops the temporary handle from decrementing the count.
    unsafe fn wake_by_ref<W: Wake + Send + Sync + 'static>(waker: *const ()) {
        let waker = ManuallyDrop::new(Arc::from_raw(waker));
        <W as Wake>::wake_by_ref(&waker);
    }
    // Drop: release the reference held by the waker.
    unsafe fn drop_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) {
        Arc::<W>::decrement_count(waker);
    }
    RawWaker::new(
        Arc::into_raw(waker) as *const (),
        &RawWakerVTable::new(
            clone_waker::<W>,
            wake::<W>,
            wake_by_ref::<W>,
            drop_waker::<W>,
        ),
    )
}
/// Mirror of `alloc::boxed`, re-exporting this crate's [`Box`].
pub mod boxed {
    pub use crate::Box;
}
/// Mirror of `alloc::sync`, re-exporting this crate's [`Arc`].
pub mod sync {
    pub use crate::Arc;
}
/// Mirror of `alloc::task`, re-exporting this crate's [`Wake`].
pub mod task {
    pub use crate::Wake;
}
#[cfg(test)]
mod tests {
    /// Smoke test confirming the test harness runs.
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}