use alloc::sync::{Arc, Weak};
use core::cell::UnsafeCell;
use core::sync::atomic::Ordering::{self, Relaxed, SeqCst};
use core::sync::atomic::{AtomicBool, AtomicPtr};
use std::borrow::Borrow;
use std::ops::Deref;
use super::abort::abort;
use super::ReadyToRunQueue;
use core::hash::Hash;
use futures_util::task::ArcWake;
/// A node in the queue's intrusive task lists: owns one future plus the key
/// it was inserted under, and the link fields used to chain it into both the
/// "all tasks" list and the ready-to-run queue.
pub(super) struct Task<K: Hash + Eq, Fut> {
    // The future this task drives.  Accessed through `UnsafeCell`, so access
    // must be externally synchronized; it must be `None` by the time the task
    // is dropped (the `Drop` impl aborts otherwise).
    pub(super) future: UnsafeCell<Option<Fut>>,
    // Next pointer in the linked list of all tasks.
    pub(super) next_all: AtomicPtr<Task<K, Fut>>,
    // Previous pointer in the list of all tasks.  Plain cell, so it is
    // presumably only mutated under the owner's exclusive access —
    // NOTE(review): confirm against the owning queue.
    pub(super) prev_all: UnsafeCell<*const Task<K, Fut>>,
    // NOTE(review): presumably the length of the "all" list as observed when
    // this node was inserted — the semantics are not visible in this file;
    // confirm before relying on it.
    pub(super) len_all: UnsafeCell<usize>,
    // Next pointer in the singly-linked ready-to-run queue.
    pub(super) next_ready_to_run: AtomicPtr<Task<K, Fut>>,
    // Weak back-reference to the ready-to-run queue; `wake_by_ptr` upgrades
    // it and turns into a no-op once the queue has been dropped.
    pub(super) ready_to_run_queue: Weak<ReadyToRunQueue<K, Fut>>,
    // True while the task sits in the ready-to-run queue; the false -> true
    // transition in `wake_by_ptr` is what permits an enqueue.
    pub(super) queued: AtomicBool,
    // Set to true on every wake (see `wake_by_ptr`).
    pub(super) woken: AtomicBool,
    // The user-supplied key; `None` after `take_key` has moved it out.
    pub(super) key: UnsafeCell<Option<K>>,
}
/// Newtype around `Arc<Task<..>>` so tasks can be stored in a hashed
/// collection keyed by the task's key (see the `Borrow`, `Hash`, and
/// `PartialEq` impls for `HashTask` in this file).
pub(super) struct HashTask<K: Hash + Eq, Fut> {
    // Shared handle to the underlying task node.
    pub(super) inner: Arc<Task<K, Fut>>,
}
impl<K: Hash + Eq, Fut> HashTask<K, Fut> {
fn key(&self) -> Option<&K> {
Task::key(&*self)
}
}
/// A `HashTask` transparently acts as its inner task.
impl<K: Hash + Eq, Fut> Deref for HashTask<K, Fut> {
    type Target = Task<K, Fut>;

    fn deref(&self) -> &Self::Target {
        &*self.inner
    }
}
// Lets a hashed collection of `HashTask`s be queried with a bare `&K`.
impl<K: Hash + Eq, Fut> Borrow<K> for HashTask<K, Fut> {
    fn borrow(&self) -> &K {
        // Panics if the key was already removed via `take_key`, so `Borrow`
        // must only be exercised while the task still owns its key.
        // NOTE(review): soundness of this unsynchronized `UnsafeCell` read
        // relies on no concurrent mutation of `key` — confirm against the
        // owning queue's access discipline.
        unsafe { (*self.key.get()).as_ref().unwrap() }
    }
}
impl<K: Hash + Eq, Fut> Hash for HashTask<K, Fut> {
    /// Hashes by key so lookups done with a bare `&K` (via `Borrow`) match.
    /// Panics if the key has already been taken out of the task.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        let key = self.key().unwrap();
        key.hash(state);
    }
}
impl<K: Hash + Eq, Fut> PartialEq for HashTask<K, Fut> {
fn eq(&self, other: &Self) -> bool {
self.key() == other.key()
}
}
impl<K: Hash + Eq, Fut> Eq for HashTask<K, Fut> {}
// NOTE(review): these impls are unconditional in `K` and `Fut` — there is no
// `K: Send`/`Fut: Send` (or `Sync`) bound.  That is only sound if the
// surrounding code never touches the future or key from a thread it was not
// allowed to; confirm against the owning queue's access discipline before
// trusting these.
unsafe impl<K: Hash + Eq, Fut> Send for HashTask<K, Fut> {}
unsafe impl<K: Hash + Eq, Fut> Sync for HashTask<K, Fut> {}
unsafe impl<K: Hash + Eq, Fut> Send for Task<K, Fut> {}
unsafe impl<K: Hash + Eq, Fut> Sync for Task<K, Fut> {}
impl<K: Hash + Eq, Fut> ArcWake for Task<K, Fut> {
    /// Routes wakeups through the raw-pointer path; `Arc::as_ptr` does not
    /// change the reference count.
    fn wake_by_ref(arc_self: &Arc<Self>) {
        let raw: *const Self = Arc::as_ptr(arc_self);
        Self::wake_by_ptr(raw);
    }
}
impl<K: Hash + Eq, Fut> Task<K, Fut> {
    /// Creates a `Waker` for `this` without bumping its reference count.
    ///
    /// # Safety
    ///
    /// The returned waker borrows the `Arc`'s allocation without owning a
    /// strong count; `this` must stay alive for as long as the un-cloned
    /// waker is used (the lifetime on `WakerRef` enforces this in safe code).
    pub(super) unsafe fn waker_ref(this: &Arc<Self>) -> waker_ref::WakerRef<'_> {
        unsafe { waker_ref::waker_ref(this) }
    }

    /// Marks the task as woken and, if it is not already queued, pushes it
    /// onto the ready-to-run queue and wakes the queue's waker.
    pub(super) fn wake_by_ptr(this: *const Task<K, Fut>) {
        // SAFETY: callers must pass a pointer to a live `Task` (the waker
        // vtable keeps a strong count behind it) — NOTE(review): confirm all
        // call sites uphold this.
        let task = unsafe { &*this };

        // If the queue is gone, the owning collection was dropped; waking is
        // then a harmless no-op.
        let inner = match task.ready_to_run_queue.upgrade() {
            Some(inner) => inner,
            None => return,
        };

        // Record that a wake happened; the `SeqCst` swap below orders this
        // store before any enqueue — NOTE(review): `Relaxed` here mirrors the
        // original code; confirm readers of `woken` tolerate it.
        task.woken.store(true, Relaxed);

        // Only the caller that flips `queued` false -> true enqueues, so the
        // task appears at most once in the ready-to-run list at a time.
        let prev = task.queued.swap(true, SeqCst);
        if !prev {
            inner.enqueue(this);
            inner.waker.wake();
        }
    }

    /// Spins until `next_all` no longer holds the `pending_next_all`
    /// sentinel, then returns the settled pointer.
    #[inline]
    pub(super) fn spin_next_all(
        &self,
        pending_next_all: *mut Self,
        ordering: Ordering,
    ) -> *const Self {
        loop {
            let next = self.next_all.load(ordering);
            if next != pending_next_all {
                return next;
            }
            // Busy-wait hint: lets the CPU relax the core (SMT yield, reduced
            // power) instead of hammering the cache line at full speed.
            core::hint::spin_loop();
        }
    }

    /// Borrows the task's key, or `None` if it was already taken.
    pub(super) fn key(&self) -> Option<&K> {
        // SAFETY: relies on callers not mutating `key` concurrently —
        // NOTE(review): confirm against the owning queue's access discipline.
        unsafe { (&*self.key.get()).as_ref() }
    }

    /// Moves the key out of the task; panics if it was already taken.
    pub(super) fn take_key(&self) -> K {
        // SAFETY: same external-synchronization requirement as `key`.
        unsafe { (*self.key.get()).take().unwrap() }
    }
}
impl<K: Hash + Eq, Fut> Drop for Task<K, Fut> {
    /// Aborts the process if the task still owns its future when dropped.
    ///
    /// The owning queue is expected to take the future out of `self.future`
    /// before the task node is released.  NOTE(review): presumably dropping a
    /// live future here could run user code at a point where the intrusive
    /// lists are in an inconsistent state, hence abort rather than panic —
    /// confirm against the queue's drop/removal logic.
    fn drop(&mut self) {
        unsafe {
            if (*self.future.get()).is_some() {
                abort("future still here when dropping");
            }
        }
    }
}
/// Local reimplementation of a borrowed-waker helper: builds a [`Waker`] from
/// an `&Arc<W>` without touching the reference count, so creating a fresh
/// waker for every poll is free.  NOTE(review): this appears adapted from the
/// `futures-task` crate — keep in sync with any upstream fixes.
mod waker_ref {
    use alloc::sync::Arc;
    use core::marker::PhantomData;
    use core::mem;
    use core::mem::ManuallyDrop;
    use core::ops::Deref;
    use core::task::{RawWaker, RawWakerVTable, Waker};
    use futures_task::ArcWake;

    /// A `Waker` that is only valid for the lifetime `'a` of the borrowed
    /// `Arc`.  The inner waker is wrapped in `ManuallyDrop` because it does
    /// not own a strong count: letting it drop would run `drop_arc_raw` and
    /// release a reference this waker never acquired.
    pub(crate) struct WakerRef<'a> {
        waker: ManuallyDrop<Waker>,
        // Ties the waker's validity to the borrow it was created from.
        _marker: PhantomData<&'a ()>,
    }

    impl WakerRef<'_> {
        // Wraps a waker whose reference count is owned elsewhere.
        #[inline]
        fn new_unowned(waker: ManuallyDrop<Waker>) -> Self {
            Self {
                waker,
                _marker: PhantomData,
            }
        }
    }

    impl Deref for WakerRef<'_> {
        type Target = Waker;

        #[inline]
        fn deref(&self) -> &Waker {
            &self.waker
        }
    }

    /// Creates a `WakerRef` from `wake` without incrementing its reference
    /// count.
    ///
    /// # Safety
    ///
    /// NOTE(review): the `futures-task` equivalent is a *safe* fn — the
    /// returned `WakerRef<'_>` cannot outlive the borrowed `Arc`, and clones
    /// made through the vtable take their own reference — so the `unsafe`
    /// marker here looks conservative; confirm why it was added.
    #[inline]
    pub(crate) unsafe fn waker_ref<W>(wake: &Arc<W>) -> WakerRef<'_>
    where
        W: ArcWake,
    {
        // `Arc::as_ptr` only reads the pointer; no count is taken.
        let ptr = Arc::as_ptr(wake).cast::<()>();
        // SAFETY-adjacent: the waker is immediately wrapped in `ManuallyDrop`
        // so its (unowned) count is never released.
        let waker =
            ManuallyDrop::new(unsafe { Waker::from_raw(RawWaker::new(ptr, waker_vtable::<W>())) });
        WakerRef::new_unowned(waker)
    }

    // Vtable for wakers backed by `Arc<W>`; the `&'static` lifetime comes
    // from constant promotion of the `RawWakerVTable::new` expression.
    fn waker_vtable<W: ArcWake>() -> &'static RawWakerVTable {
        &RawWakerVTable::new(
            clone_arc_raw::<W>,
            wake_arc_raw::<W>,
            wake_by_ref_arc_raw::<W>,
            drop_arc_raw::<W>,
        )
    }

    // Bumps the strong count behind `data` by exactly one: both the
    // reconstructed `Arc` and its clone are wrapped in `ManuallyDrop`, so
    // neither is released here.  Caller contract: `data` must originate from
    // a raw `Arc<T>` pointer that is still live.
    unsafe fn increase_refcount<T: ArcWake>(data: *const ()) {
        // SAFETY: per the caller contract above, `data` is a valid `Arc<T>`
        // allocation; `ManuallyDrop` prevents the count from being released.
        let arc = mem::ManuallyDrop::new(unsafe { Arc::<T>::from_raw(data.cast::<T>()) });
        let _arc_clone: mem::ManuallyDrop<_> = arc.clone();
    }

    // `RawWakerVTable` clone hook: the returned waker owns its own count.
    unsafe fn clone_arc_raw<T: ArcWake>(data: *const ()) -> RawWaker {
        unsafe { increase_refcount::<T>(data) }
        RawWaker::new(data, waker_vtable::<T>())
    }

    // `RawWakerVTable` wake hook: consumes the waker's strong count (the
    // reconstructed `Arc` is dropped by `ArcWake::wake`).
    unsafe fn wake_arc_raw<T: ArcWake>(data: *const ()) {
        let arc: Arc<T> = unsafe { Arc::from_raw(data.cast::<T>()) };
        ArcWake::wake(arc);
    }

    // `RawWakerVTable` wake_by_ref hook: wakes without consuming a count, so
    // the reconstructed `Arc` is kept in `ManuallyDrop` and never dropped.
    unsafe fn wake_by_ref_arc_raw<T: ArcWake>(data: *const ()) {
        let arc = mem::ManuallyDrop::new(unsafe { Arc::<T>::from_raw(data.cast::<T>()) });
        ArcWake::wake_by_ref(&arc);
    }

    // `RawWakerVTable` drop hook: releases the waker's strong count.
    unsafe fn drop_arc_raw<T: ArcWake>(data: *const ()) {
        drop(unsafe { Arc::<T>::from_raw(data.cast::<T>()) })
    }
}