// fp_bindgen_support/guest/async/task.rs

use serde::Serialize;

use std::cell::{Cell, RefCell};
use std::future::Future;
use std::mem::ManuallyDrop;
use std::pin::Pin;
use std::rc::Rc;
use std::task::{Context, RawWaker, RawWakerVTable, Waker};

use crate::common::mem::{to_fat_ptr, FatPtr};
use crate::common::r#async::AsyncValue;
use crate::guest::io::export_value_to_host;

use super::host_resolve_async_value;
19struct Inner {
20 future: Pin<Box<dyn Future<Output = ()> + 'static>>,
21 waker: Waker,
22}
23
24pub struct Task {
25 inner: RefCell<Option<Inner>>,
30
31 is_queued: Cell<bool>,
33}
34
35impl Task {
36 pub fn spawn(future: Pin<Box<dyn Future<Output = ()> + 'static>>) {
37 let this = Rc::new(Self {
38 inner: RefCell::new(None),
39 is_queued: Cell::new(false),
40 });
41
42 let waker = unsafe { Waker::from_raw(Task::into_raw_waker(Rc::clone(&this))) };
43
44 *this.inner.borrow_mut() = Some(Inner { future, waker });
45
46 Task::wake_by_ref(&this);
47 }
48
49 pub fn alloc_and_spawn<FUT, RET>(future: FUT) -> FatPtr
50 where
51 FUT: Future<Output = RET> + 'static,
52 RET: Serialize,
53 {
54 let layout = std::alloc::Layout::new::<AsyncValue>();
55 let len = layout.size() as u32;
56 let ptr = unsafe { std::alloc::alloc_zeroed(layout) };
57 let fat_ptr = to_fat_ptr(ptr, len);
58
59 Task::spawn(Box::pin(async move {
60 let ret = future.await;
61 let result_ptr = export_value_to_host(&ret);
62 host_resolve_async_value(fat_ptr, result_ptr);
63 }));
64
65 fat_ptr
66 }
67
68 fn wake_by_ref(this: &Rc<Self>) {
69 if this.is_queued.replace(true) {
73 return;
74 }
75
76 super::queue::push_task(Rc::clone(this));
77 }
78
79 unsafe fn into_raw_waker(this: Rc<Self>) -> RawWaker {
87 unsafe fn raw_clone(ptr: *const ()) -> RawWaker {
88 let ptr = ManuallyDrop::new(Rc::from_raw(ptr as *const Task));
89 Task::into_raw_waker((*ptr).clone())
90 }
91
92 unsafe fn raw_wake(ptr: *const ()) {
93 let ptr = Rc::from_raw(ptr as *const Task);
94 Task::wake_by_ref(&ptr);
95 }
96
97 unsafe fn raw_wake_by_ref(ptr: *const ()) {
98 let ptr = ManuallyDrop::new(Rc::from_raw(ptr as *const Task));
99 Task::wake_by_ref(&ptr);
100 }
101
102 unsafe fn raw_drop(ptr: *const ()) {
103 drop(Rc::from_raw(ptr as *const Task));
104 }
105
106 const VTABLE: RawWakerVTable =
107 RawWakerVTable::new(raw_clone, raw_wake, raw_wake_by_ref, raw_drop);
108
109 RawWaker::new(Rc::into_raw(this) as *const (), &VTABLE)
110 }
111
112 pub(crate) fn run(&self) {
113 let mut borrow = self.inner.borrow_mut();
114
115 let inner = match borrow.as_mut() {
118 Some(inner) => inner,
119 None => return,
120 };
121
122 self.is_queued.set(false);
125
126 let poll = {
127 let mut cx = Context::from_waker(&inner.waker);
128 inner.future.as_mut().poll(&mut cx)
129 };
130
131 if poll.is_ready() {
138 *borrow = None;
139 }
140 }
141}