// js_sys/futures/queue.rs
use crate::Promise;
use alloc::collections::VecDeque;
use alloc::rc::Rc;
use core::cell::{Cell, RefCell};
use core::panic::AssertUnwindSafe;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsError;

// Raw bindings to the JS microtask machinery.
#[wasm_bindgen]
extern "C" {
    // `globalThis.queueMicrotask(cb)`: schedules `cb` to run at the next
    // microtask checkpoint — the earliest point the engine will call back
    // into us.
    #[wasm_bindgen]
    fn queueMicrotask(closure: &Closure<dyn FnMut(JsValue) -> Result<(), JsError>>);

    // Opaque handle for the JS global object, used only for the
    // feature-detection getter below.
    type Global;

    // Reads `globalThis.queueMicrotask` as a raw `JsValue` (getter, not a
    // call) so the caller can test whether the function exists at runtime.
    #[wasm_bindgen(method, getter, js_name = queueMicrotask)]
    fn hasQueueMicrotask(this: &Global) -> JsValue;
}
19
20struct QueueState {
21    // The queue of Tasks which are to be run in order. In practice this is all the
22    // synchronous work of futures, and each `Task` represents calling `poll` on
23    // a future "at the right time".
24    tasks: RefCell<VecDeque<Rc<crate::futures::task::Task>>>,
25
26    // This flag indicates whether we've scheduled `run_all` to run in the future.
27    // This is used to ensure that it's only scheduled once.
28    is_scheduled: Cell<bool>,
29}
30
31impl QueueState {
32    fn run_all(&self) {
33        // "consume" the schedule
34        let _was_scheduled = self.is_scheduled.replace(false);
35        debug_assert!(_was_scheduled);
36
37        // Stop when all tasks that have been scheduled before this tick have been run.
38        // Tasks that are scheduled while running tasks will run on the next tick.
39        let mut task_count_left = self.tasks.borrow().len();
40        while task_count_left > 0 {
41            task_count_left -= 1;
42            let task = match self.tasks.borrow_mut().pop_front() {
43                Some(task) => task,
44                None => break,
45            };
46            task.run();
47        }
48
49        // All of the Tasks have been run, so it's now possible to schedule the
50        // next tick again
51    }
52}
53
54pub(crate) struct Queue {
55    state: Rc<QueueState>,
56    promise: Promise,
57    closure: Closure<dyn FnMut(JsValue) -> Result<(), JsError>>,
58    has_queue_microtask: bool,
59}
60
61impl Queue {
62    // Schedule a task to run on the next tick
63    pub(crate) fn schedule_task(&self, task: Rc<crate::futures::task::Task>) {
64        self.state.tasks.borrow_mut().push_back(task);
65        // Use queueMicrotask to execute as soon as possible. If it does not exist
66        // fall back to the promise resolution
67        if !self.state.is_scheduled.replace(true) {
68            if self.has_queue_microtask {
69                queueMicrotask(&self.closure);
70            } else {
71                let _ = self.promise.then_map(&self.closure);
72            }
73        }
74    }
75    // Append a task to the currently running queue, or schedule it
76    #[cfg(not(target_feature = "atomics"))]
77    pub(crate) fn push_task(&self, task: Rc<crate::futures::task::Task>) {
78        // It would make sense to run this task on the same tick.  For now, we
79        // make the simplifying choice of always scheduling tasks for a future tick.
80        self.schedule_task(task)
81    }
82}
83
impl Queue {
    /// Build a fresh `Queue`: empty task list, nothing scheduled, plus the
    /// JS callback and fallback promise used to trigger `run_all` on a
    /// later microtask tick.
    fn new() -> Self {
        let state = Rc::new(QueueState {
            is_scheduled: Cell::new(false),
            tasks: RefCell::new(VecDeque::new()),
        });

        // Runtime feature-detection: `true` iff `globalThis.queueMicrotask`
        // exists and is callable.
        let has_queue_microtask = crate::global()
            .unchecked_into::<Global>()
            .hasQueueMicrotask()
            .is_function();

        Self {
            // Pre-resolved promise used as the scheduling fallback.
            promise: Promise::resolve(&JsValue::undefined()),

            closure: {
                // `AssertUnwindSafe` satisfies the unwind-safety bound on
                // the captured state. NOTE(review): this presumes a panic
                // unwinding across this closure leaves the queue state
                // usable — confirm against the crate's panic strategy.
                let state = AssertUnwindSafe(Rc::clone(&state));

                // This closure will only be called on the next microtask event
                // tick
                Closure::new(move |_| {
                    state.run_all();
                    Ok(())
                })
            },

            state,
            has_queue_microtask,
        }
    }

    /// Run `f` with a reference to the lazily-initialized per-module queue.
    pub(crate) fn with<R>(f: impl FnOnce(&Self) -> R) -> R {
        use once_cell::unsync::Lazy;

        struct Wrapper<T>(Lazy<T>);

        // NOTE(review): `unsync::Lazy` is neither `Sync` nor `Send`, which a
        // non-thread-local `static` requires. These impls assert both anyway;
        // presumably sound only because without the `atomics` target feature
        // the wasm module runs single-threaded — confirm.
        #[cfg(not(target_feature = "atomics"))]
        unsafe impl<T> Sync for Wrapper<T> {}

        #[cfg(not(target_feature = "atomics"))]
        unsafe impl<T> Send for Wrapper<T> {}

        // With `atomics` enabled the static is `#[thread_local]` instead, so
        // each thread gets its own queue and no `Send`/`Sync` claim is made.
        #[cfg_attr(target_feature = "atomics", thread_local)]
        static QUEUE: Wrapper<Queue> = Wrapper(Lazy::new(Queue::new));

        f(&QUEUE.0)
    }
}
131}