use crate::collections::map::HashMap;
use crate::collections::map::HashSet;
use crate::MutableStateInner;
use std::any::Any;
use std::cell::{Cell, Ref, RefCell};
use std::collections::{HashMap as StdHashMap, VecDeque};
use std::future::Future;
use std::pin::Pin;
use std::rc::{Rc, Weak};
use std::sync::atomic::{AtomicU32, AtomicUsize, Ordering};
use std::sync::{mpsc, Arc};
use std::task::{Context, Poll, Waker};
use std::thread::ThreadId;
use std::thread_local;

#[cfg(any(feature = "internal", test))]
use crate::frame_clock::FrameClock;
use crate::platform::RuntimeScheduler;
use crate::{Applier, Command, FrameCallbackId, NodeError, RecomposeScopeInner, ScopeId};

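/// Cross-thread messages drained on the UI thread: either a boxed task to run,
/// or a value to deliver to a registered continuation by id.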
enum UiMessage {
    Task(Box<dyn FnOnce() + Send + 'static>),
    Invoke { id: u64, value: Box<dyn Any + Send> },
}

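// Continuations are logically one-shot: `register_ui_cont` wraps each callback
// in a `RefCell<Option<..>>` so a second invocation panics rather than
// silently re-running it.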
type UiContinuation = Box<dyn Fn(Box<dyn Any>) + 'static>;
type UiContinuationMap = HashMap<u64, UiContinuation>;

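/// Object-safe erasure that lets state cells of different `T` share one arena;
/// callers recover the concrete type by downcasting the `as_any()` result.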
trait AnyStateCell {
    fn as_any(&self) -> &dyn Any;
}

struct TypedStateCell<T: Clone + 'static> {
    inner: MutableStateInner<T>,
}

impl<T: Clone + 'static> AnyStateCell for TypedStateCell<T> {
    fn as_any(&self) -> &dyn Any {
        &self.inner
    }
}

#[allow(dead_code)]
struct RawStateCell<T: 'static> {
    value: T,
}

impl<T: 'static> AnyStateCell for RawStateCell<T> {
    fn as_any(&self) -> &dyn Any {
        &self.value
    }
}

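/// Arena of type-erased state cells owned by a runtime. A `StateId` is an
/// index into `cells`; slots are `Option` so a cell could be retired without
/// shifting the ids of later cells.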
#[derive(Default)]
pub(crate) struct StateArena {
    cells: RefCell<Vec<Option<Box<dyn AnyStateCell>>>>,
}

impl StateArena {
    pub(crate) fn alloc<T: Clone + 'static>(&self, value: T, runtime: RuntimeHandle) -> StateId {
        let mut cells = self.cells.borrow_mut();
        let id = StateId(cells.len() as u32);
        let inner = MutableStateInner::new(value, runtime);
        inner.install_snapshot_observer(id);
        let cell: Box<dyn AnyStateCell> = Box::new(TypedStateCell { inner });
        cells.push(Some(cell));
        id
    }

    #[allow(dead_code)]
    pub(crate) fn alloc_raw<T: 'static>(&self, value: T) -> StateId {
        let mut cells = self.cells.borrow_mut();
        let id = StateId(cells.len() as u32);
        let cell: Box<dyn AnyStateCell> = Box::new(RawStateCell { value });
        cells.push(Some(cell));
        id
    }

    fn get_cell(&self, id: StateId) -> Ref<'_, Box<dyn AnyStateCell>> {
        Ref::map(self.cells.borrow(), |cells| {
            cells
                .get(id.0 as usize)
                .and_then(|cell| cell.as_ref())
                .expect("state cell missing")
        })
    }

    pub(crate) fn get_typed<T: Clone + 'static>(
        &self,
        id: StateId,
    ) -> Ref<'_, MutableStateInner<T>> {
        Ref::map(self.get_cell(id), |cell| {
            cell.as_any()
                .downcast_ref::<MutableStateInner<T>>()
                .expect("state cell type mismatch")
        })
    }

    #[allow(dead_code)]
    pub(crate) fn get_raw<T: 'static>(&self, id: StateId) -> Ref<'_, T> {
        Ref::map(self.get_cell(id), |cell| {
            cell.as_any()
                .downcast_ref::<T>()
                .expect("raw state cell type mismatch")
        })
    }

    pub(crate) fn get_typed_opt<T: Clone + 'static>(
        &self,
        id: StateId,
    ) -> Option<Ref<'_, MutableStateInner<T>>> {
        Ref::filter_map(self.get_cell(id), |cell| {
            cell.as_any().downcast_ref::<MutableStateInner<T>>()
        })
        .ok()
    }
}

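// Per-thread registry of live runtime handles keyed by `RuntimeId`, so crate
// code can look a runtime back up by id (see `runtime_handle_for`).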
thread_local! {
    #[allow(clippy::missing_const_for_thread_local)]
    static RUNTIME_HANDLES: RefCell<StdHashMap<RuntimeId, RuntimeHandle>> =
        RefCell::new(StdHashMap::new());
}

fn register_runtime_handle(handle: &RuntimeHandle) {
    RUNTIME_HANDLES.with(|registry| {
        registry.borrow_mut().insert(handle.id, handle.clone());
    });
    LAST_RUNTIME.with(|slot| *slot.borrow_mut() = Some(handle.clone()));
}

pub(crate) fn runtime_handle_for(id: RuntimeId) -> Option<RuntimeHandle> {
    RUNTIME_HANDLES.with(|registry| registry.borrow().get(&id).cloned())
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct StateId(pub(crate) u32);

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct RuntimeId(u32);

impl RuntimeId {
    fn next() -> Self {
        static NEXT_RUNTIME_ID: AtomicU32 = AtomicU32::new(1);
        Self(NEXT_RUNTIME_ID.fetch_add(1, Ordering::Relaxed))
    }
}

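// Send half of the UI message channel together with the scheduler used to wake
// the UI thread; `pending` counts messages posted but not yet executed.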
struct UiDispatcherInner {
    scheduler: Arc<dyn RuntimeScheduler>,
    tx: mpsc::Sender<UiMessage>,
    pending: AtomicUsize,
}

impl UiDispatcherInner {
    fn new(scheduler: Arc<dyn RuntimeScheduler>, tx: mpsc::Sender<UiMessage>) -> Self {
        Self {
            scheduler,
            tx,
            pending: AtomicUsize::new(0),
        }
    }

    fn post(&self, task: impl FnOnce() + Send + 'static) {
        self.pending.fetch_add(1, Ordering::SeqCst);
        let _ = self.tx.send(UiMessage::Task(Box::new(task)));
        self.scheduler.schedule_frame();
    }

    fn post_invoke(&self, id: u64, value: Box<dyn Any + Send>) {
        self.pending.fetch_add(1, Ordering::SeqCst);
        let _ = self.tx.send(UiMessage::Invoke { id, value });
        self.scheduler.schedule_frame();
    }

    fn has_pending(&self) -> bool {
        self.pending.load(Ordering::SeqCst) > 0
    }
}

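// Drop guard that decrements the dispatcher's pending counter once a message
// has been handled, even if the handler panics.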
struct PendingGuard<'a> {
    counter: &'a AtomicUsize,
}

impl<'a> PendingGuard<'a> {
    fn new(counter: &'a AtomicUsize) -> Self {
        Self { counter }
    }
}

impl<'a> Drop for PendingGuard<'a> {
    fn drop(&mut self) {
        let previous = self.counter.fetch_sub(1, Ordering::SeqCst);
        debug_assert!(previous > 0, "UI dispatcher pending count underflowed");
    }
}

#[derive(Clone)]
pub struct UiDispatcher {
    inner: Arc<UiDispatcherInner>,
}

impl UiDispatcher {
    fn new(inner: Arc<UiDispatcherInner>) -> Self {
        Self { inner }
    }

    pub fn post(&self, task: impl FnOnce() + Send + 'static) {
        self.inner.post(task);
    }

    pub fn post_invoke<T>(&self, id: u64, value: T)
    where
        T: Send + 'static,
    {
        self.inner.post_invoke(id, Box::new(value));
    }

    pub fn has_pending(&self) -> bool {
        self.inner.has_pending()
    }
}

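/// Single-threaded core of the runtime. Everything here is `RefCell`/`Cell`
/// state owned by the UI thread; the only cross-thread entry point is the
/// mpsc channel behind `ui_dispatcher`.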
struct RuntimeInner {
    scheduler: Arc<dyn RuntimeScheduler>,
    needs_frame: RefCell<bool>,
    node_updates: RefCell<Vec<Command>>,
    invalid_scopes: RefCell<HashSet<ScopeId>>,
    scope_queue: RefCell<Vec<(ScopeId, Weak<RecomposeScopeInner>)>>,
    frame_callbacks: RefCell<VecDeque<FrameCallbackEntry>>,
    next_frame_callback_id: Cell<u64>,
    ui_dispatcher: Arc<UiDispatcherInner>,
    ui_rx: RefCell<mpsc::Receiver<UiMessage>>,
    local_tasks: RefCell<VecDeque<Box<dyn FnOnce() + 'static>>>,
    ui_conts: RefCell<UiContinuationMap>,
    next_cont_id: Cell<u64>,
    ui_thread_id: ThreadId,
    tasks: RefCell<Vec<TaskEntry>>,
    next_task_id: Cell<u64>,
    task_waker: RefCell<Option<Waker>>,
    state_arena: StateArena,
    runtime_id: RuntimeId,
}

struct TaskEntry {
    id: u64,
    future: Pin<Box<dyn Future<Output = ()> + 'static>>,
}

impl RuntimeInner {
    fn new(scheduler: Arc<dyn RuntimeScheduler>) -> Self {
        let (tx, rx) = mpsc::channel();
        let dispatcher = Arc::new(UiDispatcherInner::new(scheduler.clone(), tx));
        Self {
            scheduler,
            needs_frame: RefCell::new(false),
            node_updates: RefCell::new(Vec::new()),
            invalid_scopes: RefCell::new(HashSet::default()),
            scope_queue: RefCell::new(Vec::new()),
            frame_callbacks: RefCell::new(VecDeque::new()),
            next_frame_callback_id: Cell::new(1),
            ui_dispatcher: dispatcher,
            ui_rx: RefCell::new(rx),
            local_tasks: RefCell::new(VecDeque::new()),
            ui_conts: RefCell::new(UiContinuationMap::default()),
            next_cont_id: Cell::new(1),
            ui_thread_id: std::thread::current().id(),
            tasks: RefCell::new(Vec::new()),
            next_task_id: Cell::new(1),
            task_waker: RefCell::new(None),
            state_arena: StateArena::default(),
            runtime_id: RuntimeId::next(),
        }
    }

    fn init_task_waker(this: &Rc<Self>) {
        let weak = Rc::downgrade(this);
        let waker = RuntimeTaskWaker::new(weak).into_waker();
        *this.task_waker.borrow_mut() = Some(waker);
    }

    fn schedule(&self) {
        *self.needs_frame.borrow_mut() = true;
        self.scheduler.schedule_frame();
    }

    fn enqueue_update(&self, command: Command) {
        self.node_updates.borrow_mut().push(command);
        self.schedule();
    }

    fn take_updates(&self) -> Vec<Command> {
        self.node_updates.borrow_mut().drain(..).collect()
    }

    fn has_updates(&self) -> bool {
        !self.node_updates.borrow().is_empty() || self.has_invalid_scopes()
    }

    fn register_invalid_scope(&self, id: ScopeId, scope: Weak<RecomposeScopeInner>) {
        let mut invalid = self.invalid_scopes.borrow_mut();
        if invalid.insert(id) {
            self.scope_queue.borrow_mut().push((id, scope));
            self.schedule();
        }
    }

    fn mark_scope_recomposed(&self, id: ScopeId) {
        self.invalid_scopes.borrow_mut().remove(&id);
    }

    fn take_invalidated_scopes(&self) -> Vec<(ScopeId, Weak<RecomposeScopeInner>)> {
        let mut queue = self.scope_queue.borrow_mut();
        if queue.is_empty() {
            return Vec::new();
        }
        let pending: Vec<_> = queue.drain(..).collect();
        drop(queue);
        let invalid = self.invalid_scopes.borrow();
        pending
            .into_iter()
            .filter(|(id, _)| invalid.contains(id))
            .collect()
    }

    fn has_invalid_scopes(&self) -> bool {
        !self.invalid_scopes.borrow().is_empty()
    }

    fn has_frame_callbacks(&self) -> bool {
        !self.frame_callbacks.borrow().is_empty()
    }

    fn enqueue_ui_task(&self, task: Box<dyn FnOnce() + 'static>) {
        self.local_tasks.borrow_mut().push_back(task);
        self.schedule();
    }

    fn spawn_ui_task(&self, future: Pin<Box<dyn Future<Output = ()> + 'static>>) -> u64 {
        let id = self.next_task_id.get();
        self.next_task_id.set(id + 1);
        self.tasks.borrow_mut().push(TaskEntry { id, future });
        self.schedule();
        id
    }

    fn cancel_task(&self, id: u64) {
        self.tasks.borrow_mut().retain(|entry| entry.id != id);
    }

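    // Polls every spawned future once. The task list is moved out of its
    // RefCell first so that a future which spawns or cancels tasks while being
    // polled does not hit a re-entrant borrow.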
    fn poll_async_tasks(&self) -> bool {
        let waker = match self.task_waker.borrow().as_ref() {
            Some(waker) => waker.clone(),
            None => return false,
        };
        let mut cx = Context::from_waker(&waker);
        let mut tasks_ref = self.tasks.borrow_mut();
        let tasks = std::mem::take(&mut *tasks_ref);
        drop(tasks_ref);
        let mut pending = Vec::with_capacity(tasks.len());
        let mut made_progress = false;
        for mut entry in tasks.into_iter() {
            match entry.future.as_mut().poll(&mut cx) {
                Poll::Ready(()) => {
                    made_progress = true;
                }
                Poll::Pending => {
                    pending.push(entry);
                }
            }
        }
        if !pending.is_empty() {
            self.tasks.borrow_mut().extend(pending);
        }
        made_progress
    }

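    // Runs cross-thread messages, locally queued tasks, and async tasks in a
    // loop until a full pass makes no progress, since each kind of work can
    // enqueue more of the others.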
    fn drain_ui(&self) {
        loop {
            let mut executed = false;

            {
                let rx = &mut *self.ui_rx.borrow_mut();
                for message in rx.try_iter() {
                    executed = true;
                    let _guard = PendingGuard::new(&self.ui_dispatcher.pending);
                    match message {
                        UiMessage::Task(task) => {
                            task();
                        }
                        UiMessage::Invoke { id, value } => {
                            self.invoke_ui_cont(id, value);
                        }
                    }
                }
            }

            loop {
                let task = {
                    let mut local = self.local_tasks.borrow_mut();
                    local.pop_front()
                };

                match task {
                    Some(task) => {
                        executed = true;
                        task();
                    }
                    None => break,
                }
            }

            if self.poll_async_tasks() {
                executed = true;
            }

            if !executed {
                break;
            }
        }
    }

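    // A failed `try_borrow` means the queue is being mutated at this moment,
    // so it is conservatively reported as pending.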
    fn has_pending_ui(&self) -> bool {
        let local_pending = self
            .local_tasks
            .try_borrow()
            .map(|tasks| !tasks.is_empty())
            .unwrap_or(true);

        let async_pending = self
            .tasks
            .try_borrow()
            .map(|tasks| !tasks.is_empty())
            .unwrap_or(true);

        local_pending || self.ui_dispatcher.has_pending() || async_pending
    }

    fn register_ui_cont<T: 'static>(&self, f: impl FnOnce(T) + 'static) -> u64 {
        debug_assert_eq!(
            std::thread::current().id(),
            self.ui_thread_id,
            "UI continuation registered off the runtime thread",
        );
        let id = self.next_cont_id.get();
        self.next_cont_id.set(id + 1);
        let callback = RefCell::new(Some(f));
        self.ui_conts.borrow_mut().insert(
            id,
            Box::new(move |value: Box<dyn Any>| {
                let slot = callback
                    .borrow_mut()
                    .take()
                    .expect("UI continuation invoked more than once");
                let value = value
                    .downcast::<T>()
                    .expect("UI continuation type mismatch");
                slot(*value);
            }),
        );
        id
    }

    fn invoke_ui_cont(&self, id: u64, value: Box<dyn Any + Send>) {
        debug_assert_eq!(
            std::thread::current().id(),
            self.ui_thread_id,
            "UI continuation invoked off the runtime thread",
        );
        if let Some(callback) = self.ui_conts.borrow_mut().remove(&id) {
            let value: Box<dyn Any> = value;
            callback(value);
        }
    }

    fn cancel_ui_cont(&self, id: u64) {
        self.ui_conts.borrow_mut().remove(&id);
    }

    fn register_frame_callback(&self, callback: Box<dyn FnOnce(u64) + 'static>) -> FrameCallbackId {
        let id = self.next_frame_callback_id.get();
        self.next_frame_callback_id.set(id + 1);
        self.frame_callbacks
            .borrow_mut()
            .push_back(FrameCallbackEntry {
                id,
                callback: Some(callback),
            });
        self.schedule();
        id
    }

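    // After removing the entry, clear `needs_frame` if this was the last piece
    // of outstanding work, so an idle runtime stops requesting frames.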
    fn cancel_frame_callback(&self, id: FrameCallbackId) {
        let mut callbacks = self.frame_callbacks.borrow_mut();
        if let Some(index) = callbacks.iter().position(|entry| entry.id == id) {
            callbacks.remove(index);
        }
        let callbacks_empty = callbacks.is_empty();
        drop(callbacks);
        let local_pending = self
            .local_tasks
            .try_borrow()
            .map(|tasks| !tasks.is_empty())
            .unwrap_or(true);
        let async_pending = self
            .tasks
            .try_borrow()
            .map(|tasks| !tasks.is_empty())
            .unwrap_or(true);
        if !self.has_invalid_scopes()
            && !self.has_updates()
            && callbacks_empty
            && !local_pending
            && !self.ui_dispatcher.has_pending()
            && !async_pending
        {
            *self.needs_frame.borrow_mut() = false;
        }
    }

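    // Frame callbacks are detached from the queue before running so that a
    // callback may register new callbacks for the next frame; the batch
    // executes inside one mutable snapshot.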
    fn drain_frame_callbacks(&self, frame_time_nanos: u64) {
        let mut callbacks = self.frame_callbacks.borrow_mut();
        let mut pending: Vec<Box<dyn FnOnce(u64) + 'static>> = Vec::with_capacity(callbacks.len());
        while let Some(mut entry) = callbacks.pop_front() {
            if let Some(callback) = entry.callback.take() {
                pending.push(callback);
            }
        }
        drop(callbacks);

        if !pending.is_empty() {
            let _ = crate::run_in_mutable_snapshot(|| {
                for callback in pending {
                    callback(frame_time_nanos);
                }
            });
        }

        if !self.has_invalid_scopes()
            && !self.has_updates()
            && !self.has_frame_callbacks()
            && !self.has_pending_ui()
        {
            *self.needs_frame.borrow_mut() = false;
        }
    }
}

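/// Owning wrapper around the runtime core. Cloning is cheap (it clones an
/// `Rc`); [`Runtime::handle`] hands out weak [`RuntimeHandle`]s.
///
/// Minimal usage sketch (marked `ignore`; a real host drives the frame loop
/// through its `RuntimeScheduler` rather than draining manually):
///
/// ```ignore
/// let runtime = Runtime::new(Arc::new(DefaultScheduler));
/// let handle = runtime.handle();
/// handle.post_ui(|| println!("runs when the UI thread drains"));
/// handle.drain_ui();
/// ```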
#[derive(Clone)]
pub struct Runtime {
    inner: Rc<RuntimeInner>,
}

impl Runtime {
    pub fn new(scheduler: Arc<dyn RuntimeScheduler>) -> Self {
        let inner = Rc::new(RuntimeInner::new(scheduler));
        RuntimeInner::init_task_waker(&inner);
        let runtime = Self { inner };
        register_runtime_handle(&runtime.handle());
        runtime
    }

    pub fn handle(&self) -> RuntimeHandle {
        RuntimeHandle {
            inner: Rc::downgrade(&self.inner),
            dispatcher: UiDispatcher::new(self.inner.ui_dispatcher.clone()),
            ui_thread_id: self.inner.ui_thread_id,
            id: self.inner.runtime_id,
        }
    }

    pub fn has_updates(&self) -> bool {
        self.inner.has_updates()
    }

    pub fn needs_frame(&self) -> bool {
        *self.inner.needs_frame.borrow() || self.inner.ui_dispatcher.has_pending()
    }

    pub fn set_needs_frame(&self, value: bool) {
        *self.inner.needs_frame.borrow_mut() = value;
    }

    #[cfg(any(feature = "internal", test))]
    pub fn frame_clock(&self) -> FrameClock {
        FrameClock::new(self.handle())
    }
}

#[derive(Default)]
pub struct DefaultScheduler;

impl RuntimeScheduler for DefaultScheduler {
    fn schedule_frame(&self) {}
}

#[cfg(test)]
#[derive(Default)]
pub struct TestScheduler;

#[cfg(test)]
impl RuntimeScheduler for TestScheduler {
    fn schedule_frame(&self) {}
}

#[cfg(test)]
pub struct TestRuntime {
    runtime: Runtime,
}

#[cfg(test)]
impl Default for TestRuntime {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
impl TestRuntime {
    pub fn new() -> Self {
        Self {
            runtime: Runtime::new(Arc::new(TestScheduler)),
        }
    }

    pub fn handle(&self) -> RuntimeHandle {
        self.runtime.handle()
    }
}

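/// Weak, clonable reference to a [`Runtime`]. After the runtime is dropped,
/// most methods become no-ops or return `None`; the state-arena helpers panic
/// with "runtime dropped" instead.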
#[derive(Clone)]
pub struct RuntimeHandle {
    inner: Weak<RuntimeInner>,
    dispatcher: UiDispatcher,
    ui_thread_id: ThreadId,
    id: RuntimeId,
}

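/// Cancellation handle returned by [`RuntimeHandle::spawn_ui`].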
pub struct TaskHandle {
    id: u64,
    runtime: RuntimeHandle,
}

impl RuntimeHandle {
    pub fn id(&self) -> RuntimeId {
        self.id
    }

    pub(crate) fn alloc_state<T: Clone + 'static>(&self, value: T) -> StateId {
        self.with_state_arena(|arena| arena.alloc(value, self.clone()))
    }

    pub(crate) fn with_state_arena<R>(&self, f: impl FnOnce(&StateArena) -> R) -> R {
        self.inner
            .upgrade()
            .map(|inner| f(&inner.state_arena))
            .expect("runtime dropped")
    }

    #[allow(dead_code)]
    pub(crate) fn alloc_value<T: 'static>(&self, value: T) -> StateId {
        self.with_state_arena(|arena| arena.alloc_raw(value))
    }

    #[allow(dead_code)]
    pub(crate) fn with_value<T: 'static, R>(&self, id: StateId, f: impl FnOnce(&T) -> R) -> R {
        self.with_state_arena(|arena| {
            let value = arena.get_raw::<T>(id);
            f(&value)
        })
    }

    pub fn schedule(&self) {
        if let Some(inner) = self.inner.upgrade() {
            inner.schedule();
        }
    }

    pub fn enqueue_node_update(&self, command: Command) {
        if let Some(inner) = self.inner.upgrade() {
            inner.enqueue_update(command);
        }
    }

    pub fn enqueue_ui_task(&self, task: Box<dyn FnOnce() + 'static>) {
        if let Some(inner) = self.inner.upgrade() {
            inner.enqueue_ui_task(task);
        } else {
            task();
        }
    }

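    /// Spawns a future on the runtime's single-threaded executor, returning
    /// `None` if the runtime is already gone. Cancellation sketch, given a
    /// `RuntimeHandle` named `handle` (marked `ignore`):
    ///
    /// ```ignore
    /// if let Some(task) = handle.spawn_ui(async { /* ... */ }) {
    ///     task.cancel();
    /// }
    /// ```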
    pub fn spawn_ui<F>(&self, fut: F) -> Option<TaskHandle>
    where
        F: Future<Output = ()> + 'static,
    {
        self.inner.upgrade().map(|inner| {
            let id = inner.spawn_ui_task(Box::pin(fut));
            TaskHandle {
                id,
                runtime: self.clone(),
            }
        })
    }

    pub fn cancel_task(&self, id: u64) {
        if let Some(inner) = self.inner.upgrade() {
            inner.cancel_task(id);
        }
    }

    pub fn post_ui(&self, task: impl FnOnce() + Send + 'static) {
        self.dispatcher.post(task);
    }

    pub fn register_ui_cont<T: 'static>(&self, f: impl FnOnce(T) + 'static) -> Option<u64> {
        self.inner.upgrade().map(|inner| inner.register_ui_cont(f))
    }

    pub fn cancel_ui_cont(&self, id: u64) {
        if let Some(inner) = self.inner.upgrade() {
            inner.cancel_ui_cont(id);
        }
    }

    pub fn drain_ui(&self) {
        if let Some(inner) = self.inner.upgrade() {
            inner.drain_ui();
        }
    }

    pub fn has_pending_ui(&self) -> bool {
        self.inner
            .upgrade()
            .map(|inner| inner.has_pending_ui())
            .unwrap_or_else(|| self.dispatcher.has_pending())
    }

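    /// Registers a callback for the next frame, returning `None` if the
    /// runtime is gone. Minimal sketch, assuming the host loop supplies the
    /// frame time in nanoseconds (marked `ignore`):
    ///
    /// ```ignore
    /// let id = handle.register_frame_callback(|nanos| println!("frame at {nanos}"));
    /// handle.drain_frame_callbacks(16_000_000);
    /// ```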
    pub fn register_frame_callback(
        &self,
        callback: impl FnOnce(u64) + 'static,
    ) -> Option<FrameCallbackId> {
        self.inner
            .upgrade()
            .map(|inner| inner.register_frame_callback(Box::new(callback)))
    }

    pub fn cancel_frame_callback(&self, id: FrameCallbackId) {
        if let Some(inner) = self.inner.upgrade() {
            inner.cancel_frame_callback(id);
        }
    }

    pub fn drain_frame_callbacks(&self, frame_time_nanos: u64) {
        if let Some(inner) = self.inner.upgrade() {
            inner.drain_frame_callbacks(frame_time_nanos);
        }
    }

    #[cfg(any(feature = "internal", test))]
    pub fn frame_clock(&self) -> FrameClock {
        FrameClock::new(self.clone())
    }

    pub fn set_needs_frame(&self, value: bool) {
        if let Some(inner) = self.inner.upgrade() {
            *inner.needs_frame.borrow_mut() = value;
        }
    }

    pub(crate) fn take_updates(&self) -> Vec<Command> {
        self.inner
            .upgrade()
            .map(|inner| inner.take_updates())
            .unwrap_or_default()
    }

    pub fn has_updates(&self) -> bool {
        self.inner
            .upgrade()
            .map(|inner| inner.has_updates())
            .unwrap_or(false)
    }

    pub(crate) fn register_invalid_scope(&self, id: ScopeId, scope: Weak<RecomposeScopeInner>) {
        if let Some(inner) = self.inner.upgrade() {
            inner.register_invalid_scope(id, scope);
        }
    }

    pub(crate) fn mark_scope_recomposed(&self, id: ScopeId) {
        if let Some(inner) = self.inner.upgrade() {
            inner.mark_scope_recomposed(id);
        }
    }

    pub(crate) fn take_invalidated_scopes(&self) -> Vec<(ScopeId, Weak<RecomposeScopeInner>)> {
        self.inner
            .upgrade()
            .map(|inner| inner.take_invalidated_scopes())
            .unwrap_or_default()
    }

    pub fn has_invalid_scopes(&self) -> bool {
        self.inner
            .upgrade()
            .map(|inner| inner.has_invalid_scopes())
            .unwrap_or(false)
    }

    pub fn has_frame_callbacks(&self) -> bool {
        self.inner
            .upgrade()
            .map(|inner| inner.has_frame_callbacks())
            .unwrap_or(false)
    }

    pub fn assert_ui_thread(&self) {
        debug_assert_eq!(
            std::thread::current().id(),
            self.ui_thread_id,
            "state mutated off the runtime's UI thread"
        );
    }

    pub fn dispatcher(&self) -> UiDispatcher {
        self.dispatcher.clone()
    }
}

impl TaskHandle {
    pub fn cancel(self) {
        self.runtime.cancel_task(self.id);
    }
}

pub(crate) struct FrameCallbackEntry {
    id: FrameCallbackId,
    callback: Option<Box<dyn FnOnce(u64) + 'static>>,
}

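// Waker that just asks the scheduler for another frame; the next
// `poll_async_tasks` pass then re-polls every pending future.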
struct RuntimeTaskWaker {
    scheduler: Arc<dyn RuntimeScheduler>,
}

impl RuntimeTaskWaker {
    fn new(inner: Weak<RuntimeInner>) -> Self {
        let scheduler = inner
            .upgrade()
            .map(|rc| rc.scheduler.clone())
            .expect("RuntimeInner dropped before waker created");
        Self { scheduler }
    }

    fn into_waker(self) -> Waker {
        futures_task::waker(Arc::new(self))
    }
}

impl futures_task::ArcWake for RuntimeTaskWaker {
    fn wake_by_ref(arc_self: &Arc<Self>) {
        arc_self.scheduler.schedule_frame();
    }
}

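// Composition-scoped runtime stack for this thread: `push_active_runtime` and
// `pop_active_runtime` bracket recomposition, while `LAST_RUNTIME` remembers
// the most recent runtime as a fallback outside any active composition.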
thread_local! {
    static ACTIVE_RUNTIMES: RefCell<Vec<RuntimeHandle>> = const { RefCell::new(Vec::new()) };
    static LAST_RUNTIME: RefCell<Option<RuntimeHandle>> = const { RefCell::new(None) };
}

pub fn current_runtime_handle() -> Option<RuntimeHandle> {
    if let Some(handle) = ACTIVE_RUNTIMES.with(|stack| stack.borrow().last().cloned()) {
        return Some(handle);
    }
    LAST_RUNTIME.with(|slot| slot.borrow().clone())
}

pub(crate) fn push_active_runtime(handle: &RuntimeHandle) {
    ACTIVE_RUNTIMES.with(|stack| stack.borrow_mut().push(handle.clone()));
    LAST_RUNTIME.with(|slot| *slot.borrow_mut() = Some(handle.clone()));
}

pub(crate) fn pop_active_runtime() {
    ACTIVE_RUNTIMES.with(|stack| {
        stack.borrow_mut().pop();
    });
}

pub fn schedule_frame() {
    let handle = current_runtime_handle().expect("no runtime available to schedule frame");
    handle.schedule();
}

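// Adapts the one-shot `update` to the `Command` signature: the closure takes
// the `Option`ed update on its first call and is a no-op afterwards.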
pub fn schedule_node_update(
    update: impl FnOnce(&mut dyn Applier) -> Result<(), NodeError> + 'static,
) {
    let handle = current_runtime_handle().expect("no runtime available to schedule node update");
    let mut update_opt = Some(update);
    handle.enqueue_node_update(Box::new(move |applier: &mut dyn Applier| {
        if let Some(update) = update_opt.take() {
            return update(applier);
        }
        Ok(())
    }));
}