1use crate::collections::map::{HashMap, HashSet};
2use crate::retention::{RetainKey, RetentionManager};
3use crate::slot::{FinishGroupResult, PayloadKind};
4use crate::slot::{GroupStart, GroupStartKind, ValueSlotId};
5use crate::{
6 composer_context, empty_local_stack, explicit_group_key_seed, runtime, Applier, ApplierHost,
7 ChildList, Command, CommandQueue, CompositionLocal, DirtyBubble, Key, LocalKey,
8 LocalStackSnapshot, LocalStateEntry, MutableState, Node, NodeError, NodeId, Owned,
9 ProvidedValue, RecomposeOptions, RecomposeScope, RecycledNode, RetentionMode, RetentionPolicy,
10 RuntimeHandle, ScopeId, SlotId, SlotPassOutcome, SlotTable, SlotsHost, SnapshotStateList,
11 SnapshotStateMap, SnapshotStateObserver, StaticCompositionLocal, StaticLocalEntry,
12 SubcomposeState, COMMAND_FLUSH_THRESHOLD,
13};
14use smallvec::SmallVec;
15use std::any::Any;
16use std::cell::{Cell, RefCell, RefMut};
17use std::hash::Hash;
18use std::marker::PhantomData;
19use std::rc::Rc;
20
/// Borrow-scoped handle to a typed value slot created during a composition
/// pass.
///
/// The `'pass` lifetime ties the handle to the `Composer` borrow that issued
/// it, so the handle cannot outlive the pass in which the slot was created.
pub struct ValueSlotHandle<'pass, T: 'static> {
    // Identifier of the backing slot in the slot table.
    slot: ValueSlotId,
    // Anchors the handle to the originating pass; stores no data.
    _pass: PhantomData<&'pass Composer>,
    // Marks the payload type without owning a `T`; `fn() -> T` keeps the
    // handle covariant in `T` and avoids drop-check interaction.
    _value: PhantomData<fn() -> T>,
}
26
// `Copy`/`Clone` are written by hand because a derive would add spurious
// `T: Copy` / `T: Clone` bounds; the handle is only an id plus two
// `PhantomData` markers regardless of `T`.
impl<T: 'static> Copy for ValueSlotHandle<'_, T> {}

impl<T: 'static> Clone for ValueSlotHandle<'_, T> {
    fn clone(&self) -> Self {
        *self
    }
}
34
35impl<T: 'static> ValueSlotHandle<'_, T> {
36 pub(crate) fn new(slot: ValueSlotId) -> Self {
37 Self {
38 slot,
39 _pass: PhantomData,
40 _value: PhantomData,
41 }
42 }
43
44 pub(crate) fn slot(self) -> ValueSlotId {
45 self.slot
46 }
47}
48
49fn slots_storage_key(host: &Rc<SlotsHost>) -> usize {
50 host.storage_key()
51}
52
/// Associates `host` with `state`, tolerating benign re-binding.
///
/// Accepted cases:
/// 1. the host is unbound — bind it;
/// 2. the host is already bound to this exact `state` — refresh the binding;
/// 3. the host's previous state is orphaned and the host agrees to re-bind.
/// Binding the same host to a second, live runtime state is a programming
/// error and panics.
fn bind_slots_host_to_runtime_state(state: &Rc<ComposerRuntimeState>, host: &Rc<SlotsHost>) {
    if let Some(bound_state) = host.runtime_state() {
        if Rc::ptr_eq(&bound_state, state) {
            // Already ours: re-binding refreshes the live-host registry.
            state.bind_slots_host(host);
            return;
        }
        // Drop our strong reference to the old state before asking the host
        // to re-bind, so this frame does not keep it alive.
        drop(bound_state);
        if host.rebind_orphaned_runtime_state(state) {
            state.bind_slots_host(host);
            return;
        }
        panic!("slot host already belongs to a different composer runtime state");
    }
    state.bind_slots_host(host);
}
68
/// State shared between all composers that operate over the same runtime:
/// the scope registry, per-host retention of detached subtrees, and weak
/// links back to the live slot hosts and applier host.
pub(crate) struct ComposerRuntimeState {
    // All registered recompose scopes, keyed by id.
    scope_registry: RefCell<HashMap<ScopeId, RecomposeScope>>,
    // Retained (detached but kept-alive) subtrees, keyed by host storage key.
    retention_by_host: RefCell<HashMap<usize, RetentionManager>>,
    // Policy applied to every host's retention manager.
    retention_policy: Cell<RetentionPolicy>,
    // Weak handles to bound slot hosts, keyed by storage key.
    live_hosts: RefCell<HashMap<usize, std::rc::Weak<SlotsHost>>>,
    // Weak handle to the applier host, set by `bind_applier_host`.
    applier_host: RefCell<Option<std::rc::Weak<dyn ApplierHost>>>,
}
76
77impl Default for ComposerRuntimeState {
78 fn default() -> Self {
79 Self {
80 scope_registry: RefCell::new(HashMap::default()),
81 retention_by_host: RefCell::new(HashMap::default()),
82 retention_policy: Cell::new(RetentionPolicy::default()),
83 live_hosts: RefCell::new(HashMap::default()),
84 applier_host: RefCell::new(None),
85 }
86 }
87}
88
impl ComposerRuntimeState {
    /// Forgets everything recorded for `host_key`: its retention manager,
    /// its live-host entry, and any scopes bound to that host.
    pub(crate) fn clear_host_storage_key(&self, host_key: usize) {
        self.retention_by_host.borrow_mut().remove(&host_key);
        self.live_hosts.borrow_mut().remove(&host_key);
        // Keep only scopes whose slots host is NOT the one being cleared.
        self.scope_registry
            .borrow_mut()
            .retain(|_, scope| scope.slots_storage_key() != Some(host_key));
    }

    /// Records a weak link to the applier host; replaces any previous one.
    pub(crate) fn bind_applier_host(&self, applier: &Rc<dyn ApplierHost>) {
        *self.applier_host.borrow_mut() = Some(Rc::downgrade(applier));
    }

    /// True when a previously bound applier host is still alive.
    pub(crate) fn has_live_applier_host(&self) -> bool {
        self.applier_host
            .borrow()
            .as_ref()
            .and_then(std::rc::Weak::upgrade)
            .is_some()
    }

    /// Binds `host` to this state and registers it as a live host.
    pub(crate) fn bind_slots_host(self: &Rc<Self>, host: &Rc<SlotsHost>) {
        host.bind_runtime_state(self);
        self.live_hosts
            .borrow_mut()
            .insert(host.storage_key(), Rc::downgrade(host));
    }

    /// Looks up a registered scope by id (cloned handle).
    pub(crate) fn scope_for_id(&self, scope_id: ScopeId) -> Option<RecomposeScope> {
        self.scope_registry.borrow().get(&scope_id).cloned()
    }

    /// Registers (or replaces) a scope under its own id.
    pub(crate) fn register_scope(&self, scope: &RecomposeScope) {
        self.scope_registry
            .borrow_mut()
            .insert(scope.id(), scope.clone());
    }

    /// Removes a scope from the registry, returning it if present.
    pub(crate) fn remove_scope(&self, scope_id: ScopeId) -> Option<RecomposeScope> {
        self.scope_registry.borrow_mut().remove(&scope_id)
    }

    /// Sets the retention policy applied to all hosts' managers.
    pub(crate) fn set_retention_policy(&self, policy: RetentionPolicy) {
        self.retention_policy.set(policy);
    }

    /// Current retention policy.
    pub(crate) fn retention_policy(&self) -> RetentionPolicy {
        self.retention_policy.get()
    }

    /// Number of scopes currently registered (used for debug snapshots).
    pub(crate) fn scope_registry_len(&self) -> usize {
        self.scope_registry.borrow().len()
    }

    /// Removes and returns the retained subtree stored under `key` for
    /// `host`, invoking `preflight` on it before extraction.
    ///
    /// The host's manager entry is dropped once it is empty, but only when
    /// it has recorded no evictions — presumably so eviction counters stay
    /// available for debug stats; TODO confirm.
    pub(crate) fn take_retained(
        &self,
        host: &Rc<SlotsHost>,
        key: RetainKey,
        preflight: impl FnOnce(&crate::slot::DetachedSubtree),
    ) -> Option<crate::slot::DetachedSubtree> {
        let host_key = slots_storage_key(host);
        let mut retention = self.retention_by_host.borrow_mut();
        let subtree = retention
            .get_mut(&host_key)?
            .take_after_restore_preflight(key, preflight);
        if retention
            .get(&host_key)
            .is_some_and(|manager| manager.is_empty() && manager.evictions_total() == 0)
        {
            retention.remove(&host_key);
        }
        subtree
    }

    /// Stores `subtree` under `key` for `host`; returns any subtrees the
    /// manager evicted to stay within policy (caller must dispose them).
    pub(crate) fn insert_retained(
        &self,
        host: &Rc<SlotsHost>,
        key: RetainKey,
        subtree: crate::slot::DetachedSubtree,
    ) -> Vec<crate::slot::DetachedSubtree> {
        let policy = self.retention_policy();
        let mut retention_by_host = self.retention_by_host.borrow_mut();
        let manager = retention_by_host
            .entry(slots_storage_key(host))
            .or_insert_with(|| RetentionManager::new(policy));
        // Re-apply the policy in case it changed since the manager was made.
        manager.set_policy(policy);
        manager.insert(key, subtree)
    }

    /// Advances the host's retention pass counter; returns subtrees that
    /// aged out and must be disposed by the caller.
    pub(crate) fn advance_retention_pass(
        &self,
        host: &Rc<SlotsHost>,
    ) -> Vec<crate::slot::DetachedSubtree> {
        let host_key = slots_storage_key(host);
        let policy = self.retention_policy();
        let mut retention_by_host = self.retention_by_host.borrow_mut();
        let Some(manager) = retention_by_host.get_mut(&host_key) else {
            return Vec::new();
        };
        manager.set_policy(policy);
        manager.advance_pass()
    }

    /// Populates the runtime/retention fields of a slot debug snapshot.
    pub(crate) fn fill_slot_debug_snapshot(
        &self,
        host: &SlotsHost,
        snapshot: &mut crate::SlotDebugSnapshot,
    ) {
        let retention = self.retention_debug_stats(host.storage_key());
        snapshot.runtime_scope_registry_count = Some(self.scope_registry_len());
        snapshot.retained_subtree_count = retention.subtree_count;
        snapshot.retained_group_count = retention.group_count;
        snapshot.retained_payload_count = retention.payload_count;
        snapshot.retained_node_count = retention.node_count;
        snapshot.retained_scope_count = retention.scope_count;
    }

    /// Retention statistics for `host`, repackaged for the slot module.
    pub(crate) fn slot_retention_debug_stats(
        &self,
        host: &SlotsHost,
    ) -> crate::slot::SlotRetentionDebugStats {
        let retention = self.retention_debug_stats(host.storage_key());
        crate::slot::SlotRetentionDebugStats {
            retained_subtree_count: retention.subtree_count,
            retained_group_count: retention.group_count,
            retained_payload_count: retention.payload_count,
            retained_node_count: retention.node_count,
            retained_scope_count: retention.scope_count,
            retained_anchor_count: retention.anchor_count,
            retained_heap_bytes: retention.heap_bytes,
            retained_evictions_total: retention.evictions_total,
        }
    }

    /// Compacts the table's anchor and/or payload-anchor registries. When
    /// the host has retained subtrees, the retention manager participates so
    /// its anchors are remapped; otherwise compaction runs standalone.
    pub(crate) fn compact_table_identity_storage_for_host(
        &self,
        host: &SlotsHost,
        table: &mut SlotTable,
        compact_anchors: bool,
        compact_payloads: bool,
    ) {
        if !compact_anchors && !compact_payloads {
            return;
        }

        let host_key = host.storage_key();
        let mut retention = self.retention_by_host.borrow_mut();
        if let Some(retained) = retention.get_mut(&host_key) {
            if compact_anchors {
                table.compact_anchor_registry_storage(Some(&mut *retained));
            }
            if compact_payloads {
                table.compact_payload_anchor_registry_storage(Some(&mut *retained));
            }
        } else {
            if compact_anchors {
                table.compact_anchor_registry_storage(None);
            }
            if compact_payloads {
                table.compact_payload_anchor_registry_storage(None);
            }
        }
    }

    /// Drops all bookkeeping for `host`. Callers must have drained the
    /// host's retention first (enforced in debug builds).
    pub(crate) fn clear_host(&self, host: &SlotsHost) {
        let host_key = host.storage_key();
        debug_assert!(
            self.host_retention_is_empty(host),
            "host retention must be drained before clearing host ownership"
        );
        self.clear_host_storage_key(host_key);
    }

    /// Disposes every retained subtree for `host_key`.
    ///
    /// Runs in two phases: with a live applier host, nodes are disposed
    /// through the applier immediately; then the retention entry is removed,
    /// its scopes are deactivated and removed from the registry, anchors are
    /// invalidated, and the subtrees are queued for lifecycle disposal.
    pub(crate) fn dispose_retained_subtrees_for_host(
        &self,
        host_key: usize,
        table: &mut SlotTable,
        lifecycle: &mut crate::slot::SlotLifecycleCoordinator,
    ) -> Result<(), NodeError> {
        let applier_host = self
            .applier_host
            .borrow()
            .as_ref()
            .and_then(std::rc::Weak::upgrade);
        if let Some(applier_host) = applier_host.as_ref() {
            let retention_by_host = self.retention_by_host.borrow();
            let Some(retention) = retention_by_host.get(&host_key) else {
                return Ok(());
            };
            let mut applier = applier_host.borrow_dyn();
            for subtree in retention.subtrees() {
                crate::slot::dispose_detached_subtree_now(&mut *applier, subtree)?;
            }
        }
        let Some(retention) = self.retention_by_host.borrow_mut().remove(&host_key) else {
            return Ok(());
        };
        for subtree in retention.into_subtrees() {
            for scope_id in subtree.scope_ids() {
                if let Some(scope) = self.remove_scope(scope_id) {
                    scope.deactivate();
                }
            }
            table.invalidate_detached_subtree_anchors(&subtree);
            lifecycle.queue_subtree_disposal(subtree);
        }
        Ok(())
    }

    /// Like `dispose_retained_subtrees_for_host` but without touching the
    /// applier (nodes are abandoned, not disposed through it); always clears
    /// the host's bookkeeping afterwards.
    pub(crate) fn abandon_retained_subtrees_for_host(
        &self,
        host_key: usize,
        table: &mut SlotTable,
        lifecycle: &mut crate::slot::SlotLifecycleCoordinator,
    ) {
        let Some(retention) = self.retention_by_host.borrow_mut().remove(&host_key) else {
            self.clear_host_storage_key(host_key);
            return;
        };
        for subtree in retention.into_subtrees() {
            for scope_id in subtree.scope_ids() {
                if let Some(scope) = self.remove_scope(scope_id) {
                    scope.deactivate();
                }
            }
            table.invalidate_detached_subtree_anchors(&subtree);
            lifecycle.queue_subtree_disposal(subtree);
        }
        self.clear_host_storage_key(host_key);
    }

    /// True when `host` has no retention manager or an empty one.
    pub(crate) fn host_retention_is_empty(&self, host: &SlotsHost) -> bool {
        self.retention_by_host
            .borrow()
            .get(&host.storage_key())
            .is_none_or(RetentionManager::is_empty)
    }

    /// Debug-only invariant check of the host's retention against `table`.
    #[cfg(any(test, debug_assertions))]
    pub(crate) fn debug_verify_host(&self, host: &SlotsHost, table: &SlotTable) {
        if let Some(retention) = self.retention_by_host.borrow().get(&host.storage_key()) {
            retention.debug_verify(table);
        }
    }

    /// Test-only fallible variant of `debug_verify_host`.
    #[cfg(test)]
    pub(crate) fn validate_host_retention(
        &self,
        host: &SlotsHost,
        table: &SlotTable,
    ) -> Result<(), crate::slot::SlotInvariantError> {
        if let Some(retention) = self.retention_by_host.borrow().get(&host.storage_key()) {
            retention.validate(table)?;
        }
        Ok(())
    }

    /// Upgrades the weak live-host entry for `storage_key`, if still alive.
    pub(crate) fn host_for_storage_key(&self, storage_key: usize) -> Option<Rc<SlotsHost>> {
        self.live_hosts
            .borrow()
            .get(&storage_key)
            .and_then(std::rc::Weak::upgrade)
    }

    /// Retention stats for `host_key`; all-zero defaults when unmanaged.
    fn retention_debug_stats(&self, host_key: usize) -> crate::retention::RetentionDebugStats {
        self.retention_by_host
            .borrow()
            .get(&host_key)
            .map(RetentionManager::debug_stats)
            .unwrap_or_default()
    }
}
361
/// One entry of the parent stack: the node children are being emitted under
/// during composition.
pub(crate) struct ParentFrame {
    // The parent node id.
    pub(crate) id: NodeId,
    // Children recorded by the previous pass.
    pub(crate) previous: ChildList,
    // Children collected during the current pass, in emission order.
    pub(crate) new_children: ChildList,
    // Optional `HashSet` mirror of `new_children` used for O(1) dedup once
    // the list grows past `LARGE_DEFERRED_CHILD_TRACKING_THRESHOLD`.
    pub(crate) new_children_membership: Option<HashSet<NodeId>>,
    // Whether children attach immediately or are synced at frame close.
    pub(crate) attach_mode: ParentAttachMode,
}
369
/// Once a deferred-sync parent has accumulated this many new children,
/// de-duplication switches from a linear `contains` scan to a `HashSet`.
const LARGE_DEFERRED_CHILD_TRACKING_THRESHOLD: usize = 16;
371
/// How children recorded under a [`ParentFrame`] are attached to the parent.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum ParentAttachMode {
    // Children are appended to the parent as they are emitted.
    ImmediateAppend,
    // Children are collected and synced when the frame closes.
    DeferredSync,
}
377
/// Per-subcomposition bookkeeping: the nodes and scopes produced while a
/// subcompose frame is on the stack.
#[derive(Default)]
pub(crate) struct SubcomposeFrame {
    pub(crate) nodes: Vec<NodeId>,
    pub(crate) scopes: Vec<RecomposeScope>,
}
383
/// A snapshot of composition-local values, keyed by local key; values are
/// type-erased and downcast at the read site.
#[derive(Default, Clone)]
pub(crate) struct LocalContext {
    pub(crate) values: HashMap<LocalKey, Rc<dyn Any>>,
}
388
/// The single shared core behind every `Composer` clone: slot hosts, the
/// applier, the runtime handle, and all per-pass mutable stacks and queues.
pub(crate) struct ComposerCore {
    // State shared with other composers on the same runtime.
    pub(crate) shared_state: Rc<ComposerRuntimeState>,
    // The primary slots host.
    pub(crate) slots: Rc<SlotsHost>,
    // Stack of override hosts pushed by slot-host passes; the innermost one
    // is the "active" host.
    slot_hosts: RefCell<Vec<Rc<SlotsHost>>>,
    pub(crate) applier: Rc<dyn ApplierHost>,
    pub(crate) runtime: RuntimeHandle,
    pub(crate) observer: SnapshotStateObserver,
    pub(crate) parent_stack: RefCell<Vec<ParentFrame>>,
    pub(crate) subcompose_stack: RefCell<Vec<SubcomposeFrame>>,
    pub(crate) root: Cell<Option<NodeId>>,
    pub(crate) commands: RefCell<CommandQueue>,
    pub(crate) scope_stack: RefCell<Vec<RecomposeScope>>,
    pub(crate) local_stack: RefCell<LocalStackSnapshot>,
    pub(crate) side_effects: RefCell<Vec<Box<dyn FnOnce()>>>,
    // Options consumed by the next group open (see `cranpose_with_reuse`).
    pub(crate) pending_scope_options: RefCell<Option<RecomposeOptions>>,
    pub(crate) phase: Cell<crate::Phase>,
    pub(crate) last_node_reused: Cell<Option<bool>>,
    // NOTE(review): "recranpose" looks like a typo of "recompose"; the field
    // is referenced elsewhere in the crate, so the name is kept as-is.
    pub(crate) recranpose_parent_hint: Cell<Option<NodeId>>,
    pub(crate) root_render_requested: Cell<bool>,
    // Raw-pointer marker keeps the core (and thus `Composer`) `!Send`.
    pub(crate) _not_send: PhantomData<*const ()>,
}
410
411impl ComposerCore {
412 pub(crate) fn new(
413 shared_state: Rc<ComposerRuntimeState>,
414 slots: Rc<SlotsHost>,
415 applier: Rc<dyn ApplierHost>,
416 runtime: RuntimeHandle,
417 observer: SnapshotStateObserver,
418 root: Option<NodeId>,
419 ) -> Self {
420 let parent_stack = if let Some(root_id) = root {
421 vec![ParentFrame {
422 id: root_id,
423 previous: ChildList::new(),
424 new_children: ChildList::new(),
425 new_children_membership: None,
426 attach_mode: ParentAttachMode::DeferredSync,
427 }]
428 } else {
429 Vec::new()
430 };
431
432 Self {
433 shared_state,
434 slots,
435 slot_hosts: RefCell::new(Vec::new()),
436 applier,
437 runtime,
438 observer,
439 parent_stack: RefCell::new(parent_stack),
440 subcompose_stack: RefCell::new(Vec::new()),
441 root: Cell::new(root),
442 commands: RefCell::new(CommandQueue::default()),
443 scope_stack: RefCell::new(Vec::new()),
444 local_stack: RefCell::new(empty_local_stack()),
445 side_effects: RefCell::new(Vec::new()),
446 pending_scope_options: RefCell::new(None),
447 phase: Cell::new(crate::Phase::Compose),
448 last_node_reused: Cell::new(None),
449 recranpose_parent_hint: Cell::new(None),
450 root_render_requested: Cell::new(false),
451 _not_send: PhantomData,
452 }
453 }
454}
455
/// Cheaply cloneable handle to a [`ComposerCore`]; all clones share state.
#[derive(Clone)]
pub struct Composer {
    pub(crate) core: Rc<ComposerCore>,
}
460
/// A node produced during emission: either freshly constructed or recycled
/// from a previous composition.
pub(crate) enum EmittedNode {
    Fresh(Box<dyn Node>),
    Recycled(RecycledNode),
}
465
466impl Composer {
467 pub(crate) fn new_with_shared_state(
468 shared_state: Rc<ComposerRuntimeState>,
469 slots: Rc<SlotsHost>,
470 applier: Rc<dyn ApplierHost>,
471 runtime: RuntimeHandle,
472 observer: SnapshotStateObserver,
473 root: Option<NodeId>,
474 ) -> Self {
475 shared_state.bind_applier_host(&applier);
476 bind_slots_host_to_runtime_state(&shared_state, &slots);
477 let core = Rc::new(ComposerCore::new(
478 shared_state,
479 slots,
480 applier,
481 runtime,
482 observer,
483 root,
484 ));
485 Self { core }
486 }
487
488 pub fn new(
489 slots: Rc<SlotsHost>,
490 applier: Rc<dyn ApplierHost>,
491 runtime: RuntimeHandle,
492 observer: SnapshotStateObserver,
493 root: Option<NodeId>,
494 ) -> Self {
495 Self::new_with_shared_state(
496 slots
497 .runtime_state()
498 .unwrap_or_else(|| Rc::new(ComposerRuntimeState::default())),
499 slots,
500 applier,
501 runtime,
502 observer,
503 root,
504 )
505 }
506
507 pub(crate) fn from_core(core: Rc<ComposerCore>) -> Self {
508 Self { core }
509 }
510
511 pub(crate) fn clone_core(&self) -> Rc<ComposerCore> {
512 Rc::clone(&self.core)
513 }
514
515 fn observer(&self) -> SnapshotStateObserver {
516 self.core.observer.clone()
517 }
518
519 pub(crate) fn request_root_render(&self) {
520 self.core.root_render_requested.set(true);
521 }
522
523 pub(crate) fn take_root_render_request(&self) -> bool {
524 self.core.root_render_requested.replace(false)
525 }
526
527 pub(crate) fn observe_scope<R>(&self, scope: &RecomposeScope, block: impl FnOnce() -> R) -> R {
528 let observer = self.observer();
529 let scope_clone = scope.clone();
530 observer.observe_reads(scope_clone, move |scope_ref| scope_ref.invalidate(), block)
531 }
532
533 pub(crate) fn active_slots_host(&self) -> Rc<SlotsHost> {
534 self.core
535 .slot_hosts
536 .borrow()
537 .last()
538 .cloned()
539 .unwrap_or_else(|| Rc::clone(&self.core.slots))
540 }
541
542 pub(crate) fn with_slots<R>(&self, f: impl FnOnce(&SlotTable) -> R) -> R {
543 let host = self.active_slots_host();
544 let slots = host.borrow();
545 f(&slots)
546 }
547
548 pub(crate) fn with_slots_mut<R>(&self, f: impl FnOnce(&mut SlotTable) -> R) -> R {
549 let host = self.active_slots_host();
550 let mut slots = host.borrow_mut();
551 f(&mut slots)
552 }
553
554 pub(crate) fn with_slot_session_mut<R>(
555 &self,
556 f: impl FnOnce(&mut crate::slot::SlotWriteSession<'_>) -> R,
557 ) -> R {
558 self.active_slots_host().with_write_session(f)
559 }
560
561 pub(crate) fn try_with_slot_host_pass<R>(
562 &self,
563 slots: Rc<SlotsHost>,
564 mode: crate::slot::SlotPassMode,
565 f: impl FnOnce(&Composer) -> R,
566 ) -> Result<(R, SlotPassOutcome), NodeError> {
567 bind_slots_host_to_runtime_state(&self.core.shared_state, &slots);
568 slots.begin_pass(mode);
569 self.core.slot_hosts.borrow_mut().push(Rc::clone(&slots));
570
571 struct Guard {
572 core: Rc<ComposerCore>,
573 host: Rc<SlotsHost>,
574 active: bool,
575 }
576 impl Guard {
577 fn close(&mut self) {
578 if !self.active {
579 return;
580 }
581 if self.host.has_active_pass() {
582 self.host.abandon_active_pass();
583 }
584 let host = self
585 .core
586 .slot_hosts
587 .borrow_mut()
588 .pop()
589 .expect("slot host underflow");
590 debug_assert!(Rc::ptr_eq(&host, &self.host));
591 self.active = false;
592 }
593 }
594 impl Drop for Guard {
595 fn drop(&mut self) {
596 self.close();
597 }
598 }
599 let mut guard = Guard {
600 core: self.clone_core(),
601 host: Rc::clone(&slots),
602 active: true,
603 };
604 let result = f(self);
605 let finished = {
606 let mut applier = self.core.applier.borrow_dyn();
607 slots.finish_pass(&mut *applier)
608 }?;
609 self.handle_detached_children_in_host(&slots, None, finished.detached_root_children)?;
610 self.evict_retained_subtrees_for_host(&slots)?;
611 slots.complete_pass_cleanup(&finished.outcome);
612 guard.close();
613 Ok((result, finished.outcome))
614 }
615
616 pub(crate) fn with_slot_host_pass<R>(
617 &self,
618 slots: Rc<SlotsHost>,
619 mode: crate::slot::SlotPassMode,
620 f: impl FnOnce(&Composer) -> R,
621 ) -> (R, SlotPassOutcome) {
622 self.try_with_slot_host_pass(slots, mode, f)
623 .expect("slot pass finalization must dispose detached nodes")
624 }
625
626 pub(crate) fn with_slot_override<R>(
627 &self,
628 slots: Rc<SlotsHost>,
629 f: impl FnOnce(&Composer) -> R,
630 ) -> (R, SlotPassOutcome) {
631 self.with_slot_host_pass(slots, crate::slot::SlotPassMode::Compose, f)
632 }
633
634 pub(crate) fn parent_stack(&self) -> RefMut<'_, Vec<ParentFrame>> {
635 self.core.parent_stack.borrow_mut()
636 }
637
638 pub(crate) fn subcompose_stack(&self) -> RefMut<'_, Vec<SubcomposeFrame>> {
639 self.core.subcompose_stack.borrow_mut()
640 }
641
642 pub(crate) fn commands_mut(&self) -> RefMut<'_, CommandQueue> {
643 self.core.commands.borrow_mut()
644 }
645
646 pub(crate) fn enqueue_semantics_invalidation(&self, id: NodeId) {
647 self.commands_mut().push(Command::BubbleDirty {
648 node_id: id,
649 bubble: DirtyBubble::SEMANTICS,
650 });
651 }
652
653 pub(crate) fn scope_stack(&self) -> RefMut<'_, Vec<RecomposeScope>> {
654 self.core.scope_stack.borrow_mut()
655 }
656
657 fn scope_for_id(&self, scope_id: ScopeId) -> Option<RecomposeScope> {
658 self.core.shared_state.scope_for_id(scope_id)
659 }
660
661 fn register_scope(&self, scope: &RecomposeScope) {
662 self.core.shared_state.register_scope(scope);
663 }
664
665 fn remove_scope(&self, scope_id: ScopeId) -> Option<RecomposeScope> {
666 self.core.shared_state.remove_scope(scope_id)
667 }
668
669 pub(crate) fn local_stack(&self) -> RefMut<'_, LocalStackSnapshot> {
670 self.core.local_stack.borrow_mut()
671 }
672
673 pub(crate) fn current_local_stack(&self) -> LocalStackSnapshot {
674 self.core.local_stack.borrow().clone()
675 }
676
677 pub(crate) fn side_effects_mut(&self) -> RefMut<'_, Vec<Box<dyn FnOnce()>>> {
678 self.core.side_effects.borrow_mut()
679 }
680
681 fn pending_scope_options(&self) -> RefMut<'_, Option<RecomposeOptions>> {
682 self.core.pending_scope_options.borrow_mut()
683 }
684
685 pub(crate) fn borrow_applier(&self) -> RefMut<'_, dyn Applier> {
686 self.core.applier.borrow_dyn()
687 }
688
689 pub fn register_virtual_node(
696 &self,
697 node_id: NodeId,
698 node: Box<dyn Node>,
699 ) -> Result<(), NodeError> {
700 let mut applier = self.borrow_applier();
701 applier.insert_with_id(node_id, node)
702 }
703
704 pub fn node_has_no_parent(&self, node_id: NodeId) -> bool {
707 let mut applier = self.borrow_applier();
708 match applier.get_mut(node_id) {
709 Ok(node) => node.parent().is_none(),
710 Err(_) => true,
711 }
712 }
713
714 pub fn get_node_children(&self, node_id: NodeId) -> SmallVec<[NodeId; 8]> {
719 let mut applier = self.borrow_applier();
720 match applier.get_mut(node_id) {
721 Ok(node) => {
722 let mut children = SmallVec::<[NodeId; 8]>::new();
723 node.collect_children_into(&mut children);
724 children
725 }
726 Err(_) => SmallVec::<[NodeId; 8]>::new(),
727 }
728 }
729
    /// Records `child_id` as a new child of the current deferred-sync parent
    /// frame, de-duplicating repeats.
    ///
    /// Dedup strategy scales with size: below
    /// `LARGE_DEFERRED_CHILD_TRACKING_THRESHOLD` a linear `contains` scan is
    /// used; at the threshold a `HashSet` mirror of the list is built once
    /// and used for O(1) membership checks from then on. Frames in
    /// `ImmediateAppend` mode ignore this call.
    pub fn record_subcompose_child(&self, child_id: NodeId) {
        let mut parent_stack = self.parent_stack();
        if let Some(frame) = parent_stack.last_mut() {
            if matches!(frame.attach_mode, ParentAttachMode::DeferredSync) {
                if let Some(membership) = frame.new_children_membership.as_mut() {
                    // Fast path: set already built; insert returns false on dupes.
                    if membership.insert(child_id) {
                        frame.new_children.push(child_id);
                    }
                } else if frame.new_children.len() >= LARGE_DEFERRED_CHILD_TRACKING_THRESHOLD {
                    // Crossing the threshold: build the set from the existing
                    // list, then use it for this child too.
                    let mut membership = HashSet::default();
                    membership.reserve(frame.new_children.len() + 1);
                    membership.extend(frame.new_children.iter().copied());
                    if membership.insert(child_id) {
                        frame.new_children.push(child_id);
                    }
                    frame.new_children_membership = Some(membership);
                } else if !frame.new_children.contains(&child_id) {
                    // Small-list path: linear scan is cheap here.
                    frame.new_children.push(child_id);
                }
            }
        }
    }
761
762 pub fn clear_node_children(&self, node_id: NodeId) {
768 let mut applier = self.borrow_applier();
769 if let Ok(node) = applier.get_mut(node_id) {
770 node.update_children(&[]);
771 }
772 }
773
774 pub fn install<R>(&self, f: impl FnOnce(&Composer) -> R) -> R {
775 let _composer_guard = composer_context::enter(self);
776 runtime::push_active_runtime(&self.core.runtime);
777 struct Guard;
778 impl Drop for Guard {
779 fn drop(&mut self) {
780 runtime::pop_active_runtime();
781 }
782 }
783 let guard = Guard;
784 let result = f(self);
785 drop(guard);
786 result
787 }
788
789 pub(crate) fn flush_pending_commands_if_large(&self) {
790 let queued = self.core.commands.borrow().len();
791 if queued < COMMAND_FLUSH_THRESHOLD {
792 return;
793 }
794 self.apply_pending_commands()
795 .expect("mid-composition command flush failed");
796 }
797
798 fn with_group_in_active_pass<R>(
799 &self,
800 key: crate::slot::GroupKeySeed,
801 f: impl FnOnce(&Composer) -> R,
802 ) -> R {
803 struct GroupGuard {
804 composer: Composer,
805 scope: RecomposeScope,
806 }
807
808 impl Drop for GroupGuard {
809 fn drop(&mut self) {
810 self.composer
811 .close_current_group_body_for_scope(&self.scope);
812 self.scope.mark_recomposed();
813 self.composer
814 .with_slot_session_mut(|slots| slots.end_group());
815 self.composer.flush_pending_commands_if_large();
816 }
817 }
818
819 let parent_scope = self.current_recranpose_scope();
820 let options = self.pending_scope_options().take().unwrap_or_default();
821 let parent_scope_id = parent_scope.as_ref().map(RecomposeScope::id);
822 let reserved_key = self.with_slot_session_mut(|slots| slots.preview_group_key(key));
823 let host = self.active_slots_host();
824 let restored = self.core.shared_state.take_retained(
825 &host,
826 RetainKey {
827 parent_scope: parent_scope_id,
828 key: reserved_key,
829 },
830 |subtree| {
831 self.with_slot_session_mut(|slots| {
832 slots.assert_retained_restore_ready(reserved_key, subtree);
833 });
834 },
835 );
836 let (group, start_scope_id, start_kind) = self.with_slot_session_mut(|slots| {
837 let GroupStart {
838 group,
839 scope_id,
840 kind,
841 ..
842 } = slots.begin_group(reserved_key, restored);
843 (group, scope_id, kind)
844 });
845 let scope_ref =
846 if let Some(scope) = start_scope_id.and_then(|scope_id| self.scope_for_id(scope_id)) {
847 scope
848 } else {
849 let scope = RecomposeScope::new(self.runtime_handle());
850 self.register_scope(&scope);
851 self.with_slot_session_mut(|slots| slots.set_group_scope(group, scope.id()));
852 scope
853 };
854
855 scope_ref.reactivate();
856 scope_ref.set_parent_scope(parent_scope);
857 scope_ref.set_retention_mode(options.retention);
858
859 if options.force_recompose {
860 scope_ref.force_recompose();
861 } else if options.force_reuse {
862 scope_ref.force_reuse();
863 }
864 if matches!(start_kind, GroupStartKind::Restored) {
865 scope_ref.force_recompose();
866 }
867
868 scope_ref.set_slots_host(&host);
869
870 {
871 let mut stack = self.scope_stack();
872 stack.push(scope_ref.clone());
873 }
874
875 {
876 let mut stack = self.subcompose_stack();
877 if let Some(frame) = stack.last_mut() {
878 frame.scopes.push(scope_ref.clone());
879 }
880 }
881
882 scope_ref.snapshot_locals(self.current_local_stack());
883 {
884 let parent_hint = self.parent_stack().last().map(|frame| frame.id);
885 scope_ref.set_parent_hint(parent_hint);
886 }
887
888 let guard = GroupGuard {
889 composer: self.clone(),
890 scope: scope_ref.clone(),
891 };
892 let result = self.observe_scope(&scope_ref, || f(self));
893 scope_ref.mark_composed_once();
894 drop(guard);
895 result
896 }
897
898 pub(crate) fn with_group_seed<R>(
899 &self,
900 key: crate::slot::GroupKeySeed,
901 f: impl FnOnce(&Composer) -> R,
902 ) -> R {
903 let host = self.active_slots_host();
904 if host.has_active_pass() {
905 return self.with_group_in_active_pass(key, f);
906 }
907 let (result, _) =
908 self.with_slot_host_pass(host, crate::slot::SlotPassMode::Compose, |composer| {
909 composer.with_group_in_active_pass(key, f)
910 });
911 result
912 }
913
914 pub fn with_group<R>(&self, key: Key, f: impl FnOnce(&Composer) -> R) -> R {
915 self.with_group_seed(crate::slot::GroupKeySeed::unkeyed(key), f)
916 }
917
918 pub fn cranpose_with_reuse<R>(
919 &self,
920 key: Key,
921 mut options: RecomposeOptions,
922 f: impl FnOnce(&Composer) -> R,
923 ) -> R {
924 options.retention = RetentionMode::RetainWhenInactive;
925 self.pending_scope_options().replace(options);
926 self.with_group(key, f)
927 }
928
929 #[track_caller]
930 pub fn with_key<K: Hash, R>(&self, key: &K, f: impl FnOnce(&Composer) -> R) -> R {
931 let seed = explicit_group_key_seed(key, std::panic::Location::caller());
932 self.with_group_seed(seed, f)
933 }
934
935 fn dispose_detached_nodes(&self, nodes: impl IntoIterator<Item = NodeId>) {
936 for node_id in nodes {
937 self.commands_mut().push(Command::callback(move |applier| {
938 crate::slot::dispose_detached_node_now(applier, node_id)
939 }));
940 }
941 }
942
943 fn deactivate_scope_ids(&self, scope_ids: impl IntoIterator<Item = ScopeId>) {
944 for scope_id in scope_ids {
945 if let Some(scope) = self.scope_for_id(scope_id) {
946 scope.deactivate();
947 }
948 }
949 }
950
951 fn dispose_scope_ids(&self, scope_ids: impl IntoIterator<Item = ScopeId>) {
952 for scope_id in scope_ids {
953 if let Some(scope) = self.remove_scope(scope_id) {
954 scope.deactivate();
955 }
956 }
957 }
958
959 fn detached_root_parent_commands(
960 &self,
961 subtree: &crate::slot::DetachedSubtree,
962 context: &'static str,
963 ) -> Result<Vec<(NodeId, Option<NodeId>)>, NodeError> {
964 let mut root_nodes = Vec::new();
965 subtree.collect_root_nodes_checked_into(&mut root_nodes, context);
966 let mut roots = Vec::with_capacity(root_nodes.len());
967 for root in root_nodes {
968 let parent_id = {
969 let mut applier = self.borrow_applier();
970 applier.get_mut(root)?.parent()
971 };
972 roots.push((root, parent_id));
973 }
974 Ok(roots)
975 }
976
    /// Parks a detached subtree in the retention store instead of disposing
    /// it, so it can be restored later under the same (parent scope, key).
    ///
    /// Steps, in order: deactivate the subtree's scopes, queue detach
    /// commands for any roots that still have a parent, insert into the
    /// retention manager, then dispose whatever the manager evicted to make
    /// room.
    fn retain_detached_subtree_in_host(
        &self,
        slots_host: &Rc<SlotsHost>,
        parent_scope: Option<ScopeId>,
        subtree: crate::slot::DetachedSubtree,
    ) -> Result<(), NodeError> {
        let root_detaches = self.detached_root_parent_commands(&subtree, "retention")?;
        self.deactivate_scope_ids(subtree.scope_ids_iter());
        for (root, parent_id) in root_detaches {
            if let Some(parent_id) = parent_id {
                self.commands_mut().push(Command::DetachChild {
                    parent_id,
                    child_id: root,
                });
            }
        }
        let evicted = self.core.shared_state.insert_retained(
            slots_host,
            RetainKey {
                parent_scope,
                key: subtree.root_key(),
            },
            subtree,
        );
        for subtree in evicted {
            self.dispose_detached_subtree_in_host(slots_host, subtree)?;
        }
        Ok(())
    }
1009
1010 fn evict_retained_subtrees_for_host(
1011 &self,
1012 slots_host: &Rc<SlotsHost>,
1013 ) -> Result<(), NodeError> {
1014 let evicted = self.core.shared_state.advance_retention_pass(slots_host);
1015 for subtree in evicted {
1016 self.dispose_detached_subtree_in_host(slots_host, subtree)?;
1017 }
1018 Ok(())
1019 }
1020
1021 fn dispose_detached_subtree_in_host(
1022 &self,
1023 slots_host: &Rc<SlotsHost>,
1024 subtree: crate::slot::DetachedSubtree,
1025 ) -> Result<(), NodeError> {
1026 let root_nodes = self
1027 .detached_root_parent_commands(&subtree, "disposal")?
1028 .into_iter()
1029 .map(|(root, _)| root);
1030 self.dispose_scope_ids(subtree.scope_ids_iter());
1031 self.dispose_detached_nodes(root_nodes);
1032 slots_host.with_table_and_lifecycle_mut(|table, lifecycle| {
1033 table.invalidate_detached_subtree_anchors(&subtree);
1034 lifecycle.queue_subtree_disposal(subtree);
1035 });
1036 Ok(())
1037 }
1038
1039 fn handle_detached_children_in_host(
1040 &self,
1041 slots_host: &Rc<SlotsHost>,
1042 parent_scope: Option<ScopeId>,
1043 detached: Vec<crate::slot::DetachedSubtree>,
1044 ) -> Result<(), NodeError> {
1045 for subtree in detached {
1046 let retention_mode = subtree
1047 .root_scope_id()
1048 .and_then(|scope_id| self.scope_for_id(scope_id))
1049 .map(|scope| scope.retention_mode())
1050 .unwrap_or_default();
1051 match retention_mode {
1052 RetentionMode::DisposeWhenInactive => {
1053 self.dispose_detached_subtree_in_host(slots_host, subtree)?
1054 }
1055 RetentionMode::RetainWhenInactive => {
1056 self.retain_detached_subtree_in_host(slots_host, parent_scope, subtree)?
1057 }
1058 }
1059 }
1060 Ok(())
1061 }
1062
1063 fn handle_detached_children(
1064 &self,
1065 parent_scope: Option<ScopeId>,
1066 detached: Vec<crate::slot::DetachedSubtree>,
1067 ) {
1068 let host = self.active_slots_host();
1069 self.handle_detached_children_in_host(&host, parent_scope, detached)
1070 .expect("detached subtree root nodes must be present while closing a group");
1071 }
1072
1073 fn handle_finished_group_result(
1074 &self,
1075 parent_scope: Option<ScopeId>,
1076 result: FinishGroupResult,
1077 ) {
1078 let FinishGroupResult {
1079 detached_children,
1080 direct_nodes,
1081 root_nodes,
1082 was_skipped,
1083 } = result;
1084 if was_skipped {
1085 self.attach_root_nodes(root_nodes);
1086 }
1087 self.dispose_detached_nodes(direct_nodes);
1088 self.handle_detached_children(parent_scope, detached_children);
1089 }
1090
    /// Closes the slot group opened for `scope`'s body, processes the finish
    /// result (skipped-group roots, removed nodes, detached subtrees), and
    /// pops the scope from the scope stack.
    ///
    /// In debug builds, verifies the popped scope is the one being closed; a
    /// mismatch indicates unbalanced open/close calls.
    pub(crate) fn close_current_group_body_for_scope(&self, scope: &RecomposeScope) {
        let result = self.with_slot_session_mut(|slots| slots.finish_group_body());
        self.handle_finished_group_result(Some(scope.id()), result);
        // The scope pushed when the group body was opened must still be on top.
        let popped = self.scope_stack().pop().expect("scope stack underflow");
        debug_assert_eq!(
            popped.id(),
            scope.id(),
            "closed scope must match the active scope stack"
        );
    }
1101
    /// Public entry point for remembering a value in the slot table:
    /// delegates to `remember_with_kind` with `PayloadKind::Remember`,
    /// running `init` only when the slot needs to be (re)initialized.
    pub fn remember<T: 'static>(&self, init: impl FnOnce() -> T) -> Owned<T> {
        self.remember_with_kind(PayloadKind::Remember, init)
    }
1105
    /// Crate-internal variant of [`Self::remember`] that tags the slot with
    /// `PayloadKind::Internal`.
    pub(crate) fn remember_internal<T: 'static>(&self, init: impl FnOnce() -> T) -> Owned<T> {
        self.remember_with_kind(PayloadKind::Internal, init)
    }
1109
    /// Crate-internal variant of [`Self::remember`] that tags the slot with
    /// `PayloadKind::Effect` (used for effect state — see callers).
    pub(crate) fn remember_effect<T: 'static>(&self, init: impl FnOnce() -> T) -> Owned<T> {
        self.remember_with_kind(PayloadKind::Effect, init)
    }
1113
    /// Shared implementation behind the `remember*` entry points: delegates
    /// to the slot session's `remember_with_kind` with the given payload
    /// `kind`, returning the owned handle it produces.
    fn remember_with_kind<T: 'static>(
        &self,
        kind: PayloadKind,
        init: impl FnOnce() -> T,
    ) -> Owned<T> {
        self.with_slot_session_mut(|slots| slots.remember_with_kind(kind, init))
    }
1121
1122 pub fn use_value_slot<'pass, T: 'static>(
1123 &'pass self,
1124 init: impl FnOnce() -> T,
1125 ) -> ValueSlotHandle<'pass, T> {
1126 let slot = self
1127 .with_slot_session_mut(|slots| slots.value_slot_with_kind(PayloadKind::Internal, init));
1128 ValueSlotHandle::new(slot)
1129 }
1130
1131 #[doc(hidden)]
1132 pub fn __use_param_slot<'pass, T: 'static>(
1133 &'pass self,
1134 init: impl FnOnce() -> T,
1135 ) -> ValueSlotHandle<'pass, T> {
1136 let slot = self
1137 .with_slot_session_mut(|slots| slots.value_slot_with_kind(PayloadKind::Param, init));
1138 ValueSlotHandle::new(slot)
1139 }
1140
1141 #[doc(hidden)]
1142 pub fn __use_return_slot<'pass, T: 'static>(
1143 &'pass self,
1144 init: impl FnOnce() -> T,
1145 ) -> ValueSlotHandle<'pass, T> {
1146 let slot = self
1147 .with_slot_session_mut(|slots| slots.value_slot_with_kind(PayloadKind::Return, init));
1148 ValueSlotHandle::new(slot)
1149 }
1150
    /// Runs `f` with shared access to the value stored in `handle`'s slot.
    /// The handle's `'pass` lifetime ties it to this composition pass.
    pub fn with_slot_value<'pass, T: 'static, R>(
        &'pass self,
        handle: ValueSlotHandle<'pass, T>,
        f: impl FnOnce(&T) -> R,
    ) -> R {
        self.with_slots(|slots| f(slots.read_value(handle.slot())))
    }
1158
    /// Runs `f` with exclusive access to the value stored in `handle`'s
    /// slot, allowing in-place mutation.
    pub fn with_slot_value_mut<'pass, T: 'static, R>(
        &'pass self,
        handle: ValueSlotHandle<'pass, T>,
        f: impl FnOnce(&mut T) -> R,
    ) -> R {
        self.with_slots_mut(|slots| f(slots.read_value_mut(handle.slot())))
    }
1166
    /// Creates a [`MutableState`] holding `initial`, bound to this
    /// composer's runtime handle.
    pub fn mutable_state_of<T: Clone + 'static>(&self, initial: T) -> MutableState<T> {
        MutableState::with_runtime(initial, self.runtime_handle())
    }
1170
    /// Creates a [`SnapshotStateList`] seeded with `values`, bound to this
    /// composer's runtime handle.
    pub fn mutable_state_list_of<T, I>(&self, values: I) -> SnapshotStateList<T>
    where
        T: Clone + 'static,
        I: IntoIterator<Item = T>,
    {
        SnapshotStateList::with_runtime(values, self.runtime_handle())
    }
1178
    /// Creates a [`SnapshotStateMap`] seeded with `pairs`, bound to this
    /// composer's runtime handle.
    pub fn mutable_state_map_of<K, V, I>(&self, pairs: I) -> SnapshotStateMap<K, V>
    where
        K: Clone + Eq + Hash + 'static,
        V: Clone + 'static,
        I: IntoIterator<Item = (K, V)>,
    {
        SnapshotStateMap::with_runtime(pairs, self.runtime_handle())
    }
1187
1188 pub fn read_composition_local<T: Clone + 'static>(&self, local: &CompositionLocal<T>) -> T {
1189 let stack = self.core.local_stack.borrow();
1190 for context in stack.iter().rev() {
1191 if let Some(entry) = context.values.get(&local.key) {
1192 let typed = entry
1193 .clone()
1194 .downcast::<LocalStateEntry<T>>()
1195 .expect("composition local type mismatch");
1196 return typed.value();
1197 }
1198 }
1199 local.default_value()
1200 }
1201
1202 pub fn read_static_composition_local<T: Clone + 'static>(
1203 &self,
1204 local: &StaticCompositionLocal<T>,
1205 ) -> T {
1206 let stack = self.core.local_stack.borrow();
1207 for context in stack.iter().rev() {
1208 if let Some(entry) = context.values.get(&local.key) {
1209 let typed = entry
1210 .clone()
1211 .downcast::<StaticLocalEntry<T>>()
1212 .expect("static composition local type mismatch");
1213 return typed.value();
1214 }
1215 }
1216 local.default_value()
1217 }
1218
1219 pub fn current_recranpose_scope(&self) -> Option<RecomposeScope> {
1220 self.core.scope_stack.borrow().last().cloned()
1221 }
1222
    /// Returns the composer's current phase.
    pub fn phase(&self) -> crate::Phase {
        self.core.phase.get()
    }
1226
    /// Crate-internal setter for the composer's current phase.
    pub(crate) fn set_phase(&self, phase: crate::Phase) {
        self.core.phase.set(phase);
    }
1230
    /// Public entry point for moving the composer into `phase`; currently a
    /// plain delegate to the crate-internal setter.
    pub fn enter_phase(&self, phase: crate::Phase) {
        self.set_phase(phase);
    }
1234
    /// Runs `content` as a nested composition bound to `slot_id`, collecting
    /// the nodes and scopes it produces into a fresh subcompose frame.
    ///
    /// May only be called during the measure or layout phase; panics
    /// otherwise. Returns `content`'s result together with the node ids
    /// recorded while it ran.
    pub(crate) fn subcompose<R>(
        &self,
        state: &mut SubcomposeState,
        slot_id: SlotId,
        content: impl FnOnce(&Composer) -> R,
    ) -> (R, Vec<NodeId>) {
        // Enforce the phase precondition up front.
        match self.phase() {
            crate::Phase::Measure | crate::Phase::Layout => {}
            current => panic!(
                "subcompose() may only be called during measure or layout; current phase: {:?}",
                current
            ),
        }

        // Push a frame that records the nodes/scopes created by `content`.
        self.subcompose_stack().push(SubcomposeFrame::default());
        // Guard that pops the frame if `content` unwinds; `leaked` is set to
        // true once the frame has been taken out normally, so Drop then
        // leaves the stack alone.
        struct StackGuard {
            core: Rc<ComposerCore>,
            leaked: bool,
        }
        impl Drop for StackGuard {
            fn drop(&mut self) {
                if !self.leaked {
                    self.core.subcompose_stack.borrow_mut().pop();
                }
            }
        }
        let mut guard = StackGuard {
            core: self.clone_core(),
            leaked: false,
        };

        // Compose into the slot host dedicated to this slot id, inside a
        // group keyed by the raw slot id.
        let slot_host = state.get_or_create_slots(slot_id);
        let (result, _) = self.with_slot_override(slot_host.clone(), |composer| {
            composer.with_group(slot_id.raw(), |composer| content(composer))
        });

        // Take the frame back off the stack and disarm the guard so its Drop
        // does not pop a second time.
        let frame = {
            let mut stack = guard.core.subcompose_stack.borrow_mut();
            let frame = stack.pop().expect("subcompose stack underflow");
            guard.leaked = true;
            frame
        };
        let nodes = frame.nodes;
        let scopes = frame.scopes;
        state.register_active(slot_id, &nodes, &scopes);
        (result, nodes)
    }
1282
1283 pub fn subcompose_measurement<R>(
1284 &self,
1285 state: &mut SubcomposeState,
1286 slot_id: SlotId,
1287 content: impl FnOnce(&Composer) -> R,
1288 ) -> (R, Vec<NodeId>) {
1289 let (result, nodes) = self.subcompose(state, slot_id, content);
1290 let roots = nodes
1291 .into_iter()
1292 .filter(|&id| self.node_has_no_parent(id))
1293 .collect();
1294
1295 (result, roots)
1296 }
1297
    /// Composes `f` into the dedicated slot host `slots` on a temporary
    /// child composer, then replays the resulting commands onto this
    /// composer's applier and runs the queued side effects.
    ///
    /// The child composer inherits this composer's phase, composition-local
    /// stack, applier, runtime handle, and observer; `root` selects an
    /// optional root node for the child composition.
    ///
    /// # Errors
    /// Propagates any [`NodeError`] raised while composing or while applying
    /// the generated commands/updates.
    pub fn subcompose_in<R>(
        &self,
        slots: &Rc<SlotsHost>,
        root: Option<NodeId>,
        f: impl FnOnce(&Composer) -> R,
    ) -> Result<R, NodeError> {
        let runtime_handle = self.runtime_handle();
        let phase = self.phase();
        let locals = self.current_local_stack();
        // Prefer the runtime state already bound to the slot host; fall back
        // to this composer's shared state for an unbound host.
        let shared_state = slots
            .runtime_state()
            .unwrap_or_else(|| Rc::clone(&self.core.shared_state));
        let core = Rc::new(ComposerCore::new(
            shared_state,
            Rc::clone(slots),
            Rc::clone(&self.core.applier),
            runtime_handle.clone(),
            self.observer(),
            root,
        ));
        core.phase.set(phase);
        *core.local_stack.borrow_mut() = locals;
        let composer = Composer::from_core(core);
        // Run the compose pass on the child composer and capture its command
        // queue, side effects, and compaction flag for replay below.
        let (result, commands, side_effects, compact_applier) = composer.install(|composer| {
            let (output, outcome) = composer.try_with_slot_host_pass(
                Rc::clone(slots),
                crate::slot::SlotPassMode::Compose,
                |composer| f(composer),
            )?;
            let commands = composer.take_commands();
            let side_effects = composer.take_side_effects();
            Ok((output, commands, side_effects, outcome.compacted))
        })?;
        {
            // Apply the child's commands and pending runtime updates while
            // the applier borrow is held; the scope releases the borrow.
            let mut applier = self.borrow_applier();
            commands.apply(&mut *applier)?;
            for update in runtime_handle.take_updates() {
                update.apply(&mut *applier)?;
            }
        }
        if compact_applier {
            self.core.applier.compact();
            self.core.applier.borrow_dyn().clear_recycled_nodes();
        }
        // Drain UI work both before and after the side effects — presumably
        // because effects can enqueue more UI work.
        runtime_handle.drain_ui();
        for effect in side_effects {
            effect();
        }
        runtime_handle.drain_ui();
        Ok(result)
    }
1349
    /// Composes `f` into `slots` on a temporary child composer inside a
    /// location-keyed root group, returning `f`'s result together with the
    /// recompose scopes created during the pass.
    ///
    /// Like `subcompose_in`, the child composer inherits this composer's
    /// phase, composition locals, applier, runtime handle, and observer, and
    /// the resulting commands are replayed onto this composer's applier.
    ///
    /// # Errors
    /// Propagates any [`NodeError`] raised while composing or while applying
    /// the generated commands/updates.
    pub fn subcompose_slot<R>(
        &self,
        slots: &Rc<SlotsHost>,
        root: Option<NodeId>,
        f: impl FnOnce(&Composer) -> R,
    ) -> Result<(R, Vec<RecomposeScope>), NodeError> {
        let runtime_handle = self.runtime_handle();
        let phase = self.phase();
        let locals = self.current_local_stack();
        // Prefer the runtime state already bound to the slot host; fall back
        // to this composer's shared state for an unbound host.
        let shared_state = slots
            .runtime_state()
            .unwrap_or_else(|| Rc::clone(&self.core.shared_state));
        let core = Rc::new(ComposerCore::new(
            shared_state,
            Rc::clone(slots),
            Rc::clone(&self.core.applier),
            runtime_handle.clone(),
            self.observer(),
            root,
        ));
        core.phase.set(phase);
        *core.local_stack.borrow_mut() = locals;
        let composer = Composer::from_core(core);
        // Push a frame to record the scopes/nodes created by this pass.
        composer.subcompose_stack().push(SubcomposeFrame::default());
        // Guard that pops the frame if the pass unwinds; `leaked` is set
        // once the frame has been taken out normally.
        struct StackGuard {
            core: Rc<ComposerCore>,
            leaked: bool,
        }
        impl Drop for StackGuard {
            fn drop(&mut self) {
                if !self.leaked {
                    self.core.subcompose_stack.borrow_mut().pop();
                }
            }
        }
        let mut guard = StackGuard {
            core: composer.clone_core(),
            leaked: false,
        };
        // Group key derived from this call site's source location.
        let root_group_key = crate::location_key(file!(), line!(), column!());
        let (result, commands, side_effects, compact_applier) = composer.install(|composer| {
            let (output, outcome) = composer.try_with_slot_host_pass(
                Rc::clone(slots),
                crate::slot::SlotPassMode::Compose,
                |composer| {
                    let output = composer.with_group(root_group_key, |composer| f(composer));
                    // NOTE(review): pop_parent here presumably balances a
                    // parent pushed for the explicit root — confirm against
                    // ComposerCore::new's handling of `root`.
                    if root.is_some() {
                        composer.pop_parent();
                    }
                    output
                },
            )?;
            let commands = composer.take_commands();
            let side_effects = composer.take_side_effects();
            Ok((output, commands, side_effects, outcome.compacted))
        })?;
        // Take the frame back off the stack and disarm the guard.
        let frame = {
            let mut stack = guard.core.subcompose_stack.borrow_mut();
            let frame = stack.pop().expect("subcompose stack underflow");
            guard.leaked = true;
            frame
        };

        {
            // Replay the child's commands and pending runtime updates on
            // this composer's applier.
            let mut applier = self.borrow_applier();
            commands.apply(&mut *applier)?;
            for update in runtime_handle.take_updates() {
                update.apply(&mut *applier)?;
            }
        }
        if compact_applier {
            self.core.applier.compact();
            self.core.applier.borrow_dyn().clear_recycled_nodes();
        }
        // Drain UI work both before and after the side effects — presumably
        // because effects can enqueue more UI work.
        runtime_handle.drain_ui();
        for effect in side_effects {
            effect();
        }
        runtime_handle.drain_ui();
        Ok((result, frame.scopes))
    }
1435
1436 fn attach_root_nodes(&self, root_nodes: Vec<NodeId>) {
1437 for id in root_nodes {
1438 self.attach_to_parent_with_mode(id, true);
1439 }
1440 }
1441
    /// Marks the current slot group as skipped for this pass (delegates to
    /// the slot session's `skip_group`).
    pub fn skip_current_group(&self) {
        self.with_slot_session_mut(|slots| slots.skip_group());
    }
1445
    /// Returns a clone of the runtime handle shared by this composer.
    pub fn runtime_handle(&self) -> RuntimeHandle {
        self.core.runtime.clone()
    }
1449
1450 pub fn set_recranpose_callback<F>(&self, callback: F)
1451 where
1452 F: FnMut(&Composer) + 'static,
1453 {
1454 if let Some(scope) = self.current_recranpose_scope() {
1455 let observer = self.observer();
1456 let scope_weak = scope.downgrade();
1457 let mut callback = callback;
1458 scope.set_recompose(Box::new(move |composer: &Composer| {
1459 if let Some(inner) = scope_weak.upgrade() {
1460 let scope_instance = RecomposeScope { inner };
1461 observer.observe_reads(
1462 scope_instance.clone(),
1463 move |scope_ref| scope_ref.invalidate(),
1464 || {
1465 callback(composer);
1466 },
1467 );
1468 }
1469 }));
1470 }
1471 }
1472
1473 pub fn set_recranpose_fn(&self, callback: fn(&Composer)) {
1474 if let Some(scope) = self.current_recranpose_scope() {
1475 scope.set_recompose_fn(callback);
1476 }
1477 }
1478
1479 pub fn with_composition_locals<R>(
1480 &self,
1481 provided: Vec<ProvidedValue>,
1482 f: impl FnOnce(&Composer) -> R,
1483 ) -> R {
1484 if provided.is_empty() {
1485 return f(self);
1486 }
1487 let mut context = LocalContext::default();
1488 for value in provided {
1489 let (key, entry) = value.into_entry(self);
1490 context.values.insert(key, entry);
1491 }
1492 {
1493 let mut stack = self.local_stack();
1494 Rc::make_mut(&mut *stack).push(context);
1495 }
1496 let result = f(self);
1497 {
1498 let mut stack = self.local_stack();
1499 Rc::make_mut(&mut *stack).pop();
1500 }
1501 result
1502 }
1503}