1use crate::{
2 events::EventsRegistry,
3 node::{Node, NodeKind},
4 render_context::RenderContext,
5};
6use bumpalo::Bump;
7use fxhash::{FxHashMap, FxHashSet};
8use std::sync::atomic::{AtomicUsize, Ordering};
9use std::u32;
10use wasm_bindgen::prelude::*;
11
// Process-wide, monotonically increasing source of fresh `CacheId`s.
static ID_COUNTER: AtomicUsize = AtomicUsize::new(0);
13
// The set of all cached render results, keyed by `CacheId`. Entries are
// created by `CachedSet::insert` and reclaimed by `CachedSet::gc`.
pub_unstable_internal! {
    #[derive(Debug, Default)]
    pub(crate) struct CachedSet {
        // Live cache entries; ids come from the global `ID_COUNTER`.
        items: FxHashMap<CacheId, CacheEntry>,
    }
}
20
// Opaque identifier for an entry in a `CachedSet`. `u32::MAX` is never
// handed out (it is the overflow sentinel in `CachedSet::next_id`).
pub_unstable_internal! {
    #[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
    pub(crate) struct CacheId(u32);
}
25
/// A single cached render result: the arena the node was allocated in,
/// plus a pointer to the node inside that arena (a self-referential pair).
#[derive(Debug)]
pub(crate) struct CacheEntry {
    /// Arena owning the allocation behind `node`. Stored in the same entry
    /// precisely so the pointer below stays valid for the entry's lifetime.
    bump: Bump,

    /// Pointer into `bump`. The `'static` lifetime is a deliberate lie to
    /// make the self-reference storable; it is only dereferenced (in
    /// `CachedSet::gc` / `CachedSet::get`) while this entry — and therefore
    /// `bump` — is still alive.
    node: *const Node<'static>,

    /// Ids of other cache entries referenced from this node's subtree.
    /// This set is *transitive* by construction (see `CachedSet::trace`).
    edges: FxHashSet<CacheId>,

    /// Template entry this one was rendered from, surfaced by
    /// `CachedSet::get` — NOTE(review): exact semantics depend on the
    /// diffing code that consumes it; confirm there.
    template: Option<CacheId>,

    /// When true, the entry survives `gc` even if unreachable from roots.
    pinned: bool,
}
54
55impl From<CacheId> for u32 {
56 #[inline]
57 fn from(id: CacheId) -> u32 {
58 id.0
59 }
60}
61
62impl CachedSet {
63 pub(crate) fn new_roots_set(&self) -> FxHashSet<CacheId> {
64 let mut roots = FxHashSet::default();
65 roots.reserve(self.items.len());
66 roots
67 }
68
69 pub(crate) fn gc(&mut self, registry: &mut EventsRegistry, roots: FxHashSet<CacheId>) {
70 let mut marked = FxHashSet::default();
71 marked.reserve(self.items.len());
72
73 for root in roots {
74 if marked.insert(root) {
75 let entry = self
76 .items
77 .get(&root)
78 .expect_throw("CachedSet::gc: should have root in cached set");
79
80 marked.extend(entry.edges.iter().cloned());
83 }
84 }
85
86 self.items.retain(|id, entry| {
87 let keep = entry.pinned || marked.contains(id);
88 if !keep {
89 let node: &Node = unsafe { &*entry.node };
90 registry.remove_subtree(node);
91 }
92 keep
93 });
94 }
95
96 fn trace(&self, node: &Node) -> FxHashSet<CacheId> {
99 let mut edges = FxHashSet::default();
100 self.trace_recursive(&mut edges, node);
101 edges
102 }
103
104 fn trace_recursive(&self, edges: &mut FxHashSet<CacheId>, node: &Node) {
105 match &node.kind {
106 NodeKind::Text(_) => return,
107 NodeKind::Cached(c) => {
108 debug_assert!(self.items.contains_key(&c.id));
109 edges.insert(c.id);
110 edges.extend(
111 self.items
112 .get(&c.id)
113 .expect_throw("CachedSet::trace: should have c.id in cached set")
114 .edges
115 .iter()
116 .cloned(),
117 );
118 }
119 NodeKind::Element(el) => {
120 for child in el.children {
121 self.trace_recursive(edges, child);
122 }
123 }
124 }
125 }
126
127 fn next_id(&mut self) -> CacheId {
128 let next = ID_COUNTER.fetch_add(1, Ordering::AcqRel) as u32;
129 let next = if next == u32::MAX { None } else { Some(next) };
130 CacheId(next.expect_throw("ID_COUNTER overflowed"))
131 }
132
133 pub(crate) fn insert<F>(
134 cx: &mut RenderContext,
135 pinned: bool,
136 template: Option<CacheId>,
137 f: F,
138 ) -> CacheId
139 where
140 F: for<'a> FnOnce(&mut RenderContext<'a>) -> Node<'a>,
141 {
142 let set = cx.cached_set;
143 let bump = Bump::new();
144 let (node, edges) = {
145 let mut nested_cx = RenderContext::new(&bump, cx.cached_set, cx.templates);
146 let node = f(&mut nested_cx);
147 let node = bump.alloc(node);
148 let edges = {
149 let set = set.borrow();
150 set.trace(node)
151 };
152 (
153 node as *mut Node<'_> as usize as *const Node<'static>,
154 edges,
155 )
156 };
157
158 let entry = CacheEntry {
159 bump,
160 node,
161 edges,
162 template,
163 pinned,
164 };
165
166 let mut set = set.borrow_mut();
167 let id = set.next_id();
168 set.items.insert(id, entry);
169 id
170 }
171
172 pub fn contains(&self, id: CacheId) -> bool {
174 self.items.contains_key(&id)
175 }
176
177 pub fn get(&self, id: CacheId) -> (&Node, Option<CacheId>) {
179 let entry = self
180 .items
181 .get(&id)
182 .expect_throw("CachedSet::get: should have id in set");
183 let node: &Node = unsafe { &*entry.node };
184 (node, entry.template)
185 }
186}