use std::collections::BTreeMap;

use ahash::HashSet;
use nohash_hasher::{IntMap, IntSet};

use re_chunk::{ComponentIdentifier, RowId, TimelineName};
use re_chunk_store::{ChunkStoreDiffKind, ChunkStoreEvent, ChunkStoreSubscriber};
use re_log_types::{EntityPath, EntityPathHash, EntityPathPart, TimeInt};
use re_query::StorageEngineReadGuard;
10
/// A recursive view of the entity hierarchy, mirroring the `/`-separated
/// structure of the [`EntityPath`]s seen by the store.
#[derive(Debug, Clone)]
pub struct EntityTree {
    /// Full path of this node from the root (e.g. `parent/child1`).
    pub path: EntityPath,

    /// Direct children of this node, keyed by their last path part.
    pub children: BTreeMap<EntityPathPart, EntityTree>,
}
24
25impl ChunkStoreSubscriber for EntityTree {
28 fn name(&self) -> String {
29 "rerun.store_subscribers.EntityTree".into()
30 }
31
32 fn as_any(&self) -> &dyn std::any::Any {
33 self
34 }
35
36 fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
37 self
38 }
39
40 #[expect(clippy::unimplemented)]
41 fn on_events(&mut self, _events: &[ChunkStoreEvent]) {
42 unimplemented!(
43 r"EntityTree view is maintained manually, see `EntityTree::on_store_{{additions|deletions}}`"
44 );
45 }
46}
47
/// Aggregated summary of a batch of [`ChunkStoreEvent`]s.
#[derive(Default)]
pub struct CompactedStoreEvents {
    /// All the row IDs affected by the events.
    pub row_ids: HashSet<RowId>,

    /// Time points touched, per entity, per timeline, per component.
    pub temporal:
        IntMap<EntityPathHash, IntMap<TimelineName, IntMap<ComponentIdentifier, Vec<TimeInt>>>>,

    /// Accumulated absolute chunk deltas for static data, per entity, per component
    /// (see [`CompactedStoreEvents::new`]).
    pub static_: IntMap<EntityPathHash, IntMap<ComponentIdentifier, u64>>,
}
64
65impl CompactedStoreEvents {
66 pub fn new(store_events: &[&ChunkStoreEvent]) -> Self {
67 let mut this = Self {
68 row_ids: store_events
69 .iter()
70 .flat_map(|event| event.chunk.row_ids())
71 .collect(),
72 temporal: Default::default(),
73 static_: Default::default(),
74 };
75
76 for event in store_events {
77 if event.is_static() {
78 let per_component = this
79 .static_
80 .entry(event.chunk.entity_path().hash())
81 .or_default();
82 for component in event.chunk.components_identifiers() {
83 *per_component.entry(component).or_default() += event.delta().unsigned_abs();
84 }
85 } else {
86 for (&timeline, time_column) in event.chunk.timelines() {
87 let per_timeline = this
88 .temporal
89 .entry(event.chunk.entity_path().hash())
90 .or_default();
91 for &time in time_column.times_raw() {
92 let per_component = per_timeline.entry(timeline).or_default();
93 for component in event.chunk.components_identifiers() {
94 per_component
95 .entry(component)
96 .or_default()
97 .push(TimeInt::new_temporal(time));
98 }
99 }
100 }
101 }
102 }
103
104 this
105 }
106}
107
108impl EntityTree {
109 pub fn root() -> Self {
110 Self::new(EntityPath::root())
111 }
112
113 pub fn new(path: EntityPath) -> Self {
114 Self {
115 path,
116 children: Default::default(),
117 }
118 }
119
120 pub fn is_leaf(&self) -> bool {
122 self.children.is_empty()
123 }
124
125 pub fn check_is_empty(&self, engine: &StorageEngineReadGuard<'_>) -> bool {
132 self.children.is_empty() && !engine.store().entity_has_data(&self.path)
133 }
134
135 pub fn on_store_additions(&mut self, events: &[ChunkStoreEvent]) {
140 re_tracing::profile_function!();
141 for event in events
142 .iter()
143 .filter(|e| e.kind == ChunkStoreDiffKind::Addition)
144 {
145 self.on_store_addition(event);
146 }
147 }
148
149 fn on_store_addition(&mut self, event: &ChunkStoreEvent) {
150 re_tracing::profile_function!();
151
152 let entity_path = event.chunk.entity_path();
153
154 let mut tree = self;
156 for (i, part) in entity_path.iter().enumerate() {
157 tree = tree
158 .children
159 .entry(part.clone())
160 .or_insert_with(|| Self::new(entity_path.as_slice()[..=i].into()));
161 }
162 }
163
164 pub fn on_store_deletions(
166 &mut self,
167 engine: &StorageEngineReadGuard<'_>,
168 entity_paths_with_deletions: &IntSet<EntityPath>,
169 events: &[ChunkStoreEvent],
170 ) {
171 let _ = events;
175
176 self.children.retain(|_, entity| {
177 entity.on_store_deletions(engine, entity_paths_with_deletions, events);
179
180 let has_children = || !entity.children.is_empty();
181 let has_recursive_deletion_events = || {
184 entity_paths_with_deletions
185 .iter()
186 .any(|removed_entity_path| removed_entity_path.starts_with(&entity.path))
187 };
188 let has_data = || engine.store().entity_has_data(&entity.path);
189
190 let should_be_removed =
191 !has_children() && (has_recursive_deletion_events() && !has_data());
192
193 !should_be_removed
194 });
195 }
196
197 pub fn subtree(&self, path: &EntityPath) -> Option<&Self> {
198 fn subtree_recursive<'tree>(
199 this: &'tree EntityTree,
200 path: &[EntityPathPart],
201 ) -> Option<&'tree EntityTree> {
202 match path {
203 [] => Some(this),
204 [first, rest @ ..] => {
205 let child = this.children.get(first)?;
206 subtree_recursive(child, rest)
207 }
208 }
209 }
210
211 subtree_recursive(self, path.as_slice())
212 }
213
214 pub fn visit_children_recursively(&self, mut visitor: impl FnMut(&EntityPath)) {
216 fn visit(this: &EntityTree, visitor: &mut impl FnMut(&EntityPath)) {
217 visitor(&this.path);
218 for child in this.children.values() {
219 visit(child, visitor);
220 }
221 }
222
223 visit(self, &mut visitor);
224 }
225
226 pub fn find_first_child_recursive(
233 &self,
234 mut predicate: impl FnMut(&EntityPath) -> bool,
235 ) -> Option<&Self> {
236 fn visit<'a>(
237 this: &'a EntityTree,
238 predicate: &mut impl FnMut(&EntityPath) -> bool,
239 ) -> Option<&'a EntityTree> {
240 if predicate(&this.path) {
241 return Some(this);
242 }
243
244 for child in this.children.values() {
245 if let Some(subtree) = visit(child, predicate) {
246 return Some(subtree);
248 }
249 }
250
251 None
252 }
253
254 visit(self, &mut predicate)
255 }
256}
257
#[cfg(test)]
mod tests {
    use std::sync::Arc;

    use re_chunk::{Chunk, RowId};
    use re_log_types::{
        EntityPath, StoreId, TimePoint, Timeline,
        example_components::{MyPoint, MyPoints},
    };

    use crate::EntityDb;

    /// GC'ing everything must prune the entire entity tree, including the
    /// data-less ancestors of the only entity that ever held data.
    #[test]
    fn deleting_descendants() -> anyhow::Result<()> {
        re_log::setup_logging();

        let mut db = EntityDb::new(StoreId::random(
            re_log_types::StoreKind::Recording,
            "test_app",
        ));

        let timeline_frame = Timeline::new_sequence("frame");

        let path_parent: EntityPath = "parent".into();
        let path_child: EntityPath = "parent/child1".into();
        let path_grandchild: EntityPath = "parent/child1/grandchild".into();

        assert!(db.tree().check_is_empty(&db.storage_engine()));

        // Log a single point on the deepest entity.
        {
            let point = MyPoint::new(1.0, 2.0);
            let chunk = Chunk::builder(path_grandchild.clone())
                .with_component_batches(
                    RowId::new(),
                    TimePoint::from_iter([(timeline_frame, 10)]),
                    [(MyPoints::descriptor_points(), &[point] as _)],
                )
                .build()?;

            db.add_chunk(&Arc::new(chunk))?;
        }

        // All three levels of the hierarchy must now exist and be non-empty.
        {
            let find = |path: &EntityPath| {
                db.tree()
                    .find_first_child_recursive(|entity_path| entity_path == path)
            };

            let parent = find(&path_parent).unwrap();
            let child = find(&path_child).unwrap();
            let grandchild = find(&path_grandchild).unwrap();

            assert_eq!(1, parent.children.len());
            assert_eq!(1, child.children.len());
            assert_eq!(0, grandchild.children.len());

            assert!(!db.tree().check_is_empty(&db.storage_engine()));
            assert!(!parent.check_is_empty(&db.storage_engine()));
            assert!(!child.check_is_empty(&db.storage_engine()));
            assert!(!grandchild.check_is_empty(&db.storage_engine()));
        }

        let _store_events = db.gc(&re_chunk_store::GarbageCollectionOptions::gc_everything());

        // After a full GC, the whole subtree must be gone.
        {
            let find = |path: &EntityPath| {
                db.tree()
                    .find_first_child_recursive(|entity_path| entity_path == path)
            };

            assert!(db.tree().check_is_empty(&db.storage_engine()));
            assert!(find(&path_parent).is_none());
            assert!(find(&path_child).is_none());
            assert!(find(&path_grandchild).is_none());
        }

        Ok(())
    }
}