use nohash_hasher::{IntMap, IntSet};
use re_byte_size::SizeBytes;
use re_chunk_store::{LatestAtQuery, MissingChunkReporter};
use re_entity_db::EntityDb;
use re_log::debug_assert;
use crate::frame_id_registry::FrameIdRegistry;
use crate::transform_resolution_cache::ParentFromChildTransform;
use crate::{
CachedTransformsForTimeline, ResolvedPinholeProjection, TransformFrameIdHash,
TransformResolutionCache, image_view_coordinates,
};
/// A transform relating a source frame to the root of the transform tree it belongs to.
#[derive(Clone, Debug, PartialEq)]
pub struct TreeTransform {
    /// The root frame of the tree this transform belongs to.
    pub root: TransformFrameIdHash,
    /// Affine transform mapping points in the source frame into the target frame.
    pub target_from_source: glam::DAffine3,
}
impl TreeTransform {
    /// The identity transform, rooted at `root`.
    fn new_root(root: TransformFrameIdHash) -> Self {
        Self {
            root,
            target_from_source: glam::DAffine3::IDENTITY,
        }
    }

    /// Re-bases this transform: given `target_from_reference`, turns the stored
    /// `reference_from_source` into `target_from_source`. The root is kept as-is.
    fn left_multiply(&self, target_from_reference: glam::DAffine3) -> Self {
        Self {
            root: self.root,
            target_from_source: target_from_reference * self.target_from_source,
        }
    }
}
impl SizeBytes for TreeTransform {
    fn heap_size_bytes(&self) -> u64 {
        // Destructure so the compiler flags this impl when fields are added.
        let Self {
            root,
            target_from_source,
        } = self;
        root.heap_size_bytes() + target_from_source.heap_size_bytes()
    }
}
impl re_byte_size::MemUsageTreeCapture for TreeTransform {
    fn capture_mem_usage_tree(&self) -> re_byte_size::MemUsageTree {
        re_tracing::profile_function!();
        // Destructure so the compiler flags this impl when fields are added.
        let Self {
            root,
            target_from_source,
        } = self;
        re_byte_size::MemUsageNode::new()
            .with_child("root", root.total_size_bytes())
            .with_child("target_from_source", target_from_source.total_size_bytes())
            .into_tree()
    }
}
/// Errors that can occur when querying the transform between two frames.
#[derive(Clone, Debug, thiserror::Error, PartialEq, Eq)]
pub enum TransformFromToError {
    /// The target frame is not present in the forest at all.
    #[error("No transform relationships about the target frame {0:?} are known")]
    UnknownTargetFrame(TransformFrameIdHash),

    /// The source frame is not present in the forest at all.
    #[error("No transform relationships about the source frame {0:?} are known")]
    UnknownSourceFrame(TransformFrameIdHash),

    /// Both frames are known but live in disconnected transform trees.
    #[error(
        "There's no path between {target:?} and {src:?}. The target's root is {target_root:?}, the source's root is {source_root:?}"
    )]
    NoPathBetweenFrames {
        target: TransformFrameIdHash,
        src: TransformFrameIdHash,
        target_root: TransformFrameIdHash,
        source_root: TransformFrameIdHash,
    },
}
impl TransformFromToError {
    /// Convenience constructor for [`Self::NoPathBetweenFrames`].
    fn no_path_between_target_and_source(target: &TargetInfo, source: &SourceInfo<'_>) -> Self {
        Self::NoPathBetweenFrames {
            target: target.id,
            target_root: target.root,
            src: source.id,
            source_root: source.root,
        }
    }
}
/// Per-call information about the target frame of a `transform_from_to` query.
struct TargetInfo {
    // The target frame itself.
    id: TransformFrameIdHash,
    // Root of the tree the target frame lives in.
    root: TransformFrameIdHash,
    // Inverse of the target's root-from-frame transform, computed once per query.
    target_from_root: glam::DAffine3,
}
/// Per-source information within a `transform_from_to` query.
struct SourceInfo<'a> {
    // The source frame itself.
    id: TransformFrameIdHash,
    // Root of the tree the source frame lives in.
    root: TransformFrameIdHash,
    // Transform from the source frame to its tree root, borrowed from the forest.
    root_from_source: &'a TreeTransform,
}
/// Information about a tree root that originates from a pinhole projection.
#[derive(Clone, Debug, PartialEq)]
pub struct PinholeTreeRoot {
    /// Root of the (parent) tree the pinhole hangs off of.
    pub parent_tree_root: TransformFrameIdHash,
    /// The pinhole projection that caused this subtree to be split off.
    pub pinhole_projection: ResolvedPinholeProjection,
    /// Transform from the pinhole's frame to the parent tree's root.
    pub parent_root_from_pinhole_root: glam::DAffine3,
}
impl SizeBytes for PinholeTreeRoot {
    fn heap_size_bytes(&self) -> u64 {
        // Destructure so the compiler flags this impl when fields are added.
        let Self {
            parent_tree_root,
            pinhole_projection,
            parent_root_from_pinhole_root,
        } = self;
        parent_tree_root.heap_size_bytes()
            + pinhole_projection.heap_size_bytes()
            + parent_root_from_pinhole_root.heap_size_bytes()
    }
}
/// Describes what kind of root a transform tree in the forest has.
#[derive(Clone, Debug, PartialEq)]
pub enum TransformTreeRootInfo {
    /// A regular transform frame without a known parent.
    TransformFrameRoot,
    /// A 2D subtree hanging off a pinhole projection in a parent tree.
    Pinhole(PinholeTreeRoot),
}
impl SizeBytes for TransformTreeRootInfo {
    fn heap_size_bytes(&self) -> u64 {
        // Only the pinhole variant owns heap data.
        if let Self::Pinhole(pinhole_tree_root) = self {
            pinhole_tree_root.heap_size_bytes()
        } else {
            0
        }
    }
}
/// A forest of transform trees, built from the transform cache for a single
/// timeline/time query.
#[derive(Default, Clone)]
pub struct TransformForest {
    // Records whether any chunks were found missing while building the forest.
    missing_chunk_reporter: MissingChunkReporter,
    // All tree roots, with information about their kind.
    roots: IntMap<TransformFrameIdHash, TransformTreeRootInfo>,
    // For every known frame, the transform to the root of the tree it belongs to.
    root_from_frame: IntMap<TransformFrameIdHash, TreeTransform>,
}
impl TransformForest {
    /// Builds the forest for all frames known to `transform_cache` at the given query time.
    pub fn new(
        entity_db: &EntityDb,
        transform_cache: &TransformResolutionCache,
        query: &LatestAtQuery,
    ) -> Self {
        re_tracing::profile_function!();
        let transforms = transform_cache.transforms_for_timeline(query.timeline());
        let frame_id_registry = transform_cache.frame_id_registry();
        // Frames that haven't been assigned to a tree yet.
        let mut unprocessed_frames: IntSet<_> = frame_id_registry.iter_frame_id_hashes().collect();
        let mut transform_stack = Vec::new();
        let mut forest = Self::default();
        // Pick an arbitrary unprocessed frame, walk towards its root (or the first
        // already-processed ancestor), then fold the collected chain into the forest.
        while let Some(current_frame) = unprocessed_frames.iter().next().copied() {
            walk_towards_parent(
                entity_db,
                &forest.missing_chunk_reporter,
                query,
                current_frame,
                &frame_id_registry,
                &transforms,
                &mut unprocessed_frames,
                &mut transform_stack,
            );
            debug_assert!(
                !transform_stack.is_empty(),
                "There should be at least one element in the transform stack since we know we had at least one unprocessed element to start with."
            );
            forest.add_stack_of_transforms(transform_cache, &mut transform_stack);
            debug_assert!(
                transform_stack.is_empty(),
                "Expected add_stack_of_transforms to consume an entire transform stack."
            );
        }
        forest
    }

    /// Whether any chunks were reported missing while building the forest.
    pub fn any_missing_chunks(&self) -> bool {
        self.missing_chunk_reporter.any_missing()
    }

    /// Consumes `transform_stack` (top = closest to the root) and registers a
    /// root-from-frame transform for every frame on the stack, creating new tree
    /// roots as needed. `cache` is only used for diagnostics in debug builds.
    fn add_stack_of_transforms(
        &mut self,
        cache: &TransformResolutionCache,
        transform_stack: &mut Vec<ParentChildTransforms>,
    ) {
        re_tracing::profile_function!();
        let Some(top_of_stack) = transform_stack.last() else {
            return;
        };
        // Determine the root the stack attaches to and the accumulated transform so far.
        let (mut root_frame, mut root_from_target) = if let Some(parent_frame) =
            top_of_stack.parent_frame
        {
            if let Some(root_from_frame) = self.root_from_frame.get(&parent_frame) {
                // The parent was processed before: attach to its existing tree.
                debug_assert!(
                    self.roots.contains_key(&root_from_frame.root),
                    "Known root must be registered as such"
                );
                (root_from_frame.root, root_from_frame.target_from_source)
            } else {
                // The parent is unknown so far: it becomes a new tree root.
                let previous_root = self.roots.insert(
                    parent_frame,
                    TransformTreeRootInfo::TransformFrameRoot,
                );
                debug_assert!(previous_root.is_none(), "Root was added already");
                self.root_from_frame
                    .insert(parent_frame, TreeTransform::new_root(parent_frame));
                (parent_frame, glam::DAffine3::IDENTITY)
            }
        } else {
            // No parent at all: the topmost child frame itself is a root -
            // either a pinhole root or a plain transform-frame root.
            let previous_root = if let Some(pinhole_projection) = &top_of_stack.pinhole_projection {
                let new_root_info = TransformTreeRootInfo::Pinhole(PinholeTreeRoot {
                    parent_tree_root: top_of_stack.child_frame,
                    pinhole_projection: pinhole_projection.clone(),
                    parent_root_from_pinhole_root: glam::DAffine3::IDENTITY,
                });
                self.roots.insert(top_of_stack.child_frame, new_root_info)
            } else {
                self.roots.insert(
                    top_of_stack.child_frame,
                    TransformTreeRootInfo::TransformFrameRoot,
                )
            };
            debug_assert!(previous_root.is_none(), "Root was added already");
            (top_of_stack.child_frame, glam::DAffine3::IDENTITY)
        };
        // Walk back down the stack, accumulating root-from-frame transforms.
        while let Some(transforms) = transform_stack.pop() {
            let mut root_from_current_frame = root_from_target
                * transforms
                    .parent_from_child
                    .map_or(glam::DAffine3::IDENTITY, |target_from_source| {
                        target_from_source.transform
                    });
            if let Some(pinhole_projection) = transforms.pinhole_projection {
                // A pinhole splits off a new (2D) subtree rooted at this child frame.
                let new_root_info = TransformTreeRootInfo::Pinhole(PinholeTreeRoot {
                    parent_tree_root: root_frame,
                    pinhole_projection: pinhole_projection.clone(),
                    parent_root_from_pinhole_root: root_from_current_frame,
                });
                root_frame = transforms.child_frame;
                let previous_root = self.roots.insert(root_frame, new_root_info);
                debug_assert!(
                    previous_root.is_none(),
                    "Root was added already at {:?} as {previous_root:?}",
                    cache.frame_id_registry().lookup_frame_id(root_frame)
                );
                // Transforms below the pinhole are measured relative to the new root.
                root_from_current_frame = glam::DAffine3::IDENTITY;
            }
            let transform_root_from_current = TreeTransform {
                root: root_frame,
                target_from_source: root_from_current_frame,
            };
            let _previous_transform = self
                .root_from_frame
                .insert(transforms.child_frame, transform_root_from_current);
            #[cfg(debug_assertions)]
            {
                let frame_id_registry = cache.frame_id_registry();
                debug_assert!(
                    _previous_transform.is_none(),
                    "Root from frame relationship was added already for {:?}. Now targeting {:?}, previously {:?}",
                    frame_id_registry.lookup_frame_id(transforms.child_frame),
                    frame_id_registry.lookup_frame_id(root_frame),
                    _previous_transform.and_then(|f| frame_id_registry.lookup_frame_id(f.root))
                );
            }
            root_from_target = root_from_current_frame;
        }
    }
}
impl SizeBytes for TransformForest {
    fn heap_size_bytes(&self) -> u64 {
        re_tracing::profile_function!();
        // Destructure so the compiler flags this impl when fields are added.
        let Self {
            missing_chunk_reporter: _,
            roots,
            root_from_frame,
        } = self;
        root_from_frame.heap_size_bytes() + roots.heap_size_bytes()
    }
}
impl re_byte_size::MemUsageTreeCapture for TransformForest {
    fn capture_mem_usage_tree(&self) -> re_byte_size::MemUsageTree {
        re_tracing::profile_function!();
        // Destructure so the compiler flags this impl when fields are added.
        let Self {
            missing_chunk_reporter: _,
            roots,
            root_from_frame,
        } = self;
        re_byte_size::MemUsageNode::new()
            .with_child("roots", roots.total_size_bytes())
            .with_child("root_from_frame", root_from_frame.total_size_bytes())
            .into_tree()
    }
}
/// Walks from `current_frame` up the chain of parent frames, pushing the encountered
/// parent/child transform relationships onto `transform_stack`.
///
/// Every visited frame is removed from `unprocessed_frames`; the walk stops as soon as
/// a frame has no parent or the next frame was already processed earlier.
#[expect(clippy::too_many_arguments)]
fn walk_towards_parent(
    entity_db: &EntityDb,
    missing_chunk_reporter: &MissingChunkReporter,
    query: &LatestAtQuery,
    current_frame: TransformFrameIdHash,
    id_registry: &FrameIdRegistry,
    transforms: &CachedTransformsForTimeline,
    unprocessed_frames: &mut IntSet<TransformFrameIdHash>,
    transform_stack: &mut Vec<ParentChildTransforms>,
) {
    re_tracing::profile_function!();
    debug_assert!(
        transform_stack.is_empty(),
        "Didn't process the last transform stack fully."
    );
    let mut next_frame = Some(current_frame);
    // Keep climbing while there is a parent that hasn't been processed yet.
    // Bug fix: the condition previously contained the mojibake `¤t_frame`
    // (a corrupted `&current_frame` where `&curren` was mangled into `¤`).
    while let Some(current_frame) = next_frame
        && unprocessed_frames.remove(&current_frame)
    {
        let transforms = transforms_at(
            entity_db,
            missing_chunk_reporter,
            current_frame,
            query,
            id_registry,
            transforms,
        );
        next_frame = transforms.parent_frame;
        transform_stack.push(transforms);
    }
}
/// Returns the implicit parent frame derived from the entity path hierarchy:
/// the hash of the parent entity path, if any.
///
/// Returns `None` if the frame is not backed by an entity path, or if that path
/// has no parent (i.e. it is the root path).
fn implicit_transform_parent(
    frame: TransformFrameIdHash,
    id_registry: &FrameIdRegistry,
) -> Option<TransformFrameIdHash> {
    // Fixed a stray `&` that made this assert on a `&bool` (worked only via `Not for &bool`).
    debug_assert!(
        id_registry.lookup_frame_id(frame).is_some(),
        "Frame id hash {frame:?} is not known to the cache at all."
    );
    Some(TransformFrameIdHash::from_entity_path(
        &id_registry
            .lookup_frame_id(frame)?
            .as_entity_path()?
            .parent()?,
    ))
}
impl TransformForest {
    /// Iterates over all tree roots that are plain transform frames (i.e. not pinhole roots).
    pub fn transform_frame_roots(&self) -> impl Iterator<Item = TransformFrameIdHash> {
        self.roots
            .iter()
            .filter(|(_, info)| matches!(info, TransformTreeRootInfo::TransformFrameRoot))
            .map(|(id, _)| *id)
    }

    /// Root information for `root_frame`, or `None` if it isn't a tree root.
    #[inline]
    pub fn root_info(&self, root_frame: TransformFrameIdHash) -> Option<&TransformTreeRootInfo> {
        self.roots.get(&root_frame)
    }

    /// Pinhole root information for `root_frame`, or `None` if it isn't a pinhole tree root.
    #[inline]
    pub fn pinhole_tree_root_info(
        &self,
        root_frame: TransformFrameIdHash,
    ) -> Option<&PinholeTreeRoot> {
        if let TransformTreeRootInfo::Pinhole(pinhole_tree_root) = self.roots.get(&root_frame)? {
            Some(pinhole_tree_root)
        } else {
            None
        }
    }

    /// Transform from `frame` to the root of its tree, or `None` if the frame is unknown.
    #[inline]
    pub fn root_from_frame(&self, frame: TransformFrameIdHash) -> Option<&TreeTransform> {
        self.root_from_frame.get(&frame)
    }

    /// Computes, for each source frame, the transform into the `target` frame.
    ///
    /// Yields one `(source, result)` pair per source, in input order.
    /// `lookup_image_plane_distance` supplies the image plane distance used when a
    /// 2D (pinhole) source subtree has to be connected to its parent 3D tree.
    pub fn transform_from_to(
        &self,
        target: TransformFrameIdHash,
        sources: impl Iterator<Item = TransformFrameIdHash>,
        lookup_image_plane_distance: &dyn Fn(TransformFrameIdHash) -> f64,
    ) -> impl Iterator<
        Item = (
            TransformFrameIdHash,
            Result<TreeTransform, TransformFromToError>,
        ),
    > {
        let Some(root_from_target) = self.root_from_frame.get(&target) else {
            // Unknown target frame: report the same error for every source.
            return itertools::Either::Left(sources.map(move |source| {
                (
                    source,
                    Err(TransformFromToError::UnknownTargetFrame(target)),
                )
            }));
        };
        let target = {
            let TreeTransform {
                root: target_root,
                target_from_source: root_from_entity,
            } = &root_from_target;
            TargetInfo {
                id: target,
                root: *target_root,
                // Invert once up front; reused for every source below.
                target_from_root: root_from_entity.inverse(),
            }
        };
        let target_root_info = self.roots.get(&target.root);
        // Caches the transform across a pinhole boundary, keyed by the source's tree root.
        let mut pinhole_tree_connector_cache = IntMap::default();
        itertools::Either::Right(sources.map(move |source| {
            let Some(root_from_source) = self.root_from_frame.get(&source) else {
                return (
                    source,
                    Err(TransformFromToError::UnknownSourceFrame(source)),
                );
            };
            let source = SourceInfo {
                id: source,
                root: root_from_source.root,
                root_from_source,
            };
            let result = if source.root == target.root {
                // Both frames live in the same tree.
                if source.root == target.id {
                    // The target is the tree root itself, so `root_from_source`
                    // already is `target_from_source`.
                    Ok(source.root_from_source.clone())
                } else {
                    Ok(root_from_source.left_multiply(target.target_from_root))
                }
            }
            // Target lives in a 2D subtree hanging off a pinhole?
            else if let Some(TransformTreeRootInfo::Pinhole(pinhole_tree_root)) = target_root_info
            {
                from_3d_source_to_2d_target(
                    &target,
                    &source,
                    pinhole_tree_root,
                    &mut pinhole_tree_connector_cache,
                )
            }
            // Source lives in a 2D subtree hanging off a pinhole?
            else if let Some(TransformTreeRootInfo::Pinhole(pinhole_tree_root)) =
                self.roots.get(&source.root)
            {
                from_2d_source_to_3d_target(
                    &target,
                    &source,
                    pinhole_tree_root,
                    lookup_image_plane_distance,
                    &mut pinhole_tree_connector_cache,
                )
            }
            // The trees are disconnected.
            else {
                Err(TransformFromToError::no_path_between_target_and_source(
                    &target, &source,
                ))
            };
            (source.id, result)
        }))
    }
}
/// Computes the transform from a source frame inside a 2D (pinhole) subtree to a
/// target frame in a 3D tree, by placing the 2D content on an image plane at the
/// distance given by `lookup_image_plane_distance`.
///
/// Fails if the source's pinhole subtree doesn't hang off the target's tree root.
fn from_2d_source_to_3d_target(
    target: &TargetInfo,
    source: &SourceInfo<'_>,
    source_pinhole_tree_root: &PinholeTreeRoot,
    lookup_image_plane_distance: &dyn Fn(TransformFrameIdHash) -> f64,
    target_from_image_plane_cache: &mut IntMap<TransformFrameIdHash, glam::DAffine3>,
) -> Result<TreeTransform, TransformFromToError> {
    let PinholeTreeRoot {
        parent_tree_root,
        pinhole_projection,
        parent_root_from_pinhole_root: root_from_pinhole3d,
    } = source_pinhole_tree_root;
    // The pinhole subtree must be attached to the target's tree.
    if *parent_tree_root != target.root {
        return Err(TransformFromToError::no_path_between_target_and_source(
            target, source,
        ));
    }
    let image_plane_from_source = source.root_from_source;
    // The target-from-image-plane chain only depends on the source's pinhole root,
    // so it is shared by all sources in the same subtree via the cache.
    let target_from_image_plane = target_from_image_plane_cache
        .entry(source.root)
        .or_insert_with(|| {
            let pinhole_image_plane_distance = lookup_image_plane_distance(source.root);
            let pinhole3d_from_image_plane =
                pinhole3d_from_image_plane(pinhole_projection, pinhole_image_plane_distance);
            target.target_from_root * root_from_pinhole3d * pinhole3d_from_image_plane
        });
    Ok(image_plane_from_source.left_multiply(*target_from_image_plane))
}
/// Computes the transform from a source frame in a 3D tree to a target frame inside
/// a 2D (pinhole) subtree, by inverting the pinhole's image plane placement.
///
/// Fails if the target's pinhole subtree doesn't hang off the source's tree root.
fn from_3d_source_to_2d_target(
    target: &TargetInfo,
    source: &SourceInfo<'_>,
    target_pinhole_tree_root: &PinholeTreeRoot,
    target_from_source_root_cache: &mut IntMap<TransformFrameIdHash, glam::DAffine3>,
) -> Result<TreeTransform, TransformFromToError> {
    // Image plane distance used when projecting into the 2D target.
    // NOTE(review): unlike the 2D→3D direction this is hardcoded rather than looked
    // up via a `lookup_image_plane_distance` callback — TODO confirm this is intended.
    const IMAGE_PLANE_DISTANCE: f64 = 500.0;

    let PinholeTreeRoot {
        parent_tree_root,
        pinhole_projection,
        parent_root_from_pinhole_root: root_from_pinhole3d,
    } = target_pinhole_tree_root;

    // The pinhole subtree must be attached to the source's tree.
    if *parent_tree_root != source.root {
        return Err(TransformFromToError::no_path_between_target_and_source(
            target, source,
        ));
    }

    let target_from_image_plane = target.target_from_root;
    // The target-from-root chain only depends on the source's tree root, so it is
    // shared by all sources in the same tree via the cache.
    let target_from_root = target_from_source_root_cache
        .entry(source.root)
        .or_insert_with(|| {
            let pinhole3d_from_image_plane =
                pinhole3d_from_image_plane(pinhole_projection, IMAGE_PLANE_DISTANCE);
            let image_plane_from_pinhole3d = pinhole3d_from_image_plane.inverse();
            let pinhole3d_from_root = root_from_pinhole3d.inverse();
            target_from_image_plane * image_plane_from_pinhole3d * pinhole3d_from_root
        });
    Ok(source.root_from_source.left_multiply(*target_from_root))
}
/// Builds the affine transform that places 2D image content onto a 3D image plane
/// at `pinhole_image_plane_distance` in front of the pinhole camera.
fn pinhole3d_from_image_plane(
    resolved_pinhole_projection: &ResolvedPinholeProjection,
    pinhole_image_plane_distance: f64,
) -> glam::DAffine3 {
    let ResolvedPinholeProjection {
        parent: _,
        image_from_camera,
        resolution: _,
        view_coordinates,
    } = resolved_pinhole_projection;
    let focal_length = image_from_camera.focal_length_in_pixels();
    let focal_length = glam::dvec2(focal_length.x() as f64, focal_length.y() as f64);
    // Per-axis scale from pixels to world units at the chosen plane distance.
    let scale = pinhole_image_plane_distance / focal_length;
    // Shift so that the principal point ends up on the camera axis, then push the
    // plane out to the given distance.
    let translation = (glam::DVec2::from(-image_from_camera.principal_point()) * scale)
        .extend(pinhole_image_plane_distance);
    let image_plane3d_from_2d_content = glam::DAffine3::from_translation(translation)
        * glam::DAffine3::from_scale(
            // Z-scale is the harmonic mean of the x/y scales.
            scale.extend(2.0 / (1.0 / scale.x + 1.0 / scale.y)),
        );
    // Rotate from the canonical image coordinate convention into the pinhole's
    // configured view coordinates.
    let obj_from_image_plane3d = glam::DMat3::from_cols_array(
        &view_coordinates
            .from_other(&image_view_coordinates())
            .to_cols_array()
            .map(|x| x as f64),
    );
    glam::DAffine3::from_mat3(obj_from_image_plane3d) * image_plane3d_from_2d_content
}
/// The transform relationships of a single child frame at a given time.
struct ParentChildTransforms {
    // The parent frame, if any (explicit, via pinhole, or implicit from the entity path).
    parent_frame: Option<TransformFrameIdHash>,
    // The frame these relationships were looked up for.
    child_frame: TransformFrameIdHash,
    // Explicit (or implicit identity) transform from child to parent, if known.
    parent_from_child: Option<ParentFromChildTransform>,
    // Pinhole projection logged at this frame, if any.
    pinhole_projection: Option<ResolvedPinholeProjection>,
}
/// Looks up the parent frame and the parent-from-child transform (plus any pinhole
/// projection) for `child_frame` at the given query time.
///
/// If neither an explicit transform nor a pinhole parent is known, falls back to the
/// implicit entity-path-hierarchy parent with an identity transform.
fn transforms_at(
    entity_db: &EntityDb,
    missing_chunk_reporter: &MissingChunkReporter,
    child_frame: TransformFrameIdHash,
    query: &LatestAtQuery,
    id_registry: &FrameIdRegistry,
    transforms_for_timeline: &CachedTransformsForTimeline,
) -> ParentChildTransforms {
    #![expect(clippy::useless_let_if_seq)]
    let mut parent_from_child;
    let pinhole_projection;
    if let Some(source_transforms) = transforms_for_timeline.frame_transforms(child_frame) {
        parent_from_child =
            source_transforms.latest_at_transform(entity_db, missing_chunk_reporter, query);
        pinhole_projection =
            source_transforms.latest_at_pinhole(entity_db, missing_chunk_reporter, query);
    } else {
        parent_from_child = None;
        pinhole_projection = None;
    }
    let parent_frame = if let Some(transform) = parent_from_child.as_ref() {
        // A frame may only have a single parent; if the pinhole and the transform
        // disagree on who that is, warn (the explicit transform wins below).
        if let Some(pinhole_projection) = pinhole_projection.as_ref()
            && pinhole_projection.parent != transform.parent
        {
            let transform_frame = id_registry.lookup_frame_id(child_frame);
            let pinhole_parent_frame = id_registry.lookup_frame_id(pinhole_projection.parent);
            let transform_parent_frame = id_registry.lookup_frame_id(transform.parent);
            if let Some(transform_frame) = transform_frame
                && let Some(pinhole_parent_frame) = pinhole_parent_frame
                && let Some(transform_parent_frame) = transform_parent_frame
            {
                re_log::warn_once!(
                    "The transform frame {transform_frame:?} is connected to {pinhole_parent_frame:?} via a pinhole but also connected to {transform_parent_frame:?} via a transform. Any frame is only ever allowed to have a single parent at any given time.",
                );
            } else {
                // At least one frame id failed to resolve - this shouldn't happen.
                for frame in [
                    transform_frame,
                    pinhole_parent_frame,
                    transform_parent_frame,
                ] {
                    if frame.is_none() {
                        // NOTE(review): `frame` is `None` in this branch, so the message
                        // prints `None` rather than the offending hash.
                        re_log::debug_panic!(
                            "Couldn't resolve frame id for {frame:?} in the registry, even though it was present in the transforms for timeline.",
                        );
                    }
                }
            }
        }
        Some(transform.parent)
    } else if let Some(pinhole_projection) = pinhole_projection.as_ref() {
        // No explicit transform, but the pinhole names a parent.
        Some(pinhole_projection.parent)
    } else if let Some(parent) = implicit_transform_parent(child_frame, id_registry) {
        // Fall back to the entity-path hierarchy with an identity transform.
        parent_from_child = Some(ParentFromChildTransform {
            parent,
            transform: glam::DAffine3::IDENTITY,
        });
        Some(parent)
    } else {
        None
    };
    ParentChildTransforms {
        parent_frame,
        child_frame,
        parent_from_child,
        pinhole_projection,
    }
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use itertools::Itertools as _;
use re_chunk_store::Chunk;
use re_entity_db::EntityDb;
use re_log_types::{EntityPath, StoreInfo, TimeCell, TimePoint, Timeline, TimelineName};
use re_sdk_types::components::TransformFrameId;
use re_sdk_types::{RowId, archetypes, components};
use super::*;
    /// A small pinhole archetype used throughout the tests.
    fn test_pinhole() -> archetypes::Pinhole {
        archetypes::Pinhole::from_focal_length_and_resolution([1.0, 2.0], [100.0, 200.0])
    }
    /// The resolved-projection equivalent of [`test_pinhole`], attached to `parent`.
    fn test_resolved_pinhole(parent: TransformFrameIdHash) -> ResolvedPinholeProjection {
        ResolvedPinholeProjection {
            parent,
            image_from_camera: components::PinholeProjection::from_focal_length_and_principal_point(
                [1.0, 2.0],
                [50.0, 100.0],
            ),
            resolution: Some([100.0, 200.0].into()),
            view_coordinates: archetypes::Pinhole::DEFAULT_CAMERA_XYZ,
        }
    }
    /// Builds a scene whose transform hierarchy comes purely from entity paths:
    /// `top` (with transform + instance poses), `top/pinhole` (transform + pinhole),
    /// `top/pure_leaf_pinhole` (pinhole only), `top/pinhole/child2d`, `top/child3d`.
    fn entity_hierarchy_test_scene() -> Result<EntityDb, Box<dyn std::error::Error>> {
        let mut entity_db = EntityDb::new(StoreInfo::testing().store_id);
        entity_db.add_chunk(&Arc::new(
            Chunk::builder(EntityPath::from("top"))
                .with_archetype_auto_row(
                    TimePoint::STATIC,
                    &archetypes::Transform3D::from_translation([1.0, 0.0, 0.0]),
                )
                .with_archetype_auto_row(
                    TimePoint::STATIC,
                    &archetypes::InstancePoses3D::new()
                        .with_translations([[10.0, 0.0, 0.0], [20.0, 0.0, 0.0]]),
                )
                .build()?,
        ))?;
        entity_db.add_chunk(&Arc::new(
            Chunk::builder(EntityPath::from("top/pinhole"))
                .with_archetype_auto_row(
                    TimePoint::STATIC,
                    &archetypes::Transform3D::from_translation([0.0, 1.0, 0.0]),
                )
                .with_archetype(RowId::new(), TimePoint::STATIC, &test_pinhole())
                .build()?,
        ))?;
        entity_db.add_chunk(&Arc::new(
            Chunk::builder(EntityPath::from("top/pure_leaf_pinhole"))
                .with_archetype(RowId::new(), TimePoint::STATIC, &test_pinhole())
                .build()?,
        ))?;
        entity_db.add_chunk(&Arc::new(
            Chunk::builder(EntityPath::from("top/pinhole/child2d"))
                .with_archetype_auto_row(
                    TimePoint::STATIC,
                    &archetypes::Transform3D::from_translation([2.0, 0.0, 0.0]),
                )
                .build()?,
        ))?;
        entity_db.add_chunk(&Arc::new(
            Chunk::builder(EntityPath::from("top/child3d"))
                .with_archetype_auto_row(
                    TimePoint::STATIC,
                    &archetypes::Transform3D::from_translation([0.0, 0.0, 1.0]),
                )
                .build()?,
        ))?;
        Ok(entity_db)
    }
fn pretty_print_transform_frame_ids_in<T: std::fmt::Debug>(
obj: T,
transform_cache: &TransformResolutionCache,
) -> String {
let mut result = format!("{obj:#?}");
for (hash, frame) in transform_cache.frame_id_registry().iter_frame_ids() {
result = result.replace(&format!("{hash:#?}"), &format!("{frame}"));
}
result
}
#[test]
fn test_simple_entity_hierarchy() -> Result<(), Box<dyn std::error::Error>> {
let test_scene = entity_hierarchy_test_scene()?;
let mut transform_cache = TransformResolutionCache::new(&test_scene);
transform_cache.ensure_timeline_is_initialized(
test_scene.storage_engine().store(),
TimelineName::log_tick(),
);
let query = LatestAtQuery::latest(TimelineName::log_tick());
let transform_forest = TransformForest::new(&test_scene, &transform_cache, &query);
assert!(!transform_forest.any_missing_chunks());
{
assert_eq!(
transform_forest.root_info(TransformFrameIdHash::entity_path_hierarchy_root()),
Some(&TransformTreeRootInfo::TransformFrameRoot)
);
assert_eq!(
transform_forest.root_info(TransformFrameIdHash::from_entity_path(
&EntityPath::from("top/pinhole")
)),
Some(&TransformTreeRootInfo::Pinhole(PinholeTreeRoot {
parent_tree_root: TransformFrameIdHash::entity_path_hierarchy_root(),
pinhole_projection: test_resolved_pinhole(
TransformFrameIdHash::from_entity_path(&EntityPath::from("top"))
),
parent_root_from_pinhole_root: glam::DAffine3::from_translation(glam::dvec3(
1.0, 1.0, 0.0
)),
}))
);
assert_eq!(
transform_forest.root_info(TransformFrameIdHash::from_entity_path(
&EntityPath::from("top/pure_leaf_pinhole")
)),
Some(&TransformTreeRootInfo::Pinhole(PinholeTreeRoot {
parent_tree_root: TransformFrameIdHash::entity_path_hierarchy_root(),
pinhole_projection: test_resolved_pinhole(
TransformFrameIdHash::from_entity_path(&EntityPath::from("top"))
),
parent_root_from_pinhole_root: glam::DAffine3::from_translation(glam::dvec3(
1.0, 0.0, 0.0
)),
}))
);
assert_eq!(transform_forest.roots.len(), 3);
}
let target_paths = [
EntityPath::root(),
EntityPath::from("top"),
EntityPath::from("top/pinhole"),
EntityPath::from("top/nonexistent"),
EntityPath::from("top/pinhole/child2d"),
EntityPath::from("top/pure_leaf_pinhole"),
];
let source_paths = [
EntityPath::root(),
EntityPath::from("top"),
EntityPath::from("top/pinhole"),
EntityPath::from("top/child3d"),
EntityPath::from("top/nonexistent"),
EntityPath::from("top/pinhole/child2d"),
EntityPath::from("top/pure_leaf_pinhole"),
];
for target in &target_paths {
let name = if target == &EntityPath::root() {
"_root".to_owned()
} else {
target.to_string().replace('/', "_")
};
let target_frame = TransformFrameIdHash::from_entity_path(target);
let result = transform_forest
.transform_from_to(
target_frame,
source_paths
.iter()
.map(TransformFrameIdHash::from_entity_path),
&|_| 1.0,
)
.collect::<Vec<_>>();
let target_result = result.iter().find(|(key, _)| *key == target_frame).unwrap();
if let Ok(target_result) = &target_result.1 {
assert_eq!(target_result.target_from_source, glam::DAffine3::IDENTITY);
} else {
assert_eq!(
target_result.1,
Err(TransformFromToError::UnknownTargetFrame(target_frame))
);
}
insta::assert_snapshot!(
format!("simple_entity_hierarchy__transform_from_to_{}", name),
pretty_print_transform_frame_ids_in(&result, &transform_cache)
);
}
Ok(())
}
    /// Builds a scene whose hierarchy comes from explicit frame ids:
    /// root → top → {child0, child1} and root → pinhole → child2d.
    /// With `multiple_entities` the transforms are spread over `transforms0..4`,
    /// otherwise they are all logged on the single entity `tf`.
    fn simple_frame_hierarchy_test_scene(
        multiple_entities: bool,
    ) -> Result<EntityDb, Box<dyn std::error::Error>> {
        let mut entity_db = EntityDb::new(StoreInfo::testing().store_id);
        entity_db.add_chunk(&Arc::new(
            Chunk::builder(EntityPath::from(if multiple_entities {
                "transforms0"
            } else {
                "tf"
            }))
            .with_archetype_auto_row(
                [(Timeline::log_tick(), 0)],
                &archetypes::Transform3D::from_translation([1.0, 0.0, 0.0])
                    .with_child_frame("top")
                    .with_parent_frame("root"),
            )
            .build()?,
        ))?;
        entity_db.add_chunk(&Arc::new(
            Chunk::builder(EntityPath::from(if multiple_entities {
                "transforms1"
            } else {
                "tf"
            }))
            .with_archetype_auto_row(
                [(Timeline::log_tick(), 0)],
                &archetypes::Transform3D::from_translation([2.0, 0.0, 0.0])
                    .with_child_frame("child0")
                    .with_parent_frame("top"),
            )
            .build()?,
        ))?;
        entity_db.add_chunk(&Arc::new(
            Chunk::builder(EntityPath::from(if multiple_entities {
                "transforms2"
            } else {
                "tf"
            }))
            .with_archetype_auto_row(
                [(Timeline::log_tick(), 0)],
                &archetypes::Transform3D::from_translation([3.0, 0.0, 0.0])
                    .with_child_frame("child1")
                    .with_parent_frame("top"),
            )
            .build()?,
        ))?;
        entity_db.add_chunk(&Arc::new(
            Chunk::builder(EntityPath::from(if multiple_entities {
                "transforms3"
            } else {
                "tf"
            }))
            .with_archetype_auto_row(
                [(Timeline::log_tick(), 0)],
                &archetypes::Transform3D::from_translation([0.0, 1.0, 0.0])
                    .with_child_frame("pinhole")
                    .with_parent_frame("root"),
            )
            .with_archetype(
                RowId::new(),
                [(Timeline::log_tick(), 0)],
                &test_pinhole()
                    .with_child_frame("pinhole")
                    .with_parent_frame("root"),
            )
            .build()?,
        ))?;
        entity_db.add_chunk(&Arc::new(
            Chunk::builder(EntityPath::from(if multiple_entities {
                "transforms4"
            } else {
                "tf"
            }))
            .with_archetype_auto_row(
                [(Timeline::log_tick(), 0)],
                &archetypes::Transform3D::from_translation([0.0, 2.0, 0.0])
                    .with_child_frame("child2d")
                    .with_parent_frame("pinhole"),
            )
            .build()?,
        ))?;
        Ok(entity_db)
    }
    /// Shared body of the explicit-frame-hierarchy tests: checks the expected roots,
    /// that the explicit-frame tree and the implicit entity-path tree are disconnected,
    /// that all in-tree frame pairs are connected, and snapshots the forest.
    fn test_simple_frame_hierarchy(
        multiple_entities: bool,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let test_scene = simple_frame_hierarchy_test_scene(multiple_entities)?;
        let mut transform_cache = TransformResolutionCache::new(&test_scene);
        transform_cache.ensure_timeline_is_initialized(
            test_scene.storage_engine().store(),
            TimelineName::log_tick(),
        );
        let query = LatestAtQuery::latest(TimelineName::log_tick());
        let transform_forest = TransformForest::new(&test_scene, &transform_cache, &query);
        assert!(!transform_forest.any_missing_chunks());
        {
            // Expected roots: the entity hierarchy root, "root", and the pinhole subtree.
            assert_eq!(
                transform_forest.root_info(TransformFrameIdHash::entity_path_hierarchy_root()),
                Some(&TransformTreeRootInfo::TransformFrameRoot)
            );
            assert_eq!(
                transform_forest.root_info(TransformFrameIdHash::from_str("root")),
                Some(&TransformTreeRootInfo::TransformFrameRoot)
            );
            assert_eq!(
                transform_forest.root_info(TransformFrameIdHash::from_str("pinhole")),
                Some(&TransformTreeRootInfo::Pinhole(PinholeTreeRoot {
                    parent_tree_root: TransformFrameIdHash::from_str("root"),
                    pinhole_projection: test_resolved_pinhole(TransformFrameIdHash::from_str(
                        "root"
                    )),
                    parent_root_from_pinhole_root: glam::DAffine3::from_translation(glam::dvec3(
                        0.0, 1.0, 0.0
                    )),
                }))
            );
            assert_eq!(transform_forest.roots.len(), 3);
        }
        // An entity-path-derived (implicit) frame is not connected to the explicit tree.
        let implicit_frame = if multiple_entities {
            TransformFrameIdHash::from_entity_path(&"transforms2".into())
        } else {
            TransformFrameIdHash::from_entity_path(&"tf".into())
        };
        assert_eq!(
            transform_forest
                .transform_from_to(
                    TransformFrameIdHash::from_str("child0"),
                    std::iter::once(implicit_frame),
                    &|_| 0.0
                )
                .collect_vec(),
            vec![(
                implicit_frame,
                Err(TransformFromToError::NoPathBetweenFrames {
                    target: TransformFrameIdHash::from_str("child0"),
                    src: implicit_frame,
                    target_root: TransformFrameIdHash::from_str("root"),
                    source_root: TransformFrameIdHash::entity_path_hierarchy_root(),
                })
            )]
        );
        // Every pair of frames within the same tree must be connected (in both directions).
        let implicit_frames = if multiple_entities {
            vec![
                TransformFrameId::from_entity_path(&"transforms0".into()),
                TransformFrameId::from_entity_path(&"transforms1".into()),
                TransformFrameId::from_entity_path(&"transforms2".into()),
                TransformFrameId::from_entity_path(&EntityPath::root()),
            ]
        } else {
            vec![TransformFrameId::from_entity_path(&"tf".into())]
        };
        for tree_elements in [
            [
                TransformFrameId::new("top"),
                TransformFrameId::new("root"),
                TransformFrameId::new("child0"),
                TransformFrameId::new("child1"),
                TransformFrameId::new("pinhole"),
                TransformFrameId::new("child2d"),
            ]
            .iter(),
            implicit_frames.iter(),
        ] {
            for pair in tree_elements.permutations(2) {
                let from = pair[0];
                let to = pair[1];
                assert!(
                    matches!(
                        transform_forest
                            .transform_from_to(
                                TransformFrameIdHash::new(from),
                                std::iter::once(TransformFrameIdHash::new(to)),
                                &|_| 1.0
                            )
                            .next(),
                        Some((_, Ok(_)))
                    ),
                    "Connection from {from:?} to {to:?}"
                );
            }
        }
        insta::assert_snapshot!(
            if multiple_entities {
                "simple_frame_hierarchy__multiple_entities"
            } else {
                "simple_frame_hierarchy__all_on_single_entity"
            },
            pretty_print_transform_frame_ids_in(
                &transform_forest.root_from_frame,
                &transform_cache
            )
        );
        Ok(())
    }
#[test]
fn test_simple_frame_hierarchy_multiple_entities() -> Result<(), Box<dyn std::error::Error>> {
test_simple_frame_hierarchy(true)
}
#[test]
fn test_simple_frame_hierarchy_all_on_single_entity() -> Result<(), Box<dyn std::error::Error>>
{
test_simple_frame_hierarchy(false)
}
#[test]
fn test_handling_unknown_frames_gracefully() -> Result<(), Box<dyn std::error::Error>> {
let query = LatestAtQuery::latest(TimelineName::log_tick());
{
let test_scene = EntityDb::new(StoreInfo::testing().store_id);
let transform_cache = TransformResolutionCache::default();
let transform_forest = TransformForest::new(&test_scene, &transform_cache, &query);
assert!(!transform_forest.any_missing_chunks());
assert_eq!(
transform_forest
.transform_from_to(
TransformFrameIdHash::from_str("top"),
std::iter::once(TransformFrameIdHash::from_str("child0")),
&|_| 1.0
)
.collect::<Vec<_>>(),
vec![(
TransformFrameIdHash::from_str("child0"),
Err(TransformFromToError::UnknownTargetFrame(
TransformFrameIdHash::from_str("top")
))
)]
);
}
{
let transform_cache = TransformResolutionCache::default();
let test_scene = simple_frame_hierarchy_test_scene(true)?;
let transform_forest = TransformForest::new(&test_scene, &transform_cache, &query);
assert!(!transform_forest.any_missing_chunks());
assert_eq!(
transform_forest
.transform_from_to(
TransformFrameIdHash::from_str("top"),
std::iter::once(TransformFrameIdHash::from_str("child0")),
&|_| 1.0
)
.collect::<Vec<_>>(),
vec![(
TransformFrameIdHash::from_str("child0"),
Err(TransformFromToError::UnknownTargetFrame(
TransformFrameIdHash::from_str("top")
))
)]
);
}
{
let mut test_scene = simple_frame_hierarchy_test_scene(true)?;
let mut transform_cache = TransformResolutionCache::new(&test_scene);
transform_cache.ensure_timeline_is_initialized(
test_scene.storage_engine().store(),
query.timeline(),
);
test_scene.add_chunk(&Arc::new(
Chunk::builder(EntityPath::from("transforms"))
.with_archetype_auto_row(
[(query.timeline(), TimeCell::from_sequence(0))],
&archetypes::Transform3D::from_translation([4.0, 0.0, 0.0])
.with_child_frame("child2")
.with_parent_frame("top"),
)
.build()?,
))?;
let transform_forest = TransformForest::new(&test_scene, &transform_cache, &query);
assert!(!transform_forest.any_missing_chunks());
assert_eq!(
transform_forest
.transform_from_to(
TransformFrameIdHash::from_str("top"),
std::iter::once(TransformFrameIdHash::from_str("child2")),
&|_| 1.0
)
.collect::<Vec<_>>(),
vec![(
TransformFrameIdHash::from_str("child2"),
Err(TransformFromToError::UnknownSourceFrame(
TransformFrameIdHash::from_str("child2")
))
)]
);
}
{
let mut test_scene = simple_frame_hierarchy_test_scene(true)?;
test_scene.add_chunk(&Arc::new(
Chunk::builder(EntityPath::from("transforms"))
.with_archetype_auto_row(
[(query.timeline(), TimeCell::from_sequence(0))],
&archetypes::Transform3D::from_translation([4.0, 0.0, 0.0])
.with_child_frame("child2")
.with_parent_frame("top"),
)
.build()?,
))?;
let mut transform_cache = TransformResolutionCache::new(&test_scene);
transform_cache.ensure_timeline_is_initialized(
test_scene.storage_engine().store(),
query.timeline(),
);
test_scene.add_chunk(&Arc::new(
Chunk::builder(EntityPath::from("transforms"))
.with_archetype_auto_row(
[(query.timeline(), TimeCell::from_sequence(0))], &archetypes::Transform3D::from_translation([5.0, 0.0, 0.0])
.with_child_frame("child2")
.with_parent_frame("new_top"),
)
.build()?,
))?;
let transform_forest = TransformForest::new(&test_scene, &transform_cache, &query);
assert!(!transform_forest.any_missing_chunks());
assert_eq!(
transform_forest
.transform_from_to(
TransformFrameIdHash::from_str("child2"),
std::iter::once(TransformFrameIdHash::from_str("new_top")),
&|_| 1.0
)
.collect::<Vec<_>>(),
vec![(
TransformFrameIdHash::from_str("new_top"),
Ok(TreeTransform {
root: TransformFrameIdHash::from_str("new_top"),
target_from_source: glam::DAffine3::from_translation(glam::dvec3(
-5.0, 0.0, 0.0
)),
})
)]
);
assert_eq!(
transform_forest
.transform_from_to(
TransformFrameIdHash::from_str("child2"),
std::iter::once(TransformFrameIdHash::from_str("top")),
&|_| 1.0
)
.collect::<Vec<_>>(),
vec![(
TransformFrameIdHash::from_str("top"),
Err(TransformFromToError::NoPathBetweenFrames {
target: TransformFrameIdHash::from_str("child2"),
src: TransformFrameIdHash::from_str("top"),
target_root: TransformFrameIdHash::from_str("new_top"),
source_root: TransformFrameIdHash::from_str("root"),
})
)]
);
}
Ok(())
}
#[test]
fn test_implicit_transform_at_root_being_ignored_with_warning()
-> Result<(), Box<dyn std::error::Error>> {
re_log::setup_logging();
let (logger, log_rx) = re_log::ChannelLogger::new(re_log::LevelFilter::Warn);
re_log::add_boxed_logger(Box::new(logger)).expect("Failed to add logger");
let mut entity_db = EntityDb::new(StoreInfo::testing().store_id);
entity_db.add_chunk(&Arc::new(
Chunk::builder(EntityPath::root())
.with_archetype_auto_row(
TimePoint::STATIC,
&archetypes::Transform3D::from_translation([1.0, 0.0, 0.0]),
)
.build()?,
))?;
entity_db.add_chunk(&Arc::new(
Chunk::builder("/child")
.with_archetype_auto_row(
TimePoint::STATIC,
&archetypes::Transform3D::from_translation([0.0, 1.0, 0.0]),
)
.build()?,
))?;
let query = LatestAtQuery::latest(TimelineName::log_tick());
let mut transform_cache = TransformResolutionCache::new(&entity_db);
transform_cache
.ensure_timeline_is_initialized(entity_db.storage_engine().store(), query.timeline());
let transform_forest = TransformForest::new(&entity_db, &transform_cache, &query);
assert!(!transform_forest.any_missing_chunks());
assert_eq!(
transform_forest
.transform_from_to(
TransformFrameIdHash::from_entity_path(&"child".into()),
std::iter::once(TransformFrameIdHash::from_entity_path(&EntityPath::root())),
&|_| 1.0
)
.collect::<Vec<_>>(),
vec![(
TransformFrameIdHash::from_entity_path(&EntityPath::root()),
Ok(TreeTransform {
root: TransformFrameIdHash::from_entity_path(&EntityPath::root()),
target_from_source: glam::DAffine3::from_translation(glam::dvec3(
0.0, -1.0, 0.0
)),
})
)]
);
let received_log = log_rx.try_recv()?;
assert_eq!(received_log.level, re_log::Level::Warn);
assert!(
received_log
.msg
.contains("Ignoring transform at root entity"),
"Expected warning about ignoring implicit root parent frame, got: {}",
received_log.msg
);
Ok(())
}
}