From 77d4f58e4cd9c2292a38e8ab7399df0b487352e9 Mon Sep 17 00:00:00 2001
From: Andreas Reich <andreas@rerun.io>
Date: Wed, 15 Jan 2025 16:06:58 +0100
Subject: [PATCH 1/4] Improve transform performance (by caching affine
 transforms resulting from transform components) (#8691)

---
 crates/store/re_query/src/latest_at.rs        |    4 +-
 .../re_types/src/archetypes/pinhole_ext.rs    |   10 +-
 crates/viewer/re_view/src/query.rs            |   26 +-
 .../re_view_spatial/src/contexts/mod.rs       |    6 +-
 ...m_context.rs => transform_tree_context.rs} |  589 ++++-----
 crates/viewer/re_view_spatial/src/lib.rs      |    3 +-
 .../re_view_spatial/src/transform_cache.rs    | 1093 +++++++++++++++++
 .../src/transform_component_tracker.rs        |  139 ---
 crates/viewer/re_view_spatial/src/ui_2d.rs    |    3 +-
 crates/viewer/re_view_spatial/src/view_2d.rs  |    2 +-
 crates/viewer/re_view_spatial/src/view_3d.rs  |    2 +-
 .../src/visualizers/cameras.rs                |   19 +-
 .../src/visualizers/depth_images.rs           |    3 +-
 .../re_view_spatial/src/visualizers/mod.rs    |    6 +-
 .../src/visualizers/transform3d_arrows.rs     |    6 +-
 .../visualizers/utilities/entity_iterator.rs  |    7 +-
 16 files changed, 1365 insertions(+), 553 deletions(-)
 rename crates/viewer/re_view_spatial/src/contexts/{transform_context.rs => transform_tree_context.rs} (50%)
 create mode 100644 crates/viewer/re_view_spatial/src/transform_cache.rs
 delete mode 100644 crates/viewer/re_view_spatial/src/transform_component_tracker.rs

diff --git a/crates/store/re_query/src/latest_at.rs b/crates/store/re_query/src/latest_at.rs
index eb0afd78425d..922cd3a648b1 100644
--- a/crates/store/re_query/src/latest_at.rs
+++ b/crates/store/re_query/src/latest_at.rs
@@ -508,7 +508,7 @@ impl LatestAtResults {
 
     /// Returns the deserialized data for the specified component, assuming a mono-batch.
     ///
-    /// Returns an error if the data cannot be deserialized, or if the underlying batch is not of unit length.
+    /// Logs an error if the data cannot be deserialized, or if the underlying batch is not of unit length.
     #[inline]
     pub fn component_mono<C: Component>(&self) -> Option<C> {
         self.component_mono_with_log_level(re_log::Level::Error)
@@ -516,7 +516,7 @@ impl LatestAtResults {
 
     /// Returns the deserialized data for the specified component, assuming a mono-batch.
     ///
-    /// Returns an error if the data cannot be deserialized, or if the underlying batch is not of unit length.
+    /// Returns `None` if the data cannot be deserialized, or if the underlying batch is not of unit length.
     #[inline]
     pub fn component_mono_quiet<C: Component>(&self) -> Option<C> {
         self.components
diff --git a/crates/store/re_types/src/archetypes/pinhole_ext.rs b/crates/store/re_types/src/archetypes/pinhole_ext.rs
index 787e2b932837..dba2076ef46b 100644
--- a/crates/store/re_types/src/archetypes/pinhole_ext.rs
+++ b/crates/store/re_types/src/archetypes/pinhole_ext.rs
@@ -1,8 +1,16 @@
-use crate::datatypes::Vec2D;
+use crate::{components::ViewCoordinates, datatypes::Vec2D};
 
 use super::Pinhole;
 
 impl Pinhole {
+    /// Camera orientation used when there's no camera orientation explicitly logged.
+    ///
+    /// - x pointing right
+    /// - y pointing down
+    /// - z pointing into the image plane
+    ///   (this is convenient for reading out a depth image which has typically positive z values)
+    pub const DEFAULT_CAMERA_XYZ: ViewCoordinates = ViewCoordinates::RDF;
+
     /// Creates a pinhole from the camera focal length and resolution, both specified in pixels.
     ///
     /// The focal length is the diagonal of the projection matrix.
diff --git a/crates/viewer/re_view/src/query.rs b/crates/viewer/re_view/src/query.rs
index 7365c2324cc8..d997c2a94e35 100644
--- a/crates/viewer/re_view/src/query.rs
+++ b/crates/viewer/re_view/src/query.rs
@@ -215,6 +215,12 @@ pub trait DataResultQuery {
         latest_at_query: &'a LatestAtQuery,
     ) -> HybridLatestAtResults<'a>;
 
+    fn latest_at_with_blueprint_resolved_data_for_component<'a, C: re_types_core::Component>(
+        &'a self,
+        ctx: &'a ViewContext<'a>,
+        latest_at_query: &'a LatestAtQuery,
+    ) -> HybridLatestAtResults<'a>;
+
     fn query_archetype_with_history<'a, A: re_types_core::Archetype>(
         &'a self,
         ctx: &'a ViewContext<'a>,
@@ -235,14 +241,30 @@ impl DataResultQuery for DataResult {
         ctx: &'a ViewContext<'a>,
         latest_at_query: &'a LatestAtQuery,
     ) -> HybridLatestAtResults<'a> {
-        let query_shadowed_defaults = false;
+        let query_shadowed_components = false;
         latest_at_with_blueprint_resolved_data(
             ctx,
             None,
             latest_at_query,
             self,
             A::all_components().iter().map(|descr| descr.component_name),
-            query_shadowed_defaults,
+            query_shadowed_components,
+        )
+    }
+
+    fn latest_at_with_blueprint_resolved_data_for_component<'a, C: re_types_core::Component>(
+        &'a self,
+        ctx: &'a ViewContext<'a>,
+        latest_at_query: &'a LatestAtQuery,
+    ) -> HybridLatestAtResults<'a> {
+        let query_shadowed_components = false;
+        latest_at_with_blueprint_resolved_data(
+            ctx,
+            None,
+            latest_at_query,
+            self,
+            std::iter::once(C::name()),
+            query_shadowed_components,
         )
     }
 
diff --git a/crates/viewer/re_view_spatial/src/contexts/mod.rs b/crates/viewer/re_view_spatial/src/contexts/mod.rs
index 7bab048b990c..2bb6a7ac3877 100644
--- a/crates/viewer/re_view_spatial/src/contexts/mod.rs
+++ b/crates/viewer/re_view_spatial/src/contexts/mod.rs
@@ -1,10 +1,10 @@
 mod depth_offsets;
-mod transform_context;
+mod transform_tree_context;
 
 pub use depth_offsets::EntityDepthOffsets;
 use re_types::ViewClassIdentifier;
 use re_view::AnnotationSceneContext;
-pub use transform_context::{TransformContext, TransformInfo, TwoDInThreeDTransformInfo};
+pub use transform_tree_context::{TransformInfo, TransformTreeContext, TwoDInThreeDTransformInfo};
 
 // -----------------------------------------------------------------------------
 
@@ -24,7 +24,7 @@ pub struct SpatialSceneEntityContext<'a> {
 pub fn register_spatial_contexts(
     system_registry: &mut re_viewer_context::ViewSystemRegistrator<'_>,
 ) -> Result<(), ViewClassRegistryError> {
-    system_registry.register_context_system::<TransformContext>()?;
+    system_registry.register_context_system::<TransformTreeContext>()?;
     system_registry.register_context_system::<EntityDepthOffsets>()?;
     system_registry.register_context_system::<AnnotationSceneContext>()?;
     Ok(())
diff --git a/crates/viewer/re_view_spatial/src/contexts/transform_context.rs b/crates/viewer/re_view_spatial/src/contexts/transform_tree_context.rs
similarity index 50%
rename from crates/viewer/re_view_spatial/src/contexts/transform_context.rs
rename to crates/viewer/re_view_spatial/src/contexts/transform_tree_context.rs
index 4e7cbc27eb5c..6d474bd6b525 100644
--- a/crates/viewer/re_view_spatial/src/contexts/transform_context.rs
+++ b/crates/viewer/re_view_spatial/src/contexts/transform_tree_context.rs
@@ -1,23 +1,23 @@
-use itertools::Either;
 use nohash_hasher::IntMap;
 
 use re_chunk_store::LatestAtQuery;
-use re_entity_db::{EntityDb, EntityPath, EntityTree};
+use re_entity_db::{EntityPath, EntityTree};
+use re_log_types::EntityPathHash;
 use re_types::{
-    archetypes::{InstancePoses3D, Pinhole, Transform3D},
-    components::{
-        ImagePlaneDistance, PinholeProjection, PoseRotationAxisAngle, PoseRotationQuat,
-        PoseScale3D, PoseTransformMat3x3, PoseTranslation3D, RotationAxisAngle, RotationQuat,
-        Scale3D, TransformMat3x3, TransformRelation, Translation3D, ViewCoordinates,
-    },
+    archetypes::{InstancePoses3D, Transform3D},
+    components::{ImagePlaneDistance, PinholeProjection},
     Archetype, Component as _, ComponentNameSet,
 };
 use re_view::DataResultQuery as _;
-use re_viewer_context::{IdentifiedViewSystem, ViewContext, ViewContextSystem};
+use re_viewer_context::{
+    DataResultNode, DataResultTree, IdentifiedViewSystem, ViewContext, ViewContextSystem,
+};
 use vec1::smallvec_v1::SmallVec1;
 
 use crate::{
-    transform_component_tracker::TransformComponentTrackerStoreSubscriber,
+    transform_cache::{
+        CachedTransformsPerTimeline, ResolvedPinholeProjection, TransformCacheStoreSubscriber,
+    },
     visualizers::image_view_coordinates,
 };
 
@@ -107,54 +107,50 @@ impl TransformInfo {
     }
 }
 
-#[derive(Clone, Copy)]
-enum UnreachableTransformReason {
-    /// More than one pinhole camera between this and the reference space.
-    NestedPinholeCameras,
-}
-
 /// Provides transforms from an entity to a chosen reference space for all elements in the scene
 /// for the currently selected time & timeline.
 ///
+/// The resulting transforms are dependent on:
+/// * tree, pose, pinhole and view-coordinates transforms components as logged to the data store
+///    * TODO(#6743): blueprint overrides aren't respected yet
+/// * the view's spatial origin
+/// * the query time
+///    * TODO(#723): ranges aren't taken into account yet
+/// * TODO(andreas): the queried entities. Right now we determine transforms for ALL entities in the scene.
+///                  since 3D views tend to display almost everything that's mostly fine, but it's very wasteful when they don't.
+///
 /// The renderer then uses this reference space as its world space,
 /// making world and reference space equivalent for a given view.
 ///
-/// Should be recomputed every frame.
-///
-/// TODO(#7025): Alternative proposal to not have to deal with tree upwards walking & per-origin tree walking.
+/// TODO(#7025): Right now we also do full tree traversal in here to resolve transforms to the root.
+/// However, for views that share the same query, we can easily make all entities relative to the respective origin in a linear pass over all matrices.
+/// (Note that right now the query IS always the same across all views for a given frame since it's just latest-at controlled by the timeline,
+/// but once we support range queries it may be not or only partially the case)
 #[derive(Clone)]
-pub struct TransformContext {
+pub struct TransformTreeContext {
     /// All transforms provided are relative to this reference path.
     space_origin: EntityPath,
 
     /// All reachable entities.
-    transform_per_entity: IntMap<EntityPath, TransformInfo>,
-
-    /// All unreachable descendant paths of `reference_path`.
-    unreachable_descendants: Vec<(EntityPath, UnreachableTransformReason)>,
-
-    /// The first parent of `reference_path` that is no longer reachable.
-    first_unreachable_parent: Option<(EntityPath, UnreachableTransformReason)>,
+    transform_per_entity: IntMap<EntityPathHash, TransformInfo>,
 }
 
-impl IdentifiedViewSystem for TransformContext {
+impl IdentifiedViewSystem for TransformTreeContext {
     fn identifier() -> re_viewer_context::ViewSystemIdentifier {
         "TransformContext".into()
     }
 }
 
-impl Default for TransformContext {
+impl Default for TransformTreeContext {
     fn default() -> Self {
         Self {
             space_origin: EntityPath::root(),
             transform_per_entity: Default::default(),
-            unreachable_descendants: Default::default(),
-            first_unreachable_parent: None,
         }
     }
 }
 
-impl ViewContextSystem for TransformContext {
+impl ViewContextSystem for TransformTreeContext {
     fn compatible_component_sets(&self) -> Vec<ComponentNameSet> {
         vec![
             Transform3D::all_components()
@@ -180,10 +176,19 @@ impl ViewContextSystem for TransformContext {
         query: &re_viewer_context::ViewQuery<'_>,
     ) {
         re_tracing::profile_function!();
-
         debug_assert_transform_field_order(ctx.viewer_ctx.reflection);
 
+        // Make sure transform cache is up to date.
+        // TODO(andreas): This is a rather annoying sync point between different views.
+        // We could alleviate this by introducing a per view class (not instance) method that is called
+        // before system execution.
+        TransformCacheStoreSubscriber::access_mut(&ctx.recording().store_id(), |cache| {
+            cache.apply_all_updates(ctx.recording());
+        });
+
         let entity_tree = ctx.recording().tree();
+        let query_result = ctx.viewer_ctx.lookup_query_result(query.view_id);
+        let data_result_tree = &query_result.tree;
 
         self.space_origin = query.space_origin.clone();
 
@@ -197,19 +202,44 @@ impl ViewContextSystem for TransformContext {
 
         let time_query = ctx.current_query();
 
-        // Child transforms of this space
-        self.gather_descendants_transforms(
-            ctx,
-            query,
-            current_tree,
-            ctx.recording(),
-            &time_query,
-            // Ignore potential pinhole camera at the root of the view, since it regarded as being "above" this root.
-            TransformInfo::default(),
-        );
+        TransformCacheStoreSubscriber::access(&ctx.recording().store_id(), |cache| {
+            let Some(transforms_per_timeline) = cache.transforms_per_timeline(query.timeline)
+            else {
+                // No transforms on this timeline at all. In other words, everything is identity!
+                query_result.tree.visit(&mut |node: &DataResultNode| {
+                    self.transform_per_entity.insert(
+                        node.data_result.entity_path.hash(),
+                        TransformInfo::default(),
+                    );
+                    true
+                });
+                return;
+            };
+
+            // Child transforms of this space
+            {
+                re_tracing::profile_scope!("gather_descendants_transforms");
+
+                self.gather_descendants_transforms(
+                    ctx,
+                    data_result_tree,
+                    current_tree,
+                    &time_query,
+                    // Ignore potential pinhole camera at the root of the view, since it is regarded as being "above" this root.
+                    TransformInfo::default(),
+                    transforms_per_timeline,
+                );
+            }
 
-        // Walk up from the reference to the highest reachable parent.
-        self.gather_parent_transforms(ctx, query, current_tree, &time_query);
+            // Walk up from the reference to the highest reachable parent.
+            self.gather_parent_transforms(
+                ctx,
+                data_result_tree,
+                current_tree,
+                &time_query,
+                transforms_per_timeline,
+            );
+        }); // Note that this can return None if no event has happened for this timeline yet.
     }
 
     fn as_any(&self) -> &dyn std::any::Any {
@@ -217,64 +247,56 @@ impl ViewContextSystem for TransformContext {
     }
 }
 
-impl TransformContext {
+impl TransformTreeContext {
     /// Gather transforms for everything _above_ the root.
     fn gather_parent_transforms<'a>(
         &mut self,
         ctx: &'a ViewContext<'a>,
-        query: &re_viewer_context::ViewQuery<'_>,
+        data_result_tree: &DataResultTree,
         mut current_tree: &'a EntityTree,
         time_query: &LatestAtQuery,
+        transforms_per_timeline: &CachedTransformsPerTimeline,
     ) {
         re_tracing::profile_function!();
 
         let entity_tree = ctx.recording().tree();
 
-        let mut encountered_pinhole = None;
         let mut reference_from_ancestor = glam::Affine3A::IDENTITY;
         while let Some(parent_path) = current_tree.path.parent() {
             let Some(parent_tree) = entity_tree.subtree(&parent_path) else {
                 // Unlike not having the space path in the hierarchy, this should be impossible.
                 re_log::error_once!(
-                    "Path {} is not part of the global entity tree whereas its child {} is",
-                    parent_path,
-                    query.space_origin
+                    "Path {parent_path} is not part of the global entity tree whereas its child is"
                 );
                 return;
             };
 
             // Note that the transform at the reference is the first that needs to be inverted to "break out" of its hierarchy.
             // Generally, the transform _at_ a node isn't relevant to it's children, but only to get to its parent in turn!
-            let new_transform = match transforms_at(
+            let transforms_at_entity = transforms_at(
                 &current_tree.path,
-                ctx.recording(),
                 time_query,
                 // TODO(#1025): See comment in transform_at. This is a workaround for precision issues
                 // and the fact that there is no meaningful image plane distance for 3D->2D views.
                 |_| 500.0,
-                &mut encountered_pinhole,
-            ) {
-                Err(unreachable_reason) => {
-                    self.first_unreachable_parent =
-                        Some((parent_tree.path.clone(), unreachable_reason));
-                    break;
-                }
-                Ok(transforms_at_entity) => transform_info_for_upward_propagation(
-                    reference_from_ancestor,
-                    transforms_at_entity,
-                ),
-            };
+                &mut None, // Don't care about pinhole encounters.
+                transforms_per_timeline,
+            );
+            let new_transform = transform_info_for_upward_propagation(
+                reference_from_ancestor,
+                &transforms_at_entity,
+            );
 
             reference_from_ancestor = new_transform.reference_from_entity;
 
             // (this skips over everything at and under `current_tree` automatically)
             self.gather_descendants_transforms(
                 ctx,
-                query,
+                data_result_tree,
                 parent_tree,
-                ctx.recording(),
                 time_query,
                 new_transform,
+                transforms_per_timeline,
             );
 
             current_tree = parent_tree;
@@ -285,15 +307,15 @@ impl TransformContext {
     fn gather_descendants_transforms(
         &mut self,
         ctx: &ViewContext<'_>,
-        view_query: &re_viewer_context::ViewQuery<'_>,
+        data_result_tree: &DataResultTree,
         subtree: &EntityTree,
-        entity_db: &EntityDb,
         query: &LatestAtQuery,
         transform: TransformInfo,
+        transforms_per_timeline: &CachedTransformsPerTimeline,
     ) {
         let twod_in_threed_info = transform.twod_in_threed_info.clone();
         let reference_from_parent = transform.reference_from_entity;
-        match self.transform_per_entity.entry(subtree.path.clone()) {
+        match self.transform_per_entity.entry(subtree.path.hash()) {
             std::collections::hash_map::Entry::Occupied(_) => {
                 return;
             }
@@ -305,54 +327,34 @@ impl TransformContext {
         for child_tree in subtree.children.values() {
             let child_path = &child_tree.path;
 
-            let lookup_image_plane = |p: &_| {
-                let query_result = ctx.viewer_ctx.lookup_query_result(view_query.view_id);
-
-                query_result
-                    .tree
-                    .lookup_result_by_path(p)
-                    .cloned()
-                    .map(|data_result| {
-                        let results = data_result
-                            .latest_at_with_blueprint_resolved_data::<Pinhole>(ctx, query);
-
-                        results.get_mono_with_fallback::<ImagePlaneDistance>()
-                    })
-                    .unwrap_or_default()
-                    .into()
-            };
+            let lookup_image_plane =
+                |p: &_| lookup_image_plane_distance(ctx, data_result_tree, p, query);
 
             let mut encountered_pinhole = twod_in_threed_info
                 .as_ref()
                 .map(|info| info.parent_pinhole.clone());
-            let new_transform = match transforms_at(
+
+            let transforms_at_entity = transforms_at(
                 child_path,
-                entity_db,
                 query,
                 lookup_image_plane,
                 &mut encountered_pinhole,
-            ) {
-                Err(unreachable_reason) => {
-                    self.unreachable_descendants
-                        .push((child_path.clone(), unreachable_reason));
-                    continue;
-                }
-
-                Ok(transforms_at_entity) => transform_info_for_downward_propagation(
-                    child_path,
-                    reference_from_parent,
-                    twod_in_threed_info.clone(),
-                    transforms_at_entity,
-                ),
-            };
+                transforms_per_timeline,
+            );
+            let new_transform = transform_info_for_downward_propagation(
+                child_path,
+                reference_from_parent,
+                twod_in_threed_info.clone(),
+                &transforms_at_entity,
+            );
 
             self.gather_descendants_transforms(
                 ctx,
-                view_query,
+                data_result_tree,
                 child_tree,
-                entity_db,
                 query,
                 new_transform,
+                transforms_per_timeline,
             );
         }
     }
@@ -364,15 +366,35 @@ impl TransformContext {
     /// Retrieves transform information for a given entity.
     ///
     /// Returns `None` if it's not reachable from the view's origin.
-    pub fn transform_info_for_entity(&self, ent_path: &EntityPath) -> Option<&TransformInfo> {
-        self.transform_per_entity.get(ent_path)
+    pub fn transform_info_for_entity(&self, ent_path: EntityPathHash) -> Option<&TransformInfo> {
+        self.transform_per_entity.get(&ent_path)
     }
 }
 
+fn lookup_image_plane_distance(
+    ctx: &ViewContext<'_>,
+    data_result_tree: &DataResultTree,
+    entity_path: &EntityPath,
+    query: &LatestAtQuery,
+) -> f32 {
+    data_result_tree
+        .lookup_result_by_path(entity_path)
+        .cloned()
+        .map(|data_result| {
+            data_result
+                .latest_at_with_blueprint_resolved_data_for_component::<ImagePlaneDistance>(
+                    ctx, query,
+                )
+                .get_mono_with_fallback::<ImagePlaneDistance>()
+        })
+        .unwrap_or_default()
+        .into()
+}
+
 /// Compute transform info for when we walk up the tree from the reference.
 fn transform_info_for_upward_propagation(
     reference_from_ancestor: glam::Affine3A,
-    transforms_at_entity: TransformsAtEntity,
+    transforms_at_entity: &TransformsAtEntity<'_>,
 ) -> TransformInfo {
     let mut reference_from_entity = reference_from_ancestor;
 
@@ -390,7 +412,7 @@ fn transform_info_for_upward_propagation(
 
     // Collect & compute poses.
     let (mut reference_from_instances, has_instance_transforms) =
-        if let Ok(mut entity_from_instances) = SmallVec1::<[glam::Affine3A; 1]>::try_from_vec(
+        if let Ok(mut entity_from_instances) = SmallVec1::<[glam::Affine3A; 1]>::try_from_slice(
             transforms_at_entity.entity_from_instance_poses,
         ) {
             for entity_from_instance in &mut entity_from_instances {
@@ -402,18 +424,16 @@ fn transform_info_for_upward_propagation(
             (SmallVec1::new(reference_from_entity), false)
         };
 
-    // Apply tree transform if any.
-    if let Some(parent_from_entity_tree_transform) =
-        transforms_at_entity.parent_from_entity_tree_transform
-    {
-        reference_from_entity *= parent_from_entity_tree_transform.inverse();
-        if has_instance_transforms {
-            for reference_from_instance in &mut reference_from_instances {
-                *reference_from_instance = reference_from_entity * (*reference_from_instance);
-            }
-        } else {
-            *reference_from_instances.first_mut() = reference_from_entity;
+    // Apply tree transform.
+    reference_from_entity *= transforms_at_entity
+        .parent_from_entity_tree_transform
+        .inverse();
+    if has_instance_transforms {
+        for reference_from_instance in &mut reference_from_instances {
+            *reference_from_instance = reference_from_entity * (*reference_from_instance);
         }
+    } else {
+        *reference_from_instances.first_mut() = reference_from_entity;
     }
 
     TransformInfo {
@@ -431,21 +451,18 @@ fn transform_info_for_downward_propagation(
     current_path: &EntityPath,
     reference_from_parent: glam::Affine3A,
     mut twod_in_threed_info: Option<TwoDInThreeDTransformInfo>,
-    transforms_at_entity: TransformsAtEntity,
+    transforms_at_entity: &TransformsAtEntity<'_>,
 ) -> TransformInfo {
     let mut reference_from_entity = reference_from_parent;
 
     // Apply tree transform.
-    if let Some(parent_from_entity_tree_transform) =
-        transforms_at_entity.parent_from_entity_tree_transform
-    {
-        reference_from_entity *= parent_from_entity_tree_transform;
-    }
+
+    reference_from_entity *= transforms_at_entity.parent_from_entity_tree_transform;
 
     // Collect & compute poses.
     let (mut reference_from_instances, has_instance_transforms) =
         if let Ok(mut entity_from_instances) =
-            SmallVec1::try_from_vec(transforms_at_entity.entity_from_instance_poses)
+            SmallVec1::try_from_slice(transforms_at_entity.entity_from_instance_poses)
         {
             for entity_from_instance in &mut entity_from_instances {
                 *entity_from_instance = reference_from_entity * (*entity_from_instance);
@@ -491,15 +508,16 @@ fn transform_info_for_downward_propagation(
 
 #[cfg(debug_assertions)]
 fn debug_assert_transform_field_order(reflection: &re_types::reflection::Reflection) {
+    use re_types::{components, Archetype as _};
+
     let expected_order = vec![
-        Translation3D::name(),
-        RotationAxisAngle::name(),
-        RotationQuat::name(),
-        Scale3D::name(),
-        TransformMat3x3::name(),
+        components::Translation3D::name(),
+        components::RotationAxisAngle::name(),
+        components::RotationQuat::name(),
+        components::Scale3D::name(),
+        components::TransformMat3x3::name(),
     ];
 
-    use re_types::Archetype as _;
     let transform3d_reflection = reflection
         .archetypes
         .get(&re_types::archetypes::Transform3D::name())
@@ -528,276 +546,87 @@ But they are instead ordered like this:\n{actual_order:?}"
 #[cfg(not(debug_assertions))]
 fn debug_assert_transform_field_order(_: &re_types::reflection::Reflection) {}
 
-fn query_and_resolve_tree_transform_at_entity(
+fn transform_from_pinhole_with_image_plane(
     entity_path: &EntityPath,
-    entity_db: &EntityDb,
-    query: &LatestAtQuery,
-    transform3d_components: impl Iterator<Item = re_types::ComponentName>,
-) -> Option<glam::Affine3A> {
-    // TODO(#6743): Doesn't take into account overrides.
-    let result = entity_db.latest_at(query, entity_path, transform3d_components);
-    if result.components.is_empty() {
-        return None;
-    }
-
-    let mut transform = glam::Affine3A::IDENTITY;
-
-    // Order see `debug_assert_transform_field_order`
-    if let Some(translation) = result.component_instance::<Translation3D>(0) {
-        transform = glam::Affine3A::from(translation);
-    }
-    if let Some(axis_angle) = result.component_instance::<RotationAxisAngle>(0) {
-        if let Ok(axis_angle) = glam::Affine3A::try_from(axis_angle) {
-            transform *= axis_angle;
-        } else {
-            // Invalid transform.
-            return None;
-        }
-    }
-    if let Some(quaternion) = result.component_instance::<RotationQuat>(0) {
-        if let Ok(quaternion) = glam::Affine3A::try_from(quaternion) {
-            transform *= quaternion;
-        } else {
-            // Invalid transform.
-            return None;
-        }
-    }
-    if let Some(scale) = result.component_instance::<Scale3D>(0) {
-        if scale.x() == 0.0 && scale.y() == 0.0 && scale.z() == 0.0 {
-            // Invalid scale.
-            return None;
-        }
-        transform *= glam::Affine3A::from(scale);
-    }
-    if let Some(mat3x3) = result.component_instance::<TransformMat3x3>(0) {
-        let affine_transform = glam::Affine3A::from(mat3x3);
-        if affine_transform.matrix3.determinant() == 0.0 {
-            // Invalid transform.
-            return None;
-        }
-        transform *= affine_transform;
-    }
-
-    if result.component_instance::<TransformRelation>(0) == Some(TransformRelation::ChildFromParent)
-    // TODO(andreas): Should we warn? This might be intentionally caused by zero scale.
-        && transform.matrix3.determinant() != 0.0
-    {
-        transform = transform.inverse();
-    }
-
-    Some(transform)
-}
-
-fn query_and_resolve_instance_poses_at_entity(
-    entity_path: &EntityPath,
-    entity_db: &EntityDb,
-    query: &LatestAtQuery,
-    pose3d_components: impl Iterator<Item = re_types::ComponentName>,
-) -> Vec<glam::Affine3A> {
-    // TODO(#6743): Doesn't take into account overrides.
-    let result = entity_db.latest_at(query, entity_path, pose3d_components);
-
-    let max_count = result
-        .components
-        .iter()
-        .map(|(name, row)| row.num_instances(name))
-        .max()
-        .unwrap_or(0) as usize;
-
-    if max_count == 0 {
-        return Vec::new();
-    }
-
-    #[inline]
-    pub fn clamped_or_nothing<T: Clone>(
-        values: Vec<T>,
-        clamped_len: usize,
-    ) -> impl Iterator<Item = T> {
-        let Some(last) = values.last() else {
-            return Either::Left(std::iter::empty());
-        };
-        let last = last.clone();
-        Either::Right(
-            values
-                .into_iter()
-                .chain(std::iter::repeat(last))
-                .take(clamped_len),
-        )
-    }
-
-    let mut iter_translation = clamped_or_nothing(
-        result
-            .component_batch::<PoseTranslation3D>()
-            .unwrap_or_default(),
-        max_count,
-    );
-    let mut iter_rotation_quat = clamped_or_nothing(
-        result
-            .component_batch::<PoseRotationQuat>()
-            .unwrap_or_default(),
-        max_count,
-    );
-    let mut iter_rotation_axis_angle = clamped_or_nothing(
-        result
-            .component_batch::<PoseRotationAxisAngle>()
-            .unwrap_or_default(),
-        max_count,
-    );
-    let mut iter_scale = clamped_or_nothing(
-        result.component_batch::<PoseScale3D>().unwrap_or_default(),
-        max_count,
-    );
-    let mut iter_mat3x3 = clamped_or_nothing(
-        result
-            .component_batch::<PoseTransformMat3x3>()
-            .unwrap_or_default(),
-        max_count,
-    );
-
-    let mut transforms = Vec::with_capacity(max_count);
-    for _ in 0..max_count {
-        // Order see `debug_assert_transform_field_order`
-        let mut transform = glam::Affine3A::IDENTITY;
-        if let Some(translation) = iter_translation.next() {
-            transform = glam::Affine3A::from(translation);
-        }
-        if let Some(rotation_quat) = iter_rotation_quat.next() {
-            if let Ok(rotation_quat) = glam::Affine3A::try_from(rotation_quat) {
-                transform *= rotation_quat;
-            } else {
-                transform = glam::Affine3A::ZERO;
-            }
-        }
-        if let Some(rotation_axis_angle) = iter_rotation_axis_angle.next() {
-            if let Ok(axis_angle) = glam::Affine3A::try_from(rotation_axis_angle) {
-                transform *= axis_angle;
-            } else {
-                transform = glam::Affine3A::ZERO;
-            }
-        }
-        if let Some(scale) = iter_scale.next() {
-            transform *= glam::Affine3A::from(scale);
-        }
-        if let Some(mat3x3) = iter_mat3x3.next() {
-            transform *= glam::Affine3A::from(mat3x3);
-        }
-
-        transforms.push(transform);
-    }
-
-    transforms
-}
-
-fn query_and_resolve_obj_from_pinhole_image_plane(
-    entity_path: &EntityPath,
-    entity_db: &EntityDb,
-    query: &LatestAtQuery,
+    resolved_pinhole_projection: &ResolvedPinholeProjection,
     pinhole_image_plane_distance: impl Fn(&EntityPath) -> f32,
-) -> Option<glam::Affine3A> {
-    entity_db
-        .latest_at_component::<PinholeProjection>(entity_path, query)
-        .map(|(_index, image_from_camera)| {
-            (
-                image_from_camera,
-                entity_db
-                    .latest_at_component::<ViewCoordinates>(entity_path, query)
-                    .map_or(ViewCoordinates::RDF, |(_index, res)| res),
-            )
-        })
-        .map(|(image_from_camera, view_coordinates)| {
-            // Everything under a pinhole camera is a 2D projection, thus doesn't actually have a proper 3D representation.
-            // Our visualization interprets this as looking at a 2D image plane from a single point (the pinhole).
-
-            // Center the image plane and move it along z, scaling the further the image plane is.
-            let distance = pinhole_image_plane_distance(entity_path);
-            let focal_length = image_from_camera.focal_length_in_pixels();
-            let focal_length = glam::vec2(focal_length.x(), focal_length.y());
-            let scale = distance / focal_length;
-            let translation = (-image_from_camera.principal_point() * scale).extend(distance);
-
-            let image_plane3d_from_2d_content = glam::Affine3A::from_translation(translation)
+) -> glam::Affine3A {
+    let ResolvedPinholeProjection {
+        image_from_camera,
+        view_coordinates,
+    } = resolved_pinhole_projection;
+
+    // Everything under a pinhole camera is a 2D projection, thus doesn't actually have a proper 3D representation.
+    // Our visualization interprets this as looking at a 2D image plane from a single point (the pinhole).
+
+    // Center the image plane and move it along z, scaling the further the image plane is.
+    let distance = pinhole_image_plane_distance(entity_path);
+    let focal_length = image_from_camera.focal_length_in_pixels();
+    let focal_length = glam::vec2(focal_length.x(), focal_length.y());
+    let scale = distance / focal_length;
+    let translation = (-image_from_camera.principal_point() * scale).extend(distance);
+
+    let image_plane3d_from_2d_content = glam::Affine3A::from_translation(translation)
             // We want to preserve any depth that might be on the pinhole image.
             // Use harmonic mean of x/y scale for those.
             * glam::Affine3A::from_scale(
                 scale.extend(2.0 / (1.0 / scale.x + 1.0 / scale.y)),
             );
 
-            // Our interpretation of the pinhole camera implies that the axis semantics, i.e. ViewCoordinates,
-            // determine how the image plane is oriented.
-            // (see also `CamerasPart` where the frustum lines are set up)
-            let obj_from_image_plane3d = view_coordinates.from_other(&image_view_coordinates());
+    // Our interpretation of the pinhole camera implies that the axis semantics, i.e. ViewCoordinates,
+    // determine how the image plane is oriented.
+    // (see also `CamerasPart` where the frustum lines are set up)
+    let obj_from_image_plane3d = view_coordinates.from_other(&image_view_coordinates());
 
-            glam::Affine3A::from_mat3(obj_from_image_plane3d) * image_plane3d_from_2d_content
+    glam::Affine3A::from_mat3(obj_from_image_plane3d) * image_plane3d_from_2d_content
 
-            // Above calculation is nice for a certain kind of visualizing a projected image plane,
-            // but the image plane distance is arbitrary and there might be other, better visualizations!
+    // Above calculation is nice for a certain kind of visualizing a projected image plane,
+    // but the image plane distance is arbitrary and there might be other, better visualizations!
 
-            // TODO(#1025):
-            // As such we don't ever want to invert this matrix!
-            // However, currently our 2D views require do to exactly that since we're forced to
-            // build a relationship between the 2D plane and the 3D world, when actually the 2D plane
-            // should have infinite depth!
-            // The inverse of this matrix *is* working for this, but quickly runs into precision issues.
-            // See also `ui_2d.rs#setup_target_config`
-        })
+    // TODO(#1025):
+    // As such we don't ever want to invert this matrix!
+    // However, currently our 2D views require us to do exactly that since we're forced to
+    // build a relationship between the 2D plane and the 3D world, when actually the 2D plane
+    // should have infinite depth!
+    // The inverse of this matrix *is* working for this, but quickly runs into precision issues.
+    // See also `ui_2d.rs#setup_target_config`
 }
 
 /// Resolved transforms at an entity.
 #[derive(Default)]
-struct TransformsAtEntity {
-    parent_from_entity_tree_transform: Option<glam::Affine3A>,
-    entity_from_instance_poses: Vec<glam::Affine3A>,
+struct TransformsAtEntity<'a> {
+    parent_from_entity_tree_transform: glam::Affine3A,
+    entity_from_instance_poses: &'a [glam::Affine3A],
     instance_from_pinhole_image_plane: Option<glam::Affine3A>,
 }
 
-fn transforms_at(
+fn transforms_at<'a>(
     entity_path: &EntityPath,
-    entity_db: &EntityDb,
     query: &LatestAtQuery,
     pinhole_image_plane_distance: impl Fn(&EntityPath) -> f32,
     encountered_pinhole: &mut Option<EntityPath>,
-) -> Result<TransformsAtEntity, UnreachableTransformReason> {
+    transforms_per_timeline: &'a CachedTransformsPerTimeline,
+) -> TransformsAtEntity<'a> {
     // This is called very frequently, don't put a profile scope here.
 
-    let potential_transform_components =
-        TransformComponentTrackerStoreSubscriber::access(&entity_db.store_id(), |tracker| {
-            tracker.potential_transform_components(entity_path).cloned()
-        })
-        .flatten()
-        .unwrap_or_default();
-
-    let parent_from_entity_tree_transform = if potential_transform_components.transform3d.is_empty()
-    {
-        None
-    } else {
-        query_and_resolve_tree_transform_at_entity(
-            entity_path,
-            entity_db,
-            query,
-            potential_transform_components.transform3d.iter().copied(),
-        )
-    };
-    let entity_from_instance_poses = if potential_transform_components.pose3d.is_empty() {
-        Vec::new()
-    } else {
-        query_and_resolve_instance_poses_at_entity(
-            entity_path,
-            entity_db,
-            query,
-            potential_transform_components.pose3d.iter().copied(),
-        )
-    };
-    let instance_from_pinhole_image_plane = if potential_transform_components.pinhole {
-        query_and_resolve_obj_from_pinhole_image_plane(
-            entity_path,
-            entity_db,
-            query,
-            pinhole_image_plane_distance,
-        )
-    } else {
-        None
+    let Some(entity_transforms) = transforms_per_timeline.entity_transforms(entity_path.hash())
+    else {
+        return TransformsAtEntity::default();
     };
 
+    let parent_from_entity_tree_transform = entity_transforms.latest_at_tree_transform(query);
+    let entity_from_instance_poses = entity_transforms.latest_at_instance_poses(query);
+    let instance_from_pinhole_image_plane =
+        entity_transforms
+            .latest_at_pinhole(query)
+            .map(|resolved_pinhole_projection| {
+                transform_from_pinhole_with_image_plane(
+                    entity_path,
+                    resolved_pinhole_projection,
+                    pinhole_image_plane_distance,
+                )
+            });
+
     let transforms_at_entity = TransformsAtEntity {
         parent_from_entity_tree_transform,
         entity_from_instance_poses,
@@ -809,12 +638,8 @@ fn transforms_at(
         .instance_from_pinhole_image_plane
         .is_some()
     {
-        if encountered_pinhole.is_some() {
-            return Err(UnreachableTransformReason::NestedPinholeCameras);
-        } else {
-            *encountered_pinhole = Some(entity_path.clone());
-        }
+        *encountered_pinhole = Some(entity_path.clone());
     }
 
-    Ok(transforms_at_entity)
+    transforms_at_entity
 }
diff --git a/crates/viewer/re_view_spatial/src/lib.rs b/crates/viewer/re_view_spatial/src/lib.rs
index 62cf77783d4f..1ce80145f306 100644
--- a/crates/viewer/re_view_spatial/src/lib.rs
+++ b/crates/viewer/re_view_spatial/src/lib.rs
@@ -19,7 +19,6 @@ mod proc_mesh;
 mod scene_bounding_boxes;
 mod space_camera_3d;
 mod spatial_topology;
-mod transform_component_tracker;
 mod ui;
 mod ui_2d;
 mod ui_3d;
@@ -29,6 +28,8 @@ mod view_3d;
 mod view_3d_properties;
 mod visualizers;
 
+mod transform_cache;
+
 pub use view_2d::SpatialView2D;
 pub use view_3d::SpatialView3D;
 
diff --git a/crates/viewer/re_view_spatial/src/transform_cache.rs b/crates/viewer/re_view_spatial/src/transform_cache.rs
new file mode 100644
index 000000000000..87922ecb7eef
--- /dev/null
+++ b/crates/viewer/re_view_spatial/src/transform_cache.rs
@@ -0,0 +1,1093 @@
+use std::collections::BTreeMap;
+
+use ahash::{HashMap, HashSet};
+use glam::Affine3A;
+use itertools::Either;
+use nohash_hasher::{IntMap, IntSet};
+
+use once_cell::sync::OnceCell;
+use re_chunk_store::{
+    ChunkStore, ChunkStoreSubscriberHandle, LatestAtQuery, PerStoreChunkSubscriber,
+};
+use re_entity_db::EntityDb;
+use re_log_types::{EntityPath, EntityPathHash, StoreId, TimeInt, Timeline};
+use re_types::{
+    archetypes::{self},
+    components::{self},
+    Archetype as _, Component, ComponentName,
+};
+
+/// Store subscriber that resolves all transform components at a given entity to an affine transform.
+///
+/// It only handles resulting transforms individually to each entity, not how these transforms propagate in the tree.
+/// For transform tree propagation see [`crate::contexts::TransformTreeContext`].
+///
+/// There are different kinds of transforms handled here:
+/// * [`archetypes::Transform3D`]
+///   Tree transforms that should propagate in the tree (via [`crate::contexts::TransformTreeContext`]).
+/// * [`archetypes::InstancePoses3D`]
+///   Instance poses that should be applied to the tree transforms (via [`crate::contexts::TransformTreeContext`]) but not propagate.
+/// * [`components::PinholeProjection`] and [`components::ViewCoordinates`]
+///   Pinhole projections & associated view coordinates used for visualizing cameras in 3D and embedding 2D in 3D
+pub struct TransformCacheStoreSubscriber {
+    /// All components of [`archetypes::Transform3D`]
+    transform_components: IntSet<ComponentName>,
+
+    /// All components of [`archetypes::InstancePoses3D`]
+    pose_components: IntSet<ComponentName>,
+
+    /// All components related to pinholes (i.e. [`components::PinholeProjection`] and [`components::ViewCoordinates`]).
+    pinhole_components: IntSet<ComponentName>,
+
+    per_timeline: HashMap<Timeline, CachedTransformsPerTimeline>,
+}
+
+impl Default for TransformCacheStoreSubscriber {
+    #[inline]
+    fn default() -> Self {
+        use re_types::Archetype as _;
+
+        Self {
+            transform_components: archetypes::Transform3D::all_components()
+                .iter()
+                .map(|descr| descr.component_name)
+                .collect(),
+            pose_components: archetypes::InstancePoses3D::all_components()
+                .iter()
+                .map(|descr| descr.component_name)
+                .collect(),
+            pinhole_components: [
+                components::PinholeProjection::name(),
+                components::ViewCoordinates::name(),
+            ]
+            .into_iter()
+            .collect(),
+
+            per_timeline: Default::default(),
+        }
+    }
+}
+
+bitflags::bitflags! {
+    /// Flags for the different kinds of independent transforms that the transform cache handles.
+    #[derive(Debug, Clone, Copy)]
+    pub struct TransformAspect: u8 {
+        /// The entity has a tree transform, i.e. any non-style component of [`archetypes::Transform3D`].
+        const Tree = 1 << 0;
+
+        /// The entity has instance poses, i.e. any non-style component of [`archetypes::InstancePoses3D`].
+        const Pose = 1 << 1;
+
+        /// The entity has a pinhole projection or view coordinates, i.e. either [`components::PinholeProjection`] or [`components::ViewCoordinates`].
+        const PinholeOrViewCoordinates = 1 << 2;
+    }
+}
+
+/// Points in time that have changed for a given entity,
+/// i.e. the cache is invalid for these times.
+#[derive(Debug)]
+struct InvalidatedTransforms {
+    entity_path: EntityPath,
+    times: Vec<TimeInt>,
+    aspects: TransformAspect,
+}
+
+#[derive(Default)]
+pub struct CachedTransformsPerTimeline {
+    /// Updates that should be applied to the cache.
+    /// I.e. times & entities at which the cache is invalid right now.
+    invalidated_transforms: Vec<InvalidatedTransforms>,
+
+    per_entity: IntMap<EntityPathHash, PerTimelinePerEntityTransforms>,
+}
+
+type PoseTransformMap = BTreeMap<TimeInt, Vec<Affine3A>>;
+
+/// Maps from time to pinhole projection.
+///
+/// Unlike with tree & pose transforms, there's no identity value that we can insert upon clears.
+/// (clears here meaning that the user first logs a pinhole and then later either logs a clear or an empty pinhole array)
+/// Therefore, we instead store those events as `None` values to ensure that everything after a clear
+/// is properly marked as having no pinhole projection.
+type PinholeProjectionMap = BTreeMap<TimeInt, Option<ResolvedPinholeProjection>>;
+
+pub struct PerTimelinePerEntityTransforms {
+    timeline: Timeline,
+
+    tree_transforms: BTreeMap<TimeInt, Affine3A>,
+
+    // Pose transforms and pinhole projections are typically more rare, which is why we store them as optional boxes.
+    pose_transforms: Option<Box<PoseTransformMap>>,
+    pinhole_projections: Option<Box<PinholeProjectionMap>>,
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub struct ResolvedPinholeProjection {
+    pub image_from_camera: components::PinholeProjection,
+
+    /// View coordinates at this pinhole camera.
+    ///
+    /// This is needed to orient 2D in 3D and 3D in 2D the right way around
+    /// (answering questions like which axis is distance to viewer increasing).
+    /// If no view coordinates were logged, this is set to [`archetypes::Pinhole::DEFAULT_CAMERA_XYZ`].
+    pub view_coordinates: components::ViewCoordinates,
+}
+
+impl CachedTransformsPerTimeline {
+    #[inline]
+    pub fn entity_transforms(
+        &self,
+        entity_path: EntityPathHash,
+    ) -> Option<&PerTimelinePerEntityTransforms> {
+        self.per_entity.get(&entity_path)
+    }
+}
+
+impl PerTimelinePerEntityTransforms {
+    #[inline]
+    pub fn latest_at_tree_transform(&self, query: &LatestAtQuery) -> Affine3A {
+        debug_assert_eq!(query.timeline(), self.timeline);
+        self.tree_transforms
+            .range(..query.at().inc())
+            .next_back()
+            .map(|(_time, transform)| *transform)
+            .unwrap_or(Affine3A::IDENTITY)
+    }
+
+    #[inline]
+    pub fn latest_at_instance_poses(&self, query: &LatestAtQuery) -> &[Affine3A] {
+        debug_assert_eq!(query.timeline(), self.timeline);
+        self.pose_transforms
+            .as_ref()
+            .and_then(|pose_transforms| pose_transforms.range(..query.at().inc()).next_back())
+            .map(|(_time, pose_transforms)| pose_transforms.as_slice())
+            .unwrap_or(&[])
+    }
+
+    #[inline]
+    pub fn latest_at_pinhole(&self, query: &LatestAtQuery) -> Option<&ResolvedPinholeProjection> {
+        debug_assert_eq!(query.timeline(), self.timeline);
+        self.pinhole_projections
+            .as_ref()
+            .and_then(|pinhole_projections| {
+                pinhole_projections.range(..query.at().inc()).next_back()
+            })
+            .and_then(|(_time, projection)| projection.as_ref())
+    }
+}
+
+impl TransformCacheStoreSubscriber {
+    /// Accesses the global store subscriber.
+    ///
+    /// Lazily registers the subscriber if it hasn't been registered yet.
+    pub fn subscription_handle() -> ChunkStoreSubscriberHandle {
+        static SUBSCRIPTION: OnceCell<ChunkStoreSubscriberHandle> = OnceCell::new();
+        *SUBSCRIPTION.get_or_init(ChunkStore::register_per_store_subscriber::<Self>)
+    }
+
+    /// Accesses the transform component tracking data for a given store.
+    #[inline]
+    pub fn access<T>(store_id: &StoreId, f: impl FnMut(&Self) -> T) -> Option<T> {
+        ChunkStore::with_per_store_subscriber(Self::subscription_handle(), store_id, f)
+    }
+
+    /// Accesses the transform component tracking data for a given store exclusively.
+    #[inline]
+    pub fn access_mut<T>(store_id: &StoreId, f: impl FnMut(&mut Self) -> T) -> Option<T> {
+        ChunkStore::with_per_store_subscriber_mut(Self::subscription_handle(), store_id, f)
+    }
+
+    /// Accesses the transform component tracking data for a given timeline.
+    ///
+    /// Returns `None` if the timeline doesn't have any transforms at all.
+    #[inline]
+    pub fn transforms_per_timeline(
+        &self,
+        timeline: Timeline,
+    ) -> Option<&CachedTransformsPerTimeline> {
+        self.per_timeline.get(&timeline)
+    }
+
+    /// Makes sure the transform cache is up to date with the latest data.
+    ///
+    /// This needs to be called once per frame prior to any transform propagation.
+    /// (which is done by [`crate::contexts::TransformTreeContext`])
+    pub fn apply_all_updates(&mut self, entity_db: &EntityDb) {
+        re_tracing::profile_function!();
+
+        for (timeline, per_timeline) in &mut self.per_timeline {
+            for invalidated_transform in per_timeline.invalidated_transforms.drain(..) {
+                let entity_path = &invalidated_transform.entity_path;
+                let entity_entry = per_timeline
+                    .per_entity
+                    .entry(entity_path.hash())
+                    .or_insert_with(|| PerTimelinePerEntityTransforms {
+                        timeline: *timeline,
+                        tree_transforms: Default::default(),
+                        pose_transforms: Default::default(),
+                        pinhole_projections: Default::default(),
+                    });
+
+                for time in invalidated_transform.times {
+                    let query = LatestAtQuery::new(*timeline, time);
+
+                    if invalidated_transform
+                        .aspects
+                        .contains(TransformAspect::Tree)
+                    {
+                        let transform = query_and_resolve_tree_transform_at_entity(
+                            entity_path,
+                            entity_db,
+                            &query,
+                        )
+                        .unwrap_or(Affine3A::IDENTITY);
+                        // If there's *no* transform, we have to put identity in, otherwise we'd miss clears!
+                        entity_entry.tree_transforms.insert(time, transform);
+                    }
+                    if invalidated_transform
+                        .aspects
+                        .contains(TransformAspect::Pose)
+                    {
+                        let poses = query_and_resolve_instance_poses_at_entity(
+                            entity_path,
+                            entity_db,
+                            &query,
+                        );
+                        // *do* also insert empty ones, otherwise it's not possible to clear previous state.
+                        entity_entry
+                            .pose_transforms
+                            .get_or_insert_with(Box::default)
+                            .insert(time, poses);
+                    }
+                    if invalidated_transform
+                        .aspects
+                        .contains(TransformAspect::PinholeOrViewCoordinates)
+                    {
+                        let pinhole_projection = query_and_resolve_pinhole_projection_at_entity(
+                            entity_path,
+                            entity_db,
+                            &query,
+                        );
+                        // `None` values need to be inserted as well to clear out previous state.
+                        // See also doc string on `PinholeProjectionMap`.
+                        entity_entry
+                            .pinhole_projections
+                            .get_or_insert_with(Box::default)
+                            .insert(time, pinhole_projection);
+                    }
+                }
+            }
+        }
+    }
+
+    fn add_chunk(&mut self, event: &re_chunk_store::ChunkStoreEvent, aspects: TransformAspect) {
+        let entity_path = event.chunk.entity_path();
+
+        for (timeline, time_column) in event.diff.chunk.timelines() {
+            let per_timeline = self.per_timeline.entry(*timeline).or_default();
+
+            // All of these require complex latest-at queries that would require a lot more context,
+            // are fairly expensive, and may depend on other components that may come in at the same time.
+            // (we could inject that here, but it's not entirely straightforward).
+            // So instead, we note down that the cache is invalidated for the given entity & times.
+
+            // This invalidates any time _after_ the first event in this chunk.
+            // (e.g. if a rotation is added prior to translations later on,
+            // then the resulting transforms at those translations changes as well for latest-at queries)
+            let mut invalidated_times = Vec::new();
+            let Some(min_time) = time_column.times().min() else {
+                continue;
+            };
+            if let Some(entity_entry) = per_timeline.per_entity.get_mut(&entity_path.hash()) {
+                if aspects.contains(TransformAspect::Tree) {
+                    let invalidated_tree_transforms =
+                        entity_entry.tree_transforms.split_off(&min_time);
+                    invalidated_times.extend(invalidated_tree_transforms.into_keys());
+                }
+                if aspects.contains(TransformAspect::Pose) {
+                    if let Some(pose_transforms) = &mut entity_entry.pose_transforms {
+                        let invalidated_pose_transforms = pose_transforms.split_off(&min_time);
+                        invalidated_times.extend(invalidated_pose_transforms.into_keys());
+                    }
+                }
+                if aspects.contains(TransformAspect::PinholeOrViewCoordinates) {
+                    if let Some(pinhole_projections) = &mut entity_entry.pinhole_projections {
+                        let invalidated_pinhole_projections =
+                            pinhole_projections.split_off(&min_time);
+                        invalidated_times.extend(invalidated_pinhole_projections.into_keys());
+                    }
+                }
+            }
+
+            per_timeline
+                .invalidated_transforms
+                .push(InvalidatedTransforms {
+                    entity_path: entity_path.clone(),
+                    times: time_column
+                        .times()
+                        .chain(invalidated_times.into_iter())
+                        .collect(),
+                    aspects,
+                });
+        }
+    }
+
+    fn remove_chunk(&mut self, event: &re_chunk_store::ChunkStoreEvent, aspects: TransformAspect) {
+        let entity_path = event.chunk.entity_path();
+
+        for (timeline, time_column) in event.diff.chunk.timelines() {
+            let Some(per_timeline) = self.per_timeline.get_mut(timeline) else {
+                continue;
+            };
+
+            // Remove incoming data.
+            for invalidated_transform in per_timeline
+                .invalidated_transforms
+                .iter_mut()
+                .filter(|invalidated_transform| &invalidated_transform.entity_path == entity_path)
+            {
+                let times = time_column.times().collect::<HashSet<_>>();
+                invalidated_transform
+                    .times
+                    .retain(|time| !times.contains(time));
+            }
+            per_timeline
+                .invalidated_transforms
+                .retain(|invalidated_transform| !invalidated_transform.times.is_empty());
+
+            // Remove existing data.
+            if let Some(per_entity) = per_timeline.per_entity.get_mut(&entity_path.hash()) {
+                for time in time_column.times() {
+                    if aspects.contains(TransformAspect::Tree) {
+                        per_entity.tree_transforms.remove(&time);
+                    }
+                    if aspects.contains(TransformAspect::Pose) {
+                        if let Some(pose_transforms) = &mut per_entity.pose_transforms {
+                            pose_transforms.remove(&time);
+                        }
+                    }
+                    if aspects.contains(TransformAspect::PinholeOrViewCoordinates) {
+                        if let Some(pinhole_projections) = &mut per_entity.pinhole_projections {
+                            pinhole_projections.remove(&time);
+                        }
+                    }
+                }
+
+                if per_entity.tree_transforms.is_empty()
+                    && per_entity
+                        .pose_transforms
+                        .as_ref()
+                        .map_or(true, |pose_transforms| pose_transforms.is_empty())
+                    && per_entity
+                        .pinhole_projections
+                        .as_ref()
+                        .map_or(true, |pinhole_projections| pinhole_projections.is_empty())
+                {
+                    per_timeline.per_entity.remove(&entity_path.hash());
+                }
+            }
+
+            if per_timeline.per_entity.is_empty() && per_timeline.invalidated_transforms.is_empty()
+            {
+                self.per_timeline.remove(timeline);
+            }
+        }
+    }
+}
+
+impl PerStoreChunkSubscriber for TransformCacheStoreSubscriber {
+    fn name() -> String {
+        "rerun.TransformCacheStoreSubscriber".to_owned()
+    }
+
+    fn on_events<'a>(&mut self, events: impl Iterator<Item = &'a re_chunk_store::ChunkStoreEvent>) {
+        re_tracing::profile_function!();
+
+        for event in events {
+            // A chunk may carry the components of interest on only a subset of its timelines,
+            // so the set of affected "aspects" derived here is conservative.
+            // That situation is rare enough that a few false positives are acceptable.
+            let aspects = event.chunk.component_names().fold(
+                TransformAspect::empty(),
+                |mut aspects, component_name| {
+                    if self.transform_components.contains(&component_name) {
+                        aspects |= TransformAspect::Tree;
+                    }
+                    if self.pose_components.contains(&component_name) {
+                        aspects |= TransformAspect::Pose;
+                    }
+                    if self.pinhole_components.contains(&component_name) {
+                        aspects |= TransformAspect::PinholeOrViewCoordinates;
+                    }
+                    aspects
+                },
+            );
+            if aspects.is_empty() {
+                continue;
+            }
+
+            // Deletions invalidate cached data; anything else adds new data to process.
+            match event.kind {
+                re_chunk_store::ChunkStoreDiffKind::Deletion => self.remove_chunk(event, aspects),
+                _ => self.add_chunk(event, aspects),
+            }
+        }
+    }
+}
+
+/// Queries all components that are part of the tree transform (i.e. the `Transform3D` archetype), returning the transform from child to parent.
+///
+/// If any of the components yields an invalid transform, returns a `glam::Affine3A::ZERO`.
+/// (this effectively disconnects a subtree from the transform hierarchy!)
+// TODO(#3849): There's no way to discover invalid transforms right now (they can be intentional but often aren't).
+fn query_and_resolve_tree_transform_at_entity(
+    entity_path: &EntityPath,
+    entity_db: &EntityDb,
+    query: &LatestAtQuery,
+) -> Option<Affine3A> {
+    // TODO(andreas): Filter out styling components.
+    let components = archetypes::Transform3D::all_components();
+    let component_names = components.iter().map(|descr| descr.component_name);
+    let results = entity_db.latest_at(query, entity_path, component_names);
+    // No transform components at all -> this entity contributes no tree transform here.
+    if results.components.is_empty() {
+        return None;
+    }
+
+    let mut transform = Affine3A::IDENTITY;
+
+    // It's an error if there's more than one component. Warn in that case.
+    let mono_log_level = re_log::Level::Warn;
+
+    // The order of the components here is important, and checked by `debug_assert_transform_field_order`
+    if let Some(translation) =
+        results.component_mono_with_log_level::<components::Translation3D>(mono_log_level)
+    {
+        transform = Affine3A::from(translation);
+    }
+    if let Some(axis_angle) =
+        results.component_mono_with_log_level::<components::RotationAxisAngle>(mono_log_level)
+    {
+        if let Ok(axis_angle) = Affine3A::try_from(axis_angle) {
+            transform *= axis_angle;
+        } else {
+            // Invalid rotation -> disconnect the subtree (see docstring).
+            return Some(Affine3A::ZERO);
+        }
+    }
+    if let Some(quaternion) =
+        results.component_mono_with_log_level::<components::RotationQuat>(mono_log_level)
+    {
+        if let Ok(quaternion) = Affine3A::try_from(quaternion) {
+            transform *= quaternion;
+        } else {
+            // Invalid quaternion -> disconnect the subtree (see docstring).
+            return Some(Affine3A::ZERO);
+        }
+    }
+    if let Some(scale) =
+        results.component_mono_with_log_level::<components::Scale3D>(mono_log_level)
+    {
+        // An all-zero scale is treated as an invalid transform -> disconnect.
+        // NOTE(review): a scale that is zero on only *some* axes also yields a
+        // non-invertible matrix but is let through here — confirm that's intended.
+        if scale.x() == 0.0 && scale.y() == 0.0 && scale.z() == 0.0 {
+            return Some(Affine3A::ZERO);
+        }
+        transform *= Affine3A::from(scale);
+    }
+    if let Some(mat3x3) =
+        results.component_mono_with_log_level::<components::TransformMat3x3>(mono_log_level)
+    {
+        let affine_transform = Affine3A::from(mat3x3);
+        // A singular (zero-determinant) matrix is not a valid transform -> disconnect.
+        if affine_transform.matrix3.determinant() == 0.0 {
+            return Some(Affine3A::ZERO);
+        }
+        transform *= affine_transform;
+    }
+
+    // If the logged relation is child-from-parent, invert the accumulated transform
+    // so that the returned value is always the child-to-parent transform.
+    if results.component_mono_with_log_level::<components::TransformRelation>(mono_log_level)
+        == Some(components::TransformRelation::ChildFromParent)
+    {
+        let determinant = transform.matrix3.determinant();
+        if determinant != 0.0 && determinant.is_finite() {
+            transform = transform.inverse();
+        } else {
+            // All "regular invalid" transforms should have been caught.
+            // So ending up here means something else went wrong?
+            re_log::warn_once!(
+                "Failed to express child-from-parent transform at {} since it wasn't invertible",
+                entity_path,
+            );
+        }
+    }
+
+    Some(transform)
+}
+
+/// Queries all components that are part of pose transforms, returning the transform from child to parent.
+///
+/// If any of the components yields an invalid transform, returns a `glam::Affine3A::ZERO` for that instance.
+/// (this effectively ignores the instance for most visualizations!)
+// TODO(#3849): There's no way to discover invalid transforms right now (they can be intentional but often aren't).
+fn query_and_resolve_instance_poses_at_entity(
+    entity_path: &EntityPath,
+    entity_db: &EntityDb,
+    query: &LatestAtQuery,
+) -> Vec<Affine3A> {
+    // TODO(andreas): Filter out styling components.
+    let components = archetypes::InstancePoses3D::all_components();
+    let component_names = components.iter().map(|descr| descr.component_name);
+    let result = entity_db.latest_at(query, entity_path, component_names);
+
+    // Each pose component may have a different number of instances;
+    // we produce as many poses as the longest batch.
+    let max_num_instances = result
+        .components
+        .iter()
+        .map(|(name, row)| row.num_instances(name))
+        .max()
+        .unwrap_or(0) as usize;
+
+    if max_num_instances == 0 {
+        return Vec::new();
+    }
+
+    /// Repeats the last element of `values` until `clamped_len` elements have been
+    /// yielded ("splat" semantics for under-sized batches); yields nothing if `values`
+    /// is empty.
+    // Function-local helper: `pub` would be meaningless here since items declared
+    // inside a function body are never visible outside of it.
+    #[inline]
+    fn clamped_or_nothing<T: Clone>(
+        values: Vec<T>,
+        clamped_len: usize,
+    ) -> impl Iterator<Item = T> {
+        let Some(last) = values.last() else {
+            return Either::Left(std::iter::empty());
+        };
+        let last = last.clone();
+        Either::Right(
+            values
+                .into_iter()
+                .chain(std::iter::repeat(last))
+                .take(clamped_len),
+        )
+    }
+
+    let batch_translation = result
+        .component_batch::<components::PoseTranslation3D>()
+        .unwrap_or_default();
+    let batch_rotation_quat = result
+        .component_batch::<components::PoseRotationQuat>()
+        .unwrap_or_default();
+    let batch_rotation_axis_angle = result
+        .component_batch::<components::PoseRotationAxisAngle>()
+        .unwrap_or_default();
+    let batch_scale = result
+        .component_batch::<components::PoseScale3D>()
+        .unwrap_or_default();
+    let batch_mat3x3 = result
+        .component_batch::<components::PoseTransformMat3x3>()
+        .unwrap_or_default();
+
+    // All pose-related batches are empty: the instance count came from some other
+    // component of the archetype, so there are no poses to resolve.
+    if batch_translation.is_empty()
+        && batch_rotation_quat.is_empty()
+        && batch_rotation_axis_angle.is_empty()
+        && batch_scale.is_empty()
+        && batch_mat3x3.is_empty()
+    {
+        return Vec::new();
+    }
+    let mut iter_translation = clamped_or_nothing(batch_translation, max_num_instances);
+    let mut iter_rotation_quat = clamped_or_nothing(batch_rotation_quat, max_num_instances);
+    let mut iter_rotation_axis_angle =
+        clamped_or_nothing(batch_rotation_axis_angle, max_num_instances);
+    let mut iter_scale = clamped_or_nothing(batch_scale, max_num_instances);
+    let mut iter_mat3x3 = clamped_or_nothing(batch_mat3x3, max_num_instances);
+
+    (0..max_num_instances)
+        .map(|_| {
+            // We apply these in a specific order - see `debug_assert_transform_field_order`
+            let mut transform = Affine3A::IDENTITY;
+            if let Some(translation) = iter_translation.next() {
+                transform = Affine3A::from(translation);
+            }
+            if let Some(rotation_quat) = iter_rotation_quat.next() {
+                if let Ok(rotation_quat) = Affine3A::try_from(rotation_quat) {
+                    transform *= rotation_quat;
+                } else {
+                    // Invalid quaternion -> zero out this instance (see docstring).
+                    transform = Affine3A::ZERO;
+                }
+            }
+            if let Some(rotation_axis_angle) = iter_rotation_axis_angle.next() {
+                if let Ok(axis_angle) = Affine3A::try_from(rotation_axis_angle) {
+                    transform *= axis_angle;
+                } else {
+                    // Invalid axis-angle -> zero out this instance (see docstring).
+                    transform = Affine3A::ZERO;
+                }
+            }
+            if let Some(scale) = iter_scale.next() {
+                transform *= Affine3A::from(scale);
+            }
+            if let Some(mat3x3) = iter_mat3x3.next() {
+                transform *= Affine3A::from(mat3x3);
+            }
+            transform
+        })
+        .collect()
+}
+
+/// Looks up the latest-at pinhole projection for an entity, together with the
+/// accompanying view coordinates (falling back to the pinhole default).
+fn query_and_resolve_pinhole_projection_at_entity(
+    entity_path: &EntityPath,
+    entity_db: &EntityDb,
+    query: &LatestAtQuery,
+) -> Option<ResolvedPinholeProjection> {
+    // Without a pinhole projection component there's nothing to resolve.
+    let (_index, image_from_camera) =
+        entity_db.latest_at_component::<components::PinholeProjection>(entity_path, query)?;
+
+    // View coordinates are optional; use the pinhole archetype's default if absent.
+    let view_coordinates = entity_db
+        .latest_at_component::<components::ViewCoordinates>(entity_path, query)
+        .map(|(_index, view_coordinates)| view_coordinates)
+        .unwrap_or(archetypes::Pinhole::DEFAULT_CAMERA_XYZ);
+
+    Some(ResolvedPinholeProjection {
+        image_from_camera,
+        view_coordinates,
+    })
+}
+
+#[cfg(test)]
+mod tests {
+    use std::sync::Arc;
+
+    use re_chunk_store::{
+        external::re_chunk::ChunkBuilder, ChunkId, GarbageCollectionOptions, RowId,
+    };
+    use re_types::{archetypes, Loggable, SerializedComponentBatch};
+
+    use super::*;
+
+    /// Accesses the transform cache for the given store, which as a side effect
+    /// makes sure its store subscriber is registered.
+    fn ensure_subscriber_registered(entity_db: &EntityDb) {
+        TransformCacheStoreSubscriber::access(&entity_db.store_id(), |_| {
+            // Make sure the subscriber is registered.
+        });
+    }
+
+    #[test]
+    fn test_transforms_per_timeline_access() {
+        let mut entity_db = EntityDb::new(StoreId::random(re_log_types::StoreKind::Recording));
+        ensure_subscriber_registered(&entity_db);
+
+        // Log a few tree transforms at different times.
+        let timeline = Timeline::new_sequence("t");
+        let chunk0 = ChunkBuilder::new(ChunkId::new(), EntityPath::from("with_transform"))
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 1)],
+                &archetypes::Transform3D::from_translation([1.0, 2.0, 3.0]),
+            )
+            .build()
+            .unwrap();
+        let chunk1 = ChunkBuilder::new(ChunkId::new(), EntityPath::from("without_transform"))
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 1)],
+                // Anything that doesn't have components the transform cache is interested in.
+                &archetypes::Points3D::new([[1.0, 2.0, 3.0]]),
+            )
+            .build()
+            .unwrap();
+        entity_db.add_chunk(&Arc::new(chunk0)).unwrap();
+        entity_db.add_chunk(&Arc::new(chunk1)).unwrap();
+
+        // Only the entity that logged transform components should have an entry.
+        TransformCacheStoreSubscriber::access_mut(&entity_db.store_id(), |cache| {
+            cache.apply_all_updates(&entity_db);
+            let transforms_per_timeline = cache.transforms_per_timeline(timeline).unwrap();
+            assert!(transforms_per_timeline
+                .entity_transforms(EntityPath::from("without_transform").hash())
+                .is_none());
+            assert!(transforms_per_timeline
+                .entity_transforms(EntityPath::from("rando").hash())
+                .is_none());
+            let transforms = transforms_per_timeline
+                .entity_transforms(EntityPath::from("with_transform").hash())
+                .unwrap();
+            assert_eq!(transforms.timeline, timeline);
+            assert_eq!(transforms.tree_transforms.len(), 1);
+            assert_eq!(transforms.pose_transforms, None);
+            assert_eq!(transforms.pinhole_projections, None);
+        });
+    }
+
+    #[test]
+    fn test_tree_transforms() {
+        let mut entity_db = EntityDb::new(StoreId::random(re_log_types::StoreKind::Recording));
+        ensure_subscriber_registered(&entity_db);
+
+        // Log a few tree transforms at different times.
+        let timeline = Timeline::new_sequence("t");
+        let chunk = ChunkBuilder::new(ChunkId::new(), EntityPath::from("my_entity"))
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 1)],
+                &archetypes::Transform3D::from_translation([1.0, 2.0, 3.0]),
+            )
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 3)],
+                &archetypes::Transform3D::update_fields().with_scale([1.0, 2.0, 3.0]),
+            )
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 4)],
+                &archetypes::Transform3D::from_rotation(glam::Quat::from_rotation_x(1.0)),
+            )
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 5)],
+                &archetypes::Transform3D::clear_fields(),
+            )
+            .build()
+            .unwrap();
+        entity_db.add_chunk(&Arc::new(chunk)).unwrap();
+
+        // Check that the transform cache has the expected transforms.
+        TransformCacheStoreSubscriber::access_mut(&entity_db.store_id(), |cache| {
+            cache.apply_all_updates(&entity_db);
+            let transforms_per_timeline = cache.transforms_per_timeline(timeline).unwrap();
+            let transforms = transforms_per_timeline
+                .entity_transforms(EntityPath::from("my_entity").hash())
+                .unwrap();
+
+            // Before anything was logged: identity.
+            assert_eq!(
+                transforms.latest_at_tree_transform(&LatestAtQuery::new(timeline, 0)),
+                glam::Affine3A::IDENTITY
+            );
+            assert_eq!(
+                transforms.latest_at_tree_transform(&LatestAtQuery::new(timeline, 1)),
+                glam::Affine3A::from_translation(glam::Vec3::new(1.0, 2.0, 3.0))
+            );
+            // Latest-at semantics: time 2 falls back to the state logged at time 1.
+            assert_eq!(
+                transforms.latest_at_tree_transform(&LatestAtQuery::new(timeline, 2)),
+                glam::Affine3A::from_translation(glam::Vec3::new(1.0, 2.0, 3.0))
+            );
+            // `update_fields` at time 3 adds the scale on top of the earlier translation.
+            assert_eq!(
+                transforms.latest_at_tree_transform(&LatestAtQuery::new(timeline, 3)),
+                glam::Affine3A::from_scale_rotation_translation(
+                    glam::Vec3::new(1.0, 2.0, 3.0),
+                    glam::Quat::IDENTITY,
+                    glam::Vec3::new(1.0, 2.0, 3.0),
+                )
+            );
+            // `from_rotation` at time 4 replaces all previously set fields.
+            assert_eq!(
+                transforms.latest_at_tree_transform(&LatestAtQuery::new(timeline, 4)),
+                glam::Affine3A::from_quat(glam::Quat::from_rotation_x(1.0))
+            );
+            // `clear_fields` at time 5 resets back to identity (also for later queries).
+            assert_eq!(
+                transforms.latest_at_tree_transform(&LatestAtQuery::new(timeline, 5)),
+                glam::Affine3A::IDENTITY
+            );
+            assert_eq!(
+                transforms.latest_at_tree_transform(&LatestAtQuery::new(timeline, 123)),
+                glam::Affine3A::IDENTITY
+            );
+        });
+    }
+
+    #[test]
+    fn test_pose_transforms() {
+        let mut entity_db = EntityDb::new(StoreId::random(re_log_types::StoreKind::Recording));
+        ensure_subscriber_registered(&entity_db);
+
+        // Log a few tree transforms at different times.
+        let timeline = Timeline::new_sequence("t");
+        let chunk = ChunkBuilder::new(ChunkId::new(), EntityPath::from("my_entity"))
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 1)],
+                &archetypes::InstancePoses3D::new().with_translations([
+                    [1.0, 2.0, 3.0],
+                    [4.0, 5.0, 6.0],
+                    [7.0, 8.0, 9.0],
+                ]),
+            )
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 3)],
+                // Fewer instances, and a splatted scale.
+                &archetypes::InstancePoses3D::new()
+                    .with_translations([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
+                    .with_scales([[2.0, 3.0, 4.0]]),
+            )
+            .with_serialized_batches(
+                RowId::new(),
+                [(timeline, 4)],
+                [
+                    SerializedComponentBatch::new(
+                        arrow::array::new_empty_array(&components::Translation3D::arrow_datatype()),
+                        archetypes::InstancePoses3D::descriptor_translations(),
+                    ),
+                    SerializedComponentBatch::new(
+                        arrow::array::new_empty_array(&components::Scale3D::arrow_datatype()),
+                        archetypes::InstancePoses3D::descriptor_scales(),
+                    ),
+                ],
+            )
+            // TODO(#7245): Use this instead of the above
+            // .with_archetype(
+            //     RowId::new(),
+            //     [(timeline, 4)],
+            //     &archetypes::InstancePoses3D::clear_fields(),
+            // )
+            .build()
+            .unwrap();
+        entity_db.add_chunk(&Arc::new(chunk)).unwrap();
+
+        // Check that the transform cache has the expected transforms.
+        TransformCacheStoreSubscriber::access_mut(&entity_db.store_id(), |cache| {
+            cache.apply_all_updates(&entity_db);
+            let transforms_per_timeline = cache.transforms_per_timeline(timeline).unwrap();
+            let transforms = transforms_per_timeline
+                .entity_transforms(EntityPath::from("my_entity").hash())
+                .unwrap();
+
+            assert_eq!(
+                transforms.latest_at_instance_poses(&LatestAtQuery::new(timeline, 0)),
+                &[]
+            );
+            assert_eq!(
+                transforms.latest_at_instance_poses(&LatestAtQuery::new(timeline, 1)),
+                &[
+                    glam::Affine3A::from_translation(glam::Vec3::new(1.0, 2.0, 3.0)),
+                    glam::Affine3A::from_translation(glam::Vec3::new(4.0, 5.0, 6.0)),
+                    glam::Affine3A::from_translation(glam::Vec3::new(7.0, 8.0, 9.0)),
+                ]
+            );
+            assert_eq!(
+                transforms.latest_at_instance_poses(&LatestAtQuery::new(timeline, 2)),
+                &[
+                    glam::Affine3A::from_translation(glam::Vec3::new(1.0, 2.0, 3.0)),
+                    glam::Affine3A::from_translation(glam::Vec3::new(4.0, 5.0, 6.0)),
+                    glam::Affine3A::from_translation(glam::Vec3::new(7.0, 8.0, 9.0)),
+                ]
+            );
+            // The single logged scale is splatted across both instances.
+            assert_eq!(
+                transforms.latest_at_instance_poses(&LatestAtQuery::new(timeline, 3)),
+                &[
+                    glam::Affine3A::from_scale_rotation_translation(
+                        glam::Vec3::new(2.0, 3.0, 4.0),
+                        glam::Quat::IDENTITY,
+                        glam::Vec3::new(1.0, 2.0, 3.0),
+                    ),
+                    glam::Affine3A::from_scale_rotation_translation(
+                        glam::Vec3::new(2.0, 3.0, 4.0),
+                        glam::Quat::IDENTITY,
+                        glam::Vec3::new(4.0, 5.0, 6.0),
+                    ),
+                ]
+            );
+            // Empty component batches at time 4 clear out the poses.
+            assert_eq!(
+                transforms.latest_at_instance_poses(&LatestAtQuery::new(timeline, 4)),
+                &[]
+            );
+            assert_eq!(
+                transforms.latest_at_instance_poses(&LatestAtQuery::new(timeline, 123)),
+                &[]
+            );
+        });
+    }
+
+    #[test]
+    fn test_pinhole_projections() {
+        let mut entity_db = EntityDb::new(StoreId::random(re_log_types::StoreKind::Recording));
+        ensure_subscriber_registered(&entity_db);
+
+        let image_from_camera =
+            components::PinholeProjection::from_focal_length_and_principal_point(
+                [1.0, 2.0],
+                [1.0, 2.0],
+            );
+
+        // Log a few tree transforms at different times.
+        let timeline = Timeline::new_sequence("t");
+        let chunk = ChunkBuilder::new(ChunkId::new(), EntityPath::from("my_entity"))
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 1)],
+                &archetypes::Pinhole::new(image_from_camera),
+            )
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 3)],
+                &archetypes::ViewCoordinates::BLU,
+            )
+            // Clear out the pinhole projection (this should yield nothing then for the remaining view coordinates.)
+            .with_serialized_batch(
+                RowId::new(),
+                [(timeline, 4)],
+                SerializedComponentBatch::new(
+                    arrow::array::new_empty_array(&components::PinholeProjection::arrow_datatype()),
+                    archetypes::Pinhole::descriptor_image_from_camera(),
+                ),
+            )
+            // TODO(#7245): Use this instead
+            // .with_archetype(
+            //     RowId::new(),
+            //     [(timeline, 4)],
+            //     &archetypes::Pinhole::clear_fields(),
+            // )
+            .build()
+            .unwrap();
+        entity_db.add_chunk(&Arc::new(chunk)).unwrap();
+
+        // Check that the transform cache has the expected transforms.
+        TransformCacheStoreSubscriber::access_mut(&entity_db.store_id(), |cache| {
+            cache.apply_all_updates(&entity_db);
+            let transforms_per_timeline = cache.transforms_per_timeline(timeline).unwrap();
+            let transforms = transforms_per_timeline
+                .entity_transforms(EntityPath::from("my_entity").hash())
+                .unwrap();
+
+            assert_eq!(
+                transforms.latest_at_pinhole(&LatestAtQuery::new(timeline, 0)),
+                None
+            );
+            // Without logged view coordinates, the pinhole default is used.
+            assert_eq!(
+                transforms.latest_at_pinhole(&LatestAtQuery::new(timeline, 1)),
+                Some(&ResolvedPinholeProjection {
+                    image_from_camera,
+                    view_coordinates: archetypes::Pinhole::DEFAULT_CAMERA_XYZ,
+                })
+            );
+            assert_eq!(
+                transforms.latest_at_pinhole(&LatestAtQuery::new(timeline, 2)),
+                Some(&ResolvedPinholeProjection {
+                    image_from_camera,
+                    view_coordinates: archetypes::Pinhole::DEFAULT_CAMERA_XYZ,
+                })
+            );
+            // View coordinates logged at time 3 replace the default.
+            assert_eq!(
+                transforms.latest_at_pinhole(&LatestAtQuery::new(timeline, 3)),
+                Some(&ResolvedPinholeProjection {
+                    image_from_camera,
+                    view_coordinates: components::ViewCoordinates::BLU,
+                })
+            );
+            assert_eq!(
+                transforms.latest_at_pinhole(&LatestAtQuery::new(timeline, 4)),
+                None // View coordinates alone doesn't give us a pinhole projection from the transform cache.
+            );
+            assert_eq!(
+                transforms.latest_at_pinhole(&LatestAtQuery::new(timeline, 123)),
+                None
+            );
+        });
+    }
+
+    #[test]
+    fn test_out_of_order_updates() {
+        let mut entity_db = EntityDb::new(StoreId::random(re_log_types::StoreKind::Recording));
+        ensure_subscriber_registered(&entity_db);
+
+        // Log a few tree transforms at different times.
+        let timeline = Timeline::new_sequence("t");
+        let chunk = ChunkBuilder::new(ChunkId::new(), EntityPath::from("my_entity"))
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 1)],
+                &archetypes::Transform3D::from_translation([1.0, 2.0, 3.0]),
+            )
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 3)],
+                // Note that this doesn't clear anything that could be inserted at time 2.
+                &archetypes::Transform3D::update_fields().with_translation([2.0, 3.0, 4.0]),
+            )
+            .build()
+            .unwrap();
+        entity_db.add_chunk(&Arc::new(chunk)).unwrap();
+
+        // Check that the transform cache has the expected transforms.
+        TransformCacheStoreSubscriber::access_mut(&entity_db.store_id(), |cache| {
+            cache.apply_all_updates(&entity_db);
+            let transforms_per_timeline = cache.transforms_per_timeline(timeline).unwrap();
+            let transforms = transforms_per_timeline
+                .entity_transforms(EntityPath::from("my_entity").hash())
+                .unwrap();
+
+            // Check that the transform cache has the expected transforms.
+            assert_eq!(
+                transforms.latest_at_tree_transform(&LatestAtQuery::new(timeline, 1)),
+                glam::Affine3A::from_translation(glam::Vec3::new(1.0, 2.0, 3.0))
+            );
+            assert_eq!(
+                transforms.latest_at_tree_transform(&LatestAtQuery::new(timeline, 3)),
+                glam::Affine3A::from_translation(glam::Vec3::new(2.0, 3.0, 4.0))
+            );
+        });
+
+        // Add a transform between the two that invalidates the one at time stamp 3.
+        let timeline = Timeline::new_sequence("t");
+        let chunk = ChunkBuilder::new(ChunkId::new(), EntityPath::from("my_entity"))
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 2)],
+                &archetypes::Transform3D::update_fields().with_scale([-1.0, -2.0, -3.0]),
+            )
+            .build()
+            .unwrap();
+        entity_db.add_chunk(&Arc::new(chunk)).unwrap();
+
+        // Check that the transform cache has the expected changed transforms.
+        TransformCacheStoreSubscriber::access_mut(&entity_db.store_id(), |cache| {
+            cache.apply_all_updates(&entity_db);
+            let transforms_per_timeline = cache.transforms_per_timeline(timeline).unwrap();
+            let transforms = transforms_per_timeline
+                .entity_transforms(EntityPath::from("my_entity").hash())
+                .unwrap();
+
+            // Check that the transform cache has the expected transforms.
+            assert_eq!(
+                transforms.latest_at_tree_transform(&LatestAtQuery::new(timeline, 1)),
+                glam::Affine3A::from_translation(glam::Vec3::new(1.0, 2.0, 3.0))
+            );
+            assert_eq!(
+                transforms.latest_at_tree_transform(&LatestAtQuery::new(timeline, 2)),
+                glam::Affine3A::from_scale_rotation_translation(
+                    glam::Vec3::new(-1.0, -2.0, -3.0),
+                    glam::Quat::IDENTITY,
+                    glam::Vec3::new(1.0, 2.0, 3.0),
+                )
+            );
+            // The late-arriving scale at time 2 must now also be reflected at time 3.
+            assert_eq!(
+                transforms.latest_at_tree_transform(&LatestAtQuery::new(timeline, 3)),
+                glam::Affine3A::from_scale_rotation_translation(
+                    glam::Vec3::new(-1.0, -2.0, -3.0),
+                    glam::Quat::IDENTITY,
+                    glam::Vec3::new(2.0, 3.0, 4.0),
+                )
+            );
+        });
+    }
+
+    #[test]
+    fn test_gc() {
+        let mut entity_db = EntityDb::new(StoreId::random(re_log_types::StoreKind::Recording));
+        ensure_subscriber_registered(&entity_db);
+
+        let timeline = Timeline::new_sequence("t");
+        let chunk = ChunkBuilder::new(ChunkId::new(), EntityPath::from("my_entity0"))
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 1)],
+                &archetypes::Transform3D::from_translation([1.0, 2.0, 3.0]),
+            )
+            .build()
+            .unwrap();
+        entity_db.add_chunk(&Arc::new(chunk)).unwrap();
+
+        // Apply some updates to the transform before GC pass.
+        TransformCacheStoreSubscriber::access_mut(&entity_db.store_id(), |cache| {
+            cache.apply_all_updates(&entity_db);
+        });
+
+        let chunk = ChunkBuilder::new(ChunkId::new(), EntityPath::from("my_entity1"))
+            .with_archetype(
+                RowId::new(),
+                [(timeline, 2)],
+                &archetypes::Transform3D::from_translation([4.0, 5.0, 6.0]),
+            )
+            .build()
+            .unwrap();
+        entity_db.add_chunk(&Arc::new(chunk)).unwrap();
+
+        // Don't apply updates for this chunk.
+
+        entity_db.gc(&GarbageCollectionOptions::gc_everything());
+
+        // After GC everything is dropped — both the applied and the still-pending data.
+        TransformCacheStoreSubscriber::access_mut(&entity_db.store_id(), |cache| {
+            assert!(cache.transforms_per_timeline(timeline).is_none());
+        });
+    }
+}
diff --git a/crates/viewer/re_view_spatial/src/transform_component_tracker.rs b/crates/viewer/re_view_spatial/src/transform_component_tracker.rs
deleted file mode 100644
index 7f6b564f10f6..000000000000
--- a/crates/viewer/re_view_spatial/src/transform_component_tracker.rs
+++ /dev/null
@@ -1,139 +0,0 @@
-use once_cell::sync::OnceCell;
-
-use nohash_hasher::{IntMap, IntSet};
-use re_chunk_store::{
-    ChunkStore, ChunkStoreDiffKind, ChunkStoreEvent, ChunkStoreSubscriberHandle,
-    PerStoreChunkSubscriber,
-};
-use re_log_types::{EntityPath, EntityPathHash, StoreId};
-use re_types::{Component as _, ComponentName};
-
-// ---
-
-/// Set of components that an entity ever had over its known lifetime.
-#[derive(Default, Clone)]
-pub struct PotentialTransformComponentSet {
-    /// All transform components ever present.
-    pub transform3d: IntSet<ComponentName>,
-
-    /// All pose transform components ever present.
-    pub pose3d: IntSet<ComponentName>,
-
-    /// Whether the entity ever had a pinhole camera.
-    pub pinhole: bool,
-}
-
-/// Keeps track of which entities have had any `Transform3D`-related data on any timeline at any
-/// point in time.
-///
-/// This is used to optimize queries in the `TransformContext`, so that we don't unnecessarily pay
-/// for the fixed overhead of all the query layers when we know for a fact that there won't be any
-/// data there.
-/// This is a huge performance improvement in practice, especially in recordings with many entities.
-pub struct TransformComponentTrackerStoreSubscriber {
-    /// The components of interest.
-    transform_components: IntSet<ComponentName>,
-    pose_components: IntSet<ComponentName>,
-
-    components_per_entity: IntMap<EntityPathHash, PotentialTransformComponentSet>,
-}
-
-impl Default for TransformComponentTrackerStoreSubscriber {
-    #[inline]
-    fn default() -> Self {
-        use re_types::Archetype as _;
-        Self {
-            transform_components: re_types::archetypes::Transform3D::all_components()
-                .iter()
-                .map(|descr| descr.component_name)
-                .collect(),
-            pose_components: re_types::archetypes::InstancePoses3D::all_components()
-                .iter()
-                .map(|descr| descr.component_name)
-                .collect(),
-            components_per_entity: Default::default(),
-        }
-    }
-}
-
-impl TransformComponentTrackerStoreSubscriber {
-    /// Accesses the global store subscriber.
-    ///
-    /// Lazily registers the subscriber if it hasn't been registered yet.
-    pub fn subscription_handle() -> ChunkStoreSubscriberHandle {
-        static SUBSCRIPTION: OnceCell<ChunkStoreSubscriberHandle> = OnceCell::new();
-        *SUBSCRIPTION.get_or_init(ChunkStore::register_per_store_subscriber::<Self>)
-    }
-
-    /// Accesses the transform component tracking data for a given store.
-    #[inline]
-    pub fn access<T>(store_id: &StoreId, f: impl FnOnce(&Self) -> T) -> Option<T> {
-        ChunkStore::with_per_store_subscriber_once(Self::subscription_handle(), store_id, f)
-    }
-
-    pub fn potential_transform_components(
-        &self,
-        entity_path: &EntityPath,
-    ) -> Option<&PotentialTransformComponentSet> {
-        self.components_per_entity.get(&entity_path.hash())
-    }
-}
-
-impl PerStoreChunkSubscriber for TransformComponentTrackerStoreSubscriber {
-    #[inline]
-    fn name() -> String {
-        "rerun.store_subscriber.TransformComponentTracker".into()
-    }
-
-    fn on_events<'a>(&mut self, events: impl Iterator<Item = &'a ChunkStoreEvent>) {
-        re_tracing::profile_function!();
-
-        for event in events
-            // This is only additive, don't care about removals.
-            .filter(|e| e.kind == ChunkStoreDiffKind::Addition)
-        {
-            let entity_path_hash = event.chunk.entity_path().hash();
-
-            let contains_non_zero_component_array = |component_name| {
-                event
-                    .chunk
-                    .components()
-                    .get(&component_name)
-                    .is_some_and(|per_desc| {
-                        per_desc
-                            .values()
-                            .any(|list_array| list_array.offsets().lengths().any(|len| len > 0))
-                    })
-            };
-
-            for component_name in event.chunk.component_names() {
-                if self.transform_components.contains(&component_name)
-                    && contains_non_zero_component_array(component_name)
-                {
-                    self.components_per_entity
-                        .entry(entity_path_hash)
-                        .or_default()
-                        .transform3d
-                        .insert(component_name);
-                }
-                if self.pose_components.contains(&component_name)
-                    && contains_non_zero_component_array(component_name)
-                {
-                    self.components_per_entity
-                        .entry(entity_path_hash)
-                        .or_default()
-                        .pose3d
-                        .insert(component_name);
-                }
-                if component_name == re_types::components::PinholeProjection::name()
-                    && contains_non_zero_component_array(component_name)
-                {
-                    self.components_per_entity
-                        .entry(entity_path_hash)
-                        .or_default()
-                        .pinhole = true;
-                }
-            }
-        }
-    }
-}
diff --git a/crates/viewer/re_view_spatial/src/ui_2d.rs b/crates/viewer/re_view_spatial/src/ui_2d.rs
index 3104e685c88b..a77dc8c6df91 100644
--- a/crates/viewer/re_view_spatial/src/ui_2d.rs
+++ b/crates/viewer/re_view_spatial/src/ui_2d.rs
@@ -10,7 +10,6 @@ use re_types::{
         archetypes::{Background, NearClipPlane, VisualBounds2D},
         components as blueprint_components,
     },
-    components::ViewCoordinates,
 };
 use re_ui::{ContextExt as _, ModifiersMarkdown, MouseButtonMarkdown};
 use re_view::controls::{DRAG_PAN2D_BUTTON, ZOOM_SCROLL_MODIFIER};
@@ -353,7 +352,7 @@ fn setup_target_config(
             )
             .into(),
             resolution: Some([resolution.x, resolution.y].into()),
-            camera_xyz: Some(ViewCoordinates::RDF),
+            camera_xyz: Some(Pinhole::DEFAULT_CAMERA_XYZ),
             image_plane_distance: None,
         };
     }
diff --git a/crates/viewer/re_view_spatial/src/view_2d.rs b/crates/viewer/re_view_spatial/src/view_2d.rs
index 1ed42c0201d7..1949cfa043cb 100644
--- a/crates/viewer/re_view_spatial/src/view_2d.rs
+++ b/crates/viewer/re_view_spatial/src/view_2d.rs
@@ -68,7 +68,7 @@ impl ViewClass for SpatialView2D {
     ) -> Result<(), ViewClassRegistryError> {
         // Ensure spatial topology & max image dimension is registered.
         crate::spatial_topology::SpatialTopologyStoreSubscriber::subscription_handle();
-        crate::transform_component_tracker::TransformComponentTrackerStoreSubscriber::subscription_handle();
+        crate::transform_cache::TransformCacheStoreSubscriber::subscription_handle();
         crate::max_image_dimension_subscriber::MaxImageDimensionsStoreSubscriber::subscription_handle();
 
         register_spatial_contexts(system_registry)?;
diff --git a/crates/viewer/re_view_spatial/src/view_3d.rs b/crates/viewer/re_view_spatial/src/view_3d.rs
index 2af66bb20404..c83faf828c5a 100644
--- a/crates/viewer/re_view_spatial/src/view_3d.rs
+++ b/crates/viewer/re_view_spatial/src/view_3d.rs
@@ -74,7 +74,7 @@ impl ViewClass for SpatialView3D {
     ) -> Result<(), ViewClassRegistryError> {
         // Ensure spatial topology is registered.
         crate::spatial_topology::SpatialTopologyStoreSubscriber::subscription_handle();
-        crate::transform_component_tracker::TransformComponentTrackerStoreSubscriber::subscription_handle();
+        crate::transform_cache::TransformCacheStoreSubscriber::subscription_handle();
 
         register_spatial_contexts(system_registry)?;
         register_3d_spatial_visualizers(system_registry)?;
diff --git a/crates/viewer/re_view_spatial/src/visualizers/cameras.rs b/crates/viewer/re_view_spatial/src/visualizers/cameras.rs
index 709ccae686c4..834d90f90daf 100644
--- a/crates/viewer/re_view_spatial/src/visualizers/cameras.rs
+++ b/crates/viewer/re_view_spatial/src/visualizers/cameras.rs
@@ -14,7 +14,8 @@ use re_viewer_context::{
 
 use super::{filter_visualizable_3d_entities, SpatialViewVisualizerData};
 use crate::{
-    contexts::TransformContext, query_pinhole, space_camera_3d::SpaceCamera3D, ui::SpatialViewState,
+    contexts::TransformTreeContext, query_pinhole, space_camera_3d::SpaceCamera3D,
+    ui::SpatialViewState,
 };
 
 const CAMERA_COLOR: re_renderer::Color32 = re_renderer::Color32::from_rgb(150, 150, 150);
@@ -46,7 +47,7 @@ impl CamerasVisualizer {
     fn visit_instance(
         &mut self,
         line_builder: &mut re_renderer::LineDrawableBuilder<'_>,
-        transforms: &TransformContext,
+        transforms: &TransformTreeContext,
         data_result: &DataResult,
         pinhole: &Pinhole,
         pinhole_view_coordinates: ViewCoordinates,
@@ -71,7 +72,7 @@ impl CamerasVisualizer {
         }
 
         // The camera transform does not include the pinhole transform.
-        let Some(transform_info) = transforms.transform_info_for_entity(ent_path) else {
+        let Some(transform_info) = transforms.transform_info_for_entity(ent_path.hash()) else {
             return;
         };
         let Some(twod_in_threed_info) = &transform_info.twod_in_threed_info else {
@@ -214,7 +215,7 @@ impl VisualizerSystem for CamerasVisualizer {
         query: &ViewQuery<'_>,
         context_systems: &ViewContextCollection,
     ) -> Result<Vec<re_renderer::QueueableDrawData>, ViewSystemExecutionError> {
-        let transforms = context_systems.get::<TransformContext>()?;
+        let transforms = context_systems.get::<TransformTreeContext>()?;
 
         // Counting all cameras ahead of time is a bit wasteful, but we also don't expect a huge amount,
         // so let re_renderer's allocator internally decide what buffer sizes to pick & grow them as we go.
@@ -236,7 +237,7 @@ impl VisualizerSystem for CamerasVisualizer {
                     transforms,
                     data_result,
                     &pinhole,
-                    pinhole.camera_xyz.unwrap_or(ViewCoordinates::RDF), // TODO(#2641): This should come from archetype
+                    pinhole.camera_xyz.unwrap_or(Pinhole::DEFAULT_CAMERA_XYZ),
                     entity_highlight,
                 );
             }
@@ -279,4 +280,10 @@ impl TypedComponentFallbackProvider<ImagePlaneDistance> for CamerasVisualizer {
     }
 }
 
-re_viewer_context::impl_component_fallback_provider!(CamerasVisualizer => [ImagePlaneDistance]);
+impl TypedComponentFallbackProvider<ViewCoordinates> for CamerasVisualizer {
+    fn fallback_for(&self, _ctx: &QueryContext<'_>) -> ViewCoordinates {
+        Pinhole::DEFAULT_CAMERA_XYZ
+    }
+}
+
+re_viewer_context::impl_component_fallback_provider!(CamerasVisualizer => [ImagePlaneDistance, ViewCoordinates]);
diff --git a/crates/viewer/re_view_spatial/src/visualizers/depth_images.rs b/crates/viewer/re_view_spatial/src/visualizers/depth_images.rs
index 652eafbdb38f..3b946caefd14 100644
--- a/crates/viewer/re_view_spatial/src/visualizers/depth_images.rs
+++ b/crates/viewer/re_view_spatial/src/visualizers/depth_images.rs
@@ -7,7 +7,6 @@ use re_types::{
     archetypes::DepthImage,
     components::{
         self, Colormap, DepthMeter, DrawOrder, FillRatio, ImageBuffer, ImageFormat, ValueRange,
-        ViewCoordinates,
     },
     image::ImageKind,
     Component as _,
@@ -190,7 +189,7 @@ impl DepthImageVisualizer {
             * glam::Affine3A::from_mat3(
                 intrinsics
                     .camera_xyz
-                    .unwrap_or(ViewCoordinates::RDF) // TODO(#2641): This should come from archetype
+                    .unwrap_or(re_types::archetypes::Pinhole::DEFAULT_CAMERA_XYZ)
                     .from_rdf(),
             );
 
diff --git a/crates/viewer/re_view_spatial/src/visualizers/mod.rs b/crates/viewer/re_view_spatial/src/visualizers/mod.rs
index b1b5ea34d38f..0bf2cb4b8297 100644
--- a/crates/viewer/re_view_spatial/src/visualizers/mod.rs
+++ b/crates/viewer/re_view_spatial/src/visualizers/mod.rs
@@ -267,11 +267,7 @@ pub fn load_keypoint_connections(
 ///
 /// TODO(#1387): Image coordinate space should be configurable.
 pub fn image_view_coordinates() -> re_types::components::ViewCoordinates {
-    // Typical image spaces have
-    // - x pointing right
-    // - y pointing down
-    // - z pointing into the image plane (this is convenient for reading out a depth image which has typically positive z values)
-    re_types::components::ViewCoordinates::RDF
+    re_types::archetypes::Pinhole::DEFAULT_CAMERA_XYZ
 }
 
 fn filter_visualizable_2d_entities(
diff --git a/crates/viewer/re_view_spatial/src/visualizers/transform3d_arrows.rs b/crates/viewer/re_view_spatial/src/visualizers/transform3d_arrows.rs
index a2340f88fd85..1ecb0dc73177 100644
--- a/crates/viewer/re_view_spatial/src/visualizers/transform3d_arrows.rs
+++ b/crates/viewer/re_view_spatial/src/visualizers/transform3d_arrows.rs
@@ -13,7 +13,7 @@ use re_viewer_context::{
     VisualizableEntities, VisualizableFilterContext, VisualizerQueryInfo, VisualizerSystem,
 };
 
-use crate::{contexts::TransformContext, ui::SpatialViewState, view_kind::SpatialViewKind};
+use crate::{contexts::TransformTreeContext, ui::SpatialViewState, view_kind::SpatialViewKind};
 
 use super::{filter_visualizable_3d_entities, CamerasVisualizer, SpatialViewVisualizerData};
 
@@ -81,7 +81,7 @@ impl VisualizerSystem for Transform3DArrowsVisualizer {
         query: &ViewQuery<'_>,
         context_systems: &ViewContextCollection,
     ) -> Result<Vec<re_renderer::QueueableDrawData>, ViewSystemExecutionError> {
-        let transforms = context_systems.get::<TransformContext>()?;
+        let transforms = context_systems.get::<TransformTreeContext>()?;
 
         let latest_at_query = re_chunk_store::LatestAtQuery::new(query.timeline, query.latest_at);
 
@@ -95,7 +95,7 @@ impl VisualizerSystem for Transform3DArrowsVisualizer {
         for data_result in query.iter_visible_data_results(ctx, Self::identifier()) {
             // Use transform without potential pinhole, since we don't want to visualize image-space coordinates.
             let Some(transform_info) =
-                transforms.transform_info_for_entity(&data_result.entity_path)
+                transforms.transform_info_for_entity(data_result.entity_path.hash())
             else {
                 continue;
             };
diff --git a/crates/viewer/re_view_spatial/src/visualizers/utilities/entity_iterator.rs b/crates/viewer/re_view_spatial/src/visualizers/utilities/entity_iterator.rs
index 2b5718a26e92..2ca627bf1430 100644
--- a/crates/viewer/re_view_spatial/src/visualizers/utilities/entity_iterator.rs
+++ b/crates/viewer/re_view_spatial/src/visualizers/utilities/entity_iterator.rs
@@ -6,7 +6,7 @@ use re_viewer_context::{
     ViewSystemExecutionError,
 };
 
-use crate::contexts::{EntityDepthOffsets, SpatialSceneEntityContext, TransformContext};
+use crate::contexts::{EntityDepthOffsets, SpatialSceneEntityContext, TransformTreeContext};
 
 // ---
 
@@ -84,7 +84,7 @@ where
         &HybridResults<'_>,
     ) -> Result<(), ViewSystemExecutionError>,
 {
-    let transforms = view_ctx.get::<TransformContext>()?;
+    let transforms = view_ctx.get::<TransformTreeContext>()?;
     let depth_offsets = view_ctx.get::<EntityDepthOffsets>()?;
     let annotations = view_ctx.get::<AnnotationSceneContext>()?;
 
@@ -93,7 +93,8 @@ where
     let system_identifier = System::identifier();
 
     for data_result in query.iter_visible_data_results(ctx, system_identifier) {
-        let Some(transform_info) = transforms.transform_info_for_entity(&data_result.entity_path)
+        let Some(transform_info) =
+            transforms.transform_info_for_entity(data_result.entity_path.hash())
         else {
             continue;
         };

From ec07b14fdb2ed9a21f09323e85cb65e27500102a Mon Sep 17 00:00:00 2001
From: Emil Ernerfeldt <emil.ernerfeldt@gmail.com>
Date: Wed, 15 Jan 2025 16:24:25 +0100
Subject: [PATCH 2/4] Use insta for dataframe snapshot tests (#8696)

* Part of #3741

This will make it easier to switch out TransportChunk for RecordBatch
---
 Cargo.lock                                    |   1 +
 crates/store/re_chunk/src/arrow.rs            |   8 +-
 crates/store/re_chunk/src/transport.rs        |  14 +-
 crates/store/re_dataframe/Cargo.toml          |   1 +
 crates/store/re_dataframe/src/query.rs        | 310 ++----------------
 ..._query__tests__async_barebones_static.snap |  13 +
 ...uery__tests__async_barebones_temporal.snap |  13 +
 ..._dataframe__query__tests__barebones-2.snap |  13 +
 ...re_dataframe__query__tests__barebones.snap |  13 +
 .../re_dataframe__query__tests__clears-2.snap |  13 +
 .../re_dataframe__query__tests__clears.snap   |  13 +
 ...e__query__tests__filtered_index_range.snap |  13 +
 ...__query__tests__filtered_index_values.snap |  13 +
 ..._query__tests__filtered_is_not_null-2.snap |   7 +
 ..._query__tests__filtered_is_not_null-3.snap |  13 +
 ..._query__tests__filtered_is_not_null-4.snap |  13 +
 ...e__query__tests__filtered_is_not_null.snap |   7 +
 ..._dataframe__query__tests__selection-2.snap |  11 +
 ..._dataframe__query__tests__selection-3.snap |  12 +
 ..._dataframe__query__tests__selection-4.snap |  19 ++
 ...re_dataframe__query__tests__selection.snap |   7 +
 ...__sparse_fill_strategy_latestatglobal.snap |  13 +
 ...e__query__tests__using_index_values-2.snap |  13 +
 ...ame__query__tests__using_index_values.snap |  13 +
 ...aframe__query__tests__view_contents-2.snap |  12 +
 ...ataframe__query__tests__view_contents.snap |   7 +
 ...y__tests__view_contents_and_selection.snap |  14 +
 rerun_py/src/remote.rs                        |   2 +-
 28 files changed, 310 insertions(+), 291 deletions(-)
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__async_barebones_static.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__async_barebones_temporal.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__barebones-2.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__barebones.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__clears-2.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__clears.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_index_range.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_index_values.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null-2.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null-3.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null-4.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection-2.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection-3.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection-4.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__sparse_fill_strategy_latestatglobal.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__using_index_values-2.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__using_index_values.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__view_contents-2.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__view_contents.snap
 create mode 100644 crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__view_contents_and_selection.snap

diff --git a/Cargo.lock b/Cargo.lock
index 950cec6514a2..75faee5fd61a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -5913,6 +5913,7 @@ version = "0.22.0-alpha.1+dev"
 dependencies = [
  "anyhow",
  "arrow",
+ "insta",
  "itertools 0.13.0",
  "nohash-hasher",
  "rayon",
diff --git a/crates/store/re_chunk/src/arrow.rs b/crates/store/re_chunk/src/arrow.rs
index 32642e298555..4802c2170484 100644
--- a/crates/store/re_chunk/src/arrow.rs
+++ b/crates/store/re_chunk/src/arrow.rs
@@ -13,11 +13,9 @@ impl TransportChunk {
     /// related rust structures that refer to those data buffers.
     pub fn try_to_arrow_record_batch(&self) -> Result<RecordBatch, ArrowError> {
         let columns: Vec<_> = self
-            .all_columns()
-            .map(|(_field, arr2_array)| {
-                let data = arrow2::array::to_data(arr2_array.as_ref());
-                make_array(data)
-            })
+            .columns()
+            .iter()
+            .map(|arr2_array| make_array(arrow2::array::to_data(*arr2_array)))
             .collect();
 
         RecordBatch::try_new(self.schema(), columns)
diff --git a/crates/store/re_chunk/src/transport.rs b/crates/store/re_chunk/src/transport.rs
index 5f0a9c17e162..0b5a4fe7d3e8 100644
--- a/crates/store/re_chunk/src/transport.rs
+++ b/crates/store/re_chunk/src/transport.rs
@@ -380,7 +380,7 @@ impl TransportChunk {
     /// * [`Self::FIELD_METADATA_VALUE_KIND_CONTROL`]
     /// * [`Self::FIELD_METADATA_VALUE_KIND_DATA`]
     #[inline]
-    pub fn columns<'a>(
+    fn columns_of_kind<'a>(
         &'a self,
         kind: &'a str,
     ) -> impl Iterator<Item = (&'a ArrowField, &'a Box<dyn Arrow2Array>)> + 'a {
@@ -402,7 +402,9 @@ impl TransportChunk {
     }
 
     #[inline]
-    pub fn all_columns(&self) -> impl Iterator<Item = (&ArrowField, &Box<dyn Arrow2Array>)> + '_ {
+    pub fn fields_and_columns(
+        &self,
+    ) -> impl Iterator<Item = (&ArrowField, &Box<dyn Arrow2Array>)> + '_ {
         self.schema
             .fields
             .iter()
@@ -416,26 +418,26 @@ impl TransportChunk {
     }
 
     #[inline]
-    pub fn all_columns_collected(&self) -> Vec<&dyn Arrow2Array> {
+    pub fn columns(&self) -> Vec<&dyn Arrow2Array> {
         self.data.iter().map(|c| c.as_ref()).collect()
     }
 
     /// Iterates all control columns present in this chunk.
     #[inline]
     pub fn controls(&self) -> impl Iterator<Item = (&ArrowField, &Box<dyn Arrow2Array>)> {
-        self.columns(Self::FIELD_METADATA_VALUE_KIND_CONTROL)
+        self.columns_of_kind(Self::FIELD_METADATA_VALUE_KIND_CONTROL)
     }
 
     /// Iterates all data columns present in this chunk.
     #[inline]
     pub fn components(&self) -> impl Iterator<Item = (&ArrowField, &Box<dyn Arrow2Array>)> {
-        self.columns(Self::FIELD_METADATA_VALUE_KIND_DATA)
+        self.columns_of_kind(Self::FIELD_METADATA_VALUE_KIND_DATA)
     }
 
     /// Iterates all timeline columns present in this chunk.
     #[inline]
     pub fn timelines(&self) -> impl Iterator<Item = (&ArrowField, &Box<dyn Arrow2Array>)> {
-        self.columns(Self::FIELD_METADATA_VALUE_KIND_TIME)
+        self.columns_of_kind(Self::FIELD_METADATA_VALUE_KIND_TIME)
     }
 
     /// How many columns in total? Includes control, time, and component columns.
diff --git a/crates/store/re_dataframe/Cargo.toml b/crates/store/re_dataframe/Cargo.toml
index af5cce5c268a..cd62b928e2b0 100644
--- a/crates/store/re_dataframe/Cargo.toml
+++ b/crates/store/re_dataframe/Cargo.toml
@@ -40,6 +40,7 @@ re_types_core.workspace = true
 anyhow.workspace = true
 arrow.workspace = true
 arrow2.workspace = true
+insta.workspace = true
 itertools.workspace = true
 nohash-hasher.workspace = true
 rayon.workspace = true
diff --git a/crates/store/re_dataframe/src/query.rs b/crates/store/re_dataframe/src/query.rs
index 4834b631dff4..1a5c6358ba50 100644
--- a/crates/store/re_dataframe/src/query.rs
+++ b/crates/store/re_dataframe/src/query.rs
@@ -1323,6 +1323,7 @@ impl<E: StorageEngineLike> QueryHandle<E> {
 mod tests {
     use std::sync::Arc;
 
+    use insta::assert_debug_snapshot;
     use re_chunk::{
         concat_record_batches::concatenate_record_batches, Chunk, ChunkId, RowId, TimePoint,
         TransportChunk,
@@ -1398,20 +1399,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-                "\
-                [
-                    Int64[None],
-                    Timestamp(Nanosecond, None)[None],
-                    ListArray[None],
-                    ListArray[[c]],
-                    ListArray[None],
-                ]\
-                ",
-            );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         // temporal
@@ -1433,20 +1421,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-                "\
-                [
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    Timestamp(Nanosecond, None)[1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000050, None, 1970-01-01 00:00:00.000000070],
-                    ListArray[None, None, [2], [3], [4], None, [6]],
-                    ListArray[[c], [c], [c], [c], [c], [c], [c]],
-                    ListArray[[{x: 0, y: 0}], [{x: 1, y: 1}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [{x: 5, y: 5}], [{x: 8, y: 8}]],
-                ]\
-                "
-            );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         Ok(())
@@ -1480,20 +1455,7 @@ mod tests {
         )?;
         eprintln!("{dataframe}");
 
-        let got = format!("{:#?}", dataframe.all_columns_collected());
-        let expected = unindent::unindent(
-            "\
-            [
-                Int64[10, 20, 30, 40, 50, 60, 70],
-                Timestamp(Nanosecond, None)[1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000050, None, 1970-01-01 00:00:00.000000070],
-                ListArray[None, None, [2], [3], [4], [4], [6]],
-                ListArray[[c], [c], [c], [c], [c], [c], [c]],
-                ListArray[[{x: 0, y: 0}], [{x: 1, y: 1}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [{x: 5, y: 5}], [{x: 8, y: 8}]],
-            ]\
-            "
-        );
-
-        similar_asserts::assert_eq!(expected, got);
+        assert_debug_snapshot!(dataframe.columns());
 
         Ok(())
     }
@@ -1526,20 +1488,7 @@ mod tests {
         )?;
         eprintln!("{dataframe}");
 
-        let got = format!("{:#?}", dataframe.all_columns_collected());
-        let expected = unindent::unindent(
-            "\
-            [
-                Int64[30, 40, 50, 60],
-                Timestamp(Nanosecond, None)[None, None, 1970-01-01 00:00:00.000000050, None],
-                ListArray[[2], [3], [4], None],
-                ListArray[[c], [c], [c], [c]],
-                ListArray[[{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [{x: 5, y: 5}]],
-            ]\
-            ",
-        );
-
-        similar_asserts::assert_eq!(expected, got);
+        assert_debug_snapshot!(dataframe.columns());
 
         Ok(())
     }
@@ -1578,20 +1527,7 @@ mod tests {
         )?;
         eprintln!("{dataframe}");
 
-        let got = format!("{:#?}", dataframe.all_columns_collected());
-        let expected = unindent::unindent(
-            "\
-            [
-                Int64[30, 60],
-                Timestamp(Nanosecond, None)[None, None],
-                ListArray[[2], None],
-                ListArray[[c], [c]],
-                ListArray[[{x: 2, y: 2}], [{x: 5, y: 5}]],
-            ]\
-            ",
-        );
-
-        similar_asserts::assert_eq!(expected, got);
+        assert_debug_snapshot!(dataframe.columns());
 
         Ok(())
     }
@@ -1633,20 +1569,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-                "\
-                [
-                    Int64[0, 15, 30, 45, 60, 75, 90],
-                    Timestamp(Nanosecond, None)[None, None, None, None, None, None, None],
-                    ListArray[None, None, [2], None, None, None, None],
-                    ListArray[[c], [c], [c], [c], [c], [c], [c]],
-                    ListArray[None, None, [{x: 2, y: 2}], None, [{x: 5, y: 5}], None, None],
-                ]\
-                ",
-            );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         // sparse-filled
@@ -1676,20 +1599,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-                "\
-                [
-                    Int64[0, 15, 30, 45, 60, 75, 90],
-                    Timestamp(Nanosecond, None)[None, 1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000070, 1970-01-01 00:00:00.000000070],
-                    ListArray[None, None, [2], [3], [4], [6], [6]],
-                    ListArray[[c], [c], [c], [c], [c], [c], [c]],
-                    ListArray[None, [{x: 0, y: 0}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 5, y: 5}], [{x: 8, y: 8}], [{x: 8, y: 8}]],
-                ]\
-                ",
-            );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         Ok(())
@@ -1730,10 +1640,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = "[]";
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         // non-existing component
@@ -1759,10 +1666,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = "[]";
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         // MyPoint
@@ -1788,20 +1692,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-                "\
-                [
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    Timestamp(Nanosecond, None)[1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000050, None, 1970-01-01 00:00:00.000000070],
-                    ListArray[None, None, [2], [3], [4], None, [6]],
-                    ListArray[[c], [c], [c], [c], [c], [c], [c]],
-                    ListArray[[{x: 0, y: 0}], [{x: 1, y: 1}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [{x: 5, y: 5}], [{x: 8, y: 8}]],
-                ]\
-                "
-            );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         // MyColor
@@ -1827,20 +1718,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-                "\
-                [
-                    Int64[30, 40, 50, 70],
-                    Timestamp(Nanosecond, None)[None, None, 1970-01-01 00:00:00.000000050, 1970-01-01 00:00:00.000000070],
-                    ListArray[[2], [3], [4], [6]],
-                    ListArray[[c], [c], [c], [c]],
-                    ListArray[[{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [{x: 8, y: 8}]],
-                ]\
-                ",
-            );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         Ok(())
@@ -1882,10 +1760,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = "[]";
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         {
@@ -1922,19 +1797,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-                "\
-                [
-                    Int64[30, 40, 50, 70],
-                    Timestamp(Nanosecond, None)[None, None, None, None],
-                    ListArray[[2], [3], [4], [6]],
-                    ListArray[[c], [c], [c], [c]],
-                ]\
-                ",
-            );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         Ok(())
@@ -1972,10 +1835,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = "[]";
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         // only indices (+ duplication)
@@ -2008,18 +1868,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-                "\
-                [
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    NullArray(7),
-                ]\
-                ",
-            );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         // only components (+ duplication)
@@ -2059,19 +1908,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-                "\
-                [
-                    ListArray[None, None, [2], [3], [4], None, [6]],
-                    ListArray[None, None, [2], [3], [4], None, [6]],
-                    NullArray(7),
-                    NullArray(7),
-                ]\
-                ",
-            );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         // static
@@ -2132,26 +1969,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-                "\
-                [
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    Int64[10, 20, 30, 40, 50, 60, 70],
-                    ListArray[[c], [c], [c], [c], [c], [c], [c]],
-                ]\
-                ",
-            );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         Ok(())
@@ -2220,21 +2038,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-                "\
-                [
-                    Int64[30, 40, 50, 70],
-                    Timestamp(Nanosecond, None)[None, None, None, None],
-                    NullArray(4),
-                    NullArray(4),
-                    ListArray[[2], [3], [4], [6]],
-                    ListArray[[c], [c], [c], [c]],
-                ]\
-                ",
-            );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         Ok(())
@@ -2274,20 +2078,7 @@ mod tests {
             )?;
             eprintln!("{dataframe}");
 
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-            "\
-            [
-                Int64[10, 20, 30, 40, 50, 60, 65, 70],
-                Timestamp(Nanosecond, None)[1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000050, 1970-01-01 00:00:00.000000060, 1970-01-01 00:00:00.000000065, 1970-01-01 00:00:00.000000070],
-                ListArray[None, None, [2], [3], [4], [], [], [6]],
-                ListArray[[c], [c], [c], [c], [c], [c], [c], [c]],
-                ListArray[[{x: 0, y: 0}], [{x: 1, y: 1}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [], [], [{x: 8, y: 8}]],
-            ]\
-            "
-        );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         // sparse-filled
@@ -2315,20 +2106,7 @@ mod tests {
             // static clear semantics in general are pretty unhinged right now, especially when
             // ranges are involved.
             // It's extremely niche, our time is better spent somewhere else right now.
-            let got = format!("{:#?}", dataframe.all_columns_collected());
-            let expected = unindent::unindent(
-            "\
-            [
-                Int64[10, 20, 30, 40, 50, 60, 65, 70],
-                Timestamp(Nanosecond, None)[1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000050, 1970-01-01 00:00:00.000000060, 1970-01-01 00:00:00.000000065, 1970-01-01 00:00:00.000000070],
-                ListArray[None, None, [2], [3], [4], [], [], [6]],
-                ListArray[[c], [c], [c], [c], [c], [c], [c], [c]],
-                ListArray[[{x: 0, y: 0}], [{x: 1, y: 1}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [], [], [{x: 8, y: 8}]],
-            ]\
-            "
-        );
-
-            similar_asserts::assert_eq!(expected, got);
+            assert_debug_snapshot!(dataframe.columns());
         }
 
         Ok(())
@@ -2375,8 +2153,8 @@ mod tests {
                         &query_handle.batch_iter().take(3).collect_vec(),
                     )?;
 
-                    let expected = format!("{:#?}", expected.all_columns_collected());
-                    let got = format!("{:#?}", got.all_columns_collected());
+                    let expected = format!("{:#?}", expected.columns());
+                    let got = format!("{:#?}", got.columns());
 
                     similar_asserts::assert_eq!(expected, got);
                 }
@@ -2416,8 +2194,8 @@ mod tests {
                         &query_handle.batch_iter().take(3).collect_vec(),
                     )?;
 
-                    let expected = format!("{:#?}", expected.all_columns_collected());
-                    let got = format!("{:#?}", got.all_columns_collected());
+                    let expected = format!("{:#?}", expected.columns());
+                    let got = format!("{:#?}", got.columns());
 
                     similar_asserts::assert_eq!(expected, got);
                 }
@@ -2460,8 +2238,8 @@ mod tests {
                         &query_handle.batch_iter().take(3).collect_vec(),
                     )?;
 
-                    let expected = format!("{:#?}", expected.all_columns_collected());
-                    let got = format!("{:#?}", got.all_columns_collected());
+                    let expected = format!("{:#?}", expected.columns());
+                    let got = format!("{:#?}", got.columns());
 
                     similar_asserts::assert_eq!(expected, got);
                 }
@@ -2498,8 +2276,8 @@ mod tests {
                         &query_handle.batch_iter().take(3).collect_vec(),
                     )?;
 
-                    let expected = format!("{:#?}", expected.all_columns_collected());
-                    let got = format!("{:#?}", got.all_columns_collected());
+                    let expected = format!("{:#?}", expected.columns());
+                    let got = format!("{:#?}", got.columns());
 
                     similar_asserts::assert_eq!(expected, got);
                 }
@@ -2566,20 +2344,7 @@ mod tests {
                 )?;
                 eprintln!("{dataframe}");
 
-                let got = format!("{:#?}", dataframe.all_columns_collected());
-                let expected = unindent::unindent(
-                    "\
-                    [
-                        Int64[None],
-                        Timestamp(Nanosecond, None)[None],
-                        ListArray[None],
-                        ListArray[[c]],
-                        ListArray[None],
-                    ]\
-                    ",
-                );
-
-                similar_asserts::assert_eq!(expected, got);
+                assert_debug_snapshot!("async_barebones_static", dataframe.columns());
 
                 Ok::<_, anyhow::Error>(())
             }
@@ -2610,20 +2375,7 @@ mod tests {
                 )?;
                 eprintln!("{dataframe}");
 
-                let got = format!("{:#?}", dataframe.all_columns_collected());
-                let expected = unindent::unindent(
-                    "\
-                    [
-                        Int64[10, 20, 30, 40, 50, 60, 70],
-                        Timestamp(Nanosecond, None)[1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000050, None, 1970-01-01 00:00:00.000000070],
-                        ListArray[None, None, [2], [3], [4], None, [6]],
-                        ListArray[[c], [c], [c], [c], [c], [c], [c]],
-                        ListArray[[{x: 0, y: 0}], [{x: 1, y: 1}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [{x: 5, y: 5}], [{x: 8, y: 8}]],
-                    ]\
-                    "
-                );
-
-                similar_asserts::assert_eq!(expected, got);
+                assert_debug_snapshot!("async_barebones_temporal", dataframe.columns());
 
                 Ok::<_, anyhow::Error>(())
             }
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__async_barebones_static.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__async_barebones_static.snap
new file mode 100644
index 000000000000..ec3f6001de19
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__async_barebones_static.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 2347
+expression: dataframe.columns()
+snapshot_kind: text
+---
+[
+    Int64[None],
+    Timestamp(Nanosecond, None)[None],
+    ListArray[None],
+    ListArray[[c]],
+    ListArray[None],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__async_barebones_temporal.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__async_barebones_temporal.snap
new file mode 100644
index 000000000000..a037d330cd2c
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__async_barebones_temporal.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 2378
+expression: dataframe.columns()
+snapshot_kind: text
+---
+[
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Timestamp(Nanosecond, None)[1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000050, None, 1970-01-01 00:00:00.000000070],
+    ListArray[None, None, [2], [3], [4], None, [6]],
+    ListArray[[c], [c], [c], [c], [c], [c], [c]],
+    ListArray[[{x: 0, y: 0}], [{x: 1, y: 1}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [{x: 5, y: 5}], [{x: 8, y: 8}]],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__barebones-2.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__barebones-2.snap
new file mode 100644
index 000000000000..f143af9dbdba
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__barebones-2.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1424
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Timestamp(Nanosecond, None)[1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000050, None, 1970-01-01 00:00:00.000000070],
+    ListArray[None, None, [2], [3], [4], None, [6]],
+    ListArray[[c], [c], [c], [c], [c], [c], [c]],
+    ListArray[[{x: 0, y: 0}], [{x: 1, y: 1}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [{x: 5, y: 5}], [{x: 8, y: 8}]],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__barebones.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__barebones.snap
new file mode 100644
index 000000000000..f3b4325d5236
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__barebones.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1402
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[None],
+    Timestamp(Nanosecond, None)[None],
+    ListArray[None],
+    ListArray[[c]],
+    ListArray[None],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__clears-2.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__clears-2.snap
new file mode 100644
index 000000000000..afc62129c581
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__clears-2.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 2109
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[10, 20, 30, 40, 50, 60, 65, 70],
+    Timestamp(Nanosecond, None)[1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000050, 1970-01-01 00:00:00.000000060, 1970-01-01 00:00:00.000000065, 1970-01-01 00:00:00.000000070],
+    ListArray[None, None, [2], [3], [4], [], [], [6]],
+    ListArray[[c], [c], [c], [c], [c], [c], [c], [c]],
+    ListArray[[{x: 0, y: 0}], [{x: 1, y: 1}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [], [], [{x: 8, y: 8}]],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__clears.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__clears.snap
new file mode 100644
index 000000000000..bffc10269340
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__clears.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 2081
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[10, 20, 30, 40, 50, 60, 65, 70],
+    Timestamp(Nanosecond, None)[1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000050, 1970-01-01 00:00:00.000000060, 1970-01-01 00:00:00.000000065, 1970-01-01 00:00:00.000000070],
+    ListArray[None, None, [2], [3], [4], [], [], [6]],
+    ListArray[[c], [c], [c], [c], [c], [c], [c], [c]],
+    ListArray[[{x: 0, y: 0}], [{x: 1, y: 1}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [], [], [{x: 8, y: 8}]],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_index_range.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_index_range.snap
new file mode 100644
index 000000000000..08ebd00df026
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_index_range.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1491
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[30, 40, 50, 60],
+    Timestamp(Nanosecond, None)[None, None, 1970-01-01 00:00:00.000000050, None],
+    ListArray[[2], [3], [4], None],
+    ListArray[[c], [c], [c], [c]],
+    ListArray[[{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [{x: 5, y: 5}]],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_index_values.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_index_values.snap
new file mode 100644
index 000000000000..e537db4b0d30
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_index_values.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1530
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[30, 60],
+    Timestamp(Nanosecond, None)[None, None],
+    ListArray[[2], None],
+    ListArray[[c], [c]],
+    ListArray[[{x: 2, y: 2}], [{x: 5, y: 5}]],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null-2.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null-2.snap
new file mode 100644
index 000000000000..7ddef2114658
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null-2.snap
@@ -0,0 +1,7 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1669
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null-3.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null-3.snap
new file mode 100644
index 000000000000..672f02d24438
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null-3.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1695
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Timestamp(Nanosecond, None)[1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000050, None, 1970-01-01 00:00:00.000000070],
+    ListArray[None, None, [2], [3], [4], None, [6]],
+    ListArray[[c], [c], [c], [c], [c], [c], [c]],
+    ListArray[[{x: 0, y: 0}], [{x: 1, y: 1}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [{x: 5, y: 5}], [{x: 8, y: 8}]],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null-4.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null-4.snap
new file mode 100644
index 000000000000..e913daa93261
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null-4.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1721
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[30, 40, 50, 70],
+    Timestamp(Nanosecond, None)[None, None, 1970-01-01 00:00:00.000000050, 1970-01-01 00:00:00.000000070],
+    ListArray[[2], [3], [4], [6]],
+    ListArray[[c], [c], [c], [c]],
+    ListArray[[{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [{x: 8, y: 8}]],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null.snap
new file mode 100644
index 000000000000..f18b0ee5e796
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__filtered_is_not_null.snap
@@ -0,0 +1,7 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1643
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection-2.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection-2.snap
new file mode 100644
index 000000000000..821038ac14e8
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection-2.snap
@@ -0,0 +1,11 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1871
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    NullArray(7),
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection-3.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection-3.snap
new file mode 100644
index 000000000000..d78191b475bf
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection-3.snap
@@ -0,0 +1,12 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1911
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    ListArray[None, None, [2], [3], [4], None, [6]],
+    ListArray[None, None, [2], [3], [4], None, [6]],
+    NullArray(7),
+    NullArray(7),
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection-4.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection-4.snap
new file mode 100644
index 000000000000..ab708132a72b
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection-4.snap
@@ -0,0 +1,19 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1972
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    ListArray[[c], [c], [c], [c], [c], [c], [c]],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection.snap
new file mode 100644
index 000000000000..af25dece2e43
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__selection.snap
@@ -0,0 +1,7 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1838
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__sparse_fill_strategy_latestatglobal.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__sparse_fill_strategy_latestatglobal.snap
new file mode 100644
index 000000000000..a2063e9472af
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__sparse_fill_strategy_latestatglobal.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1458
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[10, 20, 30, 40, 50, 60, 70],
+    Timestamp(Nanosecond, None)[1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000050, None, 1970-01-01 00:00:00.000000070],
+    ListArray[None, None, [2], [3], [4], [4], [6]],
+    ListArray[[c], [c], [c], [c], [c], [c], [c]],
+    ListArray[[{x: 0, y: 0}], [{x: 1, y: 1}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 4, y: 4}], [{x: 5, y: 5}], [{x: 8, y: 8}]],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__using_index_values-2.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__using_index_values-2.snap
new file mode 100644
index 000000000000..7329e2113b69
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__using_index_values-2.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1602
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[0, 15, 30, 45, 60, 75, 90],
+    Timestamp(Nanosecond, None)[None, 1970-01-01 00:00:00.000000010, None, None, None, 1970-01-01 00:00:00.000000070, 1970-01-01 00:00:00.000000070],
+    ListArray[None, None, [2], [3], [4], [6], [6]],
+    ListArray[[c], [c], [c], [c], [c], [c], [c]],
+    ListArray[None, [{x: 0, y: 0}], [{x: 2, y: 2}], [{x: 3, y: 3}], [{x: 5, y: 5}], [{x: 8, y: 8}], [{x: 8, y: 8}]],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__using_index_values.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__using_index_values.snap
new file mode 100644
index 000000000000..357656d2c29c
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__using_index_values.snap
@@ -0,0 +1,13 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1572
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[0, 15, 30, 45, 60, 75, 90],
+    Timestamp(Nanosecond, None)[None, None, None, None, None, None, None],
+    ListArray[None, None, [2], None, None, None, None],
+    ListArray[[c], [c], [c], [c], [c], [c], [c]],
+    ListArray[None, None, [{x: 2, y: 2}], None, [{x: 5, y: 5}], None, None],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__view_contents-2.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__view_contents-2.snap
new file mode 100644
index 000000000000..340c56ae733b
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__view_contents-2.snap
@@ -0,0 +1,12 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1800
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[30, 40, 50, 70],
+    Timestamp(Nanosecond, None)[None, None, None, None],
+    ListArray[[2], [3], [4], [6]],
+    ListArray[[c], [c], [c], [c]],
+]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__view_contents.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__view_contents.snap
new file mode 100644
index 000000000000..c43c3d2929cc
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__view_contents.snap
@@ -0,0 +1,7 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 1763
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[]
diff --git a/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__view_contents_and_selection.snap b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__view_contents_and_selection.snap
new file mode 100644
index 000000000000..5b5a196fff87
--- /dev/null
+++ b/crates/store/re_dataframe/src/snapshots/re_dataframe__query__tests__view_contents_and_selection.snap
@@ -0,0 +1,14 @@
+---
+source: crates/store/re_dataframe/src/query.rs
+assertion_line: 2041
+expression: dataframe.all_columns_collected()
+snapshot_kind: text
+---
+[
+    Int64[30, 40, 50, 70],
+    Timestamp(Nanosecond, None)[None, None, None, None],
+    NullArray(4),
+    NullArray(4),
+    ListArray[[2], [3], [4], [6]],
+    ListArray[[c], [c], [c], [c]],
+]
diff --git a/rerun_py/src/remote.rs b/rerun_py/src/remote.rs
index 61ff89875aa3..bc408ba72570 100644
--- a/rerun_py/src/remote.rs
+++ b/rerun_py/src/remote.rs
@@ -347,7 +347,7 @@ impl PyStorageNodeClient {
                 .map_err(|err| PyRuntimeError::new_err(err.to_string()))?;
 
             let recording_id = metadata
-                .all_columns()
+                .fields_and_columns()
                 .find(|(field, _data)| field.name() == "rerun_recording_id")
                 .map(|(_field, data)| data)
                 .ok_or(PyRuntimeError::new_err("No rerun_recording_id"))?

From 3ffdb3751c7999853291699c733a8486b581858e Mon Sep 17 00:00:00 2001
From: Emil Ernerfeldt <emil.ernerfeldt@gmail.com>
Date: Wed, 15 Jan 2025 16:47:30 +0100
Subject: [PATCH 3/4] Respect max width of formatter when formatting
 ChunkStore/Chunk (#8698)

Makes the test predictable, regardless of terminal width
---
 crates/store/re_chunk/src/transport.rs          |  1 +
 crates/store/re_chunk_store/src/store.rs        |  7 ++++++-
 crates/store/re_chunk_store/tests/formatting.rs |  2 +-
 .../formatting__format_chunk_store.snap         |  2 +-
 crates/store/re_format_arrow/src/lib.rs         | 17 ++++++++++++++++-
 5 files changed, 25 insertions(+), 4 deletions(-)

diff --git a/crates/store/re_chunk/src/transport.rs b/crates/store/re_chunk/src/transport.rs
index 0b5a4fe7d3e8..d157c8b52f98 100644
--- a/crates/store/re_chunk/src/transport.rs
+++ b/crates/store/re_chunk/src/transport.rs
@@ -63,6 +63,7 @@ impl std::fmt::Display for TransportChunk {
                 .iter()
                 .map(|list_array| ArrowArrayRef::from(list_array.clone()))
                 .collect_vec(),
+            f.width(),
         )
         .fmt(f)
     }
diff --git a/crates/store/re_chunk_store/src/store.rs b/crates/store/re_chunk_store/src/store.rs
index bb2facfdafec..a5afbfa9f7a9 100644
--- a/crates/store/re_chunk_store/src/store.rs
+++ b/crates/store/re_chunk_store/src/store.rs
@@ -530,7 +530,12 @@ impl std::fmt::Display for ChunkStore {
         f.write_str(&indent::indent_all_by(4, "chunks: [\n"))?;
         for chunk_id in chunk_id_per_min_row_id.values().flatten() {
             if let Some(chunk) = chunks_per_chunk_id.get(chunk_id) {
-                f.write_str(&indent::indent_all_by(8, format!("{chunk}\n")))?;
+                if let Some(width) = f.width() {
+                    let chunk_width = width.saturating_sub(8);
+                    f.write_str(&indent::indent_all_by(8, format!("{chunk:chunk_width$}\n")))?;
+                } else {
+                    f.write_str(&indent::indent_all_by(8, format!("{chunk}\n")))?;
+                }
             } else {
                 f.write_str(&indent::indent_all_by(8, "<not_found>\n"))?;
             }
diff --git a/crates/store/re_chunk_store/tests/formatting.rs b/crates/store/re_chunk_store/tests/formatting.rs
index e71fbffdbd1c..dc61ff3fae16 100644
--- a/crates/store/re_chunk_store/tests/formatting.rs
+++ b/crates/store/re_chunk_store/tests/formatting.rs
@@ -42,7 +42,7 @@ fn format_chunk_store() -> anyhow::Result<()> {
             .build()?,
     ))?;
 
-    insta::assert_snapshot!("format_chunk_store", store.to_string());
+    insta::assert_snapshot!("format_chunk_store", format!("{:200}", store));
 
     Ok(())
 }
diff --git a/crates/store/re_chunk_store/tests/snapshots/formatting__format_chunk_store.snap b/crates/store/re_chunk_store/tests/snapshots/formatting__format_chunk_store.snap
index af14c5c661cc..90d7aabfa44b 100644
--- a/crates/store/re_chunk_store/tests/snapshots/formatting__format_chunk_store.snap
+++ b/crates/store/re_chunk_store/tests/snapshots/formatting__format_chunk_store.snap
@@ -1,7 +1,7 @@
 ---
 source: crates/store/re_chunk_store/tests/formatting.rs
 assertion_line: 45
-expression: store.to_string()
+expression: "format!(\"{:200}\", store)"
 snapshot_kind: text
 ---
 ChunkStore {
diff --git a/crates/store/re_format_arrow/src/lib.rs b/crates/store/re_format_arrow/src/lib.rs
index 28af79ae8be5..a796f6d91e5a 100644
--- a/crates/store/re_format_arrow/src/lib.rs
+++ b/crates/store/re_format_arrow/src/lib.rs
@@ -212,7 +212,12 @@ fn trim_name(name: &str) -> &str {
         .trim_start_matches("rerun.")
 }
 
-pub fn format_dataframe(metadata: &Metadata, fields: &Fields, columns: &[ArrayRef]) -> Table {
+pub fn format_dataframe(
+    metadata: &Metadata,
+    fields: &Fields,
+    columns: &[ArrayRef],
+    width: Option<usize>,
+) -> Table {
     const MAXIMUM_CELL_CONTENT_WIDTH: u16 = 100;
 
     let mut outer_table = Table::new();
@@ -221,6 +226,16 @@ pub fn format_dataframe(metadata: &Metadata, fields: &Fields, columns: &[ArrayRe
     let mut table = Table::new();
     table.load_preset(presets::UTF8_FULL);
 
+    if let Some(width) = width {
+        outer_table.set_width(width as _);
+        outer_table.set_content_arrangement(comfy_table::ContentArrangement::Disabled);
+        table.set_width(width as _);
+        table.set_content_arrangement(comfy_table::ContentArrangement::Disabled);
+    } else {
+        outer_table.set_content_arrangement(comfy_table::ContentArrangement::Dynamic);
+        table.set_content_arrangement(comfy_table::ContentArrangement::Dynamic);
+    }
+
     outer_table.add_row({
         let mut row = Row::new();
         row.add_cell(Cell::new(format!(

From d0a7d1fdf96c022c7d1f02735a86b046d96cf2f8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jan=20Proch=C3=A1zka?= <honza.spacir@gmail.com>
Date: Wed, 15 Jan 2025 16:49:44 +0100
Subject: [PATCH 4/4] Add JS timeline control and callback APIs (#8673)

---
 .vscode/settings.json                         |   6 +-
 crates/viewer/re_viewer/src/app.rs            |  63 +++-
 crates/viewer/re_viewer/src/app_state.rs      |  17 +-
 crates/viewer/re_viewer/src/web.rs            | 287 ++++++++++++++++++
 crates/viewer/re_viewer/src/web_tools.rs      |  14 +-
 crates/viewer/re_viewer_context/src/lib.rs    |   2 +-
 .../re_viewer_context/src/time_control.rs     | 181 ++++++++---
 rerun_js/package.json                         |   3 +-
 rerun_js/web-viewer/index.ts                  | 182 ++++++++++-
 9 files changed, 701 insertions(+), 54 deletions(-)

diff --git a/.vscode/settings.json b/.vscode/settings.json
index f92aa692cba5..65fbf4caa119 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -46,11 +46,7 @@
     // Uncomment the following options and restart rust-analyzer to get it to check code behind `cfg(target_arch=wasm32)`.
     // Don't forget to put it in a comment again before committing.
     // "rust-analyzer.cargo.target": "wasm32-unknown-unknown",
-    // "rust-analyzer.cargo.cfgs": {
-    //     "web": null,
-    //     "webgl": null,
-    //     "webgpu": null,
-    // },
+    // "rust-analyzer.cargo.cfgs": ["web","webgl","webgpu"],
 
     "C_Cpp.default.configurationProvider": "ms-vscode.cmake-tools", // Use cmake-tools to grab configs.
     "C_Cpp.autoAddFileAssociations": false,
diff --git a/crates/viewer/re_viewer/src/app.rs b/crates/viewer/re_viewer/src/app.rs
index 678a21100709..17d0546e1624 100644
--- a/crates/viewer/re_viewer/src/app.rs
+++ b/crates/viewer/re_viewer/src/app.rs
@@ -82,6 +82,13 @@ pub struct StartupOptions {
     /// This also can be changed in the viewer's option menu.
     pub video_decoder_hw_acceleration: Option<re_video::decode::DecodeHardwareAcceleration>,
 
+    /// Interaction between JS and timeline.
+    ///
+    /// This field isn't used directly, but is propagated to all recording configs
+    /// when they are created.
+    #[cfg(target_arch = "wasm32")]
+    pub timeline_options: Option<crate::web::TimelineOptions>,
+
     /// Fullscreen is handled by JS on web.
     ///
     /// This holds some callbacks which we use to communicate
@@ -131,6 +138,9 @@ impl Default for StartupOptions {
             force_wgpu_backend: None,
             video_decoder_hw_acceleration: None,
 
+            #[cfg(target_arch = "wasm32")]
+            timeline_options: Default::default(),
+
             #[cfg(target_arch = "wasm32")]
             fullscreen_options: Default::default(),
 
@@ -220,6 +230,12 @@ pub struct App {
     pub(crate) panel_state_overrides: PanelStateOverrides,
 
     reflection: re_types_core::reflection::Reflection,
+
+    /// Interaction between JS and timeline.
+    ///
+    /// This field isn't used directly, but is propagated to all recording configs
+    /// when they are created.
+    pub timeline_callbacks: Option<re_viewer_context::TimelineCallbacks>,
 }
 
 impl App {
@@ -325,6 +341,46 @@ impl App {
             Default::default()
         });
 
+        #[cfg(target_arch = "wasm32")]
+        let timeline_callbacks = {
+            use crate::web_tools::string_from_js_value;
+            use std::rc::Rc;
+            use wasm_bindgen::JsValue;
+
+            startup_options.timeline_options.clone().map(|opts| {
+                re_viewer_context::TimelineCallbacks {
+                    on_timelinechange: Rc::new(move |timeline, time| {
+                        if let Err(err) = opts.on_timelinechange.call2(
+                            &JsValue::from_str(timeline.name().as_str()),
+                            &JsValue::from_f64(time.as_f64()),
+                        ) {
+                            re_log::error!("{}", string_from_js_value(err));
+                        };
+                    }),
+                    on_timeupdate: Rc::new(move |time| {
+                        if let Err(err) =
+                            opts.on_timeupdate.call1(&JsValue::from_f64(time.as_f64()))
+                        {
+                            re_log::error!("{}", string_from_js_value(err));
+                        }
+                    }),
+                    on_play: Rc::new(move || {
+                        if let Err(err) = opts.on_play.call0() {
+                            re_log::error!("{}", string_from_js_value(err));
+                        }
+                    }),
+                    on_pause: Rc::new(move || {
+                        if let Err(err) = opts.on_pause.call0() {
+                            re_log::error!("{}", string_from_js_value(err));
+                        }
+                    }),
+                }
+            })
+        };
+
+        #[cfg(not(target_arch = "wasm32"))]
+        let timeline_callbacks = None;
+
         Self {
             main_thread_token,
             build_info,
@@ -374,6 +430,8 @@ impl App {
             panel_state_overrides,
 
             reflection,
+
+            timeline_callbacks,
         }
     }
 
@@ -1126,6 +1184,7 @@ impl App {
                                 opacity: self.welcome_screen_opacity(egui_ctx),
                             },
                             is_history_enabled,
+                            self.timeline_callbacks.as_ref(),
                         );
                         render_ctx.before_submit();
                     }
@@ -1573,7 +1632,7 @@ impl App {
         {
             if let Some(options) = &self.startup_options.fullscreen_options {
                 // Tell JS to toggle fullscreen.
-                if let Err(err) = options.on_toggle.call() {
+                if let Err(err) = options.on_toggle.call0() {
                     re_log::error!("{}", crate::web_tools::string_from_js_value(err));
                 };
             }
@@ -1589,7 +1648,7 @@ impl App {
     pub(crate) fn is_fullscreen_mode(&self) -> bool {
         if let Some(options) = &self.startup_options.fullscreen_options {
             // Ask JS if fullscreen is on or not.
-            match options.get_state.call() {
+            match options.get_state.call0() {
                 Ok(v) => return v.is_truthy(),
                 Err(err) => re_log::error_once!("{}", crate::web_tools::string_from_js_value(err)),
             }
diff --git a/crates/viewer/re_viewer/src/app_state.rs b/crates/viewer/re_viewer/src/app_state.rs
index a29e202065d2..b2dcbb77dd60 100644
--- a/crates/viewer/re_viewer/src/app_state.rs
+++ b/crates/viewer/re_viewer/src/app_state.rs
@@ -151,6 +151,7 @@ impl AppState {
         command_sender: &CommandSender,
         welcome_screen_state: &WelcomeScreenState,
         is_history_enabled: bool,
+        timeline_callbacks: Option<&re_viewer_context::TimelineCallbacks>,
     ) {
         re_tracing::profile_function!();
 
@@ -291,7 +292,7 @@ impl AppState {
 
         // We move the time at the very start of the frame,
         // so that we always show the latest data when we're in "follow" mode.
-        move_time(&ctx, recording, rx);
+        move_time(&ctx, recording, rx, timeline_callbacks);
 
         // Update the viewport. May spawn new views and handle queued requests (like screenshots).
         viewport_ui.on_frame_start(&ctx);
@@ -546,6 +547,11 @@ impl AppState {
         *focused_item = None;
     }
 
+    #[cfg(target_arch = "wasm32")] // Only used in Wasm
+    pub fn recording_config(&self, rec_id: &StoreId) -> Option<&RecordingConfig> {
+        self.recording_configs.get(rec_id)
+    }
+
     pub fn recording_config_mut(&mut self, rec_id: &StoreId) -> Option<&mut RecordingConfig> {
         self.recording_configs.get_mut(rec_id)
     }
@@ -584,7 +590,12 @@ impl AppState {
     }
 }
 
-fn move_time(ctx: &ViewerContext<'_>, recording: &EntityDb, rx: &ReceiveSet<LogMsg>) {
+fn move_time(
+    ctx: &ViewerContext<'_>,
+    recording: &EntityDb,
+    rx: &ReceiveSet<LogMsg>,
+    timeline_callbacks: Option<&re_viewer_context::TimelineCallbacks>,
+) {
     let dt = ctx.egui_ctx.input(|i| i.stable_dt);
 
     // Are we still connected to the data source for the current store?
@@ -598,6 +609,7 @@ fn move_time(ctx: &ViewerContext<'_>, recording: &EntityDb, rx: &ReceiveSet<LogM
         recording.times_per_timeline(),
         dt,
         more_data_is_coming,
+        timeline_callbacks,
     );
 
     let blueprint_needs_repaint = if ctx.app_options.inspect_blueprint_timeline {
@@ -605,6 +617,7 @@ fn move_time(ctx: &ViewerContext<'_>, recording: &EntityDb, rx: &ReceiveSet<LogM
             ctx.store_context.blueprint.times_per_timeline(),
             dt,
             more_data_is_coming,
+            None,
         )
     } else {
         re_viewer_context::NeedsRepaint::No
diff --git a/crates/viewer/re_viewer/src/web.rs b/crates/viewer/re_viewer/src/web.rs
index 00b9cc675ddf..fc34f5b8254e 100644
--- a/crates/viewer/re_viewer/src/web.rs
+++ b/crates/viewer/re_viewer/src/web.rs
@@ -279,6 +279,272 @@ impl WebHandle {
             );
         }
     }
+
+    #[wasm_bindgen]
+    pub fn get_active_recording_id(&self) -> Option<String> {
+        let app = self.runner.app_mut::<crate::App>()?;
+        let hub = app.store_hub.as_ref()?;
+        let recording = hub.active_recording()?;
+
+        Some(recording.store_id().to_string())
+    }
+
+    #[wasm_bindgen]
+    pub fn set_active_recording_id(&self, store_id: &str) {
+        let Some(mut app) = self.runner.app_mut::<crate::App>() else {
+            return;
+        };
+
+        let Some(hub) = app.store_hub.as_mut() else {
+            return;
+        };
+        let store_id = re_log_types::StoreId::from_string(
+            re_log_types::StoreKind::Recording,
+            store_id.to_owned(),
+        );
+        if !hub.store_bundle().contains(&store_id) {
+            return;
+        };
+
+        hub.set_activate_recording(store_id);
+
+        app.egui_ctx.request_repaint();
+    }
+
+    #[wasm_bindgen]
+    pub fn get_active_timeline(&self, store_id: &str) -> Option<String> {
+        let mut app = self.runner.app_mut::<crate::App>()?;
+        let crate::App {
+            store_hub: Some(ref hub),
+            state,
+            ..
+        } = &mut *app
+        else {
+            return None;
+        };
+
+        let store_id = re_log_types::StoreId::from_string(
+            re_log_types::StoreKind::Recording,
+            store_id.to_owned(),
+        );
+        if !hub.store_bundle().contains(&store_id) {
+            return None;
+        };
+
+        let rec_cfg = state.recording_config_mut(&store_id)?;
+        let time_ctrl = rec_cfg.time_ctrl.read();
+        Some(time_ctrl.timeline().name().as_str().to_owned())
+    }
+
+    /// Set the active timeline.
+    ///
+    /// This does nothing if the timeline can't be found.
+    #[wasm_bindgen]
+    pub fn set_active_timeline(&self, store_id: &str, timeline: &str) {
+        let Some(mut app) = self.runner.app_mut::<crate::App>() else {
+            return;
+        };
+        let crate::App {
+            store_hub: Some(ref hub),
+            state,
+            egui_ctx,
+            ..
+        } = &mut *app
+        else {
+            return;
+        };
+
+        let store_id = re_log_types::StoreId::from_string(
+            re_log_types::StoreKind::Recording,
+            store_id.to_owned(),
+        );
+        let Some(recording) = hub.store_bundle().get(&store_id) else {
+            return;
+        };
+        let Some(rec_cfg) = state.recording_config_mut(&store_id) else {
+            return;
+        };
+        let Some(timeline) = recording
+            .timelines()
+            .find(|t| t.name().as_str() == timeline)
+        else {
+            return;
+        };
+
+        rec_cfg.time_ctrl.write().set_timeline(*timeline);
+
+        egui_ctx.request_repaint();
+    }
+
+    #[wasm_bindgen]
+    pub fn get_time_for_timeline(&self, store_id: &str, timeline: &str) -> Option<f64> {
+        let app = self.runner.app_mut::<crate::App>()?;
+        let crate::App {
+            store_hub: Some(ref hub),
+            state,
+            ..
+        } = &*app
+        else {
+            return None;
+        };
+
+        let store_id = re_log_types::StoreId::from_string(
+            re_log_types::StoreKind::Recording,
+            store_id.to_owned(),
+        );
+        let recording = hub.store_bundle().get(&store_id)?;
+        let rec_cfg = state.recording_config(&store_id)?;
+        let timeline = recording
+            .timelines()
+            .find(|t| t.name().as_str() == timeline)?;
+
+        let time_ctrl = rec_cfg.time_ctrl.read();
+        time_ctrl.time_for_timeline(*timeline).map(|v| v.as_f64())
+    }
+
+    #[wasm_bindgen]
+    pub fn set_time_for_timeline(&self, store_id: &str, timeline: &str, time: f64) {
+        let Some(mut app) = self.runner.app_mut::<crate::App>() else {
+            return;
+        };
+        let crate::App {
+            store_hub: Some(ref hub),
+            state,
+            egui_ctx,
+            ..
+        } = &mut *app
+        else {
+            return;
+        };
+
+        let store_id = re_log_types::StoreId::from_string(
+            re_log_types::StoreKind::Recording,
+            store_id.to_owned(),
+        );
+        let Some(recording) = hub.store_bundle().get(&store_id) else {
+            return;
+        };
+        let Some(rec_cfg) = state.recording_config_mut(&store_id) else {
+            return;
+        };
+        let Some(timeline) = recording
+            .timelines()
+            .find(|t| t.name().as_str() == timeline)
+        else {
+            return;
+        };
+
+        rec_cfg
+            .time_ctrl
+            .write()
+            .set_timeline_and_time(*timeline, time);
+        egui_ctx.request_repaint();
+    }
+
+    #[wasm_bindgen]
+    pub fn get_timeline_time_range(&self, store_id: &str, timeline: &str) -> JsValue {
+        let Some(app) = self.runner.app_mut::<crate::App>() else {
+            return JsValue::null();
+        };
+        let crate::App {
+            store_hub: Some(ref hub),
+            ..
+        } = &*app
+        else {
+            return JsValue::null();
+        };
+
+        let store_id = re_log_types::StoreId::from_string(
+            re_log_types::StoreKind::Recording,
+            store_id.to_owned(),
+        );
+        let Some(recording) = hub.store_bundle().get(&store_id) else {
+            return JsValue::null();
+        };
+        let Some(timeline) = recording
+            .timelines()
+            .find(|t| t.name().as_str() == timeline)
+        else {
+            return JsValue::null();
+        };
+
+        let Some(time_range) = recording.time_range_for(timeline) else {
+            return JsValue::null();
+        };
+
+        let min = time_range.min().as_f64();
+        let max = time_range.max().as_f64();
+
+        let obj = js_sys::Object::new();
+        js_sys::Reflect::set(&obj, &"min".into(), &min.into()).ok_or_log_js_error();
+        js_sys::Reflect::set(&obj, &"max".into(), &max.into()).ok_or_log_js_error();
+
+        JsValue::from(obj)
+    }
+
+    #[wasm_bindgen]
+    pub fn get_playing(&self, store_id: &str) -> Option<bool> {
+        let app = self.runner.app_mut::<crate::App>()?;
+        let crate::App {
+            store_hub: Some(ref hub),
+            state,
+            ..
+        } = &*app
+        else {
+            return None;
+        };
+
+        let store_id = re_log_types::StoreId::from_string(
+            re_log_types::StoreKind::Recording,
+            store_id.to_owned(),
+        );
+        if !hub.store_bundle().contains(&store_id) {
+            return None;
+        };
+        let rec_cfg = state.recording_config(&store_id)?;
+
+        let time_ctrl = rec_cfg.time_ctrl.read();
+        Some(time_ctrl.play_state() == re_viewer_context::PlayState::Playing)
+    }
+
+    #[wasm_bindgen]
+    pub fn set_playing(&self, store_id: &str, value: bool) {
+        let Some(mut app) = self.runner.app_mut::<crate::App>() else {
+            return;
+        };
+        let crate::App {
+            store_hub,
+            state,
+            egui_ctx,
+            ..
+        } = &mut *app;
+
+        let Some(hub) = store_hub.as_ref() else {
+            return;
+        };
+        let store_id = re_log_types::StoreId::from_string(
+            re_log_types::StoreKind::Recording,
+            store_id.to_owned(),
+        );
+        let Some(recording) = hub.store_bundle().get(&store_id) else {
+            return;
+        };
+        let Some(rec_cfg) = state.recording_config_mut(&store_id) else {
+            return;
+        };
+
+        let play_state = if value {
+            re_viewer_context::PlayState::Playing
+        } else {
+            re_viewer_context::PlayState::Paused
+        };
+
+        rec_cfg
+            .time_ctrl
+            .write()
+            .set_play_state(recording.times_per_timeline(), play_state);
+        egui_ctx.request_repaint();
+    }
 }
 
 // TODO(jprochazk): figure out a way to auto-generate these types on JS side
@@ -321,6 +587,7 @@ pub struct AppOptions {
     video_decoder: Option<String>,
     hide_welcome_screen: Option<bool>,
     panel_state_overrides: Option<PanelStateOverrides>,
+    timeline: Option<TimelineOptions>,
     fullscreen: Option<FullscreenOptions>,
     enable_history: Option<bool>,
 
@@ -328,6 +595,24 @@ pub struct AppOptions {
     persist: Option<bool>,
 }
 
+// Keep in sync with the `TimelineOptions` interface in `rerun_js/web-viewer/index.ts`
+#[derive(Clone, Deserialize)]
+pub struct TimelineOptions {
+    /// Fired when the a different timeline is selected.
+    pub on_timelinechange: Callback,
+
+    /// Fired when the timepoint changes.
+    ///
+    /// Does not fire when `on_seek` is called.
+    pub on_timeupdate: Callback,
+
+    /// Fired when the timeline is paused.
+    pub on_pause: Callback,
+
+    /// Fired when the timeline is played.
+    pub on_play: Callback,
+}
+
 // Keep in sync with the `FullscreenOptions` interface in `rerun_js/web-viewer/index.ts`
 #[derive(Clone, Deserialize)]
 pub struct FullscreenOptions {
@@ -374,6 +659,7 @@ fn create_app(
         video_decoder,
         hide_welcome_screen,
         panel_state_overrides,
+        timeline,
         fullscreen,
         enable_history,
 
@@ -403,6 +689,7 @@ fn create_app(
         force_wgpu_backend: render_backend.clone(),
         video_decoder_hw_acceleration,
         hide_welcome_screen: hide_welcome_screen.unwrap_or(false),
+        timeline_options: timeline.clone(),
         fullscreen_options: fullscreen.clone(),
         panel_state_overrides: panel_state_overrides.unwrap_or_default().into(),
 
diff --git a/crates/viewer/re_viewer/src/web_tools.rs b/crates/viewer/re_viewer/src/web_tools.rs
index 202615ed3d67..d56ce482077f 100644
--- a/crates/viewer/re_viewer/src/web_tools.rs
+++ b/crates/viewer/re_viewer/src/web_tools.rs
@@ -191,10 +191,22 @@ pub struct Callback(#[serde(with = "serde_wasm_bindgen::preserve")] js_sys::Func
 
 impl Callback {
     #[inline]
-    pub fn call(&self) -> Result<JsValue, JsValue> {
+    pub fn call0(&self) -> Result<JsValue, JsValue> {
         let window: JsValue = window()?.into();
         self.0.call0(&window)
     }
+
+    #[inline]
+    pub fn call1(&self, arg0: &JsValue) -> Result<JsValue, JsValue> {
+        let window: JsValue = window()?.into();
+        self.0.call1(&window, arg0)
+    }
+
+    #[inline]
+    pub fn call2(&self, arg0: &JsValue, arg1: &JsValue) -> Result<JsValue, JsValue> {
+        let window: JsValue = window()?.into();
+        self.0.call2(&window, arg0, arg1)
+    }
 }
 
 // Deserializes from JS string or array of strings.
diff --git a/crates/viewer/re_viewer_context/src/lib.rs b/crates/viewer/re_viewer_context/src/lib.rs
index df0a25b94492..faaa5d6983d2 100644
--- a/crates/viewer/re_viewer_context/src/lib.rs
+++ b/crates/viewer/re_viewer_context/src/lib.rs
@@ -71,7 +71,7 @@ pub use self::{
     store_context::StoreContext,
     store_hub::StoreHub,
     tensor::{ImageStats, TensorStats},
-    time_control::{Looping, PlayState, TimeControl, TimeView},
+    time_control::{Looping, PlayState, TimeControl, TimeView, TimelineCallbacks},
     time_drag_value::TimeDragValue,
     typed_entity_collections::{
         ApplicableEntities, IndicatedEntities, PerVisualizer, VisualizableEntities,
diff --git a/crates/viewer/re_viewer_context/src/time_control.rs b/crates/viewer/re_viewer_context/src/time_control.rs
index be5596ff3b3a..330b95dc8db7 100644
--- a/crates/viewer/re_viewer_context/src/time_control.rs
+++ b/crates/viewer/re_viewer_context/src/time_control.rs
@@ -1,4 +1,5 @@
 use std::collections::BTreeMap;
+use std::rc::Rc;
 
 use re_entity_db::{TimeCounts, TimesPerTimeline};
 use re_log_types::{
@@ -107,14 +108,65 @@ impl std::ops::Deref for ActiveTimeline {
     }
 }
 
+#[derive(Clone, PartialEq, serde::Deserialize, serde::Serialize)]
+struct TimeStateEntry {
+    prev: TimeState,
+    current: TimeState,
+}
+
+impl TimeStateEntry {
+    fn new(time: impl Into<TimeReal>) -> Self {
+        let state = TimeState::new(time);
+        Self {
+            prev: state,
+            current: state,
+        }
+    }
+}
+
+#[derive(serde::Deserialize, serde::Serialize, Clone, PartialEq)]
+struct LastFrame {
+    timeline: Option<Timeline>,
+    playing: bool,
+}
+
+impl Default for LastFrame {
+    fn default() -> Self {
+        Self {
+            timeline: None,
+            playing: true,
+        }
+    }
+}
+
+// Keep in sync with the `TimelineOptions` interface in `rerun_js/web-viewer/index.ts`
+#[derive(Clone)]
+pub struct TimelineCallbacks {
+    /// Fired when a different timeline is selected.
+    pub on_timelinechange: Rc<dyn Fn(Timeline, TimeReal)>,
+
+    /// Fired when the timepoint changes.
+    ///
+    /// Does not fire when `on_seek` is called.
+    pub on_timeupdate: Rc<dyn Fn(TimeReal)>,
+
+    /// Fired when the timeline is paused.
+    pub on_pause: Rc<dyn Fn()>,
+
+    /// Fired when the timeline is played.
+    pub on_play: Rc<dyn Fn()>,
+}
+
 /// Controls the global view and progress of the time.
 #[derive(serde::Deserialize, serde::Serialize, Clone, PartialEq)]
 #[serde(default)]
 pub struct TimeControl {
+    last_frame: LastFrame,
+
     /// Name of the timeline (e.g. `log_time`).
     timeline: ActiveTimeline,
 
-    states: BTreeMap<Timeline, TimeState>,
+    states: BTreeMap<Timeline, TimeStateEntry>,
 
     /// If true, we are either in [`PlayState::Playing`] or [`PlayState::Following`].
     playing: bool,
@@ -137,6 +189,7 @@ pub struct TimeControl {
 impl Default for TimeControl {
     fn default() -> Self {
         Self {
+            last_frame: Default::default(),
             timeline: ActiveTimeline::Auto(default_timeline([])),
             states: Default::default(),
             playing: true,
@@ -156,6 +209,7 @@ impl TimeControl {
         times_per_timeline: &TimesPerTimeline,
         stable_dt: f32,
         more_data_is_coming: bool,
+        callbacks: Option<&TimelineCallbacks>,
     ) -> NeedsRepaint {
         self.select_a_valid_timeline(times_per_timeline);
 
@@ -163,14 +217,14 @@ impl TimeControl {
             return NeedsRepaint::No; // we have no data on this timeline yet, so bail
         };
 
-        match self.play_state() {
+        let needs_repaint = match self.play_state() {
             PlayState::Paused => {
                 // It's possible that the playback is paused because e.g. it reached its end, but
                 // then the user decides to switch timelines.
                 // When they do so, it might be the case that they switch to a timeline they've
                 // never interacted with before, in which case we don't even have a time state yet.
                 self.states.entry(*self.timeline).or_insert_with(|| {
-                    TimeState::new(if self.following {
+                    TimeStateEntry::new(if self.following {
                         full_range.max()
                     } else {
                         full_range.min()
@@ -184,11 +238,11 @@ impl TimeControl {
                 let state = self
                     .states
                     .entry(*self.timeline)
-                    .or_insert_with(|| TimeState::new(full_range.min()));
+                    .or_insert_with(|| TimeStateEntry::new(full_range.min()));
 
-                if self.looping == Looping::Off && full_range.max() <= state.time {
+                if self.looping == Looping::Off && full_range.max() <= state.current.time {
                     // We've reached the end of the data
-                    state.time = full_range.max().into();
+                    state.current.time = full_range.max().into();
 
                     if more_data_is_coming {
                         // then let's wait for it without pausing!
@@ -201,24 +255,24 @@ impl TimeControl {
 
                 let loop_range = match self.looping {
                     Looping::Off => None,
-                    Looping::Selection => state.loop_selection,
+                    Looping::Selection => state.current.loop_selection,
                     Looping::All => Some(full_range.into()),
                 };
 
                 if let Some(loop_range) = loop_range {
-                    state.time = state.time.max(loop_range.min);
+                    state.current.time = state.current.time.max(loop_range.min);
                 }
 
                 match self.timeline.typ() {
                     TimeType::Sequence => {
-                        state.time += TimeReal::from(state.fps * dt);
+                        state.current.time += TimeReal::from(state.current.fps * dt);
                     }
-                    TimeType::Time => state.time += TimeReal::from(Duration::from_secs(dt)),
+                    TimeType::Time => state.current.time += TimeReal::from(Duration::from_secs(dt)),
                 }
 
                 if let Some(loop_range) = loop_range {
-                    if loop_range.max < state.time {
-                        state.time = loop_range.min; // loop!
+                    if loop_range.max < state.current.time {
+                        state.current.time = loop_range.min; // loop!
                     }
                 }
 
@@ -228,14 +282,52 @@ impl TimeControl {
                 // Set the time to the max:
                 match self.states.entry(*self.timeline) {
                     std::collections::btree_map::Entry::Vacant(entry) => {
-                        entry.insert(TimeState::new(full_range.max()));
+                        entry.insert(TimeStateEntry::new(full_range.max()));
                     }
                     std::collections::btree_map::Entry::Occupied(mut entry) => {
-                        entry.get_mut().time = full_range.max().into();
+                        entry.get_mut().current.time = full_range.max().into();
                     }
                 }
                 NeedsRepaint::No // no need for request_repaint - we already repaint when new data arrives
             }
+        };
+
+        if let Some(callbacks) = callbacks {
+            self.handle_callbacks(callbacks);
+        }
+
+        needs_repaint
+    }
+
+    /// Handle updating last frame state and trigger callbacks on changes.
+    pub fn handle_callbacks(&mut self, callbacks: &TimelineCallbacks) {
+        if self.last_frame.playing != self.playing {
+            self.last_frame.playing = self.playing;
+
+            if self.playing {
+                (callbacks.on_play)();
+            } else {
+                (callbacks.on_pause)();
+            }
+        }
+
+        if self.last_frame.timeline != Some(*self.timeline) {
+            self.last_frame.timeline = Some(*self.timeline);
+
+            let time = self
+                .time_for_timeline(*self.timeline)
+                .unwrap_or(TimeReal::MIN);
+
+            (callbacks.on_timelinechange)(*self.timeline, time);
+        }
+
+        if let Some(state) = self.states.get_mut(&self.timeline) {
+            // TODO(jan): throttle?
+            if state.prev.time != state.current.time {
+                state.prev.time = state.current.time;
+
+                (callbacks.on_timeupdate)(state.current.time);
+            }
         }
     }
 
@@ -279,12 +371,12 @@ impl TimeControl {
                 // Start from beginning if we are at the end:
                 if let Some(time_points) = times_per_timeline.get(&self.timeline) {
                     if let Some(state) = self.states.get_mut(&self.timeline) {
-                        if max(time_points) <= state.time {
-                            state.time = min(time_points).into();
+                        if max(time_points) <= state.current.time {
+                            state.current.time = min(time_points).into();
                         }
                     } else {
                         self.states
-                            .insert(*self.timeline, TimeState::new(min(time_points)));
+                            .insert(*self.timeline, TimeStateEntry::new(min(time_points)));
                     }
                 }
             }
@@ -296,10 +388,10 @@ impl TimeControl {
                     // Set the time to the max:
                     match self.states.entry(*self.timeline) {
                         std::collections::btree_map::Entry::Vacant(entry) => {
-                            entry.insert(TimeState::new(max(time_points)));
+                            entry.insert(TimeStateEntry::new(max(time_points)));
                         }
                         std::collections::btree_map::Entry::Occupied(mut entry) => {
-                            entry.get_mut().time = max(time_points).into();
+                            entry.get_mut().current.time = max(time_points).into();
                         }
                     }
                 }
@@ -350,7 +442,7 @@ impl TimeControl {
     pub fn restart(&mut self, times_per_timeline: &TimesPerTimeline) {
         if let Some(time_points) = times_per_timeline.get(&self.timeline) {
             if let Some(state) = self.states.get_mut(&self.timeline) {
-                state.time = min(time_points).into();
+                state.current.time = min(time_points).into();
                 self.following = false;
             }
         }
@@ -386,8 +478,8 @@ impl TimeControl {
             // Start from beginning if we are at the end:
             if let Some(time_points) = times_per_timeline.get(&self.timeline) {
                 if let Some(state) = self.states.get_mut(&self.timeline) {
-                    if max(time_points) <= state.time {
-                        state.time = min(time_points).into();
+                    if max(time_points) <= state.current.time {
+                        state.current.time = min(time_points).into();
                         self.playing = true;
                         self.following = false;
                         return;
@@ -415,13 +507,15 @@ impl TimeControl {
 
     /// playback fps
     pub fn fps(&self) -> Option<f32> {
-        self.states.get(self.timeline()).map(|state| state.fps)
+        self.states
+            .get(self.timeline())
+            .map(|state| state.current.fps)
     }
 
     /// playback fps
     pub fn set_fps(&mut self, fps: f32) {
         if let Some(state) = self.states.get_mut(&self.timeline) {
-            state.fps = fps;
+            state.current.fps = fps;
         }
     }
 
@@ -461,7 +555,9 @@ impl TimeControl {
 
     /// The current time.
     pub fn time(&self) -> Option<TimeReal> {
-        self.states.get(self.timeline()).map(|state| state.time)
+        self.states
+            .get(self.timeline())
+            .map(|state| state.current.time)
     }
 
     /// The current time.
@@ -485,7 +581,7 @@ impl TimeControl {
     /// The current loop range, iff selection looping is turned on.
     pub fn active_loop_selection(&self) -> Option<ResolvedTimeRangeF> {
         if self.looping == Looping::Selection {
-            self.states.get(self.timeline())?.loop_selection
+            self.states.get(self.timeline())?.current.loop_selection
         } else {
             None
         }
@@ -500,21 +596,22 @@ impl TimeControl {
     ///
     /// This can still return `Some` even if looping is currently off.
     pub fn loop_selection(&self) -> Option<ResolvedTimeRangeF> {
-        self.states.get(self.timeline())?.loop_selection
+        self.states.get(self.timeline())?.current.loop_selection
     }
 
     /// Set the current loop selection without enabling looping.
     pub fn set_loop_selection(&mut self, selection: ResolvedTimeRangeF) {
         self.states
             .entry(*self.timeline)
-            .or_insert_with(|| TimeState::new(selection.min))
+            .or_insert_with(|| TimeStateEntry::new(selection.min))
+            .current
             .loop_selection = Some(selection);
     }
 
     /// Remove the current loop selection.
     pub fn remove_loop_selection(&mut self) {
         if let Some(state) = self.states.get_mut(&self.timeline) {
-            state.loop_selection = None;
+            state.current.loop_selection = None;
         }
         if self.looping() == Looping::Selection {
             self.set_looping(Looping::Off);
@@ -528,7 +625,7 @@ impl TimeControl {
         }
 
         if let Some(state) = self.states.get(self.timeline()) {
-            state.time.floor() == needle
+            state.current.time.floor() == needle
         } else {
             false
         }
@@ -539,12 +636,27 @@ impl TimeControl {
         self.set_time(time);
     }
 
+    pub fn time_for_timeline(&self, timeline: Timeline) -> Option<TimeReal> {
+        self.states.get(&timeline).map(|state| state.current.time)
+    }
+
+    pub fn set_time_for_timeline(&mut self, timeline: Timeline, time: impl Into<TimeReal>) {
+        let time = time.into();
+
+        self.states
+            .entry(timeline)
+            .or_insert_with(|| TimeStateEntry::new(time))
+            .current
+            .time = time;
+    }
+
     pub fn set_time(&mut self, time: impl Into<TimeReal>) {
         let time = time.into();
 
         self.states
             .entry(*self.timeline)
-            .or_insert_with(|| TimeState::new(time))
+            .or_insert_with(|| TimeStateEntry::new(time))
+            .current
             .time = time;
     }
 
@@ -552,21 +664,22 @@ impl TimeControl {
     pub fn time_view(&self) -> Option<TimeView> {
         self.states
             .get(self.timeline())
-            .and_then(|state| state.view)
+            .and_then(|state| state.current.view)
     }
 
     /// The range of time we are currently zoomed in on.
     pub fn set_time_view(&mut self, view: TimeView) {
         self.states
             .entry(*self.timeline)
-            .or_insert_with(|| TimeState::new(view.min))
+            .or_insert_with(|| TimeStateEntry::new(view.min))
+            .current
             .view = Some(view);
     }
 
     /// The range of time we are currently zoomed in on.
     pub fn reset_time_view(&mut self) {
         if let Some(state) = self.states.get_mut(&self.timeline) {
-            state.view = None;
+            state.current.view = None;
         }
     }
 }
diff --git a/rerun_js/package.json b/rerun_js/package.json
index 77d6e52d2e30..05856f57a39d 100644
--- a/rerun_js/package.json
+++ b/rerun_js/package.json
@@ -11,5 +11,6 @@
   "workspaces": [
     "web-viewer",
     "web-viewer-react"
-  ]
+  ],
+  "packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e"
 }
diff --git a/rerun_js/web-viewer/index.ts b/rerun_js/web-viewer/index.ts
index e02b13b5cf2a..66d0e736b48d 100644
--- a/rerun_js/web-viewer/index.ts
+++ b/rerun_js/web-viewer/index.ts
@@ -101,16 +101,24 @@ export interface WebViewerOptions {
 export interface AppOptions extends WebViewerOptions {
   url?: string;
   manifest_url?: string;
-  video_decoder?: VideoDecoder,
+  video_decoder?: VideoDecoder;
   render_backend?: Backend;
   hide_welcome_screen?: boolean;
   panel_state_overrides?: Partial<{
     [K in Panel]: PanelState;
   }>;
+  timeline?: TimelineOptions;
   fullscreen?: FullscreenOptions;
   enable_history?: boolean;
 }
 
+interface TimelineOptions {
+  on_timelinechange: (timeline: string, time: number) => void;
+  on_timeupdate: (time: number) => void;
+  on_pause: () => void;
+  on_play: () => void;
+}
+
 interface FullscreenOptions {
   get_state: () => boolean;
   on_toggle: () => void;
@@ -119,6 +127,11 @@ interface FullscreenOptions {
 interface WebViewerEvents {
   fullscreen: boolean;
   ready: void;
+
+  timelinechange: [timeline_name: string, time: number];
+  timeupdate: number;
+  play: void;
+  pause: void;
 }
 
 // This abomination is a mapped type with key filtering, and is used to split the events
@@ -127,7 +140,9 @@ interface WebViewerEvents {
 type EventsWithValue = {
   [K in keyof WebViewerEvents as WebViewerEvents[K] extends void
     ? never
-    : K]: WebViewerEvents[K];
+    : K]: WebViewerEvents[K] extends any[]
+    ? WebViewerEvents[K]
+    : [WebViewerEvents[K]];
 };
 
 type EventsWithoutValue = {
@@ -197,7 +212,15 @@ export class WebViewer {
         }
       : undefined;
 
-    this.#handle = new WebHandle_class({ ...options, fullscreen });
+    const timeline = {
+      on_timelinechange: (timeline: string, time: number) =>
+        this.#dispatch_event("timelinechange", timeline, time),
+      on_timeupdate: (time: number) => this.#dispatch_event("timeupdate", time),
+      on_pause: () => this.#dispatch_event("pause"),
+      on_play: () => this.#dispatch_event("play"),
+    };
+
+    this.#handle = new WebHandle_class({ ...options, fullscreen, timeline });
     try {
       await this.#handle.start(this.#canvas);
     } catch (e) {
@@ -218,15 +241,15 @@ export class WebViewer {
 
   #event_map: Map<
     keyof WebViewerEvents,
-    Map<(value: any) => void, { once: boolean }>
+    Map<(...args: any[]) => void, { once: boolean }>
   > = new Map();
 
   #dispatch_event<E extends keyof EventsWithValue>(
     event: E,
-    value: EventsWithValue[E],
+    ...args: EventsWithValue[E]
   ): void;
   #dispatch_event<E extends keyof EventsWithoutValue>(event: E): void;
-  #dispatch_event(event: any, value?: any): void {
+  #dispatch_event(event: any, ...args: any[]): void {
     // Dispatch events on next tick.
     // This is necessary because we may have been called somewhere deep within the viewer's call stack,
     // which means that `app` may be locked. The event will not actually be dispatched until the
@@ -236,7 +259,7 @@ export class WebViewer {
       const callbacks = this.#event_map.get(event);
       if (callbacks) {
         for (const [callback, { once }] of [...callbacks.entries()]) {
-          callback(value);
+          callback(...args);
           if (once) callbacks.delete(callback);
         }
       }
@@ -250,7 +273,7 @@ export class WebViewer {
    */
   on<E extends keyof EventsWithValue>(
     event: E,
-    callback: (value: EventsWithValue[E]) => void,
+    callback: (...args: EventsWithValue[E]) => void,
   ): Cancel;
   on<E extends keyof EventsWithoutValue>(
     event: E,
@@ -496,6 +519,149 @@ export class WebViewer {
     }
   }
 
+  /**
+   * Get the active recording id.
+   */
+  get_active_recording_id(): string | null {
+    if (!this.#handle) {
+      throw new Error(
+        `attempted to get active recording id in a stopped web viewer`,
+      );
+    }
+
+    return this.#handle.get_active_recording_id() ?? null;
+  }
+
+  /**
+   * Set the active recording id.
+   */
+  set_active_recording_id(value: string) {
+    if (!this.#handle) {
+      throw new Error(
+        `attempted to set active recording id to ${value} in a stopped web viewer`,
+      );
+    }
+
+    this.#handle.set_active_recording_id(value);
+  }
+
+  /**
+   * Get the play state.
+   *
+   * This always returns `false` if the recording can't be found.
+   */
+  get_playing(recording_id: string): boolean {
+    if (!this.#handle) {
+      throw new Error(`attempted to get play state in a stopped web viewer`);
+    }
+
+    return this.#handle.get_playing(recording_id) || false;
+  }
+
+  /**
+   * Set the play state.
+   *
+   * This does nothing if the recording can't be found.
+   */
+  set_playing(recording_id: string, value: boolean) {
+    if (!this.#handle) {
+      throw new Error(
+        `attempted to set play state to ${
+          value ? "playing" : "paused"
+        } in a stopped web viewer`,
+      );
+    }
+
+    this.#handle.set_playing(recording_id, value);
+  }
+
+  /**
+   * Get the current time.
+   *
+   * The interpretation of time depends on what kind of timeline it is:
+   *
+   * - For time timelines, this is the time in nanoseconds.
+   * - For sequence timelines, this is the sequence number.
+   *
+   * This always returns `0` if the recording or timeline can't be found.
+   */
+  get_current_time(recording_id: string, timeline: string): number {
+    if (!this.#handle) {
+      throw new Error(`attempted to get current time in a stopped web viewer`);
+    }
+
+    return this.#handle.get_time_for_timeline(recording_id, timeline) || 0;
+  }
+
+  /**
+   * Set the current time.
+   *
+   * Equivalent to clicking on the timeline in the time panel at the specified `time`.
+   * The interpretation of `time` depends on what kind of timeline it is:
+   *
+   * - For time timelines, this is the time in nanoseconds.
+   * - For sequence timelines, this is the sequence number.
+   *
+   * This does nothing if the recording or timeline can't be found.
+   *
+   * @param time The new current time, in the units of the given timeline.
+   */
+  set_current_time(recording_id: string, timeline: string, time: number) {
+    if (!this.#handle) {
+      throw new Error(
+        `attempted to set current time to ${time} in a stopped web viewer`,
+      );
+    }
+
+    this.#handle.set_time_for_timeline(recording_id, timeline, time);
+  }
+
+  /**
+   * Get the active timeline.
+   *
+   * This always returns `null` if the recording can't be found.
+   */
+  get_active_timeline(recording_id: string): string | null {
+    if (!this.#handle) {
+      throw new Error(
+        `attempted to get active timeline in a stopped web viewer`,
+      );
+    }
+
+    return this.#handle.get_active_timeline(recording_id) ?? null;
+  }
+
+  /**
+   * Set the active timeline.
+   *
+   * This does nothing if the recording or timeline can't be found.
+   */
+  set_active_timeline(recording_id: string, timeline: string) {
+    if (!this.#handle) {
+      throw new Error(
+        `attempted to set active timeline to ${timeline} in a stopped web viewer`,
+      );
+    }
+
+    this.#handle.set_active_timeline(recording_id, timeline);
+  }
+
+  /**
+   * Get the time range for a timeline.
+   *
+   * This always returns `null` if the recording or timeline can't be found.
+   */
+  get_time_range(
+    recording_id: string,
+    timeline: string,
+  ): { min: number; max: number } | null {
+    if (!this.#handle) {
+      throw new Error(`attempted to get time range in a stopped web viewer`);
+    }
+
+    return this.#handle.get_timeline_time_range(recording_id, timeline);
+  }
+
   /**
    * Toggle fullscreen mode.
    *