diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index ff7ef9cf5e834..f22905cfcc4ca 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -4,8 +4,8 @@ on: workflow_dispatch: schedule: # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#schedule - # 12:15 UTC, every day - - cron: "15 12 * * *" + # 3am UTC, so 4am-5am CET and evening in east time, basically after everyone's day. + - cron: "15 3 * * *" defaults: run: @@ -59,7 +59,7 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master with: - toolchain: 1.76.0 + toolchain: 1.79.0 - run: cargo build -p rerun diff --git a/.github/workflows/reusable_bench.yml b/.github/workflows/reusable_bench.yml index 8e06b943b302a..c7a2576f71f23 100644 --- a/.github/workflows/reusable_bench.yml +++ b/.github/workflows/reusable_bench.yml @@ -155,10 +155,13 @@ jobs: save-data-file: false auto-push: false + - uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.25.0 + - name: Render benchmark result if: github.ref == 'refs/heads/main' run: | - pip install --break-system-packages google-cloud-storage==2.9.0 - scripts/ci/render_bench.py crates \ + pixi run python scripts/ci/render_bench.py crates \ --after $(date -d"30 days ago" +%Y-%m-%d) \ --output "gs://rerun-builds/graphs" diff --git a/.github/workflows/reusable_track_size.yml b/.github/workflows/reusable_track_size.yml index e070971bdd83d..f85264684522b 100644 --- a/.github/workflows/reusable_track_size.yml +++ b/.github/workflows/reusable_track_size.yml @@ -189,10 +189,13 @@ jobs: save-data-file: false auto-push: false + - uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.25.0 + - name: Render benchmark result if: github.ref == 'refs/heads/main' run: | - python3 -m pip install --break-system-packages google-cloud-storage==2.9.0 - scripts/ci/render_bench.py sizes \ + pixi run python scripts/ci/render_bench.py sizes \ --after $(date -d"180 days ago" +%Y-%m-%d) 
\ --output "gs://rerun-builds/graphs" diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index 376d4fb5167b7..2b950f339bf08 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -164,6 +164,7 @@ Update instructions: | re_entity_db | In-memory storage of Rerun entities | | re_query | Querying data in the re_chunk_store | | re_dataframe | The Rerun public data APIs. | +| re_dataframe2 | The Rerun public data APIs. | | re_types | The built-in Rerun data types, component types, and archetypes. | | re_types_blueprint | The core traits and types that power Rerun's Blueprint sub-system. | | re_log_encoding | Helpers for encoding and transporting Rerun log messages | diff --git a/BUILD.md b/BUILD.md index 5a20261dd1264..01a293763650c 100644 --- a/BUILD.md +++ b/BUILD.md @@ -23,12 +23,12 @@ cd rerun Now install the `pixi` package manager: -Make sure `cargo --version` prints `1.76.0` once you are done. +Make sure `cargo --version` prints `1.79.0` once you are done. If you are using an Apple-silicon Mac (M1, M2), make sure `rustc -vV` outputs `host: aarch64-apple-darwin`. 
If not, this should fix it: ```sh -rustup set default-host aarch64-apple-darwin && rustup install 1.76.0 +rustup set default-host aarch64-apple-darwin && rustup install 1.79.0 ``` ## Building and running the Viewer diff --git a/Cargo.lock b/Cargo.lock index e5abf50609c3b..03112892db624 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -999,9 +999,9 @@ checksum = "e1e5f035d16fc623ae5f74981db80a439803888314e3a555fd6f04acd51a3205" [[package]] name = "bytemuck" -version = "1.13.1" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea" +checksum = "94bbb0ad554ad961ddc5da507a12a29b14e4ae5bda06b19f575a3e6079d2e2ae" dependencies = [ "bytemuck_derive", ] @@ -5132,6 +5132,33 @@ dependencies = [ "thiserror", ] +[[package]] +name = "re_dataframe2" +version = "0.19.0-alpha.1+dev" +dependencies = [ + "ahash", + "anyhow", + "backtrace", + "indent", + "itertools 0.13.0", + "nohash-hasher", + "parking_lot", + "paste", + "re_arrow2", + "re_chunk", + "re_chunk_store", + "re_error", + "re_format", + "re_log", + "re_log_types", + "re_query", + "re_tracing", + "re_types", + "re_types_core", + "seq-macro", + "thiserror", +] + [[package]] name = "re_dev_tools" version = "0.19.0-alpha.1+dev" @@ -5667,6 +5694,7 @@ dependencies = [ "re_chunk_store", "re_data_ui", "re_log_types", + "re_query", "re_renderer", "re_space_view", "re_tracing", diff --git a/Cargo.toml b/Cargo.toml index 9de2396e62fc1..fb17fdd4101ea 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,7 +24,7 @@ homepage = "https://rerun.io" include = ["../../LICENSE-APACHE", "../../LICENSE-MIT", "**/*.rs", "Cargo.toml"] license = "MIT OR Apache-2.0" repository = "https://github.com/rerun-io/rerun" -rust-version = "1.76" +rust-version = "1.79" version = "0.19.0-alpha.1+dev" [workspace.dependencies] @@ -45,6 +45,7 @@ re_chunk_store = { path = "crates/store/re_chunk_store", version = "=0.19.0-alph re_data_loader = { path = 
"crates/store/re_data_loader", version = "=0.19.0-alpha.1", default-features = false } re_data_source = { path = "crates/store/re_data_source", version = "=0.19.0-alpha.1", default-features = false } re_dataframe = { path = "crates/store/re_dataframe", version = "=0.19.0-alpha.1", default-features = false } +re_dataframe2 = { path = "crates/store/re_dataframe2", version = "=0.19.0-alpha.1", default-features = false } re_entity_db = { path = "crates/store/re_entity_db", version = "=0.19.0-alpha.1", default-features = false } re_format_arrow = { path = "crates/store/re_format_arrow", version = "=0.19.0-alpha.1", default-features = false } re_log_encoding = { path = "crates/store/re_log_encoding", version = "=0.19.0-alpha.1", default-features = false } @@ -147,7 +148,7 @@ bincode = "1.3" bit-vec = "0.7" bitflags = { version = "2.4", features = ["bytemuck"] } blackbox = "0.2.0" -bytemuck = { version = "1.11", features = ["extern_crate_alloc"] } +bytemuck = { version = "1.18", features = ["extern_crate_alloc"] } camino = "1.1" cargo_metadata = "0.18" cargo-run-wasm = "0.3.2" @@ -388,6 +389,7 @@ disallowed_types = "warn" # See clippy.toml doc_link_with_quotes = "warn" doc_markdown = "warn" empty_enum = "warn" +empty_enum_variants_with_brackets = "warn" enum_glob_use = "warn" equatable_if_let = "warn" exit = "warn" @@ -411,6 +413,8 @@ inefficient_to_string = "warn" infinite_loop = "warn" into_iter_without_iter = "warn" invalid_upcast_comparisons = "warn" +iter_filter_is_ok = "warn" +iter_filter_is_some = "warn" iter_not_returning_iterator = "warn" iter_on_empty_collections = "warn" iter_on_single_items = "warn" @@ -427,6 +431,7 @@ macro_use_imports = "warn" manual_assert = "warn" manual_clamp = "warn" manual_instant_elapsed = "warn" +manual_is_variant_and = "warn" manual_let_else = "warn" manual_ok_or = "warn" manual_string_new = "warn" @@ -441,6 +446,7 @@ mismatched_target_os = "warn" mismatching_type_param_order = "warn" missing_enforced_import_renames = "warn" 
missing_safety_doc = "warn" +mixed_attributes_style = "warn" mut_mut = "warn" mutex_integer = "warn" needless_borrow = "warn" @@ -450,14 +456,17 @@ needless_pass_by_ref_mut = "warn" needless_pass_by_value = "warn" negative_feature_names = "warn" nonstandard_macro_braces = "warn" +option_as_ref_cloned = "warn" option_option = "warn" path_buf_push_overwrite = "warn" ptr_as_ptr = "warn" ptr_cast_constness = "warn" +pub_underscore_fields = "warn" pub_without_shorthand = "warn" rc_mutex = "warn" readonly_write_lock = "warn" redundant_type_annotations = "warn" +ref_as_ptr = "warn" ref_option_ref = "warn" rest_pat_in_fully_bound_structs = "warn" same_functions_in_if_condition = "warn" @@ -465,9 +474,10 @@ semicolon_if_nothing_returned = "warn" should_panic_without_expect = "warn" significant_drop_tightening = "warn" single_match_else = "warn" +str_split_at_newline = "warn" str_to_string = "warn" -string_add_assign = "warn" string_add = "warn" +string_add_assign = "warn" string_lit_as_bytes = "warn" string_lit_chars_any = "warn" string_to_string = "warn" @@ -501,6 +511,7 @@ zero_sized_map_values = "warn" # Disabled waiting on https://github.com/rust-lang/rust-clippy/issues/9602 #self_named_module_files = "warn" +assigning_clones = "allow" # Too much for too little manual_range_contains = "allow" # this one is just worse imho map_unwrap_or = "allow" # so is this one ref_patterns = "allow" # It's nice to avoid ref pattern, but there are some situations that are hard (impossible?) to express without. 
diff --git a/clippy.toml b/clippy.toml index 70ff980b80856..d1db6d98fcb9a 100644 --- a/clippy.toml +++ b/clippy.toml @@ -3,7 +3,7 @@ # ----------------------------------------------------------------------------- # Section identical to the main scripts/clippy_wasm/clippy.toml: -msrv = "1.76" +msrv = "1.79" allow-unwrap-in-tests = true @@ -77,6 +77,8 @@ doc-valid-idents = [ "GLTF", "iOS", "macOS", + "MessagePack", + "MiMalloc", "NaN", "OBJ", "OpenGL", @@ -84,6 +86,7 @@ doc-valid-idents = [ "sRGB", "sRGBA", "WebGL", + "WebGPU", "WebSocket", "WebSockets", ] diff --git a/crates/build/re_dev_tools/src/build_examples/example.rs b/crates/build/re_dev_tools/src/build_examples/example.rs index f93a3c5638c61..8e2a27714b5c4 100644 --- a/crates/build/re_dev_tools/src/build_examples/example.rs +++ b/crates/build/re_dev_tools/src/build_examples/example.rs @@ -129,6 +129,7 @@ impl ExamplesManifest { #[derive(serde::Deserialize)] pub struct ExampleCategory { /// Used to sort categories in the `rerun.io/examples` navbar. + #[allow(unused)] pub order: u64, /// `snake_case` name. 
diff --git a/crates/build/re_dev_tools/src/build_search_index/util.rs b/crates/build/re_dev_tools/src/build_search_index/util.rs index 90245ef0c0c54..d2fec7cd3f89c 100644 --- a/crates/build/re_dev_tools/src/build_search_index/util.rs +++ b/crates/build/re_dev_tools/src/build_search_index/util.rs @@ -20,11 +20,13 @@ pub trait CommandExt { I: IntoIterator, S: AsRef; + #[allow(unused)] fn with_env(self, key: K, val: V) -> Self where K: AsRef, V: AsRef; + #[allow(unused)] fn run(self) -> io::Result<()>; fn output(self) -> anyhow::Result>; diff --git a/crates/build/re_types_builder/src/codegen/common.rs b/crates/build/re_types_builder/src/codegen/common.rs index 95d73e9a5f791..ecebe584acff6 100644 --- a/crates/build/re_types_builder/src/codegen/common.rs +++ b/crates/build/re_types_builder/src/codegen/common.rs @@ -17,7 +17,7 @@ pub struct ExampleInfo<'a> { /// Path to the snippet relative to the snippet directory. pub path: &'a str, - /// The snake_case name of the example. + /// The `snake_case` name of the example. pub name: String, /// The human-readable name of the example. diff --git a/crates/build/re_types_builder/src/codegen/docs/mod.rs b/crates/build/re_types_builder/src/codegen/docs/mod.rs index dbf83d5124656..7a304ff4eb78b 100644 --- a/crates/build/re_types_builder/src/codegen/docs/mod.rs +++ b/crates/build/re_types_builder/src/codegen/docs/mod.rs @@ -12,6 +12,8 @@ use crate::{ ATTR_DOCS_VIEW_TYPES, }; +pub const DATAFRAME_VIEW_FQNAME: &str = "rerun.blueprint.views.DataframeView"; + macro_rules! 
putln { ($o:ident) => ( writeln!($o).ok() ); ($o:ident, $($tt:tt)*) => ( writeln!($o, $($tt)*).ok() ); @@ -574,9 +576,10 @@ fn write_archetype_fields( putln!(page, "**Optional**: {}", optional.join(", ")); } + putln!(page); + putln!(page, "## Shown in"); + if let Some(view_types) = view_per_archetype.get(&object.fqname) { - putln!(page); - putln!(page, "## Shown in"); for ViewReference { view_name, explanation, @@ -592,6 +595,9 @@ fn write_archetype_fields( putln!(page); } } + + // Special case for dataframe view: it can display anything. + putln!(page, "* [DataframeView](../views/dataframe_view.md)"); } fn write_visualized_archetypes( @@ -614,7 +620,7 @@ fn write_visualized_archetypes( } } - if archetype_fqnames.is_empty() { + if archetype_fqnames.is_empty() && view.fqname != DATAFRAME_VIEW_FQNAME { reporter.error(&view.virtpath, &view.fqname, "No archetypes use this view."); return; } @@ -624,18 +630,24 @@ fn write_visualized_archetypes( putln!(page, "## Visualized archetypes"); putln!(page); - for (fqname, explanation) in archetype_fqnames { - let object = &objects[&fqname]; - page.push_str(&format!( - "* [`{}`](../{}/{}.md)", - object.name, - object.kind.plural_snake_case(), - object.snake_case_name() - )); - if let Some(explanation) = explanation { - page.push_str(&format!(" ({explanation})")); + + // special case for dataframe view + if view.fqname == DATAFRAME_VIEW_FQNAME { + putln!(page, "Any data can be displayed by the Dataframe view."); + } else { + for (fqname, explanation) in archetype_fqnames { + let object = &objects[&fqname]; + page.push_str(&format!( + "* [`{}`](../{}/{}.md)", + object.name, + object.kind.plural_snake_case(), + object.snake_case_name() + )); + if let Some(explanation) = explanation { + page.push_str(&format!(" ({explanation})")); + } + putln!(page); } - putln!(page); } putln!(page); } diff --git a/crates/build/re_types_builder/src/codegen/python/mod.rs b/crates/build/re_types_builder/src/codegen/python/mod.rs index 
97fa9ba37e3a9..d0e21f345df2f 100644 --- a/crates/build/re_types_builder/src/codegen/python/mod.rs +++ b/crates/build/re_types_builder/src/codegen/python/mod.rs @@ -185,10 +185,10 @@ struct ExtensionClass { /// a default implementation. has_array: bool, - /// Whether the `ObjectExt` contains __native_to_pa_array__() + /// Whether the `ObjectExt` contains `__native_to_pa_array__()` has_native_to_pa_array: bool, - /// Whether the `ObjectExt` contains a deferred_patch_class() method + /// Whether the `ObjectExt` contains a `deferred_patch_class()` method has_deferred_patch_class: bool, } @@ -1509,11 +1509,7 @@ fn quote_union_aliases_from_object<'a>( let name = &obj.name; let union_fields = field_types.join(","); - let aliases = if let Some(aliases) = aliases { - aliases - } else { - String::new() - }; + let aliases = aliases.unwrap_or_default(); unindent(&format!( r#" diff --git a/crates/build/re_types_builder/src/objects.rs b/crates/build/re_types_builder/src/objects.rs index 9a440c2004856..ccb790fa2dd62 100644 --- a/crates/build/re_types_builder/src/objects.rs +++ b/crates/build/re_types_builder/src/objects.rs @@ -284,7 +284,7 @@ impl ObjectKind { /// an enum. #[derive(Debug, Clone)] pub struct Object { - /// Utf8Path of the associated fbs definition in the Flatbuffers hierarchy, e.g. `//rerun/components/point2d.fbs`. + /// `Utf8Path` of the associated fbs definition in the Flatbuffers hierarchy, e.g. `//rerun/components/point2d.fbs`. pub virtpath: String, /// Absolute filepath of the associated fbs definition. @@ -296,7 +296,7 @@ pub struct Object { /// Fully-qualified package name of the object, e.g. `rerun.components`. pub pkg_name: String, - /// PascalCase name of the object type, e.g. `Position2D`. + /// `PascalCase` name of the object type, e.g. `Position2D`. pub name: String, /// The object's multiple layers of documentation. @@ -650,7 +650,7 @@ pub enum ObjectClass { /// union value. 
#[derive(Debug, Clone)] pub struct ObjectField { - /// Utf8Path of the associated fbs definition in the Flatbuffers hierarchy, e.g. `//rerun/components/point2d.fbs`. + /// `Utf8Path` of the associated fbs definition in the Flatbuffers hierarchy, e.g. `//rerun/components/point2d.fbs`. pub virtpath: String, /// Absolute filepath of the associated fbs definition. diff --git a/crates/store/re_chunk/Cargo.toml b/crates/store/re_chunk/Cargo.toml index 9681a27ae0931..de978a9eaeedb 100644 --- a/crates/store/re_chunk/Cargo.toml +++ b/crates/store/re_chunk/Cargo.toml @@ -52,6 +52,7 @@ anyhow.workspace = true arrow2 = { workspace = true, features = [ "compute_concatenate", "compute_filter", + "compute_take", ] } bytemuck.workspace = true document-features.workspace = true diff --git a/crates/store/re_chunk/src/slice.rs b/crates/store/re_chunk/src/slice.rs index efb07390f3418..ccb577afe768f 100644 --- a/crates/store/re_chunk/src/slice.rs +++ b/crates/store/re_chunk/src/slice.rs @@ -435,6 +435,80 @@ impl Chunk { .collect(), } } + + /// Removes duplicate rows from sections of consecutive identical indices. + /// + /// * If the [`Chunk`] is sorted on that index, the remaining values in the index column will be unique. + /// * If the [`Chunk`] has been densified on a specific column, the resulting chunk will + /// effectively contain the latest value of that column for each given index value. + /// + /// If this is a temporal chunk and `timeline` isn't present in it, this method is a no-op. + /// + /// This does _not_ obey `RowId`-ordering semantics (or any other kind of semantics for that + /// matter) -- it merely respects how the chunk is currently laid out: no more, no less. + /// Sort the chunk according to the semantics you're looking for before calling this method. + // + // TODO(cmc): `Timeline` should really be `Index`. 
+ #[inline] + pub fn deduped_latest_on_index(&self, index: &Timeline) -> Self { + re_tracing::profile_function!(); + + if self.is_empty() { + return self.clone(); + } + + if self.is_static() { + return self.row_sliced(self.num_rows().saturating_sub(1), 1); + } + + let Some(time_column) = self.timelines.get(index) else { + return self.clone(); + }; + + let indices = { + let mut i = 0; + let indices = time_column + .times_raw() + .iter() + .copied() + .dedup_with_count() + .map(|(count, _time)| { + i += count; + i.saturating_sub(1) as i32 + }) + .collect_vec(); + ArrowPrimitiveArray::::from_vec(indices) + }; + + let chunk = Self { + id: self.id, + entity_path: self.entity_path.clone(), + heap_size_bytes: Default::default(), + is_sorted: self.is_sorted, + row_ids: crate::util::take_array(&self.row_ids, &indices), + timelines: self + .timelines + .iter() + .map(|(&timeline, time_column)| (timeline, time_column.taken(&indices))) + .collect(), + components: self + .components + .iter() + .map(|(&component_name, list_array)| { + let filtered = crate::util::take_array(list_array, &indices); + (component_name, filtered) + }) + .collect(), + }; + + #[cfg(debug_assertions)] + #[allow(clippy::unwrap_used)] // debug-only + { + chunk.sanity_check().unwrap(); + } + + chunk + } } impl TimeColumn { @@ -517,7 +591,9 @@ impl TimeColumn { ) } - /// Runs a filter compute kernel on the time data with the specified `mask`. + /// Runs a [filter] compute kernel on the time data with the specified `mask`. + /// + /// [filter]: arrow2::compute::filter::filter #[inline] pub(crate) fn filtered(&self, filter: &ArrowBooleanArray) -> Self { let Self { @@ -552,13 +628,35 @@ impl TimeColumn { crate::util::filter_array(times, filter), ) } + + /// Runs a [take] compute kernel on the time data with the specified `indices`. 
+ /// + /// [take]: arrow2::compute::take::take + #[inline] + pub(crate) fn taken(&self, indices: &ArrowPrimitiveArray) -> Self { + let Self { + timeline, + times, + is_sorted, + time_range: _, + } = self; + + Self::new( + Some(*is_sorted), + *timeline, + crate::util::take_array(times, indices), + ) + } } // --- #[cfg(test)] mod tests { - use re_log_types::example_components::{MyColor, MyLabel, MyPoint}; + use re_log_types::{ + example_components::{MyColor, MyLabel, MyPoint}, + TimePoint, + }; use re_types_core::{ComponentBatch, Loggable}; use crate::{Chunk, RowId, Timeline}; @@ -684,4 +782,269 @@ mod tests { Ok(()) } + + #[test] + fn dedupe_temporal() -> anyhow::Result<()> { + let entity_path = "my/entity"; + + let row_id1 = RowId::new(); + let row_id2 = RowId::new(); + let row_id3 = RowId::new(); + let row_id4 = RowId::new(); + let row_id5 = RowId::new(); + + let timepoint1 = [ + (Timeline::log_time(), 1000), + (Timeline::new_sequence("frame"), 1), + ]; + let timepoint2 = [ + (Timeline::log_time(), 1032), + (Timeline::new_sequence("frame"), 1), + ]; + let timepoint3 = [ + (Timeline::log_time(), 1064), + (Timeline::new_sequence("frame"), 1), + ]; + let timepoint4 = [ + (Timeline::log_time(), 1096), + (Timeline::new_sequence("frame"), 2), + ]; + let timepoint5 = [ + (Timeline::log_time(), 1128), + (Timeline::new_sequence("frame"), 2), + ]; + + let points1 = &[MyPoint::new(1.0, 1.0), MyPoint::new(2.0, 2.0)]; + let points3 = &[MyPoint::new(6.0, 7.0)]; + + let colors4 = &[MyColor::from_rgb(1, 1, 1)]; + let colors5 = &[MyColor::from_rgb(2, 2, 2), MyColor::from_rgb(3, 3, 3)]; + + let labels1 = &[MyLabel("a".into())]; + let labels2 = &[MyLabel("b".into())]; + let labels3 = &[MyLabel("c".into())]; + let labels4 = &[MyLabel("d".into())]; + let labels5 = &[MyLabel("e".into())]; + + let chunk = Chunk::builder(entity_path.into()) + .with_sparse_component_batches( + row_id1, + timepoint1, + [ + (MyPoint::name(), Some(points1 as _)), + (MyColor::name(), None), + 
(MyLabel::name(), Some(labels1 as _)), + ], + ) + .with_sparse_component_batches( + row_id2, + timepoint2, + [ + (MyPoint::name(), None), + (MyColor::name(), None), + (MyLabel::name(), Some(labels2 as _)), + ], + ) + .with_sparse_component_batches( + row_id3, + timepoint3, + [ + (MyPoint::name(), Some(points3 as _)), + (MyColor::name(), None), + (MyLabel::name(), Some(labels3 as _)), + ], + ) + .with_sparse_component_batches( + row_id4, + timepoint4, + [ + (MyPoint::name(), None), + (MyColor::name(), Some(colors4 as _)), + (MyLabel::name(), Some(labels4 as _)), + ], + ) + .with_sparse_component_batches( + row_id5, + timepoint5, + [ + (MyPoint::name(), None), + (MyColor::name(), Some(colors5 as _)), + (MyLabel::name(), Some(labels5 as _)), + ], + ) + .build()?; + + eprintln!("chunk:\n{chunk}"); + + { + let got = chunk.deduped_latest_on_index(&Timeline::new_sequence("frame")); + eprintln!("got:\n{got}"); + assert_eq!(2, got.num_rows()); + + let expectations: &[(_, _, Option<&dyn ComponentBatch>)] = &[ + (row_id3, MyPoint::name(), Some(points3 as _)), + (row_id3, MyColor::name(), None), + (row_id3, MyLabel::name(), Some(labels3 as _)), + // + (row_id5, MyPoint::name(), None), + (row_id5, MyColor::name(), Some(colors5 as _)), + (row_id5, MyLabel::name(), Some(labels5 as _)), + ]; + + for (row_id, component_name, expected) in expectations { + let expected = expected + .and_then(|expected| re_types_core::LoggableBatch::to_arrow(expected).ok()); + eprintln!("{component_name} @ {row_id}"); + similar_asserts::assert_eq!(expected, chunk.cell(*row_id, component_name)); + } + } + + { + let got = chunk.deduped_latest_on_index(&Timeline::log_time()); + eprintln!("got:\n{got}"); + assert_eq!(5, got.num_rows()); + + let expectations: &[(_, _, Option<&dyn ComponentBatch>)] = &[ + (row_id1, MyPoint::name(), Some(points1 as _)), + (row_id1, MyColor::name(), None), + (row_id1, MyLabel::name(), Some(labels1 as _)), + (row_id2, MyPoint::name(), None), + (row_id2, MyColor::name(), None), 
+ (row_id2, MyLabel::name(), Some(labels2 as _)), + (row_id3, MyPoint::name(), Some(points3 as _)), + (row_id3, MyColor::name(), None), + (row_id3, MyLabel::name(), Some(labels3 as _)), + (row_id4, MyPoint::name(), None), + (row_id4, MyColor::name(), Some(colors4 as _)), + (row_id4, MyLabel::name(), Some(labels4 as _)), + (row_id5, MyPoint::name(), None), + (row_id5, MyColor::name(), Some(colors5 as _)), + (row_id5, MyLabel::name(), Some(labels5 as _)), + ]; + + for (row_id, component_name, expected) in expectations { + let expected = expected + .and_then(|expected| re_types_core::LoggableBatch::to_arrow(expected).ok()); + eprintln!("{component_name} @ {row_id}"); + similar_asserts::assert_eq!(expected, chunk.cell(*row_id, component_name)); + } + } + + Ok(()) + } + + #[test] + fn dedupe_static() -> anyhow::Result<()> { + let entity_path = "my/entity"; + + let row_id1 = RowId::new(); + let row_id2 = RowId::new(); + let row_id3 = RowId::new(); + let row_id4 = RowId::new(); + let row_id5 = RowId::new(); + + let timepoint_static = TimePoint::default(); + + let points1 = &[MyPoint::new(1.0, 1.0), MyPoint::new(2.0, 2.0)]; + let points3 = &[MyPoint::new(6.0, 7.0)]; + + let colors4 = &[MyColor::from_rgb(1, 1, 1)]; + let colors5 = &[MyColor::from_rgb(2, 2, 2), MyColor::from_rgb(3, 3, 3)]; + + let labels1 = &[MyLabel("a".into())]; + let labels2 = &[MyLabel("b".into())]; + let labels3 = &[MyLabel("c".into())]; + let labels4 = &[MyLabel("d".into())]; + let labels5 = &[MyLabel("e".into())]; + + let chunk = Chunk::builder(entity_path.into()) + .with_sparse_component_batches( + row_id1, + timepoint_static.clone(), + [ + (MyPoint::name(), Some(points1 as _)), + (MyColor::name(), None), + (MyLabel::name(), Some(labels1 as _)), + ], + ) + .with_sparse_component_batches( + row_id2, + timepoint_static.clone(), + [ + (MyPoint::name(), None), + (MyColor::name(), None), + (MyLabel::name(), Some(labels2 as _)), + ], + ) + .with_sparse_component_batches( + row_id3, + 
timepoint_static.clone(), + [ + (MyPoint::name(), Some(points3 as _)), + (MyColor::name(), None), + (MyLabel::name(), Some(labels3 as _)), + ], + ) + .with_sparse_component_batches( + row_id4, + timepoint_static.clone(), + [ + (MyPoint::name(), None), + (MyColor::name(), Some(colors4 as _)), + (MyLabel::name(), Some(labels4 as _)), + ], + ) + .with_sparse_component_batches( + row_id5, + timepoint_static.clone(), + [ + (MyPoint::name(), None), + (MyColor::name(), Some(colors5 as _)), + (MyLabel::name(), Some(labels5 as _)), + ], + ) + .build()?; + + eprintln!("chunk:\n{chunk}"); + + { + let got = chunk.deduped_latest_on_index(&Timeline::new_sequence("frame")); + eprintln!("got:\n{got}"); + assert_eq!(1, got.num_rows()); + + let expectations: &[(_, _, Option<&dyn ComponentBatch>)] = &[ + (row_id5, MyPoint::name(), None), + (row_id5, MyColor::name(), Some(colors5 as _)), + (row_id5, MyLabel::name(), Some(labels5 as _)), + ]; + + for (row_id, component_name, expected) in expectations { + let expected = expected + .and_then(|expected| re_types_core::LoggableBatch::to_arrow(expected).ok()); + eprintln!("{component_name} @ {row_id}"); + similar_asserts::assert_eq!(expected, chunk.cell(*row_id, component_name)); + } + } + + { + let got = chunk.deduped_latest_on_index(&Timeline::log_time()); + eprintln!("got:\n{got}"); + assert_eq!(1, got.num_rows()); + + let expectations: &[(_, _, Option<&dyn ComponentBatch>)] = &[ + (row_id5, MyPoint::name(), None), + (row_id5, MyColor::name(), Some(colors5 as _)), + (row_id5, MyLabel::name(), Some(labels5 as _)), + ]; + + for (row_id, component_name, expected) in expectations { + let expected = expected + .and_then(|expected| re_types_core::LoggableBatch::to_arrow(expected).ok()); + eprintln!("{component_name} @ {row_id}"); + similar_asserts::assert_eq!(expected, chunk.cell(*row_id, component_name)); + } + } + + Ok(()) + } } diff --git a/crates/store/re_chunk/src/transport.rs b/crates/store/re_chunk/src/transport.rs index 
c01e940f5c3a3..3888101059905 100644 --- a/crates/store/re_chunk/src/transport.rs +++ b/crates/store/re_chunk/src/transport.rs @@ -246,8 +246,7 @@ impl TransportChunk { pub fn is_sorted(&self) -> bool { self.schema .metadata - .get(Self::CHUNK_METADATA_MARKER_IS_SORTED_BY_ROW_ID) - .is_some() + .contains_key(Self::CHUNK_METADATA_MARKER_IS_SORTED_BY_ROW_ID) } /// Iterates all columns of the specified `kind`. @@ -531,8 +530,7 @@ impl Chunk { let is_sorted = field .metadata - .get(TransportChunk::FIELD_METADATA_MARKER_IS_SORTED_BY_TIME) - .is_some(); + .contains_key(TransportChunk::FIELD_METADATA_MARKER_IS_SORTED_BY_TIME); let time_column = TimeColumn::new( is_sorted.then_some(true), diff --git a/crates/store/re_chunk/src/util.rs b/crates/store/re_chunk/src/util.rs index 7974fe2a33c86..33b394ec26985 100644 --- a/crates/store/re_chunk/src/util.rs +++ b/crates/store/re_chunk/src/util.rs @@ -280,9 +280,13 @@ pub fn pad_list_array_front( ArrowListArray::new(datatype, offsets.into(), values, Some(validity)) } -/// Applies a filter kernel to the given `array`. +/// Applies a [filter] kernel to the given `array`. +/// +/// Note: a `filter` kernel _copies_ the data in order to make the resulting arrays contiguous in memory. /// /// Takes care of up- and down-casting the data back and forth on behalf of the caller. +/// +/// [filter]: arrow2::compute::filter::filter pub fn filter_array(array: &A, filter: &ArrowBooleanArray) -> A { debug_assert!(filter.validity().is_none()); // just for good measure @@ -296,3 +300,31 @@ pub fn filter_array(array: &A, filter: &ArrowBooleanArray .unwrap() .clone() } + +/// Applies a [take] kernel to the given `array`. +/// +/// Note: a `take` kernel _copies_ the data in order to make the resulting arrays contiguous in memory. +/// +/// Takes care of up- and down-casting the data back and forth on behalf of the caller. 
+/// +/// [take]: arrow2::compute::take::take +// +// TODO(cmc): in an ideal world, a `take` kernel should merely _slice_ the data and avoid any allocations/copies +// where possible (e.g. list-arrays). +// That is not possible with vanilla `ListArray`s since they don't expose any way to encode optional lengths, +// in addition to offsets. +// For internal stuff, we could perhaps provide a custom implementation that returns a `DictionaryArray` instead? +pub fn take_array( + array: &A, + indices: &ArrowPrimitiveArray, +) -> A { + #[allow(clippy::unwrap_used)] + arrow2::compute::take::take(array, indices) + // Unwrap: this literally cannot fail. + .unwrap() + .as_any() + .downcast_ref::() + // Unwrap: that's initial type that we got. + .unwrap() + .clone() +} diff --git a/crates/store/re_chunk/tests/memory_test.rs b/crates/store/re_chunk/tests/memory_test.rs new file mode 100644 index 0000000000000..9a502f675f3da --- /dev/null +++ b/crates/store/re_chunk/tests/memory_test.rs @@ -0,0 +1,193 @@ +//! Measures the memory overhead of the chunk store. + +// https://github.com/rust-lang/rust-clippy/issues/10011 +#![cfg(test)] + +use std::sync::atomic::{AtomicUsize, Ordering::Relaxed}; + +thread_local! { + static LIVE_BYTES_IN_THREAD: AtomicUsize = const { AtomicUsize::new(0) }; +} + +pub struct TrackingAllocator { + allocator: std::alloc::System, +} + +#[global_allocator] +pub static GLOBAL_ALLOCATOR: TrackingAllocator = TrackingAllocator { + allocator: std::alloc::System, +}; + +#[allow(unsafe_code)] +// SAFETY: +// We just do book-keeping and then let another allocator do all the actual work. 
+unsafe impl std::alloc::GlobalAlloc for TrackingAllocator { + #[allow(clippy::let_and_return)] + unsafe fn alloc(&self, layout: std::alloc::Layout) -> *mut u8 { + LIVE_BYTES_IN_THREAD.with(|bytes| bytes.fetch_add(layout.size(), Relaxed)); + + // SAFETY: + // Just deferring + unsafe { self.allocator.alloc(layout) } + } + + unsafe fn dealloc(&self, ptr: *mut u8, layout: std::alloc::Layout) { + LIVE_BYTES_IN_THREAD.with(|bytes| bytes.fetch_sub(layout.size(), Relaxed)); + + // SAFETY: + // Just deferring + unsafe { self.allocator.dealloc(ptr, layout) }; + } +} + +fn live_bytes_local() -> usize { + LIVE_BYTES_IN_THREAD.with(|bytes| bytes.load(Relaxed)) +} + +/// Returns `(ret, num_bytes_allocated_by_this_thread)`. +fn memory_use(run: impl Fn() -> R) -> (R, usize) { + let used_bytes_start_local = live_bytes_local(); + let ret = run(); + let bytes_used_local = live_bytes_local() - used_bytes_start_local; + (ret, bytes_used_local) +} + +// ---------------------------------------------------------------------------- + +use arrow2::{ + array::{ + Array as ArrowArray, BooleanArray as ArrowBooleanArray, ListArray as ArrowListArray, + PrimitiveArray as ArrowPrimitiveArray, + }, + offset::Offsets as ArrowOffsets, +}; +use itertools::Itertools as _; + +#[test] +fn filter_does_allocate() { + re_log::setup_logging(); + + const NUM_SCALARS: i64 = 10_000_000; + + let (((unfiltered, unfiltered_size_bytes), (filtered, filtered_size_bytes)), total_size_bytes) = + memory_use(|| { + let unfiltered = memory_use(|| { + let scalars = ArrowPrimitiveArray::from_vec((0..NUM_SCALARS).collect_vec()); + ArrowListArray::::new( + ArrowListArray::::default_datatype(scalars.data_type().clone()), + ArrowOffsets::try_from_lengths( + std::iter::repeat(NUM_SCALARS as usize / 10).take(10), + ) + .unwrap() + .into(), + scalars.to_boxed(), + None, + ) + }); + + let filter = ArrowBooleanArray::from_slice( + (0..unfiltered.0.len()).map(|i| i % 2 == 0).collect_vec(), + ); + let filtered = memory_use(|| 
re_chunk::util::filter_array(&unfiltered.0, &filter)); + + (unfiltered, filtered) + }); + + eprintln!( + "unfiltered={} filtered={} total={}", + re_format::format_bytes(unfiltered_size_bytes as _), + re_format::format_bytes(filtered_size_bytes as _), + re_format::format_bytes(total_size_bytes as _), + ); + + assert!(unfiltered_size_bytes + filtered_size_bytes <= total_size_bytes); + assert!(unfiltered_size_bytes <= filtered_size_bytes * 2); + + { + let unfiltered = unfiltered + .values() + .as_any() + .downcast_ref::>() + .unwrap(); + let filtered = filtered + .values() + .as_any() + .downcast_ref::>() + .unwrap(); + + assert!( + !std::ptr::eq( + unfiltered.values().as_ptr_range().start, + filtered.values().as_ptr_range().start + ), + "data should be copied -- pointers shouldn't match" + ); + } +} + +#[test] +// TODO(cmc): That's the end goal, but it is simply impossible with `ListArray`'s encoding. +// See `Chunk::take_array`'s doc-comment for more information. +#[should_panic = "assertion failed: untaken_size_bytes > taken_size_bytes * 10"] +fn take_does_not_allocate() { + re_log::setup_logging(); + + const NUM_SCALARS: i64 = 10_000_000; + + let (((untaken, untaken_size_bytes), (taken, taken_size_bytes)), total_size_bytes) = + memory_use(|| { + let untaken = memory_use(|| { + let scalars = ArrowPrimitiveArray::from_vec((0..NUM_SCALARS).collect_vec()); + ArrowListArray::::new( + ArrowListArray::::default_datatype(scalars.data_type().clone()), + ArrowOffsets::try_from_lengths( + std::iter::repeat(NUM_SCALARS as usize / 10).take(10), + ) + .unwrap() + .into(), + scalars.to_boxed(), + None, + ) + }); + + let indices = ArrowPrimitiveArray::from_vec( + (0..untaken.0.len() as i32) + .filter(|i| i % 2 == 0) + .collect_vec(), + ); + let taken = memory_use(|| re_chunk::util::take_array(&untaken.0, &indices)); + + (untaken, taken) + }); + + eprintln!( + "untaken={} taken={} total={}", + re_format::format_bytes(untaken_size_bytes as _), + 
re_format::format_bytes(taken_size_bytes as _), + re_format::format_bytes(total_size_bytes as _), + ); + + assert!(untaken_size_bytes + taken_size_bytes <= total_size_bytes); + assert!(untaken_size_bytes > taken_size_bytes * 10); + + { + let untaken = untaken + .values() + .as_any() + .downcast_ref::>() + .unwrap(); + let taken = taken + .values() + .as_any() + .downcast_ref::>() + .unwrap(); + + assert!( + std::ptr::eq( + untaken.values().as_ptr_range().start, + taken.values().as_ptr_range().start + ), + "data shouldn't be duplicated -- pointers should match" + ); + } +} diff --git a/crates/store/re_chunk_store/src/dataframe.rs b/crates/store/re_chunk_store/src/dataframe.rs index 0a4b01e8f19d4..357d22d976af4 100644 --- a/crates/store/re_chunk_store/src/dataframe.rs +++ b/crates/store/re_chunk_store/src/dataframe.rs @@ -1,6 +1,6 @@ //! All the APIs used specifically for `re_dataframe`. -use std::collections::BTreeSet; +use std::collections::{BTreeMap, BTreeSet}; use ahash::HashSet; use arrow2::{ @@ -242,6 +242,8 @@ pub struct ComponentColumnDescriptor { pub store_datatype: ArrowDatatype, /// How the data will be joined into the resulting `RecordBatch`. + // + // TODO(cmc): remove with the old re_dataframe. pub join_encoding: JoinEncoding, /// Whether this column represents static data. @@ -445,9 +447,13 @@ impl From for ColumnSelector { /// Select a control column. /// /// The only control column currently supported is `rerun.components.RowId`. +// +// TODO(cmc): `RowId` shouldnt be a control column at this point, it should be yet another index. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ControlColumnSelector { /// Name of the control column. + // + // TODO(cmc): this should be `component_name`. pub component: ComponentName, } @@ -470,6 +476,9 @@ impl From for ControlColumnSelector { } /// Select a time column. +// +// TODO(cmc): This shouldn't be specific to time, this should be an `IndexColumnSelector` or smth. 
+// Particularly unfortunate that this one already leaks into the public APIā€¦ #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TimeColumnSelector { /// The name of the timeline. @@ -497,9 +506,13 @@ pub struct ComponentColumnSelector { pub entity_path: EntityPath, /// Semantic name associated with this data. + // + // TODO(cmc): this should be `component_name`. pub component: ComponentName, /// How to join the data into the `RecordBatch`. + // + // TODO(cmc): remove once old `re_dataframe` is gone. pub join_encoding: JoinEncoding, } @@ -701,6 +714,192 @@ impl std::fmt::Display for RangeQueryExpression { } } +// --- Queries v2 --- + +/// Specifies how null values should be filled in the returned dataframe. +#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)] +pub enum SparseFillStrategy { + /// No sparse filling. Nulls stay nulls. + #[default] + None, + + /// Fill null values using global-scope latest-at semantics. + /// + /// The latest-at semantics are applied on the entire dataset as opposed to just the current + /// view contents: it is possible to end up with values from outside the view! + LatestAtGlobal, + // + // TODO(cmc): `LatestAtView`? +} + +impl std::fmt::Display for SparseFillStrategy { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::None => f.write_str("none"), + Self::LatestAtGlobal => f.write_str("latest-at (global)"), + } + } +} + +/// The view contents specify which subset of the database (i.e., which columns) the query runs on, +/// expressed as a set of [`EntityPath`]s and their associated [`ComponentName`]s. +/// +/// Setting an entity's components to `None` means: everything. +/// +// TODO(cmc): we need to be able to build that easily in a command-line context, otherwise it's just +// very annoying. E.g. `--with /world/points:[rr.Position3D, rr.Radius] --with /cam:[rr.Pinhole]`. 
+pub type ViewContentsSelector = BTreeMap>>; + +// TODO(cmc): Ultimately, this shouldn't be hardcoded to `Timeline`, but to a generic `I: Index`. +// `Index` in this case should also be implemented on tuples (`(I1, I2, ...)`). +pub type Index = Timeline; + +// TODO(cmc): Ultimately, this shouldn't be hardcoded to `TimeInt`, but to a generic `I: Index`. +// `Index` in this case should also be implemented on tuples (`(I1, I2, ...)`). +pub type IndexValue = TimeInt; + +// TODO(cmc): Ultimately, this shouldn't be hardcoded to `ResolvedTimeRange`, but to a generic `I: Index`. +// `Index` in this case should also be implemented on tuples (`(I1, I2, ...)`). +pub type IndexRange = ResolvedTimeRange; + +/// Describes a complete query for Rerun's dataframe API. +/// +/// ## Terminology: view vs. selection vs. filtering vs. sampling +/// +/// * The view contents specify which subset of the database (i.e., which columns) the query runs on, +/// expressed as a set of [`EntityPath`]s and their associated [`ComponentName`]s. +/// +/// * The filters filter out _rows_ of data from the view contents. +/// A filter cannot possibly introduce new rows, it can only remove existing ones from the view contents. +/// +/// * The samplers sample _rows_ of data from the view contents at user-specified values. +/// Samplers don't necessarily return existing rows: they might introduce new ones if the sampled value +/// isn't present in the view contents in the first place. +/// +/// * The selection applies last and samples _columns_ of data from the filtered/sampled view contents. +/// Selecting a column that isn't present in the view contents results in an empty column in the +/// final dataframe (null array). +/// +/// A very rough mental model, in SQL terms: +/// ```text +/// SELECT FROM WHERE +/// ``` +// +// TODO(cmc): ideally we'd like this to be the same type as the one used in the blueprint, possibly? 
+// TODO(cmc): Get rid of all re_dataframe (as opposed to re_dataframe2) stuff and rename this. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct QueryExpression2 { + /// The subset of the database that the query will run on: a set of [`EntityPath`]s and their + /// associated [`ComponentName`]s. + /// + /// Defaults to `None`, which means: everything. + /// + /// Example (pseudo-code): + /// ```text + /// view_contents = { + /// "world/points": [rr.Position3D, rr.Radius], + /// "metrics": [rr.Scalar] + /// } + /// ``` + pub view_contents: Option, + + /// The index used to filter out _rows_ from the view contents. + /// + /// Only rows where at least 1 column contains non-null data at that index will be kept in the + /// final dataset. + /// + /// Example: `Timeline("frame")`. + // + // TODO(cmc): this has to be a selector otherwise this is a horrible UX. + pub filtered_index: Timeline, + + /// The range of index values used to filter out _rows_ from the view contents. + /// + /// Only rows where at least 1 of the view-contents contains non-null data within that range will be kept in + /// the final dataset. + /// + /// This is ignored if [`QueryExpression2::sampled_index_values`] is set. + /// + /// Example: `ResolvedTimeRange(10, 20)`. + pub filtered_index_range: Option, + + /// TODO(cmc): NOT IMPLEMENTED. + /// + /// The specific index values used to filter out _rows_ from the view contents. + /// + /// Only rows where at least 1 column contains non-null data at these specific values will be kept + /// in the final dataset. + /// + /// This is ignored if [`QueryExpression2::sampled_index_values`] is set. + /// + /// Example: `[TimeInt(12), TimeInt(14)]`. + pub filtered_index_values: Option>, + + /// TODO(cmc): NOT IMPLEMENTED. + /// + /// The specific index values used to sample _rows_ from the view contents. 
+ /// + /// The final dataset will contain one row per sampled index value, regardless of whether data + /// existed for that index value in the view contents. + /// + /// The order of the samples will be respected in the final result. + /// + /// If [`QueryExpression2::sampled_index_values`] is set, it overrides both [`QueryExpression2::filtered_index_range`] + /// and [`QueryExpression2::filtered_index_values`]. + /// + /// Example: `[TimeInt(12), TimeInt(14)]`. + // + // TODO(jleibs): We need an alternative name for sampled. + pub sampled_index_values: Option>, + + /// TODO(cmc): NOT IMPLEMENTED. + /// + /// The component column used to filter out _rows_ from the view contents. + /// + /// Only rows where this column contains non-null data be kept in the final dataset. + /// + /// Example: `ComponentColumnSelector("rerun.components.Position3D")`. + // + // TODO(cmc): multi-pov support + pub filtered_point_of_view: Option, + + /// TODO(cmc): NOT IMPLEMENTED. + /// + /// Specifies how null values should be filled in the returned dataframe. + /// + /// Defaults to [`SparseFillStrategy::None`]. + pub sparse_fill_strategy: SparseFillStrategy, + + /// The specific _columns_ to sample from the final view contents. + /// + /// The order of the samples will be respected in the final result. + /// + /// Defaults to `None`, which means: everything. + /// + /// Example: `[ColumnSelector(Time("log_time")), ColumnSelector(Component("rerun.components.Position3D"))]`. + // + // TODO(cmc): the selection has to be on the QueryHandle, otherwise it's hell to use. 
+ pub selection: Option>, +} + +impl QueryExpression2 { + #[inline] + pub fn new(index: impl Into) -> Self { + let index = index.into(); + + Self { + view_contents: None, + filtered_index: index, + filtered_index_range: None, + filtered_index_values: None, + sampled_index_values: None, + filtered_point_of_view: None, + sparse_fill_strategy: SparseFillStrategy::None, + selection: None, + } + } +} + // --- impl ChunkStore { @@ -954,4 +1153,31 @@ impl ChunkStore { .filter(|descr| !filtered_out.contains(descr)) .collect() } + + /// Returns the filtered schema for the given [`ViewContentsSelector`]. + /// + /// The order of the columns is guaranteed to be in a specific order: + /// * first, the control columns in lexical order (`RowId`); + /// * second, the time columns in lexical order (`frame_nr`, `log_time`, ...); + /// * third, the component columns in lexical order (`Color`, `Radius, ...`). + pub fn schema_for_view_contents( + &self, + view_contents: &ViewContentsSelector, + ) -> Vec { + re_tracing::profile_function!(); + + self.schema() + .into_iter() + .filter(|column| match column { + ColumnDescriptor::Control(_) | ColumnDescriptor::Time(_) => true, + ColumnDescriptor::Component(column) => view_contents + .get(&column.entity_path) + .map_or(false, |components| { + components.as_ref().map_or(true, |components| { + components.contains(&column.component_name) + }) + }), + }) + .collect() + } } diff --git a/crates/store/re_chunk_store/src/events.rs b/crates/store/re_chunk_store/src/events.rs index a76ccf33fbf8d..62e990de35f43 100644 --- a/crates/store/re_chunk_store/src/events.rs +++ b/crates/store/re_chunk_store/src/events.rs @@ -249,8 +249,7 @@ mod tests { .or_default() += delta_chunks; for (component_name, list_array) in event.chunk.components() { - let delta = - event.delta() * list_array.iter().filter(Option::is_some).count() as i64; + let delta = event.delta() * list_array.iter().flatten().count() as i64; 
*self.component_names.entry(*component_name).or_default() += delta; } diff --git a/crates/store/re_chunk_store/src/lib.rs b/crates/store/re_chunk_store/src/lib.rs index 66d19b1f26583..54f0df1f8e8a0 100644 --- a/crates/store/re_chunk_store/src/lib.rs +++ b/crates/store/re_chunk_store/src/lib.rs @@ -25,8 +25,9 @@ mod writes; pub use self::dataframe::{ ColumnDescriptor, ColumnSelector, ComponentColumnDescriptor, ComponentColumnSelector, - ControlColumnDescriptor, ControlColumnSelector, JoinEncoding, LatestAtQueryExpression, - QueryExpression, RangeQueryExpression, TimeColumnDescriptor, TimeColumnSelector, + ControlColumnDescriptor, ControlColumnSelector, Index, IndexRange, IndexValue, JoinEncoding, + LatestAtQueryExpression, QueryExpression, QueryExpression2, RangeQueryExpression, + SparseFillStrategy, TimeColumnDescriptor, TimeColumnSelector, ViewContentsSelector, }; pub use self::events::{ChunkStoreDiff, ChunkStoreDiffKind, ChunkStoreEvent}; pub use self::gc::{GarbageCollectionOptions, GarbageCollectionTarget}; diff --git a/crates/store/re_chunk_store/tests/memory_test.rs b/crates/store/re_chunk_store/tests/memory_test.rs index 1eef7c0592fc6..54b90648ef3c9 100644 --- a/crates/store/re_chunk_store/tests/memory_test.rs +++ b/crates/store/re_chunk_store/tests/memory_test.rs @@ -11,7 +11,7 @@ use std::sync::{ static LIVE_BYTES_GLOBAL: AtomicUsize = AtomicUsize::new(0); thread_local! 
{ - static LIVE_BYTES_IN_THREAD: AtomicUsize = AtomicUsize::new(0); + static LIVE_BYTES_IN_THREAD: AtomicUsize = const { AtomicUsize::new(0) }; } pub struct TrackingAllocator { diff --git a/crates/store/re_data_loader/src/loader_rrd.rs b/crates/store/re_data_loader/src/loader_rrd.rs index 6a71fa06d517c..8b35851cf91e9 100644 --- a/crates/store/re_data_loader/src/loader_rrd.rs +++ b/crates/store/re_data_loader/src/loader_rrd.rs @@ -1,5 +1,3 @@ -use std::{io::Read, sync::mpsc::Receiver}; - use re_log_encoding::decoder::Decoder; // --- @@ -144,15 +142,16 @@ fn decode_and_stream( // Retryable file reader that keeps retrying to read more data despite // reading zero bytes or reaching EOF. +#[cfg(not(target_arch = "wasm32"))] struct RetryableFileReader { reader: std::io::BufReader, - rx: Receiver>, + rx: std::sync::mpsc::Receiver>, #[allow(dead_code)] watcher: notify::RecommendedWatcher, } +#[cfg(not(target_arch = "wasm32"))] impl RetryableFileReader { - #[cfg(not(target_arch = "wasm32"))] fn new(filepath: &std::path::Path) -> Result { use anyhow::Context as _; use notify::{RecursiveMode, Watcher}; @@ -177,7 +176,8 @@ impl RetryableFileReader { } } -impl Read for RetryableFileReader { +#[cfg(not(target_arch = "wasm32"))] +impl std::io::Read for RetryableFileReader { fn read(&mut self, buf: &mut [u8]) -> std::io::Result { loop { match self.reader.read(buf) { @@ -194,6 +194,7 @@ impl Read for RetryableFileReader { } } +#[cfg(not(target_arch = "wasm32"))] impl RetryableFileReader { fn block_until_file_changes(&self) -> std::io::Result { #[allow(clippy::disallowed_methods)] @@ -241,7 +242,7 @@ mod tests { }; std::fs::remove_file(&rrd_file_path).ok(); // Remove the file just in case a previous test crashes hard. 
let rrd_file = std::fs::OpenOptions::new() - .create(true) + .create_new(true) .write(true) .open(rrd_file_path.to_str().unwrap()) .unwrap(); diff --git a/crates/store/re_dataframe/src/latest_at.rs b/crates/store/re_dataframe/src/latest_at.rs index f661289e8a55a..ad7bb6f103a62 100644 --- a/crates/store/re_dataframe/src/latest_at.rs +++ b/crates/store/re_dataframe/src/latest_at.rs @@ -161,7 +161,7 @@ impl LatestAtQueryHandle<'_> { // If the query didn't return anything at all, we just want a properly empty Recordbatch with // the right schema. - let null_array_length = max_time_per_timeline.get(&self.query.timeline).is_some() as usize; + let null_array_length = max_time_per_timeline.contains_key(&self.query.timeline) as usize; // NOTE: Keep in mind this must match the ordering specified by `Self::schema`. let packed_arrays = { diff --git a/crates/store/re_dataframe2/Cargo.toml b/crates/store/re_dataframe2/Cargo.toml new file mode 100644 index 0000000000000..0e6ffa0bbfc69 --- /dev/null +++ b/crates/store/re_dataframe2/Cargo.toml @@ -0,0 +1,52 @@ +[package] +name = "re_dataframe2" +authors.workspace = true +description = "High-level query APIs" +edition.workspace = true +homepage.workspace = true +include.workspace = true +license.workspace = true +publish = true +readme = "README.md" +repository.workspace = true +rust-version.workspace = true +version.workspace = true + +[lints] +workspace = true + +[package.metadata.docs.rs] +all-features = true + + +[features] +default = [] + + +[dependencies] +# Rerun dependencies: +re_chunk.workspace = true +re_chunk_store.workspace = true +re_error.workspace = true +re_format.workspace = true +re_log.workspace = true +re_log_types.workspace = true +re_query.workspace = true +re_tracing.workspace = true +re_types_core.workspace = true + +# External dependencies: +ahash.workspace = true +anyhow.workspace = true +arrow2.workspace = true +backtrace.workspace = true +indent.workspace = true +itertools.workspace = true 
+nohash-hasher.workspace = true +parking_lot.workspace = true +paste.workspace = true +seq-macro.workspace = true +thiserror.workspace = true + +[dev-dependencies] +re_types.workspace = true diff --git a/crates/store/re_dataframe2/README.md b/crates/store/re_dataframe2/README.md new file mode 100644 index 0000000000000..4d7c30cbb4301 --- /dev/null +++ b/crates/store/re_dataframe2/README.md @@ -0,0 +1,10 @@ +# re_dataframe2 + +Part of the [`rerun`](https://github.com/rerun-io/rerun) family of crates. + +[![Latest version](https://img.shields.io/crates/v/re_dataframe2.svg)](https://crates.io/crates/re_dataframe2) +[![Documentation](https://docs.rs/re_dataframe2/badge.svg)](https://docs.rs/re_dataframe2) +![MIT](https://img.shields.io/badge/license-MIT-blue.svg) +![Apache](https://img.shields.io/badge/license-Apache-blue.svg) + +The Rerun public data APIs. Get dataframes back from your Rerun datastore. diff --git a/crates/store/re_dataframe2/examples/query.rs b/crates/store/re_dataframe2/examples/query.rs new file mode 100644 index 0000000000000..a7ca5ff0119d6 --- /dev/null +++ b/crates/store/re_dataframe2/examples/query.rs @@ -0,0 +1,78 @@ +#![allow(clippy::unwrap_used, clippy::match_same_arms)] + +use itertools::Itertools; + +use re_chunk::TimeInt; +use re_chunk_store::{ChunkStore, ChunkStoreConfig, QueryExpression2, Timeline, VersionPolicy}; +use re_dataframe2::{QueryCache, QueryEngine}; +use re_log_types::{EntityPathFilter, ResolvedTimeRange, StoreKind}; + +fn main() -> anyhow::Result<()> { + let args = std::env::args().collect_vec(); + + let get_arg = |i| { + let Some(value) = args.get(i) else { + eprintln!( + "Usage: {} [timeline] [from] [to] [entity_path_filter]", + args.first().map_or("$BIN", |s| s.as_str()) + ); + std::process::exit(1); + }; + value + }; + + let path_to_rrd = get_arg(1); + let timeline_name = args.get(2).map_or("log_time", |s| s.as_str()); + let time_from = args.get(3).map_or(TimeInt::MIN, |s| { + 
TimeInt::new_temporal(s.parse::().unwrap()) + }); + let time_to = args.get(4).map_or(TimeInt::MAX, |s| { + TimeInt::new_temporal(s.parse::().unwrap()) + }); + let entity_path_filter = EntityPathFilter::try_from(args.get(5).map_or("/**", |s| s.as_str()))?; + + // TODO(cmc): We need to take a selector, not a Timeline. + let timeline = match timeline_name { + "log_time" => Timeline::new_temporal(timeline_name), + "log_tick" => Timeline::new_sequence(timeline_name), + "frame" => Timeline::new_sequence(timeline_name), + "frame_nr" => Timeline::new_sequence(timeline_name), + _ => Timeline::new_temporal(timeline_name), + }; + + let stores = ChunkStore::from_rrd_filepath( + &ChunkStoreConfig::DEFAULT, + path_to_rrd, + VersionPolicy::Warn, + )?; + + for (store_id, store) in &stores { + if store_id.kind != StoreKind::Recording { + continue; + } + + let query_cache = QueryCache::new(store); + let query_engine = QueryEngine { + store, + cache: &query_cache, + }; + + let mut query = QueryExpression2::new(timeline); + query.view_contents = Some( + query_engine + .iter_entity_paths(&entity_path_filter) + .map(|entity_path| (entity_path, None)) + .collect(), + ); + query.filtered_index_range = Some(ResolvedTimeRange::new(time_from, time_to)); + eprintln!("{query:#?}:"); + + let query_handle = query_engine.query(query.clone()); + // eprintln!("{:#?}", query_handle.selected_contents()); + for batch in query_handle.into_batch_iter() { + eprintln!("{batch}"); + } + } + + Ok(()) +} diff --git a/crates/store/re_dataframe2/src/engine.rs b/crates/store/re_dataframe2/src/engine.rs new file mode 100644 index 0000000000000..4b6d45f8aef27 --- /dev/null +++ b/crates/store/re_dataframe2/src/engine.rs @@ -0,0 +1,96 @@ +use re_chunk::{EntityPath, TransportChunk}; +use re_chunk_store::{ + ChunkStore, ColumnDescriptor, QueryExpression, QueryExpression2, ViewContentsSelector, +}; +use re_log_types::EntityPathFilter; +use re_query::Caches; + +use crate::QueryHandle; + +// Used all over in docstrings. 
+#[allow(unused_imports)] +use re_chunk_store::ComponentColumnDescriptor; + +// --- + +// TODO(#3741): `arrow2` has no concept of a `RecordBatch`, so for now we just use our trustworthy +// `TransportChunk` type until we migrate to `arrow-rs`. +// `TransportChunk` maps 1:1 to `RecordBatch` so the switch (and the compatibility layer in the meantime) +// will be trivial. +// TODO(cmc): add an `arrow` feature to transportchunk in a follow-up pr and call it a day. +pub type RecordBatch = TransportChunk; + +// --- Queries --- + +/// A handle to our user-facing query engine. +/// +/// See the following methods: +/// * [`QueryEngine::schema`]: get the complete schema of the recording. +/// * [`QueryEngine::query`]: execute a [`QueryExpression2`] on the recording. +// +// TODO(cmc): This needs to be a refcounted type that can be easily be passed around: the ref has +// got to go. But for that we need to generally introduce `ChunkStoreHandle` and `QueryCacheHandle` +// first, and this is not as straightforward as it seems. +pub struct QueryEngine<'a> { + pub store: &'a ChunkStore, + pub cache: &'a Caches, +} + +impl QueryEngine<'_> { + /// Returns the full schema of the store. + /// + /// This will include a column descriptor for every timeline and every component on every + /// entity that has been written to the store so far. + /// + /// The order of the columns to guaranteed to be in a specific order: + /// * first, the control columns in lexical order (`RowId`); + /// * second, the time columns in lexical order (`frame_nr`, `log_time`, ...); + /// * third, the component columns in lexical order (`Color`, `Radius, ...`). + #[inline] + pub fn schema(&self) -> Vec { + self.store.schema() + } + + /// Returns the filtered schema for the given query expression. + /// + /// This will only include columns which may contain non-empty values from the perspective of + /// the query semantics. 
+ /// + /// The order of the columns is guaranteed to be in a specific order: + /// * first, the control columns in lexical order (`RowId`); + /// * second, the time columns in lexical order (`frame_nr`, `log_time`, ...); + /// * third, the component columns in lexical order (`Color`, `Radius, ...`). + /// + /// This does not run a full-blown query, but rather just inspects `Chunk`-level metadata, + /// which can lead to false positives, but makes this very cheap to compute. + #[inline] + pub fn schema_for_query(&self, query: &QueryExpression) -> Vec { + self.store.schema_for_query(query) + } + + #[inline] + pub fn schema_for_view_contents( + &self, + view_contents: &ViewContentsSelector, + ) -> Vec { + self.store.schema_for_view_contents(view_contents) + } + + /// Starts a new query by instantiating a [`QueryHandle`]. + #[inline] + pub fn query(&self, query: QueryExpression2) -> QueryHandle<'_> { + QueryHandle::new(self, query) + } + + /// Returns an iterator over all the [`EntityPath`]s present in the database. + #[inline] + pub fn iter_entity_paths<'a>( + &self, + filter: &'a EntityPathFilter, + ) -> impl Iterator + 'a { + self.store + .all_entities() + .into_iter() + .filter(|entity_path| filter.matches(entity_path)) + } +} diff --git a/crates/store/re_dataframe2/src/lib.rs b/crates/store/re_dataframe2/src/lib.rs new file mode 100644 index 0000000000000..3d387c2954196 --- /dev/null +++ b/crates/store/re_dataframe2/src/lib.rs @@ -0,0 +1,20 @@ +//! The Rerun public data APIs. Get dataframes back from your Rerun datastore. 
+ +mod engine; +mod query; + +pub use self::engine::{QueryEngine, RecordBatch}; +pub use self::query::QueryHandle; + +#[doc(no_inline)] +pub use self::external::arrow2::chunk::Chunk as ArrowChunk; +#[doc(no_inline)] +pub use self::external::re_query::Caches as QueryCache; + +pub mod external { + pub use re_chunk; + pub use re_chunk_store; + pub use re_query; + + pub use arrow2; +} diff --git a/crates/store/re_dataframe2/src/query.rs b/crates/store/re_dataframe2/src/query.rs new file mode 100644 index 0000000000000..2109d97756088 --- /dev/null +++ b/crates/store/re_dataframe2/src/query.rs @@ -0,0 +1,728 @@ +use std::sync::{ + atomic::{AtomicU64, Ordering}, + OnceLock, +}; + +use ahash::HashSet; +use arrow2::{ + array::Array as ArrowArray, chunk::Chunk as ArrowChunk, datatypes::Schema as ArrowSchema, +}; +use itertools::Itertools as _; + +use nohash_hasher::IntMap; +use re_chunk::{Chunk, RangeQuery, RowId, TimeInt, Timeline}; +use re_chunk_store::{ + ColumnDescriptor, ColumnSelector, ComponentColumnDescriptor, ComponentColumnSelector, + ControlColumnDescriptor, ControlColumnSelector, JoinEncoding, QueryExpression2, + TimeColumnDescriptor, TimeColumnSelector, +}; +use re_log_types::ResolvedTimeRange; + +use crate::{QueryEngine, RecordBatch}; + +// --- + +// TODO(cmc): (no specific order) (should we make issues for these?) +// * [x] basic thing working +// * [x] custom selection +// * [x] support for overlaps (slow) +// * [x] pagination (any solution, even a slow one) +// * [ ] overlaps (less dumb) +// * [ ] selector-based `filtered_index` +// * [ ] clears +// * [ ] latestat sparse-filling +// * [ ] pagination (fast) +// * [ ] pov support +// * [ ] sampling support +// * [ ] configurable cache bypass +// * [ ] allocate null arrays once +// * [ ] take kernel duplicates all memory +// * [ ] dedupe-latest without allocs/copies + +/// A handle to a dataframe query, ready to be executed. +/// +/// Cheaply created via [`QueryEngine::query`]. 
+/// +/// See [`QueryHandle::next_row`] or [`QueryHandle::into_iter`]. +pub struct QueryHandle<'a> { + /// Handle to the [`QueryEngine`]. + pub(crate) engine: &'a QueryEngine<'a>, + + /// The original query expression used to instantiate this handle. + pub(crate) query: QueryExpression2, + + /// Internal private state. Lazily computed. + /// + /// It is important that handles stay cheap to create. + state: OnceLock, +} + +/// Internal private state. Lazily computed. +struct QueryHandleState { + /// Describes the columns that make up this view. + /// + /// See [`QueryExpression2::view_contents`]. + view_contents: Vec, + + /// Describes the columns specifically selected to be returned from this view. + /// + /// All returned rows will have an Arrow schema that matches this selection. + /// + /// Columns that do not yield any data will still be present in the results, filled with null values. + /// + /// The extra `usize` is the index in [`QueryHandleState::view_contents`] that this selection + /// points to. + /// + /// See also [`QueryHandleState::arrow_schema`]. + selected_contents: Vec<(usize, ColumnDescriptor)>, + + /// The Arrow schema that corresponds to the `selected_contents`. + /// + /// All returned rows will have this schema. + arrow_schema: ArrowSchema, + + /// All the [`Chunk`]s included in the view contents. + /// + /// These are already sorted, densified, vertically sliced, and [latest-deduped] according + /// to the query. + /// + /// The atomic counter is used as a cursor which keeps track of our current position within + /// each individual chunk. + /// Because chunks are allowed to overlap, we might need to rebound between two or more chunks + /// during our iteration. + /// + /// This vector's entries correspond to those in [`QueryHandleState::view_contents`]. + /// Note: time and column entries don't have chunks -- inner vectors will be empty. 
+ /// + /// [latest-deduped]: [`Chunk::deduped_latest_on_index`] + // + // NOTE: Reminder: we have to query everything in the _view_, irrelevant of the current selection. + view_chunks: Vec>, + + /// Tracks the current row index: the position of the iterator. For [`QueryHandle::next_row`]. + /// + /// This represents the number of rows that the caller has iterated on: it is completely + /// unrelated to the cursors used to track the current position in each individual chunk. + cur_row: AtomicU64, +} + +impl<'a> QueryHandle<'a> { + pub(crate) fn new(engine: &'a QueryEngine<'a>, query: QueryExpression2) -> Self { + Self { + engine, + query, + state: Default::default(), + } + } +} + +impl QueryHandle<'_> { + /// Lazily initialize internal private state. + /// + /// It is important that query handles stay cheap to create. + fn init(&self) -> &QueryHandleState { + self.state.get_or_init(|| self.init_()) + } + + // NOTE: This is split in its own method otherwise it completely breaks `rustfmt`. + fn init_(&self) -> QueryHandleState { + re_tracing::profile_scope!("init"); + + // 1. Compute the schema of the view contents. + let view_contents = if let Some(view_contents) = self.query.view_contents.as_ref() { + self.engine.store.schema_for_view_contents(view_contents) + } else { + self.engine.store.schema() + }; + + // 2. Compute the schema of the selected contents. + // + // The caller might have selected columns that do not exist in the view: they should + // still appear in the results. 
+ let selected_contents: Vec<(_, _)> = if let Some(selection) = self.query.selection.as_ref() + { + selection + .iter() + .map(|column| { + match column { + ColumnSelector::Control(selected_column) => { + let ControlColumnSelector { + component: selected_component_name, + } = selected_column; + + view_contents + .iter() + .enumerate() + .filter_map(|(idx, view_column)| match view_column { + ColumnDescriptor::Control(view_descr) => { + Some((idx, view_descr)) + } + _ => None, + }) + .find(|(_idx, view_descr)| { + view_descr.component_name == *selected_component_name + }) + .map_or_else( + || { + ( + usize::MAX, + ColumnDescriptor::Control(ControlColumnDescriptor { + component_name: *selected_component_name, + datatype: arrow2::datatypes::DataType::Null, + }), + ) + }, + |(idx, view_descr)| { + (idx, ColumnDescriptor::Control(view_descr.clone())) + }, + ) + } + + ColumnSelector::Time(selected_column) => { + let TimeColumnSelector { + timeline: selected_timeline, + } = selected_column; + + view_contents + .iter() + .enumerate() + .filter_map(|(idx, view_column)| match view_column { + ColumnDescriptor::Time(view_descr) => Some((idx, view_descr)), + _ => None, + }) + .find(|(_idx, view_descr)| { + *view_descr.timeline.name() == *selected_timeline + }) + .map_or_else( + || { + ( + usize::MAX, + ColumnDescriptor::Time(TimeColumnDescriptor { + // TODO(cmc): I picked a sequence here because I have to pick something. + // It doesn't matter, only the name will remain in the Arrow schema anyhow. 
+ timeline: Timeline::new_sequence( + *selected_timeline, + ), + datatype: arrow2::datatypes::DataType::Null, + }), + ) + }, + |(idx, view_descr)| { + (idx, ColumnDescriptor::Time(view_descr.clone())) + }, + ) + } + + ColumnSelector::Component(selected_column) => { + let ComponentColumnSelector { + entity_path: selected_entity_path, + component: selected_component_name, + join_encoding: _, + } = selected_column; + + view_contents + .iter() + .enumerate() + .filter_map(|(idx, view_column)| match view_column { + ColumnDescriptor::Component(view_descr) => { + Some((idx, view_descr)) + } + _ => None, + }) + .find(|(_idx, view_descr)| { + view_descr.entity_path == *selected_entity_path + && view_descr.component_name == *selected_component_name + }) + .map_or_else( + || { + ( + usize::MAX, + ColumnDescriptor::Component( + ComponentColumnDescriptor { + entity_path: selected_entity_path.clone(), + archetype_name: None, + archetype_field_name: None, + component_name: *selected_component_name, + store_datatype: + arrow2::datatypes::DataType::Null, + join_encoding: JoinEncoding::default(), + is_static: false, + }, + ), + ) + }, + |(idx, view_descr)| { + (idx, ColumnDescriptor::Component(view_descr.clone())) + }, + ) + } + } + }) + .collect_vec() + } else { + view_contents.clone().into_iter().enumerate().collect() + }; + + // 3. Compute the Arrow schema of the selected components. + // + // Every result returned using this `QueryHandle` will match this schema exactly. + let arrow_schema = ArrowSchema { + fields: selected_contents + .iter() + .map(|(_, descr)| descr.to_arrow_field()) + .collect_vec(), + metadata: Default::default(), + }; + + // 4. Perform the query and keep track of all the relevant chunks. + let view_chunks = { + let index_range = self + .query + .filtered_index_range + .unwrap_or(ResolvedTimeRange::EVERYTHING); + + let query = RangeQuery::new(self.query.filtered_index, index_range) + .keep_extra_timelines(true) // we want all the timelines we can get! 
+ .keep_extra_components(false); + + view_contents + .iter() + .map(|selected_column| match selected_column { + ColumnDescriptor::Control(_) | ColumnDescriptor::Time(_) => Vec::new(), + + ColumnDescriptor::Component(column) => { + // NOTE: Keep in mind that the range APIs natively make sure that we will + // either get a bunch of relevant _static_ chunks, or a bunch of relevant + // _temporal_ chunks, but never both. + // + // TODO(cmc): Going through the cache is very useful in a Viewer context, but + // not so much in an SDK context. Make it configurable. + let results = self.engine.cache.range( + self.engine.store, + &query, + &column.entity_path, + [column.component_name], + ); + + debug_assert!( + results.components.len() <= 1, + "cannot possibly get more than one component with this query" + ); + + results + .components + .into_iter() + .next() + .map(|(_component_name, chunks)| { + chunks + .into_iter() + .map(|chunk| { + // NOTE: Keep in mind that the range APIs would have already taken care + // of A) sorting the chunk on the `filtered_index` (and row-id) and + // B) densifying it according to the current `component_name`. + // Both of these are mandatory requirements for the deduplication logic to + // do what we want: keep the latest known value for `component_name` at all + // remaining unique index values all while taking row-id ordering semantics + // into account. + debug_assert!( + chunk.is_sorted(), + "the query cache should have already taken care of sorting (and densifying!) the chunk", + ); + + let chunk = chunk.deduped_latest_on_index(&self.query.filtered_index); + + (AtomicU64::default(), chunk) + }) + .collect_vec() + }) + .unwrap_or_default() + }, + }) + .collect() + }; + + QueryHandleState { + view_contents, + selected_contents, + arrow_schema, + view_chunks, + cur_row: AtomicU64::new(0), + } + } + + /// The query used to instantiate this handle. 
+ pub fn query(&self) -> &QueryExpression2 { + &self.query + } + + /// Describes the columns that make up this view. + /// + /// See [`QueryExpression2::view_contents`]. + pub fn view_contents(&self) -> &[ColumnDescriptor] { + &self.init().view_contents + } + + /// Describes the columns that make up this selection. + /// + /// The extra `usize` is the index in [`Self::view_contents`] that this selection points to. + /// + /// See [`QueryExpression2::selection`]. + pub fn selected_contents(&self) -> &[(usize, ColumnDescriptor)] { + &self.init().selected_contents + } + + /// All results returned by this handle will strictly follow this Arrow schema. + /// + /// Columns that do not yield any data will still be present in the results, filled with null values. + pub fn schema(&self) -> &ArrowSchema { + &self.init().arrow_schema + } + + /// How many rows of data will be returned? + /// + /// The number of rows depends and only depends on the _view contents_. + /// The _selected contents_ has no influence on this value. + // + // TODO(cmc): implement this properly, cache the result, etc. + pub fn num_rows(&self) -> u64 { + let all_unique_timestamps: HashSet = self + .init() + .view_chunks + .iter() + .flat_map(|chunks| { + chunks.iter().filter_map(|(_cursor, chunk)| { + chunk + .timelines() + .get(&self.query.filtered_index) + .map(|time_column| time_column.times()) + }) + }) + .flatten() + .collect(); + + all_unique_timestamps.len() as _ + } + + /// Returns the next row's worth of data. + /// + /// The returned vector of Arrow arrays strictly follows the schema specified by [`Self::schema`]. + /// Columns that do not yield any data will still be present in the results, filled with null values. + /// + /// Each cell in the result corresponds to the latest _locally_ known value at that particular point in + /// the index, for each respective `ColumnDescriptor`. + /// See [`QueryExpression2::sparse_fill_strategy`] to go beyond local resolution. 
+ /// + /// Example: + /// ```ignore + /// while let Some(row) = query_handle.next_row() { + /// // ā€¦ + /// } + /// ``` + /// + /// ## Pagination + /// + /// This does not offer any kind of native pagination yet. + /// + /// To emulate pagination from user-space, use the `Iterator` API: + /// ```ignore + /// for row in query_handle.into_iter().skip(offset).take(len) { + /// // ā€¦ + /// } + /// ``` + // + // TODO(cmc): better/actual pagination + pub fn next_row(&self) -> Option>> { + re_tracing::profile_function!(); + + /// Temporary state used to resolve the streaming join for the current iteration. + struct StreamingJoinState<'a> { + /// Which `Chunk` is this? + chunk: &'a Chunk, + + /// How far are we into this `Chunk`? + cursor: &'a AtomicU64, + + /// What's the index value at the current cursor? + index_value: TimeInt, + + /// What's the `RowId` at the current cursor? + row_id: RowId, + } + + let state = self.init(); + + let _cur_row = state.cur_row.fetch_add(1, Ordering::Relaxed); + + // First, we need to find, among all the chunks available for the current view contents, + // what is their index value for the current row? + // + // NOTE: Non-component columns don't have a streaming state, hence the optional layer. + let mut view_streaming_state: Vec>> = + // NOTE: cannot use vec![], it has limitations with non-cloneable options. + // vec![None; state.view_chunks.len()]; + std::iter::repeat(()) + .map(|_| None) + .take(state.view_chunks.len()) + .collect(); + for (view_column_idx, view_chunks) in state.view_chunks.iter().enumerate() { + let streaming_state = &mut view_streaming_state[view_column_idx]; + + for (cur_cursor, cur_chunk) in view_chunks { + // NOTE: Too soon to increment the cursor, we cannot know yet which chunks will or + // will not be part of the current row. + let cursor_value = cur_cursor.load(Ordering::Relaxed) as usize; + + // TODO(cmc): This can easily be optimized by looking ahead and breaking as soon as chunks + // stop overlapping. 
+ + let Some(cur_row_id) = cur_chunk.row_ids().nth(cursor_value) else { + continue; + }; + + let Some(cur_index_value) = cur_chunk + .timelines() + .get(&self.query.filtered_index) + .map_or(Some(TimeInt::STATIC), |time_column| { + time_column + .times_raw() + .get(cursor_value) + .copied() + .map(TimeInt::new_temporal) + }) + else { + continue; + }; + + if let Some(streaming_state) = streaming_state.as_mut() { + let StreamingJoinState { + chunk, + cursor, + index_value, + row_id, + } = streaming_state; + + let cur_chunk_has_smaller_index_value = cur_index_value < *index_value; + // If these two chunks overlap and share the index value of the current + // iteration, we shall pick the row with the most recent row-id. + let cur_chunk_has_equal_index_but_higher_rowid = + cur_index_value == *index_value && cur_row_id > *row_id; + + if cur_chunk_has_smaller_index_value + || cur_chunk_has_equal_index_but_higher_rowid + { + *chunk = chunk; + *cursor = cursor; + *index_value = cur_index_value; + *row_id = cur_row_id; + } + } else { + *streaming_state = Some(StreamingJoinState { + chunk: cur_chunk, + cursor: cur_cursor, + index_value: cur_index_value, + row_id: cur_row_id, + }); + }; + } + } + + // What's the index value we're looking for at the current iteration? + let cur_index_value = view_streaming_state + .iter() + .flatten() + // NOTE: We're purposefully ignoring RowId-related semantics here: we just want to know + // the value we're looking for on the "main" index (dedupe semantics). + .min_by_key(|streaming_state| streaming_state.index_value) + .map(|streaming_state| streaming_state.index_value)?; + + for streaming_state in &mut view_streaming_state { + if streaming_state.as_ref().map(|s| s.index_value) != Some(cur_index_value) { + *streaming_state = None; + } + } + + // The most recent chunk in the current iteration, according to RowId semantics. 
+ let cur_most_recent_row = view_streaming_state + .iter() + .flatten() + .max_by_key(|streaming_state| streaming_state.row_id)?; + + // We are stitching a bunch of unrelated cells together in order to create the final row + // that is being returned. + // + // For this reason, we can only guarantee that the index being explicitly queried for + // (`QueryExpression2::filtered_index`) will match for all these cells. + // + // When it comes to other indices that the caller might have asked for, it is possible that + // these different cells won't share the same values (e.g. two cells were found at + // `log_time=100`, but one of them has `frame=3` and the other `frame=5`, for whatever + // reason). + // In order to deal with this, we keep track of the maximum value for every possible index + // within the returned set of cells, and return that. + // + // TODO(cmc): In the future, it would be nice to make that either configurable, e.g.: + // * return the minimum value instead of the max + // * return the exact value for each component (that would be a _lot_ of columns!) + // * etc + let mut max_value_per_index = IntMap::default(); + { + // Unless we are currently iterating over a static row, then we know for sure that the + // timeline being used as `filtered_index` is A) present and B) has for value `cur_index_value`. + if cur_index_value != TimeInt::STATIC { + let slice = cur_most_recent_row + .chunk + .timelines() + .get(&self.query.filtered_index) + .map(|time_column| { + time_column.times_array().sliced( + cur_most_recent_row.cursor.load(Ordering::Relaxed) as usize, + 1, + ) + }); + + debug_assert!( + slice.is_some(), + "Timeline must exist, otherwise the query engine would have never returned that chunk in the first place", + ); + + // NOTE: Cannot fail, just want to stay away from unwraps. 
+ if let Some(slice) = slice { + max_value_per_index.insert(self.query.filtered_index, (cur_index_value, slice)); + } + } + + view_streaming_state + .iter() + .flatten() + .flat_map(|streaming_state| { + streaming_state + .chunk + .timelines() + .values() + // NOTE: Already took care of that one above. + .filter(|time_column| *time_column.timeline() != self.query.filtered_index) + // NOTE: Cannot fail, just want to stay away from unwraps. + .filter_map(|time_column| { + let cursor = streaming_state.cursor.load(Ordering::Relaxed) as usize; + time_column + .times_raw() + .get(cursor) + .copied() + .map(TimeInt::new_temporal) + .map(|time| { + ( + *time_column.timeline(), + (time, time_column.times_array().sliced(cursor, 1)), + ) + }) + }) + }) + .for_each(|(timeline, (time, time_sliced))| { + max_value_per_index + .entry(timeline) + .and_modify(|(max_time, max_time_sliced)| { + if time > *max_time { + *max_time = time; + *max_time_sliced = time_sliced.clone(); + } + }) + .or_insert((time, time_sliced)); + }); + } + + // NOTE: Non-component entries have no data to slice, hence the optional layer. + // + // TODO(cmc): no point in slicing arrays that are not selected. + let view_sliced_arrays: Vec> = view_streaming_state + .iter() + .map(|streaming_state| { + streaming_state.as_ref().and_then(|streaming_state| { + let cursor = streaming_state.cursor.fetch_add(1, Ordering::Relaxed); + + debug_assert!( + streaming_state.chunk.components().len() <= 1, + "cannot possibly get more than one component with this query" + ); + + let list_array = streaming_state + .chunk + .components() + .first_key_value() + .map(|(_, list_array)| list_array.sliced(cursor as usize, 1)); + + debug_assert!( + list_array.is_some(), + "This must exist or the chunk wouldn't have been sliced to start with." + ); + + // NOTE: This cannot possibly return None, see assert above. 
+ list_array + }) + }) + .collect(); + + // TODO(cmc): It would likely be worth it to allocate all these possible + // null-arrays ahead of time, and just return a pointer to those in the failure + // case here. + let arrays = state + .selected_contents + .iter() + .map(|(view_idx, column)| match column { + ColumnDescriptor::Control(_) => cur_most_recent_row.chunk.row_ids_array().sliced( + cur_most_recent_row + .cursor + .load(Ordering::Relaxed) + // NOTE: We did the cursor increments while computing the final sliced arrays, + // so we need to go back one tick for this. + .saturating_sub(1) as usize, + 1, + ), + + ColumnDescriptor::Time(descr) => { + max_value_per_index.remove(&descr.timeline).map_or_else( + || arrow2::array::new_null_array(column.datatype(), 1), + |(_time, time_sliced)| time_sliced, + ) + } + + ColumnDescriptor::Component(_descr) => view_sliced_arrays + .get(*view_idx) + .cloned() + .flatten() + .unwrap_or_else(|| arrow2::array::new_null_array(column.datatype(), 1)), + }) + .collect_vec(); + + debug_assert_eq!(state.arrow_schema.fields.len(), arrays.len()); + + Some(arrays) + } + + /// Calls [`Self::next_row`] and wraps the result in a [`RecordBatch`]. + /// + /// Only use this if you absolutely need a [`RecordBatch`] as this adds a lot of allocation + /// overhead. + /// + /// See [`Self::next_row`] for more information. + #[inline] + pub fn next_row_batch(&self) -> Option { + Some(RecordBatch { + schema: self.schema().clone(), + data: ArrowChunk::new(self.next_row()?), + }) + } +} + +impl<'a> QueryHandle<'a> { + /// Returns an iterator backed by [`Self::next_row`]. + #[allow(clippy::should_implement_trait)] // we need an anonymous closure, this won't work + pub fn into_iter(self) -> impl Iterator>> + 'a { + std::iter::from_fn(move || self.next_row()) + } + + /// Returns an iterator backed by [`Self::next_row_batch`]. 
+ #[allow(clippy::should_implement_trait)] // we need an anonymous closure, this won't work + pub fn into_batch_iter(self) -> impl Iterator + 'a { + std::iter::from_fn(move || self.next_row_batch()) + } +} diff --git a/crates/store/re_entity_db/examples/memory_usage.rs b/crates/store/re_entity_db/examples/memory_usage.rs index 8ed3353379d58..7c4c524dc10e1 100644 --- a/crates/store/re_entity_db/examples/memory_usage.rs +++ b/crates/store/re_entity_db/examples/memory_usage.rs @@ -4,7 +4,7 @@ use std::sync::atomic::{AtomicUsize, Ordering::Relaxed}; thread_local! { - static LIVE_BYTES_IN_THREAD: AtomicUsize = AtomicUsize::new(0); + static LIVE_BYTES_IN_THREAD: AtomicUsize = const { AtomicUsize::new(0) }; } struct TrackingAllocator { diff --git a/crates/store/re_log_encoding/src/file_sink.rs b/crates/store/re_log_encoding/src/file_sink.rs index 2a76c0969faa3..166e4403dd844 100644 --- a/crates/store/re_log_encoding/src/file_sink.rs +++ b/crates/store/re_log_encoding/src/file_sink.rs @@ -170,10 +170,7 @@ fn spawn_and_stream( impl fmt::Debug for FileSink { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("FileSink") - .field( - "path", - &self.path.as_ref().cloned().unwrap_or("stdout".into()), - ) + .field("path", &self.path.clone().unwrap_or("stdout".into())) .finish_non_exhaustive() } } diff --git a/crates/store/re_log_types/src/path/entity_path_filter.rs b/crates/store/re_log_types/src/path/entity_path_filter.rs index 6e668ec72e463..220e70c641657 100644 --- a/crates/store/re_log_types/src/path/entity_path_filter.rs +++ b/crates/store/re_log_types/src/path/entity_path_filter.rs @@ -88,6 +88,27 @@ pub struct EntityPathRule { pub include_subtree: bool, } +impl From for EntityPathRule { + #[inline] + fn from(entity_path: EntityPath) -> Self { + Self::exact(entity_path) + } +} + +impl std::hash::Hash for EntityPathRule { + #[inline] + fn hash(&self, state: &mut H) { + let Self { + raw_expression: _, + path, + include_subtree, + } = self; + + 
std::hash::Hash::hash(path, state); + std::hash::Hash::hash(include_subtree, state); + } +} + impl std::fmt::Display for EntityPathRule { #[inline] fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { diff --git a/crates/store/re_log_types/src/time.rs b/crates/store/re_log_types/src/time.rs index 6963a8c9c7893..36cf68be284fa 100644 --- a/crates/store/re_log_types/src/time.rs +++ b/crates/store/re_log_types/src/time.rs @@ -433,7 +433,7 @@ mod tests { pub struct Duration(i64); impl Duration { - pub const MAX: Self = Self(std::i64::MAX); + pub const MAX: Self = Self(i64::MAX); const NANOS_PER_SEC: i64 = 1_000_000_000; const NANOS_PER_MILLI: i64 = 1_000_000; const SEC_PER_MINUTE: i64 = 60; @@ -541,8 +541,8 @@ impl std::ops::Neg for Duration { #[inline] fn neg(self) -> Self { // Handle negation without overflow: - if self.0 == std::i64::MIN { - Self(std::i64::MAX) + if self.0 == i64::MIN { + Self(i64::MAX) } else { Self(-self.0) } diff --git a/crates/store/re_log_types/src/time_point/timeline.rs b/crates/store/re_log_types/src/time_point/timeline.rs index b142e96ebfbaa..da8f723115d91 100644 --- a/crates/store/re_log_types/src/time_point/timeline.rs +++ b/crates/store/re_log_types/src/time_point/timeline.rs @@ -20,7 +20,7 @@ impl Default for TimelineName { #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] pub struct Timeline { - /// Name of the timeline (e.g. "log_time"). + /// Name of the timeline (e.g. `log_time`). name: TimelineName, /// Sequence or time? 
diff --git a/crates/store/re_types/definitions/rerun/archetypes/depth_image.fbs b/crates/store/re_types/definitions/rerun/archetypes/depth_image.fbs index 8c2365b73ac70..24d63fa7c9e30 100644 --- a/crates/store/re_types/definitions/rerun/archetypes/depth_image.fbs +++ b/crates/store/re_types/definitions/rerun/archetypes/depth_image.fbs @@ -5,7 +5,7 @@ namespace rerun.archetypes; /// /// Each pixel corresponds to a depth value in units specified by [components.DepthMeter]. /// -/// \cpp Since the underlying `rerun::datatypes::TensorData` uses `rerun::Collection` internally, +/// \cpp Since the underlying `rerun::datatypes::ImageBuffer` uses `rerun::Collection` internally, /// \cpp data can be passed in without a copy from raw pointers or by reference from `std::vector`/`std::array`/c-arrays. /// \cpp If needed, this "borrow-behavior" can be extended by defining your own `rerun::CollectionAdapter`. /// @@ -41,6 +41,19 @@ table DepthImage ( /// If not set, the depth image will be rendered using the Turbo colormap. colormap: rerun.components.Colormap ("attr.rerun.component_optional", nullable, order: 3200); + /// The expected range of depth values. + /// + /// This is typically the expected range of valid values. + /// Everything outside of the range is clamped to the range for the purpose of colormapping. + /// Note that point clouds generated from this image will still display all points, regardless of this range. + /// + /// If not specified, the range will be automatically estimated from the data. + /// Note that the Viewer may try to guess a wider range than the minimum/maximum of values + /// in the contents of the depth image. + /// E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + /// the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. 
+ depth_range: rerun.components.ValueRange ("attr.rerun.component_optional", nullable, order: 3300); + /// Scale the radii of the points in the point cloud generated from this image. /// /// A fill ratio of 1.0 (the default) means that each point is as big as to touch the center of its neighbor /// /// A fill ratio of 0.5 means that each point touches the edge of its neighbor if it has the same depth. /// /// TODO(#6744): This applies only to 3D views! - point_fill_ratio: rerun.components.FillRatio ("attr.rerun.component_optional", nullable, order: 3300); + point_fill_ratio: rerun.components.FillRatio ("attr.rerun.component_optional", nullable, order: 3400); /// An optional floating point value that specifies the 2D drawing order, used only if the depth image is shown as a 2D image. /// /// Objects with higher values are drawn on top of those with lower values. - draw_order: rerun.components.DrawOrder ("attr.rerun.component_optional", nullable, order: 3400); + draw_order: rerun.components.DrawOrder ("attr.rerun.component_optional", nullable, order: 3500); } diff --git a/crates/store/re_types/definitions/rerun/archetypes/tensor.fbs b/crates/store/re_types/definitions/rerun/archetypes/tensor.fbs index abf8e5150b213..00ea0344f1128 100644 --- a/crates/store/re_types/definitions/rerun/archetypes/tensor.fbs +++ b/crates/store/re_types/definitions/rerun/archetypes/tensor.fbs @@ -19,4 +19,19 @@ table Tensor ( ) { /// The tensor data data: rerun.components.TensorData ("attr.rerun.component_required", order: 1000); + + // --- Optional --- + + /// The expected range of values. + /// + /// This is typically the expected range of valid values. + /// Everything outside of the range is clamped to the range for the purpose of colormapping. + /// Any colormap applied for display, will map this range. + /// + /// If not specified, the range will be automatically estimated from the data. 
+ /// Note that the Viewer may try to guess a wider range than the minimum/maximum of values + /// in the contents of the tensor. + /// E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + /// the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. + value_range: rerun.components.ValueRange ("attr.rerun.component_optional", nullable, order: 2000); } diff --git a/crates/store/re_types/definitions/rerun/blueprint/archetypes/dataframe_query.fbs b/crates/store/re_types/definitions/rerun/blueprint/archetypes/dataframe_query.fbs index f2b1e126330cc..aed1f5b1b0e07 100644 --- a/crates/store/re_types/definitions/rerun/blueprint/archetypes/dataframe_query.fbs +++ b/crates/store/re_types/definitions/rerun/blueprint/archetypes/dataframe_query.fbs @@ -25,6 +25,7 @@ namespace rerun.blueprint.archetypes; // -- /// The query for the dataframe view. +//TODO(ab): replace by DataframeQueryV2 when ready table DataframeQuery ( "attr.rerun.scope": "blueprint" ) { @@ -49,3 +50,31 @@ table DataframeQuery ( time_range_queries: rerun.blueprint.components.TimeRangeQueries ("attr.rerun.component_optional", nullable,order: 500); } + + +/// The query for the dataframe view. +table DataframeQueryV2 ( + "attr.rerun.scope": "blueprint" +) { + // --- Optional --- + + /// The timeline for this query. + /// + /// If unset, the timeline currently active on the time panel is used. + timeline: rerun.blueprint.components.TimelineName ("attr.rerun.component_optional", nullable, order: 100); + + /// If provided, only rows whose timestamp is within this range will be shown. + /// + /// Note: will be unset as soon as `timeline` is changed. + filter_by_range: rerun.blueprint.components.FilterByRange ("attr.rerun.component_optional", nullable, order: 200); + + /// If provided, only show rows which contains a logged event for the specified component. 
+ filter_by_event: rerun.blueprint.components.FilterByEvent ("attr.rerun.component_optional", nullable, order: 300); + + /// Should empty cells be filled with latest-at queries? + apply_latest_at: rerun.blueprint.components.ApplyLatestAt ("attr.rerun.component_optional", nullable, order: 400); + + /// Selected columns. If unset, all columns are selected. + select: rerun.blueprint.components.SelectedColumns ("attr.rerun.component_optional", nullable, order: 500); +} + diff --git a/crates/store/re_types/definitions/rerun/blueprint/archetypes/tensor_scalar_mapping.fbs b/crates/store/re_types/definitions/rerun/blueprint/archetypes/tensor_scalar_mapping.fbs index def7226b2ef46..ff772e6abdfcb 100644 --- a/crates/store/re_types/definitions/rerun/blueprint/archetypes/tensor_scalar_mapping.fbs +++ b/crates/store/re_types/definitions/rerun/blueprint/archetypes/tensor_scalar_mapping.fbs @@ -17,10 +17,8 @@ table TensorScalarMapping ( /// /// Raises the normalized values to the power of this value before mapping to color. /// Acts like an inverse brightness. Defaults to 1.0. + /// + /// The final value for display is set as: + /// `colormap( ((value - data_display_range.min) / (data_display_range.max - data_display_range.min)) ** gamma )` gamma: rerun.components.GammaCorrection ("attr.rerun.component_optional", nullable, order: 1200); - - // TODO(andreas): explicit scalar ranges should go in here as well! 
- // Overall we should communicate scalar mapping to work like this here - // https://matplotlib.org/stable/api/_as_gen/matplotlib.colors.PowerNorm.html#matplotlib.colors.PowerNorm - // (value - vmin) ** gamma / (vmax - vmin) ** gamma } diff --git a/crates/store/re_types/definitions/rerun/blueprint/components.fbs b/crates/store/re_types/definitions/rerun/blueprint/components.fbs index ebdcce7929a22..bba11a44b2f98 100644 --- a/crates/store/re_types/definitions/rerun/blueprint/components.fbs +++ b/crates/store/re_types/definitions/rerun/blueprint/components.fbs @@ -1,6 +1,7 @@ // DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/fbs/mod.rs include "./components/active_tab.fbs"; +include "./components/apply_latest_at.fbs"; include "./components/auto_layout.fbs"; include "./components/auto_space_views.fbs"; include "./components/background_kind.fbs"; @@ -9,6 +10,8 @@ include "./components/column_share.fbs"; include "./components/component_column_selector.fbs"; include "./components/container_kind.fbs"; include "./components/corner_2d.fbs"; +include "./components/filter_by_event.fbs"; +include "./components/filter_by_range.fbs"; include "./components/grid_columns.fbs"; include "./components/included_content.fbs"; include "./components/included_space_view.fbs"; @@ -20,6 +23,7 @@ include "./components/query_expression.fbs"; include "./components/query_kind.fbs"; include "./components/root_container.fbs"; include "./components/row_share.fbs"; +include "./components/selected_columns.fbs"; include "./components/space_view_class.fbs"; include "./components/space_view_maximized.fbs"; include "./components/space_view_origin.fbs"; diff --git a/crates/store/re_types/definitions/rerun/blueprint/components/apply_latest_at.fbs b/crates/store/re_types/definitions/rerun/blueprint/components/apply_latest_at.fbs new file mode 100644 index 0000000000000..7b19ddfee5f87 --- /dev/null +++ 
b/crates/store/re_types/definitions/rerun/blueprint/components/apply_latest_at.fbs @@ -0,0 +1,15 @@ +namespace rerun.blueprint.components; + +// --- + +/// Whether empty cells in a dataframe should be filled with a latest-at query. +struct ApplyLatestAt ( + "attr.arrow.transparent", + "attr.rerun.scope": "blueprint", + "attr.python.aliases": "bool", + "attr.rust.derive": "Copy, Default, PartialEq, Eq, PartialOrd, Ord", + "attr.rust.repr": "transparent", + "attr.rust.tuple_struct" +) { + apply_latest_at: rerun.datatypes.Bool (order: 100); +} diff --git a/crates/store/re_types/definitions/rerun/blueprint/components/filter_by_event.fbs b/crates/store/re_types/definitions/rerun/blueprint/components/filter_by_event.fbs new file mode 100644 index 0000000000000..a2ed8f8e56d8d --- /dev/null +++ b/crates/store/re_types/definitions/rerun/blueprint/components/filter_by_event.fbs @@ -0,0 +1,12 @@ + +namespace rerun.blueprint.components; + +/// Configuration for the filter-by-event feature of the dataframe view. +table FilterByEvent ( + "attr.arrow.transparent", + "attr.rust.derive": "Default, PartialEq, Eq", + "attr.rust.repr": "transparent", + "attr.rerun.scope": "blueprint" +) { + filter_by_event: rerun.blueprint.datatypes.FilterByEvent (order: 100); +} diff --git a/crates/store/re_types/definitions/rerun/blueprint/components/filter_by_range.fbs b/crates/store/re_types/definitions/rerun/blueprint/components/filter_by_range.fbs new file mode 100644 index 0000000000000..790d5c49bcd78 --- /dev/null +++ b/crates/store/re_types/definitions/rerun/blueprint/components/filter_by_range.fbs @@ -0,0 +1,13 @@ + +namespace rerun.blueprint.components; + +/// Configuration for a filter-by-range feature of the dataframe view. +//TODO(ab, jleibs): this probably needs reunification with whatever structure the data out API uses. 
+table FilterByRange ( + "attr.arrow.transparent", + "attr.rust.derive": "PartialEq, Eq", + "attr.rust.repr": "transparent", + "attr.rerun.scope": "blueprint" +) { + range: rerun.blueprint.datatypes.FilterByRange (order: 100); +} diff --git a/crates/store/re_types/definitions/rerun/blueprint/components/included_content.fbs b/crates/store/re_types/definitions/rerun/blueprint/components/included_content.fbs index 6d9993272152d..1e36024a62e62 100644 --- a/crates/store/re_types/definitions/rerun/blueprint/components/included_content.fbs +++ b/crates/store/re_types/definitions/rerun/blueprint/components/included_content.fbs @@ -8,7 +8,7 @@ table IncludedContent ( "attr.rerun.scope": "blueprint", "attr.rust.derive": "Default" ) { - /// List of the contents by EntityPath. + /// List of the contents by [datatypes.EntityPath]. /// /// This must be a path in the blueprint store. /// Typically structure as `/`. diff --git a/crates/store/re_types/definitions/rerun/blueprint/components/selected_columns.fbs b/crates/store/re_types/definitions/rerun/blueprint/components/selected_columns.fbs new file mode 100644 index 0000000000000..5640a8a064912 --- /dev/null +++ b/crates/store/re_types/definitions/rerun/blueprint/components/selected_columns.fbs @@ -0,0 +1,13 @@ + +namespace rerun.blueprint.components; + +/// Describe a component column to be selected in the dataframe view. +//TODO(ab, jleibs): this probably needs reunification with whatever structure the data out API uses. 
+table SelectedColumns ( + "attr.arrow.transparent", + "attr.rust.derive": "Default, PartialEq, Eq", + "attr.rust.repr": "transparent", + "attr.rerun.scope": "blueprint" +) { + selected_columns: rerun.blueprint.datatypes.SelectedColumns (order: 100); +} diff --git a/crates/store/re_types/definitions/rerun/blueprint/components/visualizer_overrides.fbs b/crates/store/re_types/definitions/rerun/blueprint/components/visualizer_overrides.fbs index 7b42a31aacf10..4043917c61f03 100644 --- a/crates/store/re_types/definitions/rerun/blueprint/components/visualizer_overrides.fbs +++ b/crates/store/re_types/definitions/rerun/blueprint/components/visualizer_overrides.fbs @@ -23,26 +23,26 @@ table VisualizerOverrides ( /// Names of the visualizers that should be active. /// /// The built-in visualizers are: - /// - BarChart - /// - Arrows2D - /// - Arrows3D - /// - Asset3D - /// - Boxes2D - /// - Boxes3D - /// - Cameras - /// - DepthImage - /// - Image - /// - Lines2D - /// - Lines3D - /// - Mesh3D - /// - Points2D - /// - Points3D - /// - Transform3DArrows - /// - Tensor - /// - TextDocument - /// - TextLog - /// - SegmentationImage - /// - SeriesLine - /// - SeriesPoint + /// - `BarChart` + /// - `Arrows2D` + /// - `Arrows3D` + /// - `Asset3D` + /// - `Boxes2D` + /// - `Boxes3D` + /// - `Cameras` + /// - `DepthImage` + /// - `Image` + /// - `Lines2D` + /// - `Lines3D` + /// - `Mesh3D` + /// - `Points2D` + /// - `Points3D` + /// - `Transform3DArrows` + /// - `Tensor` + /// - `TextDocument` + /// - `TextLog` + /// - `SegmentationImage` + /// - `SeriesLine` + /// - `SeriesPoint` visualizers: rerun.blueprint.datatypes.Utf8List (order: 100); } diff --git a/crates/store/re_types/definitions/rerun/blueprint/datatypes.fbs b/crates/store/re_types/definitions/rerun/blueprint/datatypes.fbs index 571a107d056c9..59ede3e6ccd56 100644 --- a/crates/store/re_types/definitions/rerun/blueprint/datatypes.fbs +++ b/crates/store/re_types/definitions/rerun/blueprint/datatypes.fbs @@ -1,7 +1,10 @@ // 
DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/fbs/mod.rs include "./datatypes/component_column_selector.fbs"; +include "./datatypes/filter_by_event.fbs"; +include "./datatypes/filter_by_range.fbs"; include "./datatypes/latest_at_query.fbs"; +include "./datatypes/selected_columns.fbs"; include "./datatypes/tensor_dimension_index_slider.fbs"; include "./datatypes/time_range_query.fbs"; include "./datatypes/utf8_list.fbs"; diff --git a/crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_event.fbs b/crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_event.fbs new file mode 100644 index 0000000000000..b59d2e127596b --- /dev/null +++ b/crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_event.fbs @@ -0,0 +1,15 @@ +namespace rerun.blueprint.datatypes; + + +/// Configuration for the filter by event feature of the dataframe view. +// Has to be a table because flatbuffer doesn't support strings in structs. +table FilterByEvent ( + "attr.rust.derive": "Default, PartialEq, Eq", + "attr.rerun.scope": "blueprint" +) { + /// Whether the filter by event feature is active. + active: rerun.datatypes.Bool (order: 100); + + /// The column used when the filter by event feature is used. + column: rerun.blueprint.datatypes.ComponentColumnSelector (order: 200); +} diff --git a/crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_range.fbs b/crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_range.fbs new file mode 100644 index 0000000000000..bb7e64baa5917 --- /dev/null +++ b/crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_range.fbs @@ -0,0 +1,14 @@ +namespace rerun.blueprint.datatypes; + + +/// Configuration for the filter-by-range feature of the dataframe view. +table FilterByRange ( + "attr.rust.derive": "PartialEq, Eq", + "attr.rerun.scope": "blueprint" +) { + /// Beginning of the time range. 
+ start: rerun.datatypes.TimeInt (order: 100); + + /// End of the time range (inclusive). + end: rerun.datatypes.TimeInt (order: 200); +} diff --git a/crates/store/re_types/definitions/rerun/blueprint/datatypes/selected_columns.fbs b/crates/store/re_types/definitions/rerun/blueprint/datatypes/selected_columns.fbs new file mode 100644 index 0000000000000..da87e4dcfd658 --- /dev/null +++ b/crates/store/re_types/definitions/rerun/blueprint/datatypes/selected_columns.fbs @@ -0,0 +1,15 @@ +namespace rerun.blueprint.datatypes; + + +/// List of selected columns in a dataframe. +// Has to be a table because flatbuffer doesn't support strings in structs. +table SelectedColumns ( + "attr.rust.derive": "Default, PartialEq, Eq", + "attr.rerun.scope": "blueprint" +) { + /// The time columns to include + time_columns: [rerun.datatypes.Utf8] (order: 100); + + /// The component columns to include + component_columns: [rerun.blueprint.datatypes.ComponentColumnSelector] (order: 200); +} diff --git a/crates/store/re_types/definitions/rerun/blueprint/views.fbs b/crates/store/re_types/definitions/rerun/blueprint/views.fbs index 5857977a2cd57..bf6def5f9e96f 100644 --- a/crates/store/re_types/definitions/rerun/blueprint/views.fbs +++ b/crates/store/re_types/definitions/rerun/blueprint/views.fbs @@ -1,6 +1,7 @@ // DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/fbs/mod.rs include "./views/bar_chart.fbs"; +include "./views/dataframe.fbs"; include "./views/spatial2d.fbs"; include "./views/spatial3d.fbs"; include "./views/tensor.fbs"; diff --git a/crates/store/re_types/definitions/rerun/blueprint/views/dataframe.fbs b/crates/store/re_types/definitions/rerun/blueprint/views/dataframe.fbs new file mode 100644 index 0000000000000..94fe9c936273a --- /dev/null +++ b/crates/store/re_types/definitions/rerun/blueprint/views/dataframe.fbs @@ -0,0 +1,14 @@ +namespace rerun.blueprint.views; + +/// A view to display any data in a tabular form. 
+/// +/// Any data from the store can be shown, using a flexibly, user-configurable query. +/// +/// \example views/dataframe title="Use a blueprint to customize a DataframeView." +//TODO(#6896): add a thumbnail when the example becomes interesting +table DataframeView ( + "attr.rerun.view_identifier": "Dataframe" +) { + /// Query of the dataframe. + query: rerun.blueprint.archetypes.DataframeQueryV2 (order: 1000); +} diff --git a/crates/store/re_types/definitions/rerun/components.fbs b/crates/store/re_types/definitions/rerun/components.fbs index b5916c757898d..b212c946a50d2 100644 --- a/crates/store/re_types/definitions/rerun/components.fbs +++ b/crates/store/re_types/definitions/rerun/components.fbs @@ -51,6 +51,7 @@ include "./components/transform_mat3x3.fbs"; include "./components/transform_relation.fbs"; include "./components/translation3d.fbs"; include "./components/triangle_indices.fbs"; +include "./components/value_range.fbs"; include "./components/vector2d.fbs"; include "./components/vector3d.fbs"; include "./components/video_timestamp.fbs"; diff --git a/crates/store/re_types/definitions/rerun/components/value_range.fbs b/crates/store/re_types/definitions/rerun/components/value_range.fbs new file mode 100644 index 0000000000000..b238f2be856ad --- /dev/null +++ b/crates/store/re_types/definitions/rerun/components/value_range.fbs @@ -0,0 +1,11 @@ +namespace rerun.components; + +// --- + +/// Range of expected or valid values, specifying a lower and upper bound. 
+struct ValueRange ( + "attr.rust.derive": "Copy, PartialEq, bytemuck::Pod, bytemuck::Zeroable", + "attr.rust.repr": "transparent" +) { + range: rerun.datatypes.Range1D (order: 100); +} diff --git a/crates/store/re_types/definitions/rerun/datatypes/pixel_format.fbs b/crates/store/re_types/definitions/rerun/datatypes/pixel_format.fbs index a5b21e03285b6..298ba8736268e 100644 --- a/crates/store/re_types/definitions/rerun/datatypes/pixel_format.fbs +++ b/crates/store/re_types/definitions/rerun/datatypes/pixel_format.fbs @@ -24,13 +24,13 @@ enum PixelFormat: ubyte { // this organization and subsequently reduce the chance we may find ourselves wanting to // change the values in the future. - /// NV12 (aka Y_UV12) is a YUV 4:2:0 chroma downsampled format with 12 bits per pixel and 8 bits per channel. + /// `NV12` (aka `Y_UV12`) is a YUV 4:2:0 chroma downsampled format with 12 bits per pixel and 8 bits per channel. /// /// First comes entire image in Y in one plane, /// followed by a plane with interleaved lines ordered as U0, V0, U1, V1, etc. NV12 = 26 (default), // _something_ has to be the default šŸ¤·ā€ā™€ļø - /// YUY2 (aka YUYV or YUYV16), is a YUV 4:2:2 chroma downsampled format with 16 bits per pixel and 8 bits per channel. + /// `YUY2` (aka `YUYV` or `YUYV16`), is a YUV 4:2:2 chroma downsampled format with 16 bits per pixel and 8 bits per channel. /// /// The order of the channels is Y0, U0, Y1, V0, all in the same plane. YUY2 = 27, diff --git a/crates/store/re_types/src/archetypes/depth_image.rs b/crates/store/re_types/src/archetypes/depth_image.rs index e446993e33282..9032c30bd0304 100644 --- a/crates/store/re_types/src/archetypes/depth_image.rs +++ b/crates/store/re_types/src/archetypes/depth_image.rs @@ -86,6 +86,19 @@ pub struct DepthImage { /// If not set, the depth image will be rendered using the Turbo colormap. pub colormap: Option, + /// The expected range of depth values. + /// + /// This is typically the expected range of valid values. 
+ /// Everything outside of the range is clamped to the range for the purpose of colormpaping. + /// Note that point clouds generated from this image will still display all points, regardless of this range. + /// + /// If not specified, the range will be automatically estimated from the data. + /// Note that the Viewer may try to guess a wider range than the minimum/maximum of values + /// in the contents of the depth image. + /// E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + /// the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. + pub depth_range: Option, + /// Scale the radii of the points in the point cloud generated from this image. /// /// A fill ratio of 1.0 (the default) means that each point is as big as to touch the center of its neighbor @@ -108,6 +121,7 @@ impl ::re_types_core::SizeBytes for DepthImage { + self.format.heap_size_bytes() + self.meter.heap_size_bytes() + self.colormap.heap_size_bytes() + + self.depth_range.heap_size_bytes() + self.point_fill_ratio.heap_size_bytes() + self.draw_order.heap_size_bytes() } @@ -118,6 +132,7 @@ impl ::re_types_core::SizeBytes for DepthImage { && ::is_pod() && >::is_pod() && >::is_pod() + && >::is_pod() && >::is_pod() && >::is_pod() } @@ -134,17 +149,18 @@ static REQUIRED_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 2usize]> = static RECOMMENDED_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 1usize]> = once_cell::sync::Lazy::new(|| ["rerun.components.DepthImageIndicator".into()]); -static OPTIONAL_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 4usize]> = +static OPTIONAL_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 5usize]> = once_cell::sync::Lazy::new(|| { [ "rerun.components.DepthMeter".into(), "rerun.components.Colormap".into(), + "rerun.components.ValueRange".into(), "rerun.components.FillRatio".into(), "rerun.components.DrawOrder".into(), ] }); -static ALL_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 
7usize]> = +static ALL_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 8usize]> = once_cell::sync::Lazy::new(|| { [ "rerun.components.ImageBuffer".into(), @@ -152,14 +168,15 @@ static ALL_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 7usize]> = "rerun.components.DepthImageIndicator".into(), "rerun.components.DepthMeter".into(), "rerun.components.Colormap".into(), + "rerun.components.ValueRange".into(), "rerun.components.FillRatio".into(), "rerun.components.DrawOrder".into(), ] }); impl DepthImage { - /// The total number of components in the archetype: 2 required, 1 recommended, 4 optional - pub const NUM_COMPONENTS: usize = 7usize; + /// The total number of components in the archetype: 2 required, 1 recommended, 5 optional + pub const NUM_COMPONENTS: usize = 8usize; } /// Indicator component for the [`DepthImage`] [`::re_types_core::Archetype`] @@ -258,6 +275,15 @@ impl ::re_types_core::Archetype for DepthImage { } else { None }; + let depth_range = if let Some(array) = arrays_by_name.get("rerun.components.ValueRange") { + ::from_arrow_opt(&**array) + .with_context("rerun.archetypes.DepthImage#depth_range")? 
+ .into_iter() + .next() + .flatten() + } else { + None + }; let point_fill_ratio = if let Some(array) = arrays_by_name.get("rerun.components.FillRatio") { ::from_arrow_opt(&**array) @@ -282,6 +308,7 @@ impl ::re_types_core::Archetype for DepthImage { format, meter, colormap, + depth_range, point_fill_ratio, draw_order, }) @@ -302,6 +329,9 @@ impl ::re_types_core::AsComponents for DepthImage { self.colormap .as_ref() .map(|comp| (comp as &dyn ComponentBatch).into()), + self.depth_range + .as_ref() + .map(|comp| (comp as &dyn ComponentBatch).into()), self.point_fill_ratio .as_ref() .map(|comp| (comp as &dyn ComponentBatch).into()), @@ -329,6 +359,7 @@ impl DepthImage { format: format.into(), meter: None, colormap: None, + depth_range: None, point_fill_ratio: None, draw_order: None, } @@ -356,6 +387,26 @@ impl DepthImage { self } + /// The expected range of depth values. + /// + /// This is typically the expected range of valid values. + /// Everything outside of the range is clamped to the range for the purpose of colormpaping. + /// Note that point clouds generated from this image will still display all points, regardless of this range. + /// + /// If not specified, the range will be automatically estimated from the data. + /// Note that the Viewer may try to guess a wider range than the minimum/maximum of values + /// in the contents of the depth image. + /// E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + /// the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. + #[inline] + pub fn with_depth_range( + mut self, + depth_range: impl Into, + ) -> Self { + self.depth_range = Some(depth_range.into()); + self + } + /// Scale the radii of the points in the point cloud generated from this image. 
/// /// A fill ratio of 1.0 (the default) means that each point is as big as to touch the center of its neighbor diff --git a/crates/store/re_types/src/archetypes/depth_image_ext.rs b/crates/store/re_types/src/archetypes/depth_image_ext.rs index a1ab8eac0ddeb..242d99c8eb3fd 100644 --- a/crates/store/re_types/src/archetypes/depth_image_ext.rs +++ b/crates/store/re_types/src/archetypes/depth_image_ext.rs @@ -42,6 +42,7 @@ impl DepthImage { meter: None, colormap: None, point_fill_ratio: None, + depth_range: None, }) } } diff --git a/crates/store/re_types/src/archetypes/tensor.rs b/crates/store/re_types/src/archetypes/tensor.rs index b5e277447d639..5d8ad391a76a7 100644 --- a/crates/store/re_types/src/archetypes/tensor.rs +++ b/crates/store/re_types/src/archetypes/tensor.rs @@ -52,17 +52,31 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; pub struct Tensor { /// The tensor data pub data: crate::components::TensorData, + + /// The expected range of values. + /// + /// This is typically the expected range of valid values. + /// Everything outside of the range is clamped to the range for the purpose of colormpaping. + /// Any colormap applied for display, will map this range. + /// + /// If not specified, the range will be automatically estimated from the data. + /// Note that the Viewer may try to guess a wider range than the minimum/maximum of values + /// in the contents of the tensor. + /// E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + /// the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. 
+ pub value_range: Option, } impl ::re_types_core::SizeBytes for Tensor { #[inline] fn heap_size_bytes(&self) -> u64 { - self.data.heap_size_bytes() + self.data.heap_size_bytes() + self.value_range.heap_size_bytes() } #[inline] fn is_pod() -> bool { ::is_pod() + && >::is_pod() } } @@ -72,20 +86,21 @@ static REQUIRED_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 1usize]> = static RECOMMENDED_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 1usize]> = once_cell::sync::Lazy::new(|| ["rerun.components.TensorIndicator".into()]); -static OPTIONAL_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 0usize]> = - once_cell::sync::Lazy::new(|| []); +static OPTIONAL_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 1usize]> = + once_cell::sync::Lazy::new(|| ["rerun.components.ValueRange".into()]); -static ALL_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 2usize]> = +static ALL_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 3usize]> = once_cell::sync::Lazy::new(|| { [ "rerun.components.TensorData".into(), "rerun.components.TensorIndicator".into(), + "rerun.components.ValueRange".into(), ] }); impl Tensor { - /// The total number of components in the archetype: 1 required, 1 recommended, 0 optional - pub const NUM_COMPONENTS: usize = 2usize; + /// The total number of components in the archetype: 1 required, 1 recommended, 1 optional + pub const NUM_COMPONENTS: usize = 3usize; } /// Indicator component for the [`Tensor`] [`::re_types_core::Archetype`] @@ -153,7 +168,16 @@ impl ::re_types_core::Archetype for Tensor { .ok_or_else(DeserializationError::missing_data) .with_context("rerun.archetypes.Tensor#data")? }; - Ok(Self { data }) + let value_range = if let Some(array) = arrays_by_name.get("rerun.components.ValueRange") { + ::from_arrow_opt(&**array) + .with_context("rerun.archetypes.Tensor#value_range")? 
+ .into_iter() + .next() + .flatten() + } else { + None + }; + Ok(Self { data, value_range }) } } @@ -164,6 +188,9 @@ impl ::re_types_core::AsComponents for Tensor { [ Some(Self::indicator()), Some((&self.data as &dyn ComponentBatch).into()), + self.value_range + .as_ref() + .map(|comp| (comp as &dyn ComponentBatch).into()), ] .into_iter() .flatten() @@ -177,6 +204,29 @@ impl Tensor { /// Create a new `Tensor`. #[inline] pub fn new(data: impl Into) -> Self { - Self { data: data.into() } + Self { + data: data.into(), + value_range: None, + } + } + + /// The expected range of values. + /// + /// This is typically the expected range of valid values. + /// Everything outside of the range is clamped to the range for the purpose of colormpaping. + /// Any colormap applied for display, will map this range. + /// + /// If not specified, the range will be automatically estimated from the data. + /// Note that the Viewer may try to guess a wider range than the minimum/maximum of values + /// in the contents of the tensor. + /// E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + /// the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. + #[inline] + pub fn with_value_range( + mut self, + value_range: impl Into, + ) -> Self { + self.value_range = Some(value_range.into()); + self } } diff --git a/crates/store/re_types/src/archetypes/tensor_ext.rs b/crates/store/re_types/src/archetypes/tensor_ext.rs index 9be45f97550b3..c7063b0b6264a 100644 --- a/crates/store/re_types/src/archetypes/tensor_ext.rs +++ b/crates/store/re_types/src/archetypes/tensor_ext.rs @@ -16,7 +16,10 @@ impl Tensor { pub fn try_from>(data: T) -> Result { let data: TensorData = data.try_into()?; - Ok(Self { data: data.into() }) + Ok(Self { + data: data.into(), + value_range: None, + }) } /// Update the `names` of the contained [`TensorData`] dimensions. 
@@ -50,6 +53,7 @@ impl Tensor { buffer: self.data.0.buffer, } .into(), + value_range: None, } } } @@ -62,7 +66,10 @@ impl Tensor { pub fn from_image( image: impl Into, ) -> Result { - TensorData::from_image(image).map(|data| Self { data: data.into() }) + TensorData::from_image(image).map(|data| Self { + data: data.into(), + value_range: None, + }) } /// Construct a tensor from [`image::DynamicImage`]. @@ -71,7 +78,10 @@ impl Tensor { pub fn from_dynamic_image( image: image::DynamicImage, ) -> Result { - TensorData::from_dynamic_image(image).map(|data| Self { data: data.into() }) + TensorData::from_dynamic_image(image).map(|data| Self { + data: data.into(), + value_range: None, + }) } } diff --git a/crates/store/re_types/src/blueprint/archetypes/.gitattributes b/crates/store/re_types/src/blueprint/archetypes/.gitattributes index 942d136ba6025..b1f386517f061 100644 --- a/crates/store/re_types/src/blueprint/archetypes/.gitattributes +++ b/crates/store/re_types/src/blueprint/archetypes/.gitattributes @@ -3,6 +3,7 @@ .gitattributes linguist-generated=true background.rs linguist-generated=true dataframe_query.rs linguist-generated=true +dataframe_query_v2.rs linguist-generated=true dataframe_visible_columns.rs linguist-generated=true mod.rs linguist-generated=true plot_legend.rs linguist-generated=true diff --git a/crates/store/re_types/src/blueprint/archetypes/dataframe_query_v2.rs b/crates/store/re_types/src/blueprint/archetypes/dataframe_query_v2.rs new file mode 100644 index 0000000000000..754e4e3f161e1 --- /dev/null +++ b/crates/store/re_types/src/blueprint/archetypes/dataframe_query_v2.rs @@ -0,0 +1,306 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/archetypes/dataframe_query.fbs". 
+ +#![allow(unused_imports)] +#![allow(unused_parens)] +#![allow(clippy::clone_on_copy)] +#![allow(clippy::cloned_instead_of_copied)] +#![allow(clippy::map_flatten)] +#![allow(clippy::needless_question_mark)] +#![allow(clippy::new_without_default)] +#![allow(clippy::redundant_closure)] +#![allow(clippy::too_many_arguments)] +#![allow(clippy::too_many_lines)] + +use ::re_types_core::external::arrow2; +use ::re_types_core::ComponentName; +use ::re_types_core::SerializationResult; +use ::re_types_core::{ComponentBatch, MaybeOwnedComponentBatch}; +use ::re_types_core::{DeserializationError, DeserializationResult}; + +/// **Archetype**: The query for the dataframe view. +#[derive(Clone, Debug)] +pub struct DataframeQueryV2 { + /// The timeline for this query. + /// + /// If unset, the timeline currently active on the time panel is used. + pub timeline: Option, + + /// If provided, only rows whose timestamp is within this range will be shown. + /// + /// Note: will be unset as soon as `timeline` is changed. + pub filter_by_range: Option, + + /// If provided, only show rows which contains a logged event for the specified component. + pub filter_by_event: Option, + + /// Should empty cells be filled with latest-at queries? + pub apply_latest_at: Option, + + /// Selected columns. If unset, all columns are selected. 
+ pub select: Option, +} + +impl ::re_types_core::SizeBytes for DataframeQueryV2 { + #[inline] + fn heap_size_bytes(&self) -> u64 { + self.timeline.heap_size_bytes() + + self.filter_by_range.heap_size_bytes() + + self.filter_by_event.heap_size_bytes() + + self.apply_latest_at.heap_size_bytes() + + self.select.heap_size_bytes() + } + + #[inline] + fn is_pod() -> bool { + >::is_pod() + && >::is_pod() + && >::is_pod() + && >::is_pod() + && >::is_pod() + } +} + +static REQUIRED_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 0usize]> = + once_cell::sync::Lazy::new(|| []); + +static RECOMMENDED_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 1usize]> = + once_cell::sync::Lazy::new(|| ["rerun.blueprint.components.DataframeQueryV2Indicator".into()]); + +static OPTIONAL_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 5usize]> = + once_cell::sync::Lazy::new(|| { + [ + "rerun.blueprint.components.TimelineName".into(), + "rerun.blueprint.components.FilterByRange".into(), + "rerun.blueprint.components.FilterByEvent".into(), + "rerun.blueprint.components.ApplyLatestAt".into(), + "rerun.blueprint.components.SelectedColumns".into(), + ] + }); + +static ALL_COMPONENTS: once_cell::sync::Lazy<[ComponentName; 6usize]> = + once_cell::sync::Lazy::new(|| { + [ + "rerun.blueprint.components.DataframeQueryV2Indicator".into(), + "rerun.blueprint.components.TimelineName".into(), + "rerun.blueprint.components.FilterByRange".into(), + "rerun.blueprint.components.FilterByEvent".into(), + "rerun.blueprint.components.ApplyLatestAt".into(), + "rerun.blueprint.components.SelectedColumns".into(), + ] + }); + +impl DataframeQueryV2 { + /// The total number of components in the archetype: 0 required, 1 recommended, 5 optional + pub const NUM_COMPONENTS: usize = 6usize; +} + +/// Indicator component for the [`DataframeQueryV2`] [`::re_types_core::Archetype`] +pub type DataframeQueryV2Indicator = ::re_types_core::GenericIndicatorComponent; + +impl ::re_types_core::Archetype for DataframeQueryV2 { 
+ type Indicator = DataframeQueryV2Indicator; + + #[inline] + fn name() -> ::re_types_core::ArchetypeName { + "rerun.blueprint.archetypes.DataframeQueryV2".into() + } + + #[inline] + fn display_name() -> &'static str { + "Dataframe query v2" + } + + #[inline] + fn indicator() -> MaybeOwnedComponentBatch<'static> { + static INDICATOR: DataframeQueryV2Indicator = DataframeQueryV2Indicator::DEFAULT; + MaybeOwnedComponentBatch::Ref(&INDICATOR) + } + + #[inline] + fn required_components() -> ::std::borrow::Cow<'static, [ComponentName]> { + REQUIRED_COMPONENTS.as_slice().into() + } + + #[inline] + fn recommended_components() -> ::std::borrow::Cow<'static, [ComponentName]> { + RECOMMENDED_COMPONENTS.as_slice().into() + } + + #[inline] + fn optional_components() -> ::std::borrow::Cow<'static, [ComponentName]> { + OPTIONAL_COMPONENTS.as_slice().into() + } + + #[inline] + fn all_components() -> ::std::borrow::Cow<'static, [ComponentName]> { + ALL_COMPONENTS.as_slice().into() + } + + #[inline] + fn from_arrow_components( + arrow_data: impl IntoIterator)>, + ) -> DeserializationResult { + re_tracing::profile_function!(); + use ::re_types_core::{Loggable as _, ResultExt as _}; + let arrays_by_name: ::std::collections::HashMap<_, _> = arrow_data + .into_iter() + .map(|(name, array)| (name.full_name(), array)) + .collect(); + let timeline = + if let Some(array) = arrays_by_name.get("rerun.blueprint.components.TimelineName") { + ::from_arrow_opt(&**array) + .with_context("rerun.blueprint.archetypes.DataframeQueryV2#timeline")? + .into_iter() + .next() + .flatten() + } else { + None + }; + let filter_by_range = + if let Some(array) = arrays_by_name.get("rerun.blueprint.components.FilterByRange") { + ::from_arrow_opt(&**array) + .with_context("rerun.blueprint.archetypes.DataframeQueryV2#filter_by_range")? 
+ .into_iter() + .next() + .flatten() + } else { + None + }; + let filter_by_event = + if let Some(array) = arrays_by_name.get("rerun.blueprint.components.FilterByEvent") { + ::from_arrow_opt(&**array) + .with_context("rerun.blueprint.archetypes.DataframeQueryV2#filter_by_event")? + .into_iter() + .next() + .flatten() + } else { + None + }; + let apply_latest_at = + if let Some(array) = arrays_by_name.get("rerun.blueprint.components.ApplyLatestAt") { + ::from_arrow_opt(&**array) + .with_context("rerun.blueprint.archetypes.DataframeQueryV2#apply_latest_at")? + .into_iter() + .next() + .flatten() + } else { + None + }; + let select = + if let Some(array) = arrays_by_name.get("rerun.blueprint.components.SelectedColumns") { + ::from_arrow_opt(&**array) + .with_context("rerun.blueprint.archetypes.DataframeQueryV2#select")? + .into_iter() + .next() + .flatten() + } else { + None + }; + Ok(Self { + timeline, + filter_by_range, + filter_by_event, + apply_latest_at, + select, + }) + } +} + +impl ::re_types_core::AsComponents for DataframeQueryV2 { + fn as_component_batches(&self) -> Vec> { + re_tracing::profile_function!(); + use ::re_types_core::Archetype as _; + [ + Some(Self::indicator()), + self.timeline + .as_ref() + .map(|comp| (comp as &dyn ComponentBatch).into()), + self.filter_by_range + .as_ref() + .map(|comp| (comp as &dyn ComponentBatch).into()), + self.filter_by_event + .as_ref() + .map(|comp| (comp as &dyn ComponentBatch).into()), + self.apply_latest_at + .as_ref() + .map(|comp| (comp as &dyn ComponentBatch).into()), + self.select + .as_ref() + .map(|comp| (comp as &dyn ComponentBatch).into()), + ] + .into_iter() + .flatten() + .collect() + } +} + +impl ::re_types_core::ArchetypeReflectionMarker for DataframeQueryV2 {} + +impl DataframeQueryV2 { + /// Create a new `DataframeQueryV2`. 
+ #[inline] + pub fn new() -> Self { + Self { + timeline: None, + filter_by_range: None, + filter_by_event: None, + apply_latest_at: None, + select: None, + } + } + + /// The timeline for this query. + /// + /// If unset, the timeline currently active on the time panel is used. + #[inline] + pub fn with_timeline( + mut self, + timeline: impl Into, + ) -> Self { + self.timeline = Some(timeline.into()); + self + } + + /// If provided, only rows whose timestamp is within this range will be shown. + /// + /// Note: will be unset as soon as `timeline` is changed. + #[inline] + pub fn with_filter_by_range( + mut self, + filter_by_range: impl Into, + ) -> Self { + self.filter_by_range = Some(filter_by_range.into()); + self + } + + /// If provided, only show rows which contains a logged event for the specified component. + #[inline] + pub fn with_filter_by_event( + mut self, + filter_by_event: impl Into, + ) -> Self { + self.filter_by_event = Some(filter_by_event.into()); + self + } + + /// Should empty cells be filled with latest-at queries? + #[inline] + pub fn with_apply_latest_at( + mut self, + apply_latest_at: impl Into, + ) -> Self { + self.apply_latest_at = Some(apply_latest_at.into()); + self + } + + /// Selected columns. If unset, all columns are selected. 
+ #[inline] + pub fn with_select( + mut self, + select: impl Into, + ) -> Self { + self.select = Some(select.into()); + self + } +} diff --git a/crates/store/re_types/src/blueprint/archetypes/mod.rs b/crates/store/re_types/src/blueprint/archetypes/mod.rs index 6a7467734a151..cb7c8e466971e 100644 --- a/crates/store/re_types/src/blueprint/archetypes/mod.rs +++ b/crates/store/re_types/src/blueprint/archetypes/mod.rs @@ -2,6 +2,7 @@ mod background; mod dataframe_query; +mod dataframe_query_v2; mod dataframe_visible_columns; mod plot_legend; mod scalar_axis; @@ -16,6 +17,7 @@ mod visual_bounds2d; pub use self::background::Background; pub use self::dataframe_query::DataframeQuery; +pub use self::dataframe_query_v2::DataframeQueryV2; pub use self::dataframe_visible_columns::DataframeVisibleColumns; pub use self::plot_legend::PlotLegend; pub use self::scalar_axis::ScalarAxis; diff --git a/crates/store/re_types/src/blueprint/archetypes/tensor_scalar_mapping.rs b/crates/store/re_types/src/blueprint/archetypes/tensor_scalar_mapping.rs index 3192b0a4a7085..a0877142b5a29 100644 --- a/crates/store/re_types/src/blueprint/archetypes/tensor_scalar_mapping.rs +++ b/crates/store/re_types/src/blueprint/archetypes/tensor_scalar_mapping.rs @@ -33,6 +33,9 @@ pub struct TensorScalarMapping { /// /// Raises the normalized values to the power of this value before mapping to color. /// Acts like an inverse brightness. Defaults to 1.0. + /// + /// The final value for display is set as: + /// `colormap( ((value - data_display_range.min) / (data_display_range.max - data_display_range.min)) ** gamma )` pub gamma: Option, } @@ -231,6 +234,9 @@ impl TensorScalarMapping { /// /// Raises the normalized values to the power of this value before mapping to color. /// Acts like an inverse brightness. Defaults to 1.0. 
+ /// + /// The final value for display is set as: + /// `colormap( ((value - data_display_range.min) / (data_display_range.max - data_display_range.min)) ** gamma )` #[inline] pub fn with_gamma(mut self, gamma: impl Into) -> Self { self.gamma = Some(gamma.into()); diff --git a/crates/store/re_types/src/blueprint/components/.gitattributes b/crates/store/re_types/src/blueprint/components/.gitattributes index 9fc30804bbf8d..9f55f576a5e49 100644 --- a/crates/store/re_types/src/blueprint/components/.gitattributes +++ b/crates/store/re_types/src/blueprint/components/.gitattributes @@ -2,11 +2,14 @@ .gitattributes linguist-generated=true active_tab.rs linguist-generated=true +apply_latest_at.rs linguist-generated=true background_kind.rs linguist-generated=true column_selection_mode.rs linguist-generated=true column_share.rs linguist-generated=true component_column_selector.rs linguist-generated=true corner2d.rs linguist-generated=true +filter_by_event.rs linguist-generated=true +filter_by_range.rs linguist-generated=true included_content.rs linguist-generated=true interactive.rs linguist-generated=true latest_at_queries.rs linguist-generated=true @@ -16,6 +19,7 @@ panel_state.rs linguist-generated=true query_expression.rs linguist-generated=true query_kind.rs linguist-generated=true row_share.rs linguist-generated=true +selected_columns.rs linguist-generated=true space_view_class.rs linguist-generated=true space_view_origin.rs linguist-generated=true tensor_dimension_index_slider.rs linguist-generated=true diff --git a/crates/store/re_types/src/blueprint/components/apply_latest_at.rs b/crates/store/re_types/src/blueprint/components/apply_latest_at.rs new file mode 100644 index 0000000000000..447c4a7831424 --- /dev/null +++ b/crates/store/re_types/src/blueprint/components/apply_latest_at.rs @@ -0,0 +1,105 @@ +// DO NOT EDIT! 
This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/components/apply_latest_at.fbs". + +#![allow(unused_imports)] +#![allow(unused_parens)] +#![allow(clippy::clone_on_copy)] +#![allow(clippy::cloned_instead_of_copied)] +#![allow(clippy::map_flatten)] +#![allow(clippy::needless_question_mark)] +#![allow(clippy::new_without_default)] +#![allow(clippy::redundant_closure)] +#![allow(clippy::too_many_arguments)] +#![allow(clippy::too_many_lines)] + +use ::re_types_core::external::arrow2; +use ::re_types_core::ComponentName; +use ::re_types_core::SerializationResult; +use ::re_types_core::{ComponentBatch, MaybeOwnedComponentBatch}; +use ::re_types_core::{DeserializationError, DeserializationResult}; + +/// **Component**: Whether empty cells in a dataframe should be filled with a latest-at query. +#[derive(Clone, Debug, Copy, Default, PartialEq, Eq, PartialOrd, Ord)] +#[repr(transparent)] +pub struct ApplyLatestAt(pub crate::datatypes::Bool); + +impl ::re_types_core::SizeBytes for ApplyLatestAt { + #[inline] + fn heap_size_bytes(&self) -> u64 { + self.0.heap_size_bytes() + } + + #[inline] + fn is_pod() -> bool { + ::is_pod() + } +} + +impl> From for ApplyLatestAt { + fn from(v: T) -> Self { + Self(v.into()) + } +} + +impl std::borrow::Borrow for ApplyLatestAt { + #[inline] + fn borrow(&self) -> &crate::datatypes::Bool { + &self.0 + } +} + +impl std::ops::Deref for ApplyLatestAt { + type Target = crate::datatypes::Bool; + + #[inline] + fn deref(&self) -> &crate::datatypes::Bool { + &self.0 + } +} + +impl std::ops::DerefMut for ApplyLatestAt { + #[inline] + fn deref_mut(&mut self) -> &mut crate::datatypes::Bool { + &mut self.0 + } +} + +::re_types_core::macros::impl_into_cow!(ApplyLatestAt); + +impl ::re_types_core::Loggable for ApplyLatestAt { + type Name = ::re_types_core::ComponentName; + + #[inline] + fn name() -> Self::Name { + 
"rerun.blueprint.components.ApplyLatestAt".into() + } + + #[inline] + fn arrow_datatype() -> arrow2::datatypes::DataType { + crate::datatypes::Bool::arrow_datatype() + } + + fn to_arrow_opt<'a>( + data: impl IntoIterator>>>, + ) -> SerializationResult> + where + Self: Clone + 'a, + { + crate::datatypes::Bool::to_arrow_opt(data.into_iter().map(|datum| { + datum.map(|datum| match datum.into() { + ::std::borrow::Cow::Borrowed(datum) => ::std::borrow::Cow::Borrowed(&datum.0), + ::std::borrow::Cow::Owned(datum) => ::std::borrow::Cow::Owned(datum.0), + }) + })) + } + + fn from_arrow_opt( + arrow_data: &dyn arrow2::array::Array, + ) -> DeserializationResult>> + where + Self: Sized, + { + crate::datatypes::Bool::from_arrow_opt(arrow_data) + .map(|v| v.into_iter().map(|v| v.map(Self)).collect()) + } +} diff --git a/crates/store/re_types/src/blueprint/components/component_column_selector_ext.rs b/crates/store/re_types/src/blueprint/components/component_column_selector_ext.rs index a38aecde55f79..18957e74b779a 100644 --- a/crates/store/re_types/src/blueprint/components/component_column_selector_ext.rs +++ b/crates/store/re_types/src/blueprint/components/component_column_selector_ext.rs @@ -4,10 +4,7 @@ use re_types_core::ComponentName; impl super::ComponentColumnSelector { /// Create a [`Self`] from an [`EntityPath`] and a [`ComponentName`]. 
pub fn new(entity_path: &EntityPath, component_name: ComponentName) -> Self { - crate::blueprint::datatypes::ComponentColumnSelector { - entity_path: entity_path.into(), - component: component_name.as_str().into(), - } - .into() + crate::blueprint::datatypes::ComponentColumnSelector::new(entity_path, component_name) + .into() } } diff --git a/crates/store/re_types/src/blueprint/components/filter_by_event.rs b/crates/store/re_types/src/blueprint/components/filter_by_event.rs new file mode 100644 index 0000000000000..1031bc1e8408e --- /dev/null +++ b/crates/store/re_types/src/blueprint/components/filter_by_event.rs @@ -0,0 +1,105 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/components/filter_by_event.fbs". + +#![allow(unused_imports)] +#![allow(unused_parens)] +#![allow(clippy::clone_on_copy)] +#![allow(clippy::cloned_instead_of_copied)] +#![allow(clippy::map_flatten)] +#![allow(clippy::needless_question_mark)] +#![allow(clippy::new_without_default)] +#![allow(clippy::redundant_closure)] +#![allow(clippy::too_many_arguments)] +#![allow(clippy::too_many_lines)] + +use ::re_types_core::external::arrow2; +use ::re_types_core::ComponentName; +use ::re_types_core::SerializationResult; +use ::re_types_core::{ComponentBatch, MaybeOwnedComponentBatch}; +use ::re_types_core::{DeserializationError, DeserializationResult}; + +/// **Component**: Configuration for the filter-by-event feature of the dataframe view. 
+#[derive(Clone, Debug, Default, PartialEq, Eq)] +#[repr(transparent)] +pub struct FilterByEvent(pub crate::blueprint::datatypes::FilterByEvent); + +impl ::re_types_core::SizeBytes for FilterByEvent { + #[inline] + fn heap_size_bytes(&self) -> u64 { + self.0.heap_size_bytes() + } + + #[inline] + fn is_pod() -> bool { + ::is_pod() + } +} + +impl> From for FilterByEvent { + fn from(v: T) -> Self { + Self(v.into()) + } +} + +impl std::borrow::Borrow for FilterByEvent { + #[inline] + fn borrow(&self) -> &crate::blueprint::datatypes::FilterByEvent { + &self.0 + } +} + +impl std::ops::Deref for FilterByEvent { + type Target = crate::blueprint::datatypes::FilterByEvent; + + #[inline] + fn deref(&self) -> &crate::blueprint::datatypes::FilterByEvent { + &self.0 + } +} + +impl std::ops::DerefMut for FilterByEvent { + #[inline] + fn deref_mut(&mut self) -> &mut crate::blueprint::datatypes::FilterByEvent { + &mut self.0 + } +} + +::re_types_core::macros::impl_into_cow!(FilterByEvent); + +impl ::re_types_core::Loggable for FilterByEvent { + type Name = ::re_types_core::ComponentName; + + #[inline] + fn name() -> Self::Name { + "rerun.blueprint.components.FilterByEvent".into() + } + + #[inline] + fn arrow_datatype() -> arrow2::datatypes::DataType { + crate::blueprint::datatypes::FilterByEvent::arrow_datatype() + } + + fn to_arrow_opt<'a>( + data: impl IntoIterator>>>, + ) -> SerializationResult> + where + Self: Clone + 'a, + { + crate::blueprint::datatypes::FilterByEvent::to_arrow_opt(data.into_iter().map(|datum| { + datum.map(|datum| match datum.into() { + ::std::borrow::Cow::Borrowed(datum) => ::std::borrow::Cow::Borrowed(&datum.0), + ::std::borrow::Cow::Owned(datum) => ::std::borrow::Cow::Owned(datum.0), + }) + })) + } + + fn from_arrow_opt( + arrow_data: &dyn arrow2::array::Array, + ) -> DeserializationResult>> + where + Self: Sized, + { + crate::blueprint::datatypes::FilterByEvent::from_arrow_opt(arrow_data) + .map(|v| v.into_iter().map(|v| v.map(Self)).collect()) + } +} 
diff --git a/crates/store/re_types/src/blueprint/components/filter_by_event_ext.rs b/crates/store/re_types/src/blueprint/components/filter_by_event_ext.rs new file mode 100644 index 0000000000000..b86ab981ff371 --- /dev/null +++ b/crates/store/re_types/src/blueprint/components/filter_by_event_ext.rs @@ -0,0 +1,34 @@ +use re_log_types::EntityPath; +use re_types_core::ComponentName; + +use super::FilterByEvent; + +impl FilterByEvent { + /// Create a new [`Self`]. + pub fn new(active: bool, entity_path: &EntityPath, component_name: ComponentName) -> Self { + let datatype = crate::blueprint::datatypes::FilterByEvent { + active: active.into(), + column: crate::blueprint::datatypes::ComponentColumnSelector { + entity_path: entity_path.to_string().into(), + component: component_name.as_str().into(), + }, + }; + + Self(datatype) + } + + /// Is the filter active? + pub fn active(&self) -> bool { + self.active.into() + } + + /// Entity path of the filter column. + pub fn entity_path(&self) -> EntityPath { + EntityPath::from(self.column.entity_path.as_str()) + } + + /// Component name of the filter column. + pub fn component_name(&self) -> ComponentName { + self.column.component.as_str().into() + } +} diff --git a/crates/store/re_types/src/blueprint/components/filter_by_range.rs b/crates/store/re_types/src/blueprint/components/filter_by_range.rs new file mode 100644 index 0000000000000..727bc49ccc1db --- /dev/null +++ b/crates/store/re_types/src/blueprint/components/filter_by_range.rs @@ -0,0 +1,105 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/components/filter_by_range.fbs". 
+ +#![allow(unused_imports)] +#![allow(unused_parens)] +#![allow(clippy::clone_on_copy)] +#![allow(clippy::cloned_instead_of_copied)] +#![allow(clippy::map_flatten)] +#![allow(clippy::needless_question_mark)] +#![allow(clippy::new_without_default)] +#![allow(clippy::redundant_closure)] +#![allow(clippy::too_many_arguments)] +#![allow(clippy::too_many_lines)] + +use ::re_types_core::external::arrow2; +use ::re_types_core::ComponentName; +use ::re_types_core::SerializationResult; +use ::re_types_core::{ComponentBatch, MaybeOwnedComponentBatch}; +use ::re_types_core::{DeserializationError, DeserializationResult}; + +/// **Component**: Configuration for a filter-by-range feature of the dataframe view. +#[derive(Clone, Debug, PartialEq, Eq)] +#[repr(transparent)] +pub struct FilterByRange(pub crate::blueprint::datatypes::FilterByRange); + +impl ::re_types_core::SizeBytes for FilterByRange { + #[inline] + fn heap_size_bytes(&self) -> u64 { + self.0.heap_size_bytes() + } + + #[inline] + fn is_pod() -> bool { + ::is_pod() + } +} + +impl> From for FilterByRange { + fn from(v: T) -> Self { + Self(v.into()) + } +} + +impl std::borrow::Borrow for FilterByRange { + #[inline] + fn borrow(&self) -> &crate::blueprint::datatypes::FilterByRange { + &self.0 + } +} + +impl std::ops::Deref for FilterByRange { + type Target = crate::blueprint::datatypes::FilterByRange; + + #[inline] + fn deref(&self) -> &crate::blueprint::datatypes::FilterByRange { + &self.0 + } +} + +impl std::ops::DerefMut for FilterByRange { + #[inline] + fn deref_mut(&mut self) -> &mut crate::blueprint::datatypes::FilterByRange { + &mut self.0 + } +} + +::re_types_core::macros::impl_into_cow!(FilterByRange); + +impl ::re_types_core::Loggable for FilterByRange { + type Name = ::re_types_core::ComponentName; + + #[inline] + fn name() -> Self::Name { + "rerun.blueprint.components.FilterByRange".into() + } + + #[inline] + fn arrow_datatype() -> arrow2::datatypes::DataType { + 
crate::blueprint::datatypes::FilterByRange::arrow_datatype() + } + + fn to_arrow_opt<'a>( + data: impl IntoIterator>>>, + ) -> SerializationResult> + where + Self: Clone + 'a, + { + crate::blueprint::datatypes::FilterByRange::to_arrow_opt(data.into_iter().map(|datum| { + datum.map(|datum| match datum.into() { + ::std::borrow::Cow::Borrowed(datum) => ::std::borrow::Cow::Borrowed(&datum.0), + ::std::borrow::Cow::Owned(datum) => ::std::borrow::Cow::Owned(datum.0), + }) + })) + } + + fn from_arrow_opt( + arrow_data: &dyn arrow2::array::Array, + ) -> DeserializationResult>> + where + Self: Sized, + { + crate::blueprint::datatypes::FilterByRange::from_arrow_opt(arrow_data) + .map(|v| v.into_iter().map(|v| v.map(Self)).collect()) + } +} diff --git a/crates/store/re_types/src/blueprint/components/filter_by_range_ext.rs b/crates/store/re_types/src/blueprint/components/filter_by_range_ext.rs new file mode 100644 index 0000000000000..dbfb867e2803e --- /dev/null +++ b/crates/store/re_types/src/blueprint/components/filter_by_range_ext.rs @@ -0,0 +1,18 @@ +use super::FilterByRange; +use re_log_types::TimeInt; + +impl Default for FilterByRange { + fn default() -> Self { + Self::new(TimeInt::MIN, TimeInt::MAX) + } +} + +impl FilterByRange { + /// Create a new range filter with the provided time boundaries. + pub fn new(start: TimeInt, end: TimeInt) -> Self { + Self(crate::blueprint::datatypes::FilterByRange { + start: start.into(), + end: end.into(), + }) + } +} diff --git a/crates/store/re_types/src/blueprint/components/included_content.rs b/crates/store/re_types/src/blueprint/components/included_content.rs index d92d8e73044d0..5ce3ef2262703 100644 --- a/crates/store/re_types/src/blueprint/components/included_content.rs +++ b/crates/store/re_types/src/blueprint/components/included_content.rs @@ -21,7 +21,7 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; /// **Component**: All the contents in the container. 
#[derive(Clone, Debug, Default)] pub struct IncludedContent( - /// List of the contents by EntityPath. + /// List of the contents by [`datatypes::EntityPath`][crate::datatypes::EntityPath]. /// /// This must be a path in the blueprint store. /// Typically structure as `/`. diff --git a/crates/store/re_types/src/blueprint/components/mod.rs b/crates/store/re_types/src/blueprint/components/mod.rs index 4dda9f9fd705d..2cf2a63e2479a 100644 --- a/crates/store/re_types/src/blueprint/components/mod.rs +++ b/crates/store/re_types/src/blueprint/components/mod.rs @@ -1,6 +1,7 @@ // DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs mod active_tab; +mod apply_latest_at; mod background_kind; mod column_selection_mode; mod column_share; @@ -8,6 +9,10 @@ mod component_column_selector; mod component_column_selector_ext; mod corner2d; mod corner2d_ext; +mod filter_by_event; +mod filter_by_event_ext; +mod filter_by_range; +mod filter_by_range_ext; mod included_content; mod interactive; mod interactive_ext; @@ -19,6 +24,7 @@ mod panel_state_ext; mod query_expression; mod query_kind; mod row_share; +mod selected_columns; mod space_view_class; mod space_view_class_ext; mod space_view_origin; @@ -39,11 +45,14 @@ mod visual_bounds2d; mod visual_bounds2d_ext; pub use self::active_tab::ActiveTab; +pub use self::apply_latest_at::ApplyLatestAt; pub use self::background_kind::BackgroundKind; pub use self::column_selection_mode::ColumnSelectionMode; pub use self::column_share::ColumnShare; pub use self::component_column_selector::ComponentColumnSelector; pub use self::corner2d::Corner2D; +pub use self::filter_by_event::FilterByEvent; +pub use self::filter_by_range::FilterByRange; pub use self::included_content::IncludedContent; pub use self::interactive::Interactive; pub use self::latest_at_queries::LatestAtQueries; @@ -52,6 +61,7 @@ pub use self::panel_state::PanelState; pub use self::query_expression::QueryExpression; pub use 
self::query_kind::QueryKind; pub use self::row_share::RowShare; +pub use self::selected_columns::SelectedColumns; pub use self::space_view_class::SpaceViewClass; pub use self::space_view_origin::SpaceViewOrigin; pub use self::tensor_dimension_index_slider::TensorDimensionIndexSlider; diff --git a/crates/store/re_types/src/blueprint/components/selected_columns.rs b/crates/store/re_types/src/blueprint/components/selected_columns.rs new file mode 100644 index 0000000000000..612a218a7b3cd --- /dev/null +++ b/crates/store/re_types/src/blueprint/components/selected_columns.rs @@ -0,0 +1,105 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/components/selected_columns.fbs". + +#![allow(unused_imports)] +#![allow(unused_parens)] +#![allow(clippy::clone_on_copy)] +#![allow(clippy::cloned_instead_of_copied)] +#![allow(clippy::map_flatten)] +#![allow(clippy::needless_question_mark)] +#![allow(clippy::new_without_default)] +#![allow(clippy::redundant_closure)] +#![allow(clippy::too_many_arguments)] +#![allow(clippy::too_many_lines)] + +use ::re_types_core::external::arrow2; +use ::re_types_core::ComponentName; +use ::re_types_core::SerializationResult; +use ::re_types_core::{ComponentBatch, MaybeOwnedComponentBatch}; +use ::re_types_core::{DeserializationError, DeserializationResult}; + +/// **Component**: Describe a component column to be selected in the dataframe view. 
+#[derive(Clone, Debug, Default, PartialEq, Eq)] +#[repr(transparent)] +pub struct SelectedColumns(pub crate::blueprint::datatypes::SelectedColumns); + +impl ::re_types_core::SizeBytes for SelectedColumns { + #[inline] + fn heap_size_bytes(&self) -> u64 { + self.0.heap_size_bytes() + } + + #[inline] + fn is_pod() -> bool { + ::is_pod() + } +} + +impl> From for SelectedColumns { + fn from(v: T) -> Self { + Self(v.into()) + } +} + +impl std::borrow::Borrow for SelectedColumns { + #[inline] + fn borrow(&self) -> &crate::blueprint::datatypes::SelectedColumns { + &self.0 + } +} + +impl std::ops::Deref for SelectedColumns { + type Target = crate::blueprint::datatypes::SelectedColumns; + + #[inline] + fn deref(&self) -> &crate::blueprint::datatypes::SelectedColumns { + &self.0 + } +} + +impl std::ops::DerefMut for SelectedColumns { + #[inline] + fn deref_mut(&mut self) -> &mut crate::blueprint::datatypes::SelectedColumns { + &mut self.0 + } +} + +::re_types_core::macros::impl_into_cow!(SelectedColumns); + +impl ::re_types_core::Loggable for SelectedColumns { + type Name = ::re_types_core::ComponentName; + + #[inline] + fn name() -> Self::Name { + "rerun.blueprint.components.SelectedColumns".into() + } + + #[inline] + fn arrow_datatype() -> arrow2::datatypes::DataType { + crate::blueprint::datatypes::SelectedColumns::arrow_datatype() + } + + fn to_arrow_opt<'a>( + data: impl IntoIterator>>>, + ) -> SerializationResult> + where + Self: Clone + 'a, + { + crate::blueprint::datatypes::SelectedColumns::to_arrow_opt(data.into_iter().map(|datum| { + datum.map(|datum| match datum.into() { + ::std::borrow::Cow::Borrowed(datum) => ::std::borrow::Cow::Borrowed(&datum.0), + ::std::borrow::Cow::Owned(datum) => ::std::borrow::Cow::Owned(datum.0), + }) + })) + } + + fn from_arrow_opt( + arrow_data: &dyn arrow2::array::Array, + ) -> DeserializationResult>> + where + Self: Sized, + { + crate::blueprint::datatypes::SelectedColumns::from_arrow_opt(arrow_data) + .map(|v| v.into_iter().map(|v| 
v.map(Self)).collect()) + } +} diff --git a/crates/store/re_types/src/blueprint/datatypes/.gitattributes b/crates/store/re_types/src/blueprint/datatypes/.gitattributes index b280505760895..437f7b5ea2438 100644 --- a/crates/store/re_types/src/blueprint/datatypes/.gitattributes +++ b/crates/store/re_types/src/blueprint/datatypes/.gitattributes @@ -2,7 +2,10 @@ .gitattributes linguist-generated=true component_column_selector.rs linguist-generated=true +filter_by_event.rs linguist-generated=true +filter_by_range.rs linguist-generated=true latest_at_query.rs linguist-generated=true mod.rs linguist-generated=true +selected_columns.rs linguist-generated=true tensor_dimension_index_slider.rs linguist-generated=true time_range_query.rs linguist-generated=true diff --git a/crates/store/re_types/src/blueprint/datatypes/component_column_selector_ext.rs b/crates/store/re_types/src/blueprint/datatypes/component_column_selector_ext.rs new file mode 100644 index 0000000000000..ca74f5fc2a045 --- /dev/null +++ b/crates/store/re_types/src/blueprint/datatypes/component_column_selector_ext.rs @@ -0,0 +1,12 @@ +use re_log_types::EntityPath; +use re_types_core::ComponentName; + +impl super::ComponentColumnSelector { + /// Create a [`Self`] from an [`EntityPath`] and a [`ComponentName`]. + pub fn new(entity_path: &EntityPath, component_name: ComponentName) -> Self { + Self { + entity_path: entity_path.into(), + component: component_name.as_str().into(), + } + } +} diff --git a/crates/store/re_types/src/blueprint/datatypes/filter_by_event.rs b/crates/store/re_types/src/blueprint/datatypes/filter_by_event.rs new file mode 100644 index 0000000000000..4a3660b8798ba --- /dev/null +++ b/crates/store/re_types/src/blueprint/datatypes/filter_by_event.rs @@ -0,0 +1,225 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_event.fbs". 
+ +#![allow(unused_imports)] +#![allow(unused_parens)] +#![allow(clippy::clone_on_copy)] +#![allow(clippy::cloned_instead_of_copied)] +#![allow(clippy::map_flatten)] +#![allow(clippy::needless_question_mark)] +#![allow(clippy::new_without_default)] +#![allow(clippy::redundant_closure)] +#![allow(clippy::too_many_arguments)] +#![allow(clippy::too_many_lines)] + +use ::re_types_core::external::arrow2; +use ::re_types_core::ComponentName; +use ::re_types_core::SerializationResult; +use ::re_types_core::{ComponentBatch, MaybeOwnedComponentBatch}; +use ::re_types_core::{DeserializationError, DeserializationResult}; + +/// **Datatype**: Configuration for the filter by event feature of the dataframe view. +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct FilterByEvent { + /// Whether the filter by event feature is active. + pub active: crate::datatypes::Bool, + + /// The column used when the filter by event feature is used. + pub column: crate::blueprint::datatypes::ComponentColumnSelector, +} + +impl ::re_types_core::SizeBytes for FilterByEvent { + #[inline] + fn heap_size_bytes(&self) -> u64 { + self.active.heap_size_bytes() + self.column.heap_size_bytes() + } + + #[inline] + fn is_pod() -> bool { + ::is_pod() + && ::is_pod() + } +} + +::re_types_core::macros::impl_into_cow!(FilterByEvent); + +impl ::re_types_core::Loggable for FilterByEvent { + type Name = ::re_types_core::DatatypeName; + + #[inline] + fn name() -> Self::Name { + "rerun.blueprint.datatypes.FilterByEvent".into() + } + + #[inline] + fn arrow_datatype() -> arrow2::datatypes::DataType { + #![allow(clippy::wildcard_imports)] + use arrow2::datatypes::*; + DataType::Struct(std::sync::Arc::new(vec![ + Field::new("active", ::arrow_datatype(), false), + Field::new( + "column", + ::arrow_datatype(), + false, + ), + ])) + } + + fn to_arrow_opt<'a>( + data: impl IntoIterator>>>, + ) -> SerializationResult> + where + Self: Clone + 'a, + { + #![allow(clippy::wildcard_imports)] + use 
::re_types_core::{Loggable as _, ResultExt as _}; + use arrow2::{array::*, datatypes::*}; + Ok({ + let (somes, data): (Vec<_>, Vec<_>) = data + .into_iter() + .map(|datum| { + let datum: Option<::std::borrow::Cow<'a, Self>> = datum.map(Into::into); + (datum.is_some(), datum) + }) + .unzip(); + let bitmap: Option = { + let any_nones = somes.iter().any(|some| !*some); + any_nones.then(|| somes.into()) + }; + StructArray::new( + Self::arrow_datatype(), + vec![ + { + let (somes, active): (Vec<_>, Vec<_>) = data + .iter() + .map(|datum| { + let datum = datum.as_ref().map(|datum| datum.active.clone()); + (datum.is_some(), datum) + }) + .unzip(); + let active_bitmap: Option = { + let any_nones = somes.iter().any(|some| !*some); + any_nones.then(|| somes.into()) + }; + BooleanArray::new( + DataType::Boolean, + active + .into_iter() + .map(|datum| datum.map(|datum| datum.0).unwrap_or_default()) + .collect(), + active_bitmap, + ) + .boxed() + }, + { + let (somes, column): (Vec<_>, Vec<_>) = data + .iter() + .map(|datum| { + let datum = datum.as_ref().map(|datum| datum.column.clone()); + (datum.is_some(), datum) + }) + .unzip(); + let column_bitmap: Option = { + let any_nones = somes.iter().any(|some| !*some); + any_nones.then(|| somes.into()) + }; + { + _ = column_bitmap; + crate::blueprint::datatypes::ComponentColumnSelector::to_arrow_opt( + column, + )? 
+ } + }, + ], + bitmap, + ) + .boxed() + }) + } + + fn from_arrow_opt( + arrow_data: &dyn arrow2::array::Array, + ) -> DeserializationResult>> + where + Self: Sized, + { + #![allow(clippy::wildcard_imports)] + use ::re_types_core::{Loggable as _, ResultExt as _}; + use arrow2::{array::*, buffer::*, datatypes::*}; + Ok({ + let arrow_data = arrow_data + .as_any() + .downcast_ref::() + .ok_or_else(|| { + let expected = Self::arrow_datatype(); + let actual = arrow_data.data_type().clone(); + DeserializationError::datatype_mismatch(expected, actual) + }) + .with_context("rerun.blueprint.datatypes.FilterByEvent")?; + if arrow_data.is_empty() { + Vec::new() + } else { + let (arrow_data_fields, arrow_data_arrays) = + (arrow_data.fields(), arrow_data.values()); + let arrays_by_name: ::std::collections::HashMap<_, _> = arrow_data_fields + .iter() + .map(|field| field.name.as_str()) + .zip(arrow_data_arrays) + .collect(); + let active = { + if !arrays_by_name.contains_key("active") { + return Err(DeserializationError::missing_struct_field( + Self::arrow_datatype(), + "active", + )) + .with_context("rerun.blueprint.datatypes.FilterByEvent"); + } + let arrow_data = &**arrays_by_name["active"]; + arrow_data + .as_any() + .downcast_ref::() + .ok_or_else(|| { + let expected = DataType::Boolean; + let actual = arrow_data.data_type().clone(); + DeserializationError::datatype_mismatch(expected, actual) + }) + .with_context("rerun.blueprint.datatypes.FilterByEvent#active")? + .into_iter() + .map(|res_or_opt| res_or_opt.map(crate::datatypes::Bool)) + }; + let column = { + if !arrays_by_name.contains_key("column") { + return Err(DeserializationError::missing_struct_field( + Self::arrow_datatype(), + "column", + )) + .with_context("rerun.blueprint.datatypes.FilterByEvent"); + } + let arrow_data = &**arrays_by_name["column"]; + crate::blueprint::datatypes::ComponentColumnSelector::from_arrow_opt(arrow_data) + .with_context("rerun.blueprint.datatypes.FilterByEvent#column")? 
+ .into_iter() + }; + arrow2::bitmap::utils::ZipValidity::new_with_validity( + ::itertools::izip!(active, column), + arrow_data.validity(), + ) + .map(|opt| { + opt.map(|(active, column)| { + Ok(Self { + active: active + .ok_or_else(DeserializationError::missing_data) + .with_context("rerun.blueprint.datatypes.FilterByEvent#active")?, + column: column + .ok_or_else(DeserializationError::missing_data) + .with_context("rerun.blueprint.datatypes.FilterByEvent#column")?, + }) + }) + .transpose() + }) + .collect::>>() + .with_context("rerun.blueprint.datatypes.FilterByEvent")? + } + }) + } +} diff --git a/crates/store/re_types/src/blueprint/datatypes/filter_by_range.rs b/crates/store/re_types/src/blueprint/datatypes/filter_by_range.rs new file mode 100644 index 0000000000000..7093d00f33371 --- /dev/null +++ b/crates/store/re_types/src/blueprint/datatypes/filter_by_range.rs @@ -0,0 +1,236 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_range.fbs". + +#![allow(unused_imports)] +#![allow(unused_parens)] +#![allow(clippy::clone_on_copy)] +#![allow(clippy::cloned_instead_of_copied)] +#![allow(clippy::map_flatten)] +#![allow(clippy::needless_question_mark)] +#![allow(clippy::new_without_default)] +#![allow(clippy::redundant_closure)] +#![allow(clippy::too_many_arguments)] +#![allow(clippy::too_many_lines)] + +use ::re_types_core::external::arrow2; +use ::re_types_core::ComponentName; +use ::re_types_core::SerializationResult; +use ::re_types_core::{ComponentBatch, MaybeOwnedComponentBatch}; +use ::re_types_core::{DeserializationError, DeserializationResult}; + +/// **Datatype**: Configuration for the filter-by-range feature of the dataframe view. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct FilterByRange { + /// Beginning of the time range. + pub start: crate::datatypes::TimeInt, + + /// End of the time range (inclusive). 
+ pub end: crate::datatypes::TimeInt, +} + +impl ::re_types_core::SizeBytes for FilterByRange { + #[inline] + fn heap_size_bytes(&self) -> u64 { + self.start.heap_size_bytes() + self.end.heap_size_bytes() + } + + #[inline] + fn is_pod() -> bool { + ::is_pod() && ::is_pod() + } +} + +::re_types_core::macros::impl_into_cow!(FilterByRange); + +impl ::re_types_core::Loggable for FilterByRange { + type Name = ::re_types_core::DatatypeName; + + #[inline] + fn name() -> Self::Name { + "rerun.blueprint.datatypes.FilterByRange".into() + } + + #[inline] + fn arrow_datatype() -> arrow2::datatypes::DataType { + #![allow(clippy::wildcard_imports)] + use arrow2::datatypes::*; + DataType::Struct(std::sync::Arc::new(vec![ + Field::new( + "start", + ::arrow_datatype(), + false, + ), + Field::new("end", ::arrow_datatype(), false), + ])) + } + + fn to_arrow_opt<'a>( + data: impl IntoIterator>>>, + ) -> SerializationResult> + where + Self: Clone + 'a, + { + #![allow(clippy::wildcard_imports)] + use ::re_types_core::{Loggable as _, ResultExt as _}; + use arrow2::{array::*, datatypes::*}; + Ok({ + let (somes, data): (Vec<_>, Vec<_>) = data + .into_iter() + .map(|datum| { + let datum: Option<::std::borrow::Cow<'a, Self>> = datum.map(Into::into); + (datum.is_some(), datum) + }) + .unzip(); + let bitmap: Option = { + let any_nones = somes.iter().any(|some| !*some); + any_nones.then(|| somes.into()) + }; + StructArray::new( + Self::arrow_datatype(), + vec![ + { + let (somes, start): (Vec<_>, Vec<_>) = data + .iter() + .map(|datum| { + let datum = datum.as_ref().map(|datum| datum.start.clone()); + (datum.is_some(), datum) + }) + .unzip(); + let start_bitmap: Option = { + let any_nones = somes.iter().any(|some| !*some); + any_nones.then(|| somes.into()) + }; + PrimitiveArray::new( + DataType::Int64, + start + .into_iter() + .map(|datum| datum.map(|datum| datum.0).unwrap_or_default()) + .collect(), + start_bitmap, + ) + .boxed() + }, + { + let (somes, end): (Vec<_>, Vec<_>) = data + .iter() + 
.map(|datum| { + let datum = datum.as_ref().map(|datum| datum.end.clone()); + (datum.is_some(), datum) + }) + .unzip(); + let end_bitmap: Option = { + let any_nones = somes.iter().any(|some| !*some); + any_nones.then(|| somes.into()) + }; + PrimitiveArray::new( + DataType::Int64, + end.into_iter() + .map(|datum| datum.map(|datum| datum.0).unwrap_or_default()) + .collect(), + end_bitmap, + ) + .boxed() + }, + ], + bitmap, + ) + .boxed() + }) + } + + fn from_arrow_opt( + arrow_data: &dyn arrow2::array::Array, + ) -> DeserializationResult>> + where + Self: Sized, + { + #![allow(clippy::wildcard_imports)] + use ::re_types_core::{Loggable as _, ResultExt as _}; + use arrow2::{array::*, buffer::*, datatypes::*}; + Ok({ + let arrow_data = arrow_data + .as_any() + .downcast_ref::() + .ok_or_else(|| { + let expected = Self::arrow_datatype(); + let actual = arrow_data.data_type().clone(); + DeserializationError::datatype_mismatch(expected, actual) + }) + .with_context("rerun.blueprint.datatypes.FilterByRange")?; + if arrow_data.is_empty() { + Vec::new() + } else { + let (arrow_data_fields, arrow_data_arrays) = + (arrow_data.fields(), arrow_data.values()); + let arrays_by_name: ::std::collections::HashMap<_, _> = arrow_data_fields + .iter() + .map(|field| field.name.as_str()) + .zip(arrow_data_arrays) + .collect(); + let start = { + if !arrays_by_name.contains_key("start") { + return Err(DeserializationError::missing_struct_field( + Self::arrow_datatype(), + "start", + )) + .with_context("rerun.blueprint.datatypes.FilterByRange"); + } + let arrow_data = &**arrays_by_name["start"]; + arrow_data + .as_any() + .downcast_ref::() + .ok_or_else(|| { + let expected = DataType::Int64; + let actual = arrow_data.data_type().clone(); + DeserializationError::datatype_mismatch(expected, actual) + }) + .with_context("rerun.blueprint.datatypes.FilterByRange#start")? 
+ .into_iter() + .map(|opt| opt.copied()) + .map(|res_or_opt| res_or_opt.map(crate::datatypes::TimeInt)) + }; + let end = { + if !arrays_by_name.contains_key("end") { + return Err(DeserializationError::missing_struct_field( + Self::arrow_datatype(), + "end", + )) + .with_context("rerun.blueprint.datatypes.FilterByRange"); + } + let arrow_data = &**arrays_by_name["end"]; + arrow_data + .as_any() + .downcast_ref::() + .ok_or_else(|| { + let expected = DataType::Int64; + let actual = arrow_data.data_type().clone(); + DeserializationError::datatype_mismatch(expected, actual) + }) + .with_context("rerun.blueprint.datatypes.FilterByRange#end")? + .into_iter() + .map(|opt| opt.copied()) + .map(|res_or_opt| res_or_opt.map(crate::datatypes::TimeInt)) + }; + arrow2::bitmap::utils::ZipValidity::new_with_validity( + ::itertools::izip!(start, end), + arrow_data.validity(), + ) + .map(|opt| { + opt.map(|(start, end)| { + Ok(Self { + start: start + .ok_or_else(DeserializationError::missing_data) + .with_context("rerun.blueprint.datatypes.FilterByRange#start")?, + end: end + .ok_or_else(DeserializationError::missing_data) + .with_context("rerun.blueprint.datatypes.FilterByRange#end")?, + }) + }) + .transpose() + }) + .collect::>>() + .with_context("rerun.blueprint.datatypes.FilterByRange")? + } + }) + } +} diff --git a/crates/store/re_types/src/blueprint/datatypes/mod.rs b/crates/store/re_types/src/blueprint/datatypes/mod.rs index fa0dba23ea8d0..67905a645ebc8 100644 --- a/crates/store/re_types/src/blueprint/datatypes/mod.rs +++ b/crates/store/re_types/src/blueprint/datatypes/mod.rs @@ -1,13 +1,20 @@ // DO NOT EDIT! 
This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs mod component_column_selector; +mod component_column_selector_ext; +mod filter_by_event; +mod filter_by_range; mod latest_at_query; mod latest_at_query_ext; +mod selected_columns; mod tensor_dimension_index_slider; mod time_range_query; mod time_range_query_ext; pub use self::component_column_selector::ComponentColumnSelector; +pub use self::filter_by_event::FilterByEvent; +pub use self::filter_by_range::FilterByRange; pub use self::latest_at_query::LatestAtQuery; +pub use self::selected_columns::SelectedColumns; pub use self::tensor_dimension_index_slider::TensorDimensionIndexSlider; pub use self::time_range_query::TimeRangeQuery; diff --git a/crates/store/re_types/src/blueprint/datatypes/selected_columns.rs b/crates/store/re_types/src/blueprint/datatypes/selected_columns.rs new file mode 100644 index 0000000000000..deca0903e4e2f --- /dev/null +++ b/crates/store/re_types/src/blueprint/datatypes/selected_columns.rs @@ -0,0 +1,416 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/datatypes/selected_columns.fbs". + +#![allow(unused_imports)] +#![allow(unused_parens)] +#![allow(clippy::clone_on_copy)] +#![allow(clippy::cloned_instead_of_copied)] +#![allow(clippy::map_flatten)] +#![allow(clippy::needless_question_mark)] +#![allow(clippy::new_without_default)] +#![allow(clippy::redundant_closure)] +#![allow(clippy::too_many_arguments)] +#![allow(clippy::too_many_lines)] + +use ::re_types_core::external::arrow2; +use ::re_types_core::ComponentName; +use ::re_types_core::SerializationResult; +use ::re_types_core::{ComponentBatch, MaybeOwnedComponentBatch}; +use ::re_types_core::{DeserializationError, DeserializationResult}; + +/// **Datatype**: List of selected columns in a dataframe. 
+#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct SelectedColumns { + /// The time columns to include + pub time_columns: Vec, + + /// The component columns to include + pub component_columns: Vec, +} + +impl ::re_types_core::SizeBytes for SelectedColumns { + #[inline] + fn heap_size_bytes(&self) -> u64 { + self.time_columns.heap_size_bytes() + self.component_columns.heap_size_bytes() + } + + #[inline] + fn is_pod() -> bool { + >::is_pod() + && >::is_pod() + } +} + +::re_types_core::macros::impl_into_cow!(SelectedColumns); + +impl ::re_types_core::Loggable for SelectedColumns { + type Name = ::re_types_core::DatatypeName; + + #[inline] + fn name() -> Self::Name { + "rerun.blueprint.datatypes.SelectedColumns".into() + } + + #[inline] + fn arrow_datatype() -> arrow2::datatypes::DataType { + #![allow(clippy::wildcard_imports)] + use arrow2::datatypes::*; + DataType::Struct(std::sync::Arc::new(vec![ + Field::new( + "time_columns", + DataType::List(std::sync::Arc::new(Field::new( + "item", + ::arrow_datatype(), + false, + ))), + false, + ), + Field::new( + "component_columns", + DataType::List(std::sync::Arc::new(Field::new( + "item", + ::arrow_datatype(), + false, + ))), + false, + ), + ])) + } + + fn to_arrow_opt<'a>( + data: impl IntoIterator>>>, + ) -> SerializationResult> + where + Self: Clone + 'a, + { + #![allow(clippy::wildcard_imports)] + use ::re_types_core::{Loggable as _, ResultExt as _}; + use arrow2::{array::*, datatypes::*}; + Ok({ + let (somes, data): (Vec<_>, Vec<_>) = data + .into_iter() + .map(|datum| { + let datum: Option<::std::borrow::Cow<'a, Self>> = datum.map(Into::into); + (datum.is_some(), datum) + }) + .unzip(); + let bitmap: Option = { + let any_nones = somes.iter().any(|some| !*some); + any_nones.then(|| somes.into()) + }; + StructArray::new( + Self::arrow_datatype(), + vec![ + { let (somes, time_columns) : (Vec < _ >, Vec < _ >) = data + .iter().map(| datum | { let datum = datum.as_ref().map(| datum | + { 
datum.time_columns.clone() }); (datum.is_some(), datum) }) + .unzip(); let time_columns_bitmap : Option < + arrow2::bitmap::Bitmap > = { let any_nones = somes.iter().any(| + some | ! * some); any_nones.then(|| somes.into()) }; { use + arrow2:: { buffer::Buffer, offset::OffsetsBuffer }; let offsets = + arrow2::offset::Offsets:: < i32 > ::try_from_lengths(time_columns + .iter().map(| opt | opt.as_ref().map_or(0, | datum | datum + .len()))) ? .into(); let time_columns_inner_data : Vec < _ > = + time_columns.into_iter().flatten().flatten().collect(); let + time_columns_inner_bitmap : Option < arrow2::bitmap::Bitmap > = + None; + ListArray::try_new(DataType::List(std::sync::Arc::new(Field::new("item", + < crate ::datatypes::Utf8 > ::arrow_datatype(), false))), + offsets, { let offsets = arrow2::offset::Offsets:: < i32 > + ::try_from_lengths(time_columns_inner_data.iter().map(| datum | { + datum.0.len() })) ? .into(); let inner_data : + arrow2::buffer::Buffer < u8 > = time_columns_inner_data + .into_iter().flat_map(| datum | { datum.0.0 }).collect(); + + #[allow(unsafe_code, clippy::undocumented_unsafe_blocks)] unsafe + { Utf8Array:: < i32 > ::new_unchecked(DataType::Utf8, offsets, + inner_data, time_columns_inner_bitmap) } .boxed() }, + time_columns_bitmap,) ? .boxed() } }, { let (somes, + component_columns) : (Vec < _ >, Vec < _ >) = data.iter().map(| + datum | { let datum = datum.as_ref().map(| datum | { datum + .component_columns.clone() }); (datum.is_some(), datum) }) + .unzip(); let component_columns_bitmap : Option < + arrow2::bitmap::Bitmap > = { let any_nones = somes.iter().any(| + some | ! * some); any_nones.then(|| somes.into()) }; { use + arrow2:: { buffer::Buffer, offset::OffsetsBuffer }; let offsets = + arrow2::offset::Offsets:: < i32 > + ::try_from_lengths(component_columns.iter().map(| opt | opt + .as_ref().map_or(0, | datum | datum.len()))) ? 
.into(); let + component_columns_inner_data : Vec < _ > = component_columns + .into_iter().flatten().flatten().collect(); let + component_columns_inner_bitmap : Option < arrow2::bitmap::Bitmap + > = None; + ListArray::try_new(DataType::List(std::sync::Arc::new(Field::new("item", + < crate ::blueprint::datatypes::ComponentColumnSelector > + ::arrow_datatype(), false))), offsets, { _ = + component_columns_inner_bitmap; crate + ::blueprint::datatypes::ComponentColumnSelector::to_arrow_opt(component_columns_inner_data + .into_iter().map(Some)) ? }, component_columns_bitmap,) ? + .boxed() } }, + ], + bitmap, + ) + .boxed() + }) + } + + fn from_arrow_opt( + arrow_data: &dyn arrow2::array::Array, + ) -> DeserializationResult>> + where + Self: Sized, + { + #![allow(clippy::wildcard_imports)] + use ::re_types_core::{Loggable as _, ResultExt as _}; + use arrow2::{array::*, buffer::*, datatypes::*}; + Ok({ + let arrow_data = arrow_data + .as_any() + .downcast_ref::() + .ok_or_else(|| { + let expected = Self::arrow_datatype(); + let actual = arrow_data.data_type().clone(); + DeserializationError::datatype_mismatch(expected, actual) + }) + .with_context("rerun.blueprint.datatypes.SelectedColumns")?; + if arrow_data.is_empty() { + Vec::new() + } else { + let (arrow_data_fields, arrow_data_arrays) = + (arrow_data.fields(), arrow_data.values()); + let arrays_by_name: ::std::collections::HashMap<_, _> = arrow_data_fields + .iter() + .map(|field| field.name.as_str()) + .zip(arrow_data_arrays) + .collect(); + let time_columns = { + if !arrays_by_name.contains_key("time_columns") { + return Err(DeserializationError::missing_struct_field( + Self::arrow_datatype(), + "time_columns", + )) + .with_context("rerun.blueprint.datatypes.SelectedColumns"); + } + let arrow_data = &**arrays_by_name["time_columns"]; + { + let arrow_data = arrow_data + .as_any() + .downcast_ref::>() + .ok_or_else(|| { + let expected = DataType::List(std::sync::Arc::new(Field::new( + "item", + ::arrow_datatype(), + 
false, + ))); + let actual = arrow_data.data_type().clone(); + DeserializationError::datatype_mismatch(expected, actual) + }) + .with_context( + "rerun.blueprint.datatypes.SelectedColumns#time_columns", + )?; + if arrow_data.is_empty() { + Vec::new() + } else { + let arrow_data_inner = { + let arrow_data_inner = &**arrow_data.values(); + { + let arrow_data_inner = arrow_data_inner + .as_any() + .downcast_ref::>() + .ok_or_else(|| { + let expected = DataType::Utf8; + let actual = arrow_data_inner.data_type().clone(); + DeserializationError::datatype_mismatch(expected, actual) + }) + .with_context( + "rerun.blueprint.datatypes.SelectedColumns#time_columns", + )?; + let arrow_data_inner_buf = arrow_data_inner.values(); + let offsets = arrow_data_inner.offsets(); + arrow2::bitmap::utils::ZipValidity::new_with_validity( + offsets.iter().zip(offsets.lengths()), + arrow_data_inner.validity(), + ) + .map(|elem| { + elem + .map(|(start, len)| { + let start = *start as usize; + let end = start + len; + if end > arrow_data_inner_buf.len() { + return Err( + DeserializationError::offset_slice_oob( + (start, end), + arrow_data_inner_buf.len(), + ), + ); + } + + #[allow(unsafe_code, clippy::undocumented_unsafe_blocks)] + let data = unsafe { + arrow_data_inner_buf.clone().sliced_unchecked(start, len) + }; + Ok(data) + }) + .transpose() + }) + .map(|res_or_opt| { + res_or_opt + .map(|res_or_opt| { + res_or_opt + .map(|v| crate::datatypes::Utf8( + ::re_types_core::ArrowString(v), + )) + }) + }) + .collect::>>>() + .with_context( + "rerun.blueprint.datatypes.SelectedColumns#time_columns", + )? 
+ .into_iter() + } + .collect::>() + }; + let offsets = arrow_data.offsets(); + arrow2::bitmap::utils::ZipValidity::new_with_validity( + offsets.iter().zip(offsets.lengths()), + arrow_data.validity(), + ) + .map(|elem| { + elem + .map(|(start, len)| { + let start = *start as usize; + let end = start + len; + if end > arrow_data_inner.len() { + return Err( + DeserializationError::offset_slice_oob( + (start, end), + arrow_data_inner.len(), + ), + ); + } + + #[allow(unsafe_code, clippy::undocumented_unsafe_blocks)] + let data = unsafe { + arrow_data_inner.get_unchecked(start..end) + }; + let data = data + .iter() + .cloned() + .map(Option::unwrap_or_default) + .collect(); + Ok(data) + }) + .transpose() + }) + .collect::>>>()? + } + .into_iter() + } + }; + let component_columns = { + if !arrays_by_name.contains_key("component_columns") { + return Err(DeserializationError::missing_struct_field( + Self::arrow_datatype(), + "component_columns", + )) + .with_context("rerun.blueprint.datatypes.SelectedColumns"); + } + let arrow_data = &**arrays_by_name["component_columns"]; + { + let arrow_data = arrow_data + .as_any() + .downcast_ref::>() + .ok_or_else(|| { + let expected = DataType::List( + std::sync::Arc::new( + Field::new( + "item", + ::arrow_datatype(), + false, + ), + ), + ); + let actual = arrow_data.data_type().clone(); + DeserializationError::datatype_mismatch(expected, actual) + }) + .with_context( + "rerun.blueprint.datatypes.SelectedColumns#component_columns", + )?; + if arrow_data.is_empty() { + Vec::new() + } else { + let arrow_data_inner = { + let arrow_data_inner = &**arrow_data.values(); + crate::blueprint::datatypes::ComponentColumnSelector::from_arrow_opt( + arrow_data_inner, + ) + .with_context( + "rerun.blueprint.datatypes.SelectedColumns#component_columns", + )? 
+ .into_iter() + .collect::>() + }; + let offsets = arrow_data.offsets(); + arrow2::bitmap::utils::ZipValidity::new_with_validity( + offsets.iter().zip(offsets.lengths()), + arrow_data.validity(), + ) + .map(|elem| { + elem + .map(|(start, len)| { + let start = *start as usize; + let end = start + len; + if end > arrow_data_inner.len() { + return Err( + DeserializationError::offset_slice_oob( + (start, end), + arrow_data_inner.len(), + ), + ); + } + + #[allow(unsafe_code, clippy::undocumented_unsafe_blocks)] + let data = unsafe { + arrow_data_inner.get_unchecked(start..end) + }; + let data = data + .iter() + .cloned() + .map(Option::unwrap_or_default) + .collect(); + Ok(data) + }) + .transpose() + }) + .collect::>>>()? + } + .into_iter() + } + }; + arrow2::bitmap::utils::ZipValidity::new_with_validity( + ::itertools::izip!(time_columns, component_columns), + arrow_data.validity(), + ) + .map(|opt| { + opt.map(|(time_columns, component_columns)| { + Ok(Self { + time_columns: time_columns + .ok_or_else(DeserializationError::missing_data) + .with_context( + "rerun.blueprint.datatypes.SelectedColumns#time_columns", + )?, + component_columns: component_columns + .ok_or_else(DeserializationError::missing_data) + .with_context( + "rerun.blueprint.datatypes.SelectedColumns#component_columns", + )?, + }) + }) + .transpose() + }) + .collect::>>() + .with_context("rerun.blueprint.datatypes.SelectedColumns")? 
+ } + }) + } +} diff --git a/crates/store/re_types/src/blueprint/views/.gitattributes b/crates/store/re_types/src/blueprint/views/.gitattributes index 72c9570d24e2a..6b3cbae85b758 100644 --- a/crates/store/re_types/src/blueprint/views/.gitattributes +++ b/crates/store/re_types/src/blueprint/views/.gitattributes @@ -2,6 +2,7 @@ .gitattributes linguist-generated=true bar_chart_view.rs linguist-generated=true +dataframe_view.rs linguist-generated=true mod.rs linguist-generated=true spatial2d_view.rs linguist-generated=true spatial3d_view.rs linguist-generated=true diff --git a/crates/store/re_types/src/blueprint/views/dataframe_view.rs b/crates/store/re_types/src/blueprint/views/dataframe_view.rs new file mode 100644 index 0000000000000..1f5eb956b4902 --- /dev/null +++ b/crates/store/re_types/src/blueprint/views/dataframe_view.rs @@ -0,0 +1,76 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/views/dataframe.fbs". + +#![allow(unused_imports)] +#![allow(unused_parens)] +#![allow(clippy::clone_on_copy)] +#![allow(clippy::cloned_instead_of_copied)] +#![allow(clippy::map_flatten)] +#![allow(clippy::needless_question_mark)] +#![allow(clippy::new_without_default)] +#![allow(clippy::redundant_closure)] +#![allow(clippy::too_many_arguments)] +#![allow(clippy::too_many_lines)] + +use ::re_types_core::external::arrow2; +use ::re_types_core::ComponentName; +use ::re_types_core::SerializationResult; +use ::re_types_core::{ComponentBatch, MaybeOwnedComponentBatch}; +use ::re_types_core::{DeserializationError, DeserializationResult}; + +/// **View**: A view to display any data in a tabular form. +/// +/// Any data from the store can be shown, using a flexibly, user-configurable query. +#[derive(Clone, Debug)] +pub struct DataframeView { + /// Query of the dataframe. 
+ pub query: crate::blueprint::archetypes::DataframeQueryV2, +} + +impl ::re_types_core::SizeBytes for DataframeView { + #[inline] + fn heap_size_bytes(&self) -> u64 { + self.query.heap_size_bytes() + } + + #[inline] + fn is_pod() -> bool { + ::is_pod() + } +} + +impl> From for DataframeView { + fn from(v: T) -> Self { + Self { query: v.into() } + } +} + +impl std::borrow::Borrow for DataframeView { + #[inline] + fn borrow(&self) -> &crate::blueprint::archetypes::DataframeQueryV2 { + &self.query + } +} + +impl std::ops::Deref for DataframeView { + type Target = crate::blueprint::archetypes::DataframeQueryV2; + + #[inline] + fn deref(&self) -> &crate::blueprint::archetypes::DataframeQueryV2 { + &self.query + } +} + +impl std::ops::DerefMut for DataframeView { + #[inline] + fn deref_mut(&mut self) -> &mut crate::blueprint::archetypes::DataframeQueryV2 { + &mut self.query + } +} + +impl ::re_types_core::View for DataframeView { + #[inline] + fn identifier() -> ::re_types_core::SpaceViewClassIdentifier { + "Dataframe".into() + } +} diff --git a/crates/store/re_types/src/blueprint/views/mod.rs b/crates/store/re_types/src/blueprint/views/mod.rs index 5ba9a3625be00..331985ec849b7 100644 --- a/crates/store/re_types/src/blueprint/views/mod.rs +++ b/crates/store/re_types/src/blueprint/views/mod.rs @@ -1,6 +1,7 @@ // DO NOT EDIT! 
This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs mod bar_chart_view; +mod dataframe_view; mod spatial2d_view; mod spatial3d_view; mod tensor_view; @@ -9,6 +10,7 @@ mod text_log_view; mod time_series_view; pub use self::bar_chart_view::BarChartView; +pub use self::dataframe_view::DataframeView; pub use self::spatial2d_view::Spatial2DView; pub use self::spatial3d_view::Spatial3DView; pub use self::tensor_view::TensorView; diff --git a/crates/store/re_types/src/components/.gitattributes b/crates/store/re_types/src/components/.gitattributes index ce1cd46d7000e..bfc236554ca93 100644 --- a/crates/store/re_types/src/components/.gitattributes +++ b/crates/store/re_types/src/components/.gitattributes @@ -59,6 +59,7 @@ transform_mat3x3.rs linguist-generated=true transform_relation.rs linguist-generated=true translation3d.rs linguist-generated=true triangle_indices.rs linguist-generated=true +value_range.rs linguist-generated=true vector2d.rs linguist-generated=true vector3d.rs linguist-generated=true video_timestamp.rs linguist-generated=true diff --git a/crates/store/re_types/src/components/mod.rs b/crates/store/re_types/src/components/mod.rs index e65ab7eed0c0d..b2010e7147e2e 100644 --- a/crates/store/re_types/src/components/mod.rs +++ b/crates/store/re_types/src/components/mod.rs @@ -102,6 +102,8 @@ mod translation3d; mod translation3d_ext; mod triangle_indices; mod triangle_indices_ext; +mod value_range; +mod value_range_ext; mod vector2d; mod vector2d_ext; mod vector3d; @@ -168,6 +170,7 @@ pub use self::transform_mat3x3::TransformMat3x3; pub use self::transform_relation::TransformRelation; pub use self::translation3d::Translation3D; pub use self::triangle_indices::TriangleIndices; +pub use self::value_range::ValueRange; pub use self::vector2d::Vector2D; pub use self::vector3d::Vector3D; pub use self::video_timestamp::VideoTimestamp; diff --git a/crates/store/re_types/src/components/range1d_ext.rs 
b/crates/store/re_types/src/components/range1d_ext.rs index 9ff1fe448295c..7ca451ff1df54 100644 --- a/crates/store/re_types/src/components/range1d_ext.rs +++ b/crates/store/re_types/src/components/range1d_ext.rs @@ -35,13 +35,6 @@ impl Range1D { } } -impl From for emath::Rangef { - #[inline] - fn from(range2d: Range1D) -> Self { - Self::from(range2d.0) - } -} - impl Display for Range1D { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "[{}, {}]", self.start(), self.end(),) diff --git a/crates/store/re_types/src/components/value_range.rs b/crates/store/re_types/src/components/value_range.rs new file mode 100644 index 0000000000000..b5c44c0a28ff8 --- /dev/null +++ b/crates/store/re_types/src/components/value_range.rs @@ -0,0 +1,113 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/rust/api.rs +// Based on "crates/store/re_types/definitions/rerun/components/value_range.fbs". + +#![allow(unused_imports)] +#![allow(unused_parens)] +#![allow(clippy::clone_on_copy)] +#![allow(clippy::cloned_instead_of_copied)] +#![allow(clippy::map_flatten)] +#![allow(clippy::needless_question_mark)] +#![allow(clippy::new_without_default)] +#![allow(clippy::redundant_closure)] +#![allow(clippy::too_many_arguments)] +#![allow(clippy::too_many_lines)] + +use ::re_types_core::external::arrow2; +use ::re_types_core::ComponentName; +use ::re_types_core::SerializationResult; +use ::re_types_core::{ComponentBatch, MaybeOwnedComponentBatch}; +use ::re_types_core::{DeserializationError, DeserializationResult}; + +/// **Component**: Range of expected or valid values, specifying a lower and upper bound. 
+#[derive(Clone, Debug, Copy, PartialEq, bytemuck::Pod, bytemuck::Zeroable)] +#[repr(transparent)] +pub struct ValueRange(pub crate::datatypes::Range1D); + +impl ::re_types_core::SizeBytes for ValueRange { + #[inline] + fn heap_size_bytes(&self) -> u64 { + self.0.heap_size_bytes() + } + + #[inline] + fn is_pod() -> bool { + ::is_pod() + } +} + +impl> From for ValueRange { + fn from(v: T) -> Self { + Self(v.into()) + } +} + +impl std::borrow::Borrow for ValueRange { + #[inline] + fn borrow(&self) -> &crate::datatypes::Range1D { + &self.0 + } +} + +impl std::ops::Deref for ValueRange { + type Target = crate::datatypes::Range1D; + + #[inline] + fn deref(&self) -> &crate::datatypes::Range1D { + &self.0 + } +} + +impl std::ops::DerefMut for ValueRange { + #[inline] + fn deref_mut(&mut self) -> &mut crate::datatypes::Range1D { + &mut self.0 + } +} + +::re_types_core::macros::impl_into_cow!(ValueRange); + +impl ::re_types_core::Loggable for ValueRange { + type Name = ::re_types_core::ComponentName; + + #[inline] + fn name() -> Self::Name { + "rerun.components.ValueRange".into() + } + + #[inline] + fn arrow_datatype() -> arrow2::datatypes::DataType { + crate::datatypes::Range1D::arrow_datatype() + } + + fn to_arrow_opt<'a>( + data: impl IntoIterator>>>, + ) -> SerializationResult> + where + Self: Clone + 'a, + { + crate::datatypes::Range1D::to_arrow_opt(data.into_iter().map(|datum| { + datum.map(|datum| match datum.into() { + ::std::borrow::Cow::Borrowed(datum) => ::std::borrow::Cow::Borrowed(&datum.0), + ::std::borrow::Cow::Owned(datum) => ::std::borrow::Cow::Owned(datum.0), + }) + })) + } + + fn from_arrow_opt( + arrow_data: &dyn arrow2::array::Array, + ) -> DeserializationResult>> + where + Self: Sized, + { + crate::datatypes::Range1D::from_arrow_opt(arrow_data) + .map(|v| v.into_iter().map(|v| v.map(Self)).collect()) + } + + #[inline] + fn from_arrow(arrow_data: &dyn arrow2::array::Array) -> DeserializationResult> + where + Self: Sized, + { + 
crate::datatypes::Range1D::from_arrow(arrow_data).map(bytemuck::cast_vec) + } +} diff --git a/crates/store/re_types/src/components/value_range_ext.rs b/crates/store/re_types/src/components/value_range_ext.rs new file mode 100644 index 0000000000000..90b3f103d930b --- /dev/null +++ b/crates/store/re_types/src/components/value_range_ext.rs @@ -0,0 +1,49 @@ +use crate::datatypes; +use std::fmt::Display; + +use super::ValueRange; + +impl ValueRange { + /// Create a new range. + #[inline] + pub fn new(start: f64, end: f64) -> Self { + Self(datatypes::Range1D([start, end])) + } + + /// The start of the range. + #[inline] + pub fn start(&self) -> f64 { + self.0 .0[0] + } + + /// The end of the range. + #[inline] + pub fn end(&self) -> f64 { + self.0 .0[1] + } + + /// The start of the range. + #[inline] + pub fn start_mut(&mut self) -> &mut f64 { + &mut self.0 .0[0] + } + + /// The end of the range. + #[inline] + pub fn end_mut(&mut self) -> &mut f64 { + &mut self.0 .0[1] + } +} + +impl Display for ValueRange { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "[{}, {}]", self.start(), self.end(),) + } +} + +impl Default for ValueRange { + #[inline] + fn default() -> Self { + Self::new(0.0, 1.0) + } +} diff --git a/crates/store/re_types/src/datatypes/channel_datatype_ext.rs b/crates/store/re_types/src/datatypes/channel_datatype_ext.rs index d4dfb01041964..d0991c90a68cb 100644 --- a/crates/store/re_types/src/datatypes/channel_datatype_ext.rs +++ b/crates/store/re_types/src/datatypes/channel_datatype_ext.rs @@ -46,7 +46,7 @@ impl ChannelDatatype { } } - /// What is the minimum value representable by this datatype? + /// What is the minimum finite value representable by this datatype? #[inline] pub fn min_value(&self) -> f64 { match self { @@ -66,7 +66,7 @@ impl ChannelDatatype { } } - /// What is the maximum value representable by this datatype? + /// What is the maximum finite value representable by this datatype? 
#[inline] pub fn max_value(&self) -> f64 { match self { diff --git a/crates/store/re_types/src/datatypes/pixel_format.rs b/crates/store/re_types/src/datatypes/pixel_format.rs index 8d09dc70daa61..47a0fa6743d31 100644 --- a/crates/store/re_types/src/datatypes/pixel_format.rs +++ b/crates/store/re_types/src/datatypes/pixel_format.rs @@ -32,7 +32,7 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Default)] #[repr(u8)] pub enum PixelFormat { - /// NV12 (aka Y_UV12) is a YUV 4:2:0 chroma downsampled format with 12 bits per pixel and 8 bits per channel. + /// `NV12` (aka `Y_UV12`) is a YUV 4:2:0 chroma downsampled format with 12 bits per pixel and 8 bits per channel. /// /// First comes entire image in Y in one plane, /// followed by a plane with interleaved lines ordered as U0, V0, U1, V1, etc. @@ -40,7 +40,7 @@ pub enum PixelFormat { #[allow(clippy::upper_case_acronyms)] NV12 = 26, - /// YUY2 (aka YUYV or YUYV16), is a YUV 4:2:2 chroma downsampled format with 16 bits per pixel and 8 bits per channel. + /// `YUY2` (aka `YUYV` or `YUYV16`), is a YUV 4:2:2 chroma downsampled format with 16 bits per pixel and 8 bits per channel. /// /// The order of the channels is Y0, U0, Y1, V0, all in the same plane. #[allow(clippy::upper_case_acronyms)] @@ -57,10 +57,10 @@ impl ::re_types_core::reflection::Enum for PixelFormat { fn docstring_md(self) -> &'static str { match self { Self::NV12 => { - "NV12 (aka Y_UV12) is a YUV 4:2:0 chroma downsampled format with 12 bits per pixel and 8 bits per channel.\n\nFirst comes entire image in Y in one plane,\nfollowed by a plane with interleaved lines ordered as U0, V0, U1, V1, etc." + "`NV12` (aka `Y_UV12`) is a YUV 4:2:0 chroma downsampled format with 12 bits per pixel and 8 bits per channel.\n\nFirst comes entire image in Y in one plane,\nfollowed by a plane with interleaved lines ordered as U0, V0, U1, V1, etc." 
} Self::YUY2 => { - "YUY2 (aka YUYV or YUYV16), is a YUV 4:2:2 chroma downsampled format with 16 bits per pixel and 8 bits per channel.\n\nThe order of the channels is Y0, U0, Y1, V0, all in the same plane." + "`YUY2` (aka `YUYV` or `YUYV16`), is a YUV 4:2:2 chroma downsampled format with 16 bits per pixel and 8 bits per channel.\n\nThe order of the channels is Y0, U0, Y1, V0, all in the same plane." } } } diff --git a/crates/store/re_types/src/tensor_data.rs b/crates/store/re_types/src/tensor_data.rs index 6b299f6678bcb..08b0d2d5b67e0 100644 --- a/crates/store/re_types/src/tensor_data.rs +++ b/crates/store/re_types/src/tensor_data.rs @@ -146,7 +146,7 @@ impl TensorDataType { } } - /// What is the minimum value representable by this datatype? + /// What is the minimum finite value representable by this datatype? #[inline] pub fn min_value(&self) -> f64 { match self { @@ -166,7 +166,7 @@ impl TensorDataType { } } - /// What is the maximum value representable by this datatype? + /// What is the maximum finite value representable by this datatype? 
#[inline] pub fn max_value(&self) -> f64 { match self { diff --git a/crates/store/re_types/tests/types/depth_image.rs b/crates/store/re_types/tests/types/depth_image.rs index 63d443304c561..f43b836472b28 100644 --- a/crates/store/re_types/tests/types/depth_image.rs +++ b/crates/store/re_types/tests/types/depth_image.rs @@ -26,6 +26,7 @@ fn depth_image_roundtrip() { draw_order: None, colormap: None, point_fill_ratio: None, + depth_range: None, }]; let all_arch_serialized = [ diff --git a/crates/store/re_types/tests/types/tensor.rs b/crates/store/re_types/tests/types/tensor.rs index 8c72dfb4611dd..d440b7fd3b851 100644 --- a/crates/store/re_types/tests/types/tensor.rs +++ b/crates/store/re_types/tests/types/tensor.rs @@ -26,6 +26,7 @@ fn tensor_roundtrip() { buffer: TensorBuffer::U8(vec![1, 2, 3, 4, 5, 6].into()), } .into(), + value_range: None, }]; let all_arch_serialized = [Tensor::try_from(ndarray::array![[1u8, 2, 3], [4, 5, 6]]) diff --git a/crates/store/re_types_blueprint/src/blueprint/components/visualizer_overrides.rs b/crates/store/re_types_blueprint/src/blueprint/components/visualizer_overrides.rs index b7d732e8f7d8a..3a8b3a87bd4b3 100644 --- a/crates/store/re_types_blueprint/src/blueprint/components/visualizer_overrides.rs +++ b/crates/store/re_types_blueprint/src/blueprint/components/visualizer_overrides.rs @@ -34,27 +34,27 @@ pub struct VisualizerOverrides( /// Names of the visualizers that should be active. 
/// /// The built-in visualizers are: - /// - BarChart - /// - Arrows2D - /// - Arrows3D - /// - Asset3D - /// - Boxes2D - /// - Boxes3D - /// - Cameras - /// - DepthImage - /// - Image - /// - Lines2D - /// - Lines3D - /// - Mesh3D - /// - Points2D - /// - Points3D - /// - Transform3DArrows - /// - Tensor - /// - TextDocument - /// - TextLog - /// - SegmentationImage - /// - SeriesLine - /// - SeriesPoint + /// - `BarChart` + /// - `Arrows2D` + /// - `Arrows3D` + /// - `Asset3D` + /// - `Boxes2D` + /// - `Boxes3D` + /// - `Cameras` + /// - `DepthImage` + /// - `Image` + /// - `Lines2D` + /// - `Lines3D` + /// - `Mesh3D` + /// - `Points2D` + /// - `Points3D` + /// - `Transform3DArrows` + /// - `Tensor` + /// - `TextDocument` + /// - `TextLog` + /// - `SegmentationImage` + /// - `SeriesLine` + /// - `SeriesPoint` pub crate::blueprint::datatypes::Utf8List, ); diff --git a/crates/store/re_types_core/src/loggable.rs b/crates/store/re_types_core/src/loggable.rs index dd09ff2f60bcf..cacb8400960af 100644 --- a/crates/store/re_types_core/src/loggable.rs +++ b/crates/store/re_types_core/src/loggable.rs @@ -136,6 +136,8 @@ impl ComponentName { short_name } else if let Some(short_name) = full_name.strip_prefix("rerun.components.") { short_name + } else if let Some(short_name) = full_name.strip_prefix("rerun.controls.") { + short_name } else if let Some(short_name) = full_name.strip_prefix("rerun.") { short_name } else { diff --git a/crates/top/re_sdk/src/recording_stream.rs b/crates/top/re_sdk/src/recording_stream.rs index cc40f591962c3..b95a28d52efa1 100644 --- a/crates/top/re_sdk/src/recording_stream.rs +++ b/crates/top/re_sdk/src/recording_stream.rs @@ -1922,7 +1922,7 @@ impl ThreadInfo { fn with(f: impl FnOnce(&mut Self) -> R) -> R { use std::cell::RefCell; thread_local! 
{ - static THREAD_INFO: RefCell> = RefCell::new(None); + static THREAD_INFO: RefCell> = const { RefCell::new(None) }; } THREAD_INFO.with(|thread_info| { diff --git a/crates/top/re_sdk/src/web_viewer.rs b/crates/top/re_sdk/src/web_viewer.rs index 8322619802c39..44b35bf425770 100644 --- a/crates/top/re_sdk/src/web_viewer.rs +++ b/crates/top/re_sdk/src/web_viewer.rs @@ -106,41 +106,107 @@ impl Drop for WebViewerSink { // ---------------------------------------------------------------------------- -/// Helper to spawn an instance of the [`WebViewerServer`]. -/// This serves the HTTP+Wasm+JS files that make up the web-viewer. -/// -/// Optionally opens a browser with the web-viewer and connects to the provided `target_url`. -/// This url could be a hosted RRD file or a `ws://` url to a running [`re_ws_comms::RerunServer`]. -/// -/// Note: this does not include the websocket server. -/// -/// - `force_wgpu_backend` is an optional string to force a specific backend, either `webgl` or `webgpu`. +/// Helper to spawn an instance of the [`WebViewerServer`] and configure a webviewer url. #[cfg(feature = "web_viewer")] -pub fn host_web_viewer( - bind_ip: &str, - web_port: WebViewerServerPort, - force_wgpu_backend: Option, - video_decoder: Option, - open_browser: bool, - source_url: &str, -) -> anyhow::Result { - let web_server = WebViewerServer::new(bind_ip, web_port)?; - let http_web_viewer_url = web_server.server_url(); - - let mut viewer_url = format!("{http_web_viewer_url}?url={source_url}"); - if let Some(force_graphics) = force_wgpu_backend { - viewer_url = format!("{viewer_url}&renderer={force_graphics}"); - } - if let Some(video_decoder) = video_decoder { - viewer_url = format!("{viewer_url}&video_decoder={video_decoder}"); - } +pub struct WebViewerConfig { + /// Ip to which the http server is bound. + /// + /// Defaults to 0.0.0.0 + pub bind_ip: String, + + /// The port to which the webviewer should bind. + /// + /// Defaults to [`WebViewerServerPort::AUTO`]. 
+ pub web_port: WebViewerServerPort, + + /// The url from which a spawned webviewer should source + /// + /// This url could be a hosted RRD file or a `ws://` url to a running [`re_ws_comms::RerunServer`]. + /// Has no effect if [`Self::open_browser`] is false. + pub source_url: Option, + + /// If set, adjusts the browser url to force a specific backend, either `webgl` or `webgpu`. + /// + /// Has no effect if [`Self::open_browser`] is false. + pub force_wgpu_backend: Option, + + /// If set, adjusts the browser url to set the video decoder setting, either `auto`, `prefer_software` or `prefer_hardware`. + /// + /// Has no effect if [`Self::open_browser`] is false. + pub video_decoder: Option, + + /// If set to `true`, opens the default browser after hosting the webviewer. + /// + /// Defaults to `true`. + pub open_browser: bool, +} - re_log::info!("Hosting a web-viewer at {viewer_url}"); - if open_browser { - webbrowser::open(&viewer_url).ok(); +#[cfg(feature = "web_viewer")] +impl Default for WebViewerConfig { + fn default() -> Self { + Self { + bind_ip: "0.0.0.0".to_owned(), + web_port: WebViewerServerPort::AUTO, + source_url: None, + force_wgpu_backend: None, + video_decoder: None, + open_browser: true, + } } +} - Ok(web_server) +#[cfg(feature = "web_viewer")] +impl WebViewerConfig { + /// Helper to spawn an instance of the [`WebViewerServer`]. + /// This serves the HTTP+Wasm+JS files that make up the web-viewer. + /// + /// The server will immediately start listening for incoming connections + /// and stop doing so when the returned [`WebViewerServer`] is dropped. + /// + /// Note: this does not include the websocket server. 
+ pub fn host_web_viewer(self) -> Result { + let Self { + bind_ip, + source_url, + web_port, + force_wgpu_backend, + video_decoder, + open_browser, + } = self; + + let web_server = WebViewerServer::new(&bind_ip, web_port)?; + let http_web_viewer_url = web_server.server_url(); + + let mut viewer_url = http_web_viewer_url; + + let mut first_arg = true; + let mut append_argument = |arg| { + let arg_delimiter = if first_arg { + first_arg = false; + "?" + } else { + "&" + }; + viewer_url = format!("{viewer_url}{arg_delimiter}{arg}"); + }; + + if let Some(source_url) = source_url { + append_argument(format!("url={source_url}")); + } + if let Some(force_graphics) = force_wgpu_backend { + append_argument(format!("renderer={force_graphics}")); + } + if let Some(video_decoder) = video_decoder { + append_argument(format!("video_decoder={video_decoder}")); + } + + re_log::info!("Hosting a web-viewer at {viewer_url}"); + if open_browser { + webbrowser::open(&viewer_url).ok(); + } + + Ok(web_server) + } } // ---------------------------------------------------------------------------- diff --git a/crates/top/rerun/src/commands/entrypoint.rs b/crates/top/rerun/src/commands/entrypoint.rs index d204090198b5a..71b47617c9fe0 100644 --- a/crates/top/rerun/src/commands/entrypoint.rs +++ b/crates/top/rerun/src/commands/entrypoint.rs @@ -8,7 +8,7 @@ use re_smart_channel::{ReceiveSet, Receiver, SmartMessagePayload}; use crate::{commands::RrdCommands, CallSource}; #[cfg(feature = "web_viewer")] -use re_sdk::web_viewer::host_web_viewer; +use re_sdk::web_viewer::WebViewerConfig; #[cfg(feature = "web_viewer")] use re_web_viewer_server::WebViewerServerPort; #[cfg(feature = "server")] @@ -187,7 +187,7 @@ If no arguments are given, a server will be hosted which a Rerun SDK can connect /// Start the viewer in the browser (instead of locally). /// - /// Requires Rerun to have been compiled with the 'web_viewer' feature. + /// Requires Rerun to have been compiled with the `web_viewer` feature. 
/// /// This implies `--serve`. #[clap(long)] @@ -680,14 +680,16 @@ fn run_impl( if let DataSource::WebSocketAddr(rerun_server_ws_url) = data_sources[0].clone() { // Special case! We are connecting a web-viewer to a web-socket address. // Instead of piping, just host a web-viewer that connects to the web-socket directly: - host_web_viewer( - &args.bind, - args.web_viewer_port, - args.renderer, - args.video_decoder, - true, - &rerun_server_ws_url, - )? + + WebViewerConfig { + bind_ip: args.bind, + web_port: args.web_viewer_port, + source_url: Some(rerun_server_ws_url), + force_wgpu_backend: args.renderer, + video_decoder: args.video_decoder, + open_browser: true, + } + .host_web_viewer()? .block(); return Ok(()); @@ -755,14 +757,15 @@ fn run_impl( let open_browser = args.web_viewer; // This is the server that serves the Wasm+HTML: - host_web_viewer( - &args.bind, - args.web_viewer_port, - args.renderer, - args.video_decoder, + WebViewerConfig { + bind_ip: args.bind, + web_port: args.web_viewer_port, + source_url: Some(_ws_server.server_url()), + force_wgpu_backend: args.renderer, + video_decoder: args.video_decoder, open_browser, - &_ws_server.server_url(), - )? + } + .host_web_viewer()? .block(); // dropping should stop the server } diff --git a/crates/top/rerun_c/src/error.rs b/crates/top/rerun_c/src/error.rs index 61e9090251158..96dfb885a9e96 100644 --- a/crates/top/rerun_c/src/error.rs +++ b/crates/top/rerun_c/src/error.rs @@ -86,8 +86,10 @@ mod tests { use crate::{CError, CErrorCode}; #[test] - #[allow(unsafe_code)] fn write_error_handles_message_overflow() { + #![allow(clippy::ref_as_ptr)] + #![allow(unsafe_code)] + // With ASCII character. 
let description = "a".repeat(CError::MAX_MESSAGE_SIZE_BYTES * 2); let error = CError::new(CErrorCode::Ok, &description); diff --git a/crates/top/rerun_c/src/lib.rs b/crates/top/rerun_c/src/lib.rs index d1f8785a6bafd..83ae11d069b03 100644 --- a/crates/top/rerun_c/src/lib.rs +++ b/crates/top/rerun_c/src/lib.rs @@ -189,7 +189,7 @@ pub struct CDataRow { pub struct CComponentColumns { pub component_type: CComponentTypeHandle, - /// A ListArray with the datatype `List(component_type)`. + /// A `ListArray` with the datatype `List(component_type)`. pub array: arrow2::ffi::ArrowArray, } @@ -463,7 +463,7 @@ thread_local! { /// We need something that is guaranteed to be dropped with the thread shutting down. /// A simple integer value won't do that, `Box` works but seems wasteful, so we use a trivial type with a drop implementation. #[allow(clippy::unnecessary_box_returns)] - pub static THREAD_LIFE_TRACKER: TrivialTypeWithDrop = TrivialTypeWithDrop; + pub static THREAD_LIFE_TRACKER: TrivialTypeWithDrop = const { TrivialTypeWithDrop }; } #[allow(unsafe_code)] diff --git a/crates/utils/re_int_histogram/tests/memory_test.rs b/crates/utils/re_int_histogram/tests/memory_test.rs index c8c0ebf77a6c4..2010f59baec97 100644 --- a/crates/utils/re_int_histogram/tests/memory_test.rs +++ b/crates/utils/re_int_histogram/tests/memory_test.rs @@ -1,7 +1,7 @@ use std::sync::atomic::{AtomicUsize, Ordering::Relaxed}; thread_local! 
{ - static LIVE_BYTES_IN_THREAD: AtomicUsize = AtomicUsize::new(0); + static LIVE_BYTES_IN_THREAD: AtomicUsize = const { AtomicUsize::new(0) }; } pub struct TrackingAllocator { diff --git a/crates/viewer/re_component_ui/src/datatype_uis/mod.rs b/crates/viewer/re_component_ui/src/datatype_uis/mod.rs index 32f9a76078c11..7d40859af3972 100644 --- a/crates/viewer/re_component_ui/src/datatype_uis/mod.rs +++ b/crates/viewer/re_component_ui/src/datatype_uis/mod.rs @@ -1,6 +1,7 @@ mod bool_toggle; mod enum_combobox; mod float_drag; +mod range1d; mod singleline_string; mod vec; mod view_id; @@ -8,6 +9,7 @@ mod view_id; pub use bool_toggle::edit_bool; pub use enum_combobox::edit_view_enum; pub use float_drag::{edit_f32_min_to_max_float, edit_f32_zero_to_max, edit_f32_zero_to_one}; +pub use range1d::edit_view_range1d; pub use singleline_string::{ display_name_ui, display_text_ui, edit_multiline_string, edit_singleline_string, }; diff --git a/crates/viewer/re_component_ui/src/range1d.rs b/crates/viewer/re_component_ui/src/datatype_uis/range1d.rs similarity index 65% rename from crates/viewer/re_component_ui/src/range1d.rs rename to crates/viewer/re_component_ui/src/datatype_uis/range1d.rs index 5c1adef2817eb..a1f109085f7ed 100644 --- a/crates/viewer/re_component_ui/src/range1d.rs +++ b/crates/viewer/re_component_ui/src/datatype_uis/range1d.rs @@ -1,15 +1,26 @@ use egui::NumExt as _; -use re_types::components::Range1D; +use re_types::datatypes::Range1D; use re_viewer_context::MaybeMutRef; -pub fn edit_range1d( +pub fn edit_view_range1d( _ctx: &re_viewer_context::ViewerContext<'_>, + ui: &mut egui::Ui, + value: &mut MaybeMutRef<'_, impl std::ops::DerefMut>, +) -> egui::Response { + let mut value: MaybeMutRef<'_, Range1D> = match value { + MaybeMutRef::Ref(value) => MaybeMutRef::Ref(value), + MaybeMutRef::MutRef(value) => MaybeMutRef::MutRef(value), + }; + edit_view_range1d_impl(ui, &mut value) +} + +fn edit_view_range1d_impl( ui: &mut egui::Ui, value: &mut MaybeMutRef<'_, 
Range1D>, ) -> egui::Response { if let Some(value) = value.as_mut() { - let [min, max] = &mut value.0 .0; + let [min, max] = &mut value.0; let range = (*max - *min).abs(); let speed = (range * 0.01).at_least(0.001); @@ -29,7 +40,7 @@ pub fn edit_range1d( response_min | response_max } else { - let [min, max] = value.0 .0; + let [min, max] = value.0; ui.label(format!( "{} - {}", re_format::format_f64(min), diff --git a/crates/viewer/re_component_ui/src/lib.rs b/crates/viewer/re_component_ui/src/lib.rs index 95102cc3ab084..dd390bc96c3dc 100644 --- a/crates/viewer/re_component_ui/src/lib.rs +++ b/crates/viewer/re_component_ui/src/lib.rs @@ -12,7 +12,6 @@ mod line_strip; mod marker_shape; mod pinhole; mod radius; -mod range1d; mod resolution; mod response_utils; mod timeline; @@ -24,7 +23,7 @@ mod visual_bounds2d; use datatype_uis::{ display_name_ui, display_text_ui, edit_bool, edit_f32_min_to_max_float, edit_f32_zero_to_max, edit_f32_zero_to_one, edit_multiline_string, edit_or_view_vec3d, edit_singleline_string, - edit_view_enum, view_view_id, + edit_view_enum, edit_view_range1d, view_view_id, }; use re_types::{ @@ -32,7 +31,8 @@ use re_types::{ components::{ AggregationPolicy, AlbedoFactor, AxisLength, Color, DepthMeter, DrawOrder, FillMode, FillRatio, GammaCorrection, ImagePlaneDistance, MagnificationFilter, MarkerSize, Name, - Opacity, Scale3D, ShowLabels, StrokeWidth, Text, TransformRelation, Translation3D, + Opacity, Range1D, Scale3D, ShowLabels, StrokeWidth, Text, TransformRelation, Translation3D, + ValueRange, }, Loggable as _, }; @@ -102,6 +102,10 @@ pub fn create_component_ui_registry() -> re_viewer_context::ComponentUiRegistry registry.add_singleline_edit_or_view::(view_view_id); registry.add_singleline_edit_or_view::(view_view_id); + // Range1D components: + registry.add_singleline_edit_or_view::(edit_view_range1d); + registry.add_singleline_edit_or_view::(edit_view_range1d); + // 
-------------------------------------------------------------------------------- // All other special components: // -------------------------------------------------------------------------------- @@ -123,7 +127,6 @@ pub fn create_component_ui_registry() -> re_viewer_context::ComponentUiRegistry registry.add_singleline_edit_or_view(radius::edit_radius_ui); registry.add_singleline_edit_or_view(marker_shape::edit_marker_shape_ui); - registry.add_singleline_edit_or_view(range1d::edit_range1d); registry.add_multiline_edit_or_view(visual_bounds2d::multiline_edit_visual_bounds2d); registry.add_singleline_edit_or_view(visual_bounds2d::singleline_edit_visual_bounds2d); diff --git a/crates/viewer/re_component_ui/src/timeline.rs b/crates/viewer/re_component_ui/src/timeline.rs index df8aeca7581a1..0c217c59c2214 100644 --- a/crates/viewer/re_component_ui/src/timeline.rs +++ b/crates/viewer/re_component_ui/src/timeline.rs @@ -3,6 +3,7 @@ use re_types_core::LoggableBatch as _; use re_viewer_context::external::re_log_types::TimelineName; use re_viewer_context::{MaybeMutRef, ViewerContext}; +//TODO(#7498): might be unneeded after the dataframe view update pub(crate) fn edit_timeline_name( ctx: &ViewerContext<'_>, ui: &mut egui::Ui, diff --git a/crates/viewer/re_data_ui/src/blob.rs b/crates/viewer/re_data_ui/src/blob.rs index a0c34b2e5e925..25b1290487a68 100644 --- a/crates/viewer/re_data_ui/src/blob.rs +++ b/crates/viewer/re_data_ui/src/blob.rs @@ -109,7 +109,8 @@ pub fn blob_preview_and_save_ui( .ok() }); if let Some(image) = &image { - image_preview_ui(ctx, ui, ui_layout, query, entity_path, image); + let colormap = None; // TODO(andreas): Rely on default here for now. 
+ image_preview_ui(ctx, ui, ui_layout, query, entity_path, image, colormap); } // Try to treat it as a video if treating it as image didn't work: else if let Some(blob_row_id) = blob_row_id { @@ -158,12 +159,11 @@ pub fn blob_preview_and_save_ui( let image_stats = ctx .cache .entry(|c: &mut re_viewer_context::ImageStatsCache| c.entry(&image)); - if let Ok(data_range) = re_viewer_context::gpu_bridge::image_data_range_heuristic( + let data_range = re_viewer_context::gpu_bridge::image_data_range_heuristic( &image_stats, &image.format, - ) { - crate::image::copy_image_button_ui(ui, &image, data_range); - } + ); + crate::image::copy_image_button_ui(ui, &image, data_range); } }); } diff --git a/crates/viewer/re_data_ui/src/image.rs b/crates/viewer/re_data_ui/src/image.rs index 56dd270e1455d..caacd42acd803 100644 --- a/crates/viewer/re_data_ui/src/image.rs +++ b/crates/viewer/re_data_ui/src/image.rs @@ -3,7 +3,7 @@ use egui::{NumExt as _, Vec2}; use re_renderer::renderer::ColormappedTexture; use re_viewer_context::{ gpu_bridge::{self, image_to_gpu}, - ImageInfo, ImageStatsCache, UiLayout, ViewerContext, + ColormapWithRange, ImageInfo, ImageStatsCache, UiLayout, ViewerContext, }; /// Show a button letting the user copy the image @@ -37,12 +37,21 @@ pub fn image_preview_ui( query: &re_chunk_store::LatestAtQuery, entity_path: &re_log_types::EntityPath, image: &ImageInfo, + colormap_with_range: Option<&ColormapWithRange>, ) -> Option<()> { let render_ctx = ctx.render_ctx?; let image_stats = ctx.cache.entry(|c: &mut ImageStatsCache| c.entry(image)); let annotations = crate::annotations(ctx, query, entity_path); let debug_name = entity_path.to_string(); - let texture = image_to_gpu(render_ctx, &debug_name, image, &image_stats, &annotations).ok()?; + let texture = image_to_gpu( + render_ctx, + &debug_name, + image, + &image_stats, + &annotations, + colormap_with_range, + ) + .ok()?; texture_preview_ui(render_ctx, ui, ui_layout, &debug_name, texture); Some(()) } diff --git 
a/crates/viewer/re_data_ui/src/instance_path.rs b/crates/viewer/re_data_ui/src/instance_path.rs index 9affa0b25aa8e..1f51324a7fb3a 100644 --- a/crates/viewer/re_data_ui/src/instance_path.rs +++ b/crates/viewer/re_data_ui/src/instance_path.rs @@ -1,3 +1,4 @@ +use egui::Rangef; use nohash_hasher::IntMap; use re_chunk_store::UnitChunkShared; @@ -11,8 +12,8 @@ use re_types::{ }; use re_ui::{ContextExt as _, UiExt as _}; use re_viewer_context::{ - gpu_bridge::image_data_range_heuristic, HoverHighlight, ImageInfo, ImageStatsCache, Item, - UiLayout, ViewerContext, + gpu_bridge::image_data_range_heuristic, ColormapWithRange, HoverHighlight, ImageInfo, + ImageStatsCache, Item, UiLayout, ViewerContext, }; use crate::{blob::blob_preview_and_save_ui, image::image_preview_ui}; @@ -287,40 +288,58 @@ fn preview_if_image_ui( ImageKind::Color }; - let colormap = component_map - .get(&components::Colormap::name()) - .and_then(|colormap| { - colormap - .component_mono::() - .transpose() - .ok() - .flatten() - }); - let image = ImageInfo { buffer_row_id, buffer: image_buffer.0, format: image_format.0, kind, - colormap, }; + let image_stats = ctx.cache.entry(|c: &mut ImageStatsCache| c.entry(&image)); - image_preview_ui(ctx, ui, ui_layout, query, entity_path, &image); + let colormap = component_map + .get(&components::Colormap::name()) + .and_then(|colormap| colormap.component_mono::()?.ok()); + let value_range = component_map + .get(&components::Range1D::name()) + .and_then(|colormap| colormap.component_mono::()?.ok()); + let colormap_with_range = colormap.map(|colormap| ColormapWithRange { + colormap, + value_range: value_range + .map(|r| [r.start() as _, r.end() as _]) + .unwrap_or_else(|| { + if kind == ImageKind::Depth { + ColormapWithRange::default_range_for_depth_images(&image_stats) + } else { + let (min, max) = image_stats.finite_range; + [min as _, max as _] + } + }), + }); + + image_preview_ui( + ctx, + ui, + ui_layout, + query, + entity_path, + &image, + 
colormap_with_range.as_ref(), + ); if ui_layout.is_single_line() || ui_layout == UiLayout::Tooltip { return Some(()); // no more ui } - let image_stats = ctx.cache.entry(|c: &mut ImageStatsCache| c.entry(&image)); - - if let Ok(data_range) = image_data_range_heuristic(&image_stats, &image.format) { - ui.horizontal(|ui| { - image_download_button_ui(ctx, ui, entity_path, &image, data_range); + let data_range = value_range.map_or_else( + || image_data_range_heuristic(&image_stats, &image.format), + |r| Rangef::new(r.start() as _, r.end() as _), + ); + ui.horizontal(|ui| { + image_download_button_ui(ctx, ui, entity_path, &image, data_range); - #[cfg(not(target_arch = "wasm32"))] - crate::image::copy_image_button_ui(ui, &image, data_range); - }); - } + #[cfg(not(target_arch = "wasm32"))] + crate::image::copy_image_button_ui(ui, &image, data_range); + }); // TODO(emilk): we should really support histograms for all types of images if image.format.pixel_format.is_none() diff --git a/crates/viewer/re_data_ui/src/tensor.rs b/crates/viewer/re_data_ui/src/tensor.rs index 62b7e3c892ad0..afe680f47228f 100644 --- a/crates/viewer/re_data_ui/src/tensor.rs +++ b/crates/viewer/re_data_ui/src/tensor.rs @@ -133,10 +133,11 @@ pub fn tensor_summary_ui_grid_contents( ui.end_row(); } // Show finite range only if it is different from the actual range. 
- if let (true, Some((min, max))) = (range != finite_range, finite_range) { + if range != &Some(*finite_range) { ui.label("Finite data range").on_hover_text( "The finite values (ignoring all NaN & -Inf/+Inf) of the tensor range within these bounds" ); + let (min, max) = finite_range; ui.monospace(format!( "[{} - {}]", re_format::format_f64(*min), diff --git a/crates/viewer/re_renderer/shader/depth_cloud.wgsl b/crates/viewer/re_renderer/shader/depth_cloud.wgsl index d5874562c6515..b2784acc7bb45 100644 --- a/crates/viewer/re_renderer/shader/depth_cloud.wgsl +++ b/crates/viewer/re_renderer/shader/depth_cloud.wgsl @@ -42,8 +42,8 @@ struct DepthCloudInfo { /// Point radius is calculated as world-space depth times this value. point_radius_from_world_depth: f32, - /// The maximum depth value in world-space, for use with the colormap. - max_depth_in_world: f32, + /// The minimum & maximum depth value in world-space, for use with the colormap. + min_max_depth_in_world: vec2f, /// Configures color mapping mode, see `colormap.wgsl`. colormap: u32, @@ -120,7 +120,10 @@ fn compute_point_data(quad_idx: u32) -> PointData { if 0.0 < world_space_depth && world_space_depth < f32max { // TODO(cmc): albedo textures - let color = vec4f(colormap_linear(depth_cloud_info.colormap, world_space_depth / depth_cloud_info.max_depth_in_world), 1.0); + let normalized_depth = + (world_space_depth - depth_cloud_info.min_max_depth_in_world.x) / + (depth_cloud_info.min_max_depth_in_world.y - depth_cloud_info.min_max_depth_in_world.x); + let color = vec4f(colormap_linear(depth_cloud_info.colormap, normalized_depth), 1.0); // TODO(cmc): This assumes a pinhole camera; need to support other kinds at some point. 
let intrinsics = depth_cloud_info.depth_camera_intrinsics; diff --git a/crates/viewer/re_renderer/src/allocator/cpu_write_gpu_read_belt.rs b/crates/viewer/re_renderer/src/allocator/cpu_write_gpu_read_belt.rs index cca47e9eb8dd4..5c8e73118f2a3 100644 --- a/crates/viewer/re_renderer/src/allocator/cpu_write_gpu_read_belt.rs +++ b/crates/viewer/re_renderer/src/allocator/cpu_write_gpu_read_belt.rs @@ -52,7 +52,7 @@ pub struct CpuWriteGpuReadBuffer { /// In actuality it is tied to the lifetime of [`chunk_buffer`](#structfield.chunk_buffer)! write_view: wgpu::BufferViewMut<'static>, - /// Range in T elements in write_view that haven't been written yet. + /// Range in T elements in `write_view` that haven't been written yet. unwritten_element_range: std::ops::Range, chunk_buffer: GpuBuffer, @@ -123,15 +123,36 @@ where #[inline] pub fn extend( &mut self, - mut elements: impl Iterator, + mut elements: impl ExactSizeIterator, ) -> Result { re_tracing::profile_function!(); // TODO(emilk): optimize the extend function. // Right now it is 3-4x faster to collect to a vec first, which is crazy. - if true { - let vec = elements.collect::>(); - self.extend_from_slice(&vec)?; + // + // Mimalloc can't align types larger than 64 bytes now and will silently ignore it. + // https://github.com/purpleprotocol/mimalloc_rust/issues/128 + // Therefore, large alignments won't work with collect. + let pretend_mimalloc_aligns_correctly = false; + if std::mem::align_of::() <= 64 || pretend_mimalloc_aligns_correctly { + let vec: Vec = elements.collect(); + + #[allow(clippy::dbg_macro)] + if pretend_mimalloc_aligns_correctly { + dbg!(std::any::type_name::()); + dbg!(std::mem::size_of::()); + dbg!(std::mem::align_of::()); + dbg!(vec.len()); + dbg!(vec.as_ptr()); + dbg!(vec.as_ptr() as usize % std::mem::align_of::()); + } + debug_assert_eq!( + vec.as_ptr() as usize % std::mem::align_of::(), + 0, + "Vec::collect collects into unaligned memory!" 
+ ); + + self.extend_from_slice(vec.as_slice())?; Ok(vec.len()) } else { let num_written_before = self.num_written(); @@ -422,7 +443,7 @@ pub struct CpuWriteGpuReadBelt { /// When closed chunks are mapped again, the map callback sends them here. /// - /// Note that we shouldn't use SyncSender since this can block the Sender if a buffer is full, + /// Note that we shouldn't use `SyncSender` since this can block the `Sender` if a buffer is full, /// which means that in a single threaded situation (Web!) we might deadlock. sender: mpsc::Sender, @@ -486,6 +507,8 @@ impl CpuWriteGpuReadBelt { } /// Allocates a cpu writable buffer for `num_elements` instances of type `T`. + /// + /// The buffer will be aligned to T's alignment, but no less than [`Self::MIN_OFFSET_ALIGNMENT`]. pub fn allocate( &mut self, device: &wgpu::Device, diff --git a/crates/viewer/re_renderer/src/allocator/gpu_readback_belt.rs b/crates/viewer/re_renderer/src/allocator/gpu_readback_belt.rs index 1d85c775fda18..a3d9f2d6810c0 100644 --- a/crates/viewer/re_renderer/src/allocator/gpu_readback_belt.rs +++ b/crates/viewer/re_renderer/src/allocator/gpu_readback_belt.rs @@ -146,7 +146,7 @@ struct Chunk { /// All ranges that are currently in use, i.e. there is a GPU write to it scheduled. ranges_in_use: Vec, - /// Last frame this chunk was received, i.e. the last time a map_async action operation finished with it. + /// Last frame this chunk was received, i.e. the last time a `map_async` action operation finished with it. 
last_received_frame_index: u64, } diff --git a/crates/viewer/re_renderer/src/allocator/uniform_buffer_fill.rs b/crates/viewer/re_renderer/src/allocator/uniform_buffer_fill.rs index f23fe216a4c6e..566086310726e 100644 --- a/crates/viewer/re_renderer/src/allocator/uniform_buffer_fill.rs +++ b/crates/viewer/re_renderer/src/allocator/uniform_buffer_fill.rs @@ -2,27 +2,20 @@ use re_log::ResultExt; use crate::{wgpu_resources::BindGroupEntry, DebugLabel, RenderContext}; -struct UniformBufferAlignmentCheck { +struct UniformBufferSizeCheck { pub _marker: std::marker::PhantomData, } -impl UniformBufferAlignmentCheck { +impl UniformBufferSizeCheck { /// wgpu requires uniform buffers to be aligned to up to 256 bytes. /// - /// This is a property of device limits, see [`WebGPU` specification](https://www.w3.org/TR/webgpu/#limits). - /// Implementations are allowed to advertise a lower alignment requirement, however - /// 256 bytes is fairly common even in modern hardware and is even hardcoded for DX12. - /// - /// Technically this is only relevant when sub-allocating a buffer, as the wgpu backend - /// is internally forced to make sure that the start of any [`wgpu::Buffer`] with [`wgpu::BufferUsages::UNIFORM`] usage - /// has this alignment. Practically, ensuring this alignment everywhere + /// By ensuring that all uniform buffers have a size that is a multiple of 256 bytes, + /// we are guaranteed that bulk copies of multiple uniform buffers in a cpu-write-gpu-read buffer + /// can be copied to (a 256 byte aligned) gpu-readable buffer in a single copy operation. 
/// - /// Alternatively to enforcing this alignment on the type we could: - /// * only align on the gpu buffer - /// -> causes more fine grained `copy_buffer_to_buffer` calls on the gpu encoder - /// * only align on the [`CpuWriteGpuReadBuffer`][crate::allocator::CpuWriteGpuReadBuffer] & gpu buffer - /// -> causes more complicated offset computation on [`CpuWriteGpuReadBuffer`][crate::allocator::CpuWriteGpuReadBuffer] as well as either - /// holes at padding (-> undefined values & slow for write combined!) or complicated nulling of padding + /// This requirement is a property of device limits, see [`WebGPU` specification](https://www.w3.org/TR/webgpu/#limits). + /// Implementations are allowed to advertise a lower alignment requirement, however + /// 256 bytes is fairly common even in modern hardware and is hardcoded to this value for DX12. /// /// About the [`bytemuck::Pod`] requirement (dragged in by [`CpuWriteGpuReadBuffer`][crate::allocator::CpuWriteGpuReadBuffer]): /// [`bytemuck::Pod`] forces us to be explicit about padding as it doesn't allow invisible padding bytes! @@ -30,8 +23,9 @@ impl UniformBufferAlignmentCheck { /// But this leads to more unsafe code, harder to avoid holes in write combined memory access /// and potentially undefined values in the padding bytes on GPU. const CHECK: () = assert!( - std::mem::align_of::() >= 256 && std::mem::size_of::() > 0, - "Uniform buffers need to be bigger than 0 bytes and aligned to 256 bytes. Use `#[repr(C, align(256))]`" + std::mem::size_of::() % 256 == 0 && std::mem::size_of::() > 0, + "Uniform buffers need to have a size that is a multiple of 256 bytes. + Use types like `F32RowPadded` or `PaddingRow` to pad out as needed." 
); } @@ -48,7 +42,7 @@ pub fn create_and_fill_uniform_buffer_batch( re_tracing::profile_function!(label.get().unwrap_or_default()); #[allow(clippy::let_unit_value)] - let _ = UniformBufferAlignmentCheck::::CHECK; + let _ = UniformBufferSizeCheck::::CHECK; if content.len() == 0 { return vec![]; diff --git a/crates/viewer/re_renderer/src/context.rs b/crates/viewer/re_renderer/src/context.rs index 0b012f03fd66b..648389abb5ced 100644 --- a/crates/viewer/re_renderer/src/context.rs +++ b/crates/viewer/re_renderer/src/context.rs @@ -78,7 +78,7 @@ pub struct RenderContext { /// List of unfinished queue submission via this context. /// /// This is currently only about submissions we do via the global encoder in [`ActiveFrameContext`] - /// TODO(andreas): We rely on egui for the "primary" submissions in re_viewer. It would be nice to take full control over all submissions. + /// TODO(andreas): We rely on egui for the "primary" submissions in `re_viewer`. It would be nice to take full control over all submissions. inflight_queue_submissions: Vec, pub active_frame: ActiveFrameContext, diff --git a/crates/viewer/re_renderer/src/draw_phases/outlines.rs b/crates/viewer/re_renderer/src/draw_phases/outlines.rs index ed92c4784d7c2..dfff6b7c4a40a 100644 --- a/crates/viewer/re_renderer/src/draw_phases/outlines.rs +++ b/crates/viewer/re_renderer/src/draw_phases/outlines.rs @@ -138,12 +138,12 @@ mod gpu_data { use crate::wgpu_buffer_types; /// Keep in sync with `jumpflooding_step.wgsl` - #[repr(C, align(256))] + #[repr(C)] #[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] pub struct JumpfloodingStepUniformBuffer { pub step_width: wgpu_buffer_types::U32RowPadded, - /// All this padding hurts. `step_width` be a PushConstant but they are not widely supported enough! + /// All this padding hurts. `step_width` be a `PushConstant` but they are not widely supported enough! 
pub end_padding: [wgpu_buffer_types::PaddingRow; 16 - 1], } } diff --git a/crates/viewer/re_renderer/src/global_bindings.rs b/crates/viewer/re_renderer/src/global_bindings.rs index 1a55c58091ad0..287b55243074a 100644 --- a/crates/viewer/re_renderer/src/global_bindings.rs +++ b/crates/viewer/re_renderer/src/global_bindings.rs @@ -13,7 +13,7 @@ use smallvec::smallvec; /// /// Contains information that is constant for a single frame like camera. /// (does not contain information that is special to a particular renderer) -#[repr(C, align(256))] +#[repr(C)] #[derive(Clone, Copy, Zeroable, Pod)] pub struct FrameUniformBuffer { pub view_from_world: wgpu_buffer_types::Mat4x3, @@ -35,11 +35,11 @@ pub struct FrameUniformBuffer { /// I.e. the UI zoom factor pub pixels_per_point: f32, - /// (tan(fov_y / 2) * aspect_ratio, tan(fov_y /2)), i.e. half ratio of screen dimension to screen distance in x & y. + /// `(tan(fov_y / 2) * aspect_ratio, tan(fov_y /2))`, i.e. half ratio of screen dimension to screen distance in x & y. /// Both values are set to f32max for orthographic projection pub tan_half_fov: wgpu_buffer_types::Vec2RowPadded, - /// re_renderer defined device tier. + /// `re_renderer` defined device tier. 
pub device_tier: wgpu_buffer_types::U32RowPadded, } diff --git a/crates/viewer/re_renderer/src/mesh.rs b/crates/viewer/re_renderer/src/mesh.rs index 32021bedf1f29..e6e2589a49409 100644 --- a/crates/viewer/re_renderer/src/mesh.rs +++ b/crates/viewer/re_renderer/src/mesh.rs @@ -200,7 +200,7 @@ pub(crate) mod gpu_data { use crate::wgpu_buffer_types; /// Keep in sync with [`MaterialUniformBuffer`] in `instanced_mesh.wgsl` - #[repr(C, align(256))] + #[repr(C)] #[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] pub struct MaterialUniformBuffer { pub albedo_factor: wgpu_buffer_types::Vec4, diff --git a/crates/viewer/re_renderer/src/renderer/compositor.rs b/crates/viewer/re_renderer/src/renderer/compositor.rs index d6c3c91ce0649..3dfda9842eb5c 100644 --- a/crates/viewer/re_renderer/src/renderer/compositor.rs +++ b/crates/viewer/re_renderer/src/renderer/compositor.rs @@ -19,7 +19,7 @@ mod gpu_data { use crate::wgpu_buffer_types; /// Keep in sync with `composite.wgsl` - #[repr(C, align(256))] + #[repr(C)] #[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] pub struct CompositeUniformBuffer { pub outline_color_layer_a: wgpu_buffer_types::Vec4, diff --git a/crates/viewer/re_renderer/src/renderer/debug_overlay.rs b/crates/viewer/re_renderer/src/renderer/debug_overlay.rs index 9fc93d454494e..5e4d81cecb198 100644 --- a/crates/viewer/re_renderer/src/renderer/debug_overlay.rs +++ b/crates/viewer/re_renderer/src/renderer/debug_overlay.rs @@ -28,7 +28,7 @@ mod gpu_data { } /// Keep in sync with `debug_overlay.wgsl` - #[repr(C, align(256))] + #[repr(C)] #[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] pub struct DebugOverlayUniformBuffer { pub screen_resolution: wgpu_buffer_types::Vec2, diff --git a/crates/viewer/re_renderer/src/renderer/depth_cloud.rs b/crates/viewer/re_renderer/src/renderer/depth_cloud.rs index 8a15628c21544..cd39b1c9b7144 100644 --- a/crates/viewer/re_renderer/src/renderer/depth_cloud.rs +++ 
b/crates/viewer/re_renderer/src/renderer/depth_cloud.rs @@ -44,7 +44,7 @@ mod gpu_data { const SAMPLE_TYPE_SINT: u32 = 2; const SAMPLE_TYPE_UINT: u32 = 3; - #[repr(C, align(256))] + #[repr(C)] #[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] pub struct DepthCloudInfoUBO { /// The extrinsics of the camera used for the projection. @@ -62,20 +62,20 @@ mod gpu_data { /// Point radius is calculated as world-space depth times this value. pub point_radius_from_world_depth: f32, - /// The maximum depth value in world-space, for use with the colormap. - pub max_depth_in_world: f32, + /// The minimum and maximum depth value in world-space, for use with the colormap. + pub min_max_depth_in_world: [f32; 2], + // --- /// Which colormap should be used. pub colormap: u32, - // --- /// One of `SAMPLE_TYPE_*`. pub sample_type: u32, /// Changes over different draw-phases. pub radius_boost_in_ui_points: f32, - pub _row_padding: [f32; 2], + pub _row_padding: [f32; 1], // --- pub _end_padding: [wgpu_buffer_types::PaddingRow; 16 - 4 - 3 - 1 - 1 - 1], @@ -91,7 +91,7 @@ mod gpu_data { depth_camera_intrinsics, world_depth_from_texture_depth, point_radius_from_world_depth, - max_depth_in_world, + min_max_depth_in_world, depth_dimensions: _, depth_texture, colormap, @@ -117,7 +117,7 @@ mod gpu_data { outline_mask_id: outline_mask_id.0.unwrap_or_default().into(), world_depth_from_texture_depth: *world_depth_from_texture_depth, point_radius_from_world_depth: *point_radius_from_world_depth, - max_depth_in_world: *max_depth_in_world, + min_max_depth_in_world: *min_max_depth_in_world, colormap: *colormap as u32, sample_type, radius_boost_in_ui_points, @@ -145,8 +145,8 @@ pub struct DepthCloud { /// Point radius is calculated as world-space depth times this value. pub point_radius_from_world_depth: f32, - /// The maximum depth value in world-space, for use with the colormap. - pub max_depth_in_world: f32, + /// The minimum and maximum depth value in world-space, for use with the colormap. 
+ pub min_max_depth_in_world: [f32; 2], /// The dimensions of the depth texture in pixels. pub depth_dimensions: glam::UVec2, @@ -168,8 +168,11 @@ pub struct DepthCloud { impl DepthCloud { /// World-space bounding-box. + /// + /// Assumes max extent to be the maximum depth used for colormapping + /// but ignores the minimum depth, using the frustum's origin instead. pub fn world_space_bbox(&self) -> re_math::BoundingBox { - let max_depth = self.max_depth_in_world; + let max_depth = self.min_max_depth_in_world[1]; let w = self.depth_dimensions.x as f32; let h = self.depth_dimensions.y as f32; let corners = [ diff --git a/crates/viewer/re_renderer/src/renderer/generic_skybox.rs b/crates/viewer/re_renderer/src/renderer/generic_skybox.rs index 5fdcbe2e1c7b6..ec59f52034fe2 100644 --- a/crates/viewer/re_renderer/src/renderer/generic_skybox.rs +++ b/crates/viewer/re_renderer/src/renderer/generic_skybox.rs @@ -28,7 +28,7 @@ pub enum GenericSkyboxType { mod gpu_data { use crate::wgpu_buffer_types; - #[repr(C, align(256))] + #[repr(C)] #[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] pub struct UniformBuffer { pub background_type: wgpu_buffer_types::U32RowPadded, diff --git a/crates/viewer/re_renderer/src/renderer/lines.rs b/crates/viewer/re_renderer/src/renderer/lines.rs index c906954903436..fa3526655095e 100644 --- a/crates/viewer/re_renderer/src/renderer/lines.rs +++ b/crates/viewer/re_renderer/src/renderer/lines.rs @@ -178,7 +178,7 @@ pub mod gpu_data { } /// Uniform buffer that changes once per draw data rendering. - #[repr(C, align(256))] + #[repr(C)] #[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] pub struct DrawDataUniformBuffer { pub radius_boost_in_ui_points: wgpu_buffer_types::F32RowPadded, @@ -186,7 +186,7 @@ pub mod gpu_data { } /// Uniform buffer that changes for every batch of line strips. 
- #[repr(C, align(256))] + #[repr(C)] #[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] pub struct BatchUniformBuffer { pub world_from_obj: wgpu_buffer_types::Mat4, diff --git a/crates/viewer/re_renderer/src/renderer/point_cloud.rs b/crates/viewer/re_renderer/src/renderer/point_cloud.rs index 0c2d0ea8ead53..7b03dd17a1a34 100644 --- a/crates/viewer/re_renderer/src/renderer/point_cloud.rs +++ b/crates/viewer/re_renderer/src/renderer/point_cloud.rs @@ -69,7 +69,7 @@ pub mod gpu_data { static_assertions::assert_eq_size!(PositionRadius, glam::Vec4); /// Uniform buffer that changes once per draw data rendering. - #[repr(C, align(256))] + #[repr(C)] #[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] pub struct DrawDataUniformBuffer { pub radius_boost_in_ui_points: wgpu_buffer_types::F32RowPadded, @@ -77,7 +77,7 @@ pub mod gpu_data { } /// Uniform buffer that changes for every batch of points. - #[repr(C, align(256))] + #[repr(C)] #[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] pub struct BatchUniformBuffer { pub world_from_obj: wgpu_buffer_types::Mat4, diff --git a/crates/viewer/re_renderer/src/renderer/rectangles.rs b/crates/viewer/re_renderer/src/renderer/rectangles.rs index d3465ffcc265a..2b3c60dd2f510 100644 --- a/crates/viewer/re_renderer/src/renderer/rectangles.rs +++ b/crates/viewer/re_renderer/src/renderer/rectangles.rs @@ -52,8 +52,7 @@ pub enum ShaderDecoding { Nv12, Yuy2, - /// BGR(A)->RGB(A) conversion is done in the shader. - /// (as opposed to doing it via ``) + /// Do BGR(A)->RGB(A) conversion is in the shader. 
Bgr, } @@ -250,7 +249,7 @@ mod gpu_data { const FILTER_NEAREST: u32 = 1; const FILTER_BILINEAR: u32 = 2; - #[repr(C, align(256))] + #[repr(C)] #[derive(Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)] pub struct UniformBuffer { top_left_corner_position: wgpu_buffer_types::Vec3Unpadded, diff --git a/crates/viewer/re_renderer/src/video/mod.rs b/crates/viewer/re_renderer/src/video/mod.rs index 2fff12b724eb8..f5315bb1df69c 100644 --- a/crates/viewer/re_renderer/src/video/mod.rs +++ b/crates/viewer/re_renderer/src/video/mod.rs @@ -87,9 +87,8 @@ pub struct Video { /// /// On the web this directly corresponds to /// -#[derive( - Debug, Clone, Copy, PartialEq, Eq, Default, Hash, serde::Deserialize, serde::Serialize, -)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] pub enum DecodeHardwareAcceleration { /// May use hardware acceleration if available and compatible with the codec. #[default] diff --git a/crates/viewer/re_renderer/src/wgpu_resources/dynamic_resource_pool.rs b/crates/viewer/re_renderer/src/wgpu_resources/dynamic_resource_pool.rs index 6d8226445afb4..ebaeb9f48b061 100644 --- a/crates/viewer/re_renderer/src/wgpu_resources/dynamic_resource_pool.rs +++ b/crates/viewer/re_renderer/src/wgpu_resources/dynamic_resource_pool.rs @@ -258,7 +258,7 @@ mod tests { } thread_local! 
{ - static DROP_COUNTER: Cell = Cell::new(0); + static DROP_COUNTER: Cell = const { Cell::new(0) }; } #[derive(Debug)] diff --git a/crates/viewer/re_renderer_examples/depth_cloud.rs b/crates/viewer/re_renderer_examples/depth_cloud.rs index 1e0ab23ab3ed7..a6bf6af6183e4 100644 --- a/crates/viewer/re_renderer_examples/depth_cloud.rs +++ b/crates/viewer/re_renderer_examples/depth_cloud.rs @@ -177,7 +177,7 @@ impl RenderDepthClouds { depth_camera_intrinsics: *intrinsics, world_depth_from_texture_depth: 1.0, point_radius_from_world_depth: *point_radius_from_world_depth, - max_depth_in_world: 5.0, + min_max_depth_in_world: [0.0, 5.0], depth_dimensions: depth.dimensions, depth_texture: depth.texture.clone(), colormap: re_renderer::Colormap::Turbo, diff --git a/crates/viewer/re_space_view_dataframe/src/lib.rs b/crates/viewer/re_space_view_dataframe/src/lib.rs index e25c7b8ed0c85..5ceda1054647a 100644 --- a/crates/viewer/re_space_view_dataframe/src/lib.rs +++ b/crates/viewer/re_space_view_dataframe/src/lib.rs @@ -8,6 +8,7 @@ mod expanded_rows; mod query_kind; mod space_view_class; mod view_query; +mod view_query_v2; mod visualizer_system; pub use space_view_class::DataframeSpaceView; diff --git a/crates/viewer/re_space_view_dataframe/src/space_view_class.rs b/crates/viewer/re_space_view_dataframe/src/space_view_class.rs index 609d8c370bf83..a8c6aa42fe5cd 100644 --- a/crates/viewer/re_space_view_dataframe/src/space_view_class.rs +++ b/crates/viewer/re_space_view_dataframe/src/space_view_class.rs @@ -15,7 +15,7 @@ use re_viewport_blueprint::{SpaceViewContents, ViewProperty}; use crate::dataframe_ui::HideColumnAction; use crate::{ dataframe_ui::dataframe_ui, expanded_rows::ExpandedRowsCache, query_kind::QueryKind, - visualizer_system::EmptySystem, + view_query_v2, visualizer_system::EmptySystem, }; #[derive(Default)] @@ -110,7 +110,18 @@ mode sets the default time range to _everything_. 
You can override this in the s _space_origin: &EntityPath, space_view_id: SpaceViewId, ) -> Result<(), SpaceViewSystemExecutionError> { - crate::view_query::query_ui(ctx, ui, state, space_view_id) + crate::view_query::query_ui(ctx, ui, state, space_view_id)?; + + //TODO(ab): just display the UI for now, this has no effect on the view itself yet. + ui.separator(); + let state = state.downcast_mut::()?; + let view_query = view_query_v2::QueryV2::from_blueprint(ctx, space_view_id); + let Some(schema) = &state.schema else { + // Shouldn't happen, except maybe on the first frame, which is too early + // for the user to click the menu anyway. + return Ok(()); + }; + view_query.selection_panel_ui(ctx, ui, space_view_id, schema) } fn extra_title_bar_ui( diff --git a/crates/viewer/re_space_view_dataframe/src/view_query_v2/blueprint.rs b/crates/viewer/re_space_view_dataframe/src/view_query_v2/blueprint.rs new file mode 100644 index 0000000000000..55af18c8d08fc --- /dev/null +++ b/crates/viewer/re_space_view_dataframe/src/view_query_v2/blueprint.rs @@ -0,0 +1,242 @@ +use std::collections::HashSet; + +use crate::dataframe_ui::HideColumnAction; +use crate::view_query_v2::QueryV2; +use re_chunk_store::{ColumnDescriptor, ColumnSelector}; +use re_log_types::{TimeInt, TimelineName}; +use re_types::blueprint::{components, datatypes}; +use re_viewer_context::{SpaceViewSystemExecutionError, ViewerContext}; + +// Accessors wrapping reads/writes to the blueprint store. +impl QueryV2 { + /// Get the query timeline. + /// + /// This tries to read the timeline name from the blueprint. If missing or invalid, the current + /// timeline is used and saved back to the blueprint. + pub(crate) fn timeline( + &self, + ctx: &ViewerContext<'_>, + ) -> Result { + // read the timeline and make sure it actually exists + let timeline = self + .query_property + .component_or_empty::()? 
+ .and_then(|name| { + ctx.recording() + .timelines() + .find(|timeline| timeline.name() == &TimelineName::from(name.as_str())) + .copied() + }); + + // if the timeline is unset, we "freeze" it to the current time panel timeline + let save_timeline = timeline.is_none(); + let timeline = timeline.unwrap_or_else(|| *ctx.rec_cfg.time_ctrl.read().timeline()); + if save_timeline { + self.save_timeline_name(ctx, timeline.name()); + } + + Ok(timeline) + } + + /// Save the timeline to the one specified. + /// + /// Note: this resets the range filter timestamps to -inf/+inf as any other value might be + /// invalidated. + pub(super) fn save_timeline_name(&self, ctx: &ViewerContext<'_>, timeline_name: &TimelineName) { + self.query_property + .save_blueprint_component(ctx, &components::TimelineName::from(timeline_name.as_str())); + + // clearing the range filter is equivalent to setting it to the default -inf/+inf + self.query_property + .clear_blueprint_component::(ctx); + } + + pub(crate) fn range_filter(&self) -> Result<(TimeInt, TimeInt), SpaceViewSystemExecutionError> { + #[allow(clippy::map_unwrap_or)] + Ok(self + .query_property + .component_or_empty::()? + .map(|range_filter| (range_filter.start.into(), range_filter.end.into())) + .unwrap_or((TimeInt::MIN, TimeInt::MAX))) + } + + pub(super) fn save_range_filter(&self, ctx: &ViewerContext<'_>, start: TimeInt, end: TimeInt) { + if (start, end) == (TimeInt::MIN, TimeInt::MAX) { + self.query_property + .clear_blueprint_component::(ctx); + } else { + self.query_property + .save_blueprint_component(ctx, &components::FilterByRange::new(start, end)); + } + } + + pub(crate) fn filter_by_event( + &self, + ) -> Result, SpaceViewSystemExecutionError> { + Ok(self + .query_property + .component_or_empty::()?) 
+ } + + pub(super) fn save_filter_by_event( + &self, + ctx: &ViewerContext<'_>, + filter_by_event: &components::FilterByEvent, + ) { + self.query_property + .save_blueprint_component(ctx, filter_by_event); + } + + pub(crate) fn latest_at_enabled(&self) -> Result { + Ok(self + .query_property + .component_or_empty::()? + .map_or(false, |comp| *comp.0)) + } + + pub(crate) fn save_latest_at_enabled(&self, ctx: &ViewerContext<'_>, enabled: bool) { + self.query_property + .save_blueprint_component(ctx, &components::ApplyLatestAt(enabled.into())); + } + + pub(super) fn save_selected_columns( + &self, + ctx: &ViewerContext<'_>, + columns: impl IntoIterator, + ) { + let mut selected_columns = datatypes::SelectedColumns::default(); + for column in columns { + match column { + ColumnSelector::Control(_) => {} + ColumnSelector::Time(desc) => { + selected_columns + .time_columns + .push(desc.timeline.as_str().into()); + } + ColumnSelector::Component(desc) => { + let blueprint_component_descriptor = + datatypes::ComponentColumnSelector::new(&desc.entity_path, desc.component); + + selected_columns + .component_columns + .push(blueprint_component_descriptor); + } + } + } + + self.query_property + .save_blueprint_component(ctx, &components::SelectedColumns(selected_columns)); + } + + pub(super) fn save_all_columns_selected(&self, ctx: &ViewerContext<'_>) { + self.query_property + .clear_blueprint_component::(ctx); + } + + pub(super) fn save_all_columns_unselected(&self, ctx: &ViewerContext<'_>) { + self.query_property + .save_blueprint_component(ctx, &components::SelectedColumns::default()); + } + + /// Given some view columns, list the columns that should be visible (aka "selected columns"), + /// according to the blueprint. + /// + /// This operates by filtering the view columns based on the blueprint specified columns. + /// + /// Returns `Ok(None)` if all columns should be displayed (aka a column selection isn't provided + /// in the blueprint). 
+ pub(crate) fn apply_column_visibility_to_view_columns( + &self, + ctx: &ViewerContext<'_>, + view_columns: &[ColumnDescriptor], + ) -> Result>, SpaceViewSystemExecutionError> { + let selected_columns = self + .query_property + .component_or_empty::()?; + + // no selected columns means all columns are visible + let Some(datatypes::SelectedColumns { + time_columns, + component_columns, + }) = selected_columns.as_deref() + else { + // select all columns + return Ok(None); + }; + + let selected_time_columns: HashSet = time_columns + .iter() + .map(|timeline_name| timeline_name.as_str().into()) + .collect(); + let selected_component_columns = component_columns.iter().cloned().collect::>(); + + let query_timeline_name = *self.timeline(ctx)?.name(); + let result = view_columns + .iter() + .filter(|column| match column { + ColumnDescriptor::Control(_) => true, + ColumnDescriptor::Time(desc) => { + // we always include the query timeline column because we need it for the dataframe ui + desc.timeline.name() == &query_timeline_name + || selected_time_columns.contains(desc.timeline.name()) + } + ColumnDescriptor::Component(desc) => { + let blueprint_component_descriptor = components::ComponentColumnSelector::new( + &desc.entity_path, + desc.component_name, + ); + + selected_component_columns.contains(&blueprint_component_descriptor) + } + }) + .cloned() + .map(ColumnSelector::from) + .collect(); + + Ok(Some(result)) + } + + #[allow(dead_code)] //TODO(ab): used in next PR + pub(crate) fn handle_hide_column_actions( + &self, + ctx: &ViewerContext<'_>, + view_columns: &[ColumnDescriptor], + actions: Vec, + ) -> Result<(), SpaceViewSystemExecutionError> { + if actions.is_empty() { + return Ok(()); + } + + let mut selected_columns: Vec<_> = self + .apply_column_visibility_to_view_columns(ctx, view_columns)? 
+ .map(|columns| columns.into_iter().collect()) + .unwrap_or_else(|| view_columns.iter().cloned().map(Into::into).collect()); + + for action in actions { + match action { + HideColumnAction::HideTimeColumn { timeline_name } => { + selected_columns.retain(|column| match column { + ColumnSelector::Time(desc) => desc.timeline != timeline_name, + _ => true, + }); + } + + HideColumnAction::HideComponentColumn { + entity_path, + component_name, + } => { + selected_columns.retain(|column| match column { + ColumnSelector::Component(desc) => { + desc.entity_path != entity_path || desc.component != component_name + } + _ => true, + }); + } + } + } + + self.save_selected_columns(ctx, selected_columns); + + Ok(()) + } +} diff --git a/crates/viewer/re_space_view_dataframe/src/view_query_v2/mod.rs b/crates/viewer/re_space_view_dataframe/src/view_query_v2/mod.rs new file mode 100644 index 0000000000000..7c505bf345ab6 --- /dev/null +++ b/crates/viewer/re_space_view_dataframe/src/view_query_v2/mod.rs @@ -0,0 +1,52 @@ +mod blueprint; +mod ui; + +use re_chunk_store::ColumnDescriptor; +use re_types::blueprint::archetypes; +use re_viewer_context::{SpaceViewId, SpaceViewSystemExecutionError, ViewerContext}; +use re_viewport_blueprint::ViewProperty; + +/// Wrapper over the `DataframeQueryV2` blueprint archetype that can also display some UI. +pub(crate) struct QueryV2 { + query_property: ViewProperty, +} + +impl QueryV2 { + /// Create a query object from the blueprint store. + /// + /// See the `blueprint_io` module for more related accessors. + pub(crate) fn from_blueprint(ctx: &ViewerContext<'_>, space_view_id: SpaceViewId) -> Self { + Self { + query_property: ViewProperty::from_archetype::( + ctx.blueprint_db(), + ctx.blueprint_query, + space_view_id, + ), + } + } + + /// Display the selection panel ui for this query. + /// + /// Implementation is in the `ui` module. 
+ pub(crate) fn selection_panel_ui( + &self, + ctx: &ViewerContext<'_>, + ui: &mut egui::Ui, + space_view_id: SpaceViewId, + view_columns: &[ColumnDescriptor], + ) -> Result<(), SpaceViewSystemExecutionError> { + let timeline = self.timeline(ctx)?; + + self.timeline_ui(ctx, ui, &timeline)?; + ui.separator(); + self.filter_range_ui(ctx, ui, &timeline)?; + ui.separator(); + self.filter_event_ui(ctx, ui, &timeline, space_view_id)?; + ui.separator(); + self.column_visibility_ui(ctx, ui, &timeline, view_columns)?; + ui.separator(); + self.latest_at_ui(ctx, ui)?; + + Ok(()) + } +} diff --git a/crates/viewer/re_space_view_dataframe/src/view_query_v2/ui.rs b/crates/viewer/re_space_view_dataframe/src/view_query_v2/ui.rs new file mode 100644 index 0000000000000..c2a3de7282d12 --- /dev/null +++ b/crates/viewer/re_space_view_dataframe/src/view_query_v2/ui.rs @@ -0,0 +1,532 @@ +use std::collections::{BTreeSet, HashSet}; + +use re_chunk_store::{ColumnDescriptor, ColumnSelector}; +use re_log_types::{ + EntityPath, ResolvedTimeRange, TimeInt, TimeType, TimeZone, Timeline, TimelineName, +}; +use re_types::blueprint::components; +use re_types_core::{ComponentName, ComponentNameSet}; +use re_ui::{list_item, UiExt}; +use re_viewer_context::{SpaceViewId, SpaceViewSystemExecutionError, TimeDragValue, ViewerContext}; + +use crate::view_query_v2::QueryV2; + +// UI implementation +impl QueryV2 { + pub(super) fn timeline_ui( + &self, + ctx: &ViewerContext<'_>, + ui: &mut egui::Ui, + timeline: &Timeline, + ) -> Result<(), SpaceViewSystemExecutionError> { + let mut timeline_name = *timeline.name(); + egui::Grid::new("dataframe_view_query_ui_timeline") + .num_columns(2) + .spacing(egui::vec2(8.0, 10.0)) + .show(ui, |ui| -> Result<_, SpaceViewSystemExecutionError> { + ui.grid_left_hand_label("Timeline"); + + if edit_timeline_name(ctx, ui, &mut timeline_name).changed() { + self.save_timeline_name(ctx, &timeline_name); + } + + Ok(()) + }) + .inner + } + + pub(super) fn filter_range_ui( + &self, + 
ctx: &ViewerContext<'_>, + ui: &mut egui::Ui, + timeline: &Timeline, + ) -> Result<(), SpaceViewSystemExecutionError> { + let time_drag_value = if let Some(times) = ctx.recording().time_histogram(timeline) { + TimeDragValue::from_time_histogram(times) + } else { + debug_assert!(false, "This should never happen because `timeline` is guaranteed to be valid by `Self::timeline()`"); + TimeDragValue::from_time_range(0..=0) + }; + + ui.label("Filter rows by time range:"); + let (mut start, mut end) = self.range_filter()?; + + let mut changed = false; + let mut should_display_time_range = false; + list_item::list_item_scope(ui, "dataframe_view_query_ui_range_filter", |ui| { + let mut reset_start = false; + + ui.list_item_flat_noninteractive( + list_item::PropertyContent::new("Start") + .action_button_with_enabled(&re_ui::icons::RESET, start != TimeInt::MIN, || { + reset_start = true; + }) + .value_fn(|ui, _| { + let response = time_boundary_ui( + ui, + &time_drag_value, + None, + timeline.typ(), + ctx.app_options.time_zone, + &mut start, + ); + + changed |= response.changed(); + should_display_time_range |= + response.hovered() || response.dragged() || response.has_focus(); + }), + ); + + if reset_start { + start = TimeInt::MIN; + changed = true; + } + + let mut reset_to = false; + + ui.list_item_flat_noninteractive( + list_item::PropertyContent::new("End") + .action_button_with_enabled(&re_ui::icons::RESET, end != TimeInt::MAX, || { + reset_to = true; + }) + .value_fn(|ui, _| { + let response = time_boundary_ui( + ui, + &time_drag_value, + Some(start), + timeline.typ(), + ctx.app_options.time_zone, + &mut end, + ); + + changed |= response.changed(); + should_display_time_range |= + response.hovered() || response.dragged() || response.has_focus(); + }), + ); + + if reset_to { + end = TimeInt::MAX; + changed = true; + } + }); + + if changed { + self.save_range_filter(ctx, start, end); + } + + if should_display_time_range { + let mut time_ctrl = 
ctx.rec_cfg.time_ctrl.write(); + if time_ctrl.timeline() == timeline { + time_ctrl.highlighted_range = Some(ResolvedTimeRange::new(start, end)); + } + } + + Ok(()) + } + + pub(super) fn filter_event_ui( + &self, + ctx: &ViewerContext<'_>, + ui: &mut egui::Ui, + timeline: &Timeline, + space_view_id: SpaceViewId, + ) -> Result<(), SpaceViewSystemExecutionError> { + // + // Read stuff + // + + let original_filter_by_event = self.filter_by_event()?; + + let (mut active, event_entity, event_component) = original_filter_by_event + .as_ref() + .map(|filter| { + ( + filter.active(), + Some(filter.entity_path()), + Some(filter.component_name()), + ) + }) + .unwrap_or((false, None, None)); + + // + // Filter active? + // + + ui.re_checkbox(&mut active, "Filter by event from:"); + + // + // Event entity + // + + let all_entities = all_pov_entities_for_space_view(ctx, space_view_id, timeline); + + let mut event_entity = event_entity + .and_then(|entity| all_entities.contains(&entity).then_some(entity)) + .or_else(|| all_entities.iter().next().cloned()) + .unwrap_or_else(|| EntityPath::from("/")); + + // + // Event component + // + + let all_components = ctx + .recording_store() + .all_components_on_timeline(timeline, &event_entity) + .unwrap_or_default(); + + // The list of suggested components is built as follows: + // - consider all indicator components + // - for the matching archetypes, take all required components + // - keep those that are actually present + let suggested_components = || { + all_components + .iter() + .filter_map(|c| { + c.indicator_component_archetype() + .and_then(|archetype_short_name| { + ctx.reflection + .archetype_reflection_from_short_name(&archetype_short_name) + }) + }) + .flat_map(|archetype_reflection| { + archetype_reflection + .required_fields() + .map(|field| field.component_name) + }) + .filter(|c| all_components.contains(c)) + .collect::() + }; + + // If the currently saved component is not present in the data, we auto-switch it to a reasonable one.
+ let mut event_component = event_component + .and_then(|component| all_components.contains(&component).then_some(component)) + .or_else(|| suggested_components().first().copied()) + .unwrap_or_else(|| ComponentName::from("-")); + + // + // UI for event entity and component + // + + ui.add_enabled_ui(active, |ui| { + ui.spacing_mut().item_spacing.y = 0.0; + + ui.list_item_flat_noninteractive(list_item::PropertyContent::new("Entity").value_fn( + |ui, _| { + egui::ComboBox::new("pov_entity", "") + .selected_text(event_entity.to_string()) + .show_ui(ui, |ui| { + for entity in all_entities { + let label = entity.to_string(); + ui.selectable_value(&mut event_entity, entity, label); + } + }); + }, + )); + + ui.list_item_flat_noninteractive( + list_item::PropertyContent::new("Component").value_fn(|ui, _| { + egui::ComboBox::new("pov_component", "") + .selected_text(event_component.short_name()) + .show_ui(ui, |ui| { + for component in all_components { + let label = component.short_name(); + ui.selectable_value(&mut event_component, component, label); + } + }); + }), + ); + }); + + // + // Save event if changed + // + + let filter_by_event = + components::FilterByEvent::new(active, &event_entity, event_component); + + if original_filter_by_event.as_ref() != Some(&filter_by_event) { + self.save_filter_by_event(ctx, &filter_by_event); + } + + Ok(()) + } + + pub(super) fn column_visibility_ui( + &self, + ctx: &ViewerContext<'_>, + ui: &mut egui::Ui, + timeline: &Timeline, + view_columns: &[ColumnDescriptor], + ) -> Result<(), SpaceViewSystemExecutionError> { + // Gather our selected columns. + let selected_columns: HashSet<_> = self + .apply_column_visibility_to_view_columns(ctx, view_columns)? 
+ .map(|columns| columns.into_iter().collect()) + .unwrap_or_else(|| view_columns.iter().cloned().map(Into::into).collect()); + + let visible_count = selected_columns.len(); + let hidden_count = view_columns.len() - visible_count; + let visible_count_label = format!("{visible_count} visible, {hidden_count} hidden"); + + let mut new_selected_columns = selected_columns.clone(); + + let modal_ui = |ui: &mut egui::Ui| { + // + // Summary toggle + // + + let indeterminate = visible_count != 0 && hidden_count != 0; + let mut all_enabled = hidden_count == 0; + + if ui + .checkbox_indeterminate(&mut all_enabled, &visible_count_label, indeterminate) + .changed() + { + if all_enabled { + self.save_all_columns_selected(ctx); + } else { + self.save_all_columns_unselected(ctx); + } + } + + ui.add_space(12.0); + + // + // Control columns (always selected) + // + + let mut first = true; + for column in view_columns { + let ColumnDescriptor::Control(_) = column else { + continue; + }; + + if first { + ui.label("Control"); + first = false; + } + + // Control columns are always shown because: + // - now it's just `RowId` + // - the dataframe UI requires the `RowId` column to be present (for tooltips) + let is_enabled = false; + let mut is_visible = true; + + ui.add_enabled_ui(is_enabled, |ui| { + if ui + .re_checkbox(&mut is_visible, column.short_name()) + .on_disabled_hover_text("The query timeline must always be visible") + .changed() + { /* cannot happen for now */ } + }); + } + + // + // Time columns + // + + let mut first = true; + for column in view_columns { + let ColumnDescriptor::Time(time_column_descriptor) = column else { + continue; + }; + + if first { + ui.add_space(6.0); + ui.label("Timelines"); + first = false; + } + + let column_selector: ColumnSelector = column.clone().into(); + + // The query timeline is always active because it's necessary for the dataframe ui + // (for tooltips). 
+ let is_query_timeline = time_column_descriptor.timeline.name() == timeline.name(); + let is_enabled = !is_query_timeline; + let mut is_visible = + is_query_timeline || selected_columns.contains(&column_selector); + + ui.add_enabled_ui(is_enabled, |ui| { + if ui + .re_checkbox(&mut is_visible, column.short_name()) + .on_disabled_hover_text("The query timeline must always be visible") + .changed() + { + if is_visible { + new_selected_columns.insert(column_selector); + } else { + new_selected_columns.remove(&column_selector); + } + } + }); + } + + // + // Component columns + // + + let mut current_entity = None; + for column in view_columns { + let ColumnDescriptor::Component(component_column_descriptor) = column else { + continue; + }; + + if Some(&component_column_descriptor.entity_path) != current_entity.as_ref() { + current_entity = Some(component_column_descriptor.entity_path.clone()); + ui.add_space(6.0); + ui.label(component_column_descriptor.entity_path.to_string()); + } + + let column_selector: ColumnSelector = column.clone().into(); + let mut is_visible = selected_columns.contains(&column_selector); + + if ui + .re_checkbox(&mut is_visible, column.short_name()) + .changed() + { + if is_visible { + new_selected_columns.insert(column_selector); + } else { + new_selected_columns.remove(&column_selector); + } + } + } + }; + + ui.list_item_flat_noninteractive(list_item::PropertyContent::new("Columns").value_fn( + |ui, _| { + egui::menu::menu_button(ui, &visible_count_label, |ui| { + egui::ScrollArea::vertical() + .auto_shrink([false, false]) + .show(ui, modal_ui) + }); + }, + )); + + // save changes of column visibility + if new_selected_columns != selected_columns { + if new_selected_columns.len() == view_columns.len() { + // length match is a guaranteed match because the `selected_columns` sets are built + // from filtering out the view columns + self.save_all_columns_selected(ctx); + } else { + self.save_selected_columns(ctx, new_selected_columns); + } + } + 
+ Ok(()) + } + + pub(super) fn latest_at_ui( + &self, + ctx: &ViewerContext<'_>, + ui: &mut egui::Ui, + ) -> Result<(), SpaceViewSystemExecutionError> { + ui.label("Empty cells:"); + + let mut latest_at = self.latest_at_enabled()?; + let changed = { + ui.re_radio_value(&mut latest_at, false, "Leave empty") + .changed() + } | { + ui.re_radio_value(&mut latest_at, true, "Fill with latest-at values") + .changed() + }; + + if changed { + self.save_latest_at_enabled(ctx, latest_at); + } + + Ok(()) + } +} + +/// Gather all entities that can meaningfully be used as point-of-view for this view. +/// +/// Meaning: +/// - the entity is part of this view +/// - the entity has any component on the chosen timeline +fn all_pov_entities_for_space_view( + ctx: &ViewerContext<'_>, + space_view_id: SpaceViewId, + timeline: &Timeline, +) -> BTreeSet { + let mut all_entities = BTreeSet::new(); + ctx.lookup_query_result(space_view_id) + .tree + .visit(&mut |node| { + if !node.data_result.tree_prefix_only { + let comp_for_entity = ctx + .recording_store() + .all_components_on_timeline(timeline, &node.data_result.entity_path); + if comp_for_entity.is_some_and(|components| !components.is_empty()) { + all_entities.insert(node.data_result.entity_path.clone()); + } + } + true + }); + + all_entities +} + +fn time_boundary_ui( + ui: &mut egui::Ui, + time_drag_value: &TimeDragValue, + low_bound_override: Option, + timeline_typ: TimeType, + time_zone: TimeZone, + time: &mut TimeInt, +) -> egui::Response { + if *time == TimeInt::MAX { + let mut response = ui.button("+āˆž").on_hover_text("Click to edit"); + if response.clicked() { + *time = time_drag_value.max_time(); + response.mark_changed(); + } + response + } else if *time == TimeInt::MIN { + let mut response = ui.button("ā€“āˆž").on_hover_text("Click to edit"); + if response.clicked() { + *time = time_drag_value.min_time(); + response.mark_changed(); + } + response + } else { + match timeline_typ { + TimeType::Time => { + time_drag_value + 
.temporal_drag_value_ui(ui, time, true, low_bound_override, time_zone) + .0 + } + + TimeType::Sequence => { + time_drag_value.sequence_drag_value_ui(ui, time, true, low_bound_override) + } + } + } +} + +fn edit_timeline_name( + ctx: &ViewerContext<'_>, + ui: &mut egui::Ui, + value: &mut TimelineName, +) -> egui::Response { + let mut changed = false; + let mut combobox_response = egui::ComboBox::from_id_salt(&value) + .selected_text(value.as_str()) + .show_ui(ui, |ui| { + for timeline in ctx.recording().timelines() { + let response = + ui.selectable_value(value, *timeline.name(), timeline.name().as_str()); + + changed |= response.changed(); + } + }); + + if changed { + combobox_response.response.mark_changed(); + } + + combobox_response.response +} diff --git a/crates/viewer/re_space_view_spatial/src/contexts/transform_context.rs b/crates/viewer/re_space_view_spatial/src/contexts/transform_context.rs index 4ae02aadb4598..680c7df501dbe 100644 --- a/crates/viewer/re_space_view_spatial/src/contexts/transform_context.rs +++ b/crates/viewer/re_space_view_spatial/src/contexts/transform_context.rs @@ -135,7 +135,7 @@ pub struct TransformContext { /// All unreachable descendant paths of `reference_path`. unreachable_descendants: Vec<(EntityPath, UnreachableTransformReason)>, - /// The first parent of reference_path that is no longer reachable. + /// The first parent of `reference_path` that is no longer reachable. 
first_unreachable_parent: Option<(EntityPath, UnreachableTransformReason)>, } diff --git a/crates/viewer/re_space_view_spatial/src/mesh_loader.rs b/crates/viewer/re_space_view_spatial/src/mesh_loader.rs index 304685ec10d12..1482486d258b4 100644 --- a/crates/viewer/re_space_view_spatial/src/mesh_loader.rs +++ b/crates/viewer/re_space_view_spatial/src/mesh_loader.rs @@ -211,7 +211,6 @@ fn try_get_or_create_albedo_texture( buffer: albedo_texture_buffer.0.clone(), format: albedo_texture_format.0, kind: re_types::image::ImageKind::Color, - colormap: None, }; if re_viewer_context::gpu_bridge::required_shader_decode( diff --git a/crates/viewer/re_space_view_spatial/src/picking.rs b/crates/viewer/re_space_view_spatial/src/picking.rs index 235d200339d25..50929c6cce518 100644 --- a/crates/viewer/re_space_view_spatial/src/picking.rs +++ b/crates/viewer/re_space_view_spatial/src/picking.rs @@ -62,6 +62,7 @@ impl PickingResult { /// Picking context in which picking is performed. pub struct PickingContext { /// Cursor position in the UI coordinate system. + #[allow(unused)] pub pointer_in_ui: glam::Vec2, /// Cursor position on the renderer canvas in pixels. @@ -69,7 +70,7 @@ pub struct PickingContext { /// Cursor position in the UI coordinates after panning & zooming. /// - /// As of writing, for 3D spaces this is equal to [Self::pointer_in_ui], + /// As of writing, for 3D spaces this is equal to [`Self::pointer_in_ui`], /// since we don't allow panning & zooming after perspective projection. 
pub pointer_in_camera_plane: glam::Vec2, diff --git a/crates/viewer/re_space_view_spatial/src/picking_ui_pixel.rs b/crates/viewer/re_space_view_spatial/src/picking_ui_pixel.rs index b624b5ddb8cd0..09c0bca4b110f 100644 --- a/crates/viewer/re_space_view_spatial/src/picking_ui_pixel.rs +++ b/crates/viewer/re_space_view_spatial/src/picking_ui_pixel.rs @@ -293,7 +293,7 @@ enum PixelValueSource<'a> { /// /// As of writing, use this only ifā€¦ /// * the texture is known to be able to read back - /// * the texture format is Rgba8UnormSrgb + /// * the texture format is `Rgba8UnormSrgb` /// * you don't care about alpha (since there's no 24bit textures, we assume we can just ignore it) /// Note that these restrictions are not final, /// but merely what covers the usecases right now with the least amount of effort. diff --git a/crates/viewer/re_space_view_spatial/src/proc_mesh.rs b/crates/viewer/re_space_view_spatial/src/proc_mesh.rs index 38151fbca9ee6..f90ada8f6c1fa 100644 --- a/crates/viewer/re_space_view_spatial/src/proc_mesh.rs +++ b/crates/viewer/re_space_view_spatial/src/proc_mesh.rs @@ -62,8 +62,10 @@ impl ProcMeshKey { /// which is to be drawn as lines rather than triangles. #[derive(Debug)] pub struct WireframeMesh { + #[allow(unused)] pub bbox: re_math::BoundingBox, + #[allow(unused)] pub vertex_count: usize, /// Collection of line strips making up the wireframe. @@ -80,6 +82,7 @@ pub struct WireframeMesh { /// This type is cheap to clone. #[derive(Clone)] pub struct SolidMesh { + #[allow(unused)] pub bbox: re_math::BoundingBox, /// Mesh to render. 
Note that its colors are set to black, so that the diff --git a/crates/viewer/re_space_view_spatial/src/ui_3d.rs b/crates/viewer/re_space_view_spatial/src/ui_3d.rs index 3e8d15bb1c897..bb2741c28b4f5 100644 --- a/crates/viewer/re_space_view_spatial/src/ui_3d.rs +++ b/crates/viewer/re_space_view_spatial/src/ui_3d.rs @@ -50,7 +50,7 @@ pub struct View3DState { /// If this is a camera, it takes over the camera pose, otherwise follows the entity. pub tracked_entity: Option, - /// Eye pose just before we started following an entity [Self::tracked_entity]. + /// Eye pose just before we started following an entity [`Self::tracked_entity`]. camera_before_tracked_entity: Option, eye_interpolation: Option, diff --git a/crates/viewer/re_space_view_spatial/src/visualizers/depth_images.rs b/crates/viewer/re_space_view_spatial/src/visualizers/depth_images.rs index 7788d774e69d2..244a058d49b4f 100644 --- a/crates/viewer/re_space_view_spatial/src/visualizers/depth_images.rs +++ b/crates/viewer/re_space_view_spatial/src/visualizers/depth_images.rs @@ -6,15 +6,16 @@ use re_renderer::renderer::{ColormappedTexture, DepthCloud, DepthClouds}; use re_types::{ archetypes::DepthImage, components::{ - self, Colormap, DepthMeter, DrawOrder, FillRatio, ImageBuffer, ImageFormat, ViewCoordinates, + self, Colormap, DepthMeter, DrawOrder, FillRatio, ImageBuffer, ImageFormat, ValueRange, + ViewCoordinates, }, image::ImageKind, Loggable as _, }; use re_viewer_context::{ - ApplicableEntities, IdentifiedViewSystem, ImageInfo, QueryContext, SpaceViewClass, - SpaceViewSystemExecutionError, TypedComponentFallbackProvider, ViewContext, - ViewContextCollection, ViewQuery, VisualizableEntities, VisualizableFilterContext, + ApplicableEntities, ColormapWithRange, IdentifiedViewSystem, ImageInfo, ImageStatsCache, + QueryContext, SpaceViewClass, SpaceViewSystemExecutionError, TypedComponentFallbackProvider, + ViewContext, ViewContextCollection, ViewQuery, VisualizableEntities, VisualizableFilterContext, 
VisualizerQueryInfo, VisualizerSystem, }; @@ -48,6 +49,8 @@ struct DepthImageComponentData { image: ImageInfo, depth_meter: Option, fill_ratio: Option, + colormap: Option, + value_range: Option<[f64; 2]>, } impl DepthImageVisualizer { @@ -68,15 +71,31 @@ impl DepthImageVisualizer { for data in images { let DepthImageComponentData { - mut image, + image, depth_meter, fill_ratio, + colormap, + value_range, } = data; let depth_meter = depth_meter.unwrap_or_else(|| self.fallback_for(ctx)); // All depth images must have a colormap: - image.colormap = Some(image.colormap.unwrap_or_else(|| self.fallback_for(ctx))); + let colormap = colormap.unwrap_or_else(|| self.fallback_for(ctx)); + let value_range = value_range + .map(|r| [r[0] as f32, r[1] as f32]) + .unwrap_or_else(|| { + // Don't use fallback provider since it has to query information we already have. + let image_stats = ctx + .viewer_ctx + .cache + .entry(|c: &mut ImageStatsCache| c.entry(&image)); + ColormapWithRange::default_range_for_depth_images(&image_stats) + }); + let colormap_with_range = ColormapWithRange { + colormap, + value_range, + }; // First try to create a textured rect for this image. 
// Even if we end up only showing a depth cloud, @@ -86,6 +105,7 @@ impl DepthImageVisualizer { entity_path, ent_context, &image, + Some(&colormap_with_range), re_renderer::Rgba::WHITE, "DepthImage", &mut self.data, @@ -185,12 +205,17 @@ impl DepthImageVisualizer { let pixel_width_from_depth = (0.5 * fov_y).tan() / (0.5 * dimensions.y as f32); let point_radius_from_world_depth = *radius_scale.0 * pixel_width_from_depth; + let min_max_depth_in_world = [ + world_depth_from_texture_depth * depth_texture.range[0], + world_depth_from_texture_depth * depth_texture.range[1], + ]; + Ok(DepthCloud { world_from_rdf, depth_camera_intrinsics: intrinsics.image_from_camera.0.into(), world_depth_from_texture_depth, point_radius_from_world_depth, - max_depth_in_world: world_depth_from_texture_depth * depth_texture.range[1], + min_max_depth_in_world, depth_dimensions: dimensions, depth_texture: depth_texture.texture.clone(), colormap: match depth_texture.color_mapper { @@ -261,18 +286,20 @@ impl VisualizerSystem for DepthImageVisualizer { ImageFormat::name(), ); let all_colormaps = results.iter_as(timeline, Colormap::name()); + let all_value_ranges = results.iter_as(timeline, ValueRange::name()); let all_depth_meters = results.iter_as(timeline, DepthMeter::name()); let all_fill_ratios = results.iter_as(timeline, FillRatio::name()); - let mut data = re_query::range_zip_1x4( + let mut data = re_query::range_zip_1x5( all_buffers_indexed, all_formats_indexed, all_colormaps.component::(), + all_value_ranges.primitive_array::<2, f64>(), all_depth_meters.primitive::(), all_fill_ratios.primitive::(), ) .filter_map( - |(index, buffers, format, colormap, depth_meter, fill_ratio)| { + |(index, buffers, format, colormap, value_range, depth_meter, fill_ratio)| { let buffer = buffers.first()?; Some(DepthImageComponentData { @@ -281,10 +308,11 @@ impl VisualizerSystem for DepthImageVisualizer { buffer: buffer.clone().into(), format: first_copied(format.as_deref())?.0, kind: ImageKind::Depth, - 
colormap: first_copied(colormap.as_deref()), }, depth_meter: first_copied(depth_meter).map(Into::into), fill_ratio: first_copied(fill_ratio).map(Into::into), + colormap: first_copied(colormap.as_deref()), + value_range: first_copied(value_range).map(Into::into), }) }, ); @@ -335,9 +363,46 @@ impl VisualizerSystem for DepthImageVisualizer { } } +impl TypedComponentFallbackProvider for DepthImageVisualizer { + fn fallback_for(&self, _ctx: &QueryContext<'_>) -> DrawOrder { + DrawOrder::DEFAULT_DEPTH_IMAGE + } +} + +impl TypedComponentFallbackProvider for DepthImageVisualizer { + fn fallback_for( + &self, + ctx: &re_viewer_context::QueryContext<'_>, + ) -> re_types::components::ValueRange { + if let Some(((_time, buffer_row_id), image_buffer)) = ctx + .recording() + .latest_at_component::(ctx.target_entity_path, ctx.query) + { + // TODO(andreas): What about overrides on the image format? + if let Some((_, format)) = ctx + .recording() + .latest_at_component::(ctx.target_entity_path, ctx.query) + { + let image = ImageInfo { + buffer_row_id, + buffer: image_buffer.0, + format: format.0, + kind: ImageKind::Depth, + }; + let cache = ctx.viewer_ctx.cache; + let image_stats = cache.entry(|c: &mut ImageStatsCache| c.entry(&image)); + let default_range = ColormapWithRange::default_range_for_depth_images(&image_stats); + return [default_range[0] as f64, default_range[1] as f64].into(); + } + } + + [0.0, f64::MAX].into() + } +} + impl TypedComponentFallbackProvider for DepthImageVisualizer { fn fallback_for(&self, _ctx: &re_viewer_context::QueryContext<'_>) -> Colormap { - Colormap::Turbo + ColormapWithRange::DEFAULT_DEPTH_COLORMAP } } @@ -352,13 +417,7 @@ impl TypedComponentFallbackProvider for DepthImageVisualizer { } } -impl TypedComponentFallbackProvider for DepthImageVisualizer { - fn fallback_for(&self, _ctx: &QueryContext<'_>) -> DrawOrder { - DrawOrder::DEFAULT_DEPTH_IMAGE - } -} - -re_viewer_context::impl_component_fallback_provider!(DepthImageVisualizer => [Colormap, 
DepthMeter, DrawOrder]); +re_viewer_context::impl_component_fallback_provider!(DepthImageVisualizer => [Colormap, ValueRange, DepthMeter, DrawOrder]); fn first_copied(slice: Option<&[T]>) -> Option { slice.and_then(|element| element.first()).copied() diff --git a/crates/viewer/re_space_view_spatial/src/visualizers/encoded_image.rs b/crates/viewer/re_space_view_spatial/src/visualizers/encoded_image.rs index 881c9d5145914..9b094ec9e17f4 100644 --- a/crates/viewer/re_space_view_spatial/src/visualizers/encoded_image.rs +++ b/crates/viewer/re_space_view_spatial/src/visualizers/encoded_image.rs @@ -180,12 +180,14 @@ impl EncodedImageVisualizer { let opacity = opacity.copied().unwrap_or_else(|| self.fallback_for(ctx)); let multiplicative_tint = re_renderer::Rgba::from_white_alpha(opacity.0.clamp(0.0, 1.0)); + let colormap = None; if let Some(textured_rect) = textured_rect_from_image( ctx.viewer_ctx, entity_path, spatial_ctx, &image, + colormap, multiplicative_tint, "EncodedImage", &mut self.data, diff --git a/crates/viewer/re_space_view_spatial/src/visualizers/images.rs b/crates/viewer/re_space_view_spatial/src/visualizers/images.rs index 78bfee579b73d..fb8227bc9a269 100644 --- a/crates/viewer/re_space_view_spatial/src/visualizers/images.rs +++ b/crates/viewer/re_space_view_spatial/src/visualizers/images.rs @@ -151,7 +151,6 @@ impl ImageVisualizer { buffer: buffer.clone().into(), format: first_copied(formats.as_deref())?.0, kind: ImageKind::Color, - colormap: None, }, opacity: first_copied(opacities).map(Into::into), }) @@ -161,12 +160,14 @@ impl ImageVisualizer { let opacity = opacity.unwrap_or_else(|| self.fallback_for(ctx)); let multiplicative_tint = re_renderer::Rgba::from_white_alpha(opacity.0.clamp(0.0, 1.0)); + let colormap = None; if let Some(textured_rect) = textured_rect_from_image( ctx.viewer_ctx, entity_path, spatial_ctx, &image, + colormap, multiplicative_tint, "Image", &mut self.data, diff --git 
a/crates/viewer/re_space_view_spatial/src/visualizers/segmentation_images.rs b/crates/viewer/re_space_view_spatial/src/visualizers/segmentation_images.rs index 7e1f9b203d2b5..d19aaa64c3f95 100644 --- a/crates/viewer/re_space_view_spatial/src/visualizers/segmentation_images.rs +++ b/crates/viewer/re_space_view_spatial/src/visualizers/segmentation_images.rs @@ -109,7 +109,6 @@ impl VisualizerSystem for SegmentationImageVisualizer { buffer: buffer.clone().into(), format: first_copied(formats.as_deref())?.0, kind: ImageKind::Segmentation, - colormap: None, }, opacity: first_copied(opacity).map(Into::into), }) @@ -121,12 +120,14 @@ impl VisualizerSystem for SegmentationImageVisualizer { let opacity = opacity.unwrap_or_else(|| self.fallback_for(ctx)); let multiplicative_tint = re_renderer::Rgba::from_white_alpha(opacity.0.clamp(0.0, 1.0)); + let colormap = None; if let Some(textured_rect) = textured_rect_from_image( ctx.viewer_ctx, entity_path, spatial_ctx, &image, + colormap, multiplicative_tint, "SegmentationImage", &mut self.data, diff --git a/crates/viewer/re_space_view_spatial/src/visualizers/utilities/textured_rect.rs b/crates/viewer/re_space_view_spatial/src/visualizers/utilities/textured_rect.rs index b2e72cb089e92..1a09222abcf7c 100644 --- a/crates/viewer/re_space_view_spatial/src/visualizers/utilities/textured_rect.rs +++ b/crates/viewer/re_space_view_spatial/src/visualizers/utilities/textured_rect.rs @@ -3,18 +3,20 @@ use glam::Vec3; use re_log_types::EntityPath; use re_renderer::renderer; use re_viewer_context::{ - gpu_bridge, ImageInfo, ImageStatsCache, SpaceViewClass as _, ViewerContext, + gpu_bridge, ColormapWithRange, ImageInfo, ImageStatsCache, SpaceViewClass as _, ViewerContext, }; use crate::{contexts::SpatialSceneEntityContext, SpatialSpaceView2D}; use super::SpatialViewVisualizerData; +#[allow(clippy::too_many_arguments)] pub fn textured_rect_from_image( ctx: &ViewerContext<'_>, ent_path: &EntityPath, ent_context: &SpatialSceneEntityContext<'_>, 
image: &ImageInfo, + colormap: Option<&ColormapWithRange>, multiplicative_tint: egui::Rgba, visualizer_name: &'static str, visualizer_data: &mut SpatialViewVisualizerData, @@ -30,6 +32,7 @@ pub fn textured_rect_from_image( image, &tensor_stats, &ent_context.annotations, + colormap, ) { Ok(colormapped_texture) => { // TODO(emilk): let users pick texture filtering. diff --git a/crates/viewer/re_space_view_tensor/Cargo.toml b/crates/viewer/re_space_view_tensor/Cargo.toml index 03b01fe484b93..1d7ea1ec349de 100644 --- a/crates/viewer/re_space_view_tensor/Cargo.toml +++ b/crates/viewer/re_space_view_tensor/Cargo.toml @@ -22,6 +22,7 @@ all-features = true re_chunk_store.workspace = true re_data_ui.workspace = true re_log_types.workspace = true +re_query.workspace = true re_renderer.workspace = true re_space_view.workspace = true re_tracing.workspace = true @@ -29,6 +30,7 @@ re_types.workspace = true re_ui.workspace = true re_viewer_context.workspace = true re_viewport_blueprint.workspace = true + anyhow.workspace = true bytemuck.workspace = true egui.workspace = true diff --git a/crates/viewer/re_space_view_tensor/src/space_view_class.rs b/crates/viewer/re_space_view_tensor/src/space_view_class.rs index fa7933e1b4911..1b3c770a135e5 100644 --- a/crates/viewer/re_space_view_tensor/src/space_view_class.rs +++ b/crates/viewer/re_space_view_tensor/src/space_view_class.rs @@ -2,7 +2,6 @@ use egui::{epaint::TextShape, Align2, NumExt as _, Vec2}; use ndarray::Axis; use re_space_view::{suggest_space_view_for_each_entity, view_property_ui}; -use re_chunk_store::RowId; use re_data_ui::tensor_summary_ui_grid_contents; use re_log_types::EntityPath; use re_types::{ @@ -16,16 +15,17 @@ use re_types::{ }; use re_ui::{list_item, ContextExt as _, UiExt as _}; use re_viewer_context::{ - gpu_bridge, ApplicableEntities, IdentifiedViewSystem as _, IndicatedEntities, PerVisualizer, - SpaceViewClass, SpaceViewClassRegistryError, SpaceViewId, SpaceViewState, - SpaceViewStateExt as _, 
SpaceViewSystemExecutionError, TensorStatsCache, + gpu_bridge, ApplicableEntities, ColormapWithRange, IdentifiedViewSystem as _, + IndicatedEntities, PerVisualizer, SpaceViewClass, SpaceViewClassRegistryError, SpaceViewId, + SpaceViewState, SpaceViewStateExt as _, SpaceViewSystemExecutionError, TensorStatsCache, TypedComponentFallbackProvider, ViewQuery, ViewerContext, VisualizableEntities, }; use re_viewport_blueprint::ViewProperty; use crate::{ dimension_mapping::load_tensor_slice_selection_and_make_valid, - tensor_dimension_mapper::dimension_mapping_ui, visualizer_system::TensorSystem, + tensor_dimension_mapper::dimension_mapping_ui, + visualizer_system::{TensorSystem, TensorView}, }; #[derive(Default)] @@ -37,7 +37,7 @@ type ViewType = re_types::blueprint::views::TensorView; pub struct ViewTensorState { /// Last viewed tensor, copied each frame. /// Used for the selection view. - tensor: Option<(RowId, TensorData)>, + tensor: Option, } impl SpaceViewState for ViewTensorState { @@ -125,10 +125,15 @@ Note: select the space view to configure which dimensions are shown." // TODO(andreas): Listitemify ui.selection_grid("tensor_selection_ui").show(ui, |ui| { - if let Some((tensor_data_row_id, tensor)) = &state.tensor { + if let Some(TensorView { + tensor, + tensor_row_id, + .. + }) = &state.tensor + { let tensor_stats = ctx .cache - .entry(|c: &mut TensorStatsCache| c.entry(*tensor_data_row_id, tensor)); + .entry(|c: &mut TensorStatsCache| c.entry(*tensor_row_id, tensor)); tensor_summary_ui_grid_contents(ui, tensor, &tensor_stats); } @@ -140,7 +145,7 @@ Note: select the space view to configure which dimensions are shown." }); // TODO(#6075): Listitemify - if let Some((_, tensor)) = &state.tensor { + if let Some(TensorView { tensor, .. }) = &state.tensor { let slice_property = ViewProperty::from_archetype::( ctx.blueprint_db(), ctx.blueprint_query, @@ -213,9 +218,9 @@ Note: select the space view to configure which dimensions are shown." 
tensors.len() )); }); - } else if let Some((tensor_data_row_id, tensor)) = tensors.first() { - state.tensor = Some((*tensor_data_row_id, tensor.0.clone())); - self.view_tensor(ctx, ui, state, query.space_view_id, tensor)?; + } else if let Some(tensor_view) = tensors.first() { + state.tensor = Some(tensor_view.clone()); + self.view_tensor(ctx, ui, state, query.space_view_id, &tensor_view.tensor)?; } else { ui.centered_and_justified(|ui| ui.label("(empty)")); } @@ -314,9 +319,14 @@ impl TensorSpaceView { ) -> anyhow::Result<(egui::Response, egui::Painter, egui::Rect)> { re_tracing::profile_function!(); - let Some((tensor_data_row_id, tensor)) = state.tensor.as_ref() else { + let Some(tensor_view) = state.tensor.as_ref() else { anyhow::bail!("No tensor data available."); }; + let TensorView { + tensor_row_id, + tensor, + data_range, + } = &tensor_view; let scalar_mapping = ViewProperty::from_archetype::( ctx.blueprint_db(), @@ -331,17 +341,16 @@ impl TensorSpaceView { let Some(render_ctx) = ctx.render_ctx else { return Err(anyhow::Error::msg("No render context available.")); }; - - let tensor_stats = ctx - .cache - .entry(|c: &mut TensorStatsCache| c.entry(*tensor_data_row_id, tensor)); + let colormap = ColormapWithRange { + colormap, + value_range: [data_range.start() as f32, data_range.end() as f32], + }; let colormapped_texture = super::tensor_slice_to_gpu::colormapped_texture( render_ctx, - *tensor_data_row_id, + *tensor_row_id, tensor, - &tensor_stats, slice_selection, - colormap, + &colormap, gamma, )?; let [width, height] = colormapped_texture.width_height(); diff --git a/crates/viewer/re_space_view_tensor/src/tensor_slice_to_gpu.rs b/crates/viewer/re_space_view_tensor/src/tensor_slice_to_gpu.rs index f3a3e5957fdf6..4855479c7ea1e 100644 --- a/crates/viewer/re_space_view_tensor/src/tensor_slice_to_gpu.rs +++ b/crates/viewer/re_space_view_tensor/src/tensor_slice_to_gpu.rs @@ -5,13 +5,13 @@ use re_renderer::{ }; use re_types::{ 
blueprint::archetypes::TensorSliceSelection, - components::{Colormap, GammaCorrection}, + components::GammaCorrection, datatypes::TensorData, tensor_data::{TensorCastError, TensorDataType}, }; use re_viewer_context::{ - gpu_bridge::{self, colormap_to_re_renderer, tensor_data_range_heuristic, RangeError}, - TensorStats, + gpu_bridge::{self, colormap_to_re_renderer}, + ColormapWithRange, }; use crate::space_view_class::selected_tensor_slice; @@ -23,35 +23,29 @@ pub enum TensorUploadError { #[error("Expected a 2D slice")] Not2D, - - #[error(transparent)] - RangeError(#[from] RangeError), } pub fn colormapped_texture( render_ctx: &re_renderer::RenderContext, tensor_data_row_id: RowId, tensor: &TensorData, - tensor_stats: &TensorStats, slice_selection: &TensorSliceSelection, - colormap: Colormap, + colormap: &ColormapWithRange, gamma: GammaCorrection, ) -> Result> { re_tracing::profile_function!(); - let range = tensor_data_range_heuristic(tensor_stats, tensor.dtype()) - .map_err(|err| TextureManager2DError::DataCreation(err.into()))?; let texture = upload_texture_slice_to_gpu(render_ctx, tensor_data_row_id, tensor, slice_selection)?; Ok(ColormappedTexture { texture, - range, + range: colormap.value_range, decode_srgb: false, multiply_rgb_with_alpha: false, gamma: *gamma.0, color_mapper: re_renderer::renderer::ColorMapper::Function(colormap_to_re_renderer( - colormap, + colormap.colormap, )), shader_decoding: None, }) diff --git a/crates/viewer/re_space_view_tensor/src/visualizer_system.rs b/crates/viewer/re_space_view_tensor/src/visualizer_system.rs index 6740f3b635675..95e6d50154033 100644 --- a/crates/viewer/re_space_view_tensor/src/visualizer_system.rs +++ b/crates/viewer/re_space_view_tensor/src/visualizer_system.rs @@ -1,13 +1,26 @@ use re_chunk_store::{LatestAtQuery, RowId}; -use re_types::{archetypes::Tensor, components::TensorData}; +use re_space_view::{latest_at_with_blueprint_resolved_data, RangeResultsExt}; +use re_types::{ + archetypes::Tensor, + 
components::{TensorData, ValueRange}, + Loggable as _, +}; use re_viewer_context::{ - IdentifiedViewSystem, SpaceViewSystemExecutionError, ViewContext, ViewContextCollection, - ViewQuery, VisualizerQueryInfo, VisualizerSystem, + IdentifiedViewSystem, SpaceViewSystemExecutionError, TensorStats, TensorStatsCache, + TypedComponentFallbackProvider, ViewContext, ViewContextCollection, ViewQuery, + VisualizerQueryInfo, VisualizerSystem, }; +#[derive(Clone)] +pub struct TensorView { + pub tensor_row_id: RowId, + pub tensor: TensorData, + pub data_range: ValueRange, +} + #[derive(Default)] pub struct TensorSystem { - pub tensors: Vec<(RowId, TensorData)>, + pub tensors: Vec, } impl IdentifiedViewSystem for TensorSystem { @@ -32,11 +45,50 @@ impl VisualizerSystem for TensorSystem { for data_result in query.iter_visible_data_results(ctx, Self::identifier()) { let timeline_query = LatestAtQuery::new(query.timeline, query.latest_at); - if let Some(((_time, row_id), tensor)) = ctx - .recording() - .latest_at_component::(&data_result.entity_path, &timeline_query) + let annotations = None; + let query_shadowed_defaults = false; + let results = latest_at_with_blueprint_resolved_data( + ctx, + annotations, + &timeline_query, + data_result, + [TensorData::name(), ValueRange::name()].into_iter(), + query_shadowed_defaults, + ); + + let Some(all_tensor_chunks) = results.get_required_chunks(&TensorData::name()) else { + continue; + }; + + let timeline = query.timeline; + let all_tensors_indexed = all_tensor_chunks.iter().flat_map(move |chunk| { + chunk + .iter_component_indices(&timeline, &TensorData::name()) + .zip(chunk.iter_component::()) + }); + let all_ranges = results.iter_as(timeline, ValueRange::name()); + + for ((_, tensor_row_id), tensors, data_ranges) in + re_query::range_zip_1x1(all_tensors_indexed, all_ranges.component::()) { - self.tensors.push((row_id, tensor)); + let Some(tensor) = tensors.first() else { + continue; + }; + let data_range = data_ranges + 
.and_then(|ranges| ranges.first().copied()) + .unwrap_or_else(|| { + let tensor_stats = ctx + .viewer_ctx + .cache + .entry(|c: &mut TensorStatsCache| c.entry(tensor_row_id, tensor)); + tensor_data_range_heuristic(&tensor_stats, tensor.dtype()) + }); + + self.tensors.push(TensorView { + tensor_row_id, + tensor: tensor.clone(), + data_range, + }); } } @@ -52,4 +104,50 @@ impl VisualizerSystem for TensorSystem { } } -re_viewer_context::impl_component_fallback_provider!(TensorSystem => []); +/// Get a valid, finite range for the gpu to use. +pub fn tensor_data_range_heuristic( + tensor_stats: &TensorStats, + data_type: re_types::tensor_data::TensorDataType, +) -> ValueRange { + let (min, max) = tensor_stats.finite_range; + + // Apply heuristic for ranges that are typically expected depending on the data type and the finite (!) range. + // (we ignore NaN/Inf values heres, since they are usually there by accident!) + #[allow(clippy::tuple_array_conversions)] + ValueRange::from(if data_type.is_float() && 0.0 <= min && max <= 1.0 { + // Float values that are all between 0 and 1, assume that this is the range. + [0.0, 1.0] + } else if 0.0 <= min && max <= 255.0 { + // If all values are between 0 and 255, assume this is the range. + // (This is very common, independent of the data type) + [0.0, 255.0] + } else if min == max { + // uniform range. This can explode the colormapping, so let's map all colors to the middle: + [min - 1.0, max + 1.0] + } else { + // Use range as is if nothing matches. 
+ [min, max] + }) +} + +impl TypedComponentFallbackProvider for TensorSystem { + fn fallback_for( + &self, + ctx: &re_viewer_context::QueryContext<'_>, + ) -> re_types::components::ValueRange { + if let Some(((_time, row_id), tensor)) = ctx + .recording() + .latest_at_component::(ctx.target_entity_path, ctx.query) + { + let tensor_stats = ctx + .viewer_ctx + .cache + .entry(|c: &mut TensorStatsCache| c.entry(row_id, &tensor)); + tensor_data_range_heuristic(&tensor_stats, tensor.dtype()) + } else { + ValueRange::new(0.0, 1.0) + } + } +} + +re_viewer_context::impl_component_fallback_provider!(TensorSystem => [re_types::components::ValueRange]); diff --git a/crates/viewer/re_space_view_text_log/src/visualizer_system.rs b/crates/viewer/re_space_view_text_log/src/visualizer_system.rs index 9140515950bcb..eb6f541d6dd37 100644 --- a/crates/viewer/re_space_view_text_log/src/visualizer_system.rs +++ b/crates/viewer/re_space_view_text_log/src/visualizer_system.rs @@ -1,6 +1,5 @@ use itertools::izip; use re_chunk_store::ResolvedTimeRange; -use re_chunk_store::RowId; use re_entity_db::EntityPath; use re_log_types::TimeInt; use re_log_types::TimePoint; @@ -18,7 +17,6 @@ use re_viewer_context::{ #[derive(Debug, Clone)] pub struct Entry { - pub row_id: RowId, pub entity_path: EntityPath, pub time: TimeInt, pub timepoint: TimePoint, @@ -119,7 +117,7 @@ impl TextLogSystem { let all_frames = izip!(all_timepoints, all_frames); - for (timepoint, ((data_time, row_id), bodies, levels, colors)) in all_frames { + for (timepoint, ((data_time, _row_id), bodies, levels, colors)) in all_frames { let levels = levels.as_deref().unwrap_or(&[]).iter().cloned().map(Some); let colors = colors .unwrap_or(&[]) @@ -136,7 +134,6 @@ impl TextLogSystem { for (text, level, color) in results { self.entries.push(Entry { - row_id, entity_path: data_result.entity_path.clone(), time: data_time, timepoint: timepoint.clone(), diff --git a/crates/viewer/re_space_view_time_series/src/space_view_class.rs 
b/crates/viewer/re_space_view_time_series/src/space_view_class.rs index 5ca909146ae9a..e55b92b572b39 100644 --- a/crates/viewer/re_space_view_time_series/src/space_view_class.rs +++ b/crates/viewer/re_space_view_time_series/src/space_view_class.rs @@ -38,7 +38,7 @@ pub struct TimeSeriesSpaceViewState { /// Was the user dragging the cursor last frame? was_dragging_time_cursor: bool, - /// State of egui_plot's auto bounds before the user started dragging the time cursor. + /// State of `egui_plot`'s auto bounds before the user started dragging the time cursor. saved_auto_bounds: egui::Vec2b, /// The range of the scalar values currently on screen. diff --git a/crates/viewer/re_time_panel/src/data_density_graph.rs b/crates/viewer/re_time_panel/src/data_density_graph.rs index 25ff310ae75c1..a8523d37e9ed0 100644 --- a/crates/viewer/re_time_panel/src/data_density_graph.rs +++ b/crates/viewer/re_time_panel/src/data_density_graph.rs @@ -90,7 +90,7 @@ impl DataDensityGraphPainter { pub struct DensityGraph { /// Number of datapoints per bucket. - /// 0 == min_x, n-1 == max_x. + /// `0 == min_x, n-1 == max_x`. buckets: Vec, min_x: f32, max_x: f32, diff --git a/crates/viewer/re_viewer/data/quick_start_guides/rust_connect.md b/crates/viewer/re_viewer/data/quick_start_guides/rust_connect.md index 1130ccd7ea034..fb8bde3efc96d 100644 --- a/crates/viewer/re_viewer/data/quick_start_guides/rust_connect.md +++ b/crates/viewer/re_viewer/data/quick_start_guides/rust_connect.md @@ -10,7 +10,7 @@ Let's try it out in a brand-new Rust project: cargo init cube && cd cube && cargo add rerun --features native_viewer ``` -Note that the Rerun SDK requires a working installation of Rust 1.76+. +Note that the Rerun SDK requires a working installation of Rust 1.79+. 
## Logging your own data diff --git a/crates/viewer/re_viewer/data/quick_start_guides/rust_spawn.md b/crates/viewer/re_viewer/data/quick_start_guides/rust_spawn.md index fc9cfc1c6edd3..639c828875c23 100644 --- a/crates/viewer/re_viewer/data/quick_start_guides/rust_spawn.md +++ b/crates/viewer/re_viewer/data/quick_start_guides/rust_spawn.md @@ -10,7 +10,7 @@ Let's try it out in a brand-new Rust project: cargo init cube && cd cube && cargo add rerun ``` -Note that the Rerun SDK requires a working installation of Rust 1.76+. +Note that the Rerun SDK requires a working installation of Rust 1.79+. ## Logging your own data diff --git a/crates/viewer/re_viewer/src/blueprint/validation_gen/mod.rs b/crates/viewer/re_viewer/src/blueprint/validation_gen/mod.rs index aaf303d8bde75..73f857bb01fb2 100644 --- a/crates/viewer/re_viewer/src/blueprint/validation_gen/mod.rs +++ b/crates/viewer/re_viewer/src/blueprint/validation_gen/mod.rs @@ -2,11 +2,14 @@ use super::validation::validate_component; use re_entity_db::EntityDb; pub use re_types::blueprint::components::ActiveTab; +pub use re_types::blueprint::components::ApplyLatestAt; pub use re_types::blueprint::components::BackgroundKind; pub use re_types::blueprint::components::ColumnSelectionMode; pub use re_types::blueprint::components::ColumnShare; pub use re_types::blueprint::components::ComponentColumnSelector; pub use re_types::blueprint::components::Corner2D; +pub use re_types::blueprint::components::FilterByEvent; +pub use re_types::blueprint::components::FilterByRange; pub use re_types::blueprint::components::IncludedContent; pub use re_types::blueprint::components::Interactive; pub use re_types::blueprint::components::LatestAtQueries; @@ -15,6 +18,7 @@ pub use re_types::blueprint::components::PanelState; pub use re_types::blueprint::components::QueryExpression; pub use re_types::blueprint::components::QueryKind; pub use re_types::blueprint::components::RowShare; +pub use re_types::blueprint::components::SelectedColumns; 
pub use re_types::blueprint::components::SpaceViewClass; pub use re_types::blueprint::components::SpaceViewOrigin; pub use re_types::blueprint::components::TensorDimensionIndexSlider; @@ -39,6 +43,7 @@ pub use re_types_blueprint::blueprint::components::VisualizerOverrides; pub fn is_valid_blueprint(blueprint: &EntityDb) -> bool { validate_component::(blueprint) + && validate_component::(blueprint) && validate_component::(blueprint) && validate_component::(blueprint) && validate_component::(blueprint) @@ -47,6 +52,8 @@ pub fn is_valid_blueprint(blueprint: &EntityDb) -> bool { && validate_component::(blueprint) && validate_component::(blueprint) && validate_component::(blueprint) + && validate_component::(blueprint) + && validate_component::(blueprint) && validate_component::(blueprint) && validate_component::(blueprint) && validate_component::(blueprint) @@ -58,6 +65,7 @@ pub fn is_valid_blueprint(blueprint: &EntityDb) -> bool { && validate_component::(blueprint) && validate_component::(blueprint) && validate_component::(blueprint) + && validate_component::(blueprint) && validate_component::(blueprint) && validate_component::(blueprint) && validate_component::(blueprint) diff --git a/crates/viewer/re_viewer/src/history.rs b/crates/viewer/re_viewer/src/history.rs index 70a55f64e4525..050a319e6cccc 100644 --- a/crates/viewer/re_viewer/src/history.rs +++ b/crates/viewer/re_viewer/src/history.rs @@ -262,6 +262,7 @@ pub trait HistoryExt: private::Sealed { fn push_entry(&self, entry: HistoryEntry) -> Result<(), JsValue>; /// Replace the latest entry. + #[allow(unused)] fn replace_entry(&self, entry: HistoryEntry) -> Result<(), JsValue>; /// Get the latest entry. diff --git a/crates/viewer/re_viewer/src/lib.rs b/crates/viewer/re_viewer/src/lib.rs index 2b77d4879e636..1abf935d79f99 100644 --- a/crates/viewer/re_viewer/src/lib.rs +++ b/crates/viewer/re_viewer/src/lib.rs @@ -93,7 +93,7 @@ pub enum AppEnvironment { /// We are a web-viewer running in a browser as Wasm. 
Web { url: String }, - /// Some custom application wrapping re_viewer + /// Some custom application wrapping `re_viewer`. Custom(String), } diff --git a/crates/viewer/re_viewer/src/reflection/mod.rs b/crates/viewer/re_viewer/src/reflection/mod.rs index 12842c1e4ae7b..20554b3835f13 100644 --- a/crates/viewer/re_viewer/src/reflection/mod.rs +++ b/crates/viewer/re_viewer/src/reflection/mod.rs @@ -41,6 +41,13 @@ fn generate_component_reflection() -> Result::name(), + ComponentReflection { + docstring_md: "Whether empty cells in a dataframe should be filled with a latest-at query.", + placeholder: Some(ApplyLatestAt::default().to_arrow()?), + }, + ), ( ::name(), ComponentReflection { @@ -97,6 +104,20 @@ fn generate_component_reflection() -> Result::name(), + ComponentReflection { + docstring_md: "Configuration for the filter-by-event feature of the dataframe view.", + placeholder: Some(FilterByEvent::default().to_arrow()?), + }, + ), + ( + ::name(), + ComponentReflection { + docstring_md: "Configuration for a filter-by-range feature of the dataframe view.", + placeholder: Some(FilterByRange::default().to_arrow()?), + }, + ), ( ::name(), ComponentReflection { @@ -174,6 +195,13 @@ fn generate_component_reflection() -> Result::name(), + ComponentReflection { + docstring_md: "Describe a component column to be selected in the dataframe view.", + placeholder: Some(SelectedColumns::default().to_arrow()?), + }, + ), ( ::name(), ComponentReflection { @@ -664,6 +692,13 @@ fn generate_component_reflection() -> Result::name(), + ComponentReflection { + docstring_md: "Range of expected or valid values, specifying a lower and upper bound.", + placeholder: Some(ValueRange::default().to_arrow()?), + }, + ), ( ::name(), ComponentReflection { @@ -952,6 +987,10 @@ fn generate_archetype_reflection() -> ArchetypeReflectionMap { docstring_md : "Colormap to use for rendering the depth image.\n\nIf not set, the depth image will be rendered using the Turbo colormap.", is_required : false, }, 
ArchetypeFieldReflection { component_name : + "rerun.components.ValueRange".into(), display_name : "Depth range", + docstring_md : + "The expected range of depth values.\n\nThis is typically the expected range of valid values.\nEverything outside of the range is clamped to the range for the purpose of colormpaping.\nNote that point clouds generated from this image will still display all points, regardless of this range.\n\nIf not specified, the range will be automatically estimated from the data.\nNote that the Viewer may try to guess a wider range than the minimum/maximum of values\nin the contents of the depth image.\nE.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0,\nthe Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255.", + is_required : false, }, ArchetypeFieldReflection { component_name : "rerun.components.FillRatio".into(), display_name : "Point fill ratio", docstring_md : "Scale the radii of the points in the point cloud generated from this image.\n\nA fill ratio of 1.0 (the default) means that each point is as big as to touch the center of its neighbor\nif it is at the same depth, leaving no gaps.\nA fill ratio of 0.5 means that each point touches the edge of its neighbor if it has the same depth.\n\nTODO(#6744): This applies only to 3D views!", @@ -1388,6 +1427,11 @@ fn generate_archetype_reflection() -> ArchetypeReflectionMap { ArchetypeFieldReflection { component_name : "rerun.components.TensorData".into(), display_name : "Data", docstring_md : "The tensor data", is_required : true, }, + ArchetypeFieldReflection { component_name : + "rerun.components.ValueRange".into(), display_name : "Value range", + docstring_md : + "The expected range of values.\n\nThis is typically the expected range of valid values.\nEverything outside of the range is clamped to the range for the purpose of colormpaping.\nAny colormap applied for display, will map this range.\n\nIf not specified, the range 
will be automatically estimated from the data.\nNote that the Viewer may try to guess a wider range than the minimum/maximum of values\nin the contents of the tensor.\nE.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0,\nthe Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255.", + is_required : false, }, ], }, ), @@ -1565,6 +1609,35 @@ fn generate_archetype_reflection() -> ArchetypeReflectionMap { ], }, ), + ( + ArchetypeName::new("rerun.blueprint.archetypes.DataframeQueryV2"), + ArchetypeReflection { + display_name: "Dataframe query v2", + fields: vec![ + ArchetypeFieldReflection { component_name : + "rerun.blueprint.components.TimelineName".into(), display_name : + "Timeline", docstring_md : + "The timeline for this query.\n\nIf unset, the timeline currently active on the time panel is used.", + is_required : false, }, ArchetypeFieldReflection { component_name : + "rerun.blueprint.components.FilterByRange".into(), display_name : + "Filter by range", docstring_md : + "If provided, only rows whose timestamp is within this range will be shown.\n\nNote: will be unset as soon as `timeline` is changed.", + is_required : false, }, ArchetypeFieldReflection { component_name : + "rerun.blueprint.components.FilterByEvent".into(), display_name : + "Filter by event", docstring_md : + "If provided, only show rows which contains a logged event for the specified component.", + is_required : false, }, ArchetypeFieldReflection { component_name : + "rerun.blueprint.components.ApplyLatestAt".into(), display_name : + "Apply latest at", docstring_md : + "Should empty cells be filled with latest-at queries?", is_required : + false, }, ArchetypeFieldReflection { component_name : + "rerun.blueprint.components.SelectedColumns".into(), display_name : + "Select", docstring_md : + "Selected columns. 
If unset, all columns are selected.", is_required + : false, }, + ], + }, + ), ( ArchetypeName::new("rerun.blueprint.archetypes.DataframeVisibleColumns"), ArchetypeReflection { @@ -1682,7 +1755,7 @@ fn generate_archetype_reflection() -> ArchetypeReflectionMap { false, }, ArchetypeFieldReflection { component_name : "rerun.components.GammaCorrection".into(), display_name : "Gamma", docstring_md : - "Gamma exponent applied to normalized values before mapping to color.\n\nRaises the normalized values to the power of this value before mapping to color.\nActs like an inverse brightness. Defaults to 1.0.", + "Gamma exponent applied to normalized values before mapping to color.\n\nRaises the normalized values to the power of this value before mapping to color.\nActs like an inverse brightness. Defaults to 1.0.\n\nThe final value for display is set as:\n`colormap( ((value - data_display_range.min) / (data_display_range.max - data_display_range.min)) ** gamma )`", is_required : false, }, ], }, diff --git a/crates/viewer/re_viewer/src/ui/welcome_screen/example_section.rs b/crates/viewer/re_viewer/src/ui/welcome_screen/example_section.rs index 2a989383b7d12..0711f985a6b86 100644 --- a/crates/viewer/re_viewer/src/ui/welcome_screen/example_section.rs +++ b/crates/viewer/re_viewer/src/ui/welcome_screen/example_section.rs @@ -14,7 +14,7 @@ struct ExampleThumbnail { #[derive(Debug, serde::Deserialize)] struct ExampleDesc { - /// snake_case version of the example name + /// `snake_case` version of the example name name: String, /// human-readable version of the example name diff --git a/crates/viewer/re_viewer/src/ui/welcome_screen/no_data_ui.rs b/crates/viewer/re_viewer/src/ui/welcome_screen/no_data_ui.rs index 74e5c5a5e1b56..0ec3dd983f04e 100644 --- a/crates/viewer/re_viewer/src/ui/welcome_screen/no_data_ui.rs +++ b/crates/viewer/re_viewer/src/ui/welcome_screen/no_data_ui.rs @@ -2,9 +2,6 @@ use egui::Ui; use re_ui::UiExt as _; -#[derive(Debug, Default, Clone, Copy)] -struct 
TextSize(egui::Vec2); - /// Show a minimal welcome section. pub fn no_data_ui(ui: &mut egui::Ui) { ui.center("no_data_ui_contents", |ui| { diff --git a/crates/viewer/re_viewer/src/web_tools.rs b/crates/viewer/re_viewer/src/web_tools.rs index c34af5733cd80..160a3778b26e2 100644 --- a/crates/viewer/re_viewer/src/web_tools.rs +++ b/crates/viewer/re_viewer/src/web_tools.rs @@ -14,12 +14,15 @@ pub trait JsResultExt { fn ok_or_log_js_error(self) -> Option; /// Logs an error if the result is an error and returns the result, but only once. + #[allow(unused)] fn ok_or_log_js_error_once(self) -> Option; /// Log a warning if there is an `Err`, but only log the exact same message once. + #[allow(unused)] fn warn_on_js_err_once(self, msg: impl std::fmt::Display) -> Option; /// Unwraps in debug builds otherwise logs an error if the result is an error and returns the result. + #[allow(unused)] fn unwrap_debug_or_log_js_error(self) -> Option; } diff --git a/crates/viewer/re_viewer_context/Cargo.toml b/crates/viewer/re_viewer_context/Cargo.toml index 7be582ae6ff36..1180990a225cb 100644 --- a/crates/viewer/re_viewer_context/Cargo.toml +++ b/crates/viewer/re_viewer_context/Cargo.toml @@ -28,7 +28,7 @@ re_log_types.workspace = true re_log.workspace = true re_math.workspace = true re_query.workspace = true -re_renderer.workspace = true +re_renderer = { workspace = true, features = ["serde"] } re_smart_channel.workspace = true re_string_interner.workspace = true re_tracing.workspace = true diff --git a/crates/viewer/re_viewer_context/src/annotations.rs b/crates/viewer/re_viewer_context/src/annotations.rs index 3396a7b384552..ca797d8c1bb15 100644 --- a/crates/viewer/re_viewer_context/src/annotations.rs +++ b/crates/viewer/re_viewer_context/src/annotations.rs @@ -167,7 +167,7 @@ impl ResolvedAnnotationInfo { } else { self.annotation_info .as_ref() - .and_then(|info| info.label.as_ref().cloned()) + .and_then(|info| info.label.clone()) } } } diff --git 
a/crates/viewer/re_viewer_context/src/cache/image_decode_cache.rs b/crates/viewer/re_viewer_context/src/cache/image_decode_cache.rs index efbcc865548fc..289d01577cbac 100644 --- a/crates/viewer/re_viewer_context/src/cache/image_decode_cache.rs +++ b/crates/viewer/re_viewer_context/src/cache/image_decode_cache.rs @@ -103,7 +103,6 @@ fn decode_image( buffer: buffer.0, format: format.0, kind: ImageKind::Color, - colormap: None, }) } diff --git a/crates/viewer/re_viewer_context/src/gpu_bridge/image_to_gpu.rs b/crates/viewer/re_viewer_context/src/gpu_bridge/image_to_gpu.rs index ad848dc9c9e76..1f3feb6e4d6c9 100644 --- a/crates/viewer/re_viewer_context/src/gpu_bridge/image_to_gpu.rs +++ b/crates/viewer/re_viewer_context/src/gpu_bridge/image_to_gpu.rs @@ -13,13 +13,16 @@ use re_renderer::{ resource_managers::Texture2DCreationDesc, RenderContext, }; -use re_types::components::{ClassId, Colormap}; +use re_types::components::ClassId; use re_types::datatypes::{ChannelDatatype, ColorModel, ImageFormat, PixelFormat}; use re_types::image::ImageKind; -use crate::{gpu_bridge::colormap::colormap_to_re_renderer, Annotations, ImageInfo, ImageStats}; +use crate::{ + gpu_bridge::colormap::colormap_to_re_renderer, image_info::ColormapWithRange, Annotations, + ImageInfo, ImageStats, +}; -use super::{get_or_create_texture, RangeError}; +use super::get_or_create_texture; // ---------------------------------------------------------------------------- @@ -34,19 +37,19 @@ fn generate_texture_key(image: &ImageInfo) -> u64 { format, kind, - - colormap: _, // No need to upload new texture when this changes } = image; hash((blob_row_id, format, kind)) } +/// `colormap` is currently only used for depth images. 
pub fn image_to_gpu( render_ctx: &RenderContext, debug_name: &str, image: &ImageInfo, image_stats: &ImageStats, annotations: &Annotations, + colormap: Option<&ColormapWithRange>, ) -> anyhow::Result { re_tracing::profile_function!(); @@ -56,9 +59,14 @@ pub fn image_to_gpu( ImageKind::Color => { color_image_to_gpu(render_ctx, debug_name, texture_key, image, image_stats) } - ImageKind::Depth => { - depth_image_to_gpu(render_ctx, debug_name, texture_key, image, image_stats) - } + ImageKind::Depth => depth_image_to_gpu( + render_ctx, + debug_name, + texture_key, + image, + image_stats, + colormap, + ), ImageKind::Segmentation => segmentation_image_to_gpu( render_ctx, debug_name, @@ -92,25 +100,23 @@ fn color_image_to_gpu( // TODO(emilk): let the user specify the color space. let decode_srgb = texture_format == TextureFormat::Rgba8Unorm - || image_decode_srgb_gamma_heuristic(image_stats, image_format)?; + || image_decode_srgb_gamma_heuristic(image_stats, image_format); // Special casing for normalized textures used above: let range = if matches!( texture_format, TextureFormat::R8Unorm | TextureFormat::Rgba8Unorm | TextureFormat::Bgra8Unorm ) { - [0.0, 1.0] + emath::Rangef::new(0.0, 1.0) } else if texture_format == TextureFormat::R8Snorm { - [-1.0, 1.0] + emath::Rangef::new(-1.0, 1.0) } else if let Some(shader_decoding) = shader_decoding { match shader_decoding { - ShaderDecoding::Nv12 | ShaderDecoding::Yuy2 => [0.0, 1.0], - ShaderDecoding::Bgr => image_data_range_heuristic(image_stats, &image_format) - .map(|range| [range.min, range.max])?, + ShaderDecoding::Nv12 | ShaderDecoding::Yuy2 => emath::Rangef::new(0.0, 1.0), + ShaderDecoding::Bgr => image_data_range_heuristic(image_stats, &image_format), } } else { image_data_range_heuristic(image_stats, &image_format) - .map(|range| [range.min, range.max])? 
}; let color_mapper = if let Some(shader_decoding) = shader_decoding { @@ -148,7 +154,7 @@ fn color_image_to_gpu( Ok(ColormappedTexture { texture: texture_handle, - range, + range: [range.min, range.max], decode_srgb, multiply_rgb_with_alpha, gamma, @@ -158,12 +164,9 @@ fn color_image_to_gpu( } /// Get a valid, finite range for the gpu to use. -// TODO(#2341): The range should be determined by a `DataRange` component. In absence this, heuristics apply. -pub fn image_data_range_heuristic( - image_stats: &ImageStats, - image_format: &ImageFormat, -) -> Result { - let (min, max) = image_stats.finite_range.ok_or(RangeError::MissingRange)?; +// TODO(#4624): The range should be determined by a `DataRange` component. In absence this, heuristics apply. +pub fn image_data_range_heuristic(image_stats: &ImageStats, image_format: &ImageFormat) -> Rangef { + let (min, max) = image_stats.finite_range; let min = min as f32; let max = max as f32; @@ -172,41 +175,38 @@ pub fn image_data_range_heuristic( // (we ignore NaN/Inf values heres, since they are usually there by accident!) if image_format.is_float() && 0.0 <= min && max <= 1.0 { // Float values that are all between 0 and 1, assume that this is the range. - Ok(Rangef::new(0.0, 1.0)) + Rangef::new(0.0, 1.0) } else if 0.0 <= min && max <= 255.0 { // If all values are between 0 and 255, assume this is the range. // (This is very common, independent of the data type) - Ok(Rangef::new(0.0, 255.0)) + Rangef::new(0.0, 255.0) } else if min == max { // uniform range. This can explode the colormapping, so let's map all colors to the middle: - Ok(Rangef::new(min - 1.0, max + 1.0)) + Rangef::new(min - 1.0, max + 1.0) } else { // Use range as is if nothing matches. - Ok(Rangef::new(min, max)) + Rangef::new(min, max) } } /// Return whether an image should be assumed to be encoded in sRGB color space ("gamma space", no EOTF applied). 
-fn image_decode_srgb_gamma_heuristic( - image_stats: &ImageStats, - image_format: ImageFormat, -) -> Result { +fn image_decode_srgb_gamma_heuristic(image_stats: &ImageStats, image_format: ImageFormat) -> bool { if let Some(pixel_format) = image_format.pixel_format { match pixel_format { - PixelFormat::NV12 | PixelFormat::YUY2 => Ok(true), + PixelFormat::NV12 | PixelFormat::YUY2 => true, } } else { - let (min, max) = image_stats.finite_range.ok_or(RangeError::MissingRange)?; + let (min, max) = image_stats.finite_range; #[allow(clippy::if_same_then_else)] if 0.0 <= min && max <= 255.0 { // If the range is suspiciously reminding us of a "regular image", assume sRGB. - Ok(true) + true } else if image_format.datatype().is_float() && 0.0 <= min && max <= 1.0 { // Floating point images between 0 and 1 are often sRGB as well. - Ok(true) + true } else { - Ok(false) + false } } } @@ -354,6 +354,7 @@ fn depth_image_to_gpu( texture_key: u64, image: &ImageInfo, image_stats: &ImageStats, + colormap_with_range: Option<&ColormapWithRange>, ) -> anyhow::Result { re_tracing::profile_function!(); @@ -370,7 +371,12 @@ fn depth_image_to_gpu( let datatype = image.format.datatype(); - let range = data_range(image_stats, datatype); + let ColormapWithRange { + value_range, + colormap, + } = colormap_with_range + .cloned() + .unwrap_or_else(|| ColormapWithRange::default_for_depth_images(image_stats)); let texture = get_or_create_texture(render_ctx, texture_key, || { general_texture_creation_desc_from_image(debug_name, image, ColorModel::L, datatype) @@ -379,13 +385,11 @@ fn depth_image_to_gpu( Ok(ColormappedTexture { texture, - range, + range: value_range, decode_srgb: false, multiply_rgb_with_alpha: false, gamma: 1.0, - color_mapper: ColorMapper::Function(colormap_to_re_renderer( - image.colormap.unwrap_or(Colormap::Turbo), - )), + color_mapper: ColorMapper::Function(colormap_to_re_renderer(colormap)), shader_decoding: None, }) } @@ -467,34 +471,6 @@ fn segmentation_image_to_gpu( }) } -fn 
data_range(image_stats: &ImageStats, datatype: ChannelDatatype) -> [f32; 2] { - let default_min = 0.0; - let default_max = if datatype.is_float() { - 1.0 - } else { - datatype.max_value() - }; - - let range = image_stats - .finite_range - .unwrap_or((default_min, default_max)); - let (mut min, mut max) = range; - - if !min.is_finite() { - min = default_min; - } - if !max.is_finite() { - max = default_max; - } - - if max <= min { - min = default_min; - max = default_max; - } - - [min as f32, max as f32] -} - /// Uploads the image to a texture in a format that closely resembled the input. /// Uses no `Unorm/Snorm` formats. fn general_texture_creation_desc_from_image<'a>( diff --git a/crates/viewer/re_viewer_context/src/gpu_bridge/mod.rs b/crates/viewer/re_viewer_context/src/gpu_bridge/mod.rs index a0f64954417ed..29daaf789d974 100644 --- a/crates/viewer/re_viewer_context/src/gpu_bridge/mod.rs +++ b/crates/viewer/re_viewer_context/src/gpu_bridge/mod.rs @@ -25,62 +25,26 @@ use re_renderer::{ // ---------------------------------------------------------------------------- -/// Errors that can happen when supplying a tensor range to the GPU. -#[derive(thiserror::Error, Debug, PartialEq, Eq)] -pub enum RangeError { - /// This is weird. Should only happen with JPEGs, and those should have been decoded already - #[error("Missing a range.")] - MissingRange, -} - -/// Get a valid, finite range for the gpu to use. -pub fn tensor_data_range_heuristic( - tensor_stats: &TensorStats, - data_type: re_types::tensor_data::TensorDataType, -) -> Result<[f32; 2], RangeError> { - let (min, max) = tensor_stats.finite_range.ok_or(RangeError::MissingRange)?; - - let min = min as f32; - let max = max as f32; - - // Apply heuristic for ranges that are typically expected depending on the data type and the finite (!) range. - // (we ignore NaN/Inf values heres, since they are usually there by accident!) 
- if data_type.is_float() && 0.0 <= min && max <= 1.0 { - // Float values that are all between 0 and 1, assume that this is the range. - Ok([0.0, 1.0]) - } else if 0.0 <= min && max <= 255.0 { - // If all values are between 0 and 255, assume this is the range. - // (This is very common, independent of the data type) - Ok([0.0, 255.0]) - } else if min == max { - // uniform range. This can explode the colormapping, so let's map all colors to the middle: - Ok([min - 1.0, max + 1.0]) - } else { - // Use range as is if nothing matches. - Ok([min, max]) - } -} - /// Return whether a tensor should be assumed to be encoded in sRGB color space ("gamma space", no EOTF applied). pub fn tensor_decode_srgb_gamma_heuristic( tensor_stats: &TensorStats, data_type: re_types::tensor_data::TensorDataType, channels: u32, -) -> Result { +) -> bool { if matches!(channels, 1 | 3 | 4) { - let (min, max) = tensor_stats.finite_range.ok_or(RangeError::MissingRange)?; + let (min, max) = tensor_stats.finite_range; #[allow(clippy::if_same_then_else)] if 0.0 <= min && max <= 255.0 { // If the range is suspiciously reminding us of a "regular image", assume sRGB. - Ok(true) + true } else if data_type.is_float() && 0.0 <= min && max <= 1.0 { // Floating point images between 0 and 1 are often sRGB as well. - Ok(true) + true } else { - Ok(false) + false } } else { - Ok(false) + false } } diff --git a/crates/viewer/re_viewer_context/src/image_info.rs b/crates/viewer/re_viewer_context/src/image_info.rs index b144cf50473e6..0a514018da59a 100644 --- a/crates/viewer/re_viewer_context/src/image_info.rs +++ b/crates/viewer/re_viewer_context/src/image_info.rs @@ -8,9 +8,34 @@ use re_types::{ tensor_data::TensorElement, }; -/// Represents an `Image`, `SegmentationImage` or `DepthImage`. +/// Colormap together with the range of image values that is mapped to the colormap's range. /// -/// It has enough information to render the image on the screen. 
+/// The range is used to linearly re-map the image values to a normalized range (of 0-1) +/// to which the colormap is applied. +#[derive(Clone)] +pub struct ColormapWithRange { + pub colormap: Colormap, + pub value_range: [f32; 2], +} + +impl ColormapWithRange { + pub const DEFAULT_DEPTH_COLORMAP: Colormap = Colormap::Turbo; + + pub fn default_range_for_depth_images(image_stats: &crate::ImageStats) -> [f32; 2] { + // Use 0.0 as default minimum depth value, even if it doesn't show up in the data. + // (since logically, depth usually starts at zero) + [0.0, image_stats.finite_range.1 as _] + } + + pub fn default_for_depth_images(image_stats: &crate::ImageStats) -> Self { + Self { + colormap: Self::DEFAULT_DEPTH_COLORMAP, + value_range: Self::default_range_for_depth_images(image_stats), + } + } +} + +/// Represents the contents of an `Image`, `SegmentationImage` or `DepthImage`. #[derive(Clone)] pub struct ImageInfo { /// The row id that contained the blob. @@ -26,9 +51,6 @@ pub struct ImageInfo { /// Color, Depth, or Segmentation? 
pub kind: ImageKind, - - /// Primarily for depth images atm - pub colormap: Option, } impl ImageInfo { @@ -373,7 +395,6 @@ mod tests { buffer: image.buffer.0, format: image.format.0, kind: re_types::image::ImageKind::Color, - colormap: None, } } diff --git a/crates/viewer/re_viewer_context/src/lib.rs b/crates/viewer/re_viewer_context/src/lib.rs index fbe1c360a2670..0cd14c563bfb0 100644 --- a/crates/viewer/re_viewer_context/src/lib.rs +++ b/crates/viewer/re_viewer_context/src/lib.rs @@ -51,7 +51,7 @@ pub use component_fallbacks::{ }; pub use component_ui_registry::{ComponentUiRegistry, ComponentUiTypes, UiLayout}; pub use contents::{blueprint_id_to_tile_id, Contents, ContentsName}; -pub use image_info::ImageInfo; +pub use image_info::{ColormapWithRange, ImageInfo}; pub use item::Item; pub use maybe_mut_ref::MaybeMutRef; pub use query_context::{ diff --git a/crates/viewer/re_viewer_context/src/tensor/image_stats.rs b/crates/viewer/re_viewer_context/src/tensor/image_stats.rs index 72d9bcaad5e21..c4254feb78fed 100644 --- a/crates/viewer/re_viewer_context/src/tensor/image_stats.rs +++ b/crates/viewer/re_viewer_context/src/tensor/image_stats.rs @@ -14,8 +14,9 @@ pub struct ImageStats { /// Like `range`, but ignoring all `NaN`/inf values. /// - /// `None` if there are no finite values at all. - pub finite_range: Option<(f64, f64)>, + /// If no finite values are present, this takes the maximum finite range + /// of the underlying data type. 
+ pub finite_range: (f64, f64), } impl ImageStats { @@ -124,7 +125,7 @@ impl ImageStats { // We do the lazy thing here: return Self { range: Some((0.0, 255.0)), - finite_range: Some((0.0, 255.0)), + finite_range: (0.0, 255.0), }; } None => image.format.datatype(), @@ -150,13 +151,13 @@ impl ImageStats { // Empty image return Self { range: None, - finite_range: None, + finite_range: (datatype.min_value(), datatype.max_value()), }; } let finite_range = if range.0.is_finite() && range.1.is_finite() { // Already finite - Some(range) + range } else { let finite_range = match datatype { ChannelDatatype::U8 @@ -175,9 +176,9 @@ impl ImageStats { // Ensure it actually is finite: if finite_range.0.is_finite() && finite_range.1.is_finite() { - Some(finite_range) + finite_range } else { - None + (datatype.min_value(), datatype.max_value()) } }; diff --git a/crates/viewer/re_viewer_context/src/tensor/tensor_stats.rs b/crates/viewer/re_viewer_context/src/tensor/tensor_stats.rs index ebac7631a1810..20be75b4ede72 100644 --- a/crates/viewer/re_viewer_context/src/tensor/tensor_stats.rs +++ b/crates/viewer/re_viewer_context/src/tensor/tensor_stats.rs @@ -13,8 +13,9 @@ pub struct TensorStats { /// Like `range`, but ignoring all `NaN`/inf values. /// - /// `None` if there are no finite values at all. - pub finite_range: Option<(f64, f64)>, + /// If no finite values are present, this takes the maximum finite range + /// of the underlying data type. 
+ pub finite_range: (f64, f64), } impl TensorStats { @@ -132,7 +133,7 @@ impl TensorStats { // Empty tensor return Self { range: None, - finite_range: None, + finite_range: (tensor.dtype().min_value(), tensor.dtype().max_value()), }; } } @@ -172,7 +173,8 @@ impl TensorStats { None } }) - }; + } + .unwrap_or_else(|| (tensor.dtype().min_value(), tensor.dtype().max_value())); Self { range, diff --git a/crates/viewer/re_viewer_context/src/time_control.rs b/crates/viewer/re_viewer_context/src/time_control.rs index bd1651944c0e2..45d5e1fbc9295 100644 --- a/crates/viewer/re_viewer_context/src/time_control.rs +++ b/crates/viewer/re_viewer_context/src/time_control.rs @@ -111,7 +111,7 @@ impl std::ops::Deref for ActiveTimeline { #[derive(serde::Deserialize, serde::Serialize, Clone, PartialEq)] #[serde(default)] pub struct TimeControl { - /// Name of the timeline (e.g. "log_time"). + /// Name of the timeline (e.g. `log_time`). timeline: ActiveTimeline, states: BTreeMap, diff --git a/crates/viewer/re_viewer_context/src/utils/color.rs b/crates/viewer/re_viewer_context/src/utils/color.rs index 14ee006516612..b49b709ace3ae 100644 --- a/crates/viewer/re_viewer_context/src/utils/color.rs +++ b/crates/viewer/re_viewer_context/src/utils/color.rs @@ -11,5 +11,5 @@ pub fn auto_color_egui(val: u16) -> egui::Color32 { pub fn auto_color_for_entity_path( entity_path: &re_entity_db::EntityPath, ) -> re_types::components::Color { - auto_color_egui((entity_path.hash64() % std::u16::MAX as u64) as u16).into() + auto_color_egui((entity_path.hash64() % u16::MAX as u64) as u16).into() } diff --git a/crates/viewer/re_viewer_context/src/viewer_context.rs b/crates/viewer/re_viewer_context/src/viewer_context.rs index 8469c29f9f828..a5c1eaac439e4 100644 --- a/crates/viewer/re_viewer_context/src/viewer_context.rs +++ b/crates/viewer/re_viewer_context/src/viewer_context.rs @@ -40,7 +40,7 @@ pub struct ViewerContext<'a> { /// Mapping from class and system to entities for the store /// - /// 
TODO(andreas): This should have a generation id, allowing to update heuristics(?)/visualizable_entities etc. + /// TODO(andreas): This should have a generation id, allowing to update heuristics(?)/visualizable entities etc. pub applicable_entities_per_visualizer: &'a PerVisualizer, /// For each visualizer, the set of entities that have at least one matching indicator component. diff --git a/crates/viewer/re_viewport_blueprint/src/space_view.rs b/crates/viewer/re_viewport_blueprint/src/space_view.rs index b9fae7df15201..f5170135ec644 100644 --- a/crates/viewer/re_viewport_blueprint/src/space_view.rs +++ b/crates/viewer/re_viewport_blueprint/src/space_view.rs @@ -50,7 +50,7 @@ pub struct SpaceViewBlueprint { /// True if this space view is visible in the UI. pub visible: bool, - /// Path where these space_views defaults can be found. + /// Path where these space views defaults can be found. pub defaults_path: EntityPath, /// Pending blueprint writes for nested components from duplicate. diff --git a/crates/viewer/re_viewport_blueprint/src/view_properties.rs b/crates/viewer/re_viewport_blueprint/src/view_properties.rs index d5dfb4d9d24c0..2894c50b6adbb 100644 --- a/crates/viewer/re_viewport_blueprint/src/view_properties.rs +++ b/crates/viewer/re_viewport_blueprint/src/view_properties.rs @@ -175,6 +175,11 @@ impl ViewProperty { ctx.save_blueprint_component(&self.blueprint_store_path, components); } + /// Clears a blueprint component. + pub fn clear_blueprint_component(&self, ctx: &ViewerContext<'_>) { + ctx.clear_blueprint_component_by_name(&self.blueprint_store_path, C::name()); + } + /// Resets a blueprint component to the value it had in the default blueprint. 
pub fn reset_blueprint_component(&self, ctx: &ViewerContext<'_>) { ctx.reset_blueprint_component_by_name(&self.blueprint_store_path, C::name()); diff --git a/docs/content/getting-started/installing-viewer.md b/docs/content/getting-started/installing-viewer.md index 8327753b2d595..863796974311a 100644 --- a/docs/content/getting-started/installing-viewer.md +++ b/docs/content/getting-started/installing-viewer.md @@ -40,7 +40,7 @@ There are many ways to install the viewer. Please pick whatever works best for y - Download `rerun-cli` for your platform from the [GitHub Release artifacts](https://github.com/rerun-io/rerun/releases/latest/). - Via Cargo - `cargo binstall rerun-cli` - download binaries via [`cargo binstall`](https://github.com/cargo-bins/cargo-binstall) - - `cargo install rerun-cli --locked` - build it from source (this requires Rust 1.76+) + - `cargo install rerun-cli --locked` - build it from source (this requires Rust 1.79+) - Together with the Rerun [Python SDK](./quick-start/python.md): - `pip3 install rerun-sdk` - download it via pip - `conda install -c conda-forge rerun-sdk` - download via Conda diff --git a/docs/content/getting-started/quick-start/rust.md b/docs/content/getting-started/quick-start/rust.md index 45d1beb1a86a5..f1355e514b31d 100644 --- a/docs/content/getting-started/quick-start/rust.md +++ b/docs/content/getting-started/quick-start/rust.md @@ -5,7 +5,7 @@ order: 3 ## Setup -The Rerun SDK for Rust requires a working installation of Rust 1.76+. +The Rerun SDK for Rust requires a working installation of Rust 1.79+. After you have [installed the viewer](../installing-viewer.md#installing-the-viewer) you can simply add [the Rerun crate](https://crates.io/crates/rerun) to your project with `cargo add rerun`. 
diff --git a/docs/content/reference/types/archetypes/annotation_context.md b/docs/content/reference/types/archetypes/annotation_context.md index f1e1195f57f62..a9850dde6c0b6 100644 --- a/docs/content/reference/types/archetypes/annotation_context.md +++ b/docs/content/reference/types/archetypes/annotation_context.md @@ -20,6 +20,7 @@ See also [`datatypes.ClassDescription`](https://rerun.io/docs/reference/types/da ## Shown in * [Spatial2DView](../views/spatial2d_view.md) * [Spatial3DView](../views/spatial3d_view.md) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `AnnotationContext`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1AnnotationContext.html) diff --git a/docs/content/reference/types/archetypes/arrows2d.md b/docs/content/reference/types/archetypes/arrows2d.md index a281f754e12f5..40564b6514bcf 100644 --- a/docs/content/reference/types/archetypes/arrows2d.md +++ b/docs/content/reference/types/archetypes/arrows2d.md @@ -16,6 +16,7 @@ title: "Arrows2D" ## Shown in * [Spatial2DView](../views/spatial2d_view.md) * [Spatial3DView](../views/spatial3d_view.md) (if logged under a projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Arrows2D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Arrows2D.html) diff --git a/docs/content/reference/types/archetypes/arrows3d.md b/docs/content/reference/types/archetypes/arrows3d.md index 2265f502af29e..937223f432bad 100644 --- a/docs/content/reference/types/archetypes/arrows3d.md +++ b/docs/content/reference/types/archetypes/arrows3d.md @@ -16,6 +16,7 @@ title: "Arrows3D" ## Shown in * [Spatial3DView](../views/spatial3d_view.md) * [Spatial2DView](../views/spatial2d_view.md) (if logged above active projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for 
`Arrows3D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Arrows3D.html) diff --git a/docs/content/reference/types/archetypes/asset3d.md b/docs/content/reference/types/archetypes/asset3d.md index e15d67cc3740f..145fb8cfa15d7 100644 --- a/docs/content/reference/types/archetypes/asset3d.md +++ b/docs/content/reference/types/archetypes/asset3d.md @@ -19,6 +19,7 @@ an instance of the mesh will be drawn for each transform. ## Shown in * [Spatial3DView](../views/spatial3d_view.md) * [Spatial2DView](../views/spatial2d_view.md) (if logged above active projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Asset3D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Asset3D.html) diff --git a/docs/content/reference/types/archetypes/asset_video.md b/docs/content/reference/types/archetypes/asset_video.md index 0177b17757673..b7b7affe75093 100644 --- a/docs/content/reference/types/archetypes/asset_video.md +++ b/docs/content/reference/types/archetypes/asset_video.md @@ -21,6 +21,9 @@ In order to display a video, you also need to log a [`archetypes.VideoFrameRefer **Recommended**: [`MediaType`](../components/media_type.md) +## Shown in +* [DataframeView](../views/dataframe_view.md) + ## API reference links * šŸŒŠ [C++ API docs for `AssetVideo`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1AssetVideo.html?speculative-link) * šŸ [Python API docs for `AssetVideo`](https://ref.rerun.io/docs/python/stable/common/archetypes?speculative-link#rerun.archetypes.AssetVideo) diff --git a/docs/content/reference/types/archetypes/bar_chart.md b/docs/content/reference/types/archetypes/bar_chart.md index 2991afcfb7fd2..6f2e286bd9234 100644 --- a/docs/content/reference/types/archetypes/bar_chart.md +++ b/docs/content/reference/types/archetypes/bar_chart.md @@ -15,6 +15,7 @@ The x values will be the indices of the array, and the bar heights will be the p ## Shown in * 
[BarChartView](../views/bar_chart_view.md) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `BarChart`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1BarChart.html) diff --git a/docs/content/reference/types/archetypes/boxes2d.md b/docs/content/reference/types/archetypes/boxes2d.md index d8733694c78ce..c1b9c9c424e4f 100644 --- a/docs/content/reference/types/archetypes/boxes2d.md +++ b/docs/content/reference/types/archetypes/boxes2d.md @@ -16,6 +16,7 @@ title: "Boxes2D" ## Shown in * [Spatial2DView](../views/spatial2d_view.md) * [Spatial3DView](../views/spatial3d_view.md) (if logged under a projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Boxes2D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Boxes2D.html) diff --git a/docs/content/reference/types/archetypes/boxes3d.md b/docs/content/reference/types/archetypes/boxes3d.md index 5951c6cdef203..ef33d29fcfc23 100644 --- a/docs/content/reference/types/archetypes/boxes3d.md +++ b/docs/content/reference/types/archetypes/boxes3d.md @@ -20,6 +20,7 @@ If there's more instance poses than half sizes, the last half size will be repea ## Shown in * [Spatial3DView](../views/spatial3d_view.md) * [Spatial2DView](../views/spatial2d_view.md) (if logged above active projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Boxes3D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Boxes3D.html) diff --git a/docs/content/reference/types/archetypes/clear.md b/docs/content/reference/types/archetypes/clear.md index a22bca19571d8..476fcb56470b1 100644 --- a/docs/content/reference/types/archetypes/clear.md +++ b/docs/content/reference/types/archetypes/clear.md @@ -23,6 +23,7 @@ data (i.e. discontinuous lines). 
* [Spatial2DView](../views/spatial2d_view.md) * [Spatial3DView](../views/spatial3d_view.md) * [TimeSeriesView](../views/time_series_view.md) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Clear`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Clear.html) diff --git a/docs/content/reference/types/archetypes/depth_image.md b/docs/content/reference/types/archetypes/depth_image.md index b2ec72e6b1d21..6a0d62afe6c00 100644 --- a/docs/content/reference/types/archetypes/depth_image.md +++ b/docs/content/reference/types/archetypes/depth_image.md @@ -11,11 +11,12 @@ Each pixel corresponds to a depth value in units specified by [`components.Depth **Required**: [`ImageBuffer`](../components/image_buffer.md), [`ImageFormat`](../components/image_format.md) -**Optional**: [`DepthMeter`](../components/depth_meter.md), [`Colormap`](../components/colormap.md), [`FillRatio`](../components/fill_ratio.md), [`DrawOrder`](../components/draw_order.md) +**Optional**: [`DepthMeter`](../components/depth_meter.md), [`Colormap`](../components/colormap.md), [`ValueRange`](../components/value_range.md), [`FillRatio`](../components/fill_ratio.md), [`DrawOrder`](../components/draw_order.md) ## Shown in * [Spatial2DView](../views/spatial2d_view.md) * [Spatial3DView](../views/spatial3d_view.md) (if logged under a projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `DepthImage`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1DepthImage.html) diff --git a/docs/content/reference/types/archetypes/disconnected_space.md b/docs/content/reference/types/archetypes/disconnected_space.md index 3d61de94cf054..eacd122ed166d 100644 --- a/docs/content/reference/types/archetypes/disconnected_space.md +++ b/docs/content/reference/types/archetypes/disconnected_space.md @@ -17,6 +17,7 @@ This is useful for specifying that a subgraph is independent of the rest of the ## Shown in 
* [Spatial2DView](../views/spatial2d_view.md) * [Spatial3DView](../views/spatial3d_view.md) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `DisconnectedSpace`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1DisconnectedSpace.html) diff --git a/docs/content/reference/types/archetypes/ellipsoids3d.md b/docs/content/reference/types/archetypes/ellipsoids3d.md index 1d2d8de2642e8..faeaba7a7b94e 100644 --- a/docs/content/reference/types/archetypes/ellipsoids3d.md +++ b/docs/content/reference/types/archetypes/ellipsoids3d.md @@ -24,6 +24,7 @@ If there's more instance poses than half sizes, the last half size will be repea ## Shown in * [Spatial3DView](../views/spatial3d_view.md) * [Spatial2DView](../views/spatial2d_view.md) (if logged above active projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Ellipsoids3D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Ellipsoids3D.html) diff --git a/docs/content/reference/types/archetypes/encoded_image.md b/docs/content/reference/types/archetypes/encoded_image.md index 70eef2932a0f7..e824f2f3ff8de 100644 --- a/docs/content/reference/types/archetypes/encoded_image.md +++ b/docs/content/reference/types/archetypes/encoded_image.md @@ -18,6 +18,7 @@ Rerun also supports uncompressed images with the [`archetypes.Image`](https://re ## Shown in * [Spatial2DView](../views/spatial2d_view.md) * [Spatial3DView](../views/spatial3d_view.md) (if logged under a projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `EncodedImage`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1EncodedImage.html) diff --git a/docs/content/reference/types/archetypes/image.md b/docs/content/reference/types/archetypes/image.md index d07439d9e68e2..a2f2f64bf9dfe 100644 --- a/docs/content/reference/types/archetypes/image.md +++ 
b/docs/content/reference/types/archetypes/image.md @@ -26,6 +26,7 @@ Compressing images can save a lot of bandwidth and memory. ## Shown in * [Spatial2DView](../views/spatial2d_view.md) * [Spatial3DView](../views/spatial3d_view.md) (if logged under a projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Image`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Image.html) diff --git a/docs/content/reference/types/archetypes/instance_poses3d.md b/docs/content/reference/types/archetypes/instance_poses3d.md index f45369215c478..08860bdebfe0f 100644 --- a/docs/content/reference/types/archetypes/instance_poses3d.md +++ b/docs/content/reference/types/archetypes/instance_poses3d.md @@ -23,6 +23,7 @@ the 3x3 matrix is applied first, followed by the translation. ## Shown in * [Spatial3DView](../views/spatial3d_view.md) * [Spatial2DView](../views/spatial2d_view.md) (if logged above active projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `InstancePoses3D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1InstancePoses3D.html) diff --git a/docs/content/reference/types/archetypes/line_strips2d.md b/docs/content/reference/types/archetypes/line_strips2d.md index fca6ad8674493..dde4074a399d4 100644 --- a/docs/content/reference/types/archetypes/line_strips2d.md +++ b/docs/content/reference/types/archetypes/line_strips2d.md @@ -16,6 +16,7 @@ title: "LineStrips2D" ## Shown in * [Spatial2DView](../views/spatial2d_view.md) * [Spatial3DView](../views/spatial3d_view.md) (if logged under a projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `LineStrips2D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1LineStrips2D.html) diff --git a/docs/content/reference/types/archetypes/line_strips3d.md b/docs/content/reference/types/archetypes/line_strips3d.md index 
3074b499cdc8a..ab9c01704820e 100644 --- a/docs/content/reference/types/archetypes/line_strips3d.md +++ b/docs/content/reference/types/archetypes/line_strips3d.md @@ -16,6 +16,7 @@ title: "LineStrips3D" ## Shown in * [Spatial3DView](../views/spatial3d_view.md) * [Spatial2DView](../views/spatial2d_view.md) (if logged above active projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `LineStrips3D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1LineStrips3D.html) diff --git a/docs/content/reference/types/archetypes/mesh3d.md b/docs/content/reference/types/archetypes/mesh3d.md index 2229353abe868..3546920a0238c 100644 --- a/docs/content/reference/types/archetypes/mesh3d.md +++ b/docs/content/reference/types/archetypes/mesh3d.md @@ -21,6 +21,7 @@ an instance of the mesh will be drawn for each transform. ## Shown in * [Spatial3DView](../views/spatial3d_view.md) * [Spatial2DView](../views/spatial2d_view.md) (if logged above active projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Mesh3D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Mesh3D.html) diff --git a/docs/content/reference/types/archetypes/pinhole.md b/docs/content/reference/types/archetypes/pinhole.md index 89233ddf6d6d5..06a456ba5c469 100644 --- a/docs/content/reference/types/archetypes/pinhole.md +++ b/docs/content/reference/types/archetypes/pinhole.md @@ -16,6 +16,7 @@ Camera perspective projection (a.k.a. intrinsics). 
## Shown in * [Spatial2DView](../views/spatial2d_view.md) * [Spatial2DView](../views/spatial2d_view.md) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Pinhole`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Pinhole.html) diff --git a/docs/content/reference/types/archetypes/points2d.md b/docs/content/reference/types/archetypes/points2d.md index 553c1879d9f62..69395fd5fa0bc 100644 --- a/docs/content/reference/types/archetypes/points2d.md +++ b/docs/content/reference/types/archetypes/points2d.md @@ -16,6 +16,7 @@ A 2D point cloud with positions and optional colors, radii, labels, etc. ## Shown in * [Spatial2DView](../views/spatial2d_view.md) * [Spatial3DView](../views/spatial3d_view.md) (if logged under a projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Points2D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Points2D.html) diff --git a/docs/content/reference/types/archetypes/points3d.md b/docs/content/reference/types/archetypes/points3d.md index 3cf0c8117a17d..6b83466c73814 100644 --- a/docs/content/reference/types/archetypes/points3d.md +++ b/docs/content/reference/types/archetypes/points3d.md @@ -16,6 +16,7 @@ A 3D point cloud with positions and optional colors, radii, labels, etc. 
## Shown in * [Spatial3DView](../views/spatial3d_view.md) * [Spatial2DView](../views/spatial2d_view.md) (if logged above active projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Points3D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Points3D.html) diff --git a/docs/content/reference/types/archetypes/scalar.md b/docs/content/reference/types/archetypes/scalar.md index 7b904e887339e..b75e7960f4a04 100644 --- a/docs/content/reference/types/archetypes/scalar.md +++ b/docs/content/reference/types/archetypes/scalar.md @@ -19,6 +19,7 @@ the plot-specific archetypes through the blueprint. ## Shown in * [TimeSeriesView](../views/time_series_view.md) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Scalar`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Scalar.html) diff --git a/docs/content/reference/types/archetypes/segmentation_image.md b/docs/content/reference/types/archetypes/segmentation_image.md index b8a5958c71289..d6fd1a5c15438 100644 --- a/docs/content/reference/types/archetypes/segmentation_image.md +++ b/docs/content/reference/types/archetypes/segmentation_image.md @@ -21,6 +21,7 @@ See also [`archetypes.AnnotationContext`](https://rerun.io/docs/reference/types/ ## Shown in * [Spatial2DView](../views/spatial2d_view.md) * [Spatial3DView](../views/spatial3d_view.md) (if logged under a projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `SegmentationImage`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1SegmentationImage.html) diff --git a/docs/content/reference/types/archetypes/series_line.md b/docs/content/reference/types/archetypes/series_line.md index 204dc77825f82..a35379f1b8e97 100644 --- a/docs/content/reference/types/archetypes/series_line.md +++ b/docs/content/reference/types/archetypes/series_line.md @@ -15,6 +15,7 @@ when possible. 
The underlying data needs to be logged to the same entity-path us ## Shown in * [TimeSeriesView](../views/time_series_view.md) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `SeriesLine`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1SeriesLine.html) diff --git a/docs/content/reference/types/archetypes/series_point.md b/docs/content/reference/types/archetypes/series_point.md index 16146b2ad0ab9..c2163b8e2638a 100644 --- a/docs/content/reference/types/archetypes/series_point.md +++ b/docs/content/reference/types/archetypes/series_point.md @@ -15,6 +15,7 @@ when possible. The underlying data needs to be logged to the same entity-path us ## Shown in * [TimeSeriesView](../views/time_series_view.md) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `SeriesPoint`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1SeriesPoint.html) diff --git a/docs/content/reference/types/archetypes/tensor.md b/docs/content/reference/types/archetypes/tensor.md index a0bff263a4fa3..7e25161887e22 100644 --- a/docs/content/reference/types/archetypes/tensor.md +++ b/docs/content/reference/types/archetypes/tensor.md @@ -9,9 +9,12 @@ An N-dimensional array of numbers. 
**Required**: [`TensorData`](../components/tensor_data.md) +**Optional**: [`ValueRange`](../components/value_range.md) + ## Shown in * [TensorView](../views/tensor_view.md) * [BarChartView](../views/bar_chart_view.md) (for 1D tensors) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Tensor`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Tensor.html) diff --git a/docs/content/reference/types/archetypes/text_document.md b/docs/content/reference/types/archetypes/text_document.md index 5c93ef4fd6052..e7b481d14b6f3 100644 --- a/docs/content/reference/types/archetypes/text_document.md +++ b/docs/content/reference/types/archetypes/text_document.md @@ -15,6 +15,7 @@ Supports raw text and markdown. ## Shown in * [TextDocumentView](../views/text_document_view.md) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `TextDocument`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1TextDocument.html) diff --git a/docs/content/reference/types/archetypes/text_log.md b/docs/content/reference/types/archetypes/text_log.md index 645a2fd889abf..96ce9f014dca3 100644 --- a/docs/content/reference/types/archetypes/text_log.md +++ b/docs/content/reference/types/archetypes/text_log.md @@ -15,6 +15,7 @@ A log entry in a text log, comprised of a text body and its log level. ## Shown in * [TextLogView](../views/text_log_view.md) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `TextLog`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1TextLog.html) diff --git a/docs/content/reference/types/archetypes/transform3d.md b/docs/content/reference/types/archetypes/transform3d.md index df78ab1a4907b..058d8c0ef60ff 100644 --- a/docs/content/reference/types/archetypes/transform3d.md +++ b/docs/content/reference/types/archetypes/transform3d.md @@ -21,6 +21,7 @@ it will be resolved to a transform with only a rotation. 
## Shown in * [Spatial3DView](../views/spatial3d_view.md) * [Spatial2DView](../views/spatial2d_view.md) (if logged above active projection) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `Transform3D`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1Transform3D.html) diff --git a/docs/content/reference/types/archetypes/video_frame_reference.md b/docs/content/reference/types/archetypes/video_frame_reference.md index 2b679f688d2d0..359adbe6101ed 100644 --- a/docs/content/reference/types/archetypes/video_frame_reference.md +++ b/docs/content/reference/types/archetypes/video_frame_reference.md @@ -19,6 +19,9 @@ See for details of what **Optional**: [`EntityPath`](../components/entity_path.md) +## Shown in +* [DataframeView](../views/dataframe_view.md) + ## API reference links * šŸŒŠ [C++ API docs for `VideoFrameReference`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1VideoFrameReference.html?speculative-link) * šŸ [Python API docs for `VideoFrameReference`](https://ref.rerun.io/docs/python/stable/common/archetypes?speculative-link#rerun.archetypes.VideoFrameReference) diff --git a/docs/content/reference/types/archetypes/view_coordinates.md b/docs/content/reference/types/archetypes/view_coordinates.md index 9f51cbf743362..68d44f93d9f4a 100644 --- a/docs/content/reference/types/archetypes/view_coordinates.md +++ b/docs/content/reference/types/archetypes/view_coordinates.md @@ -20,6 +20,7 @@ Make sure that this archetype is logged at or above the origin entity path of yo ## Shown in * [Spatial3DView](../views/spatial3d_view.md) +* [DataframeView](../views/dataframe_view.md) ## API reference links * šŸŒŠ [C++ API docs for `ViewCoordinates`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1archetypes_1_1ViewCoordinates.html) diff --git a/docs/content/reference/types/components.md b/docs/content/reference/types/components.md index 772ccf32b4bc2..11aba202b4d01 100644 --- 
a/docs/content/reference/types/components.md +++ b/docs/content/reference/types/components.md @@ -71,6 +71,7 @@ on [Entities and Components](../../concepts/entity-component.md). * [`TransformRelation`](components/transform_relation.md): Specifies relation a spatial transform describes. * [`Translation3D`](components/translation3d.md): A translation vector in 3D space. * [`TriangleIndices`](components/triangle_indices.md): The three indices of a triangle in a triangle mesh. +* [`ValueRange`](components/value_range.md): Range of expected or valid values, specifying a lower and upper bound. * [`Vector2D`](components/vector2d.md): A vector in 2D space. * [`Vector3D`](components/vector3d.md): A vector in 3D space. * [`VideoTimestamp`](components/video_timestamp.md): Timestamp inside a [`archetypes.AssetVideo`](https://rerun.io/docs/reference/types/archetypes/asset_video?speculative-link). diff --git a/docs/content/reference/types/components/.gitattributes b/docs/content/reference/types/components/.gitattributes index 25cfd40ed1e71..c07d94eee94f9 100644 --- a/docs/content/reference/types/components/.gitattributes +++ b/docs/content/reference/types/components/.gitattributes @@ -59,6 +59,7 @@ transform_mat3x3.md linguist-generated=true transform_relation.md linguist-generated=true translation3d.md linguist-generated=true triangle_indices.md linguist-generated=true +value_range.md linguist-generated=true vector2d.md linguist-generated=true vector3d.md linguist-generated=true video_timestamp.md linguist-generated=true diff --git a/docs/content/reference/types/components/value_range.md b/docs/content/reference/types/components/value_range.md new file mode 100644 index 0000000000000..e13c0fc619a2b --- /dev/null +++ b/docs/content/reference/types/components/value_range.md @@ -0,0 +1,21 @@ +--- +title: "ValueRange" +--- + + +Range of expected or valid values, specifying a lower and upper bound. 
+ +## Fields + +* range: [`Range1D`](../datatypes/range1d.md) + +## API reference links + * šŸŒŠ [C++ API docs for `ValueRange`](https://ref.rerun.io/docs/cpp/stable/structrerun_1_1components_1_1ValueRange.html) + * šŸ [Python API docs for `ValueRange`](https://ref.rerun.io/docs/python/stable/common/components#rerun.components.ValueRange) + * šŸ¦€ [Rust API docs for `ValueRange`](https://docs.rs/rerun/latest/rerun/components/struct.ValueRange.html) + + +## Used by + +* [`DepthImage`](../archetypes/depth_image.md) +* [`Tensor`](../archetypes/tensor.md) diff --git a/docs/content/reference/types/datatypes/range1d.md b/docs/content/reference/types/datatypes/range1d.md index 82e2d8551947d..b485ac3f3cd15 100644 --- a/docs/content/reference/types/datatypes/range1d.md +++ b/docs/content/reference/types/datatypes/range1d.md @@ -19,3 +19,4 @@ A 1D range, specifying a lower and upper bound. * [`Range1D`](../components/range1d.md) * [`Range2D`](../datatypes/range2d.md) +* [`ValueRange`](../components/value_range.md) diff --git a/docs/content/reference/types/views.md b/docs/content/reference/types/views.md index fff6b159afc63..8d7b8e11948c2 100644 --- a/docs/content/reference/types/views.md +++ b/docs/content/reference/types/views.md @@ -8,6 +8,7 @@ Views are the panels shown in the viewer's viewport and the primary means of ins * [`BarChartView`](views/bar_chart_view.md): A bar chart view. +* [`DataframeView`](views/dataframe_view.md): A view to display any data in a tabular form. * [`Spatial2DView`](views/spatial2d_view.md): For viewing spatial 2D data. * [`Spatial3DView`](views/spatial3d_view.md): For viewing spatial 3D data. * [`TensorView`](views/tensor_view.md): A view on a tensor of any dimensionality. 
diff --git a/docs/content/reference/types/views/.gitattributes b/docs/content/reference/types/views/.gitattributes index 5a454df3b43cb..ee86a2002ccb5 100644 --- a/docs/content/reference/types/views/.gitattributes +++ b/docs/content/reference/types/views/.gitattributes @@ -2,6 +2,7 @@ .gitattributes linguist-generated=true bar_chart_view.md linguist-generated=true +dataframe_view.md linguist-generated=true spatial2d_view.md linguist-generated=true spatial3d_view.md linguist-generated=true tensor_view.md linguist-generated=true diff --git a/docs/content/reference/types/views/dataframe_view.md b/docs/content/reference/types/views/dataframe_view.md new file mode 100644 index 0000000000000..1acd13261a759 --- /dev/null +++ b/docs/content/reference/types/views/dataframe_view.md @@ -0,0 +1,34 @@ +--- +title: "DataframeView" +--- + + +A view to display any data in a tabular form. + +Any data from the store can be shown, using a flexible, user-configurable query. + +## Properties + +### `query` +Query of the dataframe. + +* `timeline`: The timeline for this query. +* `filter_by_range`: If provided, only rows whose timestamp is within this range will be shown. +* `filter_by_event`: If provided, only show rows which contain a logged event for the specified component. +* `apply_latest_at`: Should empty cells be filled with latest-at queries? +* `select`: Selected columns. If unset, all columns are selected. + +## API reference links + * šŸ [Python API docs for `DataframeView`](https://ref.rerun.io/docs/python/stable/common/blueprint_views#rerun.blueprint.views.DataframeView) + +## Example + +### Use a blueprint to customize a DataframeView. + +snippet: views/dataframe + + +## Visualized archetypes + +Any data can be displayed by the Dataframe view.
+ diff --git a/docs/snippets/all/views/dataframe.py b/docs/snippets/all/views/dataframe.py new file mode 100644 index 0000000000000..6423ea180a321 --- /dev/null +++ b/docs/snippets/all/views/dataframe.py @@ -0,0 +1,30 @@ +"""Use a blueprint to customize a DataframeView.""" + +import math + +import rerun as rr +import rerun.blueprint as rrb + +rr.init("rerun_example_dataframe", spawn=True) + +# Log some data. +rr.log("trig/sin", rr.SeriesLine(color=[255, 0, 0], name="sin(0.01t)"), static=True) +rr.log("trig/cos", rr.SeriesLine(color=[0, 255, 0], name="cos(0.01t)"), static=True) +for t in range(0, int(math.pi * 4 * 100.0)): + rr.set_time_seconds("t", t) + rr.log("trig/sin", rr.Scalar(math.sin(float(t) / 100.0))) + rr.log("trig/cos", rr.Scalar(math.cos(float(t) / 100.0))) + +# Create a Dataframe View +blueprint = rrb.Blueprint( + rrb.DataframeView( + origin="/trig", + # TODO(#6896): improve `DataframeQueryV2` API and showcase more features + query=rrb.archetypes.DataframeQueryV2( + timeline="t", + range_filter=rrb.components.RangeFilter(start=rr.TimeInt(seconds=0), end=rr.TimeInt(seconds=20)), + ), + ), +) + +rr.send_blueprint(blueprint) diff --git a/examples/python/signed_distance_fields/signed_distance_fields/__main__.py b/examples/python/signed_distance_fields/signed_distance_fields/__main__.py index 17adac1a53578..739c6eab15f01 100755 --- a/examples/python/signed_distance_fields/signed_distance_fields/__main__.py +++ b/examples/python/signed_distance_fields/signed_distance_fields/__main__.py @@ -117,7 +117,12 @@ def log_sampled_sdf(points: npt.NDArray[np.float32], sdf: npt.NDArray[np.float32 def log_volumetric_sdf(voxvol: npt.NDArray[np.float32]) -> None: names = ["width", "height", "depth"] - rr.log("tensor", rr.Tensor(voxvol, dim_names=names)) + # Use a symmetric value range, so that the `cyantoyellow` colormap centers around zero. + # Either positive or negative range might be quite small, so don't exceed 1.5x the minimum range. 
+ negative_range = abs(cast(float, np.min(voxvol))) + positive_range = abs(cast(float, np.max(voxvol))) + range = min(max(negative_range, positive_range), 1.5 * min(negative_range, positive_range)) + rr.log("tensor", rr.Tensor(voxvol, dim_names=names, value_range=[-range, range])) @@ -197,7 +202,14 @@ def main() -> None: rrb.Vertical( rrb.Horizontal( rrb.Spatial3DView(name="Input Mesh", origin="/world/mesh"), - rrb.TensorView(name="SDF", origin="/tensor"), + rrb.TensorView( + # The cyan to yellow colormap changes its color at the mid point of its range. + # By combining this with the `value_range` parameter on the tensor, + # we can visualize negative & positive values effectively. + name="SDF", + origin="/tensor", + scalar_mapping=rrb.TensorScalarMapping(colormap="cyantoyellow"), + ), ), rrb.TextLogView(name="Execution Log"), ), diff --git a/examples/rust/clock/Cargo.toml b/examples/rust/clock/Cargo.toml index e01f989a83c9a..e8fa32b0f7a62 100644 --- a/examples/rust/clock/Cargo.toml +++ b/examples/rust/clock/Cargo.toml @@ -2,7 +2,7 @@ name = "clock" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/custom_data_loader/Cargo.toml b/examples/rust/custom_data_loader/Cargo.toml index 81bfdd261066a..0c0e2e7ea7353 100644 --- a/examples/rust/custom_data_loader/Cargo.toml +++ b/examples/rust/custom_data_loader/Cargo.toml @@ -2,7 +2,7 @@ name = "custom_data_loader" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/custom_space_view/Cargo.toml b/examples/rust/custom_space_view/Cargo.toml index 84ca1f6fd4b05..7d715d78e10da 100644 --- a/examples/rust/custom_space_view/Cargo.toml +++ b/examples/rust/custom_space_view/Cargo.toml @@ -2,7 +2,7 @@ name = "custom_space_view" version
= "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/custom_store_subscriber/Cargo.toml b/examples/rust/custom_store_subscriber/Cargo.toml index 3c837abc24a85..a841bb43e02ff 100644 --- a/examples/rust/custom_store_subscriber/Cargo.toml +++ b/examples/rust/custom_store_subscriber/Cargo.toml @@ -2,7 +2,7 @@ name = "custom_store_subscriber" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/dna/Cargo.toml b/examples/rust/dna/Cargo.toml index 2ff8aa6775104..7d871018ffc42 100644 --- a/examples/rust/dna/Cargo.toml +++ b/examples/rust/dna/Cargo.toml @@ -2,7 +2,7 @@ name = "dna" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/extend_viewer_ui/Cargo.toml b/examples/rust/extend_viewer_ui/Cargo.toml index 0da6302d7f4a5..a050d355ce99a 100644 --- a/examples/rust/extend_viewer_ui/Cargo.toml +++ b/examples/rust/extend_viewer_ui/Cargo.toml @@ -2,7 +2,7 @@ name = "extend_viewer_ui" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/external_data_loader/Cargo.toml b/examples/rust/external_data_loader/Cargo.toml index 561f79a4ffa28..6867c63fa80a8 100644 --- a/examples/rust/external_data_loader/Cargo.toml +++ b/examples/rust/external_data_loader/Cargo.toml @@ -2,7 +2,7 @@ name = "rerun-loader-rust-file" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/incremental_logging/Cargo.toml b/examples/rust/incremental_logging/Cargo.toml index 2044bf8a7a516..58ba5169f39f0 100644 --- a/examples/rust/incremental_logging/Cargo.toml 
+++ b/examples/rust/incremental_logging/Cargo.toml @@ -2,7 +2,7 @@ name = "incremental_logging" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/log_file/Cargo.toml b/examples/rust/log_file/Cargo.toml index 140d289e22c29..d3aef9e3190f5 100644 --- a/examples/rust/log_file/Cargo.toml +++ b/examples/rust/log_file/Cargo.toml @@ -2,7 +2,7 @@ name = "log_file" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/minimal/Cargo.toml b/examples/rust/minimal/Cargo.toml index 5e862808487a3..011a749b57ced 100644 --- a/examples/rust/minimal/Cargo.toml +++ b/examples/rust/minimal/Cargo.toml @@ -2,7 +2,7 @@ name = "minimal" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/minimal_options/Cargo.toml b/examples/rust/minimal_options/Cargo.toml index 292d25b5c898a..03d97468a5359 100644 --- a/examples/rust/minimal_options/Cargo.toml +++ b/examples/rust/minimal_options/Cargo.toml @@ -2,7 +2,7 @@ name = "minimal_options" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/minimal_serve/Cargo.toml b/examples/rust/minimal_serve/Cargo.toml index 4ab39d1301321..326312b29b5db 100644 --- a/examples/rust/minimal_serve/Cargo.toml +++ b/examples/rust/minimal_serve/Cargo.toml @@ -2,7 +2,7 @@ name = "minimal_serve" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/objectron/Cargo.toml b/examples/rust/objectron/Cargo.toml index c5822c0c90d72..8961a95547c8e 100644 --- a/examples/rust/objectron/Cargo.toml +++ 
b/examples/rust/objectron/Cargo.toml @@ -2,7 +2,7 @@ name = "objectron" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/raw_mesh/Cargo.toml b/examples/rust/raw_mesh/Cargo.toml index 325b45df3bff0..192a1ae6f331b 100644 --- a/examples/rust/raw_mesh/Cargo.toml +++ b/examples/rust/raw_mesh/Cargo.toml @@ -2,7 +2,7 @@ name = "raw_mesh" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/shared_recording/Cargo.toml b/examples/rust/shared_recording/Cargo.toml index 1b141ab067d44..96a6f5265b891 100644 --- a/examples/rust/shared_recording/Cargo.toml +++ b/examples/rust/shared_recording/Cargo.toml @@ -2,7 +2,7 @@ name = "shared_recording" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/spawn_viewer/Cargo.toml b/examples/rust/spawn_viewer/Cargo.toml index 56cee8715f939..a9ecdfa153b11 100644 --- a/examples/rust/spawn_viewer/Cargo.toml +++ b/examples/rust/spawn_viewer/Cargo.toml @@ -2,7 +2,7 @@ name = "spawn_viewer" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/stdio/Cargo.toml b/examples/rust/stdio/Cargo.toml index c683b543558eb..c635a9a4cd75c 100644 --- a/examples/rust/stdio/Cargo.toml +++ b/examples/rust/stdio/Cargo.toml @@ -2,7 +2,7 @@ name = "stdio" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/examples/rust/template/Cargo.toml b/examples/rust/template/Cargo.toml index f9522a702c8d3..c23e8f0d34deb 100644 --- a/examples/rust/template/Cargo.toml +++ b/examples/rust/template/Cargo.toml @@ -2,7 +2,7 @@ name 
= "template" version = "0.19.0-alpha.1+dev" edition = "2021" -rust-version = "1.76" +rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/rerun_cpp/src/rerun/archetypes/depth_image.cpp b/rerun_cpp/src/rerun/archetypes/depth_image.cpp index 593fb02bbb34c..f392dcd19c832 100644 --- a/rerun_cpp/src/rerun/archetypes/depth_image.cpp +++ b/rerun_cpp/src/rerun/archetypes/depth_image.cpp @@ -14,7 +14,7 @@ namespace rerun { ) { using namespace archetypes; std::vector cells; - cells.reserve(7); + cells.reserve(8); { auto result = ComponentBatch::from_loggable(archetype.buffer); @@ -36,6 +36,11 @@ namespace rerun { RR_RETURN_NOT_OK(result.error); cells.push_back(std::move(result.value)); } + if (archetype.depth_range.has_value()) { + auto result = ComponentBatch::from_loggable(archetype.depth_range.value()); + RR_RETURN_NOT_OK(result.error); + cells.push_back(std::move(result.value)); + } if (archetype.point_fill_ratio.has_value()) { auto result = ComponentBatch::from_loggable(archetype.point_fill_ratio.value()); RR_RETURN_NOT_OK(result.error); diff --git a/rerun_cpp/src/rerun/archetypes/depth_image.hpp b/rerun_cpp/src/rerun/archetypes/depth_image.hpp index c1168019870cc..66775eb9b8abc 100644 --- a/rerun_cpp/src/rerun/archetypes/depth_image.hpp +++ b/rerun_cpp/src/rerun/archetypes/depth_image.hpp @@ -12,6 +12,7 @@ #include "../components/fill_ratio.hpp" #include "../components/image_buffer.hpp" #include "../components/image_format.hpp" +#include "../components/value_range.hpp" #include "../image_utils.hpp" #include "../indicator_component.hpp" #include "../result.hpp" @@ -26,7 +27,7 @@ namespace rerun::archetypes { /// /// Each pixel corresponds to a depth value in units specified by `components::DepthMeter`. 
/// - /// Since the underlying `rerun::datatypes::TensorData` uses `rerun::Collection` internally, + /// Since the underlying `rerun::datatypes::ImageBuffer` uses `rerun::Collection` internally, /// data can be passed in without a copy from raw pointers or by reference from `std::vector`/`std::array`/c-arrays. /// If needed, this "borrow-behavior" can be extended by defining your own `rerun::CollectionAdapter`. /// @@ -94,6 +95,19 @@ namespace rerun::archetypes { /// If not set, the depth image will be rendered using the Turbo colormap. std::optional colormap; + /// The expected range of depth values. + /// + /// This is typically the expected range of valid values. + /// Everything outside of the range is clamped to the range for the purpose of colormpaping. + /// Note that point clouds generated from this image will still display all points, regardless of this range. + /// + /// If not specified, the range will be automatically estimated from the data. + /// Note that the Viewer may try to guess a wider range than the minimum/maximum of values + /// in the contents of the depth image. + /// E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + /// the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. + std::optional depth_range; + /// Scale the radii of the points in the point cloud generated from this image. /// /// A fill ratio of 1.0 (the default) means that each point is as big as to touch the center of its neighbor @@ -201,6 +215,23 @@ namespace rerun::archetypes { RR_WITH_MAYBE_UNINITIALIZED_DISABLED(return std::move(*this);) } + /// The expected range of depth values. + /// + /// This is typically the expected range of valid values. + /// Everything outside of the range is clamped to the range for the purpose of colormpaping. + /// Note that point clouds generated from this image will still display all points, regardless of this range. 
+ /// + /// If not specified, the range will be automatically estimated from the data. + /// Note that the Viewer may try to guess a wider range than the minimum/maximum of values + /// in the contents of the depth image. + /// E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + /// the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. + DepthImage with_depth_range(rerun::components::ValueRange _depth_range) && { + depth_range = std::move(_depth_range); + // See: https://github.com/rerun-io/rerun/issues/4027 + RR_WITH_MAYBE_UNINITIALIZED_DISABLED(return std::move(*this);) + } + /// Scale the radii of the points in the point cloud generated from this image. /// /// A fill ratio of 1.0 (the default) means that each point is as big as to touch the center of its neighbor diff --git a/rerun_cpp/src/rerun/archetypes/tensor.cpp b/rerun_cpp/src/rerun/archetypes/tensor.cpp index 07db511be330c..124deaf6e2abf 100644 --- a/rerun_cpp/src/rerun/archetypes/tensor.cpp +++ b/rerun_cpp/src/rerun/archetypes/tensor.cpp @@ -14,13 +14,18 @@ namespace rerun { ) { using namespace archetypes; std::vector cells; - cells.reserve(2); + cells.reserve(3); { auto result = ComponentBatch::from_loggable(archetype.data); RR_RETURN_NOT_OK(result.error); cells.push_back(std::move(result.value)); } + if (archetype.value_range.has_value()) { + auto result = ComponentBatch::from_loggable(archetype.value_range.value()); + RR_RETURN_NOT_OK(result.error); + cells.push_back(std::move(result.value)); + } { auto indicator = Tensor::IndicatorComponent(); auto result = ComponentBatch::from_loggable(indicator); diff --git a/rerun_cpp/src/rerun/archetypes/tensor.hpp b/rerun_cpp/src/rerun/archetypes/tensor.hpp index 88bfafd2d2a4b..881c6f34bb605 100644 --- a/rerun_cpp/src/rerun/archetypes/tensor.hpp +++ b/rerun_cpp/src/rerun/archetypes/tensor.hpp @@ -4,12 +4,15 @@ #pragma once #include "../collection.hpp" +#include 
"../compiler_utils.hpp" #include "../component_batch.hpp" #include "../components/tensor_data.hpp" +#include "../components/value_range.hpp" #include "../indicator_component.hpp" #include "../result.hpp" #include +#include #include #include @@ -53,6 +56,19 @@ namespace rerun::archetypes { /// The tensor data rerun::components::TensorData data; + /// The expected range of values. + /// + /// This is typically the expected range of valid values. + /// Everything outside of the range is clamped to the range for the purpose of colormpaping. + /// Any colormap applied for display, will map this range. + /// + /// If not specified, the range will be automatically estimated from the data. + /// Note that the Viewer may try to guess a wider range than the minimum/maximum of values + /// in the contents of the tensor. + /// E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + /// the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. + std::optional value_range; + public: static constexpr const char IndicatorComponentName[] = "rerun.components.TensorIndicator"; @@ -90,6 +106,23 @@ namespace rerun::archetypes { Tensor(Tensor&& other) = default; explicit Tensor(rerun::components::TensorData _data) : data(std::move(_data)) {} + + /// The expected range of values. + /// + /// This is typically the expected range of valid values. + /// Everything outside of the range is clamped to the range for the purpose of colormpaping. + /// Any colormap applied for display, will map this range. + /// + /// If not specified, the range will be automatically estimated from the data. + /// Note that the Viewer may try to guess a wider range than the minimum/maximum of values + /// in the contents of the tensor. + /// E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + /// the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. 
+ Tensor with_value_range(rerun::components::ValueRange _value_range) && { + value_range = std::move(_value_range); + // See: https://github.com/rerun-io/rerun/issues/4027 + RR_WITH_MAYBE_UNINITIALIZED_DISABLED(return std::move(*this);) + } }; } // namespace rerun::archetypes diff --git a/rerun_cpp/src/rerun/blueprint/archetypes.hpp b/rerun_cpp/src/rerun/blueprint/archetypes.hpp index 18f41b47ff196..c7bb76cc5a573 100644 --- a/rerun_cpp/src/rerun/blueprint/archetypes.hpp +++ b/rerun_cpp/src/rerun/blueprint/archetypes.hpp @@ -5,6 +5,7 @@ #include "blueprint/archetypes/background.hpp" #include "blueprint/archetypes/container_blueprint.hpp" #include "blueprint/archetypes/dataframe_query.hpp" +#include "blueprint/archetypes/dataframe_query_v2.hpp" #include "blueprint/archetypes/dataframe_visible_columns.hpp" #include "blueprint/archetypes/panel_blueprint.hpp" #include "blueprint/archetypes/plot_legend.hpp" diff --git a/rerun_cpp/src/rerun/blueprint/archetypes/.gitattributes b/rerun_cpp/src/rerun/blueprint/archetypes/.gitattributes index 8ea8bf69fb54b..c0639074ebf82 100644 --- a/rerun_cpp/src/rerun/blueprint/archetypes/.gitattributes +++ b/rerun_cpp/src/rerun/blueprint/archetypes/.gitattributes @@ -7,6 +7,8 @@ container_blueprint.cpp linguist-generated=true container_blueprint.hpp linguist-generated=true dataframe_query.cpp linguist-generated=true dataframe_query.hpp linguist-generated=true +dataframe_query_v2.cpp linguist-generated=true +dataframe_query_v2.hpp linguist-generated=true dataframe_visible_columns.cpp linguist-generated=true dataframe_visible_columns.hpp linguist-generated=true panel_blueprint.cpp linguist-generated=true diff --git a/rerun_cpp/src/rerun/blueprint/archetypes/dataframe_query_v2.cpp b/rerun_cpp/src/rerun/blueprint/archetypes/dataframe_query_v2.cpp new file mode 100644 index 0000000000000..9759515886861 --- /dev/null +++ b/rerun_cpp/src/rerun/blueprint/archetypes/dataframe_query_v2.cpp @@ -0,0 +1,54 @@ +// DO NOT EDIT! 
This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/archetypes/dataframe_query.fbs". + +#include "dataframe_query_v2.hpp" + +#include "../../collection_adapter_builtins.hpp" + +namespace rerun::blueprint::archetypes {} + +namespace rerun { + + Result> + AsComponents::serialize( + const blueprint::archetypes::DataframeQueryV2& archetype + ) { + using namespace blueprint::archetypes; + std::vector cells; + cells.reserve(6); + + if (archetype.timeline.has_value()) { + auto result = ComponentBatch::from_loggable(archetype.timeline.value()); + RR_RETURN_NOT_OK(result.error); + cells.push_back(std::move(result.value)); + } + if (archetype.filter_by_range.has_value()) { + auto result = ComponentBatch::from_loggable(archetype.filter_by_range.value()); + RR_RETURN_NOT_OK(result.error); + cells.push_back(std::move(result.value)); + } + if (archetype.filter_by_event.has_value()) { + auto result = ComponentBatch::from_loggable(archetype.filter_by_event.value()); + RR_RETURN_NOT_OK(result.error); + cells.push_back(std::move(result.value)); + } + if (archetype.apply_latest_at.has_value()) { + auto result = ComponentBatch::from_loggable(archetype.apply_latest_at.value()); + RR_RETURN_NOT_OK(result.error); + cells.push_back(std::move(result.value)); + } + if (archetype.select.has_value()) { + auto result = ComponentBatch::from_loggable(archetype.select.value()); + RR_RETURN_NOT_OK(result.error); + cells.push_back(std::move(result.value)); + } + { + auto indicator = DataframeQueryV2::IndicatorComponent(); + auto result = ComponentBatch::from_loggable(indicator); + RR_RETURN_NOT_OK(result.error); + cells.emplace_back(std::move(result.value)); + } + + return cells; + } +} // namespace rerun diff --git a/rerun_cpp/src/rerun/blueprint/archetypes/dataframe_query_v2.hpp b/rerun_cpp/src/rerun/blueprint/archetypes/dataframe_query_v2.hpp new file mode 100644 index 0000000000000..a208d87021c7d 
--- /dev/null +++ b/rerun_cpp/src/rerun/blueprint/archetypes/dataframe_query_v2.hpp @@ -0,0 +1,116 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/archetypes/dataframe_query.fbs". + +#pragma once + +#include "../../blueprint/components/apply_latest_at.hpp" +#include "../../blueprint/components/filter_by_event.hpp" +#include "../../blueprint/components/filter_by_range.hpp" +#include "../../blueprint/components/selected_columns.hpp" +#include "../../blueprint/components/timeline_name.hpp" +#include "../../collection.hpp" +#include "../../compiler_utils.hpp" +#include "../../component_batch.hpp" +#include "../../indicator_component.hpp" +#include "../../result.hpp" + +#include +#include +#include +#include + +namespace rerun::blueprint::archetypes { + /// **Archetype**: The query for the dataframe view. + struct DataframeQueryV2 { + /// The timeline for this query. + /// + /// If unset, the timeline currently active on the time panel is used. + std::optional timeline; + + /// If provided, only rows whose timestamp is within this range will be shown. + /// + /// Note: will be unset as soon as `timeline` is changed. + std::optional filter_by_range; + + /// If provided, only show rows which contains a logged event for the specified component. + std::optional filter_by_event; + + /// Should empty cells be filled with latest-at queries? + std::optional apply_latest_at; + + /// Selected columns. If unset, all columns are selected. + std::optional select; + + public: + static constexpr const char IndicatorComponentName[] = + "rerun.blueprint.components.DataframeQueryV2Indicator"; + + /// Indicator component, used to identify the archetype when converting to a list of components. 
+ using IndicatorComponent = rerun::components::IndicatorComponent; + + public: + DataframeQueryV2() = default; + DataframeQueryV2(DataframeQueryV2&& other) = default; + + /// The timeline for this query. + /// + /// If unset, the timeline currently active on the time panel is used. + DataframeQueryV2 with_timeline(rerun::blueprint::components::TimelineName _timeline) && { + timeline = std::move(_timeline); + // See: https://github.com/rerun-io/rerun/issues/4027 + RR_WITH_MAYBE_UNINITIALIZED_DISABLED(return std::move(*this);) + } + + /// If provided, only rows whose timestamp is within this range will be shown. + /// + /// Note: will be unset as soon as `timeline` is changed. + DataframeQueryV2 with_filter_by_range( + rerun::blueprint::components::FilterByRange _filter_by_range + ) && { + filter_by_range = std::move(_filter_by_range); + // See: https://github.com/rerun-io/rerun/issues/4027 + RR_WITH_MAYBE_UNINITIALIZED_DISABLED(return std::move(*this);) + } + + /// If provided, only show rows which contains a logged event for the specified component. + DataframeQueryV2 with_filter_by_event( + rerun::blueprint::components::FilterByEvent _filter_by_event + ) && { + filter_by_event = std::move(_filter_by_event); + // See: https://github.com/rerun-io/rerun/issues/4027 + RR_WITH_MAYBE_UNINITIALIZED_DISABLED(return std::move(*this);) + } + + /// Should empty cells be filled with latest-at queries? + DataframeQueryV2 with_apply_latest_at( + rerun::blueprint::components::ApplyLatestAt _apply_latest_at + ) && { + apply_latest_at = std::move(_apply_latest_at); + // See: https://github.com/rerun-io/rerun/issues/4027 + RR_WITH_MAYBE_UNINITIALIZED_DISABLED(return std::move(*this);) + } + + /// Selected columns. If unset, all columns are selected. 
+ DataframeQueryV2 with_select(rerun::blueprint::components::SelectedColumns _select) && { + select = std::move(_select); + // See: https://github.com/rerun-io/rerun/issues/4027 + RR_WITH_MAYBE_UNINITIALIZED_DISABLED(return std::move(*this);) + } + }; + +} // namespace rerun::blueprint::archetypes + +namespace rerun { + /// \private + template + struct AsComponents; + + /// \private + template <> + struct AsComponents { + /// Serialize all set component batches. + static Result> serialize( + const blueprint::archetypes::DataframeQueryV2& archetype + ); + }; +} // namespace rerun diff --git a/rerun_cpp/src/rerun/blueprint/archetypes/tensor_scalar_mapping.hpp b/rerun_cpp/src/rerun/blueprint/archetypes/tensor_scalar_mapping.hpp index e9370fdc226a6..4f85781fefec9 100644 --- a/rerun_cpp/src/rerun/blueprint/archetypes/tensor_scalar_mapping.hpp +++ b/rerun_cpp/src/rerun/blueprint/archetypes/tensor_scalar_mapping.hpp @@ -32,6 +32,9 @@ namespace rerun::blueprint::archetypes { /// /// Raises the normalized values to the power of this value before mapping to color. /// Acts like an inverse brightness. Defaults to 1.0. + /// + /// The final value for display is set as: + /// `colormap( ((value - data_display_range.min) / (data_display_range.max - data_display_range.min)) ** gamma )` std::optional gamma; public: @@ -65,6 +68,9 @@ namespace rerun::blueprint::archetypes { /// /// Raises the normalized values to the power of this value before mapping to color. /// Acts like an inverse brightness. Defaults to 1.0. 
+ /// + /// The final value for display is set as: + /// `colormap( ((value - data_display_range.min) / (data_display_range.max - data_display_range.min)) ** gamma )` TensorScalarMapping with_gamma(rerun::components::GammaCorrection _gamma) && { gamma = std::move(_gamma); // See: https://github.com/rerun-io/rerun/issues/4027 diff --git a/rerun_cpp/src/rerun/blueprint/components.hpp b/rerun_cpp/src/rerun/blueprint/components.hpp index c1f1ba2dfbdcd..ea4cfda9b8316 100644 --- a/rerun_cpp/src/rerun/blueprint/components.hpp +++ b/rerun_cpp/src/rerun/blueprint/components.hpp @@ -3,6 +3,7 @@ #pragma once #include "blueprint/components/active_tab.hpp" +#include "blueprint/components/apply_latest_at.hpp" #include "blueprint/components/auto_layout.hpp" #include "blueprint/components/auto_space_views.hpp" #include "blueprint/components/background_kind.hpp" @@ -11,6 +12,8 @@ #include "blueprint/components/component_column_selector.hpp" #include "blueprint/components/container_kind.hpp" #include "blueprint/components/corner2d.hpp" +#include "blueprint/components/filter_by_event.hpp" +#include "blueprint/components/filter_by_range.hpp" #include "blueprint/components/grid_columns.hpp" #include "blueprint/components/included_content.hpp" #include "blueprint/components/included_space_view.hpp" @@ -22,6 +25,7 @@ #include "blueprint/components/query_kind.hpp" #include "blueprint/components/root_container.hpp" #include "blueprint/components/row_share.hpp" +#include "blueprint/components/selected_columns.hpp" #include "blueprint/components/space_view_class.hpp" #include "blueprint/components/space_view_maximized.hpp" #include "blueprint/components/space_view_origin.hpp" diff --git a/rerun_cpp/src/rerun/blueprint/components/.gitattributes b/rerun_cpp/src/rerun/blueprint/components/.gitattributes index e460df8137051..4ab15e2bb6687 100644 --- a/rerun_cpp/src/rerun/blueprint/components/.gitattributes +++ b/rerun_cpp/src/rerun/blueprint/components/.gitattributes @@ -2,6 +2,7 @@ 
.gitattributes linguist-generated=true active_tab.hpp linguist-generated=true +apply_latest_at.hpp linguist-generated=true auto_layout.hpp linguist-generated=true auto_space_views.hpp linguist-generated=true background_kind.cpp linguist-generated=true @@ -14,6 +15,8 @@ container_kind.cpp linguist-generated=true container_kind.hpp linguist-generated=true corner2d.cpp linguist-generated=true corner2d.hpp linguist-generated=true +filter_by_event.hpp linguist-generated=true +filter_by_range.hpp linguist-generated=true grid_columns.hpp linguist-generated=true included_content.hpp linguist-generated=true included_space_view.hpp linguist-generated=true @@ -28,6 +31,7 @@ query_kind.cpp linguist-generated=true query_kind.hpp linguist-generated=true root_container.hpp linguist-generated=true row_share.hpp linguist-generated=true +selected_columns.hpp linguist-generated=true space_view_class.hpp linguist-generated=true space_view_maximized.hpp linguist-generated=true space_view_origin.hpp linguist-generated=true diff --git a/rerun_cpp/src/rerun/blueprint/components/apply_latest_at.hpp b/rerun_cpp/src/rerun/blueprint/components/apply_latest_at.hpp new file mode 100644 index 0000000000000..845e5912d0d65 --- /dev/null +++ b/rerun_cpp/src/rerun/blueprint/components/apply_latest_at.hpp @@ -0,0 +1,74 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/components/apply_latest_at.fbs". + +#pragma once + +#include "../../datatypes/bool.hpp" +#include "../../result.hpp" + +#include +#include + +namespace rerun::blueprint::components { + /// **Component**: Whether empty cells in a dataframe should be filled with a latest-at query. 
+ struct ApplyLatestAt { + rerun::datatypes::Bool apply_latest_at; + + public: + ApplyLatestAt() = default; + + ApplyLatestAt(rerun::datatypes::Bool apply_latest_at_) + : apply_latest_at(apply_latest_at_) {} + + ApplyLatestAt& operator=(rerun::datatypes::Bool apply_latest_at_) { + apply_latest_at = apply_latest_at_; + return *this; + } + + ApplyLatestAt(bool value_) : apply_latest_at(value_) {} + + ApplyLatestAt& operator=(bool value_) { + apply_latest_at = value_; + return *this; + } + + /// Cast to the underlying Bool datatype + operator rerun::datatypes::Bool() const { + return apply_latest_at; + } + }; +} // namespace rerun::blueprint::components + +namespace rerun { + static_assert(sizeof(rerun::datatypes::Bool) == sizeof(blueprint::components::ApplyLatestAt)); + + /// \private + template <> + struct Loggable { + static constexpr const char Name[] = "rerun.blueprint.components.ApplyLatestAt"; + + /// Returns the arrow data type this type corresponds to. + static const std::shared_ptr& arrow_datatype() { + return Loggable::arrow_datatype(); + } + + /// Serializes an array of `rerun::blueprint:: components::ApplyLatestAt` into an arrow array. + static Result> to_arrow( + const blueprint::components::ApplyLatestAt* instances, size_t num_instances + ) { + if (num_instances == 0) { + return Loggable::to_arrow(nullptr, 0); + } else if (instances == nullptr) { + return rerun::Error( + ErrorCode::UnexpectedNullArgument, + "Passed array instances is null when num_elements> 0." + ); + } else { + return Loggable::to_arrow( + &instances->apply_latest_at, + num_instances + ); + } + } + }; +} // namespace rerun diff --git a/rerun_cpp/src/rerun/blueprint/components/filter_by_event.hpp b/rerun_cpp/src/rerun/blueprint/components/filter_by_event.hpp new file mode 100644 index 0000000000000..242b7c285ea39 --- /dev/null +++ b/rerun_cpp/src/rerun/blueprint/components/filter_by_event.hpp @@ -0,0 +1,71 @@ +// DO NOT EDIT! 
This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/components/filter_by_event.fbs". + +#pragma once + +#include "../../blueprint/datatypes/filter_by_event.hpp" +#include "../../result.hpp" + +#include +#include +#include + +namespace rerun::blueprint::components { + /// **Component**: Configuration for the filter-by-event feature of the dataframe view. + struct FilterByEvent { + rerun::blueprint::datatypes::FilterByEvent filter_by_event; + + public: + FilterByEvent() = default; + + FilterByEvent(rerun::blueprint::datatypes::FilterByEvent filter_by_event_) + : filter_by_event(std::move(filter_by_event_)) {} + + FilterByEvent& operator=(rerun::blueprint::datatypes::FilterByEvent filter_by_event_) { + filter_by_event = std::move(filter_by_event_); + return *this; + } + + /// Cast to the underlying FilterByEvent datatype + operator rerun::blueprint::datatypes::FilterByEvent() const { + return filter_by_event; + } + }; +} // namespace rerun::blueprint::components + +namespace rerun { + static_assert( + sizeof(rerun::blueprint::datatypes::FilterByEvent) == + sizeof(blueprint::components::FilterByEvent) + ); + + /// \private + template <> + struct Loggable { + static constexpr const char Name[] = "rerun.blueprint.components.FilterByEvent"; + + /// Returns the arrow data type this type corresponds to. + static const std::shared_ptr& arrow_datatype() { + return Loggable::arrow_datatype(); + } + + /// Serializes an array of `rerun::blueprint:: components::FilterByEvent` into an arrow array. + static Result> to_arrow( + const blueprint::components::FilterByEvent* instances, size_t num_instances + ) { + if (num_instances == 0) { + return Loggable::to_arrow(nullptr, 0); + } else if (instances == nullptr) { + return rerun::Error( + ErrorCode::UnexpectedNullArgument, + "Passed array instances is null when num_elements> 0." 
+ ); + } else { + return Loggable::to_arrow( + &instances->filter_by_event, + num_instances + ); + } + } + }; +} // namespace rerun diff --git a/rerun_cpp/src/rerun/blueprint/components/filter_by_range.hpp b/rerun_cpp/src/rerun/blueprint/components/filter_by_range.hpp new file mode 100644 index 0000000000000..d6dc32caa20bd --- /dev/null +++ b/rerun_cpp/src/rerun/blueprint/components/filter_by_range.hpp @@ -0,0 +1,69 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/components/filter_by_range.fbs". + +#pragma once + +#include "../../blueprint/datatypes/filter_by_range.hpp" +#include "../../result.hpp" + +#include +#include + +namespace rerun::blueprint::components { + /// **Component**: Configuration for a filter-by-range feature of the dataframe view. + struct FilterByRange { + rerun::blueprint::datatypes::FilterByRange range; + + public: + FilterByRange() = default; + + FilterByRange(rerun::blueprint::datatypes::FilterByRange range_) : range(range_) {} + + FilterByRange& operator=(rerun::blueprint::datatypes::FilterByRange range_) { + range = range_; + return *this; + } + + /// Cast to the underlying FilterByRange datatype + operator rerun::blueprint::datatypes::FilterByRange() const { + return range; + } + }; +} // namespace rerun::blueprint::components + +namespace rerun { + static_assert( + sizeof(rerun::blueprint::datatypes::FilterByRange) == + sizeof(blueprint::components::FilterByRange) + ); + + /// \private + template <> + struct Loggable { + static constexpr const char Name[] = "rerun.blueprint.components.FilterByRange"; + + /// Returns the arrow data type this type corresponds to. + static const std::shared_ptr& arrow_datatype() { + return Loggable::arrow_datatype(); + } + + /// Serializes an array of `rerun::blueprint:: components::FilterByRange` into an arrow array. 
+ static Result> to_arrow( + const blueprint::components::FilterByRange* instances, size_t num_instances + ) { + if (num_instances == 0) { + return Loggable::to_arrow(nullptr, 0); + } else if (instances == nullptr) { + return rerun::Error( + ErrorCode::UnexpectedNullArgument, + "Passed array instances is null when num_elements> 0." + ); + } else { + return Loggable::to_arrow( + &instances->range, + num_instances + ); + } + } + }; +} // namespace rerun diff --git a/rerun_cpp/src/rerun/blueprint/components/included_content.hpp b/rerun_cpp/src/rerun/blueprint/components/included_content.hpp index 4db112c1e04b7..93bec78743c9e 100644 --- a/rerun_cpp/src/rerun/blueprint/components/included_content.hpp +++ b/rerun_cpp/src/rerun/blueprint/components/included_content.hpp @@ -14,7 +14,7 @@ namespace rerun::blueprint::components { /// **Component**: All the contents in the container. struct IncludedContent { - /// List of the contents by EntityPath. + /// List of the contents by `datatypes::EntityPath`. /// /// This must be a path in the blueprint store. /// Typically structure as `/`. diff --git a/rerun_cpp/src/rerun/blueprint/components/selected_columns.hpp b/rerun_cpp/src/rerun/blueprint/components/selected_columns.hpp new file mode 100644 index 0000000000000..8e08eb70a40dc --- /dev/null +++ b/rerun_cpp/src/rerun/blueprint/components/selected_columns.hpp @@ -0,0 +1,71 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/components/selected_columns.fbs". + +#pragma once + +#include "../../blueprint/datatypes/selected_columns.hpp" +#include "../../result.hpp" + +#include +#include +#include + +namespace rerun::blueprint::components { + /// **Component**: Describe a component column to be selected in the dataframe view. 
+ struct SelectedColumns { + rerun::blueprint::datatypes::SelectedColumns selected_columns; + + public: + SelectedColumns() = default; + + SelectedColumns(rerun::blueprint::datatypes::SelectedColumns selected_columns_) + : selected_columns(std::move(selected_columns_)) {} + + SelectedColumns& operator=(rerun::blueprint::datatypes::SelectedColumns selected_columns_) { + selected_columns = std::move(selected_columns_); + return *this; + } + + /// Cast to the underlying SelectedColumns datatype + operator rerun::blueprint::datatypes::SelectedColumns() const { + return selected_columns; + } + }; +} // namespace rerun::blueprint::components + +namespace rerun { + static_assert( + sizeof(rerun::blueprint::datatypes::SelectedColumns) == + sizeof(blueprint::components::SelectedColumns) + ); + + /// \private + template <> + struct Loggable { + static constexpr const char Name[] = "rerun.blueprint.components.SelectedColumns"; + + /// Returns the arrow data type this type corresponds to. + static const std::shared_ptr& arrow_datatype() { + return Loggable::arrow_datatype(); + } + + /// Serializes an array of `rerun::blueprint:: components::SelectedColumns` into an arrow array. + static Result> to_arrow( + const blueprint::components::SelectedColumns* instances, size_t num_instances + ) { + if (num_instances == 0) { + return Loggable::to_arrow(nullptr, 0); + } else if (instances == nullptr) { + return rerun::Error( + ErrorCode::UnexpectedNullArgument, + "Passed array instances is null when num_elements> 0." 
+ ); + } else { + return Loggable::to_arrow( + &instances->selected_columns, + num_instances + ); + } + } + }; +} // namespace rerun diff --git a/rerun_cpp/src/rerun/blueprint/components/visualizer_overrides.hpp b/rerun_cpp/src/rerun/blueprint/components/visualizer_overrides.hpp index 9d6a81f14331a..e7c2a9ce2a901 100644 --- a/rerun_cpp/src/rerun/blueprint/components/visualizer_overrides.hpp +++ b/rerun_cpp/src/rerun/blueprint/components/visualizer_overrides.hpp @@ -27,27 +27,27 @@ namespace rerun::blueprint::components { /// Names of the visualizers that should be active. /// /// The built-in visualizers are: - /// - BarChart - /// - Arrows2D - /// - Arrows3D - /// - Asset3D - /// - Boxes2D - /// - Boxes3D - /// - Cameras - /// - DepthImage - /// - Image - /// - Lines2D - /// - Lines3D - /// - Mesh3D - /// - Points2D - /// - Points3D - /// - Transform3DArrows - /// - Tensor - /// - TextDocument - /// - TextLog - /// - SegmentationImage - /// - SeriesLine - /// - SeriesPoint + /// - `BarChart` + /// - `Arrows2D` + /// - `Arrows3D` + /// - `Asset3D` + /// - `Boxes2D` + /// - `Boxes3D` + /// - `Cameras` + /// - `DepthImage` + /// - `Image` + /// - `Lines2D` + /// - `Lines3D` + /// - `Mesh3D` + /// - `Points2D` + /// - `Points3D` + /// - `Transform3DArrows` + /// - `Tensor` + /// - `TextDocument` + /// - `TextLog` + /// - `SegmentationImage` + /// - `SeriesLine` + /// - `SeriesPoint` rerun::blueprint::datatypes::Utf8List visualizers; public: diff --git a/rerun_cpp/src/rerun/blueprint/datatypes.hpp b/rerun_cpp/src/rerun/blueprint/datatypes.hpp index ba29c7a7e8029..ed0d71e77a6dc 100644 --- a/rerun_cpp/src/rerun/blueprint/datatypes.hpp +++ b/rerun_cpp/src/rerun/blueprint/datatypes.hpp @@ -3,7 +3,10 @@ #pragma once #include "blueprint/datatypes/component_column_selector.hpp" +#include "blueprint/datatypes/filter_by_event.hpp" +#include "blueprint/datatypes/filter_by_range.hpp" #include "blueprint/datatypes/latest_at_query.hpp" +#include 
"blueprint/datatypes/selected_columns.hpp" #include "blueprint/datatypes/tensor_dimension_index_slider.hpp" #include "blueprint/datatypes/time_range_query.hpp" #include "blueprint/datatypes/utf8list.hpp" diff --git a/rerun_cpp/src/rerun/blueprint/datatypes/.gitattributes b/rerun_cpp/src/rerun/blueprint/datatypes/.gitattributes index 729a1620b117e..581c403c67980 100644 --- a/rerun_cpp/src/rerun/blueprint/datatypes/.gitattributes +++ b/rerun_cpp/src/rerun/blueprint/datatypes/.gitattributes @@ -3,8 +3,14 @@ .gitattributes linguist-generated=true component_column_selector.cpp linguist-generated=true component_column_selector.hpp linguist-generated=true +filter_by_event.cpp linguist-generated=true +filter_by_event.hpp linguist-generated=true +filter_by_range.cpp linguist-generated=true +filter_by_range.hpp linguist-generated=true latest_at_query.cpp linguist-generated=true latest_at_query.hpp linguist-generated=true +selected_columns.cpp linguist-generated=true +selected_columns.hpp linguist-generated=true tensor_dimension_index_slider.cpp linguist-generated=true tensor_dimension_index_slider.hpp linguist-generated=true time_range_query.cpp linguist-generated=true diff --git a/rerun_cpp/src/rerun/blueprint/datatypes/filter_by_event.cpp b/rerun_cpp/src/rerun/blueprint/datatypes/filter_by_event.cpp new file mode 100644 index 0000000000000..f39743b2e3823 --- /dev/null +++ b/rerun_cpp/src/rerun/blueprint/datatypes/filter_by_event.cpp @@ -0,0 +1,89 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_event.fbs". 
+ +#include "filter_by_event.hpp" + +#include "../../datatypes/bool.hpp" +#include "component_column_selector.hpp" + +#include +#include + +namespace rerun::blueprint::datatypes {} + +namespace rerun { + const std::shared_ptr& + Loggable::arrow_datatype() { + static const auto datatype = arrow::struct_({ + arrow::field("active", Loggable::arrow_datatype(), false), + arrow::field( + "column", + Loggable::arrow_datatype(), + false + ), + }); + return datatype; + } + + Result> Loggable::to_arrow( + const blueprint::datatypes::FilterByEvent* instances, size_t num_instances + ) { + // TODO(andreas): Allow configuring the memory pool. + arrow::MemoryPool* pool = arrow::default_memory_pool(); + auto datatype = arrow_datatype(); + + ARROW_ASSIGN_OR_RAISE(auto builder, arrow::MakeBuilder(datatype, pool)) + if (instances && num_instances > 0) { + RR_RETURN_NOT_OK( + Loggable::fill_arrow_array_builder( + static_cast(builder.get()), + instances, + num_instances + ) + ); + } + std::shared_ptr array; + ARROW_RETURN_NOT_OK(builder->Finish(&array)); + return array; + } + + rerun::Error Loggable::fill_arrow_array_builder( + arrow::StructBuilder* builder, const blueprint::datatypes::FilterByEvent* elements, + size_t num_elements + ) { + if (builder == nullptr) { + return rerun::Error(ErrorCode::UnexpectedNullArgument, "Passed array builder is null."); + } + if (elements == nullptr) { + return rerun::Error( + ErrorCode::UnexpectedNullArgument, + "Cannot serialize null pointer to arrow array." 
+ ); + } + + { + auto field_builder = static_cast(builder->field_builder(0)); + ARROW_RETURN_NOT_OK(field_builder->Reserve(static_cast(num_elements))); + for (size_t elem_idx = 0; elem_idx < num_elements; elem_idx += 1) { + RR_RETURN_NOT_OK(Loggable::fill_arrow_array_builder( + field_builder, + &elements[elem_idx].active, + 1 + )); + } + } + { + auto field_builder = static_cast(builder->field_builder(1)); + ARROW_RETURN_NOT_OK(field_builder->Reserve(static_cast(num_elements))); + for (size_t elem_idx = 0; elem_idx < num_elements; elem_idx += 1) { + RR_RETURN_NOT_OK( + Loggable:: + fill_arrow_array_builder(field_builder, &elements[elem_idx].column, 1) + ); + } + } + ARROW_RETURN_NOT_OK(builder->AppendValues(static_cast(num_elements), nullptr)); + + return Error::ok(); + } +} // namespace rerun diff --git a/rerun_cpp/src/rerun/blueprint/datatypes/filter_by_event.hpp b/rerun_cpp/src/rerun/blueprint/datatypes/filter_by_event.hpp new file mode 100644 index 0000000000000..f3cbdcbbe916f --- /dev/null +++ b/rerun_cpp/src/rerun/blueprint/datatypes/filter_by_event.hpp @@ -0,0 +1,56 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_event.fbs". + +#pragma once + +#include "../../datatypes/bool.hpp" +#include "../../result.hpp" +#include "component_column_selector.hpp" + +#include +#include + +namespace arrow { + class Array; + class DataType; + class StructBuilder; +} // namespace arrow + +namespace rerun::blueprint::datatypes { + /// **Datatype**: Configuration for the filter by event feature of the dataframe view. + struct FilterByEvent { + /// Whether the filter by event feature is active. + rerun::datatypes::Bool active; + + /// The column used when the filter by event feature is used. 
+ rerun::blueprint::datatypes::ComponentColumnSelector column; + + public: + FilterByEvent() = default; + }; +} // namespace rerun::blueprint::datatypes + +namespace rerun { + template + struct Loggable; + + /// \private + template <> + struct Loggable { + static constexpr const char Name[] = "rerun.blueprint.datatypes.FilterByEvent"; + + /// Returns the arrow data type this type corresponds to. + static const std::shared_ptr& arrow_datatype(); + + /// Serializes an array of `rerun::blueprint:: datatypes::FilterByEvent` into an arrow array. + static Result> to_arrow( + const blueprint::datatypes::FilterByEvent* instances, size_t num_instances + ); + + /// Fills an arrow array builder with an array of this type. + static rerun::Error fill_arrow_array_builder( + arrow::StructBuilder* builder, const blueprint::datatypes::FilterByEvent* elements, + size_t num_elements + ); + }; +} // namespace rerun diff --git a/rerun_cpp/src/rerun/blueprint/datatypes/filter_by_range.cpp b/rerun_cpp/src/rerun/blueprint/datatypes/filter_by_range.cpp new file mode 100644 index 0000000000000..78ba99789bd48 --- /dev/null +++ b/rerun_cpp/src/rerun/blueprint/datatypes/filter_by_range.cpp @@ -0,0 +1,85 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_range.fbs". + +#include "filter_by_range.hpp" + +#include "../../datatypes/time_int.hpp" + +#include +#include + +namespace rerun::blueprint::datatypes {} + +namespace rerun { + const std::shared_ptr& + Loggable::arrow_datatype() { + static const auto datatype = arrow::struct_({ + arrow::field("start", Loggable::arrow_datatype(), false), + arrow::field("end", Loggable::arrow_datatype(), false), + }); + return datatype; + } + + Result> Loggable::to_arrow( + const blueprint::datatypes::FilterByRange* instances, size_t num_instances + ) { + // TODO(andreas): Allow configuring the memory pool. 
+ arrow::MemoryPool* pool = arrow::default_memory_pool(); + auto datatype = arrow_datatype(); + + ARROW_ASSIGN_OR_RAISE(auto builder, arrow::MakeBuilder(datatype, pool)) + if (instances && num_instances > 0) { + RR_RETURN_NOT_OK( + Loggable::fill_arrow_array_builder( + static_cast(builder.get()), + instances, + num_instances + ) + ); + } + std::shared_ptr array; + ARROW_RETURN_NOT_OK(builder->Finish(&array)); + return array; + } + + rerun::Error Loggable::fill_arrow_array_builder( + arrow::StructBuilder* builder, const blueprint::datatypes::FilterByRange* elements, + size_t num_elements + ) { + if (builder == nullptr) { + return rerun::Error(ErrorCode::UnexpectedNullArgument, "Passed array builder is null."); + } + if (elements == nullptr) { + return rerun::Error( + ErrorCode::UnexpectedNullArgument, + "Cannot serialize null pointer to arrow array." + ); + } + + { + auto field_builder = static_cast(builder->field_builder(0)); + ARROW_RETURN_NOT_OK(field_builder->Reserve(static_cast(num_elements))); + for (size_t elem_idx = 0; elem_idx < num_elements; elem_idx += 1) { + RR_RETURN_NOT_OK(Loggable::fill_arrow_array_builder( + field_builder, + &elements[elem_idx].start, + 1 + )); + } + } + { + auto field_builder = static_cast(builder->field_builder(1)); + ARROW_RETURN_NOT_OK(field_builder->Reserve(static_cast(num_elements))); + for (size_t elem_idx = 0; elem_idx < num_elements; elem_idx += 1) { + RR_RETURN_NOT_OK(Loggable::fill_arrow_array_builder( + field_builder, + &elements[elem_idx].end, + 1 + )); + } + } + ARROW_RETURN_NOT_OK(builder->AppendValues(static_cast(num_elements), nullptr)); + + return Error::ok(); + } +} // namespace rerun diff --git a/rerun_cpp/src/rerun/blueprint/datatypes/filter_by_range.hpp b/rerun_cpp/src/rerun/blueprint/datatypes/filter_by_range.hpp new file mode 100644 index 0000000000000..88cba9f407e85 --- /dev/null +++ b/rerun_cpp/src/rerun/blueprint/datatypes/filter_by_range.hpp @@ -0,0 +1,55 @@ +// DO NOT EDIT! 
This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_range.fbs". + +#pragma once + +#include "../../datatypes/time_int.hpp" +#include "../../result.hpp" + +#include +#include + +namespace arrow { + class Array; + class DataType; + class StructBuilder; +} // namespace arrow + +namespace rerun::blueprint::datatypes { + /// **Datatype**: Configuration for the filter-by-range feature of the dataframe view. + struct FilterByRange { + /// Beginning of the time range. + rerun::datatypes::TimeInt start; + + /// End of the time range (inclusive). + rerun::datatypes::TimeInt end; + + public: + FilterByRange() = default; + }; +} // namespace rerun::blueprint::datatypes + +namespace rerun { + template + struct Loggable; + + /// \private + template <> + struct Loggable { + static constexpr const char Name[] = "rerun.blueprint.datatypes.FilterByRange"; + + /// Returns the arrow data type this type corresponds to. + static const std::shared_ptr& arrow_datatype(); + + /// Serializes an array of `rerun::blueprint:: datatypes::FilterByRange` into an arrow array. + static Result> to_arrow( + const blueprint::datatypes::FilterByRange* instances, size_t num_instances + ); + + /// Fills an arrow array builder with an array of this type. + static rerun::Error fill_arrow_array_builder( + arrow::StructBuilder* builder, const blueprint::datatypes::FilterByRange* elements, + size_t num_elements + ); + }; +} // namespace rerun diff --git a/rerun_cpp/src/rerun/blueprint/datatypes/selected_columns.cpp b/rerun_cpp/src/rerun/blueprint/datatypes/selected_columns.cpp new file mode 100644 index 0000000000000..df48039456c4d --- /dev/null +++ b/rerun_cpp/src/rerun/blueprint/datatypes/selected_columns.cpp @@ -0,0 +1,118 @@ +// DO NOT EDIT! 
This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/datatypes/selected_columns.fbs". + +#include "selected_columns.hpp" + +#include "../../datatypes/utf8.hpp" +#include "component_column_selector.hpp" + +#include +#include + +namespace rerun::blueprint::datatypes {} + +namespace rerun { + const std::shared_ptr& + Loggable::arrow_datatype() { + static const auto datatype = arrow::struct_({ + arrow::field( + "time_columns", + arrow::list( + arrow::field("item", Loggable::arrow_datatype(), false) + ), + false + ), + arrow::field( + "component_columns", + arrow::list(arrow::field( + "item", + Loggable::arrow_datatype( + ), + false + )), + false + ), + }); + return datatype; + } + + Result> Loggable::to_arrow( + const blueprint::datatypes::SelectedColumns* instances, size_t num_instances + ) { + // TODO(andreas): Allow configuring the memory pool. + arrow::MemoryPool* pool = arrow::default_memory_pool(); + auto datatype = arrow_datatype(); + + ARROW_ASSIGN_OR_RAISE(auto builder, arrow::MakeBuilder(datatype, pool)) + if (instances && num_instances > 0) { + RR_RETURN_NOT_OK( + Loggable::fill_arrow_array_builder( + static_cast(builder.get()), + instances, + num_instances + ) + ); + } + std::shared_ptr array; + ARROW_RETURN_NOT_OK(builder->Finish(&array)); + return array; + } + + rerun::Error Loggable::fill_arrow_array_builder( + arrow::StructBuilder* builder, const blueprint::datatypes::SelectedColumns* elements, + size_t num_elements + ) { + if (builder == nullptr) { + return rerun::Error(ErrorCode::UnexpectedNullArgument, "Passed array builder is null."); + } + if (elements == nullptr) { + return rerun::Error( + ErrorCode::UnexpectedNullArgument, + "Cannot serialize null pointer to arrow array." 
+ ); + } + + { + auto field_builder = static_cast(builder->field_builder(0)); + auto value_builder = static_cast(field_builder->value_builder()); + ARROW_RETURN_NOT_OK(field_builder->Reserve(static_cast(num_elements))); + ARROW_RETURN_NOT_OK(value_builder->Reserve(static_cast(num_elements * 2))); + + for (size_t elem_idx = 0; elem_idx < num_elements; elem_idx += 1) { + const auto& element = elements[elem_idx]; + ARROW_RETURN_NOT_OK(field_builder->Append()); + if (element.time_columns.data()) { + RR_RETURN_NOT_OK(Loggable::fill_arrow_array_builder( + value_builder, + element.time_columns.data(), + element.time_columns.size() + )); + } + } + } + { + auto field_builder = static_cast(builder->field_builder(1)); + auto value_builder = static_cast(field_builder->value_builder()); + ARROW_RETURN_NOT_OK(field_builder->Reserve(static_cast(num_elements))); + ARROW_RETURN_NOT_OK(value_builder->Reserve(static_cast(num_elements * 2))); + + for (size_t elem_idx = 0; elem_idx < num_elements; elem_idx += 1) { + const auto& element = elements[elem_idx]; + ARROW_RETURN_NOT_OK(field_builder->Append()); + if (element.component_columns.data()) { + RR_RETURN_NOT_OK( + Loggable:: + fill_arrow_array_builder( + value_builder, + element.component_columns.data(), + element.component_columns.size() + ) + ); + } + } + } + ARROW_RETURN_NOT_OK(builder->AppendValues(static_cast(num_elements), nullptr)); + + return Error::ok(); + } +} // namespace rerun diff --git a/rerun_cpp/src/rerun/blueprint/datatypes/selected_columns.hpp b/rerun_cpp/src/rerun/blueprint/datatypes/selected_columns.hpp new file mode 100644 index 0000000000000..813f626b4c703 --- /dev/null +++ b/rerun_cpp/src/rerun/blueprint/datatypes/selected_columns.hpp @@ -0,0 +1,57 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/blueprint/datatypes/selected_columns.fbs". 
+ +#pragma once + +#include "../../collection.hpp" +#include "../../datatypes/utf8.hpp" +#include "../../result.hpp" +#include "component_column_selector.hpp" + +#include +#include + +namespace arrow { + class Array; + class DataType; + class StructBuilder; +} // namespace arrow + +namespace rerun::blueprint::datatypes { + /// **Datatype**: List of selected columns in a dataframe. + struct SelectedColumns { + /// The time columns to include + rerun::Collection time_columns; + + /// The component columns to include + rerun::Collection component_columns; + + public: + SelectedColumns() = default; + }; +} // namespace rerun::blueprint::datatypes + +namespace rerun { + template + struct Loggable; + + /// \private + template <> + struct Loggable { + static constexpr const char Name[] = "rerun.blueprint.datatypes.SelectedColumns"; + + /// Returns the arrow data type this type corresponds to. + static const std::shared_ptr& arrow_datatype(); + + /// Serializes an array of `rerun::blueprint:: datatypes::SelectedColumns` into an arrow array. + static Result> to_arrow( + const blueprint::datatypes::SelectedColumns* instances, size_t num_instances + ); + + /// Fills an arrow array builder with an array of this type. 
+ static rerun::Error fill_arrow_array_builder( + arrow::StructBuilder* builder, const blueprint::datatypes::SelectedColumns* elements, + size_t num_elements + ); + }; +} // namespace rerun diff --git a/rerun_cpp/src/rerun/components.hpp b/rerun_cpp/src/rerun/components.hpp index 3e05b8095a17e..e11d74969590d 100644 --- a/rerun_cpp/src/rerun/components.hpp +++ b/rerun_cpp/src/rerun/components.hpp @@ -60,6 +60,7 @@ #include "components/transform_relation.hpp" #include "components/translation3d.hpp" #include "components/triangle_indices.hpp" +#include "components/value_range.hpp" #include "components/vector2d.hpp" #include "components/vector3d.hpp" #include "components/video_timestamp.hpp" diff --git a/rerun_cpp/src/rerun/components/.gitattributes b/rerun_cpp/src/rerun/components/.gitattributes index ef90d02b74d55..a63cd0bb87b8a 100644 --- a/rerun_cpp/src/rerun/components/.gitattributes +++ b/rerun_cpp/src/rerun/components/.gitattributes @@ -68,6 +68,7 @@ transform_relation.cpp linguist-generated=true transform_relation.hpp linguist-generated=true translation3d.hpp linguist-generated=true triangle_indices.hpp linguist-generated=true +value_range.hpp linguist-generated=true vector2d.hpp linguist-generated=true vector3d.hpp linguist-generated=true video_timestamp.hpp linguist-generated=true diff --git a/rerun_cpp/src/rerun/components/value_range.hpp b/rerun_cpp/src/rerun/components/value_range.hpp new file mode 100644 index 0000000000000..840045a3f58e5 --- /dev/null +++ b/rerun_cpp/src/rerun/components/value_range.hpp @@ -0,0 +1,74 @@ +// DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/cpp/mod.rs +// Based on "crates/store/re_types/definitions/rerun/components/value_range.fbs". + +#pragma once + +#include "../datatypes/range1d.hpp" +#include "../result.hpp" + +#include +#include +#include + +namespace rerun::components { + /// **Component**: Range of expected or valid values, specifying a lower and upper bound. 
+ struct ValueRange { + rerun::datatypes::Range1D range; + + public: + ValueRange() = default; + + ValueRange(rerun::datatypes::Range1D range_) : range(range_) {} + + ValueRange& operator=(rerun::datatypes::Range1D range_) { + range = range_; + return *this; + } + + ValueRange(std::array range_) : range(range_) {} + + ValueRange& operator=(std::array range_) { + range = range_; + return *this; + } + + /// Cast to the underlying Range1D datatype + operator rerun::datatypes::Range1D() const { + return range; + } + }; +} // namespace rerun::components + +namespace rerun { + static_assert(sizeof(rerun::datatypes::Range1D) == sizeof(components::ValueRange)); + + /// \private + template <> + struct Loggable { + static constexpr const char Name[] = "rerun.components.ValueRange"; + + /// Returns the arrow data type this type corresponds to. + static const std::shared_ptr& arrow_datatype() { + return Loggable::arrow_datatype(); + } + + /// Serializes an array of `rerun::components::ValueRange` into an arrow array. + static Result> to_arrow( + const components::ValueRange* instances, size_t num_instances + ) { + if (num_instances == 0) { + return Loggable::to_arrow(nullptr, 0); + } else if (instances == nullptr) { + return rerun::Error( + ErrorCode::UnexpectedNullArgument, + "Passed array instances is null when num_elements> 0." + ); + } else { + return Loggable::to_arrow( + &instances->range, + num_instances + ); + } + } + }; +} // namespace rerun diff --git a/rerun_cpp/src/rerun/datatypes/pixel_format.hpp b/rerun_cpp/src/rerun/datatypes/pixel_format.hpp index 5fed6b15e8be2..1cd791004b7bb 100644 --- a/rerun_cpp/src/rerun/datatypes/pixel_format.hpp +++ b/rerun_cpp/src/rerun/datatypes/pixel_format.hpp @@ -33,13 +33,13 @@ namespace rerun::datatypes { /// For more compressed image formats, see `archetypes::EncodedImage`. enum class PixelFormat : uint8_t { - /// NV12 (aka Y_UV12) is a YUV 4:2:0 chroma downsampled format with 12 bits per pixel and 8 bits per channel. 
+ /// `NV12` (aka `Y_UV12`) is a YUV 4:2:0 chroma downsampled format with 12 bits per pixel and 8 bits per channel. /// /// First comes entire image in Y in one plane, /// followed by a plane with interleaved lines ordered as U0, V0, U1, V1, etc. NV12 = 26, - /// YUY2 (aka YUYV or YUYV16), is a YUV 4:2:2 chroma downsampled format with 16 bits per pixel and 8 bits per channel. + /// `YUY2` (aka `YUYV` or `YUYV16`), is a YUV 4:2:2 chroma downsampled format with 16 bits per pixel and 8 bits per channel. /// /// The order of the channels is Y0, U0, Y1, V0, all in the same plane. YUY2 = 27, diff --git a/rerun_py/rerun_sdk/rerun/archetypes/depth_image.py b/rerun_py/rerun_sdk/rerun/archetypes/depth_image.py index 14b8da0ac6a9d..5fc1d1ed023ce 100644 --- a/rerun_py/rerun_sdk/rerun/archetypes/depth_image.py +++ b/rerun_py/rerun_sdk/rerun/archetypes/depth_image.py @@ -70,6 +70,7 @@ def __attrs_clear__(self) -> None: format=None, # type: ignore[arg-type] meter=None, # type: ignore[arg-type] colormap=None, # type: ignore[arg-type] + depth_range=None, # type: ignore[arg-type] point_fill_ratio=None, # type: ignore[arg-type] draw_order=None, # type: ignore[arg-type] ) @@ -123,6 +124,25 @@ def _clear(cls) -> DepthImage: # # (Docstring intentionally commented out to hide this field from the docs) + depth_range: components.ValueRangeBatch | None = field( + metadata={"component": "optional"}, + default=None, + converter=components.ValueRangeBatch._optional, # type: ignore[misc] + ) + # The expected range of depth values. + # + # This is typically the expected range of valid values. + # Everything outside of the range is clamped to the range for the purpose of colormpaping. + # Note that point clouds generated from this image will still display all points, regardless of this range. + # + # If not specified, the range will be automatically estimated from the data. 
+ # Note that the Viewer may try to guess a wider range than the minimum/maximum of values + # in the contents of the depth image. + # E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + # the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. + # + # (Docstring intentionally commented out to hide this field from the docs) + point_fill_ratio: components.FillRatioBatch | None = field( metadata={"component": "optional"}, default=None, diff --git a/rerun_py/rerun_sdk/rerun/archetypes/depth_image_ext.py b/rerun_py/rerun_sdk/rerun/archetypes/depth_image_ext.py index 31416fdc5f796..68238558e7851 100644 --- a/rerun_py/rerun_sdk/rerun/archetypes/depth_image_ext.py +++ b/rerun_py/rerun_sdk/rerun/archetypes/depth_image_ext.py @@ -5,6 +5,8 @@ import numpy as np import numpy.typing as npt +from rerun.datatypes.range1d import Range1DLike + from ..components import Colormap, ImageFormat from ..datatypes import ChannelDatatype, Float32Like @@ -45,7 +47,57 @@ def __init__( *, meter: Float32Like | None = None, colormap: Colormap | None = None, + depth_range: Range1DLike | None = None, + point_fill_ratio: Float32Like | None = None, + draw_order: Float32Like | None = None, ): + """ + Create a new instance of the DepthImage archetype. + + Parameters + ---------- + image: + A numpy array or tensor with the depth image data. + Leading and trailing unit-dimensions are ignored, so that + `1x480x640x1` is treated as a `480x640`. + meter: + An optional floating point value that specifies how long a meter is in the native depth units. + + For instance: with uint16, perhaps meter=1000 which would mean you have millimeter precision + and a range of up to ~65 meters (2^16 / 1000). + + Note that the only effect on 2D views is the physical depth values shown when hovering the image. + In 3D views on the other hand, this affects where the points of the point cloud are placed. 
+ colormap: + Colormap to use for rendering the depth image. + + If not set, the depth image will be rendered using the Turbo colormap. + depth_range: + The expected range of depth values. + + This is typically the expected range of valid values. + Everything outside of the range is clamped to the range for the purpose of colormpaping. + Note that point clouds generated from this image will still display all points, regardless of this range. + + If not specified, the range will be automatically be estimated from the data. + Note that the Viewer may try to guess a wider range than the minimum/maximum of values + in the contents of the depth image. + E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. + point_fill_ratio: + Scale the radii of the points in the point cloud generated from this image. + + A fill ratio of 1.0 (the default) means that each point is as big as to touch the center of its neighbor + if it is at the same depth, leaving no gaps. + A fill ratio of 0.5 means that each point touches the edge of its neighbor if it has the same depth. + + TODO(#6744): This applies only to 3D views! + draw_order: + An optional floating point value that specifies the 2D drawing order, used only if the depth image is shown as a 2D image. + + Objects with higher values are drawn on top of those with lower values. 
+ + """ image = _to_numpy(image) shape = image.shape @@ -74,4 +126,7 @@ def __init__( ), meter=meter, colormap=colormap, + depth_range=depth_range, + point_fill_ratio=point_fill_ratio, + draw_order=draw_order, ) diff --git a/rerun_py/rerun_sdk/rerun/archetypes/tensor.py b/rerun_py/rerun_sdk/rerun/archetypes/tensor.py index 8a0529a7bd9e4..9c9a90ec5cdaa 100644 --- a/rerun_py/rerun_sdk/rerun/archetypes/tensor.py +++ b/rerun_py/rerun_sdk/rerun/archetypes/tensor.py @@ -57,6 +57,7 @@ def __attrs_clear__(self) -> None: """Convenience method for calling `__attrs_init__` with all `None`s.""" self.__attrs_init__( data=None, # type: ignore[arg-type] + value_range=None, # type: ignore[arg-type] ) @classmethod @@ -74,5 +75,24 @@ def _clear(cls) -> Tensor: # # (Docstring intentionally commented out to hide this field from the docs) + value_range: components.ValueRangeBatch | None = field( + metadata={"component": "optional"}, + default=None, + converter=components.ValueRangeBatch._optional, # type: ignore[misc] + ) + # The expected range of values. + # + # This is typically the expected range of valid values. + # Everything outside of the range is clamped to the range for the purpose of colormpaping. + # Any colormap applied for display, will map this range. + # + # If not specified, the range will be automatically estimated from the data. + # Note that the Viewer may try to guess a wider range than the minimum/maximum of values + # in the contents of the tensor. + # E.g. if all values are positive, some bigger than 1.0 and all smaller than 255.0, + # the Viewer will guess that the data likely came from an 8bit image, thus assuming a range of 0-255. 
+ # + # (Docstring intentionally commented out to hide this field from the docs) + __str__ = Archetype.__str__ __repr__ = Archetype.__repr__ # type: ignore[assignment] diff --git a/rerun_py/rerun_sdk/rerun/archetypes/tensor_ext.py b/rerun_py/rerun_sdk/rerun/archetypes/tensor_ext.py index 845e0d1f59762..0ee17ce0edabe 100644 --- a/rerun_py/rerun_sdk/rerun/archetypes/tensor_ext.py +++ b/rerun_py/rerun_sdk/rerun/archetypes/tensor_ext.py @@ -2,6 +2,8 @@ from typing import TYPE_CHECKING, Any, Sequence +from rerun.datatypes.range1d import Range1DLike + from ..error_utils import catch_and_log_exceptions if TYPE_CHECKING: @@ -17,6 +19,7 @@ def __init__( data: TensorDataLike | TensorLike | None = None, *, dim_names: Sequence[str | None] | None = None, + value_range: Range1DLike | None = None, ): """ Construct a `Tensor` archetype. @@ -37,6 +40,10 @@ def __init__( A TensorData object, or type that can be converted to a numpy array. dim_names: Sequence[str] | None The names of the tensor dimensions when generating the shape from an array. + value_range: Sequence[float] | None + The range of values to use for colormapping. + + If not specified, the range will be estimated from the data. 
""" from ..datatypes import TensorData @@ -47,7 +54,7 @@ def __init__( elif dim_names is not None: data = TensorData(buffer=data.buffer, dim_names=dim_names) - self.__attrs_init__(data=data) + self.__attrs_init__(data=data, value_range=value_range) return self.__attrs_clear__() diff --git a/rerun_py/rerun_sdk/rerun/blueprint/__init__.py b/rerun_py/rerun_sdk/rerun/blueprint/__init__.py index 8d36c2ceab54e..10450ec4404e4 100644 --- a/rerun_py/rerun_sdk/rerun/blueprint/__init__.py +++ b/rerun_py/rerun_sdk/rerun/blueprint/__init__.py @@ -29,6 +29,8 @@ ) from .archetypes import ( Background as Background, + DataframeQuery as DataframeQuery, + DataframeVisibleColumns as DataframeVisibleColumns, PlotLegend as PlotLegend, ScalarAxis as ScalarAxis, TensorScalarMapping as TensorScalarMapping, @@ -40,6 +42,8 @@ BackgroundKind as BackgroundKind, Corner2D as Corner2D, LockRangeDuringZoom as LockRangeDuringZoom, + QueryKind as QueryKind, + TimeRangeQueries as TimeRangeQueries, VisibleTimeRange as VisibleTimeRange, VisualizerOverrides as VisualizerOverrides, ) @@ -51,6 +55,7 @@ ) from .views import ( BarChartView as BarChartView, + DataframeView as DataframeView, Spatial2DView as Spatial2DView, Spatial3DView as Spatial3DView, TensorView as TensorView, diff --git a/rerun_py/rerun_sdk/rerun/blueprint/archetypes/.gitattributes b/rerun_py/rerun_sdk/rerun/blueprint/archetypes/.gitattributes index 3d18277f6a395..1e43163fe8110 100644 --- a/rerun_py/rerun_sdk/rerun/blueprint/archetypes/.gitattributes +++ b/rerun_py/rerun_sdk/rerun/blueprint/archetypes/.gitattributes @@ -5,6 +5,7 @@ __init__.py linguist-generated=true background.py linguist-generated=true container_blueprint.py linguist-generated=true dataframe_query.py linguist-generated=true +dataframe_query_v2.py linguist-generated=true dataframe_visible_columns.py linguist-generated=true panel_blueprint.py linguist-generated=true plot_legend.py linguist-generated=true diff --git 
a/rerun_py/rerun_sdk/rerun/blueprint/archetypes/__init__.py b/rerun_py/rerun_sdk/rerun/blueprint/archetypes/__init__.py index 32a8ea90306f1..7559a42e447d4 100644 --- a/rerun_py/rerun_sdk/rerun/blueprint/archetypes/__init__.py +++ b/rerun_py/rerun_sdk/rerun/blueprint/archetypes/__init__.py @@ -5,6 +5,7 @@ from .background import Background from .container_blueprint import ContainerBlueprint from .dataframe_query import DataframeQuery +from .dataframe_query_v2 import DataframeQueryV2 from .dataframe_visible_columns import DataframeVisibleColumns from .panel_blueprint import PanelBlueprint from .plot_legend import PlotLegend @@ -22,6 +23,7 @@ "Background", "ContainerBlueprint", "DataframeQuery", + "DataframeQueryV2", "DataframeVisibleColumns", "PanelBlueprint", "PlotLegend", diff --git a/rerun_py/rerun_sdk/rerun/blueprint/archetypes/dataframe_query_v2.py b/rerun_py/rerun_sdk/rerun/blueprint/archetypes/dataframe_query_v2.py new file mode 100644 index 0000000000000..560767f33717b --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/blueprint/archetypes/dataframe_query_v2.py @@ -0,0 +1,136 @@ +# DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/python/mod.rs +# Based on "crates/store/re_types/definitions/rerun/blueprint/archetypes/dataframe_query.fbs". + +# You can extend this class by creating a "DataframeQueryV2Ext" class in "dataframe_query_v2_ext.py". + +from __future__ import annotations + +from typing import Any + +from attrs import define, field + +from ... 
import datatypes +from ..._baseclasses import ( + Archetype, +) +from ...blueprint import components as blueprint_components, datatypes as blueprint_datatypes +from ...error_utils import catch_and_log_exceptions + +__all__ = ["DataframeQueryV2"] + + +@define(str=False, repr=False, init=False) +class DataframeQueryV2(Archetype): + """**Archetype**: The query for the dataframe view.""" + + def __init__( + self: Any, + *, + timeline: datatypes.Utf8Like | None = None, + filter_by_range: blueprint_datatypes.FilterByRangeLike | None = None, + filter_by_event: blueprint_datatypes.FilterByEventLike | None = None, + apply_latest_at: datatypes.BoolLike | None = None, + select: blueprint_datatypes.SelectedColumnsLike | None = None, + ): + """ + Create a new instance of the DataframeQueryV2 archetype. + + Parameters + ---------- + timeline: + The timeline for this query. + + If unset, the timeline currently active on the time panel is used. + filter_by_range: + If provided, only rows whose timestamp is within this range will be shown. + + Note: will be unset as soon as `timeline` is changed. + filter_by_event: + If provided, only show rows which contains a logged event for the specified component. + apply_latest_at: + Should empty cells be filled with latest-at queries? + select: + Selected columns. If unset, all columns are selected. 
+ + """ + + # You can define your own __init__ function as a member of DataframeQueryV2Ext in dataframe_query_v2_ext.py + with catch_and_log_exceptions(context=self.__class__.__name__): + self.__attrs_init__( + timeline=timeline, + filter_by_range=filter_by_range, + filter_by_event=filter_by_event, + apply_latest_at=apply_latest_at, + select=select, + ) + return + self.__attrs_clear__() + + def __attrs_clear__(self) -> None: + """Convenience method for calling `__attrs_init__` with all `None`s.""" + self.__attrs_init__( + timeline=None, # type: ignore[arg-type] + filter_by_range=None, # type: ignore[arg-type] + filter_by_event=None, # type: ignore[arg-type] + apply_latest_at=None, # type: ignore[arg-type] + select=None, # type: ignore[arg-type] + ) + + @classmethod + def _clear(cls) -> DataframeQueryV2: + """Produce an empty DataframeQueryV2, bypassing `__init__`.""" + inst = cls.__new__(cls) + inst.__attrs_clear__() + return inst + + timeline: blueprint_components.TimelineNameBatch | None = field( + metadata={"component": "optional"}, + default=None, + converter=blueprint_components.TimelineNameBatch._optional, # type: ignore[misc] + ) + # The timeline for this query. + # + # If unset, the timeline currently active on the time panel is used. + # + # (Docstring intentionally commented out to hide this field from the docs) + + filter_by_range: blueprint_components.FilterByRangeBatch | None = field( + metadata={"component": "optional"}, + default=None, + converter=blueprint_components.FilterByRangeBatch._optional, # type: ignore[misc] + ) + # If provided, only rows whose timestamp is within this range will be shown. + # + # Note: will be unset as soon as `timeline` is changed. 
+ # + # (Docstring intentionally commented out to hide this field from the docs) + + filter_by_event: blueprint_components.FilterByEventBatch | None = field( + metadata={"component": "optional"}, + default=None, + converter=blueprint_components.FilterByEventBatch._optional, # type: ignore[misc] + ) + # If provided, only show rows which contains a logged event for the specified component. + # + # (Docstring intentionally commented out to hide this field from the docs) + + apply_latest_at: blueprint_components.ApplyLatestAtBatch | None = field( + metadata={"component": "optional"}, + default=None, + converter=blueprint_components.ApplyLatestAtBatch._optional, # type: ignore[misc] + ) + # Should empty cells be filled with latest-at queries? + # + # (Docstring intentionally commented out to hide this field from the docs) + + select: blueprint_components.SelectedColumnsBatch | None = field( + metadata={"component": "optional"}, + default=None, + converter=blueprint_components.SelectedColumnsBatch._optional, # type: ignore[misc] + ) + # Selected columns. If unset, all columns are selected. + # + # (Docstring intentionally commented out to hide this field from the docs) + + __str__ = Archetype.__str__ + __repr__ = Archetype.__repr__ # type: ignore[assignment] diff --git a/rerun_py/rerun_sdk/rerun/blueprint/archetypes/tensor_scalar_mapping.py b/rerun_py/rerun_sdk/rerun/blueprint/archetypes/tensor_scalar_mapping.py index 5111432a38af1..f0a1157f54622 100644 --- a/rerun_py/rerun_sdk/rerun/blueprint/archetypes/tensor_scalar_mapping.py +++ b/rerun_py/rerun_sdk/rerun/blueprint/archetypes/tensor_scalar_mapping.py @@ -46,6 +46,9 @@ def __init__( Raises the normalized values to the power of this value before mapping to color. Acts like an inverse brightness. Defaults to 1.0. 
+ The final value for display is set as: + `colormap( ((value - data_display_range.min) / (data_display_range.max - data_display_range.min)) ** gamma )` + """ # You can define your own __init__ function as a member of TensorScalarMappingExt in tensor_scalar_mapping_ext.py @@ -99,6 +102,9 @@ def _clear(cls) -> TensorScalarMapping: # Raises the normalized values to the power of this value before mapping to color. # Acts like an inverse brightness. Defaults to 1.0. # + # The final value for display is set as: + # `colormap( ((value - data_display_range.min) / (data_display_range.max - data_display_range.min)) ** gamma )` + # # (Docstring intentionally commented out to hide this field from the docs) __str__ = Archetype.__str__ diff --git a/rerun_py/rerun_sdk/rerun/blueprint/components/.gitattributes b/rerun_py/rerun_sdk/rerun/blueprint/components/.gitattributes index 5cd0cada3e53d..2709b58765eff 100644 --- a/rerun_py/rerun_sdk/rerun/blueprint/components/.gitattributes +++ b/rerun_py/rerun_sdk/rerun/blueprint/components/.gitattributes @@ -3,6 +3,7 @@ .gitattributes linguist-generated=true __init__.py linguist-generated=true active_tab.py linguist-generated=true +apply_latest_at.py linguist-generated=true auto_layout.py linguist-generated=true auto_space_views.py linguist-generated=true background_kind.py linguist-generated=true @@ -11,6 +12,8 @@ column_share.py linguist-generated=true component_column_selector.py linguist-generated=true container_kind.py linguist-generated=true corner2d.py linguist-generated=true +filter_by_event.py linguist-generated=true +filter_by_range.py linguist-generated=true grid_columns.py linguist-generated=true included_content.py linguist-generated=true included_space_view.py linguist-generated=true @@ -22,6 +25,7 @@ query_expression.py linguist-generated=true query_kind.py linguist-generated=true root_container.py linguist-generated=true row_share.py linguist-generated=true +selected_columns.py linguist-generated=true space_view_class.py 
linguist-generated=true space_view_maximized.py linguist-generated=true space_view_origin.py linguist-generated=true diff --git a/rerun_py/rerun_sdk/rerun/blueprint/components/__init__.py b/rerun_py/rerun_sdk/rerun/blueprint/components/__init__.py index 9f676774d6244..553de8b34eb6c 100644 --- a/rerun_py/rerun_sdk/rerun/blueprint/components/__init__.py +++ b/rerun_py/rerun_sdk/rerun/blueprint/components/__init__.py @@ -3,6 +3,7 @@ from __future__ import annotations from .active_tab import ActiveTab, ActiveTabBatch, ActiveTabType +from .apply_latest_at import ApplyLatestAt, ApplyLatestAtBatch, ApplyLatestAtType from .auto_layout import AutoLayout, AutoLayoutBatch, AutoLayoutType from .auto_space_views import AutoSpaceViews, AutoSpaceViewsBatch, AutoSpaceViewsType from .background_kind import ( @@ -33,6 +34,8 @@ ContainerKindType, ) from .corner2d import Corner2D, Corner2DArrayLike, Corner2DBatch, Corner2DLike, Corner2DType +from .filter_by_event import FilterByEvent, FilterByEventBatch, FilterByEventType +from .filter_by_range import FilterByRange, FilterByRangeBatch, FilterByRangeType from .grid_columns import GridColumns, GridColumnsBatch, GridColumnsType from .included_content import IncludedContent, IncludedContentBatch, IncludedContentType from .included_space_view import IncludedSpaceView, IncludedSpaceViewBatch, IncludedSpaceViewType @@ -50,6 +53,7 @@ from .query_kind import QueryKind, QueryKindArrayLike, QueryKindBatch, QueryKindLike, QueryKindType from .root_container import RootContainer, RootContainerBatch, RootContainerType from .row_share import RowShare, RowShareBatch, RowShareType +from .selected_columns import SelectedColumns, SelectedColumnsBatch, SelectedColumnsType from .space_view_class import SpaceViewClass, SpaceViewClassBatch, SpaceViewClassType from .space_view_maximized import SpaceViewMaximized, SpaceViewMaximizedBatch, SpaceViewMaximizedType from .space_view_origin import SpaceViewOrigin, SpaceViewOriginBatch, SpaceViewOriginType @@ -81,6 
+85,9 @@ "ActiveTab", "ActiveTabBatch", "ActiveTabType", + "ApplyLatestAt", + "ApplyLatestAtBatch", + "ApplyLatestAtType", "AutoLayout", "AutoLayoutBatch", "AutoLayoutType", @@ -113,6 +120,12 @@ "Corner2DBatch", "Corner2DLike", "Corner2DType", + "FilterByEvent", + "FilterByEventBatch", + "FilterByEventType", + "FilterByRange", + "FilterByRangeBatch", + "FilterByRangeType", "GridColumns", "GridColumnsBatch", "GridColumnsType", @@ -152,6 +165,9 @@ "RowShare", "RowShareBatch", "RowShareType", + "SelectedColumns", + "SelectedColumnsBatch", + "SelectedColumnsType", "SpaceViewClass", "SpaceViewClassBatch", "SpaceViewClassType", diff --git a/rerun_py/rerun_sdk/rerun/blueprint/components/apply_latest_at.py b/rerun_py/rerun_sdk/rerun/blueprint/components/apply_latest_at.py new file mode 100644 index 0000000000000..6de796f747986 --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/blueprint/components/apply_latest_at.py @@ -0,0 +1,36 @@ +# DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/python/mod.rs +# Based on "crates/store/re_types/definitions/rerun/blueprint/components/apply_latest_at.fbs". + +# You can extend this class by creating a "ApplyLatestAtExt" class in "apply_latest_at_ext.py". + +from __future__ import annotations + +from ... 
import datatypes +from ..._baseclasses import ( + ComponentBatchMixin, + ComponentMixin, +) + +__all__ = ["ApplyLatestAt", "ApplyLatestAtBatch", "ApplyLatestAtType"] + + +class ApplyLatestAt(datatypes.Bool, ComponentMixin): + """**Component**: Whether empty cells in a dataframe should be filled with a latest-at query.""" + + _BATCH_TYPE = None + # You can define your own __init__ function as a member of ApplyLatestAtExt in apply_latest_at_ext.py + + # Note: there are no fields here because ApplyLatestAt delegates to datatypes.Bool + pass + + +class ApplyLatestAtType(datatypes.BoolType): + _TYPE_NAME: str = "rerun.blueprint.components.ApplyLatestAt" + + +class ApplyLatestAtBatch(datatypes.BoolBatch, ComponentBatchMixin): + _ARROW_TYPE = ApplyLatestAtType() + + +# This is patched in late to avoid circular dependencies. +ApplyLatestAt._BATCH_TYPE = ApplyLatestAtBatch # type: ignore[assignment] diff --git a/rerun_py/rerun_sdk/rerun/blueprint/components/filter_by_event.py b/rerun_py/rerun_sdk/rerun/blueprint/components/filter_by_event.py new file mode 100644 index 0000000000000..bc4e0c954d737 --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/blueprint/components/filter_by_event.py @@ -0,0 +1,36 @@ +# DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/python/mod.rs +# Based on "crates/store/re_types/definitions/rerun/blueprint/components/filter_by_event.fbs". + +# You can extend this class by creating a "FilterByEventExt" class in "filter_by_event_ext.py". 
+ +from __future__ import annotations + +from ..._baseclasses import ( + ComponentBatchMixin, + ComponentMixin, +) +from ...blueprint import datatypes as blueprint_datatypes + +__all__ = ["FilterByEvent", "FilterByEventBatch", "FilterByEventType"] + + +class FilterByEvent(blueprint_datatypes.FilterByEvent, ComponentMixin): + """**Component**: Configuration for the filter-by-event feature of the dataframe view.""" + + _BATCH_TYPE = None + # You can define your own __init__ function as a member of FilterByEventExt in filter_by_event_ext.py + + # Note: there are no fields here because FilterByEvent delegates to datatypes.FilterByEvent + pass + + +class FilterByEventType(blueprint_datatypes.FilterByEventType): + _TYPE_NAME: str = "rerun.blueprint.components.FilterByEvent" + + +class FilterByEventBatch(blueprint_datatypes.FilterByEventBatch, ComponentBatchMixin): + _ARROW_TYPE = FilterByEventType() + + +# This is patched in late to avoid circular dependencies. +FilterByEvent._BATCH_TYPE = FilterByEventBatch # type: ignore[assignment] diff --git a/rerun_py/rerun_sdk/rerun/blueprint/components/filter_by_range.py b/rerun_py/rerun_sdk/rerun/blueprint/components/filter_by_range.py new file mode 100644 index 0000000000000..b27161a7216d1 --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/blueprint/components/filter_by_range.py @@ -0,0 +1,36 @@ +# DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/python/mod.rs +# Based on "crates/store/re_types/definitions/rerun/blueprint/components/filter_by_range.fbs". + +# You can extend this class by creating a "FilterByRangeExt" class in "filter_by_range_ext.py". 
+ +from __future__ import annotations + +from ..._baseclasses import ( + ComponentBatchMixin, + ComponentMixin, +) +from ...blueprint import datatypes as blueprint_datatypes + +__all__ = ["FilterByRange", "FilterByRangeBatch", "FilterByRangeType"] + + +class FilterByRange(blueprint_datatypes.FilterByRange, ComponentMixin): + """**Component**: Configuration for a filter-by-range feature of the dataframe view.""" + + _BATCH_TYPE = None + # You can define your own __init__ function as a member of FilterByRangeExt in filter_by_range_ext.py + + # Note: there are no fields here because FilterByRange delegates to datatypes.FilterByRange + pass + + +class FilterByRangeType(blueprint_datatypes.FilterByRangeType): + _TYPE_NAME: str = "rerun.blueprint.components.FilterByRange" + + +class FilterByRangeBatch(blueprint_datatypes.FilterByRangeBatch, ComponentBatchMixin): + _ARROW_TYPE = FilterByRangeType() + + +# This is patched in late to avoid circular dependencies. +FilterByRange._BATCH_TYPE = FilterByRangeBatch # type: ignore[assignment] diff --git a/rerun_py/rerun_sdk/rerun/blueprint/components/selected_columns.py b/rerun_py/rerun_sdk/rerun/blueprint/components/selected_columns.py new file mode 100644 index 0000000000000..d40f45875753f --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/blueprint/components/selected_columns.py @@ -0,0 +1,36 @@ +# DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/python/mod.rs +# Based on "crates/store/re_types/definitions/rerun/blueprint/components/selected_columns.fbs". + +# You can extend this class by creating a "SelectedColumnsExt" class in "selected_columns_ext.py". 
+ +from __future__ import annotations + +from ..._baseclasses import ( + ComponentBatchMixin, + ComponentMixin, +) +from ...blueprint import datatypes as blueprint_datatypes + +__all__ = ["SelectedColumns", "SelectedColumnsBatch", "SelectedColumnsType"] + + +class SelectedColumns(blueprint_datatypes.SelectedColumns, ComponentMixin): + """**Component**: Describe a component column to be selected in the dataframe view.""" + + _BATCH_TYPE = None + # You can define your own __init__ function as a member of SelectedColumnsExt in selected_columns_ext.py + + # Note: there are no fields here because SelectedColumns delegates to datatypes.SelectedColumns + pass + + +class SelectedColumnsType(blueprint_datatypes.SelectedColumnsType): + _TYPE_NAME: str = "rerun.blueprint.components.SelectedColumns" + + +class SelectedColumnsBatch(blueprint_datatypes.SelectedColumnsBatch, ComponentBatchMixin): + _ARROW_TYPE = SelectedColumnsType() + + +# This is patched in late to avoid circular dependencies. +SelectedColumns._BATCH_TYPE = SelectedColumnsBatch # type: ignore[assignment] diff --git a/rerun_py/rerun_sdk/rerun/blueprint/datatypes/.gitattributes b/rerun_py/rerun_sdk/rerun/blueprint/datatypes/.gitattributes index 214121c56e574..e85f1bd4abca8 100644 --- a/rerun_py/rerun_sdk/rerun/blueprint/datatypes/.gitattributes +++ b/rerun_py/rerun_sdk/rerun/blueprint/datatypes/.gitattributes @@ -3,7 +3,10 @@ .gitattributes linguist-generated=true __init__.py linguist-generated=true component_column_selector.py linguist-generated=true +filter_by_event.py linguist-generated=true +filter_by_range.py linguist-generated=true latest_at_query.py linguist-generated=true +selected_columns.py linguist-generated=true tensor_dimension_index_slider.py linguist-generated=true time_range_query.py linguist-generated=true utf8list.py linguist-generated=true diff --git a/rerun_py/rerun_sdk/rerun/blueprint/datatypes/__init__.py b/rerun_py/rerun_sdk/rerun/blueprint/datatypes/__init__.py index 
1d35b87a92752..9c2c2298e586b 100644 --- a/rerun_py/rerun_sdk/rerun/blueprint/datatypes/__init__.py +++ b/rerun_py/rerun_sdk/rerun/blueprint/datatypes/__init__.py @@ -9,6 +9,20 @@ ComponentColumnSelectorLike, ComponentColumnSelectorType, ) +from .filter_by_event import ( + FilterByEvent, + FilterByEventArrayLike, + FilterByEventBatch, + FilterByEventLike, + FilterByEventType, +) +from .filter_by_range import ( + FilterByRange, + FilterByRangeArrayLike, + FilterByRangeBatch, + FilterByRangeLike, + FilterByRangeType, +) from .latest_at_query import ( LatestAtQuery, LatestAtQueryArrayLike, @@ -16,6 +30,13 @@ LatestAtQueryLike, LatestAtQueryType, ) +from .selected_columns import ( + SelectedColumns, + SelectedColumnsArrayLike, + SelectedColumnsBatch, + SelectedColumnsLike, + SelectedColumnsType, +) from .tensor_dimension_index_slider import ( TensorDimensionIndexSlider, TensorDimensionIndexSliderArrayLike, @@ -38,11 +59,26 @@ "ComponentColumnSelectorBatch", "ComponentColumnSelectorLike", "ComponentColumnSelectorType", + "FilterByEvent", + "FilterByEventArrayLike", + "FilterByEventBatch", + "FilterByEventLike", + "FilterByEventType", + "FilterByRange", + "FilterByRangeArrayLike", + "FilterByRangeBatch", + "FilterByRangeLike", + "FilterByRangeType", "LatestAtQuery", "LatestAtQueryArrayLike", "LatestAtQueryBatch", "LatestAtQueryLike", "LatestAtQueryType", + "SelectedColumns", + "SelectedColumnsArrayLike", + "SelectedColumnsBatch", + "SelectedColumnsLike", + "SelectedColumnsType", "TensorDimensionIndexSlider", "TensorDimensionIndexSliderArrayLike", "TensorDimensionIndexSliderBatch", diff --git a/rerun_py/rerun_sdk/rerun/blueprint/datatypes/filter_by_event.py b/rerun_py/rerun_sdk/rerun/blueprint/datatypes/filter_by_event.py new file mode 100644 index 0000000000000..a25367a4aa59d --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/blueprint/datatypes/filter_by_event.py @@ -0,0 +1,107 @@ +# DO NOT EDIT! 
This file was auto-generated by crates/build/re_types_builder/src/codegen/python/mod.rs +# Based on "crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_event.fbs". + +# You can extend this class by creating a "FilterByEventExt" class in "filter_by_event_ext.py". + +from __future__ import annotations + +from typing import Any, Sequence, Union + +import pyarrow as pa +from attrs import define, field + +from ... import datatypes +from ..._baseclasses import ( + BaseBatch, + BaseExtensionType, +) +from ...blueprint import datatypes as blueprint_datatypes + +__all__ = ["FilterByEvent", "FilterByEventArrayLike", "FilterByEventBatch", "FilterByEventLike", "FilterByEventType"] + + +def _filter_by_event__active__special_field_converter_override(x: datatypes.BoolLike) -> datatypes.Bool: + if isinstance(x, datatypes.Bool): + return x + else: + return datatypes.Bool(x) + + +@define(init=False) +class FilterByEvent: + """**Datatype**: Configuration for the filter by event feature of the dataframe view.""" + + def __init__(self: Any, active: datatypes.BoolLike, column: blueprint_datatypes.ComponentColumnSelectorLike): + """ + Create a new instance of the FilterByEvent datatype. + + Parameters + ---------- + active: + Whether the filter by event feature is active. + column: + The column used when the filter by event feature is used. + + """ + + # You can define your own __init__ function as a member of FilterByEventExt in filter_by_event_ext.py + self.__attrs_init__(active=active, column=column) + + active: datatypes.Bool = field(converter=_filter_by_event__active__special_field_converter_override) + # Whether the filter by event feature is active. + # + # (Docstring intentionally commented out to hide this field from the docs) + + column: blueprint_datatypes.ComponentColumnSelector = field() + # The column used when the filter by event feature is used. 
+ # + # (Docstring intentionally commented out to hide this field from the docs) + + +FilterByEventLike = FilterByEvent +FilterByEventArrayLike = Union[ + FilterByEvent, + Sequence[FilterByEventLike], +] + + +class FilterByEventType(BaseExtensionType): + _TYPE_NAME: str = "rerun.blueprint.datatypes.FilterByEvent" + + def __init__(self) -> None: + pa.ExtensionType.__init__( + self, + pa.struct([ + pa.field("active", pa.bool_(), nullable=False, metadata={}), + pa.field( + "column", + pa.struct([ + pa.field("entity_path", pa.utf8(), nullable=False, metadata={}), + pa.field("component", pa.utf8(), nullable=False, metadata={}), + ]), + nullable=False, + metadata={}, + ), + ]), + self._TYPE_NAME, + ) + + +class FilterByEventBatch(BaseBatch[FilterByEventArrayLike]): + _ARROW_TYPE = FilterByEventType() + + @staticmethod + def _native_to_pa_array(data: FilterByEventArrayLike, data_type: pa.DataType) -> pa.Array: + from rerun.blueprint.datatypes import ComponentColumnSelectorBatch + from rerun.datatypes import BoolBatch + + if isinstance(data, FilterByEvent): + data = [data] + + return pa.StructArray.from_arrays( + [ + BoolBatch([x.active for x in data]).as_arrow_array().storage, # type: ignore[misc, arg-type] + ComponentColumnSelectorBatch([x.column for x in data]).as_arrow_array().storage, # type: ignore[misc, arg-type] + ], + fields=list(data_type), + ) diff --git a/rerun_py/rerun_sdk/rerun/blueprint/datatypes/filter_by_range.py b/rerun_py/rerun_sdk/rerun/blueprint/datatypes/filter_by_range.py new file mode 100644 index 0000000000000..e357b1ae2b7d4 --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/blueprint/datatypes/filter_by_range.py @@ -0,0 +1,95 @@ +# DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/python/mod.rs +# Based on "crates/store/re_types/definitions/rerun/blueprint/datatypes/filter_by_range.fbs". + +# You can extend this class by creating a "FilterByRangeExt" class in "filter_by_range_ext.py". 
+ +from __future__ import annotations + +from typing import Any, Sequence, Union + +import pyarrow as pa +from attrs import define, field + +from ... import datatypes +from ..._baseclasses import ( + BaseBatch, + BaseExtensionType, +) +from .filter_by_range_ext import FilterByRangeExt + +__all__ = ["FilterByRange", "FilterByRangeArrayLike", "FilterByRangeBatch", "FilterByRangeLike", "FilterByRangeType"] + + +@define(init=False) +class FilterByRange(FilterByRangeExt): + """**Datatype**: Configuration for the filter-by-range feature of the dataframe view.""" + + def __init__(self: Any, start: datatypes.TimeIntLike, end: datatypes.TimeIntLike): + """ + Create a new instance of the FilterByRange datatype. + + Parameters + ---------- + start: + Beginning of the time range. + end: + End of the time range (inclusive). + + """ + + # You can define your own __init__ function as a member of FilterByRangeExt in filter_by_range_ext.py + self.__attrs_init__(start=start, end=end) + + start: datatypes.TimeInt = field( + converter=FilterByRangeExt.start__field_converter_override, # type: ignore[misc] + ) + # Beginning of the time range. + # + # (Docstring intentionally commented out to hide this field from the docs) + + end: datatypes.TimeInt = field( + converter=FilterByRangeExt.end__field_converter_override, # type: ignore[misc] + ) + # End of the time range (inclusive). 
+ # + # (Docstring intentionally commented out to hide this field from the docs) + + +FilterByRangeLike = FilterByRange +FilterByRangeArrayLike = Union[ + FilterByRange, + Sequence[FilterByRangeLike], +] + + +class FilterByRangeType(BaseExtensionType): + _TYPE_NAME: str = "rerun.blueprint.datatypes.FilterByRange" + + def __init__(self) -> None: + pa.ExtensionType.__init__( + self, + pa.struct([ + pa.field("start", pa.int64(), nullable=False, metadata={}), + pa.field("end", pa.int64(), nullable=False, metadata={}), + ]), + self._TYPE_NAME, + ) + + +class FilterByRangeBatch(BaseBatch[FilterByRangeArrayLike]): + _ARROW_TYPE = FilterByRangeType() + + @staticmethod + def _native_to_pa_array(data: FilterByRangeArrayLike, data_type: pa.DataType) -> pa.Array: + from rerun.datatypes import TimeIntBatch + + if isinstance(data, FilterByRange): + data = [data] + + return pa.StructArray.from_arrays( + [ + TimeIntBatch([x.start for x in data]).as_arrow_array().storage, # type: ignore[misc, arg-type] + TimeIntBatch([x.end for x in data]).as_arrow_array().storage, # type: ignore[misc, arg-type] + ], + fields=list(data_type), + ) diff --git a/rerun_py/rerun_sdk/rerun/blueprint/datatypes/filter_by_range_ext.py b/rerun_py/rerun_sdk/rerun/blueprint/datatypes/filter_by_range_ext.py new file mode 100644 index 0000000000000..8a0a8d3571178 --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/blueprint/datatypes/filter_by_range_ext.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from ... import datatypes + + +class FilterByRangeExt: + """Extension for [FilterByRange][rerun.blueprint.datatypes.FilterByRange].""" + + # These overrides are required because otherwise the codegen uses `TimeInt(x)`, which is not valid with the custom + # `TimeInt.__init__` override. 
+ + @staticmethod + def start__field_converter_override(x: datatypes.TimeIntLike) -> datatypes.TimeInt: + if isinstance(x, datatypes.TimeInt): + return x + else: + return datatypes.TimeInt(seq=x) + + @staticmethod + def end__field_converter_override(x: datatypes.TimeIntLike) -> datatypes.TimeInt: + if isinstance(x, datatypes.TimeInt): + return x + else: + return datatypes.TimeInt(seq=x) diff --git a/rerun_py/rerun_sdk/rerun/blueprint/datatypes/selected_columns.py b/rerun_py/rerun_sdk/rerun/blueprint/datatypes/selected_columns.py new file mode 100644 index 0000000000000..3ddca3bf1e114 --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/blueprint/datatypes/selected_columns.py @@ -0,0 +1,112 @@ +# DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/python/mod.rs +# Based on "crates/store/re_types/definitions/rerun/blueprint/datatypes/selected_columns.fbs". + +# You can extend this class by creating a "SelectedColumnsExt" class in "selected_columns_ext.py". + +from __future__ import annotations + +from typing import Any, Sequence, Union + +import pyarrow as pa +from attrs import define, field + +from ... import datatypes +from ..._baseclasses import ( + BaseBatch, + BaseExtensionType, +) +from ...blueprint import datatypes as blueprint_datatypes + +__all__ = [ + "SelectedColumns", + "SelectedColumnsArrayLike", + "SelectedColumnsBatch", + "SelectedColumnsLike", + "SelectedColumnsType", +] + + +@define(init=False) +class SelectedColumns: + """**Datatype**: List of selected columns in a dataframe.""" + + def __init__( + self: Any, + time_columns: datatypes.Utf8ArrayLike, + component_columns: blueprint_datatypes.ComponentColumnSelectorArrayLike, + ): + """ + Create a new instance of the SelectedColumns datatype. 
+ + Parameters + ---------- + time_columns: + The time columns to include + component_columns: + The component columns to include + + """ + + # You can define your own __init__ function as a member of SelectedColumnsExt in selected_columns_ext.py + self.__attrs_init__(time_columns=time_columns, component_columns=component_columns) + + time_columns: list[datatypes.Utf8] = field() + # The time columns to include + # + # (Docstring intentionally commented out to hide this field from the docs) + + component_columns: list[blueprint_datatypes.ComponentColumnSelector] = field() + # The component columns to include + # + # (Docstring intentionally commented out to hide this field from the docs) + + +SelectedColumnsLike = SelectedColumns +SelectedColumnsArrayLike = Union[ + SelectedColumns, + Sequence[SelectedColumnsLike], +] + + +class SelectedColumnsType(BaseExtensionType): + _TYPE_NAME: str = "rerun.blueprint.datatypes.SelectedColumns" + + def __init__(self) -> None: + pa.ExtensionType.__init__( + self, + pa.struct([ + pa.field( + "time_columns", + pa.list_(pa.field("item", pa.utf8(), nullable=False, metadata={})), + nullable=False, + metadata={}, + ), + pa.field( + "component_columns", + pa.list_( + pa.field( + "item", + pa.struct([ + pa.field("entity_path", pa.utf8(), nullable=False, metadata={}), + pa.field("component", pa.utf8(), nullable=False, metadata={}), + ]), + nullable=False, + metadata={}, + ) + ), + nullable=False, + metadata={}, + ), + ]), + self._TYPE_NAME, + ) + + +class SelectedColumnsBatch(BaseBatch[SelectedColumnsArrayLike]): + _ARROW_TYPE = SelectedColumnsType() + + @staticmethod + def _native_to_pa_array(data: SelectedColumnsArrayLike, data_type: pa.DataType) -> pa.Array: + raise NotImplementedError( + "Arrow serialization of SelectedColumns not implemented: We lack codegen for arrow-serialization of general structs" + ) # You need to implement native_to_pa_array_override in selected_columns_ext.py diff --git 
a/rerun_py/rerun_sdk/rerun/blueprint/views/.gitattributes b/rerun_py/rerun_sdk/rerun/blueprint/views/.gitattributes index 7b2ef874ad4df..75df45e0a8700 100644 --- a/rerun_py/rerun_sdk/rerun/blueprint/views/.gitattributes +++ b/rerun_py/rerun_sdk/rerun/blueprint/views/.gitattributes @@ -3,6 +3,7 @@ .gitattributes linguist-generated=true __init__.py linguist-generated=true bar_chart_view.py linguist-generated=true +dataframe_view.py linguist-generated=true spatial2d_view.py linguist-generated=true spatial3d_view.py linguist-generated=true tensor_view.py linguist-generated=true diff --git a/rerun_py/rerun_sdk/rerun/blueprint/views/__init__.py b/rerun_py/rerun_sdk/rerun/blueprint/views/__init__.py index 6a71b704e3800..c2aee9b899421 100644 --- a/rerun_py/rerun_sdk/rerun/blueprint/views/__init__.py +++ b/rerun_py/rerun_sdk/rerun/blueprint/views/__init__.py @@ -3,6 +3,7 @@ from __future__ import annotations from .bar_chart_view import BarChartView +from .dataframe_view import DataframeView from .spatial2d_view import Spatial2DView from .spatial3d_view import Spatial3DView from .tensor_view import TensorView @@ -12,6 +13,7 @@ __all__ = [ "BarChartView", + "DataframeView", "Spatial2DView", "Spatial3DView", "TensorView", diff --git a/rerun_py/rerun_sdk/rerun/blueprint/views/dataframe_view.py b/rerun_py/rerun_sdk/rerun/blueprint/views/dataframe_view.py new file mode 100644 index 0000000000000..2887156f9e064 --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/blueprint/views/dataframe_view.py @@ -0,0 +1,120 @@ +# DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/python/mod.rs +# Based on "crates/store/re_types/definitions/rerun/blueprint/views/dataframe.fbs". + +from __future__ import annotations + +from typing import Union + +__all__ = ["DataframeView"] + + +from ... import datatypes +from ..._baseclasses import AsComponents, ComponentBatchLike +from ...datatypes import EntityPathLike, Utf8Like +from .. 
import archetypes as blueprint_archetypes +from ..api import SpaceView, SpaceViewContentsLike + + +class DataframeView(SpaceView): + """ + **View**: A view to display any data in a tabular form. + + Any data from the store can be shown, using a flexibly, user-configurable query. + + Example + ------- + ### Use a blueprint to customize a DataframeView.: + ```python + import math + + import rerun as rr + import rerun.blueprint as rrb + + rr.init("rerun_example_dataframe", spawn=True) + + # Log some data. + rr.log("trig/sin", rr.SeriesLine(color=[255, 0, 0], name="sin(0.01t)"), static=True) + rr.log("trig/cos", rr.SeriesLine(color=[0, 255, 0], name="cos(0.01t)"), static=True) + for t in range(0, int(math.pi * 4 * 100.0)): + rr.set_time_seconds("t", t) + rr.log("trig/sin", rr.Scalar(math.sin(float(t) / 100.0))) + rr.log("trig/cos", rr.Scalar(math.cos(float(t) / 100.0))) + + # Create a Dataframe View + blueprint = rrb.Blueprint( + rrb.DataframeView( + origin="/trig", + # TODO(#6896): improve `DataframeQueryV2` API and showcase more features + query=rrb.archetypes.DataframeQueryV2( + timeline="t", + range_filter=rrb.components.RangeFilter(start=rr.TimeInt(seconds=0), end=rr.TimeInt(seconds=20)), + ), + ), + ) + + rr.send_blueprint(blueprint) + ``` + + """ + + def __init__( + self, + *, + origin: EntityPathLike = "/", + contents: SpaceViewContentsLike = "$origin/**", + name: Utf8Like | None = None, + visible: datatypes.BoolLike | None = None, + defaults: list[Union[AsComponents, ComponentBatchLike]] = [], + overrides: dict[EntityPathLike, list[ComponentBatchLike]] = {}, + query: blueprint_archetypes.DataframeQueryV2 | None = None, + ) -> None: + """ + Construct a blueprint for a new DataframeView view. + + Parameters + ---------- + origin: + The `EntityPath` to use as the origin of this view. + All other entities will be transformed to be displayed relative to this origin. + contents: + The contents of the view specified as a query expression. 
+ This is either a single expression, or a list of multiple expressions. + See [rerun.blueprint.archetypes.SpaceViewContents][]. + name: + The display name of the view. + visible: + Whether this view is visible. + + Defaults to true if not specified. + defaults: + List of default components or component batches to add to the space view. When an archetype + in the view is missing a component included in this set, the value of default will be used + instead of the normal fallback for the visualizer. + overrides: + Dictionary of overrides to apply to the space view. The key is the path to the entity where the override + should be applied. The value is a list of component or component batches to apply to the entity. + + Important note: the path must be a fully qualified entity path starting at the root. The override paths + do not yet support `$origin` relative paths or glob expressions. + This will be addressed in . + query: + Query of the dataframe. + + """ + + properties: dict[str, AsComponents] = {} + if query is not None: + if not isinstance(query, blueprint_archetypes.DataframeQueryV2): + query = blueprint_archetypes.DataframeQueryV2(query) + properties["DataframeQueryV2"] = query + + super().__init__( + class_identifier="Dataframe", + origin=origin, + contents=contents, + name=name, + visible=visible, + properties=properties, + defaults=defaults, + overrides=overrides, + ) diff --git a/rerun_py/rerun_sdk/rerun/components/.gitattributes b/rerun_py/rerun_sdk/rerun/components/.gitattributes index bb7b80d1486c3..6541e94fa6ebb 100644 --- a/rerun_py/rerun_sdk/rerun/components/.gitattributes +++ b/rerun_py/rerun_sdk/rerun/components/.gitattributes @@ -60,6 +60,7 @@ transform_mat3x3.py linguist-generated=true transform_relation.py linguist-generated=true translation3d.py linguist-generated=true triangle_indices.py linguist-generated=true +value_range.py linguist-generated=true vector2d.py linguist-generated=true vector3d.py linguist-generated=true video_timestamp.py 
linguist-generated=true diff --git a/rerun_py/rerun_sdk/rerun/components/__init__.py b/rerun_py/rerun_sdk/rerun/components/__init__.py index 8855fcf59d3c4..7fa9bf8b8c02c 100644 --- a/rerun_py/rerun_sdk/rerun/components/__init__.py +++ b/rerun_py/rerun_sdk/rerun/components/__init__.py @@ -88,6 +88,7 @@ ) from .translation3d import Translation3D, Translation3DBatch, Translation3DType from .triangle_indices import TriangleIndices, TriangleIndicesBatch, TriangleIndicesType +from .value_range import ValueRange, ValueRangeBatch, ValueRangeType from .vector2d import Vector2D, Vector2DBatch, Vector2DType from .vector3d import Vector3D, Vector3DBatch, Vector3DType from .video_timestamp import VideoTimestamp, VideoTimestampBatch, VideoTimestampType @@ -286,6 +287,9 @@ "TriangleIndices", "TriangleIndicesBatch", "TriangleIndicesType", + "ValueRange", + "ValueRangeBatch", + "ValueRangeType", "Vector2D", "Vector2DBatch", "Vector2DType", diff --git a/rerun_py/rerun_sdk/rerun/components/value_range.py b/rerun_py/rerun_sdk/rerun/components/value_range.py new file mode 100644 index 0000000000000..b45afdf2ec1e3 --- /dev/null +++ b/rerun_py/rerun_sdk/rerun/components/value_range.py @@ -0,0 +1,36 @@ +# DO NOT EDIT! This file was auto-generated by crates/build/re_types_builder/src/codegen/python/mod.rs +# Based on "crates/store/re_types/definitions/rerun/components/value_range.fbs". + +# You can extend this class by creating a "ValueRangeExt" class in "value_range_ext.py". + +from __future__ import annotations + +from .. 
import datatypes +from .._baseclasses import ( + ComponentBatchMixin, + ComponentMixin, +) + +__all__ = ["ValueRange", "ValueRangeBatch", "ValueRangeType"] + + +class ValueRange(datatypes.Range1D, ComponentMixin): + """**Component**: Range of expected or valid values, specifying a lower and upper bound.""" + + _BATCH_TYPE = None + # You can define your own __init__ function as a member of ValueRangeExt in value_range_ext.py + + # Note: there are no fields here because ValueRange delegates to datatypes.Range1D + pass + + +class ValueRangeType(datatypes.Range1DType): + _TYPE_NAME: str = "rerun.components.ValueRange" + + +class ValueRangeBatch(datatypes.Range1DBatch, ComponentBatchMixin): + _ARROW_TYPE = ValueRangeType() + + +# This is patched in late to avoid circular dependencies. +ValueRange._BATCH_TYPE = ValueRangeBatch # type: ignore[assignment] diff --git a/rerun_py/rerun_sdk/rerun/datatypes/pixel_format.py b/rerun_py/rerun_sdk/rerun/datatypes/pixel_format.py index 893439a25ebee..872f076901b5d 100644 --- a/rerun_py/rerun_sdk/rerun/datatypes/pixel_format.py +++ b/rerun_py/rerun_sdk/rerun/datatypes/pixel_format.py @@ -37,7 +37,7 @@ class PixelFormat(Enum): NV12 = 26 """ - NV12 (aka Y_UV12) is a YUV 4:2:0 chroma downsampled format with 12 bits per pixel and 8 bits per channel. + `NV12` (aka `Y_UV12`) is a YUV 4:2:0 chroma downsampled format with 12 bits per pixel and 8 bits per channel. First comes entire image in Y in one plane, followed by a plane with interleaved lines ordered as U0, V0, U1, V1, etc. @@ -45,7 +45,7 @@ class PixelFormat(Enum): YUY2 = 27 """ - YUY2 (aka YUYV or YUYV16), is a YUV 4:2:2 chroma downsampled format with 16 bits per pixel and 8 bits per channel. + `YUY2` (aka `YUYV` or `YUYV16`), is a YUV 4:2:2 chroma downsampled format with 16 bits per pixel and 8 bits per channel. The order of the channels is Y0, U0, Y1, V0, all in the same plane. 
""" diff --git a/rerun_py/tests/unit/test_depth_image.py b/rerun_py/tests/unit/test_depth_image.py index 14221c96ea40e..3413901fc4013 100644 --- a/rerun_py/tests/unit/test_depth_image.py +++ b/rerun_py/tests/unit/test_depth_image.py @@ -1,13 +1,14 @@ from __future__ import annotations +import itertools from typing import Any import numpy as np import pytest import rerun as rr import torch -from rerun.components import DepthMeter -from rerun.datatypes import Float32Like +from rerun.components import DepthMeter, ImageFormat +from rerun.datatypes import ChannelDatatype, Float32Like rng = np.random.default_rng(12345) RANDOM_IMAGE_SOURCE = rng.uniform(0.0, 1.0, (10, 20)) @@ -25,13 +26,32 @@ def depth_image_expected() -> Any: return rr.DepthImage(RANDOM_IMAGE_SOURCE, meter=1000) -def test_image() -> None: - expected = depth_image_expected() - - for img, meter in zip(IMAGE_INPUTS, METER_INPUTS): - arch = rr.DepthImage(img, meter=meter) - - assert arch == expected +def test_depth_image() -> None: + ranges = [None, [0.0, 1.0], (1000, 1000)] + + for img, meter, depth_range in itertools.zip_longest(IMAGE_INPUTS, METER_INPUTS, ranges): + if img is None: + img = IMAGE_INPUTS[0] + + print( + f"rr.DepthImage(\n" # + f" {img}\n" + f" meter={meter!r}\n" + f" depth_range={depth_range!r}\n" + f")" + ) + arch = rr.DepthImage(img, meter=meter, depth_range=depth_range) + + assert arch.buffer == rr.components.ImageBufferBatch._optional(img.tobytes()) + assert arch.format == rr.components.ImageFormatBatch._optional( + ImageFormat( + width=img.shape[1], + height=img.shape[0], + channel_datatype=ChannelDatatype.from_np_dtype(img.dtype), + ) + ) + assert arch.meter == rr.components.DepthMeterBatch._optional(meter) + assert arch.depth_range == rr.components.ValueRangeBatch._optional(depth_range) GOOD_IMAGE_INPUTS: list[Any] = [ diff --git a/scripts/clippy_wasm/clippy.toml b/scripts/clippy_wasm/clippy.toml index 25ce151e60ce8..81d0fde1d1e2f 100644 --- a/scripts/clippy_wasm/clippy.toml +++ 
b/scripts/clippy_wasm/clippy.toml @@ -6,7 +6,7 @@ # ----------------------------------------------------------------------------- # Section identical to the main clippy.toml: -msrv = "1.76" +msrv = "1.79" allow-unwrap-in-tests = true @@ -62,6 +62,8 @@ doc-valid-idents = [ "GLTF", "iOS", "macOS", + "MessagePack", + "MiMalloc", "NaN", "OBJ", "OpenGL", @@ -69,6 +71,7 @@ doc-valid-idents = [ "sRGB", "sRGBA", "WebGL", + "WebGPU", "WebSocket", "WebSockets", ] diff --git a/scripts/lint.py b/scripts/lint.py index 695d12b6bfc67..c5d7edce26c10 100755 --- a/scripts/lint.py +++ b/scripts/lint.py @@ -599,7 +599,7 @@ def test_lint_workspace_deps() -> None: name = "clock" version = "0.6.0-alpha.0" edition = "2021" - rust-version = "1.76" + rust-version = "1.79" license = "MIT OR Apache-2.0" publish = false diff --git a/tests/python/release_checklist/check_all_components_ui.py b/tests/python/release_checklist/check_all_components_ui.py index af638f024364b..9d2f17ad221d3 100644 --- a/tests/python/release_checklist/check_all_components_ui.py +++ b/tests/python/release_checklist/check_all_components_ui.py @@ -216,6 +216,7 @@ def alternatives(self) -> list[Any] | None: ), "Translation3DBatch": TestCase(batch=[(1, 2, 3), (4, 5, 6), (7, 8, 9)]), "TriangleIndicesBatch": TestCase(batch=[(0, 1, 2), (3, 4, 5), (6, 7, 8)]), + "ValueRangeBatch": TestCase((0, 5)), "Vector2DBatch": TestCase(batch=[(0, 1), (2, 3), (4, 5)]), "Vector3DBatch": TestCase(batch=[(0, 3, 4), (1, 4, 5), (2, 5, 6)]), "VideoTimestampBatch": TestCase(rr.components.VideoTimestamp(seconds=0.0)),