diff --git a/crates/re_space_view/src/blueprint/query_expressions.rs b/crates/re_space_view/src/blueprint/query_expressions.rs index 19e37d15a140..702e052a5f9b 100644 --- a/crates/re_space_view/src/blueprint/query_expressions.rs +++ b/crates/re_space_view/src/blueprint/query_expressions.rs @@ -26,22 +26,11 @@ use ::re_types_core::{DeserializationError, DeserializationResult}; /// Unstable. Used for the ongoing blueprint experimentations. #[derive(Clone, Debug, PartialEq, Eq)] pub struct QueryExpressions { - /// A set of strings that can be parsed as `EntityPathExpression`s. - pub expressions: Vec<::re_types_core::ArrowString>, -} - -impl From> for QueryExpressions { - #[inline] - fn from(expressions: Vec<::re_types_core::ArrowString>) -> Self { - Self { expressions } - } -} + /// A set of strings representing `EntityPathExpression`s to be included. + pub inclusions: Vec<::re_types_core::ArrowString>, -impl From for Vec<::re_types_core::ArrowString> { - #[inline] - fn from(value: QueryExpressions) -> Self { - value.expressions - } + /// A set of strings representing `EntityPathExpression`s to be excluded. + pub exclusions: Vec<::re_types_core::ArrowString>, } ::re_types_core::macros::impl_into_cow!(QueryExpressions); @@ -58,17 +47,30 @@ impl ::re_types_core::Loggable for QueryExpressions { #[inline] fn arrow_datatype() -> arrow2::datatypes::DataType { use arrow2::datatypes::*; - DataType::Struct(vec![Field { - name: "expressions".to_owned(), - data_type: DataType::List(Box::new(Field { - name: "item".to_owned(), - data_type: DataType::Utf8, + DataType::Struct(vec![ + Field { + name: "inclusions".to_owned(), + data_type: DataType::List(Box::new(Field { + name: "item".to_owned(), + data_type: DataType::Utf8, + is_nullable: false, + metadata: [].into(), + })), is_nullable: false, metadata: [].into(), - })), - is_nullable: false, - metadata: [].into(), - }]) + }, + Field { + name: "exclusions".to_owned(), + data_type: DataType::List(Box::new(Field { + name: "item".to_owned(), + data_type: DataType::Utf8, + is_nullable: false, + metadata: [].into(), + })), + is_nullable: false, + metadata: [].into(), + }, + ]) } #[allow(clippy::wildcard_imports)] @@ -95,75 +97,152 @@ impl ::re_types_core::Loggable for QueryExpressions { }; StructArray::new( ::arrow_datatype(), - vec![{ - let (somes, expressions): (Vec<_>, Vec<_>) = data - .iter() - .map(|datum| { - let datum = datum.as_ref().map(|datum| { - let Self { expressions, .. 
} = &**datum; - expressions.clone() - }); - (datum.is_some(), datum) - }) - .unzip(); - let expressions_bitmap: Option = { - let any_nones = somes.iter().any(|some| !*some); - any_nones.then(|| somes.into()) - }; + vec![ { - use arrow2::{buffer::Buffer, offset::OffsetsBuffer}; - let expressions_inner_data: Vec<_> = expressions + let (somes, inclusions): (Vec<_>, Vec<_>) = data .iter() - .flatten() - .flatten() - .cloned() - .map(Some) - .collect(); - let expressions_inner_bitmap: Option = None; - let offsets = arrow2::offset::Offsets::::try_from_lengths( - expressions.iter().map(|opt| { - opt.as_ref().map(|datum| datum.len()).unwrap_or_default() - }), - ) - .unwrap() - .into(); - ListArray::new( - DataType::List(Box::new(Field { - name: "item".to_owned(), - data_type: DataType::Utf8, - is_nullable: false, - metadata: [].into(), - })), - offsets, - { - let inner_data: arrow2::buffer::Buffer = expressions_inner_data - .iter() - .flatten() - .flat_map(|s| s.0.clone()) - .collect(); - let offsets = arrow2::offset::Offsets::::try_from_lengths( - expressions_inner_data.iter().map(|opt| { - opt.as_ref().map(|datum| datum.0.len()).unwrap_or_default() - }), - ) - .unwrap() - .into(); - #[allow(unsafe_code, clippy::undocumented_unsafe_blocks)] - unsafe { - Utf8Array::::new_unchecked( - DataType::Utf8, - offsets, - inner_data, - expressions_inner_bitmap, + .map(|datum| { + let datum = datum.as_ref().map(|datum| { + let Self { inclusions, .. } = &**datum; + inclusions.clone() + }); + (datum.is_some(), datum) + }) + .unzip(); + let inclusions_bitmap: Option = { + let any_nones = somes.iter().any(|some| !*some); + any_nones.then(|| somes.into()) + }; + { + use arrow2::{buffer::Buffer, offset::OffsetsBuffer}; + let inclusions_inner_data: Vec<_> = inclusions + .iter() + .flatten() + .flatten() + .cloned() + .map(Some) + .collect(); + let inclusions_inner_bitmap: Option = None; + let offsets = arrow2::offset::Offsets::::try_from_lengths( + inclusions.iter().map(|opt| { + opt.as_ref().map(|datum| datum.len()).unwrap_or_default() + }), + ) + .unwrap() + .into(); + ListArray::new( + DataType::List(Box::new(Field { + name: "item".to_owned(), + data_type: DataType::Utf8, + is_nullable: false, + metadata: [].into(), + })), + offsets, + { + let inner_data: arrow2::buffer::Buffer = + inclusions_inner_data + .iter() + .flatten() + .flat_map(|s| s.0.clone()) + .collect(); + let offsets = arrow2::offset::Offsets::::try_from_lengths( + inclusions_inner_data.iter().map(|opt| { + opt.as_ref() + .map(|datum| datum.0.len()) + .unwrap_or_default() + }), ) - } - .boxed() - }, - expressions_bitmap, - ) - .boxed() - } - }], + .unwrap() + .into(); + #[allow(unsafe_code, clippy::undocumented_unsafe_blocks)] + unsafe { + Utf8Array::::new_unchecked( + DataType::Utf8, + offsets, + inner_data, + inclusions_inner_bitmap, + ) + } + .boxed() + }, + inclusions_bitmap, + ) + .boxed() + } + }, + { + let (somes, exclusions): (Vec<_>, Vec<_>) = data + .iter() + .map(|datum| { + let datum = datum.as_ref().map(|datum| { + let Self { exclusions, .. 
} = &**datum; + exclusions.clone() + }); + (datum.is_some(), datum) + }) + .unzip(); + let exclusions_bitmap: Option = { + let any_nones = somes.iter().any(|some| !*some); + any_nones.then(|| somes.into()) + }; + { + use arrow2::{buffer::Buffer, offset::OffsetsBuffer}; + let exclusions_inner_data: Vec<_> = exclusions + .iter() + .flatten() + .flatten() + .cloned() + .map(Some) + .collect(); + let exclusions_inner_bitmap: Option = None; + let offsets = arrow2::offset::Offsets::::try_from_lengths( + exclusions.iter().map(|opt| { + opt.as_ref().map(|datum| datum.len()).unwrap_or_default() + }), + ) + .unwrap() + .into(); + ListArray::new( + DataType::List(Box::new(Field { + name: "item".to_owned(), + data_type: DataType::Utf8, + is_nullable: false, + metadata: [].into(), + })), + offsets, + { + let inner_data: arrow2::buffer::Buffer = + exclusions_inner_data + .iter() + .flatten() + .flat_map(|s| s.0.clone()) + .collect(); + let offsets = arrow2::offset::Offsets::::try_from_lengths( + exclusions_inner_data.iter().map(|opt| { + opt.as_ref() + .map(|datum| datum.0.len()) + .unwrap_or_default() + }), + ) + .unwrap() + .into(); + #[allow(unsafe_code, clippy::undocumented_unsafe_blocks)] + unsafe { + Utf8Array::::new_unchecked( + DataType::Utf8, + offsets, + inner_data, + exclusions_inner_bitmap, + ) + } + .boxed() + }, + exclusions_bitmap, + ) + .boxed() + } + }, + ], bitmap, ) .boxed() @@ -186,17 +265,30 @@ impl ::re_types_core::Loggable for QueryExpressions { .downcast_ref::() .ok_or_else(|| { DeserializationError::datatype_mismatch( - DataType::Struct(vec![Field { - name: "expressions".to_owned(), - data_type: DataType::List(Box::new(Field { - name: "item".to_owned(), - data_type: DataType::Utf8, + DataType::Struct(vec![ + Field { + name: "inclusions".to_owned(), + data_type: DataType::List(Box::new(Field { + name: "item".to_owned(), + data_type: DataType::Utf8, + is_nullable: false, + metadata: [].into(), + })), is_nullable: false, metadata: [].into(), - })), - is_nullable: false, - metadata: [].into(), - }]), + }, + Field { + name: "exclusions".to_owned(), + data_type: DataType::List(Box::new(Field { + name: "item".to_owned(), + data_type: DataType::Utf8, + is_nullable: false, + metadata: [].into(), + })), + is_nullable: false, + metadata: [].into(), + }, + ]), arrow_data.data_type().clone(), ) }) @@ -211,15 +303,15 @@ impl ::re_types_core::Loggable for QueryExpressions { .map(|field| field.name.as_str()) .zip(arrow_data_arrays) .collect(); - let expressions = { - if !arrays_by_name.contains_key("expressions") { + let inclusions = { + if !arrays_by_name.contains_key("inclusions") { return Err(DeserializationError::missing_struct_field( Self::arrow_datatype(), - "expressions", + "inclusions", )) .with_context("rerun.blueprint.QueryExpressions"); } - let arrow_data = &**arrays_by_name["expressions"]; + let arrow_data = &**arrays_by_name["inclusions"]; { let arrow_data = arrow_data .as_any() @@ -235,7 +327,7 @@ impl ::re_types_core::Loggable for QueryExpressions { arrow_data.data_type().clone(), ) }) - .with_context("rerun.blueprint.QueryExpressions#expressions")?; + .with_context("rerun.blueprint.QueryExpressions#inclusions")?; if arrow_data.is_empty() { Vec::new() } else { @@ -252,7 +344,7 @@ impl ::re_types_core::Loggable for QueryExpressions { ) }) .with_context( - "rerun.blueprint.QueryExpressions#expressions", + "rerun.blueprint.QueryExpressions#inclusions", )?; let arrow_data_inner_buf = arrow_data_inner.values(); let offsets = arrow_data_inner.offsets(); @@ -292,7 +384,127 @@ impl 
::re_types_core::Loggable for QueryExpressions { }) }) .collect::>>>() - .with_context("rerun.blueprint.QueryExpressions#expressions")? + .with_context("rerun.blueprint.QueryExpressions#inclusions")? + .into_iter() + } + .collect::>() + }; + let offsets = arrow_data.offsets(); + arrow2::bitmap::utils::ZipValidity::new_with_validity( + offsets.iter().zip(offsets.lengths()), + arrow_data.validity(), + ) + .map(|elem| { + elem.map(|(start, len)| { + let start = *start as usize; + let end = start + len; + if end as usize > arrow_data_inner.len() { + return Err(DeserializationError::offset_slice_oob( + (start, end), + arrow_data_inner.len(), + )); + } + + #[allow(unsafe_code, clippy::undocumented_unsafe_blocks)] + let data = unsafe { + arrow_data_inner.get_unchecked(start as usize..end as usize) + }; + let data = data + .iter() + .cloned() + .map(Option::unwrap_or_default) + .collect(); + Ok(data) + }) + .transpose() + }) + .collect::>>>()? + } + .into_iter() + } + }; + let exclusions = { + if !arrays_by_name.contains_key("exclusions") { + return Err(DeserializationError::missing_struct_field( + Self::arrow_datatype(), + "exclusions", + )) + .with_context("rerun.blueprint.QueryExpressions"); + } + let arrow_data = &**arrays_by_name["exclusions"]; + { + let arrow_data = arrow_data + .as_any() + .downcast_ref::>() + .ok_or_else(|| { + DeserializationError::datatype_mismatch( + DataType::List(Box::new(Field { + name: "item".to_owned(), + data_type: DataType::Utf8, + is_nullable: false, + metadata: [].into(), + })), + arrow_data.data_type().clone(), + ) + }) + .with_context("rerun.blueprint.QueryExpressions#exclusions")?; + if arrow_data.is_empty() { + Vec::new() + } else { + let arrow_data_inner = { + let arrow_data_inner = &**arrow_data.values(); + { + let arrow_data_inner = arrow_data_inner + .as_any() + .downcast_ref::>() + .ok_or_else(|| { + DeserializationError::datatype_mismatch( + DataType::Utf8, + arrow_data_inner.data_type().clone(), + ) + }) + .with_context( + "rerun.blueprint.QueryExpressions#exclusions", + )?; + let arrow_data_inner_buf = arrow_data_inner.values(); + let offsets = arrow_data_inner.offsets(); + arrow2::bitmap::utils::ZipValidity::new_with_validity( + offsets.iter().zip(offsets.lengths()), + arrow_data_inner.validity(), + ) + .map(|elem| { + elem.map(|(start, len)| { + let start = *start as usize; + let end = start + len; + if end as usize > arrow_data_inner_buf.len() { + return Err( + DeserializationError::offset_slice_oob( + (start, end), + arrow_data_inner_buf.len(), + ), + ); + } + + #[allow( + unsafe_code, + clippy::undocumented_unsafe_blocks + )] + let data = unsafe { + arrow_data_inner_buf + .clone() + .sliced_unchecked(start, len) + }; + Ok(data) + }) + .transpose() + }) + .map(|res_or_opt| { + res_or_opt.map(|res_or_opt| { + res_or_opt.map(|v| ::re_types_core::ArrowString(v)) + }) + }) + .collect::>>>() + .with_context("rerun.blueprint.QueryExpressions#exclusions")? 
.into_iter() } .collect::>() @@ -332,15 +544,22 @@ impl ::re_types_core::Loggable for QueryExpressions { } }; arrow2::bitmap::utils::ZipValidity::new_with_validity( - ::itertools::izip!(expressions), + ::itertools::izip!(inclusions, exclusions), arrow_data.validity(), ) .map(|opt| { - opt.map(|(expressions)| { + opt.map(|(inclusions, exclusions)| { Ok(Self { - expressions: expressions + inclusions: inclusions + .ok_or_else(DeserializationError::missing_data) + .with_context( + "rerun.blueprint.QueryExpressions#inclusions", + )?, + exclusions: exclusions .ok_or_else(DeserializationError::missing_data) - .with_context("rerun.blueprint.QueryExpressions#expressions")?, + .with_context( + "rerun.blueprint.QueryExpressions#exclusions", + )?, }) }) .transpose() diff --git a/crates/re_space_view/src/data_query_blueprint.rs b/crates/re_space_view/src/data_query_blueprint.rs index 6c8694f73ff9..0fe2d1b0237c 100644 --- a/crates/re_space_view/src/data_query_blueprint.rs +++ b/crates/re_space_view/src/data_query_blueprint.rs @@ -50,10 +50,12 @@ impl DataQueryBlueprint { Self { id: DataQueryId::random(), space_view_class_name, - expressions: queries_entities - .map(|exp| exp.to_string().into()) - .collect::>() - .into(), + expressions: QueryExpressions { + inclusions: queries_entities + .map(|exp| exp.to_string().into()) + .collect::>(), + exclusions: vec![], + }, } } @@ -142,8 +144,10 @@ impl DataQuery for DataQueryBlueprint { struct QueryExpressionEvaluator<'a> { blueprint: &'a DataQueryBlueprint, per_system_entity_list: &'a EntitiesPerSystem, - exact_matches: IntSet, - recursive_matches: IntSet, + exact_inclusions: IntSet, + recursive_inclusions: IntSet, + exact_exclusions: IntSet, + recursive_exclusions: IntSet, allowed_prefixes: IntSet, } @@ -152,25 +156,43 @@ impl<'a> QueryExpressionEvaluator<'a> { blueprint: &'a DataQueryBlueprint, per_system_entity_list: &'a EntitiesPerSystem, ) -> Self { - let expressions: Vec = blueprint + let inclusions: Vec = blueprint .expressions + .inclusions + .iter() + .filter(|exp| !exp.as_str().is_empty()) + .map(|exp| EntityPathExpr::from(exp.as_str())) + .collect(); + + let exclusions: Vec = blueprint .expressions + .exclusions .iter() .filter(|exp| !exp.as_str().is_empty()) .map(|exp| EntityPathExpr::from(exp.as_str())) .collect(); - let exact_matches: IntSet = expressions + let exact_inclusions: IntSet = inclusions .iter() .filter_map(|exp| exp.exact_entity_path().cloned()) .collect(); - let recursive_matches = expressions + let recursive_inclusions = inclusions .iter() .filter_map(|exp| exp.recursive_entity_path().cloned()) .collect(); - let allowed_prefixes = expressions + let exact_exclusions: IntSet = exclusions + .iter() + .filter_map(|exp| exp.exact_entity_path().cloned()) + .collect(); + + let recursive_exclusions: IntSet = exclusions + .iter() + .filter_map(|exp| exp.recursive_entity_path().cloned()) + .collect(); + + let allowed_prefixes = inclusions .iter() .flat_map(|exp| EntityPath::incremental_walk(None, exp.entity_path())) .collect(); @@ -178,8 +200,10 @@ impl<'a> QueryExpressionEvaluator<'a> { Self { blueprint, per_system_entity_list, - exact_matches, - recursive_matches, + exact_inclusions, + recursive_inclusions, + exact_exclusions, + recursive_exclusions, allowed_prefixes, } } @@ -193,18 +217,22 @@ impl<'a> QueryExpressionEvaluator<'a> { from_recursive: bool, ) -> Option { // If we hit a prefix that is not allowed, we terminate. This is - // a pruned branch of the tree. + // a pruned branch of the tree. 
Can come from either an explicit + // recursive exclusion, or an implicit missing inclusion. // TODO(jleibs): If this space is disconnected, we should terminate here - if !(from_recursive || self.allowed_prefixes.contains(&tree.path)) { + if self.recursive_exclusions.contains(&tree.path) + || !(from_recursive || self.allowed_prefixes.contains(&tree.path)) + { return None; } let entity_path = tree.path.clone(); // Pre-compute our matches - let exact_match = self.exact_matches.contains(&entity_path); - let recursive_match = self.recursive_matches.contains(&entity_path) || from_recursive; - let any_match = exact_match || recursive_match; + let exact_include = self.exact_inclusions.contains(&entity_path); + let recursive_include = self.recursive_inclusions.contains(&entity_path) || from_recursive; + let exact_exclude = self.exact_exclusions.contains(&entity_path); + let any_match = (exact_include || recursive_include) && !exact_exclude; // Only populate view_parts if this is a match // Note that allowed prefixes that aren't matches can still create groups @@ -238,7 +266,7 @@ impl<'a> QueryExpressionEvaluator<'a> { .join(&DataQueryBlueprint::RECURSIVE_OVERRIDES_PREFIX.into()) .join(&entity_path); - let self_leaf = if !view_parts.is_empty() || exact_match { + let self_leaf = if !view_parts.is_empty() || exact_include { let individual_props = overrides.individual.get_opt(&entity_path); let mut leaf_resolved_properties = resolved_properties.clone(); @@ -273,7 +301,7 @@ impl<'a> QueryExpressionEvaluator<'a> { overrides, &resolved_properties, data_results, - recursive_match, // Once we have hit a recursive match, it's always propagated + recursive_include, // Once we have hit a recursive match, it's always propagated ) })) .collect(); @@ -450,39 +478,49 @@ mod tests { all_recordings: vec![], }; - let scenarios: Vec<(Vec<&str>, Vec<&str>)> = vec![ - ( - vec!["/"], - vec![ + struct Scenario { + inclusions: Vec<&'static str>, + exclusions: Vec<&'static str>, + outputs: Vec<&'static str>, + } + + let scenarios: Vec = vec![ + Scenario { + inclusions: vec!["/"], + exclusions: vec![], + outputs: vec![ "/", "parent/", "parent", "parent/skipped/", // Not an exact match and not found in tree "parent/skipped/child1", // Only child 1 has ViewParts ], - ), - ( - vec!["parent/skipped/"], - vec![ + }, + Scenario { + inclusions: vec!["parent/skipped/"], + exclusions: vec![], + outputs: vec![ "/", "parent/", // Only included because is a prefix "parent/skipped/", // Not an exact match and not found in tree "parent/skipped/child1", // Only child 1 has ViewParts ], - ), - ( - vec!["parent", "parent/skipped/child2"], - vec![ + }, + Scenario { + inclusions: vec!["parent", "parent/skipped/child2"], + exclusions: vec![], + outputs: vec![ "/", // Trivial intermediate group -- could be collapsed "parent/", "parent", "parent/skipped/", // Trivial intermediate group -- could be collapsed "parent/skipped/child2", ], - ), - ( - vec!["parent/skipped", "parent/skipped/child2", "parent/"], - vec![ + }, + Scenario { + inclusions: vec!["parent/skipped", "parent/skipped/child2", "parent/"], + exclusions: vec![], + outputs: vec![ "/", "parent/", "parent", @@ -491,25 +529,58 @@ mod tests { "parent/skipped/child1", // Included because an exact match "parent/skipped/child2", ], - ), - ( - vec!["not/found"], + }, + Scenario { + inclusions: vec!["parent/skipped", "parent/skipped/child2", "parent/"], + exclusions: vec!["parent"], + outputs: vec![ + "/", + "parent/", // Parent leaf has been excluded + "parent/skipped/", + "parent/skipped", 
// Included because an exact match
+                    "parent/skipped/child1", // Included because an exact match
+                    "parent/skipped/child2",
+                ],
+            },
+            Scenario {
+                inclusions: vec!["parent/"],
+                exclusions: vec!["parent/skipped/"],
+                outputs: vec!["/", "parent"], // None of the children are hit since excluded
+            },
+            Scenario {
+                inclusions: vec!["parent/", "parent/skipped/child2"],
+                exclusions: vec!["parent/skipped/child1"],
+                outputs: vec![
+                    "/",
+                    "parent/",
+                    "parent",
+                    "parent/skipped/",
+                    "parent/skipped/child2", // No child1 since skipped.
+                ],
+            },
+            Scenario {
+                inclusions: vec!["not/found"],
+                exclusions: vec![],
                // TODO(jleibs): Making this work requires merging the EntityTree walk with a minimal-coverage ExactMatchTree walk
                // not crucial for now until we expose a free-form UI for entering paths.
                // vec!["/", "not/", "not/found"]),
-                vec![],
-            ),
+                outputs: vec![],
+            },
         ];

-        for (input, outputs) in scenarios {
+        for Scenario {
+            inclusions,
+            exclusions,
+            outputs,
+        } in scenarios
+        {
             let query = DataQueryBlueprint {
                 id: DataQueryId::random(),
                 space_view_class_name: "3D".into(),
-                expressions: input
-                    .into_iter()
-                    .map(|s| s.into())
-                    .collect::<Vec<_>>()
-                    .into(),
+                expressions: QueryExpressions {
+                    inclusions: inclusions.into_iter().map(|s| s.into()).collect::<Vec<_>>(),
+                    exclusions: exclusions.into_iter().map(|s| s.into()).collect::<Vec<_>>(),
+                },
             };

             let query_result =
                 query.execute_query(&resolver, &ctx, &entities_per_system_per_class);
diff --git a/crates/re_types/definitions/rerun/blueprint/query_expressions.fbs b/crates/re_types/definitions/rerun/blueprint/query_expressions.fbs
index 1b6e9d99f5d2..d4f9e2af279a 100644
--- a/crates/re_types/definitions/rerun/blueprint/query_expressions.fbs
+++ b/crates/re_types/definitions/rerun/blueprint/query_expressions.fbs
@@ -16,6 +16,9 @@ table QueryExpressions (
   "attr.rust.derive": "PartialEq, Eq",
   "attr.rust.override_crate": "re_space_view"
 ) {
-    /// A set of strings that can be parsed as `EntityPathExpression`s.
-    expressions: [string] (order: 100);
+    /// A set of strings representing `EntityPathExpression`s to be included.
+    inclusions: [string] (order: 100);
+
+    /// A set of strings representing `EntityPathExpression`s to be excluded.
+    exclusions: [string] (order: 200);
 }
diff --git a/crates/re_viewer/src/ui/selection_panel.rs b/crates/re_viewer/src/ui/selection_panel.rs
index 236f7b79550f..e6777af83a9f 100644
--- a/crates/re_viewer/src/ui/selection_panel.rs
+++ b/crates/re_viewer/src/ui/selection_panel.rs
@@ -417,16 +417,22 @@ fn blueprint_ui(
         if let Some(space_view) = viewport.blueprint.space_view(space_view_id) {
             if let Some(query) = space_view.queries.first() {

-                let expressions = query.expressions.expressions.join("\n");
-                let mut edited_expressions = expressions.clone();
+                let inclusions = query.expressions.inclusions.join("\n");
+                let mut edited_inclusions = inclusions.clone();
+                let exclusions = query.expressions.exclusions.join("\n");
+                let mut edited_exclusions = exclusions.clone();

-                ui.text_edit_multiline(&mut edited_expressions);
+                ui.label("Inclusion expressions");
+                ui.text_edit_multiline(&mut edited_inclusions);
+                ui.label("Exclusion expressions");
+                ui.text_edit_multiline(&mut edited_exclusions);

-                if edited_expressions != expressions {
+                if edited_inclusions != inclusions || edited_exclusions != exclusions {
                     let timepoint = TimePoint::timeless();

                     let expressions_component = QueryExpressions {
-                        expressions: edited_expressions.split('\n').map(|s| s.into()).collect(),
+                        inclusions: edited_inclusions.split('\n').map(|s| s.into()).collect(),
+                        exclusions: edited_exclusions.split('\n').map(|s| s.into()).collect(),
                     };

                     let row = DataRow::from_cells1_sized(
diff --git a/rerun_cpp/src/rerun/blueprint/query_expressions.cpp b/rerun_cpp/src/rerun/blueprint/query_expressions.cpp
index d44d6d88dfe2..2c8474403168 100644
--- a/rerun_cpp/src/rerun/blueprint/query_expressions.cpp
+++ b/rerun_cpp/src/rerun/blueprint/query_expressions.cpp
@@ -13,7 +13,12 @@ namespace rerun {
         ) {
             static const auto datatype = arrow::struct_({
                 arrow::field(
-                    "expressions",
+                    "inclusions",
                     arrow::list(arrow::field("item", arrow::utf8(), false)),
                     false
                 ),
+                arrow::field(
+                    "exclusions",
+                    arrow::list(arrow::field("item", arrow::utf8(), false)),
+                    false
+                ),
@@ -44,8 +49,22 @@ namespace rerun {
             for (size_t elem_idx = 0; elem_idx < num_elements; elem_idx += 1) {
                 const auto& element = elements[elem_idx];
                 ARROW_RETURN_NOT_OK(field_builder->Append());
-                for (size_t item_idx = 0; item_idx < element.expressions.size(); item_idx += 1) {
-                    ARROW_RETURN_NOT_OK(value_builder->Append(element.expressions[item_idx]));
+                for (size_t item_idx = 0; item_idx < element.inclusions.size(); item_idx += 1) {
+                    ARROW_RETURN_NOT_OK(value_builder->Append(element.inclusions[item_idx]));
+                }
+            }
+        }
+        {
+            auto field_builder = static_cast<arrow::ListBuilder*>(builder->field_builder(1));
+            auto value_builder = static_cast<arrow::StringBuilder*>(field_builder->value_builder());
+            ARROW_RETURN_NOT_OK(field_builder->Reserve(static_cast<int64_t>(num_elements)));
+            ARROW_RETURN_NOT_OK(value_builder->Reserve(static_cast<int64_t>(num_elements * 2)));
+
+            for (size_t elem_idx = 0; elem_idx < num_elements; elem_idx += 1) {
+                const auto& element = elements[elem_idx];
+                ARROW_RETURN_NOT_OK(field_builder->Append());
+                for (size_t item_idx = 0; item_idx < element.exclusions.size(); item_idx += 1) {
+                    ARROW_RETURN_NOT_OK(value_builder->Append(element.exclusions[item_idx]));
                 }
             }
         }
diff --git a/rerun_cpp/src/rerun/blueprint/query_expressions.hpp b/rerun_cpp/src/rerun/blueprint/query_expressions.hpp
index fc2d58c47e55..dd5b843745f0 100644
--- a/rerun_cpp/src/rerun/blueprint/query_expressions.hpp
+++ b/rerun_cpp/src/rerun/blueprint/query_expressions.hpp
@@ -9,7 +9,6 @@
 #include
 #include
 #include
-#include

 namespace arrow {
     class Array;
@@ -22,19 +21,14 @@ namespace rerun::blueprint {
     ///
     /// Unstable. Used for the ongoing blueprint experimentations.
     struct QueryExpressions {
-        /// A set of strings that can be parsed as `EntityPathExpression`s.
-        rerun::Collection<std::string> expressions;
+        /// A set of strings representing `EntityPathExpression`s to be included.
+        rerun::Collection<std::string> inclusions;
+
+        /// A set of strings representing `EntityPathExpression`s to be excluded.
+        rerun::Collection<std::string> exclusions;

       public:
         QueryExpressions() = default;
-
-        QueryExpressions(rerun::Collection<std::string> expressions_)
-            : expressions(std::move(expressions_)) {}
-
-        QueryExpressions& operator=(rerun::Collection<std::string> expressions_) {
-            expressions = std::move(expressions_);
-            return *this;
-        }
     };

 } // namespace rerun::blueprint
diff --git a/rerun_py/rerun_sdk/rerun/blueprint/query_expressions.py b/rerun_py/rerun_sdk/rerun/blueprint/query_expressions.py
index e35d29656606..24f26b07e5cc 100644
--- a/rerun_py/rerun_sdk/rerun/blueprint/query_expressions.py
+++ b/rerun_py/rerun_sdk/rerun/blueprint/query_expressions.py
@@ -28,21 +28,28 @@ class QueryExpressions:
     Unstable. Used for the ongoing blueprint experimentations.
     """

-    def __init__(self: Any, expressions: QueryExpressionsLike):
+    def __init__(self: Any, inclusions: list[str], exclusions: list[str]):
         """
         Create a new instance of the QueryExpressions blueprint.

         Parameters
         ----------
-        expressions:
-            A set of strings that can be parsed as `EntityPathExpression`s.
+        inclusions:
+            A set of strings representing `EntityPathExpression`s to be included.
+        exclusions:
+            A set of strings representing `EntityPathExpression`s to be excluded.

         """

         # You can define your own __init__ function as a member of QueryExpressionsExt in query_expressions_ext.py
-        self.__attrs_init__(expressions=expressions)
+        self.__attrs_init__(inclusions=inclusions, exclusions=exclusions)

-    expressions: list[str] = field()
-    # A set of strings that can be parsed as `EntityPathExpression`s.
+    inclusions: list[str] = field()
+    # A set of strings representing `EntityPathExpression`s to be included.
+    #
+    # (Docstring intentionally commented out to hide this field from the docs)
+
+    exclusions: list[str] = field()
+    # A set of strings representing `EntityPathExpression`s to be excluded.
     #
     # (Docstring intentionally commented out to hide this field from the docs)
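
For illustration only (not part of the diff itself): a minimal sketch of constructing the split component from the Python SDK, mirroring one of the test scenarios above. The import path is inferred from the file location in this diff and may differ from the public re-export; how the component is then attached to a blueprint is not shown here.

    from rerun.blueprint.query_expressions import QueryExpressions  # assumed import path

    # Include everything under `parent/`, but prune the `parent/skipped/` subtree.
    expressions = QueryExpressions(
        inclusions=["parent/"],
        exclusions=["parent/skipped/"],
    )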