Skip to content

Commit

Permalink
Introduce StoreHub and rename Recording->Store (#2301)
Browse files Browse the repository at this point in the history
Tip: review commit-by-commit

### What
Break out a simple `StoreHub` from `App`. It is responsible for handling
all loaded recordings and blueprints.

This PR also fixes a bug where we previously assumed an `.rrd` file
could only contain data from a single store, and that that store was
a recording.

### Renames
A _Store_ is either a _Recording_ or a _Blueprint_. So now we have:

* `LogDb -> StoreDb`
* `RecordingId -> StoreId`
* `RecordingInfo -> StoreInfo` and `SetRecordingInfo -> SetStoreInfo`
* `RecordingSource -> StoreSource`
* `RecordingType -> StoreKind`


### Checklist
* [x] I have read and agree to [Contributor
Guide](https://github.com/rerun-io/rerun/blob/main/CONTRIBUTING.md) and
the [Code of
Conduct](https://github.com/rerun-io/rerun/blob/main/CODE_OF_CONDUCT.md)

<!-- This line will get updated when the PR build summary job finishes.
-->
PR Build Summary: https://build.rerun.io/pr/2301

<!-- pr-link-docs:start -->
Docs preview: https://rerun.io/preview/1f07303/docs
<!-- pr-link-docs:end -->
  • Loading branch information
emilk authored Jun 2, 2023
1 parent 3d778a2 commit 0abb3d3
Show file tree
Hide file tree
Showing 67 changed files with 876 additions and 746 deletions.
11 changes: 4 additions & 7 deletions crates/re_data_store/examples/memory_usage.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ fn live_bytes() -> usize {

// ----------------------------------------------------------------------------

use re_log_types::{entity_path, DataRow, RecordingId, RecordingType, RowId};
use re_log_types::{entity_path, DataRow, RowId, StoreId, StoreKind};

fn main() {
log_messages();
Expand Down Expand Up @@ -91,7 +91,7 @@ fn log_messages() {

const NUM_POINTS: usize = 1_000;

let recording_id = RecordingId::random(RecordingType::Data);
let store_id = StoreId::random(StoreKind::Recording);
let timeline = Timeline::new_sequence("frame_nr");
let mut time_point = TimePoint::default();
time_point.insert(timeline, TimeInt::from(0));
Expand All @@ -118,7 +118,7 @@ fn log_messages() {
);
let table_bytes = live_bytes() - used_bytes_start;
let log_msg = Box::new(LogMsg::ArrowMsg(
recording_id.clone(),
store_id.clone(),
table.to_arrow_msg().unwrap(),
));
let log_msg_bytes = live_bytes() - used_bytes_start;
Expand All @@ -143,10 +143,7 @@ fn log_messages() {
.into_table(),
);
let table_bytes = live_bytes() - used_bytes_start;
let log_msg = Box::new(LogMsg::ArrowMsg(
recording_id,
table.to_arrow_msg().unwrap(),
));
let log_msg = Box::new(LogMsg::ArrowMsg(store_id, table.to_arrow_msg().unwrap()));
let log_msg_bytes = live_bytes() - used_bytes_start;
println!("Arrow payload containing a Pos2 uses {table_bytes} bytes in RAM");
let encoded = encode_log_msg(&log_msg);
Expand Down
2 changes: 1 addition & 1 deletion crates/re_data_store/src/instance_path.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use std::hash::Hash;

use re_log_types::{EntityPath, EntityPathHash, InstanceKey};

use crate::log_db::EntityDb;
use crate::store_db::EntityDb;

// ----------------------------------------------------------------------------

Expand Down
4 changes: 2 additions & 2 deletions crates/re_data_store/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,13 @@ mod editable_auto_value;
pub mod entity_properties;
pub mod entity_tree;
mod instance_path;
pub mod log_db;
pub mod store_db;
mod util;

pub use entity_properties::*;
pub use entity_tree::*;
pub use instance_path::*;
pub use log_db::LogDb;
pub use store_db::StoreDb;
pub use util::*;

#[cfg(feature = "serde")]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@ use nohash_hasher::IntMap;

use re_arrow_store::{DataStoreConfig, TimeInt};
use re_log_types::{
ArrowMsg, Component as _, ComponentPath, DataCell, DataRow, DataTable, EntityPath,
EntityPathHash, EntityPathOpMsg, InstanceKey, LogMsg, PathOp, RecordingId, RecordingInfo,
RecordingType, RowId, SetRecordingInfo, TimePoint, Timeline,
ApplicationId, ArrowMsg, Component as _, ComponentPath, DataCell, DataRow, DataTable,
EntityPath, EntityPathHash, EntityPathOpMsg, InstanceKey, LogMsg, PathOp, RowId, SetStoreInfo,
StoreId, StoreInfo, StoreKind, TimePoint, Timeline,
};

use crate::{Error, TimesPerTimeline};
Expand Down Expand Up @@ -171,48 +171,52 @@ impl EntityDb {
// ----------------------------------------------------------------------------

/// A in-memory database built from a stream of [`LogMsg`]es.
pub struct LogDb {
/// The [`RecordingId`] for this log.
recording_id: RecordingId,
pub struct StoreDb {
/// The [`StoreId`] for this log.
store_id: StoreId,

/// All [`EntityPathOpMsg`]s ever received.
entity_op_msgs: BTreeMap<RowId, EntityPathOpMsg>,

/// Set by whomever created this [`LogDb`].
/// Set by whomever created this [`StoreDb`].
pub data_source: Option<re_smart_channel::SmartChannelSource>,

/// Comes in a special message, [`LogMsg::SetRecordingInfo`].
recording_msg: Option<SetRecordingInfo>,
/// Comes in a special message, [`LogMsg::SetStoreInfo`].
recording_msg: Option<SetStoreInfo>,

/// Where we store the entities.
pub entity_db: EntityDb,
}

impl LogDb {
pub fn new(recording_id: RecordingId) -> Self {
impl StoreDb {
pub fn new(store_id: StoreId) -> Self {
Self {
recording_id,
store_id,
entity_op_msgs: Default::default(),
data_source: None,
recording_msg: None,
entity_db: Default::default(),
}
}

pub fn recording_msg(&self) -> Option<&SetRecordingInfo> {
pub fn recording_msg(&self) -> Option<&SetStoreInfo> {
self.recording_msg.as_ref()
}

pub fn recording_info(&self) -> Option<&RecordingInfo> {
pub fn store_info(&self) -> Option<&StoreInfo> {
self.recording_msg().map(|msg| &msg.info)
}

pub fn recording_type(&self) -> RecordingType {
self.recording_id.variant
pub fn app_id(&self) -> Option<&ApplicationId> {
self.store_info().map(|ri| &ri.application_id)
}

pub fn recording_id(&self) -> &RecordingId {
&self.recording_id
pub fn store_kind(&self) -> StoreKind {
self.store_id.kind
}

pub fn store_id(&self) -> &StoreId {
&self.store_id
}

pub fn timelines(&self) -> impl ExactSizeIterator<Item = &Timeline> {
Expand All @@ -239,8 +243,10 @@ impl LogDb {
pub fn add(&mut self, msg: &LogMsg) -> Result<(), Error> {
re_tracing::profile_function!();

debug_assert_eq!(msg.store_id(), self.store_id());

match &msg {
LogMsg::SetRecordingInfo(msg) => self.add_begin_recording_msg(msg),
LogMsg::SetStoreInfo(msg) => self.add_begin_recording_msg(msg),
LogMsg::EntityPathOpMsg(_, msg) => {
let EntityPathOpMsg {
row_id,
Expand All @@ -256,11 +262,11 @@ impl LogDb {
Ok(())
}

pub fn add_begin_recording_msg(&mut self, msg: &SetRecordingInfo) {
pub fn add_begin_recording_msg(&mut self, msg: &SetStoreInfo) {
self.recording_msg = Some(msg.clone());
}

/// Returns an iterator over all [`EntityPathOpMsg`]s that have been written to this `LogDb`.
/// Returns an iterator over all [`EntityPathOpMsg`]s that have been written to this `StoreDb`.
pub fn iter_entity_op_msgs(&self) -> impl Iterator<Item = &EntityPathOpMsg> {
self.entity_op_msgs.values()
}
Expand All @@ -287,7 +293,7 @@ impl LogDb {
let cutoff_times = self.entity_db.data_store.oldest_time_per_timeline();

let Self {
recording_id: _,
store_id: _,
entity_op_msgs,
data_source: _,
recording_msg: _,
Expand Down
6 changes: 3 additions & 3 deletions crates/re_data_store/src/util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use re_log_types::{
DataRow, DeserializableComponent, EntityPath, RowId, SerializableComponent, TimePoint, Timeline,
};

use crate::LogDb;
use crate::StoreDb;

// ----------------------------------------------------------------------------

Expand Down Expand Up @@ -60,7 +60,7 @@ where

/// Store a single value for a given [`re_log_types::Component`].
pub fn store_one_component<C: SerializableComponent>(
log_db: &mut LogDb,
store_db: &mut StoreDb,
entity_path: &EntityPath,
timepoint: &TimePoint,
component: C,
Expand All @@ -74,7 +74,7 @@ pub fn store_one_component<C: SerializableComponent>(
);
row.compute_all_size_bytes();

match log_db.entity_db.try_add_data_row(&row) {
match store_db.entity_db.try_add_data_row(&row) {
Ok(()) => {}
Err(err) => {
re_log::warn_once!(
Expand Down
2 changes: 1 addition & 1 deletion crates/re_data_ui/src/annotation_context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ fn annotation_info(
keypoint_id: &re_components::KeypointId,
) -> Option<re_components::AnnotationInfo> {
let class_id = ctx
.log_db
.store_db
.entity_db
.data_store
.query_latest_component::<ClassId>(entity_path, query)?;
Expand Down
2 changes: 1 addition & 1 deletion crates/re_data_ui/src/component_path.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ impl DataUi for ComponentPath {
verbosity: UiVerbosity,
query: &re_arrow_store::LatestAtQuery,
) {
let store = &ctx.log_db.entity_db.data_store;
let store = &ctx.store_db.entity_db.data_store;

if let Some((_, component_data)) = re_query::get_component_with_instances(
store,
Expand Down
2 changes: 1 addition & 1 deletion crates/re_data_ui/src/instance_path.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ impl DataUi for InstancePath {
verbosity: UiVerbosity,
query: &re_arrow_store::LatestAtQuery,
) {
let store = &ctx.log_db.entity_db.data_store;
let store = &ctx.store_db.entity_db.data_store;

let Some(mut components) = store.all_components(&query.timeline, &self.entity_path) else {
ui.label(format!("No components in entity {}", self.entity_path));
Expand Down
30 changes: 15 additions & 15 deletions crates/re_data_ui/src/log_msg.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use re_log_types::{ArrowMsg, DataTable, EntityPathOpMsg, LogMsg, RecordingInfo, SetRecordingInfo};
use re_log_types::{ArrowMsg, DataTable, EntityPathOpMsg, LogMsg, SetStoreInfo, StoreInfo};
use re_viewer_context::{UiVerbosity, ViewerContext};

use super::DataUi;
Expand All @@ -13,55 +13,55 @@ impl DataUi for LogMsg {
query: &re_arrow_store::LatestAtQuery,
) {
match self {
LogMsg::SetRecordingInfo(msg) => msg.data_ui(ctx, ui, verbosity, query),
LogMsg::SetStoreInfo(msg) => msg.data_ui(ctx, ui, verbosity, query),
LogMsg::EntityPathOpMsg(_, msg) => msg.data_ui(ctx, ui, verbosity, query),
LogMsg::ArrowMsg(_, msg) => msg.data_ui(ctx, ui, verbosity, query),
}
}
}

impl DataUi for SetRecordingInfo {
impl DataUi for SetStoreInfo {
fn data_ui(
&self,
_ctx: &mut ViewerContext<'_>,
ui: &mut egui::Ui,
_verbosity: UiVerbosity,
_query: &re_arrow_store::LatestAtQuery,
) {
ui.code("SetRecordingInfo");
let SetRecordingInfo { row_id: _, info } = self;
let RecordingInfo {
ui.code("SetStoreInfo");
let SetStoreInfo { row_id: _, info } = self;
let StoreInfo {
application_id,
recording_id,
store_id,
started,
recording_source,
store_source,
is_official_example,
recording_type,
store_kind,
} = info;

egui::Grid::new("fields").num_columns(2).show(ui, |ui| {
ui.monospace("application_id:");
ui.label(application_id.to_string());
ui.end_row();

ui.monospace("recording_id:");
ui.label(format!("{recording_id:?}"));
ui.monospace("store_id:");
ui.label(format!("{store_id:?}"));
ui.end_row();

ui.monospace("started:");
ui.label(started.format());
ui.end_row();

ui.monospace("recording_source:");
ui.label(format!("{recording_source}"));
ui.monospace("store_source:");
ui.label(format!("{store_source}"));
ui.end_row();

ui.monospace("is_official_example:");
ui.label(format!("{is_official_example}"));
ui.end_row();

ui.monospace("recording_type:");
ui.label(format!("{recording_type}"));
ui.monospace("store_kind:");
ui.label(format!("{store_kind}"));
ui.end_row();
});
}
Expand Down
Loading

1 comment on commit 0abb3d3

@github-actions
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Performance Alert ⚠️

Possible performance regression was detected for benchmark 'Rust Benchmark'.
Benchmark result of this commit is worse than the previous benchmark result exceeding threshold 1.25.

Benchmark suite Current: 0abb3d3 Previous: 3d778a2 Ratio
datastore/num_rows=1000/num_instances=1000/packed=false/insert/default 5561175 ns/iter (± 165301) 2878128 ns/iter (± 27117) 1.93
datastore/num_rows=1000/num_instances=1000/packed=false/latest_at/default 403 ns/iter (± 2) 305 ns/iter (± 2) 1.32
datastore/num_rows=1000/num_instances=1000/packed=false/latest_at_missing/primary/default 289 ns/iter (± 0) 223 ns/iter (± 0) 1.30
datastore/num_rows=1000/num_instances=1000/packed=false/latest_at_missing/secondaries/default 447 ns/iter (± 0) 348 ns/iter (± 15) 1.28
datastore/num_rows=1000/num_instances=1000/packed=false/range/default 5973239 ns/iter (± 298553) 2861996 ns/iter (± 51676) 2.09
datastore/num_rows=1000/num_instances=1000/gc/default 2731995 ns/iter (± 51449) 1736721 ns/iter (± 11412) 1.57
mono_points_arrow/generate_message_bundles 40723621 ns/iter (± 1194591) 29807420 ns/iter (± 1442560) 1.37
mono_points_arrow/encode_log_msg 182683554 ns/iter (± 2520146) 144906727 ns/iter (± 2505774) 1.26
mono_points_arrow/decode_message_bundles 91579235 ns/iter (± 2356203) 60820688 ns/iter (± 2451985) 1.51
mono_points_arrow_batched/generate_message_bundles 35726025 ns/iter (± 1364898) 18665165 ns/iter (± 1771269) 1.91
mono_points_arrow_batched/generate_messages 10456203 ns/iter (± 1006346) 3638466 ns/iter (± 169895) 2.87
mono_points_arrow_batched/encode_log_msg 638301 ns/iter (± 3017) 403481 ns/iter (± 2700) 1.58
mono_points_arrow_batched/encode_total 47058842 ns/iter (± 1561532) 23640205 ns/iter (± 1274879) 1.99
mono_points_arrow_batched/decode_log_msg 529887 ns/iter (± 4894) 301234 ns/iter (± 2037) 1.76
mono_points_arrow_batched/decode_message_bundles 13951122 ns/iter (± 786841) 7542255 ns/iter (± 261082) 1.85
mono_points_arrow_batched/decode_total 14619768 ns/iter (± 454038) 7873633 ns/iter (± 304284) 1.86
batch_points_arrow/encode_log_msg 99896 ns/iter (± 2305) 58277 ns/iter (± 590) 1.71
batch_points_arrow/encode_total 394202 ns/iter (± 4516) 280173 ns/iter (± 2525) 1.41
batch_points_arrow/decode_log_msg 78211 ns/iter (± 1857) 46094 ns/iter (± 698) 1.70
batch_points_arrow/decode_total 83338 ns/iter (± 1510) 50076 ns/iter (± 778) 1.66
arrow_mono_points/insert 3140208274 ns/iter (± 59085375) 1788714609 ns/iter (± 16064533) 1.76
arrow_mono_points/query 1472821 ns/iter (± 72091) 948557 ns/iter (± 5020) 1.55
arrow_batch_points/query 16130 ns/iter (± 31) 12250 ns/iter (± 42) 1.32
arrow_batch_vecs/query 448873 ns/iter (± 1100) 315531 ns/iter (± 2891) 1.42

This comment was automatically generated by workflow using github-action-benchmark.

Please sign in to comment.