
Support loading .rbl blueprint files #5513

Merged: 14 commits, Mar 15, 2024
6 changes: 4 additions & 2 deletions crates/re_data_source/src/data_loader/loader_rrd.rs
@@ -24,7 +24,8 @@ impl crate::DataLoader for RrdLoader {
re_tracing::profile_function!(filepath.display().to_string());

let extension = crate::extension(&filepath);
if extension != "rrd" {
if !matches!(extension.as_str(), "rbl" | "rrd") {
// NOTE: blueprints and recordings have the same file format
return Err(crate::DataLoaderError::Incompatible(filepath.clone()));
}

@@ -66,7 +67,8 @@ impl crate::DataLoader for RrdLoader {
re_tracing::profile_function!(filepath.display().to_string());

let extension = crate::extension(&filepath);
if extension != "rrd" {
if !matches!(extension.as_str(), "rbl" | "rrd") {
// NOTE: blueprints and recordings have the same file format
return Err(crate::DataLoaderError::Incompatible(filepath));
}

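
Editorial note (not part of this PR's diff): the two hunks above widen the RRD loader's extension gate so that blueprint files (.rbl) are accepted alongside recordings (.rrd), which use the same file format. A standalone sketch of that gating pattern, with an invented helper name, purely for illustration:

use std::path::Path;

// Accept both extensions: blueprints and recordings share one on-disk encoding.
fn is_rerun_file(path: &Path) -> bool {
    path.extension()
        .and_then(|ext| ext.to_str())
        .map_or(false, |ext| matches!(ext, "rbl" | "rrd"))
}

fn main() {
    assert!(is_rerun_file(Path::new("my_blueprint.rbl")));
    assert!(is_rerun_file(Path::new("recording.rrd")));
    assert!(!is_rerun_file(Path::new("photo.png")));
}
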
18 changes: 18 additions & 0 deletions crates/re_data_source/src/data_source.rs
@@ -108,6 +108,24 @@ impl DataSource {
}
}

pub fn file_name(&self) -> Option<String> {
match self {
DataSource::RrdHttpUrl(url) => url.split('/').last().map(|r| r.to_owned()),
#[cfg(not(target_arch = "wasm32"))]
DataSource::FilePath(_, path) => {
path.file_name().map(|s| s.to_string_lossy().to_string())
}
DataSource::FileContents(_, file_contents) => Some(file_contents.name.clone()),
DataSource::WebSocketAddr(_) => None,
#[cfg(not(target_arch = "wasm32"))]
DataSource::Stdin => None,
}
}

pub fn is_blueprint(&self) -> Option<bool> {
self.file_name().map(|name| name.ends_with(".rbl"))
}

/// Stream the data from the given data source.
///
/// Will do minimal checks (e.g. that the file exists), for synchronous errors,
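
Editorial note (not part of this PR's diff): the new file_name and is_blueprint helpers classify a data source by its file name alone. A minimal sketch of how a caller might use them, assuming re_data_source is available as a dependency; the describe function is invented for illustration:

fn describe(source: &re_data_source::DataSource) {
    // `None` means the source has no file name at all (e.g. a WebSocket address or stdin).
    match source.is_blueprint() {
        Some(true) => println!("{:?} looks like a blueprint (.rbl)", source.file_name()),
        Some(false) => println!("{:?} is not a blueprint", source.file_name()),
        None => println!("source has no file name"),
    }
}
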
13 changes: 11 additions & 2 deletions crates/re_data_source/src/lib.rs
@@ -38,12 +38,21 @@ pub use self::load_file::load_from_path;
/// This is what you get when loading a file on Web, or when using drag-n-drop.
//
// TODO(#4554): drag-n-drop streaming support
#[derive(Clone, Debug)]
#[derive(Clone)]
pub struct FileContents {
pub name: String,
pub bytes: std::sync::Arc<[u8]>,
}

impl std::fmt::Debug for FileContents {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("FileContents")
.field("name", &self.name)
.field("bytes", &format_args!("{} bytes", self.bytes.len()))
.finish()
}
}

// …given that all feature flags are turned on for the `image` crate.
pub const SUPPORTED_IMAGE_EXTENSIONS: &[&str] = &[
"avif", "bmp", "dds", "exr", "farbfeld", "ff", "gif", "hdr", "ico", "jpeg", "jpg", "pam",
@@ -55,7 +64,7 @@ pub const SUPPORTED_MESH_EXTENSIONS: &[&str] = &["glb", "gltf", "obj", "stl"];
// TODO(#4532): `.ply` data loader should support 2D point cloud & meshes
pub const SUPPORTED_POINT_CLOUD_EXTENSIONS: &[&str] = &["ply"];

pub const SUPPORTED_RERUN_EXTENSIONS: &[&str] = &["rrd"];
pub const SUPPORTED_RERUN_EXTENSIONS: &[&str] = &["rbl", "rrd"];

// TODO(#4555): Add catch-all builtin `DataLoader` for text files
pub const SUPPORTED_TEXT_EXTENSIONS: &[&str] = &["txt", "md"];
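
Editorial note (not part of this PR's diff): the manual Debug impl above keeps log output readable by printing a byte count instead of dumping the raw buffer. A self-contained sketch of the same pattern and the output it produces (standalone types, not the crate's own):

struct FileContents {
    name: String,
    bytes: std::sync::Arc<[u8]>,
}

impl std::fmt::Debug for FileContents {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("FileContents")
            .field("name", &self.name)
            .field("bytes", &format_args!("{} bytes", self.bytes.len()))
            .finish()
    }
}

fn main() {
    let contents = FileContents {
        name: "my_app.rbl".to_owned(),
        bytes: vec![0u8; 1024].into(),
    };
    // Prints: FileContents { name: "my_app.rbl", bytes: 1024 bytes }
    println!("{contents:?}");
}
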
4 changes: 2 additions & 2 deletions crates/re_data_source/src/load_file.rs
@@ -34,7 +34,7 @@ pub fn load_from_path(

re_log::info!("Loading {path:?}…");

let data = load(settings, path, None)?;
let rx = load(settings, path, None)?;

// TODO(cmc): should we always unconditionally set store info though?
// If we reach this point, then at least one compatible `DataLoader` has been found.
@@ -45,7 +45,7 @@ }
}
}

send(&settings.store_id, data, tx);
send(&settings.store_id, rx, tx);

Ok(())
}
9 changes: 2 additions & 7 deletions crates/re_entity_db/src/entity_db.rs
@@ -82,9 +82,6 @@ fn insert_row_with_retries(
///
/// NOTE: all mutation is to be done via public functions!
pub struct EntityDb {
/// The [`StoreId`] for this log.
store_id: StoreId,

/// Set by whomever created this [`EntityDb`].
pub data_source: Option<re_smart_channel::SmartChannelSource>,

@@ -126,7 +123,6 @@ impl EntityDb {
);
let query_caches = re_query_cache::Caches::new(&data_store);
Self {
store_id: store_id.clone(),
data_source: None,
set_store_info: None,
last_modified_at: web_time::Instant::now(),
@@ -193,11 +189,11 @@
}

pub fn store_kind(&self) -> StoreKind {
self.store_id.kind
self.store_id().kind
}

pub fn store_id(&self) -> &StoreId {
&self.store_id
self.data_store.id()
}

pub fn timelines(&self) -> impl ExactSizeIterator<Item = &Timeline> {
@@ -486,7 +482,6 @@ impl EntityDb {
re_tracing::profile_function!();

let Self {
store_id: _,
data_source: _,
set_store_info: _,
last_modified_at: _,
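
Editorial note (not part of this PR's diff): the EntityDb change above removes a duplicated store_id field and derives the id from the data store instead, so there is a single source of truth that cannot drift out of sync. The pattern in isolation, with simplified standalone types invented for illustration:

struct DataStore {
    id: u64,
}

impl DataStore {
    fn id(&self) -> &u64 {
        &self.id
    }
}

struct EntityDb {
    data_store: DataStore,
}

impl EntityDb {
    // Delegate to the owning store instead of caching a second copy of the id.
    fn store_id(&self) -> &u64 {
        self.data_store.id()
    }
}

fn main() {
    let db = EntityDb { data_store: DataStore { id: 42 } };
    assert_eq!(*db.store_id(), 42);
}
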
11 changes: 11 additions & 0 deletions crates/re_log_types/src/lib.rs
@@ -227,6 +227,17 @@ impl LogMsg {
Self::ArrowMsg(store_id, _) => store_id,
}
}

pub fn set_store_id(&mut self, new_store_id: StoreId) {
match self {
LogMsg::SetStoreInfo(store_info) => {
store_info.info.store_id = new_store_id;
}
LogMsg::ArrowMsg(msg_store_id, _) => {
*msg_store_id = new_store_id;
}
}
}
}

impl_into_enum!(SetStoreInfo, LogMsg, SetStoreInfo);
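
Editorial note (not part of this PR's diff): set_store_id rewrites the store id carried by a LogMsg. The blueprint-saving path later in this diff uses it to re-key every message to a fresh blueprint id before writing a .rbl file, so the saved blueprint cannot collide with the one currently active in the viewer. A minimal sketch of that use, assuming re_log_types as a dependency; the helper name is invented for illustration:

use re_log_types::{LogMsg, StoreId, StoreKind};

// Re-key all messages to a fresh blueprint id before serializing them.
fn rekey_blueprint_messages(messages: &mut [LogMsg]) {
    let new_store_id = StoreId::random(StoreKind::Blueprint);
    for msg in messages.iter_mut() {
        msg.set_store_id(new_store_id.clone());
    }
}
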
127 changes: 75 additions & 52 deletions crates/re_viewer/src/app.rs
@@ -366,7 +366,7 @@ impl App {

SystemCommand::LoadStoreDb(entity_db) => {
let store_id = entity_db.store_id().clone();
store_hub.insert_recording(entity_db);
store_hub.insert_entity_db(entity_db);
store_hub.set_recording_id(store_id);
}

@@ -443,18 +443,24 @@
) {
match cmd {
UICommand::SaveRecording => {
save_recording(self, store_context, None);
if let Err(err) = save_recording(self, store_context, None) {
re_log::error!("Failed to save recording: {err}");
}
}
UICommand::SaveRecordingSelection => {
save_recording(
if let Err(err) = save_recording(
self,
store_context,
self.state.loop_selection(store_context),
);
) {
re_log::error!("Failed to save recording: {err}");
}
}

UICommand::SaveBlueprint => {
save_blueprint(self, store_context);
if let Err(err) = save_blueprint(self, store_context) {
re_log::error!("Failed to save blueprint: {err}");
}
}

#[cfg(not(target_arch = "wasm32"))]
@@ -934,14 +940,43 @@ impl App {
if let Some(err) = err {
re_log::warn!("Data source {} has left unexpectedly: {err}", msg.source);
} else {
re_log::debug!("Data source {} has left", msg.source);
re_log::debug!("Data source {} has finished", msg.source);

// This could be the signal that we finished loading a blueprint.
// In that case, we want to make it the default.

if let Some(entity_db) =
store_hub.entity_db_from_channel_source(&channel_source)
{
if let Some(store_info) = entity_db.store_info() {
match store_info.store_id.kind {
StoreKind::Recording => {
// Recordings become active as soon as we start streaming them.
}
StoreKind::Blueprint => {
// We wait to activate blueprints until they are fully loaded,
// so that we don't run heuristics on half-loaded blueprints.
re_log::debug!("Activating newly loaded blueprint");
store_hub.set_blueprint_for_app_id(
entity_db.store_id().clone(),
store_info.application_id.clone(),
);
Review comment (Member):

While this is definitely an improvement for the file-open use case, moving this here regresses the current blueprint-on-connect behavior.

Because blueprint and recording share the same channel, the blueprint now won't be activated until the channel is closed.

You can see this issue by adding

import time
time.sleep(5)

to the end of examples/python/blueprint/main.py. This causes the saved blueprint to load first, and then, after the timeout when the whole connection is closed, the blueprint loads.

This alone is insufficient for #5297... we still need some kind of safe-to-load marker inside the stream.

Reply (Member, PR author):

ouch; good point…

Reply (Member, PR author):

Yeah, I might merge this and then start work on #5297.

}
}
}
}
}
continue;
}
};

let store_id = msg.store_id();

if store_hub.is_active_blueprint(store_id) {
// TODO(#5514): handle loading of active blueprints.
re_log::warn_once!("Loading a blueprint {store_id} that is active. See https://github.com/rerun-io/rerun/issues/5514 for details.");
}

let entity_db = store_hub.entity_db_mut(store_id);

if entity_db.data_source.is_none() {
@@ -952,28 +987,24 @@
re_log::error_once!("Failed to add incoming msg: {err}");
};

// Set the recording-id after potentially creating the store in the
// hub. This ordering is important because the `StoreHub` internally
// Set the recording-id after potentially creating the store in the hub.
// This ordering is important because the `StoreHub` internally
// updates the app-id when changing the recording.
if let LogMsg::SetStoreInfo(msg) = &msg {
match msg.info.store_id.kind {
StoreKind::Recording => {
re_log::debug!("Opening a new recording: {:?}", msg.info);
store_hub.set_recording_id(store_id.clone());
}

StoreKind::Blueprint => {
re_log::debug!("Opening a new blueprint: {:?}", msg.info);
store_hub.set_blueprint_for_app_id(
store_id.clone(),
msg.info.application_id.clone(),
);
// We wait to activate blueprints until they are fully loaded,
// so that we don't run heuristics on half-loaded blueprints.
}
}
}

// Do analytics after ingesting the new message,
// because thats when the `entity_db.store_info` is set,
// because that's when the `entity_db.store_info` is set,
// which we use in the analytics call.
let entity_db = store_hub.entity_db_mut(store_id);
let is_new_store = matches!(&msg, LogMsg::SetStoreInfo(_msg));
@@ -1508,11 +1539,10 @@ fn save_recording(
app: &mut App,
store_context: Option<&StoreContext<'_>>,
loop_selection: Option<(re_entity_db::Timeline, re_log_types::TimeRangeF)>,
) {
) -> anyhow::Result<()> {
let Some(entity_db) = store_context.as_ref().and_then(|view| view.recording) else {
// NOTE: Can only happen if saving through the command palette.
re_log::error!("No recording data to save");
return;
anyhow::bail!("No recording data to save");
};

let file_name = "data.rrd";
@@ -1523,51 +1553,51 @@
"Save recording"
};

save_entity_db(
app,
file_name.to_owned(),
title.to_owned(),
entity_db,
loop_selection,
);
save_entity_db(app, file_name.to_owned(), title.to_owned(), || {
entity_db.to_messages(loop_selection)
})
}

fn save_blueprint(app: &mut App, store_context: Option<&StoreContext<'_>>) {
fn save_blueprint(app: &mut App, store_context: Option<&StoreContext<'_>>) -> anyhow::Result<()> {
let Some(store_context) = store_context else {
re_log::error!("No blueprint to save");
return;
anyhow::bail!("No blueprint to save");
};

let entity_db = store_context.blueprint;
re_tracing::profile_function!();

// We change the recording id to a new random one,
// otherwise when saving and loading a blueprint file, we can end up
// in a situation where the store_id we're loading is the same as the currently active one,
// which means they will merge in a strange way.
// This is also related to https://github.com/rerun-io/rerun/issues/5295
let new_store_id = re_log_types::StoreId::random(StoreKind::Blueprint);
let mut messages = store_context.blueprint.to_messages(None)?;
for message in &mut messages {
message.set_store_id(new_store_id.clone());
}

let file_name = format!(
"{}.rbl",
crate::saving::sanitize_app_id(&store_context.app_id)
);
let title = "Save blueprint";
save_entity_db(app, file_name, title.to_owned(), entity_db, None);

save_entity_db(app, file_name, title.to_owned(), || Ok(messages))
}

#[allow(clippy::needless_pass_by_ref_mut)] // `app` is only used on native
fn save_entity_db(
#[allow(unused_variables)] app: &mut App, // only used on native
file_name: String,
title: String,
entity_db: &EntityDb,
loop_selection: Option<(re_log_types::Timeline, re_log_types::TimeRangeF)>,
) {
to_log_messages: impl FnOnce() -> re_log_types::DataTableResult<Vec<LogMsg>>,
) -> anyhow::Result<()> {
re_tracing::profile_function!();

// Web
#[cfg(target_arch = "wasm32")]
{
let messages = match entity_db.to_messages(loop_selection) {
Ok(messages) => messages,
Err(err) => {
re_log::error!("File saving failed: {err}");
return;
}
};
let messages = to_log_messages()?;

wasm_bindgen_futures::spawn_local(async move {
if let Err(err) = async_save_dialog(&file_name, &title, &messages).await {
@@ -1587,22 +1617,15 @@ fn save_entity_db(
.save_file()
};
if let Some(path) = path {
let messages = match entity_db.to_messages(loop_selection) {
Ok(messages) => messages,
Err(err) => {
re_log::error!("File saving failed: {err}");
return;
}
};
if let Err(err) = app.background_tasks.spawn_file_saver(move || {
let messages = to_log_messages()?;
app.background_tasks.spawn_file_saver(move || {
crate::saving::encode_to_file(&path, messages.iter())?;
Ok(path)
}) {
// NOTE: Can only happen if saving through the command palette.
re_log::error!("File saving failed: {err}");
}
})?;
}
}

Ok(())
}

#[cfg(target_arch = "wasm32")]
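
Editorial note (not part of this PR's diff): the save_entity_db refactor above replaces the (entity_db, loop_selection) parameters with an FnOnce that produces the messages, so each caller decides how they are built (a recording with an optional loop selection, a blueprint with a re-keyed store id) while the save dialog and error handling live in one place. The shape of that API in isolation, with simplified standalone types invented for illustration:

fn save_messages<F>(file_name: &str, to_messages: F) -> Result<(), String>
where
    F: FnOnce() -> Result<Vec<String>, String>,
{
    // Dialog handling and error reporting would live here, once, for all callers.
    let messages = to_messages()?;
    println!("writing {} messages to {file_name}", messages.len());
    Ok(())
}

fn main() -> Result<(), String> {
    // A "recording" caller builds its messages one way…
    save_messages("data.rrd", || Ok(vec!["recording msg".to_owned()]))?;
    // …while a "blueprint" caller can pre-process them (e.g. re-key ids) first.
    let prepared = vec!["blueprint msg".to_owned()];
    save_messages("app.rbl", move || Ok(prepared))
}
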
2 changes: 1 addition & 1 deletion crates/re_viewer/src/app_state.rs
@@ -431,7 +431,7 @@ impl AppState {
re_tracing::profile_function!();

self.recording_configs
.retain(|store_id, _| store_hub.contains_recording(store_id));
.retain(|store_id, _| store_hub.contains_store(store_id));
}

/// Returns the blueprint query that should be used for generating the current