Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Introduce versioned EntityPath & refactor mesh/tensor caching #3230

Merged
merged 17 commits into from
Sep 6, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
49 changes: 40 additions & 9 deletions crates/re_arrow_store/src/store_helpers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,35 @@ use crate::{DataStore, LatestAtQuery};

// --- Read ---

/// A [`Component`] versioned with a specific [`RowId`].
///
/// This is not enough to globally, uniquely identify an instance of a component.
/// For that you will need to combine the `InstancePath` that was used to query
/// the versioned component with the returned [`RowId`], therefore creating a
/// `VersionedInstancePath`.
#[derive(Debug, Clone)]
pub struct VersionedComponent<C: Component> {
    /// The id of the store row this component value was read from.
    pub row_id: RowId,

    /// The component value itself.
    pub value: C,
}

/// Build a [`VersionedComponent`] from a `(row_id, value)` pair, the shape in
/// which store queries hand back their results.
impl<C: Component> From<(RowId, C)> for VersionedComponent<C> {
    #[inline]
    fn from(pair: (RowId, C)) -> Self {
        let (row_id, value) = pair;
        Self { row_id, value }
    }
}

/// Deref straight to the wrapped component value, so a
/// [`VersionedComponent<C>`] can be used anywhere a `&C` is expected.
impl<C: Component> std::ops::Deref for VersionedComponent<C> {
    type Target = C;

    // `#[inline]` added for consistency: every other trivial accessor in this
    // file (e.g. the `From` impl above) is marked `#[inline]`, and a one-field
    // deref is the canonical candidate for cross-crate inlining.
    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.value
    }
}

impl DataStore {
/// Get the latest value for a given [`re_types::Component`].
/// Get the latest value for a given [`re_types::Component`] and the associated [`RowId`].
///
/// This assumes that the row we get from the store only contains a single instance for this
/// component; it will log a warning otherwise.
Expand All @@ -19,10 +46,10 @@ impl DataStore {
&self,
entity_path: &EntityPath,
query: &LatestAtQuery,
) -> Option<C> {
) -> Option<VersionedComponent<C>> {
re_tracing::profile_function!();

let (_, cells) = self.latest_at(query, entity_path, C::name(), &[C::name()])?;
let (row_id, cells) = self.latest_at(query, entity_path, C::name(), &[C::name()])?;
let cell = cells.get(0)?.as_ref()?;

cell.try_to_native_mono::<C>()
Expand Down Expand Up @@ -61,35 +88,39 @@ impl DataStore {
err
})
.ok()?
.map(|c| (row_id, c).into())
}

/// Call `query_latest_component` at the given path, walking up the hierarchy until an instance is found.
/// Call [`Self::query_latest_component`] at the given path, walking up the hierarchy until an instance is found.
pub fn query_latest_component_at_closest_ancestor<C: Component>(
&self,
entity_path: &EntityPath,
query: &LatestAtQuery,
) -> Option<(EntityPath, C)> {
) -> Option<(EntityPath, VersionedComponent<C>)> {
re_tracing::profile_function!();

let mut cur_path = Some(entity_path.clone());
while let Some(path) = cur_path {
if let Some(component) = self.query_latest_component::<C>(&path, query) {
return Some((path, component));
if let Some(c) = self.query_latest_component::<C>(&path, query) {
return Some((path, c));
}
cur_path = path.parent();
}
None
}

/// Get the latest value for a given [`re_types::Component`], assuming it is timeless.
/// Get the latest value for a given [`re_types::Component`] and the associated [`RowId`], assuming it is timeless.
///
/// This assumes that the row we get from the store only contains a single instance for this
/// component; it will log a warning otherwise.
///
/// This should only be used for "mono-components" such as `Transform` and `Tensor`.
///
/// This is a best-effort helper, it will merely log errors on failure.
pub fn query_timeless_component<C: Component>(&self, entity_path: &EntityPath) -> Option<C> {
pub fn query_timeless_component<C: Component>(
&self,
entity_path: &EntityPath,
) -> Option<VersionedComponent<C>> {
re_tracing::profile_function!();

let query = LatestAtQuery::latest(Timeline::default());
Expand Down
3 changes: 1 addition & 2 deletions crates/re_components/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,14 +47,13 @@ pub use self::{
bbox::Box3D,
coordinates::ViewCoordinates,
mat::LegacyMat3x3,
mesh3d::{EncodedMesh3D, Mesh3D, MeshFormat, MeshId, RawMesh3D},
mesh3d::{EncodedMesh3D, Mesh3D, MeshFormat, RawMesh3D},
pinhole::Pinhole,
quaternion::Quaternion,
rect::Rect2D,
scalar::{Scalar, ScalarPlotProps},
tensor::{
DecodedTensor, Tensor, TensorCastError, TensorData, TensorDataMeaning, TensorDimension,
TensorId,
},
tensor_data::{TensorDataType, TensorDataTypeTrait, TensorElement},
text_box::TextBox,
Expand Down
1 change: 0 additions & 1 deletion crates/re_components/src/load_file.rs
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,6 @@ pub fn data_cell_from_mesh_file_contents(
format: crate::MeshFormat,
) -> Result<DataCell, FromFileError> {
let mesh = crate::EncodedMesh3D {
mesh_id: crate::MeshId::random(),
format,
bytes: bytes.into(),
transform: [
Expand Down
90 changes: 0 additions & 90 deletions crates/re_components/src/mesh3d.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
use arrow2::array::{FixedSizeBinaryArray, MutableFixedSizeBinaryArray};
use arrow2::buffer::Buffer;
use arrow2::datatypes::DataType;
use arrow2_convert::arrow_enable_vec_for_type;
Expand All @@ -10,71 +9,6 @@ use super::{FieldError, LegacyVec4D};

// ----------------------------------------------------------------------------

/// A unique id per [`Mesh3D`].
///
/// Newtype over a `uuid::Uuid`; fresh ids are minted via [`MeshId::random`].
///
/// TODO(emilk): this should be a hash of the mesh (CAS).
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
pub struct MeshId(pub uuid::Uuid);

impl nohash_hasher::IsEnabled for MeshId {}

// required for [`nohash_hasher`]: it demands a single `write_u64` call rather
// than the byte-wise hashing a derived impl would produce.
#[allow(clippy::derived_hash_with_manual_eq)]
impl std::hash::Hash for MeshId {
    #[inline]
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // `as u64` truncates the 128-bit uuid to its low 64 bits; since ids
        // come from `MeshId::random` (v4 uuids), those bits still carry enough
        // entropy to make collisions negligible.
        state.write_u64(self.0.as_u128() as u64);
    }
}

impl MeshId {
#[inline]
pub fn random() -> Self {
Self(uuid::Uuid::new_v4())
}
}

impl ArrowField for MeshId {
    type Type = Self;

    #[inline]
    fn data_type() -> arrow2::datatypes::DataType {
        // A uuid is exactly 16 bytes, so it round-trips as fixed-size binary.
        arrow2::datatypes::DataType::FixedSizeBinary(16)
    }
}

impl ArrowSerialize for MeshId {
    type MutableArrayType = MutableFixedSizeBinaryArray;

    #[inline]
    fn new_array() -> Self::MutableArrayType {
        // Width must match the `FixedSizeBinary(16)` declared in `ArrowField`.
        MutableFixedSizeBinaryArray::new(16)
    }

    #[inline]
    fn arrow_serialize(
        v: &<Self as arrow2_convert::field::ArrowField>::Type,
        array: &mut Self::MutableArrayType,
    ) -> arrow2::error::Result<()> {
        // Push the uuid's raw 16 bytes as a single non-null binary value.
        array.try_push(Some(v.0.as_bytes()))
    }
}

impl ArrowDeserialize for MeshId {
    type ArrayType = FixedSizeBinaryArray;

    /// Read one fixed-size binary value back into a [`MeshId`].
    ///
    /// Yields `None` for null entries, or for byte runs that
    /// `uuid::Uuid::from_slice` rejects.
    fn arrow_deserialize(
        v: <&Self::ArrayType as IntoIterator>::Item,
    ) -> Option<<Self as ArrowField>::Type> {
        let bytes = v?;
        let uuid = uuid::Uuid::from_slice(bytes).ok()?;
        Some(Self(uuid))
    }
}

// ----------------------------------------------------------------------------

// TODO(cmc): Let's make both mesh Component types use friendlier types for their inner elements
// (e.g. positions should be a vec of Vec3D, transform should be a Mat4, etc).
// This will also make error checking for invalid user data much nicer.
Expand Down Expand Up @@ -113,7 +47,6 @@ pub enum RawMeshError {
/// assert_eq!(
/// RawMesh3D::data_type(),
/// DataType::Struct(vec![
/// Field::new("mesh_id", DataType::FixedSizeBinary(16), false),
/// Field::new("vertex_positions", DataType::List(Box::new(
/// Field::new("item", DataType::Float32, false)),
/// ), false),
Expand All @@ -135,8 +68,6 @@ pub enum RawMeshError {
/// ```
#[derive(ArrowField, ArrowSerialize, ArrowDeserialize, Clone, Debug, PartialEq)]
pub struct RawMesh3D {
pub mesh_id: MeshId,

/// The flattened vertex positions array of this mesh.
///
/// The length of this vector should always be divisible by three (since this is a 3D mesh).
Expand Down Expand Up @@ -239,7 +170,6 @@ impl RawMesh3D {
/// assert_eq!(
/// EncodedMesh3D::data_type(),
/// DataType::Struct(vec![
/// Field::new("mesh_id", DataType::FixedSizeBinary(16), false),
/// Field::new("format", DataType::Union(vec![
/// Field::new("Gltf", DataType::Boolean, false),
/// Field::new("Glb", DataType::Boolean, false),
Expand All @@ -255,8 +185,6 @@ impl RawMesh3D {
/// ```
#[derive(Clone, Debug, PartialEq)]
pub struct EncodedMesh3D {
pub mesh_id: MeshId,

pub format: MeshFormat,

pub bytes: Buffer<u8>,
Expand All @@ -268,8 +196,6 @@ pub struct EncodedMesh3D {
/// Helper struct for converting `EncodedMesh3D` to arrow
#[derive(ArrowField, ArrowSerialize, ArrowDeserialize)]
pub struct EncodedMesh3DArrow {
pub mesh_id: MeshId,

pub format: MeshFormat,

pub bytes: Buffer<u8>,
Expand All @@ -281,13 +207,11 @@ pub struct EncodedMesh3DArrow {
impl From<&EncodedMesh3D> for EncodedMesh3DArrow {
fn from(v: &EncodedMesh3D) -> Self {
let EncodedMesh3D {
mesh_id,
format,
bytes,
transform,
} = v;
Self {
mesh_id: *mesh_id,
format: *format,
bytes: bytes.clone(),
transform: transform.iter().flat_map(|c| c.iter().cloned()).collect(),
Expand All @@ -300,14 +224,12 @@ impl TryFrom<EncodedMesh3DArrow> for EncodedMesh3D {

fn try_from(v: EncodedMesh3DArrow) -> super::Result<Self> {
let EncodedMesh3DArrow {
mesh_id,
format,
bytes,
transform,
} = v;

Ok(Self {
mesh_id,
format,
bytes,
transform: [
Expand Down Expand Up @@ -414,23 +336,12 @@ impl re_log_types::LegacyComponent for Mesh3D {
}
}

impl Mesh3D {
#[inline]
pub fn mesh_id(&self) -> MeshId {
match self {
Mesh3D::Encoded(mesh) => mesh.mesh_id,
Mesh3D::Raw(mesh) => mesh.mesh_id,
}
}
}

#[cfg(test)]
mod tests {
use super::*;

fn example_raw_mesh() -> RawMesh3D {
let mesh = RawMesh3D {
mesh_id: MeshId::random(),
vertex_positions: vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 8.0, 9.0, 10.0].into(),
vertex_colors: Some(vec![0xff0000ff, 0x00ff00ff, 0x0000ffff].into()),
indices: Some(vec![0, 1, 2].into()),
Expand All @@ -451,7 +362,6 @@ mod tests {
// Encoded
{
let mesh_in = vec![Mesh3D::Encoded(EncodedMesh3D {
mesh_id: MeshId::random(),
format: MeshFormat::Glb,
bytes: vec![5, 9, 13, 95, 38, 42, 98, 17].into(),
transform: [
Expand Down
Loading