diff --git a/crates/matrix-sdk-indexeddb/src/event_cache_store/mod.rs b/crates/matrix-sdk-indexeddb/src/event_cache_store/mod.rs index e098176c5ff..80e1626dfc0 100644 --- a/crates/matrix-sdk-indexeddb/src/event_cache_store/mod.rs +++ b/crates/matrix-sdk-indexeddb/src/event_cache_store/mod.rs @@ -160,8 +160,8 @@ impl_event_cache_store! { trace!(%room_id, "Inserting new chunk (prev={previous:?}, new={new:?}, next={next:?})"); transaction .add_chunk( - room_id, &types::Chunk { + room_id: room_id.to_owned(), identifier: new.index(), previous: previous.map(|i| i.index()), next: next.map(|i| i.index()), @@ -174,8 +174,8 @@ impl_event_cache_store! { trace!(%room_id, "Inserting new gap (prev={previous:?}, new={new:?}, next={next:?})"); transaction .add_item( - room_id, &types::Gap { + room_id: room_id.to_owned(), chunk_identifier: new.index(), prev_token: gap.prev_token, }, @@ -183,8 +183,8 @@ impl_event_cache_store! { .await?; transaction .add_chunk( - room_id, &types::Chunk { + room_id: room_id.to_owned(), identifier: new.index(), previous: previous.map(|i| i.index()), next: next.map(|i| i.index()), @@ -195,7 +195,7 @@ impl_event_cache_store! { } Update::RemoveChunk(chunk_id) => { trace!("Removing chunk {chunk_id:?}"); - transaction.delete_chunk_by_id(room_id, &chunk_id).await?; + transaction.delete_chunk_by_id(room_id, chunk_id).await?; } Update::PushItems { at, items } => { let chunk_identifier = at.chunk_identifier().index(); @@ -204,9 +204,9 @@ impl_event_cache_store! { for (i, item) in items.into_iter().enumerate() { transaction - .put_item( - room_id, + .put_event( &types::Event::InBand(InBandEvent { + room_id: room_id.to_owned(), content: item, position: types::Position { chunk_identifier, @@ -225,8 +225,8 @@ impl_event_cache_store! { transaction .put_event( - room_id, &types::Event::InBand(InBandEvent { + room_id: room_id.to_owned(), content: item, position: at.into(), }), @@ -239,7 +239,7 @@ impl_event_cache_store! { trace!(%room_id, "removing item @ {chunk_id}:{index}"); - transaction.delete_event_by_position(room_id, &at.into()).await?; + transaction.delete_event_by_position(room_id, at.into()).await?; } Update::DetachLastItems { at } => { let chunk_id = at.chunk_identifier().index(); @@ -247,7 +247,7 @@ impl_event_cache_store! { trace!(%room_id, "detaching last items @ {chunk_id}:{index}"); - transaction.delete_events_by_chunk_from_index(room_id, &at.into()).await?; + transaction.delete_events_by_chunk_from_index(room_id, at.into()).await?; } Update::StartReattachItems | Update::EndReattachItems => { // Nothing? See sqlite implementation @@ -283,7 +283,7 @@ impl_event_cache_store! { let chunks = transaction.get_chunks_in_room(room_id).await?; for chunk in chunks { if let Some(raw_chunk) = transaction - .load_chunk_by_id(room_id, &ChunkIdentifier::new(chunk.identifier)) + .load_chunk_by_id(room_id, ChunkIdentifier::new(chunk.identifier)) .await? { raw_chunks.push(raw_chunk); @@ -321,7 +321,7 @@ impl_event_cache_store! { let chunks = transaction.get_chunks_in_room(room_id).await?; for chunk in chunks { let chunk_id = ChunkIdentifier::new(chunk.identifier); - let num_items = transaction.get_events_count_by_chunk(room_id, &chunk_id).await?; + let num_items = transaction.get_events_count_by_chunk(room_id, chunk_id).await?; raw_chunks.push(ChunkMetadata { num_items, previous: chunk.previous.map(ChunkIdentifier::new), @@ -355,7 +355,7 @@ impl_event_cache_store! 
{ // Now that we know we have some chunks in the room, we query IndexedDB // for the last chunk in the room by getting the chunk which does not // have a next chunk. - match transaction.get_chunk_by_next_chunk_id(room_id, &None).await { + match transaction.get_chunk_by_next_chunk_id(room_id, None).await { Err(IndexeddbEventCacheStoreTransactionError::ItemIsNotUnique) => { // If there are multiple chunks that do not have a next chunk, that // means we have more than one last chunk, which means that we have @@ -375,7 +375,7 @@ impl_event_cache_store! { Ok(Some(last_chunk)) => { let last_chunk_identifier = ChunkIdentifier::new(last_chunk.identifier); let last_raw_chunk = transaction - .load_chunk_by_id(room_id, &last_chunk_identifier) + .load_chunk_by_id(room_id, last_chunk_identifier) .await? .ok_or(IndexeddbEventCacheStoreError::UnableToLoadChunk)?; let max_chunk_id = transaction @@ -404,10 +404,10 @@ impl_event_cache_store! { &[keys::LINKED_CHUNKS, keys::EVENTS, keys::GAPS], IdbTransactionMode::Readonly, )?; - if let Some(chunk) = transaction.get_chunk_by_id(room_id, &before_chunk_identifier).await? { + if let Some(chunk) = transaction.get_chunk_by_id(room_id, before_chunk_identifier).await? { if let Some(previous_identifier) = chunk.previous { let previous_identifier = ChunkIdentifier::new(previous_identifier); - return Ok(transaction.load_chunk_by_id(room_id, &previous_identifier).await?); + return Ok(transaction.load_chunk_by_id(room_id, previous_identifier).await?); } } Ok(None) @@ -466,7 +466,7 @@ impl_event_cache_store! { let transaction = self.transaction(&[keys::EVENTS], IdbTransactionMode::Readonly)?; transaction - .get_event_by_id(room_id, &event_id.to_owned()) + .get_event_by_id(room_id, event_id) .await .map(|ok| ok.map(Into::into)) .map_err(Into::into) @@ -488,8 +488,8 @@ impl_event_cache_store! { match filters { Some(relation_types) if !relation_types.is_empty() => { for relation_type in relation_types { - let relation = (event_id.to_owned(), relation_type.clone()); - let events = transaction.get_events_by_relation(room_id, &relation).await?; + let relation = (event_id, relation_type); + let events = transaction.get_events_by_relation(room_id, relation).await?; for event in events { let position = event.position().map(Into::into); related_events.push((event.into(), position)); @@ -498,7 +498,7 @@ impl_event_cache_store! { } _ => { for event in - transaction.get_events_by_related_event(room_id, &event_id.to_owned()).await? + transaction.get_events_by_related_event(room_id, event_id).await? { let position = event.position().map(Into::into); related_events.push((event.into(), position)); @@ -524,9 +524,9 @@ impl_event_cache_store! { self.transaction(&[keys::EVENTS], IdbTransactionMode::Readwrite)?; let event = match transaction.get_event_by_id(room_id, &event_id).await? 
{ Some(mut inner) => inner.with_content(event), - None => types::Event::OutOfBand(OutOfBandEvent { content: event, position: () }), + None => types::Event::OutOfBand(OutOfBandEvent { room_id: room_id.to_owned(), content: event, position: () }), }; - transaction.put_event(room_id, &event).await?; + transaction.put_event(&event).await?; transaction.commit().await?; Ok(()) } diff --git a/crates/matrix-sdk-indexeddb/src/event_cache_store/serializer/mod.rs b/crates/matrix-sdk-indexeddb/src/event_cache_store/serializer/mod.rs index d429578d5f8..78817f6da78 100644 --- a/crates/matrix-sdk-indexeddb/src/event_cache_store/serializer/mod.rs +++ b/crates/matrix-sdk-indexeddb/src/event_cache_store/serializer/mod.rs @@ -71,12 +71,12 @@ impl IndexeddbEventCacheStoreSerializer { /// /// Note that the particular key which is encoded is defined by the type /// `K`. - pub fn encode_key(&self, room_id: &RoomId, components: &K::KeyComponents) -> K + pub fn encode_key(&self, components: K::KeyComponents<'_>) -> K where T: Indexed, K: IndexedKey, { - K::encode(room_id, components, &self.inner) + K::encode(components, &self.inner) } /// Encodes a key for a [`Indexed`] type as a [`JsValue`]. @@ -85,14 +85,13 @@ impl IndexeddbEventCacheStoreSerializer { /// `K`. pub fn encode_key_as_value( &self, - room_id: &RoomId, - components: &K::KeyComponents, + components: K::KeyComponents<'_>, ) -> Result where T: Indexed, K: IndexedKey + Serialize, { - serde_wasm_bindgen::to_value(&self.encode_key::(room_id, components)) + serde_wasm_bindgen::to_value(&self.encode_key::(components)) } /// Encodes a key component range for an [`Indexed`] type. @@ -101,12 +100,11 @@ impl IndexeddbEventCacheStoreSerializer { /// `K`. pub fn encode_key_range( &self, - room_id: &RoomId, range: impl Into>, ) -> Result where T: Indexed, - K: IndexedKeyBounds + Serialize, + K: Serialize, { use serde_wasm_bindgen::to_value; Ok(match range.into() { @@ -114,11 +112,6 @@ impl IndexeddbEventCacheStoreSerializer { IndexedKeyRange::Bound(lower, upper) => { IdbKeyRange::bound(&to_value(&lower)?, &to_value(&upper)?)? } - IndexedKeyRange::All => { - let lower = to_value(&K::lower_key(room_id, &self.inner))?; - let upper = to_value(&K::upper_key(room_id, &self.inner))?; - IdbKeyRange::bound(&lower, &upper).expect("construct key range") - } }) } @@ -128,45 +121,36 @@ impl IndexeddbEventCacheStoreSerializer { /// `K`. 
pub fn encode_key_component_range<'a, T, K>( &self, - room_id: &RoomId, - range: impl Into>, + range: impl Into>>, ) -> Result where T: Indexed, - K: IndexedKeyComponentBounds + Serialize, - K::KeyComponents: 'a, + K: IndexedKey + Serialize, { let range = match range.into() { IndexedKeyRange::Only(components) => { - IndexedKeyRange::Only(K::encode(room_id, components, &self.inner)) + IndexedKeyRange::Only(K::encode(components, &self.inner)) } IndexedKeyRange::Bound(lower, upper) => { - let lower = K::encode(room_id, lower, &self.inner); - let upper = K::encode(room_id, upper, &self.inner); - IndexedKeyRange::Bound(lower, upper) - } - IndexedKeyRange::All => { - let lower = K::lower_key(room_id, &self.inner); - let upper = K::upper_key(room_id, &self.inner); + let lower = K::encode(lower, &self.inner); + let upper = K::encode(upper, &self.inner); IndexedKeyRange::Bound(lower, upper) } }; - self.encode_key_range::(room_id, range) + self.encode_key_range::(range) } /// Serializes an [`Indexed`] type into a [`JsValue`] pub fn serialize( &self, - room_id: &RoomId, t: &T, ) -> Result> where T: Indexed, T::IndexedType: Serialize, { - let indexed = t - .to_indexed(room_id, &self.inner) - .map_err(IndexeddbEventCacheStoreSerializerError::Indexing)?; + let indexed = + t.to_indexed(&self.inner).map_err(IndexeddbEventCacheStoreSerializerError::Indexing)?; serde_wasm_bindgen::to_value(&indexed).map_err(Into::into) } diff --git a/crates/matrix-sdk-indexeddb/src/event_cache_store/serializer/traits.rs b/crates/matrix-sdk-indexeddb/src/event_cache_store/serializer/traits.rs index 7c4182aebca..e1db633ed5a 100644 --- a/crates/matrix-sdk-indexeddb/src/event_cache_store/serializer/traits.rs +++ b/crates/matrix-sdk-indexeddb/src/event_cache_store/serializer/traits.rs @@ -35,7 +35,6 @@ pub trait Indexed: Sized { /// Converts the high-level type into an indexed type. fn to_indexed( &self, - room_id: &RoomId, serializer: &IndexeddbSerializer, ) -> Result; @@ -54,7 +53,7 @@ pub trait IndexedKey { const INDEX: Option<&'static str> = None; /// Any extra data used to construct the key. - type KeyComponents; + type KeyComponents<'a>; /// Encodes the key components into a type that can be used as a key in /// IndexedDB. @@ -63,11 +62,7 @@ pub trait IndexedKey { /// argument, which provides the necessary context for encryption and /// decryption, in the case that certain components of the key must be /// encrypted before storage. - fn encode( - room_id: &RoomId, - components: &Self::KeyComponents, - serializer: &IndexeddbSerializer, - ) -> Self; + fn encode(components: Self::KeyComponents<'_>, serializer: &IndexeddbSerializer) -> Self; } /// A trait for constructing the bounds of an [`IndexedKey`]. @@ -91,10 +86,10 @@ pub trait IndexedKey { /// the proper bound. pub trait IndexedKeyBounds: IndexedKey { /// Constructs the lower bound of the key. - fn lower_key(room_id: &RoomId, serializer: &IndexeddbSerializer) -> Self; + fn lower_key(serializer: &IndexeddbSerializer) -> Self; /// Constructs the upper bound of the key. - fn upper_key(room_id: &RoomId, serializer: &IndexeddbSerializer) -> Self; + fn upper_key(serializer: &IndexeddbSerializer) -> Self; } impl IndexedKeyBounds for K @@ -103,13 +98,13 @@ where K: IndexedKeyComponentBounds + Sized, { /// Constructs the lower bound of the key. 
- fn lower_key(room_id: &RoomId, serializer: &IndexeddbSerializer) -> Self { - >::encode(room_id, &Self::lower_key_components(), serializer) + fn lower_key(serializer: &IndexeddbSerializer) -> Self { + >::encode(Self::lower_key_components(), serializer) } /// Constructs the upper bound of the key. - fn upper_key(room_id: &RoomId, serializer: &IndexeddbSerializer) -> Self { - >::encode(room_id, &Self::upper_key_components(), serializer) + fn upper_key(serializer: &IndexeddbSerializer) -> Self { + >::encode(Self::upper_key_components(), serializer) } } @@ -123,8 +118,72 @@ where /// get a better overview of how these two interact. pub trait IndexedKeyComponentBounds: IndexedKeyBounds { /// Constructs the lower bound of the key components. - fn lower_key_components() -> Self::KeyComponents; + fn lower_key_components() -> Self::KeyComponents<'static>; /// Constructs the upper bound of the key components. - fn upper_key_components() -> Self::KeyComponents; + fn upper_key_components() -> Self::KeyComponents<'static>; +} + +/// A trait for constructing the bounds of an [`IndexedKey`] given a prefix `P` +/// of that key. +/// +/// The key bounds should be constructed by keeping the prefix constant while +/// the remaining components of the key are set to their lower and upper limits. +/// +/// This is useful when constructing prefixed range queries in IndexedDB. +/// +/// Note that the [`IndexedPrefixKeyComponentBounds`] helps to specify the upper +/// and lower bounds of the components that are used to create the final key, +/// while the `IndexedPrefixKeyBounds` are the upper and lower bounds of the +/// final key itself. +/// +/// For details on the differences between key bounds and key component bounds, +/// see the documentation on [`IndexedKeyBounds`]. +pub trait IndexedPrefixKeyBounds: IndexedKey { + /// Constructs the lower bound of the key while maintaining a constant + /// prefix. + fn lower_key_with_prefix(prefix: P, serializer: &IndexeddbSerializer) -> Self; + + /// Constructs the upper bound of the key while maintaining a constant + /// prefix. + fn upper_key_with_prefix(prefix: P, serializer: &IndexeddbSerializer) -> Self; +} + +impl<'a, T, K, P> IndexedPrefixKeyBounds for K +where + T: Indexed, + K: IndexedPrefixKeyComponentBounds<'a, T, P> + Sized, + P: 'a, +{ + fn lower_key_with_prefix(prefix: P, serializer: &IndexeddbSerializer) -> Self { + >::encode(Self::lower_key_components_with_prefix(prefix), serializer) + } + + fn upper_key_with_prefix(prefix: P, serializer: &IndexeddbSerializer) -> Self { + >::encode(Self::upper_key_components_with_prefix(prefix), serializer) + } +} + +/// A trait for constructing the bounds of the components of an [`IndexedKey`] +/// given a prefix `P` of that key. +/// +/// The key component bounds should be constructed by keeping the prefix +/// constant while the remaining components of the key are set to their lower +/// and upper limits. +/// +/// This is useful when constructing range queries in IndexedDB. +/// +/// Note that this trait should not be implemented for key components that are +/// going to be encrypted as ordering properties will not be preserved. +/// +/// One may be interested to read the documentation of [`IndexedKeyBounds`] to +/// get a better overview of how these two interact. +pub trait IndexedPrefixKeyComponentBounds<'a, T: Indexed, P: 'a>: IndexedKey { + /// Constructs the lower bound of the key components while maintaining a + /// constant prefix. 
+ fn lower_key_components_with_prefix(prefix: P) -> Self::KeyComponents<'a>; + + /// Constructs the upper bound of the key components while maintaining a + /// constant prefix. + fn upper_key_components_with_prefix(prefix: P) -> Self::KeyComponents<'a>; } diff --git a/crates/matrix-sdk-indexeddb/src/event_cache_store/serializer/types.rs b/crates/matrix-sdk-indexeddb/src/event_cache_store/serializer/types.rs index e2a4f8c4090..21b73c13c20 100644 --- a/crates/matrix-sdk-indexeddb/src/event_cache_store/serializer/types.rs +++ b/crates/matrix-sdk-indexeddb/src/event_cache_store/serializer/types.rs @@ -27,6 +27,8 @@ //! These types mimic the structure of the object stores and indices created in //! [`crate::event_cache_store::migrations`]. +use std::sync::LazyLock; + use matrix_sdk_base::linked_chunk::ChunkIdentifier; use matrix_sdk_crypto::CryptoStoreError; use ruma::{events::relation::RelationType, EventId, OwnedEventId, RoomId}; @@ -36,7 +38,10 @@ use thiserror::Error; use crate::{ event_cache_store::{ migrations::current::keys, - serializer::traits::{Indexed, IndexedKey, IndexedKeyBounds, IndexedKeyComponentBounds}, + serializer::traits::{ + Indexed, IndexedKey, IndexedKeyBounds, IndexedKeyComponentBounds, + IndexedPrefixKeyBounds, IndexedPrefixKeyComponentBounds, + }, types::{Chunk, Event, Gap, Position}, }, serializer::{IndexeddbSerializer, MaybeEncrypted}, @@ -60,16 +65,84 @@ const INDEXED_KEY_LOWER_CHARACTER: char = '\u{0000}'; /// [1]: https://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane const INDEXED_KEY_UPPER_CHARACTER: char = '\u{FFFF}'; +/// A [`ChunkIdentifier`] constructed with `0`. +/// +/// This value is useful for constructing a key range over all keys which +/// contain [`ChunkIdentifier`]s when used in conjunction with +/// [`INDEXED_KEY_UPPER_CHUNK_IDENTIFIER`]. +static INDEXED_KEY_LOWER_CHUNK_IDENTIFIER: LazyLock = + LazyLock::new(|| ChunkIdentifier::new(0)); + +/// A [`ChunkIdentifier`] constructed with [`js_sys::Number::MAX_SAFE_INTEGER`]. +/// +/// This value is useful for constructing a key range over all keys which +/// contain [`ChunkIdentifier`]s when used in conjunction with +/// [`INDEXED_KEY_LOWER_CHUNK_IDENTIFIER`]. +static INDEXED_KEY_UPPER_CHUNK_IDENTIFIER: LazyLock = + LazyLock::new(|| ChunkIdentifier::new(js_sys::Number::MAX_SAFE_INTEGER as u64)); + +/// An [`OwnedEventId`] constructed with [`INDEXED_KEY_LOWER_CHARACTER`]. +/// +/// This value is useful for constructing a key range over all keys which +/// contain [`EventId`]s when used in conjunction with +/// [`INDEXED_KEY_UPPER_EVENT_ID`]. +static INDEXED_KEY_LOWER_EVENT_ID: LazyLock = LazyLock::new(|| { + OwnedEventId::try_from(format!("${INDEXED_KEY_LOWER_CHARACTER}")).expect("valid event id") +}); + +/// An [`OwnedEventId`] constructed with [`INDEXED_KEY_UPPER_CHARACTER`]. +/// +/// This value is useful for constructing a key range over all keys which +/// contain [`EventId`]s when used in conjunction with +/// [`INDEXED_KEY_LOWER_EVENT_ID`]. +static INDEXED_KEY_UPPER_EVENT_ID: LazyLock = LazyLock::new(|| { + OwnedEventId::try_from(format!("${INDEXED_KEY_UPPER_CHARACTER}")).expect("valid event id") +}); + +/// The lowest possible index that can be used to reference an [`Event`] inside +/// a [`Chunk`] - i.e., `0`. +/// +/// This value is useful for constructing a key range over all keys which +/// contain [`Position`]s when used in conjunction with +/// [`INDEXED_KEY_UPPER_EVENT_INDEX`]. 
+const INDEXED_KEY_LOWER_EVENT_INDEX: usize = 0;
+
+/// The highest possible index that can be used to reference an [`Event`] inside
+/// a [`Chunk`] - i.e., [`js_sys::Number::MAX_SAFE_INTEGER`].
+///
+/// This value is useful for constructing a key range over all keys which
+/// contain [`Position`]s when used in conjunction with
+/// [`INDEXED_KEY_LOWER_EVENT_INDEX`].
+const INDEXED_KEY_UPPER_EVENT_INDEX: usize = js_sys::Number::MAX_SAFE_INTEGER as usize;
+
+/// The lowest possible [`Position`] that can be used to reference an [`Event`].
+///
+/// This value is useful for constructing a key range over all keys which
+/// contain [`Position`]s when used in conjunction with
+/// [`INDEXED_KEY_UPPER_EVENT_POSITION`].
+static INDEXED_KEY_LOWER_EVENT_POSITION: LazyLock<Position> = LazyLock::new(|| Position {
+    chunk_identifier: INDEXED_KEY_LOWER_CHUNK_IDENTIFIER.index(),
+    index: INDEXED_KEY_LOWER_EVENT_INDEX,
+});
+
+/// The highest possible [`Position`] that can be used to reference an
+/// [`Event`].
+///
+/// This value is useful for constructing a key range over all keys which
+/// contain [`Position`]s when used in conjunction with
+/// [`INDEXED_KEY_LOWER_EVENT_POSITION`].
+static INDEXED_KEY_UPPER_EVENT_POSITION: LazyLock<Position> = LazyLock::new(|| Position {
+    chunk_identifier: INDEXED_KEY_UPPER_CHUNK_IDENTIFIER.index(),
+    index: INDEXED_KEY_UPPER_EVENT_INDEX,
+});
+
 /// Representation of a range of keys of type `K`. This is loosely
 /// correlated with [IDBKeyRange][1], with a few differences.
 ///
-/// Firstly, this enum only provides a single way to express a bounded range
+/// Namely, this enum only provides a single way to express a bounded range
 /// which is always inclusive on both bounds. While all ranges can still be
 /// represented, [`IDBKeyRange`][1] provides more flexibility in this regard.
 ///
-/// Secondly, this enum provides a way to express the range of all keys
-/// of type `K`.
-///
 /// [1]: https://developer.mozilla.org/en-US/docs/Web/API/IDBKeyRange
 #[derive(Debug, Copy, Clone)]
 pub enum IndexedKeyRange<K> {
@@ -87,35 +160,57 @@ pub enum IndexedKeyRange<K> {
     ///
     /// [1]: https://developer.mozilla.org/en-US/docs/Web/API/IDBKeyRange/bound
     Bound(K, K),
-    /// Represents an inclusive range of all keys of type `K`.
-    All,
 }

-impl IndexedKeyRange<&C> {
+impl<'a, C: 'a> IndexedKeyRange<C> {
     /// Encodes a range of key components of type `K::KeyComponents`
     /// into a range of keys of type `K`.
- pub fn encoded( - &self, - room_id: &RoomId, - serializer: &IndexeddbSerializer, - ) -> IndexedKeyRange + pub fn encoded(self, serializer: &IndexeddbSerializer) -> IndexedKeyRange where T: Indexed, - K: IndexedKey, + K: IndexedKey = C>, { match self { - Self::Only(components) => { - IndexedKeyRange::Only(K::encode(room_id, components, serializer)) + Self::Only(components) => IndexedKeyRange::Only(K::encode(components, serializer)), + Self::Bound(lower, upper) => { + IndexedKeyRange::Bound(K::encode(lower, serializer), K::encode(upper, serializer)) } - Self::Bound(lower, upper) => IndexedKeyRange::Bound( - K::encode(room_id, lower, serializer), - K::encode(room_id, upper, serializer), - ), - Self::All => IndexedKeyRange::All, } } } +impl IndexedKeyRange { + pub fn map(self, f: F) -> IndexedKeyRange + where + F: Fn(K) -> T, + { + match self { + IndexedKeyRange::Only(key) => IndexedKeyRange::Only(f(key)), + IndexedKeyRange::Bound(lower, upper) => IndexedKeyRange::Bound(f(lower), f(upper)), + } + } + + pub fn all(serializer: &IndexeddbSerializer) -> IndexedKeyRange + where + T: Indexed, + K: IndexedKeyBounds, + { + IndexedKeyRange::Bound(K::lower_key(serializer), K::upper_key(serializer)) + } + + pub fn all_with_prefix(prefix: P, serializer: &IndexeddbSerializer) -> IndexedKeyRange + where + T: Indexed, + K: IndexedPrefixKeyBounds, + P: Clone, + { + IndexedKeyRange::Bound( + K::lower_key_with_prefix(prefix.clone(), serializer), + K::upper_key_with_prefix(prefix, serializer), + ) + } +} + impl From<(K, K)> for IndexedKeyRange { fn from(value: (K, K)) -> Self { Self::Bound(value.0, value.1) @@ -151,18 +246,15 @@ impl Indexed for Chunk { fn to_indexed( &self, - room_id: &RoomId, serializer: &IndexeddbSerializer, ) -> Result { Ok(IndexedChunk { id: >::encode( - room_id, - &ChunkIdentifier::new(self.identifier), + (&self.room_id, ChunkIdentifier::new(self.identifier)), serializer, ), next: IndexedNextChunkIdKey::encode( - room_id, - &self.next.map(ChunkIdentifier::new), + (&self.room_id, self.next.map(ChunkIdentifier::new)), serializer, ), content: serializer.maybe_encrypt_value(self)?, @@ -188,11 +280,10 @@ impl Indexed for Chunk { pub struct IndexedChunkIdKey(IndexedRoomId, IndexedChunkId); impl IndexedKey for IndexedChunkIdKey { - type KeyComponents = ChunkIdentifier; + type KeyComponents<'a> = (&'a RoomId, ChunkIdentifier); fn encode( - room_id: &RoomId, - chunk_id: &ChunkIdentifier, + (room_id, chunk_id): Self::KeyComponents<'_>, serializer: &IndexeddbSerializer, ) -> Self { let room_id = serializer.encode_key_as_string(keys::ROOMS, room_id); @@ -201,13 +292,13 @@ impl IndexedKey for IndexedChunkIdKey { } } -impl IndexedKeyComponentBounds for IndexedChunkIdKey { - fn lower_key_components() -> Self::KeyComponents { - ChunkIdentifier::new(0) +impl<'a> IndexedPrefixKeyComponentBounds<'a, Chunk, &'a RoomId> for IndexedChunkIdKey { + fn lower_key_components_with_prefix(room_id: &'a RoomId) -> Self::KeyComponents<'a> { + (room_id, *INDEXED_KEY_LOWER_CHUNK_IDENTIFIER) } - fn upper_key_components() -> Self::KeyComponents { - ChunkIdentifier::new(js_sys::Number::MAX_SAFE_INTEGER as u64) + fn upper_key_components_with_prefix(room_id: &'a RoomId) -> Self::KeyComponents<'a> { + (room_id, *INDEXED_KEY_UPPER_CHUNK_IDENTIFIER) } } @@ -248,17 +339,17 @@ impl IndexedNextChunkIdKey { impl IndexedKey for IndexedNextChunkIdKey { const INDEX: Option<&'static str> = Some(keys::LINKED_CHUNKS_NEXT); - type KeyComponents = Option; + type KeyComponents<'a> = (&'a RoomId, Option); fn encode( - room_id: &RoomId, - 
next_chunk_id: &Option, + (room_id, next_chunk_id): Self::KeyComponents<'_>, serializer: &IndexeddbSerializer, ) -> Self { next_chunk_id .map(|id| { Self::Some(>::encode( - room_id, &id, serializer, + (room_id, id), + serializer, )) }) .unwrap_or_else(|| { @@ -268,13 +359,13 @@ impl IndexedKey for IndexedNextChunkIdKey { } } -impl IndexedKeyComponentBounds for IndexedNextChunkIdKey { - fn lower_key_components() -> Self::KeyComponents { - None +impl<'a> IndexedPrefixKeyComponentBounds<'a, Chunk, &'a RoomId> for IndexedNextChunkIdKey { + fn lower_key_components_with_prefix(room_id: &'a RoomId) -> Self::KeyComponents<'a> { + (room_id, None) } - fn upper_key_components() -> Self::KeyComponents { - Some(ChunkIdentifier::new(js_sys::Number::MAX_SAFE_INTEGER as u64)) + fn upper_key_components_with_prefix(room_id: &'a RoomId) -> Self::KeyComponents<'a> { + (room_id, Some(*INDEXED_KEY_UPPER_CHUNK_IDENTIFIER)) } } @@ -311,18 +402,16 @@ impl Indexed for Event { fn to_indexed( &self, - room_id: &RoomId, serializer: &IndexeddbSerializer, ) -> Result { let event_id = self.event_id().ok_or(Self::Error::NoEventId)?; - let id = IndexedEventIdKey::encode(room_id, &event_id, serializer); - let position = self - .position() - .map(|position| IndexedEventPositionKey::encode(room_id, &position, serializer)); + let id = IndexedEventIdKey::encode((self.room_id(), &event_id), serializer); + let position = self.position().map(|position| { + IndexedEventPositionKey::encode((self.room_id(), position), serializer) + }); let relation = self.relation().map(|(related_event, relation_type)| { IndexedEventRelationKey::encode( - room_id, - &(related_event, RelationType::from(relation_type)), + (self.room_id(), &related_event, &RelationType::from(relation_type)), serializer, ) }); @@ -348,22 +437,22 @@ impl Indexed for Event { pub struct IndexedEventIdKey(IndexedRoomId, IndexedEventId); impl IndexedKey for IndexedEventIdKey { - type KeyComponents = OwnedEventId; + type KeyComponents<'a> = (&'a RoomId, &'a EventId); - fn encode(room_id: &RoomId, event_id: &OwnedEventId, serializer: &IndexeddbSerializer) -> Self { + fn encode((room_id, event_id): (&RoomId, &EventId), serializer: &IndexeddbSerializer) -> Self { let room_id = serializer.encode_key_as_string(keys::ROOMS, room_id); let event_id = serializer.encode_key_as_string(keys::EVENTS, event_id); Self(room_id, event_id) } } -impl IndexedKeyComponentBounds for IndexedEventIdKey { - fn lower_key_components() -> Self::KeyComponents { - OwnedEventId::try_from(format!("${INDEXED_KEY_LOWER_CHARACTER}")).expect("valid event id") +impl IndexedPrefixKeyBounds for IndexedEventIdKey { + fn lower_key_with_prefix(room_id: &RoomId, serializer: &IndexeddbSerializer) -> Self { + Self::encode((room_id, &*INDEXED_KEY_LOWER_EVENT_ID), serializer) } - fn upper_key_components() -> Self::KeyComponents { - OwnedEventId::try_from(format!("${INDEXED_KEY_UPPER_CHARACTER}")).expect("valid event id") + fn upper_key_with_prefix(room_id: &RoomId, serializer: &IndexeddbSerializer) -> Self { + Self::encode((room_id, &*INDEXED_KEY_UPPER_EVENT_ID), serializer) } } @@ -383,24 +472,46 @@ pub struct IndexedEventPositionKey(IndexedRoomId, IndexedChunkId, IndexedEventPo impl IndexedKey for IndexedEventPositionKey { const INDEX: Option<&'static str> = Some(keys::EVENTS_POSITION); - type KeyComponents = Position; + type KeyComponents<'a> = (&'a RoomId, Position); - fn encode(room_id: &RoomId, position: &Position, serializer: &IndexeddbSerializer) -> Self { + fn encode( + (room_id, position): 
Self::KeyComponents<'_>, + serializer: &IndexeddbSerializer, + ) -> Self { let room_id = serializer.encode_key_as_string(keys::ROOMS, room_id); Self(room_id, position.chunk_identifier, position.index) } } -impl IndexedKeyComponentBounds for IndexedEventPositionKey { - fn lower_key_components() -> Self::KeyComponents { - Position { chunk_identifier: 0, index: 0 } +impl<'a> IndexedPrefixKeyComponentBounds<'a, Event, &'a RoomId> for IndexedEventPositionKey { + fn lower_key_components_with_prefix(room_id: &'a RoomId) -> Self::KeyComponents<'a> { + (room_id, *INDEXED_KEY_LOWER_EVENT_POSITION) } - fn upper_key_components() -> Self::KeyComponents { - Position { - chunk_identifier: js_sys::Number::MAX_SAFE_INTEGER as u64, - index: js_sys::Number::MAX_SAFE_INTEGER as usize, - } + fn upper_key_components_with_prefix(room_id: &'a RoomId) -> Self::KeyComponents<'a> { + (room_id, *INDEXED_KEY_UPPER_EVENT_POSITION) + } +} + +impl<'a> IndexedPrefixKeyComponentBounds<'a, Event, (&'a RoomId, ChunkIdentifier)> + for IndexedEventPositionKey +{ + fn lower_key_components_with_prefix( + (room_id, chunk_id): (&'a RoomId, ChunkIdentifier), + ) -> Self::KeyComponents<'a> { + ( + room_id, + Position { chunk_identifier: chunk_id.index(), index: INDEXED_KEY_LOWER_EVENT_INDEX }, + ) + } + + fn upper_key_components_with_prefix( + (room_id, chunk_id): (&'a RoomId, ChunkIdentifier), + ) -> Self::KeyComponents<'a> { + ( + room_id, + Position { chunk_identifier: chunk_id.index(), index: INDEXED_KEY_UPPER_EVENT_INDEX }, + ) } } @@ -417,31 +528,13 @@ pub type IndexedEventPositionIndex = usize; #[derive(Debug, Serialize, Deserialize)] pub struct IndexedEventRelationKey(IndexedRoomId, IndexedEventId, IndexedRelationType); -impl IndexedEventRelationKey { - /// Returns an identical key, but with the related event field updated to - /// the given related event. This is helpful when searching for all - /// events which are related to the given event. 
- pub fn with_related_event_id( - &self, - related_event_id: &OwnedEventId, - serializer: &IndexeddbSerializer, - ) -> Self { - let room_id = self.0.clone(); - let related_event_id = - serializer.encode_key_as_string(keys::EVENTS_RELATION_RELATED_EVENTS, related_event_id); - let relation_type = self.2.clone(); - Self(room_id, related_event_id, relation_type) - } -} - impl IndexedKey for IndexedEventRelationKey { const INDEX: Option<&'static str> = Some(keys::EVENTS_RELATION); - type KeyComponents = (OwnedEventId, RelationType); + type KeyComponents<'a> = (&'a RoomId, &'a EventId, &'a RelationType); fn encode( - room_id: &RoomId, - (related_event_id, relation_type): &(OwnedEventId, RelationType), + (room_id, related_event_id, relation_type): Self::KeyComponents<'_>, serializer: &IndexeddbSerializer, ) -> Self { let room_id = serializer.encode_key_as_string(keys::ROOMS, room_id); @@ -453,15 +546,15 @@ impl IndexedKey for IndexedEventRelationKey { } } -impl IndexedKeyBounds for IndexedEventRelationKey { - fn lower_key(room_id: &RoomId, serializer: &IndexeddbSerializer) -> Self { +impl IndexedPrefixKeyBounds for IndexedEventRelationKey { + fn lower_key_with_prefix(room_id: &RoomId, serializer: &IndexeddbSerializer) -> Self { let room_id = serializer.encode_key_as_string(keys::ROOMS, room_id); let related_event_id = String::from(INDEXED_KEY_LOWER_CHARACTER); let relation_type = String::from(INDEXED_KEY_LOWER_CHARACTER); Self(room_id, related_event_id, relation_type) } - fn upper_key(room_id: &RoomId, serializer: &IndexeddbSerializer) -> Self { + fn upper_key_with_prefix(room_id: &RoomId, serializer: &IndexeddbSerializer) -> Self { let room_id = serializer.encode_key_as_string(keys::ROOMS, room_id); let related_event_id = String::from(INDEXED_KEY_UPPER_CHARACTER); let relation_type = String::from(INDEXED_KEY_UPPER_CHARACTER); @@ -469,6 +562,30 @@ impl IndexedKeyBounds for IndexedEventRelationKey { } } +impl IndexedPrefixKeyBounds for IndexedEventRelationKey { + fn lower_key_with_prefix( + (room_id, related_event_id): (&RoomId, &EventId), + serializer: &IndexeddbSerializer, + ) -> Self { + let room_id = serializer.encode_key_as_string(keys::ROOMS, room_id); + let related_event_id = + serializer.encode_key_as_string(keys::EVENTS_RELATION_RELATED_EVENTS, related_event_id); + let relation_type = String::from(INDEXED_KEY_LOWER_CHARACTER); + Self(room_id, related_event_id, relation_type) + } + + fn upper_key_with_prefix( + (room_id, related_event_id): (&RoomId, &EventId), + serializer: &IndexeddbSerializer, + ) -> Self { + let room_id = serializer.encode_key_as_string(keys::ROOMS, room_id); + let related_event_id = + serializer.encode_key_as_string(keys::EVENTS_RELATION_RELATED_EVENTS, related_event_id); + let relation_type = String::from(INDEXED_KEY_UPPER_CHARACTER); + Self(room_id, related_event_id, relation_type) + } +} + /// A representation of the relationship between two events (see /// [`RelationType`](ruma::events::relation::RelationType)) pub type IndexedRelationType = String; @@ -494,13 +611,11 @@ impl Indexed for Gap { fn to_indexed( &self, - room_id: &RoomId, serializer: &IndexeddbSerializer, ) -> Result { Ok(IndexedGap { id: >::encode( - room_id, - &ChunkIdentifier::new(self.chunk_identifier), + (&self.room_id, ChunkIdentifier::new(self.chunk_identifier)), serializer, ), content: serializer.maybe_encrypt_value(self)?, @@ -524,24 +639,24 @@ impl Indexed for Gap { pub type IndexedGapIdKey = IndexedChunkIdKey; impl IndexedKey for IndexedGapIdKey { - type KeyComponents = >::KeyComponents; + 
type KeyComponents<'a> = >::KeyComponents<'a>; - fn encode( - room_id: &RoomId, - components: &Self::KeyComponents, - serializer: &IndexeddbSerializer, - ) -> Self { - >::encode(room_id, components, serializer) + fn encode(components: Self::KeyComponents<'_>, serializer: &IndexeddbSerializer) -> Self { + >::encode(components, serializer) } } -impl IndexedKeyComponentBounds for IndexedGapIdKey { - fn lower_key_components() -> Self::KeyComponents { - >::lower_key_components() +impl<'a> IndexedPrefixKeyComponentBounds<'a, Gap, &'a RoomId> for IndexedGapIdKey { + fn lower_key_components_with_prefix(room_id: &'a RoomId) -> Self::KeyComponents<'a> { + >::lower_key_components_with_prefix( + room_id, + ) } - fn upper_key_components() -> Self::KeyComponents { - >::upper_key_components() + fn upper_key_components_with_prefix(room_id: &'a RoomId) -> Self::KeyComponents<'a> { + >::upper_key_components_with_prefix( + room_id, + ) } } diff --git a/crates/matrix-sdk-indexeddb/src/event_cache_store/transaction.rs b/crates/matrix-sdk-indexeddb/src/event_cache_store/transaction.rs index 6ecce3417ba..8108c3d1f20 100644 --- a/crates/matrix-sdk-indexeddb/src/event_cache_store/transaction.rs +++ b/crates/matrix-sdk-indexeddb/src/event_cache_store/transaction.rs @@ -17,7 +17,7 @@ use matrix_sdk_base::{ event_cache::{store::EventCacheStoreError, Event as RawEvent, Gap as RawGap}, linked_chunk::{ChunkContent, ChunkIdentifier, RawChunk}, }; -use ruma::{events::relation::RelationType, OwnedEventId, RoomId}; +use ruma::{events::relation::RelationType, EventId, OwnedEventId, RoomId}; use serde::{ de::{DeserializeOwned, Error}, Serialize, @@ -28,7 +28,10 @@ use web_sys::IdbCursorDirection; use crate::event_cache_store::{ error::AsyncErrorDeps, serializer::{ - traits::{Indexed, IndexedKey, IndexedKeyBounds, IndexedKeyComponentBounds}, + traits::{ + Indexed, IndexedKey, IndexedKeyBounds, IndexedKeyComponentBounds, + IndexedPrefixKeyBounds, IndexedPrefixKeyComponentBounds, + }, types::{ IndexedChunkIdKey, IndexedEventIdKey, IndexedEventPositionKey, IndexedEventRelationKey, IndexedGapIdKey, IndexedKeyRange, IndexedNextChunkIdKey, @@ -104,16 +107,15 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { /// room. pub async fn get_items_by_key( &self, - room_id: &RoomId, range: impl Into>, ) -> Result, IndexeddbEventCacheStoreTransactionError> where T: Indexed, T::IndexedType: DeserializeOwned, T::Error: AsyncErrorDeps, - K: IndexedKeyBounds + Serialize, + K: IndexedKey + Serialize, { - let range = self.serializer.encode_key_range::(room_id, range)?; + let range = self.serializer.encode_key_range::(range)?; let object_store = self.transaction.object_store(T::OBJECT_STORE)?; let array = if let Some(index) = K::INDEX { object_store.index(index)?.get_all_with_key(&range)?.await? @@ -134,48 +136,49 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { /// the given room. 
pub async fn get_items_by_key_components<'b, T, K>( &self, - room_id: &RoomId, - range: impl Into>, + range: impl Into>>, ) -> Result, IndexeddbEventCacheStoreTransactionError> where - T: Indexed, + T: Indexed + 'b, T::IndexedType: DeserializeOwned, T::Error: AsyncErrorDeps, - K: IndexedKeyComponentBounds + Serialize, - K::KeyComponents: 'b, + K: IndexedKey + Serialize + 'b, { - let range: IndexedKeyRange = range.into().encoded(room_id, self.serializer.inner()); - self.get_items_by_key::(room_id, range).await + let range: IndexedKeyRange = range.into().encoded(self.serializer.inner()); + self.get_items_by_key::(range).await } /// Query IndexedDB for all items in the given room by key `K` - pub async fn get_items_in_room( + pub async fn get_items_in_room<'b, T, K>( &self, - room_id: &RoomId, + room_id: &'b RoomId, ) -> Result, IndexeddbEventCacheStoreTransactionError> where T: Indexed, T::IndexedType: DeserializeOwned, T::Error: AsyncErrorDeps, - K: IndexedKeyBounds + Serialize, + K: IndexedPrefixKeyBounds + Serialize, { - self.get_items_by_key::(room_id, IndexedKeyRange::All).await + self.get_items_by_key::(IndexedKeyRange::all_with_prefix( + room_id, + self.serializer.inner(), + )) + .await } /// Query IndexedDB for items that match the given key in the given room. If /// more than one item is found, an error is returned. pub async fn get_item_by_key( &self, - room_id: &RoomId, key: K, ) -> Result, IndexeddbEventCacheStoreTransactionError> where T: Indexed, T::IndexedType: DeserializeOwned, T::Error: AsyncErrorDeps, - K: IndexedKeyBounds + Serialize, + K: IndexedKey + Serialize, { - let mut items = self.get_items_by_key::(room_id, key).await?; + let mut items = self.get_items_by_key::(key).await?; if items.len() > 1 { return Err(IndexeddbEventCacheStoreTransactionError::ItemIsNotUnique); } @@ -184,18 +187,17 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { /// Query IndexedDB for items that match the given key components in the /// given room. If more than one item is found, an error is returned. - pub async fn get_item_by_key_components( + pub async fn get_item_by_key_components<'b, T, K>( &self, - room_id: &RoomId, - components: &K::KeyComponents, + components: K::KeyComponents<'b>, ) -> Result, IndexeddbEventCacheStoreTransactionError> where - T: Indexed, + T: Indexed + 'b, T::IndexedType: DeserializeOwned, T::Error: AsyncErrorDeps, - K: IndexedKeyComponentBounds + Serialize, + K: IndexedKey + Serialize + 'b, { - let mut items = self.get_items_by_key_components::(room_id, components).await?; + let mut items = self.get_items_by_key_components::(components).await?; if items.len() > 1 { return Err(IndexeddbEventCacheStoreTransactionError::ItemIsNotUnique); } @@ -206,16 +208,15 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { /// in the given room. pub async fn get_items_count_by_key( &self, - room_id: &RoomId, range: impl Into>, ) -> Result where T: Indexed, T::IndexedType: DeserializeOwned, T::Error: AsyncErrorDeps, - K: IndexedKeyBounds + Serialize, + K: IndexedKey + Serialize, { - let range = self.serializer.encode_key_range::(room_id, range)?; + let range = self.serializer.encode_key_range::(range)?; let object_store = self.transaction.object_store(T::OBJECT_STORE)?; let count = if let Some(index) = K::INDEX { object_store.index(index)?.count_with_key(&range)?.await? @@ -229,46 +230,48 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { /// components range in the given room. 
pub async fn get_items_count_by_key_components<'b, T, K>( &self, - room_id: &RoomId, - range: impl Into>, + range: impl Into>>, ) -> Result where - T: Indexed, + T: Indexed + 'b, T::IndexedType: DeserializeOwned, T::Error: AsyncErrorDeps, - K: IndexedKeyBounds + Serialize, - K::KeyComponents: 'b, + K: IndexedKey + Serialize + 'b, { - let range: IndexedKeyRange = range.into().encoded(room_id, self.serializer.inner()); - self.get_items_count_by_key::(room_id, range).await + let range: IndexedKeyRange = range.into().encoded(self.serializer.inner()); + self.get_items_count_by_key::(range).await } /// Query IndexedDB for the number of items in the given room. - pub async fn get_items_count_in_room( + pub async fn get_items_count_in_room<'b, T, K>( &self, - room_id: &RoomId, + room_id: &'b RoomId, ) -> Result where T: Indexed, T::IndexedType: DeserializeOwned, T::Error: AsyncErrorDeps, - K: IndexedKeyBounds + Serialize, + K: IndexedPrefixKeyBounds + Serialize, { - self.get_items_count_by_key::(room_id, IndexedKeyRange::All).await + self.get_items_count_by_key::(IndexedKeyRange::all_with_prefix( + room_id, + self.serializer.inner(), + )) + .await } - /// Query IndexedDB for the item with the maximum key in the given room. + /// Query IndexedDB for the item with the maximum key in the given range. pub async fn get_max_item_by_key( &self, - room_id: &RoomId, + range: impl Into>, ) -> Result, IndexeddbEventCacheStoreTransactionError> where T: Indexed, T::IndexedType: DeserializeOwned, T::Error: AsyncErrorDeps, - K: IndexedKey + IndexedKeyBounds + Serialize, + K: IndexedKey + Serialize, { - let range = self.serializer.encode_key_range::(room_id, IndexedKeyRange::All)?; + let range = self.serializer.encode_key_range::(range)?; let direction = IdbCursorDirection::Prev; let object_store = self.transaction.object_store(T::OBJECT_STORE)?; if let Some(index) = K::INDEX { @@ -294,7 +297,6 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { /// exists, it will be rejected. pub async fn add_item( &self, - room_id: &RoomId, item: &T, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> where @@ -304,7 +306,7 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { { self.transaction .object_store(T::OBJECT_STORE)? - .add_val_owned(self.serializer.serialize(room_id, item).map_err(|e| { + .add_val_owned(self.serializer.serialize(item).map_err(|e| { IndexeddbEventCacheStoreTransactionError::Serialization(Box::new(e)) })?)? .await @@ -316,7 +318,6 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { /// exists, it will be overwritten. pub async fn put_item( &self, - room_id: &RoomId, item: &T, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> where @@ -326,7 +327,7 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { { self.transaction .object_store(T::OBJECT_STORE)? - .put_val_owned(self.serializer.serialize(room_id, item).map_err(|e| { + .put_val_owned(self.serializer.serialize(item).map_err(|e| { IndexeddbEventCacheStoreTransactionError::Serialization(Box::new(e)) })?)? 
.await @@ -336,14 +337,13 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { /// Delete items in given key range in the given room from IndexedDB pub async fn delete_items_by_key( &self, - room_id: &RoomId, range: impl Into>, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> where T: Indexed, - K: IndexedKeyBounds + Serialize, + K: IndexedKey + Serialize, { - let range = self.serializer.encode_key_range::(room_id, range)?; + let range = self.serializer.encode_key_range::(range)?; let object_store = self.transaction.object_store(T::OBJECT_STORE)?; if let Some(index) = K::INDEX { let index = object_store.index(index)?; @@ -363,42 +363,43 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { /// IndexedDB pub async fn delete_items_by_key_components<'b, T, K>( &self, - room_id: &RoomId, - range: impl Into>, + range: impl Into>>, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> where - T: Indexed, - K: IndexedKeyBounds + Serialize, - K::KeyComponents: 'b, + T: Indexed + 'b, + K: IndexedKey + Serialize + 'b, { - let range: IndexedKeyRange = range.into().encoded(room_id, self.serializer.inner()); - self.delete_items_by_key::(room_id, range).await + let range: IndexedKeyRange = range.into().encoded(self.serializer.inner()); + self.delete_items_by_key::(range).await } /// Delete all items of type `T` by key `K` in the given room from IndexedDB - pub async fn delete_items_in_room( + pub async fn delete_items_in_room<'b, T, K>( &self, - room_id: &RoomId, + room_id: &'b RoomId, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> where T: Indexed, - K: IndexedKeyBounds + Serialize, + K: IndexedPrefixKeyBounds + Serialize, { - self.delete_items_by_key::(room_id, IndexedKeyRange::All).await + self.delete_items_by_key::(IndexedKeyRange::all_with_prefix( + room_id, + self.serializer.inner(), + )) + .await } /// Delete item that matches the given key components in the given room from /// IndexedDB - pub async fn delete_item_by_key( + pub async fn delete_item_by_key<'b, T, K>( &self, - room_id: &RoomId, - key: &K::KeyComponents, + key: K::KeyComponents<'b>, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> where - T: Indexed, - K: IndexedKeyBounds + Serialize, + T: Indexed + 'b, + K: IndexedKey + Serialize + 'b, { - self.delete_items_by_key_components::(room_id, key).await + self.delete_items_by_key_components::(key).await } /// Clear all items of type `T` in all rooms from IndexedDB @@ -414,9 +415,9 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { pub async fn get_chunk_by_id( &self, room_id: &RoomId, - chunk_id: &ChunkIdentifier, + chunk_id: ChunkIdentifier, ) -> Result, IndexeddbEventCacheStoreTransactionError> { - self.get_item_by_key_components::(room_id, chunk_id).await + self.get_item_by_key_components::((room_id, chunk_id)).await } /// Query IndexedDB for chunks such that the next chunk matches the given @@ -425,9 +426,9 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { pub async fn get_chunk_by_next_chunk_id( &self, room_id: &RoomId, - next_chunk_id: &Option, + next_chunk_id: Option, ) -> Result, IndexeddbEventCacheStoreTransactionError> { - self.get_item_by_key_components::(room_id, next_chunk_id) + self.get_item_by_key_components::((room_id, next_chunk_id)) .await } @@ -452,7 +453,8 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { &self, room_id: &RoomId, ) -> Result, IndexeddbEventCacheStoreTransactionError> { - self.get_max_item_by_key::(room_id).await + let range = IndexedKeyRange::all_with_prefix::(room_id, self.serializer.inner()); + 
self.get_max_item_by_key::(range).await } /// Query IndexedDB for given chunk in given room and additionally query @@ -461,13 +463,13 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { pub async fn load_chunk_by_id( &self, room_id: &RoomId, - chunk_id: &ChunkIdentifier, + chunk_id: ChunkIdentifier, ) -> Result>, IndexeddbEventCacheStoreTransactionError> { if let Some(chunk) = self.get_chunk_by_id(room_id, chunk_id).await? { let content = match chunk.chunk_type { ChunkType::Event => { let events = self - .get_events_by_chunk(room_id, &ChunkIdentifier::new(chunk.identifier)) + .get_events_by_chunk(room_id, ChunkIdentifier::new(chunk.identifier)) .await? .into_iter() .map(RawEvent::from) @@ -476,7 +478,7 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { } ChunkType::Gap => { let gap = self - .get_gap_by_id(room_id, &ChunkIdentifier::new(chunk.identifier)) + .get_gap_by_id(room_id, ChunkIdentifier::new(chunk.identifier)) .await? .ok_or(IndexeddbEventCacheStoreTransactionError::ItemNotFound)?; ChunkContent::Gap(RawGap { prev_token: gap.prev_token }) @@ -498,24 +500,25 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { /// rejected. pub async fn add_chunk( &self, - room_id: &RoomId, chunk: &Chunk, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> { - self.add_item(room_id, chunk).await?; + self.add_item(chunk).await?; if let Some(previous) = chunk.previous { let previous_identifier = ChunkIdentifier::new(previous); if let Some(mut previous_chunk) = - self.get_chunk_by_id(room_id, &previous_identifier).await? + self.get_chunk_by_id(&chunk.room_id, previous_identifier).await? { previous_chunk.next = Some(chunk.identifier); - self.put_item(room_id, &previous_chunk).await?; + self.put_item(&previous_chunk).await?; } } if let Some(next) = chunk.next { let next_identifier = ChunkIdentifier::new(next); - if let Some(mut next_chunk) = self.get_chunk_by_id(room_id, &next_identifier).await? { + if let Some(mut next_chunk) = + self.get_chunk_by_id(&chunk.room_id, next_identifier).await? + { next_chunk.previous = Some(chunk.identifier); - self.put_item(room_id, &next_chunk).await?; + self.put_item(&next_chunk).await?; } } Ok(()) @@ -528,28 +531,27 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { pub async fn delete_chunk_by_id( &self, room_id: &RoomId, - chunk_id: &ChunkIdentifier, + chunk_id: ChunkIdentifier, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> { if let Some(chunk) = self.get_chunk_by_id(room_id, chunk_id).await? { if let Some(previous) = chunk.previous { let previous_identifier = ChunkIdentifier::new(previous); if let Some(mut previous_chunk) = - self.get_chunk_by_id(room_id, &previous_identifier).await? + self.get_chunk_by_id(room_id, previous_identifier).await? { previous_chunk.next = chunk.next; - self.put_item(room_id, &previous_chunk).await?; + self.put_item(&previous_chunk).await?; } } if let Some(next) = chunk.next { let next_identifier = ChunkIdentifier::new(next); - if let Some(mut next_chunk) = - self.get_chunk_by_id(room_id, &next_identifier).await? + if let Some(mut next_chunk) = self.get_chunk_by_id(room_id, next_identifier).await? 
{ next_chunk.previous = chunk.previous; - self.put_item(room_id, &next_chunk).await?; + self.put_item(&next_chunk).await?; } } - self.delete_item_by_key::(room_id, chunk_id).await?; + self.delete_item_by_key::((room_id, chunk_id)).await?; match chunk.chunk_type { ChunkType::Event => { self.delete_events_by_chunk(room_id, chunk_id).await?; @@ -575,10 +577,10 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { pub async fn get_event_by_id( &self, room_id: &RoomId, - event_id: &OwnedEventId, + event_id: &EventId, ) -> Result, IndexeddbEventCacheStoreTransactionError> { - let key = self.serializer.encode_key(room_id, event_id); - self.get_item_by_key::(room_id, key).await + let key = self.serializer.encode_key((room_id, event_id)); + self.get_item_by_key::(key).await } /// Query IndexedDB for events in the given position range in the given @@ -586,9 +588,12 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { pub async fn get_events_by_position( &self, room_id: &RoomId, - range: impl Into>, + range: impl Into>, ) -> Result, IndexeddbEventCacheStoreTransactionError> { - self.get_items_by_key_components::(room_id, range).await + self.get_items_by_key_components::( + range.into().map(|position| (room_id, position)), + ) + .await } /// Query IndexedDB for number of events in the given position range in the @@ -596,24 +601,22 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { pub async fn get_events_count_by_position( &self, room_id: &RoomId, - range: impl Into>, + range: impl Into>, ) -> Result { - self.get_items_count_by_key_components::(room_id, range) - .await + self.get_items_count_by_key_components::( + range.into().map(|position| (room_id, position)), + ) + .await } /// Query IndexedDB for events in the given chunk in the given room. pub async fn get_events_by_chunk( &self, room_id: &RoomId, - chunk_id: &ChunkIdentifier, + chunk_id: ChunkIdentifier, ) -> Result, IndexeddbEventCacheStoreTransactionError> { - let mut lower = IndexedEventPositionKey::lower_key_components(); - lower.chunk_identifier = chunk_id.index(); - let mut upper = IndexedEventPositionKey::upper_key_components(); - upper.chunk_identifier = chunk_id.index(); - let range = IndexedKeyRange::Bound(&lower, &upper); - self.get_events_by_position(room_id, range).await + let range = IndexedKeyRange::all_with_prefix((room_id, chunk_id), self.serializer.inner()); + self.get_items_by_key::(range).await } /// Query IndexedDB for number of events in the given chunk in the given @@ -621,14 +624,10 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { pub async fn get_events_count_by_chunk( &self, room_id: &RoomId, - chunk_id: &ChunkIdentifier, + chunk_id: ChunkIdentifier, ) -> Result { - let mut lower = IndexedEventPositionKey::lower_key_components(); - lower.chunk_identifier = chunk_id.index(); - let mut upper = IndexedEventPositionKey::upper_key_components(); - upper.chunk_identifier = chunk_id.index(); - let range = IndexedKeyRange::Bound(&lower, &upper); - self.get_events_count_by_position(room_id, range).await + let range = IndexedKeyRange::all_with_prefix((room_id, chunk_id), self.serializer.inner()); + self.get_items_count_by_key::(range).await } /// Query IndexedDB for events that match the given relation range in the @@ -636,10 +635,13 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { pub async fn get_events_by_relation( &self, room_id: &RoomId, - range: impl Into>, + range: impl Into>, ) -> Result, IndexeddbEventCacheStoreTransactionError> { - let range = range.into().encoded(room_id, self.serializer.inner()); - 
self.get_items_by_key::(room_id, range).await + let range = range + .into() + .map(|(event_id, relation_type)| (room_id, event_id, relation_type)) + .encoded(self.serializer.inner()); + self.get_items_by_key::(range).await } /// Query IndexedDB for events that are related to the given event in the @@ -647,21 +649,17 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { pub async fn get_events_by_related_event( &self, room_id: &RoomId, - related_event_id: &OwnedEventId, + related_event_id: &EventId, ) -> Result, IndexeddbEventCacheStoreTransactionError> { - let lower = IndexedEventRelationKey::lower_key(room_id, self.serializer.inner()) - .with_related_event_id(related_event_id, self.serializer.inner()); - let upper = IndexedEventRelationKey::upper_key(room_id, self.serializer.inner()) - .with_related_event_id(related_event_id, self.serializer.inner()); - let range = IndexedKeyRange::Bound(lower, upper); - self.get_items_by_key::(room_id, range).await + let range = + IndexedKeyRange::all_with_prefix((room_id, related_event_id), self.serializer.inner()); + self.get_items_by_key::(range).await } /// Puts an event in the given room. If an event with the same key already /// exists, it will be overwritten. pub async fn put_event( &self, - room_id: &RoomId, event: &Event, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> { if let Some(position) = event.position() { @@ -674,41 +672,40 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { // As a workaround, if the event has a position, we delete it first and // then call `put_item`. This should be fine as it all happens within the // context of a single transaction. - self.delete_event_by_position(room_id, &position).await?; + self.delete_event_by_position(event.room_id(), position).await?; } - self.put_item(room_id, event).await + self.put_item(event).await } /// Delete events in the given position range in the given room pub async fn delete_events_by_position( &self, room_id: &RoomId, - range: impl Into>, + range: impl Into>, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> { - self.delete_items_by_key_components::(room_id, range).await + self.delete_items_by_key_components::( + range.into().map(|position| (room_id, position)), + ) + .await } /// Delete event in the given position in the given room pub async fn delete_event_by_position( &self, room_id: &RoomId, - position: &Position, + position: Position, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> { - self.delete_item_by_key::(room_id, position).await + self.delete_item_by_key::((room_id, position)).await } /// Delete events in the given chunk in the given room pub async fn delete_events_by_chunk( &self, room_id: &RoomId, - chunk_id: &ChunkIdentifier, + chunk_id: ChunkIdentifier, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> { - let mut lower = IndexedEventPositionKey::lower_key_components(); - lower.chunk_identifier = chunk_id.index(); - let mut upper = IndexedEventPositionKey::upper_key_components(); - upper.chunk_identifier = chunk_id.index(); - let range = IndexedKeyRange::Bound(&lower, &upper); - self.delete_events_by_position(room_id, range).await + let range = IndexedKeyRange::all_with_prefix((room_id, chunk_id), self.serializer.inner()); + self.delete_items_by_key::(range).await } /// Delete events starting from the given position in the given room @@ -716,11 +713,14 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { pub async fn delete_events_by_chunk_from_index( &self, room_id: &RoomId, - position: &Position, + position: Position, ) -> 
Result<(), IndexeddbEventCacheStoreTransactionError> { - let mut upper = IndexedEventPositionKey::upper_key_components(); - upper.chunk_identifier = position.chunk_identifier; - let range = IndexedKeyRange::Bound(position, &upper); + let lower = (room_id, position); + let upper = IndexedEventPositionKey::upper_key_components_with_prefix(( + room_id, + ChunkIdentifier::new(position.chunk_identifier), + )); + let range = IndexedKeyRange::Bound(lower, upper).map(|(_, position)| position); self.delete_events_by_position(room_id, range).await } @@ -736,18 +736,18 @@ impl<'a> IndexeddbEventCacheStoreTransaction<'a> { pub async fn get_gap_by_id( &self, room_id: &RoomId, - chunk_id: &ChunkIdentifier, + chunk_id: ChunkIdentifier, ) -> Result, IndexeddbEventCacheStoreTransactionError> { - self.get_item_by_key_components::(room_id, chunk_id).await + self.get_item_by_key_components::((room_id, chunk_id)).await } /// Delete gap that matches the given chunk identifier in the given room pub async fn delete_gap_by_id( &self, room_id: &RoomId, - chunk_id: &ChunkIdentifier, + chunk_id: ChunkIdentifier, ) -> Result<(), IndexeddbEventCacheStoreTransactionError> { - self.delete_item_by_key::(room_id, chunk_id).await + self.delete_item_by_key::((room_id, chunk_id)).await } /// Delete all gaps in the given room diff --git a/crates/matrix-sdk-indexeddb/src/event_cache_store/types.rs b/crates/matrix-sdk-indexeddb/src/event_cache_store/types.rs index 3ef36eccc6c..6d56bee841f 100644 --- a/crates/matrix-sdk-indexeddb/src/event_cache_store/types.rs +++ b/crates/matrix-sdk-indexeddb/src/event_cache_store/types.rs @@ -16,13 +16,15 @@ use matrix_sdk_base::{ deserialized_responses::TimelineEvent, event_cache::store::extract_event_relation, linked_chunk::ChunkIdentifier, }; -use ruma::OwnedEventId; +use ruma::{OwnedEventId, OwnedRoomId, RoomId}; use serde::{Deserialize, Serialize}; /// Representation of a [`Chunk`](matrix_sdk_base::linked_chunk::Chunk) /// which can be stored in IndexedDB. #[derive(Debug, Serialize, Deserialize)] pub struct Chunk { + /// The room in which the chunk exists. + pub room_id: OwnedRoomId, /// The identifier of the chunk - i.e., /// [`ChunkIdentifier`](matrix_sdk_base::linked_chunk::ChunkIdentifier). pub identifier: u64, @@ -69,6 +71,14 @@ impl From for TimelineEvent { } impl Event { + /// The [`RoomId`] of the room in which the underlying event exists. + pub fn room_id(&self) -> &RoomId { + match self { + Event::InBand(e) => &e.room_id, + Event::OutOfBand(e) => &e.room_id, + } + } + /// The [`OwnedEventId`] of the underlying event. pub fn event_id(&self) -> Option { match self { @@ -114,6 +124,8 @@ impl Event { /// in-band or out-of-band. #[derive(Debug, Serialize, Deserialize)] pub struct GenericEvent
<P>
{ + /// The room in which the event exists. + pub room_id: OwnedRoomId, /// The full content of the event. pub content: TimelineEvent, /// The position of the event, if it is in a chunk. @@ -169,6 +181,8 @@ impl From for Position { /// which can be stored in IndexedDB. #[derive(Debug, Serialize, Deserialize)] pub struct Gap { + /// The room in which the gap exists. + pub room_id: OwnedRoomId, /// The identifier of the chunk containing this gap. pub chunk_identifier: u64, /// The token to use in the query, extracted from a previous "from" /
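The sketch below is an illustrative, self-contained model of the prefixed key-bounds pattern this patch introduces; it is not part of the diff and not the crate's real API. `EventPositionKey`, `PrefixBounds`, and `Range` stand in for `IndexedEventPositionKey`, `IndexedPrefixKeyBounds`, and `IndexedKeyRange`, and the real traits additionally thread an `IndexeddbSerializer` and use lifetime-parameterised `KeyComponents`. It shows the core idea behind removing the `All` range variant: a "whole room" or "whole chunk" query becomes an ordinary `Bound` range whose prefix is held constant while the trailing key components are saturated to their minimum and maximum.

    // Simplified stand-in key: the real bounds come from
    // js_sys::Number::MAX_SAFE_INTEGER rather than u64::MAX.
    #[derive(Debug, Clone)]
    struct EventPositionKey {
        room_id: String,
        chunk_id: u64,
        index: u64,
    }

    /// Hypothetical analogue of `IndexedPrefixKeyBounds<T, P>`: build the lower
    /// and upper key for a fixed prefix by saturating the remaining components.
    trait PrefixBounds<P>: Sized {
        fn lower_with_prefix(prefix: P) -> Self;
        fn upper_with_prefix(prefix: P) -> Self;
    }

    // Prefix = room: ranges over every event position in the room.
    impl PrefixBounds<&str> for EventPositionKey {
        fn lower_with_prefix(room_id: &str) -> Self {
            Self { room_id: room_id.to_owned(), chunk_id: 0, index: 0 }
        }
        fn upper_with_prefix(room_id: &str) -> Self {
            Self { room_id: room_id.to_owned(), chunk_id: u64::MAX, index: u64::MAX }
        }
    }

    // Prefix = (room, chunk): ranges over every event position in one chunk.
    impl PrefixBounds<(&str, u64)> for EventPositionKey {
        fn lower_with_prefix((room_id, chunk_id): (&str, u64)) -> Self {
            Self { room_id: room_id.to_owned(), chunk_id, index: 0 }
        }
        fn upper_with_prefix((room_id, chunk_id): (&str, u64)) -> Self {
            Self { room_id: room_id.to_owned(), chunk_id, index: u64::MAX }
        }
    }

    /// Inclusive range, mirroring `IndexedKeyRange::{Only, Bound}` once the
    /// `All` variant is gone.
    #[derive(Debug)]
    enum Range<K> {
        Only(K),
        Bound(K, K),
    }

    impl<K> Range<K> {
        /// Analogue of `IndexedKeyRange::all_with_prefix`: the former `All`
        /// case, expressed as an explicit bound derived from a constant prefix.
        fn all_with_prefix<P: Clone>(prefix: P) -> Self
        where
            K: PrefixBounds<P>,
        {
            Range::Bound(K::lower_with_prefix(prefix.clone()), K::upper_with_prefix(prefix))
        }
    }

    fn main() {
        // Everything in chunk 3 of a room, as e.g. `get_events_by_chunk` now does.
        let per_chunk = Range::<EventPositionKey>::all_with_prefix(("!room:example.org", 3u64));
        // Everything in the room, as e.g. `get_items_in_room` now does.
        let per_room = Range::<EventPositionKey>::all_with_prefix("!room:example.org");
        println!("{per_chunk:?}\n{per_room:?}");
    }

Design note: moving the room (and, where relevant, the chunk) into the stored types and their keys is what lets one helper serve `get_items_in_room`, `get_events_by_chunk`, and `get_events_by_related_event` without a separate `room_id` parameter on the serializer and transaction methods.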