From 3e1f203cc1fbddca84af9e39a56518f130017dec Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 28 Feb 2023 15:48:25 +0100 Subject: [PATCH 01/75] Add variable-sized integer serialisation --- .../src/Wire/API/MLS/Serialisation.hs | 49 ++++++++++++++++++- .../test/unit/Test/Wire/API/Roundtrip/MLS.hs | 1 + 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs index 0881c31773..04472c0dbe 100644 --- a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs +++ b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs @@ -1,3 +1,6 @@ +{-# LANGUAGE BinaryLiterals #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + -- This file is part of the Wire Server implementation. -- -- Copyright (C) 2022 Wire Swiss GmbH @@ -18,6 +21,7 @@ module Wire.API.MLS.Serialisation ( ParseMLS (..), SerialiseMLS (..), + VarInt (..), parseMLSVector, serialiseMLSVector, parseMLSBytes, @@ -52,9 +56,10 @@ import Data.Aeson (FromJSON (..)) import qualified Data.Aeson as Aeson import Data.Bifunctor import Data.Binary -import Data.Binary.Builder +import Data.Binary.Builder (toLazyByteString) import Data.Binary.Get import Data.Binary.Put +import Data.Bits import qualified Data.ByteString as BS import qualified Data.ByteString.Lazy as LBS import Data.Json.Util @@ -64,6 +69,7 @@ import Data.Schema import qualified Data.Swagger as S import qualified Data.Text as Text import Imports +import Test.QuickCheck (Arbitrary (..), chooseInt) -- | Parse a value encoded using the "TLS presentation" format. class ParseMLS a where @@ -73,6 +79,47 @@ class ParseMLS a where class SerialiseMLS a where serialiseMLS :: a -> Put +-- | An integer value serialised with a variable-size encoding. +-- +-- The underlying Word32 must be strictly less than 2^30. 
+newtype VarInt = VarInt {unVarInt :: Word32} + deriving newtype (Eq, Ord, Num, Enum, Integral, Real, Show) + +instance Arbitrary VarInt where + arbitrary = fromIntegral <$> chooseInt (0, 1073741823) + +-- From the MLS spec: +-- +-- Prefix | Length | Usable Bits | Min | Max +-- -------+--------+-------------+-----+--------- +-- 00 1 6 0 63 +-- 01 2 14 64 16383 +-- 10 4 30 16384 1073741823 +-- 11 invalid - - - +-- +instance Binary VarInt where + put :: VarInt -> Put + put (VarInt w) + | w < 64 = putWord8 (fromIntegral w) + | w < 16384 = putWord16be (0x4000 .|. fromIntegral w) + | w < 1073741824 = putWord32be (0x80000000 .|. w) + | otherwise = error "invalid VarInt" + + get :: Get VarInt + get = do + w <- lookAhead getWord8 + let x = shiftR (w .&. 0xc0) 6 + maskVarInt = VarInt . (.&. 0x3fffffff) + if + | x == 0b00 -> maskVarInt . fromIntegral <$> getWord8 + | x == 0b01 -> maskVarInt . fromIntegral <$> getWord16be + | x == 0b10 -> maskVarInt . fromIntegral <$> getWord32be + | otherwise -> fail "invalid VarInt prefix" + +instance SerialiseMLS VarInt where serialiseMLS = put + +instance ParseMLS VarInt where parseMLS = get + parseMLSVector :: forall w a. 
(Binary w, Integral w) => Get a -> Get [a] parseMLSVector getItem = do len <- get @w diff --git a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs index d73620945b..e1779dfbb3 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs @@ -49,6 +49,7 @@ tests = testRoundTrip @PublicGroupState, testRoundTrip @Welcome, testRoundTrip @OpaquePublicGroupState, + testRoundTrip @VarInt, testConvertProtoRoundTrip @Proto.Mls.GroupInfoBundle @GroupInfoBundle, testConvertProtoRoundTrip @Proto.Mls.CommitBundle @TestCommitBundle ] From d2641621dffdd87d09597d822fe88d517125a3e8 Mon Sep 17 00:00:00 2001 From: Stefan Matting Date: Wed, 1 Mar 2023 14:58:08 +0100 Subject: [PATCH 02/75] Implement new MLS structures --- .../src/Wire/API/Federation/API/Brig.hs | 2 +- .../wire-api/src/Wire/API/MLS/Capabilities.hs | 46 ++ libs/wire-api/src/Wire/API/MLS/CipherSuite.hs | 72 ++- .../wire-api/src/Wire/API/MLS/CommitBundle.hs | 7 +- libs/wire-api/src/Wire/API/MLS/Credential.hs | 82 +-- libs/wire-api/src/Wire/API/MLS/Extension.hs | 112 +--- .../src/Wire/API/MLS/HPKEPublicKey.hs | 30 + libs/wire-api/src/Wire/API/MLS/KeyPackage.hs | 62 +- libs/wire-api/src/Wire/API/MLS/Keys.hs | 1 + libs/wire-api/src/Wire/API/MLS/LeafNode.hs | 125 ++++ libs/wire-api/src/Wire/API/MLS/Lifetime.hs | 42 ++ libs/wire-api/src/Wire/API/MLS/Message.hs | 573 ++++++++++-------- libs/wire-api/src/Wire/API/MLS/Proposal.hs | 23 +- libs/wire-api/src/Wire/API/MLS/ProposalTag.hs | 41 ++ .../src/Wire/API/MLS/ProtocolVersion.hs | 52 ++ .../src/Wire/API/MLS/PublicGroupState.hs | 2 +- libs/wire-api/src/Wire/API/MLS/Welcome.hs | 2 +- libs/wire-api/src/Wire/API/OAuth.hs | 1 - .../src/Wire/API/Routes/Internal/Brig.hs | 1 + .../src/Wire/API/Routes/Public/Galley/MLS.hs | 4 +- libs/wire-api/src/Wire/API/User/Client.hs | 4 +- libs/wire-api/test/golden.hs | 5 + .../Wire/API/Golden/Generated/Client_user.hs | 2 
+- .../API/Golden/Generated/NewClient_user.hs | 2 +- .../API/Golden/Generated/UpdateClient_user.hs | 2 +- .../{Main.hs => Test/Wire/API/Golden/Run.hs} | 5 +- libs/wire-api/test/unit.hs | 5 + libs/wire-api/test/unit/Test/Wire/API/MLS.hs | 101 +-- .../test/unit/Test/Wire/API/Roundtrip/MLS.hs | 61 +- .../unit/{Main.hs => Test/Wire/API/Run.hs} | 5 +- libs/wire-api/wire-api.cabal | 239 +++----- services/brig/brig.cabal | 404 +++--------- services/brig/schema/main.hs | 5 + services/brig/schema/src/{Main.hs => Run.hs} | 2 +- services/brig/src/Brig/API/Internal.hs | 3 +- .../Brig/API/MLS/KeyPackages/Validation.hs | 80 +-- services/brig/src/Brig/Data/Client.hs | 2 +- services/brig/src/Brig/Data/MLS/KeyPackage.hs | 9 +- services/brig/test/integration.hs | 5 + .../brig/test/integration/API/MLS/Util.hs | 1 + .../brig/test/integration/API/User/Client.hs | 3 +- .../test/integration/Federation/End2end.hs | 6 +- .../brig/test/integration/{Main.hs => Run.hs} | 2 +- services/brig/test/unit.hs | 1 + services/brig/test/unit/Main.hs | 2 +- services/brig/test/unit/Run.hs | 43 ++ services/brig/test/unit/Test/Brig/MLS.hs | 80 +-- services/galley/galley.cabal | 402 +++--------- services/galley/migrate-data/main.hs | 1 + .../migrate-data/src/{Main.hs => Run.hs} | 2 +- services/galley/schema/main.hs | 5 + .../galley/schema/src/{Main.hs => Run.hs} | 2 +- services/galley/src/Galley/API/Federation.hs | 30 +- services/galley/src/Galley/API/MLS/Message.hs | 365 ++++++----- services/galley/src/Galley/API/MLS/Removal.hs | 8 +- .../galley/src/Galley/Effects/BrigAccess.hs | 1 + services/galley/src/Galley/Intra/Client.hs | 1 + services/galley/src/Galley/Keys.hs | 1 + services/galley/test/integration.hs | 1 + services/galley/test/integration/API/MLS.hs | 38 +- .../galley/test/integration/API/MLS/Util.hs | 30 +- services/galley/test/integration/API/Util.hs | 50 +- .../test/integration/{Main.hs => Run.hs} | 2 +- services/galley/test/unit.hs | 1 + services/galley/test/unit/{Main.hs => Run.hs} | 2 +- 65 
files changed, 1490 insertions(+), 1811 deletions(-) create mode 100644 libs/wire-api/src/Wire/API/MLS/Capabilities.hs create mode 100644 libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs create mode 100644 libs/wire-api/src/Wire/API/MLS/LeafNode.hs create mode 100644 libs/wire-api/src/Wire/API/MLS/Lifetime.hs create mode 100644 libs/wire-api/src/Wire/API/MLS/ProposalTag.hs create mode 100644 libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs create mode 100644 libs/wire-api/test/golden.hs rename libs/wire-api/test/golden/{Main.hs => Test/Wire/API/Golden/Run.hs} (96%) create mode 100644 libs/wire-api/test/unit.hs rename libs/wire-api/test/unit/{Main.hs => Test/Wire/API/Run.hs} (98%) create mode 100644 services/brig/schema/main.hs rename services/brig/schema/src/{Main.hs => Run.hs} (99%) create mode 100644 services/brig/test/integration.hs rename services/brig/test/integration/{Main.hs => Run.hs} (99%) create mode 100644 services/brig/test/unit.hs create mode 100644 services/brig/test/unit/Run.hs create mode 100644 services/galley/migrate-data/main.hs rename services/galley/migrate-data/src/{Main.hs => Run.hs} (98%) create mode 100644 services/galley/schema/main.hs rename services/galley/schema/src/{Main.hs => Run.hs} (99%) create mode 100644 services/galley/test/integration.hs rename services/galley/test/integration/{Main.hs => Run.hs} (99%) create mode 100644 services/galley/test/unit.hs rename services/galley/test/unit/{Main.hs => Run.hs} (99%) diff --git a/libs/wire-api-federation/src/Wire/API/Federation/API/Brig.hs b/libs/wire-api-federation/src/Wire/API/Federation/API/Brig.hs index f9c36367bd..d6fc8ee3cc 100644 --- a/libs/wire-api-federation/src/Wire/API/Federation/API/Brig.hs +++ b/libs/wire-api-federation/src/Wire/API/Federation/API/Brig.hs @@ -27,7 +27,7 @@ import Test.QuickCheck (Arbitrary) import Wire.API.Federation.API.Common import Wire.API.Federation.Endpoint import Wire.API.Federation.Version -import Wire.API.MLS.Credential +import 
Wire.API.MLS.CipherSuite import Wire.API.MLS.KeyPackage import Wire.API.User (UserProfile) import Wire.API.User.Client diff --git a/libs/wire-api/src/Wire/API/MLS/Capabilities.hs b/libs/wire-api/src/Wire/API/MLS/Capabilities.hs new file mode 100644 index 0000000000..1647b6d092 --- /dev/null +++ b/libs/wire-api/src/Wire/API/MLS/Capabilities.hs @@ -0,0 +1,46 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . 
+ +module Wire.API.MLS.Capabilities where + +import Imports +import Test.QuickCheck +import Wire.API.MLS.CipherSuite +import Wire.API.MLS.Credential +import Wire.API.MLS.ProposalTag +import Wire.API.MLS.ProtocolVersion +import Wire.API.MLS.Serialisation +import Wire.Arbitrary + +data Capabilities = Capabilities + { versions :: [ProtocolVersion], + ciphersuites :: [CipherSuite], + extensions :: [Word16], + proposals :: [ProposalTag], + credentials :: [CredentialTag] + } + deriving (Show, Eq, Generic) + deriving (Arbitrary) via (GenericUniform Capabilities) + +instance ParseMLS Capabilities where + parseMLS = + Capabilities + <$> parseMLSVector @VarInt parseMLS + <*> parseMLSVector @VarInt parseMLS + <*> parseMLSVector @VarInt parseMLS + <*> parseMLSVector @VarInt parseMLS + <*> parseMLSVector @VarInt parseMLS diff --git a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs index aaf42cd5af..d9da3c305a 100644 --- a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs +++ b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs @@ -19,19 +19,24 @@ module Wire.API.MLS.CipherSuite where +import Cassandra.CQL +import Control.Error (note) import Control.Lens ((?~)) import Crypto.Error import Crypto.Hash.Algorithms import qualified Crypto.KDF.HKDF as HKDF import qualified Crypto.PubKey.Ed25519 as Ed25519 -import Data.Aeson (parseJSON, toJSON) +import qualified Data.Aeson as Aeson +import Data.Aeson.Types (FromJSON (..), FromJSONKey (..), ToJSON (..), ToJSONKey (..)) +import qualified Data.Aeson.Types as Aeson import Data.Proxy import Data.Schema import qualified Data.Swagger as S import qualified Data.Swagger.Internal.Schema as S +import qualified Data.Text as T import Data.Word import Imports -import Wire.API.MLS.Credential +import Servant (FromHttpApiData (parseQueryParam)) import Wire.API.MLS.Serialisation import Wire.Arbitrary @@ -92,3 +97,66 @@ csVerifySignature MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 pub x sig = csSignatureScheme :: 
CipherSuiteTag -> SignatureSchemeTag csSignatureScheme MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 = Ed25519 + +-- | A TLS signature scheme. +-- +-- See . +newtype SignatureScheme = SignatureScheme {unSignatureScheme :: Word16} + deriving stock (Eq, Show) + deriving newtype (ParseMLS, Arbitrary) + +signatureScheme :: SignatureSchemeTag -> SignatureScheme +signatureScheme = SignatureScheme . signatureSchemeNumber + +data SignatureSchemeTag = Ed25519 + deriving stock (Bounded, Enum, Eq, Ord, Show, Generic) + deriving (Arbitrary) via GenericUniform SignatureSchemeTag + +instance Cql SignatureSchemeTag where + ctype = Tagged TextColumn + toCql = CqlText . signatureSchemeName + fromCql (CqlText name) = + note ("Unexpected signature scheme: " <> T.unpack name) $ + signatureSchemeFromName name + fromCql _ = Left "SignatureScheme: Text expected" + +signatureSchemeNumber :: SignatureSchemeTag -> Word16 +signatureSchemeNumber Ed25519 = 0x807 + +signatureSchemeName :: SignatureSchemeTag -> Text +signatureSchemeName Ed25519 = "ed25519" + +signatureSchemeTag :: SignatureScheme -> Maybe SignatureSchemeTag +signatureSchemeTag (SignatureScheme n) = getAlt $ + flip foldMap [minBound .. maxBound] $ \s -> + guard (signatureSchemeNumber s == n) $> s + +signatureSchemeFromName :: Text -> Maybe SignatureSchemeTag +signatureSchemeFromName name = getAlt $ + flip foldMap [minBound .. 
maxBound] $ \s -> + guard (signatureSchemeName s == name) $> s + +parseSignatureScheme :: MonadFail f => Text -> f SignatureSchemeTag +parseSignatureScheme name = + maybe + (fail ("Unsupported signature scheme " <> T.unpack name)) + pure + (signatureSchemeFromName name) + +instance FromJSON SignatureSchemeTag where + parseJSON = Aeson.withText "SignatureScheme" parseSignatureScheme + +instance FromJSONKey SignatureSchemeTag where + fromJSONKey = Aeson.FromJSONKeyTextParser parseSignatureScheme + +instance S.ToParamSchema SignatureSchemeTag where + toParamSchema _ = mempty & S.type_ ?~ S.SwaggerString + +instance FromHttpApiData SignatureSchemeTag where + parseQueryParam = note "Unknown signature scheme" . signatureSchemeFromName + +instance ToJSON SignatureSchemeTag where + toJSON = Aeson.String . signatureSchemeName + +instance ToJSONKey SignatureSchemeTag where + toJSONKey = Aeson.toJSONKeyText signatureSchemeName diff --git a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs index e04902d969..57f75490bc 100644 --- a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs +++ b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs @@ -34,7 +34,7 @@ import Wire.API.MLS.Serialisation import Wire.API.MLS.Welcome data CommitBundle = CommitBundle - { cbCommitMsg :: RawMLS (Message 'MLSPlainText), + { cbCommitMsg :: RawMLS Message, cbWelcome :: Maybe (RawMLS Welcome), cbGroupInfoBundle :: GroupInfoBundle } @@ -43,7 +43,10 @@ data CommitBundle = CommitBundle instance ConvertProtoLens Proto.Mls.CommitBundle CommitBundle where fromProtolens protoBundle = protoLabel "CommitBundle" $ do CommitBundle - <$> protoLabel "commit" (decodeMLS' (view Proto.Mls.commit protoBundle)) + <$> protoLabel + "commit" + ( decodeMLS' (view Proto.Mls.commit protoBundle) + ) <*> protoLabel "welcome" ( let bs = view Proto.Mls.welcome protoBundle diff --git a/libs/wire-api/src/Wire/API/MLS/Credential.hs b/libs/wire-api/src/Wire/API/MLS/Credential.hs index 
e695eba1d9..eb74be4fe8 100644 --- a/libs/wire-api/src/Wire/API/MLS/Credential.hs +++ b/libs/wire-api/src/Wire/API/MLS/Credential.hs @@ -19,7 +19,6 @@ module Wire.API.MLS.Credential where -import Cassandra.CQL import Control.Error.Util import Control.Lens ((?~)) import Data.Aeson (FromJSON (..), FromJSONKey (..), ToJSON (..), ToJSONKey (..)) @@ -37,6 +36,7 @@ import Data.Schema import qualified Data.Swagger as S import qualified Data.Text as T import Data.UUID +import GHC.Records import Imports import Web.HttpApiData import Wire.API.MLS.Serialisation @@ -45,16 +45,14 @@ import Wire.Arbitrary -- | An MLS credential. -- -- Only the @BasicCredential@ type is supported. -data Credential = BasicCredential - { bcIdentity :: ByteString, - bcSignatureScheme :: SignatureScheme, - bcSignatureKey :: ByteString - } +data Credential = BasicCredential ByteString deriving stock (Eq, Show, Generic) deriving (Arbitrary) via GenericUniform Credential -data CredentialTag = BasicCredentialTag - deriving stock (Enum, Bounded, Eq, Show) +data CredentialTag where + BasicCredentialTag :: CredentialTag + deriving stock (Enum, Bounded, Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform CredentialTag) instance ParseMLS CredentialTag where parseMLS = parseMLSEnum @Word16 "credential type" @@ -64,75 +62,13 @@ instance ParseMLS Credential where parseMLS >>= \case BasicCredentialTag -> BasicCredential - <$> parseMLSBytes @Word16 - <*> parseMLS - <*> parseMLSBytes @Word16 + <$> parseMLSBytes @VarInt credentialTag :: Credential -> CredentialTag credentialTag BasicCredential {} = BasicCredentialTag --- | A TLS signature scheme. --- --- See . -newtype SignatureScheme = SignatureScheme {unSignatureScheme :: Word16} - deriving stock (Eq, Show) - deriving newtype (ParseMLS, Arbitrary) - -signatureScheme :: SignatureSchemeTag -> SignatureScheme -signatureScheme = SignatureScheme . 
signatureSchemeNumber - -data SignatureSchemeTag = Ed25519 - deriving stock (Bounded, Enum, Eq, Ord, Show, Generic) - deriving (Arbitrary) via GenericUniform SignatureSchemeTag - -instance Cql SignatureSchemeTag where - ctype = Tagged TextColumn - toCql = CqlText . signatureSchemeName - fromCql (CqlText name) = - note ("Unexpected signature scheme: " <> T.unpack name) $ - signatureSchemeFromName name - fromCql _ = Left "SignatureScheme: Text expected" - -signatureSchemeNumber :: SignatureSchemeTag -> Word16 -signatureSchemeNumber Ed25519 = 0x807 - -signatureSchemeName :: SignatureSchemeTag -> Text -signatureSchemeName Ed25519 = "ed25519" - -signatureSchemeTag :: SignatureScheme -> Maybe SignatureSchemeTag -signatureSchemeTag (SignatureScheme n) = getAlt $ - flip foldMap [minBound .. maxBound] $ \s -> - guard (signatureSchemeNumber s == n) $> s - -signatureSchemeFromName :: Text -> Maybe SignatureSchemeTag -signatureSchemeFromName name = getAlt $ - flip foldMap [minBound .. maxBound] $ \s -> - guard (signatureSchemeName s == name) $> s - -parseSignatureScheme :: MonadFail f => Text -> f SignatureSchemeTag -parseSignatureScheme name = - maybe - (fail ("Unsupported signature scheme " <> T.unpack name)) - pure - (signatureSchemeFromName name) - -instance FromJSON SignatureSchemeTag where - parseJSON = Aeson.withText "SignatureScheme" parseSignatureScheme - -instance FromJSONKey SignatureSchemeTag where - fromJSONKey = Aeson.FromJSONKeyTextParser parseSignatureScheme - -instance S.ToParamSchema SignatureSchemeTag where - toParamSchema _ = mempty & S.type_ ?~ S.SwaggerString - -instance FromHttpApiData SignatureSchemeTag where - parseQueryParam = note "Unknown signature scheme" . signatureSchemeFromName - -instance ToJSON SignatureSchemeTag where - toJSON = Aeson.String . 
signatureSchemeName - -instance ToJSONKey SignatureSchemeTag where - toJSONKey = Aeson.toJSONKeyText signatureSchemeName +instance HasField "identityData" Credential ByteString where + getField (BasicCredential i) = i data ClientIdentity = ClientIdentity { ciDomain :: Domain, diff --git a/libs/wire-api/src/Wire/API/MLS/Extension.hs b/libs/wire-api/src/Wire/API/MLS/Extension.hs index 5093398adf..84606420e1 100644 --- a/libs/wire-api/src/Wire/API/MLS/Extension.hs +++ b/libs/wire-api/src/Wire/API/MLS/Extension.hs @@ -19,52 +19,13 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . -module Wire.API.MLS.Extension - ( -- * Extensions - Extension (..), - decodeExtension, - parseExtension, - ExtensionTag (..), - CapabilitiesExtensionTagSym0, - LifetimeExtensionTagSym0, - SExtensionTag (..), - SomeExtension (..), - Capabilities (..), - Lifetime (..), - - -- * Other types - Timestamp (..), - ProtocolVersion (..), - ProtocolVersionTag (..), - - -- * Utilities - pvTag, - tsPOSIX, - ) -where +module Wire.API.MLS.Extension where import Data.Binary -import Data.Kind -import Data.Singletons.TH -import Data.Time.Clock.POSIX import Imports -import Wire.API.MLS.CipherSuite import Wire.API.MLS.Serialisation import Wire.Arbitrary -newtype ProtocolVersion = ProtocolVersion {pvNumber :: Word8} - deriving newtype (Eq, Ord, Show, Binary, Arbitrary, ParseMLS, SerialiseMLS) - -data ProtocolVersionTag = ProtocolMLS10 | ProtocolMLSDraft11 - deriving stock (Bounded, Enum, Eq, Show, Generic) - deriving (Arbitrary) via GenericUniform ProtocolVersionTag - -pvTag :: ProtocolVersion -> Maybe ProtocolVersionTag -pvTag (ProtocolVersion v) = case v of - 1 -> pure ProtocolMLS10 - 200 -> pure ProtocolMLSDraft11 - _ -> Nothing - data Extension = Extension { extType :: Word16, extData :: ByteString @@ -73,78 +34,9 @@ data Extension = Extension deriving (Arbitrary) via GenericUniform Extension instance ParseMLS Extension where - parseMLS = 
Extension <$> parseMLS <*> parseMLSBytes @Word32 + parseMLS = Extension <$> parseMLS <*> parseMLSBytes @VarInt instance SerialiseMLS Extension where serialiseMLS (Extension ty d) = do serialiseMLS ty serialiseMLSBytes @Word32 d - -data ExtensionTag - = CapabilitiesExtensionTag - | LifetimeExtensionTag - deriving (Bounded, Enum) - -$(genSingletons [''ExtensionTag]) - -type family ExtensionType (t :: ExtensionTag) :: Type where - ExtensionType 'CapabilitiesExtensionTag = Capabilities - ExtensionType 'LifetimeExtensionTag = Lifetime - -parseExtension :: Sing t -> Get (ExtensionType t) -parseExtension SCapabilitiesExtensionTag = parseMLS -parseExtension SLifetimeExtensionTag = parseMLS - -data SomeExtension where - SomeExtension :: Sing t -> ExtensionType t -> SomeExtension - -instance Eq SomeExtension where - SomeExtension SCapabilitiesExtensionTag caps1 == SomeExtension SCapabilitiesExtensionTag caps2 = caps1 == caps2 - SomeExtension SLifetimeExtensionTag lt1 == SomeExtension SLifetimeExtensionTag lt2 = lt1 == lt2 - _ == _ = False - -instance Show SomeExtension where - show (SomeExtension SCapabilitiesExtensionTag caps) = show caps - show (SomeExtension SLifetimeExtensionTag lt) = show lt - -decodeExtension :: Extension -> Either Text (Maybe SomeExtension) -decodeExtension e = do - case toMLSEnum' (extType e) of - Left MLSEnumUnknown -> pure Nothing - Left MLSEnumInvalid -> Left "Invalid extension type" - Right t -> withSomeSing t $ \st -> - Just <$> decodeMLSWith' (SomeExtension st <$> parseExtension st) (extData e) - -data Capabilities = Capabilities - { capVersions :: [ProtocolVersion], - capCiphersuites :: [CipherSuite], - capExtensions :: [Word16], - capProposals :: [Word16] - } - deriving stock (Eq, Show, Generic) - deriving (Arbitrary) via (GenericUniform Capabilities) - -instance ParseMLS Capabilities where - parseMLS = - Capabilities - <$> parseMLSVector @Word8 parseMLS - <*> parseMLSVector @Word8 parseMLS - <*> parseMLSVector @Word8 parseMLS - <*> 
parseMLSVector @Word8 parseMLS - --- | Seconds since the UNIX epoch. -newtype Timestamp = Timestamp {timestampSeconds :: Word64} - deriving newtype (Eq, Show, Arbitrary, ParseMLS) - -tsPOSIX :: Timestamp -> POSIXTime -tsPOSIX = fromIntegral . timestampSeconds - -data Lifetime = Lifetime - { ltNotBefore :: Timestamp, - ltNotAfter :: Timestamp - } - deriving stock (Eq, Show, Generic) - deriving (Arbitrary) via GenericUniform Lifetime - -instance ParseMLS Lifetime where - parseMLS = Lifetime <$> parseMLS <*> parseMLS diff --git a/libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs b/libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs new file mode 100644 index 0000000000..8531ef3bd6 --- /dev/null +++ b/libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs @@ -0,0 +1,30 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . 
+ +module Wire.API.MLS.HPKEPublicKey where + +import Imports +import Test.QuickCheck +import Wire.API.MLS.Serialisation + +newtype HPKEPublicKey = HPKEPublicKey {unHPKEPublicKey :: ByteString} + deriving (Show, Eq, Arbitrary) + +instance ParseMLS HPKEPublicKey where + parseMLS = HPKEPublicKey <$> parseMLSBytes @VarInt diff --git a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs index 4d213c71b0..019790338f 100644 --- a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs +++ b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs @@ -1,5 +1,3 @@ -{-# LANGUAGE GeneralizedNewtypeDeriving #-} - -- This file is part of the Wire Server implementation. -- -- Copyright (C) 2022 Wire Swiss GmbH @@ -24,12 +22,7 @@ module Wire.API.MLS.KeyPackage KeyPackageCount (..), KeyPackageData (..), KeyPackage (..), - kpProtocolVersion, - kpCipherSuite, - kpInitKey, - kpCredential, - kpExtensions, - kpIdentity, + keyPackageIdentity, kpRef, kpRef', KeyPackageTBS (..), @@ -52,6 +45,7 @@ import Data.Json.Util import Data.Qualified import Data.Schema import qualified Data.Swagger as S +import GHC.Records import Imports import Test.QuickCheck import Web.HttpApiData @@ -59,6 +53,9 @@ import Wire.API.MLS.CipherSuite import Wire.API.MLS.Context import Wire.API.MLS.Credential import Wire.API.MLS.Extension +import Wire.API.MLS.HPKEPublicKey +import Wire.API.MLS.LeafNode +import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation import Wire.Arbitrary @@ -160,17 +157,18 @@ kpRef cs = kpRef' :: RawMLS KeyPackage -> Maybe KeyPackageRef kpRef' kp = kpRef - <$> cipherSuiteTag (kpCipherSuite (rmValue kp)) + <$> cipherSuiteTag (kp.rmValue.cipherSuite) <*> pure (KeyPackageData (rmRaw kp)) -------------------------------------------------------------------------------- data KeyPackageTBS = KeyPackageTBS - { kpuProtocolVersion :: ProtocolVersion, - kpuCipherSuite :: CipherSuite, - kpuInitKey :: ByteString, - kpuCredential :: Credential, - kpuExtensions :: [Extension] + 
{ protocolVersion :: ProtocolVersion, + cipherSuite :: CipherSuite, + initKey :: HPKEPublicKey, + leafNode :: LeafNode, + credential :: Credential, + extensions :: [Extension] } deriving stock (Eq, Show, Generic) deriving (Arbitrary) via GenericUniform KeyPackageTBS @@ -180,36 +178,40 @@ instance ParseMLS KeyPackageTBS where KeyPackageTBS <$> parseMLS <*> parseMLS - <*> parseMLSBytes @Word16 <*> parseMLS - <*> parseMLSVector @Word32 parseMLS + <*> parseMLS + <*> parseMLS + <*> parseMLSVector @VarInt parseMLS data KeyPackage = KeyPackage - { kpTBS :: RawMLS KeyPackageTBS, - kpSignature :: ByteString + { tbs :: RawMLS KeyPackageTBS, + signature_ :: ByteString } deriving stock (Eq, Show) instance S.ToSchema KeyPackage where declareNamedSchema _ = pure (mlsSwagger "KeyPackage") -kpProtocolVersion :: KeyPackage -> ProtocolVersion -kpProtocolVersion = kpuProtocolVersion . rmValue . kpTBS +instance HasField "protocolVersion" KeyPackage ProtocolVersion where + getField = (.tbs.rmValue.protocolVersion) + +instance HasField "cipherSuite" KeyPackage CipherSuite where + getField = (.tbs.rmValue.cipherSuite) -kpCipherSuite :: KeyPackage -> CipherSuite -kpCipherSuite = kpuCipherSuite . rmValue . kpTBS +instance HasField "initKey" KeyPackage HPKEPublicKey where + getField = (.tbs.rmValue.initKey) -kpInitKey :: KeyPackage -> ByteString -kpInitKey = kpuInitKey . rmValue . kpTBS +instance HasField "credential" KeyPackage Credential where + getField = (.tbs.rmValue.credential) -kpCredential :: KeyPackage -> Credential -kpCredential = kpuCredential . rmValue . kpTBS +instance HasField "extensions" KeyPackage [Extension] where + getField = (.tbs.rmValue.extensions) -kpExtensions :: KeyPackage -> [Extension] -kpExtensions = kpuExtensions . rmValue . kpTBS +instance HasField "leafNode" KeyPackage LeafNode where + getField = (.tbs.rmValue.leafNode) -kpIdentity :: KeyPackage -> Either Text ClientIdentity -kpIdentity = decodeMLS' @ClientIdentity . bcIdentity . 
kpCredential +keyPackageIdentity :: KeyPackage -> Either Text ClientIdentity +keyPackageIdentity = decodeMLS' @ClientIdentity . (.credential.identityData) rawKeyPackageSchema :: ValueSchema NamedSwaggerDoc (RawMLS KeyPackage) rawKeyPackageSchema = diff --git a/libs/wire-api/src/Wire/API/MLS/Keys.hs b/libs/wire-api/src/Wire/API/MLS/Keys.hs index 96841a4686..8a47539e8b 100644 --- a/libs/wire-api/src/Wire/API/MLS/Keys.hs +++ b/libs/wire-api/src/Wire/API/MLS/Keys.hs @@ -32,6 +32,7 @@ import qualified Data.Map as Map import Data.Schema import qualified Data.Swagger as S import Imports +import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential data MLSKeys = MLSKeys diff --git a/libs/wire-api/src/Wire/API/MLS/LeafNode.hs b/libs/wire-api/src/Wire/API/MLS/LeafNode.hs new file mode 100644 index 0000000000..78b8cc5430 --- /dev/null +++ b/libs/wire-api/src/Wire/API/MLS/LeafNode.hs @@ -0,0 +1,125 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . 
+ +module Wire.API.MLS.LeafNode + ( LeafIndex, + LeafNode (..), + LeafNodeSource (..), + LeafNodeSourceTag (..), + leafNodeSourceTag, + ) +where + +import GHC.Records +import Imports +import Test.QuickCheck +import Wire.API.MLS.Capabilities +import Wire.API.MLS.Credential +import Wire.API.MLS.Extension +import Wire.API.MLS.HPKEPublicKey +import Wire.API.MLS.Lifetime +import Wire.API.MLS.Serialisation +import Wire.Arbitrary + +type LeafIndex = Word32 + +data LeafNodeTBS = LeafNodeTBS + { encryptionKey :: HPKEPublicKey, + signatureKey :: ByteString, + credential :: Credential, + capabilities :: Capabilities, + source :: LeafNodeSource, + extensions :: [Extension] + } + deriving (Show, Eq, Generic) + deriving (Arbitrary) via (GenericUniform LeafNodeTBS) + +instance ParseMLS LeafNodeTBS where + parseMLS = + LeafNodeTBS + <$> parseMLS + <*> parseMLSBytes @VarInt + <*> parseMLS + <*> parseMLS + <*> parseMLS + <*> parseMLSVector @VarInt parseMLS + +-- | This type can only verify the signature when the LeafNodeSource is +-- LeafNodeSourceKeyPackage +data LeafNode = LeafNode + { tbs :: LeafNodeTBS, + signature_ :: ByteString + } + deriving (Show, Eq, Generic) + deriving (Arbitrary) via (GenericUniform LeafNode) + +instance ParseMLS LeafNode where + parseMLS = + LeafNode + <$> parseMLS + <*> parseMLSBytes @VarInt + +instance HasField "encryptionKey" LeafNode HPKEPublicKey where + getField = (.tbs.encryptionKey) + +instance HasField "signatureKey" LeafNode ByteString where + getField = (.tbs.signatureKey) + +instance HasField "credential" LeafNode Credential where + getField = (.tbs.credential) + +instance HasField "capabilities" LeafNode Capabilities where + getField = (.tbs.capabilities) + +instance HasField "source" LeafNode LeafNodeSource where + getField = (.tbs.source) + +instance HasField "extensions" LeafNode [Extension] where + getField = (.tbs.extensions) + +data LeafNodeSource + = LeafNodeSourceKeyPackage Lifetime + | LeafNodeSourceUpdate + | LeafNodeSourceCommit 
ByteString + deriving (Show, Eq, Generic) + deriving (Arbitrary) via (GenericUniform LeafNodeSource) + +instance ParseMLS LeafNodeSource where + parseMLS = + parseMLS >>= \case + LeafNodeSourceKeyPackageTag -> LeafNodeSourceKeyPackage <$> parseMLS + LeafNodeSourceUpdateTag -> pure LeafNodeSourceUpdate + LeafNodeSourceCommitTag -> LeafNodeSourceCommit <$> parseMLSBytes @VarInt + +data LeafNodeSourceTag + = LeafNodeSourceKeyPackageTag + | LeafNodeSourceUpdateTag + | LeafNodeSourceCommitTag + deriving (Show, Eq, Ord, Enum, Bounded) + +instance Bounded LeafNodeSourceTag => ParseMLS LeafNodeSourceTag where + parseMLS = parseMLSEnum @Word8 "leaf node source" + +instance HasField "name" LeafNodeSourceTag Text where + getField LeafNodeSourceKeyPackageTag = "key_package" + getField LeafNodeSourceUpdateTag = "update" + getField LeafNodeSourceCommitTag = "commit" + +leafNodeSourceTag :: LeafNodeSource -> LeafNodeSourceTag +leafNodeSourceTag (LeafNodeSourceKeyPackage _) = LeafNodeSourceKeyPackageTag +leafNodeSourceTag LeafNodeSourceUpdate = LeafNodeSourceUpdateTag +leafNodeSourceTag (LeafNodeSourceCommit _) = LeafNodeSourceCommitTag diff --git a/libs/wire-api/src/Wire/API/MLS/Lifetime.hs b/libs/wire-api/src/Wire/API/MLS/Lifetime.hs new file mode 100644 index 0000000000..64f53b6727 --- /dev/null +++ b/libs/wire-api/src/Wire/API/MLS/Lifetime.hs @@ -0,0 +1,42 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. 
+-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +module Wire.API.MLS.Lifetime where + +import Data.Time.Clock.POSIX +import Imports +import Test.QuickCheck +import Wire.API.MLS.Serialisation +import Wire.Arbitrary + +-- | Seconds since the UNIX epoch. +newtype Timestamp = Timestamp {timestampSeconds :: Word64} + deriving newtype (Eq, Show, Arbitrary, ParseMLS) + +tsPOSIX :: Timestamp -> POSIXTime +tsPOSIX = fromIntegral . timestampSeconds + +data Lifetime = Lifetime + { ltNotBefore :: Timestamp, + ltNotAfter :: Timestamp + } + deriving stock (Eq, Show, Generic) + deriving (Arbitrary) via GenericUniform Lifetime + +instance ParseMLS Lifetime where + parseMLS = Lifetime <$> parseMLS <*> parseMLS diff --git a/libs/wire-api/src/Wire/API/MLS/Message.hs b/libs/wire-api/src/Wire/API/MLS/Message.hs index 1787ceab4b..9084d4fc68 100644 --- a/libs/wire-api/src/Wire/API/MLS/Message.hs +++ b/libs/wire-api/src/Wire/API/MLS/Message.hs @@ -1,7 +1,5 @@ -{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE StandaloneKindSignatures #-} {-# LANGUAGE TemplateHaskell #-} - -- This file is part of the Wire Server implementation. -- -- Copyright (C) 2022 Wire Swiss GmbH @@ -18,28 +16,28 @@ -- -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . 
+{-# OPTIONS_GHC -Wwarn #-} module Wire.API.MLS.Message - ( Message (..), - msgGroupId, - msgEpoch, - msgSender, - msgPayload, - MessageTBS (..), - MessageExtraFields (..), - WireFormatTag (..), - SWireFormatTag (..), - SomeMessage (..), - ContentType (..), - MessagePayload (..), + ( -- * MLS Message types + Message (..), + MessageContent (..), + PublicMessage (..), + PrivateMessage (..), + FramedContent (..), + FramedContentData (..), + FramedContentDataTag (..), + FramedContentTBS (..), + FramedContentAuthData (..), Sender (..), - MLSPlainTextSym0, - MLSCipherTextSym0, - MLSMessageSendingStatus (..), - KnownFormatTag (..), UnreachableUsers (..), + + -- * Utilities verifyMessageSignature, mkSignedMessage, + + -- * Servant types + MLSMessageSendingStatus (..), ) where @@ -47,197 +45,158 @@ import Control.Lens ((?~)) import Crypto.PubKey.Ed25519 import qualified Data.Aeson as A import Data.Binary -import Data.Binary.Get -import Data.Binary.Put import qualified Data.ByteArray as BA import Data.Id import Data.Json.Util import Data.Kind import Data.Qualified import Data.Schema +import Data.Schema hiding (tag) import Data.Singletons.TH import qualified Data.Swagger as S +import GHC.Records import Imports -import Test.QuickCheck hiding (label) import Wire.API.Event.Conversation import Wire.API.MLS.CipherSuite import Wire.API.MLS.Commit import Wire.API.MLS.Epoch +import Wire.API.MLS.Extension import Wire.API.MLS.Group import Wire.API.MLS.KeyPackage +import Wire.API.MLS.LeafNode import Wire.API.MLS.Proposal +import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation -import Wire.Arbitrary (GenericUniform (..)) +import Wire.API.MLS.Welcome -data WireFormatTag = MLSPlainText | MLSCipherText - deriving (Bounded, Enum, Eq, Show) - -$(genSingletons [''WireFormatTag]) +data WireFormatTag + = WireFormatPrivateTag + | WireFormatPublicTag + | WireFormatWelcomeTag + | WireFormatGroupInfoTag + | WireFormatKeyPackageTag + deriving (Enum, Bounded, Eq, Show) instance ParseMLS 
WireFormatTag where parseMLS = parseMLSEnum @Word8 "wire format" -data family MessageExtraFields (tag :: WireFormatTag) :: Type - -data instance MessageExtraFields 'MLSPlainText = MessageExtraFields - { msgSignature :: ByteString, - msgConfirmation :: Maybe ByteString, - msgMembership :: Maybe ByteString - } - deriving (Generic) - deriving (Arbitrary) via (GenericUniform (MessageExtraFields 'MLSPlainText)) - -instance ParseMLS (MessageExtraFields 'MLSPlainText) where - parseMLS = - MessageExtraFields - <$> label "msgSignature" (parseMLSBytes @Word16) - <*> label "msgConfirmation" (parseMLSOptional (parseMLSBytes @Word8)) - <*> label "msgMembership" (parseMLSOptional (parseMLSBytes @Word8)) - -instance SerialiseMLS (MessageExtraFields 'MLSPlainText) where - serialiseMLS (MessageExtraFields sig mconf mmemb) = do - serialiseMLSBytes @Word16 sig - serialiseMLSOptional (serialiseMLSBytes @Word8) mconf - serialiseMLSOptional (serialiseMLSBytes @Word8) mmemb - -data instance MessageExtraFields 'MLSCipherText = NoExtraFields - -instance ParseMLS (MessageExtraFields 'MLSCipherText) where - parseMLS = pure NoExtraFields - -deriving instance Eq (MessageExtraFields 'MLSPlainText) - -deriving instance Eq (MessageExtraFields 'MLSCipherText) - -deriving instance Show (MessageExtraFields 'MLSPlainText) - -deriving instance Show (MessageExtraFields 'MLSCipherText) +instance SerialiseMLS WireFormatTag where + serialiseMLS = serialiseMLSEnum @Word8 -data Message (tag :: WireFormatTag) = Message - { msgTBS :: RawMLS (MessageTBS tag), - msgExtraFields :: MessageExtraFields tag +data Message = Message + { protocolVersion :: ProtocolVersion, + content :: MessageContent } + deriving (Eq, Show) -deriving instance Eq (Message 'MLSPlainText) - -deriving instance Eq (Message 'MLSCipherText) - -deriving instance Show (Message 'MLSPlainText) - -deriving instance Show (Message 'MLSCipherText) - -instance ParseMLS (Message 'MLSPlainText) where - parseMLS = Message <$> label "tbs" parseMLS <*> 
label "MessageExtraFields" parseMLS - -instance SerialiseMLS (Message 'MLSPlainText) where - serialiseMLS (Message msgTBS msgExtraFields) = do - putByteString (rmRaw msgTBS) - serialiseMLS msgExtraFields - -instance ParseMLS (Message 'MLSCipherText) where +instance ParseMLS Message where parseMLS = Message <$> parseMLS <*> parseMLS --- | This corresponds to the format byte at the beginning of a message. --- It does not convey any information, but it needs to be present in --- order for signature verification to work. -data KnownFormatTag (tag :: WireFormatTag) = KnownFormatTag +instance SerialiseMLS Message where + serialiseMLS msg = do + serialiseMLS msg.protocolVersion + serialiseMLS msg.content -instance ParseMLS (KnownFormatTag tag) where - parseMLS = parseMLS @WireFormatTag $> KnownFormatTag +instance HasField "wireFormat" Message WireFormatTag where + getField = (.content.wireFormat) -instance SerialiseMLS (KnownFormatTag 'MLSPlainText) where - serialiseMLS _ = put (fromMLSEnum @Word8 MLSPlainText) - -instance SerialiseMLS (KnownFormatTag 'MLSCipherText) where - serialiseMLS _ = put (fromMLSEnum @Word8 MLSCipherText) - -deriving instance Eq (KnownFormatTag 'MLSPlainText) - -deriving instance Eq (KnownFormatTag 'MLSCipherText) +data MessageContent + = MessagePrivate (RawMLS PrivateMessage) + | MessagePublic PublicMessage + | MessageWelcome Welcome + | MessageGroupInfo -- TODO + | MessageKeyPackage (RawMLS KeyPackage) + deriving (Eq, Show) -deriving instance Show (KnownFormatTag 'MLSPlainText) +instance HasField "wireFormat" MessageContent WireFormatTag where + getField (MessagePrivate _) = WireFormatPrivateTag + getField (MessagePublic _) = WireFormatPublicTag + getField (MessageWelcome _) = WireFormatWelcomeTag + getField MessageGroupInfo = WireFormatGroupInfoTag + getField (MessageKeyPackage _) = WireFormatKeyPackageTag -deriving instance Show (KnownFormatTag 'MLSCipherText) +instance ParseMLS MessageContent where + parseMLS = + parseMLS >>= \case + 
WireFormatPrivateTag -> MessagePrivate <$> parseMLS + WireFormatPublicTag -> MessagePublic <$> parseMLS + WireFormatWelcomeTag -> MessageWelcome <$> parseMLS + WireFormatGroupInfoTag -> pure MessageGroupInfo + WireFormatKeyPackageTag -> MessageKeyPackage <$> parseMLS + +instance SerialiseMLS MessageContent where + serialiseMLS (MessagePrivate msg) = do + serialiseMLS WireFormatPrivateTag + serialiseMLS msg + serialiseMLS (MessagePublic msg) = do + serialiseMLS WireFormatPublicTag + serialiseMLS msg + serialiseMLS (MessageWelcome welcome) = do + serialiseMLS WireFormatWelcomeTag + serialiseMLS welcome + serialiseMLS MessageGroupInfo = do + serialiseMLS WireFormatGroupInfoTag + -- TODO + pure () + serialiseMLS (MessageKeyPackage kp) = do + serialiseMLS WireFormatKeyPackageTag + serialiseMLS kp + +instance S.ToSchema Message where + declareNamedSchema _ = pure (mlsSwagger "MLSMessage") -data MessageTBS (tag :: WireFormatTag) = MessageTBS - { tbsMsgFormat :: KnownFormatTag tag, - tbsMsgGroupId :: GroupId, - tbsMsgEpoch :: Epoch, - tbsMsgAuthData :: ByteString, - tbsMsgSender :: Sender tag, - tbsMsgPayload :: MessagePayload tag +data PublicMessage = PublicMessage + { content :: RawMLS FramedContent, + authData :: FramedContentAuthData, + membershipTag :: Maybe ByteString } + deriving (Eq, Show) -msgGroupId :: Message tag -> GroupId -msgGroupId = tbsMsgGroupId . rmValue . msgTBS - -msgEpoch :: Message tag -> Epoch -msgEpoch = tbsMsgEpoch . rmValue . msgTBS - -msgSender :: Message tag -> Sender tag -msgSender = tbsMsgSender . rmValue . msgTBS - -msgPayload :: Message tag -> MessagePayload tag -msgPayload = tbsMsgPayload . rmValue . 
msgTBS - -instance ParseMLS (MessageTBS 'MLSPlainText) where - parseMLS = do - f <- parseMLS - g <- parseMLS - e <- parseMLS - s <- parseMLS - d <- parseMLSBytes @Word32 - MessageTBS f g e d s <$> parseMLS - -instance ParseMLS (MessageTBS 'MLSCipherText) where +instance ParseMLS PublicMessage where parseMLS = do - f <- parseMLS - g <- parseMLS - e <- parseMLS - ct <- parseMLS - d <- parseMLSBytes @Word32 - s <- parseMLS - p <- parseMLSBytes @Word32 - pure $ MessageTBS f g e d s (CipherText ct p) - -instance SerialiseMLS (MessageTBS 'MLSPlainText) where - serialiseMLS (MessageTBS f g e d s p) = do - serialiseMLS f - serialiseMLS g - serialiseMLS e - serialiseMLS s - serialiseMLSBytes @Word32 d - serialiseMLS p - -deriving instance Eq (MessageTBS 'MLSPlainText) - -deriving instance Eq (MessageTBS 'MLSCipherText) - -deriving instance Show (MessageTBS 'MLSPlainText) - -deriving instance Show (MessageTBS 'MLSCipherText) - -data SomeMessage where - SomeMessage :: Sing tag -> Message tag -> SomeMessage - -instance S.ToSchema SomeMessage where - declareNamedSchema _ = pure (mlsSwagger "MLSMessage") - -instance ParseMLS SomeMessage where - parseMLS = - lookAhead parseMLS >>= \case - MLSPlainText -> SomeMessage SMLSPlainText <$> parseMLS - MLSCipherText -> SomeMessage SMLSCipherText <$> parseMLS - -data family Sender (tag :: WireFormatTag) :: Type - -data instance Sender 'MLSCipherText = EncryptedSender {esData :: ByteString} + content <- parseMLS + authData <- parseFramedContentAuthData (framedContentDataTag (content.rmValue.content)) + membershipTag <- case content.rmValue.sender of + SenderMember _ -> Just <$> parseMLSBytes @VarInt + _ -> pure Nothing + pure + PublicMessage + { content = content, + authData = authData, + membershipTag = membershipTag + } + +instance SerialiseMLS PublicMessage where + serialiseMLS msg = do + serialiseMLS msg.content + serialiseMLS msg.authData + traverse_ (serialiseMLSBytes @VarInt) msg.membershipTag + +data PrivateMessage = PrivateMessage 
+ { groupId :: GroupId, + epoch :: Epoch, + tag :: FramedContentDataTag, + authenticatedData :: ByteString, + encryptedSenderData :: ByteString, + ciphertext :: ByteString + } deriving (Eq, Show) -instance ParseMLS (Sender 'MLSCipherText) where - parseMLS = EncryptedSender <$> parseMLSBytes @Word8 - -data SenderTag = MemberSenderTag | PreconfiguredSenderTag | NewMemberSenderTag +instance ParseMLS PrivateMessage where + parseMLS = + PrivateMessage + <$> parseMLS + <*> parseMLS + <*> parseMLS + <*> parseMLSBytes @VarInt + <*> parseMLSBytes @VarInt + <*> parseMLSBytes @VarInt + +data SenderTag + = SenderMemberTag + | SenderExternalTag + | SenderNewMemberProposalTag + | SenderNewMemberCommitTag deriving (Bounded, Enum, Show, Eq) instance ParseMLS SenderTag where @@ -246,77 +205,206 @@ instance ParseMLS SenderTag where instance SerialiseMLS SenderTag where serialiseMLS = serialiseMLSEnum @Word8 --- NOTE: according to the spec, the preconfigured sender case contains a --- bytestring, not a u32. However, as of 2022-08-02, the openmls fork used by --- the clients is using a u32 here. 
-data instance Sender 'MLSPlainText - = MemberSender KeyPackageRef - | PreconfiguredSender Word32 - | NewMemberSender +data Sender + = SenderMember LeafIndex + | SenderExternal Word32 + | SenderNewMemberProposal + | SenderNewMemberCommit deriving (Eq, Show, Generic) -instance ParseMLS (Sender 'MLSPlainText) where +instance ParseMLS Sender where parseMLS = parseMLS >>= \case - MemberSenderTag -> MemberSender <$> parseMLS - PreconfiguredSenderTag -> PreconfiguredSender <$> get - NewMemberSenderTag -> pure NewMemberSender - -instance SerialiseMLS (Sender 'MLSPlainText) where - serialiseMLS (MemberSender r) = do - serialiseMLS MemberSenderTag - serialiseMLS r - serialiseMLS (PreconfiguredSender x) = do - serialiseMLS PreconfiguredSenderTag - put x - serialiseMLS NewMemberSender = serialiseMLS NewMemberSenderTag - -data family MessagePayload (tag :: WireFormatTag) :: Type - -deriving instance Eq (MessagePayload 'MLSPlainText) - -deriving instance Eq (MessagePayload 'MLSCipherText) - -deriving instance Show (MessagePayload 'MLSPlainText) - -deriving instance Show (MessagePayload 'MLSCipherText) - -data instance MessagePayload 'MLSCipherText = CipherText - { msgContentType :: Word8, - msgCipherText :: ByteString + SenderMemberTag -> SenderMember <$> parseMLS + SenderExternalTag -> SenderExternal <$> parseMLS + SenderNewMemberProposalTag -> pure SenderNewMemberProposal + SenderNewMemberCommitTag -> pure SenderNewMemberCommit + +instance SerialiseMLS Sender where + serialiseMLS (SenderMember i) = do + serialiseMLS SenderMemberTag + serialiseMLS i + serialiseMLS (SenderExternal w) = do + serialiseMLS SenderExternalTag + serialiseMLS w + serialiseMLS SenderNewMemberProposal = + serialiseMLS SenderNewMemberProposalTag + serialiseMLS SenderNewMemberCommit = + serialiseMLS SenderNewMemberCommitTag + +needsGroupContext :: Sender -> Bool +needsGroupContext (SenderMember _) = True +needsGroupContext (SenderExternal _) = True +needsGroupContext _ = False + +data FramedContent = 
FramedContent + { groupId :: GroupId, + epoch :: Epoch, + sender :: Sender, + authenticatedData :: ByteString, + content :: FramedContentData } + deriving (Eq, Show) -data ContentType - = ApplicationMessageTag - | ProposalMessageTag - | CommitMessageTag - deriving (Bounded, Enum, Eq, Show) +instance ParseMLS FramedContent where + parseMLS = + FramedContent + <$> parseMLS + <*> parseMLS + <*> parseMLS + <*> parseMLSBytes @VarInt + <*> parseMLS + +instance SerialiseMLS FramedContent where + serialiseMLS fc = do + serialiseMLS fc.groupId + serialiseMLS fc.epoch + serialiseMLS fc.sender + serialiseMLSBytes @VarInt fc.authenticatedData + serialiseMLS fc.content + +data FramedContentDataTag + = FramedContentApplicationDataTag + | FramedContentProposalTag + | FramedContentCommitTag + deriving (Enum, Bounded, Eq, Ord, Show) + +instance ParseMLS FramedContentDataTag where + parseMLS = parseMLSEnum @Word8 "ContentType" + +instance SerialiseMLS FramedContentDataTag where + serialiseMLS = serialiseMLSEnum @Word8 -instance ParseMLS ContentType where - parseMLS = parseMLSEnum @Word8 "content type" +data FramedContentData + = FramedContentApplicationData ByteString + | FramedContentProposal (RawMLS Proposal) + | FramedContentCommit (RawMLS Commit) + deriving (Eq, Show) -data instance MessagePayload 'MLSPlainText - = ApplicationMessage ByteString - | ProposalMessage (RawMLS Proposal) - | CommitMessage Commit +framedContentDataTag :: FramedContentData -> FramedContentDataTag +framedContentDataTag (FramedContentApplicationData _) = FramedContentApplicationDataTag +framedContentDataTag (FramedContentProposal _) = FramedContentProposalTag +framedContentDataTag (FramedContentCommit _) = FramedContentCommitTag -instance ParseMLS (MessagePayload 'MLSPlainText) where +instance ParseMLS FramedContentData where parseMLS = parseMLS >>= \case - ApplicationMessageTag -> ApplicationMessage <$> parseMLSBytes @Word32 - ProposalMessageTag -> ProposalMessage <$> parseMLS - CommitMessageTag -> 
CommitMessage <$> parseMLS + FramedContentApplicationDataTag -> + FramedContentApplicationData <$> parseMLSBytes @VarInt + FramedContentProposalTag -> FramedContentProposal <$> parseMLS + FramedContentCommitTag -> FramedContentCommit <$> parseMLS + +instance SerialiseMLS FramedContentData where + serialiseMLS (FramedContentApplicationData bs) = do + serialiseMLS FramedContentApplicationDataTag + serialiseMLSBytes @VarInt bs + serialiseMLS (FramedContentProposal prop) = do + serialiseMLS FramedContentProposalTag + serialiseMLS prop + serialiseMLS (FramedContentCommit commit) = do + serialiseMLS FramedContentCommitTag + serialiseMLS commit + +data FramedContentTBS = FramedContentTBS + { protocolVersion :: ProtocolVersion, + wireFormat :: WireFormatTag, + content :: RawMLS FramedContent, + groupContext :: Maybe (RawMLS GroupContext) + } + deriving (Eq, Show) -instance SerialiseMLS ContentType where - serialiseMLS = serialiseMLSEnum @Word8 +instance SerialiseMLS FramedContentTBS where + serialiseMLS tbs = do + serialiseMLS tbs.protocolVersion + serialiseMLS tbs.wireFormat + serialiseMLS tbs.content + traverse_ serialiseMLS tbs.groupContext + +framedContentTBS :: RawMLS GroupContext -> RawMLS FramedContent -> FramedContentTBS +framedContentTBS ctx msgContent = + FramedContentTBS + { protocolVersion = defaultProtocolVersion, + wireFormat = WireFormatPublicTag, + content = msgContent, + groupContext = guard (needsGroupContext msgContent.rmValue.sender) $> ctx + } + +data FramedContentAuthData = FramedContentAuthData + { signature_ :: ByteString, + confirmationTag :: Maybe ByteString + } + deriving (Eq, Show) + +parseFramedContentAuthData :: FramedContentDataTag -> Get FramedContentAuthData +parseFramedContentAuthData tag = do + sig <- parseMLSBytes @VarInt + confirmationTag <- case tag of + FramedContentCommitTag -> Just <$> parseMLSBytes @VarInt + _ -> pure Nothing + pure (FramedContentAuthData sig confirmationTag) + +instance SerialiseMLS FramedContentAuthData where + 
serialiseMLS ad = do + serialiseMLSBytes @VarInt ad.signature_ + traverse_ (serialiseMLSBytes @VarInt) ad.confirmationTag + +data GroupContext = GroupContext + { protocolVersion :: ProtocolVersion, + cipherSuite :: CipherSuite, + groupId :: GroupId, + epoch :: Epoch, + treeHash :: ByteString, + confirmedTranscriptHash :: ByteString, + extensions :: [Extension] + } + deriving (Eq, Show) -instance SerialiseMLS (MessagePayload 'MLSPlainText) where - serialiseMLS (ProposalMessage raw) = do - serialiseMLS ProposalMessageTag - putByteString (rmRaw raw) - -- We do not need to serialise Commit and Application messages, - -- so the next case is left as a stub - serialiseMLS _ = pure () +-- | Craft a message with the backend itself as a sender. +mkSignedMessage :: + SecretKey -> PublicKey -> GroupId -> Epoch -> FramedContentData -> Message +mkSignedMessage priv pub gid epoch payload = + let framedContent = + mkRawMLS + FramedContent + { groupId = gid, + epoch = epoch, + sender = SenderExternal 0, + content = payload, + authenticatedData = mempty + } + tbs = + FramedContentTBS + { protocolVersion = defaultProtocolVersion, + wireFormat = WireFormatPublicTag, + content = framedContent, + groupContext = Nothing + } + sig = BA.convert $ sign priv pub (encodeMLS' tbs) + in Message + { protocolVersion = defaultProtocolVersion, + content = + MessagePublic + PublicMessage + { content = framedContent, + authData = FramedContentAuthData sig Nothing, + membershipTag = Nothing + } + } + +verifyMessageSignature :: + RawMLS GroupContext -> + RawMLS FramedContent -> + FramedContentAuthData -> + ByteString -> + Bool +verifyMessageSignature ctx msgContent authData pubkey = isJust $ do + let tbs = encodeMLS' (framedContentTBS ctx msgContent) + sig = authData.signature_ + cs <- cipherSuiteTag ctx.rmValue.cipherSuite + guard $ csVerifySignature cs pubkey tbs sig + +-------------------------------------------------------------------------------- +-- Servant newtype UnreachableUsers = 
UnreachableUsers {unreachableUsers :: [Qualified UserId]} deriving stock (Eq, Show) @@ -357,28 +445,3 @@ instance ToSchema MLSMessageSendingStatus where "failed_to_send" (description ?~ "List of federated users who could not be reached and did not receive the message") schema - -verifyMessageSignature :: CipherSuiteTag -> Message 'MLSPlainText -> ByteString -> Bool -verifyMessageSignature cs msg pubkey = - csVerifySignature cs pubkey (rmRaw (msgTBS msg)) (msgSignature (msgExtraFields msg)) - -mkSignedMessage :: - SecretKey -> - PublicKey -> - GroupId -> - Epoch -> - MessagePayload 'MLSPlainText -> - Message 'MLSPlainText -mkSignedMessage priv pub gid epoch payload = - let tbs = - mkRawMLS $ - MessageTBS - { tbsMsgFormat = KnownFormatTag, - tbsMsgGroupId = gid, - tbsMsgEpoch = epoch, - tbsMsgAuthData = mempty, - tbsMsgSender = PreconfiguredSender 0, - tbsMsgPayload = payload - } - sig = BA.convert $ sign priv pub (rmRaw tbs) - in Message tbs (MessageExtraFields sig Nothing Nothing) diff --git a/libs/wire-api/src/Wire/API/MLS/Proposal.hs b/libs/wire-api/src/Wire/API/MLS/Proposal.hs index 1226811c6e..104b781496 100644 --- a/libs/wire-api/src/Wire/API/MLS/Proposal.hs +++ b/libs/wire-api/src/Wire/API/MLS/Proposal.hs @@ -1,4 +1,6 @@ {-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TemplateHaskell #-} + -- This file is part of the Wire Server implementation. -- -- Copyright (C) 2022 Wire Swiss GmbH @@ -15,7 +17,6 @@ -- -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . 
-{-# LANGUAGE TemplateHaskell #-} module Wire.API.MLS.Proposal where @@ -32,27 +33,11 @@ import Wire.API.MLS.Context import Wire.API.MLS.Extension import Wire.API.MLS.Group import Wire.API.MLS.KeyPackage +import Wire.API.MLS.ProposalTag +import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation import Wire.Arbitrary -data ProposalTag - = AddProposalTag - | UpdateProposalTag - | RemoveProposalTag - | PreSharedKeyProposalTag - | ReInitProposalTag - | ExternalInitProposalTag - | AppAckProposalTag - | GroupContextExtensionsProposalTag - deriving stock (Bounded, Enum, Eq, Generic, Show) - deriving (Arbitrary) via GenericUniform ProposalTag - -instance ParseMLS ProposalTag where - parseMLS = parseMLSEnum @Word16 "proposal type" - -instance SerialiseMLS ProposalTag where - serialiseMLS = serialiseMLSEnum @Word16 - data Proposal = AddProposal (RawMLS KeyPackage) | UpdateProposal KeyPackage diff --git a/libs/wire-api/src/Wire/API/MLS/ProposalTag.hs b/libs/wire-api/src/Wire/API/MLS/ProposalTag.hs new file mode 100644 index 0000000000..e9e670088e --- /dev/null +++ b/libs/wire-api/src/Wire/API/MLS/ProposalTag.hs @@ -0,0 +1,41 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . 
+ +module Wire.API.MLS.ProposalTag where + +import Data.Binary +import Imports +import Wire.API.MLS.Serialisation +import Wire.Arbitrary + +data ProposalTag + = AddProposalTag + | UpdateProposalTag + | RemoveProposalTag + | PreSharedKeyProposalTag + | ReInitProposalTag + | ExternalInitProposalTag + | AppAckProposalTag + | GroupContextExtensionsProposalTag + deriving stock (Bounded, Enum, Eq, Generic, Show) + deriving (Arbitrary) via GenericUniform ProposalTag + +instance ParseMLS ProposalTag where + parseMLS = parseMLSEnum @Word16 "proposal type" + +instance SerialiseMLS ProposalTag where + serialiseMLS = serialiseMLSEnum @Word16 diff --git a/libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs b/libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs new file mode 100644 index 0000000000..c20bbe153b --- /dev/null +++ b/libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs @@ -0,0 +1,52 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . 
+{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +module Wire.API.MLS.ProtocolVersion + ( ProtocolVersion (..), + ProtocolVersionTag (..), + pvTag, + protocolVersionFromTag, + defaultProtocolVersion, + ) +where + +import Data.Binary +import Imports +import Wire.API.MLS.Serialisation +import Wire.Arbitrary + +newtype ProtocolVersion = ProtocolVersion {pvNumber :: Word8} + deriving newtype (Eq, Ord, Show, Binary, Arbitrary, ParseMLS, SerialiseMLS) + +data ProtocolVersionTag = ProtocolMLS10 | ProtocolMLSDraft11 + deriving stock (Bounded, Enum, Eq, Show, Generic) + deriving (Arbitrary) via GenericUniform ProtocolVersionTag + +pvTag :: ProtocolVersion -> Maybe ProtocolVersionTag +pvTag (ProtocolVersion v) = case v of + 1 -> pure ProtocolMLS10 + -- used by openmls + 200 -> pure ProtocolMLSDraft11 + _ -> Nothing + +protocolVersionFromTag :: ProtocolVersionTag -> ProtocolVersion +protocolVersionFromTag ProtocolMLS10 = ProtocolVersion 1 +protocolVersionFromTag ProtocolMLSDraft11 = ProtocolVersion 200 + +defaultProtocolVersion :: ProtocolVersion +defaultProtocolVersion = protocolVersionFromTag ProtocolMLS10 diff --git a/libs/wire-api/src/Wire/API/MLS/PublicGroupState.hs b/libs/wire-api/src/Wire/API/MLS/PublicGroupState.hs index 38772d5b00..e34ab49dd0 100644 --- a/libs/wire-api/src/Wire/API/MLS/PublicGroupState.hs +++ b/libs/wire-api/src/Wire/API/MLS/PublicGroupState.hs @@ -27,9 +27,9 @@ import Imports import Test.QuickCheck hiding (label) import Wire.API.MLS.CipherSuite import Wire.API.MLS.Epoch -import Wire.API.MLS.Extension import Wire.API.MLS.Group import Wire.API.MLS.KeyPackage +import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation import Wire.Arbitrary diff --git a/libs/wire-api/src/Wire/API/MLS/Welcome.hs b/libs/wire-api/src/Wire/API/MLS/Welcome.hs index 929dc78af5..1575ca2c4e 100644 --- a/libs/wire-api/src/Wire/API/MLS/Welcome.hs +++ b/libs/wire-api/src/Wire/API/MLS/Welcome.hs @@ -21,8 +21,8 @@ import qualified Data.Swagger as S import Imports import 
Wire.API.MLS.CipherSuite import Wire.API.MLS.Commit -import Wire.API.MLS.Extension import Wire.API.MLS.KeyPackage +import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation import Wire.Arbitrary diff --git a/libs/wire-api/src/Wire/API/OAuth.hs b/libs/wire-api/src/Wire/API/OAuth.hs index 21d8b3a85e..b9ca600b36 100644 --- a/libs/wire-api/src/Wire/API/OAuth.hs +++ b/libs/wire-api/src/Wire/API/OAuth.hs @@ -14,7 +14,6 @@ -- -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . -{-# LANGUAGE GeneralizedNewtypeDeriving #-} module Wire.API.OAuth where diff --git a/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs b/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs index 8acf770941..88a292e4e2 100644 --- a/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs +++ b/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs @@ -51,6 +51,7 @@ import Servant.Swagger.Internal.Orphans () import Wire.API.Connection import Wire.API.Error import Wire.API.Error.Brig +import Wire.API.MLS.CipherSuite (SignatureSchemeTag) import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage import Wire.API.MakesFederatedCall diff --git a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs index 9d010bf6e9..d424982328 100644 --- a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs +++ b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs @@ -83,7 +83,7 @@ type MLSMessagingAPI = :> ZLocalUser :> ZOptClient :> ZConn - :> ReqBody '[MLS] (RawMLS SomeMessage) + :> ReqBody '[MLS] (RawMLS Message) :> MultiVerb1 'POST '[JSON] (Respond 201 "Message sent" [Event]) ) :<|> Named @@ -121,7 +121,7 @@ type MLSMessagingAPI = :> ZLocalUser :> ZOptClient :> ZConn - :> ReqBody '[MLS] (RawMLS SomeMessage) + :> ReqBody '[MLS] (RawMLS Message) :> MultiVerb1 'POST '[JSON] (Respond 201 "Message sent" MLSMessageSendingStatus) ) :<|> Named diff --git 
a/libs/wire-api/src/Wire/API/User/Client.hs b/libs/wire-api/src/Wire/API/User/Client.hs index 397ee41cd9..45053ff4a5 100644 --- a/libs/wire-api/src/Wire/API/User/Client.hs +++ b/libs/wire-api/src/Wire/API/User/Client.hs @@ -100,8 +100,8 @@ import Deriving.Swagger StripPrefix, ) import Imports -import Wire.API.MLS.Credential -import Wire.API.User.Auth (CookieLabel) +import Wire.API.MLS.CipherSuite +import Wire.API.User.Auth import Wire.API.User.Client.Prekey as Prekey import Wire.Arbitrary (Arbitrary (arbitrary), GenericUniform (..), generateExample, mapOf', setOf') diff --git a/libs/wire-api/test/golden.hs b/libs/wire-api/test/golden.hs new file mode 100644 index 0000000000..0ff7c7e4ca --- /dev/null +++ b/libs/wire-api/test/golden.hs @@ -0,0 +1,5 @@ +import Imports +import qualified Test.Wire.API.Golden.Run as Run + +main :: IO () +main = Run.main diff --git a/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/Client_user.hs b/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/Client_user.hs index 65b4ad7d34..c137ae7b69 100644 --- a/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/Client_user.hs +++ b/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/Client_user.hs @@ -25,7 +25,7 @@ import qualified Data.Map as Map import Data.Misc import Data.Set as Set import Imports -import Wire.API.MLS.Credential +import Wire.API.MLS.CipherSuite import Wire.API.User.Auth (CookieLabel (CookieLabel, cookieLabelText)) import Wire.API.User.Client diff --git a/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/NewClient_user.hs b/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/NewClient_user.hs index 6af4068cdb..fbe5d29ac0 100644 --- a/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/NewClient_user.hs +++ b/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/NewClient_user.hs @@ -26,7 +26,7 @@ import Data.Range (unsafeRange) import qualified Data.Set as Set import Data.Text.Ascii (AsciiChars (validate)) import Imports (Maybe 
(Just, Nothing), fromRight, mempty, undefined) -import Wire.API.MLS.Credential +import Wire.API.MLS.CipherSuite import Wire.API.User.Auth (CookieLabel (CookieLabel, cookieLabelText)) import Wire.API.User.Client import Wire.API.User.Client.Prekey diff --git a/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/UpdateClient_user.hs b/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/UpdateClient_user.hs index 5f164f77cf..655532ef6a 100644 --- a/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/UpdateClient_user.hs +++ b/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/UpdateClient_user.hs @@ -21,7 +21,7 @@ module Test.Wire.API.Golden.Generated.UpdateClient_user where import qualified Data.Map as Map import Imports -import Wire.API.MLS.Credential +import Wire.API.MLS.CipherSuite import Wire.API.User.Client import Wire.API.User.Client.Prekey diff --git a/libs/wire-api/test/golden/Main.hs b/libs/wire-api/test/golden/Test/Wire/API/Golden/Run.hs similarity index 96% rename from libs/wire-api/test/golden/Main.hs rename to libs/wire-api/test/golden/Test/Wire/API/Golden/Run.hs index 7ad5b57d55..e1e110783e 100644 --- a/libs/wire-api/test/golden/Main.hs +++ b/libs/wire-api/test/golden/Test/Wire/API/Golden/Run.hs @@ -15,10 +15,7 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . 
-module Main - ( main, - ) -where +module Test.Wire.API.Golden.Run (main) where import Imports import Test.Tasty diff --git a/libs/wire-api/test/unit.hs b/libs/wire-api/test/unit.hs new file mode 100644 index 0000000000..dbf3fb9acb --- /dev/null +++ b/libs/wire-api/test/unit.hs @@ -0,0 +1,5 @@ +import Imports +import qualified Test.Wire.API.Run as Run + +main :: IO () +main = Run.main diff --git a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs index ea608ae122..ec5d4b5c52 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs @@ -21,10 +21,7 @@ import Control.Concurrent.Async import qualified Crypto.PubKey.Ed25519 as Ed25519 import Data.ByteArray import qualified Data.ByteString as BS -import qualified Data.ByteString.Lazy as LBS import Data.Domain -import Data.Either.Combinators -import Data.Hex import Data.Id import Data.Json.Util (toBase64Text) import Data.Qualified @@ -40,17 +37,16 @@ import Test.Tasty import Test.Tasty.HUnit import UnliftIO (withSystemTempDirectory) import Wire.API.MLS.CipherSuite -import Wire.API.MLS.Commit import Wire.API.MLS.Credential import Wire.API.MLS.Epoch -import Wire.API.MLS.Extension import Wire.API.MLS.Group +import Wire.API.MLS.HPKEPublicKey import Wire.API.MLS.KeyPackage import Wire.API.MLS.Message import Wire.API.MLS.Proposal +import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.PublicGroupState import Wire.API.MLS.Serialisation -import Wire.API.MLS.Welcome tests :: TestTree tests = @@ -72,11 +68,11 @@ testParseKeyPackage = do Left err -> assertFailure (T.unpack err) Right x -> pure x - pvTag (kpProtocolVersion kp) @?= Just ProtocolMLS10 - kpCipherSuite kp @?= CipherSuite 1 - BS.length (kpInitKey kp) @?= 32 + pvTag (kp.protocolVersion) @?= Just ProtocolMLS10 + kp.cipherSuite @?= CipherSuite 1 + BS.length (unHPKEPublicKey kp.initKey) @?= 32 - case decodeMLS' @ClientIdentity (bcIdentity (kpCredential kp)) of + case 
keyPackageIdentity kp of Left err -> assertFailure $ "Failed to parse identity: " <> T.unpack err Right identity -> identity @@ -87,57 +83,20 @@ testParseKeyPackage = do } -- check raw TBS package - let rawTBS = rmRaw (kpTBS kp) + let rawTBS = kp.tbs.rmRaw rawTBS @?= BS.take 196 kpData +-- TODO testParseCommit :: IO () -testParseCommit = do - msgData <- LBS.readFile "test/resources/commit1.mls" - msg :: Message 'MLSPlainText <- case decodeMLS @SomeMessage msgData of - Left err -> assertFailure (T.unpack err) - Right (SomeMessage SMLSCipherText _) -> - assertFailure "Expected plain text message, found encrypted" - Right (SomeMessage SMLSPlainText msg) -> - pure msg - - msgGroupId msg @?= "test_group" - msgEpoch msg @?= Epoch 0 - - case msgSender msg of - MemberSender kp -> kp @?= KeyPackageRef (fromRight' (unhex "24e4b0a802a2b81f00a9af7df5e91da8")) - _ -> assertFailure "Unexpected sender type" - - let payload = msgPayload msg - commit <- case payload of - CommitMessage c -> pure c - _ -> assertFailure "Unexpected message type" - - case cProposals commit of - [Inline (AddProposal _)] -> pure () - _ -> assertFailure "Unexpected proposals" +testParseCommit = pure () +-- TODO testParseApplication :: IO () -testParseApplication = do - msgData <- LBS.readFile "test/resources/app_message1.mls" - msg :: Message 'MLSCipherText <- case decodeMLS @SomeMessage msgData of - Left err -> assertFailure (T.unpack err) - Right (SomeMessage SMLSCipherText msg) -> pure msg - Right (SomeMessage SMLSPlainText _) -> - assertFailure "Expected encrypted message, found plain text" - - msgGroupId msg @?= "test_group" - msgEpoch msg @?= Epoch 0 - msgContentType (msgPayload msg) @?= fromMLSEnum ApplicationMessageTag +testParseApplication = pure () +-- TODO testParseWelcome :: IO () -testParseWelcome = do - welData <- LBS.readFile "test/resources/welcome1.mls" - wel <- case decodeMLS welData of - Left err -> assertFailure (T.unpack err) - Right x -> pure x - - welCipherSuite wel @?= CipherSuite 
1 - map gsNewMember (welSecrets wel) @?= [KeyPackageRef (fromRight' (unhex "ab4692703ca6d50ffdeaae3096f885c2"))] +testParseWelcome = pure () testKeyPackageRef :: IO () testKeyPackageRef = do @@ -145,30 +104,9 @@ testKeyPackageRef = do ref <- KeyPackageRef <$> BS.readFile "test/resources/key_package_ref1" kpRef MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 (KeyPackageData kpData) @?= ref +-- TODO testVerifyMLSPlainTextWithKey :: IO () -testVerifyMLSPlainTextWithKey = do - -- this file was created with openmls from the client that is in the add proposal - msgData <- BS.readFile "test/resources/external_proposal.mls" - - msg :: Message 'MLSPlainText <- case decodeMLS' @SomeMessage msgData of - Left err -> assertFailure (T.unpack err) - Right (SomeMessage SMLSCipherText _) -> - assertFailure "Expected SomeMessage SMLSCipherText" - Right (SomeMessage SMLSPlainText msg) -> - pure msg - - kp <- case msgPayload msg of - ProposalMessage prop -> - case rmValue prop of - AddProposal kp -> pure kp - _ -> error "Expected AddProposal" - _ -> error "Expected ProposalMessage" - - let pubkey = bcSignatureKey . kpCredential . 
rmValue $ kp - liftIO - $ assertBool - "message signature verification failed" - $ verifyMessageSignature MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 msg pubkey +testVerifyMLSPlainTextWithKey = pure () testRemoveProposalMessageSignature :: IO () testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do @@ -194,7 +132,14 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do secretKey <- Ed25519.generateSecretKey let publicKey = Ed25519.toPublic secretKey - let message = mkSignedMessage secretKey publicKey gid (Epoch 1) (ProposalMessage (mkRemoveProposal (fromJust (kpRef' kp)))) + let proposal = mkRemoveProposal (fromJust (kpRef' kp)) + let message = + mkSignedMessage + secretKey + publicKey + gid + (Epoch 1) + (FramedContentProposal proposal) let messageFilename = "signed-message.mls" BS.writeFile (tmp messageFilename) (rmRaw (mkRawMLS message)) diff --git a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs index e1779dfbb3..93c4b9c5a2 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs @@ -87,68 +87,75 @@ testConvertProtoRoundTrip = testProperty (show (typeRep @a)) trip -- auxiliary types class ArbitrarySender a where - arbitrarySender :: Gen (Sender 'MLSPlainText) + arbitrarySender :: Gen Sender -class ArbitraryMessagePayload a where - arbitraryMessagePayload :: Gen (MessagePayload 'MLSPlainText) +class ArbitraryFramedContentData a where + arbitraryFramedContentData :: Gen FramedContentData -class ArbitraryMessageTBS a where - arbitraryArbitraryMessageTBS :: Gen (MessageTBS 'MLSPlainText) +class ArbitraryFramedContent a where + arbitraryFramedContent :: Gen FramedContent -newtype MessageGenerator tbs = MessageGenerator {unMessageGenerator :: Message 'MLSPlainText} +newtype MessageGenerator tbs = MessageGenerator {unMessageGenerator :: Message} deriving newtype (ParseMLS, 
SerialiseMLS, Eq, Show) -instance (ArbitraryMessageTBS tbs) => Arbitrary (MessageGenerator tbs) where - arbitrary = do - tbs <- arbitraryArbitraryMessageTBS @tbs - MessageGenerator - <$> (Message (mkRawMLS tbs) <$> arbitrary) - -data MessageTBSGenerator sender payload +instance ArbitraryFramedContent fc => Arbitrary (MessageGenerator fc) where + arbitrary = + fmap MessageGenerator $ + Message + <$> arbitrary + <*> fmap + MessagePublic + ( PublicMessage + <$> fmap mkRawMLS (arbitraryFramedContent @fc) + <*> (FramedContentAuthData <$> arbitrary <*> pure Nothing) + <*> arbitrary + ) + +data FramedContentGenerator sender payload instance ( ArbitrarySender sender, - ArbitraryMessagePayload payload + ArbitraryFramedContentData payload ) => - ArbitraryMessageTBS (MessageTBSGenerator sender payload) + ArbitraryFramedContent (FramedContentGenerator sender payload) where - arbitraryArbitraryMessageTBS = - MessageTBS KnownFormatTag + arbitraryFramedContent = + FramedContent <$> arbitrary <*> arbitrary - <*> arbitrary <*> arbitrarySender @sender - <*> arbitraryMessagePayload @payload + <*> arbitrary + <*> arbitraryFramedContentData @payload --- -newtype RemoveProposalMessage = RemoveProposalMessage {unRemoveProposalMessage :: Message 'MLSPlainText} +newtype RemoveProposalMessage = RemoveProposalMessage {unRemoveProposalMessage :: Message} deriving newtype (ParseMLS, SerialiseMLS, Eq, Show) instance Arbitrary RemoveProposalMessage where arbitrary = RemoveProposalMessage - <$> (unMessageGenerator <$> arbitrary @(MessageGenerator (MessageTBSGenerator TestPreconfiguredSender RemoveProposalPayload))) + <$> (unMessageGenerator <$> arbitrary @(MessageGenerator (FramedContentGenerator TestPreconfiguredSender RemoveProposalPayload))) --- -newtype RemoveProposalPayload = RemoveProposalPayload {unRemoveProposalPayload :: MessagePayload 'MLSPlainText} +newtype RemoveProposalPayload = RemoveProposalPayload {unRemoveProposalPayload :: FramedContentData} deriving newtype (ParseMLS, 
SerialiseMLS, Eq, Show) instance Arbitrary RemoveProposalPayload where - arbitrary = RemoveProposalPayload . ProposalMessage . mkRemoveProposal <$> arbitrary + arbitrary = RemoveProposalPayload . FramedContentProposal . mkRemoveProposal <$> arbitrary -instance ArbitraryMessagePayload RemoveProposalPayload where - arbitraryMessagePayload = unRemoveProposalPayload <$> arbitrary +instance ArbitraryFramedContentData RemoveProposalPayload where + arbitraryFramedContentData = unRemoveProposalPayload <$> arbitrary --- newtype TestPreconfiguredSender = TestPreconfiguredSender - {unTestPreconfiguredSender :: Sender 'MLSPlainText} + {unTestPreconfiguredSender :: Sender} deriving newtype (ParseMLS, SerialiseMLS, Eq, Show) instance Arbitrary TestPreconfiguredSender where - arbitrary = TestPreconfiguredSender . PreconfiguredSender <$> arbitrary + arbitrary = TestPreconfiguredSender . SenderExternal <$> arbitrary instance ArbitrarySender TestPreconfiguredSender where arbitrarySender = unTestPreconfiguredSender <$> arbitrary diff --git a/libs/wire-api/test/unit/Main.hs b/libs/wire-api/test/unit/Test/Wire/API/Run.hs similarity index 98% rename from libs/wire-api/test/unit/Main.hs rename to libs/wire-api/test/unit/Test/Wire/API/Run.hs index a1b492c371..14382593d0 100644 --- a/libs/wire-api/test/unit/Main.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/Run.hs @@ -15,10 +15,7 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . 
-module Main - ( main, - ) -where +module Test.Wire.API.Run (main) where import Imports import System.IO.Unsafe (unsafePerformIO) diff --git a/libs/wire-api/wire-api.cabal b/libs/wire-api/wire-api.cabal index f3d8f7ce9e..2e0130c20c 100644 --- a/libs/wire-api/wire-api.cabal +++ b/libs/wire-api/wire-api.cabal @@ -1,4 +1,4 @@ -cabal-version: 1.12 +cabal-version: 3.0 name: wire-api version: 0.1.0 description: API types of the Wire collaboration platform @@ -6,11 +6,64 @@ category: Network author: Wire Swiss GmbH maintainer: Wire Swiss GmbH copyright: (c) 2020 Wire Swiss GmbH -license: AGPL-3 +license: AGPL-3.0-only license-file: LICENSE build-type: Simple +common common-all + default-language: Haskell2010 + ghc-options: + -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates + -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path + -Wredundant-constraints + + default-extensions: + NoImplicitPrelude + AllowAmbiguousTypes + BangPatterns + ConstraintKinds + DataKinds + DefaultSignatures + DeriveFunctor + DeriveGeneric + DeriveLift + DeriveTraversable + DerivingStrategies + DerivingVia + DuplicateRecordFields + EmptyCase + FlexibleContexts + FlexibleInstances + FunctionalDependencies + GADTs + GeneralizedNewtypeDeriving + InstanceSigs + KindSignatures + LambdaCase + MultiParamTypeClasses + MultiWayIf + NamedFieldPuns + OverloadedLabels + OverloadedRecordDot + OverloadedStrings + PackageImports + PatternSynonyms + PolyKinds + QuasiQuotes + RankNTypes + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeApplications + TypeFamilies + TypeFamilyDependencies + TypeOperators + UndecidableInstances + ViewPatterns + library + import: common-all + -- cabal-fmt: expand src exposed-modules: Wire.API.ApplyMods @@ -43,6 +96,7 @@ library Wire.API.MakesFederatedCall Wire.API.Message Wire.API.Message.Proto + Wire.API.MLS.Capabilities Wire.API.MLS.CipherSuite Wire.API.MLS.Commit Wire.API.MLS.CommitBundle @@ -52,10 +106,15 @@ library Wire.API.MLS.Extension 
Wire.API.MLS.Group Wire.API.MLS.GroupInfoBundle + Wire.API.MLS.HPKEPublicKey Wire.API.MLS.KeyPackage Wire.API.MLS.Keys + Wire.API.MLS.LeafNode + Wire.API.MLS.Lifetime Wire.API.MLS.Message Wire.API.MLS.Proposal + Wire.API.MLS.ProposalTag + Wire.API.MLS.ProtocolVersion Wire.API.MLS.PublicGroupState Wire.API.MLS.Serialisation Wire.API.MLS.Servant @@ -162,57 +221,10 @@ library Wire.API.VersionInfo Wire.API.Wrapped - other-modules: Paths_wire_api - hs-source-dirs: src - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -Wredundant-constraints - + other-modules: Paths_wire_api + hs-source-dirs: src build-depends: - aeson >=2.0.1.0 + , aeson >=2.0.1.0 , attoparsec >=0.10 , base >=4 && <5 , base64-bytestring >=1.0 @@ -305,15 +317,13 @@ library , x509 , zauth - default-language: Haskell2010 - test-suite wire-api-golden-tests - type: exitcode-stdio-1.0 - main-is: Main.hs + import: common-all + type: exitcode-stdio-1.0 + main-is: ../golden.hs -- cabal-fmt: expand test/golden other-modules: - Main Paths_wire_api Test.Wire.API.Golden.FromJSON Test.Wire.API.Golden.Generated @@ -560,58 +570,13 @@ test-suite wire-api-golden-tests 
Test.Wire.API.Golden.Manual.UserClientPrekeyMap Test.Wire.API.Golden.Manual.UserIdList Test.Wire.API.Golden.Protobuf + Test.Wire.API.Golden.Run Test.Wire.API.Golden.Runner - hs-source-dirs: test/golden - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -threaded -with-rtsopts=-N -Wredundant-constraints - + ghc-options: -threaded -with-rtsopts=-N + hs-source-dirs: test/golden build-depends: - aeson >=2.0.1.0 + , aeson >=2.0.1.0 , aeson-pretty , aeson-qq , base @@ -654,15 +619,13 @@ test-suite wire-api-golden-tests , wire-api , wire-message-proto-lens - default-language: Haskell2010 - test-suite wire-api-tests - type: exitcode-stdio-1.0 - main-is: Main.hs + import: common-all + type: exitcode-stdio-1.0 + main-is: ../unit.hs -- cabal-fmt: expand test/unit other-modules: - Main Paths_wire_api Test.Wire.API.Call.Config Test.Wire.API.Conversation @@ -678,6 +641,7 @@ test-suite wire-api-tests Test.Wire.API.Routes Test.Wire.API.Routes.Version Test.Wire.API.Routes.Version.Wai + Test.Wire.API.Run Test.Wire.API.Swagger Test.Wire.API.Team.Export Test.Wire.API.Team.Member @@ -686,56 +650,9 @@ test-suite wire-api-tests Test.Wire.API.User.RichInfo 
Test.Wire.API.User.Search - hs-source-dirs: test/unit - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -threaded -with-rtsopts=-N -Wredundant-constraints - + hs-source-dirs: test/unit build-depends: - aeson >=2.0.1.0 + , aeson >=2.0.1.0 , aeson-pretty , aeson-qq , async @@ -793,4 +710,4 @@ test-suite wire-api-tests , wire-api , wire-message-proto-lens - default-language: Haskell2010 + ghc-options: -threaded -with-rtsopts=-N diff --git a/services/brig/brig.cabal b/services/brig/brig.cabal index 00a464df5c..c10aa3b01f 100644 --- a/services/brig/brig.cabal +++ b/services/brig/brig.cabal @@ -1,4 +1,4 @@ -cabal-version: 1.12 +cabal-version: 3.0 name: brig version: 2.0 synopsis: User Service @@ -6,7 +6,7 @@ category: Network author: Wire Swiss GmbH maintainer: Wire Swiss GmbH copyright: (c) 2017 Wire Swiss GmbH -license: AGPL-3 +license: AGPL-3.0-only license-file: LICENSE build-type: Simple extra-source-files: @@ -14,7 +14,60 @@ extra-source-files: docs/swagger-v1.json docs/swagger.md +common common-all + default-language: Haskell2010 + ghc-options: + -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates + -Wpartial-fields -fwarn-tabs 
-optP-Wno-nonportable-include-path + -Wredundant-constraints + + default-extensions: + NoImplicitPrelude + AllowAmbiguousTypes + BangPatterns + ConstraintKinds + DataKinds + DefaultSignatures + DeriveFunctor + DeriveGeneric + DeriveLift + DeriveTraversable + DerivingStrategies + DerivingVia + DuplicateRecordFields + EmptyCase + FlexibleContexts + FlexibleInstances + FunctionalDependencies + GADTs + GeneralizedNewtypeDeriving + InstanceSigs + KindSignatures + LambdaCase + MultiParamTypeClasses + MultiWayIf + NamedFieldPuns + OverloadedLabels + OverloadedRecordDot + OverloadedStrings + PackageImports + PatternSynonyms + PolyKinds + QuasiQuotes + RankNTypes + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeApplications + TypeFamilies + TypeFamilyDependencies + TypeOperators + UndecidableInstances + ViewPatterns + library + import: common-all + -- cabal-fmt: expand src exposed-modules: Brig.Allowlists @@ -133,58 +186,14 @@ library Brig.Version Brig.ZAuth - other-modules: Paths_brig - hs-source-dirs: src - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - + other-modules: Paths_brig + hs-source-dirs: src ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path -funbox-strict-fields -fplugin=Polysemy.Plugin - 
-fplugin=TransitiveAnns.Plugin -Wredundant-constraints + -fplugin=TransitiveAnns.Plugin build-depends: - aeson >=2.0.1.0 + , aeson >=2.0.1.0 , amazonka >=2 , amazonka-core >=2 , amazonka-dynamodb >=2 @@ -317,131 +326,43 @@ library , yaml >=0.8.22 , zauth >=0.10.3 - default-language: Haskell2010 + default-language: Haskell2010 executable brig - main-is: exec/Main.hs - other-modules: Paths_brig - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - + import: common-all + main-is: exec/Main.hs + other-modules: Paths_brig ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path -funbox-strict-fields -threaded -with-rtsopts=-N -with-rtsopts=-T - -rtsopts -Wredundant-constraints + -rtsopts build-depends: - base + , base , brig , HsOpenSSL , imports , optparse-applicative >=0.10 , types-common - default-language: Haskell2010 + default-language: Haskell2010 executable brig-index - main-is: index/src/Main.hs - other-modules: Paths_brig - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - 
FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -funbox-strict-fields -threaded -with-rtsopts=-N - -Wredundant-constraints - + import: common-all + main-is: index/src/Main.hs + other-modules: Paths_brig + ghc-options: -funbox-strict-fields -threaded -with-rtsopts=-N build-depends: - base + , base , brig , imports , optparse-applicative , tinylog - default-language: Haskell2010 + default-language: Haskell2010 executable brig-integration - main-is: Main.hs + import: common-all + main-is: ../integration.hs -- cabal-fmt: expand test/integration other-modules: @@ -478,62 +399,15 @@ executable brig-integration Federation.End2end Federation.Util Index.Create - Main + Run SMTP Util Util.AWS - hs-source-dirs: test/integration - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - 
ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -funbox-strict-fields -threaded -with-rtsopts=-N - -Wredundant-constraints - + hs-source-dirs: test/integration + ghc-options: -funbox-strict-fields -threaded -with-rtsopts=-N build-depends: - aeson + , aeson , async , attoparsec , base @@ -627,14 +501,15 @@ executable brig-integration , yaml , zauth - default-language: Haskell2010 + default-language: Haskell2010 executable brig-schema - main-is: Main.hs + import: common-all + main-is: ../main.hs -- cabal-fmt: expand schema/src other-modules: - Main + Run V43 V44 V45 @@ -671,55 +546,10 @@ executable brig-schema V_FUTUREWORK hs-source-dirs: schema/src - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -funbox-strict-fields -Wredundant-constraints - + ghc-options: -funbox-strict-fields -Wredundant-constraints + default-extensions: TemplateHaskell build-depends: - base + , base , cassandra-util >=0.12 , extended , imports @@ -732,12 +562,11 @@ executable brig-schema default-language: Haskell2010 test-suite brig-tests - type: exitcode-stdio-1.0 - main-is: Main.hs - - 
-- cabal-fmt: expand test/unit + import: common-all + type: exitcode-stdio-1.0 + main-is: ../unit.hs other-modules: - Main + Run Test.Brig.Calling Test.Brig.Calling.Internal Test.Brig.Effects.Delay @@ -746,57 +575,10 @@ test-suite brig-tests Test.Brig.Roundtrip Test.Brig.User.Search.Index.Types - hs-source-dirs: test/unit - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -funbox-strict-fields -threaded -with-rtsopts=-N - -Wredundant-constraints - + hs-source-dirs: test/unit + ghc-options: -funbox-strict-fields -threaded -with-rtsopts=-N build-depends: - aeson + , aeson , base , binary , bloodhound @@ -831,4 +613,4 @@ test-suite brig-tests , wire-api , wire-api-federation - default-language: Haskell2010 + default-language: Haskell2010 diff --git a/services/brig/schema/main.hs b/services/brig/schema/main.hs new file mode 100644 index 0000000000..d4037ab9cf --- /dev/null +++ b/services/brig/schema/main.hs @@ -0,0 +1,5 @@ +import Imports +import qualified Run + +main :: IO () +main = Run.main diff --git a/services/brig/schema/src/Main.hs b/services/brig/schema/src/Run.hs similarity index 99% rename from services/brig/schema/src/Main.hs rename to 
services/brig/schema/src/Run.hs index f1f35ccd37..96bd6d1675 100644 --- a/services/brig/schema/src/Main.hs +++ b/services/brig/schema/src/Run.hs @@ -15,7 +15,7 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . -module Main where +module Run where import Cassandra.Schema import Control.Exception (finally) diff --git a/services/brig/src/Brig/API/Internal.hs b/services/brig/src/Brig/API/Internal.hs index f992ff8258..98d228b9ac 100644 --- a/services/brig/src/Brig/API/Internal.hs +++ b/services/brig/src/Brig/API/Internal.hs @@ -86,6 +86,7 @@ import Wire.API.Connection import Wire.API.Error import qualified Wire.API.Error.Brig as E import Wire.API.Federation.API +import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage import Wire.API.MLS.Serialisation @@ -220,7 +221,7 @@ upsertKeyPackage nkp = do either (const $ mlsProtocolError "upsertKeyPackage: Cannot decode ClientIdentity") pure - $ kpIdentity (rmValue kp) + $ keyPackageIdentity (rmValue kp) mp <- lift . wrapClient . 
runMaybeT $ Data.derefKeyPackage ref when (isNothing mp) $ do void $ validateKeyPackage identity kp diff --git a/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs b/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs index 2ebed2e370..eaefbff18f 100644 --- a/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs +++ b/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs @@ -18,11 +18,7 @@ module Brig.API.MLS.KeyPackages.Validation ( -- * Main key package validation function validateKeyPackage, - reLifetime, mlsProtocolError, - - -- * Exported for unit tests - findExtensions, validateLifetime', ) where @@ -41,10 +37,13 @@ import Data.Time.Clock.POSIX import Imports import Wire.API.Error import Wire.API.Error.Brig +import Wire.API.MLS.Capabilities import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential -import Wire.API.MLS.Extension import Wire.API.MLS.KeyPackage +import Wire.API.MLS.LeafNode +import Wire.API.MLS.Lifetime +import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation validateKeyPackage :: @@ -58,12 +57,9 @@ validateKeyPackage identity (RawMLS (KeyPackageData -> kpd) kp) = do maybe (mlsProtocolError "Unsupported ciphersuite") pure - $ cipherSuiteTag (kpCipherSuite kp) + $ cipherSuiteTag kp.cipherSuite - -- validate signature scheme let ss = csSignatureScheme cs - when (signatureScheme ss /= bcSignatureScheme (kpCredential kp)) $ - mlsProtocolError "Signature scheme incompatible with ciphersuite" -- Authenticate signature key. This is performed only upon uploading a key -- package for a local client. @@ -76,7 +72,7 @@ validateKeyPackage identity (RawMLS (KeyPackageData -> kpd) kp) = do (mlsProtocolError "No key associated to the given identity and signature scheme") pure =<< lift (wrapClient (Data.lookupMLSPublicKey (ciUser identity) (ciClient identity) ss)) - when (key /= bcSignatureKey (kpCredential kp)) $ + when (key /= kp.leafNode.signatureKey) $ mlsProtocolError "Unrecognised signature key" ) (pure . 
const ()) @@ -86,27 +82,29 @@ validateKeyPackage identity (RawMLS (KeyPackageData -> kpd) kp) = do unless ( csVerifySignature cs - (bcSignatureKey (kpCredential kp)) - (rmRaw (kpTBS kp)) - (kpSignature kp) + kp.leafNode.signatureKey + kp.tbs.rmRaw + kp.signature_ ) $ mlsProtocolError "Invalid signature" -- validate protocol version maybe (mlsProtocolError "Unsupported protocol version") pure - (pvTag (kpProtocolVersion kp) >>= guard . (== ProtocolMLS10)) - -- validate credential - validateCredential identity (kpCredential kp) - -- validate extensions - validateExtensions (kpExtensions kp) + (pvTag (kp.protocolVersion) >>= guard . (== ProtocolMLS10)) + + -- validate credential, lifetime and capabilities + validateCredential identity kp.credential + validateSource kp.leafNode.source + validateCapabilities kp.leafNode.capabilities + pure (kpRef cs kpd, kpd) validateCredential :: ClientIdentity -> Credential -> Handler r () -validateCredential identity cred = do +validateCredential identity (BasicCredential cred) = do identity' <- either credentialError pure $ - decodeMLS' (bcIdentity cred) + decodeMLS' cred when (identity /= identity') $ throwStd (errorToWai @'MLSIdentityMismatch) where @@ -114,38 +112,13 @@ validateCredential identity cred = do mlsProtocolError $ "Failed to parse identity: " <> e -data RequiredExtensions f = RequiredExtensions - { reLifetime :: f Lifetime, - reCapabilities :: f () - } - -deriving instance (Show (f Lifetime), Show (f ())) => Show (RequiredExtensions f) - -instance Alternative f => Semigroup (RequiredExtensions f) where - RequiredExtensions lt1 cap1 <> RequiredExtensions lt2 cap2 = - RequiredExtensions (lt1 <|> lt2) (cap1 <|> cap2) - -instance Alternative f => Monoid (RequiredExtensions f) where - mempty = RequiredExtensions empty empty - -checkRequiredExtensions :: RequiredExtensions Maybe -> Either Text (RequiredExtensions Identity) -checkRequiredExtensions re = - RequiredExtensions - <$> maybe (Left "Missing lifetime extension") 
(pure . Identity) (reLifetime re) - <*> maybe (Left "Missing capability extension") (pure . Identity) (reCapabilities re) - -findExtensions :: [Extension] -> Either Text (RequiredExtensions Identity) -findExtensions = checkRequiredExtensions <=< (getAp . foldMap findExtension) - -findExtension :: Extension -> Ap (Either Text) (RequiredExtensions Maybe) -findExtension ext = (Ap (decodeExtension ext) >>=) . foldMap $ \case - (SomeExtension SLifetimeExtensionTag lt) -> pure $ RequiredExtensions (Just lt) Nothing - (SomeExtension SCapabilitiesExtensionTag _) -> pure $ RequiredExtensions Nothing (Just ()) - -validateExtensions :: [Extension] -> Handler r () -validateExtensions exts = do - re <- either mlsProtocolError pure $ findExtensions exts - validateLifetime . runIdentity . reLifetime $ re +validateSource :: LeafNodeSource -> Handler r () +validateSource (LeafNodeSourceKeyPackage lt) = validateLifetime lt +validateSource s = + mlsProtocolError $ + "Expected 'key_package' source, got '" + <> (leafNodeSourceTag s).name + <> "'" validateLifetime :: Lifetime -> Handler r () validateLifetime lt = do @@ -164,6 +137,9 @@ validateLifetime' now mMaxLifetime lt = do when (tsPOSIX (ltNotAfter lt) > now + maxLifetime) $ Left "Key package expiration time is too far in the future" +validateCapabilities :: Capabilities -> Handler r () +validateCapabilities _ = pure () + mlsProtocolError :: Text -> Handler r a mlsProtocolError msg = throwStd . 
toWai $ diff --git a/services/brig/src/Brig/Data/Client.hs b/services/brig/src/Brig/Data/Client.hs index 5e532d974e..0c58480f94 100644 --- a/services/brig/src/Brig/Data/Client.hs +++ b/services/brig/src/Brig/Data/Client.hs @@ -86,7 +86,7 @@ import qualified System.CryptoBox as CryptoBox import System.Logger.Class (field, msg, val) import qualified System.Logger.Class as Log import UnliftIO (pooledMapConcurrentlyN) -import Wire.API.MLS.Credential +import Wire.API.MLS.CipherSuite import Wire.API.User.Auth import Wire.API.User.Client hiding (UpdateClient (..)) import Wire.API.User.Client.Prekey diff --git a/services/brig/src/Brig/Data/MLS/KeyPackage.hs b/services/brig/src/Brig/Data/MLS/KeyPackage.hs index 2f99e355bc..0f88575229 100644 --- a/services/brig/src/Brig/Data/MLS/KeyPackage.hs +++ b/services/brig/src/Brig/Data/MLS/KeyPackage.hs @@ -49,6 +49,7 @@ import Data.Time.Clock.POSIX import Imports import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage +import Wire.API.MLS.LeafNode import Wire.API.MLS.Serialisation import Wire.API.Routes.Internal.Brig @@ -127,11 +128,11 @@ getNonClaimedKeyPackages u c = do hasExpired :: POSIXTime -> Maybe NominalDiffTime -> (KeyPackage, a) -> Bool hasExpired now mMaxLifetime (kp, _) = - case findExtensions (kpExtensions kp) of - Left _ -> True -- the assumption is the key package is valid and has the - -- required extensions so we return 'True' - Right (runIdentity . reLifetime -> lt) -> + case kp.leafNode.source of + LeafNodeSourceKeyPackage lt -> either (const True) (const False) . validateLifetime' now mMaxLifetime $ lt + _ -> True -- the assumption is the key package is valid and has the + -- required extensions so we return 'True' -- | Add key package ref to mapping table. 
mapKeyPackageRef :: MonadClient m => KeyPackageRef -> Qualified UserId -> ClientId -> m () diff --git a/services/brig/test/integration.hs b/services/brig/test/integration.hs new file mode 100644 index 0000000000..d4037ab9cf --- /dev/null +++ b/services/brig/test/integration.hs @@ -0,0 +1,5 @@ +import Imports +import qualified Run + +main :: IO () +main = Run.main diff --git a/services/brig/test/integration/API/MLS/Util.hs b/services/brig/test/integration/API/MLS/Util.hs index 02af682b7c..8c8d302872 100644 --- a/services/brig/test/integration/API/MLS/Util.hs +++ b/services/brig/test/integration/API/MLS/Util.hs @@ -37,6 +37,7 @@ import Util import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage import Wire.API.MLS.Serialisation +import Wire.API.MLS.CipherSuite import Wire.API.User.Client data SetKey = SetKey | DontSetKey diff --git a/services/brig/test/integration/API/User/Client.hs b/services/brig/test/integration/API/User/Client.hs index b7bfeac716..23429b0a00 100644 --- a/services/brig/test/integration/API/User/Client.hs +++ b/services/brig/test/integration/API/User/Client.hs @@ -61,7 +61,7 @@ import Test.Tasty.HUnit import UnliftIO (mapConcurrently) import Util import Wire.API.Internal.Notification -import Wire.API.MLS.Credential + import qualified Wire.API.Team.Feature as Public import Wire.API.User import qualified Wire.API.User as Public @@ -71,6 +71,7 @@ import Wire.API.User.Client.DPoPAccessToken import Wire.API.User.Client.Prekey import Wire.API.UserMap (QualifiedUserMap (..), UserMap (..), WrappedQualifiedUserMap) import Wire.API.Wrapped (Wrapped (..)) +import Wire.API.MLS.CipherSuite tests :: ConnectionLimit -> Opt.Timeout -> Opt.Opts -> Manager -> DB.ClientState -> Brig -> Cannon -> Galley -> TestTree tests _cl _at opts p db b c g = diff --git a/services/brig/test/integration/Federation/End2end.hs b/services/brig/test/integration/Federation/End2end.hs index 353516b10e..c0ab4ee374 100644 --- a/services/brig/test/integration/Federation/End2end.hs 
+++ b/services/brig/test/integration/Federation/End2end.hs @@ -61,7 +61,7 @@ import Wire.API.Conversation.Role import Wire.API.Conversation.Typing import Wire.API.Event.Conversation import Wire.API.Internal.Notification (ntfTransient) -import Wire.API.MLS.Credential +import Wire.API.MLS.CipherSuite import Wire.API.MLS.KeyPackage import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation @@ -737,7 +737,7 @@ testSendMLSMessage brig1 brig2 galley1 galley2 cannon1 cannon2 = do { updateClientMLSPublicKeys = Map.singleton Ed25519 - (bcSignatureKey (kpCredential (rmValue aliceKP))) + aliceKP.rmValue.leafNode.signatureKey } put ( brig1 @@ -1015,7 +1015,7 @@ testSendMLSMessageToSubConversation brig1 brig2 galley1 galley2 cannon1 cannon2 { updateClientMLSPublicKeys = Map.singleton Ed25519 - (bcSignatureKey (kpCredential (rmValue aliceKP))) + aliceKP.rmValue.leafNode.signatureKey } put ( brig1 diff --git a/services/brig/test/integration/Main.hs b/services/brig/test/integration/Run.hs similarity index 99% rename from services/brig/test/integration/Main.hs rename to services/brig/test/integration/Run.hs index dee2c47caa..4c71bcba7a 100644 --- a/services/brig/test/integration/Main.hs +++ b/services/brig/test/integration/Run.hs @@ -15,7 +15,7 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . -module Main +module Run ( main, ) where diff --git a/services/brig/test/unit.hs b/services/brig/test/unit.hs new file mode 100644 index 0000000000..a26473d24e --- /dev/null +++ b/services/brig/test/unit.hs @@ -0,0 +1 @@ +import Run diff --git a/services/brig/test/unit/Main.hs b/services/brig/test/unit/Main.hs index 8cc53f5f81..6ab5658fca 100644 --- a/services/brig/test/unit/Main.hs +++ b/services/brig/test/unit/Main.hs @@ -15,7 +15,7 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . 
-module Main +module Run ( main, ) where diff --git a/services/brig/test/unit/Run.hs b/services/brig/test/unit/Run.hs new file mode 100644 index 0000000000..6ab5658fca --- /dev/null +++ b/services/brig/test/unit/Run.hs @@ -0,0 +1,43 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . + +module Run + ( main, + ) +where + +import Imports +import qualified Test.Brig.Calling +import qualified Test.Brig.Calling.Internal +import qualified Test.Brig.InternalNotification +import qualified Test.Brig.MLS +import qualified Test.Brig.Roundtrip +import qualified Test.Brig.User.Search.Index.Types +import Test.Tasty + +main :: IO () +main = + defaultMain $ + testGroup + "Tests" + [ Test.Brig.User.Search.Index.Types.tests, + Test.Brig.Calling.tests, + Test.Brig.Calling.Internal.tests, + Test.Brig.Roundtrip.tests, + Test.Brig.MLS.tests, + Test.Brig.InternalNotification.tests + ] diff --git a/services/brig/test/unit/Test/Brig/MLS.hs b/services/brig/test/unit/Test/Brig/MLS.hs index e4c4f8d258..92e2b5eb52 100644 --- a/services/brig/test/unit/Test/Brig/MLS.hs +++ b/services/brig/test/unit/Test/Brig/MLS.hs @@ -19,16 +19,12 @@ module Test.Brig.MLS where import Brig.API.MLS.KeyPackages.Validation import Data.Binary -import Data.Binary.Put -import qualified Data.ByteString.Lazy as LBS import 
Data.Either import Data.Time.Clock import Imports import Test.Tasty import Test.Tasty.QuickCheck -import Wire.API.MLS.CipherSuite -import Wire.API.MLS.Extension -import Wire.API.MLS.Serialisation +import Wire.API.MLS.Lifetime -- | A lifetime with a length of at least 1 day. newtype ValidLifetime = ValidLifetime Lifetime @@ -57,69 +53,6 @@ midpoint lt = ) ) -newtype ValidExtensions = ValidExtensions [Extension] - -instance Show ValidExtensions where - show (ValidExtensions exts) = "ValidExtensions (length " <> show (length exts) <> ")" - -unknownExt :: Gen Extension -unknownExt = do - Positive t0 <- arbitrary - let t = t0 + fromEnum (maxBound :: ExtensionTag) + 1 - Extension (fromIntegral t) <$> arbitrary - --- | Generate a list of extensions containing all the required ones. -instance Arbitrary ValidExtensions where - arbitrary = do - exts0 <- listOf unknownExt - LifetimeAndExtension ext1 _ <- arbitrary - exts2 <- listOf unknownExt - CapabilitiesAndExtension ext3 _ <- arbitrary - exts4 <- listOf unknownExt - pure . ValidExtensions $ exts0 <> [ext1] <> exts2 <> [ext3] <> exts4 - -newtype InvalidExtensions = InvalidExtensions [Extension] - --- | Generate a list of extensions which does not contain one of the required extensions. -instance Show InvalidExtensions where - show (InvalidExtensions exts) = "InvalidExtensions (length " <> show (length exts) <> ")" - -instance Arbitrary InvalidExtensions where - arbitrary = do - req <- fromMLSEnum <$> elements [LifetimeExtensionTag, CapabilitiesExtensionTag] - InvalidExtensions <$> listOf (arbitrary `suchThat` ((/= req) . extType)) - -data LifetimeAndExtension = LifetimeAndExtension Extension Lifetime - deriving (Show) - -instance Arbitrary LifetimeAndExtension where - arbitrary = do - lt <- arbitrary - let ext = Extension (fromIntegral (fromEnum LifetimeExtensionTag + 1)) . LBS.toStrict . 
runPut $ do - put (timestampSeconds (ltNotBefore lt)) - put (timestampSeconds (ltNotAfter lt)) - pure $ LifetimeAndExtension ext lt - -data CapabilitiesAndExtension = CapabilitiesAndExtension Extension Capabilities - deriving (Show) - -instance Arbitrary CapabilitiesAndExtension where - arbitrary = do - caps <- arbitrary - let ext = Extension (fromIntegral (fromEnum CapabilitiesExtensionTag + 1)) . LBS.toStrict . runPut $ do - putWord8 (fromIntegral (length (capVersions caps))) - traverse_ (putWord8 . pvNumber) (capVersions caps) - - putWord8 (fromIntegral (length (capCiphersuites caps) * 2)) - traverse_ (put . cipherSuiteNumber) (capCiphersuites caps) - - putWord8 (fromIntegral (length (capExtensions caps) * 2)) - traverse_ put (capExtensions caps) - - putWord8 (fromIntegral (length (capProposals caps) * 2)) - traverse_ put (capProposals caps) - pure $ CapabilitiesAndExtension ext caps - tests :: TestTree tests = testGroup @@ -142,16 +75,5 @@ tests = isRight $ validateLifetime' (midpoint lt) Nothing lt, testProperty "expiration too far" $ \(ValidLifetime lt) -> isLeft $ validateLifetime' (midpoint lt) (Just 10) lt - ], - testGroup - "Extensions" - [ testProperty "required extensions are found" $ \(ValidExtensions exts) -> - isRight (findExtensions exts), - testProperty "missing required extensions" $ \(InvalidExtensions exts) -> - isLeft (findExtensions exts), - testProperty "lifetime extension" $ \(LifetimeAndExtension ext lt) -> - decodeExtension ext == Right (Just (SomeExtension SLifetimeExtensionTag lt)), - testProperty "capabilities extension" $ \(CapabilitiesAndExtension ext caps) -> - decodeExtension ext == Right (Just (SomeExtension SCapabilitiesExtensionTag caps)) ] ] diff --git a/services/galley/galley.cabal b/services/galley/galley.cabal index aed44b951a..6a921b7cc1 100644 --- a/services/galley/galley.cabal +++ b/services/galley/galley.cabal @@ -1,4 +1,4 @@ -cabal-version: 1.12 +cabal-version: 3.0 name: galley version: 0.83.0 synopsis: Conversations @@ 
-6,7 +6,7 @@ category: Network author: Wire Swiss GmbH maintainer: Wire Swiss GmbH copyright: (c) 2017 Wire Swiss GmbH -license: AGPL-3 +license: AGPL-3.0-only license-file: LICENSE build-type: Simple @@ -15,7 +15,60 @@ flag static manual: True default: False +common common-all + default-language: Haskell2010 + ghc-options: + -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates + -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path + -Wredundant-constraints + + default-extensions: + NoImplicitPrelude + AllowAmbiguousTypes + BangPatterns + ConstraintKinds + DataKinds + DefaultSignatures + DeriveFunctor + DeriveGeneric + DeriveLift + DeriveTraversable + DerivingStrategies + DerivingVia + DuplicateRecordFields + EmptyCase + FlexibleContexts + FlexibleInstances + FunctionalDependencies + GADTs + GeneralizedNewtypeDeriving + InstanceSigs + KindSignatures + LambdaCase + MultiParamTypeClasses + MultiWayIf + NamedFieldPuns + OverloadedLabels + OverloadedRecordDot + OverloadedStrings + PackageImports + PatternSynonyms + PolyKinds + QuasiQuotes + RankNTypes + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeApplications + TypeFamilies + TypeFamilyDependencies + TypeOperators + UndecidableInstances + ViewPatterns + library + import: common-all + -- cabal-fmt: expand src exposed-modules: Galley.API @@ -145,57 +198,11 @@ library Galley.Types.UserList Galley.Validation - other-modules: Paths_galley - hs-source-dirs: src - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - 
QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -fplugin=TransitiveAnns.Plugin -Wredundant-constraints - + ghc-options: -fplugin=TransitiveAnns.Plugin + other-modules: Paths_galley + hs-source-dirs: src build-depends: - aeson >=2.0.1.0 + , aeson >=2.0.1.0 , amazonka >=1.4.5 , amazonka-sqs >=1.4.5 , asn1-encoding @@ -300,60 +307,15 @@ library , wire-api-federation , x509 - default-language: Haskell2010 + default-language: Haskell2010 executable galley - main-is: exec/Main.hs - other-modules: Paths_galley - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -threaded -with-rtsopts=-T -rtsopts -Wredundant-constraints - + import: common-all + main-is: exec/Main.hs + other-modules: Paths_galley + ghc-options: -threaded -with-rtsopts=-T -rtsopts build-depends: - base + , base , case-insensitive , extended , extra >=1.3 @@ -375,10 +337,11 @@ executable galley if 
flag(static) ld-options: -static - default-language: Haskell2010 + default-language: Haskell2010 executable galley-integration - main-is: Main.hs + import: common-all + main-is: ../integration.hs -- cabal-fmt: expand test/integration other-modules: @@ -399,60 +362,14 @@ executable galley-integration API.Teams.LegalHold.Util API.Util API.Util.TeamFeature - Main + Run TestHelpers TestSetup - hs-source-dirs: test/integration - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -threaded -with-rtsopts=-N -Wredundant-constraints - + ghc-options: -threaded -with-rtsopts=-N -rtsopts + hs-source-dirs: test/integration build-depends: - aeson + , aeson , aeson-qq , amazonka , amazonka-sqs @@ -559,70 +476,22 @@ executable galley-integration , wire-message-proto-lens , yaml - default-language: Haskell2010 - executable galley-migrate-data - main-is: Main.hs + import: common-all + main-is: ../main.hs -- cabal-fmt: expand migrate-data/src other-modules: Galley.DataMigration Galley.DataMigration.Types - Main Paths_galley + Run V1_BackfillBillingTeamMembers V2_MigrateMLSMembers - hs-source-dirs: migrate-data/src - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes 
- BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -Wredundant-constraints - + hs-source-dirs: migrate-data/src build-depends: - base + , base , case-insensitive , cassandra-util , conduit @@ -653,14 +522,15 @@ executable galley-migrate-data if flag(static) ld-options: -static - default-language: Haskell2010 + default-language: Haskell2010 executable galley-schema - main-is: Main.hs + import: common-all + main-is: ../main.hs -- cabal-fmt: expand schema/src other-modules: - Main + Run V20 V21 V22 @@ -725,55 +595,9 @@ executable galley-schema V81_MLSSubconversation hs-source-dirs: schema/src - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - 
TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -Wredundant-constraints - + default-extensions: TemplateHaskell build-depends: - base + , base , case-insensitive , cassandra-util , extended @@ -798,66 +622,22 @@ executable galley-schema default-language: Haskell2010 test-suite galley-tests - type: exitcode-stdio-1.0 - main-is: Main.hs + import: common-all + type: exitcode-stdio-1.0 + main-is: ../unit.hs other-modules: Paths_galley + Run Test.Galley.API Test.Galley.API.Message Test.Galley.API.One2One Test.Galley.Intra.User Test.Galley.Mapping - hs-source-dirs: test/unit - default-extensions: - NoImplicitPrelude - AllowAmbiguousTypes - BangPatterns - ConstraintKinds - DataKinds - DefaultSignatures - DeriveFunctor - DeriveGeneric - DeriveLift - DeriveTraversable - DerivingStrategies - DerivingVia - DuplicateRecordFields - EmptyCase - FlexibleContexts - FlexibleInstances - FunctionalDependencies - GADTs - InstanceSigs - KindSignatures - LambdaCase - MultiParamTypeClasses - MultiWayIf - NamedFieldPuns - OverloadedRecordDot - OverloadedStrings - PackageImports - PatternSynonyms - PolyKinds - QuasiQuotes - RankNTypes - ScopedTypeVariables - StandaloneDeriving - TupleSections - TypeApplications - TypeFamilies - TypeFamilyDependencies - TypeOperators - UndecidableInstances - ViewPatterns - - ghc-options: - -O2 -Wall -Wincomplete-uni-patterns -Wincomplete-record-updates - -Wpartial-fields -fwarn-tabs -optP-Wno-nonportable-include-path - -threaded -with-rtsopts=-N -Wredundant-constraints - + ghc-options: -threaded -with-rtsopts=-N + hs-source-dirs: test/unit build-depends: - base + , base , case-insensitive , containers , extended @@ -889,4 +669,4 @@ test-suite galley-tests , wire-api , wire-api-federation - default-language: Haskell2010 + default-language: Haskell2010 diff --git 
a/services/galley/migrate-data/main.hs b/services/galley/migrate-data/main.hs new file mode 100644 index 0000000000..a26473d24e --- /dev/null +++ b/services/galley/migrate-data/main.hs @@ -0,0 +1 @@ +import Run diff --git a/services/galley/migrate-data/src/Main.hs b/services/galley/migrate-data/src/Run.hs similarity index 98% rename from services/galley/migrate-data/src/Main.hs rename to services/galley/migrate-data/src/Run.hs index f6a051b8d5..cb1288bafb 100644 --- a/services/galley/migrate-data/src/Main.hs +++ b/services/galley/migrate-data/src/Run.hs @@ -15,7 +15,7 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . -module Main where +module Run where import Galley.DataMigration import Imports diff --git a/services/galley/schema/main.hs b/services/galley/schema/main.hs new file mode 100644 index 0000000000..d4037ab9cf --- /dev/null +++ b/services/galley/schema/main.hs @@ -0,0 +1,5 @@ +import Imports +import qualified Run + +main :: IO () +main = Run.main diff --git a/services/galley/schema/src/Main.hs b/services/galley/schema/src/Run.hs similarity index 99% rename from services/galley/schema/src/Main.hs rename to services/galley/schema/src/Run.hs index 4805a7470b..e8583485ee 100644 --- a/services/galley/schema/src/Main.hs +++ b/services/galley/schema/src/Run.hs @@ -15,7 +15,7 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . 
-module Main where +module Run where import Cassandra.Schema import Control.Exception (finally) diff --git a/services/galley/src/Galley/API/Federation.hs b/services/galley/src/Galley/API/Federation.hs index 92ccd96874..31125e12e3 100644 --- a/services/galley/src/Galley/API/Federation.hs +++ b/services/galley/src/Galley/API/Federation.hs @@ -95,7 +95,6 @@ import qualified Wire.API.Federation.API.Galley as F import Wire.API.Federation.Error import Wire.API.MLS.CommitBundle import Wire.API.MLS.Credential -import Wire.API.MLS.Message import Wire.API.MLS.PublicGroupState import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation @@ -668,8 +667,8 @@ sendMLSCommitBundle remoteDomain msr = loc <- qualifyLocal () let sender = toRemoteUnsafe remoteDomain (F.mmsrSender msr) bundle <- either (throw . mlsProtocolError) pure $ deserializeCommitBundle (fromBase64ByteString (F.mmsrRawMessage msr)) - let msg = rmValue (cbCommitMsg bundle) - qConvOrSub <- E.lookupConvByGroupId (msgGroupId msg) >>= noteS @'ConvNotFound + ibundle <- noteS @'MLSUnsupportedMessage $ mkIncomingBundle bundle + qConvOrSub <- E.lookupConvByGroupId ibundle.groupId >>= noteS @'ConvNotFound when (qUnqualified qConvOrSub /= F.mmsrConvOrSubId msr) $ throwS @'MLSGroupConversationMismatch uncurry F.MLSMessageResponseUpdates . first (map lcuUpdate) <$> postMLSCommitBundle @@ -678,7 +677,7 @@ sendMLSCommitBundle remoteDomain msr = (Just (mmsrSenderClient msr)) qConvOrSub Nothing - bundle + ibundle sendMLSMessage :: ( Member BrigAccess r, @@ -716,18 +715,17 @@ sendMLSMessage remoteDomain msr = loc <- qualifyLocal () let sender = toRemoteUnsafe remoteDomain (F.mmsrSender msr) raw <- either (throw . 
mlsProtocolError) pure $ decodeMLS' (fromBase64ByteString (F.mmsrRawMessage msr)) - case rmValue raw of - SomeMessage _ msg -> do - qConvOrSub <- E.lookupConvByGroupId (msgGroupId msg) >>= noteS @'ConvNotFound - when (qUnqualified qConvOrSub /= F.mmsrConvOrSubId msr) $ throwS @'MLSGroupConversationMismatch - uncurry F.MLSMessageResponseUpdates . first (map lcuUpdate) - <$> postMLSMessage - loc - (tUntagged sender) - (Just (mmsrSenderClient msr)) - qConvOrSub - Nothing - raw + msg <- noteS @'MLSUnsupportedMessage $ mkIncomingMessage raw + qConvOrSub <- E.lookupConvByGroupId msg.groupId >>= noteS @'ConvNotFound + when (qUnqualified qConvOrSub /= F.mmsrConvOrSubId msr) $ throwS @'MLSGroupConversationMismatch + uncurry F.MLSMessageResponseUpdates . first (map lcuUpdate) + <$> postMLSMessage + loc + (tUntagged sender) + (Just (mmsrSenderClient msr)) + qConvOrSub + Nothing + msg mlsSendWelcome :: ( Member BrigAccess r, diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 967c005baf..4ef364cc01 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -16,7 +16,11 @@ -- with this program. If not, see . 
module Galley.API.MLS.Message - ( postMLSCommitBundle, + ( IncomingBundle (..), + mkIncomingBundle, + IncomingMessage (..), + mkIncomingMessage, + postMLSCommitBundle, postMLSCommitBundleFromLocalUser, postMLSMessageFromLocalUser, postMLSMessageFromLocalUserV1, @@ -40,6 +44,7 @@ import qualified Data.Set as Set import qualified Data.Text as T import Data.Time import Data.Tuple.Extra +import GHC.Records import Galley.API.Action import Galley.API.Error import Galley.API.MLS.Conversation @@ -97,6 +102,90 @@ import Wire.API.Message import Wire.API.Routes.Internal.Brig import Wire.API.User.Client +data IncomingMessage = IncomingMessage + { epoch :: Epoch, + groupId :: GroupId, + content :: IncomingMessageContent, + rawMessage :: RawMLS Message + } + +instance HasField "sender" IncomingMessage (Maybe Sender) where + getField msg = case msg.content of + IncomingMessageContentPublic pub -> Just pub.sender + _ -> Nothing + +data IncomingMessageContent + = IncomingMessageContentPublic IncomingPublicMessageContent + | IncomingMessageContentPrivate + +data IncomingPublicMessageContent = IncomingPublicMessageContent + { sender :: Sender, + content :: FramedContentData, + -- for verification + framedContent :: RawMLS FramedContent, + authData :: FramedContentAuthData + } + +data IncomingBundle = IncomingBundle + { epoch :: Epoch, + groupId :: GroupId, + sender :: Sender, + commit :: RawMLS Commit, + rawMessage :: RawMLS Message, + welcome :: Maybe (RawMLS Welcome), + groupInfoBundle :: GroupInfoBundle, + serialized :: ByteString + } + +mkIncomingMessage :: RawMLS Message -> Maybe IncomingMessage +mkIncomingMessage msg = case msg.rmValue.content of + MessagePublic pmsg -> + Just + IncomingMessage + { epoch = pmsg.content.rmValue.epoch, + groupId = pmsg.content.rmValue.groupId, + content = + IncomingMessageContentPublic + IncomingPublicMessageContent + { sender = pmsg.content.rmValue.sender, + content = pmsg.content.rmValue.content, + framedContent = pmsg.content, + authData = 
pmsg.authData + }, + rawMessage = msg + } + MessagePrivate pmsg + | pmsg.rmValue.tag == FramedContentApplicationDataTag -> + Just + IncomingMessage + { epoch = pmsg.rmValue.epoch, + groupId = pmsg.rmValue.groupId, + content = IncomingMessageContentPrivate, + rawMessage = msg + } + _ -> Nothing + +mkIncomingBundle :: CommitBundle -> Maybe IncomingBundle +mkIncomingBundle bundle = do + imsg <- mkIncomingMessage bundle.cbCommitMsg + content <- case imsg.content of + IncomingMessageContentPublic c -> pure c + _ -> Nothing + commit <- case content.content of + FramedContentCommit c -> pure c + _ -> Nothing + pure + IncomingBundle + { epoch = imsg.epoch, + groupId = imsg.groupId, + sender = content.sender, + commit = commit, + rawMessage = bundle.cbCommitMsg, + welcome = bundle.cbWelcome, + groupInfoBundle = bundle.cbGroupInfoBundle, + serialized = serializeCommitBundle bundle + } + type MLSMessageStaticErrors = '[ ErrorS 'ConvAccessDenied, ErrorS 'ConvMemberNotFound, @@ -145,15 +234,14 @@ postMLSMessageFromLocalUserV1 :: Local UserId -> Maybe ClientId -> ConnId -> - RawMLS SomeMessage -> + RawMLS Message -> Sem r [Event] postMLSMessageFromLocalUserV1 lusr mc conn smsg = do assertMLSEnabled - case rmValue smsg of - SomeMessage _ msg -> do - cnvOrSub <- lookupConvByGroupId (msgGroupId msg) >>= noteS @'ConvNotFound - fst . first (map lcuEvent) - <$> postMLSMessage lusr (tUntagged lusr) mc cnvOrSub (Just conn) smsg + imsg <- noteS @'MLSUnsupportedMessage $ mkIncomingMessage smsg + cnvOrSub <- lookupConvByGroupId imsg.groupId >>= noteS @'ConvNotFound + fst . 
first (map lcuEvent) + <$> postMLSMessage lusr (tUntagged lusr) mc cnvOrSub (Just conn) imsg postMLSMessageFromLocalUser :: ( HasProposalEffects r, @@ -178,16 +266,15 @@ postMLSMessageFromLocalUser :: Local UserId -> Maybe ClientId -> ConnId -> - RawMLS SomeMessage -> + RawMLS Message -> Sem r MLSMessageSendingStatus postMLSMessageFromLocalUser lusr mc conn smsg = do assertMLSEnabled + imsg <- noteS @'MLSUnsupportedMessage $ mkIncomingMessage smsg + cnvOrSub <- lookupConvByGroupId imsg.groupId >>= noteS @'ConvNotFound (events, unreachables) <- - case rmValue smsg of - SomeMessage _ msg -> do - cnvOrSub <- lookupConvByGroupId (msgGroupId msg) >>= noteS @'ConvNotFound - first (map lcuEvent) - <$> postMLSMessage lusr (tUntagged lusr) mc cnvOrSub (Just conn) smsg + first (map lcuEvent) + <$> postMLSMessage lusr (tUntagged lusr) mc cnvOrSub (Just conn) imsg t <- toUTCTimeMillis <$> input pure $ MLSMessageSendingStatus events t unreachables @@ -203,13 +290,13 @@ postMLSCommitBundle :: Maybe ClientId -> Qualified ConvOrSubConvId -> Maybe ConnId -> - CommitBundle -> + IncomingBundle -> Sem r ([LocalConversationUpdate], UnreachableUsers) -postMLSCommitBundle loc qusr mc qConvOrSub conn rawBundle = +postMLSCommitBundle loc qusr mc qConvOrSub conn bundle = foldQualified loc - (postMLSCommitBundleToLocalConv qusr mc conn rawBundle) - (postMLSCommitBundleToRemoteConv loc qusr mc conn rawBundle) + (postMLSCommitBundleToLocalConv qusr mc conn bundle) + (postMLSCommitBundleToRemoteConv loc qusr mc conn bundle) qConvOrSub postMLSCommitBundleFromLocalUser :: @@ -226,11 +313,11 @@ postMLSCommitBundleFromLocalUser :: Sem r MLSMessageSendingStatus postMLSCommitBundleFromLocalUser lusr mc conn bundle = do assertMLSEnabled - let msg = rmValue (cbCommitMsg bundle) - qConvOrSub <- lookupConvByGroupId (msgGroupId msg) >>= noteS @'ConvNotFound + ibundle <- noteS @'MLSUnsupportedMessage $ mkIncomingBundle bundle + qConvOrSub <- lookupConvByGroupId ibundle.groupId >>= noteS @'ConvNotFound 
(events, unreachables) <- first (map lcuEvent) - <$> postMLSCommitBundle lusr (tUntagged lusr) mc qConvOrSub (Just conn) bundle + <$> postMLSCommitBundle lusr (tUntagged lusr) mc qConvOrSub (Just conn) ibundle t <- toUTCTimeMillis <$> input pure $ MLSMessageSendingStatus events t unreachables @@ -243,46 +330,37 @@ postMLSCommitBundleToLocalConv :: Qualified UserId -> Maybe ClientId -> Maybe ConnId -> - CommitBundle -> + IncomingBundle -> Local ConvOrSubConvId -> Sem r ([LocalConversationUpdate], UnreachableUsers) postMLSCommitBundleToLocalConv qusr mc conn bundle lConvOrSubId = do lConvOrSub <- fetchConvOrSub qusr lConvOrSubId - let msg = rmValue (cbCommitMsg bundle) - - senderClient <- fmap ciClient <$> getSenderIdentity qusr mc SMLSPlainText msg - - events <- case msgPayload msg of - CommitMessage commit -> - do - action <- getCommitData lConvOrSub (msgEpoch msg) commit - -- check that the welcome message matches the action - for_ (cbWelcome bundle) $ \welcome -> - when - ( Set.fromList (map gsNewMember (welSecrets (rmValue welcome))) - /= Set.fromList (map (snd . snd) (cmAssocs (paAdd action))) - ) - $ throwS @'MLSWelcomeMismatch - updates <- - processCommitWithAction - qusr - senderClient - conn - lConvOrSub - (msgEpoch msg) - action - (msgSender msg) - commit - storeGroupInfoBundle (idForConvOrSub . 
tUnqualified $ lConvOrSub) (cbGroupInfoBundle bundle) - pure updates - ApplicationMessage _ -> throwS @'MLSUnsupportedMessage - ProposalMessage _ -> throwS @'MLSUnsupportedMessage - let cm = membersConvOrSub (tUnqualified lConvOrSub) - unreachables <- propagateMessage qusr lConvOrSub conn (rmRaw (cbCommitMsg bundle)) cm + senderClient <- fmap ciClient <$> getSenderIdentity qusr mc (Just bundle.sender) - for_ (cbWelcome bundle) $ - postMLSWelcome lConvOrSub conn + action <- getCommitData lConvOrSub bundle.epoch bundle.commit.rmValue + -- check that the welcome message matches the action + for_ bundle.welcome $ \welcome -> + when + ( Set.fromList (map gsNewMember (welSecrets (rmValue welcome))) + /= Set.fromList (map (snd . snd) (cmAssocs (paAdd action))) + ) + $ throwS @'MLSWelcomeMismatch + events <- + processCommitWithAction + qusr + senderClient + conn + lConvOrSub + bundle.epoch + action + bundle.sender + bundle.commit.rmValue + storeGroupInfoBundle (idForConvOrSub . tUnqualified $ lConvOrSub) bundle.groupInfoBundle + + let cm = membersConvOrSub (tUnqualified lConvOrSub) + unreachables <- propagateMessage qusr lConvOrSub conn bundle.commit.rmRaw cm + traverse_ (postMLSWelcome lConvOrSub conn) bundle.welcome pure (events, unreachables) @@ -303,7 +381,7 @@ postMLSCommitBundleToRemoteConv :: Qualified UserId -> Maybe ClientId -> Maybe ConnId -> - CommitBundle -> + IncomingBundle -> Remote ConvOrSubConvId -> Sem r ([LocalConversationUpdate], UnreachableUsers) postMLSCommitBundleToRemoteConv loc qusr mc con bundle rConvOrSubId = do @@ -315,11 +393,7 @@ postMLSCommitBundleToRemoteConv loc qusr mc con bundle rConvOrSubId = do senderIdentity <- noteS @'MLSMissingSenderClient - =<< getSenderIdentity - qusr - mc - SMLSPlainText - (rmValue (cbCommitMsg bundle)) + =<< getSenderIdentity qusr mc (Just bundle.sender) resp <- runFederated rConvOrSubId $ @@ -328,7 +402,7 @@ postMLSCommitBundleToRemoteConv loc qusr mc con bundle rConvOrSubId = do { mmsrConvOrSubId = tUnqualified 
rConvOrSubId, mmsrSender = tUnqualified lusr, mmsrSenderClient = ciClient senderIdentity, - mmsrRawMessage = Base64ByteString (serializeCommitBundle bundle) + mmsrRawMessage = Base64ByteString bundle.serialized } case resp of MLSMessageResponseError e -> rethrowErrors @MLSBundleStaticErrors e @@ -365,60 +439,40 @@ postMLSMessage :: Maybe ClientId -> Qualified ConvOrSubConvId -> Maybe ConnId -> - RawMLS SomeMessage -> + IncomingMessage -> Sem r ([LocalConversationUpdate], UnreachableUsers) -postMLSMessage loc qusr mc qconvOrSub con smsg = case rmValue smsg of - SomeMessage tag msg -> do - mSender <- fmap ciClient <$> getSenderIdentity qusr mc tag msg - foldQualified - loc - (postMLSMessageToLocalConv qusr mSender con smsg) - (postMLSMessageToRemoteConv loc qusr mSender con smsg) - qconvOrSub +postMLSMessage loc qusr mc qconvOrSub con msg = do + mSender <- fmap ciClient <$> getSenderIdentity qusr mc msg.sender + foldQualified + loc + (postMLSMessageToLocalConv qusr mSender con msg) + (postMLSMessageToRemoteConv loc qusr mSender con msg) + qconvOrSub --- Check that the MLS client who created the message belongs to the user who --- is the sender of the REST request, identified by HTTP header. --- --- The check is skipped in case of conversation creation and encrypted messages. 
-getSenderClient :: - ( Member (ErrorS 'MLSKeyPackageRefNotFound) r, - Member (ErrorS 'MLSClientSenderUserMismatch) r, - Member BrigAccess r - ) => - Qualified UserId -> - SWireFormatTag tag -> - Message tag -> - Sem r (Maybe ClientId) -getSenderClient _ SMLSCipherText _ = pure Nothing -getSenderClient _ _ msg | msgEpoch msg == Epoch 0 = pure Nothing -getSenderClient qusr SMLSPlainText msg = case msgSender msg of - PreconfiguredSender _ -> pure Nothing - NewMemberSender -> pure Nothing - MemberSender ref -> do - cid <- derefKeyPackage ref - when (fmap fst (cidQualifiedClient cid) /= qusr) $ - throwS @'MLSClientSenderUserMismatch - pure (Just (ciClient cid)) +getSenderIndex :: Sender -> Maybe Word32 +getSenderIndex sender = case sender of + SenderMember index -> Just index + _ -> Nothing -- FUTUREWORK: once we can assume that the Z-Client header is present (i.e. -- when v2 is dropped), remove the Maybe in the return type. getSenderIdentity :: - ( Member (ErrorS 'MLSKeyPackageRefNotFound) r, - Member (ErrorS 'MLSClientSenderUserMismatch) r, - Member BrigAccess r + ( Member (ErrorS 'MLSClientSenderUserMismatch) r ) => Qualified UserId -> Maybe ClientId -> - SWireFormatTag tag -> - Message tag -> + Maybe Sender -> Sem r (Maybe ClientIdentity) -getSenderIdentity qusr mc fmt msg = do - mSender <- getSenderClient qusr fmt msg - -- At this point, mc is the client ID of the request, while mSender is the +getSenderIdentity qusr mc mSender = do + let mSenderClient = do + sender <- mSender + index <- getSenderIndex sender + error "TODO: get client ID from index" index + -- At this point, mc is the client ID of the request, while mSenderClient is the -- one contained in the message. We throw an error if the two don't match. 
- when (((==) <$> mc <*> mSender) == Just False) $ + when (((==) <$> mc <*> mSenderClient) == Just False) $ throwS @'MLSClientSenderUserMismatch - pure (mkClientIdentity qusr <$> (mc <|> mSender)) + pure (mkClientIdentity qusr <$> (mc <|> mSenderClient)) postMLSMessageToLocalConv :: ( HasProposalEffects r, @@ -438,32 +492,25 @@ postMLSMessageToLocalConv :: Qualified UserId -> Maybe ClientId -> Maybe ConnId -> - RawMLS SomeMessage -> + IncomingMessage -> Local ConvOrSubConvId -> Sem r ([LocalConversationUpdate], UnreachableUsers) -postMLSMessageToLocalConv qusr senderClient con smsg convOrSubId = - case rmValue smsg of - SomeMessage tag msg -> do - lConvOrSub <- fetchConvOrSub qusr convOrSubId - - -- validate message - events <- case tag of - SMLSPlainText -> case msgPayload msg of - CommitMessage c -> - processCommit qusr senderClient con lConvOrSub (msgEpoch msg) (msgSender msg) c - ApplicationMessage _ -> throwS @'MLSUnsupportedMessage - ProposalMessage prop -> - processProposal qusr lConvOrSub msg prop $> mempty - SMLSCipherText -> case toMLSEnum' (msgContentType (msgPayload msg)) of - Right CommitMessageTag -> throwS @'MLSUnsupportedMessage - Right ProposalMessageTag -> throwS @'MLSUnsupportedMessage - Right ApplicationMessageTag -> pure mempty - Left _ -> throwS @'MLSUnsupportedMessage - - let cm = membersConvOrSub (tUnqualified lConvOrSub) - -- forward message - unreachables <- propagateMessage qusr lConvOrSub con (rmRaw smsg) cm - pure (events, unreachables) +postMLSMessageToLocalConv qusr senderClient con msg convOrSubId = do + lConvOrSub <- fetchConvOrSub qusr convOrSubId + + -- validate message + events <- case msg.content of + IncomingMessageContentPublic pub -> case pub.content of + FramedContentCommit c -> + processCommit qusr senderClient con lConvOrSub msg.epoch pub.sender c.rmValue + FramedContentApplicationData _ -> throwS @'MLSUnsupportedMessage + FramedContentProposal prop -> + processProposal qusr lConvOrSub msg pub prop $> mempty + 
IncomingMessageContentPrivate -> pure mempty + + let cm = membersConvOrSub (tUnqualified lConvOrSub) + unreachables <- propagateMessage qusr lConvOrSub con msg.rawMessage.rmRaw cm + pure (events, unreachables) postMLSMessageToRemoteConv :: ( Members MLSMessageStaticErrors r, @@ -476,10 +523,10 @@ postMLSMessageToRemoteConv :: Qualified UserId -> Maybe ClientId -> Maybe ConnId -> - RawMLS SomeMessage -> + IncomingMessage -> Remote ConvOrSubConvId -> Sem r ([LocalConversationUpdate], UnreachableUsers) -postMLSMessageToRemoteConv loc qusr mc con smsg rConvOrSubId = do +postMLSMessageToRemoteConv loc qusr mc con msg rConvOrSubId = do -- only local users can send messages to remote conversations lusr <- foldQualified loc pure (\_ -> throwS @'ConvAccessDenied) qusr -- only members may send messages to the remote conversation @@ -493,7 +540,7 @@ postMLSMessageToRemoteConv loc qusr mc con smsg rConvOrSubId = do { mmsrConvOrSubId = tUnqualified rConvOrSubId, mmsrSender = tUnqualified lusr, mmsrSenderClient = senderClient, - mmsrRawMessage = Base64ByteString (rmRaw smsg) + mmsrRawMessage = Base64ByteString msg.rawMessage.rmRaw } case resp of MLSMessageResponseError e -> rethrowErrors @MLSMessageStaticErrors e @@ -598,7 +645,7 @@ processCommit :: Maybe ConnId -> Local ConvOrSubConv -> Epoch -> - Sender 'MLSPlainText -> + Sender -> Commit -> Sem r [LocalConversationUpdate] processCommit qusr senderClient con lConvOrSub epoch sender commit = do @@ -743,14 +790,17 @@ processCommitWithAction :: Local ConvOrSubConv -> Epoch -> ProposalAction -> - Sender 'MLSPlainText -> + Sender -> Commit -> Sem r [LocalConversationUpdate] processCommitWithAction qusr senderClient con lConvOrSub epoch action sender commit = case sender of - MemberSender ref -> processInternalCommit qusr senderClient con lConvOrSub epoch action ref commit - NewMemberSender -> processExternalCommit qusr senderClient lConvOrSub epoch action (cPath commit) $> [] - _ -> throw (mlsProtocolError "Unexpected sender") + 
SenderMember index -> + processInternalCommit qusr senderClient con lConvOrSub epoch action (error "TODO" index) commit + SenderExternal _ -> throw (mlsProtocolError "Unexpected sender") + SenderNewMemberProposal -> throw (mlsProtocolError "Unexpected sender") + SenderNewMemberCommit -> + processExternalCommit qusr senderClient lConvOrSub epoch action (cPath commit) $> [] processInternalCommit :: forall r. @@ -958,14 +1008,14 @@ checkProposalCipherSuite :: Sem r () checkProposalCipherSuite suite (AddProposal kpRaw) = do let kp = rmValue kpRaw - unless (kpCipherSuite kp == tagCipherSuite suite) + unless (kp.cipherSuite == tagCipherSuite suite) . throw . mlsProtocolError . T.pack $ "The group's cipher suite " <> show (cipherSuiteNumber (tagCipherSuite suite)) <> " and the cipher suite of the proposal's key package " - <> show (cipherSuiteNumber (kpCipherSuite kp)) + <> show (cipherSuiteNumber kp.cipherSuite) <> " do not match." checkProposalCipherSuite _suite _prop = pure () @@ -976,13 +1026,14 @@ processProposal :: ) => Qualified UserId -> Local ConvOrSubConv -> - Message 'MLSPlainText -> + IncomingMessage -> -- TODO: just pass header? + IncomingPublicMessageContent -> RawMLS Proposal -> Sem r () -processProposal qusr lConvOrSub msg prop = do +processProposal qusr lConvOrSub msg pub prop = do let mlsMeta = mlsMetaConvOrSub (tUnqualified lConvOrSub) - checkEpoch (msgEpoch msg) mlsMeta - checkGroup (msgGroupId msg) mlsMeta + checkEpoch msg.epoch mlsMeta + checkGroup msg.groupId mlsMeta let suiteTag = cnvmlsCipherSuite mlsMeta let cid = mcId . convOfConvOrSub . 
tUnqualified $ lConvOrSub @@ -1006,30 +1057,28 @@ processProposal qusr lConvOrSub msg prop = do -- FUTUREWORK: validate the member's conversation role let propValue = rmValue prop checkProposalCipherSuite suiteTag propValue - when (isExternalProposal msg) $ do - checkExternalProposalSignature suiteTag msg prop + when (isExternal pub.sender) $ do + checkExternalProposalSignature pub prop checkExternalProposalUser qusr propValue let propRef = proposalRef suiteTag prop - storeProposal (msgGroupId msg) (msgEpoch msg) propRef ProposalOriginClient prop + storeProposal msg.groupId msg.epoch propRef ProposalOriginClient prop + +isExternal :: Sender -> Bool +isExternal (SenderMember _) = False +isExternal _ = True checkExternalProposalSignature :: Member (ErrorS 'MLSUnsupportedProposal) r => - CipherSuiteTag -> - Message 'MLSPlainText -> + IncomingPublicMessageContent -> RawMLS Proposal -> Sem r () -checkExternalProposalSignature csTag msg prop = case rmValue prop of +checkExternalProposalSignature msg prop = case rmValue prop of AddProposal kp -> do - let pubKey = bcSignatureKey . 
kpCredential $ rmValue kp - unless (verifyMessageSignature csTag msg pubKey) $ throwS @'MLSUnsupportedProposal + let pubkey = kp.rmValue.leafNode.signatureKey + ctx = error "TODO: get group context" + unless (verifyMessageSignature ctx msg.framedContent msg.authData pubkey) $ throwS @'MLSUnsupportedProposal _ -> pure () -- FUTUREWORK: check signature of other proposals as well -isExternalProposal :: Message 'MLSPlainText -> Bool -isExternalProposal msg = case msgSender msg of - NewMemberSender -> True - PreconfiguredSender _ -> True - _ -> False - -- check owner/subject of the key package exists and belongs to the user checkExternalProposalUser :: ( Member BrigAccess r, @@ -1044,14 +1093,12 @@ checkExternalProposalUser qusr prop = do foldQualified loc ( \lusr -> case prop of - AddProposal keyPackage -> do + AddProposal kp -> do ClientIdentity {ciUser, ciClient} <- either (const $ throwS @'MLSUnsupportedProposal) pure - . kpIdentity - . rmValue - $ keyPackage + (keyPackageIdentity kp.rmValue) -- requesting user must match key package owner when (tUnqualified lusr /= ciUser) $ throwS @'MLSUnsupportedProposal -- client referenced in key package must be one of the user's clients diff --git a/services/galley/src/Galley/API/MLS/Removal.hs b/services/galley/src/Galley/API/MLS/Removal.hs index 27d314ef68..42ead7c84a 100644 --- a/services/galley/src/Galley/API/MLS/Removal.hs +++ b/services/galley/src/Galley/API/MLS/Removal.hs @@ -80,7 +80,13 @@ createAndSendRemoveProposals lConvOrSubConv cs qusr cm = do Just (secKey, pubKey) -> do for_ cs $ \kpref -> do let proposal = mkRemoveProposal kpref - msg = mkSignedMessage secKey pubKey (cnvmlsGroupId meta) (cnvmlsEpoch meta) (ProposalMessage proposal) + msg = + mkSignedMessage + secKey + pubKey + (cnvmlsGroupId meta) + (cnvmlsEpoch meta) + (FramedContentProposal proposal) msgEncoded = encodeMLS' msg storeProposal (cnvmlsGroupId meta) diff --git a/services/galley/src/Galley/Effects/BrigAccess.hs 
b/services/galley/src/Galley/Effects/BrigAccess.hs index be713c6fbc..2c5e4741ba 100644 --- a/services/galley/src/Galley/Effects/BrigAccess.hs +++ b/services/galley/src/Galley/Effects/BrigAccess.hs @@ -73,6 +73,7 @@ import Polysemy import Polysemy.Error import Wire.API.Connection import Wire.API.Error.Galley +import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage import Wire.API.Routes.Internal.Brig diff --git a/services/galley/src/Galley/Intra/Client.hs b/services/galley/src/Galley/Intra/Client.hs index 697c588465..11278568e5 100644 --- a/services/galley/src/Galley/Intra/Client.hs +++ b/services/galley/src/Galley/Intra/Client.hs @@ -63,6 +63,7 @@ import qualified Polysemy.TinyLog as P import Servant import qualified System.Logger.Class as Logger import Wire.API.Error.Galley +import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage import Wire.API.Routes.Internal.Brig diff --git a/services/galley/src/Galley/Keys.hs b/services/galley/src/Galley/Keys.hs index 129b42396a..287191f53e 100644 --- a/services/galley/src/Galley/Keys.hs +++ b/services/galley/src/Galley/Keys.hs @@ -33,6 +33,7 @@ import qualified Data.Map as Map import Data.PEM import Data.X509 import Imports +import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import Wire.API.MLS.Keys diff --git a/services/galley/test/integration.hs b/services/galley/test/integration.hs new file mode 100644 index 0000000000..a26473d24e --- /dev/null +++ b/services/galley/test/integration.hs @@ -0,0 +1 @@ +import Run diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index b6c6bd1023..4aa2a469e6 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -29,11 +29,7 @@ import Control.Exception (throw) import Control.Lens (view) import Control.Lens.Extras import qualified Control.Monad.State as State -import Crypto.Error -import qualified 
Crypto.PubKey.Ed25519 as Ed25519 import qualified Data.Aeson as Aeson -import Data.Binary.Put -import qualified Data.ByteString.Lazy as LBS import Data.Domain import Data.Id import Data.Json.Util hiding ((#)) @@ -998,7 +994,7 @@ testLocalToRemoteNonMember = do . paths ["mls", "messages"] . zUser (qUnqualified bob) . zConn "conn" - . content "message/mls" + . Bilge.content "message/mls" . bytes (mpMessage message) ) !!! do @@ -1650,38 +1646,8 @@ testPublicKeys = do ) @?= [Ed25519] --- | The test manually reads from mls-test-cli's store and extracts a private --- key. The key is needed for signing an AppAck proposal, which as of August 24, --- 2022 only gets forwarded by the backend, i.e., there's no action taken by the --- backend. propUnsupported :: TestM () -propUnsupported = do - users@[_alice, bob] <- createAndConnectUsers (replicate 2 Nothing) - runMLSTest $ do - [alice1, bob1] <- traverse createMLSClient users - void $ uploadNewKeyPackage bob1 - (gid, _) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit - - mems <- readGroupState <$> getClientGroupState alice1 - - (_, ref) <- assertJust $ find ((== alice1) . fst) mems - (priv, pub) <- clientKeyPair alice1 - msg <- - assertJust $ - maybeCryptoError $ - mkAppAckProposalMessage - gid - (Epoch 1) - ref - [] - <$> Ed25519.secretKey priv - <*> Ed25519.publicKey pub - let msgData = LBS.toStrict (runPut (serialiseMLS msg)) - - -- we cannot use sendAndConsumeMessage here, because openmls does not yet - -- support AppAck proposals - postMessage alice1 msgData !!! 
const 201 === statusCode +propUnsupported = pure () -- TODO (app ack does not exist anymore) testBackendRemoveProposalRecreateClient :: TestM () testBackendRemoveProposalRecreateClient = do diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index bb59cb8cdb..5ed6878420 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -31,7 +31,6 @@ import Control.Monad.Catch import Control.Monad.State (StateT, evalStateT) import qualified Control.Monad.State as State import Control.Monad.Trans.Maybe -import Crypto.PubKey.Ed25519 import Data.Aeson.Lens import Data.Binary.Builder (toLazyByteString) import qualified Data.ByteArray as BA @@ -79,7 +78,6 @@ import Wire.API.MLS.GroupInfoBundle import Wire.API.MLS.KeyPackage import Wire.API.MLS.Keys import Wire.API.MLS.Message -import Wire.API.MLS.Proposal import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation import Wire.API.User.Client @@ -124,7 +122,7 @@ postMessage sender msg = do . zUser (ciUser sender) . zClient (ciClient sender) . zConn "conn" - . content "message/mls" + . Bilge.content "message/mls" . bytes msg ) @@ -145,7 +143,7 @@ localPostCommitBundle sender bundle = do . zUser (ciUser sender) . zClient (ciClient sender) . zConn "conn" - . content "application/x-protobuf" + . Bilge.content "application/x-protobuf" . bytes bundle ) @@ -218,32 +216,10 @@ postWelcome uid welcome = do . paths ["v2", "mls", "welcome"] . zUser uid . zConn "conn" - . content "message/mls" + . Bilge.content "message/mls" . 
bytes welcome ) -mkAppAckProposalMessage :: - GroupId -> - Epoch -> - KeyPackageRef -> - [MessageRange] -> - SecretKey -> - PublicKey -> - Message 'MLSPlainText -mkAppAckProposalMessage gid epoch ref mrs priv pub = do - let tbs = - mkRawMLS $ - MessageTBS - { tbsMsgFormat = KnownFormatTag, - tbsMsgGroupId = gid, - tbsMsgEpoch = epoch, - tbsMsgAuthData = mempty, - tbsMsgSender = MemberSender ref, - tbsMsgPayload = ProposalMessage (mkAppAckProposal mrs) - } - sig = BA.convert $ sign priv pub (rmRaw tbs) - in Message tbs (MessageExtraFields sig Nothing Nothing) - saveRemovalKey :: FilePath -> TestM () saveRemovalKey fp = do keys <- fromJust <$> view (tsGConf . optSettings . setMlsPrivateKeyPaths) diff --git a/services/galley/test/integration/API/Util.hs b/services/galley/test/integration/API/Util.hs index 0d35e8fafe..ea0d41cc9c 100644 --- a/services/galley/test/integration/API/Util.hs +++ b/services/galley/test/integration/API/Util.hs @@ -2902,9 +2902,10 @@ wsAssertConvReceiptModeUpdate conv usr new n = do wsAssertBackendRemoveProposalWithEpoch :: HasCallStack => Qualified UserId -> Qualified ConvId -> KeyPackageRef -> Epoch -> Notification -> IO ByteString wsAssertBackendRemoveProposalWithEpoch fromUser convId kpref epoch n = do bs <- wsAssertBackendRemoveProposal fromUser (Conv <$> convId) kpref n - let msg = fromRight (error "Failed to parse Message 'MLSPlaintext") $ decodeMLS' @(Message 'MLSPlainText) bs - let tbs = rmValue . 
msgTBS $ msg - tbsMsgEpoch tbs @?= epoch + let msg = fromRight (error "Failed to parse Message") $ decodeMLS' @Message bs + case msg.content of + MessagePublic pmsg -> liftIO $ pmsg.content.rmValue.epoch @?= epoch + _ -> assertFailure "unexpected message content" pure bs wsAssertBackendRemoveProposal :: HasCallStack => Qualified UserId -> Qualified ConvOrSubConvId -> KeyPackageRef -> Notification -> IO ByteString @@ -2915,16 +2916,16 @@ wsAssertBackendRemoveProposal fromUser cnvOrSubCnv kpref n = do evtType e @?= MLSMessageAdd evtFrom e @?= fromUser let bs = getMLSMessageData (evtData e) - let msg = fromRight (error "Failed to parse Message 'MLSPlaintext") $ decodeMLS' bs - let tbs = rmValue . msgTBS $ msg - tbsMsgSender tbs @?= PreconfiguredSender 0 - case tbsMsgPayload tbs of - ProposalMessage rp -> - case rmValue rp of - RemoveProposal kpRefRemove -> - kpRefRemove @?= kpref - otherProp -> assertFailure $ "Expected RemoveProposal but got " <> show otherProp - otherPayload -> assertFailure $ "Expected ProposalMessage but got " <> show otherPayload + let msg = fromRight (error "Failed to parse Message") $ decodeMLS' @Message bs + liftIO $ case msg.content of + MessagePublic pmsg -> do + pmsg.content.rmValue.sender @?= SenderExternal 0 + case pmsg.content.rmValue.content of + FramedContentProposal prop -> case prop.rmValue of + RemoveProposal kpRefRemove -> kpRefRemove @?= kpref + otherProp -> assertFailure $ "Expected RemoveProposal but got " <> show otherProp + otherPayload -> assertFailure $ "Expected ProposalMessage but got " <> show otherPayload + _ -> assertFailure $ "Expected PublicMessage" pure bs where getMLSMessageData :: Conv.EventData -> ByteString @@ -2944,19 +2945,16 @@ wsAssertAddProposal fromUser convId n = do evtType e @?= MLSMessageAdd evtFrom e @?= fromUser let bs = getMLSMessageData (evtData e) - let msg = fromRight (error "Failed to parse Message 'MLSPlaintext") $ decodeMLS' bs - let tbs = rmValue . 
msgTBS $ msg - tbsMsgSender tbs @?= NewMemberSender - case tbsMsgPayload tbs of - ProposalMessage rp -> - case rmValue rp of - AddProposal _ -> pure () - otherProp -> - assertFailure $ - "Expected AddProposal but got " <> show otherProp - otherPayload -> - assertFailure $ - "Expected ProposalMessage but got " <> show otherPayload + let msg = fromRight (error "Failed to parse Message 'MLSPlaintext") $ decodeMLS' @Message bs + liftIO $ case msg.content of + MessagePublic pmsg -> do + pmsg.content.rmValue.sender @?= SenderExternal 0 + case pmsg.content.rmValue.content of + FramedContentProposal prop -> case prop.rmValue of + AddProposal _ -> pure () + otherProp -> assertFailure $ "Expected AddProposal but got " <> show otherProp + otherPayload -> assertFailure $ "Expected ProposalMessage but got " <> show otherPayload + _ -> assertFailure $ "Expected PublicMessage" pure bs where getMLSMessageData :: Conv.EventData -> ByteString diff --git a/services/galley/test/integration/Main.hs b/services/galley/test/integration/Run.hs similarity index 99% rename from services/galley/test/integration/Main.hs rename to services/galley/test/integration/Run.hs index c67d355dfa..c35edc8d5e 100644 --- a/services/galley/test/integration/Main.hs +++ b/services/galley/test/integration/Run.hs @@ -15,7 +15,7 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . 
-module Main +module Run ( main, ) where diff --git a/services/galley/test/unit.hs b/services/galley/test/unit.hs new file mode 100644 index 0000000000..a26473d24e --- /dev/null +++ b/services/galley/test/unit.hs @@ -0,0 +1 @@ +import Run diff --git a/services/galley/test/unit/Main.hs b/services/galley/test/unit/Run.hs similarity index 99% rename from services/galley/test/unit/Main.hs rename to services/galley/test/unit/Run.hs index fbf969775e..57963cefef 100644 --- a/services/galley/test/unit/Main.hs +++ b/services/galley/test/unit/Run.hs @@ -15,7 +15,7 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . -module Main +module Run ( main, ) where From 8af51353a3a41c49bb0d29c99033f8d9918e1a92 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Wed, 22 Mar 2023 13:58:54 +0100 Subject: [PATCH 03/75] Fix KeyPackage parser --- libs/wire-api/src/Wire/API/MLS/KeyPackage.hs | 10 ++-------- libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs | 2 +- libs/wire-api/src/Wire/API/MLS/Serialisation.hs | 12 +++++------- .../brig/src/Brig/API/MLS/KeyPackages/Validation.hs | 2 +- 4 files changed, 9 insertions(+), 17 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs index 019790338f..ff9b74f83a 100644 --- a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs +++ b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs @@ -35,7 +35,6 @@ import Cassandra.CQL hiding (Set) import Control.Applicative import Control.Lens hiding (set, (.=)) import Data.Aeson (FromJSON, ToJSON) -import Data.Binary import Data.Binary.Get import Data.Binary.Put import qualified Data.ByteString as B @@ -167,7 +166,6 @@ data KeyPackageTBS = KeyPackageTBS cipherSuite :: CipherSuite, initKey :: HPKEPublicKey, leafNode :: LeafNode, - credential :: Credential, extensions :: [Extension] } deriving stock (Eq, Show, Generic) @@ -180,7 +178,6 @@ instance ParseMLS KeyPackageTBS where <*> parseMLS <*> 
parseMLS <*> parseMLS - <*> parseMLS <*> parseMLSVector @VarInt parseMLS data KeyPackage = KeyPackage @@ -201,9 +198,6 @@ instance HasField "cipherSuite" KeyPackage CipherSuite where instance HasField "initKey" KeyPackage HPKEPublicKey where getField = (.tbs.rmValue.initKey) -instance HasField "credential" KeyPackage Credential where - getField = (.tbs.rmValue.credential) - instance HasField "extensions" KeyPackage [Extension] where getField = (.tbs.rmValue.extensions) @@ -211,7 +205,7 @@ instance HasField "leafNode" KeyPackage LeafNode where getField = (.tbs.rmValue.leafNode) keyPackageIdentity :: KeyPackage -> Either Text ClientIdentity -keyPackageIdentity = decodeMLS' @ClientIdentity . (.credential.identityData) +keyPackageIdentity = decodeMLS' @ClientIdentity . (.leafNode.credential.identityData) rawKeyPackageSchema :: ValueSchema NamedSwaggerDoc (RawMLS KeyPackage) rawKeyPackageSchema = @@ -225,7 +219,7 @@ instance ParseMLS KeyPackage where parseMLS = KeyPackage <$> parseRawMLS parseMLS - <*> parseMLSBytes @Word16 + <*> parseMLSBytes @VarInt -------------------------------------------------------------------------------- diff --git a/libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs b/libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs index c20bbe153b..9fcbb71847 100644 --- a/libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs +++ b/libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs @@ -30,7 +30,7 @@ import Imports import Wire.API.MLS.Serialisation import Wire.Arbitrary -newtype ProtocolVersion = ProtocolVersion {pvNumber :: Word8} +newtype ProtocolVersion = ProtocolVersion {pvNumber :: Word16} deriving newtype (Eq, Ord, Show, Binary, Arbitrary, ParseMLS, SerialiseMLS) data ProtocolVersionTag = ProtocolMLS10 | ProtocolMLSDraft11 diff --git a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs index 04472c0dbe..d241bf1ff2 100644 --- a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs +++ 
b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs @@ -108,13 +108,11 @@ instance Binary VarInt where get :: Get VarInt get = do w <- lookAhead getWord8 - let x = shiftR (w .&. 0xc0) 6 - maskVarInt = VarInt . (.&. 0x3fffffff) - if - | x == 0b00 -> maskVarInt . fromIntegral <$> getWord8 - | x == 0b01 -> maskVarInt . fromIntegral <$> getWord16be - | x == 0b10 -> maskVarInt . fromIntegral <$> getWord32be - | otherwise -> fail "invalid VarInt prefix" + case shiftR (w .&. 0xc0) 6 of + 0b00 -> VarInt . fromIntegral <$> getWord8 + 0b01 -> VarInt . (.&. 0x3fff) . fromIntegral <$> getWord16be + 0b10 -> VarInt . (.&. 0x3fffffff) . fromIntegral <$> getWord32be + _ -> fail "invalid VarInt prefix" instance SerialiseMLS VarInt where serialiseMLS = put diff --git a/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs b/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs index eaefbff18f..187a378df0 100644 --- a/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs +++ b/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs @@ -94,7 +94,7 @@ validateKeyPackage identity (RawMLS (KeyPackageData -> kpd) kp) = do (pvTag (kp.protocolVersion) >>= guard . (== ProtocolMLS10)) -- validate credential, lifetime and capabilities - validateCredential identity kp.credential + validateCredential identity kp.leafNode.credential validateSource kp.leafNode.source validateCapabilities kp.leafNode.capabilities From 87e588b82246b1e22bfaf6276edd9c4fbaf8ce77 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Thu, 23 Mar 2023 08:59:52 +0100 Subject: [PATCH 04/75] Fix MLS signature verification Signatures in MLS are computed on a special `SignContent` structure, so we need to replicate that for verification. 
--- libs/wire-api/src/Wire/API/MLS/CipherSuite.hs | 52 +++++++++++++++++-- libs/wire-api/src/Wire/API/MLS/Message.hs | 2 +- .../Brig/API/MLS/KeyPackages/Validation.hs | 5 +- .../brig/test/integration/API/MLS/Util.hs | 2 +- .../brig/test/integration/API/User/Client.hs | 3 +- 5 files changed, 55 insertions(+), 9 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs index d9da3c305a..673415db5b 100644 --- a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs +++ b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs @@ -17,7 +17,26 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . -module Wire.API.MLS.CipherSuite where +module Wire.API.MLS.CipherSuite ( + -- * MLS ciphersuites + CipherSuite (..), + CipherSuiteTag (..), + cipherSuiteTag, + tagCipherSuite, + + -- * MLS signature schemes + SignatureScheme (..), + SignatureSchemeTag (..), + signatureScheme, + signatureSchemeName, + signatureSchemeTag, + csSignatureScheme, + + -- * Utilities + csHash, + csVerifySignatureWithLabel, + csVerifySignature, + ) where import Cassandra.CQL import Control.Error (note) @@ -88,12 +107,39 @@ csHash :: CipherSuiteTag -> ByteString -> ByteString -> ByteString csHash MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 ctx value = HKDF.expand (HKDF.extract @SHA256 (mempty :: ByteString) value) ctx 16 -csVerifySignature :: CipherSuiteTag -> ByteString -> ByteString -> ByteString -> Bool +csVerifySignature :: CipherSuiteTag -> ByteString -> RawMLS a -> ByteString -> Bool csVerifySignature MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 pub x sig = fromMaybe False . 
maybeCryptoError $ do pub' <- Ed25519.publicKey pub sig' <- Ed25519.signature sig - pure $ Ed25519.verify pub' x sig' + pure $ Ed25519.verify pub' x.rmRaw sig' + +data SignContent a = SignContent + { sigLabel :: ByteString, + content :: RawMLS a + } + +instance SerialiseMLS (SignContent a) where + serialiseMLS c = do + serialiseMLSBytes @VarInt c.sigLabel + serialiseMLSBytes @VarInt c.content.rmRaw + +mkSignContent :: ByteString -> RawMLS a -> SignContent a +mkSignContent sigLabel content = + SignContent + { sigLabel = "MLS 1.0 " <> sigLabel, + content = content + } + +csVerifySignatureWithLabel :: + CipherSuiteTag -> + ByteString -> + ByteString -> + RawMLS a -> + ByteString -> + Bool +csVerifySignatureWithLabel cs pub label x sig = + csVerifySignature cs pub (mkRawMLS (mkSignContent label x)) sig csSignatureScheme :: CipherSuiteTag -> SignatureSchemeTag csSignatureScheme MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 = Ed25519 diff --git a/libs/wire-api/src/Wire/API/MLS/Message.hs b/libs/wire-api/src/Wire/API/MLS/Message.hs index 9084d4fc68..a47e2a6149 100644 --- a/libs/wire-api/src/Wire/API/MLS/Message.hs +++ b/libs/wire-api/src/Wire/API/MLS/Message.hs @@ -398,7 +398,7 @@ verifyMessageSignature :: ByteString -> Bool verifyMessageSignature ctx msgContent authData pubkey = isJust $ do - let tbs = encodeMLS' (framedContentTBS ctx msgContent) + let tbs = mkRawMLS (framedContentTBS ctx msgContent) sig = authData.signature_ cs <- cipherSuiteTag ctx.rmValue.cipherSuite guard $ csVerifySignature cs pubkey tbs sig diff --git a/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs b/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs index 187a378df0..a9b774f4f0 100644 --- a/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs +++ b/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs @@ -80,10 +80,11 @@ validateKeyPackage identity (RawMLS (KeyPackageData -> kpd) kp) = do -- validate signature unless - ( csVerifySignature + ( csVerifySignatureWithLabel cs 
kp.leafNode.signatureKey - kp.tbs.rmRaw + "KeyPackageTBS" + kp.tbs kp.signature_ ) $ mlsProtocolError "Invalid signature" diff --git a/services/brig/test/integration/API/MLS/Util.hs b/services/brig/test/integration/API/MLS/Util.hs index 8c8d302872..51b9dd5105 100644 --- a/services/brig/test/integration/API/MLS/Util.hs +++ b/services/brig/test/integration/API/MLS/Util.hs @@ -34,10 +34,10 @@ import System.FilePath import System.Process import Test.Tasty.HUnit import Util +import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage import Wire.API.MLS.Serialisation -import Wire.API.MLS.CipherSuite import Wire.API.User.Client data SetKey = SetKey | DontSetKey diff --git a/services/brig/test/integration/API/User/Client.hs b/services/brig/test/integration/API/User/Client.hs index 23429b0a00..01bcca00c7 100644 --- a/services/brig/test/integration/API/User/Client.hs +++ b/services/brig/test/integration/API/User/Client.hs @@ -61,7 +61,7 @@ import Test.Tasty.HUnit import UnliftIO (mapConcurrently) import Util import Wire.API.Internal.Notification - +import Wire.API.MLS.CipherSuite import qualified Wire.API.Team.Feature as Public import Wire.API.User import qualified Wire.API.User as Public @@ -71,7 +71,6 @@ import Wire.API.User.Client.DPoPAccessToken import Wire.API.User.Client.Prekey import Wire.API.UserMap (QualifiedUserMap (..), UserMap (..), WrappedQualifiedUserMap) import Wire.API.Wrapped (Wrapped (..)) -import Wire.API.MLS.CipherSuite tests :: ConnectionLimit -> Opt.Timeout -> Opt.Opts -> Manager -> DB.ClientState -> Brig -> Cannon -> Galley -> TestTree tests _cl _at opts p db b c g = From 26ce33d43c39e851bc4951a053f722b7e95639ad Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 24 Mar 2023 08:16:19 +0100 Subject: [PATCH 05/75] Update paths now contain leaf nodes --- libs/wire-api/src/Wire/API/MLS/CipherSuite.hs | 41 +-- libs/wire-api/src/Wire/API/MLS/Commit.hs | 19 +- libs/wire-api/src/Wire/API/MLS/Credential.hs | 22 ++ 
libs/wire-api/src/Wire/API/MLS/Extension.hs | 2 +- libs/wire-api/src/Wire/API/MLS/Group.hs | 4 +- libs/wire-api/src/Wire/API/MLS/LeafNode.hs | 4 + libs/wire-api/src/Wire/API/MLS/Message.hs | 29 +- libs/wire-api/src/Wire/API/MLS/Proposal.hs | 25 +- libs/wire-api/src/Wire/API/MLS/ProposalTag.hs | 1 - .../src/Wire/API/MLS/PublicGroupState.hs | 1 + .../src/Wire/API/MLS/Serialisation.hs | 14 +- libs/wire-api/src/Wire/API/MLS/Welcome.hs | 8 +- .../src/Wire/API/Routes/Internal/Brig.hs | 13 + .../src/Wire/API/Routes/Public/Galley/MLS.hs | 6 +- .../test/unit/Test/Wire/API/Roundtrip/MLS.hs | 32 +- services/brig/src/Brig/API/Internal.hs | 2 + .../Brig/API/MLS/KeyPackages/Validation.hs | 11 +- services/galley/src/Galley/API/Federation.hs | 4 +- services/galley/src/Galley/API/MLS/Message.hs | 306 +++++------------- .../galley/src/Galley/Effects/BrigAccess.hs | 15 +- services/galley/src/Galley/Intra/Client.hs | 38 ++- services/galley/src/Galley/Intra/Effects.hs | 5 +- services/galley/test/integration/API/MLS.hs | 8 +- .../galley/test/integration/API/MLS/Util.hs | 5 +- 24 files changed, 275 insertions(+), 340 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs index 673415db5b..bc4bd8f3f6 100644 --- a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs +++ b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs @@ -17,26 +17,27 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . 
-module Wire.API.MLS.CipherSuite ( - -- * MLS ciphersuites - CipherSuite (..), - CipherSuiteTag (..), - cipherSuiteTag, - tagCipherSuite, - - -- * MLS signature schemes - SignatureScheme (..), - SignatureSchemeTag (..), - signatureScheme, - signatureSchemeName, - signatureSchemeTag, - csSignatureScheme, - - -- * Utilities - csHash, - csVerifySignatureWithLabel, - csVerifySignature, - ) where +module Wire.API.MLS.CipherSuite + ( -- * MLS ciphersuites + CipherSuite (..), + CipherSuiteTag (..), + cipherSuiteTag, + tagCipherSuite, + + -- * MLS signature schemes + SignatureScheme (..), + SignatureSchemeTag (..), + signatureScheme, + signatureSchemeName, + signatureSchemeTag, + csSignatureScheme, + + -- * Utilities + csHash, + csVerifySignatureWithLabel, + csVerifySignature, + ) +where import Cassandra.CQL import Control.Error (note) diff --git a/libs/wire-api/src/Wire/API/MLS/Commit.hs b/libs/wire-api/src/Wire/API/MLS/Commit.hs index 8f1a17c8ce..1b0b788f30 100644 --- a/libs/wire-api/src/Wire/API/MLS/Commit.hs +++ b/libs/wire-api/src/Wire/API/MLS/Commit.hs @@ -18,7 +18,7 @@ module Wire.API.MLS.Commit where import Imports -import Wire.API.MLS.KeyPackage +import Wire.API.MLS.LeafNode import Wire.API.MLS.Proposal import Wire.API.MLS.Serialisation import Wire.Arbitrary @@ -30,16 +30,19 @@ data Commit = Commit deriving (Eq, Show) instance ParseMLS Commit where - parseMLS = Commit <$> parseMLSVector @Word32 parseMLS <*> parseMLSOptional parseMLS + parseMLS = + Commit + <$> traceMLS "proposals" (parseMLSVector @VarInt parseMLS) + <*> traceMLS "update path" (parseMLSOptional parseMLS) data UpdatePath = UpdatePath - { upLeaf :: RawMLS KeyPackage, + { upLeaf :: RawMLS LeafNode, upNodes :: [UpdatePathNode] } deriving (Eq, Show) instance ParseMLS UpdatePath where - parseMLS = UpdatePath <$> parseMLS <*> parseMLSVector @Word32 parseMLS + parseMLS = UpdatePath <$> parseMLS <*> parseMLSVector @VarInt parseMLS data UpdatePathNode = UpdatePathNode { upnPublicKey :: ByteString, @@ -48,7 
+51,7 @@ data UpdatePathNode = UpdatePathNode deriving (Eq, Show) instance ParseMLS UpdatePathNode where - parseMLS = UpdatePathNode <$> parseMLSBytes @Word16 <*> parseMLSVector @Word32 parseMLS + parseMLS = UpdatePathNode <$> parseMLSBytes @VarInt <*> parseMLSVector @VarInt parseMLS data HPKECiphertext = HPKECiphertext { hcOutput :: ByteString, @@ -58,9 +61,9 @@ data HPKECiphertext = HPKECiphertext deriving (Arbitrary) via (GenericUniform HPKECiphertext) instance ParseMLS HPKECiphertext where - parseMLS = HPKECiphertext <$> parseMLSBytes @Word16 <*> parseMLSBytes @Word16 + parseMLS = HPKECiphertext <$> parseMLSBytes @VarInt <*> parseMLSBytes @VarInt instance SerialiseMLS HPKECiphertext where serialiseMLS (HPKECiphertext out ct) = do - serialiseMLSBytes @Word16 out - serialiseMLSBytes @Word16 ct + serialiseMLSBytes @VarInt out + serialiseMLSBytes @VarInt ct diff --git a/libs/wire-api/src/Wire/API/MLS/Credential.hs b/libs/wire-api/src/Wire/API/MLS/Credential.hs index eb74be4fe8..5eea497d54 100644 --- a/libs/wire-api/src/Wire/API/MLS/Credential.hs +++ b/libs/wire-api/src/Wire/API/MLS/Credential.hs @@ -29,12 +29,14 @@ import Data.Binary import Data.Binary.Get import Data.Binary.Parser import Data.Binary.Parser.Char8 +import Data.Binary.Put import Data.Domain import Data.Id import Data.Qualified import Data.Schema import qualified Data.Swagger as S import qualified Data.Text as T +import qualified Data.Text.Encoding as T import Data.UUID import GHC.Records import Imports @@ -77,6 +79,7 @@ data ClientIdentity = ClientIdentity } deriving stock (Eq, Ord, Generic) deriving (FromJSON, ToJSON, S.ToSchema) via Schema ClientIdentity + deriving (Arbitrary) via (GenericUniform ClientIdentity) instance Show ClientIdentity where show (ClientIdentity dom u c) = @@ -100,6 +103,17 @@ instance ToSchema ClientIdentity where <*> ciUser .= field "user_id" schema <*> ciClient .= field "client_id" schema +instance S.ToParamSchema ClientIdentity where + toParamSchema _ = mempty & S.type_ ?~ 
S.SwaggerString + +instance FromHttpApiData ClientIdentity where + parseHeader = decodeMLS' + parseUrlPiece = decodeMLS' . T.encodeUtf8 + +instance ToHttpApiData ClientIdentity where + toHeader = encodeMLS' + toUrlPiece = T.decodeUtf8 . encodeMLS' + instance ParseMLS ClientIdentity where parseMLS = do uid <- @@ -111,6 +125,14 @@ instance ParseMLS ClientIdentity where either fail pure . (mkDomain . T.pack) =<< many' anyChar pure $ ClientIdentity dom uid cid +instance SerialiseMLS ClientIdentity where + serialiseMLS cid = do + putByteString $ toASCIIBytes (toUUID (ciUser cid)) + putCharUtf8 ':' + putStringUtf8 $ T.unpack (client (ciClient cid)) + putCharUtf8 '@' + putStringUtf8 $ T.unpack (domainText (ciDomain cid)) + mkClientIdentity :: Qualified UserId -> ClientId -> ClientIdentity mkClientIdentity (Qualified uid domain) = ClientIdentity domain uid diff --git a/libs/wire-api/src/Wire/API/MLS/Extension.hs b/libs/wire-api/src/Wire/API/MLS/Extension.hs index 84606420e1..3c060f6fc4 100644 --- a/libs/wire-api/src/Wire/API/MLS/Extension.hs +++ b/libs/wire-api/src/Wire/API/MLS/Extension.hs @@ -39,4 +39,4 @@ instance ParseMLS Extension where instance SerialiseMLS Extension where serialiseMLS (Extension ty d) = do serialiseMLS ty - serialiseMLSBytes @Word32 d + serialiseMLSBytes @VarInt d diff --git a/libs/wire-api/src/Wire/API/MLS/Group.hs b/libs/wire-api/src/Wire/API/MLS/Group.hs index c693ddd2a2..3110552000 100644 --- a/libs/wire-api/src/Wire/API/MLS/Group.hs +++ b/libs/wire-api/src/Wire/API/MLS/Group.hs @@ -39,10 +39,10 @@ instance IsString GroupId where fromString = GroupId . 
fromString instance ParseMLS GroupId where - parseMLS = GroupId <$> parseMLSBytes @Word8 + parseMLS = GroupId <$> parseMLSBytes @VarInt instance SerialiseMLS GroupId where - serialiseMLS (GroupId gid) = serialiseMLSBytes @Word8 gid + serialiseMLS (GroupId gid) = serialiseMLSBytes @VarInt gid instance ToSchema GroupId where schema = diff --git a/libs/wire-api/src/Wire/API/MLS/LeafNode.hs b/libs/wire-api/src/Wire/API/MLS/LeafNode.hs index 78b8cc5430..dad086966b 100644 --- a/libs/wire-api/src/Wire/API/MLS/LeafNode.hs +++ b/libs/wire-api/src/Wire/API/MLS/LeafNode.hs @@ -24,6 +24,7 @@ module Wire.API.MLS.LeafNode ) where +import qualified Data.Swagger as S import GHC.Records import Imports import Test.QuickCheck @@ -73,6 +74,9 @@ instance ParseMLS LeafNode where <$> parseMLS <*> parseMLSBytes @VarInt +instance S.ToSchema LeafNode where + declareNamedSchema _ = pure (mlsSwagger "LeafNode") + instance HasField "encryptionKey" LeafNode HPKEPublicKey where getField = (.tbs.encryptionKey) diff --git a/libs/wire-api/src/Wire/API/MLS/Message.hs b/libs/wire-api/src/Wire/API/MLS/Message.hs index a47e2a6149..cfb39db117 100644 --- a/libs/wire-api/src/Wire/API/MLS/Message.hs +++ b/libs/wire-api/src/Wire/API/MLS/Message.hs @@ -70,18 +70,18 @@ import Wire.API.MLS.Serialisation import Wire.API.MLS.Welcome data WireFormatTag - = WireFormatPrivateTag - | WireFormatPublicTag + = WireFormatPublicTag + | WireFormatPrivateTag | WireFormatWelcomeTag | WireFormatGroupInfoTag | WireFormatKeyPackageTag deriving (Enum, Bounded, Eq, Show) instance ParseMLS WireFormatTag where - parseMLS = parseMLSEnum @Word8 "wire format" + parseMLS = parseMLSEnum @Word16 "wire format" instance SerialiseMLS WireFormatTag where - serialiseMLS = serialiseMLSEnum @Word8 + serialiseMLS = serialiseMLSEnum @Word16 data Message = Message { protocolVersion :: ProtocolVersion, @@ -90,7 +90,10 @@ data Message = Message deriving (Eq, Show) instance ParseMLS Message where - parseMLS = Message <$> parseMLS <*> parseMLS + 
parseMLS = + Message + <$> traceMLS "version" parseMLS + <*> traceMLS "content" parseMLS instance SerialiseMLS Message where serialiseMLS msg = do @@ -154,9 +157,9 @@ data PublicMessage = PublicMessage instance ParseMLS PublicMessage where parseMLS = do - content <- parseMLS + content <- traceMLS "pub content" parseMLS authData <- parseFramedContentAuthData (framedContentDataTag (content.rmValue.content)) - membershipTag <- case content.rmValue.sender of + membershipTag <- traceMLS "membership tag" $ case content.rmValue.sender of SenderMember _ -> Just <$> parseMLSBytes @VarInt _ -> pure Nothing pure @@ -249,11 +252,11 @@ data FramedContent = FramedContent instance ParseMLS FramedContent where parseMLS = FramedContent - <$> parseMLS - <*> parseMLS - <*> parseMLS - <*> parseMLSBytes @VarInt - <*> parseMLS + <$> traceMLS "groupId" parseMLS + <*> traceMLS "epoch" parseMLS + <*> traceMLS "sender" parseMLS + <*> traceMLS "authdata" (parseMLSBytes @VarInt) + <*> traceMLS "content" parseMLS instance SerialiseMLS FramedContent where serialiseMLS fc = do @@ -336,7 +339,7 @@ data FramedContentAuthData = FramedContentAuthData deriving (Eq, Show) parseFramedContentAuthData :: FramedContentDataTag -> Get FramedContentAuthData -parseFramedContentAuthData tag = do +parseFramedContentAuthData tag = traceMLS "authdata" $ do sig <- parseMLSBytes @VarInt confirmationTag <- case tag of FramedContentCommitTag -> Just <$> parseMLSBytes @VarInt diff --git a/libs/wire-api/src/Wire/API/MLS/Proposal.hs b/libs/wire-api/src/Wire/API/MLS/Proposal.hs index 104b781496..c0c69ae1ea 100644 --- a/libs/wire-api/src/Wire/API/MLS/Proposal.hs +++ b/libs/wire-api/src/Wire/API/MLS/Proposal.hs @@ -21,7 +21,6 @@ module Wire.API.MLS.Proposal where import Cassandra -import Control.Arrow import Control.Lens (makePrisms) import Data.Binary import Data.Binary.Get @@ -42,10 +41,9 @@ data Proposal = AddProposal (RawMLS KeyPackage) | UpdateProposal KeyPackage | RemoveProposal KeyPackageRef - | PreSharedKeyProposal 
PreSharedKeyID + | PreSharedKeyProposal PreSharedKeyID -- TODO | ReInitProposal ReInit | ExternalInitProposal ByteString - | AppAckProposal [MessageRange] | GroupContextExtensionsProposal [Extension] deriving stock (Eq, Show) @@ -57,10 +55,9 @@ instance ParseMLS Proposal where RemoveProposalTag -> RemoveProposal <$> parseMLS PreSharedKeyProposalTag -> PreSharedKeyProposal <$> parseMLS ReInitProposalTag -> ReInitProposal <$> parseMLS - ExternalInitProposalTag -> ExternalInitProposal <$> parseMLSBytes @Word16 - AppAckProposalTag -> AppAckProposal <$> parseMLSVector @Word32 parseMLS + ExternalInitProposalTag -> ExternalInitProposal <$> parseMLSBytes @VarInt GroupContextExtensionsProposalTag -> - GroupContextExtensionsProposal <$> parseMLSVector @Word32 parseMLS + GroupContextExtensionsProposal <$> parseMLSVector @VarInt parseMLS mkRemoveProposal :: KeyPackageRef -> RawMLS Proposal mkRemoveProposal ref = RawMLS bytes (RemoveProposal ref) @@ -69,16 +66,6 @@ mkRemoveProposal ref = RawMLS bytes (RemoveProposal ref) serialiseMLS RemoveProposalTag serialiseMLS ref -serialiseAppAckProposal :: [MessageRange] -> Put -serialiseAppAckProposal mrs = do - serialiseMLS AppAckProposalTag - serialiseMLSVector @Word32 serialiseMLS mrs - -mkAppAckProposal :: [MessageRange] -> RawMLS Proposal -mkAppAckProposal = uncurry RawMLS . (bytes &&& AppAckProposal) - where - bytes = LBS.toStrict . runPut . serialiseAppAckProposal - -- | Compute the proposal ref given a ciphersuite and the raw proposal data. 
proposalRef :: CipherSuiteTag -> RawMLS Proposal -> ProposalRef proposalRef cs = @@ -90,7 +77,7 @@ data PreSharedKeyTag = ExternalKeyTag | ResumptionKeyTag deriving (Bounded, Enum, Eq, Show) instance ParseMLS PreSharedKeyTag where - parseMLS = parseMLSEnum @Word16 "PreSharedKeyID type" + parseMLS = parseMLSEnum @Word8 "PreSharedKeyID type" data PreSharedKeyID = ExternalKeyID ByteString | ResumptionKeyID Resumption deriving stock (Eq, Show) @@ -99,7 +86,7 @@ instance ParseMLS PreSharedKeyID where parseMLS = do t <- parseMLS case t of - ExternalKeyTag -> ExternalKeyID <$> parseMLSBytes @Word8 + ExternalKeyTag -> ExternalKeyID <$> parseMLSBytes @VarInt ResumptionKeyTag -> ResumptionKeyID <$> parseMLS data Resumption = Resumption @@ -130,7 +117,7 @@ instance ParseMLS ReInit where <$> parseMLS <*> parseMLS <*> parseMLS - <*> parseMLSVector @Word32 parseMLS + <*> parseMLSVector @VarInt parseMLS data MessageRange = MessageRange { mrSender :: KeyPackageRef, diff --git a/libs/wire-api/src/Wire/API/MLS/ProposalTag.hs b/libs/wire-api/src/Wire/API/MLS/ProposalTag.hs index e9e670088e..45561c8b78 100644 --- a/libs/wire-api/src/Wire/API/MLS/ProposalTag.hs +++ b/libs/wire-api/src/Wire/API/MLS/ProposalTag.hs @@ -29,7 +29,6 @@ data ProposalTag | PreSharedKeyProposalTag | ReInitProposalTag | ExternalInitProposalTag - | AppAckProposalTag | GroupContextExtensionsProposalTag deriving stock (Bounded, Enum, Eq, Generic, Show) deriving (Arbitrary) via GenericUniform ProposalTag diff --git a/libs/wire-api/src/Wire/API/MLS/PublicGroupState.hs b/libs/wire-api/src/Wire/API/MLS/PublicGroupState.hs index e34ab49dd0..ec5ac539bc 100644 --- a/libs/wire-api/src/Wire/API/MLS/PublicGroupState.hs +++ b/libs/wire-api/src/Wire/API/MLS/PublicGroupState.hs @@ -33,6 +33,7 @@ import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation import Wire.Arbitrary +-- TODO: replace with GroupInfo data PublicGroupStateTBS = PublicGroupStateTBS { pgsVersion :: ProtocolVersion, pgsCipherSuite :: CipherSuite, 
diff --git a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs index d241bf1ff2..9ba9a98833 100644 --- a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs +++ b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs @@ -46,6 +46,7 @@ module Wire.API.MLS.Serialisation mlsSwagger, parseRawMLS, mkRawMLS, + traceMLS, ) where @@ -68,6 +69,7 @@ import Data.Proxy import Data.Schema import qualified Data.Swagger as S import qualified Data.Text as Text +import Debug.Trace import Imports import Test.QuickCheck (Arbitrary (..), chooseInt) @@ -184,19 +186,19 @@ serialiseMLSEnum :: Put serialiseMLSEnum = put . fromMLSEnum @w -data MLSEnumError = MLSEnumUnknown | MLSEnumInvalid +data MLSEnumError = MLSEnumUnknown Int | MLSEnumInvalid toMLSEnum' :: forall a w. (Bounded a, Enum a, Integral w) => w -> Either MLSEnumError a toMLSEnum' w = case fromIntegral w - 1 of n | n < 0 -> Left MLSEnumInvalid - | n < fromEnum @a minBound || n > fromEnum @a maxBound -> Left MLSEnumUnknown + | n < fromEnum @a minBound || n > fromEnum @a maxBound -> Left (MLSEnumUnknown n) | otherwise -> pure (toEnum n) toMLSEnum :: forall a w f. (Bounded a, Enum a, MonadFail f, Integral w) => String -> w -> f a toMLSEnum name = either err pure . 
toMLSEnum' where - err MLSEnumUnknown = fail $ "Unknown " <> name + err (MLSEnumUnknown value) = fail $ "Unknown " <> name <> ": " <> show value err MLSEnumInvalid = fail $ "Invalid " <> name fromMLSEnum :: (Integral w, Enum a) => a -> w @@ -309,3 +311,9 @@ instance SerialiseMLS (RawMLS a) where mkRawMLS :: SerialiseMLS a => a -> RawMLS a mkRawMLS x = RawMLS (LBS.toStrict (runPut (serialiseMLS x))) x + +traceMLS :: Show a => String -> Get a -> Get a +traceMLS l g = do + r <- g + traceM $ l <> " " <> show r + pure r diff --git a/libs/wire-api/src/Wire/API/MLS/Welcome.hs b/libs/wire-api/src/Wire/API/MLS/Welcome.hs index 1575ca2c4e..117d9492dc 100644 --- a/libs/wire-api/src/Wire/API/MLS/Welcome.hs +++ b/libs/wire-api/src/Wire/API/MLS/Welcome.hs @@ -43,15 +43,15 @@ instance ParseMLS Welcome where Welcome <$> parseMLS @ProtocolVersion <*> parseMLS - <*> parseMLSVector @Word32 parseMLS - <*> parseMLSBytes @Word32 + <*> parseMLSVector @VarInt parseMLS + <*> parseMLSBytes @VarInt instance SerialiseMLS Welcome where serialiseMLS (Welcome pv cs ss gi) = do serialiseMLS pv serialiseMLS cs - serialiseMLSVector @Word32 serialiseMLS ss - serialiseMLSBytes @Word32 gi + serialiseMLSVector @VarInt serialiseMLS ss + serialiseMLSBytes @VarInt gi data GroupSecrets = GroupSecrets { gsNewMember :: KeyPackageRef, diff --git a/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs b/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs index 88a292e4e2..c2e81c8b5b 100644 --- a/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs +++ b/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs @@ -54,6 +54,8 @@ import Wire.API.Error.Brig import Wire.API.MLS.CipherSuite (SignatureSchemeTag) import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage +import Wire.API.MLS.LeafNode +import Wire.API.MLS.Servant import Wire.API.MakesFederatedCall import Wire.API.Routes.Internal.Brig.Connection import Wire.API.Routes.Internal.Brig.EJPD @@ -277,6 +279,7 @@ type MLSAPI = ) :<|> GetMLSClients :<|> 
MapKeyPackageRefs + -- TODO: remove the following endpoint :<|> Named "put-key-package-add" ( "key-package-add" @@ -286,6 +289,16 @@ type MLSAPI = '[Servant.JSON] (Respond 200 "Key package ref mapping updated" NewKeyPackageResult) ) + :<|> Named + "validate-leaf-node" + ( "validate-leaf-node" + :> Capture "identity" ClientIdentity + :> ReqBody '[MLS] LeafNode + :> MultiVerb1 + 'GET + '[Servant.JSON] + (RespondEmpty 200 "Leaf node is valid") + ) ) type PutConversationByKeyPackageRef = diff --git a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs index d424982328..d97320bb51 100644 --- a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs +++ b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs @@ -81,7 +81,7 @@ type MLSMessagingAPI = :> CanThrow MLSProposalFailure :> "messages" :> ZLocalUser - :> ZOptClient + :> ZClient :> ZConn :> ReqBody '[MLS] (RawMLS Message) :> MultiVerb1 'POST '[JSON] (Respond 201 "Message sent" [Event]) @@ -119,7 +119,7 @@ type MLSMessagingAPI = :> CanThrow MLSProposalFailure :> "messages" :> ZLocalUser - :> ZOptClient + :> ZClient :> ZConn :> ReqBody '[MLS] (RawMLS Message) :> MultiVerb1 'POST '[JSON] (Respond 201 "Message sent" MLSMessageSendingStatus) @@ -159,7 +159,7 @@ type MLSMessagingAPI = :> CanThrow MLSProposalFailure :> "commit-bundles" :> ZLocalUser - :> ZOptClient + :> ZClient :> ZConn :> ReqBody '[CommitBundleMimeType] CommitBundle :> MultiVerb1 'POST '[JSON] (Respond 201 "Commit accepted and forwarded" MLSMessageSendingStatus) diff --git a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs index 93c4b9c5a2..0ffa420aa9 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs @@ -27,6 +27,7 @@ import Test.Tasty.QuickCheck import Type.Reflection (typeRep) import Wire.API.ConverProtoLens import Wire.API.MLS.CommitBundle +import 
Wire.API.MLS.Credential import Wire.API.MLS.Extension import Wire.API.MLS.GroupInfoBundle import Wire.API.MLS.KeyPackage @@ -40,10 +41,10 @@ tests :: T.TestTree tests = T.localOption (T.Timeout (60 * 1000000) "60s") . T.testGroup "MLS roundtrip tests" $ [ testRoundTrip @KeyPackageRef, + testRoundTrip @ClientIdentity, testRoundTrip @TestPreconfiguredSender, testRoundTrip @RemoveProposalMessage, testRoundTrip @RemoveProposalPayload, - testRoundTrip @AppAckProposalTest, testRoundTrip @ExtensionVector, testRoundTrip @PublicGroupStateTBS, testRoundTrip @PublicGroupState, @@ -100,15 +101,18 @@ newtype MessageGenerator tbs = MessageGenerator {unMessageGenerator :: Message} instance ArbitraryFramedContent fc => Arbitrary (MessageGenerator fc) where arbitrary = - fmap MessageGenerator $ + fmap MessageGenerator $ do + fc <- arbitraryFramedContent @fc + mt <- case fc.sender of + SenderMember _ -> Just <$> arbitrary + _ -> pure Nothing Message <$> arbitrary <*> fmap MessagePublic - ( PublicMessage - <$> fmap mkRawMLS (arbitraryFramedContent @fc) - <*> (FramedContentAuthData <$> arbitrary <*> pure Nothing) - <*> arbitrary + ( PublicMessage (mkRawMLS fc) + <$> (FramedContentAuthData <$> arbitrary <*> pure Nothing) + <*> pure mt ) data FramedContentGenerator sender payload @@ -162,27 +166,15 @@ instance ArbitrarySender TestPreconfiguredSender where --- -newtype AppAckProposalTest = AppAckProposalTest Proposal - deriving newtype (ParseMLS, Eq, Show) - -instance Arbitrary AppAckProposalTest where - arbitrary = AppAckProposalTest . 
AppAckProposal <$> arbitrary - -instance SerialiseMLS AppAckProposalTest where - serialiseMLS (AppAckProposalTest (AppAckProposal mrs)) = serialiseAppAckProposal mrs - serialiseMLS _ = serialiseAppAckProposal [] - ---- - newtype ExtensionVector = ExtensionVector [Extension] deriving newtype (Arbitrary, Eq, Show) instance ParseMLS ExtensionVector where - parseMLS = ExtensionVector <$> parseMLSVector @Word32 (parseMLS @Extension) + parseMLS = ExtensionVector <$> parseMLSVector @VarInt (parseMLS @Extension) instance SerialiseMLS ExtensionVector where serialiseMLS (ExtensionVector exts) = do - serialiseMLSVector @Word32 serialiseMLS exts + serialiseMLSVector @VarInt serialiseMLS exts --- diff --git a/services/brig/src/Brig/API/Internal.hs b/services/brig/src/Brig/API/Internal.hs index 98d228b9ac..4334b93d56 100644 --- a/services/brig/src/Brig/API/Internal.hs +++ b/services/brig/src/Brig/API/Internal.hs @@ -147,6 +147,8 @@ mlsAPI = :<|> getMLSClients :<|> mapKeyPackageRefsInternal :<|> Named @"put-key-package-add" upsertKeyPackage + -- Used by galley to validate leaf nodes appearing in an update path + :<|> Named @"validate-leaf-node" validateLeafNode accountAPI :: ( Member BlacklistStore r, diff --git a/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs b/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs index a9b774f4f0..bef1faf74d 100644 --- a/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs +++ b/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs @@ -18,6 +18,7 @@ module Brig.API.MLS.KeyPackages.Validation ( -- * Main key package validation function validateKeyPackage, + validateLeafNode, mlsProtocolError, validateLifetime', ) @@ -95,12 +96,16 @@ validateKeyPackage identity (RawMLS (KeyPackageData -> kpd) kp) = do (pvTag (kp.protocolVersion) >>= guard . 
(== ProtocolMLS10)) -- validate credential, lifetime and capabilities - validateCredential identity kp.leafNode.credential - validateSource kp.leafNode.source - validateCapabilities kp.leafNode.capabilities + validateLeafNode identity kp.leafNode pure (kpRef cs kpd, kpd) +validateLeafNode :: ClientIdentity -> LeafNode -> Handler r () +validateLeafNode identity leafNode = do + validateCredential identity leafNode.credential + validateSource leafNode.source + validateCapabilities leafNode.capabilities + validateCredential :: ClientIdentity -> Credential -> Handler r () validateCredential identity (BasicCredential cred) = do identity' <- diff --git a/services/galley/src/Galley/API/Federation.hs b/services/galley/src/Galley/API/Federation.hs index 31125e12e3..c45c7f646b 100644 --- a/services/galley/src/Galley/API/Federation.hs +++ b/services/galley/src/Galley/API/Federation.hs @@ -674,7 +674,7 @@ sendMLSCommitBundle remoteDomain msr = <$> postMLSCommitBundle loc (tUntagged sender) - (Just (mmsrSenderClient msr)) + (mmsrSenderClient msr) qConvOrSub Nothing ibundle @@ -722,7 +722,7 @@ sendMLSMessage remoteDomain msr = <$> postMLSMessage loc (tUntagged sender) - (Just (mmsrSenderClient msr)) + (mmsrSenderClient msr) qConvOrSub Nothing msg diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 4ef364cc01..44b2dad48f 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -30,7 +30,6 @@ module Galley.API.MLS.Message ) where -import Control.Arrow ((>>>)) import Control.Comonad import Control.Error.Util (hush) import Control.Lens (forOf_, preview) @@ -99,9 +98,13 @@ import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation import Wire.API.MLS.Welcome import Wire.API.Message -import Wire.API.Routes.Internal.Brig import Wire.API.User.Client +-- TODO: +-- [ ] replace ref with index in remove proposals +-- [ ] validate leaf nodes and key packages locally 
on galley +-- [ ] remove MissingSenderClient error + data IncomingMessage = IncomingMessage { epoch :: Epoch, groupId :: GroupId, @@ -232,16 +235,16 @@ postMLSMessageFromLocalUserV1 :: Member SubConversationStore r ) => Local UserId -> - Maybe ClientId -> + ClientId -> ConnId -> RawMLS Message -> Sem r [Event] -postMLSMessageFromLocalUserV1 lusr mc conn smsg = do +postMLSMessageFromLocalUserV1 lusr c conn smsg = do assertMLSEnabled imsg <- noteS @'MLSUnsupportedMessage $ mkIncomingMessage smsg cnvOrSub <- lookupConvByGroupId imsg.groupId >>= noteS @'ConvNotFound - fst . first (map lcuEvent) - <$> postMLSMessage lusr (tUntagged lusr) mc cnvOrSub (Just conn) imsg + map lcuEvent . fst + <$> postMLSMessage lusr (tUntagged lusr) c cnvOrSub (Just conn) imsg postMLSMessageFromLocalUser :: ( HasProposalEffects r, @@ -264,17 +267,17 @@ postMLSMessageFromLocalUser :: Member SubConversationStore r ) => Local UserId -> - Maybe ClientId -> + ClientId -> ConnId -> RawMLS Message -> Sem r MLSMessageSendingStatus -postMLSMessageFromLocalUser lusr mc conn smsg = do +postMLSMessageFromLocalUser lusr c conn smsg = do assertMLSEnabled imsg <- noteS @'MLSUnsupportedMessage $ mkIncomingMessage smsg cnvOrSub <- lookupConvByGroupId imsg.groupId >>= noteS @'ConvNotFound (events, unreachables) <- first (map lcuEvent) - <$> postMLSMessage lusr (tUntagged lusr) mc cnvOrSub (Just conn) imsg + <$> postMLSMessage lusr (tUntagged lusr) c cnvOrSub (Just conn) imsg t <- toUTCTimeMillis <$> input pure $ MLSMessageSendingStatus events t unreachables @@ -287,16 +290,16 @@ postMLSCommitBundle :: ) => Local x -> Qualified UserId -> - Maybe ClientId -> + ClientId -> Qualified ConvOrSubConvId -> Maybe ConnId -> IncomingBundle -> Sem r ([LocalConversationUpdate], UnreachableUsers) -postMLSCommitBundle loc qusr mc qConvOrSub conn bundle = +postMLSCommitBundle loc qusr c qConvOrSub conn bundle = foldQualified loc - (postMLSCommitBundleToLocalConv qusr mc conn bundle) - (postMLSCommitBundleToRemoteConv loc 
qusr mc conn bundle) + (postMLSCommitBundleToLocalConv qusr c conn bundle) + (postMLSCommitBundleToRemoteConv loc qusr c conn bundle) qConvOrSub postMLSCommitBundleFromLocalUser :: @@ -307,17 +310,17 @@ postMLSCommitBundleFromLocalUser :: Member SubConversationStore r ) => Local UserId -> - Maybe ClientId -> + ClientId -> ConnId -> CommitBundle -> Sem r MLSMessageSendingStatus -postMLSCommitBundleFromLocalUser lusr mc conn bundle = do +postMLSCommitBundleFromLocalUser lusr c conn bundle = do assertMLSEnabled ibundle <- noteS @'MLSUnsupportedMessage $ mkIncomingBundle bundle qConvOrSub <- lookupConvByGroupId ibundle.groupId >>= noteS @'ConvNotFound (events, unreachables) <- first (map lcuEvent) - <$> postMLSCommitBundle lusr (tUntagged lusr) mc qConvOrSub (Just conn) ibundle + <$> postMLSCommitBundle lusr (tUntagged lusr) c qConvOrSub (Just conn) ibundle t <- toUTCTimeMillis <$> input pure $ MLSMessageSendingStatus events t unreachables @@ -328,15 +331,14 @@ postMLSCommitBundleToLocalConv :: Member SubConversationStore r ) => Qualified UserId -> - Maybe ClientId -> + ClientId -> Maybe ConnId -> IncomingBundle -> Local ConvOrSubConvId -> Sem r ([LocalConversationUpdate], UnreachableUsers) -postMLSCommitBundleToLocalConv qusr mc conn bundle lConvOrSubId = do +postMLSCommitBundleToLocalConv qusr c conn bundle lConvOrSubId = do lConvOrSub <- fetchConvOrSub qusr lConvOrSubId - - senderClient <- fmap ciClient <$> getSenderIdentity qusr mc (Just bundle.sender) + senderIdentity <- getSenderIdentity qusr c (Just bundle.sender) action <- getCommitData lConvOrSub bundle.epoch bundle.commit.rmValue -- check that the welcome message matches the action @@ -348,8 +350,7 @@ postMLSCommitBundleToLocalConv qusr mc conn bundle lConvOrSubId = do $ throwS @'MLSWelcomeMismatch events <- processCommitWithAction - qusr - senderClient + senderIdentity conn lConvOrSub bundle.epoch @@ -379,21 +380,19 @@ postMLSCommitBundleToRemoteConv :: ) => Local x -> Qualified UserId -> - Maybe ClientId -> 
+ ClientId -> Maybe ConnId -> IncomingBundle -> Remote ConvOrSubConvId -> Sem r ([LocalConversationUpdate], UnreachableUsers) -postMLSCommitBundleToRemoteConv loc qusr mc con bundle rConvOrSubId = do +postMLSCommitBundleToRemoteConv loc qusr c con bundle rConvOrSubId = do -- only local users can send messages to remote conversations lusr <- foldQualified loc pure (\_ -> throwS @'ConvAccessDenied) qusr -- only members may send commit bundles to a remote conversation flip unless (throwS @'ConvMemberNotFound) =<< checkLocalMemberRemoteConv (tUnqualified lusr) (convOfConvOrSub <$> rConvOrSubId) - senderIdentity <- - noteS @'MLSMissingSenderClient - =<< getSenderIdentity qusr mc (Just bundle.sender) + senderIdentity <- getSenderIdentity qusr c (Just bundle.sender) resp <- runFederated rConvOrSubId $ @@ -436,43 +435,30 @@ postMLSMessage :: ) => Local x -> Qualified UserId -> - Maybe ClientId -> + ClientId -> Qualified ConvOrSubConvId -> Maybe ConnId -> IncomingMessage -> Sem r ([LocalConversationUpdate], UnreachableUsers) -postMLSMessage loc qusr mc qconvOrSub con msg = do - mSender <- fmap ciClient <$> getSenderIdentity qusr mc msg.sender +postMLSMessage loc qusr c qconvOrSub con msg = do + -- verify sender identity + void $ getSenderIdentity qusr c msg.sender + foldQualified loc - (postMLSMessageToLocalConv qusr mSender con msg) - (postMLSMessageToRemoteConv loc qusr mSender con msg) + (postMLSMessageToLocalConv qusr c con msg) + (postMLSMessageToRemoteConv loc qusr c con msg) qconvOrSub -getSenderIndex :: Sender -> Maybe Word32 -getSenderIndex sender = case sender of - SenderMember index -> Just index - _ -> Nothing - --- FUTUREWORK: once we can assume that the Z-Client header is present (i.e. --- when v2 is dropped), remove the Maybe in the return type. 
getSenderIdentity :: - ( Member (ErrorS 'MLSClientSenderUserMismatch) r - ) => Qualified UserId -> - Maybe ClientId -> + ClientId -> Maybe Sender -> - Sem r (Maybe ClientIdentity) -getSenderIdentity qusr mc mSender = do - let mSenderClient = do - sender <- mSender - index <- getSenderIndex sender - error "TODO: get client ID from index" index - -- At this point, mc is the client ID of the request, while mSenderClient is the - -- one contained in the message. We throw an error if the two don't match. - when (((==) <$> mc <*> mSenderClient) == Just False) $ - throwS @'MLSClientSenderUserMismatch - pure (mkClientIdentity qusr <$> (mc <|> mSenderClient)) + Sem r ClientIdentity +getSenderIdentity qusr c _mSender = do + let cid = mkClientIdentity qusr c + -- TODO: check that mSender matches cid + pure cid postMLSMessageToLocalConv :: ( HasProposalEffects r, @@ -480,7 +466,6 @@ postMLSMessageToLocalConv :: Member (ErrorS 'MissingLegalholdConsent) r, Member (ErrorS 'MLSClientSenderUserMismatch) r, Member (ErrorS 'MLSCommitMissingReferences) r, - Member (ErrorS 'MLSMissingSenderClient) r, Member (ErrorS 'MLSProposalNotFound) r, Member (ErrorS 'MLSSelfRemovalNotAllowed) r, Member (ErrorS 'MLSStaleMessage) r, @@ -490,19 +475,21 @@ postMLSMessageToLocalConv :: Member SubConversationStore r ) => Qualified UserId -> - Maybe ClientId -> + ClientId -> Maybe ConnId -> IncomingMessage -> Local ConvOrSubConvId -> Sem r ([LocalConversationUpdate], UnreachableUsers) -postMLSMessageToLocalConv qusr senderClient con msg convOrSubId = do +postMLSMessageToLocalConv qusr c con msg convOrSubId = do lConvOrSub <- fetchConvOrSub qusr convOrSubId + senderIdentity <- getSenderIdentity qusr c msg.sender + -- validate message events <- case msg.content of IncomingMessageContentPublic pub -> case pub.content of - FramedContentCommit c -> - processCommit qusr senderClient con lConvOrSub msg.epoch pub.sender c.rmValue + FramedContentCommit commit -> + processCommit senderIdentity con lConvOrSub 
msg.epoch pub.sender commit.rmValue FramedContentApplicationData _ -> throwS @'MLSUnsupportedMessage FramedContentProposal prop -> processProposal qusr lConvOrSub msg pub prop $> mempty @@ -521,18 +508,17 @@ postMLSMessageToRemoteConv :: ) => Local x -> Qualified UserId -> - Maybe ClientId -> + ClientId -> Maybe ConnId -> IncomingMessage -> Remote ConvOrSubConvId -> Sem r ([LocalConversationUpdate], UnreachableUsers) -postMLSMessageToRemoteConv loc qusr mc con msg rConvOrSubId = do +postMLSMessageToRemoteConv loc qusr senderClient con msg rConvOrSubId = do -- only local users can send messages to remote conversations lusr <- foldQualified loc pure (\_ -> throwS @'ConvAccessDenied) qusr -- only members may send messages to the remote conversation flip unless (throwS @'ConvMemberNotFound) =<< checkLocalMemberRemoteConv (tUnqualified lusr) (convOfConvOrSub <$> rConvOrSubId) - senderClient <- noteS @'MLSMissingSenderClient mc resp <- runFederated rConvOrSubId $ fedClient @'Galley @"send-mls-message" $ @@ -632,7 +618,6 @@ processCommit :: Member (ErrorS 'MissingLegalholdConsent) r, Member (ErrorS 'MLSClientSenderUserMismatch) r, Member (ErrorS 'MLSCommitMissingReferences) r, - Member (ErrorS 'MLSMissingSenderClient) r, Member (ErrorS 'MLSProposalNotFound) r, Member (ErrorS 'MLSSelfRemovalNotAllowed) r, Member (ErrorS 'MLSStaleMessage) r, @@ -640,17 +625,16 @@ processCommit :: Member Resource r, Member SubConversationStore r ) => - Qualified UserId -> - Maybe ClientId -> + ClientIdentity -> Maybe ConnId -> Local ConvOrSubConv -> Epoch -> Sender -> Commit -> Sem r [LocalConversationUpdate] -processCommit qusr senderClient con lConvOrSub epoch sender commit = do +processCommit senderIdentity con lConvOrSub epoch sender commit = do action <- getCommitData lConvOrSub epoch commit - processCommitWithAction qusr senderClient con lConvOrSub epoch action sender commit + processCommitWithAction senderIdentity con lConvOrSub epoch action sender commit processExternalCommit :: 
forall r. @@ -661,7 +645,6 @@ processExternalCommit :: Member (ErrorS 'MLSClientSenderUserMismatch) r, Member (ErrorS 'MLSKeyPackageRefNotFound) r, Member (ErrorS 'MLSStaleMessage) r, - Member (ErrorS 'MLSMissingSenderClient) r, Member (ErrorS 'MLSSubConvClientNotInParent) r, Member ExternalAccess r, Member FederatorAccess r, @@ -674,17 +657,16 @@ processExternalCommit :: Member SubConversationStore r, Member TinyLog r ) => - Qualified UserId -> - Maybe ClientId -> + ClientIdentity -> Local ConvOrSubConv -> Epoch -> ProposalAction -> Maybe UpdatePath -> Sem r () -processExternalCommit qusr mSenderClient lConvOrSub epoch action updatePath = +processExternalCommit senderIdentity lConvOrSub epoch action updatePath = withCommitLock (cnvmlsGroupId . mlsMetaConvOrSub . tUnqualified $ lConvOrSub) epoch $ do let convOrSub = tUnqualified lConvOrSub - newKeyPackage <- + leafNode <- upLeaf <$> note (mlsProtocolError "External commits need an update path") @@ -696,33 +678,16 @@ processExternalCommit qusr mSenderClient lConvOrSub epoch action updatePath = throw . mlsProtocolError $ "The external commit must not have add proposals" - newRef <- - kpRef' newKeyPackage - & note (mlsProtocolError "An invalid key package in the update path") - -- validate and update mapping in brig - eithCid <- - nkpresClientIdentity - <$$> validateAndAddKeyPackageRef - NewKeyPackage - { nkpConversation = tUntagged (convOfConvOrSub . idForConvOrSub <$> lConvOrSub), - nkpKeyPackage = KeyPackageData (rmRaw newKeyPackage) - } - cid <- either (\errMsg -> throw (mlsProtocolError ("Tried to add invalid KeyPackage: " <> errMsg))) pure eithCid - - unless (cidQualifiedUser cid == qusr) $ - throw . mlsProtocolError $ - "The external commit attempts to add another user" - - senderClient <- noteS @'MLSMissingSenderClient mSenderClient - - unless (ciClient cid == senderClient) $ - throw . 
mlsProtocolError $ - "The external commit attempts to add another client of the user, it must only add itself" + validateLeafNode senderIdentity leafNode >>= \case + Left errMsg -> + throw $ + mlsProtocolError ("Tried to add invalid LeafNode: " <> errMsg) + Right _ -> pure () -- only members can join a subconversation forOf_ _SubConv convOrSub $ \(mlsConv, _) -> - unless (isClientMember cid (mcMembers mlsConv)) $ + unless (isClientMember senderIdentity (mcMembers mlsConv)) $ throwS @'MLSSubConvClientNotInParent -- check if there is a key package ref in the remove proposal @@ -730,15 +695,13 @@ processExternalCommit qusr mSenderClient lConvOrSub epoch action updatePath = if Map.null (paRemove action) then pure Nothing else do - (remCid, r) <- derefUser (paRemove action) qusr - unless (cidQualifiedUser cid == cidQualifiedUser remCid) + (remCid, r) <- derefUser (paRemove action) (cidQualifiedUser senderIdentity) + unless (cidQualifiedUser senderIdentity == cidQualifiedUser remCid) . throw . mlsProtocolError $ "The external commit attempts to remove a client from a user other than themselves" pure (Just r) - updateKeyPackageMapping lConvOrSub qusr (ciClient cid) remRef newRef - -- increment epoch number lConvOrSub' <- for lConvOrSub incrementEpoch @@ -749,7 +712,7 @@ processExternalCommit qusr mSenderClient lConvOrSub epoch action updatePath = <$> getPendingBackendRemoveProposals (cnvmlsGroupId . mlsMetaConvOrSub . 
tUnqualified $ lConvOrSub') epoch -- requeue backend remove proposals for the current epoch let cm = membersConvOrSub (tUnqualified lConvOrSub') - createAndSendRemoveProposals lConvOrSub' kpRefs qusr cm + createAndSendRemoveProposals lConvOrSub' kpRefs (cidQualifiedUser senderIdentity) cm where derefUser :: ClientMap -> Qualified UserId -> Sem r (ClientIdentity, KeyPackageRef) derefUser cm user = case Map.assocs cm of @@ -777,15 +740,13 @@ processCommitWithAction :: Member (ErrorS 'MissingLegalholdConsent) r, Member (ErrorS 'MLSClientSenderUserMismatch) r, Member (ErrorS 'MLSCommitMissingReferences) r, - Member (ErrorS 'MLSMissingSenderClient) r, Member (ErrorS 'MLSSelfRemovalNotAllowed) r, Member (ErrorS 'MLSStaleMessage) r, Member (ErrorS 'MLSSubConvClientNotInParent) r, Member Resource r, Member SubConversationStore r ) => - Qualified UserId -> - Maybe ClientId -> + ClientIdentity -> Maybe ConnId -> Local ConvOrSubConv -> Epoch -> @@ -793,99 +754,38 @@ processCommitWithAction :: Sender -> Commit -> Sem r [LocalConversationUpdate] -processCommitWithAction qusr senderClient con lConvOrSub epoch action sender commit = +processCommitWithAction senderIdentity con lConvOrSub epoch action sender commit = case sender of - SenderMember index -> - processInternalCommit qusr senderClient con lConvOrSub epoch action (error "TODO" index) commit + SenderMember _index -> + processInternalCommit senderIdentity con lConvOrSub epoch action commit SenderExternal _ -> throw (mlsProtocolError "Unexpected sender") SenderNewMemberProposal -> throw (mlsProtocolError "Unexpected sender") SenderNewMemberCommit -> - processExternalCommit qusr senderClient lConvOrSub epoch action (cPath commit) $> [] + processExternalCommit senderIdentity lConvOrSub epoch action (cPath commit) $> [] processInternalCommit :: forall r. 
( HasProposalEffects r, Member (ErrorS 'ConvNotFound) r, Member (ErrorS 'MLSCommitMissingReferences) r, - Member (ErrorS 'MLSMissingSenderClient) r, Member (ErrorS 'MLSSelfRemovalNotAllowed) r, Member (ErrorS 'MLSStaleMessage) r, Member (ErrorS 'MissingLegalholdConsent) r, - Member (ErrorS 'MLSSubConvClientNotInParent) r, Member SubConversationStore r, Member Resource r ) => - Qualified UserId -> - Maybe ClientId -> + ClientIdentity -> Maybe ConnId -> Local ConvOrSubConv -> Epoch -> ProposalAction -> - KeyPackageRef -> Commit -> Sem r [LocalConversationUpdate] -processInternalCommit qusr senderClient con lConvOrSub epoch action senderRef commit = do +processInternalCommit senderIdentity con lConvOrSub epoch action commit = do let convOrSub = tUnqualified lConvOrSub mlsMeta = mlsMetaConvOrSub convOrSub - localSelf = isLocal lConvOrSub qusr - - updatePathRef <- - for - (cPath commit) - (upLeaf >>> kpRef' >>> note (mlsProtocolError "Could not compute key package ref")) withCommitLock (cnvmlsGroupId . mlsMetaConvOrSub $ convOrSub) epoch $ do - postponedKeyPackageRefUpdate <- - if epoch == Epoch 0 - then do - let cType = cnvmType . mcMetadata . convOfConvOrSub $ convOrSub - case (localSelf, cType, cmAssocs . 
membersConvOrSub $ convOrSub, convOrSub) of - (True, SelfConv, [], Conv _) -> do - creatorClient <- noteS @'MLSMissingSenderClient senderClient - let creatorRef = fromMaybe senderRef updatePathRef - updateKeyPackageMapping lConvOrSub qusr creatorClient Nothing creatorRef - (True, SelfConv, _, _) -> - -- this is a newly created (sub)conversation, and it should - -- contain exactly one client (the creator) - throw (InternalErrorWithDescription "Unexpected creator client set") - (True, _, [(qu, (creatorClient, _))], Conv _) - | qu == qusr -> do - -- use update path as sender reference and if not existing fall back to sender - let creatorRef = fromMaybe senderRef updatePathRef - -- register the creator client - updateKeyPackageMapping - lConvOrSub - qusr - creatorClient - Nothing - creatorRef - -- remote clients cannot send the first commit - (False, _, _, _) -> throwS @'MLSStaleMessage - (True, _, [], SubConv parentConv _) -> do - creatorClient <- noteS @'MLSMissingSenderClient senderClient - unless (isClientMember (mkClientIdentity qusr creatorClient) (mcMembers parentConv)) $ - throwS @'MLSSubConvClientNotInParent - let creatorRef = fromMaybe senderRef updatePathRef - updateKeyPackageMapping lConvOrSub qusr creatorClient Nothing creatorRef - (_, _, _, _) -> - throw (InternalErrorWithDescription "Unexpected creator client set") - pure $ pure () -- no key package ref update necessary - else case updatePathRef of - Just updatedRef -> do - -- postpone key package ref update until other checks/processing passed - case senderClient of - Just cli -> - pure - ( updateKeyPackageMapping - lConvOrSub - qusr - cli - (Just senderRef) - updatedRef - ) - Nothing -> pure (pure ()) - Nothing -> pure (pure ()) -- ignore commits without update path - -- check all pending proposals are referenced in the commit allPendingProposals <- getAllPendingProposalRefs (cnvmlsGroupId mlsMeta) epoch let referencedProposals = Set.fromList $ mapMaybe (\x -> preview Proposal._Ref x) (cProposals 
commit) @@ -893,45 +793,13 @@ processInternalCommit qusr senderClient con lConvOrSub epoch action senderRef co throwS @'MLSCommitMissingReferences -- process and execute proposals - updates <- executeProposalAction qusr con lConvOrSub action + updates <- executeProposalAction (cidQualifiedUser senderIdentity) con lConvOrSub action - -- update key package ref if necessary - postponedKeyPackageRefUpdate -- increment epoch number for_ lConvOrSub incrementEpoch pure updates --- | Note: Use this only for KeyPackage that are already validated -updateKeyPackageMapping :: - ( Member BrigAccess r, - Member MemberStore r - ) => - Local ConvOrSubConv -> - Qualified UserId -> - ClientId -> - Maybe KeyPackageRef -> - KeyPackageRef -> - Sem r () -updateKeyPackageMapping lConvOrSub qusr cid mOld new = do - let qconv = tUntagged (convOfConvOrSub . idForConvOrSub <$> lConvOrSub) - -- update actual mapping in brig - case mOld of - Nothing -> - addKeyPackageRef new qusr cid qconv - Just old -> - updateKeyPackageRef - KeyPackageUpdate - { kpupPrevious = old, - kpupNext = new - } - let groupId = cnvmlsGroupId . mlsMetaConvOrSub . 
tUnqualified $ lConvOrSub - - -- remove old (client, key package) pair - removeMLSClients groupId qusr (Set.singleton cid) - -- add new (client, key package) pair - addMLSClients groupId qusr (Set.singleton (cid, new)) - applyProposalRef :: ( HasProposalEffects r, ( Member (ErrorS 'ConvNotFound) r, @@ -963,35 +831,23 @@ applyProposal :: GroupId -> Proposal -> Sem r ProposalAction -applyProposal convOrSubConvId groupId (AddProposal kp) = do +applyProposal _convOrSubConvId groupId (AddProposal kp) = do ref <- kpRef' kp & note (mlsProtocolError "Could not compute ref of a key package in an Add proposal") mbClientIdentity <- getClientByKeyPackageRef ref clientIdentity <- case mbClientIdentity of Nothing -> do - -- external add proposal for a new key package unknown to the backend - lConvOrSubConvId <- qualifyLocal convOrSubConvId - addKeyPackageMapping lConvOrSubConvId ref (KeyPackageData (rmRaw kp)) - Just ci -> + -- TODO: validate key package + cid <- + either + (\_ -> throw (mlsProtocolError "Invalid key package in an Add proposal")) + pure + $ keyPackageIdentity kp.rmValue + addMLSClients groupId (cidQualifiedUser cid) (Set.singleton (ciClient cid, ref)) + pure cid + Just cid -> -- ad-hoc add proposal in commit, the key package has been claimed before - pure ci - pure (paAddClient . (<$$>) (,ref) . 
cidQualifiedClient $ clientIdentity) - where - addKeyPackageMapping :: Local ConvOrSubConvId -> KeyPackageRef -> KeyPackageData -> Sem r ClientIdentity - addKeyPackageMapping lConvOrSubConvId ref kpdata = do - -- validate and update mapping in brig - eithCid <- - nkpresClientIdentity - <$$> validateAndAddKeyPackageRef - NewKeyPackage - { nkpConversation = tUntagged (convOfConvOrSub <$> lConvOrSubConvId), - nkpKeyPackage = kpdata - } - cid <- either (\errMsg -> throw (mlsProtocolError ("Tried to add invalid KeyPackage: " <> errMsg))) pure eithCid - let qcid = cidQualifiedClient cid - let qusr = fst <$> qcid - -- update mapping in galley - addMLSClients groupId qusr (Set.singleton (ciClient cid, ref)) pure cid + pure (paAddClient . (<$$>) (,ref) . cidQualifiedClient $ clientIdentity) applyProposal _convOrSubConvId _groupId (RemoveProposal ref) = do qclient <- cidQualifiedClient <$> derefKeyPackage ref pure (paRemoveClient ((,ref) <$$> qclient)) diff --git a/services/galley/src/Galley/Effects/BrigAccess.hs b/services/galley/src/Galley/Effects/BrigAccess.hs index 2c5e4741ba..a6802eb165 100644 --- a/services/galley/src/Galley/Effects/BrigAccess.hs +++ b/services/galley/src/Galley/Effects/BrigAccess.hs @@ -51,7 +51,8 @@ module Galley.Effects.BrigAccess getClientByKeyPackageRef, getLocalMLSClients, addKeyPackageRef, - validateAndAddKeyPackageRef, + validateLeafNode, + validateKeyPackage, updateKeyPackageRef, deleteKeyPackageRefs, @@ -76,7 +77,8 @@ import Wire.API.Error.Galley import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage -import Wire.API.Routes.Internal.Brig +import Wire.API.MLS.LeafNode +import Wire.API.MLS.Serialisation import Wire.API.Routes.Internal.Brig.Connection import qualified Wire.API.Routes.Internal.Galley.TeamFeatureNoConfigMulti as Multi import Wire.API.Team.Feature @@ -134,7 +136,14 @@ data BrigAccess m a where GetClientByKeyPackageRef :: KeyPackageRef -> BrigAccess m (Maybe ClientIdentity) GetLocalMLSClients 
:: Local UserId -> SignatureSchemeTag -> BrigAccess m (Set ClientInfo) AddKeyPackageRef :: KeyPackageRef -> Qualified UserId -> ClientId -> Qualified ConvId -> BrigAccess m () - ValidateAndAddKeyPackageRef :: NewKeyPackage -> BrigAccess m (Either Text NewKeyPackageResult) + ValidateLeafNode :: + ClientIdentity -> + RawMLS LeafNode -> + BrigAccess m (Either Text ()) + ValidateKeyPackage :: + ClientIdentity -> + RawMLS KeyPackage -> + BrigAccess m (Either Text ()) UpdateKeyPackageRef :: KeyPackageUpdate -> BrigAccess m () DeleteKeyPackageRefs :: [KeyPackageRef] -> BrigAccess m () UpdateSearchVisibilityInbound :: diff --git a/services/galley/src/Galley/Intra/Client.hs b/services/galley/src/Galley/Intra/Client.hs index 11278568e5..f5143872cb 100644 --- a/services/galley/src/Galley/Intra/Client.hs +++ b/services/galley/src/Galley/Intra/Client.hs @@ -26,7 +26,8 @@ module Galley.Intra.Client getLocalMLSClients, addKeyPackageRef, updateKeyPackageRef, - validateAndAddKeyPackageRef, + validateLeafNode, + validateKeyPackage, deleteKeyPackageRefs, ) where @@ -66,6 +67,8 @@ import Wire.API.Error.Galley import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage +import Wire.API.MLS.LeafNode +import Wire.API.MLS.Serialisation import Wire.API.Routes.Internal.Brig import Wire.API.User.Auth.LegalHold import Wire.API.User.Client @@ -246,19 +249,38 @@ updateKeyPackageRef keyPackageRef = . expect2xx ) -validateAndAddKeyPackageRef :: NewKeyPackage -> App (Either Text NewKeyPackageResult) -validateAndAddKeyPackageRef nkp = do +validateKeyPackage :: ClientIdentity -> RawMLS KeyPackage -> App (Either Text ()) +validateKeyPackage cid keyPackage = do res <- call Brig - ( method PUT - . paths ["i", "mls", "key-package-add"] - . json nkp + ( method GET + . paths ["i", "mls", "validate-key-package", toHeader cid] + . content "message/mls" + . 
bytes (encodeMLS' keyPackage) + ) + let statusCode = HTTP.statusCode (Rq.responseStatus res) + if + | statusCode `div` 100 == 2 -> Right <$> parseResponse (mkError status502 "server-error") res + | statusCode `div` 100 == 4 -> do + err <- parseResponse (mkError status502 "server-error") res + pure (Left ("Error validating key package: " <> toStrict (Error.label err) <> ": " <> toStrict (Error.message err))) + | otherwise -> throwM (mkError status502 "server-error" "Unexpected http status returned from /i/mls/validate-leaf-node") + +validateLeafNode :: ClientIdentity -> RawMLS LeafNode -> App (Either Text ()) +validateLeafNode cid leafNode = do + res <- + call + Brig + ( method GET + . paths ["i", "mls", "validate-leaf-node", toHeader cid] + . content "message/mls" + . bytes (encodeMLS' leafNode) ) let statusCode = HTTP.statusCode (Rq.responseStatus res) if | statusCode `div` 100 == 2 -> Right <$> parseResponse (mkError status502 "server-error") res | statusCode `div` 100 == 4 -> do err <- parseResponse (mkError status502 "server-error") res - pure (Left ("Error validating keypackage: " <> toStrict (Error.label err) <> ": " <> toStrict (Error.message err))) - | otherwise -> throwM (mkError status502 "server-error" "Unexpected http status returned from /i/mls/key-packages/add") + pure (Left ("Error validating leaf node: " <> toStrict (Error.label err) <> ": " <> toStrict (Error.message err))) + | otherwise -> throwM (mkError status502 "server-error" "Unexpected http status returned from /i/mls/validate-leaf-node") diff --git a/services/galley/src/Galley/Intra/Effects.hs b/services/galley/src/Galley/Intra/Effects.hs index 3d38b4b5c6..0a07187c36 100644 --- a/services/galley/src/Galley/Intra/Effects.hs +++ b/services/galley/src/Galley/Intra/Effects.hs @@ -86,9 +86,8 @@ interpretBrigAccess = interpret $ \case AddKeyPackageRef ref qusr cl qcnv -> embedApp $ addKeyPackageRef ref qusr cl qcnv - ValidateAndAddKeyPackageRef nkp -> - embedApp $ - validateAndAddKeyPackageRef 
nkp + ValidateLeafNode cid ln -> embedApp $ validateLeafNode cid ln + ValidateKeyPackage cid kp -> embedApp $ validateKeyPackage cid kp UpdateKeyPackageRef update -> embedApp $ updateKeyPackageRef update diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 4aa2a469e6..744ba79a12 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -100,7 +100,7 @@ tests s = ], testGroup "Commit" - [ test s "add user to a conversation" testAddUser, + [ test s "add user to a conversation XXX" testAddUser, test s "add user with an incomplete welcome" testAddUserWithBundleIncompleteWelcome, test s "add user (not connected)" testAddUserNotConnected, test s "add user (partial client list)" testAddUserPartial, @@ -458,7 +458,13 @@ testAddUser = do qcnv <- runMLSTest $ do [alice1, bob1, bob2] <- traverse createMLSClient [alice, bob, bob] + + putStrLn $ "alice1: " <> show alice1 + putStrLn $ "bob1: " <> show bob1 + putStrLn $ "bob2: " <> show bob2 + traverse_ uploadNewKeyPackage [bob1, bob2] + (_, qcnv) <- setupMLSGroup alice1 events <- createAddCommit alice1 [bob] >>= sendAndConsumeCommit event <- assertOne events diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index 5ed6878420..fcad3e9ffe 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -417,7 +417,8 @@ uploadNewKeyPackage qcid = do generateKeyPackage :: HasCallStack => ClientIdentity -> MLSTest (RawMLS KeyPackage, KeyPackageRef) generateKeyPackage qcid = do - kp <- liftIO . 
decodeMLSError =<< mlscli qcid ["key-package", "create"] Nothing + kpData <- mlscli qcid ["key-package", "create"] Nothing + kp <- liftIO $ decodeMLSError kpData let ref = fromJust (kpRef' kp) fp <- keyPackageFile qcid ref liftIO $ BS.writeFile fp (rmRaw kp) @@ -904,6 +905,8 @@ consumeMessage1 cid msg = do -- commit, the 'sendAndConsumeCommit' function should be used instead. sendAndConsumeMessage :: HasCallStack => MessagePackage -> MLSTest ([Event], UnreachableUsers) sendAndConsumeMessage mp = do + putStrLn "sending message:" + print $ hex (mpMessage mp) res <- fmap (mmssEvents Tuple.&&& mmssUnreachableUsers) $ responseJsonError From 7dcc23f7b557742e0337e9f08e758d5ad599475e Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Mon, 27 Mar 2023 15:55:28 +0200 Subject: [PATCH 06/75] Remove proposals now have indices instead of refs --- libs/wire-api/src/Wire/API/MLS/Proposal.hs | 39 ++-- libs/wire-api/src/Wire/API/MLS/Welcome.hs | 10 +- .../API/Routes/Public/Galley/Conversation.hs | 4 - libs/wire-api/test/unit/Test/Wire/API/MLS.hs | 6 +- .../test/unit/Test/Wire/API/Roundtrip/MLS.hs | 2 +- services/galley/galley.cabal | 1 + services/galley/schema/src/Run.hs | 4 +- services/galley/schema/src/V82_MLSDraft17.hs | 31 +++ services/galley/src/Galley/API/Action.hs | 4 - services/galley/src/Galley/API/Create.hs | 28 +-- .../galley/src/Galley/API/MLS/Conversation.hs | 3 +- services/galley/src/Galley/API/MLS/Message.hs | 193 ++++++++---------- services/galley/src/Galley/API/MLS/Removal.hs | 21 +- .../src/Galley/API/MLS/SubConversation.hs | 9 +- services/galley/src/Galley/API/MLS/Types.hs | 50 +++-- services/galley/src/Galley/API/MLS/Util.hs | 5 +- services/galley/src/Galley/API/Update.hs | 16 +- .../Galley/Cassandra/Conversation/Members.hs | 7 +- .../galley/src/Galley/Cassandra/Queries.hs | 9 +- .../src/Galley/Cassandra/SubConversation.hs | 3 +- .../galley/src/Galley/Effects/MemberStore.hs | 3 +- services/galley/test/integration/API/Util.hs | 4 +- 22 files changed, 234 
insertions(+), 218 deletions(-) create mode 100644 services/galley/schema/src/V82_MLSDraft17.hs diff --git a/libs/wire-api/src/Wire/API/MLS/Proposal.hs b/libs/wire-api/src/Wire/API/MLS/Proposal.hs index c0c69ae1ea..cef7ca4200 100644 --- a/libs/wire-api/src/Wire/API/MLS/Proposal.hs +++ b/libs/wire-api/src/Wire/API/MLS/Proposal.hs @@ -24,14 +24,13 @@ import Cassandra import Control.Lens (makePrisms) import Data.Binary import Data.Binary.Get -import Data.Binary.Put -import qualified Data.ByteString.Lazy as LBS import Imports import Wire.API.MLS.CipherSuite import Wire.API.MLS.Context import Wire.API.MLS.Extension import Wire.API.MLS.Group import Wire.API.MLS.KeyPackage +import Wire.API.MLS.LeafNode import Wire.API.MLS.ProposalTag import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation @@ -39,10 +38,10 @@ import Wire.Arbitrary data Proposal = AddProposal (RawMLS KeyPackage) - | UpdateProposal KeyPackage - | RemoveProposal KeyPackageRef - | PreSharedKeyProposal PreSharedKeyID -- TODO - | ReInitProposal ReInit + | UpdateProposal (RawMLS LeafNode) + | RemoveProposal Word32 + | PreSharedKeyProposal (RawMLS PreSharedKeyID) + | ReInitProposal (RawMLS ReInit) | ExternalInitProposal ByteString | GroupContextExtensionsProposal [Extension] deriving stock (Eq, Show) @@ -59,12 +58,28 @@ instance ParseMLS Proposal where GroupContextExtensionsProposalTag -> GroupContextExtensionsProposal <$> parseMLSVector @VarInt parseMLS -mkRemoveProposal :: KeyPackageRef -> RawMLS Proposal -mkRemoveProposal ref = RawMLS bytes (RemoveProposal ref) - where - bytes = LBS.toStrict . 
runPut $ do - serialiseMLS RemoveProposalTag - serialiseMLS ref +instance SerialiseMLS Proposal where + serialiseMLS (AddProposal kp) = do + serialiseMLS AddProposalTag + serialiseMLS kp + serialiseMLS (UpdateProposal ln) = do + serialiseMLS UpdateProposalTag + serialiseMLS ln + serialiseMLS (RemoveProposal i) = do + serialiseMLS RemoveProposalTag + serialiseMLS i + serialiseMLS (PreSharedKeyProposal k) = do + serialiseMLS PreSharedKeyProposalTag + serialiseMLS k + serialiseMLS (ReInitProposal ri) = do + serialiseMLS ReInitProposalTag + serialiseMLS ri + serialiseMLS (ExternalInitProposal ko) = do + serialiseMLS ExternalInitProposalTag + serialiseMLSBytes @VarInt ko + serialiseMLS (GroupContextExtensionsProposal es) = do + serialiseMLS GroupContextExtensionsProposalTag + serialiseMLSVector @VarInt serialiseMLS es -- | Compute the proposal ref given a ciphersuite and the raw proposal data. proposalRef :: CipherSuiteTag -> RawMLS Proposal -> ProposalRef diff --git a/libs/wire-api/src/Wire/API/MLS/Welcome.hs b/libs/wire-api/src/Wire/API/MLS/Welcome.hs index 117d9492dc..cacb183cba 100644 --- a/libs/wire-api/src/Wire/API/MLS/Welcome.hs +++ b/libs/wire-api/src/Wire/API/MLS/Welcome.hs @@ -22,13 +22,11 @@ import Imports import Wire.API.MLS.CipherSuite import Wire.API.MLS.Commit import Wire.API.MLS.KeyPackage -import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation import Wire.Arbitrary data Welcome = Welcome - { welProtocolVersion :: ProtocolVersion, - welCipherSuite :: CipherSuite, + { welCipherSuite :: CipherSuite, welSecrets :: [GroupSecrets], welGroupInfo :: ByteString } @@ -41,14 +39,12 @@ instance S.ToSchema Welcome where instance ParseMLS Welcome where parseMLS = Welcome - <$> parseMLS @ProtocolVersion - <*> parseMLS + <$> parseMLS <*> parseMLSVector @VarInt parseMLS <*> parseMLSBytes @VarInt instance SerialiseMLS Welcome where - serialiseMLS (Welcome pv cs ss gi) = do - serialiseMLS pv + serialiseMLS (Welcome cs ss gi) = do serialiseMLS cs 
serialiseMLSVector @VarInt serialiseMLS ss serialiseMLSBytes @VarInt gi diff --git a/libs/wire-api/src/Wire/API/Routes/Public/Galley/Conversation.hs b/libs/wire-api/src/Wire/API/Routes/Public/Galley/Conversation.hs index 7519255d91..91503dcf06 100644 --- a/libs/wire-api/src/Wire/API/Routes/Public/Galley/Conversation.hs +++ b/libs/wire-api/src/Wire/API/Routes/Public/Galley/Conversation.hs @@ -380,7 +380,6 @@ type ConversationAPI = :> CanThrow 'MissingLegalholdConsent :> Description "This returns 201 when a new conversation is created, and 200 when the conversation already existed" :> ZLocalUser - :> ZOptClient :> ZOptConn :> "conversations" :> VersionedReqBody 'V2 '[Servant.JSON] NewConv @@ -403,7 +402,6 @@ type ConversationAPI = :> CanThrow 'MissingLegalholdConsent :> Description "This returns 201 when a new conversation is created, and 200 when the conversation already existed" :> ZLocalUser - :> ZOptClient :> ZOptConn :> "conversations" :> ReqBody '[Servant.JSON] NewConv @@ -424,7 +422,6 @@ type ConversationAPI = :> CanThrow 'MissingLegalholdConsent :> Description "This returns 201 when a new conversation is created, and 200 when the conversation already existed" :> ZLocalUser - :> ZOptClient :> ZOptConn :> "conversations" :> ReqBody '[Servant.JSON] NewConv @@ -1261,7 +1258,6 @@ type ConversationAPI = :> CanThrow 'ConvInvalidProtocolTransition :> CanThrow 'ConvMemberNotFound :> ZLocalUser - :> ZClient :> ZConn :> "conversations" :> QualifiedCapture' '[Description "Conversation ID"] "cnv" ConvId diff --git a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs index ec5d4b5c52..9a755bcf29 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs @@ -121,7 +121,9 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do usr <- flip Qualified (Domain "example.com") <$> (Id <$> UUID.nextRandom) pure (userClientQid usr c) void . 
liftIO $ spawn (cli qcid2 tmp ["init", qcid2]) Nothing - kp <- liftIO $ decodeMLSError <$> spawn (cli qcid2 tmp ["key-package", "create"]) Nothing + kp :: RawMLS KeyPackage <- + liftIO $ + decodeMLSError <$> spawn (cli qcid2 tmp ["key-package", "create"]) Nothing liftIO $ BS.writeFile (tmp qcid2) (rmRaw kp) let groupFilename = "group" @@ -132,7 +134,7 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do secretKey <- Ed25519.generateSecretKey let publicKey = Ed25519.toPublic secretKey - let proposal = mkRemoveProposal (fromJust (kpRef' kp)) + let proposal = mkRawMLS (RemoveProposal (error "TODO: remove proposal")) let message = mkSignedMessage secretKey diff --git a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs index 0ffa420aa9..894f176d9e 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs @@ -147,7 +147,7 @@ newtype RemoveProposalPayload = RemoveProposalPayload {unRemoveProposalPayload : deriving newtype (ParseMLS, SerialiseMLS, Eq, Show) instance Arbitrary RemoveProposalPayload where - arbitrary = RemoveProposalPayload . FramedContentProposal . mkRemoveProposal <$> arbitrary + arbitrary = RemoveProposalPayload . FramedContentProposal . mkRawMLS . 
RemoveProposal <$> arbitrary instance ArbitraryFramedContentData RemoveProposalPayload where arbitraryFramedContentData = unRemoveProposalPayload <$> arbitrary diff --git a/services/galley/galley.cabal b/services/galley/galley.cabal index 6a921b7cc1..5af98a02c4 100644 --- a/services/galley/galley.cabal +++ b/services/galley/galley.cabal @@ -593,6 +593,7 @@ executable galley-schema V79_TeamFeatureMlsE2EId V80_AddConversationCodePassword V81_MLSSubconversation + V82_MLSDraft17 hs-source-dirs: schema/src default-extensions: TemplateHaskell diff --git a/services/galley/schema/src/Run.hs b/services/galley/schema/src/Run.hs index e8583485ee..447b203f4e 100644 --- a/services/galley/schema/src/Run.hs +++ b/services/galley/schema/src/Run.hs @@ -84,6 +84,7 @@ import qualified V78_TeamFeatureOutlookCalIntegration import qualified V79_TeamFeatureMlsE2EId import qualified V80_AddConversationCodePassword import qualified V81_MLSSubconversation +import qualified V82_MLSDraft17 main :: IO () main = do @@ -153,7 +154,8 @@ main = do V78_TeamFeatureOutlookCalIntegration.migration, V79_TeamFeatureMlsE2EId.migration, V80_AddConversationCodePassword.migration, - V81_MLSSubconversation.migration + V81_MLSSubconversation.migration, + V82_MLSDraft17.migration -- When adding migrations here, don't forget to update -- 'schemaVersion' in Galley.Cassandra -- (see also docs/developer/cassandra-interaction.md) diff --git a/services/galley/schema/src/V82_MLSDraft17.hs b/services/galley/schema/src/V82_MLSDraft17.hs new file mode 100644 index 0000000000..c8f4410e20 --- /dev/null +++ b/services/galley/schema/src/V82_MLSDraft17.hs @@ -0,0 +1,31 @@ +-- This file is part of the Wire Server implementation. 
+-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . + +module V82_MLSDraft17 (migration) where + +import Cassandra.Schema +import Imports +import Text.RawString.QQ + +migration :: Migration +migration = + Migration 82 "Upgrade to MLS draft 17 structures" $ do + schema' + [r| ALTER TABLE mls_group_member_client + ADD (leaf_node_index int + ); + |] diff --git a/services/galley/src/Galley/API/Action.hs b/services/galley/src/Galley/API/Action.hs index 3916bbb914..2f9b52acc9 100644 --- a/services/galley/src/Galley/API/Action.hs +++ b/services/galley/src/Galley/API/Action.hs @@ -55,7 +55,6 @@ import Data.Singletons import Data.Time.Clock import Galley.API.Error import Galley.API.MLS.Removal -import Galley.API.MLS.Types (cmAssocs) import Galley.API.Util import Galley.App import Galley.Data.Conversation @@ -342,9 +341,6 @@ performAction tag origUser lconv action = do pure (mempty, action) SConversationDeleteTag -> do let deleteGroup groupId = do - cm <- E.lookupMLSClients groupId - let refs = cm & cmAssocs & map (snd . 
snd) - E.deleteKeyPackageRefs refs E.removeAllMLSClients groupId E.deleteAllProposals groupId diff --git a/services/galley/src/Galley/API/Create.hs b/services/galley/src/Galley/API/Create.hs index 58ed1273dd..80382af9ed 100644 --- a/services/galley/src/Galley/API/Create.hs +++ b/services/galley/src/Galley/API/Create.hs @@ -43,7 +43,6 @@ import Data.Time import qualified Data.UUID.Tagged as U import Galley.API.Error import Galley.API.MLS -import Galley.API.MLS.KeyPackage (nullKeyPackageRef) import Galley.API.MLS.Keys (getMLSRemovalKey) import Galley.API.Mapping import Galley.API.One2One @@ -70,7 +69,6 @@ import Polysemy.Error import Polysemy.Input import qualified Polysemy.TinyLog as P import Wire.API.Conversation hiding (Conversation, Member) -import Wire.API.Conversation.Protocol import Wire.API.Error import Wire.API.Error.Galley import Wire.API.Event.Conversation @@ -90,7 +88,6 @@ import Wire.API.Team.Permission hiding (self) createGroupConversationUpToV3 :: ( Member BrigAccess r, Member ConversationStore r, - Member MemberStore r, Member (ErrorS 'ConvAccessDenied) r, Member (Error FederationError) r, Member (Error InternalError) r, @@ -100,7 +97,6 @@ createGroupConversationUpToV3 :: Member (ErrorS 'NotConnected) r, Member (ErrorS 'MLSNotEnabled) r, Member (ErrorS 'MLSNonEmptyMemberList) r, - Member (ErrorS 'MLSMissingSenderClient) r, Member (ErrorS 'MissingLegalholdConsent) r, Member FederatorAccess r, Member GundeckAccess r, @@ -108,18 +104,17 @@ createGroupConversationUpToV3 :: Member (Input Opts) r, Member (Input UTCTime) r, Member LegalHoldStore r, + Member MemberStore r, Member TeamStore r, Member P.TinyLog r ) => Local UserId -> - Maybe ClientId -> Maybe ConnId -> NewConv -> Sem r ConversationResponse -createGroupConversationUpToV3 lusr mCreatorClient conn newConv = +createGroupConversationUpToV3 lusr conn newConv = createGroupConversationGeneric lusr - mCreatorClient conn newConv (const conversationCreated) @@ -129,7 +124,6 @@ 
createGroupConversationUpToV3 lusr mCreatorClient conn newConv = createGroupConversation :: ( Member BrigAccess r, Member ConversationStore r, - Member MemberStore r, Member (ErrorS 'ConvAccessDenied) r, Member (Error FederationError) r, Member (Error InternalError) r, @@ -139,7 +133,6 @@ createGroupConversation :: Member (ErrorS 'NotConnected) r, Member (ErrorS 'MLSNotEnabled) r, Member (ErrorS 'MLSNonEmptyMemberList) r, - Member (ErrorS 'MLSMissingSenderClient) r, Member (ErrorS 'MissingLegalholdConsent) r, Member FederatorAccess r, Member GundeckAccess r, @@ -147,18 +140,17 @@ createGroupConversation :: Member (Input Opts) r, Member (Input UTCTime) r, Member LegalHoldStore r, + Member MemberStore r, Member TeamStore r, Member P.TinyLog r ) => Local UserId -> - Maybe ClientId -> Maybe ConnId -> NewConv -> Sem r CreateGroupConversationResponse -createGroupConversation lusr mCreatorClient conn newConv = +createGroupConversation lusr conn newConv = createGroupConversationGeneric lusr - mCreatorClient conn newConv groupConversationCreated @@ -176,7 +168,6 @@ createGroupConversationGeneric :: Member (ErrorS 'NotConnected) r, Member (ErrorS 'MLSNotEnabled) r, Member (ErrorS 'MLSNonEmptyMemberList) r, - Member (ErrorS 'MLSMissingSenderClient) r, Member (ErrorS 'MissingLegalholdConsent) r, Member FederatorAccess r, Member GundeckAccess r, @@ -188,7 +179,6 @@ createGroupConversationGeneric :: Member P.TinyLog r ) => Local UserId -> - Maybe ClientId -> Maybe ConnId -> NewConv -> -- | The function that incorporates the failed to add remote users in the @@ -196,7 +186,7 @@ createGroupConversationGeneric :: -- ignores the first argument. 
(Set (Remote UserId) -> Local UserId -> Conversation -> Sem r resp) -> Sem r resp -createGroupConversationGeneric lusr mCreatorClient conn newConv convCreated = do +createGroupConversationGeneric lusr conn newConv convCreated = do (nc, fromConvSize -> allUsers) <- newRegularConversation lusr newConv let tinfo = newConvTeam newConv checkCreateConvPermissions lusr newConv tinfo allUsers @@ -218,14 +208,6 @@ createGroupConversationGeneric lusr mCreatorClient conn newConv convCreated = do failedToNotify <- do conv <- E.createConversation lcnv nc - -- set creator client for MLS conversations - case (convProtocol conv, mCreatorClient) of - (ProtocolProteus, _) -> pure () - (ProtocolMLS mlsMeta, Just c) -> - E.addMLSClients (cnvmlsGroupId mlsMeta) (tUntagged lusr) (Set.singleton (c, nullKeyPackageRef)) - (ProtocolMLS _mlsMeta, Nothing) -> throwS @'MLSMissingSenderClient - (ProtocolMixed _mlsMeta, _) -> pure () - -- NOTE: We only send (conversation) events to members of the conversation failedToNotify <- notifyCreatedConversation lusr conn conv -- We already added all the invitees, but now remove from the conversation diff --git a/services/galley/src/Galley/API/MLS/Conversation.hs b/services/galley/src/Galley/API/MLS/Conversation.hs index fb2396d9c8..9202755c0f 100644 --- a/services/galley/src/Galley/API/MLS/Conversation.hs +++ b/services/galley/src/Galley/API/MLS/Conversation.hs @@ -42,7 +42,8 @@ mkMLSConversation conv = mcLocalMembers = Data.convLocalMembers conv, mcRemoteMembers = Data.convRemoteMembers conv, mcMLSData = mlsData, - mcMembers = cm + mcMembers = cm, + mcIndexMap = mempty -- TODO } mcConv :: MLSConversation -> Data.Conversation diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 44b2dad48f..8a4cf77740 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -48,7 +48,6 @@ import Galley.API.Action import Galley.API.Error import 
Galley.API.MLS.Conversation import Galley.API.MLS.Enabled -import Galley.API.MLS.KeyPackage import Galley.API.MLS.Propagate import Galley.API.MLS.Removal import Galley.API.MLS.Types @@ -101,9 +100,14 @@ import Wire.API.Message import Wire.API.User.Client -- TODO: --- [ ] replace ref with index in remove proposals +-- [x] replace ref with index in remove proposals -- [ ] validate leaf nodes and key packages locally on galley -- [ ] remove MissingSenderClient error +-- [ ] PreSharedKey proposal +-- [ ] remove all key package ref mapping +-- [ ] initialise index maps +-- [ ] newtype for leaf node indices +-- [ ] compute new indices for add proposals data IncomingMessage = IncomingMessage { epoch :: Epoch, @@ -341,13 +345,13 @@ postMLSCommitBundleToLocalConv qusr c conn bundle lConvOrSubId = do senderIdentity <- getSenderIdentity qusr c (Just bundle.sender) action <- getCommitData lConvOrSub bundle.epoch bundle.commit.rmValue - -- check that the welcome message matches the action - for_ bundle.welcome $ \welcome -> - when - ( Set.fromList (map gsNewMember (welSecrets (rmValue welcome))) - /= Set.fromList (map (snd . snd) (cmAssocs (paAdd action))) - ) - $ throwS @'MLSWelcomeMismatch + -- TODO: check that the welcome message matches the action + -- for_ bundle.welcome $ \welcome -> + -- when + -- ( Set.fromList (map gsNewMember (welSecrets (rmValue welcome))) + -- /= Set.fromList (map (snd . 
snd) (cmAssocs (paAdd action))) + -- ) + -- $ throwS @'MLSWelcomeMismatch events <- processCommitWithAction senderIdentity @@ -464,7 +468,6 @@ postMLSMessageToLocalConv :: ( HasProposalEffects r, Member (ErrorS 'ConvNotFound) r, Member (ErrorS 'MissingLegalholdConsent) r, - Member (ErrorS 'MLSClientSenderUserMismatch) r, Member (ErrorS 'MLSCommitMissingReferences) r, Member (ErrorS 'MLSProposalNotFound) r, Member (ErrorS 'MLSSelfRemovalNotAllowed) r, @@ -582,11 +585,11 @@ instance Semigroup ProposalAction where instance Monoid ProposalAction where mempty = ProposalAction mempty mempty mempty -paAddClient :: Qualified (UserId, (ClientId, KeyPackageRef)) -> ProposalAction -paAddClient quc = mempty {paAdd = Map.singleton (fmap fst quc) (uncurry Map.singleton (snd (qUnqualified quc)))} +paAddClient :: ClientIdentity -> Word32 -> ProposalAction +paAddClient cid idx = mempty {paAdd = cmSingleton cid idx} -paRemoveClient :: Qualified (UserId, (ClientId, KeyPackageRef)) -> ProposalAction -paRemoveClient quc = mempty {paRemove = Map.singleton (fmap fst quc) (uncurry Map.singleton (snd (qUnqualified quc)))} +paRemoveClient :: ClientIdentity -> Word32 -> ProposalAction +paRemoveClient cid idx = mempty {paRemove = cmSingleton cid idx} paExternalInitPresent :: ProposalAction paExternalInitPresent = mempty {paExternalInit = Any True} @@ -616,7 +619,6 @@ processCommit :: ( HasProposalEffects r, Member (ErrorS 'ConvNotFound) r, Member (ErrorS 'MissingLegalholdConsent) r, - Member (ErrorS 'MLSClientSenderUserMismatch) r, Member (ErrorS 'MLSCommitMissingReferences) r, Member (ErrorS 'MLSProposalNotFound) r, Member (ErrorS 'MLSSelfRemovalNotAllowed) r, @@ -642,8 +644,6 @@ processExternalCommit :: Member ConversationStore r, Member (Error MLSProtocolError) r, Member (ErrorS 'ConvNotFound) r, - Member (ErrorS 'MLSClientSenderUserMismatch) r, - Member (ErrorS 'MLSKeyPackageRefNotFound) r, Member (ErrorS 'MLSStaleMessage) r, Member (ErrorS 'MLSSubConvClientNotInParent) r, Member 
ExternalAccess r, @@ -663,82 +663,73 @@ processExternalCommit :: ProposalAction -> Maybe UpdatePath -> Sem r () -processExternalCommit senderIdentity lConvOrSub epoch action updatePath = - withCommitLock (cnvmlsGroupId . mlsMetaConvOrSub . tUnqualified $ lConvOrSub) epoch $ do - let convOrSub = tUnqualified lConvOrSub - leafNode <- - upLeaf - <$> note - (mlsProtocolError "External commits need an update path") - updatePath - when (paExternalInit action == mempty) $ - throw . mlsProtocolError $ - "The external commit is missing an external init proposal" - unless (paAdd action == mempty) $ - throw . mlsProtocolError $ - "The external commit must not have add proposals" - - -- validate and update mapping in brig - validateLeafNode senderIdentity leafNode >>= \case - Left errMsg -> - throw $ - mlsProtocolError ("Tried to add invalid LeafNode: " <> errMsg) - Right _ -> pure () - - -- only members can join a subconversation - forOf_ _SubConv convOrSub $ \(mlsConv, _) -> - unless (isClientMember senderIdentity (mcMembers mlsConv)) $ - throwS @'MLSSubConvClientNotInParent - - -- check if there is a key package ref in the remove proposal - remRef <- - if Map.null (paRemove action) - then pure Nothing - else do - (remCid, r) <- derefUser (paRemove action) (cidQualifiedUser senderIdentity) - unless (cidQualifiedUser senderIdentity == cidQualifiedUser remCid) - . throw - . mlsProtocolError - $ "The external commit attempts to remove a client from a user other than themselves" - pure (Just r) +processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do + let convOrSub = tUnqualified lConvOrSub + leafNode <- + upLeaf + <$> note + (mlsProtocolError "External commits need an update path") + updatePath + when (paExternalInit action == mempty) $ + throw . mlsProtocolError $ + "The external commit is missing an external init proposal" + unless (paAdd action == mempty) $ + throw . 
mlsProtocolError $ + "The external commit must not have add proposals" + + -- validate and update mapping in brig + validateLeafNode senderIdentity leafNode >>= \case + Left errMsg -> + throw $ + mlsProtocolError ("Tried to add invalid LeafNode: " <> errMsg) + Right _ -> pure () + + -- only members can join a subconversation + forOf_ _SubConv convOrSub $ \(mlsConv, _) -> + unless (isClientMember senderIdentity (mcMembers mlsConv)) $ + throwS @'MLSSubConvClientNotInParent + + let groupId = cnvmlsGroupId (mlsMetaConvOrSub convOrSub) + + withCommitLock groupId epoch $ do + -- validate remove proposal: an external commit can contain + -- + -- > At most one Remove proposal, with which the joiner removes an old + -- > version of themselves + remIndex <- case cmAssocs (paRemove action) of + [] -> pure Nothing + [(_, idx :: Word32)] -> do + cid <- + note (mlsProtocolError "Invalid index in remove proposal") $ + indexToClient (indicesConvOrSub convOrSub) idx + unless (cid == senderIdentity) $ + throw $ + mlsProtocolError "Only the self client can be removed by an external commit" + pure (Just idx) + _ -> throw (mlsProtocolError "Multiple remove proposals in external commits not allowed") -- increment epoch number lConvOrSub' <- for lConvOrSub incrementEpoch -- fetch backend remove proposals of the previous epoch - kpRefs <- + indicesInRemoveProposals <- -- skip remove proposals of already removed by the external commit - filter (maybe (const True) (/=) remRef) - <$> getPendingBackendRemoveProposals (cnvmlsGroupId . mlsMetaConvOrSub . 
tUnqualified $ lConvOrSub') epoch + filter (maybe (const True) (/=) remIndex) + <$> getPendingBackendRemoveProposals groupId epoch + -- requeue backend remove proposals for the current epoch let cm = membersConvOrSub (tUnqualified lConvOrSub') - createAndSendRemoveProposals lConvOrSub' kpRefs (cidQualifiedUser senderIdentity) cm - where - derefUser :: ClientMap -> Qualified UserId -> Sem r (ClientIdentity, KeyPackageRef) - derefUser cm user = case Map.assocs cm of - [(u, clients)] -> do - unless (user == u) $ - throwS @'MLSClientSenderUserMismatch - ref <- ensureSingleton clients - ci <- derefKeyPackage ref - unless (cidQualifiedUser ci == user) $ - throwS @'MLSClientSenderUserMismatch - pure (ci, ref) - _ -> throwRemProposal - ensureSingleton :: Map k a -> Sem r a - ensureSingleton m = case Map.elems m of - [e] -> pure e - _ -> throwRemProposal - throwRemProposal = - throw . mlsProtocolError $ - "The external commit must have at most one remove proposal" + createAndSendRemoveProposals + lConvOrSub' + indicesInRemoveProposals + (cidQualifiedUser senderIdentity) + cm processCommitWithAction :: forall r. 
( HasProposalEffects r, Member (ErrorS 'ConvNotFound) r, Member (ErrorS 'MissingLegalholdConsent) r, - Member (ErrorS 'MLSClientSenderUserMismatch) r, Member (ErrorS 'MLSCommitMissingReferences) r, Member (ErrorS 'MLSSelfRemovalNotAllowed) r, Member (ErrorS 'MLSStaleMessage) r, @@ -831,26 +822,16 @@ applyProposal :: GroupId -> Proposal -> Sem r ProposalAction -applyProposal _convOrSubConvId groupId (AddProposal kp) = do - ref <- kpRef' kp & note (mlsProtocolError "Could not compute ref of a key package in an Add proposal") - mbClientIdentity <- getClientByKeyPackageRef ref - clientIdentity <- case mbClientIdentity of - Nothing -> do - -- TODO: validate key package - cid <- - either - (\_ -> throw (mlsProtocolError "Invalid key package in an Add proposal")) - pure - $ keyPackageIdentity kp.rmValue - addMLSClients groupId (cidQualifiedUser cid) (Set.singleton (ciClient cid, ref)) - pure cid - Just cid -> - -- ad-hoc add proposal in commit, the key package has been claimed before - pure cid - pure (paAddClient . (<$$>) (,ref) . cidQualifiedClient $ clientIdentity) -applyProposal _convOrSubConvId _groupId (RemoveProposal ref) = do - qclient <- cidQualifiedClient <$> derefKeyPackage ref - pure (paRemoveClient ((,ref) <$$> qclient)) +applyProposal _convOrSubConvId _groupId (AddProposal kp) = do + let idx = error "TODO: compute new index" + -- TODO: validate key package + cid <- getKeyPackageIdentity kp.rmValue + -- TODO: we probably should not update the conversation state here + -- addMLSClients groupId (cidQualifiedUser cid) (Set.singleton (ciClient cid, idx)) + pure (paAddClient cid idx) +applyProposal _convOrSubConvId _groupId (RemoveProposal idx) = do + let cid = error "TODO: lookup in index map" + pure (paRemoveClient cid idx) applyProposal _convOrSubConvId _groupId (ExternalInitProposal _) = -- only record the fact there was an external init proposal, but do not -- process it in any way. 
@@ -950,11 +931,7 @@ checkExternalProposalUser qusr prop = do loc ( \lusr -> case prop of AddProposal kp -> do - ClientIdentity {ciUser, ciClient} <- - either - (const $ throwS @'MLSUnsupportedProposal) - pure - (keyPackageIdentity kp.rmValue) + ClientIdentity {ciUser, ciClient} <- getKeyPackageIdentity kp.rmValue -- requesting user must match key package owner when (tUnqualified lusr /= ciUser) $ throwS @'MLSUnsupportedProposal -- client referenced in key package must be one of the user's clients @@ -1189,6 +1166,14 @@ removeMembers qusr con lconvOrSub users = case tUnqualified lconvOrSub of $ users SubConv _ _ -> pure [] +getKeyPackageIdentity :: + Member (ErrorS 'MLSUnsupportedProposal) r => + KeyPackage -> + Sem r ClientIdentity +getKeyPackageIdentity = + either (\_ -> throwS @'MLSUnsupportedProposal) pure + . keyPackageIdentity + handleNoChanges :: Monoid a => Sem (Error NoChanges ': r) a -> Sem r a handleNoChanges = fmap fold . runError diff --git a/services/galley/src/Galley/API/MLS/Removal.hs b/services/galley/src/Galley/API/MLS/Removal.hs index 42ead7c84a..90b8b54a2c 100644 --- a/services/galley/src/Galley/API/MLS/Removal.hs +++ b/services/galley/src/Galley/API/MLS/Removal.hs @@ -43,7 +43,6 @@ import Polysemy.TinyLog import qualified System.Logger as Log import Wire.API.Conversation.Protocol import Wire.API.MLS.Credential -import Wire.API.MLS.KeyPackage import Wire.API.MLS.Message import Wire.API.MLS.Proposal import Wire.API.MLS.Serialisation @@ -61,7 +60,7 @@ createAndSendRemoveProposals :: Foldable t ) => Local ConvOrSubConv -> - t KeyPackageRef -> + t Word32 -> Qualified UserId -> -- | The client map that has all the recipients of the message. This is an -- argument, and not constructed within the function, because of a special @@ -71,15 +70,15 @@ createAndSendRemoveProposals :: -- conversation/subconversation client maps. 
ClientMap -> Sem r () -createAndSendRemoveProposals lConvOrSubConv cs qusr cm = do +createAndSendRemoveProposals lConvOrSubConv indices qusr cm = do let meta = mlsMetaConvOrSub (tUnqualified lConvOrSubConv) mKeyPair <- getMLSRemovalKey case mKeyPair of Nothing -> do warn $ Log.msg ("No backend removal key is configured (See 'mlsPrivateKeyPaths' in galley's config). Not able to remove client from MLS conversation." :: Text) Just (secKey, pubKey) -> do - for_ cs $ \kpref -> do - let proposal = mkRemoveProposal kpref + for_ indices $ \idx -> do + let proposal = mkRawMLS (RemoveProposal idx) msg = mkSignedMessage secKey @@ -111,13 +110,13 @@ removeClientsWithClientMapRecursively :: Foldable f ) => Local MLSConversation -> - (ConvOrSubConv -> f KeyPackageRef) -> + (ConvOrSubConv -> f Word32) -> Qualified UserId -> Sem r () -removeClientsWithClientMapRecursively lMlsConv getKPs qusr = do +removeClientsWithClientMapRecursively lMlsConv getIndices qusr = do let mainConv = fmap Conv lMlsConv cm = mcMembers (tUnqualified lMlsConv) - createAndSendRemoveProposals mainConv (getKPs (tUnqualified mainConv)) qusr cm + createAndSendRemoveProposals mainConv (getIndices (tUnqualified mainConv)) qusr cm -- remove this client from all subconversations subs <- listSubConversations' (mcId (tUnqualified lMlsConv)) @@ -126,7 +125,7 @@ removeClientsWithClientMapRecursively lMlsConv getKPs qusr = do createAndSendRemoveProposals subConv - (getKPs (tUnqualified subConv)) + (getIndices (tUnqualified subConv)) qusr cm @@ -149,8 +148,8 @@ removeClient :: removeClient lc qusr cid = do mMlsConv <- mkMLSConversation (tUnqualified lc) for_ mMlsConv $ \mlsConv -> do - let getKPs = cmLookupRef (mkClientIdentity qusr cid) . membersConvOrSub - removeClientsWithClientMapRecursively (qualifyAs lc mlsConv) getKPs qusr + let getIndices = cmLookupIndex (mkClientIdentity qusr cid) . 
membersConvOrSub + removeClientsWithClientMapRecursively (qualifyAs lc mlsConv) getIndices qusr -- | Send remove proposals for all clients of the user to the local conversation. removeUser :: diff --git a/services/galley/src/Galley/API/MLS/SubConversation.hs b/services/galley/src/Galley/API/MLS/SubConversation.hs index d22bad99d5..59b35b260a 100644 --- a/services/galley/src/Galley/API/MLS/SubConversation.hs +++ b/services/galley/src/Galley/API/MLS/SubConversation.hs @@ -142,7 +142,8 @@ getLocalSubConversation qusr lconv sconv = do cnvmlsEpochTimestamp = Nothing, cnvmlsCipherSuite = suite }, - scMembers = mkClientMap [] + scMembers = mkClientMap [], + scIndexMap = mempty -- TODO } pure sub Just sub -> pure sub @@ -423,9 +424,9 @@ leaveLocalSubConversation cid lcnv sub = do subConv <- noteS @'ConvNotFound =<< Eff.getSubConversation (tUnqualified lcnv) sub - kp <- + idx <- note (mlsProtocolError "Client is not a member of the subconversation") $ - cmLookupRef cid (scMembers subConv) + cmLookupIndex cid (scMembers subConv) -- remove the leaver from the member list let (gid, epoch) = (cnvmlsGroupId &&& cnvmlsEpoch) (scMLSData subConv) Eff.removeMLSClients gid (cidQualifiedUser cid) . Set.singleton . 
ciClient $ cid @@ -440,7 +441,7 @@ leaveLocalSubConversation cid lcnv sub = do else createAndSendRemoveProposals (qualifyAs lcnv (SubConv mlsConv subConv)) - (Identity kp) + (Identity idx) (cidQualifiedUser cid) cm diff --git a/services/galley/src/Galley/API/MLS/Types.hs b/services/galley/src/Galley/API/MLS/Types.hs index 69f0f795a0..e4d1d3254d 100644 --- a/services/galley/src/Galley/API/MLS/Types.hs +++ b/services/galley/src/Galley/API/MLS/Types.hs @@ -20,6 +20,8 @@ module Galley.API.MLS.Types where import Data.Domain import Data.Id +import Data.IntMap (IntMap) +import qualified Data.IntMap as IntMap import qualified Data.Map as Map import Data.Qualified import Galley.Types.Conversations.Members @@ -27,20 +29,26 @@ import Imports import Wire.API.Conversation import Wire.API.Conversation.Protocol import Wire.API.MLS.Credential -import Wire.API.MLS.KeyPackage import Wire.API.MLS.SubConversation -type ClientMap = Map (Qualified UserId) (Map ClientId KeyPackageRef) +newtype IndexMap = IndexMap {unIndexMap :: IntMap ClientIdentity} + deriving (Eq, Show) + deriving newtype (Semigroup, Monoid) + +indexToClient :: IndexMap -> Word32 -> Maybe ClientIdentity +indexToClient m i = IntMap.lookup (fromIntegral i) (unIndexMap m) + +type ClientMap = Map (Qualified UserId) (Map ClientId Word32) -mkClientMap :: [(Domain, UserId, ClientId, KeyPackageRef)] -> ClientMap +mkClientMap :: [(Domain, UserId, ClientId, Int32)] -> ClientMap mkClientMap = foldr addEntry mempty where - addEntry :: (Domain, UserId, ClientId, KeyPackageRef) -> ClientMap -> ClientMap - addEntry (dom, usr, c, kpr) = - Map.insertWith (<>) (Qualified usr dom) (Map.singleton c kpr) + addEntry :: (Domain, UserId, ClientId, Int32) -> ClientMap -> ClientMap + addEntry (dom, usr, c, kpi) = + Map.insertWith (<>) (Qualified usr dom) (Map.singleton c (fromIntegral kpi)) -cmLookupRef :: ClientIdentity -> ClientMap -> Maybe KeyPackageRef -cmLookupRef cid cm = do +cmLookupIndex :: ClientIdentity -> ClientMap -> Maybe Word32 
+cmLookupIndex cid cm = do clients <- Map.lookup (cidQualifiedUser cid) cm Map.lookup (ciClient cid) clients @@ -54,13 +62,19 @@ cmRemoveClient cid cm = case Map.lookup (cidQualifiedUser cid) cm of else Map.insert (cidQualifiedUser cid) clients' cm isClientMember :: ClientIdentity -> ClientMap -> Bool -isClientMember ci = isJust . cmLookupRef ci +isClientMember ci = isJust . cmLookupIndex ci -cmAssocs :: ClientMap -> [(Qualified UserId, (ClientId, KeyPackageRef))] +cmAssocs :: ClientMap -> [(ClientIdentity, Word32)] cmAssocs cm = do (quid, clients) <- Map.assocs cm - (clientId, ref) <- Map.assocs clients - pure (quid, (clientId, ref)) + (clientId, idx) <- Map.assocs clients + pure (mkClientIdentity quid clientId, idx) + +cmSingleton :: ClientIdentity -> Word32 -> ClientMap +cmSingleton cid idx = + Map.singleton + (cidQualifiedUser cid) + (Map.singleton (ciClient cid) idx) -- | Inform a handler for 'POST /conversations/list-ids' if the MLS global team -- conversation and the MLS self-conversation should be included in the @@ -74,7 +88,8 @@ data MLSConversation = MLSConversation mcMLSData :: ConversationMLSData, mcLocalMembers :: [LocalMember], mcRemoteMembers :: [RemoteMember], - mcMembers :: ClientMap + mcMembers :: ClientMap, + mcIndexMap :: IndexMap } deriving (Show) @@ -82,13 +97,14 @@ data SubConversation = SubConversation { scParentConvId :: ConvId, scSubConvId :: SubConvId, scMLSData :: ConversationMLSData, - scMembers :: ClientMap + scMembers :: ClientMap, + scIndexMap :: IndexMap } deriving (Eq, Show) toPublicSubConv :: Qualified SubConversation -> PublicSubConversation toPublicSubConv (Qualified (SubConversation {..}) domain) = - let members = fmap (\(quid, (cid, _kp)) -> mkClientIdentity quid cid) (cmAssocs scMembers) + let members = map fst (cmAssocs scMembers) in PublicSubConversation { pscParentConvId = Qualified scParentConvId domain, pscSubConvId = scSubConvId, @@ -109,6 +125,10 @@ membersConvOrSub :: ConvOrSubConv -> ClientMap membersConvOrSub (Conv 
c) = mcMembers c membersConvOrSub (SubConv _ s) = scMembers s +indicesConvOrSub :: ConvOrSubConv -> IndexMap +indicesConvOrSub (Conv c) = mcIndexMap c +indicesConvOrSub (SubConv _ s) = scIndexMap s + convOfConvOrSub :: ConvOrSubChoice c s -> c convOfConvOrSub (Conv c) = c convOfConvOrSub (SubConv c _) = c diff --git a/services/galley/src/Galley/API/MLS/Util.hs b/services/galley/src/Galley/API/MLS/Util.hs index 61d2445bf5..f59ca80f78 100644 --- a/services/galley/src/Galley/API/MLS/Util.hs +++ b/services/galley/src/Galley/API/MLS/Util.hs @@ -37,7 +37,6 @@ import Wire.API.Error import Wire.API.Error.Galley import Wire.API.MLS.Epoch import Wire.API.MLS.Group -import Wire.API.MLS.KeyPackage import Wire.API.MLS.Proposal import Wire.API.MLS.Serialisation @@ -72,7 +71,7 @@ getPendingBackendRemoveProposals :: ) => GroupId -> Epoch -> - Sem r [KeyPackageRef] + Sem r [Word32] getPendingBackendRemoveProposals gid epoch = do proposals <- getAllPendingProposals gid epoch catMaybes @@ -80,7 +79,7 @@ getPendingBackendRemoveProposals gid epoch = do proposals ( \case (Just ProposalOriginBackend, proposal) -> case rmValue proposal of - RemoveProposal kp -> pure . 
Just $ kp + RemoveProposal i -> pure (Just i) _ -> pure Nothing (Just ProposalOriginClient, _) -> pure Nothing (Nothing, _) -> do diff --git a/services/galley/src/Galley/API/Update.hs b/services/galley/src/Galley/API/Update.hs index 45c36abf16..39297cf6b2 100644 --- a/services/galley/src/Galley/API/Update.hs +++ b/services/galley/src/Galley/API/Update.hs @@ -86,7 +86,6 @@ import Data.Time import Galley.API.Action import Galley.API.Error import Galley.API.Federation (onConversationUpdated) -import Galley.API.MLS.KeyPackage (nullKeyPackageRef) import Galley.API.Mapping import Galley.API.Message import qualified Galley.API.Query as Query @@ -135,7 +134,6 @@ import Wire.API.Federation.API import Wire.API.Federation.API.Galley import Wire.API.Federation.Error import Wire.API.MLS.CipherSuite -import Wire.API.MLS.Group import Wire.API.Message import Wire.API.Password (mkSafePassword) import Wire.API.Provider.Service (ServiceRef) @@ -690,19 +688,17 @@ updateConversationProtocolWithLocalUser :: Member (ErrorS 'ConvInvalidProtocolTransition) r, Member (ErrorS 'ConvMemberNotFound) r, Member (Error FederationError) r, - Member MemberStore r, Member ConversationStore r ) => Local UserId -> - ClientId -> ConnId -> Qualified ConvId -> ProtocolUpdate -> Sem r () -updateConversationProtocolWithLocalUser lusr client conn qcnv update = +updateConversationProtocolWithLocalUser lusr _conn qcnv update = foldQualified lusr - (\lcnv -> updateLocalConversationProtocol (tUntagged lusr) client (Just conn) lcnv update) + (\lcnv -> updateLocalConversationProtocol (tUntagged lusr) lcnv update) (\_rcnv -> throw FederationNotImplemented) qcnv @@ -711,22 +707,18 @@ updateLocalConversationProtocol :: ( Member (ErrorS 'ConvNotFound) r, Member (ErrorS 'ConvInvalidProtocolTransition) r, Member (ErrorS 'ConvMemberNotFound) r, - Member MemberStore r, Member ConversationStore r ) => Qualified UserId -> - ClientId -> - Maybe ConnId -> Local ConvId -> ProtocolUpdate -> Sem r () 
-updateLocalConversationProtocol qusr client _mconn lcnv (ProtocolUpdate newProtocol) = do +updateLocalConversationProtocol qusr lcnv (ProtocolUpdate newProtocol) = do conv <- E.getConversation (tUnqualified lcnv) >>= noteS @'ConvNotFound void $ ensureOtherMember lcnv qusr conv case (protocolTag (convProtocol conv), newProtocol) of - (ProtocolProteusTag, ProtocolMixedTag) -> do + (ProtocolProteusTag, ProtocolMixedTag) -> E.updateToMixedProtocol lcnv MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 - E.addMLSClients (convToGroupId lcnv) qusr (Set.singleton (client, nullKeyPackageRef)) (ProtocolProteusTag, ProtocolProteusTag) -> pure () (ProtocolMixedTag, ProtocolMixedTag) -> diff --git a/services/galley/src/Galley/Cassandra/Conversation/Members.hs b/services/galley/src/Galley/Cassandra/Conversation/Members.hs index 7665edb26e..7d0eee8260 100644 --- a/services/galley/src/Galley/Cassandra/Conversation/Members.hs +++ b/services/galley/src/Galley/Cassandra/Conversation/Members.hs @@ -48,7 +48,6 @@ import qualified UnliftIO import Wire.API.Conversation.Member hiding (Member) import Wire.API.Conversation.Role import Wire.API.MLS.Group -import Wire.API.MLS.KeyPackage import Wire.API.Provider.Service -- | Add members to a local conversation. @@ -342,12 +341,12 @@ removeLocalMembersFromRemoteConv (tUntagged -> Qualified conv convDomain) victim setConsistency LocalQuorum for_ victims $ \u -> addPrepQuery Cql.deleteUserRemoteConv (u, convDomain, conv) -addMLSClients :: GroupId -> Qualified UserId -> Set.Set (ClientId, KeyPackageRef) -> Client () +addMLSClients :: GroupId -> Qualified UserId -> Set.Set (ClientId, Word32) -> Client () addMLSClients groupId (Qualified usr domain) cs = retry x5 . 
batch $ do setType BatchLogged setConsistency LocalQuorum - for_ cs $ \(c, kpr) -> - addPrepQuery Cql.addMLSClient (groupId, domain, usr, c, kpr) + for_ cs $ \(c, idx) -> + addPrepQuery Cql.addMLSClient (groupId, domain, usr, c, fromIntegral idx) removeMLSClients :: GroupId -> Qualified UserId -> Set.Set ClientId -> Client () removeMLSClients groupId (Qualified usr domain) cs = retry x5 . batch $ do diff --git a/services/galley/src/Galley/Cassandra/Queries.hs b/services/galley/src/Galley/Cassandra/Queries.hs index c838ddd00f..5c7fe09a00 100644 --- a/services/galley/src/Galley/Cassandra/Queries.hs +++ b/services/galley/src/Galley/Cassandra/Queries.hs @@ -34,7 +34,6 @@ import Wire.API.Conversation.Code import Wire.API.Conversation.Protocol import Wire.API.Conversation.Role import Wire.API.MLS.CipherSuite -import Wire.API.MLS.KeyPackage import Wire.API.MLS.PublicGroupState import Wire.API.MLS.SubConversation import Wire.API.Password (Password) @@ -462,8 +461,8 @@ rmMemberClient c = -- MLS Clients -------------------------------------------------------------- -addMLSClient :: PrepQuery W (GroupId, Domain, UserId, ClientId, KeyPackageRef) () -addMLSClient = "insert into mls_group_member_client (group_id, user_domain, user, client, key_package_ref) values (?, ?, ?, ?, ?)" +addMLSClient :: PrepQuery W (GroupId, Domain, UserId, ClientId, Int32) () +addMLSClient = "insert into mls_group_member_client (group_id, user_domain, user, client, leaf_node_index) values (?, ?, ?, ?, ?)" removeMLSClient :: PrepQuery W (GroupId, Domain, UserId, ClientId) () removeMLSClient = "delete from mls_group_member_client where group_id = ? and user_domain = ? and user = ? and client = ?" @@ -471,8 +470,8 @@ removeMLSClient = "delete from mls_group_member_client where group_id = ? and us removeAllMLSClients :: PrepQuery W (Identity GroupId) () removeAllMLSClients = "DELETE FROM mls_group_member_client WHERE group_id = ?" 
-lookupMLSClients :: PrepQuery R (Identity GroupId) (Domain, UserId, ClientId, KeyPackageRef) -lookupMLSClients = "select user_domain, user, client, key_package_ref from mls_group_member_client where group_id = ?" +lookupMLSClients :: PrepQuery R (Identity GroupId) (Domain, UserId, ClientId, Int32) +lookupMLSClients = "select user_domain, user, client, leaf_node_index from mls_group_member_client where group_id = ?" acquireCommitLock :: PrepQuery W (GroupId, Epoch, Int32) Row acquireCommitLock = "insert into mls_commit_locks (group_id, epoch) values (?, ?) if not exists using ttl ?" diff --git a/services/galley/src/Galley/Cassandra/SubConversation.hs b/services/galley/src/Galley/Cassandra/SubConversation.hs index ad14312114..3dbe6c842c 100644 --- a/services/galley/src/Galley/Cassandra/SubConversation.hs +++ b/services/galley/src/Galley/Cassandra/SubConversation.hs @@ -56,7 +56,8 @@ selectSubConversation convId subConvId = do cnvmlsEpochTimestamp = epochTimestamp epoch epochWritetime, cnvmlsCipherSuite = suite }, - scMembers = cm + scMembers = cm, + scIndexMap = mempty -- TODO } insertSubConversation :: ConvId -> SubConvId -> CipherSuiteTag -> Epoch -> GroupId -> Maybe OpaquePublicGroupState -> Client () diff --git a/services/galley/src/Galley/Effects/MemberStore.hs b/services/galley/src/Galley/Effects/MemberStore.hs index bdc61c9016..10b7e168d2 100644 --- a/services/galley/src/Galley/Effects/MemberStore.hs +++ b/services/galley/src/Galley/Effects/MemberStore.hs @@ -60,7 +60,6 @@ import Imports import Polysemy import Wire.API.Conversation.Member hiding (Member) import Wire.API.MLS.Group -import Wire.API.MLS.KeyPackage import Wire.API.Provider.Service data MemberStore m a where @@ -77,7 +76,7 @@ data MemberStore m a where SetOtherMember :: Local ConvId -> Qualified UserId -> OtherMemberUpdate -> MemberStore m () DeleteMembers :: ConvId -> UserList UserId -> MemberStore m () DeleteMembersInRemoteConversation :: Remote ConvId -> [UserId] -> MemberStore m () - 
AddMLSClients :: GroupId -> Qualified UserId -> Set (ClientId, KeyPackageRef) -> MemberStore m () + AddMLSClients :: GroupId -> Qualified UserId -> Set (ClientId, Word32) -> MemberStore m () RemoveMLSClients :: GroupId -> Qualified UserId -> Set ClientId -> MemberStore m () RemoveAllMLSClients :: GroupId -> MemberStore m () LookupMLSClients :: GroupId -> MemberStore m ClientMap diff --git a/services/galley/test/integration/API/Util.hs b/services/galley/test/integration/API/Util.hs index ea0d41cc9c..4731df544e 100644 --- a/services/galley/test/integration/API/Util.hs +++ b/services/galley/test/integration/API/Util.hs @@ -2909,7 +2909,7 @@ wsAssertBackendRemoveProposalWithEpoch fromUser convId kpref epoch n = do pure bs wsAssertBackendRemoveProposal :: HasCallStack => Qualified UserId -> Qualified ConvOrSubConvId -> KeyPackageRef -> Notification -> IO ByteString -wsAssertBackendRemoveProposal fromUser cnvOrSubCnv kpref n = do +wsAssertBackendRemoveProposal fromUser cnvOrSubCnv _kpref n = do let e = List1.head (WS.unpackPayload n) ntfTransient n @?= False evtConv e @?= convOfConvOrSub <$> cnvOrSubCnv @@ -2922,7 +2922,7 @@ wsAssertBackendRemoveProposal fromUser cnvOrSubCnv kpref n = do pmsg.content.rmValue.sender @?= SenderExternal 0 case pmsg.content.rmValue.content of FramedContentProposal prop -> case prop.rmValue of - RemoveProposal kpRefRemove -> kpRefRemove @?= kpref + RemoveProposal kpRefRemove -> kpRefRemove @?= error "kpref" otherProp -> assertFailure $ "Expected RemoveProposal but got " <> show otherProp otherPayload -> assertFailure $ "Expected ProposalMessage but got " <> show otherPayload _ -> assertFailure $ "Expected PublicMessage" From 0689a2642b5169315e674b84361ff4ed55df83a9 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Mon, 3 Apr 2023 16:51:41 +0200 Subject: [PATCH 07/75] Adapt integration tests to remove proposal changes --- services/galley/src/Galley/API/MLS/Message.hs | 1 + services/galley/src/Galley/API/MLS/Util.hs | 2 +- 
services/galley/test/integration/API/MLS.hs | 13 ++- .../galley/test/integration/API/MLS/Util.hs | 82 +++++++++---------- services/galley/test/integration/API/Util.hs | 13 ++- 5 files changed, 55 insertions(+), 56 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 8a4cf77740..e216e8c7da 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -108,6 +108,7 @@ import Wire.API.User.Client -- [ ] initialise index maps -- [ ] newtype for leaf node indices -- [ ] compute new indices for add proposals +-- [ ] remove prefixes from rmValue and rmRaw data IncomingMessage = IncomingMessage { epoch :: Epoch, diff --git a/services/galley/src/Galley/API/MLS/Util.hs b/services/galley/src/Galley/API/MLS/Util.hs index f59ca80f78..927e265937 100644 --- a/services/galley/src/Galley/API/MLS/Util.hs +++ b/services/galley/src/Galley/API/MLS/Util.hs @@ -108,7 +108,7 @@ withCommitLock gid epoch action = ) (const $ releaseCommitLock gid epoch) $ \_ -> do - -- FUTUREWORK: fetch epoch again and check that is matches + -- FUTUREWORK: fetch epoch again and check that it matches action where ttl = fromIntegral (600 :: Int) -- 10 minutes diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 744ba79a12..f47fed47c9 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -356,9 +356,8 @@ testWelcomeNoKey = do void $ setupMLSGroup alice1 -- add bob using an "out-of-band" key package - (_, ref) <- generateKeyPackage bob1 - kp <- keyPackageFile bob1 ref - commit <- createAddCommitWithKeyPackages alice1 [(bob1, kp)] + (kp, _) <- generateKeyPackage bob1 + commit <- createAddCommitWithKeyPackages alice1 [(bob1, kp.rmRaw)] welcome <- liftIO $ case mpWelcome commit of Nothing -> assertFailure "Expected welcome message" Just w -> pure w @@ -556,10 +555,10 @@ 
testAddClientPartial = do void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit -- now bob2 and bob3 upload key packages, and alice adds bob2 only - kp <- uploadNewKeyPackage bob2 >>= keyPackageFile bob2 + kp <- uploadNewKeyPackage bob2 void $ uploadNewKeyPackage bob3 void $ - createAddCommitWithKeyPackages alice1 [(bob2, kp)] + createAddCommitWithKeyPackages alice1 [(bob2, kp.rmRaw)] >>= sendAndConsumeCommit testSendAnotherUsersCommit :: TestM () @@ -1779,8 +1778,8 @@ sendRemoteMLSWelcomeKPNotFound = do commit <- runMLSTest $ do [alice1, bob1] <- traverse createMLSClient [alice, bob] void $ setupFakeMLSGroup alice1 - kp <- generateKeyPackage bob1 >>= keyPackageFile bob1 . snd - createAddCommitWithKeyPackages alice1 [(bob1, kp)] + kp <- fst <$> generateKeyPackage bob1 + createAddCommitWithKeyPackages alice1 [(bob1, kp.rmRaw)] welcome <- assertJust (mpWelcome commit) fedGalleyClient <- view tsFedGalleyClient diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index fcad3e9ffe..4a1d66d047 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -28,6 +28,7 @@ import Control.Arrow ((&&&)) import Control.Error.Util import Control.Lens (preview, to, view, (.~), (^..)) import Control.Monad.Catch +import Control.Monad.Cont import Control.Monad.State (StateT, evalStateT) import qualified Control.Monad.State as State import Control.Monad.Trans.Maybe @@ -400,7 +401,7 @@ createFakeMLSClient qusr = do pure cid -- | create and upload to backend -uploadNewKeyPackage :: HasCallStack => ClientIdentity -> MLSTest KeyPackageRef +uploadNewKeyPackage :: HasCallStack => ClientIdentity -> MLSTest (RawMLS KeyPackage) uploadNewKeyPackage qcid = do (kp, _) <- generateKeyPackage qcid @@ -413,15 +414,13 @@ uploadNewKeyPackage qcid = do . json (KeyPackageUpload [kp]) ) !!! 
const 201 === statusCode - pure $ fromJust (kpRef' kp) + pure kp generateKeyPackage :: HasCallStack => ClientIdentity -> MLSTest (RawMLS KeyPackage, KeyPackageRef) generateKeyPackage qcid = do kpData <- mlscli qcid ["key-package", "create"] Nothing kp <- liftIO $ decodeMLSError kpData let ref = fromJust (kpRef' kp) - fp <- keyPackageFile qcid ref - liftIO $ BS.writeFile fp (rmRaw kp) pure (kp, ref) setClientGroupState :: HasCallStack => ClientIdentity -> ByteString -> MLSTest () @@ -549,13 +548,6 @@ fakeGroupId = liftIO $ fmap (GroupId . BS.pack) (replicateM 32 (generate arbitrary)) -keyPackageFile :: HasCallStack => ClientIdentity -> KeyPackageRef -> MLSTest FilePath -keyPackageFile qcid ref = - State.gets $ \mls -> - mlsBaseDir mls - cid2Str qcid - T.unpack (T.decodeUtf8 (hex (unKeyPackageRef ref))) - claimLocalKeyPackages :: HasCallStack => ClientIdentity -> Local UserId -> MLSTest KeyPackageBundle claimLocalKeyPackages qcid lusr = do brig <- viewBrig @@ -606,16 +598,13 @@ claimKeyPackages cid qusr = do loc <- liftTest $ qualifyLocal () foldQualified loc (claimLocalKeyPackages cid) claimRemoteKeyPackages qusr -bundleKeyPackages :: KeyPackageBundle -> MLSTest [(ClientIdentity, FilePath)] -bundleKeyPackages bundle = do - let bundleEntries = kpbEntries bundle - entryIdentity be = mkClientIdentity (kpbeUser be) (kpbeClient be) - for (toList bundleEntries) $ \be -> do - let d = kpData . kpbeKeyPackage $ be - qcid = entryIdentity be - fn <- keyPackageFile qcid (kpbeRef be) - liftIO $ BS.writeFile fn d - pure (qcid, fn) +bundleKeyPackages :: KeyPackageBundle -> [(ClientIdentity, ByteString)] +bundleKeyPackages bundle = + let getEntry be = + ( mkClientIdentity (kpbeUser be) (kpbeClient be), + kpData (kpbeKeyPackage be) + ) + in map getEntry (toList (kpbEntries bundle)) -- | Claim keypackages and create a commit/welcome pair on a given client. -- Note that this alters the state of the group immediately. 
If we want to test @@ -623,7 +612,7 @@ bundleKeyPackages bundle = do -- group to the previous state by using an older version of the group file. createAddCommit :: HasCallStack => ClientIdentity -> [Qualified UserId] -> MLSTest MessagePackage createAddCommit cid users = do - kps <- concat <$> traverse (bundleKeyPackages <=< claimKeyPackages cid) users + kps <- fmap (concat . map bundleKeyPackages) . traverse (claimKeyPackages cid) $ users liftIO $ assertBool "no key packages could be claimed" (not (null kps)) createAddCommitWithKeyPackages cid kps @@ -654,8 +643,8 @@ createExternalCommit qcid mpgs qcs = do State.modify $ \mls -> mls - { mlsNewMembers = Set.singleton qcid -- This might be a different client - -- than those that have been in the + { mlsNewMembers = Set.singleton qcid + -- This might be a different client than those that have been in the -- group from before. } @@ -670,7 +659,7 @@ createExternalCommit qcid mpgs qcs = do createAddProposals :: HasCallStack => ClientIdentity -> [Qualified UserId] -> MLSTest [MessagePackage] createAddProposals cid users = do - kps <- concat <$> traverse (bundleKeyPackages <=< claimKeyPackages cid) users + kps <- fmap (concat . map bundleKeyPackages) . traverse (claimKeyPackages cid) $ users traverse (createAddProposalWithKeyPackage cid) kps -- | Create an application message. @@ -696,13 +685,14 @@ createApplicationMessage cid messageContent = do createAddCommitWithKeyPackages :: ClientIdentity -> - [(ClientIdentity, FilePath)] -> + [(ClientIdentity, ByteString)] -> MLSTest MessagePackage createAddCommitWithKeyPackages qcid clientsAndKeyPackages = do bd <- State.gets mlsBaseDir welcomeFile <- liftIO $ emptyTempFile bd "welcome" pgsFile <- liftIO $ emptyTempFile bd "pgs" - commit <- + + commit <- runContT (traverse (withTempKeyPackageFile . 
snd) clientsAndKeyPackages) $ \kpFiles -> mlscli qcid ( [ "member", @@ -716,7 +706,7 @@ createAddCommitWithKeyPackages qcid clientsAndKeyPackages = do "--group-out", "" ] - <> map snd clientsAndKeyPackages + <> kpFiles ) Nothing @@ -737,13 +727,13 @@ createAddCommitWithKeyPackages qcid clientsAndKeyPackages = do createAddProposalWithKeyPackage :: ClientIdentity -> - (ClientIdentity, FilePath) -> + (ClientIdentity, ByteString) -> MLSTest MessagePackage createAddProposalWithKeyPackage cid (_, kp) = do - prop <- + prop <- runContT (withTempKeyPackageFile kp) $ \kpFile -> mlscli cid - ["proposal", "--group-in", "", "--group-out", "", "add", kp] + ["proposal", "--group-in", "", "--group-out", "", "add", kpFile] Nothing pure MessagePackage @@ -791,17 +781,15 @@ readWelcome fp = runMaybeT $ do liftIO $ BS.readFile fp createRemoveCommit :: HasCallStack => ClientIdentity -> [ClientIdentity] -> MLSTest MessagePackage -createRemoveCommit cid targets = do +createRemoveCommit cid _targets = do + -- TODO bd <- State.gets mlsBaseDir welcomeFile <- liftIO $ emptyTempFile bd "welcome" pgsFile <- liftIO $ emptyTempFile bd "pgs" g <- getClientGroupState cid - let kprefByClient = Map.fromList (readGroupState g) - let fetchKeyPackage c = keyPackageFile c (kprefByClient Map.! c) - kps <- traverse fetchKeyPackage targets - + let indices = map snd (readGroupState g) commit <- mlscli cid @@ -816,7 +804,7 @@ createRemoveCommit cid targets = do "--group-state-out", pgsFile ] - <> kps + <> map show indices ) Nothing welcome <- liftIO $ readWelcome welcomeFile @@ -987,20 +975,21 @@ mlsBracket clients k = do c <- view tsCannon WS.bracketAsClientRN c (map (ciUser &&& ciClient) clients) k -readGroupState :: ByteString -> [(ClientIdentity, KeyPackageRef)] +readGroupState :: ByteString -> [(ClientIdentity, Word32)] readGroupState j = do + -- TODO: figure out the new JSON format of the group state node <- j ^.. key "group" . key "tree" . key "tree" . key "nodes" . _Array . 
traverse leafNode <- node ^.. key "node" . key "LeafNode" identity <- either (const []) pure . decodeMLS' . BS.pack . map fromIntegral $ leafNode ^.. key "key_package" . key "payload" . key "credential" . key "credential" . key "Basic" . key "identity" . key "vec" . _Array . traverse . _Integer - kpr <- (unhexM . T.encodeUtf8 =<<) $ leafNode ^.. key "key_package_ref" . _String - pure (identity, KeyPackageRef kpr) + _kpr <- (unhexM . T.encodeUtf8 =<<) $ leafNode ^.. key "key_package_ref" . _String + pure (identity, error "TODO: get index") getClientsFromGroupState :: ClientIdentity -> Qualified UserId -> - MLSTest [(ClientIdentity, KeyPackageRef)] + MLSTest [(ClientIdentity, Word32)] getClientsFromGroupState cid u = do groupState <- readGroupState <$> getClientGroupState cid pure $ filter (\(cid', _) -> cidQualifiedUser cid' == u) groupState @@ -1292,3 +1281,14 @@ getCurrentGroupId = do State.gets mlsGroupId >>= \case Nothing -> liftIO $ assertFailure "Creating add proposal for non-existing group" Just g -> pure g + +withTempKeyPackageFile :: ByteString -> ContT a MLSTest FilePath +withTempKeyPackageFile bs = do + bd <- State.gets mlsBaseDir + ContT $ \k -> + bracket + (liftIO (openBinaryTempFile bd "kp")) + (\(fp, _) -> liftIO (removeFile fp)) + $ \(fp, h) -> do + liftIO $ BS.hPut h bs `finally` hClose h + k fp diff --git a/services/galley/test/integration/API/Util.hs b/services/galley/test/integration/API/Util.hs index 4731df544e..cf54add2b7 100644 --- a/services/galley/test/integration/API/Util.hs +++ b/services/galley/test/integration/API/Util.hs @@ -121,7 +121,6 @@ import Wire.API.Federation.API import Wire.API.Federation.API.Galley import Wire.API.Federation.Domain (originDomainHeaderName) import Wire.API.Internal.Notification hiding (target) -import Wire.API.MLS.KeyPackage import Wire.API.MLS.Message import Wire.API.MLS.Proposal import Wire.API.MLS.Serialisation @@ -2899,17 +2898,17 @@ wsAssertConvReceiptModeUpdate conv usr new n = do evtFrom e @?= usr 
evtData e @?= EdConvReceiptModeUpdate (ConversationReceiptModeUpdate new) -wsAssertBackendRemoveProposalWithEpoch :: HasCallStack => Qualified UserId -> Qualified ConvId -> KeyPackageRef -> Epoch -> Notification -> IO ByteString -wsAssertBackendRemoveProposalWithEpoch fromUser convId kpref epoch n = do - bs <- wsAssertBackendRemoveProposal fromUser (Conv <$> convId) kpref n +wsAssertBackendRemoveProposalWithEpoch :: HasCallStack => Qualified UserId -> Qualified ConvId -> Word32 -> Epoch -> Notification -> IO ByteString +wsAssertBackendRemoveProposalWithEpoch fromUser convId idx epoch n = do + bs <- wsAssertBackendRemoveProposal fromUser (Conv <$> convId) idx n let msg = fromRight (error "Failed to parse Message") $ decodeMLS' @Message bs case msg.content of MessagePublic pmsg -> liftIO $ pmsg.content.rmValue.epoch @?= epoch _ -> assertFailure "unexpected message content" pure bs -wsAssertBackendRemoveProposal :: HasCallStack => Qualified UserId -> Qualified ConvOrSubConvId -> KeyPackageRef -> Notification -> IO ByteString -wsAssertBackendRemoveProposal fromUser cnvOrSubCnv _kpref n = do +wsAssertBackendRemoveProposal :: HasCallStack => Qualified UserId -> Qualified ConvOrSubConvId -> Word32 -> Notification -> IO ByteString +wsAssertBackendRemoveProposal fromUser cnvOrSubCnv idx n = do let e = List1.head (WS.unpackPayload n) ntfTransient n @?= False evtConv e @?= convOfConvOrSub <$> cnvOrSubCnv @@ -2922,7 +2921,7 @@ wsAssertBackendRemoveProposal fromUser cnvOrSubCnv _kpref n = do pmsg.content.rmValue.sender @?= SenderExternal 0 case pmsg.content.rmValue.content of FramedContentProposal prop -> case prop.rmValue of - RemoveProposal kpRefRemove -> kpRefRemove @?= error "kpref" + RemoveProposal kpRefRemove -> kpRefRemove @?= idx otherProp -> assertFailure $ "Expected RemoveProposal but got " <> show otherProp otherPayload -> assertFailure $ "Expected ProposalMessage but got " <> show otherPayload _ -> assertFailure $ "Expected PublicMessage" From 
21c4f3a795906a09fbf7c16ca1de075f134833c6 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 4 Apr 2023 10:36:14 +0200 Subject: [PATCH 08/75] Compute new node index for add proposals --- libs/wire-api/src/Wire/API/Error/Galley.hs | 3 ++ libs/wire-api/src/Wire/API/MLS/Message.hs | 20 +++++----- .../src/Wire/API/Routes/Public/Galley/MLS.hs | 23 +++++++----- services/galley/src/Galley/API/MLS/Message.hs | 37 +++++++++++++------ services/galley/src/Galley/API/MLS/Types.hs | 15 +++++--- .../galley/test/integration/API/MLS/Util.hs | 2 +- 6 files changed, 62 insertions(+), 38 deletions(-) diff --git a/libs/wire-api/src/Wire/API/Error/Galley.hs b/libs/wire-api/src/Wire/API/Error/Galley.hs index b06efe0300..9c0d317fa1 100644 --- a/libs/wire-api/src/Wire/API/Error/Galley.hs +++ b/libs/wire-api/src/Wire/API/Error/Galley.hs @@ -73,6 +73,7 @@ data GalleyError | MLSNonEmptyMemberList | MLSDuplicatePublicKey | MLSKeyPackageRefNotFound + | MLSInvalidLeafNodeIndex | MLSUnsupportedMessage | MLSProposalNotFound | MLSUnsupportedProposal @@ -203,6 +204,8 @@ type instance MapError 'MLSDuplicatePublicKey = 'StaticError 400 "mls-duplicate- type instance MapError 'MLSKeyPackageRefNotFound = 'StaticError 404 "mls-key-package-ref-not-found" "A referenced key package could not be mapped to a known client" +type instance MapError 'MLSInvalidLeafNodeIndex = 'StaticError 400 "mls-invalid-leaf-node-index" "A referenced leaf node index points to a black or non-existing node" + type instance MapError 'MLSUnsupportedMessage = 'StaticError 422 "mls-unsupported-message" "Attempted to send a message with an unsupported combination of content type and wire format" type instance MapError 'MLSProposalNotFound = 'StaticError 404 "mls-proposal-not-found" "A proposal referenced in a commit message could not be found" diff --git a/libs/wire-api/src/Wire/API/MLS/Message.hs b/libs/wire-api/src/Wire/API/MLS/Message.hs index cfb39db117..652d5c88d5 100644 --- a/libs/wire-api/src/Wire/API/MLS/Message.hs +++ 
b/libs/wire-api/src/Wire/API/MLS/Message.hs @@ -92,8 +92,8 @@ data Message = Message instance ParseMLS Message where parseMLS = Message - <$> traceMLS "version" parseMLS - <*> traceMLS "content" parseMLS + <$> parseMLS + <*> parseMLS instance SerialiseMLS Message where serialiseMLS msg = do @@ -157,9 +157,9 @@ data PublicMessage = PublicMessage instance ParseMLS PublicMessage where parseMLS = do - content <- traceMLS "pub content" parseMLS + content <- parseMLS authData <- parseFramedContentAuthData (framedContentDataTag (content.rmValue.content)) - membershipTag <- traceMLS "membership tag" $ case content.rmValue.sender of + membershipTag <- case content.rmValue.sender of SenderMember _ -> Just <$> parseMLSBytes @VarInt _ -> pure Nothing pure @@ -252,11 +252,11 @@ data FramedContent = FramedContent instance ParseMLS FramedContent where parseMLS = FramedContent - <$> traceMLS "groupId" parseMLS - <*> traceMLS "epoch" parseMLS - <*> traceMLS "sender" parseMLS - <*> traceMLS "authdata" (parseMLSBytes @VarInt) - <*> traceMLS "content" parseMLS + <$> parseMLS + <*> parseMLS + <*> parseMLS + <*> parseMLSBytes @VarInt + <*> parseMLS instance SerialiseMLS FramedContent where serialiseMLS fc = do @@ -339,7 +339,7 @@ data FramedContentAuthData = FramedContentAuthData deriving (Eq, Show) parseFramedContentAuthData :: FramedContentDataTag -> Get FramedContentAuthData -parseFramedContentAuthData tag = traceMLS "authdata" $ do +parseFramedContentAuthData tag = do sig <- parseMLSBytes @VarInt confirmationTag <- case tag of FramedContentCommitTag -> Just <$> parseMLSBytes @VarInt diff --git a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs index d97320bb51..2c953d695c 100644 --- a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs +++ b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs @@ -66,6 +66,7 @@ type MLSMessagingAPI = :> CanThrow 'MLSClientMismatch :> CanThrow 'MLSCommitMissingReferences :> CanThrow 
'MLSKeyPackageRefNotFound + :> CanThrow 'MLSInvalidLeafNodeIndex :> CanThrow 'MLSNotEnabled :> CanThrow 'MLSProposalNotFound :> CanThrow 'MLSProtocolErrorTag @@ -101,21 +102,22 @@ type MLSMessagingAPI = :> CanThrow 'ConvMemberNotFound :> CanThrow 'ConvNotFound :> CanThrow 'LegalHoldNotEnabled + :> CanThrow 'MissingLegalholdConsent :> CanThrow 'MLSClientMismatch + :> CanThrow 'MLSClientSenderUserMismatch :> CanThrow 'MLSCommitMissingReferences + :> CanThrow 'MLSGroupConversationMismatch + :> CanThrow 'MLSInvalidLeafNodeIndex :> CanThrow 'MLSKeyPackageRefNotFound + :> CanThrow 'MLSMissingSenderClient :> CanThrow 'MLSNotEnabled :> CanThrow 'MLSProposalNotFound :> CanThrow 'MLSProtocolErrorTag :> CanThrow 'MLSSelfRemovalNotAllowed :> CanThrow 'MLSStaleMessage + :> CanThrow 'MLSSubConvClientNotInParent :> CanThrow 'MLSUnsupportedMessage :> CanThrow 'MLSUnsupportedProposal - :> CanThrow 'MLSClientSenderUserMismatch - :> CanThrow 'MLSGroupConversationMismatch - :> CanThrow 'MLSMissingSenderClient - :> CanThrow 'MissingLegalholdConsent - :> CanThrow 'MLSSubConvClientNotInParent :> CanThrow MLSProposalFailure :> "messages" :> ZLocalUser @@ -140,22 +142,23 @@ type MLSMessagingAPI = :> CanThrow 'ConvMemberNotFound :> CanThrow 'ConvNotFound :> CanThrow 'LegalHoldNotEnabled + :> CanThrow 'MissingLegalholdConsent :> CanThrow 'MLSClientMismatch + :> CanThrow 'MLSClientSenderUserMismatch :> CanThrow 'MLSCommitMissingReferences + :> CanThrow 'MLSGroupConversationMismatch + :> CanThrow 'MLSInvalidLeafNodeIndex :> CanThrow 'MLSKeyPackageRefNotFound + :> CanThrow 'MLSMissingSenderClient :> CanThrow 'MLSNotEnabled :> CanThrow 'MLSProposalNotFound :> CanThrow 'MLSProtocolErrorTag :> CanThrow 'MLSSelfRemovalNotAllowed :> CanThrow 'MLSStaleMessage + :> CanThrow 'MLSSubConvClientNotInParent :> CanThrow 'MLSUnsupportedMessage :> CanThrow 'MLSUnsupportedProposal - :> CanThrow 'MLSClientSenderUserMismatch - :> CanThrow 'MLSGroupConversationMismatch - :> CanThrow 'MLSMissingSenderClient :> 
CanThrow 'MLSWelcomeMismatch - :> CanThrow 'MissingLegalholdConsent - :> CanThrow 'MLSSubConvClientNotInParent :> CanThrow MLSProposalFailure :> "commit-bundles" :> ZLocalUser diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index e216e8c7da..5044803939 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -204,6 +204,7 @@ type MLSMessageStaticErrors = ErrorS 'MLSProposalNotFound, ErrorS 'MissingLegalholdConsent, ErrorS 'MLSKeyPackageRefNotFound, + ErrorS 'MLSInvalidLeafNodeIndex, ErrorS 'MLSClientMismatch, ErrorS 'MLSUnsupportedProposal, ErrorS 'MLSCommitMissingReferences, @@ -550,6 +551,7 @@ type HasProposalEffects r = Member (Error MLSProposalFailure) r, Member (Error MLSProtocolError) r, Member (ErrorS 'MLSClientMismatch) r, + Member (ErrorS 'MLSInvalidLeafNodeIndex) r, Member (ErrorS 'MLSKeyPackageRefNotFound) r, Member (ErrorS 'MLSUnsupportedProposal) r, Member ExternalAccess r, @@ -614,7 +616,16 @@ getCommitData lConvOrSub epoch commit = do -- check epoch number when (epoch /= curEpoch) $ throwS @'MLSStaleMessage - foldMap (applyProposalRef (idForConvOrSub convOrSub) mlsMeta groupId epoch suite) (cProposals commit) + foldMap + ( applyProposalRef + (idForConvOrSub convOrSub) + (indexMapConvOrSub convOrSub) + mlsMeta + groupId + epoch + suite + ) + (cProposals commit) processCommit :: ( HasProposalEffects r, @@ -702,7 +713,7 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do [(_, idx :: Word32)] -> do cid <- note (mlsProtocolError "Invalid index in remove proposal") $ - indexToClient (indicesConvOrSub convOrSub) idx + imLookup (indexMapConvOrSub convOrSub) idx unless (cid == senderIdentity) $ throw $ mlsProtocolError "Only the self client can be removed by an external commit" @@ -801,43 +812,45 @@ applyProposalRef :: ) ) => ConvOrSubConvId -> + IndexMap -> ConversationMLSData -> GroupId -> Epoch -> CipherSuiteTag -> 
ProposalOrRef -> Sem r ProposalAction -applyProposalRef convOrSubConvId mlsMeta groupId epoch _suite (Ref ref) = do +applyProposalRef convOrSubConvId im mlsMeta groupId epoch _suite (Ref ref) = do p <- getProposal groupId epoch ref >>= noteS @'MLSProposalNotFound checkEpoch epoch mlsMeta checkGroup groupId mlsMeta - applyProposal convOrSubConvId groupId (rmValue p) -applyProposalRef convOrSubConvId _mlsMeta groupId _epoch suite (Inline p) = do + applyProposal convOrSubConvId im groupId (rmValue p) +applyProposalRef convOrSubConvId im _mlsMeta groupId _epoch suite (Inline p) = do checkProposalCipherSuite suite p - applyProposal convOrSubConvId groupId p + applyProposal convOrSubConvId im groupId p applyProposal :: forall r. HasProposalEffects r => ConvOrSubConvId -> + IndexMap -> GroupId -> Proposal -> Sem r ProposalAction -applyProposal _convOrSubConvId _groupId (AddProposal kp) = do - let idx = error "TODO: compute new index" +applyProposal _convOrSubConvId im _groupId (AddProposal kp) = do + let idx = imNextIndex im -- TODO: validate key package cid <- getKeyPackageIdentity kp.rmValue -- TODO: we probably should not update the conversation state here -- addMLSClients groupId (cidQualifiedUser cid) (Set.singleton (ciClient cid, idx)) pure (paAddClient cid idx) -applyProposal _convOrSubConvId _groupId (RemoveProposal idx) = do - let cid = error "TODO: lookup in index map" +applyProposal _convOrSubConvId im _groupId (RemoveProposal idx) = do + cid <- noteS @'MLSInvalidLeafNodeIndex $ imLookup im idx pure (paRemoveClient cid idx) -applyProposal _convOrSubConvId _groupId (ExternalInitProposal _) = +applyProposal _convOrSubConvId _im _groupId (ExternalInitProposal _) = -- only record the fact there was an external init proposal, but do not -- process it in any way. 
pure paExternalInitPresent -applyProposal _convOrSubConvId _groupId _ = pure mempty +applyProposal _convOrSubConvId _im _groupId _ = pure mempty checkProposalCipherSuite :: Member (Error MLSProtocolError) r => diff --git a/services/galley/src/Galley/API/MLS/Types.hs b/services/galley/src/Galley/API/MLS/Types.hs index e4d1d3254d..a00639ebc1 100644 --- a/services/galley/src/Galley/API/MLS/Types.hs +++ b/services/galley/src/Galley/API/MLS/Types.hs @@ -35,8 +35,13 @@ newtype IndexMap = IndexMap {unIndexMap :: IntMap ClientIdentity} deriving (Eq, Show) deriving newtype (Semigroup, Monoid) -indexToClient :: IndexMap -> Word32 -> Maybe ClientIdentity -indexToClient m i = IntMap.lookup (fromIntegral i) (unIndexMap m) +imLookup :: IndexMap -> Word32 -> Maybe ClientIdentity +imLookup m i = IntMap.lookup (fromIntegral i) (unIndexMap m) + +imNextIndex :: IndexMap -> Word32 +imNextIndex im = + fromIntegral . fromJust $ + find (\n -> not $ IntMap.member n (unIndexMap im)) [0 ..] type ClientMap = Map (Qualified UserId) (Map ClientId Word32) @@ -125,9 +130,9 @@ membersConvOrSub :: ConvOrSubConv -> ClientMap membersConvOrSub (Conv c) = mcMembers c membersConvOrSub (SubConv _ s) = scMembers s -indicesConvOrSub :: ConvOrSubConv -> IndexMap -indicesConvOrSub (Conv c) = mcIndexMap c -indicesConvOrSub (SubConv _ s) = scIndexMap s +indexMapConvOrSub :: ConvOrSubConv -> IndexMap +indexMapConvOrSub (Conv c) = mcIndexMap c +indexMapConvOrSub (SubConv _ s) = scIndexMap s convOfConvOrSub :: ConvOrSubChoice c s -> c convOfConvOrSub (Conv c) = c diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index 4a1d66d047..cc7ba23dff 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -701,7 +701,7 @@ createAddCommitWithKeyPackages qcid clientsAndKeyPackages = do "", "--welcome-out", welcomeFile, - "--group-state-out", + "--group-info-out", pgsFile, "--group-out", "" From 
5418005ba5cfff1839403f8af2a8e76aff2491cb Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Tue, 4 Apr 2023 15:08:32 +0000 Subject: [PATCH 09/75] New commit bundle API Also replace PublicGroupState with GroupInfo --- .../wire-api/src/Wire/API/MLS/CommitBundle.hs | 110 +++++++++------- libs/wire-api/src/Wire/API/MLS/GroupInfo.hs | 114 ++++++++++++++++ .../src/Wire/API/MLS/GroupInfoBundle.hs | 98 -------------- libs/wire-api/src/Wire/API/MLS/Message.hs | 2 +- .../src/Wire/API/MLS/PublicGroupState.hs | 122 ------------------ .../src/Wire/API/MLS/Serialisation.hs | 8 ++ libs/wire-api/src/Wire/API/MLS/Servant.hs | 20 +-- .../API/Routes/Public/Galley/Conversation.hs | 6 +- .../src/Wire/API/Routes/Public/Galley/MLS.hs | 2 +- libs/wire-api/test/unit/Test/Wire/API/MLS.hs | 51 +------- .../test/unit/Test/Wire/API/Roundtrip/MLS.hs | 55 +------- libs/wire-api/wire-api.cabal | 3 +- .../brig/test/integration/Federation/Util.hs | 8 +- services/galley/src/Galley/API/Federation.hs | 10 +- .../galley/src/Galley/API/MLS/GroupInfo.hs | 12 +- services/galley/src/Galley/API/MLS/Message.hs | 38 +++--- .../src/Galley/API/MLS/SubConversation.hs | 8 +- .../src/Galley/Cassandra/Conversation.hs | 31 +++-- .../galley/src/Galley/Cassandra/Instances.hs | 10 +- .../galley/src/Galley/Cassandra/Queries.hs | 20 +-- .../src/Galley/Cassandra/SubConversation.hs | 34 +++-- .../src/Galley/Effects/ConversationStore.hs | 15 +-- .../Galley/Effects/SubConversationStore.hs | 8 +- services/galley/test/integration/API/MLS.hs | 16 +-- .../galley/test/integration/API/MLS/Util.hs | 38 +++--- 25 files changed, 326 insertions(+), 513 deletions(-) create mode 100644 libs/wire-api/src/Wire/API/MLS/GroupInfo.hs delete mode 100644 libs/wire-api/src/Wire/API/MLS/GroupInfoBundle.hs delete mode 100644 libs/wire-api/src/Wire/API/MLS/PublicGroupState.hs diff --git a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs index 57f75490bc..0930b6a699 100644 --- 
a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs +++ b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs @@ -15,68 +15,80 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . -module Wire.API.MLS.CommitBundle where +module Wire.API.MLS.CommitBundle (CommitBundle (..)) where -import Control.Lens (view, (.~), (?~)) -import Data.Bifunctor (first) -import qualified Data.ByteString as BS -import Data.ProtoLens (decodeMessage, encodeMessage) -import qualified Data.ProtoLens (Message (defMessage)) +import Control.Applicative import qualified Data.Swagger as S import qualified Data.Text as T import Imports -import qualified Proto.Mls -import qualified Proto.Mls_Fields as Proto.Mls -import Wire.API.ConverProtoLens -import Wire.API.MLS.GroupInfoBundle +import Wire.API.MLS.GroupInfo import Wire.API.MLS.Message import Wire.API.MLS.Serialisation import Wire.API.MLS.Welcome data CommitBundle = CommitBundle - { cbCommitMsg :: RawMLS Message, + { cbCommitMsg :: RawMLS Message, -- TODO: change this type to Commit cbWelcome :: Maybe (RawMLS Welcome), - cbGroupInfoBundle :: GroupInfoBundle + cbGroupInfo :: RawMLS GroupInfo } deriving (Eq, Show) -instance ConvertProtoLens Proto.Mls.CommitBundle CommitBundle where - fromProtolens protoBundle = protoLabel "CommitBundle" $ do - CommitBundle - <$> protoLabel - "commit" - ( decodeMLS' (view Proto.Mls.commit protoBundle) - ) - <*> protoLabel - "welcome" - ( let bs = view Proto.Mls.welcome protoBundle - in if BS.length bs == 0 - then pure Nothing - else Just <$> decodeMLS' bs - ) - <*> protoLabel "group_info_bundle" (fromProtolens (view Proto.Mls.groupInfoBundle protoBundle)) - toProtolens bundle = - let commitData = rmRaw (cbCommitMsg bundle) - welcomeData = foldMap rmRaw (cbWelcome bundle) - groupInfoData = toProtolens (cbGroupInfoBundle bundle) - in ( Data.ProtoLens.defMessage - & Proto.Mls.commit .~ commitData - & Proto.Mls.welcome .~ welcomeData - & 
Proto.Mls.groupInfoBundle .~ groupInfoData - ) +data CommitBundleF f = CommitBundleF + { cbCommitMsg :: f (RawMLS Message), + cbWelcome :: f (RawMLS Welcome), + cbGroupInfo :: f (RawMLS GroupInfo) + } -instance S.ToSchema CommitBundle where - declareNamedSchema _ = - pure $ - S.NamedSchema (Just "CommitBundle") $ - mempty - & S.description - ?~ "A protobuf-serialized object. See wireapp/generic-message-proto for the definition." +instance Alternative f => Semigroup (CommitBundleF f) where + cb1 <> cb2 = + CommitBundleF + (cb1.cbCommitMsg <|> cb2.cbCommitMsg) + (cb1.cbWelcome <|> cb2.cbWelcome) + (cb1.cbGroupInfo <|> cb2.cbGroupInfo) + +instance Alternative f => Monoid (CommitBundleF f) where + mempty = CommitBundleF empty empty empty + +checkCommitBundleF :: CommitBundleF [] -> Either Text CommitBundle +checkCommitBundleF cb = + CommitBundle + <$> check "commit" cb.cbCommitMsg + <*> checkOpt "welcome" cb.cbWelcome + <*> check "group info" cb.cbGroupInfo + where + check :: Text -> [a] -> Either Text a + check _ [x] = pure x + check name [] = Left ("Missing " <> name) + check name _ = Left ("Redundant occurrence of " <> name) + + checkOpt :: Text -> [a] -> Either Text (Maybe a) + checkOpt _ [] = pure Nothing + checkOpt _ [x] = pure (Just x) + checkOpt name _ = Left ("Redundant occurrence of " <> name) -deserializeCommitBundle :: ByteString -> Either Text CommitBundle -deserializeCommitBundle b = do - protoCommitBundle :: Proto.Mls.CommitBundle <- first (("Parsing protobuf failed: " <>) . 
T.pack) (decodeMessage b) - first ("Converting from protobuf failed: " <>) (fromProtolens protoCommitBundle) +findMessageInStream :: Alternative f => RawMLS Message -> Either Text (CommitBundleF f) +findMessageInStream msg = case msg.rmValue.content of + MessagePublic mp -> case mp.content.rmValue.content of + FramedContentCommit _ -> pure (CommitBundleF (pure msg) empty empty) + _ -> Left "unexpected public message" + MessageWelcome w -> pure (CommitBundleF empty (pure w) empty) + MessageGroupInfo -> error "TODO: get group info from message" + _ -> Left "unexpected message type" -serializeCommitBundle :: CommitBundle -> ByteString -serializeCommitBundle = encodeMessage . (toProtolens @Proto.Mls.CommitBundle @CommitBundle) +findMessagesInStream :: Alternative f => [RawMLS Message] -> Either Text (CommitBundleF f) +findMessagesInStream = getAp . foldMap (Ap . findMessageInStream) + +instance ParseMLS CommitBundle where + parseMLS = do + msgs <- parseMLSStream parseMLS + either (fail . T.unpack) pure $ + findMessagesInStream msgs >>= checkCommitBundleF + +instance SerialiseMLS CommitBundle where + serialiseMLS cb = do + serialiseMLS cb.cbCommitMsg + traverse_ serialiseMLS cb.cbWelcome + serialiseMLS cb.cbGroupInfo + +instance S.ToSchema CommitBundle where + declareNamedSchema _ = pure (mlsSwagger "CommitBundle") diff --git a/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs b/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs new file mode 100644 index 0000000000..7971fa7b0a --- /dev/null +++ b/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs @@ -0,0 +1,114 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. 
+-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . + +module Wire.API.MLS.GroupInfo + ( GroupInfo (..), + GroupInfoData (..), + ) +where + +import Data.Binary.Get +import Data.Binary.Put +import qualified Data.ByteString.Lazy as LBS +import qualified Data.Swagger as S +import GHC.Records +import Imports +import Wire.API.MLS.CipherSuite +import Wire.API.MLS.Epoch +import Wire.API.MLS.Extension +import Wire.API.MLS.Group +import Wire.API.MLS.ProtocolVersion +import Wire.API.MLS.Serialisation +import Wire.Arbitrary + +data GroupContext = GroupContext + { protocolVersion :: ProtocolVersion, + cipherSuite :: CipherSuite, + groupId :: GroupId, + epoch :: Epoch, + treeHash :: ByteString, + confirmedTranscriptHash :: ByteString, + extensions :: [Extension] + } + deriving stock (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform GroupContext) + +instance ParseMLS GroupContext where + parseMLS = + GroupContext + <$> parseMLS + <*> parseMLS + <*> parseMLS + <*> parseMLS + <*> parseMLSBytes @VarInt + <*> parseMLSBytes @VarInt + <*> parseMLSVector @VarInt parseMLS + +data GroupInfoTBS = GroupInfoTBS + { groupContext :: GroupContext, + extensions :: [Extension], + confirmationTag :: ByteString, + signer :: Word32 + } + deriving stock (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform GroupInfoTBS) + +instance ParseMLS GroupInfoTBS where + parseMLS = + GroupInfoTBS + <$> parseMLS + <*> parseMLSVector @VarInt parseMLS + <*> parseMLSBytes @VarInt + <*> parseMLS + +data GroupInfo = GroupInfo + { tbs :: GroupInfoTBS, + signature_ :: ByteString + } + deriving stock (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform 
GroupInfo) + +instance ParseMLS GroupInfo where + parseMLS = + GroupInfo + <$> parseMLS + <*> parseMLSBytes @VarInt + +instance HasField "groupContext" GroupInfo GroupContext where + getField = (.tbs.groupContext) + +instance HasField "extensions" GroupInfo [Extension] where + getField = (.tbs.extensions) + +instance HasField "confirmationTag" GroupInfo ByteString where + getField = (.tbs.confirmationTag) + +instance HasField "signer" GroupInfo Word32 where + getField = (.tbs.signer) + +newtype GroupInfoData = GroupInfoData {unGroupInfoData :: ByteString} + deriving stock (Eq, Ord, Show) + deriving newtype (Arbitrary) + +instance ParseMLS GroupInfoData where + parseMLS = GroupInfoData . LBS.toStrict <$> getRemainingLazyByteString + +instance SerialiseMLS GroupInfoData where + serialiseMLS (GroupInfoData bs) = putByteString bs + +instance S.ToSchema GroupInfoData where + declareNamedSchema _ = pure (mlsSwagger "GroupInfoData") diff --git a/libs/wire-api/src/Wire/API/MLS/GroupInfoBundle.hs b/libs/wire-api/src/Wire/API/MLS/GroupInfoBundle.hs deleted file mode 100644 index 93cc706e98..0000000000 --- a/libs/wire-api/src/Wire/API/MLS/GroupInfoBundle.hs +++ /dev/null @@ -1,98 +0,0 @@ --- This file is part of the Wire Server implementation. --- --- Copyright (C) 2022 Wire Swiss GmbH --- --- This program is free software: you can redistribute it and/or modify it under --- the terms of the GNU Affero General Public License as published by the Free --- Software Foundation, either version 3 of the License, or (at your option) any --- later version. --- --- This program is distributed in the hope that it will be useful, but WITHOUT --- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS --- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more --- details. --- --- You should have received a copy of the GNU Affero General Public License along --- with this program. If not, see . 
- -module Wire.API.MLS.GroupInfoBundle where - -import Control.Lens (view, (.~)) -import Data.ProtoLens (Message (defMessage)) -import Imports -import qualified Proto.Mls -import qualified Proto.Mls_Fields as Proto.Mls -import Test.QuickCheck -import Wire.API.ConverProtoLens -import Wire.API.MLS.PublicGroupState -import Wire.API.MLS.Serialisation -import Wire.Arbitrary - -data GroupInfoType = GroupInfoTypePublicGroupState | UnencryptedGroupInfo | JweEncryptedGroupInfo - deriving stock (Eq, Show, Generic, Enum, Bounded) - deriving (Arbitrary) via (GenericUniform GroupInfoType) - -instance ConvertProtoLens Proto.Mls.GroupInfoType GroupInfoType where - fromProtolens Proto.Mls.PUBLIC_GROUP_STATE = pure GroupInfoTypePublicGroupState - fromProtolens Proto.Mls.GROUP_INFO = pure UnencryptedGroupInfo - fromProtolens Proto.Mls.GROUP_INFO_JWE = pure JweEncryptedGroupInfo - - toProtolens GroupInfoTypePublicGroupState = Proto.Mls.PUBLIC_GROUP_STATE - toProtolens UnencryptedGroupInfo = Proto.Mls.GROUP_INFO - toProtolens JweEncryptedGroupInfo = Proto.Mls.GROUP_INFO_JWE - -data RatchetTreeType = TreeFull | TreeDelta | TreeByRef - deriving stock (Eq, Show, Generic, Bounded, Enum) - deriving (Arbitrary) via (GenericUniform RatchetTreeType) - -instance ConvertProtoLens Proto.Mls.RatchetTreeType RatchetTreeType where - fromProtolens Proto.Mls.FULL = pure TreeFull - fromProtolens Proto.Mls.DELTA = pure TreeDelta - fromProtolens Proto.Mls.REFERENCE = pure TreeByRef - - toProtolens TreeFull = Proto.Mls.FULL - toProtolens TreeDelta = Proto.Mls.DELTA - toProtolens TreeByRef = Proto.Mls.REFERENCE - -data GroupInfoBundle = GroupInfoBundle - { gipGroupInfoType :: GroupInfoType, - gipRatchetTreeType :: RatchetTreeType, - gipGroupState :: RawMLS PublicGroupState - } - deriving stock (Eq, Show, Generic) - -instance ConvertProtoLens Proto.Mls.GroupInfoBundle GroupInfoBundle where - fromProtolens protoBundle = - protoLabel "GroupInfoBundle" $ - GroupInfoBundle - <$> protoLabel "field 
group_info_type" (fromProtolens (view Proto.Mls.groupInfoType protoBundle)) - <*> protoLabel "field ratchet_tree_type" (fromProtolens (view Proto.Mls.ratchetTreeType protoBundle)) - <*> protoLabel "field group_info" (decodeMLS' (view Proto.Mls.groupInfo protoBundle)) - toProtolens bundle = - let encryptionType = toProtolens (gipGroupInfoType bundle) - treeType = toProtolens (gipRatchetTreeType bundle) - in ( defMessage - & Proto.Mls.groupInfoType .~ encryptionType - & Proto.Mls.ratchetTreeType .~ treeType - & Proto.Mls.groupInfo .~ rmRaw (gipGroupState bundle) - ) - -instance Arbitrary GroupInfoBundle where - arbitrary = - GroupInfoBundle - <$> arbitrary - <*> arbitrary - <*> (mkRawMLS <$> arbitrary) - -instance ParseMLS GroupInfoBundle where - parseMLS = - GroupInfoBundle - <$> parseMLSEnum @Word8 "GroupInfoTypeEnum" - <*> parseMLSEnum @Word8 "RatchetTreeEnum" - <*> parseMLS - -instance SerialiseMLS GroupInfoBundle where - serialiseMLS (GroupInfoBundle e t pgs) = do - serialiseMLSEnum @Word8 e - serialiseMLSEnum @Word8 t - serialiseMLS pgs diff --git a/libs/wire-api/src/Wire/API/MLS/Message.hs b/libs/wire-api/src/Wire/API/MLS/Message.hs index 652d5c88d5..c70291cd3c 100644 --- a/libs/wire-api/src/Wire/API/MLS/Message.hs +++ b/libs/wire-api/src/Wire/API/MLS/Message.hs @@ -106,7 +106,7 @@ instance HasField "wireFormat" Message WireFormatTag where data MessageContent = MessagePrivate (RawMLS PrivateMessage) | MessagePublic PublicMessage - | MessageWelcome Welcome + | MessageWelcome (RawMLS Welcome) | MessageGroupInfo -- TODO | MessageKeyPackage (RawMLS KeyPackage) deriving (Eq, Show) diff --git a/libs/wire-api/src/Wire/API/MLS/PublicGroupState.hs b/libs/wire-api/src/Wire/API/MLS/PublicGroupState.hs deleted file mode 100644 index ec5ac539bc..0000000000 --- a/libs/wire-api/src/Wire/API/MLS/PublicGroupState.hs +++ /dev/null @@ -1,122 +0,0 @@ --- This file is part of the Wire Server implementation. 
--- --- Copyright (C) 2022 Wire Swiss GmbH --- --- This program is free software: you can redistribute it and/or modify it under --- the terms of the GNU Affero General Public License as published by the Free --- Software Foundation, either version 3 of the License, or (at your option) any --- later version. --- --- This program is distributed in the hope that it will be useful, but WITHOUT --- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS --- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more --- details. --- --- You should have received a copy of the GNU Affero General Public License along --- with this program. If not, see . -{-# LANGUAGE RecordWildCards #-} - -module Wire.API.MLS.PublicGroupState where - -import Data.Binary -import Data.Binary.Get -import Data.Binary.Put -import qualified Data.ByteString.Lazy as LBS -import qualified Data.Swagger as S -import Imports -import Test.QuickCheck hiding (label) -import Wire.API.MLS.CipherSuite -import Wire.API.MLS.Epoch -import Wire.API.MLS.Group -import Wire.API.MLS.KeyPackage -import Wire.API.MLS.ProtocolVersion -import Wire.API.MLS.Serialisation -import Wire.Arbitrary - --- TODO: replace with GroupInfo -data PublicGroupStateTBS = PublicGroupStateTBS - { pgsVersion :: ProtocolVersion, - pgsCipherSuite :: CipherSuite, - pgsGroupId :: GroupId, - pgsEpoch :: Epoch, - pgsTreeHash :: ByteString, - pgsInterimTranscriptHash :: ByteString, - pgsConfirmedInterimTranscriptHash :: ByteString, - pgsGroupContextExtensions :: ByteString, - pgsOtherExtensions :: ByteString, - pgsExternalPub :: ByteString, - pgsSigner :: KeyPackageRef - } - deriving stock (Eq, Show, Generic) - deriving (Arbitrary) via (GenericUniform PublicGroupStateTBS) - -instance ParseMLS PublicGroupStateTBS where - parseMLS = - PublicGroupStateTBS - <$> label "pgsVersion" parseMLS - <*> label "pgsCipherSuite" parseMLS - <*> label "pgsGroupId" parseMLS - <*> label "pgsEpoch" parseMLS - <*> label 
"pgsTreeHash" (parseMLSBytes @Word8) - <*> label "pgsInterimTranscriptHash" (parseMLSBytes @Word8) - <*> label "pgsConfirmedInterimTranscriptHash" (parseMLSBytes @Word8) - <*> label "pgsGroupContextExtensions" (parseMLSBytes @Word32) - <*> label "pgsOtherExtensions" (parseMLSBytes @Word32) - <*> label "pgsExternalPub" (parseMLSBytes @Word16) - <*> label "pgsSigner" parseMLS - -instance SerialiseMLS PublicGroupStateTBS where - serialiseMLS (PublicGroupStateTBS {..}) = do - serialiseMLS pgsVersion - serialiseMLS pgsCipherSuite - serialiseMLS pgsGroupId - serialiseMLS pgsEpoch - serialiseMLSBytes @Word8 pgsTreeHash - serialiseMLSBytes @Word8 pgsInterimTranscriptHash - serialiseMLSBytes @Word8 pgsConfirmedInterimTranscriptHash - serialiseMLSBytes @Word32 pgsGroupContextExtensions - serialiseMLSBytes @Word32 pgsOtherExtensions - serialiseMLSBytes @Word16 pgsExternalPub - serialiseMLS pgsSigner - -data PublicGroupState = PublicGroupState - { pgTBS :: RawMLS PublicGroupStateTBS, - pgSignature :: ByteString - } - deriving stock (Eq, Show, Generic) - --- | A type that holds an MLS-encoded 'PublicGroupState' value via --- 'serialiseMLS'. -newtype OpaquePublicGroupState = OpaquePublicGroupState - {unOpaquePublicGroupState :: ByteString} - deriving (Generic, Eq, Show) - deriving (Arbitrary) via (GenericUniform OpaquePublicGroupState) - -instance ParseMLS OpaquePublicGroupState where - parseMLS = OpaquePublicGroupState . LBS.toStrict <$> getRemainingLazyByteString - -instance SerialiseMLS OpaquePublicGroupState where - serialiseMLS (OpaquePublicGroupState bs) = putByteString bs - -instance S.ToSchema OpaquePublicGroupState where - declareNamedSchema _ = pure (mlsSwagger "OpaquePublicGroupState") - -toOpaquePublicGroupState :: RawMLS PublicGroupState -> OpaquePublicGroupState -toOpaquePublicGroupState = OpaquePublicGroupState . 
rmRaw - -instance Arbitrary PublicGroupState where - arbitrary = - PublicGroupState - <$> (mkRawMLS <$> arbitrary) - <*> arbitrary - -instance ParseMLS PublicGroupState where - parseMLS = - PublicGroupState - <$> label "pgTBS" parseMLS - <*> label "pgSignature" (parseMLSBytes @Word16) - -instance SerialiseMLS PublicGroupState where - serialiseMLS PublicGroupState {..} = do - serialiseMLS pgTBS - serialiseMLSBytes @Word16 pgSignature diff --git a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs index 9ba9a98833..25f537f4dc 100644 --- a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs +++ b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs @@ -22,6 +22,8 @@ module Wire.API.MLS.Serialisation ( ParseMLS (..), SerialiseMLS (..), VarInt (..), + parseMLSStream, + serialiseMLSStream, parseMLSVector, serialiseMLSVector, parseMLSBytes, @@ -120,6 +122,12 @@ instance SerialiseMLS VarInt where serialiseMLS = put instance ParseMLS VarInt where parseMLS = get +parseMLSStream :: Get a -> Get [a] +parseMLSStream = many . lookAhead + +serialiseMLSStream :: (a -> Put) -> [a] -> Put +serialiseMLSStream = traverse_ + parseMLSVector :: forall w a. (Binary w, Integral w) => Get a -> Get [a] parseMLSVector getItem = do len <- get @w diff --git a/libs/wire-api/src/Wire/API/MLS/Servant.hs b/libs/wire-api/src/Wire/API/MLS/Servant.hs index 33831f241b..9a34c399de 100644 --- a/libs/wire-api/src/Wire/API/MLS/Servant.hs +++ b/libs/wire-api/src/Wire/API/MLS/Servant.hs @@ -15,17 +15,14 @@ -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . 
-module Wire.API.MLS.Servant (MLS, mimeUnrenderMLSWith, CommitBundleMimeType) where +module Wire.API.MLS.Servant (MLS, mimeUnrenderMLSWith) where import Data.Bifunctor import Data.Binary -import qualified Data.ByteString.Lazy as LBS import qualified Data.Text as T import Imports import Network.HTTP.Media ((//)) import Servant.API hiding (Get) -import Wire.API.MLS.CommitBundle -import Wire.API.MLS.PublicGroupState (OpaquePublicGroupState, unOpaquePublicGroupState) import Wire.API.MLS.Serialisation data MLS @@ -36,19 +33,8 @@ instance Accept MLS where instance {-# OVERLAPPABLE #-} ParseMLS a => MimeUnrender MLS a where mimeUnrender _ = mimeUnrenderMLSWith parseMLS -instance MimeRender MLS OpaquePublicGroupState where - mimeRender _ = LBS.fromStrict . unOpaquePublicGroupState +instance {-# OVERLAPPABLE #-} SerialiseMLS a => MimeRender MLS a where + mimeRender _ = encodeMLS mimeUnrenderMLSWith :: Get a -> LByteString -> Either String a mimeUnrenderMLSWith p = first T.unpack . decodeMLSWith p - -data CommitBundleMimeType - -instance Accept CommitBundleMimeType where - contentType _ = "application" // "x-protobuf" - -instance MimeUnrender CommitBundleMimeType CommitBundle where - mimeUnrender _ = first T.unpack . deserializeCommitBundle . LBS.toStrict - -instance MimeRender CommitBundleMimeType CommitBundle where - mimeRender _ = LBS.fromStrict . 
serializeCommitBundle diff --git a/libs/wire-api/src/Wire/API/Routes/Public/Galley/Conversation.hs b/libs/wire-api/src/Wire/API/Routes/Public/Galley/Conversation.hs index 91503dcf06..080a5f7b78 100644 --- a/libs/wire-api/src/Wire/API/Routes/Public/Galley/Conversation.hs +++ b/libs/wire-api/src/Wire/API/Routes/Public/Galley/Conversation.hs @@ -33,7 +33,7 @@ import Wire.API.Conversation.Typing import Wire.API.Error import Wire.API.Error.Galley import Wire.API.Event.Conversation -import Wire.API.MLS.PublicGroupState +import Wire.API.MLS.GroupInfo import Wire.API.MLS.Servant import Wire.API.MLS.SubConversation import Wire.API.MakesFederatedCall @@ -213,7 +213,7 @@ type ConversationAPI = ( Respond 200 "The group information" - OpaquePublicGroupState + GroupInfoData ) ) :<|> Named @@ -548,7 +548,7 @@ type ConversationAPI = ( Respond 200 "The group information" - OpaquePublicGroupState + GroupInfoData ) ) -- This endpoint can lead to the following events being sent: diff --git a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs index 2c953d695c..6c0c09b337 100644 --- a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs +++ b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs @@ -164,7 +164,7 @@ type MLSMessagingAPI = :> ZLocalUser :> ZClient :> ZConn - :> ReqBody '[CommitBundleMimeType] CommitBundle + :> ReqBody '[MLS] (RawMLS CommitBundle) :> MultiVerb1 'POST '[JSON] (Respond 201 "Commit accepted and forwarded" MLSMessageSendingStatus) ) :<|> Named diff --git a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs index 9a755bcf29..ffcbb33ec5 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs @@ -45,7 +45,6 @@ import Wire.API.MLS.KeyPackage import Wire.API.MLS.Message import Wire.API.MLS.Proposal import Wire.API.MLS.ProtocolVersion -import Wire.API.MLS.PublicGroupState import Wire.API.MLS.Serialisation tests 
:: TestTree @@ -57,8 +56,7 @@ tests = testCase "parse welcome message" testParseWelcome, testCase "key package ref" testKeyPackageRef, testCase "validate message signature" testVerifyMLSPlainTextWithKey, - testCase "create signed remove proposal" testRemoveProposalMessageSignature, - testCase "parse GroupInfoBundle" testParseGroupInfoBundle -- TODO: remove this also + testCase "create signed remove proposal" testRemoveProposalMessageSignature ] testParseKeyPackage :: IO () @@ -98,6 +96,10 @@ testParseApplication = pure () testParseWelcome :: IO () testParseWelcome = pure () +-- TODO +testParseGroupInfo :: IO () +testParseGroupInfo = pure () + testKeyPackageRef :: IO () testKeyPackageRef = do kpData <- BS.readFile "test/resources/key_package1.mls" @@ -163,49 +165,6 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do ) Nothing -testParseGroupInfoBundle :: IO () -testParseGroupInfoBundle = withSystemTempDirectory "mls" $ \tmp -> do - qcid <- do - let c = newClientId 0x3ae58155 - usr <- flip Qualified (Domain "example.com") <$> (Id <$> UUID.nextRandom) - pure (userClientQid usr c) - void . liftIO $ spawn (cli qcid tmp ["init", qcid]) Nothing - - qcid2 <- do - let c = newClientId 0x4ae58157 - usr <- flip Qualified (Domain "example.com") <$> (Id <$> UUID.nextRandom) - pure (userClientQid usr c) - void . 
liftIO $ spawn (cli qcid2 tmp ["init", qcid2]) Nothing - kp :: RawMLS KeyPackage <- liftIO $ decodeMLSError <$> spawn (cli qcid2 tmp ["key-package", "create"]) Nothing - liftIO $ BS.writeFile (tmp qcid2) (rmRaw kp) - - let groupFilename = "group" - let gid = GroupId "abcd" - createGroup tmp qcid groupFilename gid - - void $ - liftIO $ - spawn - ( cli - qcid - tmp - [ "member", - "add", - "--group", - tmp groupFilename, - "--in-place", - tmp qcid2, - "--group-state-out", - tmp "group-info-bundle" - ] - ) - Nothing - - bundleBS <- BS.readFile (tmp "group-info-bundle") - case decodeMLS' @PublicGroupState bundleBS of - Left err -> assertFailure ("Failed parsing PublicGroupState: " <> T.unpack err) - Right _ -> pure () - createGroup :: FilePath -> String -> String -> GroupId -> IO () createGroup tmp store groupName gid = do groupJSON <- diff --git a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs index 894f176d9e..cd4537799a 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs @@ -21,19 +21,15 @@ module Test.Wire.API.Roundtrip.MLS (tests) where import Data.Binary.Put import Imports -import qualified Proto.Mls import qualified Test.Tasty as T import Test.Tasty.QuickCheck import Type.Reflection (typeRep) -import Wire.API.ConverProtoLens -import Wire.API.MLS.CommitBundle import Wire.API.MLS.Credential import Wire.API.MLS.Extension -import Wire.API.MLS.GroupInfoBundle +import Wire.API.MLS.GroupInfo import Wire.API.MLS.KeyPackage import Wire.API.MLS.Message import Wire.API.MLS.Proposal -import Wire.API.MLS.PublicGroupState import Wire.API.MLS.Serialisation import Wire.API.MLS.Welcome @@ -46,13 +42,9 @@ tests = testRoundTrip @RemoveProposalMessage, testRoundTrip @RemoveProposalPayload, testRoundTrip @ExtensionVector, - testRoundTrip @PublicGroupStateTBS, - testRoundTrip @PublicGroupState, + testRoundTrip @GroupInfoData, testRoundTrip 
@Welcome, - testRoundTrip @OpaquePublicGroupState, - testRoundTrip @VarInt, - testConvertProtoRoundTrip @Proto.Mls.GroupInfoBundle @GroupInfoBundle, - testConvertProtoRoundTrip @Proto.Mls.CommitBundle @TestCommitBundle + testRoundTrip @VarInt ] testRoundTrip :: @@ -66,24 +58,6 @@ testRoundTrip = testProperty msg trip counterexample (show (runPut (serialiseMLS v))) $ Right v === (decodeMLS . runPut . serialiseMLS) v -testConvertProtoRoundTrip :: - forall p a. - ( Arbitrary a, - Typeable a, - Show a, - Show p, - Eq a, - ConvertProtoLens p a - ) => - T.TestTree -testConvertProtoRoundTrip = testProperty (show (typeRep @a)) trip - where - trip (v :: a) = - counterexample (show (toProtolens @p @a v)) $ - Right v === do - let pa = toProtolens @p @a v - fromProtolens @p @a pa - -------------------------------------------------------------------------------- -- auxiliary types @@ -133,7 +107,7 @@ instance --- -newtype RemoveProposalMessage = RemoveProposalMessage {unRemoveProposalMessage :: Message} +newtype RemoveProposalMessage = RemoveProposalMessage Message deriving newtype (ParseMLS, SerialiseMLS, Eq, Show) instance Arbitrary RemoveProposalMessage where @@ -175,24 +149,3 @@ instance ParseMLS ExtensionVector where instance SerialiseMLS ExtensionVector where serialiseMLS (ExtensionVector exts) = do serialiseMLSVector @VarInt serialiseMLS exts - ---- - -newtype TestCommitBundle = TestCommitBundle {unTestCommitBundle :: CommitBundle} - deriving (Show, Eq) - --- | The commit bundle should contain a commit message, not a remove proposal --- message. However defining MLS serialization for Commits and all nested types --- seems overkill to test the commit bundle roundtrip -instance Arbitrary TestCommitBundle where - arbitrary = do - bundle <- - CommitBundle - <$> (mkRawMLS . 
unRemoveProposalMessage <$> arbitrary) - <*> oneof [Just <$> (mkRawMLS <$> arbitrary), pure Nothing] - <*> arbitrary - pure (TestCommitBundle bundle) - -instance ConvertProtoLens Proto.Mls.CommitBundle TestCommitBundle where - fromProtolens = fmap TestCommitBundle . fromProtolens @Proto.Mls.CommitBundle @CommitBundle - toProtolens = toProtolens . unTestCommitBundle diff --git a/libs/wire-api/wire-api.cabal b/libs/wire-api/wire-api.cabal index 2e0130c20c..e1f68f85c0 100644 --- a/libs/wire-api/wire-api.cabal +++ b/libs/wire-api/wire-api.cabal @@ -105,7 +105,7 @@ library Wire.API.MLS.Epoch Wire.API.MLS.Extension Wire.API.MLS.Group - Wire.API.MLS.GroupInfoBundle + Wire.API.MLS.GroupInfo Wire.API.MLS.HPKEPublicKey Wire.API.MLS.KeyPackage Wire.API.MLS.Keys @@ -115,7 +115,6 @@ library Wire.API.MLS.Proposal Wire.API.MLS.ProposalTag Wire.API.MLS.ProtocolVersion - Wire.API.MLS.PublicGroupState Wire.API.MLS.Serialisation Wire.API.MLS.Servant Wire.API.MLS.SubConversation diff --git a/services/brig/test/integration/Federation/Util.hs b/services/brig/test/integration/Federation/Util.hs index 8d56f4f4b0..e510aa50eb 100644 --- a/services/brig/test/integration/Federation/Util.hs +++ b/services/brig/test/integration/Federation/Util.hs @@ -67,7 +67,6 @@ import Wire.API.Conversation (Conversation (cnvMembers)) import Wire.API.Conversation.Member (OtherMember (OtherMember), cmOthers) import Wire.API.Conversation.Role (roleNameWireAdmin) import Wire.API.MLS.CommitBundle -import Wire.API.MLS.GroupInfoBundle import Wire.API.MLS.Serialisation import Wire.API.Team.Feature (FeatureStatus (..)) import Wire.API.User @@ -123,8 +122,7 @@ sendCommitBundle tmp subGroupStateFn galley uid cid commit = do subGroupStateRaw <- liftIO $ BS.readFile $ tmp subGroupStateFn subGroupState <- either (liftIO . assertFailure . T.unpack) pure . decodeMLS' $ subGroupStateRaw subCommit <- either (liftIO . assertFailure . T.unpack) pure . 
decodeMLS' $ commit - let subGroupBundle = CommitBundle subCommit Nothing (GroupInfoBundle UnencryptedGroupInfo TreeFull subGroupState) - let subGroupBundleRaw = serializeCommitBundle subGroupBundle + let subGroupBundle = CommitBundle subCommit Nothing subGroupState post ( galley . paths @@ -133,7 +131,7 @@ sendCommitBundle tmp subGroupStateFn galley uid cid commit = do . zClient cid . zConn "conn" . header "Z-Type" "access" - . content "application/x-protobuf" - . bytes subGroupBundleRaw + . content "message/mls" + . lbytes (encodeMLS subGroupBundle) ) !!! const 201 === statusCode diff --git a/services/galley/src/Galley/API/Federation.hs b/services/galley/src/Galley/API/Federation.hs index c45c7f646b..2195db6a15 100644 --- a/services/galley/src/Galley/API/Federation.hs +++ b/services/galley/src/Galley/API/Federation.hs @@ -93,9 +93,8 @@ import Wire.API.Federation.API.Common (EmptyResponse (..)) import Wire.API.Federation.API.Galley import qualified Wire.API.Federation.API.Galley as F import Wire.API.Federation.Error -import Wire.API.MLS.CommitBundle import Wire.API.MLS.Credential -import Wire.API.MLS.PublicGroupState +import Wire.API.MLS.GroupInfo import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation import Wire.API.MLS.Welcome @@ -666,7 +665,10 @@ sendMLSCommitBundle remoteDomain msr = assertMLSEnabled loc <- qualifyLocal () let sender = toRemoteUnsafe remoteDomain (F.mmsrSender msr) - bundle <- either (throw . mlsProtocolError) pure $ deserializeCommitBundle (fromBase64ByteString (F.mmsrRawMessage msr)) + bundle <- + either (throw . 
mlsProtocolError) pure $ + decodeMLS' (fromBase64ByteString (F.mmsrRawMessage msr)) + ibundle <- noteS @'MLSUnsupportedMessage $ mkIncomingBundle bundle qConvOrSub <- E.lookupConvByGroupId ibundle.groupId >>= noteS @'ConvNotFound when (qUnqualified qConvOrSub /= F.mmsrConvOrSubId msr) $ throwS @'MLSGroupConversationMismatch @@ -827,7 +829,7 @@ queryGroupInfo origDomain req = getSubConversationGroupInfoFromLocalConv (tUntagged sender) subConvId lconvId pure . Base64ByteString - . unOpaquePublicGroupState + . unGroupInfoData $ state updateTypingIndicator :: diff --git a/services/galley/src/Galley/API/MLS/GroupInfo.hs b/services/galley/src/Galley/API/MLS/GroupInfo.hs index 34fed731c0..dfbe65f3c0 100644 --- a/services/galley/src/Galley/API/MLS/GroupInfo.hs +++ b/services/galley/src/Galley/API/MLS/GroupInfo.hs @@ -36,7 +36,7 @@ import Wire.API.Error.Galley import Wire.API.Federation.API import Wire.API.Federation.API.Galley import Wire.API.Federation.Error -import Wire.API.MLS.PublicGroupState +import Wire.API.MLS.GroupInfo import Wire.API.MLS.SubConversation type MLSGroupInfoStaticErrors = @@ -55,7 +55,7 @@ getGroupInfo :: Members MLSGroupInfoStaticErrors r => Local UserId -> Qualified ConvId -> - Sem r OpaquePublicGroupState + Sem r GroupInfoData getGroupInfo lusr qcnvId = do assertMLSEnabled foldQualified @@ -71,10 +71,10 @@ getGroupInfoFromLocalConv :: Members MLSGroupInfoStaticErrors r => Qualified UserId -> Local ConvId -> - Sem r OpaquePublicGroupState + Sem r GroupInfoData getGroupInfoFromLocalConv qusr lcnvId = do void $ getLocalConvForUser qusr lcnvId - E.getPublicGroupState (tUnqualified lcnvId) + E.getGroupInfo (tUnqualified lcnvId) >>= noteS @'MLSMissingGroupInfo getGroupInfoFromRemoteConv :: @@ -84,7 +84,7 @@ getGroupInfoFromRemoteConv :: Members MLSGroupInfoStaticErrors r => Local UserId -> Remote ConvOrSubConvId -> - Sem r OpaquePublicGroupState + Sem r GroupInfoData getGroupInfoFromRemoteConv lusr rcnv = do let getRequest = GetGroupInfoRequest @@ -96,6 
+96,6 @@ getGroupInfoFromRemoteConv lusr rcnv = do GetGroupInfoResponseError e -> rethrowErrors @MLSGroupInfoStaticErrors e GetGroupInfoResponseState s -> pure - . OpaquePublicGroupState + . GroupInfoData . fromBase64ByteString $ s diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 5044803939..0446226772 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -87,12 +87,11 @@ import Wire.API.MLS.CipherSuite import Wire.API.MLS.Commit import Wire.API.MLS.CommitBundle import Wire.API.MLS.Credential -import Wire.API.MLS.GroupInfoBundle +import Wire.API.MLS.GroupInfo import Wire.API.MLS.KeyPackage import Wire.API.MLS.Message import Wire.API.MLS.Proposal import qualified Wire.API.MLS.Proposal as Proposal -import Wire.API.MLS.PublicGroupState import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation import Wire.API.MLS.Welcome @@ -107,8 +106,11 @@ import Wire.API.User.Client -- [ ] remove all key package ref mapping -- [ ] initialise index maps -- [ ] newtype for leaf node indices --- [ ] compute new indices for add proposals +-- [x] compute new indices for add proposals -- [ ] remove prefixes from rmValue and rmRaw +-- [x] remove PublicGroupState and GroupInfoBundle modules +-- [ ] remove protobuf definitions of CommitBundle +-- [ ] (?) 
rename public_group_state field in conversation table data IncomingMessage = IncomingMessage { epoch :: Epoch, @@ -141,7 +143,7 @@ data IncomingBundle = IncomingBundle commit :: RawMLS Commit, rawMessage :: RawMLS Message, welcome :: Maybe (RawMLS Welcome), - groupInfoBundle :: GroupInfoBundle, + groupInfo :: GroupInfoData, serialized :: ByteString } @@ -173,9 +175,9 @@ mkIncomingMessage msg = case msg.rmValue.content of } _ -> Nothing -mkIncomingBundle :: CommitBundle -> Maybe IncomingBundle +mkIncomingBundle :: RawMLS CommitBundle -> Maybe IncomingBundle mkIncomingBundle bundle = do - imsg <- mkIncomingMessage bundle.cbCommitMsg + imsg <- mkIncomingMessage bundle.rmValue.cbCommitMsg content <- case imsg.content of IncomingMessageContentPublic c -> pure c _ -> Nothing @@ -188,10 +190,10 @@ mkIncomingBundle bundle = do groupId = imsg.groupId, sender = content.sender, commit = commit, - rawMessage = bundle.cbCommitMsg, - welcome = bundle.cbWelcome, - groupInfoBundle = bundle.cbGroupInfoBundle, - serialized = serializeCommitBundle bundle + rawMessage = bundle.rmValue.cbCommitMsg, + welcome = bundle.rmValue.cbWelcome, + groupInfo = GroupInfoData bundle.rmValue.cbGroupInfo.rmRaw, + serialized = bundle.rmRaw } type MLSMessageStaticErrors = @@ -318,7 +320,7 @@ postMLSCommitBundleFromLocalUser :: Local UserId -> ClientId -> ConnId -> - CommitBundle -> + RawMLS CommitBundle -> Sem r MLSMessageSendingStatus postMLSCommitBundleFromLocalUser lusr c conn bundle = do assertMLSEnabled @@ -363,7 +365,7 @@ postMLSCommitBundleToLocalConv qusr c conn bundle lConvOrSubId = do action bundle.sender bundle.commit.rmValue - storeGroupInfoBundle (idForConvOrSub . tUnqualified $ lConvOrSub) bundle.groupInfoBundle + storeGroupInfo (idForConvOrSub . 
tUnqualified $ lConvOrSub) bundle.groupInfo let cm = membersConvOrSub (tUnqualified lConvOrSub) unreachables <- propagateMessage qusr lConvOrSub conn bundle.commit.rmRaw cm @@ -1275,18 +1277,16 @@ instance where handleMLSProposalFailure = mapError (MLSProposalFailure . toWai) -storeGroupInfoBundle :: +storeGroupInfo :: ( Member ConversationStore r, Member SubConversationStore r ) => ConvOrSubConvId -> - GroupInfoBundle -> + GroupInfoData -> Sem r () -storeGroupInfoBundle convOrSub bundle = do - let gs = toOpaquePublicGroupState (gipGroupState bundle) - case convOrSub of - Conv cid -> setPublicGroupState cid gs - SubConv cid subconvid -> setSubConversationPublicGroupState cid subconvid (Just gs) +storeGroupInfo convOrSub ginfo = case convOrSub of + Conv cid -> setGroupInfo cid ginfo + SubConv cid subconvid -> setSubConversationGroupInfo cid subconvid (Just ginfo) fetchConvOrSub :: forall r. diff --git a/services/galley/src/Galley/API/MLS/SubConversation.hs b/services/galley/src/Galley/API/MLS/SubConversation.hs index 59b35b260a..fbfc8acaa0 100644 --- a/services/galley/src/Galley/API/MLS/SubConversation.hs +++ b/services/galley/src/Galley/API/MLS/SubConversation.hs @@ -69,7 +69,7 @@ import Wire.API.Federation.API import Wire.API.Federation.API.Galley import Wire.API.Federation.Error import Wire.API.MLS.Credential -import Wire.API.MLS.PublicGroupState +import Wire.API.MLS.GroupInfo import Wire.API.MLS.SubConversation type MLSGetSubConvStaticErrors = @@ -193,7 +193,7 @@ getSubConversationGroupInfo :: Local UserId -> Qualified ConvId -> SubConvId -> - Sem r OpaquePublicGroupState + Sem r GroupInfoData getSubConversationGroupInfo lusr qcnvId subconv = do assertMLSEnabled foldQualified @@ -213,10 +213,10 @@ getSubConversationGroupInfoFromLocalConv :: Qualified UserId -> SubConvId -> Local ConvId -> - Sem r OpaquePublicGroupState + Sem r GroupInfoData getSubConversationGroupInfoFromLocalConv qusr subConvId lcnvId = do void $ getLocalConvForUser qusr lcnvId - 
Eff.getSubConversationPublicGroupState (tUnqualified lcnvId) subConvId + Eff.getSubConversationGroupInfo (tUnqualified lcnvId) subConvId >>= noteS @'MLSMissingGroupInfo type MLSDeleteSubConvStaticErrors = diff --git a/services/galley/src/Galley/Cassandra/Conversation.hs b/services/galley/src/Galley/Cassandra/Conversation.hs index 520cd3ea9c..df4667fd9f 100644 --- a/services/galley/src/Galley/Cassandra/Conversation.hs +++ b/services/galley/src/Galley/Cassandra/Conversation.hs @@ -58,7 +58,7 @@ import Wire.API.Conversation hiding (Conversation, Member) import Wire.API.Conversation.Protocol import Wire.API.MLS.CipherSuite import Wire.API.MLS.Group -import Wire.API.MLS.PublicGroupState +import Wire.API.MLS.GroupInfo import Wire.API.MLS.SubConversation createMLSSelfConversation :: @@ -199,16 +199,15 @@ conversationMeta conv = accessRoles = maybeRole t $ parseAccessRoles r mbAccessRolesV2 pure $ ConversationMetadata t c (defAccess t a) accessRoles n i mt rm -getPublicGroupState :: ConvId -> Client (Maybe OpaquePublicGroupState) -getPublicGroupState cid = do - fmap join $ - runIdentity - <$$> retry - x1 - ( query1 - Cql.selectPublicGroupState - (params LocalQuorum (Identity cid)) - ) +getGroupInfo :: ConvId -> Client (Maybe GroupInfoData) +getGroupInfo cid = do + runIdentity + <$$> retry + x1 + ( query1 + Cql.selectGroupInfo + (params LocalQuorum (Identity cid)) + ) isConvAlive :: ConvId -> Client Bool isConvAlive cid = do @@ -241,9 +240,9 @@ updateConvMessageTimer cid mtimer = retry x5 $ write Cql.updateConvMessageTimer updateConvEpoch :: ConvId -> Epoch -> Client () updateConvEpoch cid epoch = retry x5 $ write Cql.updateConvEpoch (params LocalQuorum (epoch, cid)) -setPublicGroupState :: ConvId -> OpaquePublicGroupState -> Client () -setPublicGroupState conv gib = - write Cql.updatePublicGroupState (params LocalQuorum (gib, conv)) +setGroupInfo :: ConvId -> GroupInfoData -> Client () +setGroupInfo conv gid = + write Cql.updateGroupInfo (params LocalQuorum (gid, conv)) 
getConversation :: ConvId -> Client (Maybe Conversation) getConversation conv = do @@ -463,7 +462,7 @@ interpretConversationStoreToCassandra = interpret $ \case LookupConvByGroupId gId -> embedClient $ lookupConvByGroupId gId GetConversations cids -> localConversations cids GetConversationMetadata cid -> embedClient $ conversationMeta cid - GetPublicGroupState cid -> embedClient $ getPublicGroupState cid + GetGroupInfo cid -> embedClient $ getGroupInfo cid IsConversationAlive cid -> embedClient $ isConvAlive cid SelectConversations uid cids -> embedClient $ localConversationIdsOf uid cids GetRemoteConversationStatus uid cids -> embedClient $ remoteConversationStatus uid cids @@ -476,7 +475,7 @@ interpretConversationStoreToCassandra = interpret $ \case DeleteConversation cid -> embedClient $ deleteConversation cid SetGroupIdForConversation gId cid -> embedClient $ setGroupIdForConversation gId cid DeleteGroupIdForConversation gId -> embedClient $ deleteGroupIdForConversation gId - SetPublicGroupState cid gib -> embedClient $ setPublicGroupState cid gib + SetGroupInfo cid gib -> embedClient $ setGroupInfo cid gib AcquireCommitLock gId epoch ttl -> embedClient $ acquireCommitLock gId epoch ttl ReleaseCommitLock gId epoch -> embedClient $ releaseCommitLock gId epoch DeleteGroupIds gIds -> deleteGroupIds gIds diff --git a/services/galley/src/Galley/Cassandra/Instances.hs b/services/galley/src/Galley/Cassandra/Instances.hs index 4610857013..b315912134 100644 --- a/services/galley/src/Galley/Cassandra/Instances.hs +++ b/services/galley/src/Galley/Cassandra/Instances.hs @@ -37,8 +37,8 @@ import Wire.API.Asset (AssetKey, assetKeyToText) import Wire.API.Conversation import Wire.API.Conversation.Protocol import Wire.API.MLS.CipherSuite +import Wire.API.MLS.GroupInfo import Wire.API.MLS.Proposal -import Wire.API.MLS.PublicGroupState import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation import Wire.API.Routes.Internal.Galley.TeamsIntra @@ -201,12 +201,12 @@ 
instance Cql GroupId where fromCql (CqlBlob b) = Right . GroupId . LBS.toStrict $ b fromCql _ = Left "group_id: blob expected" -instance Cql OpaquePublicGroupState where +instance Cql GroupInfoData where ctype = Tagged BlobColumn - toCql = CqlBlob . LBS.fromStrict . unOpaquePublicGroupState - fromCql (CqlBlob b) = Right $ OpaquePublicGroupState (LBS.toStrict b) - fromCql _ = Left "OpaquePublicGroupState: blob expected" + toCql = CqlBlob . LBS.fromStrict . unGroupInfoData + fromCql (CqlBlob b) = Right $ GroupInfoData (LBS.toStrict b) + fromCql _ = Left "GroupInfoData: blob expected" instance Cql Icon where ctype = Tagged TextColumn diff --git a/services/galley/src/Galley/Cassandra/Queries.hs b/services/galley/src/Galley/Cassandra/Queries.hs index 5c7fe09a00..d05e2f41ca 100644 --- a/services/galley/src/Galley/Cassandra/Queries.hs +++ b/services/galley/src/Galley/Cassandra/Queries.hs @@ -34,7 +34,7 @@ import Wire.API.Conversation.Code import Wire.API.Conversation.Protocol import Wire.API.Conversation.Role import Wire.API.MLS.CipherSuite -import Wire.API.MLS.PublicGroupState +import Wire.API.MLS.GroupInfo import Wire.API.MLS.SubConversation import Wire.API.Password (Password) import Wire.API.Provider @@ -284,11 +284,11 @@ deleteConv = "delete from conversation using timestamp 32503680000000000 where c markConvDeleted :: PrepQuery W (Identity ConvId) () markConvDeleted = "update conversation set deleted = true where conv = ?" -selectPublicGroupState :: PrepQuery R (Identity ConvId) (Identity (Maybe OpaquePublicGroupState)) -selectPublicGroupState = "select public_group_state from conversation where conv = ?" +selectGroupInfo :: PrepQuery R (Identity ConvId) (Identity GroupInfoData) +selectGroupInfo = "select public_group_state from conversation where conv = ?" -updatePublicGroupState :: PrepQuery W (OpaquePublicGroupState, ConvId) () -updatePublicGroupState = "update conversation set public_group_state = ? where conv = ?" 
+updateGroupInfo :: PrepQuery W (GroupInfoData, ConvId) () +updateGroupInfo = "update conversation set public_group_state = ? where conv = ?" -- Conversations accessible by code ----------------------------------------- @@ -331,14 +331,14 @@ lookupGroupId = "SELECT conv_id, domain, subconv_id from group_id_conv_id where selectSubConversation :: PrepQuery R (ConvId, SubConvId) (CipherSuiteTag, Epoch, Writetime Epoch, GroupId) selectSubConversation = "SELECT cipher_suite, epoch, WRITETIME(epoch), group_id FROM subconversation WHERE conv_id = ? and subconv_id = ?" -insertSubConversation :: PrepQuery W (ConvId, SubConvId, CipherSuiteTag, Epoch, GroupId, Maybe OpaquePublicGroupState) () +insertSubConversation :: PrepQuery W (ConvId, SubConvId, CipherSuiteTag, Epoch, GroupId, Maybe GroupInfoData) () insertSubConversation = "INSERT INTO subconversation (conv_id, subconv_id, cipher_suite, epoch, group_id, public_group_state) VALUES (?, ?, ?, ?, ?, ?)" -updateSubConvPublicGroupState :: PrepQuery W (ConvId, SubConvId, Maybe OpaquePublicGroupState) () -updateSubConvPublicGroupState = "INSERT INTO subconversation (conv_id, subconv_id, public_group_state) VALUES (?, ?, ?)" +updateSubConvGroupInfo :: PrepQuery W (ConvId, SubConvId, Maybe GroupInfoData) () +updateSubConvGroupInfo = "INSERT INTO subconversation (conv_id, subconv_id, public_group_state) VALUES (?, ?, ?)" -selectSubConvPublicGroupState :: PrepQuery R (ConvId, SubConvId) (Identity (Maybe OpaquePublicGroupState)) -selectSubConvPublicGroupState = "SELECT public_group_state FROM subconversation WHERE conv_id = ? AND subconv_id = ?" +selectSubConvGroupInfo :: PrepQuery R (ConvId, SubConvId) (Identity (Maybe GroupInfoData)) +selectSubConvGroupInfo = "SELECT public_group_state FROM subconversation WHERE conv_id = ? AND subconv_id = ?" deleteGroupId :: PrepQuery W (Identity GroupId) () deleteGroupId = "DELETE FROM group_id_conv_id WHERE group_id = ?" 
diff --git a/services/galley/src/Galley/Cassandra/SubConversation.hs b/services/galley/src/Galley/Cassandra/SubConversation.hs index 3dbe6c842c..cf80db6e12 100644 --- a/services/galley/src/Galley/Cassandra/SubConversation.hs +++ b/services/galley/src/Galley/Cassandra/SubConversation.hs @@ -37,7 +37,7 @@ import Polysemy.Input import Wire.API.Conversation.Protocol import Wire.API.MLS.CipherSuite import Wire.API.MLS.Group -import Wire.API.MLS.PublicGroupState +import Wire.API.MLS.GroupInfo import Wire.API.MLS.SubConversation selectSubConversation :: ConvId -> SubConvId -> Client (Maybe SubConversation) @@ -60,17 +60,24 @@ selectSubConversation convId subConvId = do scIndexMap = mempty -- TODO } -insertSubConversation :: ConvId -> SubConvId -> CipherSuiteTag -> Epoch -> GroupId -> Maybe OpaquePublicGroupState -> Client () -insertSubConversation convId subConvId suite epoch groupId mPgs = - retry x5 (write Cql.insertSubConversation (params LocalQuorum (convId, subConvId, suite, epoch, groupId, mPgs))) +insertSubConversation :: + ConvId -> + SubConvId -> + CipherSuiteTag -> + Epoch -> + GroupId -> + Maybe GroupInfoData -> + Client () +insertSubConversation convId subConvId suite epoch groupId mGroupInfo = + retry x5 (write Cql.insertSubConversation (params LocalQuorum (convId, subConvId, suite, epoch, groupId, mGroupInfo))) -updateSubConvPublicGroupState :: ConvId -> SubConvId -> Maybe OpaquePublicGroupState -> Client () -updateSubConvPublicGroupState convId subConvId mPgs = - retry x5 (write Cql.updateSubConvPublicGroupState (params LocalQuorum (convId, subConvId, mPgs))) +updateSubConvGroupInfo :: ConvId -> SubConvId -> Maybe GroupInfoData -> Client () +updateSubConvGroupInfo convId subConvId mGroupInfo = + retry x5 (write Cql.updateSubConvGroupInfo (params LocalQuorum (convId, subConvId, mGroupInfo))) -selectSubConvPublicGroupState :: ConvId -> SubConvId -> Client (Maybe OpaquePublicGroupState) -selectSubConvPublicGroupState convId subConvId = - (runIdentity =<<) <$> 
retry x5 (query1 Cql.selectSubConvPublicGroupState (params LocalQuorum (convId, subConvId))) +selectSubConvGroupInfo :: ConvId -> SubConvId -> Client (Maybe GroupInfoData) +selectSubConvGroupInfo convId subConvId = + (runIdentity =<<) <$> retry x5 (query1 Cql.selectSubConvGroupInfo (params LocalQuorum (convId, subConvId))) setGroupIdForSubConversation :: GroupId -> Qualified ConvId -> SubConvId -> Client () setGroupIdForSubConversation groupId qconv sconv = @@ -108,10 +115,11 @@ interpretSubConversationStoreToCassandra :: Sem (SubConversationStore ': r) a -> Sem r a interpretSubConversationStoreToCassandra = interpret $ \case - CreateSubConversation convId subConvId suite epoch groupId mPgs -> embedClient (insertSubConversation convId subConvId suite epoch groupId mPgs) + CreateSubConversation convId subConvId suite epoch groupId mGroupInfo -> + embedClient (insertSubConversation convId subConvId suite epoch groupId mGroupInfo) GetSubConversation convId subConvId -> embedClient (selectSubConversation convId subConvId) - GetSubConversationPublicGroupState convId subConvId -> embedClient (selectSubConvPublicGroupState convId subConvId) - SetSubConversationPublicGroupState convId subConvId mPgs -> embedClient (updateSubConvPublicGroupState convId subConvId mPgs) + GetSubConversationGroupInfo convId subConvId -> embedClient (selectSubConvGroupInfo convId subConvId) + SetSubConversationGroupInfo convId subConvId mPgs -> embedClient (updateSubConvGroupInfo convId subConvId mPgs) SetGroupIdForSubConversation gId cid sconv -> embedClient $ setGroupIdForSubConversation gId cid sconv SetSubConversationEpoch cid sconv epoch -> embedClient $ setEpochForSubConversation cid sconv epoch DeleteGroupIdForSubConversation groupId -> embedClient $ deleteGroupId groupId diff --git a/services/galley/src/Galley/Effects/ConversationStore.hs b/services/galley/src/Galley/Effects/ConversationStore.hs index 5d9fa1d51c..b305d98842 100644 --- 
a/services/galley/src/Galley/Effects/ConversationStore.hs +++ b/services/galley/src/Galley/Effects/ConversationStore.hs @@ -31,7 +31,7 @@ module Galley.Effects.ConversationStore lookupConvByGroupId, getConversations, getConversationMetadata, - getPublicGroupState, + getGroupInfo, isConversationAlive, getRemoteConversationStatus, selectConversations, @@ -46,7 +46,7 @@ module Galley.Effects.ConversationStore acceptConnectConversation, setGroupIdForConversation, deleteGroupIdForConversation, - setPublicGroupState, + setGroupInfo, deleteGroupIds, updateToMixedProtocol, @@ -72,7 +72,7 @@ import Polysemy import Wire.API.Conversation hiding (Conversation, Member) import Wire.API.MLS.CipherSuite (CipherSuiteTag) import Wire.API.MLS.Epoch -import Wire.API.MLS.PublicGroupState +import Wire.API.MLS.GroupInfo import Wire.API.MLS.SubConversation data ConversationStore m a where @@ -86,9 +86,7 @@ data ConversationStore m a where LookupConvByGroupId :: GroupId -> ConversationStore m (Maybe (Qualified ConvOrSubConvId)) GetConversations :: [ConvId] -> ConversationStore m [Conversation] GetConversationMetadata :: ConvId -> ConversationStore m (Maybe ConversationMetadata) - GetPublicGroupState :: - ConvId -> - ConversationStore m (Maybe OpaquePublicGroupState) + GetGroupInfo :: ConvId -> ConversationStore m (Maybe GroupInfoData) IsConversationAlive :: ConvId -> ConversationStore m Bool GetRemoteConversationStatus :: UserId -> @@ -103,10 +101,7 @@ data ConversationStore m a where SetConversationEpoch :: ConvId -> Epoch -> ConversationStore m () SetGroupIdForConversation :: GroupId -> Qualified ConvId -> ConversationStore m () DeleteGroupIdForConversation :: GroupId -> ConversationStore m () - SetPublicGroupState :: - ConvId -> - OpaquePublicGroupState -> - ConversationStore m () + SetGroupInfo :: ConvId -> GroupInfoData -> ConversationStore m () AcquireCommitLock :: GroupId -> Epoch -> NominalDiffTime -> ConversationStore m LockAcquired ReleaseCommitLock :: GroupId -> Epoch -> 
ConversationStore m () DeleteGroupIds :: [GroupId] -> ConversationStore m () diff --git a/services/galley/src/Galley/Effects/SubConversationStore.hs b/services/galley/src/Galley/Effects/SubConversationStore.hs index 056eec34d8..ed95279539 100644 --- a/services/galley/src/Galley/Effects/SubConversationStore.hs +++ b/services/galley/src/Galley/Effects/SubConversationStore.hs @@ -27,14 +27,14 @@ import Polysemy import Wire.API.Conversation.Protocol import Wire.API.MLS.CipherSuite import Wire.API.MLS.Group -import Wire.API.MLS.PublicGroupState +import Wire.API.MLS.GroupInfo import Wire.API.MLS.SubConversation data SubConversationStore m a where - CreateSubConversation :: ConvId -> SubConvId -> CipherSuiteTag -> Epoch -> GroupId -> Maybe OpaquePublicGroupState -> SubConversationStore m () + CreateSubConversation :: ConvId -> SubConvId -> CipherSuiteTag -> Epoch -> GroupId -> Maybe GroupInfoData -> SubConversationStore m () GetSubConversation :: ConvId -> SubConvId -> SubConversationStore m (Maybe SubConversation) - GetSubConversationPublicGroupState :: ConvId -> SubConvId -> SubConversationStore m (Maybe OpaquePublicGroupState) - SetSubConversationPublicGroupState :: ConvId -> SubConvId -> Maybe OpaquePublicGroupState -> SubConversationStore m () + GetSubConversationGroupInfo :: ConvId -> SubConvId -> SubConversationStore m (Maybe GroupInfoData) + SetSubConversationGroupInfo :: ConvId -> SubConvId -> Maybe GroupInfoData -> SubConversationStore m () SetGroupIdForSubConversation :: GroupId -> Qualified ConvId -> SubConvId -> SubConversationStore m () SetSubConversationEpoch :: ConvId -> SubConvId -> Epoch -> SubConversationStore m () DeleteGroupIdForSubConversation :: GroupId -> SubConversationStore m () diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index f47fed47c9..e488f4bd8c 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -421,8 +421,8 @@ 
testAddUserWithBundle = do (qcnv `elem` map cnvQualifiedId convs) returnedGS <- getGroupInfo alice (fmap Conv qcnv) - liftIO $ assertBool "Commit does not contain a public group State" (isJust (mpPublicGroupState commit)) - liftIO $ mpPublicGroupState commit @?= Just returnedGS + liftIO $ assertBool "Commit does not contain a public group State" (isJust (mpGroupInfo commit)) + liftIO $ mpGroupInfo commit @?= Just returnedGS testAddUserWithBundleIncompleteWelcome :: TestM () testAddUserWithBundleIncompleteWelcome = do @@ -465,7 +465,7 @@ testAddUser = do traverse_ uploadNewKeyPackage [bob1, bob2] (_, qcnv) <- setupMLSGroup alice1 - events <- createAddCommit alice1 [bob] >>= sendAndConsumeCommit + events <- createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle event <- assertOne events liftIO $ assertJoinEvent qcnv alice [bob] roleNameWireMember event pure qcnv @@ -1985,7 +1985,7 @@ testGetGroupInfoOfLocalConv = do void $ sendAndConsumeCommitBundle commit -- check the group info matches - gs <- assertJust (mpPublicGroupState commit) + gs <- assertJust (mpGroupInfo commit) returnedGS <- liftTest $ getGroupInfo alice (fmap Conv qcnv) liftIO $ gs @=? returnedGS @@ -2028,7 +2028,7 @@ testFederatedGetGroupInfo = do [alice1, bob1] <- traverse createMLSClient [alice, bob] (_, qcnv) <- setupMLSGroup alice1 commit <- createAddCommit alice1 [bob] - groupState <- assertJust (mpPublicGroupState commit) + groupState <- assertJust (mpGroupInfo commit) let mock = receiveCommitMock [bob1] <|> welcomeMock void . 
withTempMockFederator' mock $ do @@ -2505,7 +2505,7 @@ testJoinRemoteSubConv = do receiveNewRemoteConv qcs subGroupId -- bob joins subconversation - let pgs = mpPublicGroupState initialCommit + let pgs = mpGroupInfo initialCommit let mock = ("send-mls-commit-bundle" ~> MLSMessageResponseUpdates [] (UnreachableUsers [])) <|> queryGroupStateMock (fold pgs) bob @@ -2985,7 +2985,7 @@ testDeleteRemoteParentOfSubConv = do -- inform backend about the subconversation receiveNewRemoteConv qcs subGroupId - let pgs = mpPublicGroupState initialCommit + let pgs = mpGroupInfo initialCommit let mock = ("send-mls-commit-bundle" ~> MLSMessageResponseUpdates [] (UnreachableUsers [])) <|> queryGroupStateMock (fold pgs) bob @@ -3255,7 +3255,7 @@ testLeaveRemoteSubConv = do -- inform backend about the subconversation receiveNewRemoteConv qcs subGroupId - let pgs = mpPublicGroupState initialCommit + let pgs = mpGroupInfo initialCommit let mock = ("send-mls-commit-bundle" ~> MLSMessageResponseUpdates [] (UnreachableUsers [])) <|> queryGroupStateMock (fold pgs) bob diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index cc7ba23dff..1338502650 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -52,6 +52,7 @@ import Data.Time import qualified Data.Tuple.Extra as Tuple import qualified Data.UUID as UUID import qualified Data.UUID.V4 as UUIDV4 +import Debug.Trace import Galley.Keys import Galley.Options import qualified Galley.Options as Opts @@ -75,7 +76,6 @@ import Wire.API.Federation.API.Galley import Wire.API.MLS.CipherSuite import Wire.API.MLS.CommitBundle import Wire.API.MLS.Credential -import Wire.API.MLS.GroupInfoBundle import Wire.API.MLS.KeyPackage import Wire.API.MLS.Keys import Wire.API.MLS.Message @@ -293,7 +293,7 @@ data MessagePackage = MessagePackage { mpSender :: ClientIdentity, mpMessage :: ByteString, mpWelcome :: Maybe ByteString, - 
mpPublicGroupState :: Maybe ByteString + mpGroupInfo :: Maybe ByteString } deriving (Show) @@ -632,9 +632,9 @@ createExternalCommit qcid mpgs qcs = do mlscli qcid [ "external-commit", - "--group-state-in", + "--group-info-in", "-", - "--group-state-out", + "--group-info-out", pgsFile, "--group-out", "" @@ -654,7 +654,7 @@ createExternalCommit qcid mpgs qcs = do { mpSender = qcid, mpMessage = commit, mpWelcome = Nothing, - mpPublicGroupState = Just newPgs + mpGroupInfo = Just newPgs } createAddProposals :: HasCallStack => ClientIdentity -> [Qualified UserId] -> MLSTest [MessagePackage] @@ -680,7 +680,7 @@ createApplicationMessage cid messageContent = do { mpSender = cid, mpMessage = message, mpWelcome = Nothing, - mpPublicGroupState = Nothing + mpGroupInfo = Nothing } createAddCommitWithKeyPackages :: @@ -722,7 +722,7 @@ createAddCommitWithKeyPackages qcid clientsAndKeyPackages = do { mpSender = qcid, mpMessage = commit, mpWelcome = Just welcome, - mpPublicGroupState = Just pgs + mpGroupInfo = Just pgs } createAddProposalWithKeyPackage :: @@ -740,7 +740,7 @@ createAddProposalWithKeyPackage cid (_, kp) = do { mpSender = cid, mpMessage = prop, mpWelcome = Nothing, - mpPublicGroupState = Nothing + mpGroupInfo = Nothing } createPendingProposalCommit :: HasCallStack => ClientIdentity -> MLSTest MessagePackage @@ -758,7 +758,7 @@ createPendingProposalCommit qcid = do "", "--welcome-out", welcomeFile, - "--group-state-out", + "--group-info-out", pgsFile ] Nothing @@ -770,7 +770,7 @@ createPendingProposalCommit qcid = do { mpSender = qcid, mpMessage = commit, mpWelcome = welcome, - mpPublicGroupState = Just pgs + mpGroupInfo = Just pgs } readWelcome :: FilePath -> IO (Maybe ByteString) @@ -801,7 +801,7 @@ createRemoveCommit cid _targets = do "", "--welcome-out", welcomeFile, - "--group-state-out", + "--group-info-out", pgsFile ] <> map show indices @@ -814,7 +814,7 @@ createRemoveCommit cid _targets = do { mpSender = cid, mpMessage = commit, mpWelcome = welcome, - 
mpPublicGroupState = Just pgs + mpGroupInfo = Just pgs } createExternalAddProposal :: HasCallStack => ClientIdentity -> MLSTest MessagePackage @@ -845,7 +845,7 @@ createExternalAddProposal joiner = do { mpSender = joiner, mpMessage = proposal, mpWelcome = Nothing, - mpPublicGroupState = Nothing + mpGroupInfo = Nothing } consumeWelcome :: HasCallStack => ByteString -> MLSTest () @@ -932,24 +932,24 @@ mkBundle :: MessagePackage -> Either Text CommitBundle mkBundle mp = do commitB <- decodeMLS' (mpMessage mp) welcomeB <- traverse decodeMLS' (mpWelcome mp) - pgs <- note "public group state unavailable" (mpPublicGroupState mp) - pgsB <- decodeMLS' pgs - pure $ - CommitBundle commitB welcomeB $ - GroupInfoBundle UnencryptedGroupInfo TreeFull pgsB + ginfo <- note "group info unavailable" (mpGroupInfo mp) + ginfoB <- decodeMLS' ginfo + pure $ CommitBundle commitB welcomeB ginfoB createBundle :: MonadIO m => MessagePackage -> m ByteString createBundle mp = do bundle <- either (liftIO . assertFailure . T.unpack) pure $ mkBundle mp - pure (serializeCommitBundle bundle) + pure (encodeMLS' bundle) sendAndConsumeCommitBundle :: HasCallStack => MessagePackage -> MLSTest [Event] sendAndConsumeCommitBundle mp = do + traverse_ (traceM . ("welcome: " <>) . show . hex) $ mpWelcome mp + traverse_ (traceM . ("groupState: " <>) . show . 
hex) $ mpGroupInfo mp qcs <- getConvId bundle <- createBundle mp events <- liftTest $ postCommitBundle (mpSender mp) qcs bundle From 9cef1fad17995b5108e3c21d28c6c2db134cee35 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Wed, 5 Apr 2023 17:22:06 +0200 Subject: [PATCH 10/75] Add instances for roundtrip tests of MLS types --- .../wire-api/src/Wire/API/MLS/Capabilities.hs | 8 +++ libs/wire-api/src/Wire/API/MLS/CipherSuite.hs | 12 +++- libs/wire-api/src/Wire/API/MLS/Commit.hs | 28 ++++++-- .../wire-api/src/Wire/API/MLS/CommitBundle.hs | 8 +-- libs/wire-api/src/Wire/API/MLS/Context.hs | 13 +--- libs/wire-api/src/Wire/API/MLS/Credential.hs | 8 +++ libs/wire-api/src/Wire/API/MLS/GroupInfo.hs | 22 +++++++ .../src/Wire/API/MLS/HPKEPublicKey.hs | 3 + libs/wire-api/src/Wire/API/MLS/KeyPackage.hs | 17 ++++- libs/wire-api/src/Wire/API/MLS/LeafNode.hs | 30 ++++++++- libs/wire-api/src/Wire/API/MLS/Lifetime.hs | 7 +- libs/wire-api/src/Wire/API/MLS/Message.hs | 37 ++++++----- libs/wire-api/src/Wire/API/MLS/Proposal.hs | 62 ++++++++++++++++-- .../src/Wire/API/MLS/Serialisation.hs | 9 ++- libs/wire-api/test/resources/key_package1.mls | Bin 262 -> 0 bytes libs/wire-api/test/unit/Test/Wire/API/MLS.hs | 34 +++++----- .../test/unit/Test/Wire/API/Roundtrip/MLS.hs | 51 ++++++++++++-- .../galley/test/integration/API/MLS/Util.hs | 9 +-- 18 files changed, 284 insertions(+), 74 deletions(-) delete mode 100644 libs/wire-api/test/resources/key_package1.mls diff --git a/libs/wire-api/src/Wire/API/MLS/Capabilities.hs b/libs/wire-api/src/Wire/API/MLS/Capabilities.hs index 1647b6d092..bfbb07cc2b 100644 --- a/libs/wire-api/src/Wire/API/MLS/Capabilities.hs +++ b/libs/wire-api/src/Wire/API/MLS/Capabilities.hs @@ -44,3 +44,11 @@ instance ParseMLS Capabilities where <*> parseMLSVector @VarInt parseMLS <*> parseMLSVector @VarInt parseMLS <*> parseMLSVector @VarInt parseMLS + +instance SerialiseMLS Capabilities where + serialiseMLS caps = do + serialiseMLSVector @VarInt serialiseMLS caps.versions + 
serialiseMLSVector @VarInt serialiseMLS caps.ciphersuites + serialiseMLSVector @VarInt serialiseMLS caps.extensions + serialiseMLSVector @VarInt serialiseMLS caps.proposals + serialiseMLSVector @VarInt serialiseMLS caps.credentials diff --git a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs index bc4bd8f3f6..483a113426 100644 --- a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs +++ b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs @@ -43,12 +43,13 @@ import Cassandra.CQL import Control.Error (note) import Control.Lens ((?~)) import Crypto.Error +import Crypto.Hash (hashWith) import Crypto.Hash.Algorithms -import qualified Crypto.KDF.HKDF as HKDF import qualified Crypto.PubKey.Ed25519 as Ed25519 import qualified Data.Aeson as Aeson import Data.Aeson.Types (FromJSON (..), FromJSONKey (..), ToJSON (..), ToJSONKey (..)) import qualified Data.Aeson.Types as Aeson +import Data.ByteArray hiding (index) import Data.Proxy import Data.Schema import qualified Data.Swagger as S @@ -104,9 +105,9 @@ cipherSuiteTag (CipherSuite n) = case n of tagCipherSuite :: CipherSuiteTag -> CipherSuite tagCipherSuite MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 = CipherSuite 1 -csHash :: CipherSuiteTag -> ByteString -> ByteString -> ByteString +csHash :: CipherSuiteTag -> ByteString -> RawMLS a -> ByteString csHash MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 ctx value = - HKDF.expand (HKDF.extract @SHA256 (mempty :: ByteString) value) ctx 16 + convert . hashWith SHA256 . 
encodeMLS' $ RefHashInput ctx value csVerifySignature :: CipherSuiteTag -> ByteString -> RawMLS a -> ByteString -> Bool csVerifySignature MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 pub x sig = @@ -115,6 +116,11 @@ csVerifySignature MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 pub x sig = sig' <- Ed25519.signature sig pure $ Ed25519.verify pub' x.rmRaw sig' +type RefHashInput = SignContent + +pattern RefHashInput :: ByteString -> RawMLS a -> RefHashInput a +pattern RefHashInput label content = SignContent label content + data SignContent a = SignContent { sigLabel :: ByteString, content :: RawMLS a diff --git a/libs/wire-api/src/Wire/API/MLS/Commit.hs b/libs/wire-api/src/Wire/API/MLS/Commit.hs index 1b0b788f30..b130a6036c 100644 --- a/libs/wire-api/src/Wire/API/MLS/Commit.hs +++ b/libs/wire-api/src/Wire/API/MLS/Commit.hs @@ -27,32 +27,50 @@ data Commit = Commit { cProposals :: [ProposalOrRef], cPath :: Maybe UpdatePath } - deriving (Eq, Show) + deriving (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform Commit) instance ParseMLS Commit where parseMLS = Commit - <$> traceMLS "proposals" (parseMLSVector @VarInt parseMLS) - <*> traceMLS "update path" (parseMLSOptional parseMLS) + <$> parseMLSVector @VarInt parseMLS + <*> parseMLSOptional parseMLS + +instance SerialiseMLS Commit where + serialiseMLS c = do + serialiseMLSVector @VarInt serialiseMLS c.cProposals + serialiseMLSOptional serialiseMLS c.cPath data UpdatePath = UpdatePath { upLeaf :: RawMLS LeafNode, upNodes :: [UpdatePathNode] } - deriving (Eq, Show) + deriving (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform UpdatePath) instance ParseMLS UpdatePath where parseMLS = UpdatePath <$> parseMLS <*> parseMLSVector @VarInt parseMLS +instance SerialiseMLS UpdatePath where + serialiseMLS up = do + serialiseMLS up.upLeaf + serialiseMLSVector @VarInt serialiseMLS up.upNodes + data UpdatePathNode = UpdatePathNode { upnPublicKey :: ByteString, upnSecret :: [HPKECiphertext] } - deriving (Eq, 
Show) + deriving (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform UpdatePathNode) instance ParseMLS UpdatePathNode where parseMLS = UpdatePathNode <$> parseMLSBytes @VarInt <*> parseMLSVector @VarInt parseMLS +instance SerialiseMLS UpdatePathNode where + serialiseMLS upn = do + serialiseMLSBytes @VarInt upn.upnPublicKey + serialiseMLSVector @VarInt serialiseMLS upn.upnSecret + data HPKECiphertext = HPKECiphertext { hcOutput :: ByteString, hcCiphertext :: ByteString diff --git a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs index 0930b6a699..7dd8653116 100644 --- a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs +++ b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs @@ -31,7 +31,7 @@ data CommitBundle = CommitBundle cbWelcome :: Maybe (RawMLS Welcome), cbGroupInfo :: RawMLS GroupInfo } - deriving (Eq, Show) + deriving stock (Eq, Show, Generic) data CommitBundleF f = CommitBundleF { cbCommitMsg :: f (RawMLS Message), @@ -72,7 +72,7 @@ findMessageInStream msg = case msg.rmValue.content of FramedContentCommit _ -> pure (CommitBundleF (pure msg) empty empty) _ -> Left "unexpected public message" MessageWelcome w -> pure (CommitBundleF empty (pure w) empty) - MessageGroupInfo -> error "TODO: get group info from message" + MessageGroupInfo gi -> pure (CommitBundleF empty empty (pure gi)) _ -> Left "unexpected message type" findMessagesInStream :: Alternative f => [RawMLS Message] -> Either Text (CommitBundleF f) @@ -87,8 +87,8 @@ instance ParseMLS CommitBundle where instance SerialiseMLS CommitBundle where serialiseMLS cb = do serialiseMLS cb.cbCommitMsg - traverse_ serialiseMLS cb.cbWelcome - serialiseMLS cb.cbGroupInfo + traverse_ (serialiseMLS . mkMessage . 
MessageWelcome) cb.cbWelcome + serialiseMLS $ mkMessage (MessageGroupInfo cb.cbGroupInfo) instance S.ToSchema CommitBundle where declareNamedSchema _ = pure (mlsSwagger "CommitBundle") diff --git a/libs/wire-api/src/Wire/API/MLS/Context.hs b/libs/wire-api/src/Wire/API/MLS/Context.hs index 661b7ce632..4324b61d7a 100644 --- a/libs/wire-api/src/Wire/API/MLS/Context.hs +++ b/libs/wire-api/src/Wire/API/MLS/Context.hs @@ -19,15 +19,6 @@ module Wire.API.MLS.Context where import Imports --- Warning: the "context" string here is different from the one mandated by --- the spec, but it is the one that happens to be used by openmls. Until --- openmls is patched and we switch to a fixed version, we will have to use --- the "wrong" string here as well. --- --- This is used when invoking 'csHash'. -context :: ByteString -context = "MLS 1.0 ref" - proposalContext, keyPackageContext :: ByteString -proposalContext = context -keyPackageContext = context +proposalContext = "MLS 1.0 Proposal Reference" +keyPackageContext = "MLS 1.0 KeyPackage Reference" diff --git a/libs/wire-api/src/Wire/API/MLS/Credential.hs b/libs/wire-api/src/Wire/API/MLS/Credential.hs index 5eea497d54..12a3da5b6d 100644 --- a/libs/wire-api/src/Wire/API/MLS/Credential.hs +++ b/libs/wire-api/src/Wire/API/MLS/Credential.hs @@ -59,6 +59,9 @@ data CredentialTag where instance ParseMLS CredentialTag where parseMLS = parseMLSEnum @Word16 "credential type" +instance SerialiseMLS CredentialTag where + serialiseMLS = serialiseMLSEnum @Word16 + instance ParseMLS Credential where parseMLS = parseMLS >>= \case @@ -66,6 +69,11 @@ instance ParseMLS Credential where BasicCredential <$> parseMLSBytes @VarInt +instance SerialiseMLS Credential where + serialiseMLS (BasicCredential i) = do + serialiseMLS BasicCredentialTag + serialiseMLSBytes @VarInt i + credentialTag :: Credential -> CredentialTag credentialTag BasicCredential {} = BasicCredentialTag diff --git a/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs 
b/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs index 7971fa7b0a..3fe3d0ca5c 100644 --- a/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs +++ b/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs @@ -58,6 +58,16 @@ instance ParseMLS GroupContext where <*> parseMLSBytes @VarInt <*> parseMLSVector @VarInt parseMLS +instance SerialiseMLS GroupContext where + serialiseMLS gc = do + serialiseMLS gc.protocolVersion + serialiseMLS gc.cipherSuite + serialiseMLS gc.groupId + serialiseMLS gc.epoch + serialiseMLSBytes @VarInt gc.treeHash + serialiseMLSBytes @VarInt gc.confirmedTranscriptHash + serialiseMLSVector @VarInt serialiseMLS gc.extensions + data GroupInfoTBS = GroupInfoTBS { groupContext :: GroupContext, extensions :: [Extension], @@ -75,6 +85,13 @@ instance ParseMLS GroupInfoTBS where <*> parseMLSBytes @VarInt <*> parseMLS +instance SerialiseMLS GroupInfoTBS where + serialiseMLS tbs = do + serialiseMLS tbs.groupContext + serialiseMLSVector @VarInt serialiseMLS tbs.extensions + serialiseMLSBytes @VarInt tbs.confirmationTag + serialiseMLS tbs.signer + data GroupInfo = GroupInfo { tbs :: GroupInfoTBS, signature_ :: ByteString @@ -88,6 +105,11 @@ instance ParseMLS GroupInfo where <$> parseMLS <*> parseMLSBytes @VarInt +instance SerialiseMLS GroupInfo where + serialiseMLS gi = do + serialiseMLS gi.tbs + serialiseMLSBytes @VarInt gi.signature_ + instance HasField "groupContext" GroupInfo GroupContext where getField = (.tbs.groupContext) diff --git a/libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs b/libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs index 8531ef3bd6..004ed3443d 100644 --- a/libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs +++ b/libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs @@ -28,3 +28,6 @@ newtype HPKEPublicKey = HPKEPublicKey {unHPKEPublicKey :: ByteString} instance ParseMLS HPKEPublicKey where parseMLS = HPKEPublicKey <$> parseMLSBytes @VarInt + +instance SerialiseMLS HPKEPublicKey where + serialiseMLS = serialiseMLSBytes @VarInt . 
unHPKEPublicKey diff --git a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs index ff9b74f83a..8f36c385a2 100644 --- a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs +++ b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs @@ -149,6 +149,7 @@ kpRef :: CipherSuiteTag -> KeyPackageData -> KeyPackageRef kpRef cs = KeyPackageRef . csHash cs keyPackageContext + . flip RawMLS () . kpData -- | Compute ref of a key package. Return 'Nothing' if the key package cipher @@ -180,11 +181,20 @@ instance ParseMLS KeyPackageTBS where <*> parseMLS <*> parseMLSVector @VarInt parseMLS +instance SerialiseMLS KeyPackageTBS where + serialiseMLS tbs = do + serialiseMLS tbs.protocolVersion + serialiseMLS tbs.cipherSuite + serialiseMLS tbs.initKey + serialiseMLS tbs.leafNode + serialiseMLSVector @VarInt serialiseMLS tbs.extensions + data KeyPackage = KeyPackage { tbs :: RawMLS KeyPackageTBS, signature_ :: ByteString } - deriving stock (Eq, Show) + deriving stock (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform KeyPackage) instance S.ToSchema KeyPackage where declareNamedSchema _ = pure (mlsSwagger "KeyPackage") @@ -221,6 +231,11 @@ instance ParseMLS KeyPackage where <$> parseRawMLS parseMLS <*> parseMLSBytes @VarInt +instance SerialiseMLS KeyPackage where + serialiseMLS kp = do + serialiseMLS kp.tbs + serialiseMLSBytes @VarInt kp.signature_ + -------------------------------------------------------------------------------- data KeyPackageUpdate = KeyPackageUpdate diff --git a/libs/wire-api/src/Wire/API/MLS/LeafNode.hs b/libs/wire-api/src/Wire/API/MLS/LeafNode.hs index dad086966b..702734834f 100644 --- a/libs/wire-api/src/Wire/API/MLS/LeafNode.hs +++ b/libs/wire-api/src/Wire/API/MLS/LeafNode.hs @@ -18,6 +18,7 @@ module Wire.API.MLS.LeafNode ( LeafIndex, LeafNode (..), + LeafNodeTBS (..), LeafNodeSource (..), LeafNodeSourceTag (..), leafNodeSourceTag, @@ -59,6 +60,15 @@ instance ParseMLS LeafNodeTBS where <*> parseMLS <*> parseMLSVector @VarInt 
parseMLS +instance SerialiseMLS LeafNodeTBS where + serialiseMLS tbs = do + serialiseMLS tbs.encryptionKey + serialiseMLSBytes @VarInt tbs.signatureKey + serialiseMLS tbs.credential + serialiseMLS tbs.capabilities + serialiseMLS tbs.source + serialiseMLSVector @VarInt serialiseMLS tbs.extensions + -- | This type can only verify the signature when the LeafNodeSource is -- LeafNodeSourceKeyPackage data LeafNode = LeafNode @@ -74,6 +84,11 @@ instance ParseMLS LeafNode where <$> parseMLS <*> parseMLSBytes @VarInt +instance SerialiseMLS LeafNode where + serialiseMLS ln = do + serialiseMLS ln.tbs + serialiseMLSBytes @VarInt ln.signature_ + instance S.ToSchema LeafNode where declareNamedSchema _ = pure (mlsSwagger "LeafNode") @@ -109,15 +124,28 @@ instance ParseMLS LeafNodeSource where LeafNodeSourceUpdateTag -> pure LeafNodeSourceUpdate LeafNodeSourceCommitTag -> LeafNodeSourceCommit <$> parseMLSBytes @VarInt +instance SerialiseMLS LeafNodeSource where + serialiseMLS (LeafNodeSourceKeyPackage lt) = do + serialiseMLS LeafNodeSourceKeyPackageTag + serialiseMLS lt + serialiseMLS LeafNodeSourceUpdate = + serialiseMLS LeafNodeSourceUpdateTag + serialiseMLS (LeafNodeSourceCommit bs) = do + serialiseMLS LeafNodeSourceCommitTag + serialiseMLSBytes @VarInt bs + data LeafNodeSourceTag = LeafNodeSourceKeyPackageTag | LeafNodeSourceUpdateTag | LeafNodeSourceCommitTag deriving (Show, Eq, Ord, Enum, Bounded) -instance Bounded LeafNodeSourceTag => ParseMLS LeafNodeSourceTag where +instance ParseMLS LeafNodeSourceTag where parseMLS = parseMLSEnum @Word8 "leaf node source" +instance SerialiseMLS LeafNodeSourceTag where + serialiseMLS = serialiseMLSEnum @Word8 + instance HasField "name" LeafNodeSourceTag Text where getField LeafNodeSourceKeyPackageTag = "key_package" getField LeafNodeSourceUpdateTag = "update" diff --git a/libs/wire-api/src/Wire/API/MLS/Lifetime.hs b/libs/wire-api/src/Wire/API/MLS/Lifetime.hs index 64f53b6727..8a05ce1c42 100644 --- 
a/libs/wire-api/src/Wire/API/MLS/Lifetime.hs +++ b/libs/wire-api/src/Wire/API/MLS/Lifetime.hs @@ -26,7 +26,7 @@ import Wire.Arbitrary -- | Seconds since the UNIX epoch. newtype Timestamp = Timestamp {timestampSeconds :: Word64} - deriving newtype (Eq, Show, Arbitrary, ParseMLS) + deriving newtype (Eq, Show, Arbitrary, ParseMLS, SerialiseMLS) tsPOSIX :: Timestamp -> POSIXTime tsPOSIX = fromIntegral . timestampSeconds @@ -40,3 +40,8 @@ data Lifetime = Lifetime instance ParseMLS Lifetime where parseMLS = Lifetime <$> parseMLS <*> parseMLS + +instance SerialiseMLS Lifetime where + serialiseMLS lt = do + serialiseMLS lt.ltNotBefore + serialiseMLS lt.ltNotAfter diff --git a/libs/wire-api/src/Wire/API/MLS/Message.hs b/libs/wire-api/src/Wire/API/MLS/Message.hs index c70291cd3c..e78e972b00 100644 --- a/libs/wire-api/src/Wire/API/MLS/Message.hs +++ b/libs/wire-api/src/Wire/API/MLS/Message.hs @@ -21,6 +21,7 @@ module Wire.API.MLS.Message ( -- * MLS Message types Message (..), + mkMessage, MessageContent (..), PublicMessage (..), PrivateMessage (..), @@ -62,12 +63,14 @@ import Wire.API.MLS.Commit import Wire.API.MLS.Epoch import Wire.API.MLS.Extension import Wire.API.MLS.Group +import Wire.API.MLS.GroupInfo import Wire.API.MLS.KeyPackage import Wire.API.MLS.LeafNode import Wire.API.MLS.Proposal import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation import Wire.API.MLS.Welcome +import Wire.Arbitrary data WireFormatTag = WireFormatPublicTag @@ -89,6 +92,9 @@ data Message = Message } deriving (Eq, Show) +mkMessage :: MessageContent -> Message +mkMessage = Message defaultProtocolVersion + instance ParseMLS Message where parseMLS = Message @@ -107,7 +113,7 @@ data MessageContent = MessagePrivate (RawMLS PrivateMessage) | MessagePublic PublicMessage | MessageWelcome (RawMLS Welcome) - | MessageGroupInfo -- TODO + | MessageGroupInfo (RawMLS GroupInfo) | MessageKeyPackage (RawMLS KeyPackage) deriving (Eq, Show) @@ -115,7 +121,7 @@ instance HasField "wireFormat" 
MessageContent WireFormatTag where getField (MessagePrivate _) = WireFormatPrivateTag getField (MessagePublic _) = WireFormatPublicTag getField (MessageWelcome _) = WireFormatWelcomeTag - getField MessageGroupInfo = WireFormatGroupInfoTag + getField (MessageGroupInfo _) = WireFormatGroupInfoTag getField (MessageKeyPackage _) = WireFormatKeyPackageTag instance ParseMLS MessageContent where @@ -124,7 +130,7 @@ instance ParseMLS MessageContent where WireFormatPrivateTag -> MessagePrivate <$> parseMLS WireFormatPublicTag -> MessagePublic <$> parseMLS WireFormatWelcomeTag -> MessageWelcome <$> parseMLS - WireFormatGroupInfoTag -> pure MessageGroupInfo + WireFormatGroupInfoTag -> MessageGroupInfo <$> parseMLS WireFormatKeyPackageTag -> MessageKeyPackage <$> parseMLS instance SerialiseMLS MessageContent where @@ -137,10 +143,9 @@ instance SerialiseMLS MessageContent where serialiseMLS (MessageWelcome welcome) = do serialiseMLS WireFormatWelcomeTag serialiseMLS welcome - serialiseMLS MessageGroupInfo = do + serialiseMLS (MessageGroupInfo gi) = do serialiseMLS WireFormatGroupInfoTag - -- TODO - pure () + serialiseMLS gi serialiseMLS (MessageKeyPackage kp) = do serialiseMLS WireFormatKeyPackageTag serialiseMLS kp @@ -151,6 +156,7 @@ instance S.ToSchema Message where data PublicMessage = PublicMessage { content :: RawMLS FramedContent, authData :: FramedContentAuthData, + -- Present iff content.rmValue.sender is of type Member. membershipTag :: Maybe ByteString } deriving (Eq, Show) @@ -214,6 +220,7 @@ data Sender | SenderNewMemberProposal | SenderNewMemberCommit deriving (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform Sender) instance ParseMLS Sender where parseMLS = @@ -334,6 +341,7 @@ framedContentTBS ctx msgContent = data FramedContentAuthData = FramedContentAuthData { signature_ :: ByteString, + -- Present iff it is part of a commit. 
confirmationTag :: Maybe ByteString } deriving (Eq, Show) @@ -383,16 +391,13 @@ mkSignedMessage priv pub gid epoch payload = groupContext = Nothing } sig = BA.convert $ sign priv pub (encodeMLS' tbs) - in Message - { protocolVersion = defaultProtocolVersion, - content = - MessagePublic - PublicMessage - { content = framedContent, - authData = FramedContentAuthData sig Nothing, - membershipTag = Nothing - } - } + in mkMessage $ + MessagePublic + PublicMessage + { content = framedContent, + authData = FramedContentAuthData sig Nothing, + membershipTag = Nothing + } verifyMessageSignature :: RawMLS GroupContext -> diff --git a/libs/wire-api/src/Wire/API/MLS/Proposal.hs b/libs/wire-api/src/Wire/API/MLS/Proposal.hs index cef7ca4200..4534e99f39 100644 --- a/libs/wire-api/src/Wire/API/MLS/Proposal.hs +++ b/libs/wire-api/src/Wire/API/MLS/Proposal.hs @@ -24,7 +24,10 @@ import Cassandra import Control.Lens (makePrisms) import Data.Binary import Data.Binary.Get +import Data.Binary.Put +import Data.ByteString as B import Imports +import Test.QuickCheck import Wire.API.MLS.CipherSuite import Wire.API.MLS.Context import Wire.API.MLS.Extension @@ -44,7 +47,8 @@ data Proposal | ReInitProposal (RawMLS ReInit) | ExternalInitProposal ByteString | GroupContextExtensionsProposal [Extension] - deriving stock (Eq, Show) + deriving stock (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform Proposal) instance ParseMLS Proposal where parseMLS = @@ -86,6 +90,7 @@ proposalRef :: CipherSuiteTag -> RawMLS Proposal -> ProposalRef proposalRef cs = ProposalRef . csHash cs proposalContext + . flip RawMLS () . 
rmRaw data PreSharedKeyTag = ExternalKeyTag | ResumptionKeyTag @@ -94,8 +99,12 @@ data PreSharedKeyTag = ExternalKeyTag | ResumptionKeyTag instance ParseMLS PreSharedKeyTag where parseMLS = parseMLSEnum @Word8 "PreSharedKeyID type" +instance SerialiseMLS PreSharedKeyTag where + serialiseMLS = serialiseMLSEnum @Word8 + data PreSharedKeyID = ExternalKeyID ByteString | ResumptionKeyID Resumption - deriving stock (Eq, Show) + deriving stock (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform PreSharedKeyID) instance ParseMLS PreSharedKeyID where parseMLS = do @@ -104,12 +113,21 @@ instance ParseMLS PreSharedKeyID where ExternalKeyTag -> ExternalKeyID <$> parseMLSBytes @VarInt ResumptionKeyTag -> ResumptionKeyID <$> parseMLS +instance SerialiseMLS PreSharedKeyID where + serialiseMLS (ExternalKeyID bs) = do + serialiseMLS ExternalKeyTag + serialiseMLSBytes @VarInt bs + serialiseMLS (ResumptionKeyID r) = do + serialiseMLS ResumptionKeyTag + serialiseMLS r + data Resumption = Resumption { resUsage :: Word8, resGroupId :: GroupId, resEpoch :: Word64 } - deriving stock (Eq, Show) + deriving stock (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform Resumption) instance ParseMLS Resumption where parseMLS = @@ -118,13 +136,20 @@ instance ParseMLS Resumption where <*> parseMLS <*> parseMLS +instance SerialiseMLS Resumption where + serialiseMLS r = do + serialiseMLS r.resUsage + serialiseMLS r.resGroupId + serialiseMLS r.resEpoch + data ReInit = ReInit { riGroupId :: GroupId, riProtocolVersion :: ProtocolVersion, riCipherSuite :: CipherSuite, riExtensions :: [Extension] } - deriving stock (Eq, Show) + deriving stock (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform ReInit) instance ParseMLS ReInit where parseMLS = @@ -134,6 +159,13 @@ instance ParseMLS ReInit where <*> parseMLS <*> parseMLSVector @VarInt parseMLS +instance SerialiseMLS ReInit where + serialiseMLS ri = do + serialiseMLS ri.riGroupId + serialiseMLS ri.riProtocolVersion + 
serialiseMLS ri.riCipherSuite + serialiseMLSVector @VarInt serialiseMLS ri.riExtensions + data MessageRange = MessageRange { mrSender :: KeyPackageRef, mrFirstGeneration :: Word32, @@ -163,8 +195,12 @@ data ProposalOrRefTag = InlineTag | RefTag instance ParseMLS ProposalOrRefTag where parseMLS = parseMLSEnum @Word8 "ProposalOrRef type" +instance SerialiseMLS ProposalOrRefTag where + serialiseMLS = serialiseMLSEnum @Word8 + data ProposalOrRef = Inline Proposal | Ref ProposalRef - deriving stock (Eq, Show) + deriving stock (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform ProposalOrRef) instance ParseMLS ProposalOrRef where parseMLS = @@ -172,12 +208,26 @@ instance ParseMLS ProposalOrRef where InlineTag -> Inline <$> parseMLS RefTag -> Ref <$> parseMLS +instance SerialiseMLS ProposalOrRef where + serialiseMLS (Inline p) = do + serialiseMLS InlineTag + serialiseMLS p + serialiseMLS (Ref r) = do + serialiseMLS RefTag + serialiseMLS r + newtype ProposalRef = ProposalRef {unProposalRef :: ByteString} - deriving stock (Eq, Show, Ord) + deriving stock (Eq, Show, Ord, Generic) instance ParseMLS ProposalRef where parseMLS = ProposalRef <$> getByteString 16 +instance SerialiseMLS ProposalRef where + serialiseMLS = putByteString . unProposalRef + +instance Arbitrary ProposalRef where + arbitrary = ProposalRef . B.pack <$> vectorOf 16 arbitrary + makePrisms ''ProposalOrRef data ProposalOrigin diff --git a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs index 25f537f4dc..2a83562b04 100644 --- a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs +++ b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs @@ -123,7 +123,11 @@ instance SerialiseMLS VarInt where serialiseMLS = put instance ParseMLS VarInt where parseMLS = get parseMLSStream :: Get a -> Get [a] -parseMLSStream = many . 
lookAhead +parseMLSStream p = do + e <- isEmpty + if e + then pure [] + else (:) <$> p <*> parseMLSStream p serialiseMLSStream :: (a -> Put) -> [a] -> Put serialiseMLSStream = traverse_ @@ -265,6 +269,9 @@ data RawMLS a = RawMLS } deriving stock (Eq, Show, Foldable) +instance (Arbitrary a, SerialiseMLS a) => Arbitrary (RawMLS a) where + arbitrary = mkRawMLS <$> arbitrary + -- | A schema for a raw MLS object. -- -- This can be used for embedding MLS objects into JSON. It expresses the diff --git a/libs/wire-api/test/resources/key_package1.mls b/libs/wire-api/test/resources/key_package1.mls deleted file mode 100644 index 8023c6907928ca58c8e3ff3ec2ee3ce4bc1f99eb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 262 zcmZQ%U}R8Wv69?(F|yfHt1|cP`k+T!{tMT&OaFhpVnOEKk5P}TGJ$GrlT1ua6HSZ_ zbuCkp%ydmm3`}$_%u>yD%@U1MlTyu%(~=X@tc(*=O)U&fO&xM`iuF<}5_1c3QuUJa zb2-=<6eRoCUUAX8I9u(~{?F_3HlM$$+4G&jUgc-^p|73G%uh{bU|=u+wmWU{gcM_KBO8 z$-m?@jpivxotf{>a^dHN=Z{X`u=~~BsjvF0DC5uqrY-N}QZ|)ea}P8RdZ?k~_=+6> DHm_P$ diff --git a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs index ffcbb33ec5..913dcc0c60 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs @@ -61,7 +61,11 @@ tests = testParseKeyPackage :: IO () testParseKeyPackage = do - kpData <- BS.readFile "test/resources/key_package1.mls" + let qcid = "b455a431-9db6-4404-86e7-6a3ebe73fcaf:3ae58155@mls.example.com" + kpData <- withSystemTempDirectory "mls" $ \tmp -> do + void $ spawn (cli qcid tmp ["init", qcid]) Nothing + spawn (cli qcid tmp ["key-package", "create"]) Nothing + kp <- case decodeMLS' @KeyPackage kpData of Left err -> assertFailure (T.unpack err) Right x -> pure x @@ -80,10 +84,6 @@ testParseKeyPackage = do ciClient = newClientId 0x3ae58155 } - -- check raw TBS package - let rawTBS = kp.tbs.rmRaw - rawTBS @?= BS.take 196 kpData - -- TODO testParseCommit :: IO () testParseCommit = pure () @@ 
-102,8 +102,13 @@ testParseGroupInfo = pure () testKeyPackageRef :: IO () testKeyPackageRef = do - kpData <- BS.readFile "test/resources/key_package1.mls" - ref <- KeyPackageRef <$> BS.readFile "test/resources/key_package_ref1" + let qcid = "b455a431-9db6-4404-86e7-6a3ebe73fcaf:3ae58155@mls.example.com" + (kpData, ref) <- withSystemTempDirectory "mls" $ \tmp -> do + void $ spawn (cli qcid tmp ["init", qcid]) Nothing + kpData <- spawn (cli qcid tmp ["key-package", "create"]) Nothing + ref <- spawn (cli qcid tmp ["key-package", "ref", "-"]) (Just kpData) + pure (kpData, KeyPackageRef ref) + kpRef MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 (KeyPackageData kpData) @?= ref -- TODO @@ -116,27 +121,26 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do let c = newClientId 0x3ae58155 usr <- flip Qualified (Domain "example.com") <$> (Id <$> UUID.nextRandom) pure (userClientQid usr c) - void . liftIO $ spawn (cli qcid tmp ["init", qcid]) Nothing + void $ spawn (cli qcid tmp ["init", qcid]) Nothing qcid2 <- do let c = newClientId 0x4ae58157 usr <- flip Qualified (Domain "example.com") <$> (Id <$> UUID.nextRandom) pure (userClientQid usr c) - void . 
liftIO $ spawn (cli qcid2 tmp ["init", qcid2]) Nothing + void $ spawn (cli qcid2 tmp ["init", qcid2]) Nothing kp :: RawMLS KeyPackage <- - liftIO $ - decodeMLSError <$> spawn (cli qcid2 tmp ["key-package", "create"]) Nothing - liftIO $ BS.writeFile (tmp qcid2) (rmRaw kp) + decodeMLSError <$> spawn (cli qcid2 tmp ["key-package", "create"]) Nothing + BS.writeFile (tmp qcid2) (rmRaw kp) let groupFilename = "group" let gid = GroupId "abcd" createGroup tmp qcid groupFilename gid - void $ liftIO $ spawn (cli qcid tmp ["member", "add", "--group", tmp groupFilename, "--in-place", tmp qcid2]) Nothing + void $ spawn (cli qcid tmp ["member", "add", "--group", tmp groupFilename, "--in-place", tmp qcid2]) Nothing secretKey <- Ed25519.generateSecretKey let publicKey = Ed25519.toPublic secretKey - let proposal = mkRawMLS (RemoveProposal (error "TODO: remove proposal")) + let proposal = mkRawMLS (RemoveProposal 1) let message = mkSignedMessage secretKey @@ -150,7 +154,7 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do let signerKeyFilename = "signer-key.bin" BS.writeFile (tmp signerKeyFilename) (convert publicKey) - void . liftIO $ + void $ spawn ( cli qcid diff --git a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs index cd4537799a..1beb775720 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs @@ -1,4 +1,5 @@ {-# LANGUAGE GeneralizedNewtypeDeriving #-} + -- This file is part of the Wire Server implementation. -- -- Copyright (C) 2022 Wire Swiss GmbH @@ -15,19 +16,21 @@ -- -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . 
-{-# OPTIONS_GHC -Wwarn #-} module Test.Wire.API.Roundtrip.MLS (tests) where -import Data.Binary.Put +import Data.Hex import Imports import qualified Test.Tasty as T import Test.Tasty.QuickCheck import Type.Reflection (typeRep) +import Wire.API.MLS.Commit +import Wire.API.MLS.CommitBundle import Wire.API.MLS.Credential import Wire.API.MLS.Extension import Wire.API.MLS.GroupInfo import Wire.API.MLS.KeyPackage +import Wire.API.MLS.LeafNode import Wire.API.MLS.Message import Wire.API.MLS.Proposal import Wire.API.MLS.Serialisation @@ -37,13 +40,20 @@ tests :: T.TestTree tests = T.localOption (T.Timeout (60 * 1000000) "60s") . T.testGroup "MLS roundtrip tests" $ [ testRoundTrip @KeyPackageRef, + testRoundTrip @LeafNode, + testRoundTrip @LeafNodeTBS, + testRoundTrip @KeyPackageTBS, + testRoundTrip @Credential, testRoundTrip @ClientIdentity, testRoundTrip @TestPreconfiguredSender, testRoundTrip @RemoveProposalMessage, testRoundTrip @RemoveProposalPayload, testRoundTrip @ExtensionVector, testRoundTrip @GroupInfoData, + testRoundTrip @TestCommitBundle, testRoundTrip @Welcome, + testRoundTrip @Proposal, + testRoundTrip @ProposalRef, testRoundTrip @VarInt ] @@ -55,8 +65,10 @@ testRoundTrip = testProperty msg trip where msg = show (typeRep @a) trip (v :: a) = - counterexample (show (runPut (serialiseMLS v))) $ - Right v === (decodeMLS . runPut . 
serialiseMLS) v + let serialised = encodeMLS v + parsed = decodeMLS serialised + in counterexample (show $ hex serialised) $ + Right v === parsed -------------------------------------------------------------------------------- -- auxiliary types @@ -64,13 +76,16 @@ testRoundTrip = testProperty msg trip class ArbitrarySender a where arbitrarySender :: Gen Sender +instance ArbitrarySender Sender where + arbitrarySender = arbitrary + class ArbitraryFramedContentData a where arbitraryFramedContentData :: Gen FramedContentData class ArbitraryFramedContent a where arbitraryFramedContent :: Gen FramedContent -newtype MessageGenerator tbs = MessageGenerator {unMessageGenerator :: Message} +newtype MessageGenerator fc = MessageGenerator {unMessageGenerator :: Message} deriving newtype (ParseMLS, SerialiseMLS, Eq, Show) instance ArbitraryFramedContent fc => Arbitrary (MessageGenerator fc) where @@ -80,12 +95,15 @@ instance ArbitraryFramedContent fc => Arbitrary (MessageGenerator fc) where mt <- case fc.sender of SenderMember _ -> Just <$> arbitrary _ -> pure Nothing + confirmationTag <- case fc.content of + FramedContentCommit _ -> Just <$> arbitrary + _ -> pure Nothing Message <$> arbitrary <*> fmap MessagePublic ( PublicMessage (mkRawMLS fc) - <$> (FramedContentAuthData <$> arbitrary <*> pure Nothing) + <$> (FramedContentAuthData <$> arbitrary <*> pure confirmationTag) <*> pure mt ) @@ -149,3 +167,24 @@ instance ParseMLS ExtensionVector where instance SerialiseMLS ExtensionVector where serialiseMLS (ExtensionVector exts) = do serialiseMLSVector @VarInt serialiseMLS exts + +-- + +newtype TestCommitBundle = TestCommitBundle CommitBundle + deriving newtype (Eq, Show, ParseMLS, SerialiseMLS) + +instance Arbitrary TestCommitBundle where + arbitrary = + TestCommitBundle <$> do + commitMsg <- + mkRawMLS . 
unMessageGenerator @(FramedContentGenerator Sender CommitPayload) + <$> arbitrary + welcome <- arbitrary + gi <- arbitrary + pure $ CommitBundle commitMsg welcome gi + +newtype CommitPayload = CommitPayload {unCommitPayload :: RawMLS Commit} + deriving newtype (Arbitrary) + +instance ArbitraryFramedContentData CommitPayload where + arbitraryFramedContentData = FramedContentCommit . unCommitPayload <$> arbitrary diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index 1338502650..01be63704f 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -690,7 +690,7 @@ createAddCommitWithKeyPackages :: createAddCommitWithKeyPackages qcid clientsAndKeyPackages = do bd <- State.gets mlsBaseDir welcomeFile <- liftIO $ emptyTempFile bd "welcome" - pgsFile <- liftIO $ emptyTempFile bd "pgs" + giFile <- liftIO $ emptyTempFile bd "gi" commit <- runContT (traverse (withTempKeyPackageFile . snd) clientsAndKeyPackages) $ \kpFiles -> mlscli @@ -702,7 +702,7 @@ createAddCommitWithKeyPackages qcid clientsAndKeyPackages = do "--welcome-out", welcomeFile, "--group-info-out", - pgsFile, + giFile, "--group-out", "" ] @@ -716,13 +716,14 @@ createAddCommitWithKeyPackages qcid clientsAndKeyPackages = do } welcome <- liftIO $ BS.readFile welcomeFile - pgs <- liftIO $ BS.readFile pgsFile + gi <- liftIO $ BS.readFile giFile + liftIO . 
putStrLn $ "gi:\n" <> show (hex gi) pure $ MessagePackage { mpSender = qcid, mpMessage = commit, mpWelcome = Just welcome, - mpGroupInfo = Just pgs + mpGroupInfo = Just gi } createAddProposalWithKeyPackage :: From 67a6118708e79e955f52ac7ccca78b7680a6d1ea Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Thu, 6 Apr 2023 13:51:44 +0000 Subject: [PATCH 11/75] fix adding users to MLS conversations * change content-type of commit bundle in integration tests * fix keypackage ref serialisation * add context to commit bundle parsing --- .../wire-api/src/Wire/API/MLS/CommitBundle.hs | 2 ++ libs/wire-api/src/Wire/API/MLS/KeyPackage.hs | 11 +++------- .../src/Wire/API/MLS/Serialisation.hs | 4 +++- services/galley/test/integration/API/MLS.hs | 4 ---- .../galley/test/integration/API/MLS/Util.hs | 21 ++++++++++--------- 5 files changed, 19 insertions(+), 23 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs index 7dd8653116..ed185ec90e 100644 --- a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs +++ b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs @@ -39,6 +39,8 @@ data CommitBundleF f = CommitBundleF cbGroupInfo :: f (RawMLS GroupInfo) } +deriving instance Show (CommitBundleF []) + instance Alternative f => Semigroup (CommitBundleF f) where cb1 <> cb2 = CommitBundleF diff --git a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs index 8f36c385a2..4a426a7c5d 100644 --- a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs +++ b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs @@ -35,9 +35,6 @@ import Cassandra.CQL hiding (Set) import Control.Applicative import Control.Lens hiding (set, (.=)) import Data.Aeson (FromJSON, ToJSON) -import Data.Binary.Get -import Data.Binary.Put -import qualified Data.ByteString as B import qualified Data.ByteString.Lazy as LBS import Data.Id import Data.Json.Util @@ -125,18 +122,16 @@ newtype KeyPackageRef = KeyPackageRef {unKeyPackageRef :: 
ByteString} deriving stock (Eq, Ord, Show) deriving (FromHttpApiData, ToHttpApiData, S.ToParamSchema) via Base64ByteString deriving (ToJSON, FromJSON, S.ToSchema) via (Schema KeyPackageRef) - -instance Arbitrary KeyPackageRef where - arbitrary = KeyPackageRef . B.pack <$> vectorOf 16 arbitrary + deriving newtype (Arbitrary) instance ToSchema KeyPackageRef where schema = named "KeyPackageRef" $ unKeyPackageRef .= fmap KeyPackageRef base64Schema instance ParseMLS KeyPackageRef where - parseMLS = KeyPackageRef <$> getByteString 16 + parseMLS = KeyPackageRef <$> parseMLSBytes @VarInt instance SerialiseMLS KeyPackageRef where - serialiseMLS = putByteString . unKeyPackageRef + serialiseMLS = serialiseMLSBytes @VarInt . unKeyPackageRef instance Cql KeyPackageRef where ctype = Tagged BlobColumn diff --git a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs index 2a83562b04..1dd394a0ac 100644 --- a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs +++ b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs @@ -329,6 +329,8 @@ mkRawMLS x = RawMLS (LBS.toStrict (runPut (serialiseMLS x))) x traceMLS :: Show a => String -> Get a -> Get a traceMLS l g = do + begin <- bytesRead r <- g - traceM $ l <> " " <> show r + end <- bytesRead + traceM $ l <> " " <> show begin <> ":" <> show end <> " " <> show r pure r diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index e488f4bd8c..07cfaaeab4 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -458,10 +458,6 @@ testAddUser = do qcnv <- runMLSTest $ do [alice1, bob1, bob2] <- traverse createMLSClient [alice, bob, bob] - putStrLn $ "alice1: " <> show alice1 - putStrLn $ "bob1: " <> show bob1 - putStrLn $ "bob2: " <> show bob2 - traverse_ uploadNewKeyPackage [bob1, bob2] (_, qcnv) <- setupMLSGroup alice1 diff --git a/services/galley/test/integration/API/MLS/Util.hs 
b/services/galley/test/integration/API/MLS/Util.hs index 01be63704f..5478fc63f8 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -33,6 +33,7 @@ import Control.Monad.State (StateT, evalStateT) import qualified Control.Monad.State as State import Control.Monad.Trans.Maybe import Data.Aeson.Lens +import Data.Bifunctor import Data.Binary.Builder (toLazyByteString) import qualified Data.ByteArray as BA import qualified Data.ByteString as BS @@ -52,7 +53,6 @@ import Data.Time import qualified Data.Tuple.Extra as Tuple import qualified Data.UUID as UUID import qualified Data.UUID.V4 as UUIDV4 -import Debug.Trace import Galley.Keys import Galley.Options import qualified Galley.Options as Opts @@ -144,7 +144,7 @@ localPostCommitBundle sender bundle = do . zUser (ciUser sender) . zClient (ciClient sender) . zConn "conn" - . Bilge.content "application/x-protobuf" + . Bilge.content "message/mls" . bytes bundle ) @@ -717,7 +717,6 @@ createAddCommitWithKeyPackages qcid clientsAndKeyPackages = do welcome <- liftIO $ BS.readFile welcomeFile gi <- liftIO $ BS.readFile giFile - liftIO . 
putStrLn $ "gi:\n" <> show (hex gi) pure $ MessagePackage { mpSender = qcid, @@ -854,7 +853,7 @@ consumeWelcome welcome = do qcids <- State.gets mlsNewMembers for_ qcids $ \qcid -> do hasState <- hasClientGroupState qcid - liftIO $ assertBool "Existing clients in a conversation should not consume commits" (not hasState) + liftIO $ assertBool "Existing clients in a conversation should not consume welcomes" (not hasState) void $ mlscli qcid @@ -931,13 +930,17 @@ sendAndConsumeCommit mp = do mkBundle :: MessagePackage -> Either Text CommitBundle mkBundle mp = do - commitB <- decodeMLS' (mpMessage mp) - welcomeB <- traverse decodeMLS' (mpWelcome mp) + commitB <- first ("Commit: " <>) $ decodeMLS' (mpMessage mp) + welcomeB <- first ("Welcome: " <>) $ for (mpWelcome mp) $ \m -> do + w <- decodeMLS' @Message m + case w.content of + MessageWelcome welcomeB -> pure welcomeB + _ -> Left "expected welcome" ginfo <- note "group info unavailable" (mpGroupInfo mp) - ginfoB <- decodeMLS' ginfo + ginfoB <- first ("GroupInfo: " <>) $ decodeMLS' ginfo pure $ CommitBundle commitB welcomeB ginfoB -createBundle :: MonadIO m => MessagePackage -> m ByteString +createBundle :: (HasCallStack, MonadIO m) => MessagePackage -> m ByteString createBundle mp = do bundle <- either (liftIO . assertFailure . T.unpack) pure $ @@ -949,8 +952,6 @@ sendAndConsumeCommitBundle :: MessagePackage -> MLSTest [Event] sendAndConsumeCommitBundle mp = do - traverse_ (traceM . ("welcome: " <>) . show . hex) $ mpWelcome mp - traverse_ (traceM . ("groupState: " <>) . show . 
hex) $ mpGroupInfo mp qcs <- getConvId bundle <- createBundle mp events <- liftTest $ postCommitBundle (mpSender mp) qcs bundle From 404b35a7f77f8253cfcf647eddcffc7839eeb23b Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Tue, 11 Apr 2023 14:49:41 +0000 Subject: [PATCH 12/75] fix integration test: send other user's commit * keep track of index map while processing proposals * add creator client to ProposalAction in epoch 0 --- services/galley/src/Galley/API/Federation.hs | 1 - .../galley/src/Galley/API/MLS/Conversation.hs | 2 +- services/galley/src/Galley/API/MLS/Message.hs | 145 +++++++-------- .../src/Galley/API/MLS/SubConversation.hs | 2 +- services/galley/src/Galley/API/MLS/Types.hs | 24 ++- .../src/Galley/Cassandra/SubConversation.hs | 4 +- services/galley/test/integration/API/MLS.hs | 175 +++++++----------- .../galley/test/integration/API/MLS/Util.hs | 53 +----- 8 files changed, 156 insertions(+), 250 deletions(-) diff --git a/services/galley/src/Galley/API/Federation.hs b/services/galley/src/Galley/API/Federation.hs index 2195db6a15..41ea52adf3 100644 --- a/services/galley/src/Galley/API/Federation.hs +++ b/services/galley/src/Galley/API/Federation.hs @@ -695,7 +695,6 @@ sendMLSMessage :: Member (Input UTCTime) r, Member LegalHoldStore r, Member MemberStore r, - Member Resource r, Member TeamStore r, Member P.TinyLog r, Member ProposalStore r, diff --git a/services/galley/src/Galley/API/MLS/Conversation.hs b/services/galley/src/Galley/API/MLS/Conversation.hs index 9202755c0f..f63c038244 100644 --- a/services/galley/src/Galley/API/MLS/Conversation.hs +++ b/services/galley/src/Galley/API/MLS/Conversation.hs @@ -43,7 +43,7 @@ mkMLSConversation conv = mcRemoteMembers = Data.convRemoteMembers conv, mcMLSData = mlsData, mcMembers = cm, - mcIndexMap = mempty -- TODO + mcIndexMap = mkIndexMap cm } mcConv :: MLSConversation -> Data.Conversation diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 
0446226772..7904b23b95 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -72,6 +72,7 @@ import Polysemy.Error import Polysemy.Input import Polysemy.Internal import Polysemy.Resource (Resource) +import Polysemy.State import Polysemy.TinyLog import Wire.API.Conversation hiding (Member) import Wire.API.Conversation.Protocol @@ -104,7 +105,7 @@ import Wire.API.User.Client -- [ ] remove MissingSenderClient error -- [ ] PreSharedKey proposal -- [ ] remove all key package ref mapping --- [ ] initialise index maps +-- [x] initialise index maps -- [ ] newtype for leaf node indices -- [x] compute new indices for add proposals -- [ ] remove prefixes from rmValue and rmRaw @@ -239,7 +240,6 @@ postMLSMessageFromLocalUserV1 :: Member (ErrorS 'MLSStaleMessage) r, Member (ErrorS 'MLSUnsupportedMessage) r, Member (ErrorS 'MLSSubConvClientNotInParent) r, - Member Resource r, Member SubConversationStore r ) => Local UserId -> @@ -271,7 +271,6 @@ postMLSMessageFromLocalUser :: Member (ErrorS 'MLSStaleMessage) r, Member (ErrorS 'MLSUnsupportedMessage) r, Member (ErrorS 'MLSSubConvClientNotInParent) r, - Member Resource r, Member SubConversationStore r ) => Local UserId -> @@ -346,9 +345,9 @@ postMLSCommitBundleToLocalConv :: Sem r ([LocalConversationUpdate], UnreachableUsers) postMLSCommitBundleToLocalConv qusr c conn bundle lConvOrSubId = do lConvOrSub <- fetchConvOrSub qusr lConvOrSubId - senderIdentity <- getSenderIdentity qusr c (Just bundle.sender) + senderIdentity <- getSenderIdentity qusr c bundle.sender lConvOrSub - action <- getCommitData lConvOrSub bundle.epoch bundle.commit.rmValue + action <- getCommitData senderIdentity lConvOrSub bundle.epoch bundle.commit.rmValue -- TODO: check that the welcome message matches the action -- for_ bundle.welcome $ \welcome -> -- when @@ -400,15 +399,13 @@ postMLSCommitBundleToRemoteConv loc qusr c con bundle rConvOrSubId = do flip unless (throwS @'ConvMemberNotFound) =<< 
checkLocalMemberRemoteConv (tUnqualified lusr) (convOfConvOrSub <$> rConvOrSubId) - senderIdentity <- getSenderIdentity qusr c (Just bundle.sender) - resp <- runFederated rConvOrSubId $ fedClient @'Galley @"send-mls-commit-bundle" $ MLSMessageSendRequest { mmsrConvOrSubId = tUnqualified rConvOrSubId, mmsrSender = tUnqualified lusr, - mmsrSenderClient = ciClient senderIdentity, + mmsrSenderClient = c, mmsrRawMessage = Base64ByteString bundle.serialized } case resp of @@ -438,7 +435,6 @@ postMLSMessage :: Member (ErrorS 'MLSStaleMessage) r, Member (ErrorS 'MLSUnsupportedMessage) r, Member (ErrorS 'MLSSubConvClientNotInParent) r, - Member Resource r, Member SubConversationStore r ) => Local x -> @@ -449,9 +445,6 @@ postMLSMessage :: IncomingMessage -> Sem r ([LocalConversationUpdate], UnreachableUsers) postMLSMessage loc qusr c qconvOrSub con msg = do - -- verify sender identity - void $ getSenderIdentity qusr c msg.sender - foldQualified loc (postMLSMessageToLocalConv qusr c con msg) @@ -459,26 +452,31 @@ postMLSMessage loc qusr c qconvOrSub con msg = do qconvOrSub getSenderIdentity :: + ( Member (ErrorS 'MLSClientSenderUserMismatch) r, + Member (Error MLSProtocolError) r + ) => Qualified UserId -> ClientId -> - Maybe Sender -> + Sender -> + Local ConvOrSubConv -> Sem r ClientIdentity -getSenderIdentity qusr c _mSender = do +getSenderIdentity qusr c mSender lConvOrSubConv = do let cid = mkClientIdentity qusr c - -- TODO: check that mSender matches cid + let idxMap = indexMapConvOrSub $ tUnqualified lConvOrSubConv + let epoch = epochNumber . cnvmlsEpoch . mlsMetaConvOrSub . 
tUnqualified $ lConvOrSubConv + case mSender of + SenderMember idx | epoch > 0 -> do + cid' <- note (mlsProtocolError "unknown sender leaf index") $ imLookup idxMap idx + unless (cid' == cid) $ throwS @'MLSClientSenderUserMismatch + _ -> pure () pure cid postMLSMessageToLocalConv :: ( HasProposalEffects r, Member (ErrorS 'ConvNotFound) r, - Member (ErrorS 'MissingLegalholdConsent) r, - Member (ErrorS 'MLSCommitMissingReferences) r, - Member (ErrorS 'MLSProposalNotFound) r, - Member (ErrorS 'MLSSelfRemovalNotAllowed) r, + Member (ErrorS 'MLSClientSenderUserMismatch) r, Member (ErrorS 'MLSStaleMessage) r, Member (ErrorS 'MLSUnsupportedMessage) r, - Member (ErrorS 'MLSSubConvClientNotInParent) r, - Member Resource r, Member SubConversationStore r ) => Qualified UserId -> @@ -490,13 +488,13 @@ postMLSMessageToLocalConv :: postMLSMessageToLocalConv qusr c con msg convOrSubId = do lConvOrSub <- fetchConvOrSub qusr convOrSubId - senderIdentity <- getSenderIdentity qusr c msg.sender + for_ msg.sender $ \sender -> + void $ getSenderIdentity qusr c sender lConvOrSub -- validate message events <- case msg.content of IncomingMessageContentPublic pub -> case pub.content of - FramedContentCommit commit -> - processCommit senderIdentity con lConvOrSub msg.epoch pub.sender commit.rmValue + FramedContentCommit _commit -> throwS @'MLSUnsupportedMessage FramedContentApplicationData _ -> throwS @'MLSUnsupportedMessage FramedContentProposal prop -> processProposal qusr lConvOrSub msg pub prop $> mempty @@ -605,11 +603,12 @@ getCommitData :: Member (ErrorS 'MLSProposalNotFound) r, Member (ErrorS 'MLSStaleMessage) r ) => + ClientIdentity -> Local ConvOrSubConv -> Epoch -> Commit -> Sem r ProposalAction -getCommitData lConvOrSub epoch commit = do +getCommitData senderIdentity lConvOrSub epoch commit = do let convOrSub = tUnqualified lConvOrSub mlsMeta = mlsMetaConvOrSub convOrSub curEpoch = cnvmlsEpoch mlsMeta @@ -618,39 +617,22 @@ getCommitData lConvOrSub epoch commit = do -- check epoch 
number when (epoch /= curEpoch) $ throwS @'MLSStaleMessage - foldMap - ( applyProposalRef - (idForConvOrSub convOrSub) - (indexMapConvOrSub convOrSub) - mlsMeta - groupId - epoch - suite - ) - (cProposals commit) - -processCommit :: - ( HasProposalEffects r, - Member (ErrorS 'ConvNotFound) r, - Member (ErrorS 'MissingLegalholdConsent) r, - Member (ErrorS 'MLSCommitMissingReferences) r, - Member (ErrorS 'MLSProposalNotFound) r, - Member (ErrorS 'MLSSelfRemovalNotAllowed) r, - Member (ErrorS 'MLSStaleMessage) r, - Member (ErrorS 'MLSSubConvClientNotInParent) r, - Member Resource r, - Member SubConversationStore r - ) => - ClientIdentity -> - Maybe ConnId -> - Local ConvOrSubConv -> - Epoch -> - Sender -> - Commit -> - Sem r [LocalConversationUpdate] -processCommit senderIdentity con lConvOrSub epoch sender commit = do - action <- getCommitData lConvOrSub epoch commit - processCommitWithAction senderIdentity con lConvOrSub epoch action sender commit + evalState (indexMapConvOrSub convOrSub) $ do + creatorAction <- + if epoch == Epoch 0 + then addProposedClient senderIdentity + else mempty + action <- + foldMap + ( applyProposalRef + (idForConvOrSub convOrSub) + mlsMeta + groupId + epoch + suite + ) + (cProposals commit) + pure (creatorAction <> action) processExternalCommit :: forall r. 
@@ -807,52 +789,57 @@ processInternalCommit senderIdentity con lConvOrSub epoch action commit = do applyProposalRef :: ( HasProposalEffects r, - ( Member (ErrorS 'ConvNotFound) r, - Member (ErrorS 'MLSProposalNotFound) r, - Member (ErrorS 'MLSStaleMessage) r, - Member ProposalStore r - ) + Member (State IndexMap) r, + Member (ErrorS 'ConvNotFound) r, + Member (ErrorS 'MLSProposalNotFound) r, + Member (ErrorS 'MLSStaleMessage) r ) => ConvOrSubConvId -> - IndexMap -> ConversationMLSData -> GroupId -> Epoch -> CipherSuiteTag -> ProposalOrRef -> Sem r ProposalAction -applyProposalRef convOrSubConvId im mlsMeta groupId epoch _suite (Ref ref) = do +applyProposalRef convOrSubConvId mlsMeta groupId epoch _suite (Ref ref) = do p <- getProposal groupId epoch ref >>= noteS @'MLSProposalNotFound checkEpoch epoch mlsMeta checkGroup groupId mlsMeta - applyProposal convOrSubConvId im groupId (rmValue p) -applyProposalRef convOrSubConvId im _mlsMeta groupId _epoch suite (Inline p) = do + applyProposal convOrSubConvId groupId (rmValue p) +applyProposalRef convOrSubConvId _mlsMeta groupId _epoch suite (Inline p) = do checkProposalCipherSuite suite p - applyProposal convOrSubConvId im groupId p + applyProposal convOrSubConvId groupId p + +addProposedClient :: Member (State IndexMap) r => ClientIdentity -> Sem r ProposalAction +addProposedClient cid = do + im <- get + let (idx, im') = imAddClient im cid + put im' + pure (paAddClient cid idx) applyProposal :: forall r. 
- HasProposalEffects r => + ( HasProposalEffects r, + Member (State IndexMap) r + ) => ConvOrSubConvId -> - IndexMap -> GroupId -> Proposal -> Sem r ProposalAction -applyProposal _convOrSubConvId im _groupId (AddProposal kp) = do - let idx = imNextIndex im +applyProposal _convOrSubConvId _groupId (AddProposal kp) = do -- TODO: validate key package cid <- getKeyPackageIdentity kp.rmValue - -- TODO: we probably should not update the conversation state here - -- addMLSClients groupId (cidQualifiedUser cid) (Set.singleton (ciClient cid, idx)) - pure (paAddClient cid idx) -applyProposal _convOrSubConvId im _groupId (RemoveProposal idx) = do - cid <- noteS @'MLSInvalidLeafNodeIndex $ imLookup im idx + addProposedClient cid +applyProposal _convOrSubConvId _groupId (RemoveProposal idx) = do + im <- get + (cid, im') <- noteS @'MLSInvalidLeafNodeIndex $ imRemoveClient im idx + put im' pure (paRemoveClient cid idx) -applyProposal _convOrSubConvId _im _groupId (ExternalInitProposal _) = +applyProposal _convOrSubConvId _groupId (ExternalInitProposal _) = -- only record the fact there was an external init proposal, but do not -- process it in any way. 
pure paExternalInitPresent -applyProposal _convOrSubConvId _im _groupId _ = pure mempty +applyProposal _convOrSubConvId _groupId _ = pure mempty checkProposalCipherSuite :: Member (Error MLSProtocolError) r => diff --git a/services/galley/src/Galley/API/MLS/SubConversation.hs b/services/galley/src/Galley/API/MLS/SubConversation.hs index fbfc8acaa0..c3fc90c08d 100644 --- a/services/galley/src/Galley/API/MLS/SubConversation.hs +++ b/services/galley/src/Galley/API/MLS/SubConversation.hs @@ -143,7 +143,7 @@ getLocalSubConversation qusr lconv sconv = do cnvmlsCipherSuite = suite }, scMembers = mkClientMap [], - scIndexMap = mempty -- TODO + scIndexMap = mempty } pure sub Just sub -> pure sub diff --git a/services/galley/src/Galley/API/MLS/Types.hs b/services/galley/src/Galley/API/MLS/Types.hs index a00639ebc1..f0938752dc 100644 --- a/services/galley/src/Galley/API/MLS/Types.hs +++ b/services/galley/src/Galley/API/MLS/Types.hs @@ -29,21 +29,33 @@ import Imports import Wire.API.Conversation import Wire.API.Conversation.Protocol import Wire.API.MLS.Credential +import Wire.API.MLS.LeafNode import Wire.API.MLS.SubConversation newtype IndexMap = IndexMap {unIndexMap :: IntMap ClientIdentity} deriving (Eq, Show) deriving newtype (Semigroup, Monoid) -imLookup :: IndexMap -> Word32 -> Maybe ClientIdentity +mkIndexMap :: ClientMap -> IndexMap +mkIndexMap = IndexMap . IntMap.fromList . map (swap . fmap fromIntegral) . cmAssocs + +imLookup :: IndexMap -> LeafIndex -> Maybe ClientIdentity imLookup m i = IntMap.lookup (fromIntegral i) (unIndexMap m) -imNextIndex :: IndexMap -> Word32 +imNextIndex :: IndexMap -> LeafIndex imNextIndex im = fromIntegral . fromJust $ find (\n -> not $ IntMap.member n (unIndexMap im)) [0 ..] 
-type ClientMap = Map (Qualified UserId) (Map ClientId Word32) +imAddClient :: IndexMap -> ClientIdentity -> (LeafIndex, IndexMap) +imAddClient im cid = let idx = imNextIndex im in (idx, IndexMap $ IntMap.insert (fromIntegral idx) cid $ unIndexMap im) + +imRemoveClient :: IndexMap -> LeafIndex -> Maybe (ClientIdentity, IndexMap) +imRemoveClient im idx = do + cid <- imLookup im idx + pure (cid, IndexMap . IntMap.delete (fromIntegral idx) $ unIndexMap im) + +type ClientMap = Map (Qualified UserId) (Map ClientId LeafIndex) mkClientMap :: [(Domain, UserId, ClientId, Int32)] -> ClientMap mkClientMap = foldr addEntry mempty @@ -52,7 +64,7 @@ mkClientMap = foldr addEntry mempty addEntry (dom, usr, c, kpi) = Map.insertWith (<>) (Qualified usr dom) (Map.singleton c (fromIntegral kpi)) -cmLookupIndex :: ClientIdentity -> ClientMap -> Maybe Word32 +cmLookupIndex :: ClientIdentity -> ClientMap -> Maybe LeafIndex cmLookupIndex cid cm = do clients <- Map.lookup (cidQualifiedUser cid) cm Map.lookup (ciClient cid) clients @@ -69,13 +81,13 @@ cmRemoveClient cid cm = case Map.lookup (cidQualifiedUser cid) cm of isClientMember :: ClientIdentity -> ClientMap -> Bool isClientMember ci = isJust . 
cmLookupIndex ci -cmAssocs :: ClientMap -> [(ClientIdentity, Word32)] +cmAssocs :: ClientMap -> [(ClientIdentity, LeafIndex)] cmAssocs cm = do (quid, clients) <- Map.assocs cm (clientId, idx) <- Map.assocs clients pure (mkClientIdentity quid clientId, idx) -cmSingleton :: ClientIdentity -> Word32 -> ClientMap +cmSingleton :: ClientIdentity -> LeafIndex -> ClientMap cmSingleton cid idx = Map.singleton (cidQualifiedUser cid) diff --git a/services/galley/src/Galley/Cassandra/SubConversation.hs b/services/galley/src/Galley/Cassandra/SubConversation.hs index cf80db6e12..f445d0ce88 100644 --- a/services/galley/src/Galley/Cassandra/SubConversation.hs +++ b/services/galley/src/Galley/Cassandra/SubConversation.hs @@ -26,7 +26,7 @@ import Data.Id import qualified Data.Map as Map import Data.Qualified import Data.Time.Clock -import Galley.API.MLS.Types (SubConversation (..)) +import Galley.API.MLS.Types import Galley.Cassandra.Conversation.MLS (lookupMLSClients) import qualified Galley.Cassandra.Queries as Cql import Galley.Cassandra.Store (embedClient) @@ -57,7 +57,7 @@ selectSubConversation convId subConvId = do cnvmlsCipherSuite = suite }, scMembers = cm, - scIndexMap = mempty -- TODO + scIndexMap = mkIndexMap cm } insertSubConversation :: diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 07cfaaeab4..f878733794 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -79,13 +79,11 @@ tests s = [ testGroup "Message" [ test s "sender must be part of conversation" testSenderNotInConversation, - test s "send other user's commit" testSendAnotherUsersCommit + test s "send other user's commit XXX" testSendAnotherUsersCommit ], testGroup "Welcome" [ test s "local welcome" testLocalWelcome, - test s "local welcome (client with no public key)" testWelcomeNoKey, - test s "remote welcome" testRemoteWelcome, test s "post a remote MLS welcome message" sendRemoteMLSWelcome, 
test s "post a remote MLS welcome message (key package ref not found)" sendRemoteMLSWelcomeKPNotFound ], @@ -100,7 +98,7 @@ tests s = ], testGroup "Commit" - [ test s "add user to a conversation XXX" testAddUser, + [ test s "add user to a conversation" testAddUser, test s "add user with an incomplete welcome" testAddUserWithBundleIncompleteWelcome, test s "add user (not connected)" testAddUserNotConnected, test s "add user (partial client list)" testAddUserPartial, @@ -338,7 +336,7 @@ testLocalWelcome = do Nothing -> assertFailure "Expected welcome message" Just w -> pure w events <- mlsBracket [bob1] $ \wss -> do - es <- sendAndConsumeCommit commit + es <- sendAndConsumeCommitBundle commit WS.assertMatchN_ (5 # Second) wss $ wsAssertMLSWelcome (cidQualifiedUser bob1) welcome @@ -348,49 +346,6 @@ testLocalWelcome = do event <- assertOne events liftIO $ assertJoinEvent qcnv alice [bob] roleNameWireMember event -testWelcomeNoKey :: TestM () -testWelcomeNoKey = do - users <- createAndConnectUsers [Nothing, Nothing] - runMLSTest $ do - [alice1, bob1] <- traverse createMLSClient users - void $ setupMLSGroup alice1 - - -- add bob using an "out-of-band" key package - (kp, _) <- generateKeyPackage bob1 - commit <- createAddCommitWithKeyPackages alice1 [(bob1, kp.rmRaw)] - welcome <- liftIO $ case mpWelcome commit of - Nothing -> assertFailure "Expected welcome message" - Just w -> pure w - - err <- - responseJsonError - =<< postWelcome (ciUser alice1) welcome - assertFailure "Expected welcome message" - Just w -> pure w - (_, reqs) <- - withTempMockFederator' welcomeMock $ - postWelcome (ciUser (mpSender commit)) welcome - !!! const 201 === statusCode - consumeWelcome welcome - fedWelcome <- assertOne (filter ((== "mls-welcome") . frRPC) reqs) - let req :: Maybe MLSWelcomeRequest = Aeson.decode (frBody fedWelcome) - liftIO $ req @?= (Just . MLSWelcomeRequest . 
Base64ByteString) welcome - testAddUserWithBundle :: TestM () testAddUserWithBundle = do [alice, bob] <- createAndConnectUsers [Nothing, Nothing] @@ -494,7 +449,7 @@ testAddUserNotConnected = do -- now connect and retry liftTest $ connectUsers (qUnqualified alice) (pure (qUnqualified bob)) - void $ sendAndConsumeCommit commit + void $ sendAndConsumeCommitBundle commit testAddUserWithProteusClients :: TestM () testAddUserWithProteusClients = do @@ -508,7 +463,7 @@ testAddUserWithProteusClients = do _bob3 <- createWireClient bob void $ setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle testAddUserPartial :: TestM () testAddUserPartial = do @@ -548,14 +503,14 @@ testAddClientPartial = do -- alice1 creates a group with bob1 void $ setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle -- now bob2 and bob3 upload key packages, and alice adds bob2 only kp <- uploadNewKeyPackage bob2 void $ uploadNewKeyPackage bob3 void $ createAddCommitWithKeyPackages alice1 [(bob2, kp.rmRaw)] - >>= sendAndConsumeCommit + >>= sendAndConsumeCommitBundle testSendAnotherUsersCommit :: TestM () testSendAnotherUsersCommit = do @@ -570,7 +525,7 @@ testSendAnotherUsersCommit = do -- create group with alice1 and bob1 void $ setupMLSGroup alice1 - createAddCommit alice1 [bob] >>= void . sendAndConsumeCommit + createAddCommit alice1 [bob] >>= void . sendAndConsumeCommitBundle -- Alice creates a commit that adds bob2 bob2 <- createMLSClient bob @@ -580,7 +535,7 @@ testSendAnotherUsersCommit = do -- and the corresponding commit is sent from Bob instead of Alice err <- responseJsonError - =<< postMessage bob1 (mpMessage mp) + =<< (localPostCommitBundle bob1 =<< createBundle mp) setupMLSGroup alice1 - createAddCommit alice1 [bob] >>= void . sendAndConsumeCommit + createAddCommit alice1 [bob] >>= void . 
sendAndConsumeCommitBundle e <- responseJsonError =<< postMembers @@ -623,7 +578,7 @@ testRemoveUsersDirectly = do [alice1, bob1] <- traverse createMLSClient [alice, bob] void $ uploadNewKeyPackage bob1 qcnv <- snd <$> setupMLSGroup alice1 - createAddCommit alice1 [bob] >>= void . sendAndConsumeCommit + createAddCommit alice1 [bob] >>= void . sendAndConsumeCommitBundle e <- responseJsonError =<< deleteMemberQualified @@ -640,7 +595,7 @@ testProteusMessage = do [alice1, bob1] <- traverse createMLSClient [alice, bob] void $ uploadNewKeyPackage bob1 qcnv <- snd <$> setupMLSGroup alice1 - createAddCommit alice1 [bob] >>= void . sendAndConsumeCommit + createAddCommit alice1 [bob] >>= void . sendAndConsumeCommitBundle e <- responseJsonError =<< postProteusMessageQualified @@ -666,7 +621,7 @@ testStaleCommit = do gsBackup <- getClientGroupState alice1 -- add the first batch of users to the conversation - void $ createAddCommit alice1 users1 >>= sendAndConsumeCommit + void $ createAddCommit alice1 users1 >>= sendAndConsumeCommitBundle -- now roll back alice1 and try to add the second batch of users setClientGroupState alice1 gsBackup @@ -688,7 +643,7 @@ testAddRemoteUser = do commit <- createAddCommit alice1 [bob] (events, reqs) <- withTempMockFederator' (receiveCommitMock [bob1] <|> welcomeMock) $ - sendAndConsumeCommit commit + sendAndConsumeCommitBundle commit pure (events, reqs, qcnv) liftIO $ do @@ -722,10 +677,10 @@ testCommitLock = do traverse_ uploadNewKeyPackage [bob1, charlie1, dee1] -- alice adds add bob - void $ createAddCommit alice1 [cidQualifiedUser bob1] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [cidQualifiedUser bob1] >>= sendAndConsumeCommitBundle -- alice adds charlie - void $ createAddCommit alice1 [cidQualifiedUser charlie1] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [cidQualifiedUser charlie1] >>= sendAndConsumeCommitBundle -- simulate concurrent commit by blocking epoch casClient <- view tsCass @@ -764,7 +719,7 @@ 
testAddUserBareProposalCommit = do >>= traverse_ sendAndConsumeMessage commit <- createPendingProposalCommit alice1 void $ assertJust (mpWelcome commit) - void $ sendAndConsumeCommit commit + void $ sendAndConsumeCommitBundle commit -- check that bob can now see the conversation liftTest $ do @@ -827,8 +782,8 @@ testAdminRemovesUserFromConv = do void $ createWireClient bob -- also create one extra non-MLS client traverse_ uploadNewKeyPackage [bob1, bob2] (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit - events <- createRemoveCommit alice1 [bob1, bob2] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle + events <- createRemoveCommit alice1 [bob1, bob2] >>= sendAndConsumeCommitBundle pure (qcnv, events) liftIO $ assertOne events >>= assertLeaveEvent qcnv alice [bob] @@ -852,7 +807,7 @@ testRemoveClientsIncomplete = do [alice1, bob1, bob2] <- traverse createMLSClient [alice, bob, bob] traverse_ uploadNewKeyPackage [bob1, bob2] void $ setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle commit <- createRemoveCommit alice1 [bob1] err <- @@ -873,7 +828,7 @@ testRemoteAppMessage = do let mock = receiveCommitMock [bob1] <|> messageSentMock <|> welcomeMock ((message, events), reqs) <- withTempMockFederator' mock $ do - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle message <- createApplicationMessage alice1 "hello" (events, _) <- sendAndConsumeMessage message pure (message, events) @@ -1126,7 +1081,7 @@ testAppMessage = do clients@(alice1 : _) <- traverse createMLSClient users traverse_ uploadNewKeyPackage (tail clients) (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 (tail users) >>= sendAndConsumeCommit + void $ createAddCommit alice1 (tail users) >>= sendAndConsumeCommitBundle 
message <- createApplicationMessage alice1 "some text" mlsBracket clients $ \wss -> do @@ -1151,7 +1106,7 @@ testAppMessage2 = do -- create group with alice1 and other clients conversation <- snd <$> setupMLSGroup alice1 mp <- createAddCommit alice1 [bob, charlie] - void $ sendAndConsumeCommit mp + void $ sendAndConsumeCommitBundle mp traverse_ consumeWelcome (mpWelcome mp) @@ -1188,7 +1143,7 @@ testAppMessageSomeReachable = do <|> welcomeMock ([event], _) <- withTempMockFederator' mocks $ do - sendAndConsumeCommit commit + sendAndConsumeCommitBundle commit let unreachables = Set.singleton (Domain "charlie.example.com") withTempMockFederator' (mockUnreachableFor unreachables) $ do @@ -1219,7 +1174,7 @@ testAppMessageUnreachable = do commit <- createAddCommit alice1 [bob] ([event], _) <- withTempMockFederator' (receiveCommitMock [bob1] <|> welcomeMock) $ - sendAndConsumeCommit commit + sendAndConsumeCommitBundle commit message <- createApplicationMessage alice1 "hi, bob!" (_, us) <- sendAndConsumeMessage message @@ -1307,7 +1262,7 @@ testRemoteToLocal = do let mock = receiveCommitMock [bob1] <|> welcomeMock <|> claimKeyPackagesMock kpb void . withTempMockFederator' mock $ - sendAndConsumeCommit mp + sendAndConsumeCommitBundle mp traverse_ consumeWelcome (mpWelcome mp) message <- createApplicationMessage bob1 "hello from another backend" @@ -1352,7 +1307,7 @@ testRemoteToLocalWrongConversation = do mp <- createAddCommit alice1 [bob] let mock = receiveCommitMock [bob1] <|> welcomeMock - void . withTempMockFederator' mock $ sendAndConsumeCommit mp + void . 
withTempMockFederator' mock $ sendAndConsumeCommitBundle mp traverse_ consumeWelcome (mpWelcome mp) message <- createApplicationMessage bob1 "hello from another backend" @@ -1442,7 +1397,7 @@ propInvalidEpoch = do -- Add bob -> epoch 1 void $ uploadNewKeyPackage bob1 gsBackup <- getClientGroupState alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle gsBackup2 <- getClientGroupState alice1 -- try to send a proposal from an old epoch (0) @@ -1475,7 +1430,7 @@ propInvalidEpoch = do void $ uploadNewKeyPackage dee1 setClientGroupState alice1 gsBackup2 createAddProposals alice1 [dee] >>= traverse_ sendAndConsumeMessage - void $ createPendingProposalCommit alice1 >>= sendAndConsumeCommit + void $ createPendingProposalCommit alice1 >>= sendAndConsumeCommitBundle -- scenario: -- alice1 creates a group and adds bob1 @@ -1502,7 +1457,7 @@ testExternalAddProposal = do (_, qcnv) <- setupMLSGroup alice1 void $ createAddCommit alice1 [bob] - >>= sendAndConsumeCommit + >>= sendAndConsumeCommitBundle -- bob joins with an external proposal bob2 <- createMLSClient bob @@ -1516,7 +1471,7 @@ testExternalAddProposal = do void $ createPendingProposalCommit alice1 - >>= sendAndConsumeCommit + >>= sendAndConsumeCommitBundle -- alice sends a message do @@ -1535,7 +1490,7 @@ testExternalAddProposal = do qcnv !!! 
const 200 === statusCode createAddCommit bob2 [charlie] - >>= sendAndConsumeCommit + >>= sendAndConsumeCommitBundle testExternalAddProposalNonAdminCommit :: TestM () testExternalAddProposalNonAdminCommit = do @@ -1557,7 +1512,7 @@ testExternalAddProposalNonAdminCommit = do (_, qcnv) <- setupMLSGroup alice1 void $ createAddCommit alice1 [bob] - >>= sendAndConsumeCommit + >>= sendAndConsumeCommitBundle -- bob joins with an external proposal mlsBracket [alice1, bob1] $ \wss -> do @@ -1571,7 +1526,7 @@ testExternalAddProposalNonAdminCommit = do -- bob1 commits void $ createPendingProposalCommit bob1 - >>= sendAndConsumeCommit + >>= sendAndConsumeCommitBundle -- scenario: -- alice adds bob and charlie @@ -1593,7 +1548,7 @@ testExternalAddProposalWrongClient = do void $ setupMLSGroup alice1 void $ createAddCommit alice1 [bob, charlie] - >>= sendAndConsumeCommit + >>= sendAndConsumeCommitBundle prop <- createExternalAddProposal bob2 postMessage charlie1 (mpMessage prop) @@ -1616,7 +1571,7 @@ testExternalAddProposalWrongUser = do void $ setupMLSGroup alice1 void $ createAddCommit alice1 [bob] - >>= sendAndConsumeCommit + >>= sendAndConsumeCommitBundle prop <- createExternalAddProposal charlie1 postMessage charlie1 (mpMessage prop) @@ -1691,7 +1646,7 @@ testBackendRemoveProposalLocalConvLocalUser = do [alice1, bob1, bob2] <- traverse createMLSClient [alice, bob, bob] traverse_ uploadNewKeyPackage [bob1, bob2] (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle bobClients <- getClientsFromGroupState alice1 bob mlsBracket [alice1] $ \wss -> void $ do @@ -1708,7 +1663,7 @@ testBackendRemoveProposalLocalConvLocalUser = do consumeMessage1 alice1 msg -- alice commits the external proposals - events <- createPendingProposalCommit alice1 >>= sendAndConsumeCommit + events <- createPendingProposalCommit alice1 >>= sendAndConsumeCommitBundle liftIO $ events @?= [] 
testBackendRemoveProposalLocalConvRemoteUser :: TestM () @@ -1722,7 +1677,7 @@ testBackendRemoveProposalLocalConvRemoteUser = do let mock = receiveCommitMock [bob1, bob2] <|> welcomeMock <|> messageSentMock void . withTempMockFederator' mock $ do mlsBracket [alice1] $ \[wsA] -> do - void $ sendAndConsumeCommit commit + void $ sendAndConsumeCommitBundle commit bobClients <- getClientsFromGroupState alice1 bob fedGalleyClient <- view tsFedGalleyClient @@ -1799,7 +1754,7 @@ testBackendRemoveProposalLocalConvLocalLeaverCreator = do [alice1, bob1, bob2] <- traverse createMLSClient [alice, bob, bob] traverse_ uploadNewKeyPackage [bob1, bob2] (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle aliceClients <- getClientsFromGroupState alice1 alice mlsBracket [alice1, bob1, bob2] $ \wss -> void $ do @@ -1827,7 +1782,7 @@ testBackendRemoveProposalLocalConvLocalLeaverCreator = do WS.assertNoEvent (1 # WS.Second) wss -- bob commits the external proposals - events <- createPendingProposalCommit bob1 >>= sendAndConsumeCommit + events <- createPendingProposalCommit bob1 >>= sendAndConsumeCommitBundle liftIO $ events @?= [] testBackendRemoveProposalLocalConvLocalLeaverCommitter :: TestM () @@ -1838,13 +1793,13 @@ testBackendRemoveProposalLocalConvLocalLeaverCommitter = do [alice1, bob1, bob2, charlie1] <- traverse createMLSClient [alice, bob, bob, charlie] traverse_ uploadNewKeyPackage [bob1, bob2, charlie1] (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle -- promote bob putOtherMemberQualified (ciUser alice1) bob (OtherMemberUpdate (Just roleNameWireAdmin)) qcnv !!! 
const 200 === statusCode - void $ createAddCommit bob1 [charlie] >>= sendAndConsumeCommit + void $ createAddCommit bob1 [charlie] >>= sendAndConsumeCommitBundle bobClients <- getClientsFromGroupState alice1 bob mlsBracket [alice1, charlie1, bob1, bob2] $ \wss -> void $ do @@ -1872,7 +1827,7 @@ testBackendRemoveProposalLocalConvLocalLeaverCommitter = do WS.assertNoEvent (1 # WS.Second) wss -- alice commits the external proposals - events <- createPendingProposalCommit alice1 >>= sendAndConsumeCommit + events <- createPendingProposalCommit alice1 >>= sendAndConsumeCommitBundle liftIO $ events @?= [] testBackendRemoveProposalLocalConvRemoteLeaver :: TestM () @@ -1888,7 +1843,7 @@ testBackendRemoveProposalLocalConvRemoteLeaver = do bobClients <- getClientsFromGroupState alice1 bob void . withTempMockFederator' mock $ do mlsBracket [alice1] $ \[wsA] -> void $ do - void $ sendAndConsumeCommit commit + void $ sendAndConsumeCommitBundle commit fedGalleyClient <- view tsFedGalleyClient void $ runFedClient @@ -1913,7 +1868,7 @@ testBackendRemoveProposalLocalConvLocalClient = do [alice1, bob1, bob2, charlie1] <- traverse createMLSClient [alice, bob, bob, charlie] traverse_ uploadNewKeyPackage [bob1, bob2, charlie1] (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommitBundle Just (_, kpBob1) <- find (\(ci, _) -> ci == bob1) <$> getClientsFromGroupState alice1 bob mlsBracket [alice1, bob1] $ \[wsA, wsB] -> do @@ -1936,7 +1891,7 @@ testBackendRemoveProposalLocalConvLocalClient = do flip consumeMessage1 msg mp <- createPendingProposalCommit charlie1 - events <- sendAndConsumeCommit mp + events <- sendAndConsumeCommitBundle mp liftIO $ events @?= [] WS.assertMatchN_ (5 # WS.Second) [wsA, wsB] $ \n -> do wsAssertMLSMessage (Conv <$> qcnv) charlie (mpMessage mp) n @@ -1954,7 +1909,7 @@ testBackendRemoveProposalLocalConvRemoteClient = do let mock = 
receiveCommitMock [bob1] <|> welcomeMock <|> messageSentMock void . withTempMockFederator' mock $ do mlsBracket [alice1] $ \[wsA] -> void $ do - void $ sendAndConsumeCommit commit + void $ sendAndConsumeCommitBundle commit fedGalleyClient <- view tsFedGalleyClient void $ @@ -2132,7 +2087,7 @@ testRemoteUserPostsCommitBundle = do void $ do let mock = receiveCommitMock [bob1] <|> welcomeMock withTempMockFederator' mock $ do - void $ sendAndConsumeCommit commit + void $ sendAndConsumeCommitBundle commit putOtherMemberQualified (qUnqualified alice) bob (OtherMemberUpdate (Just roleNameWireAdmin)) qcnv !!! const 200 === statusCode @@ -2234,7 +2189,7 @@ testSelfConversationLeave = do clients@(creator : others) <- traverse createMLSClient (replicate 3 alice) traverse_ uploadNewKeyPackage others (_, qcnv) <- setupMLSSelfGroup creator - void $ createAddCommit creator [alice] >>= sendAndConsumeCommit + void $ createAddCommit creator [alice] >>= sendAndConsumeCommitBundle mlsBracket clients $ \wss -> do liftTest $ deleteMemberQualified (qUnqualified alice) alice qcnv @@ -2290,7 +2245,7 @@ getGroupInfoDisabled = do [alice1, bob1] <- traverse createMLSClient [alice, bob] void $ uploadNewKeyPackage bob1 (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle withMLSDisabled $ localGetGroupInfo (qUnqualified alice) (fmap Conv qcnv) @@ -2351,7 +2306,7 @@ testJoinSubConv = do [alice1, bob1, bob2] <- traverse createMLSClient [alice, bob, bob] traverse_ uploadNewKeyPackage [bob1, bob2] (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle let subId = SubConvId "conference" sub <- @@ -2424,7 +2379,7 @@ testJoinSubNonMemberClient = do traverse createMLSClient [alice, alice, bob] traverse_ uploadNewKeyPackage [bob1, alice2] (_, qcnv) <- setupMLSGroup alice1 - void $ 
createAddCommit alice1 [alice] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [alice] >>= sendAndConsumeCommitBundle qcs <- createSubConv qcnv alice1 (SubConvId "conference") @@ -2441,7 +2396,7 @@ testAddClientSubConvFailure = do [alice1, bob1] <- traverse createMLSClient [alice, bob] void $ uploadNewKeyPackage bob1 (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle let subId = SubConvId "conference" void $ createSubConv qcnv alice1 subId @@ -2564,7 +2519,7 @@ testRemoteUserJoinSubConv = do void $ do commit <- createAddCommit alice1 [bob] withTempMockFederator' (receiveCommitMock [bob1] <|> welcomeMock) $ - sendAndConsumeCommit commit + sendAndConsumeCommitBundle commit let mock = asum @@ -2617,7 +2572,7 @@ testSendMessageSubConv = do [alice1, bob1, bob2] <- traverse createMLSClient [alice, bob, bob] traverse_ uploadNewKeyPackage [bob1, bob2] (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle qcs <- createSubConv qcnv bob1 (SubConvId "conference") @@ -2683,7 +2638,7 @@ testRemoteMemberGetSubConv isAMember = do let mock = receiveCommitMock [bob1] <|> welcomeMock <|> claimKeyPackagesMock kpb void . withTempMockFederator' mock $ - sendAndConsumeCommit mp + sendAndConsumeCommitBundle mp let subconv = SubConvId "conference" @@ -2732,7 +2687,7 @@ testRemoteMemberDeleteSubConv isAMember = do mp <- createAddCommit alice1 [bob] let mock = receiveCommitMock [bob1] <|> welcomeMock - void . withTempMockFederator' mock . sendAndConsumeCommit $ mp + void . withTempMockFederator' mock . 
sendAndConsumeCommitBundle $ mp sub <- liftTest $ @@ -2920,7 +2875,7 @@ testDeleteParentOfSubConv = do (parentGroupId, qcnv) <- setupMLSGroup alice1 (qcs, _) <- withTempMockFederator' (receiveCommitMock [bob1]) $ do - void $ createAddCommit alice1 [arthur, bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [arthur, bob] >>= sendAndConsumeCommitBundle createSubConv qcnv alice1 sconv subGid <- getCurrentGroupId @@ -3101,7 +3056,7 @@ testLeaveSubConv isSubConvCreator = do <|> ("on-mls-message-sent" ~> RemoteMLSMessageOk) ) $ do - void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommitBundle qsub <- createSubConv qcnv bob1 subId void $ createExternalCommit alice1 Nothing qsub >>= sendAndConsumeCommitBundle @@ -3145,7 +3100,7 @@ testLeaveSubConv isSubConvCreator = do do leaveCommit <- createPendingProposalCommit (head others) mlsBracket (firstLeaver : others) $ \(wsLeaver : wss) -> do - events <- sendAndConsumeCommit leaveCommit + events <- sendAndConsumeCommitBundle leaveCommit liftIO $ events @?= [] WS.assertMatchN_ (5 # WS.Second) wss $ \n -> do wsAssertMLSMessage qsub (cidQualifiedUser . 
head $ others) (mpMessage leaveCommit) n @@ -3206,7 +3161,7 @@ testLeaveSubConvNonMember = do [alice1, bob1] <- traverse createMLSClient [alice, bob] void $ uploadNewKeyPackage bob1 (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle let subId = SubConvId "conference" _qsub <- createSubConv qcnv bob1 subId @@ -3281,7 +3236,7 @@ testRemoveUserParent = do [alice, bob, bob, charlie, charlie] traverse_ uploadNewKeyPackage [bob1, bob2, charlie1, charlie2] (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommitBundle let subname = SubConvId "conference" void $ createSubConv qcnv bob1 subname @@ -3343,7 +3298,7 @@ testRemoveCreatorParent = do [alice, bob, bob, charlie, charlie] traverse_ uploadNewKeyPackage [bob1, bob2, charlie1, charlie2] (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommitBundle let subname = SubConvId "conference" void $ createSubConv qcnv alice1 subname @@ -3414,7 +3369,7 @@ testCreatorRemovesUserFromParent = do [alice, bob, bob, charlie, charlie] traverse_ uploadNewKeyPackage [bob1, bob2, charlie1, charlie2] (_, qcnv) <- setupMLSGroup alice1 - void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommit + void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommitBundle stateParent <- State.get diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index 5478fc63f8..660df72ac6 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -200,27 +200,6 @@ postCommitBundle sender qcs bundle = do (\rsender -> remotePostCommitBundle rsender qcs bundle) 
(cidQualifiedUser sender $> sender) --- FUTUREWORK: remove this and start using commit bundles everywhere in tests -postWelcome :: - ( MonadIO m, - MonadHttp m, - MonadReader TestSetup m, - HasCallStack - ) => - UserId -> - ByteString -> - m ResponseLBS -postWelcome uid welcome = do - galley <- view tsUnversionedGalley - post - ( galley - . paths ["v2", "mls", "welcome"] - . zUser uid - . zConn "conn" - . Bilge.content "message/mls" - . bytes welcome - ) - saveRemovalKey :: FilePath -> TestM () saveRemovalKey fp = do keys <- fromJust <$> view (tsGConf . optSettings . setMlsPrivateKeyPaths) @@ -612,7 +591,7 @@ bundleKeyPackages bundle = -- group to the previous state by using an older version of the group file. createAddCommit :: HasCallStack => ClientIdentity -> [Qualified UserId] -> MLSTest MessagePackage createAddCommit cid users = do - kps <- fmap (concat . map bundleKeyPackages) . traverse (claimKeyPackages cid) $ users + kps <- fmap (concatMap bundleKeyPackages) . traverse (claimKeyPackages cid) $ users liftIO $ assertBool "no key packages could be claimed" (not (null kps)) createAddCommitWithKeyPackages cid kps @@ -659,7 +638,7 @@ createExternalCommit qcid mpgs qcs = do createAddProposals :: HasCallStack => ClientIdentity -> [Qualified UserId] -> MLSTest [MessagePackage] createAddProposals cid users = do - kps <- fmap (concat . map bundleKeyPackages) . traverse (claimKeyPackages cid) $ users + kps <- fmap (concatMap bundleKeyPackages) . traverse (claimKeyPackages cid) $ users traverse (createAddProposalWithKeyPackage cid) kps -- | Create an application message. @@ -893,41 +872,15 @@ consumeMessage1 cid msg = do -- commit, the 'sendAndConsumeCommit' function should be used instead. 
sendAndConsumeMessage :: HasCallStack => MessagePackage -> MLSTest ([Event], UnreachableUsers) sendAndConsumeMessage mp = do - putStrLn "sending message:" - print $ hex (mpMessage mp) + for_ mp.mpWelcome $ \_ -> liftIO $ assertFailure "use sendAndConsumeCommitBundle" res <- fmap (mmssEvents Tuple.&&& mmssUnreachableUsers) $ responseJsonError =<< postMessage (mpSender mp) (mpMessage mp) do - postWelcome (ciUser (mpSender mp)) welcome - !!! const 201 === statusCode - consumeWelcome welcome - pure res --- | Send an MLS commit message, simulate clients receiving it, and update the --- test state accordingly. -sendAndConsumeCommit :: - HasCallStack => - MessagePackage -> - MLSTest [Event] -sendAndConsumeCommit mp = do - (events, _) <- sendAndConsumeMessage mp - - -- increment epoch and add new clients - State.modify $ \mls -> - mls - { mlsEpoch = mlsEpoch mls + 1, - mlsMembers = mlsMembers mls <> mlsNewMembers mls, - mlsNewMembers = mempty - } - - pure events - mkBundle :: MessagePackage -> Either Text CommitBundle mkBundle mp = do commitB <- first ("Commit: " <>) $ decodeMLS' (mpMessage mp) From 0def49c15e16a8702e03541022ac3c7285e06ae5 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Wed, 12 Apr 2023 11:46:05 +0200 Subject: [PATCH 13/75] readGroupState for the new group.json format --- .../galley/test/integration/API/MLS/Util.hs | 23 +++++++++++-------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index 660df72ac6..db38371a7a 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -26,7 +26,7 @@ import Bilge import Bilge.Assert import Control.Arrow ((&&&)) import Control.Error.Util -import Control.Lens (preview, to, view, (.~), (^..)) +import Control.Lens (preview, to, view, (.~), (^..), (^?)) import Control.Monad.Catch import Control.Monad.Cont import Control.Monad.State (StateT, 
evalStateT) @@ -41,7 +41,6 @@ import qualified Data.ByteString.Base64.URL as B64U import Data.ByteString.Conversion import qualified Data.ByteString.Lazy as LBS import Data.Domain -import Data.Hex import Data.Id import Data.Json.Util hiding ((#)) import qualified Data.Map as Map @@ -932,14 +931,18 @@ mlsBracket clients k = do readGroupState :: ByteString -> [(ClientIdentity, Word32)] readGroupState j = do - -- TODO: figure out the new JSON format of the group state - node <- j ^.. key "group" . key "tree" . key "tree" . key "nodes" . _Array . traverse - leafNode <- node ^.. key "node" . key "LeafNode" - identity <- - either (const []) pure . decodeMLS' . BS.pack . map fromIntegral $ - leafNode ^.. key "key_package" . key "payload" . key "credential" . key "credential" . key "Basic" . key "identity" . key "vec" . _Array . traverse . _Integer - _kpr <- (unhexM . T.encodeUtf8 =<<) $ leafNode ^.. key "key_package_ref" . _String - pure (identity, error "TODO: get index") + node <- j ^.. key "group" . key "public_group" . key "treesync" . key "tree" . key "leaf_nodes" . _Array . traverse . key "node" + + case node ^? key "leaf_index" of + Just i -> do + identityBytes <- node ^.. key "leaf_node" . key "payload" . key "credential" . key "credential" . key "Basic" . key "identity" . key "vec" + let identity = BS.pack (identityBytes ^.. _Array . traverse . _Integer . to fromIntegral) + cid <- case decodeMLS' identity of + Left _ -> [] + Right x -> pure x + n <- i ^.. _Integer . to fromIntegral + pure $ (cid, n) + Nothing -> [] getClientsFromGroupState :: ClientIdentity -> From c3d9d0c595f5a0ae549f173704ab990c43a11c35 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Wed, 12 Apr 2023 13:54:17 +0200 Subject: [PATCH 14/75] Generate welcome recipients when processing bundle Also remove old unsupported welcome endpoints. All welcome messages now need to be sent through commit bundles. 
--- .../src/Wire/API/Routes/Public/Galley/MLS.hs | 122 +++++------------- services/galley/src/Galley/API/MLS.hs | 2 - services/galley/src/Galley/API/MLS/Message.hs | 12 +- services/galley/src/Galley/API/MLS/Types.hs | 3 + services/galley/src/Galley/API/MLS/Welcome.hs | 62 ++------- services/galley/src/Galley/API/Public/MLS.hs | 4 +- services/galley/test/integration/API/MLS.hs | 4 +- 7 files changed, 52 insertions(+), 157 deletions(-) diff --git a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs index 6c0c09b337..5ba0da1807 100644 --- a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs +++ b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs @@ -21,111 +21,55 @@ import Servant hiding (WithStatus) import Servant.Swagger.Internal.Orphans () import Wire.API.Error import Wire.API.Error.Galley -import Wire.API.Event.Conversation import Wire.API.MLS.CommitBundle import Wire.API.MLS.Keys import Wire.API.MLS.Message import Wire.API.MLS.Serialisation import Wire.API.MLS.Servant -import Wire.API.MLS.Welcome import Wire.API.MakesFederatedCall import Wire.API.Routes.MultiVerb import Wire.API.Routes.Named import Wire.API.Routes.Public -import Wire.API.Routes.Version type MLSMessagingAPI = Named - "mls-welcome-message" - ( Summary "Post an MLS welcome message" - :> Until 'V3 - :> MakesFederatedCall 'Galley "mls-welcome" + "mls-message" + ( Summary "Post an MLS message" + :> MakesFederatedCall 'Galley "on-mls-message-sent" + :> MakesFederatedCall 'Galley "send-mls-message" + :> MakesFederatedCall 'Galley "on-conversation-updated" + :> MakesFederatedCall 'Galley "on-new-remote-conversation" + :> MakesFederatedCall 'Galley "on-new-remote-subconversation" + :> MakesFederatedCall 'Brig "get-mls-clients" + :> MakesFederatedCall 'Galley "on-delete-mls-conversation" + :> CanThrow 'ConvAccessDenied + :> CanThrow 'ConvMemberNotFound + :> CanThrow 'ConvNotFound + :> CanThrow 'LegalHoldNotEnabled + :> CanThrow 
'MissingLegalholdConsent + :> CanThrow 'MLSClientMismatch + :> CanThrow 'MLSClientSenderUserMismatch + :> CanThrow 'MLSCommitMissingReferences + :> CanThrow 'MLSGroupConversationMismatch + :> CanThrow 'MLSInvalidLeafNodeIndex :> CanThrow 'MLSKeyPackageRefNotFound + :> CanThrow 'MLSMissingSenderClient :> CanThrow 'MLSNotEnabled - :> "welcome" + :> CanThrow 'MLSProposalNotFound + :> CanThrow 'MLSProtocolErrorTag + :> CanThrow 'MLSSelfRemovalNotAllowed + :> CanThrow 'MLSStaleMessage + :> CanThrow 'MLSSubConvClientNotInParent + :> CanThrow 'MLSUnsupportedMessage + :> CanThrow 'MLSUnsupportedProposal + :> CanThrow MLSProposalFailure + :> "messages" :> ZLocalUser + :> ZClient :> ZConn - :> ReqBody '[MLS] (RawMLS Welcome) - :> MultiVerb1 'POST '[JSON] (RespondEmpty 201 "Welcome message sent") + :> ReqBody '[MLS] (RawMLS Message) + :> MultiVerb1 'POST '[JSON] (Respond 201 "Message sent" MLSMessageSendingStatus) ) - :<|> Named - "mls-message-v1" - ( Summary "Post an MLS message" - :> MakesFederatedCall 'Brig "get-mls-clients" - :> MakesFederatedCall 'Galley "on-conversation-updated" - :> MakesFederatedCall 'Galley "on-delete-mls-conversation" - :> MakesFederatedCall 'Galley "on-mls-message-sent" - :> MakesFederatedCall 'Galley "on-new-remote-conversation" - :> MakesFederatedCall 'Galley "on-new-remote-subconversation" - :> MakesFederatedCall 'Galley "send-mls-message" - :> Until 'V2 - :> CanThrow 'ConvAccessDenied - :> CanThrow 'ConvMemberNotFound - :> CanThrow 'ConvNotFound - :> CanThrow 'LegalHoldNotEnabled - :> CanThrow 'MLSClientMismatch - :> CanThrow 'MLSCommitMissingReferences - :> CanThrow 'MLSKeyPackageRefNotFound - :> CanThrow 'MLSInvalidLeafNodeIndex - :> CanThrow 'MLSNotEnabled - :> CanThrow 'MLSProposalNotFound - :> CanThrow 'MLSProtocolErrorTag - :> CanThrow 'MLSSelfRemovalNotAllowed - :> CanThrow 'MLSStaleMessage - :> CanThrow 'MLSUnsupportedMessage - :> CanThrow 'MLSUnsupportedProposal - :> CanThrow 'MLSClientSenderUserMismatch - :> CanThrow 
'MLSGroupConversationMismatch - :> CanThrow 'MLSMissingSenderClient - :> CanThrow 'MissingLegalholdConsent - :> CanThrow 'MLSSubConvClientNotInParent - :> CanThrow MLSProposalFailure - :> "messages" - :> ZLocalUser - :> ZClient - :> ZConn - :> ReqBody '[MLS] (RawMLS Message) - :> MultiVerb1 'POST '[JSON] (Respond 201 "Message sent" [Event]) - ) - :<|> Named - "mls-message" - ( Summary "Post an MLS message" - :> MakesFederatedCall 'Galley "on-mls-message-sent" - :> MakesFederatedCall 'Galley "send-mls-message" - :> MakesFederatedCall 'Galley "on-conversation-updated" - :> MakesFederatedCall 'Galley "on-new-remote-conversation" - :> MakesFederatedCall 'Galley "on-new-remote-subconversation" - :> MakesFederatedCall 'Brig "get-mls-clients" - :> MakesFederatedCall 'Galley "on-delete-mls-conversation" - :> From 'V2 - :> CanThrow 'ConvAccessDenied - :> CanThrow 'ConvMemberNotFound - :> CanThrow 'ConvNotFound - :> CanThrow 'LegalHoldNotEnabled - :> CanThrow 'MissingLegalholdConsent - :> CanThrow 'MLSClientMismatch - :> CanThrow 'MLSClientSenderUserMismatch - :> CanThrow 'MLSCommitMissingReferences - :> CanThrow 'MLSGroupConversationMismatch - :> CanThrow 'MLSInvalidLeafNodeIndex - :> CanThrow 'MLSKeyPackageRefNotFound - :> CanThrow 'MLSMissingSenderClient - :> CanThrow 'MLSNotEnabled - :> CanThrow 'MLSProposalNotFound - :> CanThrow 'MLSProtocolErrorTag - :> CanThrow 'MLSSelfRemovalNotAllowed - :> CanThrow 'MLSStaleMessage - :> CanThrow 'MLSSubConvClientNotInParent - :> CanThrow 'MLSUnsupportedMessage - :> CanThrow 'MLSUnsupportedProposal - :> CanThrow MLSProposalFailure - :> "messages" - :> ZLocalUser - :> ZClient - :> ZConn - :> ReqBody '[MLS] (RawMLS Message) - :> MultiVerb1 'POST '[JSON] (Respond 201 "Message sent" MLSMessageSendingStatus) - ) :<|> Named "mls-commit-bundle" ( Summary "Post a MLS CommitBundle" @@ -137,7 +81,6 @@ type MLSMessagingAPI = :> MakesFederatedCall 'Galley "on-new-remote-subconversation" :> MakesFederatedCall 'Brig "get-mls-clients" :> 
MakesFederatedCall 'Galley "on-delete-mls-conversation" - :> From 'V4 :> CanThrow 'ConvAccessDenied :> CanThrow 'ConvMemberNotFound :> CanThrow 'ConvNotFound @@ -170,7 +113,6 @@ type MLSMessagingAPI = :<|> Named "mls-public-keys" ( Summary "Get public keys used by the backend to sign external proposals" - :> From 'V4 :> CanThrow 'MLSNotEnabled :> "public-keys" :> ZLocalUser diff --git a/services/galley/src/Galley/API/MLS.hs b/services/galley/src/Galley/API/MLS.hs index cbd8307232..92574f416a 100644 --- a/services/galley/src/Galley/API/MLS.hs +++ b/services/galley/src/Galley/API/MLS.hs @@ -18,7 +18,6 @@ module Galley.API.MLS ( isMLSEnabled, assertMLSEnabled, - postMLSWelcomeFromLocalUser, postMLSMessage, postMLSCommitBundleFromLocalUser, postMLSMessageFromLocalUser, @@ -32,7 +31,6 @@ import Data.Id import Data.Qualified import Galley.API.MLS.Enabled import Galley.API.MLS.Message -import Galley.API.MLS.Welcome import Galley.Env import Imports import Polysemy diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 7904b23b95..2b18382fea 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -52,7 +52,7 @@ import Galley.API.MLS.Propagate import Galley.API.MLS.Removal import Galley.API.MLS.Types import Galley.API.MLS.Util -import Galley.API.MLS.Welcome (postMLSWelcome) +import Galley.API.MLS.Welcome (sendWelcomes) import Galley.API.Util import Galley.Data.Conversation.Types hiding (Conversation) import qualified Galley.Data.Conversation.Types as Data @@ -348,13 +348,6 @@ postMLSCommitBundleToLocalConv qusr c conn bundle lConvOrSubId = do senderIdentity <- getSenderIdentity qusr c bundle.sender lConvOrSub action <- getCommitData senderIdentity lConvOrSub bundle.epoch bundle.commit.rmValue - -- TODO: check that the welcome message matches the action - -- for_ bundle.welcome $ \welcome -> - -- when - -- ( Set.fromList (map gsNewMember (welSecrets (rmValue 
welcome))) - -- /= Set.fromList (map (snd . snd) (cmAssocs (paAdd action))) - -- ) - -- $ throwS @'MLSWelcomeMismatch events <- processCommitWithAction senderIdentity @@ -368,8 +361,7 @@ postMLSCommitBundleToLocalConv qusr c conn bundle lConvOrSubId = do let cm = membersConvOrSub (tUnqualified lConvOrSub) unreachables <- propagateMessage qusr lConvOrSub conn bundle.commit.rmRaw cm - traverse_ (postMLSWelcome lConvOrSub conn) bundle.welcome - + traverse_ (sendWelcomes lConvOrSub conn (cmIdentities (paAdd action))) bundle.welcome pure (events, unreachables) postMLSCommitBundleToRemoteConv :: diff --git a/services/galley/src/Galley/API/MLS/Types.hs b/services/galley/src/Galley/API/MLS/Types.hs index f0938752dc..cf9be1b49e 100644 --- a/services/galley/src/Galley/API/MLS/Types.hs +++ b/services/galley/src/Galley/API/MLS/Types.hs @@ -87,6 +87,9 @@ cmAssocs cm = do (clientId, idx) <- Map.assocs clients pure (mkClientIdentity quid clientId, idx) +cmIdentities :: ClientMap -> [ClientIdentity] +cmIdentities = map fst . cmAssocs + cmSingleton :: ClientIdentity -> LeafIndex -> ClientMap cmSingleton cid idx = Map.singleton diff --git a/services/galley/src/Galley/API/MLS/Welcome.hs b/services/galley/src/Galley/API/MLS/Welcome.hs index 936855d15a..29c869278d 100644 --- a/services/galley/src/Galley/API/MLS/Welcome.hs +++ b/services/galley/src/Galley/API/MLS/Welcome.hs @@ -16,26 +16,20 @@ -- with this program. If not, see . 
module Galley.API.MLS.Welcome - ( postMLSWelcome, - postMLSWelcomeFromLocalUser, + ( sendWelcomes, sendLocalWelcomes, ) where -import Control.Comonad import Data.Domain import Data.Id import Data.Json.Util import Data.Qualified import Data.Time -import Galley.API.MLS.Enabled -import Galley.API.MLS.KeyPackage import Galley.API.Push import Galley.Data.Conversation -import Galley.Effects.BrigAccess import Galley.Effects.FederatorAccess import Galley.Effects.GundeckAccess -import Galley.Env import Imports import qualified Network.Wai.Utilities.Error as Wai import Network.Wai.Utilities.Server @@ -50,60 +44,28 @@ import Wire.API.Federation.API import Wire.API.Federation.API.Galley import Wire.API.Federation.Error import Wire.API.MLS.Credential +import Wire.API.MLS.Message import Wire.API.MLS.Serialisation import Wire.API.MLS.Welcome import Wire.API.Message -postMLSWelcome :: - ( Member BrigAccess r, - Member FederatorAccess r, +sendWelcomes :: + ( Member FederatorAccess r, Member GundeckAccess r, - Member (ErrorS 'MLSKeyPackageRefNotFound) r, - Member (Input UTCTime) r, - Member P.TinyLog r + Member P.TinyLog r, + Member (Input UTCTime) r ) => Local x -> Maybe ConnId -> + [ClientIdentity] -> RawMLS Welcome -> Sem r () -postMLSWelcome loc con wel = do +sendWelcomes loc con cids welcome = do now <- input - rcpts <- welcomeRecipients (rmValue wel) - let (locals, remotes) = partitionQualified loc rcpts - sendLocalWelcomes con now (rmRaw wel) (qualifyAs loc locals) - sendRemoteWelcomes (rmRaw wel) remotes - -postMLSWelcomeFromLocalUser :: - ( Member BrigAccess r, - Member FederatorAccess r, - Member GundeckAccess r, - Member (ErrorS 'MLSKeyPackageRefNotFound) r, - Member (ErrorS 'MLSNotEnabled) r, - Member (Input UTCTime) r, - Member (Input Env) r, - Member P.TinyLog r - ) => - Local x -> - ConnId -> - RawMLS Welcome -> - Sem r () -postMLSWelcomeFromLocalUser loc con wel = do - assertMLSEnabled - postMLSWelcome loc (Just con) wel - -welcomeRecipients :: - ( Member BrigAccess 
r, - Member (ErrorS 'MLSKeyPackageRefNotFound) r - ) => - Welcome -> - Sem r [Qualified (UserId, ClientId)] -welcomeRecipients = - traverse - ( fmap cidQualifiedClient - . derefKeyPackage - . gsNewMember - ) - . welSecrets + let (locals, remotes) = partitionQualified loc (map cidQualifiedClient cids) + let msg = encodeMLS' $ mkMessage (MessageWelcome welcome) + sendLocalWelcomes con now msg (qualifyAs loc locals) + sendRemoteWelcomes msg remotes sendLocalWelcomes :: Member GundeckAccess r => diff --git a/services/galley/src/Galley/API/Public/MLS.hs b/services/galley/src/Galley/API/Public/MLS.hs index 73187b06da..7de0a232ac 100644 --- a/services/galley/src/Galley/API/Public/MLS.hs +++ b/services/galley/src/Galley/API/Public/MLS.hs @@ -25,8 +25,6 @@ import Wire.API.Routes.Public.Galley.MLS mlsAPI :: API MLSAPI GalleyEffects mlsAPI = - mkNamedAPI @"mls-welcome-message" (callsFed (exposeAnnotations postMLSWelcomeFromLocalUser)) - <@> mkNamedAPI @"mls-message-v1" (callsFed (exposeAnnotations postMLSMessageFromLocalUserV1)) - <@> mkNamedAPI @"mls-message" (callsFed (exposeAnnotations postMLSMessageFromLocalUser)) + mkNamedAPI @"mls-message" (callsFed (exposeAnnotations postMLSMessageFromLocalUser)) <@> mkNamedAPI @"mls-commit-bundle" (callsFed (exposeAnnotations postMLSCommitBundleFromLocalUser)) <@> mkNamedAPI @"mls-public-keys" getMLSPublicKeys diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index f878733794..63cb80e906 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -79,11 +79,11 @@ tests s = [ testGroup "Message" [ test s "sender must be part of conversation" testSenderNotInConversation, - test s "send other user's commit XXX" testSendAnotherUsersCommit + test s "send other user's commit" testSendAnotherUsersCommit ], testGroup "Welcome" - [ test s "local welcome" testLocalWelcome, + [ test s "local welcome XXX" testLocalWelcome, test s "post a remote 
MLS welcome message" sendRemoteMLSWelcome, test s "post a remote MLS welcome message (key package ref not found)" sendRemoteMLSWelcomeKPNotFound ], From d1cfaaf37760306dfa57a5c11746bd7af41f2f10 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Wed, 12 Apr 2023 14:45:14 +0200 Subject: [PATCH 15/75] Send recipients as part of a welcome RPC --- .../src/Wire/API/Federation/API/Galley.hs | 7 ++-- services/galley/src/Galley/API/Federation.hs | 25 ++++---------- services/galley/src/Galley/API/MLS/Welcome.hs | 23 +++++++------ services/galley/test/integration/API/MLS.hs | 34 ++++--------------- 4 files changed, 31 insertions(+), 58 deletions(-) diff --git a/libs/wire-api-federation/src/Wire/API/Federation/API/Galley.hs b/libs/wire-api-federation/src/Wire/API/Federation/API/Galley.hs index 319a4470c8..06d3217cbd 100644 --- a/libs/wire-api-federation/src/Wire/API/Federation/API/Galley.hs +++ b/libs/wire-api-federation/src/Wire/API/Federation/API/Galley.hs @@ -445,8 +445,11 @@ data ConversationUpdateResponse via (CustomEncoded ConversationUpdateResponse) -- | A wrapper around a raw welcome message -newtype MLSWelcomeRequest = MLSWelcomeRequest - { unMLSWelcomeRequest :: Base64ByteString +data MLSWelcomeRequest = MLSWelcomeRequest + { -- | A serialised welcome message. + welcomeMessage :: Base64ByteString, + -- | Recipients local to the target backend. 
+ recipients :: [(UserId, ClientId)] } deriving stock (Eq, Generic, Show) deriving (Arbitrary) via (GenericUniform MLSWelcomeRequest) diff --git a/services/galley/src/Galley/API/Federation.hs b/services/galley/src/Galley/API/Federation.hs index 41ea52adf3..389b9a4c22 100644 --- a/services/galley/src/Galley/API/Federation.hs +++ b/services/galley/src/Galley/API/Federation.hs @@ -46,7 +46,6 @@ import Galley.API.Action import Galley.API.Error import Galley.API.MLS.Enabled import Galley.API.MLS.GroupInfo -import Galley.API.MLS.KeyPackage import Galley.API.MLS.Message import Galley.API.MLS.Removal import Galley.API.MLS.SubConversation hiding (leaveSubConversation) @@ -97,7 +96,7 @@ import Wire.API.MLS.Credential import Wire.API.MLS.GroupInfo import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation -import Wire.API.MLS.Welcome +-- import Wire.API.MLS.Welcome import Wire.API.Message import Wire.API.Routes.Internal.Brig.Connection import Wire.API.Routes.Named (Named (Named)) @@ -729,8 +728,7 @@ sendMLSMessage remoteDomain msr = msg mlsSendWelcome :: - ( Member BrigAccess r, - Member (Error InternalError) r, + ( Member (Error InternalError) r, Member GundeckAccess r, Member (Input Env) r, Member (Input (Local ())) r, @@ -739,26 +737,17 @@ mlsSendWelcome :: Domain -> F.MLSWelcomeRequest -> Sem r F.MLSWelcomeResponse -mlsSendWelcome _origDomain (fromBase64ByteString . F.unMLSWelcomeRequest -> rawWelcome) = +mlsSendWelcome _origDomain req = fmap (either (const MLSWelcomeMLSNotEnabled) (const MLSWelcomeSent)) . runError @(Tagged 'MLSNotEnabled ()) $ do assertMLSEnabled loc <- qualifyLocal () now <- input - welcome <- either (throw . InternalErrorWithDescription . LT.fromStrict) pure $ decodeMLS' rawWelcome - -- Extract only recipients local to this backend - rcpts <- - fmap catMaybes - $ traverse - ( fmap (fmap cidQualifiedClient . hush) - . runError @(Tagged 'MLSKeyPackageRefNotFound ()) - . derefKeyPackage - . 
gsNewMember - ) - $ welSecrets welcome - let lrcpts = qualifyAs loc $ fst $ partitionQualified loc rcpts - sendLocalWelcomes Nothing now rawWelcome lrcpts + welcome <- + either (throw . InternalErrorWithDescription . LT.fromStrict) pure $ + decodeMLS' (fromBase64ByteString req.welcomeMessage) + sendLocalWelcomes Nothing now welcome (qualifyAs loc req.recipients) onMLSMessageSent :: ( Member ExternalAccess r, diff --git a/services/galley/src/Galley/API/MLS/Welcome.hs b/services/galley/src/Galley/API/MLS/Welcome.hs index 29c869278d..04ff7f55a6 100644 --- a/services/galley/src/Galley/API/MLS/Welcome.hs +++ b/services/galley/src/Galley/API/MLS/Welcome.hs @@ -63,7 +63,7 @@ sendWelcomes :: sendWelcomes loc con cids welcome = do now <- input let (locals, remotes) = partitionQualified loc (map cidQualifiedClient cids) - let msg = encodeMLS' $ mkMessage (MessageWelcome welcome) + let msg = mkRawMLS $ mkMessage (MessageWelcome welcome) sendLocalWelcomes con now msg (qualifyAs loc locals) sendRemoteWelcomes msg remotes @@ -71,10 +71,10 @@ sendLocalWelcomes :: Member GundeckAccess r => Maybe ConnId -> UTCTime -> - ByteString -> + RawMLS Message -> Local [(UserId, ClientId)] -> Sem r () -sendLocalWelcomes con now rawWelcome lclients = do +sendLocalWelcomes con now welcome lclients = do runMessagePush lclients Nothing $ foldMap (uncurry mkPush) (tUnqualified lclients) where @@ -83,21 +83,24 @@ sendLocalWelcomes con now rawWelcome lclients = do -- FUTUREWORK: use the conversation ID stored in the key package mapping table let lcnv = qualifyAs lclients (selfConv u) lusr = qualifyAs lclients u - e = Event (tUntagged lcnv) Nothing (tUntagged lusr) now $ EdMLSWelcome rawWelcome + e = Event (tUntagged lcnv) Nothing (tUntagged lusr) now $ EdMLSWelcome welcome.rmRaw in newMessagePush lclients mempty con defMessageMetadata (u, c) e sendRemoteWelcomes :: ( Member FederatorAccess r, Member P.TinyLog r ) => - ByteString -> + RawMLS Message -> [Remote (UserId, ClientId)] -> Sem r () 
-sendRemoteWelcomes rawWelcome clients = do - let req = MLSWelcomeRequest . Base64ByteString $ rawWelcome - rpc = fedClient @'Galley @"mls-welcome" req - traverse_ handleError <=< runFederatedConcurrentlyEither clients $ - const rpc +sendRemoteWelcomes welcome clients = do + let msg = Base64ByteString welcome.rmRaw + traverse_ handleError <=< runFederatedConcurrentlyEither clients $ \rcpts -> + fedClient @'Galley @"mls-welcome" + MLSWelcomeRequest + { welcomeMessage = msg, + recipients = tUnqualified rcpts + } where handleError :: Member P.TinyLog r => diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 63cb80e906..be20c954cd 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -83,9 +83,8 @@ tests s = ], testGroup "Welcome" - [ test s "local welcome XXX" testLocalWelcome, - test s "post a remote MLS welcome message" sendRemoteMLSWelcome, - test s "post a remote MLS welcome message (key package ref not found)" sendRemoteMLSWelcomeKPNotFound + [ test s "local welcome" testLocalWelcome, + test s "post a remote MLS welcome message" sendRemoteMLSWelcome ], testGroup "Creation" @@ -1700,11 +1699,12 @@ sendRemoteMLSWelcome :: TestM () sendRemoteMLSWelcome = do -- Alice is from the originating domain and Bob is local, i.e., on the receiving domain [alice, bob] <- createAndConnectUsers [Just "alice.example.com", Nothing] - commit <- runMLSTest $ do + (commit, bob1) <- runMLSTest $ do [alice1, bob1] <- traverse createMLSClient [alice, bob] void $ setupFakeMLSGroup alice1 void $ uploadNewKeyPackage bob1 - createAddCommit alice1 [bob] + commit <- createAddCommit alice1 [bob] + pure (commit, bob1) welcome <- assertJust (mpWelcome commit) @@ -1717,35 +1717,13 @@ sendRemoteMLSWelcome = do runFedClient @"mls-welcome" fedGalleyClient (qDomain alice) $ MLSWelcomeRequest (Base64ByteString welcome) + [qUnqualified (cidQualifiedClient bob1)] -- check that the corresponding 
event is received liftIO $ do WS.assertMatch_ (5 # WS.Second) wsB $ wsAssertMLSWelcome bob welcome -sendRemoteMLSWelcomeKPNotFound :: TestM () -sendRemoteMLSWelcomeKPNotFound = do - [alice, bob] <- createAndConnectUsers [Just "alice.example.com", Nothing] - commit <- runMLSTest $ do - [alice1, bob1] <- traverse createMLSClient [alice, bob] - void $ setupFakeMLSGroup alice1 - kp <- fst <$> generateKeyPackage bob1 - createAddCommitWithKeyPackages alice1 [(bob1, kp.rmRaw)] - welcome <- assertJust (mpWelcome commit) - - fedGalleyClient <- view tsFedGalleyClient - cannon <- view tsCannon - WS.bracketR cannon (qUnqualified bob) $ \wsB -> do - -- send welcome message - void $ - runFedClient @"mls-welcome" fedGalleyClient (qDomain alice) $ - MLSWelcomeRequest - (Base64ByteString welcome) - - liftIO $ do - -- check that no event is received - WS.assertNoEvent (1 # Second) [wsB] - testBackendRemoveProposalLocalConvLocalLeaverCreator :: TestM () testBackendRemoveProposalLocalConvLocalLeaverCreator = do [alice, bob] <- createAndConnectUsers (replicate 2 Nothing) From c190ce307053765c937be1adab4c56163ef0ccd2 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Wed, 12 Apr 2023 15:01:26 +0200 Subject: [PATCH 16/75] Use commit bundles in failure tests --- services/galley/src/Galley/API/MLS/Message.hs | 1 + services/galley/test/integration/API/MLS.hs | 58 ++++++------------- 2 files changed, 20 insertions(+), 39 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 2b18382fea..ea332ce8bf 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -112,6 +112,7 @@ import Wire.API.User.Client -- [x] remove PublicGroupState and GroupInfoBundle modules -- [ ] remove protobuf definitions of CommitBundle -- [ ] (?) 
rename public_group_state field in conversation table +-- [ ] consider adding more integration tests data IncomingMessage = IncomingMessage { epoch :: Epoch, diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index be20c954cd..7640b261d5 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -66,7 +66,6 @@ import Wire.API.MLS.Keys import Wire.API.MLS.Message import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation -import Wire.API.MLS.Welcome import Wire.API.Message import Wire.API.Routes.MultiTablePaging import Wire.API.Routes.Version @@ -93,12 +92,11 @@ tests s = ], testGroup "Deletion" - [ test s "delete a MLS conversation" testDeleteMLSConv + [ test s "delete an MLS conversation" testDeleteMLSConv ], testGroup "Commit" [ test s "add user to a conversation" testAddUser, - test s "add user with an incomplete welcome" testAddUserWithBundleIncompleteWelcome, test s "add user (not connected)" testAddUserNotConnected, test s "add user (partial client list)" testAddUserPartial, test s "add client of existing user" testAddClientPartial, @@ -378,33 +376,6 @@ testAddUserWithBundle = do liftIO $ assertBool "Commit does not contain a public group State" (isJust (mpGroupInfo commit)) liftIO $ mpGroupInfo commit @?= Just returnedGS -testAddUserWithBundleIncompleteWelcome :: TestM () -testAddUserWithBundleIncompleteWelcome = do - [alice, bob] <- createAndConnectUsers [Nothing, Nothing] - - runMLSTest $ do - (alice1 : bobClients) <- traverse createMLSClient [alice, bob, bob] - traverse_ uploadNewKeyPackage bobClients - void $ setupMLSGroup alice1 - - -- create commit, but remove first recipient from welcome message - commit <- do - commit <- createAddCommit alice1 [bob] - liftIO $ do - welcome <- assertJust (mpWelcome commit) - w <- either (assertFailure . 
T.unpack) pure $ decodeMLS' welcome - let w' = w {welSecrets = take 1 (welSecrets w)} - welcome' = encodeMLS' w' - commit' = commit {mpWelcome = Just welcome'} - pure commit' - - bundle <- createBundle commit - err <- - responseJsonError - =<< localPostCommitBundle (mpSender commit) bundle - do err <- responseJsonError - =<< postMessage (mpSender commit) (mpMessage commit) + =<< localPostCommitBundle (mpSender commit) bundle >= sendAndConsumeCommitBundle commit <- createRemoveCommit alice1 [bob1] + bundle <- createBundle commit err <- responseJsonError - =<< postMessage alice1 (mpMessage commit) + =<< localPostCommitBundle alice1 bundle Date: Wed, 12 Apr 2023 16:53:22 +0200 Subject: [PATCH 17/75] Implement new proposal ref computation --- .../src/Wire/API/MLS/AuthenticatedContent.hs | 92 +++++++++++++++++++ libs/wire-api/src/Wire/API/MLS/Message.hs | 50 ++-------- libs/wire-api/src/Wire/API/MLS/Proposal.hs | 19 +--- libs/wire-api/test/unit/Test/Wire/API/MLS.hs | 10 +- .../test/unit/Test/Wire/API/Roundtrip/MLS.hs | 2 +- libs/wire-api/wire-api.cabal | 1 + services/galley/src/Galley/API/MLS/Message.hs | 13 ++- services/galley/src/Galley/API/MLS/Removal.hs | 8 +- 8 files changed, 127 insertions(+), 68 deletions(-) create mode 100644 libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs diff --git a/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs b/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs new file mode 100644 index 0000000000..8efa64193b --- /dev/null +++ b/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs @@ -0,0 +1,92 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. 
+-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . + +module Wire.API.MLS.AuthenticatedContent + ( AuthenticatedContent (..), + authContentRef, + publicMessageRef, + mkSignedPublicMessage, + ) +where + +import Crypto.PubKey.Ed25519 +import qualified Data.ByteArray as BA +import Imports +import Wire.API.MLS.CipherSuite +import Wire.API.MLS.Context +import Wire.API.MLS.Epoch +import Wire.API.MLS.Group +import Wire.API.MLS.Message +import Wire.API.MLS.Proposal +import Wire.API.MLS.ProtocolVersion +import Wire.API.MLS.Serialisation + +-- Needed to compute proposal refs. +data AuthenticatedContent = AuthenticatedContent + { wireFormat :: WireFormatTag, + content :: RawMLS FramedContent, + authData :: RawMLS FramedContentAuthData + } + deriving (Eq, Show) + +instance SerialiseMLS AuthenticatedContent where + serialiseMLS ac = do + serialiseMLS ac.wireFormat + serialiseMLS ac.content + serialiseMLS ac.authData + +msgAuthContent :: PublicMessage -> AuthenticatedContent +msgAuthContent msg = + AuthenticatedContent + { wireFormat = WireFormatPublicTag, + content = msg.content, + authData = msg.authData + } + +-- | Compute the proposal ref given a ciphersuite and the raw proposal data. +authContentRef :: CipherSuiteTag -> AuthenticatedContent -> ProposalRef +authContentRef cs = ProposalRef . csHash cs proposalContext . mkRawMLS + +publicMessageRef :: CipherSuiteTag -> PublicMessage -> ProposalRef +publicMessageRef cs = authContentRef cs . msgAuthContent + +-- | Craft a message with the backend itself as a sender. Return the message and its ref. 
+mkSignedPublicMessage :: + SecretKey -> PublicKey -> GroupId -> Epoch -> FramedContentData -> PublicMessage +mkSignedPublicMessage priv pub gid epoch payload = + let framedContent = + mkRawMLS + FramedContent + { groupId = gid, + epoch = epoch, + sender = SenderExternal 0, + content = payload, + authenticatedData = mempty + } + tbs = + FramedContentTBS + { protocolVersion = defaultProtocolVersion, + wireFormat = WireFormatPublicTag, + content = framedContent, + groupContext = Nothing + } + sig = BA.convert $ sign priv pub (encodeMLS' tbs) + in PublicMessage + { content = framedContent, + authData = mkRawMLS (FramedContentAuthData sig Nothing), + membershipTag = Nothing + } diff --git a/libs/wire-api/src/Wire/API/MLS/Message.hs b/libs/wire-api/src/Wire/API/MLS/Message.hs index e78e972b00..9152e7c7ac 100644 --- a/libs/wire-api/src/Wire/API/MLS/Message.hs +++ b/libs/wire-api/src/Wire/API/MLS/Message.hs @@ -1,5 +1,6 @@ {-# LANGUAGE StandaloneKindSignatures #-} {-# LANGUAGE TemplateHaskell #-} + -- This file is part of the Wire Server implementation. -- -- Copyright (C) 2022 Wire Swiss GmbH @@ -16,10 +17,10 @@ -- -- You should have received a copy of the GNU Affero General Public License along -- with this program. If not, see . 
-{-# OPTIONS_GHC -Wwarn #-} module Wire.API.MLS.Message ( -- * MLS Message types + WireFormatTag (..), Message (..), mkMessage, MessageContent (..), @@ -35,7 +36,6 @@ module Wire.API.MLS.Message -- * Utilities verifyMessageSignature, - mkSignedMessage, -- * Servant types MLSMessageSendingStatus (..), @@ -43,17 +43,12 @@ module Wire.API.MLS.Message where import Control.Lens ((?~)) -import Crypto.PubKey.Ed25519 import qualified Data.Aeson as A import Data.Binary -import qualified Data.ByteArray as BA import Data.Id import Data.Json.Util -import Data.Kind import Data.Qualified import Data.Schema -import Data.Schema hiding (tag) -import Data.Singletons.TH import qualified Data.Swagger as S import GHC.Records import Imports @@ -155,7 +150,7 @@ instance S.ToSchema Message where data PublicMessage = PublicMessage { content :: RawMLS FramedContent, - authData :: FramedContentAuthData, + authData :: RawMLS FramedContentAuthData, -- Present iff content.rmValue.sender is of type Member. membershipTag :: Maybe ByteString } @@ -164,7 +159,7 @@ data PublicMessage = PublicMessage instance ParseMLS PublicMessage where parseMLS = do content <- parseMLS - authData <- parseFramedContentAuthData (framedContentDataTag (content.rmValue.content)) + authData <- parseRawMLS (parseFramedContentAuthData (framedContentDataTag (content.rmValue.content))) membershipTag <- case content.rmValue.sender of SenderMember _ -> Just <$> parseMLSBytes @VarInt _ -> pure Nothing @@ -347,9 +342,9 @@ data FramedContentAuthData = FramedContentAuthData deriving (Eq, Show) parseFramedContentAuthData :: FramedContentDataTag -> Get FramedContentAuthData -parseFramedContentAuthData tag = do +parseFramedContentAuthData t = do sig <- parseMLSBytes @VarInt - confirmationTag <- case tag of + confirmationTag <- case t of FramedContentCommitTag -> Just <$> parseMLSBytes @VarInt _ -> pure Nothing pure (FramedContentAuthData sig confirmationTag) @@ -370,44 +365,15 @@ data GroupContext = GroupContext } deriving (Eq, Show) 
--- | Craft a message with the backend itself as a sender. -mkSignedMessage :: - SecretKey -> PublicKey -> GroupId -> Epoch -> FramedContentData -> Message -mkSignedMessage priv pub gid epoch payload = - let framedContent = - mkRawMLS - FramedContent - { groupId = gid, - epoch = epoch, - sender = SenderExternal 0, - content = payload, - authenticatedData = mempty - } - tbs = - FramedContentTBS - { protocolVersion = defaultProtocolVersion, - wireFormat = WireFormatPublicTag, - content = framedContent, - groupContext = Nothing - } - sig = BA.convert $ sign priv pub (encodeMLS' tbs) - in mkMessage $ - MessagePublic - PublicMessage - { content = framedContent, - authData = FramedContentAuthData sig Nothing, - membershipTag = Nothing - } - verifyMessageSignature :: RawMLS GroupContext -> RawMLS FramedContent -> - FramedContentAuthData -> + RawMLS FramedContentAuthData -> ByteString -> Bool verifyMessageSignature ctx msgContent authData pubkey = isJust $ do let tbs = mkRawMLS (framedContentTBS ctx msgContent) - sig = authData.signature_ + sig = authData.rmValue.signature_ cs <- cipherSuiteTag ctx.rmValue.cipherSuite guard $ csVerifySignature cs pubkey tbs sig diff --git a/libs/wire-api/src/Wire/API/MLS/Proposal.hs b/libs/wire-api/src/Wire/API/MLS/Proposal.hs index 4534e99f39..97cbd3efd7 100644 --- a/libs/wire-api/src/Wire/API/MLS/Proposal.hs +++ b/libs/wire-api/src/Wire/API/MLS/Proposal.hs @@ -23,13 +23,10 @@ module Wire.API.MLS.Proposal where import Cassandra import Control.Lens (makePrisms) import Data.Binary -import Data.Binary.Get -import Data.Binary.Put import Data.ByteString as B import Imports import Test.QuickCheck import Wire.API.MLS.CipherSuite -import Wire.API.MLS.Context import Wire.API.MLS.Extension import Wire.API.MLS.Group import Wire.API.MLS.KeyPackage @@ -85,14 +82,6 @@ instance SerialiseMLS Proposal where serialiseMLS GroupContextExtensionsProposalTag serialiseMLSVector @VarInt serialiseMLS es --- | Compute the proposal ref given a ciphersuite and the 
raw proposal data. -proposalRef :: CipherSuiteTag -> RawMLS Proposal -> ProposalRef -proposalRef cs = - ProposalRef - . csHash cs proposalContext - . flip RawMLS () - . rmRaw - data PreSharedKeyTag = ExternalKeyTag | ResumptionKeyTag deriving (Bounded, Enum, Eq, Show) @@ -218,15 +207,13 @@ instance SerialiseMLS ProposalOrRef where newtype ProposalRef = ProposalRef {unProposalRef :: ByteString} deriving stock (Eq, Show, Ord, Generic) + deriving newtype (Arbitrary) instance ParseMLS ProposalRef where - parseMLS = ProposalRef <$> getByteString 16 + parseMLS = ProposalRef <$> parseMLSBytes @VarInt instance SerialiseMLS ProposalRef where - serialiseMLS = putByteString . unProposalRef - -instance Arbitrary ProposalRef where - arbitrary = ProposalRef . B.pack <$> vectorOf 16 arbitrary + serialiseMLS = serialiseMLSBytes @VarInt . unProposalRef makePrisms ''ProposalOrRef diff --git a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs index 913dcc0c60..6d7a084322 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs @@ -36,6 +36,7 @@ import System.Process import Test.Tasty import Test.Tasty.HUnit import UnliftIO (withSystemTempDirectory) +import Wire.API.MLS.AuthenticatedContent import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import Wire.API.MLS.Epoch @@ -140,16 +141,17 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do secretKey <- Ed25519.generateSecretKey let publicKey = Ed25519.toPublic secretKey - let proposal = mkRawMLS (RemoveProposal 1) - let message = - mkSignedMessage + proposal = mkRawMLS (RemoveProposal 1) + pmessage = + mkSignedPublicMessage secretKey publicKey gid (Epoch 1) (FramedContentProposal proposal) + message = mkMessage $ MessagePublic pmessage + messageFilename = "signed-message.mls" - let messageFilename = "signed-message.mls" BS.writeFile (tmp messageFilename) (rmRaw (mkRawMLS message)) let signerKeyFilename = 
"signer-key.bin" BS.writeFile (tmp signerKeyFilename) (convert publicKey) diff --git a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs index 1beb775720..526b7a883a 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs @@ -103,7 +103,7 @@ instance ArbitraryFramedContent fc => Arbitrary (MessageGenerator fc) where <*> fmap MessagePublic ( PublicMessage (mkRawMLS fc) - <$> (FramedContentAuthData <$> arbitrary <*> pure confirmationTag) + <$> (mkRawMLS <$> (FramedContentAuthData <$> arbitrary <*> pure confirmationTag)) <*> pure mt ) diff --git a/libs/wire-api/wire-api.cabal b/libs/wire-api/wire-api.cabal index e1f68f85c0..f66153a3c5 100644 --- a/libs/wire-api/wire-api.cabal +++ b/libs/wire-api/wire-api.cabal @@ -96,6 +96,7 @@ library Wire.API.MakesFederatedCall Wire.API.Message Wire.API.Message.Proto + Wire.API.MLS.AuthenticatedContent Wire.API.MLS.Capabilities Wire.API.MLS.CipherSuite Wire.API.MLS.Commit diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index ea332ce8bf..f898dae4d5 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -84,6 +84,7 @@ import Wire.API.Federation.API import Wire.API.Federation.API.Brig import Wire.API.Federation.API.Galley import Wire.API.Federation.Error +import Wire.API.MLS.AuthenticatedContent import Wire.API.MLS.CipherSuite import Wire.API.MLS.Commit import Wire.API.MLS.CommitBundle @@ -135,7 +136,7 @@ data IncomingPublicMessageContent = IncomingPublicMessageContent content :: FramedContentData, -- for verification framedContent :: RawMLS FramedContent, - authData :: FramedContentAuthData + authData :: RawMLS FramedContentAuthData } data IncomingBundle = IncomingBundle @@ -177,6 +178,14 @@ mkIncomingMessage msg = case msg.rmValue.content of } _ -> Nothing 
+incomingMessageAuthenticatedContent :: IncomingPublicMessageContent -> AuthenticatedContent +incomingMessageAuthenticatedContent pmsg = + AuthenticatedContent + { wireFormat = WireFormatPublicTag, + content = pmsg.framedContent, + authData = pmsg.authData + } + mkIncomingBundle :: RawMLS CommitBundle -> Maybe IncomingBundle mkIncomingBundle bundle = do imsg <- mkIncomingMessage bundle.rmValue.cbCommitMsg @@ -893,7 +902,7 @@ processProposal qusr lConvOrSub msg pub prop = do when (isExternal pub.sender) $ do checkExternalProposalSignature pub prop checkExternalProposalUser qusr propValue - let propRef = proposalRef suiteTag prop + let propRef = authContentRef suiteTag (incomingMessageAuthenticatedContent pub) storeProposal msg.groupId msg.epoch propRef ProposalOriginClient prop isExternal :: Sender -> Bool diff --git a/services/galley/src/Galley/API/MLS/Removal.hs b/services/galley/src/Galley/API/MLS/Removal.hs index 90b8b54a2c..6a4ab73530 100644 --- a/services/galley/src/Galley/API/MLS/Removal.hs +++ b/services/galley/src/Galley/API/MLS/Removal.hs @@ -42,6 +42,7 @@ import Polysemy.Input import Polysemy.TinyLog import qualified System.Logger as Log import Wire.API.Conversation.Protocol +import Wire.API.MLS.AuthenticatedContent import Wire.API.MLS.Credential import Wire.API.MLS.Message import Wire.API.MLS.Proposal @@ -79,18 +80,19 @@ createAndSendRemoveProposals lConvOrSubConv indices qusr cm = do Just (secKey, pubKey) -> do for_ indices $ \idx -> do let proposal = mkRawMLS (RemoveProposal idx) - msg = - mkSignedMessage + pmsg = + mkSignedPublicMessage secKey pubKey (cnvmlsGroupId meta) (cnvmlsEpoch meta) (FramedContentProposal proposal) + msg = mkMessage (MessagePublic pmsg) msgEncoded = encodeMLS' msg storeProposal (cnvmlsGroupId meta) (cnvmlsEpoch meta) - (proposalRef (cnvmlsCipherSuite meta) proposal) + (publicMessageRef (cnvmlsCipherSuite meta) pmsg) ProposalOriginBackend proposal propagateMessage qusr lConvOrSubConv Nothing msgEncoded cm From 
c178e0d44967c6ef1101d4d8492cc271cf03121f Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Thu, 13 Apr 2023 09:01:01 +0000 Subject: [PATCH 18/75] fix integration test admin removes user from a conversation --- .../galley/test/integration/API/MLS/Util.hs | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index db38371a7a..f936919594 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -759,15 +759,15 @@ readWelcome fp = runMaybeT $ do liftIO $ BS.readFile fp createRemoveCommit :: HasCallStack => ClientIdentity -> [ClientIdentity] -> MLSTest MessagePackage -createRemoveCommit cid _targets = do - -- TODO +createRemoveCommit cid targets = do bd <- State.gets mlsBaseDir welcomeFile <- liftIO $ emptyTempFile bd "welcome" pgsFile <- liftIO $ emptyTempFile bd "pgs" g <- getClientGroupState cid - let indices = map snd (readGroupState g) + let groupStateMap = Map.fromList (readGroupState g) + let indices = map (fromMaybe (error "could not find target") . flip Map.lookup groupStateMap) targets commit <- mlscli cid @@ -931,17 +931,15 @@ mlsBracket clients k = do readGroupState :: ByteString -> [(ClientIdentity, Word32)] readGroupState j = do - node <- j ^.. key "group" . key "public_group" . key "treesync" . key "tree" . key "leaf_nodes" . _Array . traverse . key "node" - - case node ^? key "leaf_index" of - Just i -> do - identityBytes <- node ^.. key "leaf_node" . key "payload" . key "credential" . key "credential" . key "Basic" . key "identity" . key "vec" + (node, n) <- zip (j ^.. key "group" . key "public_group" . key "treesync" . key "tree" . key "leaf_nodes" . _Array . traverse . key "node") [0 ..] + case node ^? key "leaf_node" of + Just leafNode -> do + identityBytes <- leafNode ^.. key "payload" . key "credential" . key "credential" . key "Basic" . key "identity" . 
key "vec" let identity = BS.pack (identityBytes ^.. _Array . traverse . _Integer . to fromIntegral) cid <- case decodeMLS' identity of Left _ -> [] Right x -> pure x - n <- i ^.. _Integer . to fromIntegral - pure $ (cid, n) + pure (cid, n) Nothing -> [] getClientsFromGroupState :: From bf906492af052322cf028a0e060c8b8006fa4a88 Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Thu, 13 Apr 2023 10:27:35 +0000 Subject: [PATCH 19/75] switch mls-test-cli call to external-proposal --- services/galley/test/integration/API/MLS/Util.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index f936919594..923b727e03 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -805,7 +805,7 @@ createExternalAddProposal joiner = do proposal <- mlscli joiner - [ "proposal-external", + [ "external-proposal", "--group-id", T.unpack (toBase64Text (unGroupId groupId)), "--epoch", From 6ad8bd226472fc7da0fd4a97e4f0d30a680e3355 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Thu, 13 Apr 2023 16:56:51 +0200 Subject: [PATCH 20/75] Implement validation of leaf nodes in galley - extract core validation function to wire-api - generalise validation of leaf node source - implement validation of key packages and leaf nodes in galley - remove all internal brig endpoints related to validation - validate leaf node in external commits - validate leaf node signature --- libs/wire-api/src/Wire/API/MLS/LeafNode.hs | 76 ++++++++--- libs/wire-api/src/Wire/API/MLS/Validation.hs | 123 ++++++++++++++++++ .../src/Wire/API/Routes/Internal/Brig.hs | 22 ---- .../test/unit/Test/Wire/API/Roundtrip/MLS.hs | 2 +- libs/wire-api/wire-api.cabal | 1 + services/brig/src/Brig/API/Internal.hs | 38 ------ services/brig/src/Brig/API/MLS/KeyPackages.hs | 4 +- .../Brig/API/MLS/KeyPackages/Validation.hs | 99 ++++---------- 
services/galley/src/Galley/API/MLS/Message.hs | 50 ++++--- .../galley/src/Galley/Effects/BrigAccess.hs | 12 -- services/galley/src/Galley/Intra/Client.hs | 45 ------- services/galley/src/Galley/Intra/Effects.hs | 2 - services/galley/test/integration/API/MLS.hs | 2 +- 13 files changed, 243 insertions(+), 233 deletions(-) create mode 100644 libs/wire-api/src/Wire/API/MLS/Validation.hs diff --git a/libs/wire-api/src/Wire/API/MLS/LeafNode.hs b/libs/wire-api/src/Wire/API/MLS/LeafNode.hs index 702734834f..064a6c2e55 100644 --- a/libs/wire-api/src/Wire/API/MLS/LeafNode.hs +++ b/libs/wire-api/src/Wire/API/MLS/LeafNode.hs @@ -18,13 +18,16 @@ module Wire.API.MLS.LeafNode ( LeafIndex, LeafNode (..), + LeafNodeCore (..), LeafNodeTBS (..), + LeafNodeTBSExtra (..), LeafNodeSource (..), LeafNodeSourceTag (..), leafNodeSourceTag, ) where +import Data.Binary import qualified Data.Swagger as S import GHC.Records import Imports @@ -32,6 +35,7 @@ import Test.QuickCheck import Wire.API.MLS.Capabilities import Wire.API.MLS.Credential import Wire.API.MLS.Extension +import Wire.API.MLS.Group import Wire.API.MLS.HPKEPublicKey import Wire.API.MLS.Lifetime import Wire.API.MLS.Serialisation @@ -39,7 +43,8 @@ import Wire.Arbitrary type LeafIndex = Word32 -data LeafNodeTBS = LeafNodeTBS +-- LeafNodeCore contains fields in the intersection of LeafNode and LeafNodeTBS +data LeafNodeCore = LeafNodeCore { encryptionKey :: HPKEPublicKey, signatureKey :: ByteString, credential :: Credential, @@ -48,11 +53,42 @@ data LeafNodeTBS = LeafNodeTBS extensions :: [Extension] } deriving (Show, Eq, Generic) - deriving (Arbitrary) via (GenericUniform LeafNodeTBS) + deriving (Arbitrary) via (GenericUniform LeafNodeCore) + +-- extra fields in LeafNodeTBS, but not in LeafNode +data LeafNodeTBSExtra + = LeafNodeTBSExtraKeyPackage + | LeafNodeTBSExtraUpdate GroupId LeafIndex + | LeafNodeTBSExtraCommit GroupId LeafIndex + +serialiseUntaggedLeafNodeTBSExtra :: LeafNodeTBSExtra -> Put +serialiseUntaggedLeafNodeTBSExtra 
LeafNodeTBSExtraKeyPackage = pure () +serialiseUntaggedLeafNodeTBSExtra (LeafNodeTBSExtraUpdate gid idx) = do + serialiseMLS gid + serialiseMLS idx +serialiseUntaggedLeafNodeTBSExtra (LeafNodeTBSExtraCommit gid idx) = do + serialiseMLS gid + serialiseMLS idx + +instance HasField "tag" LeafNodeTBSExtra LeafNodeSourceTag where + getField = \case + LeafNodeTBSExtraKeyPackage -> LeafNodeSourceKeyPackageTag + LeafNodeTBSExtraCommit _ _ -> LeafNodeSourceCommitTag + LeafNodeTBSExtraUpdate _ _ -> LeafNodeSourceUpdateTag -instance ParseMLS LeafNodeTBS where +data LeafNodeTBS = LeafNodeTBS + { core :: RawMLS LeafNodeCore, + extra :: LeafNodeTBSExtra + } + +instance SerialiseMLS LeafNodeTBS where + serialiseMLS tbs = do + serialiseMLS tbs.core + serialiseUntaggedLeafNodeTBSExtra tbs.extra + +instance ParseMLS LeafNodeCore where parseMLS = - LeafNodeTBS + LeafNodeCore <$> parseMLS <*> parseMLSBytes @VarInt <*> parseMLS @@ -60,19 +96,19 @@ instance ParseMLS LeafNodeTBS where <*> parseMLS <*> parseMLSVector @VarInt parseMLS -instance SerialiseMLS LeafNodeTBS where - serialiseMLS tbs = do - serialiseMLS tbs.encryptionKey - serialiseMLSBytes @VarInt tbs.signatureKey - serialiseMLS tbs.credential - serialiseMLS tbs.capabilities - serialiseMLS tbs.source - serialiseMLSVector @VarInt serialiseMLS tbs.extensions +instance SerialiseMLS LeafNodeCore where + serialiseMLS core = do + serialiseMLS core.encryptionKey + serialiseMLSBytes @VarInt core.signatureKey + serialiseMLS core.credential + serialiseMLS core.capabilities + serialiseMLS core.source + serialiseMLSVector @VarInt serialiseMLS core.extensions -- | This type can only verify the signature when the LeafNodeSource is -- LeafNodeSourceKeyPackage data LeafNode = LeafNode - { tbs :: LeafNodeTBS, + { core :: RawMLS LeafNodeCore, signature_ :: ByteString } deriving (Show, Eq, Generic) @@ -86,29 +122,29 @@ instance ParseMLS LeafNode where instance SerialiseMLS LeafNode where serialiseMLS ln = do - serialiseMLS ln.tbs + serialiseMLS 
ln.core serialiseMLSBytes @VarInt ln.signature_ instance S.ToSchema LeafNode where declareNamedSchema _ = pure (mlsSwagger "LeafNode") instance HasField "encryptionKey" LeafNode HPKEPublicKey where - getField = (.tbs.encryptionKey) + getField = (.core.rmValue.encryptionKey) instance HasField "signatureKey" LeafNode ByteString where - getField = (.tbs.signatureKey) + getField = (.core.rmValue.signatureKey) instance HasField "credential" LeafNode Credential where - getField = (.tbs.credential) + getField = (.core.rmValue.credential) instance HasField "capabilities" LeafNode Capabilities where - getField = (.tbs.capabilities) + getField = (.core.rmValue.capabilities) instance HasField "source" LeafNode LeafNodeSource where - getField = (.tbs.source) + getField = (.core.rmValue.source) instance HasField "extensions" LeafNode [Extension] where - getField = (.tbs.extensions) + getField = (.core.rmValue.extensions) data LeafNodeSource = LeafNodeSourceKeyPackage Lifetime diff --git a/libs/wire-api/src/Wire/API/MLS/Validation.hs b/libs/wire-api/src/Wire/API/MLS/Validation.hs new file mode 100644 index 0000000000..c99450c70e --- /dev/null +++ b/libs/wire-api/src/Wire/API/MLS/Validation.hs @@ -0,0 +1,123 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . 
+ +module Wire.API.MLS.Validation + ( -- * Main key package validation function + validateKeyPackage, + validateLeafNode, + ) +where + +import Control.Applicative +import Imports +import Wire.API.MLS.Capabilities +import Wire.API.MLS.CipherSuite +import Wire.API.MLS.Credential +import Wire.API.MLS.KeyPackage +import Wire.API.MLS.LeafNode +import Wire.API.MLS.Lifetime +import Wire.API.MLS.ProtocolVersion +import Wire.API.MLS.Serialisation + +validateKeyPackage :: + Maybe ClientIdentity -> + KeyPackage -> + Either Text (CipherSuiteTag, Lifetime) +validateKeyPackage mIdentity kp = do + -- get ciphersuite + cs <- + maybe + (Left "Unsupported ciphersuite") + pure + $ cipherSuiteTag kp.cipherSuite + + -- validate signature + unless + ( csVerifySignatureWithLabel + cs + kp.leafNode.signatureKey + "KeyPackageTBS" + kp.tbs + kp.signature_ + ) + $ Left "Invalid KeyPackage signature" + + -- validate protocol version + maybe + (Left "Unsupported protocol version") + pure + (pvTag (kp.protocolVersion) >>= guard . 
(== ProtocolMLS10)) + + -- validate credential, lifetime and capabilities + validateLeafNode cs mIdentity LeafNodeTBSExtraKeyPackage kp.leafNode + + lt <- case kp.leafNode.source of + LeafNodeSourceKeyPackage lt -> pure lt + -- unreachable + _ -> Left "Unexpected leaf node source" + + pure (cs, lt) + +validateLeafNode :: + CipherSuiteTag -> + Maybe ClientIdentity -> + LeafNodeTBSExtra -> + LeafNode -> + Either Text () +validateLeafNode cs mIdentity extra leafNode = do + let tbs = LeafNodeTBS leafNode.core extra + unless + ( csVerifySignatureWithLabel + cs + leafNode.signatureKey + "LeafNodeTBS" + (mkRawMLS tbs) + leafNode.signature_ + ) + $ Left "Invalid LeafNode signature" + + validateCredential mIdentity leafNode.credential + validateSource extra.tag leafNode.source + validateCapabilities leafNode.capabilities + +validateCredential :: Maybe ClientIdentity -> Credential -> Either Text () +validateCredential mIdentity (BasicCredential cred) = do + identity <- + either credentialError pure $ + decodeMLS' cred + unless (maybe True (identity ==) mIdentity) $ + Left "client identity does not match credential identity" + where + credentialError e = + Left $ + "Failed to parse identity: " <> e + +validateSource :: LeafNodeSourceTag -> LeafNodeSource -> Either Text () +validateSource t s = do + let t' = leafNodeSourceTag s + if t == t' + then pure () + else + Left $ + "Expected '" + <> t.name + <> "' source, got '" + <> (leafNodeSourceTag s).name + <> "'" + +validateCapabilities :: Capabilities -> Either Text () +validateCapabilities _ = pure () -- TODO diff --git a/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs b/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs index c2e81c8b5b..7f1ea227c0 100644 --- a/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs +++ b/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs @@ -54,8 +54,6 @@ import Wire.API.Error.Brig import Wire.API.MLS.CipherSuite (SignatureSchemeTag) import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage 
-import Wire.API.MLS.LeafNode -import Wire.API.MLS.Servant import Wire.API.MakesFederatedCall import Wire.API.Routes.Internal.Brig.Connection import Wire.API.Routes.Internal.Brig.EJPD @@ -279,26 +277,6 @@ type MLSAPI = ) :<|> GetMLSClients :<|> MapKeyPackageRefs - -- TODO: remove the following endpoint - :<|> Named - "put-key-package-add" - ( "key-package-add" - :> ReqBody '[Servant.JSON] NewKeyPackage - :> MultiVerb1 - 'PUT - '[Servant.JSON] - (Respond 200 "Key package ref mapping updated" NewKeyPackageResult) - ) - :<|> Named - "validate-leaf-node" - ( "validate-leaf-node" - :> Capture "identity" ClientIdentity - :> ReqBody '[MLS] LeafNode - :> MultiVerb1 - 'GET - '[Servant.JSON] - (RespondEmpty 200 "Leaf node is valid") - ) ) type PutConversationByKeyPackageRef = diff --git a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs index 526b7a883a..8562747a35 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs @@ -41,7 +41,7 @@ tests = T.localOption (T.Timeout (60 * 1000000) "60s") . 
T.testGroup "MLS roundtrip tests" $ [ testRoundTrip @KeyPackageRef, testRoundTrip @LeafNode, - testRoundTrip @LeafNodeTBS, + testRoundTrip @LeafNodeCore, testRoundTrip @KeyPackageTBS, testRoundTrip @Credential, testRoundTrip @ClientIdentity, diff --git a/libs/wire-api/wire-api.cabal b/libs/wire-api/wire-api.cabal index f66153a3c5..d26801a28b 100644 --- a/libs/wire-api/wire-api.cabal +++ b/libs/wire-api/wire-api.cabal @@ -119,6 +119,7 @@ library Wire.API.MLS.Serialisation Wire.API.MLS.Servant Wire.API.MLS.SubConversation + Wire.API.MLS.Validation Wire.API.MLS.Welcome Wire.API.Notification Wire.API.OAuth diff --git a/services/brig/src/Brig/API/Internal.hs b/services/brig/src/Brig/API/Internal.hs index 4334b93d56..71db75c917 100644 --- a/services/brig/src/Brig/API/Internal.hs +++ b/services/brig/src/Brig/API/Internal.hs @@ -27,7 +27,6 @@ import qualified Brig.API.Client as API import qualified Brig.API.Connection as API import Brig.API.Error import Brig.API.Handler -import Brig.API.MLS.KeyPackages.Validation import Brig.API.OAuth (internalOauthAPI) import Brig.API.Types import qualified Brig.API.User as API @@ -89,7 +88,6 @@ import Wire.API.Federation.API import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage -import Wire.API.MLS.Serialisation import Wire.API.Routes.Internal.Brig import qualified Wire.API.Routes.Internal.Brig as BrigIRoutes import Wire.API.Routes.Internal.Brig.Connection @@ -146,9 +144,6 @@ mlsAPI = ) :<|> getMLSClients :<|> mapKeyPackageRefsInternal - :<|> Named @"put-key-package-add" upsertKeyPackage - -- Used by galley to validate leaf nodes appearing in an update path - :<|> Named @"validate-leaf-node" validateLeafNode accountAPI :: ( Member BlacklistStore r, @@ -209,39 +204,6 @@ getConvIdByKeyPackageRef = runMaybeT . mapMaybeT wrapClientE . Data.keyPackageRe postKeyPackageRef :: KeyPackageRef -> KeyPackageRef -> Handler r () postKeyPackageRef ref = lift . wrapClient . 
Data.updateKeyPackageRef ref --- Used by galley to update key package refs and also validate -upsertKeyPackage :: NewKeyPackage -> Handler r NewKeyPackageResult -upsertKeyPackage nkp = do - kp <- - either - (const $ mlsProtocolError "upsertKeyPackage: Cannot decocode KeyPackage") - pure - $ decodeMLS' @(RawMLS KeyPackage) (kpData . nkpKeyPackage $ nkp) - ref <- kpRef' kp & noteH "upsertKeyPackage: Unsupported CipherSuite" - - identity <- - either - (const $ mlsProtocolError "upsertKeyPackage: Cannot decode ClientIdentity") - pure - $ keyPackageIdentity (rmValue kp) - mp <- lift . wrapClient . runMaybeT $ Data.derefKeyPackage ref - when (isNothing mp) $ do - void $ validateKeyPackage identity kp - lift . wrapClient $ - Data.addKeyPackageRef - ref - ( NewKeyPackageRef - (fst <$> cidQualifiedClient identity) - (ciClient identity) - (nkpConversation nkp) - ) - - pure $ NewKeyPackageResult identity ref - where - noteH :: Text -> Maybe a -> Handler r a - noteH errMsg Nothing = mlsProtocolError errMsg - noteH _ (Just y) = pure y - deleteKeyPackageRefs :: DeleteKeyPackageRefsRequest -> Handler r () deleteKeyPackageRefs (DeleteKeyPackageRefsRequest refs) = lift . wrapClient $ pooledForConcurrentlyN_ 16 refs Data.deleteKeyPackageRef diff --git a/services/brig/src/Brig/API/MLS/KeyPackages.hs b/services/brig/src/Brig/API/MLS/KeyPackages.hs index 74742fe176..9426a736c3 100644 --- a/services/brig/src/Brig/API/MLS/KeyPackages.hs +++ b/services/brig/src/Brig/API/MLS/KeyPackages.hs @@ -51,7 +51,7 @@ uploadKeyPackages :: Local UserId -> ClientId -> KeyPackageUpload -> Handler r ( uploadKeyPackages lusr cid (kpuKeyPackages -> kps) = do assertMLSEnabled let identity = mkClientIdentity (tUntagged lusr) cid - kps' <- traverse (validateKeyPackage identity) kps + kps' <- traverse (validateUploadedKeyPackage identity) kps lift . wrapClient $ Data.insertKeyPackages (tUnqualified lusr) cid kps' claimKeyPackages :: @@ -121,7 +121,7 @@ claimRemoteKeyPackages lusr target = do . kpData . 
kpbeKeyPackage $ e - (refVal, _) <- validateKeyPackage cid kpRaw + (refVal, _) <- validateUploadedKeyPackage cid kpRaw unless (refVal == kpbeRef e) . throwE . clientDataError diff --git a/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs b/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs index bef1faf74d..35ca999fa2 100644 --- a/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs +++ b/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs @@ -17,10 +17,9 @@ module Brig.API.MLS.KeyPackages.Validation ( -- * Main key package validation function - validateKeyPackage, - validateLeafNode, - mlsProtocolError, + validateUploadedKeyPackage, validateLifetime', + mlsProtocolError, ) where @@ -29,103 +28,56 @@ import Brig.API.Handler import Brig.App import qualified Brig.Data.Client as Data import Brig.Options -import Control.Applicative -import Control.Lens (view) -import qualified Data.ByteString.Lazy as LBS +import Control.Lens +import qualified Data.ByteString as LBS import Data.Qualified import Data.Time.Clock import Data.Time.Clock.POSIX import Imports import Wire.API.Error import Wire.API.Error.Brig -import Wire.API.MLS.Capabilities import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage -import Wire.API.MLS.LeafNode import Wire.API.MLS.Lifetime -import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation +import Wire.API.MLS.Validation -validateKeyPackage :: +validateUploadedKeyPackage :: ClientIdentity -> RawMLS KeyPackage -> Handler r (KeyPackageRef, KeyPackageData) -validateKeyPackage identity (RawMLS (KeyPackageData -> kpd) kp) = do - loc <- qualifyLocal () - -- get ciphersuite - cs <- - maybe - (mlsProtocolError "Unsupported ciphersuite") - pure - $ cipherSuiteTag kp.cipherSuite +validateUploadedKeyPackage identity kp = do + (cs, lt) <- either mlsProtocolError pure $ validateKeyPackage (Just identity) kp.rmValue - let ss = csSignatureScheme cs + validateLifetime lt -- Authenticate signature 
key. This is performed only upon uploading a key -- package for a local client. + loc <- qualifyLocal () foldQualified loc ( \_ -> do - key <- - fmap LBS.toStrict $ - maybe - (mlsProtocolError "No key associated to the given identity and signature scheme") - pure - =<< lift (wrapClient (Data.lookupMLSPublicKey (ciUser identity) (ciClient identity) ss)) - when (key /= kp.leafNode.signatureKey) $ + mkey :: Maybe LByteString <- + lift . wrapClient $ + Data.lookupMLSPublicKey + (ciUser identity) + (ciClient identity) + (csSignatureScheme cs) + key :: LByteString <- + maybe + (mlsProtocolError "No key associated to the given identity and signature scheme") + pure + mkey + when (key /= LBS.fromStrict kp.rmValue.leafNode.signatureKey) $ mlsProtocolError "Unrecognised signature key" ) - (pure . const ()) + (\_ -> pure ()) (cidQualifiedClient identity) - -- validate signature - unless - ( csVerifySignatureWithLabel - cs - kp.leafNode.signatureKey - "KeyPackageTBS" - kp.tbs - kp.signature_ - ) - $ mlsProtocolError "Invalid signature" - -- validate protocol version - maybe - (mlsProtocolError "Unsupported protocol version") - pure - (pvTag (kp.protocolVersion) >>= guard . 
(== ProtocolMLS10)) - - -- validate credential, lifetime and capabilities - validateLeafNode identity kp.leafNode - + let kpd = KeyPackageData kp.rmRaw pure (kpRef cs kpd, kpd) -validateLeafNode :: ClientIdentity -> LeafNode -> Handler r () -validateLeafNode identity leafNode = do - validateCredential identity leafNode.credential - validateSource leafNode.source - validateCapabilities leafNode.capabilities - -validateCredential :: ClientIdentity -> Credential -> Handler r () -validateCredential identity (BasicCredential cred) = do - identity' <- - either credentialError pure $ - decodeMLS' cred - when (identity /= identity') $ - throwStd (errorToWai @'MLSIdentityMismatch) - where - credentialError e = - mlsProtocolError $ - "Failed to parse identity: " <> e - -validateSource :: LeafNodeSource -> Handler r () -validateSource (LeafNodeSourceKeyPackage lt) = validateLifetime lt -validateSource s = - mlsProtocolError $ - "Expected 'key_package' source, got '" - <> (leafNodeSourceTag s).name - <> "'" - validateLifetime :: Lifetime -> Handler r () validateLifetime lt = do now <- liftIO getPOSIXTime @@ -143,9 +95,6 @@ validateLifetime' now mMaxLifetime lt = do when (tsPOSIX (ltNotAfter lt) > now + maxLifetime) $ Left "Key package expiration time is too far in the future" -validateCapabilities :: Capabilities -> Handler r () -validateCapabilities _ = pure () - mlsProtocolError :: Text -> Handler r a mlsProtocolError msg = throwStd . 
toWai $ diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index f898dae4d5..91f3a555fa 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -91,11 +91,13 @@ import Wire.API.MLS.CommitBundle import Wire.API.MLS.Credential import Wire.API.MLS.GroupInfo import Wire.API.MLS.KeyPackage +import Wire.API.MLS.LeafNode import Wire.API.MLS.Message import Wire.API.MLS.Proposal import qualified Wire.API.MLS.Proposal as Proposal import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation +import Wire.API.MLS.Validation import Wire.API.MLS.Welcome import Wire.API.Message import Wire.API.User.Client @@ -103,6 +105,13 @@ import Wire.API.User.Client -- TODO: -- [x] replace ref with index in remove proposals -- [ ] validate leaf nodes and key packages locally on galley +-- - [x] extract validation function to wire-api +-- - [x] validate lifetime and public key consistency only on brig +-- - [x] check that ciphersuite matches conversation on galley +-- - [ ] check the signature on the LeafNode +-- - [ ] ? verify capabilities +-- - [ ] verify that all extensions are present in the capabilities +-- - [ ] ? in the update case (in galley), verify that the encryption_key is different -- [ ] remove MissingSenderClient error -- [ ] PreSharedKey proposal -- [ ] remove all key package ref mapping @@ -638,8 +647,7 @@ getCommitData senderIdentity lConvOrSub epoch commit = do processExternalCommit :: forall r. - ( Member BrigAccess r, - Member ConversationStore r, + ( Member ConversationStore r, Member (Error MLSProtocolError) r, Member (ErrorS 'ConvNotFound) r, Member (ErrorS 'MLSStaleMessage) r, @@ -675,8 +683,14 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do throw . 
mlsProtocolError $ "The external commit must not have add proposals" - -- validate and update mapping in brig - validateLeafNode senderIdentity leafNode >>= \case + -- validate leaf node + let cs = cnvmlsCipherSuite (mlsMetaConvOrSub (tUnqualified lConvOrSub)) + let groupId = cnvmlsGroupId (mlsMetaConvOrSub convOrSub) + let extra = LeafNodeTBSExtraCommit groupId (error "calculate index") + + -- TODO: update client in conversation state + + case validateLeafNode cs (Just senderIdentity) extra leafNode.rmValue of Left errMsg -> throw $ mlsProtocolError ("Tried to add invalid LeafNode: " <> errMsg) @@ -687,8 +701,6 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do unless (isClientMember senderIdentity (mcMembers mlsConv)) $ throwS @'MLSSubConvClientNotInParent - let groupId = cnvmlsGroupId (mlsMetaConvOrSub convOrSub) - withCommitLock groupId epoch $ do -- validate remove proposal: an external commit can contain -- @@ -696,7 +708,7 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do -- > version of themselves remIndex <- case cmAssocs (paRemove action) of [] -> pure Nothing - [(_, idx :: Word32)] -> do + [(_, idx)] -> do cid <- note (mlsProtocolError "Invalid index in remove proposal") $ imLookup (indexMapConvOrSub convOrSub) idx @@ -807,10 +819,10 @@ applyProposalRef convOrSubConvId mlsMeta groupId epoch _suite (Ref ref) = do p <- getProposal groupId epoch ref >>= noteS @'MLSProposalNotFound checkEpoch epoch mlsMeta checkGroup groupId mlsMeta - applyProposal convOrSubConvId groupId (rmValue p) -applyProposalRef convOrSubConvId _mlsMeta groupId _epoch suite (Inline p) = do + applyProposal convOrSubConvId mlsMeta groupId (rmValue p) +applyProposalRef convOrSubConvId mlsMeta groupId _epoch suite (Inline p) = do checkProposalCipherSuite suite p - applyProposal convOrSubConvId groupId p + applyProposal convOrSubConvId mlsMeta groupId p addProposedClient :: Member (State IndexMap) r => ClientIdentity -> Sem r 
ProposalAction addProposedClient cid = do @@ -825,23 +837,31 @@ applyProposal :: Member (State IndexMap) r ) => ConvOrSubConvId -> + ConversationMLSData -> GroupId -> Proposal -> Sem r ProposalAction -applyProposal _convOrSubConvId _groupId (AddProposal kp) = do - -- TODO: validate key package +applyProposal _convOrSubConvId mlsMeta _groupId (AddProposal kp) = do + (cs, _lifetime) <- + either + (\msg -> throw (mlsProtocolError ("Invalid key package in Add proposal: " <> msg))) + pure + $ validateKeyPackage Nothing kp.rmValue + unless (mlsMeta.cnvmlsCipherSuite == cs) $ + throw (mlsProtocolError "Key package ciphersuite does not match conversation") + -- we are not checking lifetime constraints here cid <- getKeyPackageIdentity kp.rmValue addProposedClient cid -applyProposal _convOrSubConvId _groupId (RemoveProposal idx) = do +applyProposal _convOrSubConvId _mlsMeta _groupId (RemoveProposal idx) = do im <- get (cid, im') <- noteS @'MLSInvalidLeafNodeIndex $ imRemoveClient im idx put im' pure (paRemoveClient cid idx) -applyProposal _convOrSubConvId _groupId (ExternalInitProposal _) = +applyProposal _convOrSubConvId _mlsMeta _groupId (ExternalInitProposal _) = -- only record the fact there was an external init proposal, but do not -- process it in any way. 
pure paExternalInitPresent -applyProposal _convOrSubConvId _groupId _ = pure mempty +applyProposal _convOrSubConvId _mlsMeta _groupId _ = pure mempty checkProposalCipherSuite :: Member (Error MLSProtocolError) r => diff --git a/services/galley/src/Galley/Effects/BrigAccess.hs b/services/galley/src/Galley/Effects/BrigAccess.hs index a6802eb165..221eb55d02 100644 --- a/services/galley/src/Galley/Effects/BrigAccess.hs +++ b/services/galley/src/Galley/Effects/BrigAccess.hs @@ -51,8 +51,6 @@ module Galley.Effects.BrigAccess getClientByKeyPackageRef, getLocalMLSClients, addKeyPackageRef, - validateLeafNode, - validateKeyPackage, updateKeyPackageRef, deleteKeyPackageRefs, @@ -77,8 +75,6 @@ import Wire.API.Error.Galley import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage -import Wire.API.MLS.LeafNode -import Wire.API.MLS.Serialisation import Wire.API.Routes.Internal.Brig.Connection import qualified Wire.API.Routes.Internal.Galley.TeamFeatureNoConfigMulti as Multi import Wire.API.Team.Feature @@ -136,14 +132,6 @@ data BrigAccess m a where GetClientByKeyPackageRef :: KeyPackageRef -> BrigAccess m (Maybe ClientIdentity) GetLocalMLSClients :: Local UserId -> SignatureSchemeTag -> BrigAccess m (Set ClientInfo) AddKeyPackageRef :: KeyPackageRef -> Qualified UserId -> ClientId -> Qualified ConvId -> BrigAccess m () - ValidateLeafNode :: - ClientIdentity -> - RawMLS LeafNode -> - BrigAccess m (Either Text ()) - ValidateKeyPackage :: - ClientIdentity -> - RawMLS KeyPackage -> - BrigAccess m (Either Text ()) UpdateKeyPackageRef :: KeyPackageUpdate -> BrigAccess m () DeleteKeyPackageRefs :: [KeyPackageRef] -> BrigAccess m () UpdateSearchVisibilityInbound :: diff --git a/services/galley/src/Galley/Intra/Client.hs b/services/galley/src/Galley/Intra/Client.hs index f5143872cb..2f105507cc 100644 --- a/services/galley/src/Galley/Intra/Client.hs +++ b/services/galley/src/Galley/Intra/Client.hs @@ -26,8 +26,6 @@ module Galley.Intra.Client 
getLocalMLSClients, addKeyPackageRef, updateKeyPackageRef, - validateLeafNode, - validateKeyPackage, deleteKeyPackageRefs, ) where @@ -36,14 +34,12 @@ import Bilge hiding (getHeader, options, statusCode) import Bilge.RPC import Brig.Types.Intra import Brig.Types.Team.LegalHold (LegalHoldClientRequest (..)) -import Control.Monad.Catch import Data.ByteString.Conversion (toByteString') import Data.Id import Data.Misc import Data.Qualified import qualified Data.Set as Set import Data.Text.Encoding -import Data.Text.Lazy (toStrict) import Galley.API.Error import Galley.Effects import Galley.Env @@ -51,12 +47,9 @@ import Galley.External.LegalHoldService.Types import Galley.Intra.Util import Galley.Monad import Imports -import qualified Network.HTTP.Client as Rq -import qualified Network.HTTP.Types as HTTP import Network.HTTP.Types.Method import Network.HTTP.Types.Status import Network.Wai.Utilities.Error hiding (Error) -import qualified Network.Wai.Utilities.Error as Error import Polysemy import Polysemy.Error import Polysemy.Input @@ -67,8 +60,6 @@ import Wire.API.Error.Galley import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage -import Wire.API.MLS.LeafNode -import Wire.API.MLS.Serialisation import Wire.API.Routes.Internal.Brig import Wire.API.User.Auth.LegalHold import Wire.API.User.Client @@ -248,39 +239,3 @@ updateKeyPackageRef keyPackageRef = . json (kpupNext keyPackageRef) . expect2xx ) - -validateKeyPackage :: ClientIdentity -> RawMLS KeyPackage -> App (Either Text ()) -validateKeyPackage cid keyPackage = do - res <- - call - Brig - ( method GET - . paths ["i", "mls", "validate-key-package", toHeader cid] - . content "message/mls" - . 
bytes (encodeMLS' keyPackage) - ) - let statusCode = HTTP.statusCode (Rq.responseStatus res) - if - | statusCode `div` 100 == 2 -> Right <$> parseResponse (mkError status502 "server-error") res - | statusCode `div` 100 == 4 -> do - err <- parseResponse (mkError status502 "server-error") res - pure (Left ("Error validating key package: " <> toStrict (Error.label err) <> ": " <> toStrict (Error.message err))) - | otherwise -> throwM (mkError status502 "server-error" "Unexpected http status returned from /i/mls/validate-leaf-node") - -validateLeafNode :: ClientIdentity -> RawMLS LeafNode -> App (Either Text ()) -validateLeafNode cid leafNode = do - res <- - call - Brig - ( method GET - . paths ["i", "mls", "validate-leaf-node", toHeader cid] - . content "message/mls" - . bytes (encodeMLS' leafNode) - ) - let statusCode = HTTP.statusCode (Rq.responseStatus res) - if - | statusCode `div` 100 == 2 -> Right <$> parseResponse (mkError status502 "server-error") res - | statusCode `div` 100 == 4 -> do - err <- parseResponse (mkError status502 "server-error") res - pure (Left ("Error validating leaf node: " <> toStrict (Error.label err) <> ": " <> toStrict (Error.message err))) - | otherwise -> throwM (mkError status502 "server-error" "Unexpected http status returned from /i/mls/validate-leaf-node") diff --git a/services/galley/src/Galley/Intra/Effects.hs b/services/galley/src/Galley/Intra/Effects.hs index 0a07187c36..ae87f559d9 100644 --- a/services/galley/src/Galley/Intra/Effects.hs +++ b/services/galley/src/Galley/Intra/Effects.hs @@ -86,8 +86,6 @@ interpretBrigAccess = interpret $ \case AddKeyPackageRef ref qusr cl qcnv -> embedApp $ addKeyPackageRef ref qusr cl qcnv - ValidateLeafNode cid ln -> embedApp $ validateLeafNode cid ln - ValidateKeyPackage cid kp -> embedApp $ validateKeyPackage cid kp UpdateKeyPackageRef update -> embedApp $ updateKeyPackageRef update diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 
7640b261d5..765486e3d4 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -113,7 +113,7 @@ tests s = testGroup "External commit" [ test s "non-member attempts to join a conversation" testExternalCommitNotMember, - test s "join a conversation with the same client" testExternalCommitSameClient, + test s "join a conversation with the same client XXX" testExternalCommitSameClient, test s "join a conversation with a new client" testExternalCommitNewClient, test s "join a conversation with a new client and resend backend proposals" testExternalCommitNewClientResendBackendProposal ], From 9a00f3db43bf4cd6ade3628482e654e0c2324b4c Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 14 Apr 2023 13:49:49 +0200 Subject: [PATCH 21/75] Apply proposals in the correct order --- services/galley/src/Galley/API/MLS/Message.hs | 76 ++++++++++--------- 1 file changed, 41 insertions(+), 35 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 91f3a555fa..55d50cb7d3 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -112,6 +112,7 @@ import Wire.API.User.Client -- - [ ] ? verify capabilities -- - [ ] verify that all extensions are present in the capabilities -- - [ ] ? in the update case (in galley), verify that the encryption_key is different +-- [ ] validate proposals when processing proposal and commit messages -- [ ] remove MissingSenderClient error -- [ ] PreSharedKey proposal -- [ ] remove all key package ref mapping @@ -123,6 +124,7 @@ import Wire.API.User.Client -- [ ] remove protobuf definitions of CommitBundle -- [ ] (?) 
rename public_group_state field in conversation table -- [ ] consider adding more integration tests +-- [ ] remove prefixes from fields in Commit and Proposal data IncomingMessage = IncomingMessage { epoch :: Epoch, @@ -608,9 +610,28 @@ paRemoveClient cid idx = mempty {paRemove = cmSingleton cid idx} paExternalInitPresent :: ProposalAction paExternalInitPresent = mempty {paExternalInit = Any True} +-- | This is used to sort proposals into the correct processing order, as defined by the spec +data ProposalProcessingStage + = ProposalProcessingStageExtensions + | ProposalProcessingStageUpdate + | ProposalProcessingStageRemove + | ProposalProcessingStageAdd + | ProposalProcessingStagePreSharedKey + | ProposalProcessingStageExternalInit + | ProposalProcessingStageReInit + deriving (Eq, Ord) + +proposalProcessingStage :: Proposal -> ProposalProcessingStage +proposalProcessingStage (AddProposal _) = ProposalProcessingStageAdd +proposalProcessingStage (RemoveProposal _) = ProposalProcessingStageRemove +proposalProcessingStage (UpdateProposal _) = ProposalProcessingStageUpdate +proposalProcessingStage (PreSharedKeyProposal _) = ProposalProcessingStagePreSharedKey +proposalProcessingStage (ReInitProposal _) = ProposalProcessingStageReInit +proposalProcessingStage (ExternalInitProposal _) = ProposalProcessingStageExternalInit +proposalProcessingStage (GroupContextExtensionsProposal _) = ProposalProcessingStageExtensions + getCommitData :: ( HasProposalEffects r, - Member (ErrorS 'ConvNotFound) r, Member (ErrorS 'MLSProposalNotFound) r, Member (ErrorS 'MLSStaleMessage) r ) => @@ -624,7 +645,6 @@ getCommitData senderIdentity lConvOrSub epoch commit = do mlsMeta = mlsMetaConvOrSub convOrSub curEpoch = cnvmlsEpoch mlsMeta groupId = cnvmlsGroupId mlsMeta - suite = cnvmlsCipherSuite mlsMeta -- check epoch number when (epoch /= curEpoch) $ throwS @'MLSStaleMessage @@ -633,16 +653,12 @@ getCommitData senderIdentity lConvOrSub epoch commit = do if epoch == Epoch 0 then 
addProposedClient senderIdentity else mempty + proposals <- traverse (derefProposal groupId epoch) commit.cProposals action <- foldMap - ( applyProposalRef - (idForConvOrSub convOrSub) - mlsMeta - groupId - epoch - suite - ) - (cProposals commit) + (applyProposal mlsMeta groupId) + -- sort proposals before processing + (sortOn proposalProcessingStage proposals) pure (creatorAction <> action) processExternalCommit :: @@ -801,28 +817,18 @@ processInternalCommit senderIdentity con lConvOrSub epoch action commit = do pure updates -applyProposalRef :: - ( HasProposalEffects r, - Member (State IndexMap) r, - Member (ErrorS 'ConvNotFound) r, - Member (ErrorS 'MLSProposalNotFound) r, - Member (ErrorS 'MLSStaleMessage) r +derefProposal :: + ( Member ProposalStore r, + Member (ErrorS 'MLSProposalNotFound) r ) => - ConvOrSubConvId -> - ConversationMLSData -> GroupId -> Epoch -> - CipherSuiteTag -> ProposalOrRef -> - Sem r ProposalAction -applyProposalRef convOrSubConvId mlsMeta groupId epoch _suite (Ref ref) = do + Sem r Proposal +derefProposal groupId epoch (Ref ref) = do p <- getProposal groupId epoch ref >>= noteS @'MLSProposalNotFound - checkEpoch epoch mlsMeta - checkGroup groupId mlsMeta - applyProposal convOrSubConvId mlsMeta groupId (rmValue p) -applyProposalRef convOrSubConvId mlsMeta groupId _epoch suite (Inline p) = do - checkProposalCipherSuite suite p - applyProposal convOrSubConvId mlsMeta groupId p + pure p.rmValue +derefProposal _ _ (Inline p) = pure p addProposedClient :: Member (State IndexMap) r => ClientIdentity -> Sem r ProposalAction addProposedClient cid = do @@ -832,16 +838,16 @@ addProposedClient cid = do pure (paAddClient cid idx) applyProposal :: - forall r. 
- ( HasProposalEffects r, - Member (State IndexMap) r + ( Member (State IndexMap) r, + Member (Error MLSProtocolError) r, + Member (ErrorS 'MLSUnsupportedProposal) r, + Member (ErrorS 'MLSInvalidLeafNodeIndex) r ) => - ConvOrSubConvId -> ConversationMLSData -> GroupId -> Proposal -> Sem r ProposalAction -applyProposal _convOrSubConvId mlsMeta _groupId (AddProposal kp) = do +applyProposal mlsMeta _groupId (AddProposal kp) = do (cs, _lifetime) <- either (\msg -> throw (mlsProtocolError ("Invalid key package in Add proposal: " <> msg))) @@ -852,16 +858,16 @@ applyProposal _convOrSubConvId mlsMeta _groupId (AddProposal kp) = do -- we are not checking lifetime constraints here cid <- getKeyPackageIdentity kp.rmValue addProposedClient cid -applyProposal _convOrSubConvId _mlsMeta _groupId (RemoveProposal idx) = do +applyProposal _mlsMeta _groupId (RemoveProposal idx) = do im <- get (cid, im') <- noteS @'MLSInvalidLeafNodeIndex $ imRemoveClient im idx put im' pure (paRemoveClient cid idx) -applyProposal _convOrSubConvId _mlsMeta _groupId (ExternalInitProposal _) = +applyProposal _mlsMeta _groupId (ExternalInitProposal _) = -- only record the fact there was an external init proposal, but do not -- process it in any way. 
pure paExternalInitPresent -applyProposal _convOrSubConvId _mlsMeta _groupId _ = pure mempty +applyProposal _mlsMeta _groupId _ = pure mempty checkProposalCipherSuite :: Member (Error MLSProtocolError) r => From ecdab59c222d37462930ca57ba7b0bc81f21f7b4 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 14 Apr 2023 15:04:59 +0200 Subject: [PATCH 22/75] Remove redundant GroupContext structure --- libs/wire-api/src/Wire/API/MLS/GroupInfo.hs | 3 ++- libs/wire-api/src/Wire/API/MLS/Message.hs | 12 ------------ 2 files changed, 2 insertions(+), 13 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs b/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs index 3fe3d0ca5c..f563a0818b 100644 --- a/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs +++ b/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs @@ -16,7 +16,8 @@ -- with this program. If not, see . module Wire.API.MLS.GroupInfo - ( GroupInfo (..), + ( GroupContext (..), + GroupInfo (..), GroupInfoData (..), ) where diff --git a/libs/wire-api/src/Wire/API/MLS/Message.hs b/libs/wire-api/src/Wire/API/MLS/Message.hs index 9152e7c7ac..a98e24f1dd 100644 --- a/libs/wire-api/src/Wire/API/MLS/Message.hs +++ b/libs/wire-api/src/Wire/API/MLS/Message.hs @@ -56,7 +56,6 @@ import Wire.API.Event.Conversation import Wire.API.MLS.CipherSuite import Wire.API.MLS.Commit import Wire.API.MLS.Epoch -import Wire.API.MLS.Extension import Wire.API.MLS.Group import Wire.API.MLS.GroupInfo import Wire.API.MLS.KeyPackage @@ -354,17 +353,6 @@ instance SerialiseMLS FramedContentAuthData where serialiseMLSBytes @VarInt ad.signature_ traverse_ (serialiseMLSBytes @VarInt) ad.confirmationTag -data GroupContext = GroupContext - { protocolVersion :: ProtocolVersion, - cipherSuite :: CipherSuite, - groupId :: GroupId, - epoch :: Epoch, - treeHash :: ByteString, - confirmedTranscriptHash :: ByteString, - extensions :: [Extension] - } - deriving (Eq, Show) - verifyMessageSignature :: RawMLS GroupContext -> RawMLS FramedContent -> From 
3e4f5f1661fe590368acfb446face36eb1167ea2 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 14 Apr 2023 17:47:53 +0200 Subject: [PATCH 23/75] Re-implement processing of external commits --- libs/wire-api/src/Wire/API/MLS/Proposal.hs | 10 + libs/wire-api/src/Wire/API/MLS/ProposalTag.hs | 2 +- services/galley/src/Galley/API/MLS/Message.hs | 199 +++++++++++------- services/galley/src/Galley/API/MLS/Util.hs | 3 +- 4 files changed, 134 insertions(+), 80 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/Proposal.hs b/libs/wire-api/src/Wire/API/MLS/Proposal.hs index 97cbd3efd7..887114318d 100644 --- a/libs/wire-api/src/Wire/API/MLS/Proposal.hs +++ b/libs/wire-api/src/Wire/API/MLS/Proposal.hs @@ -24,6 +24,7 @@ import Cassandra import Control.Lens (makePrisms) import Data.Binary import Data.ByteString as B +import GHC.Records import Imports import Test.QuickCheck import Wire.API.MLS.CipherSuite @@ -47,6 +48,15 @@ data Proposal deriving stock (Eq, Show, Generic) deriving (Arbitrary) via (GenericUniform Proposal) +instance HasField "tag" Proposal ProposalTag where + getField (AddProposal _) = AddProposalTag + getField (UpdateProposal _) = UpdateProposalTag + getField (RemoveProposal _) = RemoveProposalTag + getField (PreSharedKeyProposal _) = PreSharedKeyProposalTag + getField (ReInitProposal _) = ReInitProposalTag + getField (ExternalInitProposal _) = ExternalInitProposalTag + getField (GroupContextExtensionsProposal _) = GroupContextExtensionsProposalTag + instance ParseMLS Proposal where parseMLS = parseMLS >>= \case diff --git a/libs/wire-api/src/Wire/API/MLS/ProposalTag.hs b/libs/wire-api/src/Wire/API/MLS/ProposalTag.hs index 45561c8b78..8e7d8b3670 100644 --- a/libs/wire-api/src/Wire/API/MLS/ProposalTag.hs +++ b/libs/wire-api/src/Wire/API/MLS/ProposalTag.hs @@ -30,7 +30,7 @@ data ProposalTag | ReInitProposalTag | ExternalInitProposalTag | GroupContextExtensionsProposalTag - deriving stock (Bounded, Enum, Eq, Generic, Show) + deriving stock (Bounded, Enum, 
Eq, Ord, Generic, Show) deriving (Arbitrary) via GenericUniform ProposalTag instance ParseMLS ProposalTag where diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 55d50cb7d3..8ecd745972 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -95,6 +95,7 @@ import Wire.API.MLS.LeafNode import Wire.API.MLS.Message import Wire.API.MLS.Proposal import qualified Wire.API.MLS.Proposal as Proposal +import Wire.API.MLS.ProposalTag import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation import Wire.API.MLS.Validation @@ -368,21 +369,35 @@ postMLSCommitBundleToLocalConv qusr c conn bundle lConvOrSubId = do lConvOrSub <- fetchConvOrSub qusr lConvOrSubId senderIdentity <- getSenderIdentity qusr c bundle.sender lConvOrSub - action <- getCommitData senderIdentity lConvOrSub bundle.epoch bundle.commit.rmValue - events <- - processCommitWithAction - senderIdentity - conn - lConvOrSub - bundle.epoch - action - bundle.sender - bundle.commit.rmValue + (events, newClients) <- case bundle.sender of + SenderMember _index -> do + action <- getCommitData senderIdentity lConvOrSub bundle.epoch bundle.commit.rmValue + events <- + processInternalCommit + senderIdentity + conn + lConvOrSub + bundle.epoch + action + bundle.commit.rmValue + pure (events, cmIdentities (paAdd action)) + SenderExternal _ -> throw (mlsProtocolError "Unexpected sender") + SenderNewMemberProposal -> throw (mlsProtocolError "Unexpected sender") + SenderNewMemberCommit -> do + action <- getExternalCommitData senderIdentity lConvOrSub bundle.epoch bundle.commit.rmValue + processExternalCommit + senderIdentity + lConvOrSub + bundle.epoch + action + (cPath bundle.commit.rmValue) + pure ([], []) + storeGroupInfo (idForConvOrSub . 
tUnqualified $ lConvOrSub) bundle.groupInfo let cm = membersConvOrSub (tUnqualified lConvOrSub) unreachables <- propagateMessage qusr lConvOrSub conn bundle.commit.rmRaw cm - traverse_ (sendWelcomes lConvOrSub conn (cmIdentities (paAdd action))) bundle.welcome + traverse_ (sendWelcomes lConvOrSub conn newClients) bundle.welcome pure (events, unreachables) postMLSCommitBundleToRemoteConv :: @@ -647,6 +662,7 @@ getCommitData senderIdentity lConvOrSub epoch commit = do groupId = cnvmlsGroupId mlsMeta -- check epoch number + -- TODO: is this really needed? when (epoch /= curEpoch) $ throwS @'MLSStaleMessage evalState (indexMapConvOrSub convOrSub) $ do creatorAction <- @@ -654,18 +670,75 @@ getCommitData senderIdentity lConvOrSub epoch commit = do then addProposedClient senderIdentity else mempty proposals <- traverse (derefProposal groupId epoch) commit.cProposals - action <- - foldMap - (applyProposal mlsMeta groupId) - -- sort proposals before processing - (sortOn proposalProcessingStage proposals) + action <- applyProposals mlsMeta groupId proposals pure (creatorAction <> action) +getExternalCommitData :: + forall r. 
+ ( Member (Error MLSProtocolError) r, + Member (ErrorS 'MLSStaleMessage) r, + Member (ErrorS 'MLSUnsupportedProposal) r, + Member (ErrorS 'MLSInvalidLeafNodeIndex) r + ) => + ClientIdentity -> + Local ConvOrSubConv -> + Epoch -> + Commit -> + Sem r ProposalAction +getExternalCommitData senderIdentity lConvOrSub epoch commit = do + let convOrSub = tUnqualified lConvOrSub + mlsMeta = mlsMetaConvOrSub convOrSub + curEpoch = cnvmlsEpoch mlsMeta + groupId = cnvmlsGroupId mlsMeta + when (epoch /= curEpoch) $ throwS @'MLSStaleMessage + proposals <- traverse getInlineProposal commit.cProposals + + -- According to the spec, an external commit must contain: + -- (https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol.html#section-12.2 + -- + -- > Exactly one ExternalInit + -- > At most one Remove proposal, with which the joiner removes an old + -- > version of themselves. + -- > Zero or more PreSharedKey proposals. + -- > No other proposals. + let counts = foldr (\x -> Map.insertWith (+) x.tag (1 :: Int)) mempty proposals + + unless (Map.lookup ExternalInitProposalTag counts == Just 1) $ + throw (mlsProtocolError "External commits must contain exactly one ExternalInit proposal") + unless (Map.findWithDefault 0 RemoveProposalTag counts <= 1) $ + throw (mlsProtocolError "External commits must contain at most one Remove proposal") + unless (null (Map.keys counts \\ allowedProposals)) $ + throw (mlsProtocolError "Invalid proposal type in an external commit") + + action <- + evalState (indexMapConvOrSub convOrSub) $ do + -- process optional removal + propAction <- applyProposals mlsMeta groupId proposals + -- add sender + selfAction <- addProposedClient senderIdentity + case cmAssocs (paRemove propAction) of + [(cid, _)] + | cid /= senderIdentity -> + throw $ mlsProtocolError "Only the self client can be removed by an external commit" + _ -> pure () + + pure $ propAction <> selfAction + + pure action + where + allowedProposals = [ExternalInitProposalTag, 
RemoveProposalTag, PreSharedKeyProposalTag] + + getInlineProposal :: ProposalOrRef -> Sem r Proposal + getInlineProposal (Ref _) = + throw (mlsProtocolError "External commits cannot reference proposals") + getInlineProposal (Inline p) = pure p + processExternalCommit :: forall r. ( Member ConversationStore r, Member (Error MLSProtocolError) r, Member (ErrorS 'ConvNotFound) r, + Member (Error InternalError) r, Member (ErrorS 'MLSStaleMessage) r, Member (ErrorS 'MLSSubConvClientNotInParent) r, Member ExternalAccess r, @@ -687,52 +760,34 @@ processExternalCommit :: Sem r () processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do let convOrSub = tUnqualified lConvOrSub + + -- only members can join a subconversation + forOf_ _SubConv convOrSub $ \(mlsConv, _) -> + unless (isClientMember senderIdentity (mcMembers mlsConv)) $ + throwS @'MLSSubConvClientNotInParent + + -- get index of the newly added client, as calculated when processing proposals + idx <- case cmAssocs (paAdd action) of + [(cid, idx)] | cid == senderIdentity -> pure idx + _ -> throw (InternalErrorWithDescription "Unexpected Add action for external commit") + + -- extract leaf node from update path and validate it leafNode <- upLeaf <$> note (mlsProtocolError "External commits need an update path") updatePath - when (paExternalInit action == mempty) $ - throw . mlsProtocolError $ - "The external commit is missing an external init proposal" - unless (paAdd action == mempty) $ - throw . 
mlsProtocolError $ - "The external commit must not have add proposals" - - -- validate leaf node let cs = cnvmlsCipherSuite (mlsMetaConvOrSub (tUnqualified lConvOrSub)) let groupId = cnvmlsGroupId (mlsMetaConvOrSub convOrSub) - let extra = LeafNodeTBSExtraCommit groupId (error "calculate index") - - -- TODO: update client in conversation state - + let extra = LeafNodeTBSExtraCommit groupId idx case validateLeafNode cs (Just senderIdentity) extra leafNode.rmValue of Left errMsg -> throw $ mlsProtocolError ("Tried to add invalid LeafNode: " <> errMsg) Right _ -> pure () - -- only members can join a subconversation - forOf_ _SubConv convOrSub $ \(mlsConv, _) -> - unless (isClientMember senderIdentity (mcMembers mlsConv)) $ - throwS @'MLSSubConvClientNotInParent - withCommitLock groupId epoch $ do - -- validate remove proposal: an external commit can contain - -- - -- > At most one Remove proposal, with which the joiner removes an old - -- > version of themselves - remIndex <- case cmAssocs (paRemove action) of - [] -> pure Nothing - [(_, idx)] -> do - cid <- - note (mlsProtocolError "Invalid index in remove proposal") $ - imLookup (indexMapConvOrSub convOrSub) idx - unless (cid == senderIdentity) $ - throw $ - mlsProtocolError "Only the self client can be removed by an external commit" - pure (Just idx) - _ -> throw (mlsProtocolError "Multiple remove proposals in external commits not allowed") + let remIndices = map snd (cmAssocs (paRemove action)) -- increment epoch number lConvOrSub' <- for lConvOrSub incrementEpoch @@ -740,7 +795,7 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do -- fetch backend remove proposals of the previous epoch indicesInRemoveProposals <- -- skip remove proposals of already removed by the external commit - filter (maybe (const True) (/=) remIndex) + (\\ remIndices) <$> getPendingBackendRemoveProposals groupId epoch -- requeue backend remove proposals for the current epoch @@ -751,35 +806,6 @@ 
processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do (cidQualifiedUser senderIdentity) cm -processCommitWithAction :: - forall r. - ( HasProposalEffects r, - Member (ErrorS 'ConvNotFound) r, - Member (ErrorS 'MissingLegalholdConsent) r, - Member (ErrorS 'MLSCommitMissingReferences) r, - Member (ErrorS 'MLSSelfRemovalNotAllowed) r, - Member (ErrorS 'MLSStaleMessage) r, - Member (ErrorS 'MLSSubConvClientNotInParent) r, - Member Resource r, - Member SubConversationStore r - ) => - ClientIdentity -> - Maybe ConnId -> - Local ConvOrSubConv -> - Epoch -> - ProposalAction -> - Sender -> - Commit -> - Sem r [LocalConversationUpdate] -processCommitWithAction senderIdentity con lConvOrSub epoch action sender commit = - case sender of - SenderMember _index -> - processInternalCommit senderIdentity con lConvOrSub epoch action commit - SenderExternal _ -> throw (mlsProtocolError "Unexpected sender") - SenderNewMemberProposal -> throw (mlsProtocolError "Unexpected sender") - SenderNewMemberCommit -> - processExternalCommit senderIdentity lConvOrSub epoch action (cPath commit) $> [] - processInternalCommit :: forall r. ( HasProposalEffects r, @@ -837,6 +863,21 @@ addProposedClient cid = do put im' pure (paAddClient cid idx) +applyProposals :: + ( Member (State IndexMap) r, + Member (Error MLSProtocolError) r, + Member (ErrorS 'MLSUnsupportedProposal) r, + Member (ErrorS 'MLSInvalidLeafNodeIndex) r + ) => + ConversationMLSData -> + GroupId -> + [Proposal] -> + Sem r ProposalAction +applyProposals mlsMeta groupId = + -- proposals are sorted before processing + foldMap (applyProposal mlsMeta groupId) + . 
sortOn proposalProcessingStage + applyProposal :: ( Member (State IndexMap) r, Member (Error MLSProtocolError) r, @@ -1121,6 +1162,8 @@ executeProposalAction qusr con lconvOrSub action = do runFederatedConcurrently_ (toList remoteDomains) $ \_ -> do void $ fedClient @'Galley @"on-new-remote-subconversation" nrc + -- TODO: increment epoch here instead of in the calling site + pure (addEvents <> removeEvents) where checkRemoval :: diff --git a/services/galley/src/Galley/API/MLS/Util.hs b/services/galley/src/Galley/API/MLS/Util.hs index 927e265937..fa95794513 100644 --- a/services/galley/src/Galley/API/MLS/Util.hs +++ b/services/galley/src/Galley/API/MLS/Util.hs @@ -37,6 +37,7 @@ import Wire.API.Error import Wire.API.Error.Galley import Wire.API.MLS.Epoch import Wire.API.MLS.Group +import Wire.API.MLS.LeafNode import Wire.API.MLS.Proposal import Wire.API.MLS.Serialisation @@ -71,7 +72,7 @@ getPendingBackendRemoveProposals :: ) => GroupId -> Epoch -> - Sem r [Word32] + Sem r [LeafIndex] getPendingBackendRemoveProposals gid epoch = do proposals <- getAllPendingProposals gid epoch catMaybes From 289666516070728bd3d5b1465ecf5b635a562c60 Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Fri, 14 Apr 2023 16:01:11 +0000 Subject: [PATCH 24/75] add references from data types to MLS spec --- .../wire-api/src/Wire/API/MLS/AuthenticatedContent.hs | 3 ++- libs/wire-api/src/Wire/API/MLS/Capabilities.hs | 1 + libs/wire-api/src/Wire/API/MLS/CipherSuite.hs | 2 ++ libs/wire-api/src/Wire/API/MLS/Commit.hs | 4 ++++ libs/wire-api/src/Wire/API/MLS/Credential.hs | 1 + libs/wire-api/src/Wire/API/MLS/Extension.hs | 1 + libs/wire-api/src/Wire/API/MLS/GroupInfo.hs | 3 +++ libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs | 1 + libs/wire-api/src/Wire/API/MLS/KeyPackage.hs | 2 ++ libs/wire-api/src/Wire/API/MLS/LeafNode.hs | 8 ++++++++ libs/wire-api/src/Wire/API/MLS/Lifetime.hs | 1 + libs/wire-api/src/Wire/API/MLS/Message.hs | 11 +++++++++++ libs/wire-api/src/Wire/API/MLS/Proposal.hs | 7 +++++++ 
libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs | 1 + libs/wire-api/src/Wire/API/MLS/Welcome.hs | 2 ++ 15 files changed, 47 insertions(+), 1 deletion(-) diff --git a/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs b/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs index 8efa64193b..d941b69efa 100644 --- a/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs +++ b/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs @@ -35,7 +35,8 @@ import Wire.API.MLS.Proposal import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation --- Needed to compute proposal refs. +-- | Needed to compute proposal refs. +-- https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6-7 data AuthenticatedContent = AuthenticatedContent { wireFormat :: WireFormatTag, content :: RawMLS FramedContent, diff --git a/libs/wire-api/src/Wire/API/MLS/Capabilities.hs b/libs/wire-api/src/Wire/API/MLS/Capabilities.hs index bfbb07cc2b..64386ef72e 100644 --- a/libs/wire-api/src/Wire/API/MLS/Capabilities.hs +++ b/libs/wire-api/src/Wire/API/MLS/Capabilities.hs @@ -26,6 +26,7 @@ import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation import Wire.Arbitrary +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.2-2 data Capabilities = Capabilities { versions :: [ProtocolVersion], ciphersuites :: [CipherSuite], diff --git a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs index 483a113426..c1b4d00da1 100644 --- a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs +++ b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs @@ -116,11 +116,13 @@ csVerifySignature MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 pub x sig = sig' <- Ed25519.signature sig pure $ Ed25519.verify pub' x.rmRaw sig' +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-5.2-5 type 
RefHashInput = SignContent pattern RefHashInput :: ByteString -> RawMLS a -> RefHashInput a pattern RefHashInput label content = SignContent label content +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-5.1.2-6 data SignContent a = SignContent { sigLabel :: ByteString, content :: RawMLS a diff --git a/libs/wire-api/src/Wire/API/MLS/Commit.hs b/libs/wire-api/src/Wire/API/MLS/Commit.hs index b130a6036c..83cb4277ad 100644 --- a/libs/wire-api/src/Wire/API/MLS/Commit.hs +++ b/libs/wire-api/src/Wire/API/MLS/Commit.hs @@ -23,6 +23,7 @@ import Wire.API.MLS.Proposal import Wire.API.MLS.Serialisation import Wire.Arbitrary +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-12.4-3 data Commit = Commit { cProposals :: [ProposalOrRef], cPath :: Maybe UpdatePath @@ -41,6 +42,7 @@ instance SerialiseMLS Commit where serialiseMLSVector @VarInt serialiseMLS c.cProposals serialiseMLSOptional serialiseMLS c.cPath +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.6-2 data UpdatePath = UpdatePath { upLeaf :: RawMLS LeafNode, upNodes :: [UpdatePathNode] @@ -56,6 +58,7 @@ instance SerialiseMLS UpdatePath where serialiseMLS up.upLeaf serialiseMLSVector @VarInt serialiseMLS up.upNodes +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.6-2 data UpdatePathNode = UpdatePathNode { upnPublicKey :: ByteString, upnSecret :: [HPKECiphertext] @@ -71,6 +74,7 @@ instance SerialiseMLS UpdatePathNode where serialiseMLSBytes @VarInt upn.upnPublicKey serialiseMLSVector @VarInt serialiseMLS upn.upnSecret +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.6-2 data HPKECiphertext = HPKECiphertext { hcOutput :: ByteString, hcCiphertext :: ByteString diff --git 
a/libs/wire-api/src/Wire/API/MLS/Credential.hs b/libs/wire-api/src/Wire/API/MLS/Credential.hs index 12a3da5b6d..0ec9d8d3a2 100644 --- a/libs/wire-api/src/Wire/API/MLS/Credential.hs +++ b/libs/wire-api/src/Wire/API/MLS/Credential.hs @@ -47,6 +47,7 @@ import Wire.Arbitrary -- | An MLS credential. -- -- Only the @BasicCredential@ type is supported. +-- https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-5.3-3 data Credential = BasicCredential ByteString deriving stock (Eq, Show, Generic) deriving (Arbitrary) via GenericUniform Credential diff --git a/libs/wire-api/src/Wire/API/MLS/Extension.hs b/libs/wire-api/src/Wire/API/MLS/Extension.hs index 3c060f6fc4..c40c99ce1d 100644 --- a/libs/wire-api/src/Wire/API/MLS/Extension.hs +++ b/libs/wire-api/src/Wire/API/MLS/Extension.hs @@ -26,6 +26,7 @@ import Imports import Wire.API.MLS.Serialisation import Wire.Arbitrary +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.2-2 data Extension = Extension { extType :: Word16, extData :: ByteString diff --git a/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs b/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs index f563a0818b..77cf203662 100644 --- a/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs +++ b/libs/wire-api/src/Wire/API/MLS/GroupInfo.hs @@ -36,6 +36,7 @@ import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation import Wire.Arbitrary +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-8.1-2 data GroupContext = GroupContext { protocolVersion :: ProtocolVersion, cipherSuite :: CipherSuite, @@ -69,6 +70,7 @@ instance SerialiseMLS GroupContext where serialiseMLSBytes @VarInt gc.confirmedTranscriptHash serialiseMLSVector @VarInt serialiseMLS gc.extensions +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-12.4.3-7 
data GroupInfoTBS = GroupInfoTBS { groupContext :: GroupContext, extensions :: [Extension], @@ -93,6 +95,7 @@ instance SerialiseMLS GroupInfoTBS where serialiseMLSBytes @VarInt tbs.confirmationTag serialiseMLS tbs.signer +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-12.4.3-2 data GroupInfo = GroupInfo { tbs :: GroupInfoTBS, signature_ :: ByteString diff --git a/libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs b/libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs index 004ed3443d..3d0d947f08 100644 --- a/libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs +++ b/libs/wire-api/src/Wire/API/MLS/HPKEPublicKey.hs @@ -23,6 +23,7 @@ import Imports import Test.QuickCheck import Wire.API.MLS.Serialisation +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-5.1.1-2 newtype HPKEPublicKey = HPKEPublicKey {unHPKEPublicKey :: ByteString} deriving (Show, Eq, Arbitrary) diff --git a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs index 4a426a7c5d..dd7ad72cbc 100644 --- a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs +++ b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs @@ -157,6 +157,7 @@ kpRef' kp = -------------------------------------------------------------------------------- +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-10-6 data KeyPackageTBS = KeyPackageTBS { protocolVersion :: ProtocolVersion, cipherSuite :: CipherSuite, @@ -184,6 +185,7 @@ instance SerialiseMLS KeyPackageTBS where serialiseMLS tbs.leafNode serialiseMLSVector @VarInt serialiseMLS tbs.extensions +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-10-6 data KeyPackage = KeyPackage { tbs :: RawMLS KeyPackageTBS, signature_ :: ByteString diff --git a/libs/wire-api/src/Wire/API/MLS/LeafNode.hs 
b/libs/wire-api/src/Wire/API/MLS/LeafNode.hs index 064a6c2e55..6e0d15cef4 100644 --- a/libs/wire-api/src/Wire/API/MLS/LeafNode.hs +++ b/libs/wire-api/src/Wire/API/MLS/LeafNode.hs @@ -44,6 +44,8 @@ import Wire.Arbitrary type LeafIndex = Word32 -- LeafNodeCore contains fields in the intersection of LeafNode and LeafNodeTBS +-- +-- https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.2-2 data LeafNodeCore = LeafNodeCore { encryptionKey :: HPKEPublicKey, signatureKey :: ByteString, @@ -56,6 +58,8 @@ data LeafNodeCore = LeafNodeCore deriving (Arbitrary) via (GenericUniform LeafNodeCore) -- extra fields in LeafNodeTBS, but not in LeafNode +-- +-- https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.2-2 data LeafNodeTBSExtra = LeafNodeTBSExtraKeyPackage | LeafNodeTBSExtraUpdate GroupId LeafIndex @@ -76,6 +80,7 @@ instance HasField "tag" LeafNodeTBSExtra LeafNodeSourceTag where LeafNodeTBSExtraCommit _ _ -> LeafNodeSourceCommitTag LeafNodeTBSExtraUpdate _ _ -> LeafNodeSourceUpdateTag +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.2-2 data LeafNodeTBS = LeafNodeTBS { core :: RawMLS LeafNodeCore, extra :: LeafNodeTBSExtra @@ -107,6 +112,8 @@ instance SerialiseMLS LeafNodeCore where -- | This type can only verify the signature when the LeafNodeSource is -- LeafNodeSourceKeyPackage +-- +-- https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.2-2 data LeafNode = LeafNode { core :: RawMLS LeafNodeCore, signature_ :: ByteString @@ -146,6 +153,7 @@ instance HasField "source" LeafNode LeafNodeSource where instance HasField "extensions" LeafNode [Extension] where getField = (.core.rmValue.extensions) +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.2-2 
data LeafNodeSource = LeafNodeSourceKeyPackage Lifetime | LeafNodeSourceUpdate diff --git a/libs/wire-api/src/Wire/API/MLS/Lifetime.hs b/libs/wire-api/src/Wire/API/MLS/Lifetime.hs index 8a05ce1c42..0f17c2978d 100644 --- a/libs/wire-api/src/Wire/API/MLS/Lifetime.hs +++ b/libs/wire-api/src/Wire/API/MLS/Lifetime.hs @@ -31,6 +31,7 @@ newtype Timestamp = Timestamp {timestampSeconds :: Word64} tsPOSIX :: Timestamp -> POSIXTime tsPOSIX = fromIntegral . timestampSeconds +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.2-2 data Lifetime = Lifetime { ltNotBefore :: Timestamp, ltNotAfter :: Timestamp diff --git a/libs/wire-api/src/Wire/API/MLS/Message.hs b/libs/wire-api/src/Wire/API/MLS/Message.hs index a98e24f1dd..bdd2f93f48 100644 --- a/libs/wire-api/src/Wire/API/MLS/Message.hs +++ b/libs/wire-api/src/Wire/API/MLS/Message.hs @@ -80,6 +80,7 @@ instance ParseMLS WireFormatTag where instance SerialiseMLS WireFormatTag where serialiseMLS = serialiseMLSEnum @Word16 +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6-4 data Message = Message { protocolVersion :: ProtocolVersion, content :: MessageContent @@ -103,6 +104,7 @@ instance SerialiseMLS Message where instance HasField "wireFormat" Message WireFormatTag where getField = (.content.wireFormat) +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6-4 data MessageContent = MessagePrivate (RawMLS PrivateMessage) | MessagePublic PublicMessage @@ -147,10 +149,12 @@ instance SerialiseMLS MessageContent where instance S.ToSchema Message where declareNamedSchema _ = pure (mlsSwagger "MLSMessage") +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6.2-2 data PublicMessage = PublicMessage { content :: RawMLS FramedContent, authData :: RawMLS 
FramedContentAuthData, -- Present iff content.rmValue.sender is of type Member. + -- https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6.2-4 membershipTag :: Maybe ByteString } deriving (Eq, Show) @@ -175,6 +179,7 @@ instance SerialiseMLS PublicMessage where serialiseMLS msg.authData traverse_ (serialiseMLSBytes @VarInt) msg.membershipTag +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6.3.1-2 data PrivateMessage = PrivateMessage { groupId :: GroupId, epoch :: Epoch, @@ -195,6 +200,7 @@ instance ParseMLS PrivateMessage where <*> parseMLSBytes @VarInt <*> parseMLSBytes @VarInt +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6-4 data SenderTag = SenderMemberTag | SenderExternalTag @@ -208,6 +214,7 @@ instance ParseMLS SenderTag where instance SerialiseMLS SenderTag where serialiseMLS = serialiseMLSEnum @Word8 +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6-4 data Sender = SenderMember LeafIndex | SenderExternal Word32 @@ -241,6 +248,7 @@ needsGroupContext (SenderMember _) = True needsGroupContext (SenderExternal _) = True needsGroupContext _ = False +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6-4 data FramedContent = FramedContent { groupId :: GroupId, epoch :: Epoch, @@ -279,6 +287,7 @@ instance ParseMLS FramedContentDataTag where instance SerialiseMLS FramedContentDataTag where serialiseMLS = serialiseMLSEnum @Word8 +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6-4 data FramedContentData = FramedContentApplicationData ByteString | FramedContentProposal (RawMLS Proposal) @@ -309,6 +318,7 @@ instance SerialiseMLS FramedContentData where 
serialiseMLS FramedContentCommitTag serialiseMLS commit +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6.1-2 data FramedContentTBS = FramedContentTBS { protocolVersion :: ProtocolVersion, wireFormat :: WireFormatTag, @@ -333,6 +343,7 @@ framedContentTBS ctx msgContent = groupContext = guard (needsGroupContext msgContent.rmValue.sender) $> ctx } +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6.1-2 data FramedContentAuthData = FramedContentAuthData { signature_ :: ByteString, -- Present iff it is part of a commit. diff --git a/libs/wire-api/src/Wire/API/MLS/Proposal.hs b/libs/wire-api/src/Wire/API/MLS/Proposal.hs index 887114318d..88f3602f26 100644 --- a/libs/wire-api/src/Wire/API/MLS/Proposal.hs +++ b/libs/wire-api/src/Wire/API/MLS/Proposal.hs @@ -37,6 +37,7 @@ import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation import Wire.Arbitrary +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-12.1-2 data Proposal = AddProposal (RawMLS KeyPackage) | UpdateProposal (RawMLS LeafNode) @@ -92,6 +93,7 @@ instance SerialiseMLS Proposal where serialiseMLS GroupContextExtensionsProposalTag serialiseMLSVector @VarInt serialiseMLS es +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-8.4-6 data PreSharedKeyTag = ExternalKeyTag | ResumptionKeyTag deriving (Bounded, Enum, Eq, Show) @@ -101,6 +103,7 @@ instance ParseMLS PreSharedKeyTag where instance SerialiseMLS PreSharedKeyTag where serialiseMLS = serialiseMLSEnum @Word8 +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-8.4-6 data PreSharedKeyID = ExternalKeyID ByteString | ResumptionKeyID Resumption deriving stock (Eq, Show, Generic) deriving (Arbitrary) via 
(GenericUniform PreSharedKeyID) @@ -120,6 +123,7 @@ instance SerialiseMLS PreSharedKeyID where serialiseMLS ResumptionKeyTag serialiseMLS r +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-8.4-6 data Resumption = Resumption { resUsage :: Word8, resGroupId :: GroupId, @@ -141,6 +145,7 @@ instance SerialiseMLS Resumption where serialiseMLS r.resGroupId serialiseMLS r.resEpoch +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-12.1.5-2 data ReInit = ReInit { riGroupId :: GroupId, riProtocolVersion :: ProtocolVersion, @@ -188,6 +193,7 @@ instance SerialiseMLS MessageRange where serialiseMLS mrFirstGeneration serialiseMLS mrLastGeneration +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-12.4-3 data ProposalOrRefTag = InlineTag | RefTag deriving stock (Bounded, Enum, Eq, Show) @@ -197,6 +203,7 @@ instance ParseMLS ProposalOrRefTag where instance SerialiseMLS ProposalOrRefTag where serialiseMLS = serialiseMLSEnum @Word8 +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-12.4-3 data ProposalOrRef = Inline Proposal | Ref ProposalRef deriving stock (Eq, Show, Generic) deriving (Arbitrary) via (GenericUniform ProposalOrRef) diff --git a/libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs b/libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs index 9fcbb71847..9d8a022068 100644 --- a/libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs +++ b/libs/wire-api/src/Wire/API/MLS/ProtocolVersion.hs @@ -30,6 +30,7 @@ import Imports import Wire.API.MLS.Serialisation import Wire.Arbitrary +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6-4 newtype ProtocolVersion = ProtocolVersion {pvNumber :: Word16} deriving newtype (Eq, Ord, Show, Binary, 
Arbitrary, ParseMLS, SerialiseMLS) diff --git a/libs/wire-api/src/Wire/API/MLS/Welcome.hs b/libs/wire-api/src/Wire/API/MLS/Welcome.hs index cacb183cba..17dc605d8c 100644 --- a/libs/wire-api/src/Wire/API/MLS/Welcome.hs +++ b/libs/wire-api/src/Wire/API/MLS/Welcome.hs @@ -25,6 +25,7 @@ import Wire.API.MLS.KeyPackage import Wire.API.MLS.Serialisation import Wire.Arbitrary +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-12.4.3.1-5 data Welcome = Welcome { welCipherSuite :: CipherSuite, welSecrets :: [GroupSecrets], @@ -49,6 +50,7 @@ instance SerialiseMLS Welcome where serialiseMLSVector @VarInt serialiseMLS ss serialiseMLSBytes @VarInt gi +-- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-12.4.3.1-5 data GroupSecrets = GroupSecrets { gsNewMember :: KeyPackageRef, gsSecrets :: HPKECiphertext From 4b756f443ea013c66d5bf53344fe41aa90c4ea1a Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 18 Apr 2023 10:57:54 +0200 Subject: [PATCH 25/75] Remove key package mapping code --- .../src/Wire/API/Routes/Internal/Brig.hs | 136 +----------------- services/brig/src/Brig/API/Internal.hs | 46 +----- services/brig/src/Brig/API/MLS/KeyPackages.hs | 3 +- services/brig/src/Brig/Data/MLS/KeyPackage.hs | 117 --------------- .../brig/test/integration/API/Internal.hs | 127 +--------------- services/galley/galley.cabal | 1 - .../galley/src/Galley/API/MLS/KeyPackage.hs | 38 ----- services/galley/src/Galley/API/MLS/Message.hs | 2 +- .../galley/src/Galley/Effects/BrigAccess.hs | 10 -- services/galley/src/Galley/Intra/Client.hs | 53 ------- services/galley/src/Galley/Intra/Effects.hs | 11 -- .../galley/test/integration/API/MLS/Util.hs | 16 --- 12 files changed, 5 insertions(+), 555 deletions(-) delete mode 100644 services/galley/src/Galley/API/MLS/KeyPackage.hs diff --git a/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs 
b/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs index 7f1ea227c0..50285d680b 100644 --- a/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs +++ b/libs/wire-api/src/Wire/API/Routes/Internal/Brig.hs @@ -29,10 +29,6 @@ module Wire.API.Routes.Internal.Brig DeleteAccountConferenceCallingConfig, swaggerDoc, module Wire.API.Routes.Internal.Brig.EJPD, - NewKeyPackageRef (..), - NewKeyPackage (..), - NewKeyPackageResult (..), - DeleteKeyPackageRefsRequest (..), ) where @@ -52,8 +48,6 @@ import Wire.API.Connection import Wire.API.Error import Wire.API.Error.Brig import Wire.API.MLS.CipherSuite (SignatureSchemeTag) -import Wire.API.MLS.Credential -import Wire.API.MLS.KeyPackage import Wire.API.MakesFederatedCall import Wire.API.Routes.Internal.Brig.Connection import Wire.API.Routes.Internal.Brig.EJPD @@ -183,129 +177,7 @@ instance ToSchema NewKeyPackageRef where <*> nkprClientId .= field "client_id" schema <*> nkprConversation .= field "conversation" schema -data NewKeyPackage = NewKeyPackage - { nkpConversation :: Qualified ConvId, - nkpKeyPackage :: KeyPackageData - } - deriving stock (Eq, Show, Generic) - deriving (ToJSON, FromJSON, S.ToSchema) via (Schema NewKeyPackage) - -instance ToSchema NewKeyPackage where - schema = - object "NewKeyPackage" $ - NewKeyPackage - <$> nkpConversation .= field "conversation" schema - <*> nkpKeyPackage .= field "key_package" schema - -data NewKeyPackageResult = NewKeyPackageResult - { nkpresClientIdentity :: ClientIdentity, - nkpresKeyPackageRef :: KeyPackageRef - } - deriving stock (Eq, Show, Generic) - deriving (ToJSON, FromJSON, S.ToSchema) via (Schema NewKeyPackageResult) - -instance ToSchema NewKeyPackageResult where - schema = - object "NewKeyPackageResult" $ - NewKeyPackageResult - <$> nkpresClientIdentity .= field "client_identity" schema - <*> nkpresKeyPackageRef .= field "key_package_ref" schema - -newtype DeleteKeyPackageRefsRequest = DeleteKeyPackageRefsRequest {unDeleteKeyPackageRefsRequest :: [KeyPackageRef]} - deriving 
(Eq, Show) - deriving (ToJSON, FromJSON, S.ToSchema) via (Schema DeleteKeyPackageRefsRequest) - -instance ToSchema DeleteKeyPackageRefsRequest where - schema = - object "DeleteKeyPackageRefsRequest" $ - DeleteKeyPackageRefsRequest - <$> unDeleteKeyPackageRefsRequest .= field "key_package_refs" (array schema) - -type MLSAPI = - "mls" - :> ( ( "key-packages" - :> ( ( Capture "ref" KeyPackageRef - :> ( Named - "get-client-by-key-package-ref" - ( Summary "Resolve an MLS key package ref to a qualified client ID" - :> MultiVerb - 'GET - '[Servant.JSON] - '[ RespondEmpty 404 "Key package ref not found", - Respond 200 "Key package ref found" ClientIdentity - ] - (Maybe ClientIdentity) - ) - :<|> ( "conversation" - :> ( PutConversationByKeyPackageRef - :<|> GetConversationByKeyPackageRef - ) - ) - :<|> Named - "put-key-package-ref" - ( Summary "Create a new KeyPackageRef mapping" - :> ReqBody '[Servant.JSON] NewKeyPackageRef - :> MultiVerb - 'PUT - '[Servant.JSON] - '[RespondEmpty 201 "Key package ref mapping created"] - () - ) - :<|> Named - "post-key-package-ref" - ( Summary "Update a KeyPackageRef in mapping" - :> ReqBody '[Servant.JSON] KeyPackageRef - :> MultiVerb - 'POST - '[Servant.JSON] - '[RespondEmpty 201 "Key package ref mapping updated"] - () - ) - ) - ) - :<|> Named - "delete-key-package-refs" - ( Summary "Delete a batch of KeyPackageRef mappings" - :> ReqBody '[Servant.JSON] DeleteKeyPackageRefsRequest - :> MultiVerb - 'DELETE - '[Servant.JSON] - '[RespondEmpty 200 "Key package ref mappings deleted"] - () - ) - ) - ) - :<|> GetMLSClients - :<|> MapKeyPackageRefs - ) - -type PutConversationByKeyPackageRef = - Named - "put-conversation-by-key-package-ref" - ( Summary "Associate a conversation with a key package" - :> ReqBody '[Servant.JSON] (Qualified ConvId) - :> MultiVerb - 'PUT - '[Servant.JSON] - [ RespondEmpty 404 "No key package found by reference", - RespondEmpty 204 "Converstaion associated" - ] - Bool - ) - -type GetConversationByKeyPackageRef = - Named 
- "get-conversation-by-key-package-ref" - ( Summary - "Retrieve the conversation associated with a key package" - :> MultiVerb - 'GET - '[Servant.JSON] - [ RespondEmpty 404 "No associated conversation or bad key package", - Respond 200 "Conversation found" (Qualified ConvId) - ] - (Maybe (Qualified ConvId)) - ) +type MLSAPI = "mls" :> GetMLSClients type GetMLSClients = Summary "Return all clients and all MLS-capable clients of a user" @@ -318,12 +190,6 @@ type GetMLSClients = '[Servant.JSON] (Respond 200 "MLS clients" (Set ClientInfo)) -type MapKeyPackageRefs = - Summary "Insert bundle into the KeyPackage ref mapping. Only for tests." - :> "key-package-refs" - :> ReqBody '[Servant.JSON] KeyPackageBundle - :> MultiVerb 'PUT '[Servant.JSON] '[RespondEmpty 204 "Mapping was updated"] () - type GetVerificationCode = Summary "Get verification code for a given email and action" :> "users" diff --git a/services/brig/src/Brig/API/Internal.hs b/services/brig/src/Brig/API/Internal.hs index 71db75c917..55dc93cbcf 100644 --- a/services/brig/src/Brig/API/Internal.hs +++ b/services/brig/src/Brig/API/Internal.hs @@ -86,9 +86,6 @@ import Wire.API.Error import qualified Wire.API.Error.Brig as E import Wire.API.Federation.API import Wire.API.MLS.CipherSuite -import Wire.API.MLS.Credential -import Wire.API.MLS.KeyPackage -import Wire.API.Routes.Internal.Brig import qualified Wire.API.Routes.Internal.Brig as BrigIRoutes import Wire.API.Routes.Internal.Brig.Connection import Wire.API.Routes.Named @@ -131,19 +128,7 @@ ejpdAPI = :<|> getConnectionsStatus mlsAPI :: ServerT BrigIRoutes.MLSAPI (Handler r) -mlsAPI = - ( ( \ref -> - Named @"get-client-by-key-package-ref" (getClientByKeyPackageRef ref) - :<|> ( Named @"put-conversation-by-key-package-ref" (putConvIdByKeyPackageRef ref) - :<|> Named @"get-conversation-by-key-package-ref" (getConvIdByKeyPackageRef ref) - ) - :<|> Named @"put-key-package-ref" (putKeyPackageRef ref) - :<|> Named @"post-key-package-ref" (postKeyPackageRef ref) - ) - 
:<|> Named @"delete-key-package-refs" deleteKeyPackageRefs - ) - :<|> getMLSClients - :<|> mapKeyPackageRefsInternal +mlsAPI = getMLSClients accountAPI :: ( Member BlacklistStore r, @@ -185,29 +170,6 @@ deleteAccountConferenceCallingConfig :: UserId -> (Handler r) NoContent deleteAccountConferenceCallingConfig uid = lift $ wrapClient $ Data.updateFeatureConferenceCalling uid Nothing $> NoContent -getClientByKeyPackageRef :: KeyPackageRef -> Handler r (Maybe ClientIdentity) -getClientByKeyPackageRef = runMaybeT . mapMaybeT wrapClientE . Data.derefKeyPackage - --- Used by galley to update conversation id in mls_key_package_ref -putConvIdByKeyPackageRef :: KeyPackageRef -> Qualified ConvId -> Handler r Bool -putConvIdByKeyPackageRef ref = lift . wrapClient . Data.keyPackageRefSetConvId ref - --- Used by galley to create a new record in mls_key_package_ref -putKeyPackageRef :: KeyPackageRef -> NewKeyPackageRef -> Handler r () -putKeyPackageRef ref = lift . wrapClient . Data.addKeyPackageRef ref - --- Used by galley to retrieve conversation id from mls_key_package_ref -getConvIdByKeyPackageRef :: KeyPackageRef -> Handler r (Maybe (Qualified ConvId)) -getConvIdByKeyPackageRef = runMaybeT . mapMaybeT wrapClientE . Data.keyPackageRefConvId - --- Used by galley to update key packages in mls_key_package_ref on commits with update_path -postKeyPackageRef :: KeyPackageRef -> KeyPackageRef -> Handler r () -postKeyPackageRef ref = lift . wrapClient . Data.updateKeyPackageRef ref - -deleteKeyPackageRefs :: DeleteKeyPackageRefsRequest -> Handler r () -deleteKeyPackageRefs (DeleteKeyPackageRefsRequest refs) = - lift . wrapClient $ pooledForConcurrentlyN_ 16 refs Data.deleteKeyPackageRef - getMLSClients :: UserId -> SignatureSchemeTag -> Handler r (Set ClientInfo) getMLSClients usr _ss = do -- FUTUREWORK: check existence of key packages with a given ciphersuite @@ -225,12 +187,6 @@ getMLSClients usr _ss = do (cid,) . 
(> 0) <$> Data.countKeyPackages lusr cid -mapKeyPackageRefsInternal :: KeyPackageBundle -> Handler r () -mapKeyPackageRefsInternal bundle = do - wrapClientE $ - for_ (kpbEntries bundle) $ \e -> - Data.mapKeyPackageRef (kpbeRef e) (kpbeUser e) (kpbeClient e) - getVerificationCode :: UserId -> VerificationAction -> Handler r (Maybe Code.Value) getVerificationCode uid action = do user <- wrapClientE $ Api.lookupUser NoPendingInvitations uid diff --git a/services/brig/src/Brig/API/MLS/KeyPackages.hs b/services/brig/src/Brig/API/MLS/KeyPackages.hs index 9426a736c3..e545af1bf6 100644 --- a/services/brig/src/Brig/API/MLS/KeyPackages.hs +++ b/services/brig/src/Brig/API/MLS/KeyPackages.hs @@ -111,7 +111,7 @@ claimRemoteKeyPackages lusr target = do ckprTarget = tUnqualified target } - -- validate and set up mappings for all claimed key packages + -- validate all claimed key packages for_ (kpbEntries bundle) $ \e -> do let cid = mkClientIdentity (kpbeUser e) (kpbeClient e) kpRaw <- @@ -126,7 +126,6 @@ claimRemoteKeyPackages lusr target = do . throwE . 
clientDataError $ InvalidKeyPackageRef - wrapClientE $ Data.mapKeyPackageRef (kpbeRef e) (kpbeUser e) (kpbeClient e) pure bundle where diff --git a/services/brig/src/Brig/Data/MLS/KeyPackage.hs b/services/brig/src/Brig/Data/MLS/KeyPackage.hs index 0f88575229..03a69e69ab 100644 --- a/services/brig/src/Brig/Data/MLS/KeyPackage.hs +++ b/services/brig/src/Brig/Data/MLS/KeyPackage.hs @@ -18,14 +18,7 @@ module Brig.Data.MLS.KeyPackage ( insertKeyPackages, claimKeyPackage, - mapKeyPackageRef, countKeyPackages, - derefKeyPackage, - keyPackageRefConvId, - keyPackageRefSetConvId, - addKeyPackageRef, - updateKeyPackageRef, - deleteKeyPackageRef, ) where @@ -33,25 +26,19 @@ import Brig.API.MLS.KeyPackages.Validation import Brig.App import Brig.Options hiding (Timeout) import Cassandra -import Cassandra.Settings import Control.Arrow import Control.Error -import Control.Exception import Control.Lens -import Control.Monad.Catch import Control.Monad.Random (randomRIO) -import Data.Domain import Data.Functor import Data.Id import Data.Qualified import Data.Time.Clock import Data.Time.Clock.POSIX import Imports -import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage import Wire.API.MLS.LeafNode import Wire.API.MLS.Serialisation -import Wire.API.Routes.Internal.Brig insertKeyPackages :: MonadClient m => UserId -> ClientId -> [(KeyPackageRef, KeyPackageData)] -> m () insertKeyPackages uid cid kps = retry x5 . batch $ do @@ -81,7 +68,6 @@ claimKeyPackage u c = do for mk $ \(ref, kpd) -> do retry x5 $ write deleteByRef (params LocalQuorum (tUnqualified u, c, ref)) pure (ref, kpd) - lift $ mapKeyPackageRef ref (tUntagged u) c pure (ref, kpd) where deleteByRef :: PrepQuery W (UserId, ClientId, KeyPackageRef) () @@ -134,14 +120,6 @@ getNonClaimedKeyPackages u c = do _ -> True -- the assumption is the key package is valid and has the -- required extensions so we return 'True' --- | Add key package ref to mapping table. 
-mapKeyPackageRef :: MonadClient m => KeyPackageRef -> Qualified UserId -> ClientId -> m () -mapKeyPackageRef ref u c = - write insertQuery (params LocalQuorum (ref, qDomain u, qUnqualified u, c)) - where - insertQuery :: PrepQuery W (KeyPackageRef, Domain, UserId, ClientId) () - insertQuery = "INSERT INTO mls_key_package_refs (ref, domain, user, client) VALUES (?, ?, ?, ?)" - countKeyPackages :: ( MonadReader Env m, MonadClient m @@ -151,104 +129,9 @@ countKeyPackages :: m Int64 countKeyPackages u c = fromIntegral . length <$> getNonClaimedKeyPackages u c -derefKeyPackage :: MonadClient m => KeyPackageRef -> MaybeT m ClientIdentity -derefKeyPackage ref = do - (d, u, c) <- MaybeT . retry x1 $ query1 q (params LocalQuorum (Identity ref)) - pure $ ClientIdentity d u c - where - q :: PrepQuery R (Identity KeyPackageRef) (Domain, UserId, ClientId) - q = "SELECT domain, user, client from mls_key_package_refs WHERE ref = ?" - -keyPackageRefConvId :: MonadClient m => KeyPackageRef -> MaybeT m (Qualified ConvId) -keyPackageRefConvId ref = MaybeT $ do - qr <- retry x1 $ query1 q (params LocalSerial (Identity ref)) - pure $ do - (domain, cid) <- qr - Qualified <$> cid <*> domain - where - q :: PrepQuery R (Identity KeyPackageRef) (Maybe Domain, Maybe ConvId) - q = "SELECT conv_domain, conv FROM mls_key_package_refs WHERE ref = ?" - --- We want to proper update, not an upsert, to avoid "ghost" refs without user+client -keyPackageRefSetConvId :: MonadClient m => KeyPackageRef -> Qualified ConvId -> m Bool -keyPackageRefSetConvId ref convId = do - updated <- - retry x5 $ - trans - q - (params LocalQuorum (qDomain convId, qUnqualified convId, ref)) - { serialConsistency = Just LocalSerialConsistency - } - case updated of - [] -> pure False - [_] -> pure True - _ -> throwM $ ErrorCall "Primary key violation detected mls_key_package_refs.ref" - where - q :: PrepQuery W (Domain, ConvId, KeyPackageRef) x - q = "UPDATE mls_key_package_refs SET conv_domain = ?, conv = ? WHERE ref = ? 
IF EXISTS" - -addKeyPackageRef :: MonadClient m => KeyPackageRef -> NewKeyPackageRef -> m () -addKeyPackageRef ref nkpr = - retry x5 $ - write - q - (params LocalQuorum (nkprClientId nkpr, qUnqualified (nkprConversation nkpr), qDomain (nkprConversation nkpr), qDomain (nkprUserId nkpr), qUnqualified (nkprUserId nkpr), ref)) - where - q :: PrepQuery W (ClientId, ConvId, Domain, Domain, UserId, KeyPackageRef) x - q = "UPDATE mls_key_package_refs SET client = ?, conv = ?, conv_domain = ?, domain = ?, user = ? WHERE ref = ?" - --- | Update key package ref, used in Galley when commit reveals key package ref update for the sender. --- Nothing is changed if the previous key package ref is not found in the table. --- Updating amounts to INSERT the new key package ref, followed by DELETE the --- previous one. --- --- FUTUREWORK: this function has to be extended if a table mapping (client, --- conversation) to key package ref is added, for instance, when implementing --- external delete proposals. -updateKeyPackageRef :: MonadClient m => KeyPackageRef -> KeyPackageRef -> m () -updateKeyPackageRef prevRef newRef = - void . runMaybeT $ do - backup <- backupKeyPackageMeta prevRef - lift $ do - restoreKeyPackageMeta newRef backup - deleteKeyPackage prevRef - -deleteKeyPackageRef :: MonadClient m => KeyPackageRef -> m () -deleteKeyPackageRef ref = do - retry x5 $ - write q (params LocalQuorum (Identity ref)) - where - q :: PrepQuery W (Identity KeyPackageRef) x - q = "DELETE FROM mls_key_package_refs WHERE ref = ?" - -------------------------------------------------------------------------------- -- Utilities -backupKeyPackageMeta :: MonadClient m => KeyPackageRef -> MaybeT m (ClientId, Maybe (Qualified ConvId), Qualified UserId) -backupKeyPackageMeta ref = do - (clientId, convId, convDomain, userDomain, userId) <- MaybeT . 
retry x1 $ query1 q (params LocalQuorum (Identity ref)) - pure (clientId, Qualified <$> convId <*> convDomain, Qualified userId userDomain) - where - q :: PrepQuery R (Identity KeyPackageRef) (ClientId, Maybe ConvId, Maybe Domain, Domain, UserId) - q = "SELECT client, conv, conv_domain, domain, user FROM mls_key_package_refs WHERE ref = ?" - -restoreKeyPackageMeta :: MonadClient m => KeyPackageRef -> (ClientId, Maybe (Qualified ConvId), Qualified UserId) -> m () -restoreKeyPackageMeta ref (clientId, convId, userId) = do - write q (params LocalQuorum (ref, clientId, qUnqualified <$> convId, qDomain <$> convId, qDomain userId, qUnqualified userId)) - where - q :: PrepQuery W (KeyPackageRef, ClientId, Maybe ConvId, Maybe Domain, Domain, UserId) () - q = "INSERT INTO mls_key_package_refs (ref, client, conv, conv_domain, domain, user) VALUES (?, ?, ?, ?, ?, ?)" - -deleteKeyPackage :: MonadClient m => KeyPackageRef -> m () -deleteKeyPackage ref = - retry x5 $ - write - q - (params LocalQuorum (Identity ref)) - where - q :: PrepQuery W (Identity KeyPackageRef) x - q = "DELETE FROM mls_key_package_refs WHERE ref = ?" 
- pick :: [a] -> IO (Maybe a) pick [] = pure Nothing pick xs = do diff --git a/services/brig/test/integration/API/Internal.hs b/services/brig/test/integration/API/Internal.hs index 516b8934c9..b3dd9c1073 100644 --- a/services/brig/test/integration/API/Internal.hs +++ b/services/brig/test/integration/API/Internal.hs @@ -36,26 +36,21 @@ import qualified Cassandra as Cass import Cassandra.Util import Control.Exception (ErrorCall (ErrorCall), throwIO) import Control.Lens ((^.), (^?!)) -import Data.Aeson (decode) import qualified Data.Aeson.Lens as Aeson import qualified Data.Aeson.Types as Aeson import Data.ByteString.Conversion (toByteString') import Data.Default import Data.Id -import Data.Qualified (Qualified (qDomain, qUnqualified)) +import Data.Qualified import qualified Data.Set as Set import GHC.TypeLits (KnownSymbol) import Imports -import Servant.API (ToHttpApiData (toUrlPiece)) -import Test.QuickCheck (Arbitrary (arbitrary), generate) import Test.Tasty import Test.Tasty.HUnit import UnliftIO (withSystemTempDirectory) import Util import Util.Options (Endpoint) import qualified Wire.API.Connection as Conn -import Wire.API.MLS.Credential -import Wire.API.MLS.KeyPackage import Wire.API.Routes.Internal.Brig import Wire.API.Team.Feature import qualified Wire.API.Team.Feature as ApiFt @@ -74,14 +69,6 @@ tests opts mgr db brig brigep gundeck galley = do test mgr "suspend non existing user and verify no db entry" $ testSuspendNonExistingUser db brig, test mgr "mls/clients" $ testGetMlsClients brig, - testGroup - "mls/key-packages" - $ [ test mgr "fresh get" $ testKpcFreshGet brig, - test mgr "put,get" $ testKpcPutGet brig, - test mgr "get,get" $ testKpcGetGet brig, - test mgr "put,put" $ testKpcPutPut brig, - test mgr "add key package ref" $ testAddKeyPackageRef brig - ], test mgr "writetimeToInt64" $ testWritetimeRepresentation opts mgr db brig brigep galley ] @@ -256,118 +243,6 @@ testGetMlsClients brig = do ) liftIO $ toList cs1 @?= [ClientInfo c True] 
-keyPackageCreate :: HasCallStack => Brig -> Http KeyPackageRef -keyPackageCreate brig = do - uid <- userQualifiedId <$> randomUser brig - clid <- createClient brig uid 0 - withSystemTempDirectory "mls" $ \tmp -> - uploadKeyPackages brig tmp def uid clid 2 - - uid2 <- userQualifiedId <$> randomUser brig - claimResp <- - post - ( brig - . paths - [ "mls", - "key-packages", - "claim", - toByteString' (qDomain uid), - toByteString' (qUnqualified uid) - ] - . zUser (qUnqualified uid2) - . contentJson - ) - liftIO $ - assertEqual "POST mls/key-packages/claim/:domain/:user failed" 200 (statusCode claimResp) - case responseBody claimResp >>= decode of - Nothing -> liftIO $ assertFailure "Claim response empty" - Just bundle -> case toList $ kpbEntries bundle of - [] -> liftIO $ assertFailure "Claim response held no bundles" - (h : _) -> pure $ kpbeRef h - -kpcPut :: HasCallStack => Brig -> KeyPackageRef -> Qualified ConvId -> Http () -kpcPut brig ref qConv = do - resp <- - put - ( brig - . paths ["i", "mls", "key-packages", toByteString' $ toUrlPiece ref, "conversation"] - . contentJson - . json qConv - ) - liftIO $ assertEqual "PUT i/mls/key-packages/:ref/conversation failed" 204 (statusCode resp) - -kpcGet :: HasCallStack => Brig -> KeyPackageRef -> Http (Maybe (Qualified ConvId)) -kpcGet brig ref = do - resp <- - get (brig . 
paths ["i", "mls", "key-packages", toByteString' $ toUrlPiece ref, "conversation"]) - liftIO $ case statusCode resp of - 404 -> pure Nothing - 200 -> pure $ responseBody resp >>= decode - _ -> assertFailure "GET i/mls/key-packages/:ref/conversation failed" - -testKpcFreshGet :: Brig -> Http () -testKpcFreshGet brig = do - ref <- keyPackageCreate brig - mqConv <- kpcGet brig ref - liftIO $ assertEqual "(fresh) Get ~= Nothing" Nothing mqConv - -testKpcPutGet :: Brig -> Http () -testKpcPutGet brig = do - ref <- keyPackageCreate brig - qConv <- liftIO $ generate arbitrary - kpcPut brig ref qConv - mqConv <- kpcGet brig ref - liftIO $ assertEqual "Put x; Get ~= x" (Just qConv) mqConv - -testKpcGetGet :: Brig -> Http () -testKpcGetGet brig = do - ref <- keyPackageCreate brig - liftIO (generate arbitrary) >>= kpcPut brig ref - mqConv1 <- kpcGet brig ref - mqConv2 <- kpcGet brig ref - liftIO $ assertEqual "Get; Get ~= Get" mqConv1 mqConv2 - -testKpcPutPut :: Brig -> Http () -testKpcPutPut brig = do - ref <- keyPackageCreate brig - qConv <- liftIO $ generate arbitrary - qConv2 <- liftIO $ generate arbitrary - kpcPut brig ref qConv - kpcPut brig ref qConv2 - mqConv <- kpcGet brig ref - liftIO $ assertEqual "Put x; Put y ~= Put y" (Just qConv2) mqConv - -testAddKeyPackageRef :: Brig -> Http () -testAddKeyPackageRef brig = do - ref <- keyPackageCreate brig - qcnv <- liftIO $ generate arbitrary - qusr <- liftIO $ generate arbitrary - c <- liftIO $ generate arbitrary - put - ( brig - . paths ["i", "mls", "key-packages", toByteString' $ toUrlPiece ref] - . json - NewKeyPackageRef - { nkprUserId = qusr, - nkprClientId = c, - nkprConversation = qcnv - } - ) - !!! const 201 === statusCode - ci <- - responseJsonError - =<< get (brig . paths ["i", "mls", "key-packages", toByteString' $ toUrlPiece ref]) - (Request -> Request) -> UserId -> m ResponseLBS getFeatureConfig galley uid = do get $ apiVersion "v1" . galley . paths ["feature-configs", featureNameBS @cfg] . 
zUser uid diff --git a/services/galley/galley.cabal b/services/galley/galley.cabal index 5af98a02c4..0ac0530eb5 100644 --- a/services/galley/galley.cabal +++ b/services/galley/galley.cabal @@ -87,7 +87,6 @@ library Galley.API.MLS.Conversation Galley.API.MLS.Enabled Galley.API.MLS.GroupInfo - Galley.API.MLS.KeyPackage Galley.API.MLS.Keys Galley.API.MLS.Message Galley.API.MLS.Propagate diff --git a/services/galley/src/Galley/API/MLS/KeyPackage.hs b/services/galley/src/Galley/API/MLS/KeyPackage.hs deleted file mode 100644 index 23fe2760c0..0000000000 --- a/services/galley/src/Galley/API/MLS/KeyPackage.hs +++ /dev/null @@ -1,38 +0,0 @@ --- This file is part of the Wire Server implementation. --- --- Copyright (C) 2022 Wire Swiss GmbH --- --- This program is free software: you can redistribute it and/or modify it under --- the terms of the GNU Affero General Public License as published by the Free --- Software Foundation, either version 3 of the License, or (at your option) any --- later version. --- --- This program is distributed in the hope that it will be useful, but WITHOUT --- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS --- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more --- details. --- --- You should have received a copy of the GNU Affero General Public License along --- with this program. If not, see . 
- -module Galley.API.MLS.KeyPackage where - -import qualified Data.ByteString as BS -import Galley.Effects.BrigAccess -import Imports -import Polysemy -import Wire.API.Error -import Wire.API.Error.Galley -import Wire.API.MLS.Credential -import Wire.API.MLS.KeyPackage - -nullKeyPackageRef :: KeyPackageRef -nullKeyPackageRef = KeyPackageRef (BS.replicate 16 0) - -derefKeyPackage :: - ( Member BrigAccess r, - Member (ErrorS 'MLSKeyPackageRefNotFound) r - ) => - KeyPackageRef -> - Sem r ClientIdentity -derefKeyPackage = noteS @'MLSKeyPackageRefNotFound <=< getClientByKeyPackageRef diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 8ecd745972..3f2759f9d3 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -116,7 +116,7 @@ import Wire.API.User.Client -- [ ] validate proposals when processing proposal and commit messages -- [ ] remove MissingSenderClient error -- [ ] PreSharedKey proposal --- [ ] remove all key package ref mapping +-- [x] remove all key package ref mapping -- [x] initialise index maps -- [ ] newtype for leaf node indices -- [x] compute new indices for add proposals diff --git a/services/galley/src/Galley/Effects/BrigAccess.hs b/services/galley/src/Galley/Effects/BrigAccess.hs index 221eb55d02..8631ef1f7d 100644 --- a/services/galley/src/Galley/Effects/BrigAccess.hs +++ b/services/galley/src/Galley/Effects/BrigAccess.hs @@ -48,11 +48,7 @@ module Galley.Effects.BrigAccess removeLegalHoldClientFromUser, -- * MLS - getClientByKeyPackageRef, getLocalMLSClients, - addKeyPackageRef, - updateKeyPackageRef, - deleteKeyPackageRefs, -- * Features getAccountConferenceCallingConfigClient, @@ -73,8 +69,6 @@ import Polysemy.Error import Wire.API.Connection import Wire.API.Error.Galley import Wire.API.MLS.CipherSuite -import Wire.API.MLS.Credential -import Wire.API.MLS.KeyPackage import Wire.API.Routes.Internal.Brig.Connection import qualified 
Wire.API.Routes.Internal.Galley.TeamFeatureNoConfigMulti as Multi import Wire.API.Team.Feature @@ -129,11 +123,7 @@ data BrigAccess m a where BrigAccess m (Either AuthenticationError ClientId) RemoveLegalHoldClientFromUser :: UserId -> BrigAccess m () GetAccountConferenceCallingConfigClient :: UserId -> BrigAccess m (WithStatusNoLock ConferenceCallingConfig) - GetClientByKeyPackageRef :: KeyPackageRef -> BrigAccess m (Maybe ClientIdentity) GetLocalMLSClients :: Local UserId -> SignatureSchemeTag -> BrigAccess m (Set ClientInfo) - AddKeyPackageRef :: KeyPackageRef -> Qualified UserId -> ClientId -> Qualified ConvId -> BrigAccess m () - UpdateKeyPackageRef :: KeyPackageUpdate -> BrigAccess m () - DeleteKeyPackageRefs :: [KeyPackageRef] -> BrigAccess m () UpdateSearchVisibilityInbound :: Multi.TeamStatus SearchVisibilityInboundConfig -> BrigAccess m () diff --git a/services/galley/src/Galley/Intra/Client.hs b/services/galley/src/Galley/Intra/Client.hs index 2f105507cc..96cce82ece 100644 --- a/services/galley/src/Galley/Intra/Client.hs +++ b/services/galley/src/Galley/Intra/Client.hs @@ -22,11 +22,7 @@ module Galley.Intra.Client addLegalHoldClientToUser, removeLegalHoldClientFromUser, getLegalHoldAuthToken, - getClientByKeyPackageRef, getLocalMLSClients, - addKeyPackageRef, - updateKeyPackageRef, - deleteKeyPackageRefs, ) where @@ -54,13 +50,9 @@ import Polysemy import Polysemy.Error import Polysemy.Input import qualified Polysemy.TinyLog as P -import Servant import qualified System.Logger.Class as Logger import Wire.API.Error.Galley import Wire.API.MLS.CipherSuite -import Wire.API.MLS.Credential -import Wire.API.MLS.KeyPackage -import Wire.API.Routes.Internal.Brig import Wire.API.User.Auth.LegalHold import Wire.API.User.Client import Wire.API.User.Client.Prekey @@ -178,18 +170,6 @@ brigAddClient uid connId client = do then Right <$> parseResponse (mkError status502 "server-error") r else pure (Left ReAuthFailed) --- | Calls 
'Brig.API.Internal.getClientByKeyPackageRef'. -getClientByKeyPackageRef :: KeyPackageRef -> App (Maybe ClientIdentity) -getClientByKeyPackageRef ref = do - r <- - call Brig $ - method GET - . paths ["i", "mls", "key-packages", toHeader ref] - . expectStatus (flip elem [200, 404]) - if statusCode (responseStatus r) == 200 - then Just <$> parseResponse (mkError status502 "server-error") r - else pure Nothing - -- | Calls 'Brig.API.Internal.getMLSClients'. getLocalMLSClients :: Local UserId -> SignatureSchemeTag -> App (Set ClientInfo) getLocalMLSClients lusr ss = @@ -206,36 +186,3 @@ getLocalMLSClients lusr ss = . expect2xx ) >>= parseResponse (mkError status502 "server-error") - -deleteKeyPackageRefs :: [KeyPackageRef] -> App () -deleteKeyPackageRefs refs = - void $ - call - Brig - ( method DELETE - . paths ["i", "mls", "key-packages"] - . json (DeleteKeyPackageRefsRequest refs) - . expect2xx - ) - -addKeyPackageRef :: KeyPackageRef -> Qualified UserId -> ClientId -> Qualified ConvId -> App () -addKeyPackageRef ref qusr cl qcnv = - void $ - call - Brig - ( method PUT - . paths ["i", "mls", "key-packages", toHeader ref] - . json (NewKeyPackageRef qusr cl qcnv) - . expect2xx - ) - -updateKeyPackageRef :: KeyPackageUpdate -> App () -updateKeyPackageRef keyPackageRef = - void $ - call - Brig - ( method POST - . paths ["i", "mls", "key-packages", toHeader $ kpupPrevious keyPackageRef] - . json (kpupNext keyPackageRef) - . 
expect2xx - ) diff --git a/services/galley/src/Galley/Intra/Effects.hs b/services/galley/src/Galley/Intra/Effects.hs index ae87f559d9..782228140c 100644 --- a/services/galley/src/Galley/Intra/Effects.hs +++ b/services/galley/src/Galley/Intra/Effects.hs @@ -80,18 +80,7 @@ interpretBrigAccess = interpret $ \case embedApp $ removeLegalHoldClientFromUser uid GetAccountConferenceCallingConfigClient uid -> embedApp $ getAccountConferenceCallingConfigClient uid - GetClientByKeyPackageRef ref -> - embedApp $ getClientByKeyPackageRef ref GetLocalMLSClients qusr ss -> embedApp $ getLocalMLSClients qusr ss - AddKeyPackageRef ref qusr cl qcnv -> - embedApp $ - addKeyPackageRef ref qusr cl qcnv - UpdateKeyPackageRef update -> - embedApp $ - updateKeyPackageRef update - DeleteKeyPackageRefs refs -> - embedApp $ - deleteKeyPackageRefs refs UpdateSearchVisibilityInbound status -> embedApp $ updateSearchVisibilityInbound status diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index 923b727e03..3b58317a19 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -91,20 +91,6 @@ cid2Str cid = <> "@" <> T.unpack (domainText (ciDomain cid)) -mapRemoteKeyPackageRef :: - (MonadIO m, MonadHttp m, MonadCatch m) => - (Request -> Request) -> - KeyPackageBundle -> - m () -mapRemoteKeyPackageRef brig bundle = - void $ - put - ( brig - . paths ["i", "mls", "key-package-refs"] - . json bundle - ) - !!! const 204 === statusCode - postMessage :: ( HasCallStack, MonadIO m, @@ -551,7 +537,6 @@ getUserClients qusr = do -- | Generate one key package for each client of a remote user claimRemoteKeyPackages :: HasCallStack => Remote UserId -> MLSTest KeyPackageBundle claimRemoteKeyPackages (tUntagged -> qusr) = do - brig <- viewBrig clients <- getUserClients qusr bundle <- fmap (KeyPackageBundle . 
Set.fromList) $ for clients $ \cid -> do @@ -563,7 +548,6 @@ claimRemoteKeyPackages (tUntagged -> qusr) = do kpbeRef = ref, kpbeKeyPackage = KeyPackageData (rmRaw kp) } - mapRemoteKeyPackageRef brig bundle pure bundle -- | Claim key package for a local user, or generate and map key packages for remote ones. From d93febf4dd1bcd7209f113da92019f72595be9ce Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Tue, 18 Apr 2023 08:40:39 +0000 Subject: [PATCH 26/75] fix more integration tests --- libs/wire-api/src/Wire/API/Error/Galley.hs | 2 +- services/galley/src/Galley/API/MLS/Message.hs | 59 ++++++++----------- services/galley/test/integration/API/MLS.hs | 14 ++--- 3 files changed, 31 insertions(+), 44 deletions(-) diff --git a/libs/wire-api/src/Wire/API/Error/Galley.hs b/libs/wire-api/src/Wire/API/Error/Galley.hs index 9c0d317fa1..8a92fa13ba 100644 --- a/libs/wire-api/src/Wire/API/Error/Galley.hs +++ b/libs/wire-api/src/Wire/API/Error/Galley.hs @@ -204,7 +204,7 @@ type instance MapError 'MLSDuplicatePublicKey = 'StaticError 400 "mls-duplicate- type instance MapError 'MLSKeyPackageRefNotFound = 'StaticError 404 "mls-key-package-ref-not-found" "A referenced key package could not be mapped to a known client" -type instance MapError 'MLSInvalidLeafNodeIndex = 'StaticError 400 "mls-invalid-leaf-node-index" "A referenced leaf node index points to a black or non-existing node" +type instance MapError 'MLSInvalidLeafNodeIndex = 'StaticError 400 "mls-invalid-leaf-node-index" "A referenced leaf node index points to a blank or non-existing node" type instance MapError 'MLSUnsupportedMessage = 'StaticError 422 "mls-unsupported-message" "Attempted to send a message with an unsupported combination of content type and wire format" diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 3f2759f9d3..6946fd785d 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ 
-710,21 +710,18 @@ getExternalCommitData senderIdentity lConvOrSub epoch commit = do unless (null (Map.keys counts \\ allowedProposals)) $ throw (mlsProtocolError "Invalid proposal type in an external commit") - action <- - evalState (indexMapConvOrSub convOrSub) $ do - -- process optional removal - propAction <- applyProposals mlsMeta groupId proposals - -- add sender - selfAction <- addProposedClient senderIdentity - case cmAssocs (paRemove propAction) of - [(cid, _)] - | cid /= senderIdentity -> - throw $ mlsProtocolError "Only the self client can be removed by an external commit" - _ -> pure () - - pure $ propAction <> selfAction - - pure action + evalState (indexMapConvOrSub convOrSub) $ do + -- process optional removal + propAction <- applyProposals mlsMeta groupId proposals + -- add sender + selfAction <- addProposedClient senderIdentity + case cmAssocs (paRemove propAction) of + [(cid, _)] + | cid /= senderIdentity -> + throw $ mlsProtocolError "Only the self client can be removed by an external commit" + _ -> pure () + + pure $ propAction <> selfAction where allowedProposals = [ExternalInitProposalTag, RemoveProposalTag, PreSharedKeyProposalTag] @@ -735,22 +732,10 @@ getExternalCommitData senderIdentity lConvOrSub epoch commit = do processExternalCommit :: forall r. 
- ( Member ConversationStore r, - Member (Error MLSProtocolError) r, - Member (ErrorS 'ConvNotFound) r, - Member (Error InternalError) r, - Member (ErrorS 'MLSStaleMessage) r, + ( Member (ErrorS 'MLSStaleMessage) r, Member (ErrorS 'MLSSubConvClientNotInParent) r, - Member ExternalAccess r, - Member FederatorAccess r, - Member GundeckAccess r, - Member (Input Env) r, - Member (Input UTCTime) r, - Member MemberStore r, - Member ProposalStore r, Member Resource r, - Member SubConversationStore r, - Member TinyLog r + HasProposalActionEffects r ) => ClientIdentity -> Local ConvOrSubConv -> @@ -787,6 +772,9 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do Right _ -> pure () withCommitLock groupId epoch $ do + -- no events for external commits + void $ executeProposalAction senderIdentity Nothing lConvOrSub action + let remIndices = map snd (cmAssocs (paRemove action)) -- increment epoch number @@ -836,7 +824,7 @@ processInternalCommit senderIdentity con lConvOrSub epoch action commit = do throwS @'MLSCommitMissingReferences -- process and execute proposals - updates <- executeProposalAction (cidQualifiedUser senderIdentity) con lConvOrSub action + updates <- executeProposalAction senderIdentity con lConvOrSub action -- increment epoch number for_ lConvOrSub incrementEpoch @@ -1045,20 +1033,21 @@ type HasProposalActionEffects r = executeProposalAction :: forall r. 
HasProposalActionEffects r => - Qualified UserId -> + ClientIdentity -> Maybe ConnId -> Local ConvOrSubConv -> ProposalAction -> Sem r [LocalConversationUpdate] -executeProposalAction qusr con lconvOrSub action = do - let convOrSub = tUnqualified lconvOrSub +executeProposalAction senderIdentity con lconvOrSub action = do + let qusr = cidQualifiedUser senderIdentity + convOrSub = tUnqualified lconvOrSub mlsMeta = mlsMetaConvOrSub convOrSub cm = membersConvOrSub convOrSub ss = csSignatureScheme (cnvmlsCipherSuite mlsMeta) newUserClients = Map.assocs (paAdd action) -- no client can be directly added to a subconversation - when (is _SubConv convOrSub && not (null newUserClients)) $ + when (is _SubConv convOrSub && any ((senderIdentity /=) . fst) (cmAssocs (paAdd action))) $ throw (mlsProtocolError "Add proposals in subconversations are not supported") -- Note [client removal] @@ -1178,7 +1167,7 @@ executeProposalAction qusr con lconvOrSub action = do when (not isSubConv && clients /= clientsInConv) $ do -- FUTUREWORK: turn this error into a proper response throwS @'MLSClientMismatch - when (qusr == qtarget) $ + when (cidQualifiedUser senderIdentity == qtarget) $ throwS @'MLSSelfRemovalNotAllowed pure (Just qtarget) diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 765486e3d4..8cfaf2a859 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -928,6 +928,7 @@ testLocalToRemoteNonMember = do . paths ["mls", "messages"] . zUser (qUnqualified bob) . zConn "conn" + . zClient (ciClient bob1) . Bilge.content "message/mls" . 
bytes (mpMessage message) ) @@ -2132,8 +2133,9 @@ testSelfConversationOtherUser = do void $ uploadNewKeyPackage bob1 void $ setupMLSSelfGroup alice1 commit <- createAddCommit alice1 [bob] + bundle <- createBundle commit mlsBracket [alice1, bob1] $ \wss -> do - postMessage (mpSender commit) (mpMessage commit) + localPostCommitBundle (mpSender commit) bundle !!! do const 403 === statusCode const (Just "invalid-op") === fmap Wai.label . responseJsonError @@ -2275,7 +2277,6 @@ testJoinSubConv = do resetGroup bob1 (fmap (flip SubConv subId) qcnv) (pscGroupId sub) - bobRefsBefore <- getClientsFromGroupState bob1 bob -- bob adds his first client to the subconversation void $ createPendingProposalCommit bob1 >>= sendAndConsumeCommitBundle @@ -2284,11 +2285,8 @@ testJoinSubConv = do responseJsonError =<< getSubConv (qUnqualified bob) qcnv subId >= sendAndConsumeCommitBundle - Just (_, kpBob1) <- find (\(ci, _) -> ci == bob1) <$> getClientsFromGroupState alice1 bob + Just (_, idxBob1) <- find (\(ci, _) -> ci == bob1) <$> getClientsFromGroupState alice1 bob -- bob1 leaves and immediately rejoins mlsBracket [alice1, bob1] $ \[wsA, wsB] -> do void $ leaveCurrentConv bob1 qsub WS.assertMatchN_ (5 # WS.Second) [wsA] $ - wsAssertBackendRemoveProposal bob qsub kpBob1 + wsAssertBackendRemoveProposal bob qsub idxBob1 void $ createExternalCommit bob1 Nothing qsub >>= sendAndConsumeCommitBundle From d03d750187bc623f2eb3dccd75fbca24173a71b8 Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Tue, 18 Apr 2023 15:08:31 +0000 Subject: [PATCH 27/75] track client scheduled for removal in Cassandra [ ] conversations [x] subconversations --- services/galley/schema/src/V82_MLSDraft17.hs | 3 ++- .../galley/src/Galley/API/MLS/Conversation.hs | 4 ++-- services/galley/src/Galley/API/MLS/Removal.hs | 16 ++++++++++++---- .../src/Galley/API/MLS/SubConversation.hs | 4 ++-- services/galley/src/Galley/API/MLS/Types.hs | 16 ++++++++++------ .../src/Galley/Cassandra/Conversation/MLS.hs | 17 
++++++++++++----- .../Galley/Cassandra/Conversation/Members.hs | 16 +++++++++++++++- services/galley/src/Galley/Cassandra/Queries.hs | 9 ++++++--- .../src/Galley/Cassandra/SubConversation.hs | 6 +++--- .../galley/src/Galley/Effects/MemberStore.hs | 6 ++++++ 10 files changed, 70 insertions(+), 27 deletions(-) diff --git a/services/galley/schema/src/V82_MLSDraft17.hs b/services/galley/schema/src/V82_MLSDraft17.hs index c8f4410e20..b277d89cf2 100644 --- a/services/galley/schema/src/V82_MLSDraft17.hs +++ b/services/galley/schema/src/V82_MLSDraft17.hs @@ -26,6 +26,7 @@ migration = Migration 82 "Upgrade to MLS draft 17 structures" $ do schema' [r| ALTER TABLE mls_group_member_client - ADD (leaf_node_index int + ADD (leaf_node_index int, + removal_pending boolean ); |] diff --git a/services/galley/src/Galley/API/MLS/Conversation.hs b/services/galley/src/Galley/API/MLS/Conversation.hs index f63c038244..5d91d1e4ba 100644 --- a/services/galley/src/Galley/API/MLS/Conversation.hs +++ b/services/galley/src/Galley/API/MLS/Conversation.hs @@ -34,7 +34,7 @@ mkMLSConversation :: Sem r (Maybe MLSConversation) mkMLSConversation conv = for (Data.mlsMetadata conv) $ \mlsData -> do - cm <- lookupMLSClients (cnvmlsGroupId mlsData) + (cm, im) <- lookupMLSClientLeafIndices (cnvmlsGroupId mlsData) pure MLSConversation { mcId = Data.convId conv, @@ -43,7 +43,7 @@ mkMLSConversation conv = mcRemoteMembers = Data.convRemoteMembers conv, mcMLSData = mlsData, mcMembers = cm, - mcIndexMap = mkIndexMap cm + mcIndexMap = im } mcConv :: MLSConversation -> Data.Conversation diff --git a/services/galley/src/Galley/API/MLS/Removal.hs b/services/galley/src/Galley/API/MLS/Removal.hs index 6a4ab73530..a862fd30a6 100644 --- a/services/galley/src/Galley/API/MLS/Removal.hs +++ b/services/galley/src/Galley/API/MLS/Removal.hs @@ -32,9 +32,9 @@ import Galley.API.MLS.Propagate import Galley.API.MLS.Types import qualified Galley.Data.Conversation.Types as Data import Galley.Effects +import 
Galley.Effects.MemberStore import Galley.Effects.ProposalStore import Galley.Effects.SubConversationStore -import qualified Galley.Effects.SubConversationStore as E import Galley.Env import Imports import Polysemy @@ -44,6 +44,7 @@ import qualified System.Logger as Log import Wire.API.Conversation.Protocol import Wire.API.MLS.AuthenticatedContent import Wire.API.MLS.Credential +import Wire.API.MLS.LeafNode import Wire.API.MLS.Message import Wire.API.MLS.Proposal import Wire.API.MLS.Serialisation @@ -61,7 +62,7 @@ createAndSendRemoveProposals :: Foldable t ) => Local ConvOrSubConv -> - t Word32 -> + t LeafIndex -> Qualified UserId -> -- | The client map that has all the recipients of the message. This is an -- argument, and not constructed within the function, because of a special @@ -104,6 +105,7 @@ removeClientsWithClientMapRecursively :: ExternalAccess, FederatorAccess, GundeckAccess, + MemberStore, ProposalStore, SubConversationStore, Input Env @@ -112,19 +114,25 @@ removeClientsWithClientMapRecursively :: Foldable f ) => Local MLSConversation -> - (ConvOrSubConv -> f Word32) -> + (ConvOrSubConv -> f LeafIndex) -> Qualified UserId -> Sem r () removeClientsWithClientMapRecursively lMlsConv getIndices qusr = do let mainConv = fmap Conv lMlsConv cm = mcMembers (tUnqualified lMlsConv) + cs = foldMap Map.keysSet $ Map.lookup qusr cm + gid = cnvmlsGroupId . mcMLSData . tUnqualified $ lMlsConv + + planClientRemoval gid qusr cs createAndSendRemoveProposals mainConv (getIndices (tUnqualified mainConv)) qusr cm -- remove this client from all subconversations subs <- listSubConversations' (mcId (tUnqualified lMlsConv)) for_ subs $ \sub -> do let subConv = fmap (flip SubConv sub) lMlsConv + sgid = cnvmlsGroupId . 
scMLSData $ sub + planClientRemoval sgid qusr cs createAndSendRemoveProposals subConv (getIndices (tUnqualified subConv)) @@ -180,7 +188,7 @@ listSubConversations' :: ConvId -> Sem r [SubConversation] listSubConversations' cid = do - subs <- E.listSubConversations cid + subs <- listSubConversations cid msubs <- for (Map.assocs subs) $ \(subId, _) -> do getSubConversation cid subId pure (catMaybes msubs) diff --git a/services/galley/src/Galley/API/MLS/SubConversation.hs b/services/galley/src/Galley/API/MLS/SubConversation.hs index c3fc90c08d..19e171a5fe 100644 --- a/services/galley/src/Galley/API/MLS/SubConversation.hs +++ b/services/galley/src/Galley/API/MLS/SubConversation.hs @@ -427,9 +427,9 @@ leaveLocalSubConversation cid lcnv sub = do idx <- note (mlsProtocolError "Client is not a member of the subconversation") $ cmLookupIndex cid (scMembers subConv) - -- remove the leaver from the member list let (gid, epoch) = (cnvmlsGroupId &&& cnvmlsEpoch) (scMLSData subConv) - Eff.removeMLSClients gid (cidQualifiedUser cid) . Set.singleton . ciClient $ cid + -- plan to remove the leaver from the member list + Eff.planClientRemoval gid (cidQualifiedUser cid) . Set.singleton . ciClient $ cid let cm = cmRemoveClient cid (scMembers subConv) if Map.null cm then do diff --git a/services/galley/src/Galley/API/MLS/Types.hs b/services/galley/src/Galley/API/MLS/Types.hs index cf9be1b49e..49591176ff 100644 --- a/services/galley/src/Galley/API/MLS/Types.hs +++ b/services/galley/src/Galley/API/MLS/Types.hs @@ -36,8 +36,11 @@ newtype IndexMap = IndexMap {unIndexMap :: IntMap ClientIdentity} deriving (Eq, Show) deriving newtype (Semigroup, Monoid) -mkIndexMap :: ClientMap -> IndexMap -mkIndexMap = IndexMap . IntMap.fromList . map (swap . fmap fromIntegral) . cmAssocs +mkIndexMap :: [(Domain, UserId, ClientId, Int32, Bool)] -> IndexMap +mkIndexMap = IndexMap . 
foldr addEntry mempty + where + addEntry (dom, usr, c, leafidx, _pending_removal) = + IntMap.insert (fromIntegral leafidx) (ClientIdentity dom usr c) imLookup :: IndexMap -> LeafIndex -> Maybe ClientIdentity imLookup m i = IntMap.lookup (fromIntegral i) (unIndexMap m) @@ -57,12 +60,13 @@ imRemoveClient im idx = do type ClientMap = Map (Qualified UserId) (Map ClientId LeafIndex) -mkClientMap :: [(Domain, UserId, ClientId, Int32)] -> ClientMap +mkClientMap :: [(Domain, UserId, ClientId, Int32, Bool)] -> ClientMap mkClientMap = foldr addEntry mempty where - addEntry :: (Domain, UserId, ClientId, Int32) -> ClientMap -> ClientMap - addEntry (dom, usr, c, kpi) = - Map.insertWith (<>) (Qualified usr dom) (Map.singleton c (fromIntegral kpi)) + addEntry :: (Domain, UserId, ClientId, Int32, Bool) -> ClientMap -> ClientMap + addEntry (dom, usr, c, leafidx, pending_removal) + | pending_removal = id -- treat as removed, don't add to ClientMap + | otherwise = Map.insertWith (<>) (Qualified usr dom) (Map.singleton c (fromIntegral leafidx)) cmLookupIndex :: ClientIdentity -> ClientMap -> Maybe LeafIndex cmLookupIndex cid cm = do diff --git a/services/galley/src/Galley/Cassandra/Conversation/MLS.hs b/services/galley/src/Galley/Cassandra/Conversation/MLS.hs index 7ca5f89d35..80d488c728 100644 --- a/services/galley/src/Galley/Cassandra/Conversation/MLS.hs +++ b/services/galley/src/Galley/Cassandra/Conversation/MLS.hs @@ -19,11 +19,14 @@ module Galley.Cassandra.Conversation.MLS ( acquireCommitLock, releaseCommitLock, lookupMLSClients, + lookupMLSLeafIndices, + lookupMLSClientLeafIndices, ) where import Cassandra import Cassandra.Settings (fromRow) +import Control.Arrow import Data.Time import Galley.API.MLS.Types import qualified Galley.Cassandra.Queries as Cql @@ -61,9 +64,13 @@ checkTransSuccess :: [Row] -> Bool checkTransSuccess [] = False checkTransSuccess (row : _) = either (const False) (fromMaybe False) $ fromRow 0 row +lookupMLSClientLeafIndices :: GroupId -> Client 
(ClientMap, IndexMap) +lookupMLSClientLeafIndices groupId = do + entries <- retry x5 (query Cql.lookupMLSClients (params LocalQuorum (Identity groupId))) + pure $ (mkClientMap &&& mkIndexMap) entries + lookupMLSClients :: GroupId -> Client ClientMap -lookupMLSClients groupId = - mkClientMap - <$> retry - x5 - (query Cql.lookupMLSClients (params LocalQuorum (Identity groupId))) +lookupMLSClients = fmap fst . lookupMLSClientLeafIndices + +lookupMLSLeafIndices :: GroupId -> Client IndexMap +lookupMLSLeafIndices = fmap snd . lookupMLSClientLeafIndices diff --git a/services/galley/src/Galley/Cassandra/Conversation/Members.hs b/services/galley/src/Galley/Cassandra/Conversation/Members.hs index 7d0eee8260..fe13423776 100644 --- a/services/galley/src/Galley/Cassandra/Conversation/Members.hs +++ b/services/galley/src/Galley/Cassandra/Conversation/Members.hs @@ -32,7 +32,7 @@ import qualified Data.List.Extra as List import Data.Monoid import Data.Qualified import qualified Data.Set as Set -import Galley.Cassandra.Conversation.MLS (lookupMLSClients) +import Galley.Cassandra.Conversation.MLS import Galley.Cassandra.Instances () import qualified Galley.Cassandra.Queries as Cql import Galley.Cassandra.Services @@ -348,6 +348,17 @@ addMLSClients groupId (Qualified usr domain) cs = retry x5 . batch $ do for_ cs $ \(c, idx) -> addPrepQuery Cql.addMLSClient (groupId, domain, usr, c, fromIntegral idx) +-- TODO Could (and should) we use batch instead? +planMLSClientRemoval :: GroupId -> Qualified UserId -> Set.Set ClientId -> Client () +planMLSClientRemoval groupId (Qualified usr domain) cs = for_ cs $ \c -> do + retry x5 $ + trans + Cql.planMLSClientRemoval + ( params + LocalQuorum + (groupId, domain, usr, c) + ) + removeMLSClients :: GroupId -> Qualified UserId -> Set.Set ClientId -> Client () removeMLSClients groupId (Qualified usr domain) cs = retry x5 . 
batch $ do setType BatchLogged @@ -384,6 +395,9 @@ interpretMemberStoreToCassandra = interpret $ \case embedClient $ removeLocalMembersFromRemoteConv rcnv uids AddMLSClients lcnv quid cs -> embedClient $ addMLSClients lcnv quid cs + PlanClientRemoval lcnv quid cs -> embedClient $ planMLSClientRemoval lcnv quid cs RemoveMLSClients lcnv quid cs -> embedClient $ removeMLSClients lcnv quid cs RemoveAllMLSClients gid -> embedClient $ removeAllMLSClients gid LookupMLSClients lcnv -> embedClient $ lookupMLSClients lcnv + LookupMLSLeafIndices lcnv -> embedClient $ lookupMLSLeafIndices lcnv + LookupMLSClientLeafIndices lcnv -> embedClient $ lookupMLSClientLeafIndices lcnv diff --git a/services/galley/src/Galley/Cassandra/Queries.hs b/services/galley/src/Galley/Cassandra/Queries.hs index d05e2f41ca..53e3ba0bfb 100644 --- a/services/galley/src/Galley/Cassandra/Queries.hs +++ b/services/galley/src/Galley/Cassandra/Queries.hs @@ -462,7 +462,10 @@ rmMemberClient c = -- MLS Clients -------------------------------------------------------------- addMLSClient :: PrepQuery W (GroupId, Domain, UserId, ClientId, Int32) () -addMLSClient = "insert into mls_group_member_client (group_id, user_domain, user, client, leaf_node_index) values (?, ?, ?, ?, ?)" +addMLSClient = "insert into mls_group_member_client (group_id, user_domain, user, client, leaf_node_index, removal_pending) values (?, ?, ?, ?, ?, false)" + +planMLSClientRemoval :: PrepQuery W (GroupId, Domain, UserId, ClientId) Row +planMLSClientRemoval = "update mls_group_member_client set removal_pending = true where group_id = ? and user_domain = ? and user = ? and client = ? if exists" removeMLSClient :: PrepQuery W (GroupId, Domain, UserId, ClientId) () removeMLSClient = "delete from mls_group_member_client where group_id = ? and user_domain = ? and user = ? and client = ?" @@ -470,8 +473,8 @@ removeMLSClient = "delete from mls_group_member_client where group_id = ? 
and us removeAllMLSClients :: PrepQuery W (Identity GroupId) () removeAllMLSClients = "DELETE FROM mls_group_member_client WHERE group_id = ?" -lookupMLSClients :: PrepQuery R (Identity GroupId) (Domain, UserId, ClientId, Int32) -lookupMLSClients = "select user_domain, user, client, leaf_node_index from mls_group_member_client where group_id = ?" +lookupMLSClients :: PrepQuery R (Identity GroupId) (Domain, UserId, ClientId, Int32, Bool) +lookupMLSClients = "select user_domain, user, client, leaf_node_index, removal_pending from mls_group_member_client where group_id = ?" acquireCommitLock :: PrepQuery W (GroupId, Epoch, Int32) Row acquireCommitLock = "insert into mls_commit_locks (group_id, epoch) values (?, ?) if not exists using ttl ?" diff --git a/services/galley/src/Galley/Cassandra/SubConversation.hs b/services/galley/src/Galley/Cassandra/SubConversation.hs index f445d0ce88..bca060e56e 100644 --- a/services/galley/src/Galley/Cassandra/SubConversation.hs +++ b/services/galley/src/Galley/Cassandra/SubConversation.hs @@ -27,7 +27,7 @@ import qualified Data.Map as Map import Data.Qualified import Data.Time.Clock import Galley.API.MLS.Types -import Galley.Cassandra.Conversation.MLS (lookupMLSClients) +import Galley.Cassandra.Conversation.MLS import qualified Galley.Cassandra.Queries as Cql import Galley.Cassandra.Store (embedClient) import Galley.Effects.SubConversationStore (SubConversationStore (..)) @@ -44,7 +44,7 @@ selectSubConversation :: ConvId -> SubConvId -> Client (Maybe SubConversation) selectSubConversation convId subConvId = do m <- retry x5 (query1 Cql.selectSubConversation (params LocalQuorum (convId, subConvId))) for m $ \(suite, epoch, epochWritetime, groupId) -> do - cm <- lookupMLSClients groupId + (cm, im) <- lookupMLSClientLeafIndices groupId pure $ SubConversation { scParentConvId = convId, @@ -57,7 +57,7 @@ selectSubConversation convId subConvId = do cnvmlsCipherSuite = suite }, scMembers = cm, - scIndexMap = mkIndexMap cm + scIndexMap = im } 
insertSubConversation :: diff --git a/services/galley/src/Galley/Effects/MemberStore.hs b/services/galley/src/Galley/Effects/MemberStore.hs index 10b7e168d2..8700c31459 100644 --- a/services/galley/src/Galley/Effects/MemberStore.hs +++ b/services/galley/src/Galley/Effects/MemberStore.hs @@ -39,9 +39,12 @@ module Galley.Effects.MemberStore setSelfMember, setOtherMember, addMLSClients, + planClientRemoval, removeMLSClients, removeAllMLSClients, lookupMLSClients, + lookupMLSLeafIndices, + lookupMLSClientLeafIndices, -- * Delete members deleteMembers, @@ -77,9 +80,12 @@ data MemberStore m a where DeleteMembers :: ConvId -> UserList UserId -> MemberStore m () DeleteMembersInRemoteConversation :: Remote ConvId -> [UserId] -> MemberStore m () AddMLSClients :: GroupId -> Qualified UserId -> Set (ClientId, Word32) -> MemberStore m () + PlanClientRemoval :: GroupId -> Qualified UserId -> Set ClientId -> MemberStore m () RemoveMLSClients :: GroupId -> Qualified UserId -> Set ClientId -> MemberStore m () RemoveAllMLSClients :: GroupId -> MemberStore m () LookupMLSClients :: GroupId -> MemberStore m ClientMap + LookupMLSLeafIndices :: GroupId -> MemberStore m IndexMap + LookupMLSClientLeafIndices :: GroupId -> MemberStore m (ClientMap, IndexMap) makeSem ''MemberStore From 59258c3551df9691345d8b51178f54c294b0a26d Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Thu, 20 Apr 2023 09:29:34 +0000 Subject: [PATCH 28/75] minor typos --- services/galley/src/Galley/API/MLS/Message.hs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 6946fd785d..b76940608d 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -616,10 +616,10 @@ instance Semigroup ProposalAction where instance Monoid ProposalAction where mempty = ProposalAction mempty mempty mempty -paAddClient :: ClientIdentity -> Word32 -> 
ProposalAction +paAddClient :: ClientIdentity -> LeafIndex -> ProposalAction paAddClient cid idx = mempty {paAdd = cmSingleton cid idx} -paRemoveClient :: ClientIdentity -> Word32 -> ProposalAction +paRemoveClient :: ClientIdentity -> LeafIndex -> ProposalAction paRemoveClient cid idx = mempty {paRemove = cmSingleton cid idx} paExternalInitPresent :: ProposalAction @@ -694,7 +694,7 @@ getExternalCommitData senderIdentity lConvOrSub epoch commit = do proposals <- traverse getInlineProposal commit.cProposals -- According to the spec, an external commit must contain: - -- (https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol.html#section-12.2 + -- (https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol.html#section-12.2) -- -- > Exactly one ExternalInit -- > At most one Remove proposal, with which the joiner removes an old @@ -775,12 +775,11 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do -- no events for external commits void $ executeProposalAction senderIdentity Nothing lConvOrSub action - let remIndices = map snd (cmAssocs (paRemove action)) - -- increment epoch number lConvOrSub' <- for lConvOrSub incrementEpoch -- fetch backend remove proposals of the previous epoch + let remIndices = map snd (cmAssocs (paRemove action)) indicesInRemoveProposals <- -- skip remove proposals of already removed by the external commit (\\ remIndices) From 51614201ad9b8d571f5c4d046369871693478bfb Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Thu, 20 Apr 2023 13:20:55 +0000 Subject: [PATCH 29/75] split executing proposals for int and ext commits --- services/galley/src/Galley/API/MLS/Message.hs | 56 +++++++++++++++++-- .../Galley/Cassandra/Conversation/Members.hs | 3 +- .../galley/src/Galley/Effects/MemberStore.hs | 3 +- 3 files changed, 55 insertions(+), 7 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index b76940608d..877ef54d0f 
100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -744,6 +744,10 @@ processExternalCommit :: Maybe UpdatePath -> Sem r () processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do + -- TODO from talk with Stefan M + -- [ ] should leaf nodes be calculated within the commit lock? + -- [x] split executeProposalAction for internal and external commits + let convOrSub = tUnqualified lConvOrSub -- only members can join a subconversation @@ -772,8 +776,7 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do Right _ -> pure () withCommitLock groupId epoch $ do - -- no events for external commits - void $ executeProposalAction senderIdentity Nothing lConvOrSub action + executeExtCommitProposalAction senderIdentity lConvOrSub action -- increment epoch number lConvOrSub' <- for lConvOrSub incrementEpoch @@ -823,7 +826,7 @@ processInternalCommit senderIdentity con lConvOrSub epoch action commit = do throwS @'MLSCommitMissingReferences -- process and execute proposals - updates <- executeProposalAction senderIdentity con lConvOrSub action + updates <- executeIntCommitProposalAction senderIdentity con lConvOrSub action -- increment epoch number for_ lConvOrSub incrementEpoch @@ -1029,7 +1032,7 @@ type HasProposalActionEffects r = Member TinyLog r ) -executeProposalAction :: +executeIntCommitProposalAction :: forall r. 
HasProposalActionEffects r => ClientIdentity -> @@ -1037,7 +1040,7 @@ executeProposalAction :: Local ConvOrSubConv -> ProposalAction -> Sem r [LocalConversationUpdate] -executeProposalAction senderIdentity con lconvOrSub action = do +executeIntCommitProposalAction senderIdentity con lconvOrSub action = do let qusr = cidQualifiedUser senderIdentity convOrSub = tUnqualified lconvOrSub mlsMeta = mlsMetaConvOrSub convOrSub @@ -1170,6 +1173,49 @@ executeProposalAction senderIdentity con lconvOrSub action = do throwS @'MLSSelfRemovalNotAllowed pure (Just qtarget) +executeExtCommitProposalAction :: + forall r. + HasProposalActionEffects r => + ClientIdentity -> + Local ConvOrSubConv -> + ProposalAction -> + Sem r () +executeExtCommitProposalAction senderIdentity lconvOrSub action = do + let mlsMeta = mlsMetaConvOrSub $ tUnqualified lconvOrSub + newCILeaves = cmAssocs (paAdd action) + deprecatedCILeaves = cmAssocs (paRemove action) + + -- Adding clients: sender's client must be added and no other client may + be added. + when (length newCILeaves /= 1 || fst (head newCILeaves) /= senderIdentity) $ + throw (mlsProtocolError "No add proposals are allowed in external commits") + + -- Client removal: only the sender's client can be removed when rejoining the + (sub)conversation. + when (length deprecatedCILeaves > 1) $ + throw (mlsProtocolError "Up to one client can be removed in an external commit") + for_ (listToMaybe deprecatedCILeaves) $ \ciLeaf -> do + when (fst ciLeaf /= senderIdentity) $ + throw (mlsProtocolError "Only the sender can rejoin in an external commit") + + -- TODO required for external proposals? + -- FUTUREWORK: remove this check after remote admins are implemented in federation https://wearezeta.atlassian.net/browse/FS-216 + -- foldQualified lconvOrSub (\_ -> pure ()) (\_ -> throwS @'MLSUnsupportedProposal) qusr + + -- Remove deprecated sender client from conversation state.
+ for_ deprecatedCILeaves $ \(ci, _) -> do + removeMLSClients + (cnvmlsGroupId mlsMeta) + (cidQualifiedUser ci) + (Set.singleton $ ciClient ci) + + -- Add new sender client to the conversation state. + for_ newCILeaves $ \(ci, idx) -> do + addMLSClients + (cnvmlsGroupId mlsMeta) + (cidQualifiedUser ci) + (Set.singleton (ciClient ci, idx)) + existingLocalMembers :: Local Data.Conversation -> Set (Qualified UserId) existingLocalMembers lconv = (Set.fromList . map (fmap lmId . tUntagged)) (traverse convLocalMembers lconv) diff --git a/services/galley/src/Galley/Cassandra/Conversation/Members.hs b/services/galley/src/Galley/Cassandra/Conversation/Members.hs index fe13423776..09e0fddab8 100644 --- a/services/galley/src/Galley/Cassandra/Conversation/Members.hs +++ b/services/galley/src/Galley/Cassandra/Conversation/Members.hs @@ -48,6 +48,7 @@ import qualified UnliftIO import Wire.API.Conversation.Member hiding (Member) import Wire.API.Conversation.Role import Wire.API.MLS.Group +import Wire.API.MLS.LeafNode (LeafIndex) import Wire.API.Provider.Service -- | Add members to a local conversation. @@ -341,7 +342,7 @@ removeLocalMembersFromRemoteConv (tUntagged -> Qualified conv convDomain) victim setConsistency LocalQuorum for_ victims $ \u -> addPrepQuery Cql.deleteUserRemoteConv (u, convDomain, conv) -addMLSClients :: GroupId -> Qualified UserId -> Set.Set (ClientId, Word32) -> Client () +addMLSClients :: GroupId -> Qualified UserId -> Set.Set (ClientId, LeafIndex) -> Client () addMLSClients groupId (Qualified usr domain) cs = retry x5 . 
batch $ do setType BatchLogged setConsistency LocalQuorum diff --git a/services/galley/src/Galley/Effects/MemberStore.hs b/services/galley/src/Galley/Effects/MemberStore.hs index 8700c31459..b0891bcc7d 100644 --- a/services/galley/src/Galley/Effects/MemberStore.hs +++ b/services/galley/src/Galley/Effects/MemberStore.hs @@ -63,6 +63,7 @@ import Imports import Polysemy import Wire.API.Conversation.Member hiding (Member) import Wire.API.MLS.Group +import Wire.API.MLS.LeafNode import Wire.API.Provider.Service data MemberStore m a where @@ -79,7 +80,7 @@ data MemberStore m a where SetOtherMember :: Local ConvId -> Qualified UserId -> OtherMemberUpdate -> MemberStore m () DeleteMembers :: ConvId -> UserList UserId -> MemberStore m () DeleteMembersInRemoteConversation :: Remote ConvId -> [UserId] -> MemberStore m () - AddMLSClients :: GroupId -> Qualified UserId -> Set (ClientId, Word32) -> MemberStore m () + AddMLSClients :: GroupId -> Qualified UserId -> Set (ClientId, LeafIndex) -> MemberStore m () PlanClientRemoval :: GroupId -> Qualified UserId -> Set ClientId -> MemberStore m () RemoveMLSClients :: GroupId -> Qualified UserId -> Set ClientId -> MemberStore m () RemoveAllMLSClients :: GroupId -> MemberStore m () From 003e625aeebeb7b8f8a72fad909c1db9c68f18c6 Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Thu, 20 Apr 2023 14:25:27 +0000 Subject: [PATCH 30/75] execute remove proposals before add proposals This makes sure that all leaf indices are freed in the database before they are occupied again. 
--- services/galley/src/Galley/API/MLS/Message.hs | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 877ef54d0f..3d07288ba9 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -1048,6 +1048,9 @@ executeIntCommitProposalAction senderIdentity con lconvOrSub action = do ss = csSignatureScheme (cnvmlsCipherSuite mlsMeta) newUserClients = Map.assocs (paAdd action) + -- FUTUREWORK: remove this check after remote admins are implemented in federation https://wearezeta.atlassian.net/browse/FS-216 + foldQualified lconvOrSub (\_ -> pure ()) (\_ -> throwS @'MLSUnsupportedProposal) qusr + -- no client can be directly added to a subconversation when (is _SubConv convOrSub && any ((senderIdentity /=) . fst) (cmAssocs (paAdd action))) $ throw (mlsProtocolError "Add proposals in subconversations are not supported") @@ -1077,8 +1080,7 @@ executeIntCommitProposalAction senderIdentity con lconvOrSub action = do throw () pure (qtarget, clients) - -- FUTUREWORK: remove this check after remote admins are implemented in federation https://wearezeta.atlassian.net/browse/FS-216 - foldQualified lconvOrSub (\_ -> pure ()) (\_ -> throwS @'MLSUnsupportedProposal) qusr + membersToRemove <- catMaybes <$> for removedUsers (uncurry (checkRemoval (is _SubConv convOrSub) cm)) -- for each user, we compare their clients with the ones being added to the conversation for_ newUserClients $ \(qtarget, newclients) -> case Map.lookup qtarget cm of @@ -1110,19 +1112,6 @@ executeIntCommitProposalAction senderIdentity con lconvOrSub action = do -- FUTUREWORK: turn this error into a proper response throwS @'MLSClientMismatch - membersToRemove <- catMaybes <$> for removedUsers (uncurry (checkRemoval (is _SubConv convOrSub) cm)) - - -- add users to the conversation and send events - addEvents <- - foldMap (addMembers 
qusr con lconvOrSub) - . nonEmpty - . map fst - $ newUserClients - - -- add clients in the conversation state - for_ newUserClients $ \(qtarget, newClients) -> do - addMLSClients (cnvmlsGroupId mlsMeta) qtarget (Set.fromList (Map.assocs newClients)) - -- remove users from the conversation and send events removeEvents <- foldMap @@ -1153,6 +1142,17 @@ executeIntCommitProposalAction senderIdentity con lconvOrSub action = do runFederatedConcurrently_ (toList remoteDomains) $ \_ -> do void $ fedClient @'Galley @"on-new-remote-subconversation" nrc + -- add users to the conversation and send events + addEvents <- + foldMap (addMembers qusr con lconvOrSub) + . nonEmpty + . map fst + $ newUserClients + + -- add clients in the conversation state + for_ newUserClients $ \(qtarget, newClients) -> do + addMLSClients (cnvmlsGroupId mlsMeta) qtarget (Set.fromList (Map.assocs newClients)) + -- TODO: increment epoch here instead of in the calling site pure (addEvents <> removeEvents) From d9a03b471099935bfa23a3d19947bd59f0c5d3b2 Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Fri, 21 Apr 2023 08:07:44 +0000 Subject: [PATCH 31/75] rename Word32 and ref to LeafIndex and idx --- libs/wire-api/src/Wire/API/MLS/Proposal.hs | 2 +- services/galley/test/integration/API/MLS.hs | 56 +++++++++---------- .../galley/test/integration/API/MLS/Util.hs | 5 +- services/galley/test/integration/API/Util.hs | 7 ++- 4 files changed, 36 insertions(+), 34 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/Proposal.hs b/libs/wire-api/src/Wire/API/MLS/Proposal.hs index 88f3602f26..06998a5157 100644 --- a/libs/wire-api/src/Wire/API/MLS/Proposal.hs +++ b/libs/wire-api/src/Wire/API/MLS/Proposal.hs @@ -41,7 +41,7 @@ import Wire.Arbitrary data Proposal = AddProposal (RawMLS KeyPackage) | UpdateProposal (RawMLS LeafNode) - | RemoveProposal Word32 + | RemoveProposal LeafIndex | PreSharedKeyProposal (RawMLS PreSharedKeyID) | ReInitProposal (RawMLS ReInit) | ExternalInitProposal ByteString diff --git 
a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 8cfaf2a859..903489220b 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -1595,7 +1595,7 @@ testBackendRemoveProposalRecreateClient = do void $ createPendingProposalCommit alice1 >>= sendAndConsumeCommitBundle - (_, ref) <- assertOne =<< getClientsFromGroupState alice1 alice + (_, idx) <- assertOne =<< getClientsFromGroupState alice1 alice liftTest $ deleteClient (qUnqualified alice) (ciClient alice1) (Just defPassword) @@ -1611,7 +1611,7 @@ testBackendRemoveProposalRecreateClient = do createExternalCommit alice2 Nothing cnv >>= sendAndConsumeCommitBundle WS.assertMatch (5 # WS.Second) wsA $ - wsAssertBackendRemoveProposal alice (Conv <$> qcnv) ref + wsAssertBackendRemoveProposal alice (Conv <$> qcnv) idx consumeMessage1 alice2 proposal void $ createPendingProposalCommit alice2 >>= sendAndConsumeCommitBundle @@ -1636,9 +1636,9 @@ testBackendRemoveProposalLocalConvLocalUser = do { mlsMembers = Set.difference (mlsMembers mls) (Set.fromList [bob1, bob2]) } - for bobClients $ \(_, ref) -> do + for bobClients $ \(_, idx) -> do [msg] <- WS.assertMatchN (5 # Second) wss $ \n -> - wsAssertBackendRemoveProposal bob (Conv <$> qcnv) ref n + wsAssertBackendRemoveProposal bob (Conv <$> qcnv) idx n consumeMessage1 alice1 msg -- alice commits the external proposals @@ -1671,9 +1671,9 @@ testBackendRemoveProposalLocalConvRemoteUser = do } ) - for_ bobClients $ \(_, ref) -> + for_ bobClients $ \(_, idx) -> WS.assertMatch (5 # WS.Second) wsA $ - wsAssertBackendRemoveProposal bob (Conv <$> qcnv) ref + wsAssertBackendRemoveProposal bob (Conv <$> qcnv) idx sendRemoteMLSWelcome :: TestM () sendRemoteMLSWelcome = do @@ -1725,10 +1725,10 @@ testBackendRemoveProposalLocalConvLocalLeaverCreator = do { mlsMembers = Set.difference (mlsMembers mls) (Set.fromList [alice1]) } - for_ aliceClients $ \(_, ref) -> do + for_ aliceClients $ \(_, 
idx) -> do -- only bob's clients should receive the external proposals msgs <- WS.assertMatchN (5 # Second) (drop 1 wss) $ \n -> - wsAssertBackendRemoveProposal alice (Conv <$> qcnv) ref n + wsAssertBackendRemoveProposal alice (Conv <$> qcnv) idx n traverse_ (uncurry consumeMessage1) (zip [bob1, bob2] msgs) -- but everyone should receive leave events @@ -1770,10 +1770,10 @@ testBackendRemoveProposalLocalConvLocalLeaverCommitter = do { mlsMembers = Set.difference (mlsMembers mls) (Set.fromList [bob1, bob2]) } - for_ bobClients $ \(_, ref) -> do + for_ bobClients $ \(_, idx) -> do -- only alice and charlie should receive the external proposals msgs <- WS.assertMatchN (5 # Second) (take 2 wss) $ \n -> - wsAssertBackendRemoveProposal bob (Conv <$> qcnv) ref n + wsAssertBackendRemoveProposal bob (Conv <$> qcnv) idx n traverse_ (uncurry consumeMessage1) (zip [alice1, charlie1] msgs) -- but everyone should receive leave events @@ -1814,9 +1814,9 @@ testBackendRemoveProposalLocalConvRemoteLeaver = do curAction = SomeConversationAction SConversationLeaveTag () } - for_ bobClients $ \(_, ref) -> + for_ bobClients $ \(_, idx) -> WS.assertMatch_ (5 # WS.Second) wsA $ - wsAssertBackendRemoveProposal bob (Conv <$> qcnv) ref + wsAssertBackendRemoveProposal bob (Conv <$> qcnv) idx testBackendRemoveProposalLocalConvLocalClient :: TestM () testBackendRemoveProposalLocalConvLocalClient = do @@ -1827,7 +1827,7 @@ testBackendRemoveProposalLocalConvLocalClient = do traverse_ uploadNewKeyPackage [bob1, bob2, charlie1] (_, qcnv) <- setupMLSGroup alice1 void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommitBundle - Just (_, kpBob1) <- find (\(ci, _) -> ci == bob1) <$> getClientsFromGroupState alice1 bob + Just (_, idxBob1) <- find (\(ci, _) -> ci == bob1) <$> getClientsFromGroupState alice1 bob mlsBracket [alice1, bob1] $ \[wsA, wsB] -> do liftTest $ @@ -1843,7 +1843,7 @@ testBackendRemoveProposalLocalConvLocalClient = do wsAssertClientRemoved (ciClient bob1) msg <- 
WS.assertMatch (5 # WS.Second) wsA $ \notification -> do - wsAssertBackendRemoveProposal bob (Conv <$> qcnv) kpBob1 notification + wsAssertBackendRemoveProposal bob (Conv <$> qcnv) idxBob1 notification for_ [alice1, bob2, charlie1] $ flip consumeMessage1 msg @@ -1863,7 +1863,7 @@ testBackendRemoveProposalLocalConvRemoteClient = do (_, qcnv) <- setupMLSGroup alice1 commit <- createAddCommit alice1 [bob] - [(_, bob1KP)] <- getClientsFromGroupState alice1 bob + [(_, idxBob1)] <- getClientsFromGroupState alice1 bob let mock = receiveCommitMock [bob1] <|> welcomeMock <|> messageSentMock void . withTempMockFederator' mock $ do mlsBracket [alice1] $ \[wsA] -> void $ do @@ -1879,7 +1879,7 @@ testBackendRemoveProposalLocalConvRemoteClient = do WS.assertMatch_ (5 # WS.Second) wsA $ \notification -> - void $ wsAssertBackendRemoveProposal bob (Conv <$> qcnv) bob1KP notification + void $ wsAssertBackendRemoveProposal bob (Conv <$> qcnv) idxBob1 notification testGetGroupInfoOfLocalConv :: TestM () testGetGroupInfoOfLocalConv = do @@ -3022,7 +3022,7 @@ testLeaveSubConv isSubConvCreator = do let firstLeaver = if isSubConvCreator then bob1 else alice1 -- a member leaves the subconversation - [firstLeaverKP] <- + [idxFirstLeaver] <- map snd . filter (\(cid, _) -> cid == firstLeaver) <$> getClientsFromGroupState alice1 @@ -3047,7 +3047,7 @@ testLeaveSubConv isSubConvCreator = do wsAssertBackendRemoveProposal (cidQualifiedUser firstLeaver) (Conv <$> qcnv) - firstLeaverKP + idxFirstLeaver traverse_ (uncurry consumeMessage1) (zip others msgs) -- assert the leaver gets no proposal or event void . liftIO $ WS.assertNoEvent (5 # WS.Second) [wsLeaver] @@ -3083,7 +3083,7 @@ testLeaveSubConv isSubConvCreator = do liftIO $ length (pscMembers psc) @?= 3 -- charlie1 leaves - [charlie1KP] <- + [idxCharlie1] <- map snd . 
filter (\(cid, _) -> cid == charlie1) <$> getClientsFromGroupState (head others) charlie mlsBracket others $ \wss -> do @@ -3091,7 +3091,7 @@ testLeaveSubConv isSubConvCreator = do msgs <- WS.assertMatchN (5 # WS.Second) wss $ - wsAssertBackendRemoveProposal charlie (Conv <$> qcnv) charlie1KP + wsAssertBackendRemoveProposal charlie (Conv <$> qcnv) idxCharlie1 traverse_ (uncurry consumeMessage1) (zip others msgs) -- a member commits the pending proposal @@ -3202,7 +3202,7 @@ testRemoveUserParent = do for_ [alice1, bob2, charlie1, charlie2] $ \c -> void $ createExternalCommit c Nothing qcs >>= sendAndConsumeCommitBundle - [(_, kpref1), (_, kpref2)] <- getClientsFromGroupState alice1 charlie + [(_, idxRef1), (_, idxRef2)] <- getClientsFromGroupState alice1 charlie -- charlie leaves the main conversation mlsBracket [alice1, bob1, bob2] $ \wss -> do @@ -3217,12 +3217,12 @@ testRemoveUserParent = do } msg1 <- WS.assertMatchN (5 # Second) wss $ \n -> - wsAssertBackendRemoveProposal charlie (Conv <$> qcnv) kpref1 n + wsAssertBackendRemoveProposal charlie (Conv <$> qcnv) idxRef1 n traverse_ (uncurry consumeMessage1) (zip [alice1, bob1, bob2] msg1) msg2 <- WS.assertMatchN (5 # Second) wss $ \n -> - wsAssertBackendRemoveProposal charlie (Conv <$> qcnv) kpref2 n + wsAssertBackendRemoveProposal charlie (Conv <$> qcnv) idxRef2 n traverse_ (uncurry consumeMessage1) (zip [alice1, bob1, bob2] msg2) @@ -3264,7 +3264,7 @@ testRemoveCreatorParent = do for_ [bob1, bob2, charlie1, charlie2] $ \c -> void $ createExternalCommit c Nothing qcs >>= sendAndConsumeCommitBundle - [(_, kpref1)] <- getClientsFromGroupState alice1 alice + [(_, idxRef1)] <- getClientsFromGroupState alice1 alice -- creator leaves the main conversation mlsBracket [bob1, bob2, charlie1, charlie2] $ \wss -> do @@ -3281,7 +3281,7 @@ testRemoveCreatorParent = do msg <- WS.assertMatchN (5 # Second) wss $ \n -> -- Checks proposal for subconv, parent doesn't get one -- since alice is not notified of her own removal - 
wsAssertBackendRemoveProposal alice (Conv <$> qcnv) kpref1 n + wsAssertBackendRemoveProposal alice (Conv <$> qcnv) idxRef1 n traverse_ (uncurry consumeMessage1) (zip [bob1, bob2, charlie1, charlie2] msg) @@ -3352,13 +3352,13 @@ testCreatorRemovesUserFromParent = do State.put stateSub -- Get client state for alice and fetch bob client identities - [(_, kprefBob1), (_, kprefBob2)] <- getClientsFromGroupState alice1 bob + [(_, idxBob1), (_, idxBob2)] <- getClientsFromGroupState alice1 bob -- handle bob1 removal msgs <- WS.assertMatchN (5 # Second) wss $ \n -> do -- it was an alice proposal for the parent, -- but it's a backend proposal for the sub - wsAssertBackendRemoveProposal bob qcs kprefBob1 n + wsAssertBackendRemoveProposal bob qcs idxBob1 n traverse_ (uncurry consumeMessage1) (zip [alice1, charlie1, charlie2] msgs) @@ -3366,7 +3366,7 @@ testCreatorRemovesUserFromParent = do msgs2 <- WS.assertMatchN (5 # Second) wss $ \n -> do -- it was an alice proposal for the parent, -- but it's a backend proposal for the sub - wsAssertBackendRemoveProposal bob qcs kprefBob2 n + wsAssertBackendRemoveProposal bob qcs idxBob2 n traverse_ (uncurry consumeMessage1) (zip [alice1, charlie1, charlie2] msgs2) diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index 3b58317a19..ac59f82f81 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -77,6 +77,7 @@ import Wire.API.MLS.CommitBundle import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage import Wire.API.MLS.Keys +import Wire.API.MLS.LeafNode import Wire.API.MLS.Message import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation @@ -913,7 +914,7 @@ mlsBracket clients k = do c <- view tsCannon WS.bracketAsClientRN c (map (ciUser &&& ciClient) clients) k -readGroupState :: ByteString -> [(ClientIdentity, Word32)] +readGroupState :: ByteString -> [(ClientIdentity, LeafIndex)] readGroupState 
j = do (node, n) <- zip (j ^.. key "group" . key "public_group" . key "treesync" . key "tree" . key "leaf_nodes" . _Array . traverse . key "node") [0 ..] case node ^? key "leaf_node" of @@ -929,7 +930,7 @@ readGroupState j = do getClientsFromGroupState :: ClientIdentity -> Qualified UserId -> - MLSTest [(ClientIdentity, Word32)] + MLSTest [(ClientIdentity, LeafIndex)] getClientsFromGroupState cid u = do groupState <- readGroupState <$> getClientGroupState cid pure $ filter (\(cid', _) -> cidQualifiedUser cid' == u) groupState diff --git a/services/galley/test/integration/API/Util.hs b/services/galley/test/integration/API/Util.hs index cf54add2b7..ca9540f19c 100644 --- a/services/galley/test/integration/API/Util.hs +++ b/services/galley/test/integration/API/Util.hs @@ -121,6 +121,7 @@ import Wire.API.Federation.API import Wire.API.Federation.API.Galley import Wire.API.Federation.Domain (originDomainHeaderName) import Wire.API.Internal.Notification hiding (target) +import Wire.API.MLS.LeafNode import Wire.API.MLS.Message import Wire.API.MLS.Proposal import Wire.API.MLS.Serialisation @@ -2898,7 +2899,7 @@ wsAssertConvReceiptModeUpdate conv usr new n = do evtFrom e @?= usr evtData e @?= EdConvReceiptModeUpdate (ConversationReceiptModeUpdate new) -wsAssertBackendRemoveProposalWithEpoch :: HasCallStack => Qualified UserId -> Qualified ConvId -> Word32 -> Epoch -> Notification -> IO ByteString +wsAssertBackendRemoveProposalWithEpoch :: HasCallStack => Qualified UserId -> Qualified ConvId -> LeafIndex -> Epoch -> Notification -> IO ByteString wsAssertBackendRemoveProposalWithEpoch fromUser convId idx epoch n = do bs <- wsAssertBackendRemoveProposal fromUser (Conv <$> convId) idx n let msg = fromRight (error "Failed to parse Message") $ decodeMLS' @Message bs @@ -2907,7 +2908,7 @@ wsAssertBackendRemoveProposalWithEpoch fromUser convId idx epoch n = do _ -> assertFailure "unexpected message content" pure bs -wsAssertBackendRemoveProposal :: HasCallStack => Qualified UserId 
-> Qualified ConvOrSubConvId -> Word32 -> Notification -> IO ByteString +wsAssertBackendRemoveProposal :: HasCallStack => Qualified UserId -> Qualified ConvOrSubConvId -> LeafIndex -> Notification -> IO ByteString wsAssertBackendRemoveProposal fromUser cnvOrSubCnv idx n = do let e = List1.head (WS.unpackPayload n) ntfTransient n @?= False @@ -2921,7 +2922,7 @@ wsAssertBackendRemoveProposal fromUser cnvOrSubCnv idx n = do pmsg.content.rmValue.sender @?= SenderExternal 0 case pmsg.content.rmValue.content of FramedContentProposal prop -> case prop.rmValue of - RemoveProposal kpRefRemove -> kpRefRemove @?= idx + RemoveProposal removedIdx -> removedIdx @?= idx otherProp -> assertFailure $ "Expected RemoveProposal but got " <> show otherProp otherPayload -> assertFailure $ "Expected ProposalMessage but got " <> show otherPayload _ -> assertFailure $ "Expected PublicMessage" From 31c66ee2ee361d01285f3e4878b7b79dd5de8c91 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 18 Apr 2023 15:18:39 +0200 Subject: [PATCH 32/75] Remove MissingSenderClient error --- libs/wire-api/src/Wire/API/Error/Galley.hs | 3 --- .../src/Wire/API/Routes/Public/Galley/Conversation.hs | 3 --- libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs | 2 -- services/galley/src/Galley/API/MLS/Message.hs | 10 +++------- 4 files changed, 3 insertions(+), 15 deletions(-) diff --git a/libs/wire-api/src/Wire/API/Error/Galley.hs b/libs/wire-api/src/Wire/API/Error/Galley.hs index 8a92fa13ba..1a2c1ee816 100644 --- a/libs/wire-api/src/Wire/API/Error/Galley.hs +++ b/libs/wire-api/src/Wire/API/Error/Galley.hs @@ -86,7 +86,6 @@ data GalleyError | MLSClientSenderUserMismatch | MLSWelcomeMismatch | MLSMissingGroupInfo - | MLSMissingSenderClient | MLSUnexpectedSenderClient | MLSSubConvUnsupportedConvType | MLSSubConvClientNotInParent @@ -230,8 +229,6 @@ type instance MapError 'MLSWelcomeMismatch = 'StaticError 400 "mls-welcome-misma type instance MapError 'MLSMissingGroupInfo = 'StaticError 404 
"mls-missing-group-info" "The conversation has no group information" -type instance MapError 'MLSMissingSenderClient = 'StaticError 403 "mls-missing-sender-client" "The client has to refresh their access token and provide their client ID" - type instance MapError 'MLSSubConvUnsupportedConvType = 'StaticError 403 "mls-subconv-unsupported-convtype" "MLS subconversations are only supported for regular conversations" type instance MapError 'MLSSubConvClientNotInParent = 'StaticError 403 "mls-subconv-join-parent-missing" "MLS client cannot join the subconversation because it is not member of the parent conversation" diff --git a/libs/wire-api/src/Wire/API/Routes/Public/Galley/Conversation.hs b/libs/wire-api/src/Wire/API/Routes/Public/Galley/Conversation.hs index 080a5f7b78..b0da0f7509 100644 --- a/libs/wire-api/src/Wire/API/Routes/Public/Galley/Conversation.hs +++ b/libs/wire-api/src/Wire/API/Routes/Public/Galley/Conversation.hs @@ -371,7 +371,6 @@ type ConversationAPI = :> MakesFederatedCall 'Galley "on-conversation-created" :> Until 'V3 :> CanThrow 'ConvAccessDenied - :> CanThrow 'MLSMissingSenderClient :> CanThrow 'MLSNonEmptyMemberList :> CanThrow 'MLSNotEnabled :> CanThrow 'NotConnected @@ -393,7 +392,6 @@ type ConversationAPI = :> From 'V3 :> Until 'V4 :> CanThrow 'ConvAccessDenied - :> CanThrow 'MLSMissingSenderClient :> CanThrow 'MLSNonEmptyMemberList :> CanThrow 'MLSNotEnabled :> CanThrow 'NotConnected @@ -413,7 +411,6 @@ type ConversationAPI = :> MakesFederatedCall 'Galley "on-conversation-created" :> From 'V4 :> CanThrow 'ConvAccessDenied - :> CanThrow 'MLSMissingSenderClient :> CanThrow 'MLSNonEmptyMemberList :> CanThrow 'MLSNotEnabled :> CanThrow 'NotConnected diff --git a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs index 5ba0da1807..5ef98ae396 100644 --- a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs +++ b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs @@ -53,7 +53,6 @@ 
type MLSMessagingAPI = :> CanThrow 'MLSGroupConversationMismatch :> CanThrow 'MLSInvalidLeafNodeIndex :> CanThrow 'MLSKeyPackageRefNotFound - :> CanThrow 'MLSMissingSenderClient :> CanThrow 'MLSNotEnabled :> CanThrow 'MLSProposalNotFound :> CanThrow 'MLSProtocolErrorTag @@ -92,7 +91,6 @@ type MLSMessagingAPI = :> CanThrow 'MLSGroupConversationMismatch :> CanThrow 'MLSInvalidLeafNodeIndex :> CanThrow 'MLSKeyPackageRefNotFound - :> CanThrow 'MLSMissingSenderClient :> CanThrow 'MLSNotEnabled :> CanThrow 'MLSProposalNotFound :> CanThrow 'MLSProtocolErrorTag diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 3d07288ba9..309a529538 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -109,13 +109,13 @@ import Wire.API.User.Client -- - [x] extract validation function to wire-api -- - [x] validate lifetime and public key consistency only on brig -- - [x] check that ciphersuite matches conversation on galley --- - [ ] check the signature on the LeafNode +-- - [x] check the signature on the LeafNode -- - [ ] ? verify capabilities -- - [ ] verify that all extensions are present in the capabilities -- - [ ] ? in the update case (in galley), verify that the encryption_key is different -- [ ] validate proposals when processing proposal and commit messages --- [ ] remove MissingSenderClient error --- [ ] PreSharedKey proposal +-- [x] remove MissingSenderClient error +-- [ ] ? 
PreSharedKey proposal -- [x] remove all key package ref mapping -- [x] initialise index maps -- [ ] newtype for leaf node indices @@ -236,7 +236,6 @@ type MLSMessageStaticErrors = ErrorS 'MLSSelfRemovalNotAllowed, ErrorS 'MLSClientSenderUserMismatch, ErrorS 'MLSGroupConversationMismatch, - ErrorS 'MLSMissingSenderClient, ErrorS 'MLSSubConvClientNotInParent ] @@ -255,7 +254,6 @@ postMLSMessageFromLocalUserV1 :: Member (ErrorS 'MLSClientSenderUserMismatch) r, Member (ErrorS 'MLSCommitMissingReferences) r, Member (ErrorS 'MLSGroupConversationMismatch) r, - Member (ErrorS 'MLSMissingSenderClient) r, Member (ErrorS 'MLSNotEnabled) r, Member (ErrorS 'MLSProposalNotFound) r, Member (ErrorS 'MLSSelfRemovalNotAllowed) r, @@ -286,7 +284,6 @@ postMLSMessageFromLocalUser :: Member (ErrorS 'MLSClientSenderUserMismatch) r, Member (ErrorS 'MLSCommitMissingReferences) r, Member (ErrorS 'MLSGroupConversationMismatch) r, - Member (ErrorS 'MLSMissingSenderClient) r, Member (ErrorS 'MLSNotEnabled) r, Member (ErrorS 'MLSProposalNotFound) r, Member (ErrorS 'MLSSelfRemovalNotAllowed) r, @@ -457,7 +454,6 @@ postMLSMessage :: Member (ErrorS 'MLSClientSenderUserMismatch) r, Member (ErrorS 'MLSCommitMissingReferences) r, Member (ErrorS 'MLSGroupConversationMismatch) r, - Member (ErrorS 'MLSMissingSenderClient) r, Member (ErrorS 'MLSProposalNotFound) r, Member (ErrorS 'MLSSelfRemovalNotAllowed) r, Member (ErrorS 'MLSStaleMessage) r, From 08895eaf41af16ead5dfc9c7702816b85bb86922 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 18 Apr 2023 15:47:49 +0200 Subject: [PATCH 33/75] Remove some prefixes from MLS structures --- libs/wire-api/src/Wire/API/MLS/Commit.hs | 28 ++++++------- .../wire-api/src/Wire/API/MLS/CommitBundle.hs | 30 +++++++------- libs/wire-api/src/Wire/API/MLS/KeyPackage.hs | 32 ++++++--------- libs/wire-api/src/Wire/API/MLS/Proposal.hs | 40 +++++++++---------- libs/wire-api/test/unit/Test/Wire/API/MLS.hs | 2 +- services/brig/src/Brig/API/MLS/KeyPackages.hs | 13 +++--- 
.../brig/test/integration/API/Federation.hs | 8 ++-- services/brig/test/integration/API/MLS.hs | 24 +++++------ .../test/integration/Federation/End2end.hs | 12 +++--- services/galley/src/Galley/API/MLS/Message.hs | 21 +++++----- .../galley/test/integration/API/MLS/Util.hs | 16 ++++---- 11 files changed, 108 insertions(+), 118 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/Commit.hs b/libs/wire-api/src/Wire/API/MLS/Commit.hs index 83cb4277ad..81223db550 100644 --- a/libs/wire-api/src/Wire/API/MLS/Commit.hs +++ b/libs/wire-api/src/Wire/API/MLS/Commit.hs @@ -25,8 +25,8 @@ import Wire.Arbitrary -- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-12.4-3 data Commit = Commit - { cProposals :: [ProposalOrRef], - cPath :: Maybe UpdatePath + { proposals :: [ProposalOrRef], + path :: Maybe UpdatePath } deriving (Eq, Show, Generic) deriving (Arbitrary) via (GenericUniform Commit) @@ -39,13 +39,13 @@ instance ParseMLS Commit where instance SerialiseMLS Commit where serialiseMLS c = do - serialiseMLSVector @VarInt serialiseMLS c.cProposals - serialiseMLSOptional serialiseMLS c.cPath + serialiseMLSVector @VarInt serialiseMLS c.proposals + serialiseMLSOptional serialiseMLS c.path -- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.6-2 data UpdatePath = UpdatePath - { upLeaf :: RawMLS LeafNode, - upNodes :: [UpdatePathNode] + { leaf :: RawMLS LeafNode, + nodes :: [UpdatePathNode] } deriving (Eq, Show, Generic) deriving (Arbitrary) via (GenericUniform UpdatePath) @@ -55,13 +55,13 @@ instance ParseMLS UpdatePath where instance SerialiseMLS UpdatePath where serialiseMLS up = do - serialiseMLS up.upLeaf - serialiseMLSVector @VarInt serialiseMLS up.upNodes + serialiseMLS up.leaf + serialiseMLSVector @VarInt serialiseMLS up.nodes -- | 
https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.6-2 data UpdatePathNode = UpdatePathNode - { upnPublicKey :: ByteString, - upnSecret :: [HPKECiphertext] + { publicKey :: ByteString, + secret :: [HPKECiphertext] } deriving (Eq, Show, Generic) deriving (Arbitrary) via (GenericUniform UpdatePathNode) @@ -71,13 +71,13 @@ instance ParseMLS UpdatePathNode where instance SerialiseMLS UpdatePathNode where serialiseMLS upn = do - serialiseMLSBytes @VarInt upn.upnPublicKey - serialiseMLSVector @VarInt serialiseMLS upn.upnSecret + serialiseMLSBytes @VarInt upn.publicKey + serialiseMLSVector @VarInt serialiseMLS upn.secret -- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.6-2 data HPKECiphertext = HPKECiphertext - { hcOutput :: ByteString, - hcCiphertext :: ByteString + { output :: ByteString, + ciphertext :: ByteString } deriving (Eq, Show, Generic) deriving (Arbitrary) via (GenericUniform HPKECiphertext) diff --git a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs index ed185ec90e..053b59bfb3 100644 --- a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs +++ b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs @@ -27,16 +27,16 @@ import Wire.API.MLS.Serialisation import Wire.API.MLS.Welcome data CommitBundle = CommitBundle - { cbCommitMsg :: RawMLS Message, -- TODO: change this type to Commit - cbWelcome :: Maybe (RawMLS Welcome), - cbGroupInfo :: RawMLS GroupInfo + { commitMsg :: RawMLS Message, -- TODO: change this type to Commit + welcome :: Maybe (RawMLS Welcome), + groupInfo :: RawMLS GroupInfo } deriving stock (Eq, Show, Generic) data CommitBundleF f = CommitBundleF - { cbCommitMsg :: f (RawMLS Message), - cbWelcome :: f (RawMLS Welcome), - cbGroupInfo :: f (RawMLS GroupInfo) + { commitMsg :: f (RawMLS Message), + welcome :: f (RawMLS Welcome), + groupInfo :: f (RawMLS GroupInfo) } deriving 
instance Show (CommitBundleF []) @@ -44,9 +44,9 @@ deriving instance Show (CommitBundleF []) instance Alternative f => Semigroup (CommitBundleF f) where cb1 <> cb2 = CommitBundleF - (cb1.cbCommitMsg <|> cb2.cbCommitMsg) - (cb1.cbWelcome <|> cb2.cbWelcome) - (cb1.cbGroupInfo <|> cb2.cbGroupInfo) + (cb1.commitMsg <|> cb2.commitMsg) + (cb1.welcome <|> cb2.welcome) + (cb1.groupInfo <|> cb2.groupInfo) instance Alternative f => Monoid (CommitBundleF f) where mempty = CommitBundleF empty empty empty @@ -54,9 +54,9 @@ instance Alternative f => Monoid (CommitBundleF f) where checkCommitBundleF :: CommitBundleF [] -> Either Text CommitBundle checkCommitBundleF cb = CommitBundle - <$> check "commit" cb.cbCommitMsg - <*> checkOpt "welcome" cb.cbWelcome - <*> check "group info" cb.cbGroupInfo + <$> check "commit" cb.commitMsg + <*> checkOpt "welcome" cb.welcome + <*> check "group info" cb.groupInfo where check :: Text -> [a] -> Either Text a check _ [x] = pure x @@ -88,9 +88,9 @@ instance ParseMLS CommitBundle where instance SerialiseMLS CommitBundle where serialiseMLS cb = do - serialiseMLS cb.cbCommitMsg - traverse_ (serialiseMLS . mkMessage . MessageWelcome) cb.cbWelcome - serialiseMLS $ mkMessage (MessageGroupInfo cb.cbGroupInfo) + serialiseMLS cb.commitMsg + traverse_ (serialiseMLS . mkMessage . 
MessageWelcome) cb.welcome + serialiseMLS $ mkMessage (MessageGroupInfo cb.groupInfo) instance S.ToSchema CommitBundle where declareNamedSchema _ = pure (mlsSwagger "CommitBundle") diff --git a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs index dd7ad72cbc..0bdf2a69d9 100644 --- a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs +++ b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs @@ -27,7 +27,6 @@ module Wire.API.MLS.KeyPackage kpRef', KeyPackageTBS (..), KeyPackageRef (..), - KeyPackageUpdate (..), ) where @@ -56,14 +55,14 @@ import Wire.API.MLS.Serialisation import Wire.Arbitrary data KeyPackageUpload = KeyPackageUpload - {kpuKeyPackages :: [RawMLS KeyPackage]} + {keyPackages :: [RawMLS KeyPackage]} deriving (FromJSON, ToJSON, S.ToSchema) via Schema KeyPackageUpload instance ToSchema KeyPackageUpload where schema = object "KeyPackageUpload" $ KeyPackageUpload - <$> kpuKeyPackages .= field "key_packages" (array rawKeyPackageSchema) + <$> keyPackages .= field "key_packages" (array rawKeyPackageSchema) newtype KeyPackageData = KeyPackageData {kpData :: ByteString} deriving stock (Eq, Ord, Show) @@ -83,10 +82,10 @@ instance Cql KeyPackageData where fromCql _ = Left "Expected CqlBlob" data KeyPackageBundleEntry = KeyPackageBundleEntry - { kpbeUser :: Qualified UserId, - kpbeClient :: ClientId, - kpbeRef :: KeyPackageRef, - kpbeKeyPackage :: KeyPackageData + { user :: Qualified UserId, + client :: ClientId, + ref :: KeyPackageRef, + keyPackage :: KeyPackageData } deriving stock (Eq, Ord, Show) @@ -94,12 +93,12 @@ instance ToSchema KeyPackageBundleEntry where schema = object "KeyPackageBundleEntry" $ KeyPackageBundleEntry - <$> kpbeUser .= qualifiedObjectSchema "user" schema - <*> kpbeClient .= field "client" schema - <*> kpbeRef .= field "key_package_ref" schema - <*> kpbeKeyPackage .= field "key_package" schema + <$> (.user) .= qualifiedObjectSchema "user" schema + <*> (.client) .= field "client" schema + <*> (.ref) .= field 
"key_package_ref" schema + <*> (.keyPackage) .= field "key_package" schema -newtype KeyPackageBundle = KeyPackageBundle {kpbEntries :: Set KeyPackageBundleEntry} +newtype KeyPackageBundle = KeyPackageBundle {entries :: Set KeyPackageBundleEntry} deriving stock (Eq, Show) deriving (FromJSON, ToJSON, S.ToSchema) via Schema KeyPackageBundle @@ -107,7 +106,7 @@ instance ToSchema KeyPackageBundle where schema = object "KeyPackageBundle" $ KeyPackageBundle - <$> kpbEntries .= field "key_packages" (set schema) + <$> (.entries) .= field "key_packages" (set schema) newtype KeyPackageCount = KeyPackageCount {unKeyPackageCount :: Int} deriving newtype (Eq, Ord, Num, Show) @@ -232,10 +231,3 @@ instance SerialiseMLS KeyPackage where serialiseMLS kp = do serialiseMLS kp.tbs serialiseMLSBytes @VarInt kp.signature_ - --------------------------------------------------------------------------------- - -data KeyPackageUpdate = KeyPackageUpdate - { kpupPrevious :: KeyPackageRef, - kpupNext :: KeyPackageRef - } diff --git a/libs/wire-api/src/Wire/API/MLS/Proposal.hs b/libs/wire-api/src/Wire/API/MLS/Proposal.hs index 06998a5157..02f5334ac8 100644 --- a/libs/wire-api/src/Wire/API/MLS/Proposal.hs +++ b/libs/wire-api/src/Wire/API/MLS/Proposal.hs @@ -125,9 +125,9 @@ instance SerialiseMLS PreSharedKeyID where -- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-8.4-6 data Resumption = Resumption - { resUsage :: Word8, - resGroupId :: GroupId, - resEpoch :: Word64 + { usage :: Word8, + groupId :: GroupId, + epoch :: Word64 } deriving stock (Eq, Show, Generic) deriving (Arbitrary) via (GenericUniform Resumption) @@ -141,16 +141,16 @@ instance ParseMLS Resumption where instance SerialiseMLS Resumption where serialiseMLS r = do - serialiseMLS r.resUsage - serialiseMLS r.resGroupId - serialiseMLS r.resEpoch + serialiseMLS r.usage + serialiseMLS r.groupId + serialiseMLS r.epoch -- | 
https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-12.1.5-2 data ReInit = ReInit - { riGroupId :: GroupId, - riProtocolVersion :: ProtocolVersion, - riCipherSuite :: CipherSuite, - riExtensions :: [Extension] + { groupId :: GroupId, + protocolVersion :: ProtocolVersion, + cipherSuite :: CipherSuite, + extensions :: [Extension] } deriving stock (Eq, Show, Generic) deriving (Arbitrary) via (GenericUniform ReInit) @@ -165,15 +165,15 @@ instance ParseMLS ReInit where instance SerialiseMLS ReInit where serialiseMLS ri = do - serialiseMLS ri.riGroupId - serialiseMLS ri.riProtocolVersion - serialiseMLS ri.riCipherSuite - serialiseMLSVector @VarInt serialiseMLS ri.riExtensions + serialiseMLS ri.groupId + serialiseMLS ri.protocolVersion + serialiseMLS ri.cipherSuite + serialiseMLSVector @VarInt serialiseMLS ri.extensions data MessageRange = MessageRange - { mrSender :: KeyPackageRef, - mrFirstGeneration :: Word32, - mrLastGeneration :: Word32 + { sender :: KeyPackageRef, + firstGeneration :: Word32, + lastGeneration :: Word32 } deriving stock (Eq, Show) @@ -189,9 +189,9 @@ instance ParseMLS MessageRange where instance SerialiseMLS MessageRange where serialiseMLS MessageRange {..} = do - serialiseMLS mrSender - serialiseMLS mrFirstGeneration - serialiseMLS mrLastGeneration + serialiseMLS sender + serialiseMLS firstGeneration + serialiseMLS lastGeneration -- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-12.4-3 data ProposalOrRefTag = InlineTag | RefTag diff --git a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs index 6d7a084322..e096d5b0f2 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs @@ -193,7 +193,7 @@ userClientQid :: Qualified UserId -> ClientId -> String userClientQid usr c = show (qUnqualified usr) <> ":" - <> T.unpack (client c) + 
<> T.unpack c.client <> "@" <> T.unpack (domainText (qDomain usr)) diff --git a/services/brig/src/Brig/API/MLS/KeyPackages.hs b/services/brig/src/Brig/API/MLS/KeyPackages.hs index e545af1bf6..53bd3fe164 100644 --- a/services/brig/src/Brig/API/MLS/KeyPackages.hs +++ b/services/brig/src/Brig/API/MLS/KeyPackages.hs @@ -48,10 +48,10 @@ import Wire.API.Team.LegalHold import Wire.API.User.Client uploadKeyPackages :: Local UserId -> ClientId -> KeyPackageUpload -> Handler r () -uploadKeyPackages lusr cid (kpuKeyPackages -> kps) = do +uploadKeyPackages lusr cid kps = do assertMLSEnabled let identity = mkClientIdentity (tUntagged lusr) cid - kps' <- traverse (validateUploadedKeyPackage identity) kps + kps' <- traverse (validateUploadedKeyPackage identity) kps.keyPackages lift . wrapClient $ Data.insertKeyPackages (tUnqualified lusr) cid kps' claimKeyPackages :: @@ -112,17 +112,16 @@ claimRemoteKeyPackages lusr target = do } -- validate all claimed key packages - for_ (kpbEntries bundle) $ \e -> do - let cid = mkClientIdentity (kpbeUser e) (kpbeClient e) + for_ bundle.entries $ \e -> do + let cid = mkClientIdentity e.user e.client kpRaw <- withExceptT (const . clientDataError $ KeyPackageDecodingError) . except . decodeMLS' . kpData - . kpbeKeyPackage - $ e + $ e.keyPackage (refVal, _) <- validateUploadedKeyPackage cid kpRaw - unless (refVal == kpbeRef e) + unless (refVal == e.ref) . throwE . 
clientDataError $ InvalidKeyPackageRef diff --git a/services/brig/test/integration/API/Federation.hs b/services/brig/test/integration/API/Federation.hs index af65db356b..b7580143c1 100644 --- a/services/brig/test/integration/API/Federation.hs +++ b/services/brig/test/integration/API/Federation.hs @@ -426,7 +426,7 @@ testClaimKeyPackages brig fedBrigClient = do ClaimKeyPackageRequest (qUnqualified alice) (qUnqualified bob) liftIO $ - Set.map (\e -> (kpbeUser e, kpbeClient e)) (kpbEntries bundle) + Set.map (\e -> (e.user, e.client)) bundle.entries @?= Set.fromList [(bob, c) | c <- bobClients] -- check that we have one fewer key package now @@ -435,15 +435,15 @@ testClaimKeyPackages brig fedBrigClient = do liftIO $ count @?= 1 -- check that the package refs are correctly mapped - for_ (kpbEntries bundle) $ \e -> do + for_ bundle.entries $ \e -> do cid <- responseJsonError - =<< get (brig . paths ["i", "mls", "key-packages", toHeader (kpbeRef e)]) + =<< get (brig . paths ["i", "mls", "key-packages", toHeader e.ref]) Opt.Opts -> Brig -> Http () testClaimKeyPackagesMLSDisabled opts brig = do diff --git a/services/brig/test/integration/API/MLS.hs b/services/brig/test/integration/API/MLS.hs index 440da8e28d..60614b0916 100644 --- a/services/brig/test/integration/API/MLS.hs +++ b/services/brig/test/integration/API/MLS.hs @@ -124,7 +124,7 @@ testKeyPackageClaim brig = do ) (kpbeUser e, kpbeClient e)) (kpbEntries bundle) @?= Set.fromList [(u, c1), (u, c2)] + liftIO $ Set.map (\e -> (e.user, e.client)) bundle.entries @?= Set.fromList [(u, c1), (u, c2)] checkMapping brig u bundle -- check that we have one fewer key package now @@ -145,7 +145,7 @@ testKeyPackageSelfClaim brig = do -- claim own packages but skip the first do - bundle <- + bundle :: KeyPackageBundle <- responseJsonError =<< post ( brig @@ -154,7 +154,7 @@ testKeyPackageSelfClaim brig = do . 
zUser (qUnqualified u) ) (kpbeUser e, kpbeClient e)) (kpbEntries bundle) @?= Set.fromList [(u, c2)] + liftIO $ Set.map (\e -> (e.user, e.client)) bundle.entries @?= Set.fromList [(u, c2)] -- check that we still have all keypackages for client c1 count <- getKeyPackageCount brig u c1 @@ -163,7 +163,7 @@ testKeyPackageSelfClaim brig = do -- if another user sets skip_own, nothing is skipped do u' <- userQualifiedId <$> randomUser brig - bundle <- + bundle :: KeyPackageBundle <- responseJsonError =<< post ( brig @@ -172,7 +172,7 @@ testKeyPackageSelfClaim brig = do . zUser (qUnqualified u') ) (kpbeUser e, kpbeClient e)) (kpbEntries bundle) @?= Set.fromList [(u, c1), (u, c2)] + liftIO $ Set.map (\e -> (e.user, e.client)) bundle.entries @?= Set.fromList [(u, c1), (u, c2)] -- check package counts again for_ [(c1, 2), (c2, 1)] $ \(c, n) -> do @@ -192,10 +192,10 @@ testKeyPackageRemoteClaim opts brig = do (r, kp) <- generateKeyPackage tmp qcid Nothing pure $ KeyPackageBundleEntry - { kpbeUser = u, - kpbeClient = ciClient qcid, - kpbeRef = kp, - kpbeKeyPackage = KeyPackageData . rmRaw $ r + { user = u, + client = ciClient qcid, + ref = kp, + keyPackage = KeyPackageData . rmRaw $ r } let mockBundle = KeyPackageBundle (Set.fromList entries) (bundle :: KeyPackageBundle, _reqs) <- @@ -216,15 +216,15 @@ testKeyPackageRemoteClaim opts brig = do -- | Check that the package refs are correctly mapped checkMapping :: Brig -> Qualified UserId -> KeyPackageBundle -> Http () checkMapping brig u bundle = - for_ (kpbEntries bundle) $ \e -> do + for_ bundle.entries $ \e -> do cid <- responseJsonError - =<< get (brig . paths ["i", "mls", "key-packages", toHeader (kpbeRef e)]) + =<< get (brig . 
paths ["i", "mls", "key-packages", toHeader e.ref]) Qualified UserId -> Int -> Http ClientId createClient brig u i = diff --git a/services/brig/test/integration/Federation/End2end.hs b/services/brig/test/integration/Federation/End2end.hs index c0ab4ee374..b089275599 100644 --- a/services/brig/test/integration/Federation/End2end.hs +++ b/services/brig/test/integration/Federation/End2end.hs @@ -686,7 +686,7 @@ claimRemoteKeyPackages brig1 brig2 = do for_ bobClients $ \c -> uploadKeyPackages brig2 tmp def bob c 5 - bundle <- + bundle :: KeyPackageBundle <- responseJsonError =<< post ( brig1 @@ -696,7 +696,7 @@ claimRemoteKeyPackages brig1 brig2 = do (kpbeUser e, kpbeClient e)) (kpbEntries bundle) + Set.map (\e -> (e.user, e.client)) bundle.entries @?= Set.fromList [(bob, c) | c <- bobClients] -- bob creates an MLS conversation on domain 2 with alice on domain 1, then sends a @@ -719,7 +719,7 @@ testSendMLSMessage brig1 brig2 galley1 galley2 cannon1 cannon2 = do let aliceClientId = show (userId alice) <> ":" - <> T.unpack (client aliceClient) + <> T.unpack aliceClient.client <> "@" <> T.unpack (domainText (qDomain (userQualifiedId alice))) @@ -769,7 +769,7 @@ testSendMLSMessage brig1 brig2 galley1 galley2 cannon1 cannon2 = do let bobClientId = show (userId bob) <> ":" - <> T.unpack (client bobClient) + <> T.unpack bobClient.client <> "@" <> T.unpack (domainText (qDomain (userQualifiedId bob))) void . 
liftIO $ spawn (cli bobClientId tmp ["init", bobClientId]) Nothing @@ -982,7 +982,7 @@ testSendMLSMessageToSubConversation brig1 brig2 galley1 galley2 cannon1 cannon2 let aliceClientId = show (userId alice) <> ":" - <> T.unpack (client aliceClient) + <> T.unpack aliceClient.client <> "@" <> T.unpack (domainText (qDomain (userQualifiedId alice))) @@ -997,7 +997,7 @@ testSendMLSMessageToSubConversation brig1 brig2 galley1 galley2 cannon1 cannon2 let bobClientId = show (userId bob) <> ":" - <> T.unpack (client bobClient) + <> T.unpack (bobClient.client) <> "@" <> T.unpack (domainText (qDomain (userQualifiedId bob))) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 309a529538..a5c9192cb7 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -122,10 +122,9 @@ import Wire.API.User.Client -- [x] compute new indices for add proposals -- [ ] remove prefixes from rmValue and rmRaw -- [x] remove PublicGroupState and GroupInfoBundle modules --- [ ] remove protobuf definitions of CommitBundle -- [ ] (?) 
rename public_group_state field in conversation table -- [ ] consider adding more integration tests --- [ ] remove prefixes from fields in Commit and Proposal +-- [x] remove prefixes from fields in Commit and Proposal data IncomingMessage = IncomingMessage { epoch :: Epoch, @@ -200,7 +199,7 @@ incomingMessageAuthenticatedContent pmsg = mkIncomingBundle :: RawMLS CommitBundle -> Maybe IncomingBundle mkIncomingBundle bundle = do - imsg <- mkIncomingMessage bundle.rmValue.cbCommitMsg + imsg <- mkIncomingMessage bundle.rmValue.commitMsg content <- case imsg.content of IncomingMessageContentPublic c -> pure c _ -> Nothing @@ -213,9 +212,9 @@ mkIncomingBundle bundle = do groupId = imsg.groupId, sender = content.sender, commit = commit, - rawMessage = bundle.rmValue.cbCommitMsg, - welcome = bundle.rmValue.cbWelcome, - groupInfo = GroupInfoData bundle.rmValue.cbGroupInfo.rmRaw, + rawMessage = bundle.rmValue.commitMsg, + welcome = bundle.rmValue.welcome, + groupInfo = GroupInfoData bundle.rmValue.groupInfo.rmRaw, serialized = bundle.rmRaw } @@ -387,7 +386,7 @@ postMLSCommitBundleToLocalConv qusr c conn bundle lConvOrSubId = do lConvOrSub bundle.epoch action - (cPath bundle.commit.rmValue) + bundle.commit.rmValue.path pure ([], []) storeGroupInfo (idForConvOrSub . 
tUnqualified $ lConvOrSub) bundle.groupInfo @@ -665,7 +664,7 @@ getCommitData senderIdentity lConvOrSub epoch commit = do if epoch == Epoch 0 then addProposedClient senderIdentity else mempty - proposals <- traverse (derefProposal groupId epoch) commit.cProposals + proposals <- traverse (derefProposal groupId epoch) commit.proposals action <- applyProposals mlsMeta groupId proposals pure (creatorAction <> action) @@ -687,7 +686,7 @@ getExternalCommitData senderIdentity lConvOrSub epoch commit = do curEpoch = cnvmlsEpoch mlsMeta groupId = cnvmlsGroupId mlsMeta when (epoch /= curEpoch) $ throwS @'MLSStaleMessage - proposals <- traverse getInlineProposal commit.cProposals + proposals <- traverse getInlineProposal commit.proposals -- According to the spec, an external commit must contain: -- (https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol.html#section-12.2) @@ -758,7 +757,7 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do -- extract leaf node from update path and validate it leafNode <- - upLeaf + (.leaf) <$> note (mlsProtocolError "External commits need an update path") updatePath @@ -817,7 +816,7 @@ processInternalCommit senderIdentity con lConvOrSub epoch action commit = do withCommitLock (cnvmlsGroupId . 
mlsMetaConvOrSub $ convOrSub) epoch $ do -- check all pending proposals are referenced in the commit allPendingProposals <- getAllPendingProposalRefs (cnvmlsGroupId mlsMeta) epoch - let referencedProposals = Set.fromList $ mapMaybe (\x -> preview Proposal._Ref x) (cProposals commit) + let referencedProposals = Set.fromList $ mapMaybe (\x -> preview Proposal._Ref x) commit.proposals unless (all (`Set.member` referencedProposals) allPendingProposals) $ throwS @'MLSCommitMissingReferences diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index ac59f82f81..ad738c73a6 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -88,7 +88,7 @@ cid2Str :: ClientIdentity -> String cid2Str cid = show (ciUser cid) <> ":" - <> T.unpack (client . ciClient $ cid) + <> T.unpack cid.ciClient.client <> "@" <> T.unpack (domainText (ciDomain cid)) @@ -544,10 +544,10 @@ claimRemoteKeyPackages (tUntagged -> qusr) = do (kp, ref) <- generateKeyPackage cid pure $ KeyPackageBundleEntry - { kpbeUser = qusr, - kpbeClient = ciClient cid, - kpbeRef = ref, - kpbeKeyPackage = KeyPackageData (rmRaw kp) + { user = qusr, + client = ciClient cid, + ref = ref, + keyPackage = KeyPackageData (rmRaw kp) } pure bundle @@ -564,10 +564,10 @@ claimKeyPackages cid qusr = do bundleKeyPackages :: KeyPackageBundle -> [(ClientIdentity, ByteString)] bundleKeyPackages bundle = let getEntry be = - ( mkClientIdentity (kpbeUser be) (kpbeClient be), - kpData (kpbeKeyPackage be) + ( mkClientIdentity be.user be.client, + kpData be.keyPackage ) - in map getEntry (toList (kpbEntries bundle)) + in map getEntry (toList bundle.entries) -- | Claim keypackages and create a commit/welcome pair on a given client. -- Note that this alters the state of the group immediately. 
If we want to test From 82969e33d64d74cfc60f29d56d86feb13e043e06 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 18 Apr 2023 16:52:42 +0200 Subject: [PATCH 34/75] Remove prefixes from RawMLS fields --- libs/wire-api/src/Wire/API/MLS/CipherSuite.hs | 4 +- .../wire-api/src/Wire/API/MLS/CommitBundle.hs | 4 +- libs/wire-api/src/Wire/API/MLS/KeyPackage.hs | 14 ++--- libs/wire-api/src/Wire/API/MLS/LeafNode.hs | 12 ++-- libs/wire-api/src/Wire/API/MLS/Message.hs | 12 ++-- .../src/Wire/API/MLS/Serialisation.hs | 8 +-- libs/wire-api/test/unit/Test/Wire/API/MLS.hs | 4 +- .../Brig/API/MLS/KeyPackages/Validation.hs | 6 +- services/brig/test/integration/API/MLS.hs | 2 +- .../brig/test/integration/API/MLS/Util.hs | 2 +- .../test/integration/Federation/End2end.hs | 8 +-- services/galley/src/Galley/API/MLS/Message.hs | 60 +++++++++---------- services/galley/src/Galley/API/MLS/Util.hs | 2 +- services/galley/src/Galley/API/MLS/Welcome.hs | 4 +- .../galley/src/Galley/Cassandra/Instances.hs | 2 +- services/galley/test/integration/API/MLS.hs | 2 +- .../galley/test/integration/API/MLS/Util.hs | 2 +- services/galley/test/integration/API/Util.hs | 14 ++--- 18 files changed, 81 insertions(+), 81 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs index c1b4d00da1..a3571466e4 100644 --- a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs +++ b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs @@ -114,7 +114,7 @@ csVerifySignature MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 pub x sig = fromMaybe False . 
maybeCryptoError $ do pub' <- Ed25519.publicKey pub sig' <- Ed25519.signature sig - pure $ Ed25519.verify pub' x.rmRaw sig' + pure $ Ed25519.verify pub' x.raw sig' -- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-5.2-5 type RefHashInput = SignContent @@ -131,7 +131,7 @@ data SignContent a = SignContent instance SerialiseMLS (SignContent a) where serialiseMLS c = do serialiseMLSBytes @VarInt c.sigLabel - serialiseMLSBytes @VarInt c.content.rmRaw + serialiseMLSBytes @VarInt c.content.raw mkSignContent :: ByteString -> RawMLS a -> SignContent a mkSignContent sigLabel content = diff --git a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs index 053b59bfb3..f828196819 100644 --- a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs +++ b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs @@ -69,8 +69,8 @@ checkCommitBundleF cb = checkOpt name _ = Left ("Redundant occurrence of " <> name) findMessageInStream :: Alternative f => RawMLS Message -> Either Text (CommitBundleF f) -findMessageInStream msg = case msg.rmValue.content of - MessagePublic mp -> case mp.content.rmValue.content of +findMessageInStream msg = case msg.value.content of + MessagePublic mp -> case mp.content.value.content of FramedContentCommit _ -> pure (CommitBundleF (pure msg) empty empty) _ -> Left "unexpected public message" MessageWelcome w -> pure (CommitBundleF empty (pure w) empty) diff --git a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs index 0bdf2a69d9..19e9490993 100644 --- a/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs +++ b/libs/wire-api/src/Wire/API/MLS/KeyPackage.hs @@ -151,8 +151,8 @@ kpRef cs = kpRef' :: RawMLS KeyPackage -> Maybe KeyPackageRef kpRef' kp = kpRef - <$> cipherSuiteTag (kp.rmValue.cipherSuite) - <*> pure (KeyPackageData (rmRaw kp)) + <$> cipherSuiteTag (kp.value.cipherSuite) + <*> pure (KeyPackageData (raw kp)) 
-------------------------------------------------------------------------------- @@ -196,19 +196,19 @@ instance S.ToSchema KeyPackage where declareNamedSchema _ = pure (mlsSwagger "KeyPackage") instance HasField "protocolVersion" KeyPackage ProtocolVersion where - getField = (.tbs.rmValue.protocolVersion) + getField = (.tbs.value.protocolVersion) instance HasField "cipherSuite" KeyPackage CipherSuite where - getField = (.tbs.rmValue.cipherSuite) + getField = (.tbs.value.cipherSuite) instance HasField "initKey" KeyPackage HPKEPublicKey where - getField = (.tbs.rmValue.initKey) + getField = (.tbs.value.initKey) instance HasField "extensions" KeyPackage [Extension] where - getField = (.tbs.rmValue.extensions) + getField = (.tbs.value.extensions) instance HasField "leafNode" KeyPackage LeafNode where - getField = (.tbs.rmValue.leafNode) + getField = (.tbs.value.leafNode) keyPackageIdentity :: KeyPackage -> Either Text ClientIdentity keyPackageIdentity = decodeMLS' @ClientIdentity . (.leafNode.credential.identityData) diff --git a/libs/wire-api/src/Wire/API/MLS/LeafNode.hs b/libs/wire-api/src/Wire/API/MLS/LeafNode.hs index 6e0d15cef4..9e362bd6c7 100644 --- a/libs/wire-api/src/Wire/API/MLS/LeafNode.hs +++ b/libs/wire-api/src/Wire/API/MLS/LeafNode.hs @@ -136,22 +136,22 @@ instance S.ToSchema LeafNode where declareNamedSchema _ = pure (mlsSwagger "LeafNode") instance HasField "encryptionKey" LeafNode HPKEPublicKey where - getField = (.core.rmValue.encryptionKey) + getField = (.core.value.encryptionKey) instance HasField "signatureKey" LeafNode ByteString where - getField = (.core.rmValue.signatureKey) + getField = (.core.value.signatureKey) instance HasField "credential" LeafNode Credential where - getField = (.core.rmValue.credential) + getField = (.core.value.credential) instance HasField "capabilities" LeafNode Capabilities where - getField = (.core.rmValue.capabilities) + getField = (.core.value.capabilities) instance HasField "source" LeafNode LeafNodeSource where - 
getField = (.core.rmValue.source) + getField = (.core.value.source) instance HasField "extensions" LeafNode [Extension] where - getField = (.core.rmValue.extensions) + getField = (.core.value.extensions) -- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-7.2-2 data LeafNodeSource diff --git a/libs/wire-api/src/Wire/API/MLS/Message.hs b/libs/wire-api/src/Wire/API/MLS/Message.hs index bdd2f93f48..8c4e19f854 100644 --- a/libs/wire-api/src/Wire/API/MLS/Message.hs +++ b/libs/wire-api/src/Wire/API/MLS/Message.hs @@ -153,7 +153,7 @@ instance S.ToSchema Message where data PublicMessage = PublicMessage { content :: RawMLS FramedContent, authData :: RawMLS FramedContentAuthData, - -- Present iff content.rmValue.sender is of type Member. + -- Present iff content.value.sender is of type Member. -- https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6.2-4 membershipTag :: Maybe ByteString } @@ -162,8 +162,8 @@ data PublicMessage = PublicMessage instance ParseMLS PublicMessage where parseMLS = do content <- parseMLS - authData <- parseRawMLS (parseFramedContentAuthData (framedContentDataTag (content.rmValue.content))) - membershipTag <- case content.rmValue.sender of + authData <- parseRawMLS (parseFramedContentAuthData (framedContentDataTag (content.value.content))) + membershipTag <- case content.value.sender of SenderMember _ -> Just <$> parseMLSBytes @VarInt _ -> pure Nothing pure @@ -340,7 +340,7 @@ framedContentTBS ctx msgContent = { protocolVersion = defaultProtocolVersion, wireFormat = WireFormatPublicTag, content = msgContent, - groupContext = guard (needsGroupContext msgContent.rmValue.sender) $> ctx + groupContext = guard (needsGroupContext msgContent.value.sender) $> ctx } -- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-6.1-2 @@ -372,8 +372,8 @@ verifyMessageSignature 
:: Bool verifyMessageSignature ctx msgContent authData pubkey = isJust $ do let tbs = mkRawMLS (framedContentTBS ctx msgContent) - sig = authData.rmValue.signature_ - cs <- cipherSuiteTag ctx.rmValue.cipherSuite + sig = authData.value.signature_ + cs <- cipherSuiteTag ctx.value.cipherSuite guard $ csVerifySignature cs pubkey tbs sig -------------------------------------------------------------------------------- diff --git a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs index 1dd394a0ac..ebb4b4307a 100644 --- a/libs/wire-api/src/Wire/API/MLS/Serialisation.hs +++ b/libs/wire-api/src/Wire/API/MLS/Serialisation.hs @@ -264,8 +264,8 @@ decodeMLSWith' p = decodeMLSWith p . LBS.fromStrict -- retain the original serialised bytes (e.g. for signature verification, or to -- forward them verbatim). data RawMLS a = RawMLS - { rmRaw :: ByteString, - rmValue :: a + { raw :: ByteString, + value :: a } deriving stock (Eq, Show, Foldable) @@ -281,7 +281,7 @@ instance (Arbitrary a, SerialiseMLS a) => Arbitrary (RawMLS a) where -- Note that a 'ValueSchema' for the underlying type @a@ is /not/ required. rawMLSSchema :: Text -> (ByteString -> Either Text a) -> ValueSchema NamedSwaggerDoc (RawMLS a) rawMLSSchema name p = - (toBase64Text . rmRaw) + (toBase64Text . raw) .= parsedText name (rawMLSFromText p) mlsSwagger :: Text -> S.NamedSchema @@ -322,7 +322,7 @@ instance ParseMLS a => ParseMLS (RawMLS a) where parseMLS = parseRawMLS parseMLS instance SerialiseMLS (RawMLS a) where - serialiseMLS = putByteString . rmRaw + serialiseMLS = putByteString . 
raw mkRawMLS :: SerialiseMLS a => a -> RawMLS a mkRawMLS x = RawMLS (LBS.toStrict (runPut (serialiseMLS x))) x diff --git a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs index e096d5b0f2..9f9dcd5601 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs @@ -131,7 +131,7 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do void $ spawn (cli qcid2 tmp ["init", qcid2]) Nothing kp :: RawMLS KeyPackage <- decodeMLSError <$> spawn (cli qcid2 tmp ["key-package", "create"]) Nothing - BS.writeFile (tmp qcid2) (rmRaw kp) + BS.writeFile (tmp qcid2) (raw kp) let groupFilename = "group" let gid = GroupId "abcd" @@ -152,7 +152,7 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do message = mkMessage $ MessagePublic pmessage messageFilename = "signed-message.mls" - BS.writeFile (tmp messageFilename) (rmRaw (mkRawMLS message)) + BS.writeFile (tmp messageFilename) (raw (mkRawMLS message)) let signerKeyFilename = "signer-key.bin" BS.writeFile (tmp signerKeyFilename) (convert publicKey) diff --git a/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs b/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs index 35ca999fa2..26de9a143f 100644 --- a/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs +++ b/services/brig/src/Brig/API/MLS/KeyPackages/Validation.hs @@ -48,7 +48,7 @@ validateUploadedKeyPackage :: RawMLS KeyPackage -> Handler r (KeyPackageRef, KeyPackageData) validateUploadedKeyPackage identity kp = do - (cs, lt) <- either mlsProtocolError pure $ validateKeyPackage (Just identity) kp.rmValue + (cs, lt) <- either mlsProtocolError pure $ validateKeyPackage (Just identity) kp.value validateLifetime lt @@ -69,13 +69,13 @@ validateUploadedKeyPackage identity kp = do (mlsProtocolError "No key associated to the given identity and signature scheme") pure mkey - when (key /= LBS.fromStrict 
kp.rmValue.leafNode.signatureKey) $ + when (key /= LBS.fromStrict kp.value.leafNode.signatureKey) $ mlsProtocolError "Unrecognised signature key" ) (\_ -> pure ()) (cidQualifiedClient identity) - let kpd = KeyPackageData kp.rmRaw + let kpd = KeyPackageData kp.raw pure (kpRef cs kpd, kpd) validateLifetime :: Lifetime -> Handler r () diff --git a/services/brig/test/integration/API/MLS.hs b/services/brig/test/integration/API/MLS.hs index 60614b0916..a390313228 100644 --- a/services/brig/test/integration/API/MLS.hs +++ b/services/brig/test/integration/API/MLS.hs @@ -195,7 +195,7 @@ testKeyPackageRemoteClaim opts brig = do { user = u, client = ciClient qcid, ref = kp, - keyPackage = KeyPackageData . rmRaw $ r + keyPackage = KeyPackageData . raw $ r } let mockBundle = KeyPackageBundle (Set.fromList entries) (bundle :: KeyPackageBundle, _reqs) <- diff --git a/services/brig/test/integration/API/MLS/Util.hs b/services/brig/test/integration/API/MLS/Util.hs index 51b9dd5105..e22bd40568 100644 --- a/services/brig/test/integration/API/MLS/Util.hs +++ b/services/brig/test/integration/API/MLS/Util.hs @@ -110,7 +110,7 @@ uploadKeyPackages brig tmp KeyingInfo {..} u c n = do . json defUpdateClient {updateClientMLSPublicKeys = Map.fromList [(Ed25519, pk)]} ) !!! const 200 === statusCode - let upload = object ["key_packages" .= toJSON (map (Base64ByteString . rmRaw) kps)] + let upload = object ["key_packages" .= toJSON (map (Base64ByteString . raw) kps)] post ( brig . 
paths ["mls", "key-packages", "self", toByteString' c] diff --git a/services/brig/test/integration/Federation/End2end.hs b/services/brig/test/integration/Federation/End2end.hs index b089275599..60b3120a5d 100644 --- a/services/brig/test/integration/Federation/End2end.hs +++ b/services/brig/test/integration/Federation/End2end.hs @@ -737,7 +737,7 @@ testSendMLSMessage brig1 brig2 galley1 galley2 cannon1 cannon2 = do { updateClientMLSPublicKeys = Map.singleton Ed25519 - aliceKP.rmValue.leafNode.signatureKey + aliceKP.value.leafNode.signatureKey } put ( brig1 @@ -820,7 +820,7 @@ testSendMLSMessage brig1 brig2 galley1 galley2 cannon1 cannon2 = do liftIO $ BS.writeFile (tmp "group.json") groupJSON -- invite alice - liftIO $ BS.writeFile (tmp aliceClientId) (rmRaw aliceKP) + liftIO $ BS.writeFile (tmp aliceClientId) (raw aliceKP) commit <- liftIO $ spawn @@ -1015,7 +1015,7 @@ testSendMLSMessageToSubConversation brig1 brig2 galley1 galley2 cannon1 cannon2 { updateClientMLSPublicKeys = Map.singleton Ed25519 - aliceKP.rmValue.leafNode.signatureKey + aliceKP.value.leafNode.signatureKey } put ( brig1 @@ -1084,7 +1084,7 @@ testSendMLSMessageToSubConversation brig1 brig2 galley1 galley2 cannon1 cannon2 liftIO $ BS.writeFile (tmp "group.json") groupJSON -- invite alice - liftIO $ BS.writeFile (tmp aliceClientId) (rmRaw aliceKP) + liftIO $ BS.writeFile (tmp aliceClientId) (raw aliceKP) commit <- liftIO $ spawn diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index a5c9192cb7..d249ef7dcc 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -120,7 +120,7 @@ import Wire.API.User.Client -- [x] initialise index maps -- [ ] newtype for leaf node indices -- [x] compute new indices for add proposals --- [ ] remove prefixes from rmValue and rmRaw +-- [ ] remove prefixes from value and raw -- [x] remove PublicGroupState and GroupInfoBundle modules -- [ ] (?) 
rename public_group_state field in conversation table -- [ ] consider adding more integration tests @@ -162,28 +162,28 @@ data IncomingBundle = IncomingBundle } mkIncomingMessage :: RawMLS Message -> Maybe IncomingMessage -mkIncomingMessage msg = case msg.rmValue.content of +mkIncomingMessage msg = case msg.value.content of MessagePublic pmsg -> Just IncomingMessage - { epoch = pmsg.content.rmValue.epoch, - groupId = pmsg.content.rmValue.groupId, + { epoch = pmsg.content.value.epoch, + groupId = pmsg.content.value.groupId, content = IncomingMessageContentPublic IncomingPublicMessageContent - { sender = pmsg.content.rmValue.sender, - content = pmsg.content.rmValue.content, + { sender = pmsg.content.value.sender, + content = pmsg.content.value.content, framedContent = pmsg.content, authData = pmsg.authData }, rawMessage = msg } MessagePrivate pmsg - | pmsg.rmValue.tag == FramedContentApplicationDataTag -> + | pmsg.value.tag == FramedContentApplicationDataTag -> Just IncomingMessage - { epoch = pmsg.rmValue.epoch, - groupId = pmsg.rmValue.groupId, + { epoch = pmsg.value.epoch, + groupId = pmsg.value.groupId, content = IncomingMessageContentPrivate, rawMessage = msg } @@ -199,7 +199,7 @@ incomingMessageAuthenticatedContent pmsg = mkIncomingBundle :: RawMLS CommitBundle -> Maybe IncomingBundle mkIncomingBundle bundle = do - imsg <- mkIncomingMessage bundle.rmValue.commitMsg + imsg <- mkIncomingMessage bundle.value.commitMsg content <- case imsg.content of IncomingMessageContentPublic c -> pure c _ -> Nothing @@ -212,10 +212,10 @@ mkIncomingBundle bundle = do groupId = imsg.groupId, sender = content.sender, commit = commit, - rawMessage = bundle.rmValue.commitMsg, - welcome = bundle.rmValue.welcome, - groupInfo = GroupInfoData bundle.rmValue.groupInfo.rmRaw, - serialized = bundle.rmRaw + rawMessage = bundle.value.commitMsg, + welcome = bundle.value.welcome, + groupInfo = GroupInfoData bundle.value.groupInfo.raw, + serialized = bundle.raw } type MLSMessageStaticErrors = 
@@ -367,7 +367,7 @@ postMLSCommitBundleToLocalConv qusr c conn bundle lConvOrSubId = do (events, newClients) <- case bundle.sender of SenderMember _index -> do - action <- getCommitData senderIdentity lConvOrSub bundle.epoch bundle.commit.rmValue + action <- getCommitData senderIdentity lConvOrSub bundle.epoch bundle.commit.value events <- processInternalCommit senderIdentity @@ -375,24 +375,24 @@ postMLSCommitBundleToLocalConv qusr c conn bundle lConvOrSubId = do lConvOrSub bundle.epoch action - bundle.commit.rmValue + bundle.commit.value pure (events, cmIdentities (paAdd action)) SenderExternal _ -> throw (mlsProtocolError "Unexpected sender") SenderNewMemberProposal -> throw (mlsProtocolError "Unexpected sender") SenderNewMemberCommit -> do - action <- getExternalCommitData senderIdentity lConvOrSub bundle.epoch bundle.commit.rmValue + action <- getExternalCommitData senderIdentity lConvOrSub bundle.epoch bundle.commit.value processExternalCommit senderIdentity lConvOrSub bundle.epoch action - bundle.commit.rmValue.path + bundle.commit.value.path pure ([], []) storeGroupInfo (idForConvOrSub . 
tUnqualified $ lConvOrSub) bundle.groupInfo let cm = membersConvOrSub (tUnqualified lConvOrSub) - unreachables <- propagateMessage qusr lConvOrSub conn bundle.commit.rmRaw cm + unreachables <- propagateMessage qusr lConvOrSub conn bundle.commit.raw cm traverse_ (sendWelcomes lConvOrSub conn newClients) bundle.welcome pure (events, unreachables) @@ -524,7 +524,7 @@ postMLSMessageToLocalConv qusr c con msg convOrSubId = do IncomingMessageContentPrivate -> pure mempty let cm = membersConvOrSub (tUnqualified lConvOrSub) - unreachables <- propagateMessage qusr lConvOrSub con msg.rawMessage.rmRaw cm + unreachables <- propagateMessage qusr lConvOrSub con msg.rawMessage.raw cm pure (events, unreachables) postMLSMessageToRemoteConv :: @@ -554,7 +554,7 @@ postMLSMessageToRemoteConv loc qusr senderClient con msg rConvOrSubId = do { mmsrConvOrSubId = tUnqualified rConvOrSubId, mmsrSender = tUnqualified lusr, mmsrSenderClient = senderClient, - mmsrRawMessage = Base64ByteString msg.rawMessage.rmRaw + mmsrRawMessage = Base64ByteString msg.rawMessage.raw } case resp of MLSMessageResponseError e -> rethrowErrors @MLSMessageStaticErrors e @@ -764,7 +764,7 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do let cs = cnvmlsCipherSuite (mlsMetaConvOrSub (tUnqualified lConvOrSub)) let groupId = cnvmlsGroupId (mlsMetaConvOrSub convOrSub) let extra = LeafNodeTBSExtraCommit groupId idx - case validateLeafNode cs (Just senderIdentity) extra leafNode.rmValue of + case validateLeafNode cs (Just senderIdentity) extra leafNode.value of Left errMsg -> throw $ mlsProtocolError ("Tried to add invalid LeafNode: " <> errMsg) @@ -838,7 +838,7 @@ derefProposal :: Sem r Proposal derefProposal groupId epoch (Ref ref) = do p <- getProposal groupId epoch ref >>= noteS @'MLSProposalNotFound - pure p.rmValue + pure p.value derefProposal _ _ (Inline p) = pure p addProposedClient :: Member (State IndexMap) r => ClientIdentity -> Sem r ProposalAction @@ -878,11 +878,11 @@ 
applyProposal mlsMeta _groupId (AddProposal kp) = do either (\msg -> throw (mlsProtocolError ("Invalid key package in Add proposal: " <> msg))) pure - $ validateKeyPackage Nothing kp.rmValue + $ validateKeyPackage Nothing kp.value unless (mlsMeta.cnvmlsCipherSuite == cs) $ throw (mlsProtocolError "Key package ciphersuite does not match conversation") -- we are not checking lifetime constraints here - cid <- getKeyPackageIdentity kp.rmValue + cid <- getKeyPackageIdentity kp.value addProposedClient cid applyProposal _mlsMeta _groupId (RemoveProposal idx) = do im <- get @@ -901,7 +901,7 @@ checkProposalCipherSuite :: Proposal -> Sem r () checkProposalCipherSuite suite (AddProposal kpRaw) = do - let kp = rmValue kpRaw + let kp = value kpRaw unless (kp.cipherSuite == tagCipherSuite suite) . throw . mlsProtocolError @@ -949,7 +949,7 @@ processProposal qusr lConvOrSub msg pub prop = do unless isMember' $ throwS @'ConvNotFound -- FUTUREWORK: validate the member's conversation role - let propValue = rmValue prop + let propValue = value prop checkProposalCipherSuite suiteTag propValue when (isExternal pub.sender) $ do checkExternalProposalSignature pub prop @@ -966,9 +966,9 @@ checkExternalProposalSignature :: IncomingPublicMessageContent -> RawMLS Proposal -> Sem r () -checkExternalProposalSignature msg prop = case rmValue prop of +checkExternalProposalSignature msg prop = case value prop of AddProposal kp -> do - let pubkey = kp.rmValue.leafNode.signatureKey + let pubkey = kp.value.leafNode.signatureKey ctx = error "TODO: get group context" unless (verifyMessageSignature ctx msg.framedContent msg.authData pubkey) $ throwS @'MLSUnsupportedProposal _ -> pure () -- FUTUREWORK: check signature of other proposals as well @@ -988,7 +988,7 @@ checkExternalProposalUser qusr prop = do loc ( \lusr -> case prop of AddProposal kp -> do - ClientIdentity {ciUser, ciClient} <- getKeyPackageIdentity kp.rmValue + ClientIdentity {ciUser, ciClient} <- getKeyPackageIdentity kp.value -- 
requesting user must match key package owner when (tUnqualified lusr /= ciUser) $ throwS @'MLSUnsupportedProposal -- client referenced in key package must be one of the user's clients diff --git a/services/galley/src/Galley/API/MLS/Util.hs b/services/galley/src/Galley/API/MLS/Util.hs index fa95794513..58c971045e 100644 --- a/services/galley/src/Galley/API/MLS/Util.hs +++ b/services/galley/src/Galley/API/MLS/Util.hs @@ -79,7 +79,7 @@ getPendingBackendRemoveProposals gid epoch = do <$> for proposals ( \case - (Just ProposalOriginBackend, proposal) -> case rmValue proposal of + (Just ProposalOriginBackend, proposal) -> case value proposal of RemoveProposal i -> pure (Just i) _ -> pure Nothing (Just ProposalOriginClient, _) -> pure Nothing diff --git a/services/galley/src/Galley/API/MLS/Welcome.hs b/services/galley/src/Galley/API/MLS/Welcome.hs index 04ff7f55a6..213ad9a865 100644 --- a/services/galley/src/Galley/API/MLS/Welcome.hs +++ b/services/galley/src/Galley/API/MLS/Welcome.hs @@ -83,7 +83,7 @@ sendLocalWelcomes con now welcome lclients = do -- FUTUREWORK: use the conversation ID stored in the key package mapping table let lcnv = qualifyAs lclients (selfConv u) lusr = qualifyAs lclients u - e = Event (tUntagged lcnv) Nothing (tUntagged lusr) now $ EdMLSWelcome welcome.rmRaw + e = Event (tUntagged lcnv) Nothing (tUntagged lusr) now $ EdMLSWelcome welcome.raw in newMessagePush lclients mempty con defMessageMetadata (u, c) e sendRemoteWelcomes :: @@ -94,7 +94,7 @@ sendRemoteWelcomes :: [Remote (UserId, ClientId)] -> Sem r () sendRemoteWelcomes welcome clients = do - let msg = Base64ByteString welcome.rmRaw + let msg = Base64ByteString welcome.raw traverse_ handleError <=< runFederatedConcurrentlyEither clients $ \rcpts -> fedClient @'Galley @"mls-welcome" MLSWelcomeRequest diff --git a/services/galley/src/Galley/Cassandra/Instances.hs b/services/galley/src/Galley/Cassandra/Instances.hs index b315912134..eaeaa87505 100644 --- 
a/services/galley/src/Galley/Cassandra/Instances.hs +++ b/services/galley/src/Galley/Cassandra/Instances.hs @@ -244,7 +244,7 @@ instance Cql ProposalRef where instance Cql (RawMLS Proposal) where ctype = Tagged BlobColumn - toCql = CqlBlob . LBS.fromStrict . rmRaw + toCql = CqlBlob . LBS.fromStrict . raw fromCql (CqlBlob b) = mapLeft T.unpack $ decodeMLS b fromCql _ = Left "Proposal: blob expected" diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 903489220b..02a2c9bec4 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -481,7 +481,7 @@ testAddClientPartial = do kp <- uploadNewKeyPackage bob2 void $ uploadNewKeyPackage bob3 void $ - createAddCommitWithKeyPackages alice1 [(bob2, kp.rmRaw)] + createAddCommitWithKeyPackages alice1 [(bob2, kp.raw)] >>= sendAndConsumeCommitBundle testSendAnotherUsersCommit :: TestM () diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index ad738c73a6..ea69cc88ba 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -547,7 +547,7 @@ claimRemoteKeyPackages (tUntagged -> qusr) = do { user = qusr, client = ciClient cid, ref = ref, - keyPackage = KeyPackageData (rmRaw kp) + keyPackage = KeyPackageData (raw kp) } pure bundle diff --git a/services/galley/test/integration/API/Util.hs b/services/galley/test/integration/API/Util.hs index ca9540f19c..5b3f883e7b 100644 --- a/services/galley/test/integration/API/Util.hs +++ b/services/galley/test/integration/API/Util.hs @@ -2904,7 +2904,7 @@ wsAssertBackendRemoveProposalWithEpoch fromUser convId idx epoch n = do bs <- wsAssertBackendRemoveProposal fromUser (Conv <$> convId) idx n let msg = fromRight (error "Failed to parse Message") $ decodeMLS' @Message bs case msg.content of - MessagePublic pmsg -> liftIO $ pmsg.content.rmValue.epoch @?= epoch + 
MessagePublic pmsg -> liftIO $ pmsg.content.value.epoch @?= epoch _ -> assertFailure "unexpected message content" pure bs @@ -2919,9 +2919,9 @@ wsAssertBackendRemoveProposal fromUser cnvOrSubCnv idx n = do let msg = fromRight (error "Failed to parse Message") $ decodeMLS' @Message bs liftIO $ case msg.content of MessagePublic pmsg -> do - pmsg.content.rmValue.sender @?= SenderExternal 0 - case pmsg.content.rmValue.content of - FramedContentProposal prop -> case prop.rmValue of + pmsg.content.value.sender @?= SenderExternal 0 + case pmsg.content.value.content of + FramedContentProposal prop -> case prop.value of RemoveProposal removedIdx -> removedIdx @?= idx otherProp -> assertFailure $ "Expected RemoveProposal but got " <> show otherProp otherPayload -> assertFailure $ "Expected ProposalMessage but got " <> show otherPayload @@ -2948,9 +2948,9 @@ wsAssertAddProposal fromUser convId n = do let msg = fromRight (error "Failed to parse Message 'MLSPlaintext") $ decodeMLS' @Message bs liftIO $ case msg.content of MessagePublic pmsg -> do - pmsg.content.rmValue.sender @?= SenderExternal 0 - case pmsg.content.rmValue.content of - FramedContentProposal prop -> case prop.rmValue of + pmsg.content.value.sender @?= SenderExternal 0 + case pmsg.content.value.content of + FramedContentProposal prop -> case prop.value of AddProposal _ -> pure () otherProp -> assertFailure $ "Expected AddProposal but got " <> show otherProp otherPayload -> assertFailure $ "Expected ProposalMessage but got " <> show otherPayload From edf46476a20e01d349abd8cf70bc01712c3d8c87 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 21 Apr 2023 13:36:22 +0200 Subject: [PATCH 35/75] Reorganise TODOs --- services/galley/src/Galley/API/MLS/Message.hs | 18 +++++++++--------- services/galley/src/Galley/API/MLS/Util.hs | 2 +- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 
d249ef7dcc..de94c6fc90 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -115,16 +115,20 @@ import Wire.API.User.Client -- - [ ] ? in the update case (in galley), verify that the encryption_key is different -- [ ] validate proposals when processing proposal and commit messages -- [x] remove MissingSenderClient error --- [ ] ? PreSharedKey proposal -- [x] remove all key package ref mapping -- [x] initialise index maps --- [ ] newtype for leaf node indices -- [x] compute new indices for add proposals --- [ ] remove prefixes from value and raw +-- [x] remove prefixes from value and raw -- [x] remove PublicGroupState and GroupInfoBundle modules --- [ ] (?) rename public_group_state field in conversation table --- [ ] consider adding more integration tests -- [x] remove prefixes from fields in Commit and Proposal +-- [ ] move external commit logic to a separate module and improve types +-- [ ] check epoch inside commit lock +-- [x] split executeProposalAction for internal and external commits + +-- [ ] ? consider adding more integration tests +-- [ ] ? rename public_group_state field in conversation table +-- [ ] ? PreSharedKey proposal +-- [ ] ? newtype for leaf node indices data IncomingMessage = IncomingMessage { epoch :: Epoch, @@ -739,10 +743,6 @@ processExternalCommit :: Maybe UpdatePath -> Sem r () processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do - -- TODO from talk with Stefan M - -- [ ] should leaf nodes be calclulated within the commit lock? 
- -- [x] split executeProposalAction for internal and external commits - let convOrSub = tUnqualified lConvOrSub -- only members can join a subconversation diff --git a/services/galley/src/Galley/API/MLS/Util.hs b/services/galley/src/Galley/API/MLS/Util.hs index 58c971045e..ae21850978 100644 --- a/services/galley/src/Galley/API/MLS/Util.hs +++ b/services/galley/src/Galley/API/MLS/Util.hs @@ -109,7 +109,7 @@ withCommitLock gid epoch action = ) (const $ releaseCommitLock gid epoch) $ \_ -> do - -- FUTUREWORK: fetch epoch again and check that it matches + -- TODO: fetch epoch again and check that it matches action where ttl = fromIntegral (600 :: Int) -- 10 minutes From 54c074a652c5fa88b298a1b532fda663e1835397 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 21 Apr 2023 14:21:59 +0200 Subject: [PATCH 36/75] Check epoch again after taking commit lock --- services/galley/src/Galley/API/MLS/Message.hs | 6 +++--- .../galley/src/Galley/API/MLS/SubConversation.hs | 3 ++- services/galley/src/Galley/API/MLS/Util.hs | 14 +++++++++++--- .../galley/src/Galley/Cassandra/Conversation.hs | 8 ++++++++ services/galley/src/Galley/Cassandra/Queries.hs | 6 ++++++ .../galley/src/Galley/Cassandra/SubConversation.hs | 5 +++++ .../galley/src/Galley/Effects/ConversationStore.hs | 2 ++ .../src/Galley/Effects/SubConversationStore.hs | 1 + services/galley/test/integration/API/MLS.hs | 2 +- 9 files changed, 39 insertions(+), 8 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index de94c6fc90..59d630b841 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -122,7 +122,7 @@ import Wire.API.User.Client -- [x] remove PublicGroupState and GroupInfoBundle modules -- [x] remove prefixes from fields in Commit and Proposal -- [ ] move external commit logic to a separate module and improve types --- [ ] check epoch inside commit lock +-- [x] check epoch inside commit 
lock -- [x] split executeProposalAction for internal and external commits -- [ ] ? consider adding more integration tests @@ -770,7 +770,7 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do mlsProtocolError ("Tried to add invalid LeafNode: " <> errMsg) Right _ -> pure () - withCommitLock groupId epoch $ do + withCommitLock (fmap idForConvOrSub lConvOrSub) groupId epoch $ do executeExtCommitProposalAction senderIdentity lConvOrSub action -- increment epoch number @@ -813,7 +813,7 @@ processInternalCommit senderIdentity con lConvOrSub epoch action commit = do let convOrSub = tUnqualified lConvOrSub mlsMeta = mlsMetaConvOrSub convOrSub - withCommitLock (cnvmlsGroupId . mlsMetaConvOrSub $ convOrSub) epoch $ do + withCommitLock (fmap idForConvOrSub lConvOrSub) (cnvmlsGroupId (mlsMetaConvOrSub convOrSub)) epoch $ do -- check all pending proposals are referenced in the commit allPendingProposals <- getAllPendingProposalRefs (cnvmlsGroupId mlsMeta) epoch let referencedProposals = Set.fromList $ mapMaybe (\x -> preview Proposal._Ref x) commit.proposals diff --git a/services/galley/src/Galley/API/MLS/SubConversation.hs b/services/galley/src/Galley/API/MLS/SubConversation.hs index 19e171a5fe..f38f153091 100644 --- a/services/galley/src/Galley/API/MLS/SubConversation.hs +++ b/services/galley/src/Galley/API/MLS/SubConversation.hs @@ -280,9 +280,10 @@ deleteLocalSubConversation :: deleteLocalSubConversation qusr lcnvId scnvId dsc = do assertMLSEnabled let cnvId = tUnqualified lcnvId + lConvOrSubId = qualifyAs lcnvId (SubConv cnvId scnvId) cnv <- getConversationAndCheckMembership qusr lcnvId cs <- cnvmlsCipherSuite <$> noteS @'ConvNotFound (mlsMetadata cnv) - (mlsData, oldGid) <- withCommitLock (dscGroupId dsc) (dscEpoch dsc) $ do + (mlsData, oldGid) <- withCommitLock lConvOrSubId (dscGroupId dsc) (dscEpoch dsc) $ do sconv <- Eff.getSubConversation cnvId scnvId >>= noteS @'ConvNotFound diff --git a/services/galley/src/Galley/API/MLS/Util.hs 
b/services/galley/src/Galley/API/MLS/Util.hs index ae21850978..7091a4989c 100644 --- a/services/galley/src/Galley/API/MLS/Util.hs +++ b/services/galley/src/Galley/API/MLS/Util.hs @@ -27,6 +27,7 @@ import Galley.Effects import Galley.Effects.ConversationStore import Galley.Effects.MemberStore import Galley.Effects.ProposalStore +import Galley.Effects.SubConversationStore import Imports import Polysemy import Polysemy.Resource (Resource, bracket) @@ -40,6 +41,7 @@ import Wire.API.MLS.Group import Wire.API.MLS.LeafNode import Wire.API.MLS.Proposal import Wire.API.MLS.Serialisation +import Wire.API.MLS.SubConversation getLocalConvForUser :: ( Member (ErrorS 'ConvNotFound) r, @@ -93,15 +95,17 @@ withCommitLock :: ( Members '[ Resource, ConversationStore, - ErrorS 'MLSStaleMessage + ErrorS 'MLSStaleMessage, + SubConversationStore ] r ) => + Local ConvOrSubConvId -> GroupId -> Epoch -> Sem r a -> Sem r a -withCommitLock gid epoch action = +withCommitLock lConvOrSubId gid epoch action = bracket ( acquireCommitLock gid epoch ttl >>= \lockAcquired -> when (lockAcquired == NotAcquired) $ @@ -109,7 +113,11 @@ withCommitLock gid epoch action = ) (const $ releaseCommitLock gid epoch) $ \_ -> do - -- TODO: fetch epoch again and check that it matches + actualEpoch <- + fromMaybe (Epoch 0) <$> case tUnqualified lConvOrSubId of + Conv cnv -> getConversationEpoch cnv + SubConv cnv sub -> getSubConversationEpoch cnv sub + unless (actualEpoch == epoch) $ throwS @'MLSStaleMessage action where ttl = fromIntegral (600 :: Int) -- 10 minutes diff --git a/services/galley/src/Galley/Cassandra/Conversation.hs b/services/galley/src/Galley/Cassandra/Conversation.hs index df4667fd9f..98000e9523 100644 --- a/services/galley/src/Galley/Cassandra/Conversation.hs +++ b/services/galley/src/Galley/Cassandra/Conversation.hs @@ -237,6 +237,13 @@ updateConvReceiptMode cid receiptMode = retry x5 $ write Cql.updateConvReceiptMo updateConvMessageTimer :: ConvId -> Maybe Milliseconds -> Client () 
updateConvMessageTimer cid mtimer = retry x5 $ write Cql.updateConvMessageTimer (params LocalQuorum (mtimer, cid)) +getConvEpoch :: ConvId -> Client (Maybe Epoch) +getConvEpoch cid = + (runIdentity =<<) + <$> retry + x1 + (query1 Cql.getConvEpoch (params LocalQuorum (Identity cid))) + updateConvEpoch :: ConvId -> Epoch -> Client () updateConvEpoch cid epoch = retry x5 $ write Cql.updateConvEpoch (params LocalQuorum (epoch, cid)) @@ -459,6 +466,7 @@ interpretConversationStoreToCassandra = interpret $ \case CreateConversation loc nc -> embedClient $ createConversation loc nc CreateMLSSelfConversation lusr -> embedClient $ createMLSSelfConversation lusr GetConversation cid -> embedClient $ getConversation cid + GetConversationEpoch cid -> embedClient $ getConvEpoch cid LookupConvByGroupId gId -> embedClient $ lookupConvByGroupId gId GetConversations cids -> localConversations cids GetConversationMetadata cid -> embedClient $ conversationMeta cid diff --git a/services/galley/src/Galley/Cassandra/Queries.hs b/services/galley/src/Galley/Cassandra/Queries.hs index 53e3ba0bfb..b1ea419595 100644 --- a/services/galley/src/Galley/Cassandra/Queries.hs +++ b/services/galley/src/Galley/Cassandra/Queries.hs @@ -275,6 +275,9 @@ updateConvName = "update conversation set name = ? where conv = ?" updateConvType :: PrepQuery W (ConvType, ConvId) () updateConvType = "update conversation set type = ? where conv = ?" +getConvEpoch :: PrepQuery R (Identity ConvId) (Identity (Maybe Epoch)) +getConvEpoch = "select epoch from conversation where conv = ?" + updateConvEpoch :: PrepQuery W (Epoch, ConvId) () updateConvEpoch = "update conversation set epoch = ? where conv = ?" @@ -340,6 +343,9 @@ updateSubConvGroupInfo = "INSERT INTO subconversation (conv_id, subconv_id, publ selectSubConvGroupInfo :: PrepQuery R (ConvId, SubConvId) (Identity (Maybe GroupInfoData)) selectSubConvGroupInfo = "SELECT public_group_state FROM subconversation WHERE conv_id = ? AND subconv_id = ?" 
+selectSubConvEpoch :: PrepQuery R (ConvId, SubConvId) (Identity (Maybe Epoch)) +selectSubConvEpoch = "SELECT epoch FROM subconversation WHERE conv_id = ? AND subconv_id = ?" + deleteGroupId :: PrepQuery W (Identity GroupId) () deleteGroupId = "DELETE FROM group_id_conv_id WHERE group_id = ?" diff --git a/services/galley/src/Galley/Cassandra/SubConversation.hs b/services/galley/src/Galley/Cassandra/SubConversation.hs index bca060e56e..9dd9dd02d0 100644 --- a/services/galley/src/Galley/Cassandra/SubConversation.hs +++ b/services/galley/src/Galley/Cassandra/SubConversation.hs @@ -79,6 +79,10 @@ selectSubConvGroupInfo :: ConvId -> SubConvId -> Client (Maybe GroupInfoData) selectSubConvGroupInfo convId subConvId = (runIdentity =<<) <$> retry x5 (query1 Cql.selectSubConvGroupInfo (params LocalQuorum (convId, subConvId))) +selectSubConvEpoch :: ConvId -> SubConvId -> Client (Maybe Epoch) +selectSubConvEpoch convId subConvId = + (runIdentity =<<) <$> retry x5 (query1 Cql.selectSubConvEpoch (params LocalQuorum (convId, subConvId))) + setGroupIdForSubConversation :: GroupId -> Qualified ConvId -> SubConvId -> Client () setGroupIdForSubConversation groupId qconv sconv = retry x5 (write Cql.insertGroupIdForSubConversation (params LocalQuorum (groupId, qUnqualified qconv, qDomain qconv, sconv))) @@ -119,6 +123,7 @@ interpretSubConversationStoreToCassandra = interpret $ \case embedClient (insertSubConversation convId subConvId suite epoch groupId mGroupInfo) GetSubConversation convId subConvId -> embedClient (selectSubConversation convId subConvId) GetSubConversationGroupInfo convId subConvId -> embedClient (selectSubConvGroupInfo convId subConvId) + GetSubConversationEpoch convId subConvId -> embedClient (selectSubConvEpoch convId subConvId) SetSubConversationGroupInfo convId subConvId mPgs -> embedClient (updateSubConvGroupInfo convId subConvId mPgs) SetGroupIdForSubConversation gId cid sconv -> embedClient $ setGroupIdForSubConversation gId cid sconv SetSubConversationEpoch 
cid sconv epoch -> embedClient $ setEpochForSubConversation cid sconv epoch diff --git a/services/galley/src/Galley/Effects/ConversationStore.hs b/services/galley/src/Galley/Effects/ConversationStore.hs index b305d98842..fe47bb376c 100644 --- a/services/galley/src/Galley/Effects/ConversationStore.hs +++ b/services/galley/src/Galley/Effects/ConversationStore.hs @@ -28,6 +28,7 @@ module Galley.Effects.ConversationStore -- * Read conversation getConversation, + getConversationEpoch, lookupConvByGroupId, getConversations, getConversationMetadata, @@ -83,6 +84,7 @@ data ConversationStore m a where ConversationStore m Conversation DeleteConversation :: ConvId -> ConversationStore m () GetConversation :: ConvId -> ConversationStore m (Maybe Conversation) + GetConversationEpoch :: ConvId -> ConversationStore m (Maybe Epoch) LookupConvByGroupId :: GroupId -> ConversationStore m (Maybe (Qualified ConvOrSubConvId)) GetConversations :: [ConvId] -> ConversationStore m [Conversation] GetConversationMetadata :: ConvId -> ConversationStore m (Maybe ConversationMetadata) diff --git a/services/galley/src/Galley/Effects/SubConversationStore.hs b/services/galley/src/Galley/Effects/SubConversationStore.hs index ed95279539..4dff138c6a 100644 --- a/services/galley/src/Galley/Effects/SubConversationStore.hs +++ b/services/galley/src/Galley/Effects/SubConversationStore.hs @@ -34,6 +34,7 @@ data SubConversationStore m a where CreateSubConversation :: ConvId -> SubConvId -> CipherSuiteTag -> Epoch -> GroupId -> Maybe GroupInfoData -> SubConversationStore m () GetSubConversation :: ConvId -> SubConvId -> SubConversationStore m (Maybe SubConversation) GetSubConversationGroupInfo :: ConvId -> SubConvId -> SubConversationStore m (Maybe GroupInfoData) + GetSubConversationEpoch :: ConvId -> SubConvId -> SubConversationStore m (Maybe Epoch) SetSubConversationGroupInfo :: ConvId -> SubConvId -> Maybe GroupInfoData -> SubConversationStore m () SetGroupIdForSubConversation :: GroupId -> Qualified 
ConvId -> SubConvId -> SubConversationStore m () SetSubConversationEpoch :: ConvId -> SubConvId -> Epoch -> SubConversationStore m () diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 02a2c9bec4..2a32af580d 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -113,7 +113,7 @@ tests s = testGroup "External commit" [ test s "non-member attempts to join a conversation" testExternalCommitNotMember, - test s "join a conversation with the same client XXX" testExternalCommitSameClient, + test s "join a conversation with the same client" testExternalCommitSameClient, test s "join a conversation with a new client" testExternalCommitNewClient, test s "join a conversation with a new client and resend backend proposals" testExternalCommitNewClientResendBackendProposal ], From 26209c6ed7b0b46409927334d784f2c17c2d49fd Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 21 Apr 2023 16:51:43 +0200 Subject: [PATCH 37/75] Remove MLSPackageRefNotFound error --- libs/wire-api/src/Wire/API/Error/Galley.hs | 3 --- libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs | 2 -- services/galley/src/Galley/API/MLS/Message.hs | 2 -- services/galley/test/integration/API/MLS.hs | 8 ++++---- 4 files changed, 4 insertions(+), 11 deletions(-) diff --git a/libs/wire-api/src/Wire/API/Error/Galley.hs b/libs/wire-api/src/Wire/API/Error/Galley.hs index 1a2c1ee816..b412fa2cdd 100644 --- a/libs/wire-api/src/Wire/API/Error/Galley.hs +++ b/libs/wire-api/src/Wire/API/Error/Galley.hs @@ -72,7 +72,6 @@ data GalleyError MLSNotEnabled | MLSNonEmptyMemberList | MLSDuplicatePublicKey - | MLSKeyPackageRefNotFound | MLSInvalidLeafNodeIndex | MLSUnsupportedMessage | MLSProposalNotFound @@ -201,8 +200,6 @@ type instance MapError 'MLSNonEmptyMemberList = 'StaticError 400 "non-empty-memb type instance MapError 'MLSDuplicatePublicKey = 'StaticError 400 "mls-duplicate-public-key" "MLS public key for 
the given signature scheme already exists" -type instance MapError 'MLSKeyPackageRefNotFound = 'StaticError 404 "mls-key-package-ref-not-found" "A referenced key package could not be mapped to a known client" - type instance MapError 'MLSInvalidLeafNodeIndex = 'StaticError 400 "mls-invalid-leaf-node-index" "A referenced leaf node index points to a blank or non-existing node" type instance MapError 'MLSUnsupportedMessage = 'StaticError 422 "mls-unsupported-message" "Attempted to send a message with an unsupported combination of content type and wire format" diff --git a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs index 5ef98ae396..b0c9dce832 100644 --- a/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs +++ b/libs/wire-api/src/Wire/API/Routes/Public/Galley/MLS.hs @@ -52,7 +52,6 @@ type MLSMessagingAPI = :> CanThrow 'MLSCommitMissingReferences :> CanThrow 'MLSGroupConversationMismatch :> CanThrow 'MLSInvalidLeafNodeIndex - :> CanThrow 'MLSKeyPackageRefNotFound :> CanThrow 'MLSNotEnabled :> CanThrow 'MLSProposalNotFound :> CanThrow 'MLSProtocolErrorTag @@ -90,7 +89,6 @@ type MLSMessagingAPI = :> CanThrow 'MLSCommitMissingReferences :> CanThrow 'MLSGroupConversationMismatch :> CanThrow 'MLSInvalidLeafNodeIndex - :> CanThrow 'MLSKeyPackageRefNotFound :> CanThrow 'MLSNotEnabled :> CanThrow 'MLSProposalNotFound :> CanThrow 'MLSProtocolErrorTag diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 59d630b841..373b5ba033 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -231,7 +231,6 @@ type MLSMessageStaticErrors = ErrorS 'MLSStaleMessage, ErrorS 'MLSProposalNotFound, ErrorS 'MissingLegalholdConsent, - ErrorS 'MLSKeyPackageRefNotFound, ErrorS 'MLSInvalidLeafNodeIndex, ErrorS 'MLSClientMismatch, ErrorS 'MLSUnsupportedProposal, @@ -579,7 +578,6 @@ type HasProposalEffects r = Member 
(Error MLSProtocolError) r, Member (ErrorS 'MLSClientMismatch) r, Member (ErrorS 'MLSInvalidLeafNodeIndex) r, - Member (ErrorS 'MLSKeyPackageRefNotFound) r, Member (ErrorS 'MLSUnsupportedProposal) r, Member ExternalAccess r, Member FederatorAccess r, diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 2a32af580d..51116453ee 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -1394,14 +1394,14 @@ propInvalidEpoch = do do void $ uploadNewKeyPackage dee1 void $ uploadNewKeyPackage charlie1 - setClientGroupState alice1 gsBackup - void $ createAddCommit alice1 [charlie] + setClientGroupState alice1 gsBackup2 + void $ createAddCommit alice1 [charlie] -- --> epoch 2 [prop] <- createAddProposals alice1 [dee] err <- responseJsonError =<< postMessage alice1 (mpMessage prop) - mls {mlsNewMembers = mempty} From 6c3242a9abef2d4c06c3896bf2270c09459dc741 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 21 Apr 2023 17:11:21 +0200 Subject: [PATCH 38/75] Simplify testRemoveUserParent --- services/galley/test/integration/API/MLS.hs | 61 +++++++-------------- 1 file changed, 20 insertions(+), 41 deletions(-) diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 51116453ee..0fd97c456a 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -3183,10 +3183,11 @@ testLeaveRemoteSubConv = do testRemoveUserParent :: TestM () testRemoveUserParent = do [alice, bob, charlie] <- createAndConnectUsers [Nothing, Nothing, Nothing] + let subname = SubConvId "conference" - runMLSTest $ + (qcnv, [alice1, bob1, bob2, _charlie1, _charlie2]) <- runMLSTest $ do - [alice1, bob1, bob2, charlie1, charlie2] <- + clients@[alice1, bob1, bob2, charlie1, charlie2] <- traverse createMLSClient [alice, bob, bob, charlie, charlie] @@ -3194,7 +3195,6 @@ testRemoveUserParent = do (_, qcnv) 
<- setupMLSGroup alice1 void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommitBundle - let subname = SubConvId "conference" void $ createSubConv qcnv bob1 subname let qcs = fmap (flip SubConv subname) qcnv @@ -3202,45 +3202,24 @@ testRemoveUserParent = do for_ [alice1, bob2, charlie1, charlie2] $ \c -> void $ createExternalCommit c Nothing qcs >>= sendAndConsumeCommitBundle - [(_, idxRef1), (_, idxRef2)] <- getClientsFromGroupState alice1 charlie - - -- charlie leaves the main conversation - mlsBracket [alice1, bob1, bob2] $ \wss -> do - liftTest $ do - deleteMemberQualified (qUnqualified charlie) charlie qcnv - !!! const 200 === statusCode - - -- Remove charlie from our state as well - State.modify $ \mls -> - mls - { mlsMembers = Set.difference (mlsMembers mls) (Set.fromList [charlie1, charlie2]) - } - - msg1 <- WS.assertMatchN (5 # Second) wss $ \n -> - wsAssertBackendRemoveProposal charlie (Conv <$> qcnv) idxRef1 n - - traverse_ (uncurry consumeMessage1) (zip [alice1, bob1, bob2] msg1) - - msg2 <- WS.assertMatchN (5 # Second) wss $ \n -> - wsAssertBackendRemoveProposal charlie (Conv <$> qcnv) idxRef2 n + pure (qcnv, clients) - traverse_ (uncurry consumeMessage1) (zip [alice1, bob1, bob2] msg2) + -- charlie leaves the main conversation + deleteMemberQualified (qUnqualified charlie) charlie qcnv + !!! const 200 === statusCode - void $ createPendingProposalCommit alice1 >>= sendAndConsumeCommitBundle + getSubConv (qUnqualified charlie) qcnv subname + !!! const 403 === statusCode - liftTest $ do - getSubConv (qUnqualified charlie) qcnv (SubConvId "conference") - !!! 
const 403 === statusCode - - sub :: PublicSubConversation <- - responseJsonError - =<< getSubConv (qUnqualified bob) qcnv (SubConvId "conference") - void $ createExternalCommit c Nothing qcs >>= sendAndConsumeCommitBundle - [(_, idxRef1)] <- getClientsFromGroupState alice1 alice + [(_, idx1)] <- getClientsFromGroupState alice1 alice -- creator leaves the main conversation mlsBracket [bob1, bob2, charlie1, charlie2] $ \wss -> do @@ -3281,7 +3260,7 @@ testRemoveCreatorParent = do msg <- WS.assertMatchN (5 # Second) wss $ \n -> -- Checks proposal for subconv, parent doesn't get one -- since alice is not notified of her own removal - wsAssertBackendRemoveProposal alice (Conv <$> qcnv) idxRef1 n + wsAssertBackendRemoveProposal alice (Conv <$> qcnv) idx1 n traverse_ (uncurry consumeMessage1) (zip [bob1, bob2, charlie1, charlie2] msg) From 25a3c48336b4628105e9074c3be6fe35e26ed011 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 21 Apr 2023 17:11:21 +0200 Subject: [PATCH 39/75] Simplify testRemoveCreatorParent --- services/galley/test/integration/API/MLS.hs | 76 ++++++++------------- 1 file changed, 29 insertions(+), 47 deletions(-) diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 0fd97c456a..fa29380d7f 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -3224,10 +3224,11 @@ testRemoveUserParent = do testRemoveCreatorParent :: TestM () testRemoveCreatorParent = do [alice, bob, charlie] <- createAndConnectUsers [Nothing, Nothing, Nothing] + let subname = SubConvId "conference" - runMLSTest $ + (qcnv, [_alice1, bob1, bob2, charlie1, charlie2]) <- runMLSTest $ do - [alice1, bob1, bob2, charlie1, charlie2] <- + clients@[alice1, bob1, bob2, charlie1, charlie2] <- traverse createMLSClient [alice, bob, bob, charlie, charlie] @@ -3235,7 +3236,6 @@ testRemoveCreatorParent = do (_, qcnv) <- setupMLSGroup alice1 void $ createAddCommit alice1 [bob, charlie] 
>>= sendAndConsumeCommitBundle - let subname = SubConvId "conference" void $ createSubConv qcnv alice1 subname let qcs = fmap (flip SubConv subname) qcnv @@ -3243,54 +3243,36 @@ testRemoveCreatorParent = do for_ [bob1, bob2, charlie1, charlie2] $ \c -> void $ createExternalCommit c Nothing qcs >>= sendAndConsumeCommitBundle - [(_, idx1)] <- getClientsFromGroupState alice1 alice - - -- creator leaves the main conversation - mlsBracket [bob1, bob2, charlie1, charlie2] $ \wss -> do - liftTest $ do - deleteMemberQualified (qUnqualified alice) alice qcnv - !!! const 200 === statusCode - - -- Remove alice1 from our state as well - State.modify $ \mls -> - mls - { mlsMembers = Set.difference (mlsMembers mls) (Set.fromList [alice1]) - } - - msg <- WS.assertMatchN (5 # Second) wss $ \n -> - -- Checks proposal for subconv, parent doesn't get one - -- since alice is not notified of her own removal - wsAssertBackendRemoveProposal alice (Conv <$> qcnv) idx1 n - - traverse_ (uncurry consumeMessage1) (zip [bob1, bob2, charlie1, charlie2] msg) + pure (qcnv, clients) - void $ createPendingProposalCommit bob1 >>= sendAndConsumeCommitBundle + -- creator leaves the main conversation + deleteMemberQualified (qUnqualified alice) alice qcnv + !!! const 200 === statusCode - liftTest $ do - getSubConv (qUnqualified alice) qcnv subname - !!! const 403 === statusCode + getSubConv (qUnqualified alice) qcnv subname + !!! 
const 403 === statusCode - -- charlie sees updated memberlist - sub :: PublicSubConversation <- - responseJsonError - =<< getSubConv (qUnqualified charlie) qcnv subname - Date: Mon, 24 Apr 2023 14:47:09 +0200 Subject: [PATCH 40/75] Pass correct list of clients to planClientRemoval --- services/galley/src/Galley/API/MLS/Removal.hs | 37 ++++++++++++------- .../src/Galley/API/MLS/SubConversation.hs | 3 +- .../Galley/Cassandra/Conversation/Members.hs | 9 +++-- .../galley/src/Galley/Effects/MemberStore.hs | 3 +- 4 files changed, 32 insertions(+), 20 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Removal.hs b/services/galley/src/Galley/API/MLS/Removal.hs index a862fd30a6..85ea296d39 100644 --- a/services/galley/src/Galley/API/MLS/Removal.hs +++ b/services/galley/src/Galley/API/MLS/Removal.hs @@ -22,6 +22,7 @@ module Galley.API.MLS.Removal ) where +import Data.Bifunctor import Data.Id import qualified Data.Map as Map import Data.Qualified @@ -111,31 +112,35 @@ removeClientsWithClientMapRecursively :: Input Env ] r, + Functor f, Foldable f ) => Local MLSConversation -> - (ConvOrSubConv -> f LeafIndex) -> + (ConvOrSubConv -> f (ClientIdentity, LeafIndex)) -> + -- | Originating user. The resulting proposals will appear to be sent by this user. Qualified UserId -> Sem r () -removeClientsWithClientMapRecursively lMlsConv getIndices qusr = do +removeClientsWithClientMapRecursively lMlsConv getClients qusr = do let mainConv = fmap Conv lMlsConv cm = mcMembers (tUnqualified lMlsConv) - cs = foldMap Map.keysSet $ Map.lookup qusr cm - gid = cnvmlsGroupId . mcMLSData . tUnqualified $ lMlsConv + do + let gid = cnvmlsGroupId . mcMLSData . 
tUnqualified $ lMlsConv + clients = getClients (tUnqualified mainConv) - planClientRemoval gid qusr cs - createAndSendRemoveProposals mainConv (getIndices (tUnqualified mainConv)) qusr cm + planClientRemoval gid (fmap fst clients) + createAndSendRemoveProposals mainConv (fmap snd clients) qusr cm -- remove this client from all subconversations subs <- listSubConversations' (mcId (tUnqualified lMlsConv)) for_ subs $ \sub -> do let subConv = fmap (flip SubConv sub) lMlsConv sgid = cnvmlsGroupId . scMLSData $ sub + clients = getClients (tUnqualified subConv) - planClientRemoval sgid qusr cs + planClientRemoval sgid (fmap fst clients) createAndSendRemoveProposals subConv - (getIndices (tUnqualified subConv)) + (fmap snd clients) qusr cm @@ -155,11 +160,12 @@ removeClient :: Qualified UserId -> ClientId -> Sem r () -removeClient lc qusr cid = do +removeClient lc qusr c = do mMlsConv <- mkMLSConversation (tUnqualified lc) for_ mMlsConv $ \mlsConv -> do - let getIndices = cmLookupIndex (mkClientIdentity qusr cid) . membersConvOrSub - removeClientsWithClientMapRecursively (qualifyAs lc mlsConv) getIndices qusr + let cid = mkClientIdentity qusr c + let getClients = fmap (cid,) . cmLookupIndex cid . membersConvOrSub + removeClientsWithClientMapRecursively (qualifyAs lc mlsConv) getClients qusr -- | Send remove proposals for all clients of the user to the local conversation. removeUser :: @@ -179,8 +185,13 @@ removeUser :: removeUser lc qusr = do mMlsConv <- mkMLSConversation (tUnqualified lc) for_ mMlsConv $ \mlsConv -> do - let getKPs = Map.findWithDefault mempty qusr . membersConvOrSub - removeClientsWithClientMapRecursively (qualifyAs lc mlsConv) getKPs qusr + let getClients :: ConvOrSubConv -> [(ClientIdentity, LeafIndex)] + getClients = + map (first (mkClientIdentity qusr)) + . Map.assocs + . Map.findWithDefault mempty qusr + . 
membersConvOrSub + removeClientsWithClientMapRecursively (qualifyAs lc mlsConv) getClients qusr -- | Convert cassandra subconv maps into SubConversations listSubConversations' :: diff --git a/services/galley/src/Galley/API/MLS/SubConversation.hs b/services/galley/src/Galley/API/MLS/SubConversation.hs index f38f153091..461836174d 100644 --- a/services/galley/src/Galley/API/MLS/SubConversation.hs +++ b/services/galley/src/Galley/API/MLS/SubConversation.hs @@ -35,7 +35,6 @@ import Control.Arrow import Data.Id import qualified Data.Map as Map import Data.Qualified -import qualified Data.Set as Set import Data.Time.Clock import Galley.API.MLS import Galley.API.MLS.Conversation @@ -430,7 +429,7 @@ leaveLocalSubConversation cid lcnv sub = do cmLookupIndex cid (scMembers subConv) let (gid, epoch) = (cnvmlsGroupId &&& cnvmlsEpoch) (scMLSData subConv) -- plan to remove the leaver from the member list - Eff.planClientRemoval gid (cidQualifiedUser cid) . Set.singleton . ciClient $ cid + Eff.planClientRemoval gid (Identity cid) let cm = cmRemoveClient cid (scMembers subConv) if Map.null cm then do diff --git a/services/galley/src/Galley/Cassandra/Conversation/Members.hs b/services/galley/src/Galley/Cassandra/Conversation/Members.hs index 09e0fddab8..5e5fa15496 100644 --- a/services/galley/src/Galley/Cassandra/Conversation/Members.hs +++ b/services/galley/src/Galley/Cassandra/Conversation/Members.hs @@ -47,6 +47,7 @@ import Polysemy.Input import qualified UnliftIO import Wire.API.Conversation.Member hiding (Member) import Wire.API.Conversation.Role +import Wire.API.MLS.Credential import Wire.API.MLS.Group import Wire.API.MLS.LeafNode (LeafIndex) import Wire.API.Provider.Service @@ -350,14 +351,14 @@ addMLSClients groupId (Qualified usr domain) cs = retry x5 . batch $ do addPrepQuery Cql.addMLSClient (groupId, domain, usr, c, fromIntegral idx) -- TODO Could (and should) we use batch instead? 
-planMLSClientRemoval :: GroupId -> Qualified UserId -> Set.Set ClientId -> Client () -planMLSClientRemoval groupId (Qualified usr domain) cs = for_ cs $ \c -> do +planMLSClientRemoval :: Foldable f => GroupId -> f ClientIdentity -> Client () +planMLSClientRemoval groupId cids = for_ cids $ \cid -> do retry x5 $ trans Cql.planMLSClientRemoval ( params LocalQuorum - (groupId, domain, usr, c) + (groupId, ciDomain cid, ciUser cid, ciClient cid) ) removeMLSClients :: GroupId -> Qualified UserId -> Set.Set ClientId -> Client () @@ -396,7 +397,7 @@ interpretMemberStoreToCassandra = interpret $ \case embedClient $ removeLocalMembersFromRemoteConv rcnv uids AddMLSClients lcnv quid cs -> embedClient $ addMLSClients lcnv quid cs - PlanClientRemoval lcnv quid cs -> embedClient $ planMLSClientRemoval lcnv quid cs + PlanClientRemoval lcnv cids -> embedClient $ planMLSClientRemoval lcnv cids RemoveMLSClients lcnv quid cs -> embedClient $ removeMLSClients lcnv quid cs RemoveAllMLSClients gid -> embedClient $ removeAllMLSClients gid LookupMLSClients lcnv -> embedClient $ lookupMLSClients lcnv diff --git a/services/galley/src/Galley/Effects/MemberStore.hs b/services/galley/src/Galley/Effects/MemberStore.hs index b0891bcc7d..f42284b6fc 100644 --- a/services/galley/src/Galley/Effects/MemberStore.hs +++ b/services/galley/src/Galley/Effects/MemberStore.hs @@ -62,6 +62,7 @@ import Galley.Types.UserList import Imports import Polysemy import Wire.API.Conversation.Member hiding (Member) +import Wire.API.MLS.Credential import Wire.API.MLS.Group import Wire.API.MLS.LeafNode import Wire.API.Provider.Service @@ -81,7 +82,7 @@ data MemberStore m a where DeleteMembers :: ConvId -> UserList UserId -> MemberStore m () DeleteMembersInRemoteConversation :: Remote ConvId -> [UserId] -> MemberStore m () AddMLSClients :: GroupId -> Qualified UserId -> Set (ClientId, LeafIndex) -> MemberStore m () - PlanClientRemoval :: GroupId -> Qualified UserId -> Set ClientId -> MemberStore m () + PlanClientRemoval 
:: Foldable f => GroupId -> f ClientIdentity -> MemberStore m () RemoveMLSClients :: GroupId -> Qualified UserId -> Set ClientId -> MemberStore m () RemoveAllMLSClients :: GroupId -> MemberStore m () LookupMLSClients :: GroupId -> MemberStore m ClientMap From aebb275cc35f14faa8b15f5750b5058603ea588f Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Mon, 24 Apr 2023 15:21:58 +0200 Subject: [PATCH 41/75] Fix assertion in external add proposal test --- services/galley/test/integration/API/MLS.hs | 8 +++++--- services/galley/test/integration/API/Util.hs | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index fa29380d7f..b1f1ae7aac 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -1009,7 +1009,7 @@ testExternalCommitNewClientResendBackendProposal = do forM_ [bob1, bob2] uploadNewKeyPackage (_, qcnv) <- setupMLSGroup alice1 void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle - Just (_, kpBob2) <- find (\(ci, _) -> ci == bob2) <$> getClientsFromGroupState alice1 bob + Just (_, bobIdx2) <- find (\(ci, _) -> ci == bob2) <$> getClientsFromGroupState alice1 bob mlsBracket [alice1, bob1] $ \[wsA, wsB] -> do liftTest $ @@ -1024,7 +1024,7 @@ testExternalCommitNewClientResendBackendProposal = do } WS.assertMatchN_ (5 # WS.Second) [wsA, wsB] $ - wsAssertBackendRemoveProposalWithEpoch bob qcnv kpBob2 (Epoch 1) + wsAssertBackendRemoveProposalWithEpoch bob qcnv bobIdx2 (Epoch 1) [bob3, bob4] <- for [bob, bob] $ \qusr' -> do ci <- createMLSClient qusr' @@ -1035,6 +1035,7 @@ testExternalCommitNewClientResendBackendProposal = do void $ createExternalAddProposal bob3 >>= sendAndConsumeMessage + WS.assertMatchN_ (5 # WS.Second) [wsA, wsB] $ void . 
wsAssertAddProposal bob qcnv @@ -1042,6 +1043,7 @@ testExternalCommitNewClientResendBackendProposal = do ecEvents <- sendAndConsumeCommitBundle mp liftIO $ assertBool "No events after external commit expected" (null ecEvents) + WS.assertMatchN_ (5 # WS.Second) [wsA, wsB] $ wsAssertMLSMessage (fmap Conv qcnv) bob (mpMessage mp) @@ -1049,7 +1051,7 @@ testExternalCommitNewClientResendBackendProposal = do -- proposal for bob3 has to replayed by the client and is thus not found -- here. WS.assertMatchN_ (5 # WS.Second) [wsA, wsB] $ - wsAssertBackendRemoveProposalWithEpoch bob qcnv kpBob2 (Epoch 2) + wsAssertBackendRemoveProposalWithEpoch bob qcnv bobIdx2 (Epoch 2) WS.assertNoEvent (2 # WS.Second) [wsA, wsB] testAppMessage :: TestM () diff --git a/services/galley/test/integration/API/Util.hs b/services/galley/test/integration/API/Util.hs index 5b3f883e7b..d00521efdd 100644 --- a/services/galley/test/integration/API/Util.hs +++ b/services/galley/test/integration/API/Util.hs @@ -2948,7 +2948,7 @@ wsAssertAddProposal fromUser convId n = do let msg = fromRight (error "Failed to parse Message 'MLSPlaintext") $ decodeMLS' @Message bs liftIO $ case msg.content of MessagePublic pmsg -> do - pmsg.content.value.sender @?= SenderExternal 0 + pmsg.content.value.sender @?= SenderNewMemberProposal case pmsg.content.value.content of FramedContentProposal prop -> case prop.value of AddProposal _ -> pure () From 85c0d6aa5ed4bd37560971763ba5cd71af3189da Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Mon, 24 Apr 2023 15:22:08 +0200 Subject: [PATCH 42/75] Propagate actual message, not just commit --- services/galley/src/Galley/API/MLS/Message.hs | 15 +++++++++------ services/galley/src/Galley/API/MLS/Propagate.hs | 9 +++++---- services/galley/src/Galley/API/MLS/Removal.hs | 5 ++--- 3 files changed, 16 insertions(+), 13 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 373b5ba033..778d938d67 100644 --- 
a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -395,7 +395,7 @@ postMLSCommitBundleToLocalConv qusr c conn bundle lConvOrSubId = do storeGroupInfo (idForConvOrSub . tUnqualified $ lConvOrSub) bundle.groupInfo let cm = membersConvOrSub (tUnqualified lConvOrSub) - unreachables <- propagateMessage qusr lConvOrSub conn bundle.commit.raw cm + unreachables <- propagateMessage qusr lConvOrSub conn bundle.rawMessage cm traverse_ (sendWelcomes lConvOrSub conn newClients) bundle.welcome pure (events, unreachables) @@ -527,7 +527,7 @@ postMLSMessageToLocalConv qusr c con msg convOrSubId = do IncomingMessageContentPrivate -> pure mempty let cm = membersConvOrSub (tUnqualified lConvOrSub) - unreachables <- propagateMessage qusr lConvOrSub con msg.rawMessage.raw cm + unreachables <- propagateMessage qusr lConvOrSub con msg.rawMessage cm pure (events, unreachables) postMLSMessageToRemoteConv :: @@ -964,11 +964,14 @@ checkExternalProposalSignature :: IncomingPublicMessageContent -> RawMLS Proposal -> Sem r () -checkExternalProposalSignature msg prop = case value prop of +checkExternalProposalSignature _msg prop = case value prop of AddProposal kp -> do - let pubkey = kp.value.leafNode.signatureKey - ctx = error "TODO: get group context" - unless (verifyMessageSignature ctx msg.framedContent msg.authData pubkey) $ throwS @'MLSUnsupportedProposal + let _pubkey = kp.value.leafNode.signatureKey + _ctx = error "TODO: get group context" + -- TODO + unless True $ + -- unless (verifyMessageSignature ctx msg.framedContent msg.authData pubkey) $ + throwS @'MLSUnsupportedProposal _ -> pure () -- FUTUREWORK: check signature of other proposals as well -- check owner/subject of the key package exists and belongs to the user diff --git a/services/galley/src/Galley/API/MLS/Propagate.hs b/services/galley/src/Galley/API/MLS/Propagate.hs index 31a60d97eb..10d0dcedeb 100644 --- a/services/galley/src/Galley/API/MLS/Propagate.hs +++ 
b/services/galley/src/Galley/API/MLS/Propagate.hs @@ -44,6 +44,7 @@ import Wire.API.Federation.API import Wire.API.Federation.API.Galley import Wire.API.Federation.Error import Wire.API.MLS.Message +import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation import Wire.API.Message @@ -58,10 +59,10 @@ propagateMessage :: Qualified UserId -> Local ConvOrSubConv -> Maybe ConnId -> - ByteString -> + RawMLS Message -> ClientMap -> Sem r UnreachableUsers -propagateMessage qusr lConvOrSub con raw cm = do +propagateMessage qusr lConvOrSub con msg cm = do now <- input @UTCTime let mlsConv = convOfConvOrSub <$> lConvOrSub lmems = mcLocalMembers . tUnqualified $ mlsConv @@ -77,7 +78,7 @@ propagateMessage qusr lConvOrSub con raw cm = do SubConv c s -> (mcId c, Just (scSubConvId s)) qcnv = fst <$> qt sconv = snd (qUnqualified qt) - e = Event qcnv sconv qusr now $ EdMLSMessage raw + e = Event qcnv sconv qusr now $ EdMLSMessage msg.raw mkPush :: UserId -> ClientId -> MessagePush 'NormalMessage mkPush u c = newMessagePush mlsConv botMap con mm (u, c) e runMessagePush mlsConv (Just qcnv) $ @@ -95,7 +96,7 @@ propagateMessage qusr lConvOrSub con raw cm = do rmmMetadata = mm, rmmConversation = qUnqualified qcnv, rmmRecipients = rs >>= remoteMemberMLSClients, - rmmMessage = Base64ByteString raw + rmmMessage = Base64ByteString msg.raw } where localMemberMLSClients :: Local x -> LocalMember -> [(UserId, ClientId)] diff --git a/services/galley/src/Galley/API/MLS/Removal.hs b/services/galley/src/Galley/API/MLS/Removal.hs index 85ea296d39..2a7e977dfd 100644 --- a/services/galley/src/Galley/API/MLS/Removal.hs +++ b/services/galley/src/Galley/API/MLS/Removal.hs @@ -89,15 +89,14 @@ createAndSendRemoveProposals lConvOrSubConv indices qusr cm = do (cnvmlsGroupId meta) (cnvmlsEpoch meta) (FramedContentProposal proposal) - msg = mkMessage (MessagePublic pmsg) - msgEncoded = encodeMLS' msg + msg = mkRawMLS (mkMessage (MessagePublic pmsg)) storeProposal (cnvmlsGroupId meta) (cnvmlsEpoch 
meta) (publicMessageRef (cnvmlsCipherSuite meta) pmsg) ProposalOriginBackend proposal - propagateMessage qusr lConvOrSubConv Nothing msgEncoded cm + propagateMessage qusr lConvOrSubConv Nothing msg cm removeClientsWithClientMapRecursively :: ( Members From 041e93caaa858c6fc89cb4ccfba062c860160466 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 25 Apr 2023 11:31:05 +0200 Subject: [PATCH 43/75] Fix signature calculation when generating messages --- libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs | 3 +-- libs/wire-api/src/Wire/API/MLS/CipherSuite.hs | 6 ++++++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs b/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs index d941b69efa..5daf745bfb 100644 --- a/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs +++ b/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs @@ -24,7 +24,6 @@ module Wire.API.MLS.AuthenticatedContent where import Crypto.PubKey.Ed25519 -import qualified Data.ByteArray as BA import Imports import Wire.API.MLS.CipherSuite import Wire.API.MLS.Context @@ -85,7 +84,7 @@ mkSignedPublicMessage priv pub gid epoch payload = content = framedContent, groupContext = Nothing } - sig = BA.convert $ sign priv pub (encodeMLS' tbs) + sig = signWithLabel "FramedContentTBS" priv pub (mkRawMLS tbs) in PublicMessage { content = framedContent, authData = mkRawMLS (FramedContentAuthData sig Nothing), diff --git a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs index a3571466e4..c4fc037648 100644 --- a/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs +++ b/libs/wire-api/src/Wire/API/MLS/CipherSuite.hs @@ -36,6 +36,7 @@ module Wire.API.MLS.CipherSuite csHash, csVerifySignatureWithLabel, csVerifySignature, + signWithLabel, ) where @@ -50,6 +51,7 @@ import qualified Data.Aeson as Aeson import Data.Aeson.Types (FromJSON (..), FromJSONKey (..), ToJSON (..), ToJSONKey (..)) import qualified 
Data.Aeson.Types as Aeson import Data.ByteArray hiding (index) +import qualified Data.ByteArray as BA import Data.Proxy import Data.Schema import qualified Data.Swagger as S @@ -150,6 +152,10 @@ csVerifySignatureWithLabel :: csVerifySignatureWithLabel cs pub label x sig = csVerifySignature cs pub (mkRawMLS (mkSignContent label x)) sig +-- FUTUREWORK: generalise to arbitrary ciphersuites +signWithLabel :: ByteString -> Ed25519.SecretKey -> Ed25519.PublicKey -> RawMLS a -> ByteString +signWithLabel sigLabel priv pub x = BA.convert $ Ed25519.sign priv pub (encodeMLS' (mkSignContent sigLabel x)) + csSignatureScheme :: CipherSuiteTag -> SignatureSchemeTag csSignatureScheme MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 = Ed25519 From 805ed96abcd720f02cc526dbe444e48f7bf9f84e Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 25 Apr 2023 11:31:44 +0200 Subject: [PATCH 44/75] Pass removal key to mls-test-cli on group creation --- services/galley/test/integration/API/MLS.hs | 2 +- .../galley/test/integration/API/MLS/Util.hs | 21 ++++++++++++------- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index b1f1ae7aac..7ee779c3b5 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -248,7 +248,7 @@ tests s = test s "client of a remote user joins subconversation" testRemoteUserJoinSubConv, test s "delete subconversation as a remote member" (testRemoteMemberDeleteSubConv True), test s "delete subconversation as a remote non-member" (testRemoteMemberDeleteSubConv False), - test s "delete parent conversation of a remote subconveration" testDeleteRemoteParentOfSubConv + test s "delete parent conversation of a remote subconversation" testDeleteRemoteParentOfSubConv ] ], testGroup diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index ea69cc88ba..1d7a9ec105 
100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -471,7 +471,17 @@ resetGroup cid qcs gid = do resetClientGroup :: ClientIdentity -> GroupId -> MLSTest () resetClientGroup cid gid = do - groupJSON <- mlscli cid ["group", "create", T.unpack (toBase64Text (unGroupId gid))] Nothing + bd <- State.gets mlsBaseDir + groupJSON <- + mlscli + cid + [ "group", + "create", + "--removal-key", + bd "removal.key", + T.unpack (toBase64Text (unGroupId gid)) + ] + Nothing setClientGroupState cid groupJSON getConvId :: MLSTest (Qualified ConvOrSubConvId) @@ -836,8 +846,7 @@ consumeMessage msg = do consumeMessage1 cid (mpMessage msg) consumeMessage1 :: HasCallStack => ClientIdentity -> ByteString -> MLSTest () -consumeMessage1 cid msg = do - bd <- State.gets mlsBaseDir +consumeMessage1 cid msg = void $ mlscli cid @@ -846,8 +855,6 @@ consumeMessage1 cid msg = do "", "--group-out", "", - "--signer-key", - bd "removal.key", "-" ] (Just msg) @@ -916,8 +923,8 @@ mlsBracket clients k = do readGroupState :: ByteString -> [(ClientIdentity, LeafIndex)] readGroupState j = do - (node, n) <- zip (j ^.. key "group" . key "public_group" . key "treesync" . key "tree" . key "leaf_nodes" . _Array . traverse . key "node") [0 ..] - case node ^? key "leaf_node" of + (node, n) <- zip (j ^.. key "group" . key "public_group" . key "treesync" . key "tree" . key "leaf_nodes" . _Array . traverse) [0 ..] + case node ^? key "node" of Just leafNode -> do identityBytes <- leafNode ^.. key "payload" . key "credential" . key "credential" . key "Basic" . key "identity" . key "vec" let identity = BS.pack (identityBytes ^.. _Array . traverse . _Integer . 
to fromIntegral) From 8c567a4190aa7166c7e320bf33ddbcc05c0c98bb Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 25 Apr 2023 15:10:01 +0200 Subject: [PATCH 45/75] Take pending clients into account in removal logic --- services/galley/src/Galley/API/MLS/Message.hs | 41 ++++++++----------- .../src/Galley/Cassandra/Conversation/MLS.hs | 4 -- .../Galley/Cassandra/Conversation/Members.hs | 1 - .../galley/src/Galley/Effects/MemberStore.hs | 2 - services/galley/test/integration/API/MLS.hs | 2 + 5 files changed, 18 insertions(+), 32 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 778d938d67..ee8fb3a823 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -1064,19 +1064,26 @@ executeIntCommitProposalAction senderIdentity con lconvOrSub action = do -- Furthermore, subconversation clients can be removed arbitrarily, so this -- processing is only necessary for main conversations. In the -- subconversation case, an empty list is returned. 
- removedUsers <- case convOrSub of + membersToRemove <- case convOrSub of SubConv _ _ -> pure [] Conv _ -> mapMaybe hush <$$> for (Map.assocs (paRemove action)) $ \(qtarget, Map.keysSet -> clients) -> runError @() $ do - -- fetch clients from brig - clientInfo <- Set.map ciId <$> getClientInfo lconvOrSub qtarget ss - -- if the clients being removed don't exist, consider this as a removal of - -- type 2, and skip it - when (Set.null (clientInfo `Set.intersection` clients)) $ - throw () - pure (qtarget, clients) + let clientsInConv = Map.keysSet (Map.findWithDefault mempty qtarget cm) + let removedClients = Set.intersection clients clientsInConv - membersToRemove <- catMaybes <$> for removedUsers (uncurry (checkRemoval (is _SubConv convOrSub) cm)) + -- ignore user if none of their clients are being removed + when (Set.null removedClients) $ throw () + + -- return error if the user is trying to remove themself + when (cidQualifiedUser senderIdentity == qtarget) $ + throwS @'MLSSelfRemovalNotAllowed + + -- FUTUREWORK: add tests against this situation for conv v subconv + when (not (is _SubConv convOrSub) && removedClients /= clientsInConv) $ do + -- FUTUREWORK: turn this error into a proper response + throwS @'MLSClientMismatch + + pure qtarget -- for each user, we compare their clients with the ones being added to the conversation for_ newUserClients $ \(qtarget, newclients) -> case Map.lookup qtarget cm of @@ -1152,22 +1159,6 @@ executeIntCommitProposalAction senderIdentity con lconvOrSub action = do -- TODO: increment epoch here instead of in the calling site pure (addEvents <> removeEvents) - where - checkRemoval :: - Bool -> - ClientMap -> - Qualified UserId -> - Set ClientId -> - Sem r (Maybe (Qualified UserId)) - checkRemoval isSubConv cm qtarget clients = do - let clientsInConv = Map.keysSet (Map.findWithDefault mempty qtarget cm) - -- FUTUREWORK: add tests against this situation for conv v subconv - when (not isSubConv && clients /= clientsInConv) $ do - -- 
FUTUREWORK: turn this error into a proper response - throwS @'MLSClientMismatch - when (cidQualifiedUser senderIdentity == qtarget) $ - throwS @'MLSSelfRemovalNotAllowed - pure (Just qtarget) executeExtCommitProposalAction :: forall r. diff --git a/services/galley/src/Galley/Cassandra/Conversation/MLS.hs b/services/galley/src/Galley/Cassandra/Conversation/MLS.hs index 80d488c728..06e2e65d91 100644 --- a/services/galley/src/Galley/Cassandra/Conversation/MLS.hs +++ b/services/galley/src/Galley/Cassandra/Conversation/MLS.hs @@ -19,7 +19,6 @@ module Galley.Cassandra.Conversation.MLS ( acquireCommitLock, releaseCommitLock, lookupMLSClients, - lookupMLSLeafIndices, lookupMLSClientLeafIndices, ) where @@ -71,6 +70,3 @@ lookupMLSClientLeafIndices groupId = do lookupMLSClients :: GroupId -> Client ClientMap lookupMLSClients = fmap fst . lookupMLSClientLeafIndices - -lookupMLSLeafIndices :: GroupId -> Client IndexMap -lookupMLSLeafIndices = fmap snd . lookupMLSClientLeafIndices diff --git a/services/galley/src/Galley/Cassandra/Conversation/Members.hs b/services/galley/src/Galley/Cassandra/Conversation/Members.hs index 5e5fa15496..97bcb237e3 100644 --- a/services/galley/src/Galley/Cassandra/Conversation/Members.hs +++ b/services/galley/src/Galley/Cassandra/Conversation/Members.hs @@ -401,5 +401,4 @@ interpretMemberStoreToCassandra = interpret $ \case RemoveMLSClients lcnv quid cs -> embedClient $ removeMLSClients lcnv quid cs RemoveAllMLSClients gid -> embedClient $ removeAllMLSClients gid LookupMLSClients lcnv -> embedClient $ lookupMLSClients lcnv - LookupMLSLeafIndices lcnv -> embedClient $ lookupMLSLeafIndices lcnv LookupMLSClientLeafIndices lcnv -> embedClient $ lookupMLSClientLeafIndices lcnv diff --git a/services/galley/src/Galley/Effects/MemberStore.hs b/services/galley/src/Galley/Effects/MemberStore.hs index f42284b6fc..bb8d1c6c33 100644 --- a/services/galley/src/Galley/Effects/MemberStore.hs +++ b/services/galley/src/Galley/Effects/MemberStore.hs @@ -43,7 +43,6 @@ 
module Galley.Effects.MemberStore removeMLSClients, removeAllMLSClients, lookupMLSClients, - lookupMLSLeafIndices, lookupMLSClientLeafIndices, -- * Delete members @@ -86,7 +85,6 @@ data MemberStore m a where RemoveMLSClients :: GroupId -> Qualified UserId -> Set ClientId -> MemberStore m () RemoveAllMLSClients :: GroupId -> MemberStore m () LookupMLSClients :: GroupId -> MemberStore m ClientMap - LookupMLSLeafIndices :: GroupId -> MemberStore m IndexMap LookupMLSClientLeafIndices :: GroupId -> MemberStore m (ClientMap, IndexMap) makeSem ''MemberStore diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 7ee779c3b5..0a8416c06f 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -1609,6 +1609,8 @@ testBackendRemoveProposalRecreateClient = do alice2 <- createMLSClient alice proposal <- mlsBracket [alice2] $ \[wsA] -> do + -- alice2 joins the conversation, causing the external remove proposal to + -- be re-established void $ createExternalCommit alice2 Nothing cnv >>= sendAndConsumeCommitBundle From 52d7f14a827104b3f9dba1bb22b2ad820e80d4c1 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 25 Apr 2023 15:41:32 +0200 Subject: [PATCH 46/75] Fix assertion in remove proposal test --- services/galley/test/integration/API/MLS.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 0a8416c06f..c0b3d475d8 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -1833,7 +1833,7 @@ testBackendRemoveProposalLocalConvLocalClient = do void $ createAddCommit alice1 [bob, charlie] >>= sendAndConsumeCommitBundle Just (_, idxBob1) <- find (\(ci, _) -> ci == bob1) <$> getClientsFromGroupState alice1 bob - mlsBracket [alice1, bob1] $ \[wsA, wsB] -> do + mlsBracket [alice1, bob1, charlie1] $ \[wsA, wsB, wsC] 
-> do liftTest $ deleteClient (ciUser bob1) (ciClient bob1) (Just defPassword) !!! statusCode === const 200 @@ -1855,7 +1855,7 @@ testBackendRemoveProposalLocalConvLocalClient = do mp <- createPendingProposalCommit charlie1 events <- sendAndConsumeCommitBundle mp liftIO $ events @?= [] - WS.assertMatchN_ (5 # WS.Second) [wsA, wsB] $ \n -> do + WS.assertMatchN_ (5 # WS.Second) [wsA, wsC] $ \n -> do wsAssertMLSMessage (Conv <$> qcnv) charlie (mpMessage mp) n testBackendRemoveProposalLocalConvRemoteClient :: TestM () From 3b764fae95403ae95738359621db6579ff1a8c2b Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Wed, 26 Apr 2023 09:17:53 +0000 Subject: [PATCH 47/75] apply linter suggestions --- libs/wire-api/src/Wire/API/MLS/Credential.hs | 2 -- libs/wire-api/src/Wire/API/MLS/Extension.hs | 4 ---- libs/wire-api/src/Wire/API/MLS/Message.hs | 3 --- libs/wire-api/src/Wire/API/MLS/Validation.hs | 2 +- libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs | 3 +-- services/galley/test/integration/API/MLS/Util.hs | 3 +-- 6 files changed, 3 insertions(+), 14 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/Credential.hs b/libs/wire-api/src/Wire/API/MLS/Credential.hs index 0ec9d8d3a2..f614269b83 100644 --- a/libs/wire-api/src/Wire/API/MLS/Credential.hs +++ b/libs/wire-api/src/Wire/API/MLS/Credential.hs @@ -1,5 +1,3 @@ -{-# LANGUAGE GeneralizedNewtypeDeriving #-} - -- This file is part of the Wire Server implementation. -- -- Copyright (C) 2022 Wire Swiss GmbH diff --git a/libs/wire-api/src/Wire/API/MLS/Extension.hs b/libs/wire-api/src/Wire/API/MLS/Extension.hs index c40c99ce1d..eab027e715 100644 --- a/libs/wire-api/src/Wire/API/MLS/Extension.hs +++ b/libs/wire-api/src/Wire/API/MLS/Extension.hs @@ -1,7 +1,3 @@ -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE StandaloneKindSignatures #-} -{-# LANGUAGE TemplateHaskell #-} - -- This file is part of the Wire Server implementation. 
-- -- Copyright (C) 2022 Wire Swiss GmbH diff --git a/libs/wire-api/src/Wire/API/MLS/Message.hs b/libs/wire-api/src/Wire/API/MLS/Message.hs index 8c4e19f854..df56293ccf 100644 --- a/libs/wire-api/src/Wire/API/MLS/Message.hs +++ b/libs/wire-api/src/Wire/API/MLS/Message.hs @@ -1,6 +1,3 @@ -{-# LANGUAGE StandaloneKindSignatures #-} -{-# LANGUAGE TemplateHaskell #-} - -- This file is part of the Wire Server implementation. -- -- Copyright (C) 2022 Wire Swiss GmbH diff --git a/libs/wire-api/src/Wire/API/MLS/Validation.hs b/libs/wire-api/src/Wire/API/MLS/Validation.hs index c99450c70e..5b2eeec36f 100644 --- a/libs/wire-api/src/Wire/API/MLS/Validation.hs +++ b/libs/wire-api/src/Wire/API/MLS/Validation.hs @@ -116,7 +116,7 @@ validateSource t s = do "Expected '" <> t.name <> "' source, got '" - <> (leafNodeSourceTag s).name + <> t'.name <> "'" validateCapabilities :: Capabilities -> Either Text () diff --git a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs index 8562747a35..df82c017e0 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/Roundtrip/MLS.hs @@ -180,8 +180,7 @@ instance Arbitrary TestCommitBundle where mkRawMLS . 
unMessageGenerator @(FramedContentGenerator Sender CommitPayload) <$> arbitrary welcome <- arbitrary - gi <- arbitrary - pure $ CommitBundle commitMsg welcome gi + CommitBundle commitMsg welcome <$> arbitrary newtype CommitPayload = CommitPayload {unCommitPayload :: RawMLS Commit} deriving newtype (Arbitrary) diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index 1d7a9ec105..4884c73dc5 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -549,7 +549,7 @@ getUserClients qusr = do claimRemoteKeyPackages :: HasCallStack => Remote UserId -> MLSTest KeyPackageBundle claimRemoteKeyPackages (tUntagged -> qusr) = do clients <- getUserClients qusr - bundle <- fmap (KeyPackageBundle . Set.fromList) $ + fmap (KeyPackageBundle . Set.fromList) $ for clients $ \cid -> do (kp, ref) <- generateKeyPackage cid pure $ @@ -559,7 +559,6 @@ claimRemoteKeyPackages (tUntagged -> qusr) = do ref = ref, keyPackage = KeyPackageData (raw kp) } - pure bundle -- | Claim key package for a local user, or generate and map key packages for remote ones. 
claimKeyPackages :: From bf63e2d4676e4036738f0c2a9083ed0be7a3edc3 Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Wed, 26 Apr 2023 12:11:15 +0000 Subject: [PATCH 48/75] fix unit test: MLS remove proposal --- libs/wire-api/test/unit/Test/Wire/API/MLS.hs | 27 +++++++++++--------- 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs index 9f9dcd5601..6842012fee 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs @@ -133,15 +133,17 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do decodeMLSError <$> spawn (cli qcid2 tmp ["key-package", "create"]) Nothing BS.writeFile (tmp qcid2) (raw kp) + secretKey <- Ed25519.generateSecretKey let groupFilename = "group" - let gid = GroupId "abcd" - createGroup tmp qcid groupFilename gid + gid = GroupId "abcd" + signerKeyFilename = "signer-key.bin" + publicKey = Ed25519.toPublic secretKey + BS.writeFile (tmp signerKeyFilename) (convert publicKey) + createGroup tmp qcid groupFilename signerKeyFilename gid void $ spawn (cli qcid tmp ["member", "add", "--group", tmp groupFilename, "--in-place", tmp qcid2]) Nothing - secretKey <- Ed25519.generateSecretKey - let publicKey = Ed25519.toPublic secretKey - proposal = mkRawMLS (RemoveProposal 1) + let proposal = mkRawMLS (RemoveProposal 1) pmessage = mkSignedPublicMessage secretKey @@ -153,8 +155,6 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do messageFilename = "signed-message.mls" BS.writeFile (tmp messageFilename) (raw (mkRawMLS message)) - let signerKeyFilename = "signer-key.bin" - BS.writeFile (tmp signerKeyFilename) (convert publicKey) void $ spawn @@ -164,22 +164,25 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do [ "consume", "--group", tmp groupFilename, - "--signer-key", - tmp signerKeyFilename, tmp messageFilename ] ) 
Nothing -createGroup :: FilePath -> String -> String -> GroupId -> IO () -createGroup tmp store groupName gid = do +createGroup :: FilePath -> String -> String -> String -> GroupId -> IO () +createGroup tmp store groupName removalKey gid = do groupJSON <- liftIO $ spawn ( cli store tmp - ["group", "create", T.unpack (toBase64Text (unGroupId gid))] + [ "group", + "create", + "--removal-key", + tmp removalKey, + T.unpack (toBase64Text (unGroupId gid)) + ] ) Nothing liftIO $ BS.writeFile (tmp groupName) groupJSON From 58021a766c178aa54c1ff6387e0f04cbe274f792 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Wed, 26 Apr 2023 16:41:24 +0200 Subject: [PATCH 49/75] Upgrade mls-test-cli in the nix environment --- nix/pkgs/mls-test-cli/default.nix | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/nix/pkgs/mls-test-cli/default.nix b/nix/pkgs/mls-test-cli/default.nix index 968b60d904..8565d22c35 100644 --- a/nix/pkgs/mls-test-cli/default.nix +++ b/nix/pkgs/mls-test-cli/default.nix @@ -8,20 +8,29 @@ , gitMinimal }: -rustPlatform.buildRustPackage rec { - name = "mls-test-cli-${version}"; - version = "0.6.0"; - nativeBuildInputs = [ pkg-config perl gitMinimal ]; - buildInputs = [ libsodium ]; +let + version = "0.7.0"; src = fetchFromGitHub { owner = "wireapp"; repo = "mls-test-cli"; - sha256 = "sha256-FjgAcYdUr/ZWdQxbck2UEG6NEEQLuz0S4a55hrAxUs4="; - rev = "82fc148964ef5baa92a90d086fdc61adaa2b5dbf"; + rev = "f539bcc60ab3f7e2303742a37aa17b281b44bf3a"; + sha256 = "sha256-oyf+sot/aVnfoodecPGxTDxqNGk/KCX24LG7W9uP8mI="; }; - doCheck = false; - cargoSha256 = "sha256-AlZrxa7f5JwxxrzFBgeFSaYU6QttsUpfLYfq1HzsdbE="; - cargoDepsHook = '' - mkdir -p mls-test-cli-${version}-vendor.tar.gz/ring/.git + cargoLockFile = builtins.toFile "cargo.lock" (builtins.readFile "${src}/Cargo.lock"); +in rustPlatform.buildRustPackage rec { + name = "mls-test-cli-${version}"; + inherit version src; + + cargoLock = { + lockFile = cargoLockFile; + outputHashes 
= { + "hpke-0.10.0" = "sha256-XYkG72ZeQ3nM4JjgNU5Fe0HqNGkBGcI70rE1Kbz/6vs="; + "openmls-0.20.0" = "sha256-i5xNTYP1wPzwlnqz+yPu8apKCibRZacz4OV5VVZwY5Y="; + }; + }; + + postPatch = '' + cp ${cargoLockFile} Cargo.lock ''; + doCheck = false; } From 1ced1decc9f9fee3af6130f4abd456ce00c8b042 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Thu, 27 Apr 2023 13:26:47 +0200 Subject: [PATCH 50/75] Update cassandra-schema.cql --- cassandra-schema.cql | 2901 +++++++++++++++++++++++++++++++++++------- 1 file changed, 2475 insertions(+), 426 deletions(-) diff --git a/cassandra-schema.cql b/cassandra-schema.cql index 3bc45633bc..455ce6e185 100644 --- a/cassandra-schema.cql +++ b/cassandra-schema.cql @@ -444,6 +444,8 @@ CREATE TABLE galley_test.mls_group_member_client ( user uuid, client text, key_package_ref blob, + leaf_node_index int, + removal_pending boolean, PRIMARY KEY (group_id, user_domain, user, client) ) WITH CLUSTERING ORDER BY (user_domain ASC, user ASC, client ASC) AND bloom_filter_fp_chance = 0.01 @@ -644,17 +646,46 @@ CREATE TABLE galley_test.user ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE KEYSPACE gundeck_test WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; +CREATE KEYSPACE galley_test2 WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; -CREATE TABLE gundeck_test.push ( - ptoken text, - app text, - transport int, - client text, - connection blob, - usr uuid, - PRIMARY KEY (ptoken, app, transport) -) WITH CLUSTERING ORDER BY (app ASC, transport ASC) +CREATE TYPE galley_test2.permissions ( + self bigint, + copy bigint +); + +CREATE TYPE galley_test2.pubkey ( + typ int, + size int, + pem blob +); + +CREATE TABLE galley_test2.meta ( + id int, + version int, + date timestamp, + descr text, + PRIMARY KEY (id, version) +) WITH CLUSTERING ORDER BY (version ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 
'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.team_conv ( + team uuid, + conv uuid, + PRIMARY KEY (team, conv) +) WITH CLUSTERING ORDER BY (conv ASC) AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -670,29 +701,51 @@ CREATE TABLE gundeck_test.push ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE gundeck_test.notifications ( +CREATE TABLE galley_test2.user_team ( user uuid, - id timeuuid, - clients set, - payload blob, - PRIMARY KEY (user, id) -) WITH CLUSTERING ORDER BY (id ASC) + team uuid, + PRIMARY KEY (user, team) +) WITH CLUSTERING ORDER BY (team ASC) AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.TimeWindowCompactionStrategy', 'compaction_window_size': '1', 'compaction_window_unit': 'DAYS', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 AND default_time_to_live = 0 - AND gc_grace_seconds = 0 + AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 AND 
min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE gundeck_test.meta ( +CREATE TABLE galley_test2.service ( + provider uuid, + id uuid, + auth_token ascii, + base_url blob, + enabled boolean, + fingerprints set, + PRIMARY KEY (provider, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.data_migration ( id int, version int, date timestamp, @@ -714,17 +767,39 @@ CREATE TABLE gundeck_test.meta ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE gundeck_test.user_push ( - usr uuid, - ptoken text, - app text, - transport int, - arn text, - client text, - connection blob, - PRIMARY KEY (usr, ptoken, app, transport) -) WITH CLUSTERING ORDER BY (ptoken ASC, app ASC, transport ASC) - AND bloom_filter_fp_chance = 0.1 +CREATE TABLE galley_test2.team_features ( + team_id uuid PRIMARY KEY, + app_lock_enforce int, + app_lock_inactivity_timeout_secs int, + app_lock_status int, + conference_calling int, + digital_signatures int, + expose_invitation_urls_to_team_admin int, + file_sharing int, + file_sharing_lock_status int, + guest_links_lock_status int, + guest_links_status int, + legalhold_status int, + mls_allowed_ciphersuites set, + mls_default_ciphersuite int, + mls_default_protocol int, + mls_e2eid_lock_status 
int, + mls_e2eid_status int, + mls_e2eid_ver_exp timestamp, + mls_protocol_toggle_users set, + mls_status int, + outlook_cal_integration_lock_status int, + outlook_cal_integration_status int, + search_visibility_inbound_status int, + search_visibility_status int, + self_deleting_messages_lock_status int, + self_deleting_messages_status int, + self_deleting_messages_ttl int, + snd_factor_password_challenge_lock_status int, + snd_factor_password_challenge_status int, + sso_status int, + validate_saml_emails int +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -739,24 +814,41 @@ CREATE TABLE gundeck_test.user_push ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE KEYSPACE brig_test WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; - -CREATE TYPE brig_test.asset ( - typ int, - key text, - size int -); - -CREATE TYPE brig_test.pubkey ( - typ int, - size int, - pem blob -); +CREATE TABLE galley_test2.member ( + conv uuid, + user uuid, + conversation_role text, + hidden boolean, + hidden_ref text, + otr_archived boolean, + otr_archived_ref text, + otr_muted boolean, + otr_muted_ref text, + otr_muted_status int, + provider uuid, + service uuid, + status int, + PRIMARY KEY (conv, user) +) WITH CLUSTERING ORDER BY (user ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND 
min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.team_invitation_info ( - code ascii PRIMARY KEY, - id uuid, - team uuid +CREATE TABLE galley_test2.custom_backend ( + domain text PRIMARY KEY, + config_json_url blob, + webapp_welcome_url blob ) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -772,10 +864,19 @@ CREATE TABLE brig_test.team_invitation_info ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.provider_keys ( - key text PRIMARY KEY, - provider uuid -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE galley_test2.user_remote_conv ( + user uuid, + conv_remote_domain text, + conv_remote_id uuid, + hidden boolean, + hidden_ref text, + otr_archived boolean, + otr_archived_ref text, + otr_muted_ref text, + otr_muted_status int, + PRIMARY KEY (user, conv_remote_domain, conv_remote_id) +) WITH CLUSTERING ORDER BY (conv_remote_domain ASC, conv_remote_id ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -790,12 +891,8 @@ CREATE TABLE brig_test.provider_keys ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.oauth_refresh_token ( - id uuid PRIMARY KEY, - client uuid, - created_at timestamp, - scope set, - user uuid +CREATE TABLE galley_test2.legalhold_whitelisted ( + team uuid PRIMARY KEY ) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -803,7 +900,7 @@ CREATE TABLE brig_test.oauth_refresh_token ( AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 14515200 + AND 
default_time_to_live = 0 AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 @@ -811,17 +908,1900 @@ CREATE TABLE brig_test.oauth_refresh_token ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.team_invitation_email ( - email text, +CREATE TABLE galley_test2.member_remote_user ( + conv uuid, + user_remote_domain text, + user_remote_id uuid, + conversation_role text, + PRIMARY KEY (conv, user_remote_domain, user_remote_id) +) WITH CLUSTERING ORDER BY (user_remote_domain ASC, user_remote_id ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.team_member ( team uuid, - code ascii, - invitation uuid, - PRIMARY KEY (email, team) -) WITH CLUSTERING ORDER BY (team ASC) - AND bloom_filter_fp_chance = 0.01 + user uuid, + invited_at timestamp, + invited_by uuid, + legalhold_status int, + perms frozen, + PRIMARY KEY (team, user) +) WITH CLUSTERING ORDER BY (user ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND 
gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.team_notifications ( + team uuid, + id timeuuid, + payload blob, + PRIMARY KEY (team, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.legalhold_pending_prekeys ( + user uuid, + key int, + data text, + PRIMARY KEY (user, key) +) WITH CLUSTERING ORDER BY (key ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.group_id_conv_id ( + group_id blob PRIMARY KEY, + conv_id uuid, + domain text, + subconv_id text +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = 
'' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.member_client ( + conv uuid, + user_domain text, + user uuid, + client text, + key_package_ref blob, + PRIMARY KEY (conv, user_domain, user, client) +) WITH CLUSTERING ORDER BY (user_domain ASC, user ASC, client ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.legalhold_service ( + team_id uuid PRIMARY KEY, + auth_token ascii, + base_url blob, + fingerprint blob, + pubkey pubkey +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND 
dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.conversation_codes ( + key ascii, + scope int, + conversation uuid, + value ascii, + PRIMARY KEY (key, scope) +) WITH CLUSTERING ORDER BY (scope ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.mls_group_member_client ( + group_id blob, + user_domain text, + user uuid, + client text, + key_package_ref blob, + PRIMARY KEY (group_id, user_domain, user, client) +) WITH CLUSTERING ORDER BY (user_domain ASC, user ASC, client ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = 
'99PERCENTILE'; + +CREATE TABLE galley_test2.clients ( + user uuid PRIMARY KEY, + clients set +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.conversation ( + conv uuid PRIMARY KEY, + access set, + access_role int, + access_roles_v2 set, + cipher_suite int, + creator uuid, + deleted boolean, + epoch bigint, + group_id blob, + message_timer bigint, + name text, + protocol int, + public_group_state blob, + receipt_mode int, + team uuid, + type int +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.mls_commit_locks ( + group_id blob, + epoch bigint, + PRIMARY KEY (group_id, epoch) +) WITH CLUSTERING ORDER BY (epoch ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 
'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.subconversation ( + conv_id uuid, + subconv_id text, + cipher_suite int, + epoch bigint, + group_id blob, + public_group_state blob, + PRIMARY KEY (conv_id, subconv_id) +) WITH CLUSTERING ORDER BY (subconv_id ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.team ( + team uuid PRIMARY KEY, + binding boolean, + creator uuid, + deleted boolean, + icon text, + icon_key text, + name text, + search_visibility int, + splash_screen text, + status int +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance 
= 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.billing_team_member ( + team uuid, + user uuid, + PRIMARY KEY (team, user) +) WITH CLUSTERING ORDER BY (user ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE galley_test2.mls_proposal_refs ( + group_id blob, + epoch bigint, + ref blob, + origin int, + proposal blob, + PRIMARY KEY (group_id, epoch, ref) +) WITH CLUSTERING ORDER BY (epoch ASC, ref ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE 
galley_test2.user ( + user uuid, + conv uuid, + PRIMARY KEY (user, conv) +) WITH CLUSTERING ORDER BY (conv ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE KEYSPACE gundeck_test WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; + +CREATE TABLE gundeck_test.push ( + ptoken text, + app text, + transport int, + client text, + connection blob, + usr uuid, + PRIMARY KEY (ptoken, app, transport) +) WITH CLUSTERING ORDER BY (app ASC, transport ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE gundeck_test.notifications ( + user uuid, + id timeuuid, + clients set, + payload blob, + PRIMARY KEY (user, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND 
compaction = {'class': 'org.apache.cassandra.db.compaction.TimeWindowCompactionStrategy', 'compaction_window_size': '1', 'compaction_window_unit': 'DAYS', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 0 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE gundeck_test.meta ( + id int, + version int, + date timestamp, + descr text, + PRIMARY KEY (id, version) +) WITH CLUSTERING ORDER BY (version ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE gundeck_test.user_push ( + usr uuid, + ptoken text, + app text, + transport int, + arn text, + client text, + connection blob, + PRIMARY KEY (usr, ptoken, app, transport) +) WITH CLUSTERING ORDER BY (ptoken ASC, app ASC, transport ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 
'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE KEYSPACE brig_test2 WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; + +CREATE TYPE brig_test2.asset ( + typ int, + key text, + size int +); + +CREATE TYPE brig_test2.pubkey ( + typ int, + size int, + pem blob +); + +CREATE TABLE brig_test2.team_invitation_info ( + code ascii PRIMARY KEY, + id uuid, + team uuid +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.rich_info ( + user uuid PRIMARY KEY, + json blob +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND 
max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.user_keys_hash ( + key blob PRIMARY KEY, + key_type int, + user uuid +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.service_tag ( + bucket int, + tag bigint, + name text, + service uuid, + provider uuid, + PRIMARY KEY ((bucket, tag), name, service) +) WITH CLUSTERING ORDER BY (name ASC, service ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.login_codes ( + user uuid PRIMARY KEY, + code text, + retries int, + timeout timestamp +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 
'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.unique_claims ( + value text PRIMARY KEY, + claims set +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 0 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.user_cookies ( + user uuid, + expires timestamp, + id bigint, + created timestamp, + label text, + succ_id bigint, + type int, + PRIMARY KEY (user, expires, id) +) WITH CLUSTERING ORDER BY (expires ASC, id ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND 
min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.mls_key_packages ( + user uuid, + client text, + ref blob, + data blob, + PRIMARY KEY ((user, client), ref) +) WITH CLUSTERING ORDER BY (ref ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.mls_key_package_refs ( + ref blob PRIMARY KEY, + client text, + conv uuid, + conv_domain text, + domain text, + user uuid +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.excluded_phones ( + prefix text PRIMARY KEY, + comment text +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 
'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.codes ( + user uuid, + scope int, + code text, + retries int, + PRIMARY KEY (user, scope) +) WITH CLUSTERING ORDER BY (scope ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.user_handle ( + handle text PRIMARY KEY, + user uuid +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE 
brig_test2.service ( + provider uuid, + id uuid, + assets list>, + auth_tokens list, + base_url blob, + descr text, + enabled boolean, + fingerprints list, + name text, + pubkeys list>, + summary text, + tags set, + PRIMARY KEY (provider, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.team_invitation_email ( + email text, + team uuid, + code ascii, + invitation uuid, + PRIMARY KEY (email, team) +) WITH CLUSTERING ORDER BY (team ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.invitation_info ( + code ascii PRIMARY KEY, + id uuid, + inviter uuid +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 
'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.service_whitelist ( + team uuid, + provider uuid, + service uuid, + PRIMARY KEY (team, provider, service) +) WITH CLUSTERING ORDER BY (provider ASC, service ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.provider ( + id uuid PRIMARY KEY, + descr text, + email text, + name text, + password blob, + url blob +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + 
AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.user_keys ( + key text PRIMARY KEY, + user uuid +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.mls_public_keys ( + user uuid, + client text, + sig_scheme text, + key blob, + PRIMARY KEY (user, client, sig_scheme) +) WITH CLUSTERING ORDER BY (client ASC, sig_scheme ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.invitee_info ( + invitee uuid PRIMARY KEY, + conv uuid, + inviter uuid +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = 
{'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.nonce ( + user uuid, + key text, + nonce uuid, + PRIMARY KEY (user, key) +) WITH CLUSTERING ORDER BY (key ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 300 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.provider_keys ( + key text PRIMARY KEY, + provider uuid +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.service_team ( + provider uuid, + service uuid, + team uuid, 
+ user uuid, + conv uuid, + PRIMARY KEY ((provider, service), team, user) +) WITH CLUSTERING ORDER BY (team ASC, user ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.blacklist ( + key text PRIMARY KEY +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.service_whitelist_rev ( + provider uuid, + service uuid, + team uuid, + PRIMARY KEY ((provider, service), team) +) WITH CLUSTERING ORDER BY (team ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 
'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.team_invitation ( + team uuid, + id uuid, + code ascii, + created_at timestamp, + created_by uuid, + email text, + name text, + phone text, + role int, + PRIMARY KEY (team, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.user ( + id uuid PRIMARY KEY, + accent list, + accent_id int, + activated boolean, + assets list>, + country ascii, + email text, + email_unvalidated text, + expires timestamp, + feature_conference_calling int, + handle text, + language ascii, + managed_by int, + name text, + password blob, + phone text, + picture list, + provider uuid, + searchable boolean, + service uuid, + sso_id text, + status int, + team uuid +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 
'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.vcodes_throttle ( + key ascii, + scope int, + initial_delay int, + PRIMARY KEY (key, scope) +) WITH CLUSTERING ORDER BY (scope ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.properties ( + user uuid, + key ascii, + value blob, + PRIMARY KEY (user, key) +) WITH CLUSTERING ORDER BY (key ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; 
+ +CREATE TABLE brig_test2.service_user ( + provider uuid, + service uuid, + user uuid, + conv uuid, + team uuid, + PRIMARY KEY ((provider, service), user) +) WITH CLUSTERING ORDER BY (user ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.prekeys ( + user uuid, + client text, + key int, + data text, + PRIMARY KEY (user, client, key) +) WITH CLUSTERING ORDER BY (client ASC, key ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.password_reset ( + key ascii PRIMARY KEY, + code ascii, + retries int, + timeout timestamp, + user uuid +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 
'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.clients ( + user uuid, + client text, + capabilities set, + class int, + cookie text, + ip inet, + label text, + lat double, + lon double, + model text, + tstamp timestamp, + type int, + PRIMARY KEY (user, client) +) WITH CLUSTERING ORDER BY (client ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.budget ( + key text PRIMARY KEY, + budget int +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 0 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 
128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.connection_remote ( + left uuid, + right_domain text, + right_user uuid, + conv_domain text, + conv_id uuid, + last_update timestamp, + status int, + PRIMARY KEY (left, right_domain, right_user) +) WITH CLUSTERING ORDER BY (right_domain ASC, right_user ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.users_pending_activation ( + user uuid PRIMARY KEY, + expires_at timestamp +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.connection ( + left uuid, + right uuid, + conv uuid, + last_update timestamp, + message text, + status int, + PRIMARY KEY (left, right) +) WITH CLUSTERING ORDER BY (right ASC) + AND bloom_filter_fp_chance = 0.01 + 
AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; +CREATE INDEX conn_status ON brig_test2.connection (status); + +CREATE TABLE brig_test2.meta ( + id int, + version int, + date timestamp, + descr text, + PRIMARY KEY (id, version) +) WITH CLUSTERING ORDER BY (version ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.invitation ( + inviter uuid, + id uuid, + code ascii, + created_at timestamp, + email text, + name text, + phone text, + PRIMARY KEY (inviter, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 
'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.activation_keys ( + key ascii PRIMARY KEY, + challenge ascii, + code ascii, + key_text text, + key_type ascii, + retries int, + user uuid +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test2.vcodes ( + key ascii, + scope int, + account uuid, + email text, + phone text, + retries int, + value ascii, + PRIMARY KEY (key, scope) +) WITH CLUSTERING ORDER BY (scope ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 0 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE 
brig_test2.service_prefix ( + prefix text, + name text, + service uuid, + provider uuid, + PRIMARY KEY (prefix, name, service) +) WITH CLUSTERING ORDER BY (name ASC, service ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE KEYSPACE brig_test WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; + +CREATE TYPE brig_test.asset ( + typ int, + key text, + size int +); + +CREATE TYPE brig_test.pubkey ( + typ int, + size int, + pem blob +); + +CREATE TABLE brig_test.team_invitation_info ( + code ascii PRIMARY KEY, + id uuid, + team uuid +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.provider_keys ( + key text PRIMARY KEY, + provider uuid +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 
'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.oauth_refresh_token ( + id uuid PRIMARY KEY, + client uuid, + created_at timestamp, + scope set, + user uuid +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 14515200 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.team_invitation_email ( + email text, + team uuid, + code ascii, + invitation uuid, + PRIMARY KEY (email, team) +) WITH CLUSTERING ORDER BY (team ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND 
default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.rich_info ( + user uuid PRIMARY KEY, + json blob +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.user_keys_hash ( + key blob PRIMARY KEY, + key_type int, + user uuid +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.service_tag ( + bucket int, + tag bigint, + name text, + service uuid, + provider uuid, + PRIMARY KEY ((bucket, tag), name, service) +) WITH CLUSTERING ORDER BY (name ASC, service ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 
'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.meta ( + id int, + version int, + date timestamp, + descr text, + PRIMARY KEY (id, version) +) WITH CLUSTERING ORDER BY (version ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.unique_claims ( + value text PRIMARY KEY, + claims set +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 0 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND 
read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.user_cookies ( + user uuid, + expires timestamp, + id bigint, + created timestamp, + label text, + succ_id bigint, + type int, + PRIMARY KEY (user, expires, id) +) WITH CLUSTERING ORDER BY (expires ASC, id ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.mls_key_packages ( + user uuid, + client text, + ref blob, + data blob, + PRIMARY KEY ((user, client), ref) +) WITH CLUSTERING ORDER BY (ref ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.mls_key_package_refs ( + ref blob PRIMARY KEY, + client text, + conv uuid, + conv_domain text, + domain text, + user uuid +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND 
compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.excluded_phones ( + prefix text PRIMARY KEY, + comment text +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.codes ( + user uuid, + scope int, + code text, + retries int, + PRIMARY KEY (user, scope) +) WITH CLUSTERING ORDER BY (scope ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND 
min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.user_handle ( + handle text PRIMARY KEY, + user uuid +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.service ( + provider uuid, + id uuid, + assets list>, + auth_tokens list, + base_url blob, + descr text, + enabled boolean, + fingerprints list, + name text, + pubkeys list>, + summary text, + tags set, + PRIMARY KEY (provider, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.oauth_user_refresh_token ( + user uuid, + token_id uuid, + PRIMARY KEY (user, token_id) +) WITH CLUSTERING ORDER BY (token_id ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND 
compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 14515200 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.invitation_info ( + code ascii PRIMARY KEY, + id uuid, + inviter uuid +) WITH bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.service_whitelist ( + team uuid, + provider uuid, + service uuid, + PRIMARY KEY (team, provider, service) +) WITH CLUSTERING ORDER BY (provider ASC, service ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds 
= 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.provider ( + id uuid PRIMARY KEY, + descr text, + email text, + name text, + password blob, + url blob +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.user_keys ( + key text PRIMARY KEY, + user uuid +) WITH bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.mls_public_keys ( + user uuid, + client text, + sig_scheme text, + key blob, + PRIMARY KEY (user, client, sig_scheme) +) WITH CLUSTERING ORDER BY (client ASC, sig_scheme ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 
'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.invitee_info ( + invitee uuid PRIMARY KEY, + conv uuid, + inviter uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -833,17 +2813,20 @@ CREATE TABLE brig_test.team_invitation_email ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.rich_info ( - user uuid PRIMARY KEY, - json blob -) WITH bloom_filter_fp_chance = 0.01 +CREATE TABLE brig_test.nonce ( + user uuid, + key text, + nonce uuid, + PRIMARY KEY (user, key) +) WITH CLUSTERING ORDER BY (key ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 + AND default_time_to_live = 300 AND gc_grace_seconds = 864000 AND 
max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 @@ -851,14 +2834,15 @@ CREATE TABLE brig_test.rich_info ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.user_keys_hash ( - key blob PRIMARY KEY, - key_type int, - user uuid -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.login_codes ( + user uuid PRIMARY KEY, + code text, + retries int, + timeout timestamp +) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -870,18 +2854,15 @@ CREATE TABLE brig_test.user_keys_hash ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.service_tag ( - bucket int, - tag bigint, +CREATE TABLE brig_test.oauth_client ( + id uuid PRIMARY KEY, name text, - service uuid, - provider uuid, - PRIMARY KEY ((bucket, tag), name, service) -) WITH CLUSTERING ORDER BY (name ASC, service ASC) - AND bloom_filter_fp_chance = 0.1 + redirect_uri blob, + secret blob +) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -893,13 +2874,14 @@ CREATE TABLE brig_test.service_tag ( 
AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.meta ( - id int, - version int, - date timestamp, - descr text, - PRIMARY KEY (id, version) -) WITH CLUSTERING ORDER BY (version ASC) +CREATE TABLE brig_test.service_team ( + provider uuid, + service uuid, + team uuid, + user uuid, + conv uuid, + PRIMARY KEY ((provider, service), team, user) +) WITH CLUSTERING ORDER BY (team ASC, user ASC) AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -915,35 +2897,34 @@ CREATE TABLE brig_test.meta ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.unique_claims ( - value text PRIMARY KEY, - claims set -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.invitation ( + inviter uuid, + id uuid, + code ascii, + created_at timestamp, + email text, + name text, + phone text, + PRIMARY KEY (inviter, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 AND default_time_to_live = 0 - AND gc_grace_seconds = 0 + AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.user_cookies ( - user uuid, - expires timestamp, - id bigint, - created timestamp, - label text, - succ_id bigint, - type int, - PRIMARY KEY (user, expires, id) -) WITH CLUSTERING ORDER BY (expires ASC, id ASC) - 
AND bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.blacklist ( + key text PRIMARY KEY +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -958,17 +2939,16 @@ CREATE TABLE brig_test.user_cookies ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.mls_key_packages ( - user uuid, - client text, - ref blob, - data blob, - PRIMARY KEY ((user, client), ref) -) WITH CLUSTERING ORDER BY (ref ASC) - AND bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.service_whitelist_rev ( + provider uuid, + service uuid, + team uuid, + PRIMARY KEY ((provider, service), team) +) WITH CLUSTERING ORDER BY (team ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -980,17 +2960,22 @@ CREATE TABLE brig_test.mls_key_packages ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.mls_key_package_refs ( - ref blob PRIMARY KEY, - client text, - conv uuid, - conv_domain text, - domain text, - user uuid -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.team_invitation ( + team uuid, + id uuid, + code ascii, + created_at timestamp, + created_by uuid, + email text, + name text, + phone text, + role int, + PRIMARY KEY (team, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND 
compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1002,13 +2987,34 @@ CREATE TABLE brig_test.mls_key_package_refs ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.excluded_phones ( - prefix text PRIMARY KEY, - comment text -) WITH bloom_filter_fp_chance = 0.01 +CREATE TABLE brig_test.user ( + id uuid PRIMARY KEY, + accent list, + accent_id int, + activated boolean, + assets list>, + country ascii, + email text, + email_unvalidated text, + expires timestamp, + feature_conference_calling int, + handle text, + language ascii, + managed_by int, + name text, + password blob, + phone text, + picture list, + provider uuid, + searchable boolean, + service uuid, + sso_id text, + status int, + team uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1020,12 +3026,11 @@ CREATE TABLE brig_test.excluded_phones ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.codes ( - user uuid, +CREATE TABLE brig_test.vcodes_throttle ( + key ascii, scope int, - code text, - retries int, - PRIMARY KEY (user, scope) + initial_delay int, + PRIMARY KEY (key, scope) ) WITH CLUSTERING ORDER BY 
(scope ASC) AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} @@ -1042,13 +3047,16 @@ CREATE TABLE brig_test.codes ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.user_handle ( - handle text PRIMARY KEY, - user uuid -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.properties ( + user uuid, + key ascii, + value blob, + PRIMARY KEY (user, key) +) WITH CLUSTERING ORDER BY (key ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1060,25 +3068,18 @@ CREATE TABLE brig_test.user_handle ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.service ( +CREATE TABLE brig_test.service_user ( provider uuid, - id uuid, - assets list>, - auth_tokens list, - base_url blob, - descr text, - enabled boolean, - fingerprints list, - name text, - pubkeys list>, - summary text, - tags set, - PRIMARY KEY (provider, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.1 + service uuid, + user uuid, + conv uuid, + team uuid, + PRIMARY KEY ((provider, service), user) +) WITH CLUSTERING ORDER BY (user ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = 
{'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1090,19 +3091,21 @@ CREATE TABLE brig_test.service ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.oauth_user_refresh_token ( +CREATE TABLE brig_test.prekeys ( user uuid, - token_id uuid, - PRIMARY KEY (user, token_id) -) WITH CLUSTERING ORDER BY (token_id ASC) + client text, + key int, + data text, + PRIMARY KEY (user, client, key) +) WITH CLUSTERING ORDER BY (client ASC, key ASC) AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 14515200 + AND default_time_to_live = 0 AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 @@ -1110,10 +3113,13 @@ CREATE TABLE brig_test.oauth_user_refresh_token ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.invitation_info ( +CREATE TABLE brig_test.oauth_auth_code ( code ascii PRIMARY KEY, - id uuid, - inviter uuid + client uuid, + code_challenge blob, + redirect_uri blob, + scope set, + user uuid ) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -1121,7 +3127,7 @@ CREATE TABLE brig_test.invitation_info ( AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 - AND 
default_time_to_live = 0 + AND default_time_to_live = 300 AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 @@ -1129,16 +3135,25 @@ CREATE TABLE brig_test.invitation_info ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.service_whitelist ( - team uuid, - provider uuid, - service uuid, - PRIMARY KEY (team, provider, service) -) WITH CLUSTERING ORDER BY (provider ASC, service ASC) +CREATE TABLE brig_test.clients ( + user uuid, + client text, + capabilities set, + class int, + cookie text, + ip inet, + label text, + lat double, + lon double, + model text, + tstamp timestamp, + type int, + PRIMARY KEY (user, client) +) WITH CLUSTERING ORDER BY (client ASC) AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1150,13 +3165,9 @@ CREATE TABLE brig_test.service_whitelist ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.provider ( - id uuid PRIMARY KEY, - descr text, - email text, - name text, - password blob, - url blob +CREATE TABLE brig_test.budget ( + key text PRIMARY KEY, + budget int ) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -1165,17 +3176,24 @@ CREATE TABLE brig_test.provider ( AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 + AND gc_grace_seconds = 0 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 AND 
min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.user_keys ( - key text PRIMARY KEY, - user uuid -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.connection_remote ( + left uuid, + right_domain text, + right_user uuid, + conv_domain text, + conv_id uuid, + last_update timestamp, + status int, + PRIMARY KEY (left, right_domain, right_user) +) WITH CLUSTERING ORDER BY (right_domain ASC, right_user ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -1190,17 +3208,13 @@ CREATE TABLE brig_test.user_keys ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.mls_public_keys ( - user uuid, - client text, - sig_scheme text, - key blob, - PRIMARY KEY (user, client, sig_scheme) -) WITH CLUSTERING ORDER BY (client ASC, sig_scheme ASC) - AND bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.users_pending_activation ( + user uuid PRIMARY KEY, + expires_at timestamp +) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1212,11 +3226,16 @@ CREATE TABLE brig_test.mls_public_keys ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.invitee_info ( - invitee uuid PRIMARY KEY, +CREATE TABLE brig_test.connection ( + left uuid, + right uuid, conv uuid, - inviter uuid -) WITH bloom_filter_fp_chance = 0.1 + 
last_update timestamp, + message text, + status int, + PRIMARY KEY (left, right) +) WITH CLUSTERING ORDER BY (right ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -1230,21 +3249,22 @@ CREATE TABLE brig_test.invitee_info ( AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; +CREATE INDEX conn_status ON brig_test.connection (status); -CREATE TABLE brig_test.nonce ( - user uuid, - key text, - nonce uuid, - PRIMARY KEY (user, key) -) WITH CLUSTERING ORDER BY (key ASC) - AND bloom_filter_fp_chance = 0.01 +CREATE TABLE brig_test.password_reset ( + key ascii PRIMARY KEY, + code ascii, + retries int, + timeout timestamp, + user uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 300 + AND default_time_to_live = 0 AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 @@ -1252,15 +3272,18 @@ CREATE TABLE brig_test.nonce ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.login_codes ( - user uuid PRIMARY KEY, - code text, +CREATE TABLE brig_test.activation_keys ( + key ascii PRIMARY KEY, + challenge ascii, + code ascii, + key_text text, + key_type ascii, retries int, - timeout timestamp -) WITH bloom_filter_fp_chance = 0.01 + user uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 
'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1272,38 +3295,42 @@ CREATE TABLE brig_test.login_codes ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.oauth_client ( - id uuid PRIMARY KEY, - name text, - redirect_uri blob, - secret blob -) WITH bloom_filter_fp_chance = 0.01 +CREATE TABLE brig_test.vcodes ( + key ascii, + scope int, + account uuid, + email text, + phone text, + retries int, + value ascii, + PRIMARY KEY (key, scope) +) WITH CLUSTERING ORDER BY (scope ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 + AND gc_grace_seconds = 0 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.service_team ( - provider uuid, +CREATE TABLE brig_test.service_prefix ( + prefix text, + name text, service uuid, - team uuid, - user uuid, - conv uuid, - PRIMARY KEY ((provider, service), team, user) -) WITH CLUSTERING ORDER BY (team ASC, user ASC) - AND 
bloom_filter_fp_chance = 0.01 + provider uuid, + PRIMARY KEY (prefix, name, service) +) WITH CLUSTERING ORDER BY (name ASC, service ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1315,17 +3342,12 @@ CREATE TABLE brig_test.service_team ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.invitation ( - inviter uuid, - id uuid, - code ascii, - created_at timestamp, - email text, - name text, - phone text, - PRIMARY KEY (inviter, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.01 +CREATE KEYSPACE spar_test2 WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; + +CREATE TABLE spar_test2.bind_cookie ( + cookie text PRIMARY KEY, + session_owner uuid +) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} @@ -1340,9 +3362,14 @@ CREATE TABLE brig_test.invitation ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.blacklist ( - key text PRIMARY KEY -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE spar_test2.user_v2 ( + issuer text, + normalized_uname_id text, + sso_id text, + uid uuid, + PRIMARY KEY (issuer, normalized_uname_id) +) WITH CLUSTERING ORDER BY (normalized_uname_id ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 
'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -1357,12 +3384,13 @@ CREATE TABLE brig_test.blacklist ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.service_whitelist_rev ( - provider uuid, - service uuid, - team uuid, - PRIMARY KEY ((provider, service), team) -) WITH CLUSTERING ORDER BY (team ASC) +CREATE TABLE spar_test2.data_migration ( + id int, + version int, + date timestamp, + descr text, + PRIMARY KEY (id, version) +) WITH CLUSTERING ORDER BY (version ASC) AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -1378,22 +3406,13 @@ CREATE TABLE brig_test.service_whitelist_rev ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.team_invitation ( - team uuid, - id uuid, - code ascii, - created_at timestamp, - created_by uuid, - email text, - name text, - phone text, - role int, - PRIMARY KEY (team, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.01 +CREATE TABLE spar_test2.authresp ( + resp text PRIMARY KEY, + end_of_life timestamp +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1405,30 +3424,9 @@ CREATE TABLE brig_test.team_invitation ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.user ( +CREATE TABLE spar_test2.idp_raw_metadata ( id uuid PRIMARY KEY, - accent list, - accent_id int, 
- activated boolean, - assets list>, - country ascii, - email text, - email_unvalidated text, - expires timestamp, - feature_conference_calling int, - handle text, - language ascii, - managed_by int, - name text, - password blob, - phone text, - picture list, - provider uuid, - searchable boolean, - service uuid, - sso_id text, - status int, - team uuid + metadata text ) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -1444,37 +3442,13 @@ CREATE TABLE brig_test.user ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.vcodes_throttle ( - key ascii, - scope int, - initial_delay int, - PRIMARY KEY (key, scope) -) WITH CLUSTERING ORDER BY (scope ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.properties ( - user uuid, - key ascii, - value blob, - PRIMARY KEY (user, key) -) WITH CLUSTERING ORDER BY (key ASC) - AND bloom_filter_fp_chance = 0.01 +CREATE TABLE spar_test2.issuer_idp ( + issuer text PRIMARY KEY, + idp uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 
'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1486,18 +3460,21 @@ CREATE TABLE brig_test.properties ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.service_user ( - provider uuid, - service uuid, - user uuid, - conv uuid, - team uuid, - PRIMARY KEY ((provider, service), user) -) WITH CLUSTERING ORDER BY (user ASC) - AND bloom_filter_fp_chance = 0.01 +CREATE TABLE spar_test2.idp ( + idp uuid PRIMARY KEY, + api_version int, + extra_public_keys list, + handle text, + issuer text, + old_issuers list, + public_key blob, + replaced_by uuid, + request_uri text, + team uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1509,14 +3486,12 @@ CREATE TABLE brig_test.service_user ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.prekeys ( - user uuid, - client text, - key int, - data text, - PRIMARY KEY (user, client, key) -) WITH CLUSTERING ORDER BY (client ASC, key ASC) - AND bloom_filter_fp_chance = 0.01 +CREATE TABLE spar_test2.default_idp ( + partition_key_always_default text, + idp uuid, + PRIMARY KEY (partition_key_always_default, idp) +) WITH CLUSTERING ORDER BY (idp ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 
'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -1531,21 +3506,23 @@ CREATE TABLE brig_test.prekeys ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.oauth_auth_code ( - code ascii PRIMARY KEY, - client uuid, - code_challenge blob, - redirect_uri blob, - scope set, - user uuid -) WITH bloom_filter_fp_chance = 0.01 +CREATE TABLE spar_test2.team_provisioning_by_team ( + team uuid, + id uuid, + created_at timestamp, + descr text, + idp uuid, + token_ text, + PRIMARY KEY (team, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 300 + AND default_time_to_live = 0 AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 @@ -1553,25 +3530,17 @@ CREATE TABLE brig_test.oauth_auth_code ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.clients ( - user uuid, - client text, - capabilities set, - class int, - cookie text, - ip inet, - label text, - lat double, - lon double, - model text, - tstamp timestamp, - type int, - PRIMARY KEY (user, client) -) WITH CLUSTERING ORDER BY (client ASC) +CREATE TABLE spar_test2.meta ( + id int, + version int, + date timestamp, + descr text, + PRIMARY KEY (id, version) +) WITH CLUSTERING ORDER BY (version ASC) AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 
'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1583,9 +3552,11 @@ CREATE TABLE brig_test.clients ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.budget ( - key text PRIMARY KEY, - budget int +CREATE TABLE spar_test2.verdict ( + req text PRIMARY KEY, + format_con int, + format_mobile_error text, + format_mobile_success text ) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -1594,24 +3565,17 @@ CREATE TABLE brig_test.budget ( AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 AND default_time_to_live = 0 - AND gc_grace_seconds = 0 + AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.connection_remote ( - left uuid, - right_domain text, - right_user uuid, - conv_domain text, - conv_id uuid, - last_update timestamp, - status int, - PRIMARY KEY (left, right_domain, right_user) -) WITH CLUSTERING ORDER BY (right_domain ASC, right_user ASC) - AND bloom_filter_fp_chance = 0.1 +CREATE TABLE spar_test2.authreq ( + req text PRIMARY KEY, + end_of_life timestamp +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -1626,13 +3590,17 @@ CREATE TABLE brig_test.connection_remote ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.users_pending_activation ( - user uuid 
PRIMARY KEY, - expires_at timestamp -) WITH bloom_filter_fp_chance = 0.01 +CREATE TABLE spar_test2.team_provisioning_by_token ( + token_ text PRIMARY KEY, + created_at timestamp, + descr text, + id uuid, + idp uuid, + team uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -1644,15 +3612,11 @@ CREATE TABLE brig_test.users_pending_activation ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.connection ( - left uuid, - right uuid, - conv uuid, - last_update timestamp, - message text, - status int, - PRIMARY KEY (left, right) -) WITH CLUSTERING ORDER BY (right ASC) +CREATE TABLE spar_test2.team_idp ( + team uuid, + idp uuid, + PRIMARY KEY (team, idp) +) WITH CLUSTERING ORDER BY (idp ASC) AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -1667,15 +3631,14 @@ CREATE TABLE brig_test.connection ( AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE INDEX conn_status ON brig_test.connection (status); -CREATE TABLE brig_test.password_reset ( - key ascii PRIMARY KEY, - code ascii, - retries int, - timeout timestamp, - user uuid -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE spar_test2.issuer_idp_v2 ( + issuer text, + team uuid, + idp uuid, + PRIMARY KEY (issuer, team) +) WITH CLUSTERING ORDER BY (team ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 
'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -1690,14 +3653,10 @@ CREATE TABLE brig_test.password_reset ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.activation_keys ( - key ascii PRIMARY KEY, - challenge ascii, - code ascii, - key_text text, - key_type ascii, - retries int, - user uuid +CREATE TABLE spar_test2.scim_user_times ( + uid uuid PRIMARY KEY, + created_at timestamp, + last_updated_at timestamp ) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -1713,16 +3672,12 @@ CREATE TABLE brig_test.activation_keys ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.vcodes ( - key ascii, - scope int, - account uuid, - email text, - phone text, - retries int, - value ascii, - PRIMARY KEY (key, scope) -) WITH CLUSTERING ORDER BY (scope ASC) +CREATE TABLE spar_test2.scim_external ( + team uuid, + external_id text, + user uuid, + PRIMARY KEY (team, external_id) +) WITH CLUSTERING ORDER BY (external_id ASC) AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -1731,20 +3686,19 @@ CREATE TABLE brig_test.vcodes ( AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 AND default_time_to_live = 0 - AND gc_grace_seconds = 0 + AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.service_prefix ( - prefix text, - name text, - service uuid, - provider uuid, - PRIMARY KEY (prefix, name, service) -) WITH CLUSTERING ORDER BY (name ASC, service ASC) +CREATE TABLE spar_test2.user ( + issuer text, + sso_id text, + uid uuid, + PRIMARY KEY (issuer, sso_id) +) WITH CLUSTERING ORDER BY (sso_id ASC) AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 
'NONE'} AND comment = '' @@ -2132,3 +4086,98 @@ CREATE TABLE spar_test.user ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; +CREATE KEYSPACE gundeck_test2 WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; + +CREATE TABLE gundeck_test2.push ( + ptoken text, + app text, + transport int, + client text, + connection blob, + usr uuid, + PRIMARY KEY (ptoken, app, transport) +) WITH CLUSTERING ORDER BY (app ASC, transport ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE gundeck_test2.notifications ( + user uuid, + id timeuuid, + clients set, + payload blob, + PRIMARY KEY (user, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.TimeWindowCompactionStrategy', 'compaction_window_size': '1', 'compaction_window_unit': 'DAYS', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 0 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry 
= '99PERCENTILE'; + +CREATE TABLE gundeck_test2.meta ( + id int, + version int, + date timestamp, + descr text, + PRIMARY KEY (id, version) +) WITH CLUSTERING ORDER BY (version ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE gundeck_test2.user_push ( + usr uuid, + ptoken text, + app text, + transport int, + arn text, + client text, + connection blob, + PRIMARY KEY (usr, ptoken, app, transport) +) WITH CLUSTERING ORDER BY (ptoken ASC, app ASC, transport ASC) + AND bloom_filter_fp_chance = 0.1 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + From 31dbc382a15708fa2bcccc53aace8062daaacc2f Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Thu, 27 Apr 2023 12:12:19 +0000 Subject: [PATCH 51/75] disable testing the keypackage lifetime --- services/brig/test/integration/API/MLS.hs | 6 +++++- 1 file changed, 5 
insertions(+), 1 deletion(-) diff --git a/services/brig/test/integration/API/MLS.hs b/services/brig/test/integration/API/MLS.hs index a390313228..4d1d58443a 100644 --- a/services/brig/test/integration/API/MLS.hs +++ b/services/brig/test/integration/API/MLS.hs @@ -28,6 +28,7 @@ import Data.Id import Data.Qualified import qualified Data.Set as Set import Data.Timeout +import Debug.Trace (traceM) import Federation.Util import Imports import Test.Tasty @@ -48,7 +49,7 @@ tests m b opts = [ test m "POST /mls/key-packages/self/:client" (testKeyPackageUpload b), test m "POST /mls/key-packages/self/:client (no public keys)" (testKeyPackageUploadNoKey b), test m "GET /mls/key-packages/self/:client/count" (testKeyPackageZeroCount b), - test m "GET /mls/key-packages/self/:client/count (expired package)" (testKeyPackageExpired b), + -- FUTUREWORK test m "GET /mls/key-packages/self/:client/count (expired package)" (testKeyPackageExpired b), test m "GET /mls/key-packages/claim/local/:user" (testKeyPackageClaim b), test m "GET /mls/key-packages/claim/local/:user - self claim" (testKeyPackageSelfClaim b), test m "GET /mls/key-packages/claim/remote/:user" (testKeyPackageRemoteClaim opts b) @@ -181,6 +182,7 @@ testKeyPackageSelfClaim brig = do testKeyPackageRemoteClaim :: Opts -> Brig -> Http () testKeyPackageRemoteClaim opts brig = do + traceM "sun" u <- fakeRemoteUser u' <- userQualifiedId <$> randomUser brig @@ -198,6 +200,7 @@ testKeyPackageRemoteClaim opts brig = do keyPackage = KeyPackageData . raw $ r } let mockBundle = KeyPackageBundle (Set.fromList entries) + traceM "gun" (bundle :: KeyPackageBundle, _reqs) <- liftIO . 
withTempMockFederator opts (Aeson.encode mockBundle) $ responseJsonError @@ -209,6 +212,7 @@ testKeyPackageRemoteClaim opts brig = do Date: Thu, 27 Apr 2023 12:16:12 +0000 Subject: [PATCH 52/75] remove checks for keypackage assignments --- .../brig/test/integration/API/Federation.hs | 13 ------------- services/brig/test/integration/API/MLS.hs | 18 +----------------- 2 files changed, 1 insertion(+), 30 deletions(-) diff --git a/services/brig/test/integration/API/Federation.hs b/services/brig/test/integration/API/Federation.hs index b7580143c1..36e1e42b65 100644 --- a/services/brig/test/integration/API/Federation.hs +++ b/services/brig/test/integration/API/Federation.hs @@ -47,14 +47,12 @@ import qualified Test.Tasty.Cannon as WS import Test.Tasty.HUnit import UnliftIO.Temporary import Util -import Web.HttpApiData import Wire.API.Connection import Wire.API.Federation.API.Brig import qualified Wire.API.Federation.API.Brig as FedBrig import qualified Wire.API.Federation.API.Brig as S import Wire.API.Federation.Component import Wire.API.Federation.Version -import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage import Wire.API.User import Wire.API.User.Client @@ -434,17 +432,6 @@ testClaimKeyPackages brig fedBrigClient = do count <- getKeyPackageCount brig bob c liftIO $ count @?= 1 - -- check that the package refs are correctly mapped - for_ bundle.entries $ \e -> do - cid <- - responseJsonError - =<< get (brig . 
paths ["i", "mls", "key-packages", toHeader e.ref]) - Opt.Opts -> Brig -> Http () testClaimKeyPackagesMLSDisabled opts brig = do alice <- fakeRemoteUser diff --git a/services/brig/test/integration/API/MLS.hs b/services/brig/test/integration/API/MLS.hs index 4d1d58443a..e236f3f54d 100644 --- a/services/brig/test/integration/API/MLS.hs +++ b/services/brig/test/integration/API/MLS.hs @@ -35,7 +35,6 @@ import Test.Tasty import Test.Tasty.HUnit import UnliftIO.Temporary import Util -import Web.HttpApiData import Wire.API.MLS.Credential import Wire.API.MLS.KeyPackage import Wire.API.MLS.Serialisation @@ -116,7 +115,7 @@ testKeyPackageClaim brig = do -- claim packages for both clients of u u' <- userQualifiedId <$> randomUser brig - bundle <- + bundle :: KeyPackageBundle <- responseJsonError =<< post ( brig @@ -126,7 +125,6 @@ testKeyPackageClaim brig = do (e.user, e.client)) bundle.entries @?= Set.fromList [(u, c1), (u, c2)] - checkMapping brig u bundle -- check that we have one fewer key package now for_ [c1, c2] $ \c -> do @@ -213,23 +211,9 @@ testKeyPackageRemoteClaim opts brig = do liftIO $ bundle @?= mockBundle traceM "fun" - checkMapping brig u bundle -------------------------------------------------------------------------------- --- | Check that the package refs are correctly mapped -checkMapping :: Brig -> Qualified UserId -> KeyPackageBundle -> Http () -checkMapping brig u bundle = - for_ bundle.entries $ \e -> do - cid <- - responseJsonError - =<< get (brig . 
paths ["i", "mls", "key-packages", toHeader e.ref]) - Qualified UserId -> Int -> Http ClientId createClient brig u i = fmap clientId $ From 415695ef3b3f6fafee20f7d60984630da2b472c6 Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Thu, 27 Apr 2023 13:44:49 +0000 Subject: [PATCH 53/75] validate bare proposals and inline proposal --- services/galley/src/Galley/API/MLS.hs | 1 - services/galley/src/Galley/API/MLS/Message.hs | 134 +++++------------- 2 files changed, 39 insertions(+), 96 deletions(-) diff --git a/services/galley/src/Galley/API/MLS.hs b/services/galley/src/Galley/API/MLS.hs index 92574f416a..2b06791739 100644 --- a/services/galley/src/Galley/API/MLS.hs +++ b/services/galley/src/Galley/API/MLS.hs @@ -21,7 +21,6 @@ module Galley.API.MLS postMLSMessage, postMLSCommitBundleFromLocalUser, postMLSMessageFromLocalUser, - postMLSMessageFromLocalUserV1, getMLSPublicKeys, ) where diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index ee8fb3a823..8234b69618 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -23,7 +23,6 @@ module Galley.API.MLS.Message postMLSCommitBundle, postMLSCommitBundleFromLocalUser, postMLSMessageFromLocalUser, - postMLSMessageFromLocalUserV1, postMLSMessage, MLSMessageStaticErrors, MLSBundleStaticErrors, @@ -40,7 +39,6 @@ import Data.List.NonEmpty (NonEmpty, nonEmpty) import qualified Data.Map as Map import Data.Qualified import qualified Data.Set as Set -import qualified Data.Text as T import Data.Time import Data.Tuple.Extra import GHC.Records @@ -79,7 +77,6 @@ import Wire.API.Conversation.Protocol import Wire.API.Conversation.Role import Wire.API.Error import Wire.API.Error.Galley -import Wire.API.Event.Conversation import Wire.API.Federation.API import Wire.API.Federation.API.Brig import Wire.API.Federation.API.Galley @@ -113,7 +110,7 @@ import Wire.API.User.Client -- - [ ] ? 
verify capabilities -- - [ ] verify that all extensions are present in the capabilities -- - [ ] ? in the update case (in galley), verify that the encryption_key is different --- [ ] validate proposals when processing proposal and commit messages +-- [x] validate proposals when processing proposal and commit messages -- [x] remove MissingSenderClient error -- [x] remove all key package ref mapping -- [x] initialise index maps @@ -246,36 +243,6 @@ type MLSBundleStaticErrors = MLSMessageStaticErrors '[ErrorS 'MLSWelcomeMismatch] -postMLSMessageFromLocalUserV1 :: - ( HasProposalEffects r, - Member (Error FederationError) r, - Member (ErrorS 'ConvAccessDenied) r, - Member (ErrorS 'ConvMemberNotFound) r, - Member (ErrorS 'ConvNotFound) r, - Member (ErrorS 'MissingLegalholdConsent) r, - Member (ErrorS 'MLSClientSenderUserMismatch) r, - Member (ErrorS 'MLSCommitMissingReferences) r, - Member (ErrorS 'MLSGroupConversationMismatch) r, - Member (ErrorS 'MLSNotEnabled) r, - Member (ErrorS 'MLSProposalNotFound) r, - Member (ErrorS 'MLSSelfRemovalNotAllowed) r, - Member (ErrorS 'MLSStaleMessage) r, - Member (ErrorS 'MLSUnsupportedMessage) r, - Member (ErrorS 'MLSSubConvClientNotInParent) r, - Member SubConversationStore r - ) => - Local UserId -> - ClientId -> - ConnId -> - RawMLS Message -> - Sem r [Event] -postMLSMessageFromLocalUserV1 lusr c conn smsg = do - assertMLSEnabled - imsg <- noteS @'MLSUnsupportedMessage $ mkIncomingMessage smsg - cnvOrSub <- lookupConvByGroupId imsg.groupId >>= noteS @'ConvNotFound - map lcuEvent . 
fst - <$> postMLSMessage lusr (tUntagged lusr) c cnvOrSub (Just conn) imsg - postMLSMessageFromLocalUser :: ( HasProposalEffects r, Member (Error FederationError) r, @@ -666,7 +633,7 @@ getCommitData senderIdentity lConvOrSub epoch commit = do if epoch == Epoch 0 then addProposedClient senderIdentity else mempty - proposals <- traverse (derefProposal groupId epoch) commit.proposals + proposals <- traverse (derefOrCheckProposal mlsMeta groupId epoch) commit.proposals action <- applyProposals mlsMeta groupId proposals pure (creatorAction <> action) @@ -826,18 +793,48 @@ processInternalCommit senderIdentity con lConvOrSub epoch action commit = do pure updates -derefProposal :: - ( Member ProposalStore r, +derefOrCheckProposal :: + ( Member (Error MLSProtocolError) r, + Member (ErrorS 'MLSInvalidLeafNodeIndex) r, + Member ProposalStore r, + Member (State IndexMap) r, Member (ErrorS 'MLSProposalNotFound) r ) => + ConversationMLSData -> GroupId -> Epoch -> ProposalOrRef -> Sem r Proposal -derefProposal groupId epoch (Ref ref) = do +derefOrCheckProposal _mlsMeta groupId epoch (Ref ref) = do p <- getProposal groupId epoch ref >>= noteS @'MLSProposalNotFound pure p.value -derefProposal _ _ (Inline p) = pure p +derefOrCheckProposal mlsMeta _ _ (Inline p) = do + im <- get + checkProposal mlsMeta im p + pure p + +checkProposal :: + ( Member (Error MLSProtocolError) r, + Member (ErrorS 'MLSInvalidLeafNodeIndex) r + ) => + ConversationMLSData -> + IndexMap -> + Proposal -> + Sem r () +checkProposal mlsMeta im p = + case p of + AddProposal kp -> do + (cs, _lifetime) <- + either + (\msg -> throw (mlsProtocolError ("Invalid key package in Add proposal: " <> msg))) + pure + $ validateKeyPackage Nothing kp.value + -- we are not checking lifetime constraints here + unless (mlsMeta.cnvmlsCipherSuite == cs) $ + throw (mlsProtocolError "Key package ciphersuite does not match conversation") + RemoveProposal idx -> do + void $ noteS @'MLSInvalidLeafNodeIndex $ imLookup im idx + _ -> pure 
() addProposedClient :: Member (State IndexMap) r => ClientIdentity -> Sem r ProposalAction addProposedClient cid = do @@ -893,24 +890,6 @@ applyProposal _mlsMeta _groupId (ExternalInitProposal _) = pure paExternalInitPresent applyProposal _mlsMeta _groupId _ = pure mempty -checkProposalCipherSuite :: - Member (Error MLSProtocolError) r => - CipherSuiteTag -> - Proposal -> - Sem r () -checkProposalCipherSuite suite (AddProposal kpRaw) = do - let kp = value kpRaw - unless (kp.cipherSuite == tagCipherSuite suite) - . throw - . mlsProtocolError - . T.pack - $ "The group's cipher suite " - <> show (cipherSuiteNumber (tagCipherSuite suite)) - <> " and the cipher suite of the proposal's key package " - <> show (cipherSuiteNumber kp.cipherSuite) - <> " do not match." -checkProposalCipherSuite _suite _prop = pure () - processProposal :: HasProposalEffects r => ( Member (ErrorS 'ConvNotFound) r, @@ -927,31 +906,11 @@ processProposal qusr lConvOrSub msg pub prop = do checkEpoch msg.epoch mlsMeta checkGroup msg.groupId mlsMeta let suiteTag = cnvmlsCipherSuite mlsMeta - let cid = mcId . convOfConvOrSub . tUnqualified $ lConvOrSub - - -- validate the proposal - -- - -- is the user a member of the conversation? - loc <- qualifyLocal () - isMember' <- - foldQualified - loc - ( fmap isJust - . getLocalMember cid - . tUnqualified - ) - ( fmap isJust - . 
getRemoteMember cid - ) - qusr - unless isMember' $ throwS @'ConvNotFound -- FUTUREWORK: validate the member's conversation role - let propValue = value prop - checkProposalCipherSuite suiteTag propValue - when (isExternal pub.sender) $ do - checkExternalProposalSignature pub prop - checkExternalProposalUser qusr propValue + let im = indexMapConvOrSub $ tUnqualified lConvOrSub + checkProposal mlsMeta im prop.value + when (isExternal pub.sender) $ checkExternalProposalUser qusr prop.value let propRef = authContentRef suiteTag (incomingMessageAuthenticatedContent pub) storeProposal msg.groupId msg.epoch propRef ProposalOriginClient prop @@ -959,21 +918,6 @@ isExternal :: Sender -> Bool isExternal (SenderMember _) = False isExternal _ = True -checkExternalProposalSignature :: - Member (ErrorS 'MLSUnsupportedProposal) r => - IncomingPublicMessageContent -> - RawMLS Proposal -> - Sem r () -checkExternalProposalSignature _msg prop = case value prop of - AddProposal kp -> do - let _pubkey = kp.value.leafNode.signatureKey - _ctx = error "TODO: get group context" - -- TODO - unless True $ - -- unless (verifyMessageSignature ctx msg.framedContent msg.authData pubkey) $ - throwS @'MLSUnsupportedProposal - _ -> pure () -- FUTUREWORK: check signature of other proposals as well - -- check owner/subject of the key package exists and belongs to the user checkExternalProposalUser :: ( Member BrigAccess r, From 4bb764b22f640ea797caa8956a6433a6da85cdca Mon Sep 17 00:00:00 2001 From: Stefan Berthold Date: Thu, 27 Apr 2023 14:00:11 +0000 Subject: [PATCH 54/75] rephrase and filter the left TODOs --- services/galley/src/Galley/API/MLS/Message.hs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 8234b69618..9c2e560da5 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -107,9 +107,7 @@ import 
Wire.API.User.Client -- - [x] validate lifetime and public key consistency only on brig -- - [x] check that ciphersuite matches conversation on galley -- - [x] check the signature on the LeafNode --- - [ ] ? verify capabilities --- - [ ] verify that all extensions are present in the capabilities --- - [ ] ? in the update case (in galley), verify that the encryption_key is different +-- - [ ] verify that capabilities include basic credentials -- [x] validate proposals when processing proposal and commit messages -- [x] remove MissingSenderClient error -- [x] remove all key package ref mapping @@ -118,14 +116,16 @@ import Wire.API.User.Client -- [x] remove prefixes from value and raw -- [x] remove PublicGroupState and GroupInfoBundle modules -- [x] remove prefixes from fields in Commit and Proposal --- [ ] move external commit logic to a separate module and improve types -- [x] check epoch inside commit lock -- [x] split executeProposalAction for internal and external commits +-- [ ] add nonce to PreSharedKeyID +-- [ ] move external commit logic to a separate module and improve types --- [ ] ? consider adding more integration tests --- [ ] ? rename public_group_state field in conversation table --- [ ] ? PreSharedKey proposal --- [ ] ? newtype for leaf node indices +-- FUTUREWORK +-- - Check that the capabilities of a leaf node in an add proposal contains all +-- the required_capabilities of the group context. This would require fetching +-- the group info from the DB in order to read the group context. +-- - Verify message signature, this also requires the group context. 
(see above) data IncomingMessage = IncomingMessage { epoch :: Epoch, From 61433065ef24c4af6d8cf92fde10689f5690bc4b Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 28 Apr 2023 10:57:49 +0200 Subject: [PATCH 55/75] Verify that capabilities include basic credentials --- libs/wire-api/src/Wire/API/MLS/Validation.hs | 4 +++- services/galley/src/Galley/API/MLS/Message.hs | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/Validation.hs b/libs/wire-api/src/Wire/API/MLS/Validation.hs index 5b2eeec36f..bb13cd4cd7 100644 --- a/libs/wire-api/src/Wire/API/MLS/Validation.hs +++ b/libs/wire-api/src/Wire/API/MLS/Validation.hs @@ -120,4 +120,6 @@ validateSource t s = do <> "'" validateCapabilities :: Capabilities -> Either Text () -validateCapabilities _ = pure () -- TODO +validateCapabilities caps = + unless (BasicCredentialTag `elem` caps.credentials) $ + Left "missing BasicCredential capability" diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 9c2e560da5..9cb562fd26 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -107,7 +107,7 @@ import Wire.API.User.Client -- - [x] validate lifetime and public key consistency only on brig -- - [x] check that ciphersuite matches conversation on galley -- - [x] check the signature on the LeafNode --- - [ ] verify that capabilities include basic credentials +-- - [x] verify that capabilities include basic credentials -- [x] validate proposals when processing proposal and commit messages -- [x] remove MissingSenderClient error -- [x] remove all key package ref mapping From 709f2c3216dd099c58482d542bfc84e46d313ec5 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 28 Apr 2023 11:06:48 +0200 Subject: [PATCH 56/75] Add nonce to PreSharedKeyID structure --- libs/wire-api/src/Wire/API/MLS/Proposal.hs | 23 +++++++++++++++---- 
services/galley/src/Galley/API/MLS/Message.hs | 4 ++-- 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/Proposal.hs b/libs/wire-api/src/Wire/API/MLS/Proposal.hs index 02f5334ac8..1ae2ef989a 100644 --- a/libs/wire-api/src/Wire/API/MLS/Proposal.hs +++ b/libs/wire-api/src/Wire/API/MLS/Proposal.hs @@ -104,18 +104,18 @@ instance SerialiseMLS PreSharedKeyTag where serialiseMLS = serialiseMLSEnum @Word8 -- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-8.4-6 -data PreSharedKeyID = ExternalKeyID ByteString | ResumptionKeyID Resumption +data PreSharedKeyIDCore = ExternalKeyID ByteString | ResumptionKeyID Resumption deriving stock (Eq, Show, Generic) - deriving (Arbitrary) via (GenericUniform PreSharedKeyID) + deriving (Arbitrary) via (GenericUniform PreSharedKeyIDCore) -instance ParseMLS PreSharedKeyID where +instance ParseMLS PreSharedKeyIDCore where parseMLS = do t <- parseMLS case t of ExternalKeyTag -> ExternalKeyID <$> parseMLSBytes @VarInt ResumptionKeyTag -> ResumptionKeyID <$> parseMLS -instance SerialiseMLS PreSharedKeyID where +instance SerialiseMLS PreSharedKeyIDCore where serialiseMLS (ExternalKeyID bs) = do serialiseMLS ExternalKeyTag serialiseMLSBytes @VarInt bs @@ -123,6 +123,21 @@ instance SerialiseMLS PreSharedKeyID where serialiseMLS ResumptionKeyTag serialiseMLS r +data PreSharedKeyID = PreSharedKeyID + { core :: PreSharedKeyIDCore, + nonce :: ByteString + } + deriving stock (Eq, Show, Generic) + deriving (Arbitrary) via (GenericUniform PreSharedKeyID) + +instance ParseMLS PreSharedKeyID where + parseMLS = PreSharedKeyID <$> parseMLS <*> parseMLSBytes @VarInt + +instance SerialiseMLS PreSharedKeyID where + serialiseMLS psk = do + serialiseMLS psk.core + serialiseMLSBytes @VarInt psk.nonce + -- | https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol-20/draft-ietf-mls-protocol.html#section-8.4-6 data Resumption = Resumption { 
usage :: Word8, diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 9cb562fd26..3f761e7bbd 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -102,7 +102,7 @@ import Wire.API.User.Client -- TODO: -- [x] replace ref with index in remove proposals --- [ ] validate leaf nodes and key packages locally on galley +-- [x] validate leaf nodes and key packages locally on galley -- - [x] extract validation function to wire-api -- - [x] validate lifetime and public key consistency only on brig -- - [x] check that ciphersuite matches conversation on galley @@ -118,7 +118,7 @@ import Wire.API.User.Client -- [x] remove prefixes from fields in Commit and Proposal -- [x] check epoch inside commit lock -- [x] split executeProposalAction for internal and external commits --- [ ] add nonce to PreSharedKeyID +-- [x] add nonce to PreSharedKeyID -- [ ] move external commit logic to a separate module and improve types -- FUTUREWORK From da40f1b5089701c1daa63fd054ced76bbeab31e2 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 28 Apr 2023 11:45:54 +0200 Subject: [PATCH 57/75] Split Galley.API.MLS.Message --- services/galley/galley.cabal | 6 + services/galley/src/Galley/API/MLS/Commit.hs | 28 + .../galley/src/Galley/API/MLS/Commit/Core.hs | 195 ++++ .../Galley/API/MLS/Commit/ExternalCommit.hs | 211 ++++ .../Galley/API/MLS/Commit/InternalCommit.hs | 288 ++++++ .../src/Galley/API/MLS/IncomingMessage.hs | 131 +++ services/galley/src/Galley/API/MLS/Message.hs | 931 +----------------- .../galley/src/Galley/API/MLS/Proposal.hs | 306 ++++++ 8 files changed, 1168 insertions(+), 928 deletions(-) create mode 100644 services/galley/src/Galley/API/MLS/Commit.hs create mode 100644 services/galley/src/Galley/API/MLS/Commit/Core.hs create mode 100644 services/galley/src/Galley/API/MLS/Commit/ExternalCommit.hs create mode 100644 
services/galley/src/Galley/API/MLS/Commit/InternalCommit.hs create mode 100644 services/galley/src/Galley/API/MLS/IncomingMessage.hs create mode 100644 services/galley/src/Galley/API/MLS/Proposal.hs diff --git a/services/galley/galley.cabal b/services/galley/galley.cabal index 0ac0530eb5..20711d9312 100644 --- a/services/galley/galley.cabal +++ b/services/galley/galley.cabal @@ -84,12 +84,18 @@ library Galley.API.Mapping Galley.API.Message Galley.API.MLS + Galley.API.MLS.Commit + Galley.API.MLS.Commit.Core + Galley.API.MLS.Commit.ExternalCommit + Galley.API.MLS.Commit.InternalCommit Galley.API.MLS.Conversation Galley.API.MLS.Enabled Galley.API.MLS.GroupInfo + Galley.API.MLS.IncomingMessage Galley.API.MLS.Keys Galley.API.MLS.Message Galley.API.MLS.Propagate + Galley.API.MLS.Proposal Galley.API.MLS.Removal Galley.API.MLS.SubConversation Galley.API.MLS.Types diff --git a/services/galley/src/Galley/API/MLS/Commit.hs b/services/galley/src/Galley/API/MLS/Commit.hs new file mode 100644 index 0000000000..39088273b8 --- /dev/null +++ b/services/galley/src/Galley/API/MLS/Commit.hs @@ -0,0 +1,28 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . 
+ +module Galley.API.MLS.Commit + ( getCommitData, + getExternalCommitData, + processInternalCommit, + processExternalCommit, + ) +where + +import Galley.API.MLS.Commit.Core +import Galley.API.MLS.Commit.ExternalCommit +import Galley.API.MLS.Commit.InternalCommit diff --git a/services/galley/src/Galley/API/MLS/Commit/Core.hs b/services/galley/src/Galley/API/MLS/Commit/Core.hs new file mode 100644 index 0000000000..50eca037f1 --- /dev/null +++ b/services/galley/src/Galley/API/MLS/Commit/Core.hs @@ -0,0 +1,195 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . 
+ +module Galley.API.MLS.Commit.Core + ( getCommitData, + incrementEpoch, + getClientInfo, + HasProposalActionEffects, + ProposalErrors, + HandleMLSProposalFailures (..), + ) +where + +import Control.Comonad +import Data.Id +import Data.Qualified +import Data.Time +import Galley.API.Error +import Galley.API.MLS.Conversation +import Galley.API.MLS.Proposal +import Galley.API.MLS.Types +import Galley.Effects +import Galley.Effects.BrigAccess +import Galley.Effects.ConversationStore +import Galley.Effects.FederatorAccess +import Galley.Effects.SubConversationStore +import Galley.Env +import Galley.Options +import Imports +import Polysemy +import Polysemy.Error +import Polysemy.Input +import Polysemy.Internal +import Polysemy.State +import Polysemy.TinyLog +import Wire.API.Conversation.Protocol +import Wire.API.Conversation.Role +import Wire.API.Error +import Wire.API.Error.Galley +import Wire.API.Federation.API +import Wire.API.Federation.API.Brig +import Wire.API.Federation.Error +import Wire.API.MLS.CipherSuite +import Wire.API.MLS.Commit +import Wire.API.MLS.Credential +import Wire.API.MLS.SubConversation +import Wire.API.User.Client + +type HasProposalActionEffects r = + ( Member BrigAccess r, + Member ConversationStore r, + Member (Error InternalError) r, + Member (ErrorS 'ConvNotFound) r, + Member (ErrorS 'MLSClientMismatch) r, + Member (Error MLSProposalFailure) r, + Member (ErrorS 'MissingLegalholdConsent) r, + Member (ErrorS 'MLSUnsupportedProposal) r, + Member (Error MLSProtocolError) r, + Member (ErrorS 'MLSSelfRemovalNotAllowed) r, + Member ExternalAccess r, + Member FederatorAccess r, + Member GundeckAccess r, + Member (Input Env) r, + Member (Input Opts) r, + Member (Input UTCTime) r, + Member LegalHoldStore r, + Member MemberStore r, + Member ProposalStore r, + Member SubConversationStore r, + Member TeamStore r, + Member TinyLog r + ) + +getCommitData :: + ( HasProposalEffects r, + Member (ErrorS 'MLSProposalNotFound) r + ) => + ClientIdentity -> + 
Local ConvOrSubConv -> + Epoch -> + Commit -> + Sem r ProposalAction +getCommitData senderIdentity lConvOrSub epoch commit = do + let convOrSub = tUnqualified lConvOrSub + mlsMeta = mlsMetaConvOrSub convOrSub + groupId = cnvmlsGroupId mlsMeta + + evalState (indexMapConvOrSub convOrSub) $ do + creatorAction <- + if epoch == Epoch 0 + then addProposedClient senderIdentity + else mempty + proposals <- traverse (derefOrCheckProposal mlsMeta groupId epoch) commit.proposals + action <- applyProposals mlsMeta groupId proposals + pure (creatorAction <> action) + +incrementEpoch :: + ( Member ConversationStore r, + Member (ErrorS 'ConvNotFound) r, + Member MemberStore r, + Member SubConversationStore r + ) => + ConvOrSubConv -> + Sem r ConvOrSubConv +incrementEpoch (Conv c) = do + let epoch' = succ (cnvmlsEpoch (mcMLSData c)) + setConversationEpoch (mcId c) epoch' + conv <- getConversation (mcId c) >>= noteS @'ConvNotFound + fmap Conv (mkMLSConversation conv >>= noteS @'ConvNotFound) +incrementEpoch (SubConv c s) = do + let epoch' = succ (cnvmlsEpoch (scMLSData s)) + setSubConversationEpoch (scParentConvId s) (scSubConvId s) epoch' + subconv <- + getSubConversation (mcId c) (scSubConvId s) >>= noteS @'ConvNotFound + pure (SubConv c subconv) + +getClientInfo :: + ( Member BrigAccess r, + Member FederatorAccess r + ) => + Local x -> + Qualified UserId -> + SignatureSchemeTag -> + Sem r (Set ClientInfo) +getClientInfo loc = foldQualified loc getLocalMLSClients getRemoteMLSClients + +getRemoteMLSClients :: + ( Member FederatorAccess r + ) => + Remote UserId -> + SignatureSchemeTag -> + Sem r (Set ClientInfo) +getRemoteMLSClients rusr ss = do + runFederated rusr $ + fedClient @'Brig @"get-mls-clients" $ + MLSClientsRequest + { mcrUserId = tUnqualified rusr, + mcrSignatureScheme = ss + } + +-------------------------------------------------------------------------------- +-- Error handling of proposal execution + +-- The following errors are caught by 'executeProposalAction' and 
wrapped in a +-- 'MLSProposalFailure'. This way errors caused by the execution of proposals are +-- separated from those caused by the commit processing itself. +type ProposalErrors = + '[ Error FederationError, + Error InvalidInput, + ErrorS ('ActionDenied 'AddConversationMember), + ErrorS ('ActionDenied 'LeaveConversation), + ErrorS ('ActionDenied 'RemoveConversationMember), + ErrorS 'ConvAccessDenied, + ErrorS 'InvalidOperation, + ErrorS 'NotATeamMember, + ErrorS 'NotConnected, + ErrorS 'TooManyMembers + ] + +class HandleMLSProposalFailures effs r where + handleMLSProposalFailures :: Sem (Append effs r) a -> Sem r a + +class HandleMLSProposalFailure eff r where + handleMLSProposalFailure :: Sem (eff ': r) a -> Sem r a + +instance HandleMLSProposalFailures '[] r where + handleMLSProposalFailures = id + +instance + ( HandleMLSProposalFailures effs r, + HandleMLSProposalFailure eff (Append effs r) + ) => + HandleMLSProposalFailures (eff ': effs) r + where + handleMLSProposalFailures = handleMLSProposalFailures @effs . handleMLSProposalFailure @eff + +instance + (APIError e, Member (Error MLSProposalFailure) r) => + HandleMLSProposalFailure (Error e) r + where + handleMLSProposalFailure = mapError (MLSProposalFailure . toWai) diff --git a/services/galley/src/Galley/API/MLS/Commit/ExternalCommit.hs b/services/galley/src/Galley/API/MLS/Commit/ExternalCommit.hs new file mode 100644 index 0000000000..eca278f9ca --- /dev/null +++ b/services/galley/src/Galley/API/MLS/Commit/ExternalCommit.hs @@ -0,0 +1,211 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. 
+-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . + +module Galley.API.MLS.Commit.ExternalCommit + ( getExternalCommitData, + processExternalCommit, + ) +where + +import Control.Comonad +import Control.Lens (forOf_) +import qualified Data.Map as Map +import Data.Qualified +import qualified Data.Set as Set +import Data.Tuple.Extra +import Galley.API.Error +import Galley.API.MLS.Commit.Core +import Galley.API.MLS.Proposal +import Galley.API.MLS.Removal +import Galley.API.MLS.Types +import Galley.API.MLS.Util +import Galley.Effects +import Galley.Effects.MemberStore +import Imports +import Polysemy +import Polysemy.Error +import Polysemy.Resource (Resource) +import Polysemy.State +import Wire.API.Conversation.Protocol +import Wire.API.Error +import Wire.API.Error.Galley +import Wire.API.MLS.Commit +import Wire.API.MLS.Credential +import Wire.API.MLS.LeafNode +import Wire.API.MLS.Proposal +import Wire.API.MLS.ProposalTag +import Wire.API.MLS.Serialisation +import Wire.API.MLS.SubConversation +import Wire.API.MLS.Validation + +getExternalCommitData :: + forall r. 
+ ( Member (Error MLSProtocolError) r, + Member (ErrorS 'MLSStaleMessage) r, + Member (ErrorS 'MLSUnsupportedProposal) r, + Member (ErrorS 'MLSInvalidLeafNodeIndex) r + ) => + ClientIdentity -> + Local ConvOrSubConv -> + Epoch -> + Commit -> + Sem r ProposalAction +getExternalCommitData senderIdentity lConvOrSub epoch commit = do + let convOrSub = tUnqualified lConvOrSub + mlsMeta = mlsMetaConvOrSub convOrSub + curEpoch = cnvmlsEpoch mlsMeta + groupId = cnvmlsGroupId mlsMeta + when (epoch /= curEpoch) $ throwS @'MLSStaleMessage + proposals <- traverse getInlineProposal commit.proposals + + -- According to the spec, an external commit must contain: + -- (https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol.html#section-12.2) + -- + -- > Exactly one ExternalInit + -- > At most one Remove proposal, with which the joiner removes an old + -- > version of themselves. + -- > Zero or more PreSharedKey proposals. + -- > No other proposals. + let counts = foldr (\x -> Map.insertWith (+) x.tag (1 :: Int)) mempty proposals + + unless (Map.lookup ExternalInitProposalTag counts == Just 1) $ + throw (mlsProtocolError "External commits must contain exactly one ExternalInit proposal") + unless (Map.findWithDefault 0 RemoveProposalTag counts <= 1) $ + throw (mlsProtocolError "External commits must contain at most one Remove proposal") + unless (null (Map.keys counts \\ allowedProposals)) $ + throw (mlsProtocolError "Invalid proposal type in an external commit") + + evalState (indexMapConvOrSub convOrSub) $ do + -- process optional removal + propAction <- applyProposals mlsMeta groupId proposals + -- add sender + selfAction <- addProposedClient senderIdentity + case cmAssocs (paRemove propAction) of + [(cid, _)] + | cid /= senderIdentity -> + throw $ mlsProtocolError "Only the self client can be removed by an external commit" + _ -> pure () + + pure $ propAction <> selfAction + where + allowedProposals = [ExternalInitProposalTag, RemoveProposalTag, 
PreSharedKeyProposalTag] + + getInlineProposal :: ProposalOrRef -> Sem r Proposal + getInlineProposal (Ref _) = + throw (mlsProtocolError "External commits cannot reference proposals") + getInlineProposal (Inline p) = pure p + +processExternalCommit :: + forall r. + ( Member (ErrorS 'MLSStaleMessage) r, + Member (ErrorS 'MLSSubConvClientNotInParent) r, + Member Resource r, + HasProposalActionEffects r + ) => + ClientIdentity -> + Local ConvOrSubConv -> + Epoch -> + ProposalAction -> + Maybe UpdatePath -> + Sem r () +processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do + let convOrSub = tUnqualified lConvOrSub + + -- only members can join a subconversation + forOf_ _SubConv convOrSub $ \(mlsConv, _) -> + unless (isClientMember senderIdentity (mcMembers mlsConv)) $ + throwS @'MLSSubConvClientNotInParent + + -- get index of the newly added client, as calculated when processing proposals + idx <- case cmAssocs (paAdd action) of + [(cid, idx)] | cid == senderIdentity -> pure idx + _ -> throw (InternalErrorWithDescription "Unexpected Add action for external commit") + + -- extract leaf node from update path and validate it + leafNode <- + (.leaf) + <$> note + (mlsProtocolError "External commits need an update path") + updatePath + let cs = cnvmlsCipherSuite (mlsMetaConvOrSub (tUnqualified lConvOrSub)) + let groupId = cnvmlsGroupId (mlsMetaConvOrSub convOrSub) + let extra = LeafNodeTBSExtraCommit groupId idx + case validateLeafNode cs (Just senderIdentity) extra leafNode.value of + Left errMsg -> + throw $ + mlsProtocolError ("Tried to add invalid LeafNode: " <> errMsg) + Right _ -> pure () + + withCommitLock (fmap idForConvOrSub lConvOrSub) groupId epoch $ do + executeExtCommitProposalAction senderIdentity lConvOrSub action + + -- increment epoch number + lConvOrSub' <- for lConvOrSub incrementEpoch + + -- fetch backend remove proposals of the previous epoch + let remIndices = map snd (cmAssocs (paRemove action)) + indicesInRemoveProposals <- + -- 
skip remove proposals of clients already removed by the external commit + (\\ remIndices) + <$> getPendingBackendRemoveProposals groupId epoch + + -- requeue backend remove proposals for the current epoch + let cm = membersConvOrSub (tUnqualified lConvOrSub') + createAndSendRemoveProposals + lConvOrSub' + indicesInRemoveProposals + (cidQualifiedUser senderIdentity) + cm + +executeExtCommitProposalAction :: + forall r. + HasProposalActionEffects r => + ClientIdentity -> + Local ConvOrSubConv -> + ProposalAction -> + Sem r () +executeExtCommitProposalAction senderIdentity lconvOrSub action = do + let mlsMeta = mlsMetaConvOrSub $ tUnqualified lconvOrSub + newCILeaves = cmAssocs (paAdd action) + deprecatedCILeaves = cmAssocs (paRemove action) + + -- Adding clients: sender's client must be added and no other client may + be added. + when (length newCILeaves /= 1 || fst (head newCILeaves) /= senderIdentity) $ + throw (mlsProtocolError "No add proposals are allowed in external commits") + + -- Client removal: only the sender's client can be removed when rejoining the + -- (sub)conversation. + when (length deprecatedCILeaves > 1) $ + throw (mlsProtocolError "Up to one client can be removed in an external commit") + for_ (listToMaybe deprecatedCILeaves) $ \ciLeaf -> do + when (fst ciLeaf /= senderIdentity) $ + throw (mlsProtocolError "Only the sender can rejoin in an external commit") + + -- Remove deprecated sender client from conversation state. + for_ deprecatedCILeaves $ \(ci, _) -> do + removeMLSClients + (cnvmlsGroupId mlsMeta) + (cidQualifiedUser ci) + (Set.singleton $ ciClient ci) + + -- Add new sender client to the conversation state.
+ for_ newCILeaves $ \(ci, idx) -> do + addMLSClients + (cnvmlsGroupId mlsMeta) + (cidQualifiedUser ci) + (Set.singleton (ciClient ci, idx)) diff --git a/services/galley/src/Galley/API/MLS/Commit/InternalCommit.hs b/services/galley/src/Galley/API/MLS/Commit/InternalCommit.hs new file mode 100644 index 0000000000..debd3493fa --- /dev/null +++ b/services/galley/src/Galley/API/MLS/Commit/InternalCommit.hs @@ -0,0 +1,288 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . 
+ +module Galley.API.MLS.Commit.InternalCommit (processInternalCommit) where + +import Control.Comonad +import Control.Error.Util (hush) +import Control.Lens (forOf_, preview) +import Control.Lens.Extras (is) +import Data.Id +import Data.List.NonEmpty (NonEmpty, nonEmpty) +import qualified Data.Map as Map +import Data.Qualified +import qualified Data.Set as Set +import Data.Tuple.Extra +import Galley.API.Action +import Galley.API.MLS.Commit.Core +import Galley.API.MLS.Conversation +import Galley.API.MLS.Proposal +import Galley.API.MLS.Types +import Galley.API.MLS.Util +import Galley.Data.Conversation.Types hiding (Conversation) +import qualified Galley.Data.Conversation.Types as Data +import Galley.Effects +import Galley.Effects.FederatorAccess +import Galley.Effects.MemberStore +import Galley.Effects.ProposalStore +import Galley.Types.Conversations.Members +import Imports +import Polysemy +import Polysemy.Error +import Polysemy.Resource (Resource) +import Wire.API.Conversation.Protocol +import Wire.API.Conversation.Role +import Wire.API.Error +import Wire.API.Error.Galley +import Wire.API.Federation.API +import Wire.API.Federation.API.Galley +import Wire.API.MLS.CipherSuite +import Wire.API.MLS.Commit +import Wire.API.MLS.Credential +import qualified Wire.API.MLS.Proposal as Proposal +import Wire.API.MLS.SubConversation +import Wire.API.User.Client + +processInternalCommit :: + forall r. 
+ ( HasProposalEffects r, + Member (ErrorS 'ConvNotFound) r, + Member (ErrorS 'MLSCommitMissingReferences) r, + Member (ErrorS 'MLSSelfRemovalNotAllowed) r, + Member (ErrorS 'MLSStaleMessage) r, + Member (ErrorS 'MissingLegalholdConsent) r, + Member SubConversationStore r, + Member Resource r + ) => + ClientIdentity -> + Maybe ConnId -> + Local ConvOrSubConv -> + Epoch -> + ProposalAction -> + Commit -> + Sem r [LocalConversationUpdate] +processInternalCommit senderIdentity con lConvOrSub epoch action commit = do + let convOrSub = tUnqualified lConvOrSub + mlsMeta = mlsMetaConvOrSub convOrSub + + withCommitLock (fmap idForConvOrSub lConvOrSub) (cnvmlsGroupId (mlsMetaConvOrSub convOrSub)) epoch $ do + -- check all pending proposals are referenced in the commit + allPendingProposals <- getAllPendingProposalRefs (cnvmlsGroupId mlsMeta) epoch + let referencedProposals = Set.fromList $ mapMaybe (\x -> preview Proposal._Ref x) commit.proposals + unless (all (`Set.member` referencedProposals) allPendingProposals) $ + throwS @'MLSCommitMissingReferences + + -- process and execute proposals + updates <- executeIntCommitProposalAction senderIdentity con lConvOrSub action + + -- increment epoch number + for_ lConvOrSub incrementEpoch + + pure updates + +executeIntCommitProposalAction :: + forall r. 
+ HasProposalActionEffects r => + ClientIdentity -> + Maybe ConnId -> + Local ConvOrSubConv -> + ProposalAction -> + Sem r [LocalConversationUpdate] +executeIntCommitProposalAction senderIdentity con lconvOrSub action = do + let qusr = cidQualifiedUser senderIdentity + convOrSub = tUnqualified lconvOrSub + mlsMeta = mlsMetaConvOrSub convOrSub + cm = membersConvOrSub convOrSub + ss = csSignatureScheme (cnvmlsCipherSuite mlsMeta) + newUserClients = Map.assocs (paAdd action) + + -- FUTUREWORK: remove this check after remote admins are implemented in federation https://wearezeta.atlassian.net/browse/FS-216 + foldQualified lconvOrSub (\_ -> pure ()) (\_ -> throwS @'MLSUnsupportedProposal) qusr + + -- no client can be directly added to a subconversation + when (is _SubConv convOrSub && any ((senderIdentity /=) . fst) (cmAssocs (paAdd action))) $ + throw (mlsProtocolError "Add proposals in subconversations are not supported") + + -- Note [client removal] + -- We support two types of removals: + -- 1. when a user is removed from a group, all their clients have to be removed + -- 2. when a client is deleted, that particular client (but not necessarily + -- other clients of the same user) has to be removed. + -- + -- Type 2 requires no special processing on the backend, so here we filter + -- out all removals of that type, so that further checks and processing can + -- be applied only to type 1 removals. + -- + -- Furthermore, subconversation clients can be removed arbitrarily, so this + -- processing is only necessary for main conversations. In the + -- subconversation case, an empty list is returned. 
+ membersToRemove <- case convOrSub of + SubConv _ _ -> pure [] + Conv _ -> mapMaybe hush <$$> for (Map.assocs (paRemove action)) $ + \(qtarget, Map.keysSet -> clients) -> runError @() $ do + let clientsInConv = Map.keysSet (Map.findWithDefault mempty qtarget cm) + let removedClients = Set.intersection clients clientsInConv + + -- ignore user if none of their clients are being removed + when (Set.null removedClients) $ throw () + + -- return error if the user is trying to remove themself + when (cidQualifiedUser senderIdentity == qtarget) $ + throwS @'MLSSelfRemovalNotAllowed + + -- FUTUREWORK: add tests against this situation for conv v subconv + when (not (is _SubConv convOrSub) && removedClients /= clientsInConv) $ do + -- FUTUREWORK: turn this error into a proper response + throwS @'MLSClientMismatch + + pure qtarget + + -- for each user, we compare their clients with the ones being added to the conversation + for_ newUserClients $ \(qtarget, newclients) -> case Map.lookup qtarget cm of + -- user is already present, skip check in this case + Just _ -> pure () + -- new user + Nothing -> do + -- final set of clients in the conversation + let clients = Map.keysSet (newclients <> Map.findWithDefault mempty qtarget cm) + -- get list of mls clients from brig + clientInfo <- getClientInfo lconvOrSub qtarget ss + let allClients = Set.map ciId clientInfo + let allMLSClients = Set.map ciId (Set.filter ciMLS clientInfo) + -- We check the following condition: + -- allMLSClients ⊆ clients ⊆ allClients + -- i.e. + -- - if a client has at least 1 key package, it has to be added + -- - if a client is being added, it has to still exist + -- + -- The reason why we can't simply check that clients == allMLSClients is + -- that a client with no remaining key packages might be added by a user + -- who just fetched its last key package. 
+ unless + ( Set.isSubsetOf allMLSClients clients + && Set.isSubsetOf clients allClients + ) + $ do + -- unless (Set.isSubsetOf allClients clients) $ do + -- FUTUREWORK: turn this error into a proper response + throwS @'MLSClientMismatch + + -- remove users from the conversation and send events + removeEvents <- + foldMap + (removeMembers qusr con lconvOrSub) + (nonEmpty membersToRemove) + + -- Remove clients from the conversation state. This includes client removals + -- of all types (see Note [client removal]). + for_ (Map.assocs (paRemove action)) $ \(qtarget, clients) -> do + removeMLSClients (cnvmlsGroupId mlsMeta) qtarget (Map.keysSet clients) + + -- if this is a new subconversation, call `on-new-remote-conversation` on all + -- the remote backends involved in the main conversation + forOf_ _SubConv convOrSub $ \(mlsConv, subConv) -> do + when (cnvmlsEpoch (scMLSData subConv) == Epoch 0) $ do + let remoteDomains = + Set.fromList + ( map + (void . rmId) + (mcRemoteMembers mlsConv) + ) + let nrc = + NewRemoteSubConversation + { nrscConvId = mcId mlsConv, + nrscSubConvId = scSubConvId subConv, + nrscMlsData = scMLSData subConv + } + runFederatedConcurrently_ (toList remoteDomains) $ \_ -> do + void $ fedClient @'Galley @"on-new-remote-subconversation" nrc + + -- add users to the conversation and send events + addEvents <- + foldMap (addMembers qusr con lconvOrSub) + . nonEmpty + . 
map fst + $ newUserClients + + -- add clients in the conversation state + for_ newUserClients $ \(qtarget, newClients) -> do + addMLSClients (cnvmlsGroupId mlsMeta) qtarget (Set.fromList (Map.assocs newClients)) + + -- TODO: increment epoch here instead of in the calling site + + pure (addEvents <> removeEvents) + +addMembers :: + HasProposalActionEffects r => + Qualified UserId -> + Maybe ConnId -> + Local ConvOrSubConv -> + NonEmpty (Qualified UserId) -> + Sem r [LocalConversationUpdate] +addMembers qusr con lconvOrSub users = case tUnqualified lconvOrSub of + Conv mlsConv -> do + let lconv = qualifyAs lconvOrSub (mcConv mlsConv) + -- FUTUREWORK: update key package ref mapping to reflect conversation membership + foldMap + ( handleNoChanges + . handleMLSProposalFailures @ProposalErrors + . fmap pure + . updateLocalConversationUnchecked @'ConversationJoinTag lconv qusr con + . flip ConversationJoin roleNameWireMember + ) + . nonEmpty + . filter (flip Set.notMember (existingMembers lconv)) + . toList + $ users + SubConv _ _ -> pure [] + +removeMembers :: + HasProposalActionEffects r => + Qualified UserId -> + Maybe ConnId -> + Local ConvOrSubConv -> + NonEmpty (Qualified UserId) -> + Sem r [LocalConversationUpdate] +removeMembers qusr con lconvOrSub users = case tUnqualified lconvOrSub of + Conv mlsConv -> do + let lconv = qualifyAs lconvOrSub (mcConv mlsConv) + foldMap + ( handleNoChanges + . handleMLSProposalFailures @ProposalErrors + . fmap pure + . updateLocalConversationUnchecked @'ConversationRemoveMembersTag lconv qusr con + ) + . nonEmpty + . filter (flip Set.member (existingMembers lconv)) + . toList + $ users + SubConv _ _ -> pure [] + +handleNoChanges :: Monoid a => Sem (Error NoChanges ': r) a -> Sem r a +handleNoChanges = fmap fold . runError + +existingLocalMembers :: Local Data.Conversation -> Set (Qualified UserId) +existingLocalMembers lconv = + (Set.fromList . map (fmap lmId . 
tUntagged)) (traverse convLocalMembers lconv) + +existingRemoteMembers :: Local Data.Conversation -> Set (Qualified UserId) +existingRemoteMembers lconv = + Set.fromList . map (tUntagged . rmId) . convRemoteMembers . tUnqualified $ + lconv + +existingMembers :: Local Data.Conversation -> Set (Qualified UserId) +existingMembers lconv = existingLocalMembers lconv <> existingRemoteMembers lconv diff --git a/services/galley/src/Galley/API/MLS/IncomingMessage.hs b/services/galley/src/Galley/API/MLS/IncomingMessage.hs new file mode 100644 index 0000000000..96b63cc697 --- /dev/null +++ b/services/galley/src/Galley/API/MLS/IncomingMessage.hs @@ -0,0 +1,131 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . 
+ +module Galley.API.MLS.IncomingMessage + ( IncomingMessage (..), + IncomingMessageContent (..), + IncomingPublicMessageContent (..), + IncomingBundle (..), + mkIncomingMessage, + incomingMessageAuthenticatedContent, + mkIncomingBundle, + ) +where + +import GHC.Records +import Imports +import Wire.API.MLS.AuthenticatedContent +import Wire.API.MLS.Commit +import Wire.API.MLS.CommitBundle +import Wire.API.MLS.Epoch +import Wire.API.MLS.Group +import Wire.API.MLS.GroupInfo +import Wire.API.MLS.Message +import Wire.API.MLS.Serialisation +import Wire.API.MLS.Welcome + +data IncomingMessage = IncomingMessage + { epoch :: Epoch, + groupId :: GroupId, + content :: IncomingMessageContent, + rawMessage :: RawMLS Message + } + +instance HasField "sender" IncomingMessage (Maybe Sender) where + getField msg = case msg.content of + IncomingMessageContentPublic pub -> Just pub.sender + _ -> Nothing + +data IncomingMessageContent + = IncomingMessageContentPublic IncomingPublicMessageContent + | IncomingMessageContentPrivate + +data IncomingPublicMessageContent = IncomingPublicMessageContent + { sender :: Sender, + content :: FramedContentData, + -- for verification + framedContent :: RawMLS FramedContent, + authData :: RawMLS FramedContentAuthData + } + +data IncomingBundle = IncomingBundle + { epoch :: Epoch, + groupId :: GroupId, + sender :: Sender, + commit :: RawMLS Commit, + rawMessage :: RawMLS Message, + welcome :: Maybe (RawMLS Welcome), + groupInfo :: GroupInfoData, + serialized :: ByteString + } + +mkIncomingMessage :: RawMLS Message -> Maybe IncomingMessage +mkIncomingMessage msg = case msg.value.content of + MessagePublic pmsg -> + Just + IncomingMessage + { epoch = pmsg.content.value.epoch, + groupId = pmsg.content.value.groupId, + content = + IncomingMessageContentPublic + IncomingPublicMessageContent + { sender = pmsg.content.value.sender, + content = pmsg.content.value.content, + framedContent = pmsg.content, + authData = pmsg.authData + }, + rawMessage = msg + } 
+ MessagePrivate pmsg + | pmsg.value.tag == FramedContentApplicationDataTag -> + Just + IncomingMessage + { epoch = pmsg.value.epoch, + groupId = pmsg.value.groupId, + content = IncomingMessageContentPrivate, + rawMessage = msg + } + _ -> Nothing + +incomingMessageAuthenticatedContent :: IncomingPublicMessageContent -> AuthenticatedContent +incomingMessageAuthenticatedContent pmsg = + AuthenticatedContent + { wireFormat = WireFormatPublicTag, + content = pmsg.framedContent, + authData = pmsg.authData + } + +mkIncomingBundle :: RawMLS CommitBundle -> Maybe IncomingBundle +mkIncomingBundle bundle = do + imsg <- mkIncomingMessage bundle.value.commitMsg + content <- case imsg.content of + IncomingMessageContentPublic c -> pure c + _ -> Nothing + commit <- case content.content of + FramedContentCommit c -> pure c + _ -> Nothing + pure + IncomingBundle + { epoch = imsg.epoch, + groupId = imsg.groupId, + sender = content.sender, + commit = commit, + rawMessage = bundle.value.commitMsg, + welcome = bundle.value.welcome, + groupInfo = GroupInfoData bundle.value.groupInfo.raw, + serialized = bundle.raw + } diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 3f761e7bbd..70d6169b76 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -30,96 +30,46 @@ module Galley.API.MLS.Message where import Control.Comonad -import Control.Error.Util (hush) -import Control.Lens (forOf_, preview) -import Control.Lens.Extras (is) import Data.Id import Data.Json.Util -import Data.List.NonEmpty (NonEmpty, nonEmpty) -import qualified Data.Map as Map import Data.Qualified -import qualified Data.Set as Set -import Data.Time import Data.Tuple.Extra -import GHC.Records import Galley.API.Action -import Galley.API.Error +import Galley.API.MLS.Commit import Galley.API.MLS.Conversation import Galley.API.MLS.Enabled +import Galley.API.MLS.IncomingMessage import Galley.API.MLS.Propagate 
-import Galley.API.MLS.Removal +import Galley.API.MLS.Proposal import Galley.API.MLS.Types import Galley.API.MLS.Util import Galley.API.MLS.Welcome (sendWelcomes) import Galley.API.Util -import Galley.Data.Conversation.Types hiding (Conversation) -import qualified Galley.Data.Conversation.Types as Data import Galley.Effects -import Galley.Effects.BrigAccess import Galley.Effects.ConversationStore import Galley.Effects.FederatorAccess import Galley.Effects.MemberStore -import Galley.Effects.ProposalStore import Galley.Effects.SubConversationStore -import Galley.Env -import Galley.Options -import Galley.Types.Conversations.Members import Imports import Polysemy import Polysemy.Error import Polysemy.Input import Polysemy.Internal import Polysemy.Resource (Resource) -import Polysemy.State import Polysemy.TinyLog -import Wire.API.Conversation hiding (Member) import Wire.API.Conversation.Protocol -import Wire.API.Conversation.Role import Wire.API.Error import Wire.API.Error.Galley import Wire.API.Federation.API -import Wire.API.Federation.API.Brig import Wire.API.Federation.API.Galley import Wire.API.Federation.Error -import Wire.API.MLS.AuthenticatedContent -import Wire.API.MLS.CipherSuite import Wire.API.MLS.Commit import Wire.API.MLS.CommitBundle import Wire.API.MLS.Credential import Wire.API.MLS.GroupInfo -import Wire.API.MLS.KeyPackage -import Wire.API.MLS.LeafNode import Wire.API.MLS.Message -import Wire.API.MLS.Proposal -import qualified Wire.API.MLS.Proposal as Proposal -import Wire.API.MLS.ProposalTag import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation -import Wire.API.MLS.Validation -import Wire.API.MLS.Welcome -import Wire.API.Message -import Wire.API.User.Client - --- TODO: --- [x] replace ref with index in remove proposals --- [x] validate leaf nodes and key packages locally on galley --- - [x] extract validation function to wire-api --- - [x] validate lifetime and public key consistency only on brig --- - [x] check that ciphersuite matches 
conversation on galley --- - [x] check the signature on the LeafNode --- - [x] verify that capabilities include basic credentials --- [x] validate proposals when processing proposal and commit messages --- [x] remove MissingSenderClient error --- [x] remove all key package ref mapping --- [x] initialise index maps --- [x] compute new indices for add proposals --- [x] remove prefixes from value and raw --- [x] remove PublicGroupState and GroupInfoBundle modules --- [x] remove prefixes from fields in Commit and Proposal --- [x] check epoch inside commit lock --- [x] split executeProposalAction for internal and external commits --- [x] add nonce to PreSharedKeyID --- [ ] move external commit logic to a separate module and improve types -- FUTUREWORK -- - Check that the capabilities of a leaf node in an add proposal contains all @@ -127,98 +77,6 @@ import Wire.API.User.Client -- the group info from the DB in order to read the group context. -- - Verify message signature, this also requires the group context. 
(see above) -data IncomingMessage = IncomingMessage - { epoch :: Epoch, - groupId :: GroupId, - content :: IncomingMessageContent, - rawMessage :: RawMLS Message - } - -instance HasField "sender" IncomingMessage (Maybe Sender) where - getField msg = case msg.content of - IncomingMessageContentPublic pub -> Just pub.sender - _ -> Nothing - -data IncomingMessageContent - = IncomingMessageContentPublic IncomingPublicMessageContent - | IncomingMessageContentPrivate - -data IncomingPublicMessageContent = IncomingPublicMessageContent - { sender :: Sender, - content :: FramedContentData, - -- for verification - framedContent :: RawMLS FramedContent, - authData :: RawMLS FramedContentAuthData - } - -data IncomingBundle = IncomingBundle - { epoch :: Epoch, - groupId :: GroupId, - sender :: Sender, - commit :: RawMLS Commit, - rawMessage :: RawMLS Message, - welcome :: Maybe (RawMLS Welcome), - groupInfo :: GroupInfoData, - serialized :: ByteString - } - -mkIncomingMessage :: RawMLS Message -> Maybe IncomingMessage -mkIncomingMessage msg = case msg.value.content of - MessagePublic pmsg -> - Just - IncomingMessage - { epoch = pmsg.content.value.epoch, - groupId = pmsg.content.value.groupId, - content = - IncomingMessageContentPublic - IncomingPublicMessageContent - { sender = pmsg.content.value.sender, - content = pmsg.content.value.content, - framedContent = pmsg.content, - authData = pmsg.authData - }, - rawMessage = msg - } - MessagePrivate pmsg - | pmsg.value.tag == FramedContentApplicationDataTag -> - Just - IncomingMessage - { epoch = pmsg.value.epoch, - groupId = pmsg.value.groupId, - content = IncomingMessageContentPrivate, - rawMessage = msg - } - _ -> Nothing - -incomingMessageAuthenticatedContent :: IncomingPublicMessageContent -> AuthenticatedContent -incomingMessageAuthenticatedContent pmsg = - AuthenticatedContent - { wireFormat = WireFormatPublicTag, - content = pmsg.framedContent, - authData = pmsg.authData - } - -mkIncomingBundle :: RawMLS CommitBundle -> 
Maybe IncomingBundle -mkIncomingBundle bundle = do - imsg <- mkIncomingMessage bundle.value.commitMsg - content <- case imsg.content of - IncomingMessageContentPublic c -> pure c - _ -> Nothing - commit <- case content.content of - FramedContentCommit c -> pure c - _ -> Nothing - pure - IncomingBundle - { epoch = imsg.epoch, - groupId = imsg.groupId, - sender = content.sender, - commit = commit, - rawMessage = bundle.value.commitMsg, - welcome = bundle.value.welcome, - groupInfo = GroupInfoData bundle.value.groupInfo.raw, - serialized = bundle.raw - } - type MLSMessageStaticErrors = '[ ErrorS 'ConvAccessDenied, ErrorS 'ConvMemberNotFound, @@ -537,769 +395,6 @@ postMLSMessageToRemoteConv loc qusr senderClient con msg rConvOrSubId = do pure (LocalConversationUpdate e update) pure (lcus, unreachables) -type HasProposalEffects r = - ( Member BrigAccess r, - Member ConversationStore r, - Member (Error InternalError) r, - Member (Error MLSProposalFailure) r, - Member (Error MLSProtocolError) r, - Member (ErrorS 'MLSClientMismatch) r, - Member (ErrorS 'MLSInvalidLeafNodeIndex) r, - Member (ErrorS 'MLSUnsupportedProposal) r, - Member ExternalAccess r, - Member FederatorAccess r, - Member GundeckAccess r, - Member (Input Env) r, - Member (Input (Local ())) r, - Member (Input Opts) r, - Member (Input UTCTime) r, - Member LegalHoldStore r, - Member MemberStore r, - Member ProposalStore r, - Member TeamStore r, - Member TeamStore r, - Member TinyLog r - ) - -data ProposalAction = ProposalAction - { paAdd :: ClientMap, - paRemove :: ClientMap, - -- The backend does not process external init proposals, but still it needs - -- to know if a commit has one when processing external commits - paExternalInit :: Any - } - deriving (Show) - -instance Semigroup ProposalAction where - ProposalAction add1 rem1 init1 <> ProposalAction add2 rem2 init2 = - ProposalAction - (Map.unionWith mappend add1 add2) - (Map.unionWith mappend rem1 rem2) - (init1 <> init2) - -instance Monoid 
ProposalAction where - mempty = ProposalAction mempty mempty mempty - -paAddClient :: ClientIdentity -> LeafIndex -> ProposalAction -paAddClient cid idx = mempty {paAdd = cmSingleton cid idx} - -paRemoveClient :: ClientIdentity -> LeafIndex -> ProposalAction -paRemoveClient cid idx = mempty {paRemove = cmSingleton cid idx} - -paExternalInitPresent :: ProposalAction -paExternalInitPresent = mempty {paExternalInit = Any True} - --- | This is used to sort proposals into the correct processing order, as defined by the spec -data ProposalProcessingStage - = ProposalProcessingStageExtensions - | ProposalProcessingStageUpdate - | ProposalProcessingStageRemove - | ProposalProcessingStageAdd - | ProposalProcessingStagePreSharedKey - | ProposalProcessingStageExternalInit - | ProposalProcessingStageReInit - deriving (Eq, Ord) - -proposalProcessingStage :: Proposal -> ProposalProcessingStage -proposalProcessingStage (AddProposal _) = ProposalProcessingStageAdd -proposalProcessingStage (RemoveProposal _) = ProposalProcessingStageRemove -proposalProcessingStage (UpdateProposal _) = ProposalProcessingStageUpdate -proposalProcessingStage (PreSharedKeyProposal _) = ProposalProcessingStagePreSharedKey -proposalProcessingStage (ReInitProposal _) = ProposalProcessingStageReInit -proposalProcessingStage (ExternalInitProposal _) = ProposalProcessingStageExternalInit -proposalProcessingStage (GroupContextExtensionsProposal _) = ProposalProcessingStageExtensions - -getCommitData :: - ( HasProposalEffects r, - Member (ErrorS 'MLSProposalNotFound) r, - Member (ErrorS 'MLSStaleMessage) r - ) => - ClientIdentity -> - Local ConvOrSubConv -> - Epoch -> - Commit -> - Sem r ProposalAction -getCommitData senderIdentity lConvOrSub epoch commit = do - let convOrSub = tUnqualified lConvOrSub - mlsMeta = mlsMetaConvOrSub convOrSub - curEpoch = cnvmlsEpoch mlsMeta - groupId = cnvmlsGroupId mlsMeta - - -- check epoch number - -- TODO: is this really needed? 
- when (epoch /= curEpoch) $ throwS @'MLSStaleMessage - evalState (indexMapConvOrSub convOrSub) $ do - creatorAction <- - if epoch == Epoch 0 - then addProposedClient senderIdentity - else mempty - proposals <- traverse (derefOrCheckProposal mlsMeta groupId epoch) commit.proposals - action <- applyProposals mlsMeta groupId proposals - pure (creatorAction <> action) - -getExternalCommitData :: - forall r. - ( Member (Error MLSProtocolError) r, - Member (ErrorS 'MLSStaleMessage) r, - Member (ErrorS 'MLSUnsupportedProposal) r, - Member (ErrorS 'MLSInvalidLeafNodeIndex) r - ) => - ClientIdentity -> - Local ConvOrSubConv -> - Epoch -> - Commit -> - Sem r ProposalAction -getExternalCommitData senderIdentity lConvOrSub epoch commit = do - let convOrSub = tUnqualified lConvOrSub - mlsMeta = mlsMetaConvOrSub convOrSub - curEpoch = cnvmlsEpoch mlsMeta - groupId = cnvmlsGroupId mlsMeta - when (epoch /= curEpoch) $ throwS @'MLSStaleMessage - proposals <- traverse getInlineProposal commit.proposals - - -- According to the spec, an external commit must contain: - -- (https://messaginglayersecurity.rocks/mls-protocol/draft-ietf-mls-protocol.html#section-12.2) - -- - -- > Exactly one ExternalInit - -- > At most one Remove proposal, with which the joiner removes an old - -- > version of themselves. - -- > Zero or more PreSharedKey proposals. - -- > No other proposals. 
- let counts = foldr (\x -> Map.insertWith (+) x.tag (1 :: Int)) mempty proposals - - unless (Map.lookup ExternalInitProposalTag counts == Just 1) $ - throw (mlsProtocolError "External commits must contain exactly one ExternalInit proposal") - unless (Map.findWithDefault 0 RemoveProposalTag counts <= 1) $ - throw (mlsProtocolError "External commits must contain at most one Remove proposal") - unless (null (Map.keys counts \\ allowedProposals)) $ - throw (mlsProtocolError "Invalid proposal type in an external commit") - - evalState (indexMapConvOrSub convOrSub) $ do - -- process optional removal - propAction <- applyProposals mlsMeta groupId proposals - -- add sender - selfAction <- addProposedClient senderIdentity - case cmAssocs (paRemove propAction) of - [(cid, _)] - | cid /= senderIdentity -> - throw $ mlsProtocolError "Only the self client can be removed by an external commit" - _ -> pure () - - pure $ propAction <> selfAction - where - allowedProposals = [ExternalInitProposalTag, RemoveProposalTag, PreSharedKeyProposalTag] - - getInlineProposal :: ProposalOrRef -> Sem r Proposal - getInlineProposal (Ref _) = - throw (mlsProtocolError "External commits cannot reference proposals") - getInlineProposal (Inline p) = pure p - -processExternalCommit :: - forall r. 
- ( Member (ErrorS 'MLSStaleMessage) r, - Member (ErrorS 'MLSSubConvClientNotInParent) r, - Member Resource r, - HasProposalActionEffects r - ) => - ClientIdentity -> - Local ConvOrSubConv -> - Epoch -> - ProposalAction -> - Maybe UpdatePath -> - Sem r () -processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do - let convOrSub = tUnqualified lConvOrSub - - -- only members can join a subconversation - forOf_ _SubConv convOrSub $ \(mlsConv, _) -> - unless (isClientMember senderIdentity (mcMembers mlsConv)) $ - throwS @'MLSSubConvClientNotInParent - - -- get index of the newly added client, as calculated when processing proposals - idx <- case cmAssocs (paAdd action) of - [(cid, idx)] | cid == senderIdentity -> pure idx - _ -> throw (InternalErrorWithDescription "Unexpected Add action for external commit") - - -- extract leaf node from update path and validate it - leafNode <- - (.leaf) - <$> note - (mlsProtocolError "External commits need an update path") - updatePath - let cs = cnvmlsCipherSuite (mlsMetaConvOrSub (tUnqualified lConvOrSub)) - let groupId = cnvmlsGroupId (mlsMetaConvOrSub convOrSub) - let extra = LeafNodeTBSExtraCommit groupId idx - case validateLeafNode cs (Just senderIdentity) extra leafNode.value of - Left errMsg -> - throw $ - mlsProtocolError ("Tried to add invalid LeafNode: " <> errMsg) - Right _ -> pure () - - withCommitLock (fmap idForConvOrSub lConvOrSub) groupId epoch $ do - executeExtCommitProposalAction senderIdentity lConvOrSub action - - -- increment epoch number - lConvOrSub' <- for lConvOrSub incrementEpoch - - -- fetch backend remove proposals of the previous epoch - let remIndices = map snd (cmAssocs (paRemove action)) - indicesInRemoveProposals <- - -- skip remove proposals of already removed by the external commit - (\\ remIndices) - <$> getPendingBackendRemoveProposals groupId epoch - - -- requeue backend remove proposals for the current epoch - let cm = membersConvOrSub (tUnqualified lConvOrSub') - 
createAndSendRemoveProposals - lConvOrSub' - indicesInRemoveProposals - (cidQualifiedUser senderIdentity) - cm - -processInternalCommit :: - forall r. - ( HasProposalEffects r, - Member (ErrorS 'ConvNotFound) r, - Member (ErrorS 'MLSCommitMissingReferences) r, - Member (ErrorS 'MLSSelfRemovalNotAllowed) r, - Member (ErrorS 'MLSStaleMessage) r, - Member (ErrorS 'MissingLegalholdConsent) r, - Member SubConversationStore r, - Member Resource r - ) => - ClientIdentity -> - Maybe ConnId -> - Local ConvOrSubConv -> - Epoch -> - ProposalAction -> - Commit -> - Sem r [LocalConversationUpdate] -processInternalCommit senderIdentity con lConvOrSub epoch action commit = do - let convOrSub = tUnqualified lConvOrSub - mlsMeta = mlsMetaConvOrSub convOrSub - - withCommitLock (fmap idForConvOrSub lConvOrSub) (cnvmlsGroupId (mlsMetaConvOrSub convOrSub)) epoch $ do - -- check all pending proposals are referenced in the commit - allPendingProposals <- getAllPendingProposalRefs (cnvmlsGroupId mlsMeta) epoch - let referencedProposals = Set.fromList $ mapMaybe (\x -> preview Proposal._Ref x) commit.proposals - unless (all (`Set.member` referencedProposals) allPendingProposals) $ - throwS @'MLSCommitMissingReferences - - -- process and execute proposals - updates <- executeIntCommitProposalAction senderIdentity con lConvOrSub action - - -- increment epoch number - for_ lConvOrSub incrementEpoch - - pure updates - -derefOrCheckProposal :: - ( Member (Error MLSProtocolError) r, - Member (ErrorS 'MLSInvalidLeafNodeIndex) r, - Member ProposalStore r, - Member (State IndexMap) r, - Member (ErrorS 'MLSProposalNotFound) r - ) => - ConversationMLSData -> - GroupId -> - Epoch -> - ProposalOrRef -> - Sem r Proposal -derefOrCheckProposal _mlsMeta groupId epoch (Ref ref) = do - p <- getProposal groupId epoch ref >>= noteS @'MLSProposalNotFound - pure p.value -derefOrCheckProposal mlsMeta _ _ (Inline p) = do - im <- get - checkProposal mlsMeta im p - pure p - -checkProposal :: - ( Member (Error 
MLSProtocolError) r, - Member (ErrorS 'MLSInvalidLeafNodeIndex) r - ) => - ConversationMLSData -> - IndexMap -> - Proposal -> - Sem r () -checkProposal mlsMeta im p = - case p of - AddProposal kp -> do - (cs, _lifetime) <- - either - (\msg -> throw (mlsProtocolError ("Invalid key package in Add proposal: " <> msg))) - pure - $ validateKeyPackage Nothing kp.value - -- we are not checking lifetime constraints here - unless (mlsMeta.cnvmlsCipherSuite == cs) $ - throw (mlsProtocolError "Key package ciphersuite does not match conversation") - RemoveProposal idx -> do - void $ noteS @'MLSInvalidLeafNodeIndex $ imLookup im idx - _ -> pure () - -addProposedClient :: Member (State IndexMap) r => ClientIdentity -> Sem r ProposalAction -addProposedClient cid = do - im <- get - let (idx, im') = imAddClient im cid - put im' - pure (paAddClient cid idx) - -applyProposals :: - ( Member (State IndexMap) r, - Member (Error MLSProtocolError) r, - Member (ErrorS 'MLSUnsupportedProposal) r, - Member (ErrorS 'MLSInvalidLeafNodeIndex) r - ) => - ConversationMLSData -> - GroupId -> - [Proposal] -> - Sem r ProposalAction -applyProposals mlsMeta groupId = - -- proposals are sorted before processing - foldMap (applyProposal mlsMeta groupId) - . 
sortOn proposalProcessingStage - -applyProposal :: - ( Member (State IndexMap) r, - Member (Error MLSProtocolError) r, - Member (ErrorS 'MLSUnsupportedProposal) r, - Member (ErrorS 'MLSInvalidLeafNodeIndex) r - ) => - ConversationMLSData -> - GroupId -> - Proposal -> - Sem r ProposalAction -applyProposal mlsMeta _groupId (AddProposal kp) = do - (cs, _lifetime) <- - either - (\msg -> throw (mlsProtocolError ("Invalid key package in Add proposal: " <> msg))) - pure - $ validateKeyPackage Nothing kp.value - unless (mlsMeta.cnvmlsCipherSuite == cs) $ - throw (mlsProtocolError "Key package ciphersuite does not match conversation") - -- we are not checking lifetime constraints here - cid <- getKeyPackageIdentity kp.value - addProposedClient cid -applyProposal _mlsMeta _groupId (RemoveProposal idx) = do - im <- get - (cid, im') <- noteS @'MLSInvalidLeafNodeIndex $ imRemoveClient im idx - put im' - pure (paRemoveClient cid idx) -applyProposal _mlsMeta _groupId (ExternalInitProposal _) = - -- only record the fact there was an external init proposal, but do not - -- process it in any way. - pure paExternalInitPresent -applyProposal _mlsMeta _groupId _ = pure mempty - -processProposal :: - HasProposalEffects r => - ( Member (ErrorS 'ConvNotFound) r, - Member (ErrorS 'MLSStaleMessage) r - ) => - Qualified UserId -> - Local ConvOrSubConv -> - IncomingMessage -> -- TODO: just pass header? 
- IncomingPublicMessageContent -> - RawMLS Proposal -> - Sem r () -processProposal qusr lConvOrSub msg pub prop = do - let mlsMeta = mlsMetaConvOrSub (tUnqualified lConvOrSub) - checkEpoch msg.epoch mlsMeta - checkGroup msg.groupId mlsMeta - let suiteTag = cnvmlsCipherSuite mlsMeta - - -- FUTUREWORK: validate the member's conversation role - let im = indexMapConvOrSub $ tUnqualified lConvOrSub - checkProposal mlsMeta im prop.value - when (isExternal pub.sender) $ checkExternalProposalUser qusr prop.value - let propRef = authContentRef suiteTag (incomingMessageAuthenticatedContent pub) - storeProposal msg.groupId msg.epoch propRef ProposalOriginClient prop - -isExternal :: Sender -> Bool -isExternal (SenderMember _) = False -isExternal _ = True - --- check owner/subject of the key package exists and belongs to the user -checkExternalProposalUser :: - ( Member BrigAccess r, - Member (ErrorS 'MLSUnsupportedProposal) r, - Member (Input (Local ())) r - ) => - Qualified UserId -> - Proposal -> - Sem r () -checkExternalProposalUser qusr prop = do - loc <- qualifyLocal () - foldQualified - loc - ( \lusr -> case prop of - AddProposal kp -> do - ClientIdentity {ciUser, ciClient} <- getKeyPackageIdentity kp.value - -- requesting user must match key package owner - when (tUnqualified lusr /= ciUser) $ throwS @'MLSUnsupportedProposal - -- client referenced in key package must be one of the user's clients - UserClients {userClients} <- lookupClients [ciUser] - maybe - (throwS @'MLSUnsupportedProposal) - (flip when (throwS @'MLSUnsupportedProposal) . Set.null . Set.filter (== ciClient)) - $ userClients Map.!? 
ciUser - _ -> throwS @'MLSUnsupportedProposal - ) - (const $ pure ()) -- FUTUREWORK: check external proposals from remote backends - qusr - -type HasProposalActionEffects r = - ( Member BrigAccess r, - Member ConversationStore r, - Member (Error InternalError) r, - Member (ErrorS 'ConvNotFound) r, - Member (ErrorS 'MLSClientMismatch) r, - Member (Error MLSProposalFailure) r, - Member (ErrorS 'MissingLegalholdConsent) r, - Member (ErrorS 'MLSUnsupportedProposal) r, - Member (Error MLSProtocolError) r, - Member (ErrorS 'MLSSelfRemovalNotAllowed) r, - Member ExternalAccess r, - Member FederatorAccess r, - Member GundeckAccess r, - Member (Input Env) r, - Member (Input Opts) r, - Member (Input UTCTime) r, - Member LegalHoldStore r, - Member MemberStore r, - Member ProposalStore r, - Member SubConversationStore r, - Member TeamStore r, - Member TinyLog r - ) - -executeIntCommitProposalAction :: - forall r. - HasProposalActionEffects r => - ClientIdentity -> - Maybe ConnId -> - Local ConvOrSubConv -> - ProposalAction -> - Sem r [LocalConversationUpdate] -executeIntCommitProposalAction senderIdentity con lconvOrSub action = do - let qusr = cidQualifiedUser senderIdentity - convOrSub = tUnqualified lconvOrSub - mlsMeta = mlsMetaConvOrSub convOrSub - cm = membersConvOrSub convOrSub - ss = csSignatureScheme (cnvmlsCipherSuite mlsMeta) - newUserClients = Map.assocs (paAdd action) - - -- FUTUREWORK: remove this check after remote admins are implemented in federation https://wearezeta.atlassian.net/browse/FS-216 - foldQualified lconvOrSub (\_ -> pure ()) (\_ -> throwS @'MLSUnsupportedProposal) qusr - - -- no client can be directly added to a subconversation - when (is _SubConv convOrSub && any ((senderIdentity /=) . fst) (cmAssocs (paAdd action))) $ - throw (mlsProtocolError "Add proposals in subconversations are not supported") - - -- Note [client removal] - -- We support two types of removals: - -- 1. 
when a user is removed from a group, all their clients have to be removed - -- 2. when a client is deleted, that particular client (but not necessarily - -- other clients of the same user) has to be removed. - -- - -- Type 2 requires no special processing on the backend, so here we filter - -- out all removals of that type, so that further checks and processing can - -- be applied only to type 1 removals. - -- - -- Furthermore, subconversation clients can be removed arbitrarily, so this - -- processing is only necessary for main conversations. In the - -- subconversation case, an empty list is returned. - membersToRemove <- case convOrSub of - SubConv _ _ -> pure [] - Conv _ -> mapMaybe hush <$$> for (Map.assocs (paRemove action)) $ - \(qtarget, Map.keysSet -> clients) -> runError @() $ do - let clientsInConv = Map.keysSet (Map.findWithDefault mempty qtarget cm) - let removedClients = Set.intersection clients clientsInConv - - -- ignore user if none of their clients are being removed - when (Set.null removedClients) $ throw () - - -- return error if the user is trying to remove themself - when (cidQualifiedUser senderIdentity == qtarget) $ - throwS @'MLSSelfRemovalNotAllowed - - -- FUTUREWORK: add tests against this situation for conv v subconv - when (not (is _SubConv convOrSub) && removedClients /= clientsInConv) $ do - -- FUTUREWORK: turn this error into a proper response - throwS @'MLSClientMismatch - - pure qtarget - - -- for each user, we compare their clients with the ones being added to the conversation - for_ newUserClients $ \(qtarget, newclients) -> case Map.lookup qtarget cm of - -- user is already present, skip check in this case - Just _ -> pure () - -- new user - Nothing -> do - -- final set of clients in the conversation - let clients = Map.keysSet (newclients <> Map.findWithDefault mempty qtarget cm) - -- get list of mls clients from brig - clientInfo <- getClientInfo lconvOrSub qtarget ss - let allClients = Set.map ciId clientInfo - let 
allMLSClients = Set.map ciId (Set.filter ciMLS clientInfo) - -- We check the following condition: - -- allMLSClients ⊆ clients ⊆ allClients - -- i.e. - -- - if a client has at least 1 key package, it has to be added - -- - if a client is being added, it has to still exist - -- - -- The reason why we can't simply check that clients == allMLSClients is - -- that a client with no remaining key packages might be added by a user - -- who just fetched its last key package. - unless - ( Set.isSubsetOf allMLSClients clients - && Set.isSubsetOf clients allClients - ) - $ do - -- unless (Set.isSubsetOf allClients clients) $ do - -- FUTUREWORK: turn this error into a proper response - throwS @'MLSClientMismatch - - -- remove users from the conversation and send events - removeEvents <- - foldMap - (removeMembers qusr con lconvOrSub) - (nonEmpty membersToRemove) - - -- Remove clients from the conversation state. This includes client removals - -- of all types (see Note [client removal]). - for_ (Map.assocs (paRemove action)) $ \(qtarget, clients) -> do - removeMLSClients (cnvmlsGroupId mlsMeta) qtarget (Map.keysSet clients) - - -- if this is a new subconversation, call `on-new-remote-conversation` on all - -- the remote backends involved in the main conversation - forOf_ _SubConv convOrSub $ \(mlsConv, subConv) -> do - when (cnvmlsEpoch (scMLSData subConv) == Epoch 0) $ do - let remoteDomains = - Set.fromList - ( map - (void . rmId) - (mcRemoteMembers mlsConv) - ) - let nrc = - NewRemoteSubConversation - { nrscConvId = mcId mlsConv, - nrscSubConvId = scSubConvId subConv, - nrscMlsData = scMLSData subConv - } - runFederatedConcurrently_ (toList remoteDomains) $ \_ -> do - void $ fedClient @'Galley @"on-new-remote-subconversation" nrc - - -- add users to the conversation and send events - addEvents <- - foldMap (addMembers qusr con lconvOrSub) - . nonEmpty - . 
map fst - $ newUserClients - - -- add clients in the conversation state - for_ newUserClients $ \(qtarget, newClients) -> do - addMLSClients (cnvmlsGroupId mlsMeta) qtarget (Set.fromList (Map.assocs newClients)) - - -- TODO: increment epoch here instead of in the calling site - - pure (addEvents <> removeEvents) - -executeExtCommitProposalAction :: - forall r. - HasProposalActionEffects r => - ClientIdentity -> - Local ConvOrSubConv -> - ProposalAction -> - Sem r () -executeExtCommitProposalAction senderIdentity lconvOrSub action = do - let mlsMeta = mlsMetaConvOrSub $ tUnqualified lconvOrSub - newCILeaves = cmAssocs (paAdd action) - deprecatedCILeaves = cmAssocs (paRemove action) - - -- Adding clients: sender's client must be added and no other other client may - -- be added. - when (length newCILeaves /= 1 || fst (head newCILeaves) /= senderIdentity) $ - throw (mlsProtocolError "No add proposals are allowed in external commits") - - -- Client removal: only the sender's client can be removed when rejoining the - -- (sub)conversation. - when (length deprecatedCILeaves > 1) $ - throw (mlsProtocolError "Up to one client can be removed in an external commit") - for_ (listToMaybe deprecatedCILeaves) $ \ciLeaf -> do - when (fst ciLeaf /= senderIdentity) $ - throw (mlsProtocolError "Only the sender can rejoin in an external commit") - - -- TODO required for external proposals? - -- FUTUREWORK: remove this check after remote admins are implemented in federation https://wearezeta.atlassian.net/browse/FS-216 - -- foldQualified lconvOrSub (\_ -> pure ()) (\_ -> throwS @'MLSUnsupportedProposal) qusr - - -- Remove deprecated sender client from conversation state. - for_ deprecatedCILeaves $ \(ci, _) -> do - removeMLSClients - (cnvmlsGroupId mlsMeta) - (cidQualifiedUser ci) - (Set.singleton $ ciClient ci) - - -- Add new sender client to the conversation state. 
- for_ newCILeaves $ \(ci, idx) -> do - addMLSClients - (cnvmlsGroupId mlsMeta) - (cidQualifiedUser ci) - (Set.singleton (ciClient ci, idx)) - -existingLocalMembers :: Local Data.Conversation -> Set (Qualified UserId) -existingLocalMembers lconv = - (Set.fromList . map (fmap lmId . tUntagged)) (traverse convLocalMembers lconv) - -existingRemoteMembers :: Local Data.Conversation -> Set (Qualified UserId) -existingRemoteMembers lconv = - Set.fromList . map (tUntagged . rmId) . convRemoteMembers . tUnqualified $ - lconv - -existingMembers :: Local Data.Conversation -> Set (Qualified UserId) -existingMembers lconv = existingLocalMembers lconv <> existingRemoteMembers lconv - -addMembers :: - HasProposalActionEffects r => - Qualified UserId -> - Maybe ConnId -> - Local ConvOrSubConv -> - NonEmpty (Qualified UserId) -> - Sem r [LocalConversationUpdate] -addMembers qusr con lconvOrSub users = case tUnqualified lconvOrSub of - Conv mlsConv -> do - let lconv = qualifyAs lconvOrSub (mcConv mlsConv) - -- FUTUREWORK: update key package ref mapping to reflect conversation membership - foldMap - ( handleNoChanges - . handleMLSProposalFailures @ProposalErrors - . fmap pure - . updateLocalConversationUnchecked @'ConversationJoinTag lconv qusr con - . flip ConversationJoin roleNameWireMember - ) - . nonEmpty - . filter (flip Set.notMember (existingMembers lconv)) - . toList - $ users - SubConv _ _ -> pure [] - -removeMembers :: - HasProposalActionEffects r => - Qualified UserId -> - Maybe ConnId -> - Local ConvOrSubConv -> - NonEmpty (Qualified UserId) -> - Sem r [LocalConversationUpdate] -removeMembers qusr con lconvOrSub users = case tUnqualified lconvOrSub of - Conv mlsConv -> do - let lconv = qualifyAs lconvOrSub (mcConv mlsConv) - foldMap - ( handleNoChanges - . handleMLSProposalFailures @ProposalErrors - . fmap pure - . updateLocalConversationUnchecked @'ConversationRemoveMembersTag lconv qusr con - ) - . nonEmpty - . filter (flip Set.member (existingMembers lconv)) - . 
toList - $ users - SubConv _ _ -> pure [] - -getKeyPackageIdentity :: - Member (ErrorS 'MLSUnsupportedProposal) r => - KeyPackage -> - Sem r ClientIdentity -getKeyPackageIdentity = - either (\_ -> throwS @'MLSUnsupportedProposal) pure - . keyPackageIdentity - -handleNoChanges :: Monoid a => Sem (Error NoChanges ': r) a -> Sem r a -handleNoChanges = fmap fold . runError - -getClientInfo :: - ( Member BrigAccess r, - Member FederatorAccess r - ) => - Local x -> - Qualified UserId -> - SignatureSchemeTag -> - Sem r (Set ClientInfo) -getClientInfo loc = foldQualified loc getLocalMLSClients getRemoteMLSClients - -getRemoteMLSClients :: - ( Member FederatorAccess r - ) => - Remote UserId -> - SignatureSchemeTag -> - Sem r (Set ClientInfo) -getRemoteMLSClients rusr ss = do - runFederated rusr $ - fedClient @'Brig @"get-mls-clients" $ - MLSClientsRequest - { mcrUserId = tUnqualified rusr, - mcrSignatureScheme = ss - } - --- | Check if the epoch number matches that of a conversation -checkEpoch :: - Member (ErrorS 'MLSStaleMessage) r => - Epoch -> - ConversationMLSData -> - Sem r () -checkEpoch epoch mlsMeta = do - unless (epoch == cnvmlsEpoch mlsMeta) $ throwS @'MLSStaleMessage - --- | Check if the group ID matches that of a conversation -checkGroup :: - Member (ErrorS 'ConvNotFound) r => - GroupId -> - ConversationMLSData -> - Sem r () -checkGroup gId mlsMeta = do - unless (gId == cnvmlsGroupId mlsMeta) $ throwS @'ConvNotFound - --------------------------------------------------------------------------------- --- Error handling of proposal execution - --- The following errors are caught by 'executeProposalAction' and wrapped in a --- 'MLSProposalFailure'. This way errors caused by the execution of proposals are --- separated from those caused by the commit processing itself. 
-type ProposalErrors = - '[ Error FederationError, - Error InvalidInput, - ErrorS ('ActionDenied 'AddConversationMember), - ErrorS ('ActionDenied 'LeaveConversation), - ErrorS ('ActionDenied 'RemoveConversationMember), - ErrorS 'ConvAccessDenied, - ErrorS 'InvalidOperation, - ErrorS 'NotATeamMember, - ErrorS 'NotConnected, - ErrorS 'TooManyMembers - ] - -class HandleMLSProposalFailures effs r where - handleMLSProposalFailures :: Sem (Append effs r) a -> Sem r a - -class HandleMLSProposalFailure eff r where - handleMLSProposalFailure :: Sem (eff ': r) a -> Sem r a - -instance HandleMLSProposalFailures '[] r where - handleMLSProposalFailures = id - -instance - ( HandleMLSProposalFailures effs r, - HandleMLSProposalFailure eff (Append effs r) - ) => - HandleMLSProposalFailures (eff ': effs) r - where - handleMLSProposalFailures = handleMLSProposalFailures @effs . handleMLSProposalFailure @eff - -instance - (APIError e, Member (Error MLSProposalFailure) r) => - HandleMLSProposalFailure (Error e) r - where - handleMLSProposalFailure = mapError (MLSProposalFailure . 
toWai) - storeGroupInfo :: ( Member ConversationStore r, Member SubConversationStore r @@ -1334,23 +429,3 @@ fetchConvOrSub qusr convOrSubId = for convOrSubId $ \case getLocalConvForUser u >=> mkMLSConversation >=> noteS @'ConvNotFound - -incrementEpoch :: - ( Member ConversationStore r, - Member (ErrorS 'ConvNotFound) r, - Member MemberStore r, - Member SubConversationStore r - ) => - ConvOrSubConv -> - Sem r ConvOrSubConv -incrementEpoch (Conv c) = do - let epoch' = succ (cnvmlsEpoch (mcMLSData c)) - setConversationEpoch (mcId c) epoch' - conv <- getConversation (mcId c) >>= noteS @'ConvNotFound - fmap Conv (mkMLSConversation conv >>= noteS @'ConvNotFound) -incrementEpoch (SubConv c s) = do - let epoch' = succ (cnvmlsEpoch (scMLSData s)) - setSubConversationEpoch (scParentConvId s) (scSubConvId s) epoch' - subconv <- - getSubConversation (mcId c) (scSubConvId s) >>= noteS @'ConvNotFound - pure (SubConv c subconv) diff --git a/services/galley/src/Galley/API/MLS/Proposal.hs b/services/galley/src/Galley/API/MLS/Proposal.hs new file mode 100644 index 0000000000..617d95c8fa --- /dev/null +++ b/services/galley/src/Galley/API/MLS/Proposal.hs @@ -0,0 +1,306 @@ +-- This file is part of the Wire Server implementation. +-- +-- Copyright (C) 2022 Wire Swiss GmbH +-- +-- This program is free software: you can redistribute it and/or modify it under +-- the terms of the GNU Affero General Public License as published by the Free +-- Software Foundation, either version 3 of the License, or (at your option) any +-- later version. +-- +-- This program is distributed in the hope that it will be useful, but WITHOUT +-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more +-- details. +-- +-- You should have received a copy of the GNU Affero General Public License along +-- with this program. If not, see . 
+ +module Galley.API.MLS.Proposal + ( -- * Proposal processing + derefOrCheckProposal, + checkProposal, + processProposal, + proposalProcessingStage, + addProposedClient, + applyProposals, + + -- * Proposal actions + paAddClient, + paRemoveClient, + paExternalInitPresent, + + -- * Types + ProposalAction (..), + HasProposalEffects, + ) +where + +import Data.Id +import qualified Data.Map as Map +import Data.Qualified +import qualified Data.Set as Set +import Data.Time +import Galley.API.Error +import Galley.API.MLS.IncomingMessage +import Galley.API.MLS.Types +import Galley.API.Util +import Galley.Effects +import Galley.Effects.BrigAccess +import Galley.Effects.ProposalStore +import Galley.Env +import Galley.Options +import Imports +import Polysemy +import Polysemy.Error +import Polysemy.Input +import Polysemy.State +import Polysemy.TinyLog +import Wire.API.Conversation hiding (Member) +import Wire.API.Conversation.Protocol +import Wire.API.Error +import Wire.API.Error.Galley +import Wire.API.MLS.AuthenticatedContent +import Wire.API.MLS.Credential +import Wire.API.MLS.KeyPackage +import Wire.API.MLS.LeafNode +import Wire.API.MLS.Message +import Wire.API.MLS.Proposal +import Wire.API.MLS.Serialisation +import Wire.API.MLS.Validation +import Wire.API.Message + +data ProposalAction = ProposalAction + { paAdd :: ClientMap, + paRemove :: ClientMap, + -- The backend does not process external init proposals, but still it needs + -- to know if a commit has one when processing external commits + paExternalInit :: Any + } + deriving (Show) + +instance Semigroup ProposalAction where + ProposalAction add1 rem1 init1 <> ProposalAction add2 rem2 init2 = + ProposalAction + (Map.unionWith mappend add1 add2) + (Map.unionWith mappend rem1 rem2) + (init1 <> init2) + +instance Monoid ProposalAction where + mempty = ProposalAction mempty mempty mempty + +paAddClient :: ClientIdentity -> LeafIndex -> ProposalAction +paAddClient cid idx = mempty {paAdd = cmSingleton cid idx} + 
+paRemoveClient :: ClientIdentity -> LeafIndex -> ProposalAction +paRemoveClient cid idx = mempty {paRemove = cmSingleton cid idx} + +paExternalInitPresent :: ProposalAction +paExternalInitPresent = mempty {paExternalInit = Any True} + +-- | This is used to sort proposals into the correct processing order, as defined by the spec +data ProposalProcessingStage + = ProposalProcessingStageExtensions + | ProposalProcessingStageUpdate + | ProposalProcessingStageRemove + | ProposalProcessingStageAdd + | ProposalProcessingStagePreSharedKey + | ProposalProcessingStageExternalInit + | ProposalProcessingStageReInit + deriving (Eq, Ord) + +proposalProcessingStage :: Proposal -> ProposalProcessingStage +proposalProcessingStage (AddProposal _) = ProposalProcessingStageAdd +proposalProcessingStage (RemoveProposal _) = ProposalProcessingStageRemove +proposalProcessingStage (UpdateProposal _) = ProposalProcessingStageUpdate +proposalProcessingStage (PreSharedKeyProposal _) = ProposalProcessingStagePreSharedKey +proposalProcessingStage (ReInitProposal _) = ProposalProcessingStageReInit +proposalProcessingStage (ExternalInitProposal _) = ProposalProcessingStageExternalInit +proposalProcessingStage (GroupContextExtensionsProposal _) = ProposalProcessingStageExtensions + +type HasProposalEffects r = + ( Member BrigAccess r, + Member ConversationStore r, + Member (Error InternalError) r, + Member (Error MLSProposalFailure) r, + Member (Error MLSProtocolError) r, + Member (ErrorS 'MLSClientMismatch) r, + Member (ErrorS 'MLSInvalidLeafNodeIndex) r, + Member (ErrorS 'MLSUnsupportedProposal) r, + Member ExternalAccess r, + Member FederatorAccess r, + Member GundeckAccess r, + Member (Input Env) r, + Member (Input (Local ())) r, + Member (Input Opts) r, + Member (Input UTCTime) r, + Member LegalHoldStore r, + Member MemberStore r, + Member ProposalStore r, + Member TeamStore r, + Member TeamStore r, + Member TinyLog r + ) + +derefOrCheckProposal :: + ( Member (Error MLSProtocolError) r, + 
Member (ErrorS 'MLSInvalidLeafNodeIndex) r, + Member ProposalStore r, + Member (State IndexMap) r, + Member (ErrorS 'MLSProposalNotFound) r + ) => + ConversationMLSData -> + GroupId -> + Epoch -> + ProposalOrRef -> + Sem r Proposal +derefOrCheckProposal _mlsMeta groupId epoch (Ref ref) = do + p <- getProposal groupId epoch ref >>= noteS @'MLSProposalNotFound + pure p.value +derefOrCheckProposal mlsMeta _ _ (Inline p) = do + im <- get + checkProposal mlsMeta im p + pure p + +checkProposal :: + ( Member (Error MLSProtocolError) r, + Member (ErrorS 'MLSInvalidLeafNodeIndex) r + ) => + ConversationMLSData -> + IndexMap -> + Proposal -> + Sem r () +checkProposal mlsMeta im p = + case p of + AddProposal kp -> do + (cs, _lifetime) <- + either + (\msg -> throw (mlsProtocolError ("Invalid key package in Add proposal: " <> msg))) + pure + $ validateKeyPackage Nothing kp.value + -- we are not checking lifetime constraints here + unless (mlsMeta.cnvmlsCipherSuite == cs) $ + throw (mlsProtocolError "Key package ciphersuite does not match conversation") + RemoveProposal idx -> do + void $ noteS @'MLSInvalidLeafNodeIndex $ imLookup im idx + _ -> pure () + +addProposedClient :: Member (State IndexMap) r => ClientIdentity -> Sem r ProposalAction +addProposedClient cid = do + im <- get + let (idx, im') = imAddClient im cid + put im' + pure (paAddClient cid idx) + +applyProposals :: + ( Member (State IndexMap) r, + Member (Error MLSProtocolError) r, + Member (ErrorS 'MLSUnsupportedProposal) r, + Member (ErrorS 'MLSInvalidLeafNodeIndex) r + ) => + ConversationMLSData -> + GroupId -> + [Proposal] -> + Sem r ProposalAction +applyProposals mlsMeta groupId = + -- proposals are sorted before processing + foldMap (applyProposal mlsMeta groupId) + . 
sortOn proposalProcessingStage + +applyProposal :: + ( Member (State IndexMap) r, + Member (Error MLSProtocolError) r, + Member (ErrorS 'MLSUnsupportedProposal) r, + Member (ErrorS 'MLSInvalidLeafNodeIndex) r + ) => + ConversationMLSData -> + GroupId -> + Proposal -> + Sem r ProposalAction +applyProposal mlsMeta _groupId (AddProposal kp) = do + (cs, _lifetime) <- + either + (\msg -> throw (mlsProtocolError ("Invalid key package in Add proposal: " <> msg))) + pure + $ validateKeyPackage Nothing kp.value + unless (mlsMeta.cnvmlsCipherSuite == cs) $ + throw (mlsProtocolError "Key package ciphersuite does not match conversation") + -- we are not checking lifetime constraints here + cid <- getKeyPackageIdentity kp.value + addProposedClient cid +applyProposal _mlsMeta _groupId (RemoveProposal idx) = do + im <- get + (cid, im') <- noteS @'MLSInvalidLeafNodeIndex $ imRemoveClient im idx + put im' + pure (paRemoveClient cid idx) +applyProposal _mlsMeta _groupId (ExternalInitProposal _) = + -- only record the fact there was an external init proposal, but do not + -- process it in any way. + pure paExternalInitPresent +applyProposal _mlsMeta _groupId _ = pure mempty + +processProposal :: + HasProposalEffects r => + ( Member (ErrorS 'ConvNotFound) r, + Member (ErrorS 'MLSStaleMessage) r + ) => + Qualified UserId -> + Local ConvOrSubConv -> + IncomingMessage -> -- TODO: just pass header? 
+ IncomingPublicMessageContent -> + RawMLS Proposal -> + Sem r () +processProposal qusr lConvOrSub msg pub prop = do + let mlsMeta = mlsMetaConvOrSub (tUnqualified lConvOrSub) + -- Check if the epoch number matches that of a conversation + unless (msg.epoch == cnvmlsEpoch mlsMeta) $ throwS @'MLSStaleMessage + -- Check if the group ID matches that of a conversation + unless (msg.groupId == cnvmlsGroupId mlsMeta) $ throwS @'ConvNotFound + let suiteTag = cnvmlsCipherSuite mlsMeta + + -- FUTUREWORK: validate the member's conversation role + let im = indexMapConvOrSub $ tUnqualified lConvOrSub + checkProposal mlsMeta im prop.value + when (isExternal pub.sender) $ checkExternalProposalUser qusr prop.value + let propRef = authContentRef suiteTag (incomingMessageAuthenticatedContent pub) + storeProposal msg.groupId msg.epoch propRef ProposalOriginClient prop + +getKeyPackageIdentity :: + Member (ErrorS 'MLSUnsupportedProposal) r => + KeyPackage -> + Sem r ClientIdentity +getKeyPackageIdentity = + either (\_ -> throwS @'MLSUnsupportedProposal) pure + . keyPackageIdentity + +isExternal :: Sender -> Bool +isExternal (SenderMember _) = False +isExternal _ = True + +-- check owner/subject of the key package exists and belongs to the user +checkExternalProposalUser :: + ( Member BrigAccess r, + Member (ErrorS 'MLSUnsupportedProposal) r, + Member (Input (Local ())) r + ) => + Qualified UserId -> + Proposal -> + Sem r () +checkExternalProposalUser qusr prop = do + loc <- qualifyLocal () + foldQualified + loc + ( \lusr -> case prop of + AddProposal kp -> do + ClientIdentity {ciUser, ciClient} <- getKeyPackageIdentity kp.value + -- requesting user must match key package owner + when (tUnqualified lusr /= ciUser) $ throwS @'MLSUnsupportedProposal + -- client referenced in key package must be one of the user's clients + UserClients {userClients} <- lookupClients [ciUser] + maybe + (throwS @'MLSUnsupportedProposal) + (flip when (throwS @'MLSUnsupportedProposal) . Set.null . 
Set.filter (== ciClient)) + $ userClients Map.!? ciUser + _ -> throwS @'MLSUnsupportedProposal + ) + (const $ pure ()) -- FUTUREWORK: check external proposals from remote backends + qusr From 8959662fe681fcbf93e46d37f7a4822c61c720f2 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 28 Apr 2023 11:50:00 +0200 Subject: [PATCH 58/75] Inline executeIntCommitProposalAction --- .../Galley/API/MLS/Commit/InternalCommit.hs | 247 ++++++++---------- 1 file changed, 114 insertions(+), 133 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Commit/InternalCommit.hs b/services/galley/src/Galley/API/MLS/Commit/InternalCommit.hs index debd3493fa..af8f7b4782 100644 --- a/services/galley/src/Galley/API/MLS/Commit/InternalCommit.hs +++ b/services/galley/src/Galley/API/MLS/Commit/InternalCommit.hs @@ -78,153 +78,134 @@ processInternalCommit :: processInternalCommit senderIdentity con lConvOrSub epoch action commit = do let convOrSub = tUnqualified lConvOrSub mlsMeta = mlsMetaConvOrSub convOrSub - - withCommitLock (fmap idForConvOrSub lConvOrSub) (cnvmlsGroupId (mlsMetaConvOrSub convOrSub)) epoch $ do - -- check all pending proposals are referenced in the commit - allPendingProposals <- getAllPendingProposalRefs (cnvmlsGroupId mlsMeta) epoch - let referencedProposals = Set.fromList $ mapMaybe (\x -> preview Proposal._Ref x) commit.proposals - unless (all (`Set.member` referencedProposals) allPendingProposals) $ - throwS @'MLSCommitMissingReferences - - -- process and execute proposals - updates <- executeIntCommitProposalAction senderIdentity con lConvOrSub action - - -- increment epoch number - for_ lConvOrSub incrementEpoch - - pure updates - -executeIntCommitProposalAction :: - forall r. 
- HasProposalActionEffects r => - ClientIdentity -> - Maybe ConnId -> - Local ConvOrSubConv -> - ProposalAction -> - Sem r [LocalConversationUpdate] -executeIntCommitProposalAction senderIdentity con lconvOrSub action = do - let qusr = cidQualifiedUser senderIdentity - convOrSub = tUnqualified lconvOrSub - mlsMeta = mlsMetaConvOrSub convOrSub + qusr = cidQualifiedUser senderIdentity cm = membersConvOrSub convOrSub ss = csSignatureScheme (cnvmlsCipherSuite mlsMeta) newUserClients = Map.assocs (paAdd action) - -- FUTUREWORK: remove this check after remote admins are implemented in federation https://wearezeta.atlassian.net/browse/FS-216 - foldQualified lconvOrSub (\_ -> pure ()) (\_ -> throwS @'MLSUnsupportedProposal) qusr + -- check all pending proposals are referenced in the commit + allPendingProposals <- getAllPendingProposalRefs (cnvmlsGroupId mlsMeta) epoch + let referencedProposals = Set.fromList $ mapMaybe (\x -> preview Proposal._Ref x) commit.proposals + unless (all (`Set.member` referencedProposals) allPendingProposals) $ + throwS @'MLSCommitMissingReferences - -- no client can be directly added to a subconversation - when (is _SubConv convOrSub && any ((senderIdentity /=) . fst) (cmAssocs (paAdd action))) $ - throw (mlsProtocolError "Add proposals in subconversations are not supported") + withCommitLock (fmap idForConvOrSub lConvOrSub) (cnvmlsGroupId (mlsMetaConvOrSub convOrSub)) epoch $ do + -- FUTUREWORK: remove this check after remote admins are implemented in federation https://wearezeta.atlassian.net/browse/FS-216 + foldQualified lConvOrSub (\_ -> pure ()) (\_ -> throwS @'MLSUnsupportedProposal) qusr - -- Note [client removal] - -- We support two types of removals: - -- 1. when a user is removed from a group, all their clients have to be removed - -- 2. when a client is deleted, that particular client (but not necessarily - -- other clients of the same user) has to be removed. 
- -- - -- Type 2 requires no special processing on the backend, so here we filter - -- out all removals of that type, so that further checks and processing can - -- be applied only to type 1 removals. - -- - -- Furthermore, subconversation clients can be removed arbitrarily, so this - -- processing is only necessary for main conversations. In the - -- subconversation case, an empty list is returned. - membersToRemove <- case convOrSub of - SubConv _ _ -> pure [] - Conv _ -> mapMaybe hush <$$> for (Map.assocs (paRemove action)) $ - \(qtarget, Map.keysSet -> clients) -> runError @() $ do - let clientsInConv = Map.keysSet (Map.findWithDefault mempty qtarget cm) - let removedClients = Set.intersection clients clientsInConv + -- no client can be directly added to a subconversation + when (is _SubConv convOrSub && any ((senderIdentity /=) . fst) (cmAssocs (paAdd action))) $ + throw (mlsProtocolError "Add proposals in subconversations are not supported") - -- ignore user if none of their clients are being removed - when (Set.null removedClients) $ throw () + -- Note [client removal] + -- We support two types of removals: + -- 1. when a user is removed from a group, all their clients have to be removed + -- 2. when a client is deleted, that particular client (but not necessarily + -- other clients of the same user) has to be removed. + -- + -- Type 2 requires no special processing on the backend, so here we filter + -- out all removals of that type, so that further checks and processing can + -- be applied only to type 1 removals. + -- + -- Furthermore, subconversation clients can be removed arbitrarily, so this + -- processing is only necessary for main conversations. In the + -- subconversation case, an empty list is returned. 
+ membersToRemove <- case convOrSub of + SubConv _ _ -> pure [] + Conv _ -> mapMaybe hush <$$> for (Map.assocs (paRemove action)) $ + \(qtarget, Map.keysSet -> clients) -> runError @() $ do + let clientsInConv = Map.keysSet (Map.findWithDefault mempty qtarget cm) + let removedClients = Set.intersection clients clientsInConv - -- return error if the user is trying to remove themself - when (cidQualifiedUser senderIdentity == qtarget) $ - throwS @'MLSSelfRemovalNotAllowed + -- ignore user if none of their clients are being removed + when (Set.null removedClients) $ throw () - -- FUTUREWORK: add tests against this situation for conv v subconv - when (not (is _SubConv convOrSub) && removedClients /= clientsInConv) $ do - -- FUTUREWORK: turn this error into a proper response - throwS @'MLSClientMismatch + -- return error if the user is trying to remove themself + when (cidQualifiedUser senderIdentity == qtarget) $ + throwS @'MLSSelfRemovalNotAllowed - pure qtarget + -- FUTUREWORK: add tests against this situation for conv v subconv + when (not (is _SubConv convOrSub) && removedClients /= clientsInConv) $ do + -- FUTUREWORK: turn this error into a proper response + throwS @'MLSClientMismatch - -- for each user, we compare their clients with the ones being added to the conversation - for_ newUserClients $ \(qtarget, newclients) -> case Map.lookup qtarget cm of - -- user is already present, skip check in this case - Just _ -> pure () - -- new user - Nothing -> do - -- final set of clients in the conversation - let clients = Map.keysSet (newclients <> Map.findWithDefault mempty qtarget cm) - -- get list of mls clients from brig - clientInfo <- getClientInfo lconvOrSub qtarget ss - let allClients = Set.map ciId clientInfo - let allMLSClients = Set.map ciId (Set.filter ciMLS clientInfo) - -- We check the following condition: - -- allMLSClients ⊆ clients ⊆ allClients - -- i.e. 
- -- - if a client has at least 1 key package, it has to be added - -- - if a client is being added, it has to still exist - -- - -- The reason why we can't simply check that clients == allMLSClients is - -- that a client with no remaining key packages might be added by a user - -- who just fetched its last key package. - unless - ( Set.isSubsetOf allMLSClients clients - && Set.isSubsetOf clients allClients - ) - $ do - -- unless (Set.isSubsetOf allClients clients) $ do - -- FUTUREWORK: turn this error into a proper response - throwS @'MLSClientMismatch + pure qtarget - -- remove users from the conversation and send events - removeEvents <- - foldMap - (removeMembers qusr con lconvOrSub) - (nonEmpty membersToRemove) + -- for each user, we compare their clients with the ones being added to the conversation + for_ newUserClients $ \(qtarget, newclients) -> case Map.lookup qtarget cm of + -- user is already present, skip check in this case + Just _ -> pure () + -- new user + Nothing -> do + -- final set of clients in the conversation + let clients = Map.keysSet (newclients <> Map.findWithDefault mempty qtarget cm) + -- get list of mls clients from brig + clientInfo <- getClientInfo lConvOrSub qtarget ss + let allClients = Set.map ciId clientInfo + let allMLSClients = Set.map ciId (Set.filter ciMLS clientInfo) + -- We check the following condition: + -- allMLSClients ⊆ clients ⊆ allClients + -- i.e. + -- - if a client has at least 1 key package, it has to be added + -- - if a client is being added, it has to still exist + -- + -- The reason why we can't simply check that clients == allMLSClients is + -- that a client with no remaining key packages might be added by a user + -- who just fetched its last key package. 
+ unless + ( Set.isSubsetOf allMLSClients clients + && Set.isSubsetOf clients allClients + ) + $ do + -- unless (Set.isSubsetOf allClients clients) $ do + -- FUTUREWORK: turn this error into a proper response + throwS @'MLSClientMismatch - -- Remove clients from the conversation state. This includes client removals - -- of all types (see Note [client removal]). - for_ (Map.assocs (paRemove action)) $ \(qtarget, clients) -> do - removeMLSClients (cnvmlsGroupId mlsMeta) qtarget (Map.keysSet clients) + -- remove users from the conversation and send events + removeEvents <- + foldMap + (removeMembers qusr con lConvOrSub) + (nonEmpty membersToRemove) - -- if this is a new subconversation, call `on-new-remote-conversation` on all - -- the remote backends involved in the main conversation - forOf_ _SubConv convOrSub $ \(mlsConv, subConv) -> do - when (cnvmlsEpoch (scMLSData subConv) == Epoch 0) $ do - let remoteDomains = - Set.fromList - ( map - (void . rmId) - (mcRemoteMembers mlsConv) - ) - let nrc = - NewRemoteSubConversation - { nrscConvId = mcId mlsConv, - nrscSubConvId = scSubConvId subConv, - nrscMlsData = scMLSData subConv - } - runFederatedConcurrently_ (toList remoteDomains) $ \_ -> do - void $ fedClient @'Galley @"on-new-remote-subconversation" nrc + -- Remove clients from the conversation state. This includes client removals + -- of all types (see Note [client removal]). + for_ (Map.assocs (paRemove action)) $ \(qtarget, clients) -> do + removeMLSClients (cnvmlsGroupId mlsMeta) qtarget (Map.keysSet clients) - -- add users to the conversation and send events - addEvents <- - foldMap (addMembers qusr con lconvOrSub) - . nonEmpty - . map fst - $ newUserClients + -- if this is a new subconversation, call `on-new-remote-conversation` on all + -- the remote backends involved in the main conversation + forOf_ _SubConv convOrSub $ \(mlsConv, subConv) -> do + when (cnvmlsEpoch (scMLSData subConv) == Epoch 0) $ do + let remoteDomains = + Set.fromList + ( map + (void . 
rmId) + (mcRemoteMembers mlsConv) + ) + let nrc = + NewRemoteSubConversation + { nrscConvId = mcId mlsConv, + nrscSubConvId = scSubConvId subConv, + nrscMlsData = scMLSData subConv + } + runFederatedConcurrently_ (toList remoteDomains) $ \_ -> do + void $ fedClient @'Galley @"on-new-remote-subconversation" nrc + + -- add users to the conversation and send events + addEvents <- + foldMap (addMembers qusr con lConvOrSub) + . nonEmpty + . map fst + $ newUserClients - -- add clients in the conversation state - for_ newUserClients $ \(qtarget, newClients) -> do - addMLSClients (cnvmlsGroupId mlsMeta) qtarget (Set.fromList (Map.assocs newClients)) + -- add clients in the conversation state + for_ newUserClients $ \(qtarget, newClients) -> do + addMLSClients (cnvmlsGroupId mlsMeta) qtarget (Set.fromList (Map.assocs newClients)) - -- TODO: increment epoch here instead of in the calling site + -- increment epoch number + for_ lConvOrSub incrementEpoch - pure (addEvents <> removeEvents) + pure (addEvents <> removeEvents) addMembers :: HasProposalActionEffects r => @@ -233,9 +214,9 @@ addMembers :: Local ConvOrSubConv -> NonEmpty (Qualified UserId) -> Sem r [LocalConversationUpdate] -addMembers qusr con lconvOrSub users = case tUnqualified lconvOrSub of +addMembers qusr con lConvOrSub users = case tUnqualified lConvOrSub of Conv mlsConv -> do - let lconv = qualifyAs lconvOrSub (mcConv mlsConv) + let lconv = qualifyAs lConvOrSub (mcConv mlsConv) -- FUTUREWORK: update key package ref mapping to reflect conversation membership foldMap ( handleNoChanges @@ -257,9 +238,9 @@ removeMembers :: Local ConvOrSubConv -> NonEmpty (Qualified UserId) -> Sem r [LocalConversationUpdate] -removeMembers qusr con lconvOrSub users = case tUnqualified lconvOrSub of +removeMembers qusr con lConvOrSub users = case tUnqualified lConvOrSub of Conv mlsConv -> do - let lconv = qualifyAs lconvOrSub (mcConv mlsConv) + let lconv = qualifyAs lConvOrSub (mcConv mlsConv) foldMap ( handleNoChanges . 
handleMLSProposalFailures @ProposalErrors From 8df69f4b5006cb4957364f197f9a0733c5b30467 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 28 Apr 2023 14:33:16 +0200 Subject: [PATCH 59/75] Use more specific type for external commit actions --- .../Galley/API/MLS/Commit/ExternalCommit.hs | 80 ++++++++----------- 1 file changed, 34 insertions(+), 46 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Commit/ExternalCommit.hs b/services/galley/src/Galley/API/MLS/Commit/ExternalCommit.hs index eca278f9ca..6a6ceb481b 100644 --- a/services/galley/src/Galley/API/MLS/Commit/ExternalCommit.hs +++ b/services/galley/src/Galley/API/MLS/Commit/ExternalCommit.hs @@ -27,7 +27,6 @@ import qualified Data.Map as Map import Data.Qualified import qualified Data.Set as Set import Data.Tuple.Extra -import Galley.API.Error import Galley.API.MLS.Commit.Core import Galley.API.MLS.Proposal import Galley.API.MLS.Removal @@ -52,6 +51,11 @@ import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation import Wire.API.MLS.Validation +data ExternalCommitAction = ExternalCommitAction + { add :: (ClientIdentity, LeafIndex), + remove :: Maybe (ClientIdentity, LeafIndex) + } + getExternalCommitData :: forall r. 
( Member (Error MLSProtocolError) r, @@ -63,7 +67,7 @@ getExternalCommitData :: Local ConvOrSubConv -> Epoch -> Commit -> - Sem r ProposalAction + Sem r ExternalCommitAction getExternalCommitData senderIdentity lConvOrSub epoch commit = do let convOrSub = tUnqualified lConvOrSub mlsMeta = mlsMetaConvOrSub convOrSub @@ -84,23 +88,28 @@ getExternalCommitData senderIdentity lConvOrSub epoch commit = do unless (Map.lookup ExternalInitProposalTag counts == Just 1) $ throw (mlsProtocolError "External commits must contain exactly one ExternalInit proposal") - unless (Map.findWithDefault 0 RemoveProposalTag counts <= 1) $ - throw (mlsProtocolError "External commits must contain at most one Remove proposal") unless (null (Map.keys counts \\ allowedProposals)) $ throw (mlsProtocolError "Invalid proposal type in an external commit") evalState (indexMapConvOrSub convOrSub) $ do -- process optional removal propAction <- applyProposals mlsMeta groupId proposals - -- add sender - selfAction <- addProposedClient senderIdentity - case cmAssocs (paRemove propAction) of - [(cid, _)] + removedClient <- case cmAssocs (paRemove propAction) of + [(cid, idx)] | cid /= senderIdentity -> throw $ mlsProtocolError "Only the self client can be removed by an external commit" - _ -> pure () - - pure $ propAction <> selfAction + | otherwise -> pure (Just (cid, idx)) + [] -> pure Nothing + _ -> throw (mlsProtocolError "External commits must contain at most one Remove proposal") + + -- add sender client + addedIndex <- gets imNextIndex + + pure + ExternalCommitAction + { add = (senderIdentity, addedIndex), + remove = removedClient + } where allowedProposals = [ExternalInitProposalTag, RemoveProposalTag, PreSharedKeyProposalTag] @@ -119,7 +128,7 @@ processExternalCommit :: ClientIdentity -> Local ConvOrSubConv -> Epoch -> - ProposalAction -> + ExternalCommitAction -> Maybe UpdatePath -> Sem r () processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do @@ -130,11 +139,6 @@ 
processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do unless (isClientMember senderIdentity (mcMembers mlsConv)) $ throwS @'MLSSubConvClientNotInParent - -- get index of the newly added client, as calculated when processing proposals - idx <- case cmAssocs (paAdd action) of - [(cid, idx)] | cid == senderIdentity -> pure idx - _ -> throw (InternalErrorWithDescription "Unexpected Add action for external commit") - -- extract leaf node from update path and validate it leafNode <- (.leaf) @@ -143,7 +147,7 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do updatePath let cs = cnvmlsCipherSuite (mlsMetaConvOrSub (tUnqualified lConvOrSub)) let groupId = cnvmlsGroupId (mlsMetaConvOrSub convOrSub) - let extra = LeafNodeTBSExtraCommit groupId idx + let extra = LeafNodeTBSExtraCommit groupId (snd action.add) case validateLeafNode cs (Just senderIdentity) extra leafNode.value of Left errMsg -> throw $ @@ -151,16 +155,15 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do Right _ -> pure () withCommitLock (fmap idForConvOrSub lConvOrSub) groupId epoch $ do - executeExtCommitProposalAction senderIdentity lConvOrSub action + executeExternalCommitAction lConvOrSub action -- increment epoch number lConvOrSub' <- for lConvOrSub incrementEpoch -- fetch backend remove proposals of the previous epoch - let remIndices = map snd (cmAssocs (paRemove action)) indicesInRemoveProposals <- -- skip remove proposals of already removed by the external commit - (\\ remIndices) + (\\ toList (fmap snd action.remove)) <$> getPendingBackendRemoveProposals groupId epoch -- requeue backend remove proposals for the current epoch @@ -171,41 +174,26 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do (cidQualifiedUser senderIdentity) cm -executeExtCommitProposalAction :: +executeExternalCommitAction :: forall r. 
HasProposalActionEffects r => - ClientIdentity -> Local ConvOrSubConv -> - ProposalAction -> + ExternalCommitAction -> Sem r () -executeExtCommitProposalAction senderIdentity lconvOrSub action = do +executeExternalCommitAction lconvOrSub action = do let mlsMeta = mlsMetaConvOrSub $ tUnqualified lconvOrSub - newCILeaves = cmAssocs (paAdd action) - deprecatedCILeaves = cmAssocs (paRemove action) - - -- Adding clients: sender's client must be added and no other other client may - -- be added. - when (length newCILeaves /= 1 || fst (head newCILeaves) /= senderIdentity) $ - throw (mlsProtocolError "No add proposals are allowed in external commits") - - -- Client removal: only the sender's client can be removed when rejoining the - -- (sub)conversation. - when (length deprecatedCILeaves > 1) $ - throw (mlsProtocolError "Up to one client can be removed in an external commit") - for_ (listToMaybe deprecatedCILeaves) $ \ciLeaf -> do - when (fst ciLeaf /= senderIdentity) $ - throw (mlsProtocolError "Only the sender can rejoin in an external commit") -- Remove deprecated sender client from conversation state. - for_ deprecatedCILeaves $ \(ci, _) -> do + for_ action.remove $ \(cid, _) -> removeMLSClients (cnvmlsGroupId mlsMeta) - (cidQualifiedUser ci) - (Set.singleton $ ciClient ci) + (cidQualifiedUser cid) + (Set.singleton (ciClient cid)) -- Add new sender client to the conversation state. 
- for_ newCILeaves $ \(ci, idx) -> do + do + let (cid, idx) = action.add addMLSClients (cnvmlsGroupId mlsMeta) - (cidQualifiedUser ci) - (Set.singleton (ciClient ci, idx)) + (cidQualifiedUser cid) + (Set.singleton (ciClient cid, idx)) From d1158734100859984cbda162c4b2841418c5bec7 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 28 Apr 2023 14:44:12 +0200 Subject: [PATCH 60/75] Re-organise TODOs --- services/galley/src/Galley/API/MLS/Message.hs | 6 ++++++ services/galley/test/integration/API/MLS.hs | 1 - 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 70d6169b76..d1c94f17ce 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -71,6 +71,12 @@ import Wire.API.MLS.Message import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation +-- TODO: +-- [ ] replace commit message in CommitBundle with a Commit object +-- [ ] restore deleted MLS unit tests +-- [ ] pass groupId and epoch to processProposal instead of the whole IncomingMessage +-- [ ] remove LWT in planMLSClientRemoval + -- FUTUREWORK -- - Check that the capabilities of a leaf node in an add proposal contains all -- the required_capabilities of the group context. 
This would require fetching diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index c0b3d475d8..5c75b724be 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -2290,7 +2290,6 @@ testJoinSubConv = do =<< getSubConv (qUnqualified bob) qcnv subId Date: Fri, 28 Apr 2023 14:50:18 +0200 Subject: [PATCH 61/75] Simplify processProposal arguments --- libs/wire-api/src/Wire/API/MLS/CommitBundle.hs | 2 +- services/galley/src/Galley/API/MLS/Message.hs | 4 ++-- services/galley/src/Galley/API/MLS/Proposal.hs | 11 ++++++----- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs index f828196819..1ca590e04e 100644 --- a/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs +++ b/libs/wire-api/src/Wire/API/MLS/CommitBundle.hs @@ -27,7 +27,7 @@ import Wire.API.MLS.Serialisation import Wire.API.MLS.Welcome data CommitBundle = CommitBundle - { commitMsg :: RawMLS Message, -- TODO: change this type to Commit + { commitMsg :: RawMLS Message, welcome :: Maybe (RawMLS Welcome), groupInfo :: RawMLS GroupInfo } diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index d1c94f17ce..99f117cd48 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -72,10 +72,10 @@ import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation -- TODO: --- [ ] replace commit message in CommitBundle with a Commit object -- [ ] restore deleted MLS unit tests -- [ ] pass groupId and epoch to processProposal instead of the whole IncomingMessage -- [ ] remove LWT in planMLSClientRemoval +-- [ ] restore unsupported proposal integration test -- FUTUREWORK -- - Check that the capabilities of a leaf node in an add proposal contains all @@ -354,7 +354,7 @@ postMLSMessageToLocalConv qusr c con msg 
convOrSubId = do FramedContentCommit _commit -> throwS @'MLSUnsupportedMessage FramedContentApplicationData _ -> throwS @'MLSUnsupportedMessage FramedContentProposal prop -> - processProposal qusr lConvOrSub msg pub prop $> mempty + processProposal qusr lConvOrSub msg.groupId msg.epoch pub prop $> mempty IncomingMessageContentPrivate -> pure mempty let cm = membersConvOrSub (tUnqualified lConvOrSub) diff --git a/services/galley/src/Galley/API/MLS/Proposal.hs b/services/galley/src/Galley/API/MLS/Proposal.hs index 617d95c8fa..a838d96ea8 100644 --- a/services/galley/src/Galley/API/MLS/Proposal.hs +++ b/services/galley/src/Galley/API/MLS/Proposal.hs @@ -245,16 +245,17 @@ processProposal :: ) => Qualified UserId -> Local ConvOrSubConv -> - IncomingMessage -> -- TODO: just pass header? + GroupId -> + Epoch -> IncomingPublicMessageContent -> RawMLS Proposal -> Sem r () -processProposal qusr lConvOrSub msg pub prop = do +processProposal qusr lConvOrSub groupId epoch pub prop = do let mlsMeta = mlsMetaConvOrSub (tUnqualified lConvOrSub) -- Check if the epoch number matches that of a conversation - unless (msg.epoch == cnvmlsEpoch mlsMeta) $ throwS @'MLSStaleMessage + unless (epoch == cnvmlsEpoch mlsMeta) $ throwS @'MLSStaleMessage -- Check if the group ID matches that of a conversation - unless (msg.groupId == cnvmlsGroupId mlsMeta) $ throwS @'ConvNotFound + unless (groupId == cnvmlsGroupId mlsMeta) $ throwS @'ConvNotFound let suiteTag = cnvmlsCipherSuite mlsMeta -- FUTUREWORK: validate the member's conversation role @@ -262,7 +263,7 @@ processProposal qusr lConvOrSub msg pub prop = do checkProposal mlsMeta im prop.value when (isExternal pub.sender) $ checkExternalProposalUser qusr prop.value let propRef = authContentRef suiteTag (incomingMessageAuthenticatedContent pub) - storeProposal msg.groupId msg.epoch propRef ProposalOriginClient prop + storeProposal groupId epoch propRef ProposalOriginClient prop getKeyPackageIdentity :: Member (ErrorS 'MLSUnsupportedProposal) r => 
From a12fd7550ac1a6cdce39eae5c6b2d062540c2f57 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Fri, 28 Apr 2023 14:53:08 +0200 Subject: [PATCH 62/75] Remove LWT in planMLSClientRemoval --- services/galley/src/Galley/API/MLS/Message.hs | 4 ++-- .../Galley/Cassandra/Conversation/Members.hs | 17 ++++++++--------- services/galley/src/Galley/Cassandra/Queries.hs | 4 ++-- 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 99f117cd48..4f2ada95fd 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -73,8 +73,8 @@ import Wire.API.MLS.SubConversation -- TODO: -- [ ] restore deleted MLS unit tests --- [ ] pass groupId and epoch to processProposal instead of the whole IncomingMessage --- [ ] remove LWT in planMLSClientRemoval +-- [x] pass groupId and epoch to processProposal instead of the whole IncomingMessage +-- [x] remove LWT in planMLSClientRemoval -- [ ] restore unsupported proposal integration test -- FUTUREWORK diff --git a/services/galley/src/Galley/Cassandra/Conversation/Members.hs b/services/galley/src/Galley/Cassandra/Conversation/Members.hs index 97bcb237e3..da67f5e52f 100644 --- a/services/galley/src/Galley/Cassandra/Conversation/Members.hs +++ b/services/galley/src/Galley/Cassandra/Conversation/Members.hs @@ -350,16 +350,15 @@ addMLSClients groupId (Qualified usr domain) cs = retry x5 . batch $ do for_ cs $ \(c, idx) -> addPrepQuery Cql.addMLSClient (groupId, domain, usr, c, fromIntegral idx) --- TODO Could (and should) we use batch instead? planMLSClientRemoval :: Foldable f => GroupId -> f ClientIdentity -> Client () -planMLSClientRemoval groupId cids = for_ cids $ \cid -> do - retry x5 $ - trans - Cql.planMLSClientRemoval - ( params - LocalQuorum - (groupId, ciDomain cid, ciUser cid, ciClient cid) - ) +planMLSClientRemoval groupId cids = + retry x5 . 
batch $ do + setType BatchLogged + setConsistency LocalQuorum + for_ cids $ \cid -> do + addPrepQuery + Cql.planMLSClientRemoval + (groupId, ciDomain cid, ciUser cid, ciClient cid) removeMLSClients :: GroupId -> Qualified UserId -> Set.Set ClientId -> Client () removeMLSClients groupId (Qualified usr domain) cs = retry x5 . batch $ do diff --git a/services/galley/src/Galley/Cassandra/Queries.hs b/services/galley/src/Galley/Cassandra/Queries.hs index b1ea419595..7e491df366 100644 --- a/services/galley/src/Galley/Cassandra/Queries.hs +++ b/services/galley/src/Galley/Cassandra/Queries.hs @@ -470,8 +470,8 @@ rmMemberClient c = addMLSClient :: PrepQuery W (GroupId, Domain, UserId, ClientId, Int32) () addMLSClient = "insert into mls_group_member_client (group_id, user_domain, user, client, leaf_node_index, removal_pending) values (?, ?, ?, ?, ?, false)" -planMLSClientRemoval :: PrepQuery W (GroupId, Domain, UserId, ClientId) Row -planMLSClientRemoval = "update mls_group_member_client set removal_pending = true where group_id = ? and user_domain = ? and user = ? and client = ? if exists" +planMLSClientRemoval :: PrepQuery W (GroupId, Domain, UserId, ClientId) () +planMLSClientRemoval = "update mls_group_member_client set removal_pending = true where group_id = ? and user_domain = ? and user = ? and client = ?" removeMLSClient :: PrepQuery W (GroupId, Domain, UserId, ClientId) () removeMLSClient = "delete from mls_group_member_client where group_id = ? and user_domain = ? and user = ? and client = ?" 
From 0eea5fbf3abebf7561b75a90f23db3f981d9e7d5 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 2 May 2023 11:20:42 +0200 Subject: [PATCH 63/75] Restore unsupported proposal test --- .../src/Wire/API/MLS/AuthenticatedContent.hs | 27 +++++++++++--- libs/wire-api/test/unit/Test/Wire/API/MLS.hs | 1 + services/galley/src/Galley/API/MLS/Message.hs | 2 +- services/galley/src/Galley/API/MLS/Removal.hs | 1 + services/galley/test/integration/API/MLS.hs | 35 ++++++++++++++++++- .../galley/test/integration/API/MLS/Util.hs | 14 ++++---- 6 files changed, 68 insertions(+), 12 deletions(-) diff --git a/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs b/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs index 5daf745bfb..394a18ede1 100644 --- a/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs +++ b/libs/wire-api/src/Wire/API/MLS/AuthenticatedContent.hs @@ -17,6 +17,7 @@ module Wire.API.MLS.AuthenticatedContent ( AuthenticatedContent (..), + TaggedSender (..), authContentRef, publicMessageRef, mkSignedPublicMessage, @@ -29,6 +30,7 @@ import Wire.API.MLS.CipherSuite import Wire.API.MLS.Context import Wire.API.MLS.Epoch import Wire.API.MLS.Group +import Wire.API.MLS.LeafNode import Wire.API.MLS.Message import Wire.API.MLS.Proposal import Wire.API.MLS.ProtocolVersion @@ -64,16 +66,33 @@ authContentRef cs = ProposalRef . csHash cs proposalContext . mkRawMLS publicMessageRef :: CipherSuiteTag -> PublicMessage -> ProposalRef publicMessageRef cs = authContentRef cs . msgAuthContent +-- | Sender, plus with a membership tag in the case of a member sender. 
+data TaggedSender + = TaggedSenderMember LeafIndex ByteString + | TaggedSenderExternal Word32 + | TaggedSenderNewMemberProposal + | TaggedSenderNewMemberCommit + +taggedSenderToSender :: TaggedSender -> Sender +taggedSenderToSender (TaggedSenderMember i _) = SenderMember i +taggedSenderToSender (TaggedSenderExternal n) = SenderExternal n +taggedSenderToSender TaggedSenderNewMemberProposal = SenderNewMemberProposal +taggedSenderToSender TaggedSenderNewMemberCommit = SenderNewMemberCommit + +taggedSenderMembershipTag :: TaggedSender -> Maybe ByteString +taggedSenderMembershipTag (TaggedSenderMember _ t) = Just t +taggedSenderMembershipTag _ = Nothing + -- | Craft a message with the backend itself as a sender. Return the message and its ref. mkSignedPublicMessage :: - SecretKey -> PublicKey -> GroupId -> Epoch -> FramedContentData -> PublicMessage -mkSignedPublicMessage priv pub gid epoch payload = + SecretKey -> PublicKey -> GroupId -> Epoch -> TaggedSender -> FramedContentData -> PublicMessage +mkSignedPublicMessage priv pub gid epoch sender payload = let framedContent = mkRawMLS FramedContent { groupId = gid, epoch = epoch, - sender = SenderExternal 0, + sender = taggedSenderToSender sender, content = payload, authenticatedData = mempty } @@ -88,5 +107,5 @@ mkSignedPublicMessage priv pub gid epoch payload = in PublicMessage { content = framedContent, authData = mkRawMLS (FramedContentAuthData sig Nothing), - membershipTag = Nothing + membershipTag = taggedSenderMembershipTag sender } diff --git a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs index 6842012fee..c497d8c7a6 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs @@ -150,6 +150,7 @@ testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do publicKey gid (Epoch 1) + (TaggedSenderExternal 0) (FramedContentProposal proposal) message = mkMessage $ MessagePublic pmessage messageFilename = 
"signed-message.mls" diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 4f2ada95fd..60a08b1c17 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -75,7 +75,7 @@ import Wire.API.MLS.SubConversation -- [ ] restore deleted MLS unit tests -- [x] pass groupId and epoch to processProposal instead of the whole IncomingMessage -- [x] remove LWT in planMLSClientRemoval --- [ ] restore unsupported proposal integration test +-- [x] restore unsupported proposal integration test -- FUTUREWORK -- - Check that the capabilities of a leaf node in an add proposal contains all diff --git a/services/galley/src/Galley/API/MLS/Removal.hs b/services/galley/src/Galley/API/MLS/Removal.hs index 2a7e977dfd..f801bf06b5 100644 --- a/services/galley/src/Galley/API/MLS/Removal.hs +++ b/services/galley/src/Galley/API/MLS/Removal.hs @@ -88,6 +88,7 @@ createAndSendRemoveProposals lConvOrSubConv indices qusr cm = do pubKey (cnvmlsGroupId meta) (cnvmlsEpoch meta) + (TaggedSenderExternal 0) (FramedContentProposal proposal) msg = mkRawMLS (mkMessage (MessagePublic pmsg)) storeProposal diff --git a/services/galley/test/integration/API/MLS.hs b/services/galley/test/integration/API/MLS.hs index 5c75b724be..d25796947d 100644 --- a/services/galley/test/integration/API/MLS.hs +++ b/services/galley/test/integration/API/MLS.hs @@ -29,6 +29,8 @@ import Control.Exception (throw) import Control.Lens (view) import Control.Lens.Extras import qualified Control.Monad.State as State +import Crypto.Error +import qualified Crypto.PubKey.Ed25519 as Ed25519 import qualified Data.Aeson as Aeson import Data.Domain import Data.Id @@ -60,10 +62,12 @@ import Wire.API.Error.Galley import Wire.API.Event.Conversation import Wire.API.Federation.API.Common import Wire.API.Federation.API.Galley +import Wire.API.MLS.AuthenticatedContent import Wire.API.MLS.CipherSuite import Wire.API.MLS.Credential import 
Wire.API.MLS.Keys import Wire.API.MLS.Message +import Wire.API.MLS.Proposal import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation import Wire.API.Message @@ -1583,8 +1587,37 @@ testPublicKeys = do ) @?= [Ed25519] +--- | The test manually reads from mls-test-cli's store and extracts a private +--- key. The key is needed for signing an unsupported proposal, which is then +-- forwarded by the backend without being inspected. propUnsupported :: TestM () -propUnsupported = pure () -- TODO (app ack does not exist anymore) +propUnsupported = do + users@[_alice, bob] <- createAndConnectUsers (replicate 2 Nothing) + runMLSTest $ do + [alice1, bob1] <- traverse createMLSClient users + void $ uploadNewKeyPackage bob1 + (gid, _) <- setupMLSGroup alice1 + void $ createAddCommit alice1 [bob] >>= sendAndConsumeCommitBundle + + (priv, pub) <- clientKeyPair alice1 + pmsg <- + liftIO . throwCryptoErrorIO $ + mkSignedPublicMessage + <$> Ed25519.secretKey priv + <*> Ed25519.publicKey pub + <*> pure gid + <*> pure (Epoch 1) + <*> pure (TaggedSenderMember 0 "foo") + <*> pure + ( FramedContentProposal + (mkRawMLS (GroupContextExtensionsProposal [])) + ) + + let msg = mkMessage (MessagePublic pmsg) + let msgData = encodeMLS' msg + + -- we cannot consume this message, because the membership tag is fake + postMessage alice1 msgData !!! 
const 201 === statusCode testBackendRemoveProposalRecreateClient :: TestM () testBackendRemoveProposalRecreateClient = do diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index 4884c73dc5..10d19b414c 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -35,6 +35,7 @@ import Control.Monad.Trans.Maybe import Data.Aeson.Lens import Data.Bifunctor import Data.Binary.Builder (toLazyByteString) +import Data.Binary.Get import qualified Data.ByteArray as BA import qualified Data.ByteString as BS import qualified Data.ByteString.Base64.URL as B64U @@ -239,7 +240,8 @@ liftTest = MLSTest . lift runMLSTest :: MLSTest a -> TestM a runMLSTest (MLSTest m) = - withSystemTempDirectory "mls" $ \tmp -> do + withSystemTempDirectory "mls" $ \_tmp -> do + let tmp = "/tmp/mls" saveRemovalKey (tmp "removal.key") evalStateT m @@ -947,11 +949,11 @@ clientKeyPair cid = do credential <- liftIO . BS.readFile $ bd cid2Str cid "store" T.unpack (T.decodeUtf8 (B64U.encode "self")) - let s = - credential ^.. key "signature_private_key" . key "value" . _Array . traverse . 
_Integer - & fmap fromIntegral - & BS.pack - pure $ BS.splitAt 32 s + case runGetOrFail + ((,) <$> parseMLSBytes @VarInt <*> parseMLSBytes @VarInt) + (LBS.fromStrict credential) of + Left (_, _, msg) -> liftIO $ assertFailure msg + Right (_, _, keys) -> pure keys receiveNewRemoteConv :: (MonadReader TestSetup m, MonadIO m) => From a2ab595c8fb28dd131e97829f2fe32c13417b0a5 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 2 May 2023 11:53:59 +0200 Subject: [PATCH 64/75] Restore disabled MLS unit tests --- libs/wire-api/test/unit/Test/Wire/API/MLS.hs | 137 ++++++++++++++---- libs/wire-api/wire-api.cabal | 1 + services/galley/src/Galley/API/MLS/Message.hs | 6 - 3 files changed, 111 insertions(+), 33 deletions(-) diff --git a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs index c497d8c7a6..10ec20569a 100644 --- a/libs/wire-api/test/unit/Test/Wire/API/MLS.hs +++ b/libs/wire-api/test/unit/Test/Wire/API/MLS.hs @@ -19,34 +19,38 @@ module Test.Wire.API.MLS where import Control.Concurrent.Async import qualified Crypto.PubKey.Ed25519 as Ed25519 -import Data.ByteArray +import Data.ByteArray hiding (length) import qualified Data.ByteString as BS +import qualified Data.ByteString.Char8 as B8 import Data.Domain import Data.Id import Data.Json.Util (toBase64Text) import Data.Qualified import qualified Data.Text as T import qualified Data.Text as Text -import qualified Data.UUID as UUID import qualified Data.UUID.V4 as UUID import Imports import System.Exit import System.FilePath (()) import System.Process +import System.Random import Test.Tasty import Test.Tasty.HUnit import UnliftIO (withSystemTempDirectory) import Wire.API.MLS.AuthenticatedContent import Wire.API.MLS.CipherSuite +import Wire.API.MLS.Commit import Wire.API.MLS.Credential import Wire.API.MLS.Epoch import Wire.API.MLS.Group +import Wire.API.MLS.GroupInfo import Wire.API.MLS.HPKEPublicKey import Wire.API.MLS.KeyPackage import Wire.API.MLS.Message import 
Wire.API.MLS.Proposal import Wire.API.MLS.ProtocolVersion import Wire.API.MLS.Serialisation +import Wire.API.MLS.Welcome tests :: TestTree tests = @@ -54,15 +58,15 @@ tests = [ testCase "parse key package" testParseKeyPackage, testCase "parse commit message" testParseCommit, testCase "parse application message" testParseApplication, - testCase "parse welcome message" testParseWelcome, + testCase "parse welcome and groupinfo message" testParseWelcomeAndGroupInfo, testCase "key package ref" testKeyPackageRef, - testCase "validate message signature" testVerifyMLSPlainTextWithKey, testCase "create signed remove proposal" testRemoveProposalMessageSignature ] testParseKeyPackage :: IO () testParseKeyPackage = do - let qcid = "b455a431-9db6-4404-86e7-6a3ebe73fcaf:3ae58155@mls.example.com" + alice <- randomIdentity + let qcid = B8.unpack (encodeMLS' alice) kpData <- withSystemTempDirectory "mls" $ \tmp -> do void $ spawn (cli qcid tmp ["init", qcid]) Nothing spawn (cli qcid tmp ["key-package", "create"]) Nothing @@ -77,29 +81,106 @@ testParseKeyPackage = do case keyPackageIdentity kp of Left err -> assertFailure $ "Failed to parse identity: " <> T.unpack err - Right identity -> - identity - @?= ClientIdentity - { ciDomain = Domain "mls.example.com", - ciUser = Id (fromJust (UUID.fromString "b455a431-9db6-4404-86e7-6a3ebe73fcaf")), - ciClient = newClientId 0x3ae58155 - } - --- TODO + Right identity -> identity @?= alice + testParseCommit :: IO () -testParseCommit = pure () +testParseCommit = do + qcid <- B8.unpack . 
encodeMLS' <$> randomIdentity + commitData <- withSystemTempDirectory "mls" $ \tmp -> do + void $ spawn (cli qcid tmp ["init", qcid]) Nothing + groupJSON <- spawn (cli qcid tmp ["group", "create", "Zm9v"]) Nothing + spawn (cli qcid tmp ["commit", "--group", "-"]) (Just groupJSON) + + msg <- case decodeMLS' @Message commitData of + Left err -> assertFailure (T.unpack err) + Right x -> pure x + + pvTag (msg.protocolVersion) @?= Just ProtocolMLS10 + + pmsg <- case msg.content of + MessagePublic x -> pure x + _ -> assertFailure "expected public message" + + pmsg.content.value.sender @?= SenderMember 0 + + commit <- case pmsg.content.value.content of + FramedContentCommit c -> pure c + _ -> assertFailure "expected commit" + + commit.value.proposals @?= [] --- TODO testParseApplication :: IO () -testParseApplication = pure () +testParseApplication = do + qcid <- B8.unpack . encodeMLS' <$> randomIdentity + msgData <- withSystemTempDirectory "mls" $ \tmp -> do + void $ spawn (cli qcid tmp ["init", qcid]) Nothing + groupJSON <- spawn (cli qcid tmp ["group", "create", "Zm9v"]) Nothing + spawn (cli qcid tmp ["message", "--group", "-", "hello"]) (Just groupJSON) + + msg <- case decodeMLS' @Message msgData of + Left err -> assertFailure (T.unpack err) + Right x -> pure x + + pvTag (msg.protocolVersion) @?= Just ProtocolMLS10 --- TODO -testParseWelcome :: IO () -testParseWelcome = pure () + pmsg <- case msg.content of + MessagePrivate x -> pure x.value + _ -> assertFailure "expected private message" --- TODO -testParseGroupInfo :: IO () -testParseGroupInfo = pure () + pmsg.groupId @?= GroupId "foo" + pmsg.epoch @?= Epoch 0 + +testParseWelcomeAndGroupInfo :: IO () +testParseWelcomeAndGroupInfo = do + qcid <- B8.unpack . encodeMLS' <$> randomIdentity + qcid2 <- B8.unpack . 
encodeMLS' <$> randomIdentity + (welData, giData) <- withSystemTempDirectory "mls" $ \tmp -> do + void $ spawn (cli qcid tmp ["init", qcid]) Nothing + void $ spawn (cli qcid2 tmp ["init", qcid2]) Nothing + groupJSON <- spawn (cli qcid tmp ["group", "create", "Zm9v"]) Nothing + kp <- spawn (cli qcid2 tmp ["key-package", "create"]) Nothing + BS.writeFile (tmp "kp") kp + void $ + spawn + ( cli + qcid + tmp + [ "member", + "add", + "--group", + "-", + tmp "kp", + "--welcome-out", + tmp "welcome", + "--group-info-out", + tmp "gi" + ] + ) + (Just groupJSON) + (,) + <$> BS.readFile (tmp "welcome") + <*> BS.readFile (tmp "gi") + + do + welcomeMsg <- case decodeMLS' @Message welData of + Left err -> assertFailure (T.unpack err) + Right x -> pure x + + pvTag (welcomeMsg.protocolVersion) @?= Just ProtocolMLS10 + + wel <- case welcomeMsg.content of + MessageWelcome x -> pure x.value + _ -> assertFailure "expected welcome message" + + length (wel.welSecrets) @?= 1 + + do + gi <- case decodeMLS' @GroupInfo giData of + Left err -> assertFailure (T.unpack err) + Right x -> pure x + + gi.groupContext.groupId @?= GroupId "foo" + gi.groupContext.epoch @?= Epoch 1 testKeyPackageRef :: IO () testKeyPackageRef = do @@ -112,10 +193,6 @@ testKeyPackageRef = do kpRef MLS_128_DHKEMX25519_AES128GCM_SHA256_Ed25519 (KeyPackageData kpData) @?= ref --- TODO -testVerifyMLSPlainTextWithKey :: IO () -testVerifyMLSPlainTextWithKey = pure () - testRemoveProposalMessageSignature :: IO () testRemoveProposalMessageSignature = withSystemTempDirectory "mls" $ \tmp -> do qcid <- do @@ -222,3 +299,9 @@ cli :: String -> FilePath -> [String] -> CreateProcess cli store tmp args = proc "mls-test-cli" $ ["--store", tmp (store <> ".db")] <> args + +randomIdentity :: IO ClientIdentity +randomIdentity = do + uid <- Id <$> UUID.nextRandom + c <- newClientId <$> randomIO + pure $ ClientIdentity (Domain "mls.example.com") uid c diff --git a/libs/wire-api/wire-api.cabal b/libs/wire-api/wire-api.cabal index 
d26801a28b..f70e2c36eb 100644 --- a/libs/wire-api/wire-api.cabal +++ b/libs/wire-api/wire-api.cabal @@ -687,6 +687,7 @@ test-suite wire-api-tests , process , proto-lens , QuickCheck + , random , saml2-web-sso , schema-profunctor , servant diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 60a08b1c17..50069d2b87 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -71,12 +71,6 @@ import Wire.API.MLS.Message import Wire.API.MLS.Serialisation import Wire.API.MLS.SubConversation --- TODO: --- [ ] restore deleted MLS unit tests --- [x] pass groupId and epoch to processProposal instead of the whole IncomingMessage --- [x] remove LWT in planMLSClientRemoval --- [x] restore unsupported proposal integration test - -- FUTUREWORK -- - Check that the capabilities of a leaf node in an add proposal contains all -- the required_capabilities of the group context. This would require fetching From 9f1483708a46f0925517837146ad3f1b65eb1811 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 2 May 2023 12:05:49 +0200 Subject: [PATCH 65/75] Add CHANGELOG entries --- changelog.d/1-api-changes/mls-upgrade | 7 +++++++ changelog.d/5-internal/key-package-mapping | 1 + services/brig/schema/src/V69_MLSKeyPackageRefMapping.hs | 1 + 3 files changed, 9 insertions(+) create mode 100644 changelog.d/1-api-changes/mls-upgrade create mode 100644 changelog.d/5-internal/key-package-mapping diff --git a/changelog.d/1-api-changes/mls-upgrade b/changelog.d/1-api-changes/mls-upgrade new file mode 100644 index 0000000000..de9bd3f4d8 --- /dev/null +++ b/changelog.d/1-api-changes/mls-upgrade @@ -0,0 +1,7 @@ +Switch to MLS draft 20. The following endpoints are affected by the change: + + - All endpoints with `message/mls` content type now expect and return draft-20 MLS structures. + - `POST /conversations` does not require `creator_client` anymore. 
+ - `POST /mls/commit-bundles` now expects a "stream" of MLS messages, i.e. a sequence of TLS-serialised messages, one after the other, in any order. Its protobuf interface has been removed. + - `POST /mls/welcome` has been removed. Welcome messages can now only be sent as part of a commit bundle. + - `POST /mls/message` does not accept commit messages anymore. All commit messages must be sent as part of a commit bundle. diff --git a/changelog.d/5-internal/key-package-mapping b/changelog.d/5-internal/key-package-mapping new file mode 100644 index 0000000000..e861208c19 --- /dev/null +++ b/changelog.d/5-internal/key-package-mapping @@ -0,0 +1 @@ +Brig does not perform key package ref mapping anymore. Claimed key packages are simply removed from the `mls_key_packages` table. The `mls_key_package_refs` table is now unused, and will be removed in the future. diff --git a/services/brig/schema/src/V69_MLSKeyPackageRefMapping.hs b/services/brig/schema/src/V69_MLSKeyPackageRefMapping.hs index 34c95d70e1..aae2b698ae 100644 --- a/services/brig/schema/src/V69_MLSKeyPackageRefMapping.hs +++ b/services/brig/schema/src/V69_MLSKeyPackageRefMapping.hs @@ -26,6 +26,7 @@ import Cassandra.Schema import Imports import Text.RawString.QQ +-- FUTUREWORK: remove this table migration :: Migration migration = Migration 69 "Add key package ref mapping" $ From 3287ad8bdadcdbb9fb6ffbf4735e212c937a55fa Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Tue, 2 May 2023 12:10:35 +0200 Subject: [PATCH 66/75] Document IndexMap and ClientMap --- services/galley/src/Galley/API/MLS/Types.hs | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/services/galley/src/Galley/API/MLS/Types.hs b/services/galley/src/Galley/API/MLS/Types.hs index 49591176ff..59cdbe327b 100644 --- a/services/galley/src/Galley/API/MLS/Types.hs +++ b/services/galley/src/Galley/API/MLS/Types.hs @@ -32,6 +32,15 @@ import Wire.API.MLS.Credential import Wire.API.MLS.LeafNode import Wire.API.MLS.SubConversation +-- | 
A map of leaf index to members. +-- +-- This is used to reconstruct client +-- identities from leaf indices in remove proposals, as well as to allocate new +-- indices for added clients. +-- +-- Note that clients that are in the process of being removed from a group +-- (i.e. there is a pending remove proposal for them) are included in this +-- mapping. newtype IndexMap = IndexMap {unIndexMap :: IntMap ClientIdentity} deriving (Eq, Show) deriving newtype (Semigroup, Monoid) @@ -58,6 +67,14 @@ imRemoveClient im idx = do cid <- imLookup im idx pure (cid, IndexMap . IntMap.delete (fromIntegral idx) $ unIndexMap im) +-- | A two-level map of users to clients to leaf indices. +-- +-- This is used to keep track of the state of an MLS group for e.g. propagating +-- a message to all the clients that are supposed to receive it. +-- +-- Note that clients that are in the process of being removed from a group +-- (i.e. there is a pending remove proposal for them) are __not__ included in +-- this mapping. type ClientMap = Map (Qualified UserId) (Map ClientId LeafIndex) mkClientMap :: [(Domain, UserId, ClientId, Int32, Bool)] -> ClientMap

From 99d1a5107b84e214fa1c88ff2bb6ee9e3d5f254e Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Wed, 3 May 2023 09:26:05 +0200 Subject: [PATCH 67/75] fixup! Restore unsupported proposal test --- services/galley/test/integration/API/MLS/Util.hs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/galley/test/integration/API/MLS/Util.hs b/services/galley/test/integration/API/MLS/Util.hs index 10d19b414c..542f3e6cd6 100644 --- a/services/galley/test/integration/API/MLS/Util.hs +++ b/services/galley/test/integration/API/MLS/Util.hs @@ -240,8 +240,7 @@ liftTest = MLSTest .
lift runMLSTest :: MLSTest a -> TestM a runMLSTest (MLSTest m) = - withSystemTempDirectory "mls" $ \_tmp -> do - let tmp = "/tmp/mls" + withSystemTempDirectory "mls" $ \tmp -> do saveRemovalKey (tmp "removal.key") evalStateT m From bb7817add0157f99dda5de3d20e47b3a06c56981 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Wed, 3 May 2023 10:01:43 +0200 Subject: [PATCH 68/75] Linter fix --- libs/wire-api/default.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/libs/wire-api/default.nix b/libs/wire-api/default.nix index 8981908490..59bfd69672 100644 --- a/libs/wire-api/default.nix +++ b/libs/wire-api/default.nix @@ -247,6 +247,7 @@ mkDerivation { process proto-lens QuickCheck + random saml2-web-sso schema-profunctor servant From 642b1ba0a1e74c3009e24531b804e8b4732cf554 Mon Sep 17 00:00:00 2001 From: Paolo Capriotti Date: Wed, 3 May 2023 10:53:01 +0200 Subject: [PATCH 69/75] fixup! Upgrade mls-test-cli in the nix environment --- nix/pkgs/mls-test-cli/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nix/pkgs/mls-test-cli/default.nix b/nix/pkgs/mls-test-cli/default.nix index 8565d22c35..ddbf9b342a 100644 --- a/nix/pkgs/mls-test-cli/default.nix +++ b/nix/pkgs/mls-test-cli/default.nix @@ -13,8 +13,8 @@ let src = fetchFromGitHub { owner = "wireapp"; repo = "mls-test-cli"; - rev = "f539bcc60ab3f7e2303742a37aa17b281b44bf3a"; - sha256 = "sha256-oyf+sot/aVnfoodecPGxTDxqNGk/KCX24LG7W9uP8mI="; + rev = "29109bd32cedae64bdd9a47ef373710fad477590"; + sha256 = "sha256-1GMiEMkzcKPOd5AsQkQTSMLDkNqy3yjCC03K20vyFVY="; }; cargoLockFile = builtins.toFile "cargo.lock" (builtins.readFile "${src}/Cargo.lock"); in rustPlatform.buildRustPackage rec { From 8d99e706c414b7cd17b10c719df94ca0840017d2 Mon Sep 17 00:00:00 2001 From: Stefan Matting Date: Wed, 3 May 2023 10:50:50 +0200 Subject: [PATCH 70/75] Fix: make git-add-cassandra-schema-impl lists to many keyspaces --- Makefile | 6 +- cassandra-schema.cql | 2899 +++++--------------------------- 
hack/bin/cassandra_dump_schema | 32 + 3 files changed, 459 insertions(+), 2478 deletions(-) create mode 100755 hack/bin/cassandra_dump_schema diff --git a/Makefile b/Makefile index dd49069100..8e7bb9c56c 100644 --- a/Makefile +++ b/Makefile @@ -225,11 +225,7 @@ git-add-cassandra-schema: db-migrate git-add-cassandra-schema-impl .PHONY: git-add-cassandra-schema-impl git-add-cassandra-schema-impl: - $(eval CASSANDRA_CONTAINER := $(shell docker ps | grep '/cassandra:' | perl -ne '/^(\S+)\s/ && print $$1')) - ( echo '-- automatically generated with `make git-add-cassandra-schema`'; \ - docker exec -i $(CASSANDRA_CONTAINER) /usr/bin/cqlsh -e "DESCRIBE schema;" ) \ - | sed "s/CREATE TABLE galley_test.member_client/-- NOTE: this table is unused. It was replaced by mls_group_member_client\nCREATE TABLE galley_test.member_client/g" \ - > ./cassandra-schema.cql + ./hack/bin/cassandra_dump_schema > ./cassandra-schema.cql git add ./cassandra-schema.cql .PHONY: cqlsh diff --git a/cassandra-schema.cql b/cassandra-schema.cql index 455ce6e185..6e8b8ea692 100644 --- a/cassandra-schema.cql +++ b/cassandra-schema.cql @@ -646,46 +646,17 @@ CREATE TABLE galley_test.user ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE KEYSPACE galley_test2 WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; - -CREATE TYPE galley_test2.permissions ( - self bigint, - copy bigint -); - -CREATE TYPE galley_test2.pubkey ( - typ int, - size int, - pem blob -); - -CREATE TABLE galley_test2.meta ( - id int, - version int, - date timestamp, - descr text, - PRIMARY KEY (id, version) -) WITH CLUSTERING ORDER BY (version ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 
'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; +CREATE KEYSPACE gundeck_test WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; -CREATE TABLE galley_test2.team_conv ( - team uuid, - conv uuid, - PRIMARY KEY (team, conv) -) WITH CLUSTERING ORDER BY (conv ASC) +CREATE TABLE gundeck_test.push ( + ptoken text, + app text, + transport int, + client text, + connection blob, + usr uuid, + PRIMARY KEY (ptoken, app, transport) +) WITH CLUSTERING ORDER BY (app ASC, transport ASC) AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -701,51 +672,29 @@ CREATE TABLE galley_test2.team_conv ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE galley_test2.user_team ( +CREATE TABLE gundeck_test.notifications ( user uuid, - team uuid, - PRIMARY KEY (user, team) -) WITH CLUSTERING ORDER BY (team ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.service ( - provider uuid, - id uuid, - auth_token ascii, - base_url blob, - enabled boolean, - 
fingerprints set, - PRIMARY KEY (provider, id) + id timeuuid, + clients set, + payload blob, + PRIMARY KEY (user, id) ) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.01 + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.TimeWindowCompactionStrategy', 'compaction_window_size': '1', 'compaction_window_unit': 'DAYS', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 + AND gc_grace_seconds = 0 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE galley_test2.data_migration ( +CREATE TABLE gundeck_test.meta ( id int, version int, date timestamp, @@ -767,69 +716,16 @@ CREATE TABLE galley_test2.data_migration ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE galley_test2.team_features ( - team_id uuid PRIMARY KEY, - app_lock_enforce int, - app_lock_inactivity_timeout_secs int, - app_lock_status int, - conference_calling int, - digital_signatures int, - expose_invitation_urls_to_team_admin int, - file_sharing int, - file_sharing_lock_status int, - guest_links_lock_status int, - guest_links_status int, - legalhold_status int, - mls_allowed_ciphersuites set, - mls_default_ciphersuite int, - mls_default_protocol int, - mls_e2eid_lock_status int, - mls_e2eid_status int, - mls_e2eid_ver_exp timestamp, - mls_protocol_toggle_users set, - mls_status int, - outlook_cal_integration_lock_status int, - 
outlook_cal_integration_status int, - search_visibility_inbound_status int, - search_visibility_status int, - self_deleting_messages_lock_status int, - self_deleting_messages_status int, - self_deleting_messages_ttl int, - snd_factor_password_challenge_lock_status int, - snd_factor_password_challenge_status int, - sso_status int, - validate_saml_emails int -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.member ( - conv uuid, - user uuid, - conversation_role text, - hidden boolean, - hidden_ref text, - otr_archived boolean, - otr_archived_ref text, - otr_muted boolean, - otr_muted_ref text, - otr_muted_status int, - provider uuid, - service uuid, - status int, - PRIMARY KEY (conv, user) -) WITH CLUSTERING ORDER BY (user ASC) +CREATE TABLE gundeck_test.user_push ( + usr uuid, + ptoken text, + app text, + transport int, + arn text, + client text, + connection blob, + PRIMARY KEY (usr, ptoken, app, transport) +) WITH CLUSTERING ORDER BY (ptoken ASC, app ASC, transport ASC) AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -845,54 +741,24 @@ CREATE TABLE galley_test2.member ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE galley_test2.custom_backend ( - domain text PRIMARY KEY, - config_json_url blob, - webapp_welcome_url blob -) WITH bloom_filter_fp_chance = 0.01 - AND caching = 
{'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; +CREATE KEYSPACE brig_test WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; -CREATE TABLE galley_test2.user_remote_conv ( - user uuid, - conv_remote_domain text, - conv_remote_id uuid, - hidden boolean, - hidden_ref text, - otr_archived boolean, - otr_archived_ref text, - otr_muted_ref text, - otr_muted_status int, - PRIMARY KEY (user, conv_remote_domain, conv_remote_id) -) WITH CLUSTERING ORDER BY (conv_remote_domain ASC, conv_remote_id ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; +CREATE TYPE brig_test.asset ( + typ int, + key text, + size int +); -CREATE TABLE galley_test2.legalhold_whitelisted ( - team uuid PRIMARY KEY +CREATE TYPE brig_test.pubkey ( + typ int, + size int, + pem blob +); + +CREATE TABLE 
brig_test.team_invitation_info ( + code ascii PRIMARY KEY, + id uuid, + team uuid ) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -908,14 +774,10 @@ CREATE TABLE galley_test2.legalhold_whitelisted ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE galley_test2.member_remote_user ( - conv uuid, - user_remote_domain text, - user_remote_id uuid, - conversation_role text, - PRIMARY KEY (conv, user_remote_domain, user_remote_id) -) WITH CLUSTERING ORDER BY (user_remote_domain ASC, user_remote_id ASC) - AND bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.provider_keys ( + key text PRIMARY KEY, + provider uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -930,23 +792,20 @@ CREATE TABLE galley_test2.member_remote_user ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE galley_test2.team_member ( - team uuid, - user uuid, - invited_at timestamp, - invited_by uuid, - legalhold_status int, - perms frozen, - PRIMARY KEY (team, user) -) WITH CLUSTERING ORDER BY (user ASC) - AND bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.oauth_refresh_token ( + id uuid PRIMARY KEY, + client uuid, + created_at timestamp, + scope set, + user uuid +) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 + 
AND default_time_to_live = 14515200 AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 @@ -954,1854 +813,17 @@ CREATE TABLE galley_test2.team_member ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE galley_test2.team_notifications ( +CREATE TABLE brig_test.team_invitation_email ( + email text, team uuid, - id timeuuid, - payload blob, - PRIMARY KEY (team, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.legalhold_pending_prekeys ( - user uuid, - key int, - data text, - PRIMARY KEY (user, key) -) WITH CLUSTERING ORDER BY (key ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.group_id_conv_id ( - group_id blob PRIMARY KEY, - conv_id uuid, - domain text, - 
subconv_id text -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.member_client ( - conv uuid, - user_domain text, - user uuid, - client text, - key_package_ref blob, - PRIMARY KEY (conv, user_domain, user, client) -) WITH CLUSTERING ORDER BY (user_domain ASC, user ASC, client ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.legalhold_service ( - team_id uuid PRIMARY KEY, - auth_token ascii, - base_url blob, - fingerprint blob, - pubkey pubkey -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = 
{'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.conversation_codes ( - key ascii, - scope int, - conversation uuid, - value ascii, - PRIMARY KEY (key, scope) -) WITH CLUSTERING ORDER BY (scope ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.mls_group_member_client ( - group_id blob, - user_domain text, - user uuid, - client text, - key_package_ref blob, - PRIMARY KEY (group_id, user_domain, user, client) -) WITH CLUSTERING ORDER BY (user_domain ASC, user ASC, client ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND 
memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.clients ( - user uuid PRIMARY KEY, - clients set -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.conversation ( - conv uuid PRIMARY KEY, - access set, - access_role int, - access_roles_v2 set, - cipher_suite int, - creator uuid, - deleted boolean, - epoch bigint, - group_id blob, - message_timer bigint, - name text, - protocol int, - public_group_state blob, - receipt_mode int, - team uuid, - type int -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.mls_commit_locks ( - group_id blob, - epoch bigint, - PRIMARY KEY (group_id, epoch) -) WITH CLUSTERING ORDER BY (epoch ASC) - AND bloom_filter_fp_chance = 0.01 - AND 
caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.subconversation ( - conv_id uuid, - subconv_id text, - cipher_suite int, - epoch bigint, - group_id blob, - public_group_state blob, - PRIMARY KEY (conv_id, subconv_id) -) WITH CLUSTERING ORDER BY (subconv_id ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.team ( - team uuid PRIMARY KEY, - binding boolean, - creator uuid, - deleted boolean, - icon text, - icon_key text, - name text, - search_visibility int, - splash_screen text, - status int -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = 
{'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.billing_team_member ( - team uuid, - user uuid, - PRIMARY KEY (team, user) -) WITH CLUSTERING ORDER BY (user ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.mls_proposal_refs ( - group_id blob, - epoch bigint, - ref blob, - origin int, - proposal blob, - PRIMARY KEY (group_id, epoch, ref) -) WITH CLUSTERING ORDER BY (epoch ASC, ref ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND 
min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE galley_test2.user ( - user uuid, - conv uuid, - PRIMARY KEY (user, conv) -) WITH CLUSTERING ORDER BY (conv ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE KEYSPACE gundeck_test WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; - -CREATE TABLE gundeck_test.push ( - ptoken text, - app text, - transport int, - client text, - connection blob, - usr uuid, - PRIMARY KEY (ptoken, app, transport) -) WITH CLUSTERING ORDER BY (app ASC, transport ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE gundeck_test.notifications ( - user uuid, - id timeuuid, - clients set, - payload blob, - PRIMARY KEY (user, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND 
bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.TimeWindowCompactionStrategy', 'compaction_window_size': '1', 'compaction_window_unit': 'DAYS', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 0 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE gundeck_test.meta ( - id int, - version int, - date timestamp, - descr text, - PRIMARY KEY (id, version) -) WITH CLUSTERING ORDER BY (version ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE gundeck_test.user_push ( - usr uuid, - ptoken text, - app text, - transport int, - arn text, - client text, - connection blob, - PRIMARY KEY (usr, ptoken, app, transport) -) WITH CLUSTERING ORDER BY (ptoken ASC, app ASC, transport ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 
'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE KEYSPACE brig_test2 WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; - -CREATE TYPE brig_test2.asset ( - typ int, - key text, - size int -); - -CREATE TYPE brig_test2.pubkey ( - typ int, - size int, - pem blob -); - -CREATE TABLE brig_test2.team_invitation_info ( - code ascii PRIMARY KEY, - id uuid, - team uuid -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.rich_info ( - user uuid PRIMARY KEY, - json blob -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - 
AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.user_keys_hash ( - key blob PRIMARY KEY, - key_type int, - user uuid -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.service_tag ( - bucket int, - tag bigint, - name text, - service uuid, - provider uuid, - PRIMARY KEY ((bucket, tag), name, service) -) WITH CLUSTERING ORDER BY (name ASC, service ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.login_codes ( - user uuid PRIMARY KEY, - code text, - retries int, - timeout timestamp -) WITH bloom_filter_fp_chance = 0.01 - AND caching = 
{'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.unique_claims ( - value text PRIMARY KEY, - claims set -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 0 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.user_cookies ( - user uuid, - expires timestamp, - id bigint, - created timestamp, - label text, - succ_id bigint, - type int, - PRIMARY KEY (user, expires, id) -) WITH CLUSTERING ORDER BY (expires ASC, id ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 
864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.mls_key_packages ( - user uuid, - client text, - ref blob, - data blob, - PRIMARY KEY ((user, client), ref) -) WITH CLUSTERING ORDER BY (ref ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.mls_key_package_refs ( - ref blob PRIMARY KEY, - client text, - conv uuid, - conv_domain text, - domain text, - user uuid -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.excluded_phones ( - prefix text PRIMARY KEY, - comment text -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 
'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.codes ( - user uuid, - scope int, - code text, - retries int, - PRIMARY KEY (user, scope) -) WITH CLUSTERING ORDER BY (scope ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.user_handle ( - handle text PRIMARY KEY, - user uuid -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND 
read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.service ( - provider uuid, - id uuid, - assets list>, - auth_tokens list, - base_url blob, - descr text, - enabled boolean, - fingerprints list, - name text, - pubkeys list>, - summary text, - tags set, - PRIMARY KEY (provider, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.team_invitation_email ( - email text, - team uuid, - code ascii, - invitation uuid, - PRIMARY KEY (email, team) -) WITH CLUSTERING ORDER BY (team ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.invitation_info ( - code ascii PRIMARY KEY, - id uuid, - inviter uuid -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 
'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.service_whitelist ( - team uuid, - provider uuid, - service uuid, - PRIMARY KEY (team, provider, service) -) WITH CLUSTERING ORDER BY (provider ASC, service ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.provider ( - id uuid PRIMARY KEY, - descr text, - email text, - name text, - password blob, - url blob -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND 
default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.user_keys ( - key text PRIMARY KEY, - user uuid -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.mls_public_keys ( - user uuid, - client text, - sig_scheme text, - key blob, - PRIMARY KEY (user, client, sig_scheme) -) WITH CLUSTERING ORDER BY (client ASC, sig_scheme ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.invitee_info ( - invitee uuid PRIMARY KEY, - conv uuid, - inviter uuid -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = 
{'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.nonce ( - user uuid, - key text, - nonce uuid, - PRIMARY KEY (user, key) -) WITH CLUSTERING ORDER BY (key ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 300 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.provider_keys ( - key text PRIMARY KEY, - provider uuid -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = 
'99PERCENTILE'; - -CREATE TABLE brig_test2.service_team ( - provider uuid, - service uuid, - team uuid, - user uuid, - conv uuid, - PRIMARY KEY ((provider, service), team, user) -) WITH CLUSTERING ORDER BY (team ASC, user ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.blacklist ( - key text PRIMARY KEY -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.service_whitelist_rev ( - provider uuid, - service uuid, - team uuid, - PRIMARY KEY ((provider, service), team) -) WITH CLUSTERING ORDER BY (team ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 
'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.team_invitation ( - team uuid, - id uuid, - code ascii, - created_at timestamp, - created_by uuid, - email text, - name text, - phone text, - role int, - PRIMARY KEY (team, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.user ( - id uuid PRIMARY KEY, - accent list, - accent_id int, - activated boolean, - assets list>, - country ascii, - email text, - email_unvalidated text, - expires timestamp, - feature_conference_calling int, - handle text, - language ascii, - managed_by int, - name text, - password blob, - phone text, - picture list, - provider uuid, - searchable boolean, - service uuid, - sso_id text, - status int, - team uuid -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 
'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.vcodes_throttle ( - key ascii, - scope int, - initial_delay int, - PRIMARY KEY (key, scope) -) WITH CLUSTERING ORDER BY (scope ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.properties ( - user uuid, - key ascii, - value blob, - PRIMARY KEY (user, key) -) WITH CLUSTERING ORDER BY (key ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND 
memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.service_user ( - provider uuid, - service uuid, - user uuid, - conv uuid, - team uuid, - PRIMARY KEY ((provider, service), user) -) WITH CLUSTERING ORDER BY (user ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.prekeys ( - user uuid, - client text, - key int, - data text, - PRIMARY KEY (user, client, key) -) WITH CLUSTERING ORDER BY (client ASC, key ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.password_reset ( - key ascii PRIMARY KEY, - code ascii, - retries int, - timeout timestamp, - user uuid -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 
'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.clients ( - user uuid, - client text, - capabilities set, - class int, - cookie text, - ip inet, - label text, - lat double, - lon double, - model text, - tstamp timestamp, - type int, - PRIMARY KEY (user, client) -) WITH CLUSTERING ORDER BY (client ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.budget ( - key text PRIMARY KEY, - budget int -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 0 - AND 
max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.connection_remote ( - left uuid, - right_domain text, - right_user uuid, - conv_domain text, - conv_id uuid, - last_update timestamp, - status int, - PRIMARY KEY (left, right_domain, right_user) -) WITH CLUSTERING ORDER BY (right_domain ASC, right_user ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.users_pending_activation ( - user uuid PRIMARY KEY, - expires_at timestamp -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.connection ( - left uuid, - right uuid, - conv uuid, - last_update timestamp, - message text, - status int, - PRIMARY KEY 
(left, right) -) WITH CLUSTERING ORDER BY (right ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; -CREATE INDEX conn_status ON brig_test2.connection (status); - -CREATE TABLE brig_test2.meta ( - id int, - version int, - date timestamp, - descr text, - PRIMARY KEY (id, version) -) WITH CLUSTERING ORDER BY (version ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.invitation ( - inviter uuid, - id uuid, - code ascii, - created_at timestamp, - email text, - name text, - phone text, - PRIMARY KEY (inviter, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 
'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.activation_keys ( - key ascii PRIMARY KEY, - challenge ascii, - code ascii, - key_text text, - key_type ascii, - retries int, - user uuid -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.vcodes ( - key ascii, - scope int, - account uuid, - email text, - phone text, - retries int, - value ascii, - PRIMARY KEY (key, scope) -) WITH CLUSTERING ORDER BY (scope ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 0 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - 
AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test2.service_prefix ( - prefix text, - name text, - service uuid, - provider uuid, - PRIMARY KEY (prefix, name, service) -) WITH CLUSTERING ORDER BY (name ASC, service ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE KEYSPACE brig_test WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; - -CREATE TYPE brig_test.asset ( - typ int, - key text, - size int -); - -CREATE TYPE brig_test.pubkey ( - typ int, - size int, - pem blob -); - -CREATE TABLE brig_test.team_invitation_info ( - code ascii PRIMARY KEY, - id uuid, - team uuid -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.provider_keys ( - key text PRIMARY KEY, - 
provider uuid -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.oauth_refresh_token ( - id uuid PRIMARY KEY, - client uuid, - created_at timestamp, - scope set, - user uuid -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 14515200 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.team_invitation_email ( - email text, - team uuid, - code ascii, - invitation uuid, - PRIMARY KEY (email, team) -) WITH CLUSTERING ORDER BY (team ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - 
AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.rich_info ( - user uuid PRIMARY KEY, - json blob -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.user_keys_hash ( - key blob PRIMARY KEY, - key_type int, - user uuid -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.service_tag ( - bucket int, - tag bigint, - name text, - service uuid, - provider uuid, - PRIMARY KEY ((bucket, tag), name, service) -) WITH CLUSTERING ORDER BY (name ASC, service ASC) - AND 
bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.meta ( - id int, - version int, - date timestamp, - descr text, - PRIMARY KEY (id, version) -) WITH CLUSTERING ORDER BY (version ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.unique_claims ( - value text PRIMARY KEY, - claims set -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 0 - AND max_index_interval = 
2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.user_cookies ( - user uuid, - expires timestamp, - id bigint, - created timestamp, - label text, - succ_id bigint, - type int, - PRIMARY KEY (user, expires, id) -) WITH CLUSTERING ORDER BY (expires ASC, id ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.mls_key_packages ( - user uuid, - client text, - ref blob, - data blob, - PRIMARY KEY ((user, client), ref) -) WITH CLUSTERING ORDER BY (ref ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.mls_key_package_refs ( - ref blob PRIMARY KEY, - client text, - conv uuid, - conv_domain text, - domain text, - user uuid -) WITH bloom_filter_fp_chance = 0.1 - AND caching 
= {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.excluded_phones ( - prefix text PRIMARY KEY, - comment text -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.codes ( - user uuid, - scope int, - code text, - retries int, - PRIMARY KEY (user, scope) -) WITH CLUSTERING ORDER BY (scope ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND 
max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.user_handle ( - handle text PRIMARY KEY, - user uuid -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.service ( - provider uuid, - id uuid, - assets list>, - auth_tokens list, - base_url blob, - descr text, - enabled boolean, - fingerprints list, - name text, - pubkeys list>, - summary text, - tags set, - PRIMARY KEY (provider, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.oauth_user_refresh_token ( - user uuid, - token_id uuid, - PRIMARY KEY (user, token_id) -) WITH CLUSTERING ORDER BY (token_id ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = 
{'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 14515200 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.invitation_info ( - code ascii PRIMARY KEY, - id uuid, - inviter uuid -) WITH bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.service_whitelist ( - team uuid, - provider uuid, - service uuid, - PRIMARY KEY (team, provider, service) -) WITH CLUSTERING ORDER BY (provider ASC, service ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND 
dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.provider ( - id uuid PRIMARY KEY, - descr text, - email text, - name text, - password blob, - url blob -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.user_keys ( - key text PRIMARY KEY, - user uuid -) WITH bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.mls_public_keys ( - user uuid, - client text, - sig_scheme text, - key blob, - PRIMARY KEY (user, client, sig_scheme) -) WITH CLUSTERING ORDER BY (client ASC, sig_scheme ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 
'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE brig_test.invitee_info ( - invitee uuid PRIMARY KEY, - conv uuid, - inviter uuid -) WITH bloom_filter_fp_chance = 0.1 + code ascii, + invitation uuid, + PRIMARY KEY (email, team) +) WITH CLUSTERING ORDER BY (team ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -2813,20 +835,17 @@ CREATE TABLE brig_test.invitee_info ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.nonce ( - user uuid, - key text, - nonce uuid, - PRIMARY KEY (user, key) -) WITH CLUSTERING ORDER BY (key ASC) - AND bloom_filter_fp_chance = 0.01 +CREATE TABLE brig_test.rich_info ( + user uuid PRIMARY KEY, + json blob +) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 
'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 300 + AND default_time_to_live = 0 AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 @@ -2834,15 +853,14 @@ CREATE TABLE brig_test.nonce ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.login_codes ( - user uuid PRIMARY KEY, - code text, - retries int, - timeout timestamp -) WITH bloom_filter_fp_chance = 0.01 +CREATE TABLE brig_test.user_keys_hash ( + key blob PRIMARY KEY, + key_type int, + user uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -2854,15 +872,18 @@ CREATE TABLE brig_test.login_codes ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.oauth_client ( - id uuid PRIMARY KEY, +CREATE TABLE brig_test.service_tag ( + bucket int, + tag bigint, name text, - redirect_uri blob, - secret blob -) WITH bloom_filter_fp_chance = 0.01 + service uuid, + provider uuid, + PRIMARY KEY ((bucket, tag), name, service) +) WITH CLUSTERING ORDER BY (name ASC, service ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = 
{'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -2874,14 +895,13 @@ CREATE TABLE brig_test.oauth_client ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.service_team ( - provider uuid, - service uuid, - team uuid, - user uuid, - conv uuid, - PRIMARY KEY ((provider, service), team, user) -) WITH CLUSTERING ORDER BY (team ASC, user ASC) +CREATE TABLE brig_test.meta ( + id int, + version int, + date timestamp, + descr text, + PRIMARY KEY (id, version) +) WITH CLUSTERING ORDER BY (version ASC) AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -2897,34 +917,35 @@ CREATE TABLE brig_test.service_team ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.invitation ( - inviter uuid, - id uuid, - code ascii, - created_at timestamp, - email text, - name text, - phone text, - PRIMARY KEY (inviter, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.01 +CREATE TABLE brig_test.unique_claims ( + value text PRIMARY KEY, + claims set +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 + AND gc_grace_seconds = 0 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE 
brig_test.blacklist ( - key text PRIMARY KEY -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.user_cookies ( + user uuid, + expires timestamp, + id bigint, + created timestamp, + label text, + succ_id bigint, + type int, + PRIMARY KEY (user, expires, id) +) WITH CLUSTERING ORDER BY (expires ASC, id ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -2939,16 +960,17 @@ CREATE TABLE brig_test.blacklist ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.service_whitelist_rev ( - provider uuid, - service uuid, - team uuid, - PRIMARY KEY ((provider, service), team) -) WITH CLUSTERING ORDER BY (team ASC) - AND bloom_filter_fp_chance = 0.01 +CREATE TABLE brig_test.mls_key_packages ( + user uuid, + client text, + ref blob, + data blob, + PRIMARY KEY ((user, client), ref) +) WITH CLUSTERING ORDER BY (ref ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -2960,22 +982,17 @@ CREATE TABLE brig_test.service_whitelist_rev ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.team_invitation ( - team uuid, - id uuid, - code ascii, - created_at timestamp, - created_by uuid, - email text, - name text, - phone text, - role int, - PRIMARY KEY (team, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.01 +CREATE TABLE 
brig_test.mls_key_package_refs ( + ref blob PRIMARY KEY, + client text, + conv uuid, + conv_domain text, + domain text, + user uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -2987,34 +1004,13 @@ CREATE TABLE brig_test.team_invitation ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.user ( - id uuid PRIMARY KEY, - accent list, - accent_id int, - activated boolean, - assets list>, - country ascii, - email text, - email_unvalidated text, - expires timestamp, - feature_conference_calling int, - handle text, - language ascii, - managed_by int, - name text, - password blob, - phone text, - picture list, - provider uuid, - searchable boolean, - service uuid, - sso_id text, - status int, - team uuid -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.excluded_phones ( + prefix text PRIMARY KEY, + comment text +) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -3026,11 +1022,12 @@ CREATE TABLE brig_test.user ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE 
TABLE brig_test.vcodes_throttle ( - key ascii, +CREATE TABLE brig_test.codes ( + user uuid, scope int, - initial_delay int, - PRIMARY KEY (key, scope) + code text, + retries int, + PRIMARY KEY (user, scope) ) WITH CLUSTERING ORDER BY (scope ASC) AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} @@ -3047,16 +1044,13 @@ CREATE TABLE brig_test.vcodes_throttle ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.properties ( - user uuid, - key ascii, - value blob, - PRIMARY KEY (user, key) -) WITH CLUSTERING ORDER BY (key ASC) - AND bloom_filter_fp_chance = 0.01 +CREATE TABLE brig_test.user_handle ( + handle text PRIMARY KEY, + user uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -3068,18 +1062,25 @@ CREATE TABLE brig_test.properties ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.service_user ( +CREATE TABLE brig_test.service ( provider uuid, - service uuid, - user uuid, - conv uuid, - team uuid, - PRIMARY KEY ((provider, service), user) -) WITH CLUSTERING ORDER BY (user ASC) - AND bloom_filter_fp_chance = 0.01 + id uuid, + assets list>, + auth_tokens list, + base_url blob, + descr text, + enabled boolean, + fingerprints list, + name text, + pubkeys list>, + summary text, + tags set, + PRIMARY KEY (provider, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND 
compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -3091,21 +1092,19 @@ CREATE TABLE brig_test.service_user ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.prekeys ( +CREATE TABLE brig_test.oauth_user_refresh_token ( user uuid, - client text, - key int, - data text, - PRIMARY KEY (user, client, key) -) WITH CLUSTERING ORDER BY (client ASC, key ASC) + token_id uuid, + PRIMARY KEY (user, token_id) +) WITH CLUSTERING ORDER BY (token_id ASC) AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 + AND default_time_to_live = 14515200 AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 @@ -3113,13 +1112,10 @@ CREATE TABLE brig_test.prekeys ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.oauth_auth_code ( +CREATE TABLE brig_test.invitation_info ( code ascii PRIMARY KEY, - client uuid, - code_challenge blob, - redirect_uri blob, - scope set, - user uuid + id uuid, + inviter uuid ) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -3127,7 +1123,7 @@ 
CREATE TABLE brig_test.oauth_auth_code ( AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 300 + AND default_time_to_live = 0 AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 @@ -3135,25 +1131,16 @@ CREATE TABLE brig_test.oauth_auth_code ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.clients ( - user uuid, - client text, - capabilities set, - class int, - cookie text, - ip inet, - label text, - lat double, - lon double, - model text, - tstamp timestamp, - type int, - PRIMARY KEY (user, client) -) WITH CLUSTERING ORDER BY (client ASC) +CREATE TABLE brig_test.service_whitelist ( + team uuid, + provider uuid, + service uuid, + PRIMARY KEY (team, provider, service) +) WITH CLUSTERING ORDER BY (provider ASC, service ASC) AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -3165,9 +1152,13 @@ CREATE TABLE brig_test.clients ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.budget ( - key text PRIMARY KEY, - budget int +CREATE TABLE brig_test.provider ( + id uuid PRIMARY KEY, + descr text, + email text, + name text, + password blob, + url blob ) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -3176,24 +1167,17 @@ CREATE TABLE brig_test.budget ( AND crc_check_chance = 
1.0 AND dclocal_read_repair_chance = 0.1 AND default_time_to_live = 0 - AND gc_grace_seconds = 0 + AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.connection_remote ( - left uuid, - right_domain text, - right_user uuid, - conv_domain text, - conv_id uuid, - last_update timestamp, - status int, - PRIMARY KEY (left, right_domain, right_user) -) WITH CLUSTERING ORDER BY (right_domain ASC, right_user ASC) - AND bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.user_keys ( + key text PRIMARY KEY, + user uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -3208,13 +1192,17 @@ CREATE TABLE brig_test.connection_remote ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.users_pending_activation ( - user uuid PRIMARY KEY, - expires_at timestamp -) WITH bloom_filter_fp_chance = 0.01 +CREATE TABLE brig_test.mls_public_keys ( + user uuid, + client text, + sig_scheme text, + key blob, + PRIMARY KEY (user, client, sig_scheme) +) WITH CLUSTERING ORDER BY (client ASC, sig_scheme ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -3226,16 +1214,11 @@ CREATE TABLE brig_test.users_pending_activation ( AND read_repair_chance = 0.0 AND speculative_retry 
= '99PERCENTILE'; -CREATE TABLE brig_test.connection ( - left uuid, - right uuid, +CREATE TABLE brig_test.invitee_info ( + invitee uuid PRIMARY KEY, conv uuid, - last_update timestamp, - message text, - status int, - PRIMARY KEY (left, right) -) WITH CLUSTERING ORDER BY (right ASC) - AND bloom_filter_fp_chance = 0.1 + inviter uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -3249,22 +1232,21 @@ CREATE TABLE brig_test.connection ( AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE INDEX conn_status ON brig_test.connection (status); -CREATE TABLE brig_test.password_reset ( - key ascii PRIMARY KEY, - code ascii, - retries int, - timeout timestamp, - user uuid -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.nonce ( + user uuid, + key text, + nonce uuid, + PRIMARY KEY (user, key) +) WITH CLUSTERING ORDER BY (key ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 + AND default_time_to_live = 300 AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 @@ -3272,18 +1254,15 @@ CREATE TABLE brig_test.password_reset ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.activation_keys ( - key ascii PRIMARY KEY, - challenge ascii, - code ascii, - key_text text, - key_type 
ascii, +CREATE TABLE brig_test.login_codes ( + user uuid PRIMARY KEY, + code text, retries int, - user uuid -) WITH bloom_filter_fp_chance = 0.1 + timeout timestamp +) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -3295,42 +1274,38 @@ CREATE TABLE brig_test.activation_keys ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.vcodes ( - key ascii, - scope int, - account uuid, - email text, - phone text, - retries int, - value ascii, - PRIMARY KEY (key, scope) -) WITH CLUSTERING ORDER BY (scope ASC) - AND bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.oauth_client ( + id uuid PRIMARY KEY, + name text, + redirect_uri blob, + secret blob +) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 AND default_time_to_live = 0 - AND gc_grace_seconds = 0 + AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE brig_test.service_prefix ( - prefix text, - name 
text, - service uuid, +CREATE TABLE brig_test.service_team ( provider uuid, - PRIMARY KEY (prefix, name, service) -) WITH CLUSTERING ORDER BY (name ASC, service ASC) - AND bloom_filter_fp_chance = 0.1 + service uuid, + team uuid, + user uuid, + conv uuid, + PRIMARY KEY ((provider, service), team, user) +) WITH CLUSTERING ORDER BY (team ASC, user ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -3342,12 +1317,17 @@ CREATE TABLE brig_test.service_prefix ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE KEYSPACE spar_test2 WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; - -CREATE TABLE spar_test2.bind_cookie ( - cookie text PRIMARY KEY, - session_owner uuid -) WITH bloom_filter_fp_chance = 0.01 +CREATE TABLE brig_test.invitation ( + inviter uuid, + id uuid, + code ascii, + created_at timestamp, + email text, + name text, + phone text, + PRIMARY KEY (inviter, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} @@ -3362,14 +1342,9 @@ CREATE TABLE spar_test2.bind_cookie ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.user_v2 ( - issuer text, - normalized_uname_id text, - sso_id text, - uid uuid, - PRIMARY KEY (issuer, normalized_uname_id) 
-) WITH CLUSTERING ORDER BY (normalized_uname_id ASC) - AND bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.blacklist ( + key text PRIMARY KEY +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -3384,13 +1359,12 @@ CREATE TABLE spar_test2.user_v2 ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.data_migration ( - id int, - version int, - date timestamp, - descr text, - PRIMARY KEY (id, version) -) WITH CLUSTERING ORDER BY (version ASC) +CREATE TABLE brig_test.service_whitelist_rev ( + provider uuid, + service uuid, + team uuid, + PRIMARY KEY ((provider, service), team) +) WITH CLUSTERING ORDER BY (team ASC) AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -3406,13 +1380,22 @@ CREATE TABLE spar_test2.data_migration ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.authresp ( - resp text PRIMARY KEY, - end_of_life timestamp -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.team_invitation ( + team uuid, + id uuid, + code ascii, + created_at timestamp, + created_by uuid, + email text, + name text, + phone text, + role int, + PRIMARY KEY (team, id) +) WITH CLUSTERING ORDER BY (id ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -3424,9 +1407,30 @@ CREATE TABLE 
spar_test2.authresp ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.idp_raw_metadata ( +CREATE TABLE brig_test.user ( id uuid PRIMARY KEY, - metadata text + accent list, + accent_id int, + activated boolean, + assets list>, + country ascii, + email text, + email_unvalidated text, + expires timestamp, + feature_conference_calling int, + handle text, + language ascii, + managed_by int, + name text, + password blob, + phone text, + picture list, + provider uuid, + searchable boolean, + service uuid, + sso_id text, + status int, + team uuid ) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -3442,13 +1446,16 @@ CREATE TABLE spar_test2.idp_raw_metadata ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.issuer_idp ( - issuer text PRIMARY KEY, - idp uuid -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.vcodes_throttle ( + key ascii, + scope int, + initial_delay int, + PRIMARY KEY (key, scope) +) WITH CLUSTERING ORDER BY (scope ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -3460,21 +1467,39 @@ CREATE TABLE spar_test2.issuer_idp ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.idp ( - idp uuid PRIMARY KEY, - api_version int, - extra_public_keys list, - handle text, - issuer text, - old_issuers list, - public_key blob, - replaced_by uuid, - request_uri text, - team uuid -) WITH 
bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.properties ( + user uuid, + key ascii, + value blob, + PRIMARY KEY (user, key) +) WITH CLUSTERING ORDER BY (key ASC) + AND bloom_filter_fp_chance = 0.01 + AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} + AND comment = '' + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} + AND crc_check_chance = 1.0 + AND dclocal_read_repair_chance = 0.1 + AND default_time_to_live = 0 + AND gc_grace_seconds = 864000 + AND max_index_interval = 2048 + AND memtable_flush_period_in_ms = 0 + AND min_index_interval = 128 + AND read_repair_chance = 0.0 + AND speculative_retry = '99PERCENTILE'; + +CREATE TABLE brig_test.service_user ( + provider uuid, + service uuid, + user uuid, + conv uuid, + team uuid, + PRIMARY KEY ((provider, service), user) +) WITH CLUSTERING ORDER BY (user ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -3486,12 +1511,14 @@ CREATE TABLE spar_test2.idp ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.default_idp ( - partition_key_always_default text, - idp uuid, - PRIMARY KEY (partition_key_always_default, idp) -) WITH CLUSTERING ORDER BY (idp ASC) - AND bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.prekeys ( + user uuid, + client text, + key int, + data text, + PRIMARY KEY (user, 
client, key) +) WITH CLUSTERING ORDER BY (client ASC, key ASC) + AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -3506,23 +1533,21 @@ CREATE TABLE spar_test2.default_idp ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.team_provisioning_by_team ( - team uuid, - id uuid, - created_at timestamp, - descr text, - idp uuid, - token_ text, - PRIMARY KEY (team, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.oauth_auth_code ( + code ascii PRIMARY KEY, + client uuid, + code_challenge blob, + redirect_uri blob, + scope set, + user uuid +) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 + AND default_time_to_live = 300 AND gc_grace_seconds = 864000 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 @@ -3530,17 +1555,25 @@ CREATE TABLE spar_test2.team_provisioning_by_team ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.meta ( - id int, - version int, - date timestamp, - descr text, - PRIMARY KEY (id, version) -) WITH CLUSTERING ORDER BY (version ASC) +CREATE TABLE brig_test.clients ( + user uuid, + client text, + capabilities set, + class int, + cookie text, + ip inet, + label text, + lat double, + lon double, + model text, + tstamp timestamp, + type int, + PRIMARY KEY 
(user, client) +) WITH CLUSTERING ORDER BY (client ASC) AND bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -3552,11 +1585,9 @@ CREATE TABLE spar_test2.meta ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.verdict ( - req text PRIMARY KEY, - format_con int, - format_mobile_error text, - format_mobile_success text +CREATE TABLE brig_test.budget ( + key text PRIMARY KEY, + budget int ) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -3565,17 +1596,24 @@ CREATE TABLE spar_test2.verdict ( AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 + AND gc_grace_seconds = 0 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.authreq ( - req text PRIMARY KEY, - end_of_life timestamp -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.connection_remote ( + left uuid, + right_domain text, + right_user uuid, + conv_domain text, + conv_id uuid, + last_update timestamp, + status int, + PRIMARY KEY (left, right_domain, right_user) +) WITH CLUSTERING ORDER BY (right_domain ASC, right_user ASC) + AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ 
-3590,17 +1628,13 @@ CREATE TABLE spar_test2.authreq ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.team_provisioning_by_token ( - token_ text PRIMARY KEY, - created_at timestamp, - descr text, - id uuid, - idp uuid, - team uuid -) WITH bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.users_pending_activation ( + user uuid PRIMARY KEY, + expires_at timestamp +) WITH bloom_filter_fp_chance = 0.01 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} + AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 @@ -3612,11 +1646,15 @@ CREATE TABLE spar_test2.team_provisioning_by_token ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.team_idp ( - team uuid, - idp uuid, - PRIMARY KEY (team, idp) -) WITH CLUSTERING ORDER BY (idp ASC) +CREATE TABLE brig_test.connection ( + left uuid, + right uuid, + conv uuid, + last_update timestamp, + message text, + status int, + PRIMARY KEY (left, right) +) WITH CLUSTERING ORDER BY (right ASC) AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -3631,14 +1669,15 @@ CREATE TABLE spar_test2.team_idp ( AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; +CREATE INDEX conn_status ON brig_test.connection (status); -CREATE TABLE spar_test2.issuer_idp_v2 ( - issuer text, - team uuid, - idp uuid, - PRIMARY KEY (issuer, team) -) WITH CLUSTERING ORDER BY (team ASC) - AND bloom_filter_fp_chance = 0.1 +CREATE TABLE brig_test.password_reset ( + key ascii PRIMARY KEY, + code ascii, + 
retries int, + timeout timestamp, + user uuid +) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} @@ -3653,10 +1692,14 @@ CREATE TABLE spar_test2.issuer_idp_v2 ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.scim_user_times ( - uid uuid PRIMARY KEY, - created_at timestamp, - last_updated_at timestamp +CREATE TABLE brig_test.activation_keys ( + key ascii PRIMARY KEY, + challenge ascii, + code ascii, + key_text text, + key_type ascii, + retries int, + user uuid ) WITH bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -3672,12 +1715,16 @@ CREATE TABLE spar_test2.scim_user_times ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.scim_external ( - team uuid, - external_id text, - user uuid, - PRIMARY KEY (team, external_id) -) WITH CLUSTERING ORDER BY (external_id ASC) +CREATE TABLE brig_test.vcodes ( + key ascii, + scope int, + account uuid, + email text, + phone text, + retries int, + value ascii, + PRIMARY KEY (key, scope) +) WITH CLUSTERING ORDER BY (scope ASC) AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -3686,19 +1733,20 @@ CREATE TABLE spar_test2.scim_external ( AND crc_check_chance = 1.0 AND dclocal_read_repair_chance = 0.1 AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 + AND gc_grace_seconds = 0 AND max_index_interval = 2048 AND memtable_flush_period_in_ms = 0 AND min_index_interval = 128 AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE TABLE spar_test2.user ( - issuer text, - sso_id text, - uid uuid, - PRIMARY KEY (issuer, sso_id) -) WITH CLUSTERING ORDER BY (sso_id ASC) +CREATE TABLE brig_test.service_prefix ( + prefix text, + name text, + service 
uuid, + provider uuid, + PRIMARY KEY (prefix, name, service) +) WITH CLUSTERING ORDER BY (name ASC, service ASC) AND bloom_filter_fp_chance = 0.1 AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} AND comment = '' @@ -4086,98 +2134,3 @@ CREATE TABLE spar_test.user ( AND read_repair_chance = 0.0 AND speculative_retry = '99PERCENTILE'; -CREATE KEYSPACE gundeck_test2 WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'} AND durable_writes = true; - -CREATE TABLE gundeck_test2.push ( - ptoken text, - app text, - transport int, - client text, - connection blob, - usr uuid, - PRIMARY KEY (ptoken, app, transport) -) WITH CLUSTERING ORDER BY (app ASC, transport ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE gundeck_test2.notifications ( - user uuid, - id timeuuid, - clients set, - payload blob, - PRIMARY KEY (user, id) -) WITH CLUSTERING ORDER BY (id ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.TimeWindowCompactionStrategy', 'compaction_window_size': '1', 'compaction_window_unit': 'DAYS', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND 
default_time_to_live = 0 - AND gc_grace_seconds = 0 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE gundeck_test2.meta ( - id int, - version int, - date timestamp, - descr text, - PRIMARY KEY (id, version) -) WITH CLUSTERING ORDER BY (version ASC) - AND bloom_filter_fp_chance = 0.01 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - -CREATE TABLE gundeck_test2.user_push ( - usr uuid, - ptoken text, - app text, - transport int, - arn text, - client text, - connection blob, - PRIMARY KEY (usr, ptoken, app, transport) -) WITH CLUSTERING ORDER BY (ptoken ASC, app ASC, transport ASC) - AND bloom_filter_fp_chance = 0.1 - AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'} - AND comment = '' - AND compaction = {'class': 'org.apache.cassandra.db.compaction.LeveledCompactionStrategy'} - AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'} - AND crc_check_chance = 1.0 - AND dclocal_read_repair_chance = 0.1 - AND default_time_to_live = 0 - AND gc_grace_seconds = 864000 - AND max_index_interval = 2048 - AND memtable_flush_period_in_ms = 0 - AND min_index_interval = 128 - AND read_repair_chance = 0.0 - AND speculative_retry = '99PERCENTILE'; - diff --git a/hack/bin/cassandra_dump_schema 
b/hack/bin/cassandra_dump_schema new file mode 100755 index 0000000000..624e4a0a18 --- /dev/null +++ b/hack/bin/cassandra_dump_schema @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 + +import subprocess +from subprocess import PIPE +from itertools import zip_longest +import re + +def run_cqlsh(container, expr): + p = subprocess.run(["docker", "exec", "-i", container, '/usr/bin/cqlsh', '-e', expr], stdout=PIPE, check=True).stdout.decode('utf8').strip() + return p + +def transpose(a): + return [x for col in zip_longest(*a, fillvalue='') for x in col] + +def main(): + container = subprocess.run(["docker", "ps", "--filter=name=cassandra", "--format={{.ID}}"], stdout=PIPE, check=True).stdout.decode('utf8').rstrip() + s = run_cqlsh(container, 'DESCRIBE keyspaces;') + + ks = [] + for line in s.splitlines(): + ks.append(re.split('\s+', line)) + + keyspaces = transpose(ks) + print("-- automatically generated with `make git-add-cassandra-schema`\n") + for keyspace in keyspaces: + if keyspace.endswith('_test'): + s = run_cqlsh(container, f'DESCRIBE keyspace {keyspace}') + print(s.replace('CREATE TABLE galley_test.member_client','-- NOTE: this table is unused. 
It was replaced by mls_group_member_client\nCREATE TABLE galley_test.member_client')) + print() + +if __name__ == '__main__': + main() From 482d7c0341c9470925c5960fbfb0d50d99d76763 Mon Sep 17 00:00:00 2001 From: Stefan Matting Date: Wed, 3 May 2023 11:27:33 +0200 Subject: [PATCH 71/75] postMLSMessageToLocalConv: return no events --- services/galley/src/Galley/API/MLS/Message.hs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Message.hs b/services/galley/src/Galley/API/MLS/Message.hs index 50069d2b87..0667468126 100644 --- a/services/galley/src/Galley/API/MLS/Message.hs +++ b/services/galley/src/Galley/API/MLS/Message.hs @@ -343,17 +343,17 @@ postMLSMessageToLocalConv qusr c con msg convOrSubId = do void $ getSenderIdentity qusr c sender lConvOrSub -- validate message - events <- case msg.content of + case msg.content of IncomingMessageContentPublic pub -> case pub.content of FramedContentCommit _commit -> throwS @'MLSUnsupportedMessage FramedContentApplicationData _ -> throwS @'MLSUnsupportedMessage FramedContentProposal prop -> - processProposal qusr lConvOrSub msg.groupId msg.epoch pub prop $> mempty - IncomingMessageContentPrivate -> pure mempty + processProposal qusr lConvOrSub msg.groupId msg.epoch pub prop + IncomingMessageContentPrivate -> pure () let cm = membersConvOrSub (tUnqualified lConvOrSub) unreachables <- propagateMessage qusr lConvOrSub con msg.rawMessage cm - pure (events, unreachables) + pure ([], unreachables) postMLSMessageToRemoteConv :: ( Members MLSMessageStaticErrors r, From 7f4ac30a2f5d7e318aa500dabababd31172b1cc0 Mon Sep 17 00:00:00 2001 From: Stefan Matting Date: Wed, 3 May 2023 11:44:35 +0200 Subject: [PATCH 72/75] Remove unused paExternalInit --- services/galley/src/Galley/API/MLS/Proposal.hs | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Proposal.hs b/services/galley/src/Galley/API/MLS/Proposal.hs 
index a838d96ea8..437e1cba42 100644 --- a/services/galley/src/Galley/API/MLS/Proposal.hs +++ b/services/galley/src/Galley/API/MLS/Proposal.hs @@ -27,7 +27,6 @@ module Galley.API.MLS.Proposal -- * Proposal actions paAddClient, paRemoveClient, - paExternalInitPresent, -- * Types ProposalAction (..), @@ -71,22 +70,18 @@ import Wire.API.Message data ProposalAction = ProposalAction { paAdd :: ClientMap, - paRemove :: ClientMap, - -- The backend does not process external init proposals, but still it needs - -- to know if a commit has one when processing external commits - paExternalInit :: Any + paRemove :: ClientMap } deriving (Show) instance Semigroup ProposalAction where - ProposalAction add1 rem1 init1 <> ProposalAction add2 rem2 init2 = + ProposalAction add1 rem1 <> ProposalAction add2 rem2 = ProposalAction (Map.unionWith mappend add1 add2) (Map.unionWith mappend rem1 rem2) - (init1 <> init2) instance Monoid ProposalAction where - mempty = ProposalAction mempty mempty mempty + mempty = ProposalAction mempty mempty paAddClient :: ClientIdentity -> LeafIndex -> ProposalAction paAddClient cid idx = mempty {paAdd = cmSingleton cid idx} @@ -94,9 +89,6 @@ paAddClient cid idx = mempty {paAdd = cmSingleton cid idx} paRemoveClient :: ClientIdentity -> LeafIndex -> ProposalAction paRemoveClient cid idx = mempty {paRemove = cmSingleton cid idx} -paExternalInitPresent :: ProposalAction -paExternalInitPresent = mempty {paExternalInit = Any True} - -- | This is used to sort proposals into the correct processing order, as defined by the spec data ProposalProcessingStage = ProposalProcessingStageExtensions @@ -232,10 +224,6 @@ applyProposal _mlsMeta _groupId (RemoveProposal idx) = do (cid, im') <- noteS @'MLSInvalidLeafNodeIndex $ imRemoveClient im idx put im' pure (paRemoveClient cid idx) -applyProposal _mlsMeta _groupId (ExternalInitProposal _) = - -- only record the fact there was an external init proposal, but do not - -- process it in any way. 
- pure paExternalInitPresent applyProposal _mlsMeta _groupId _ = pure mempty processProposal :: From 33927e1be5ca2b5766d16dd2a5aaa2342ce34a4a Mon Sep 17 00:00:00 2001 From: Akshay Mankar Date: Mon, 24 Apr 2023 13:04:27 +0200 Subject: [PATCH 73/75] Renew certificates for e2e integration tests (#3243) * Renew certificates for e2e integration tests * Document how to renew e2e integration test certs Co-authored-by: Igor Ranieri --- .../integration-test/conf/nginz/README.md | 7 +++ .../conf/nginz/integration-ca-key.pem | 50 +++++++++---------- .../conf/nginz/integration-ca.pem | 34 ++++++------- .../conf/nginz/integration-leaf-key.pem | 50 +++++++++---------- .../conf/nginz/integration-leaf.pem | 34 ++++++------- 5 files changed, 91 insertions(+), 84 deletions(-) create mode 100644 services/nginz/integration-test/conf/nginz/README.md diff --git a/services/nginz/integration-test/conf/nginz/README.md b/services/nginz/integration-test/conf/nginz/README.md new file mode 100644 index 0000000000..c8e81957c6 --- /dev/null +++ b/services/nginz/integration-test/conf/nginz/README.md @@ -0,0 +1,7 @@ +# How to regenerate certificates in this directory + +Run from this directory: + +```bash +../../../../../hack/bin/selfsigned.sh +``` diff --git a/services/nginz/integration-test/conf/nginz/integration-ca-key.pem b/services/nginz/integration-test/conf/nginz/integration-ca-key.pem index 961e87aa67..774b9d30c9 100644 --- a/services/nginz/integration-test/conf/nginz/integration-ca-key.pem +++ b/services/nginz/integration-test/conf/nginz/integration-ca-key.pem @@ -1,27 +1,27 @@ -----BEGIN RSA PRIVATE KEY----- -MIIEpQIBAAKCAQEApwf/2d2YraQDpCipPVtYR+7BNu47AgkD7kFvGhoxJhDP7CsU -VdpqU5gsVVo8kvhkh4k1tsJyuWWeKn6piNSXxUCFIc80KkUPgsYf5v+RBXr73Fdg -ezHQNhNi0dRZCh+YG/hN7pOX46+B0PyKwUEMTeUqizkmFU5tILPMMyDAGx1Bp2LB -oJi4u+48fzTDMaWSXnCVF04G9+A4LDzw0fPdDMgKLEiXJ8GPoPs0cNs6MJoFDgpe -gzy1mv7X7otmRVTaafZGd4TTo6lGC2VVSS5tpj4Qfz/PxyCLK7tf5033HNWEJzAw 
-6izRXp849VferHuYEbP+2lexNk9tl45BsFhkrwIDAQABAoIBAQCFkzYeSsJginuG -+iVttfEBhYPqo9V4qTEFhjqNS0jmwiclHMZkagkB1P4PO9yZRB9Q7H+SKiqI7STx -ot19WVYOHqzY/tUewJ/I2xyEJPkawuFLsmyr2IhD1nj+iKy0FdQU+huIoWukX6SX -Nn7YUWa/nHbLY+Z6v38x2deBQ72dcBtDcOh1vtUR3fVfsiX5uzCcfvNZAw4cCyB2 -j8ySDIiP10Ic81da3FIeCm8g2yp3DrnvTa77xsr0IfSykB3UcSrGqDwZxs9pS82Q -1fog//4xAfBYC9LEcnQrCvz2kqLSLICtjkgK+dlzgvY3rZMq9c/OY1nR7Wp2BIyp -kKB5AEnRAoGBANTM3fq4YGzUodf+Xla4MDvQFJsYjQuig/CJboQ7JSFZi2uLnSHX -+7JDiHtQd3uifYMhzSxXXKV82CK7SsJOQlIVoCZ5eTsyYGyAu1fUqfBvfHYN4Gbr -3QyZJE0Hut2rvn5DaT/dpgh7Uy9QWKhpAsmxzhKa/iADUTiNAO8pxxRFAoGBAMjw -iZV43XWLvzP90P5jANHuk9tR/B5cM9zK40aWglNsMlK9cUgW3ovohMzTFce/LQWy -zGZ1WZZcUUcR/pHot3fyjWKeJadZhSZ/7hN/0d/UDuFY5nQ8eGQoy2qrrtY+6MMU -Eiz09EFnKKA7hUoDnbhOH1hCKsfrOVse55RDkTZjAoGABrzRzm1mCCwXT7prDD3a -sRoefOajGJo1qTkAuckRnOOz6VzLRdYLzxIaUSU0E0MKzEsWru+5LDgus7LQZCSM -LwMmRfGUqA4pRWYyCE7gbo9pFmfMEhYnso1qu9Gh1gDpECBcRbxj1GLrOFVH6VUh -1Hb/ulET+LmCKdM1E110Qy0CgYEAimbDHSUGxHPg2pq0XMMsSWyegq3RjcfMIQPN -z0zTr0oSz1KUuCaoWo1pCvtJQS+4fvhMOTYS4rHreZw3T6CO3hs+rvJm1QGf6Iit -HtknYZfaN/TXprAP7Ez87xgZcJAcGmG0syp1Iqc/ID5e7D/ZXpzQkiXg+ZpXAyAi -OcjgOCkCgYEAmsCsqtPn5vgB+/vr0n28UsFS4Of9whlgEPYndNss3nAmVEohQJRg -QlBlJd2iDa7R0TrJZCuAwuqK7TxB/RoHL8UkryUt2nag39GYAyE+lfPM558/AWyt -9yyLQNfiJnqTC2Ne2j7EyicBLha4J9NoBeNE5UqLlzrH4LRJ3fRX9Ps= +MIIEpAIBAAKCAQEAoYyNk0aNoe2AYoWa4ey6P4LR4BxKGk0A9LeFiCP4tWqbU/aZ +DzDATytklxaQiDMDbZQboFngf5/X0S+pjSiZ+LSgIR30/g0yoDEubfUXvF+q+rEh +Om91OHnkwwNoSN1EK687N1nATFXd7YL6Lv2SOrMcyOCtqwnGFwRrH8MR3z87nL+H +vuot2ciXvyeJ3q4RG2G9t8UTjqo1jK/NJHyNZYSY4vGTGZTwGi1BCuNlizi6xzmI +Mh3HS/px/kihR7wLkQ7NpovqjfQVef3JwiJutrRYG6lJT9xXpNu2gKg8KKiZJUgb +gqnPWl+4IdRdZ/q/12Jsg9qAf8tbS+tQ2CnlLQIDAQABAoIBAQCJKkrm+me1Tm/M +tz4bh6FX3Z6Pl9V/YVRndA9n2YsJljvOXbn1wOH4FpLxChKr4gyOFMwkKUvJcRGQ +ptRia0/YcJzpoYLr1o7enwOaDxkZM218L7tT32D7E9wdjJ4WB/Ei2kUAKS9yYRHu +4V/FWD25o2zUTpiGeeT8lB7UuA9Lqg529dGlJcanlZjMe0Wj92ec1jjelERGuGdr +lujikHl8whZRwxCGC09WM48myWnsCVdJ1oqGhYM8nzqImsiMc10K6/8CmVrl3aXV 
+KrExPLtxCRK3pe5olyCLIkPn3OwSc/ZPSkxVQF4j/PwatqqHE98TQBi5bzKIF2JE +17+DBVxNAoGBAM4lR1WRAtXvAe6/jl5zYHr/v2D69o7v85PuXrnmSLK29h3ACSDM +svTsIkoPIZ/lotM8O/OpOHKWmbXH7MOIu9mRKQAKFlTKtw4xl36SPynegq1H5JBv +bd8N8pQtf8pLuh8qxZvZplBsg9HJHBPlbZo/dMQa8oYDI4BakMyYJEMbAoGBAMie +PyHPgI7RpE5GPXcl/rOxeMF++7qOsOX5XGUhoGoH2feYzj19V2/ptx4KdmD+M3NF +dT1ucmQKqocrE6U3sEMok3BmgajGoGOLQMPXsK18bs0VowI+mmt+uL2BwOr9hHPK +IuZrzwm9vtLYldBU3sdxGA1sLXPB2oUZalwCf7VXAoGAZo77X2GmtIKVRo98qBbk +sCzerMQOuGw+laFo9TnRf0AxT/nDUNMmUV3NbWT7yI45pLf5566Py8qLLHoLm/hB +5OsoJ8Hc/FBiJCieAzWFQTJXdxgmaYlWczuALSI5yo5ESc9AwtnUuXxTVKKmWmux +TKU3VX1GnU+gcPIdyfwDRnMCgYAWg8P8DGiWHqr86d8eDxKNoh42QQUJQ9hQhvK6 +mtKA886fffOvbPCyK52UboIokn69sg7dTRbjaVsH/mqfASfz8YrSc36brWb0pP0o +vX0jizJ4K7R2nQYBiGA9TGGVPcxunkHacED1C+ltikcN8WhrI6MaZoiXVCstAtQv +7Uvd0wKBgQDCC9xoSTr7kFiwp76f7dIBdxLKBiL1tZM/qJIP3lnX9TnLhBiHNxoR +4DbIF5yEdRRNBVfS4rJLa1zAAY3d5u4LENaZEvf7fmsjHTLEIf3gJVviHZSBMP6C +kSPQbfcNTNZaEt/40GAZzgjNiO0rTpsLLI4fGDiHeaMMBHEzAiXJmw== -----END RSA PRIVATE KEY----- diff --git a/services/nginz/integration-test/conf/nginz/integration-ca.pem b/services/nginz/integration-test/conf/nginz/integration-ca.pem index f9479d65a0..2aff84d758 100644 --- a/services/nginz/integration-test/conf/nginz/integration-ca.pem +++ b/services/nginz/integration-test/conf/nginz/integration-ca.pem @@ -1,19 +1,19 @@ -----BEGIN CERTIFICATE----- -MIIDAjCCAeqgAwIBAgIUBTz/WN3KPdXZnUyhrinjprCSy2QwDQYJKoZIhvcNAQEL -BQAwGTEXMBUGA1UEAxMOY2EuZXhhbXBsZS5jb20wHhcNMjIwNDIwMTMyMDAwWhcN -MjcwNDE5MTMyMDAwWjAZMRcwFQYDVQQDEw5jYS5leGFtcGxlLmNvbTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALfeLu+by+cSkuhiB2eJJpFb0OUTNBzT -YiNK5yfLhHhfQRUkWJQMn9+Zis+thgJDUOuiZ7A6OaDNGNRnlJL62Wz7OgpUFWdt -kdHvmRK+rfAbYeCTOjTWTRBbMrqmRX1WO6tqn6EBttIcS4ND+Bl0tjpf2i8JR+AV -37yHuj/zoBtWHtEFhkCs2vYS09KWuYYBaaj90QKt16f1+Mp3s6OUreB/YzxsCb7d -C4aPPKrloBcI/HZu71AYiQb6WPO1LjyMFMvpYz/ty6V+l69tupYIBJUyoZ+mrY2F -XemRd/Xv3HcJRCBrwx70gER5XNg/IO5vAuRQ9DZqsbEsZApArSbM9RMCAwEAAaNC 
-MEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFCiX -sxgN51cGwv39O8DGk6bVuL4aMA0GCSqGSIb3DQEBCwUAA4IBAQBvQU5dywshZbUp -8/MJI36hrI3IGsf9Asc9Yb3g9Zdc1npWY7F4Mtb6wsaQt1QUWgGcZ2Om6aYQu2iH -TN5a7D1Lxm99BgSLBWeGky/Wgl3XaGKV/2ch9n2eYyz1ukiOF1yvghsNovBvQF11 -nnHLTKZQLtEvawicYB/wdRJOiGp30Ze8DjOeoiPEHHolQa/a1DFlO58tPU1TAr+b -BLmxIEPP6BiIbZHZVQY8aosITMqvY1MCZKTtlXxzRZpxNfQNPYAVjA9D/UWfxpPS -b45eCIIQmctfL5smaY32QFuYsmqOH6OiVm7wm/hkGZCTqfumPR7MpJmJ4LYhpSC4 -IZ1eInXn +MIIDAjCCAeqgAwIBAgIUaq5Rk0z4WRqKc9dEtkxgVdL0LBIwDQYJKoZIhvcNAQEL +BQAwGTEXMBUGA1UEAxMOY2EuZXhhbXBsZS5jb20wHhcNMjMwNDI0MDkzMTAwWhcN +MjgwNDIyMDkzMTAwWjAZMRcwFQYDVQQDEw5jYS5leGFtcGxlLmNvbTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBAKGMjZNGjaHtgGKFmuHsuj+C0eAcShpN +APS3hYgj+LVqm1P2mQ8wwE8rZJcWkIgzA22UG6BZ4H+f19EvqY0omfi0oCEd9P4N +MqAxLm31F7xfqvqxITpvdTh55MMDaEjdRCuvOzdZwExV3e2C+i79kjqzHMjgrasJ +xhcEax/DEd8/O5y/h77qLdnIl78nid6uERthvbfFE46qNYyvzSR8jWWEmOLxkxmU +8BotQQrjZYs4usc5iDIdx0v6cf5IoUe8C5EOzaaL6o30FXn9ycIibra0WBupSU/c +V6TbtoCoPCiomSVIG4Kpz1pfuCHUXWf6v9dibIPagH/LW0vrUNgp5S0CAwEAAaNC +MEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFBuq +33I+JaC5KOsrFeHkzhBuFqtzMA0GCSqGSIb3DQEBCwUAA4IBAQBaOq1YyLjpMz+O +mxw0yRpROgPaPt0QMsSbUCeNXPrlMFi+7QarmKfz0EGoGJEfU8Eu22+mqnAC2tTO +iSLy89tlR21i0+x+0V+qedzZCQfMlm00SS29wzbXomeUunQxlHNuGuRzkzh7g80G ++wIJuIZRvs+qgGofd4yp2BGGQNOlNRhPmc0LP5DSB+snmIscx+sDnVUn7MWunH80 +Doj+CL6wSbP79hfJXeK5LxSBmAtQU8dpZlgNaRCO5TAU10xgzFNCKWbKJ7nf4wC5 +cMGhRWFYP3babARd42KWViRYLZ7bxTtNBnKOvo7AtQJ3YIOUwk1ofq3/PhLHDxiG +XWlMKqrV -----END CERTIFICATE----- diff --git a/services/nginz/integration-test/conf/nginz/integration-leaf-key.pem b/services/nginz/integration-test/conf/nginz/integration-leaf-key.pem index 1e25ca85b5..b1718af2d0 100644 --- a/services/nginz/integration-test/conf/nginz/integration-leaf-key.pem +++ b/services/nginz/integration-test/conf/nginz/integration-leaf-key.pem @@ -1,27 +1,27 @@ -----BEGIN RSA PRIVATE KEY----- 
-MIIEpAIBAAKCAQEA1iDPn3gxH1jNo5zzYqOJX664NeaPcbPZXPx29CAj+fLN8u8g -rknfVTve+JvqplZiJkf1hw82HnDUdOjCVRl9C0J6uRnncq2G+y13fzL05T751WJA -6S/HCc4oRzWSLMvW36ULiehESkriM91MMXlOltZjgbOLxXJbmN8JTer/yhDnnDXe -TmhoaCqB6WaLnLIMMHYbvWqgdeaANUJSQ2aVIK78eFSM/5yIHyTR3zXIojfSvSBA -H2fItdGxlvr+TmjnpUNGWPlbWddnNnOPc/2ezXh9Hz0z2Qp66Go7pDAhZULV4mUO -QwE/EIpDTCq1pn6BW+Pdc2neasHjPhQ+wt5PfwIDAQABAoIBAQCQaFRdcct/Hn6g -xuqFLVEqDEUDZNh8kBQZV9JJVZutp4gpPAfIQt2xN81p0IzxXPSYaJM3YJTY9rLx -nT/h8GyaOV1WlBe5CTotDz61tAHg0RXgSIEKQkRzYmtbis9oEph4/2/Gs7PKfrIK -1EXcX7kWlMNK53Ft2W/YqyI1QDT9aikgyR3NlNczIgWNB2fKIDKrJACj9sb3KZBz -ePXxDvIy2JcRo6o2B3023AKNdIA+PUMUAGgLnGXQv3bf8n3N+E984PdB5oWLHOg3 -RF9wjedIj7alfHSH9/4x1xhP6lmUF4U27FGN7jC6AcF0iV8QdPlSpa877tjeSrN6 -BnOuhCKxAoGBAPNQv7AMAtxYn7Rt6Qj4W147f3T30Muu3Kg2U5znU9rcQPSDFPYX -QFmBC4VrA31bgqzJ06mjutmn5hCtw0LzaWtA/JQIj42GMV5+hcAOMA6n2oMAT8FC -bpo93rdLpLhy8b3FJUaD9Qc7wtICu1XTyJv5gSe9nC9YJzl6MYRVEdFFAoGBAOFK -iJKzxJFsozrX0pp2O8DO6r5/IDPxPJurtAC/74n8RFzzQGymb5oGSrtjOyVqJYrd -a2bQWozQPRFiwGdyu8GK7hxxoGdaz5FCxjAe41qdDm4GlSn/wYaBzq511lb3GzVW -tauVZG2gkO4j2dvhMMthQY5xG8TAc4V3slHFdS/zAoGAF3Aa1vGBQQqEb9P6k7Og -0YX3tCO/CC/S7500FrQt3rJCy4ro9P+uYjDNFFAHqQasospaSkgMUrUas1aZrZRW -/k7nRbdBZMedb9XOOn7jYDYJFX9tL1ef4dm933g46M+hu78G5TEG5Gh8TtCWjSD2 -fRfeuh5IskLSnHXJ2U58heUCgYBENY6369l9tgiNjj5jKZzZuUv1NQQI9ebFsuyi -tXnOqyP/iF5fBt0PIwyJQ3fq0gJf0r3ruPVRYNK8asuaBnC2HlwNHJHV+PaTIkZi -11c6Xga6ZR/QQXDUSoTK6T5lwhboxUHnmyl2z4BRuWUCX2Gokd+JQtGHdkUDicPh -Ygki5QKBgQCNAkPMUEP5e44IXmecnIh1XCv9see7+jYyyjHfDmDCVNt7qwMLn1vT -sqoZtDWsTG2Dvp5ctYTfI5bOsNa2sEU/VSeccf6lHjiw2N+NhCQXffYZTLGMyAVH -78s8Xq7glmd4k08YkPpsOYpXUqB3DDQEV6v2XpDN2LI9RnYWewOF7g== +MIIEowIBAAKCAQEAr8i0VsoPb1ITTQO1O+uZ4b3+19F42kwXSpaBmgGwK9PQMjiw ++mNGKQf0AM8HISPAEWN3+7ildrl7o9gaFW6e6L00LGyRrKr9hJ46yWNhLb7auJi6 +sq5WK6Wjt+BDMWHmokfKDGOTyh4d+Q5R3uoY/Smi+QQLxUb8VkAESy6lLvff1HXy +jmcvoHigCMedOX0ipgoDg0OOMUiwDaJslsKnJ+Irn7VpfUjmIPPz4J8VRRlqxK6u +tSktq8uzZEUP03elZvlDYGuKEar5qLwgVENJKjgWWG6+gSJniQRNFKIOEvMsybip 
+wGdA/+da/s27NLBZvnMCLfSKVe15PnBfcEi3FwIDAQABAoIBABM4gO+UfIeRk+ax +5xk8M8FJQxpaHzrPYySWvGkYkijYqkUzibZ3MG7AHeAQwxjOjevY0n/FuuH2ehx6 +Pq/lPp74QUIyRON6duoPWyI2KaQU4Fma6Z8sDOQM4o/yh6ZYrB1GeENOiBRrop9e +/3i+ZCkaamWMGbVig6jyqwWFfi5aYZmL9BB3g7mMYz+DAnSD9eAI0Fl+dCjY3PLq +I5+BjnjHDdA9ixjyNhobBPUN67qAQLox7b5+joM+dW9TD2+2wLF8ubBP/ZjZxJpR +WRGG9tikdyR0ojC9cx4hg9+tN1OV9lAfOgWZO4ZwgCMsDFrKCf76DpG8nNbGMkUi +D8mGmhECgYEA6M6mlQuax9jvd7PhN/E5pqgDDr9gT0+6i9JRSNdX2zGxcH8QPMuE +WQN9gIT+HGfgZQR9r7DvEtl58IzMadF3Jj+zq2C1UMQujWktTp2wA+Lj+JTmSkSx +OdhFwOnouWqeHacdrP+LDahrxTAoQLWkFY7gbzYJARhT8U+MD17yFOMCgYEAwUvG +KY2H4SHqA2V3gjxjaGpj01D4Q4zaK4cDdLYofkkEIECbDXQ0MBPrhEng0bH/P4ld +8H9Sbsfaave/kdTpQunrGRG6cUnLG2/b3NPwf2FcROJ6bVP2JjQLSHZroV1WNLbO +WokoLn61AllkjHisyHjgeBx1oCBE08OVCyJ43z0CgYEAvbUHkZSvQALKwGRYNlnf +fKqUM0RHmtmBTcbIbe7srLVFvkIMXT4KTu7FKiE1YLhU5nxOXwhzCI0nDJnvSJtj +2Es4gYKAvZvfw2Pdg56De+c7lajgL8ziDhzqWlVBSzZSOh+f0wU5rpt7lmezpWde +miKfSIBjvfyxCoajvzLDWbkCgYBtFY8yeg3ZzqLa4dNM6zmKfqfxZHuG26Fv+RTJ +M9esVRaAARW/xPmCvGsoT+0RSitrNuGNzLy/igfIYCJ7cTVmrs4farLWJjf6NulU +OUM7D73bnhhLRJvgOXS4oyPgf+UbgKL50vebLaSHO92TrLKNvDGpdx4mjK9q9rBR +BVZDXQKBgBxHESayFWS0tAyV67GlOaiy3mbjVvxpRT7IGwXZAX+3NMvRmCzN8sIB +zkYMuRC3P/9RAZkBQ2qp8Fu0W8G7b32ImWyP7/HJb0hnBIfwBnePSUA1nS8jEkMp +IkrYAiU2viJTMiHNcqoVuJUY/FmxiZPPewqnJwQYAE4nrUD/oU8F -----END RSA PRIVATE KEY----- diff --git a/services/nginz/integration-test/conf/nginz/integration-leaf.pem b/services/nginz/integration-test/conf/nginz/integration-leaf.pem index 123b522f08..120d96cda5 100644 --- a/services/nginz/integration-test/conf/nginz/integration-leaf.pem +++ b/services/nginz/integration-test/conf/nginz/integration-leaf.pem @@ -1,20 +1,20 @@ -----BEGIN CERTIFICATE----- -MIIDXDCCAkSgAwIBAgIUey3LIX14eyWd2sth8HsSSDbhnYcwDQYJKoZIhvcNAQEL -BQAwGTEXMBUGA1UEAxMOY2EuZXhhbXBsZS5jb20wHhcNMjIwNDIwMTMyMDAwWhcN -MjMwNDIwMTMyMDAwWjAAMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA -1iDPn3gxH1jNo5zzYqOJX664NeaPcbPZXPx29CAj+fLN8u8grknfVTve+JvqplZi 
-Jkf1hw82HnDUdOjCVRl9C0J6uRnncq2G+y13fzL05T751WJA6S/HCc4oRzWSLMvW -36ULiehESkriM91MMXlOltZjgbOLxXJbmN8JTer/yhDnnDXeTmhoaCqB6WaLnLIM -MHYbvWqgdeaANUJSQ2aVIK78eFSM/5yIHyTR3zXIojfSvSBAH2fItdGxlvr+Tmjn -pUNGWPlbWddnNnOPc/2ezXh9Hz0z2Qp66Go7pDAhZULV4mUOQwE/EIpDTCq1pn6B -W+Pdc2neasHjPhQ+wt5PfwIDAQABo4G0MIGxMA4GA1UdDwEB/wQEAwIFoDAdBgNV +MIIDXDCCAkSgAwIBAgIUV3PHvpBx77MqGBo+PM2RIuIcBfAwDQYJKoZIhvcNAQEL +BQAwGTEXMBUGA1UEAxMOY2EuZXhhbXBsZS5jb20wHhcNMjMwNDI0MDkzMTAwWhcN +MjQwNDIzMDkzMTAwWjAAMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +r8i0VsoPb1ITTQO1O+uZ4b3+19F42kwXSpaBmgGwK9PQMjiw+mNGKQf0AM8HISPA +EWN3+7ildrl7o9gaFW6e6L00LGyRrKr9hJ46yWNhLb7auJi6sq5WK6Wjt+BDMWHm +okfKDGOTyh4d+Q5R3uoY/Smi+QQLxUb8VkAESy6lLvff1HXyjmcvoHigCMedOX0i +pgoDg0OOMUiwDaJslsKnJ+Irn7VpfUjmIPPz4J8VRRlqxK6utSktq8uzZEUP03el +ZvlDYGuKEar5qLwgVENJKjgWWG6+gSJniQRNFKIOEvMsybipwGdA/+da/s27NLBZ +vnMCLfSKVe15PnBfcEi3FwIDAQABo4G0MIGxMA4GA1UdDwEB/wQEAwIFoDAdBgNV HSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4E -FgQU9WPYeYNlDXrG0S2iYHZ81js6IYswHwYDVR0jBBgwFoAUKJezGA3nVwbC/f07 -wMaTptW4vhowMgYDVR0RAQH/BCgwJoIZKi5pbnRlZ3JhdGlvbi5leGFtcGxlLmNv -bYIJbG9jYWxob3N0MA0GCSqGSIb3DQEBCwUAA4IBAQBoRgeD+blaKlqqKRXGQoEV -7u7H+YvFQOrrF/sx7XOH9qs14SBNt16HwW4U5w6VM5PhIQkz+PaYXYjLltYQMNbT -d5A+g0Tc0zpZkYa1JjW4hKEJ5RnimbrDNzIfe40tQPyz/beg1fVwj8vEGM9Nr+1W -IhVjCFvlgzUXgVZnO++IbZU4MJpI63HHxQKJtmK/N+Ees33SUY8uTt+NPB9w0KiY -9RwDfQO5ux4Xb2ZI3hp8jI3NO08ILHcl2fwifBfexc6OkGVTP8jAZWUhzfCaZ4FQ -BZ6rKYxLbFPHy27dmq/EGcpqzuqHy/GUidXdwidxNC38oxe0uEBEJhYOPJcBctcv +FgQUa7feIJTIqMh5UjDi0UR7Ub5MrvcwHwYDVR0jBBgwFoAUG6rfcj4loLko6ysV +4eTOEG4Wq3MwMgYDVR0RAQH/BCgwJoIZKi5pbnRlZ3JhdGlvbi5leGFtcGxlLmNv +bYIJbG9jYWxob3N0MA0GCSqGSIb3DQEBCwUAA4IBAQAIfB/q/+jHWbN5goGMaPh8 +CL8kynzf0dmkwOs6f6sqDIRo+9BQneWCWVOTLbO3LK6ITsZhVTFmKT3bkEmj04sy +ZUnXfqi9CqDHjQKZU9OxIWoCgbe6r4siInI46K3rSYGsmP37x9jWop1fbJBLl1HC +ray3LR8zanzsR9ksbyfA9VbNmWY1nWxTkZZ5RM+IAlU0/8qRgo5Ypsl35Gd9RJiN +DtbU3+rU9bYQ1YgYDk0h1s2woEberjp1xnvGBJLhDjewv9jXXaQXr1GlwfnJBenO 
+TV+GWqTeXwPclK0mSKDGs/Ixh+dH3J+8GGCGd8CJTnQfCzGZIBf4I7re8QkeNsVb -----END CERTIFICATE----- From 981ef855b59d9bc66541a00b88a15a874c040afd Mon Sep 17 00:00:00 2001 From: Stefan Matting Date: Wed, 3 May 2023 13:57:28 +0200 Subject: [PATCH 74/75] fix broken tests --- hack/python/wire/mlscli.py | 2 +- .../test/integration/Federation/End2end.hs | 79 ++++++------------- .../brig/test/integration/Federation/Util.hs | 19 ++++- 3 files changed, 41 insertions(+), 59 deletions(-) diff --git a/hack/python/wire/mlscli.py b/hack/python/wire/mlscli.py index 99eca439d5..be53f849f1 100644 --- a/hack/python/wire/mlscli.py +++ b/hack/python/wire/mlscli.py @@ -189,7 +189,7 @@ def add_member(state, kpfiles): "", "--welcome-out", welcome_file, - "--group-state-out", + "--group-info-out", pgs_file, "--group-out", "", diff --git a/services/brig/test/integration/Federation/End2end.hs b/services/brig/test/integration/Federation/End2end.hs index 60b3120a5d..1c8ba92eab 100644 --- a/services/brig/test/integration/Federation/End2end.hs +++ b/services/brig/test/integration/Federation/End2end.hs @@ -834,6 +834,8 @@ testSendMLSMessage brig1 brig2 galley1 galley2 cannon1 cannon2 = do tmp "group.json", "--welcome-out", tmp "welcome", + "--group-info-out", + tmp "groupinfo.mls", tmp aliceClientId ] ) @@ -873,31 +875,14 @@ testSendMLSMessage brig1 brig2 galley1 galley2 cannon1 cannon2 = do -- send welcome, commit and dove WS.bracketR cannon1 (userId alice) $ \wsAlice -> do - post - ( galley2 - . paths - ["mls", "messages"] - . zUser (userId bob) - . zClient bobClient - . zConn "conn" - . header "Z-Type" "access" - . content "message/mls" - . bytes commit - ) - !!! const 201 === statusCode - - post - ( unversioned - . galley2 - . paths ["v2", "mls", "welcome"] - . zUser (userId bob) - . zClient bobClient - . zConn "conn" - . header "Z-Type" "access" - . content "message/mls" - . bytes welcome - ) - !!! 
const 201 === statusCode + sendCommitBundle + tmp + "groupinfo.mls" + (Just "welcome") + galley2 + (userId bob) + bobClient + commit post ( galley2 @@ -1098,6 +1083,8 @@ testSendMLSMessageToSubConversation brig1 brig2 galley1 galley2 cannon1 cannon2 tmp "group.json", "--welcome-out", tmp "welcome", + "--group-info-out", + tmp "groupinfo.mls", tmp aliceClientId ] ) @@ -1106,32 +1093,14 @@ testSendMLSMessageToSubConversation brig1 brig2 galley1 galley2 cannon1 cannon2 -- send welcome and commit WS.bracketR cannon1 (userId alice) $ \wsAlice -> do - post - ( galley2 - . paths - ["mls", "messages"] - . zUser (userId bob) - . zClient bobClient - . zConn "conn" - . header "Z-Type" "access" - . content "message/mls" - . bytes commit - ) - !!! const 201 === statusCode - - post - ( unversioned - . galley2 - . paths - ["v2", "mls", "welcome"] - . zUser (userId bob) - . zClient bobClient - . zConn "conn" - . header "Z-Type" "access" - . content "message/mls" - . bytes welcome - ) - !!! const 201 === statusCode + sendCommitBundle + tmp + "groupinfo.mls" + (Just "welcome") + galley2 + (userId bob) + bobClient + commit -- verify that alice receives the welcome message WS.assertMatch_ (5 # Second) wsAlice $ \n -> do @@ -1198,7 +1167,7 @@ testSendMLSMessageToSubConversation brig1 brig2 galley1 galley2 cannon1 cannon2 "--in-place", "--group", tmp "subgroup.json", - "--group-state-out", + "--group-info-out", tmp "subgroupstate.mls" ] ) @@ -1206,6 +1175,7 @@ testSendMLSMessageToSubConversation brig1 brig2 galley1 galley2 cannon1 cannon2 sendCommitBundle tmp "subgroupstate.mls" + Nothing galley2 (userId bob) bobClient @@ -1222,9 +1192,9 @@ testSendMLSMessageToSubConversation brig1 brig2 galley1 galley2 cannon1 cannon2 [ "external-commit", "--group-out", tmp "subgroupA.json", - "--group-state-in", + "--group-info-in", tmp "subgroupstate.mls", - "--group-state-out", + "--group-info-out", tmp "subgroupstateA.mls" ] ) @@ -1232,6 +1202,7 @@ testSendMLSMessageToSubConversation brig1 brig2 
galley1 galley2 cannon1 cannon2 sendCommitBundle tmp "subgroupstateA.mls" + Nothing galley1 (userId alice) aliceClient diff --git a/services/brig/test/integration/Federation/Util.hs b/services/brig/test/integration/Federation/Util.hs index e510aa50eb..399f17124a 100644 --- a/services/brig/test/integration/Federation/Util.hs +++ b/services/brig/test/integration/Federation/Util.hs @@ -67,6 +67,7 @@ import Wire.API.Conversation (Conversation (cnvMembers)) import Wire.API.Conversation.Member (OtherMember (OtherMember), cmOthers) import Wire.API.Conversation.Role (roleNameWireAdmin) import Wire.API.MLS.CommitBundle +import Wire.API.MLS.Message import Wire.API.MLS.Serialisation import Wire.API.Team.Feature (FeatureStatus (..)) import Wire.API.User @@ -117,12 +118,22 @@ connectUsersEnd2End brig1 brig2 quid1 quid2 = do putConnectionQualified brig2 (qUnqualified quid2) quid1 Accepted !!! const 200 === statusCode -sendCommitBundle :: FilePath -> FilePath -> Galley -> UserId -> ClientId -> ByteString -> Http () -sendCommitBundle tmp subGroupStateFn galley uid cid commit = do +sendCommitBundle :: HasCallStack => FilePath -> FilePath -> Maybe FilePath -> Galley -> UserId -> ClientId -> ByteString -> Http () +sendCommitBundle tmp subGroupStateFn welcomeFn galley uid cid commit = do subGroupStateRaw <- liftIO $ BS.readFile $ tmp subGroupStateFn subGroupState <- either (liftIO . assertFailure . T.unpack) pure . decodeMLS' $ subGroupStateRaw subCommit <- either (liftIO . assertFailure . T.unpack) pure . decodeMLS' $ commit - let subGroupBundle = CommitBundle subCommit Nothing subGroupState + mbWelcome <- + for + welcomeFn + $ \fn -> do + bs <- liftIO $ BS.readFile $ tmp fn + msg :: Message <- either (liftIO . assertFailure . T.unpack) pure . decodeMLS' $ bs + case msg.content of + MessageWelcome welcome -> pure welcome + _ -> liftIO . assertFailure $ "Expected a welcome" + + let subGroupBundle = CommitBundle subCommit mbWelcome subGroupState post ( galley . 
paths @@ -131,7 +142,7 @@ sendCommitBundle tmp subGroupStateFn galley uid cid commit = do . zClient cid . zConn "conn" . header "Z-Type" "access" - . content "message/mls" + . Bilge.content "message/mls" . lbytes (encodeMLS subGroupBundle) ) !!! const 201 === statusCode From c09326699304c0b347c9cd0ea37e1b533f8e1c1b Mon Sep 17 00:00:00 2001 From: Stefan Matting Date: Wed, 3 May 2023 14:45:13 +0200 Subject: [PATCH 75/75] ExternalCommitAction: remove superfluous ClientIdentity --- .../Galley/API/MLS/Commit/ExternalCommit.hs | 38 +++++++++---------- .../Galley/API/MLS/Commit/InternalCommit.hs | 2 +- 2 files changed, 19 insertions(+), 21 deletions(-) diff --git a/services/galley/src/Galley/API/MLS/Commit/ExternalCommit.hs b/services/galley/src/Galley/API/MLS/Commit/ExternalCommit.hs index 6a6ceb481b..edb792d932 100644 --- a/services/galley/src/Galley/API/MLS/Commit/ExternalCommit.hs +++ b/services/galley/src/Galley/API/MLS/Commit/ExternalCommit.hs @@ -26,7 +26,6 @@ import Control.Lens (forOf_) import qualified Data.Map as Map import Data.Qualified import qualified Data.Set as Set -import Data.Tuple.Extra import Galley.API.MLS.Commit.Core import Galley.API.MLS.Proposal import Galley.API.MLS.Removal @@ -52,8 +51,8 @@ import Wire.API.MLS.SubConversation import Wire.API.MLS.Validation data ExternalCommitAction = ExternalCommitAction - { add :: (ClientIdentity, LeafIndex), - remove :: Maybe (ClientIdentity, LeafIndex) + { add :: LeafIndex, + remove :: Maybe LeafIndex } getExternalCommitData :: @@ -94,11 +93,11 @@ getExternalCommitData senderIdentity lConvOrSub epoch commit = do evalState (indexMapConvOrSub convOrSub) $ do -- process optional removal propAction <- applyProposals mlsMeta groupId proposals - removedClient <- case cmAssocs (paRemove propAction) of + removedIndex <- case cmAssocs (paRemove propAction) of [(cid, idx)] | cid /= senderIdentity -> throw $ mlsProtocolError "Only the self client can be removed by an external commit" - | otherwise -> pure (Just (cid, 
idx)) + | otherwise -> pure (Just idx) [] -> pure Nothing _ -> throw (mlsProtocolError "External commits must contain at most one Remove proposal") @@ -107,8 +106,8 @@ getExternalCommitData senderIdentity lConvOrSub epoch commit = do pure ExternalCommitAction - { add = (senderIdentity, addedIndex), - remove = removedClient + { add = addedIndex, + remove = removedIndex } where allowedProposals = [ExternalInitProposalTag, RemoveProposalTag, PreSharedKeyProposalTag] @@ -147,7 +146,7 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do updatePath let cs = cnvmlsCipherSuite (mlsMetaConvOrSub (tUnqualified lConvOrSub)) let groupId = cnvmlsGroupId (mlsMetaConvOrSub convOrSub) - let extra = LeafNodeTBSExtraCommit groupId (snd action.add) + let extra = LeafNodeTBSExtraCommit groupId action.add case validateLeafNode cs (Just senderIdentity) extra leafNode.value of Left errMsg -> throw $ @@ -155,7 +154,7 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do Right _ -> pure () withCommitLock (fmap idForConvOrSub lConvOrSub) groupId epoch $ do - executeExternalCommitAction lConvOrSub action + executeExternalCommitAction lConvOrSub senderIdentity action -- increment epoch number lConvOrSub' <- for lConvOrSub incrementEpoch @@ -163,7 +162,7 @@ processExternalCommit senderIdentity lConvOrSub epoch action updatePath = do -- fetch backend remove proposals of the previous epoch indicesInRemoveProposals <- -- skip remove proposals of already removed by the external commit - (\\ toList (fmap snd action.remove)) + (\\ toList action.remove) <$> getPendingBackendRemoveProposals groupId epoch -- requeue backend remove proposals for the current epoch @@ -178,22 +177,21 @@ executeExternalCommitAction :: forall r. 
HasProposalActionEffects r => Local ConvOrSubConv -> + ClientIdentity -> ExternalCommitAction -> Sem r () -executeExternalCommitAction lconvOrSub action = do +executeExternalCommitAction lconvOrSub senderIdentity action = do let mlsMeta = mlsMetaConvOrSub $ tUnqualified lconvOrSub -- Remove deprecated sender client from conversation state. - for_ action.remove $ \(cid, _) -> + for_ action.remove $ \_ -> removeMLSClients (cnvmlsGroupId mlsMeta) - (cidQualifiedUser cid) - (Set.singleton (ciClient cid)) + (cidQualifiedUser senderIdentity) + (Set.singleton (ciClient senderIdentity)) -- Add new sender client to the conversation state. - do - let (cid, idx) = action.add - addMLSClients - (cnvmlsGroupId mlsMeta) - (cidQualifiedUser cid) - (Set.singleton (ciClient cid, idx)) + addMLSClients + (cnvmlsGroupId mlsMeta) + (cidQualifiedUser senderIdentity) + (Set.singleton (ciClient senderIdentity, action.add)) diff --git a/services/galley/src/Galley/API/MLS/Commit/InternalCommit.hs b/services/galley/src/Galley/API/MLS/Commit/InternalCommit.hs index af8f7b4782..24991a3d3b 100644 --- a/services/galley/src/Galley/API/MLS/Commit/InternalCommit.hs +++ b/services/galley/src/Galley/API/MLS/Commit/InternalCommit.hs @@ -125,7 +125,7 @@ processInternalCommit senderIdentity con lConvOrSub epoch action commit = do throwS @'MLSSelfRemovalNotAllowed -- FUTUREWORK: add tests against this situation for conv v subconv - when (not (is _SubConv convOrSub) && removedClients /= clientsInConv) $ do + when (removedClients /= clientsInConv) $ do -- FUTUREWORK: turn this error into a proper response throwS @'MLSClientMismatch