From 96ab1d0e984d1566a3a54c189cadc6ac7facbcce Mon Sep 17 00:00:00 2001 From: Akshay Mankar Date: Mon, 14 Aug 2023 12:55:08 +0200 Subject: [PATCH 01/12] background-worker: Make push backoff times configurable --- charts/background-worker/values.yaml | 3 ++- hack/helm_vars/wire-server/values.yaml.gotmpl | 3 ++- .../background-worker.integration.yaml | 5 +++-- .../src/Wire/BackendNotificationPusher.hs | 4 +++- .../src/Wire/BackgroundWorker/Env.hs | 2 ++ .../src/Wire/BackgroundWorker/Options.hs | 19 ++++++++++++++++++- .../Wire/BackendNotificationPusherSpec.hs | 3 +++ .../background-worker/test/Test/Wire/Util.hs | 2 ++ 8 files changed, 35 insertions(+), 6 deletions(-) diff --git a/charts/background-worker/values.yaml b/charts/background-worker/values.yaml index 6fcdb5e05be..fcae0115bfc 100644 --- a/charts/background-worker/values.yaml +++ b/charts/background-worker/values.yaml @@ -24,7 +24,8 @@ config: vHost: / adminPort: 15672 backendNotificationPusher: - remotesRefreshInterval: 60 # seconds + pushBackoffMinWait: 10000 # in microseconds, so 10ms + pushBackoffMaxWait: 300000000 # microseconds, so 300s serviceAccount: # When setting this to 'false', either make sure that a service account named diff --git a/hack/helm_vars/wire-server/values.yaml.gotmpl b/hack/helm_vars/wire-server/values.yaml.gotmpl index ba7fb2f042a..a20f2280bb6 100644 --- a/hack/helm_vars/wire-server/values.yaml.gotmpl +++ b/hack/helm_vars/wire-server/values.yaml.gotmpl @@ -312,7 +312,8 @@ background-worker: imagePullPolicy: {{ .Values.imagePullPolicy }} config: backendNotificationPusher: - remotesRefreshInterval: 1 + pushBackoffMinWait: 1000 # 1ms + pushBackoffMaxWait: 500000 # 0.5s secrets: rabbitmq: username: {{ .Values.rabbitmqUsername }} diff --git a/services/background-worker/background-worker.integration.yaml b/services/background-worker/background-worker.integration.yaml index 03e95748914..9762cc70825 100644 --- a/services/background-worker/background-worker.integration.yaml +++ 
b/services/background-worker/background-worker.integration.yaml @@ -1,4 +1,4 @@ -logLevel: Info +logLevel: Debug backgroundWorker: host: 0.0.0.0 @@ -23,4 +23,5 @@ rabbitmq: adminPort: 15672 backendNotificationPusher: - remotesRefreshInterval: 1 \ No newline at end of file + pushBackoffMinWait: 1000 + pushBackoffMaxWait: 1000000 diff --git a/services/background-worker/src/Wire/BackendNotificationPusher.hs b/services/background-worker/src/Wire/BackendNotificationPusher.hs index 0b8bd91c807..f52f165dbbd 100644 --- a/services/background-worker/src/Wire/BackendNotificationPusher.hs +++ b/services/background-worker/src/Wire/BackendNotificationPusher.hs @@ -23,6 +23,7 @@ import Wire.API.Federation.BackendNotifications import Wire.API.Federation.Client import Wire.API.Routes.FederationDomainConfig import Wire.BackgroundWorker.Env +import Wire.BackgroundWorker.Options import Wire.BackgroundWorker.Util startPushingNotifications :: @@ -36,11 +37,12 @@ startPushingNotifications runningFlag chan domain = do pushNotification :: RabbitMQEnvelope e => MVar () -> Domain -> (Q.Message, e) -> AppT IO (Async ()) pushNotification runningFlag targetDomain (msg, envelope) = do + cfg <- asks (.backendNotificationsConfig) -- Jittered exponential backoff with 10ms as starting delay and 300s as max -- delay. When 300s is reached, every retry will happen after 300s. 
-- -- FUTUREWORK: Pull these numbers into config.s - let policy = capDelay 300_000_000 $ fullJitterBackoff 10000 + let policy = capDelay cfg.pushBackoffMaxWait $ fullJitterBackoff cfg.pushBackoffMinWait logErrr willRetry (SomeException e) rs = do Log.err $ Log.msg (Log.val "Exception occurred while pushing notification") diff --git a/services/background-worker/src/Wire/BackgroundWorker/Env.hs b/services/background-worker/src/Wire/BackgroundWorker/Env.hs index 2f3e0130aef..86a5b99ed57 100644 --- a/services/background-worker/src/Wire/BackgroundWorker/Env.hs +++ b/services/background-worker/src/Wire/BackgroundWorker/Env.hs @@ -54,6 +54,7 @@ data Env = Env -- connection-removed notifications into the notifications channels. -- This allows us to reuse existing code. This only pushes. notificationChannel :: MVar Channel, + backendNotificationsConfig :: BackendNotificationsConfig, statuses :: IORef (Map Worker IsWorking) } @@ -101,6 +102,7 @@ mkEnv opts = do metrics <- Metrics.metrics backendNotificationMetrics <- mkBackendNotificationMetrics notificationChannel <- mkRabbitMqChannelMVar logger $ demoteOpts opts.rabbitmq + let backendNotificationsConfig = opts.backendNotificationPusher pure (Env {..}, syncThread) initHttp2Manager :: IO Http2Manager diff --git a/services/background-worker/src/Wire/BackgroundWorker/Options.hs b/services/background-worker/src/Wire/BackgroundWorker/Options.hs index 1778dcf905f..7cac93318db 100644 --- a/services/background-worker/src/Wire/BackgroundWorker/Options.hs +++ b/services/background-worker/src/Wire/BackgroundWorker/Options.hs @@ -14,8 +14,25 @@ data Opts = Opts rabbitmq :: !RabbitMqAdminOpts, galley :: !Endpoint, brig :: !Endpoint, - defederationTimeout :: Maybe Int -- Seconds, Nothing for no timeout + -- | Seconds, Nothing for no timeout + defederationTimeout :: Maybe Int, + backendNotificationPusher :: BackendNotificationsConfig } deriving (Show, Generic) instance FromJSON Opts + +data BackendNotificationsConfig = 
BackendNotificationsConfig { -- | Minimum amount of time (in microseconds) to wait before doing the first -- retry in pushing a notification. Further retries are done in a jittered -- exponential way. -- https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/ pushBackoffMinWait :: Int, -- | Upper limit on amount of time (in microseconds) to wait before retrying -- any notification. This exists to ensure that exponential back-off doesn't -- cause wait times to be very big. pushBackoffMaxWait :: Int } deriving (Show, Generic) instance FromJSON BackendNotificationsConfig diff --git a/services/background-worker/test/Test/Wire/BackendNotificationPusherSpec.hs b/services/background-worker/test/Test/Wire/BackendNotificationPusherSpec.hs index 5a78b5d55c8..d7a275cf285 100644 --- a/services/background-worker/test/Test/Wire/BackendNotificationPusherSpec.hs +++ b/services/background-worker/test/Test/Wire/BackendNotificationPusherSpec.hs @@ -45,6 +45,7 @@ import Wire.API.RawJson import Wire.API.Routes.FederationDomainConfig import Wire.BackendNotificationPusher import Wire.BackgroundWorker.Env +import Wire.BackgroundWorker.Options import Wire.BackgroundWorker.Util spec :: Spec @@ -231,6 +232,7 @@ spec = do defederationTimeout = responseTimeoutNone galley = Endpoint "localhost" 8085 brig = Endpoint "localhost" 8082 + backendNotificationsConfig = BackendNotificationsConfig 1000 500000 backendNotificationMetrics <- mkBackendNotificationMetrics domains <- runAppT Env {..} getRemoteDomains @@ -253,6 +255,7 @@ spec = do defederationTimeout = responseTimeoutNone galley = Endpoint "localhost" 8085 brig = Endpoint "localhost" 8082 + backendNotificationsConfig = BackendNotificationsConfig 1000 500000 backendNotificationMetrics <- mkBackendNotificationMetrics domainsThread <- async $ runAppT Env {..} getRemoteDomains diff --git a/services/background-worker/test/Test/Wire/Util.hs b/services/background-worker/test/Test/Wire/Util.hs index 
d88db07780a..51e8ce51b38 100644 --- a/services/background-worker/test/Test/Wire/Util.hs +++ b/services/background-worker/test/Test/Wire/Util.hs @@ -10,6 +10,7 @@ import Util.Options import Wire.API.Routes.FederationDomainConfig import Wire.BackgroundWorker.Env hiding (federatorInternal, galley) import Wire.BackgroundWorker.Env qualified as E +import Wire.BackgroundWorker.Options import Wire.BackgroundWorker.Util testEnv :: IO Env @@ -29,6 +30,7 @@ testEnv = do galley = Endpoint "localhost" 8085 brig = Endpoint "localhost" 8082 defederationTimeout = responseTimeoutNone + backendNotificationsConfig = BackendNotificationsConfig 1000 500000 pure Env {..} runTestAppT :: AppT IO a -> Int -> IO a From 97becbc4c51df8f6feb65fd377b4da09e8c2b06f Mon Sep 17 00:00:00 2001 From: Akshay Mankar Date: Mon, 14 Aug 2023 12:56:47 +0200 Subject: [PATCH 02/12] brig/getFederationStatus: Always return NonConnectedBackends as empty when fed policy is AllowAll --- services/brig/src/Brig/API/Federation.hs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/services/brig/src/Brig/API/Federation.hs b/services/brig/src/Brig/API/Federation.hs index bb9ea753595..ece2d3c737e 100644 --- a/services/brig/src/Brig/API/Federation.hs +++ b/services/brig/src/Brig/API/Federation.hs @@ -33,10 +33,12 @@ import Brig.Data.Connection qualified as Data import Brig.Data.User qualified as Data import Brig.Effects.GalleyProvider (GalleyProvider) import Brig.IO.Intra (notify) +import Brig.Options import Brig.Types.User.Event import Brig.User.API.Handle import Brig.User.Search.SearchIndex qualified as Q import Control.Error.Util +import Control.Lens ((^.)) import Control.Monad.Trans.Except import Data.Domain import Data.Handle (Handle (..), parseHandle) @@ -95,8 +97,12 @@ federationSitemap = -- with the subset of those we aren't connected to. getFederationStatus :: Domain -> DomainSet -> Handler r NonConnectedBackends getFederationStatus _ request = do - fedDomains <- fromList . 
fmap (.domain) . (.remotes) <$> getFederationRemotes - pure $ NonConnectedBackends (request.dsDomains \\ fedDomains) + cfg <- ask + case setFederationStrategy (cfg ^. settings) of + Just AllowAll -> pure $ NonConnectedBackends mempty + _ -> do + fedDomains <- fromList . fmap (.domain) . (.remotes) <$> getFederationRemotes + pure $ NonConnectedBackends (request.dsDomains \\ fedDomains) sendConnectionAction :: Domain -> NewConnectionRequest -> Handler r NewConnectionResponse sendConnectionAction originDomain NewConnectionRequest {..} = do From 45c958b8568f6abd3e88bb297ff069a67b4e7cb1 Mon Sep 17 00:00:00 2001 From: Akshay Mankar Date: Mon, 14 Aug 2023 13:01:31 +0200 Subject: [PATCH 03/12] integration: Use separate vHosts for backendA and B. --- deploy/dockerephemeral/init_vhosts.sh | 2 ++ integration/test/Testlib/ModService.hs | 2 ++ integration/test/Testlib/RunServices.hs | 4 ++-- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/deploy/dockerephemeral/init_vhosts.sh b/deploy/dockerephemeral/init_vhosts.sh index a7f9bd7c4a1..4c169ba4431 100755 --- a/deploy/dockerephemeral/init_vhosts.sh +++ b/deploy/dockerephemeral/init_vhosts.sh @@ -6,6 +6,8 @@ exec_until_ready() { echo 'Creating RabbitMQ resources' +exec_until_ready "curl -u $RABBITMQ_USERNAME:$RABBITMQ_PASSWORD -X PUT http://rabbitmq:15672/api/vhosts/backendA" +exec_until_ready "curl -u $RABBITMQ_USERNAME:$RABBITMQ_PASSWORD -X PUT http://rabbitmq:15672/api/vhosts/backendB" exec_until_ready "curl -u $RABBITMQ_USERNAME:$RABBITMQ_PASSWORD -X PUT http://rabbitmq:15672/api/vhosts/d1.example.com" exec_until_ready "curl -u $RABBITMQ_USERNAME:$RABBITMQ_PASSWORD -X PUT http://rabbitmq:15672/api/vhosts/d2.example.com" exec_until_ready "curl -u $RABBITMQ_USERNAME:$RABBITMQ_PASSWORD -X PUT http://rabbitmq:15672/api/vhosts/d3.example.com" diff --git a/integration/test/Testlib/ModService.hs b/integration/test/Testlib/ModService.hs index 0de729cd80b..44606a6f238 100644 --- a/integration/test/Testlib/ModService.hs +++ 
b/integration/test/Testlib/ModService.hs @@ -188,6 +188,8 @@ startDynamicBackend resource staticPorts beOverrides = do Gundeck -> setField "aws.queueName" resource.berAwsQueueName Galley -> setField "journal.queueName" resource.berGalleyJournal + >=> setField "rabbitmq.vHost" resource.berVHost + BackgroundWorker -> setField "rabbitmq.vHost" resource.berVHost _ -> pure setFederationSettings :: Service -> Value -> App Value diff --git a/integration/test/Testlib/RunServices.hs b/integration/test/Testlib/RunServices.hs index 01966efe720..ee79daf9187 100644 --- a/integration/test/Testlib/RunServices.hs +++ b/integration/test/Testlib/RunServices.hs @@ -34,7 +34,7 @@ backendA = berEmailSMSSesQueue = "integration-brig-events", berEmailSMSEmailSender = "backend-integration@wire.com", berGalleyJournal = "integration-team-events.fifo", - berVHost = "/", + berVHost = "backendA", berNginzSslPort = 8443 } @@ -74,7 +74,7 @@ backendB = -- FUTUREWORK: set up vhosts in dev/ci for example.com and b.example.com -- in case we want backendA and backendB to federate with a third backend -- (because otherwise both queues will overlap) - berVHost = "/", + berVHost = "backendB", berNginzSslPort = 9443 } From e4d19670eb61069314a031257096f8ef61b47a08 Mon Sep 17 00:00:00 2001 From: Akshay Mankar Date: Mon, 14 Aug 2023 14:32:15 +0200 Subject: [PATCH 04/12] integration/RunServices: Add hack to make federation work --- integration/test/Testlib/RunServices.hs | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/integration/test/Testlib/RunServices.hs b/integration/test/Testlib/RunServices.hs index ee79daf9187..3991a607b59 100644 --- a/integration/test/Testlib/RunServices.hs +++ b/integration/test/Testlib/RunServices.hs @@ -5,6 +5,7 @@ module Testlib.RunServices where import Control.Concurrent import Control.Monad.Codensity (lowerCodensity) import Data.Map qualified as Map +import SetupHelpers import System.Directory import System.Environment (getArgs) import 
System.Exit (exitWith) @@ -137,17 +138,12 @@ main = do runAppWithEnv env $ do lowerCodensity $ do - let fedConfig = - def - { dbBrig = - setField - "optSettings.setFederationDomainConfigs" - [ object ["domain" .= backendA.berDomain, "search_policy" .= "full_search"], - object ["domain" .= backendB.berDomain, "search_policy" .= "full_search"] - ] - } _modifyEnv <- traverseConcurrentlyCodensity - (\(res, staticPorts, overrides) -> startDynamicBackend res staticPorts overrides) - [(backendA, staticPortsA, fedConfig), (backendB, staticPortsB, fedConfig)] + ( \(res, staticPorts) -> + -- We add the 'fullSearchWithAll' overrides as a hack to get + -- around https://wearezeta.atlassian.net/browse/WPB-3796 + startDynamicBackend res staticPorts fullSearchWithAll + ) + [(backendA, staticPortsA), (backendB, staticPortsB)] liftIO run From 4ddb67deb6ad715586307a1c418db1a6205f83c8 Mon Sep 17 00:00:00 2001 From: Akshay Mankar Date: Mon, 14 Aug 2023 14:34:20 +0200 Subject: [PATCH 05/12] integration: Add test to verify behaviour with offline backends --- integration/default.nix | 2 + integration/integration.cabal | 4 + integration/test/API/Brig.hs | 6 + integration/test/API/Galley.hs | 20 ++- integration/test/Notifications.hs | 53 ++++++++ integration/test/SetupHelpers.hs | 10 +- integration/test/Test/Federation.hs | 180 +++++++++++++++++++++++++ integration/test/Testlib/Assertions.hs | 14 ++ integration/test/Testlib/Cannon.hs | 8 +- integration/test/Testlib/HTTP.hs | 12 +- integration/test/Testlib/Types.hs | 10 +- 11 files changed, 307 insertions(+), 12 deletions(-) create mode 100644 integration/test/Notifications.hs create mode 100644 integration/test/Test/Federation.hs diff --git a/integration/default.nix b/integration/default.nix index 600ceb7ddc1..28e47e2f002 100644 --- a/integration/default.nix +++ b/integration/default.nix @@ -53,6 +53,7 @@ , uuid , vector , websockets +, wire-message-proto-lens , yaml }: mkDerivation { @@ -110,6 +111,7 @@ mkDerivation { uuid vector 
websockets + wire-message-proto-lens yaml ]; license = lib.licenses.agpl3Only; diff --git a/integration/integration.cabal b/integration/integration.cabal index 912cb5dbb3c..8e29915dbb2 100644 --- a/integration/integration.cabal +++ b/integration/integration.cabal @@ -96,6 +96,7 @@ library API.GundeckInternal API.Nginz MLS.Util + Notifications RunAllTests SetupHelpers Test.AssetDownload @@ -105,6 +106,7 @@ library Test.Conversation Test.Defederation Test.Demo + Test.Federation Test.Federator Test.Notifications Test.Presence @@ -156,6 +158,7 @@ library , network-uri , optparse-applicative , process + , proto-lens , random , raw-strings-qq , retry @@ -174,4 +177,5 @@ library , uuid , vector , websockets + , wire-message-proto-lens , yaml diff --git a/integration/test/API/Brig.hs b/integration/test/API/Brig.hs index af7d16487d4..6fd779b9f31 100644 --- a/integration/test/API/Brig.hs +++ b/integration/test/API/Brig.hs @@ -32,6 +32,12 @@ getClient u cli = do joinHttpPath ["clients", c] submit "GET" req +deleteUser :: (HasCallStack, MakesValue user) => user -> App Response +deleteUser user = do + req <- baseRequest user Brig Versioned "/self" + submit "DELETE" $ + req & addJSONObject ["password" .= defPassword] + data AddClient = AddClient { ctype :: String, internal :: Bool, diff --git a/integration/test/API/Galley.hs b/integration/test/API/Galley.hs index 3b2446f47df..5ecfad1d739 100644 --- a/integration/test/API/Galley.hs +++ b/integration/test/API/Galley.hs @@ -1,6 +1,8 @@ module API.Galley where import Data.Aeson qualified as Aeson +import Data.ProtoLens qualified as Proto +import Proto.Otr import Testlib.Prelude data CreateConv = CreateConv @@ -152,6 +154,14 @@ postMLSCommitBundle cid msg = do req <- baseRequest cid Galley Versioned "/mls/commit-bundles" submit "POST" (addMLS msg req) +postProteusMessage :: (HasCallStack, MakesValue user, MakesValue conv) => user -> conv -> QualifiedNewOtrMessage -> App Response +postProteusMessage user conv msgs = do + convDomain <- 
objDomain conv + convId <- objId conv + let bytes = Proto.encodeMessage msgs + req <- baseRequest user Galley Versioned ("/conversations/" <> convDomain <> "/" <> convId <> "/proteus/messages") + submit "POST" (addProtobuf bytes req) + getGroupInfo :: (HasCallStack, MakesValue user, MakesValue conv) => user -> @@ -167,7 +177,15 @@ getGroupInfo user conv = do submit "GET" req addMembers :: (HasCallStack, MakesValue user, MakesValue conv) => user -> conv -> [Value] -> App Response -addMembers usr qcnv qUsers = do +addMembers usr qcnv newMembers = do (convDomain, convId) <- objQid qcnv + qUsers <- mapM objQidObject newMembers req <- baseRequest usr Galley Versioned (joinHttpPath ["conversations", convDomain, convId, "members"]) submit "POST" (req & addJSONObject ["qualified_users" .= qUsers]) + +removeMember :: (HasCallStack, MakesValue remover, MakesValue conv, MakesValue removed) => remover -> conv -> removed -> App Response +removeMember remover qcnv removed = do + (convDomain, convId) <- objQid qcnv + (removedDomain, removedId) <- objQid removed + req <- baseRequest remover Galley Versioned (joinHttpPath ["conversations", convDomain, convId, "members", removedDomain, removedId]) + submit "DELETE" req diff --git a/integration/test/Notifications.hs b/integration/test/Notifications.hs new file mode 100644 index 00000000000..a866d98a945 --- /dev/null +++ b/integration/test/Notifications.hs @@ -0,0 +1,53 @@ +module Notifications where + +import API.Gundeck +import Control.Monad.Extra +import Testlib.Prelude +import UnliftIO.Concurrent + +awaitNotifications :: + (HasCallStack, MakesValue user, MakesValue client) => + user -> + client -> + Maybe String -> + -- | Timeout in seconds + Int -> + -- | Max no. of notifications + Int -> + -- | Selection function. 
Should not throw any exceptions + (Value -> App Bool) -> + App [Value] +awaitNotifications user client since0 tSecs n selector = + assertAwaitResult =<< go tSecs since0 (AwaitResult False n [] []) + where + go 0 _ res = pure res + go timeRemaining since res0 = do + notifs <- bindResponse (getNotifications user client (GetNotifications since Nothing)) $ \resp -> asList (resp.json %. "notifications") + lastNotifId <- case notifs of + [] -> pure since + _ -> Just <$> objId (last notifs) + (matching, notMatching) <- partitionM selector notifs + let matchesSoFar = res0.matches <> matching + res = + res0 + { matches = matchesSoFar, + nonMatches = res0.nonMatches <> notMatching, + success = length matchesSoFar >= res0.nMatchesExpected + } + if res.success + then pure res + else do + threadDelay (1_000_000) + go (timeRemaining - 1) lastNotifId res + +awaitNotification :: + (HasCallStack, MakesValue user, MakesValue client, MakesValue lastNotifId) => + user -> + client -> + Maybe lastNotifId -> + Int -> + (Value -> App Bool) -> + App Value +awaitNotification user client lastNotifId tSecs selector = do + since0 <- mapM objId lastNotifId + head <$> awaitNotifications user client since0 tSecs 1 selector diff --git a/integration/test/SetupHelpers.hs b/integration/test/SetupHelpers.hs index e2766e1735c..f6ba6f46768 100644 --- a/integration/test/SetupHelpers.hs +++ b/integration/test/SetupHelpers.hs @@ -1,6 +1,6 @@ module SetupHelpers where -import API.Brig qualified as Public +import API.Brig qualified as Brig import API.BrigInternal qualified as Internal import API.Galley import Control.Concurrent (threadDelay) @@ -25,6 +25,10 @@ randomUser domain cu = bindResponse (Internal.createUser domain cu) $ \resp -> d resp.status `shouldMatchInt` 201 resp.json +deleteUser :: (HasCallStack, MakesValue user) => user -> App () +deleteUser user = bindResponse (Brig.deleteUser user) $ \resp -> do + resp.status `shouldMatchInt` 200 + -- | returns (user, team id) createTeam :: (HasCallStack, 
MakesValue domain) => domain -> App (Value, String) createTeam domain = do @@ -45,8 +49,8 @@ connectUsers :: bob -> App () connectUsers alice bob = do - bindResponse (Public.postConnection alice bob) (\resp -> resp.status `shouldMatchInt` 201) - bindResponse (Public.putConnection bob alice "accepted") (\resp -> resp.status `shouldMatchInt` 200) + bindResponse (Brig.postConnection alice bob) (\resp -> resp.status `shouldMatchInt` 201) + bindResponse (Brig.putConnection bob alice "accepted") (\resp -> resp.status `shouldMatchInt` 200) createAndConnectUsers :: (HasCallStack, MakesValue domain) => [domain] -> App [Value] createAndConnectUsers domains = do diff --git a/integration/test/Test/Federation.hs b/integration/test/Test/Federation.hs new file mode 100644 index 00000000000..0a1814a5204 --- /dev/null +++ b/integration/test/Test/Federation.hs @@ -0,0 +1,180 @@ +{-# LANGUAGE OverloadedLabels #-} +{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} + +module Test.Federation where + +import API.Brig qualified as API +import API.Galley +import Control.Lens +import Control.Monad.Catch +import Control.Monad.Codensity +import Control.Monad.Reader +import Data.ByteString.Lazy qualified as LBS +import Data.ProtoLens qualified as Proto +import Data.ProtoLens.Labels () +import Data.UUID qualified as UUID +import Notifications +import Numeric.Lens +import Proto.Otr qualified as Proto +import Proto.Otr_Fields qualified as Proto +import SetupHelpers +import Testlib.Prelude +import Testlib.ResourcePool + +testNotificationsForOfflineBackends :: HasCallStack => App () +testNotificationsForOfflineBackends = do + resourcePool <- asks (.resourcePool) + -- `delUser` will eventually get deleted. 
+ [delUser, otherUser] <- createAndConnectUsers [OwnDomain, OtherDomain] + delClient <- objId $ bindResponse (API.addClient delUser def) $ getJSON 201 + otherClient <- objId $ bindResponse (API.addClient otherUser def) $ getJSON 201 + + -- We call it 'downBackend' because it is down for the most of this test + -- except for setup and assertions. Perhaps there is a better name. + runCodensity (acquireResources 1 resourcePool) $ \[downBackend] -> do + (downUser1, downClient1, downUser2, upBackendConv, downBackendConv) <- runCodensity (startDynamicBackend downBackend mempty mempty) $ \_ -> do + downUser1 <- randomUser downBackend.berDomain def + downUser2 <- randomUser downBackend.berDomain def + downClient1 <- objId $ bindResponse (API.addClient downUser1 def) $ getJSON 201 + connectUsers delUser downUser1 + connectUsers delUser downUser2 + connectUsers otherUser downUser1 + upBackendConv <- bindResponse (postConversation delUser (defProteus {qualifiedUsers = [otherUser, downUser1]})) $ getJSON 201 + downBackendConv <- bindResponse (postConversation downUser1 (defProteus {qualifiedUsers = [otherUser, delUser]})) $ getJSON 201 + pure (downUser1, downClient1, downUser2, upBackendConv, downBackendConv) + + -- Even when a participating backend is down, messages to conversations + -- owned by other backends should go. + successfulMsgForOtherUser <- mkProteusRecipient otherUser otherClient "success message for other user" + successfulMsgForDownUser <- mkProteusRecipient downUser1 downClient1 "success message for down user" + let successfulMsg = + Proto.defMessage @Proto.QualifiedNewOtrMessage + & #sender . Proto.client .~ (delClient ^?! hex) + & #recipients .~ [successfulMsgForOtherUser, successfulMsgForDownUser] + & #reportAll .~ Proto.defMessage + bindResponse (postProteusMessage delUser upBackendConv successfulMsg) assertSuccess + + -- When conversation owning backend is down, messages will fail to be sent. 
+ failedMsgForOtherUser <- mkProteusRecipient otherUser otherClient "failed message for other user" + failedMsgForDownUser <- mkProteusRecipient downUser1 downClient1 "failed message for down user" + let failedMsg = + Proto.defMessage @Proto.QualifiedNewOtrMessage + & #sender . Proto.client .~ (delClient ^?! hex) + & #recipients .~ [failedMsgForOtherUser, failedMsgForDownUser] + & #reportAll .~ Proto.defMessage + bindResponse (postProteusMessage delUser downBackendConv failedMsg) $ \resp -> + resp.status `shouldMatchInt` 521 + + -- Conversation creation with people from down backend should fail + bindResponse (postConversation delUser (defProteus {qualifiedUsers = [otherUser, downUser1]})) $ \resp -> + resp.status `shouldMatchInt` 503 + + -- Adding users to an up backend conversation should work even when one of + -- the participating backends is down + otherUser2 <- randomUser OtherDomain def + connectUsers delUser otherUser2 + bindResponse (addMembers delUser upBackendConv [otherUser2]) $ \resp -> + resp.status `shouldMatchInt` 200 + + -- Adding users from down backend to a conversation should also fail + bindResponse (addMembers delUser upBackendConv [downUser2]) $ \resp -> + resp.status `shouldMatchInt` 503 + + -- Removing users from an up backend conversation should work even when one + -- of the participating backends is down. + bindResponse (removeMember delUser upBackendConv otherUser2) $ \resp -> + resp.status `shouldMatchInt` 200 + + -- User deletions should eventually make it to the other backend. + deleteUser delUser + do + newMsgNotif <- awaitNotification otherUser otherClient noValue 1 isNewMessageNotif + newMsgNotif %. "payload.0.qualified_conversation" `shouldMatch` objQidObject upBackendConv + newMsgNotif %. "payload.0.data.text" `shouldMatchBase64` "success message for other user" + + memberJoinNotif <- awaitNotification otherUser otherClient (Just newMsgNotif) 1 isMemberJoinNotif + memberJoinNotif %. 
"payload.0.qualified_conversation" `shouldMatch` objQidObject upBackendConv + asListOf objQidObject (memberJoinNotif %. "payload.0.data.users") `shouldMatch` mapM objQidObject [otherUser2] + + -- TODO: Broken + -- delUserLeftDownConvNotif <- nPayload $ awaitNotification otherUser otherClient (Just newMsgNotif) 1 (allPreds [isConvLeaveNotif, isNotifConv downBackendConv]) + -- delUserLeftDownConvNotif %. "qualified_conversation" `shouldMatch` objQidObject downBackendConv + -- delUserLeftDownConvNotif %. "data.qualified_user_ids.0" `shouldMatch` objQidObject delUser + + delUserDeletedNotif <- nPayload $ awaitNotification otherUser otherClient (Just newMsgNotif) 1 isDeleteUserNotif + objQid delUserDeletedNotif `shouldMatch` objQid delUser + + runCodensity (startDynamicBackend downBackend mempty mempty) $ \_ -> do + newMsgNotif <- awaitNotification downUser1 downClient1 noValue 5 isNewMessageNotif + newMsgNotif %. "payload.0.qualified_conversation" `shouldMatch` objQidObject upBackendConv + newMsgNotif %. "payload.0.data.text" `shouldMatchBase64` "success message for down user" + + -- FUTUREWORK: Uncomment after fixing this bug: https://wearezeta.atlassian.net/browse/WPB-3664 + -- memberJoinNotif <- awaitNotification downUser1 downClient1 (Just newMsgNotif) 1 isMemberJoinNotif + -- memberJoinNotif %. "payload.0.qualified_conversation" `shouldMatch` objQidObject upBackendConv + -- asListOf objQidObject (memberJoinNotif %. 
"payload.0.data.users") `shouldMatch` mapM objQidObject [downUser2] + + let isDelUserLeaveDownConvNotif = + allPreds + [ isConvLeaveNotif, + isNotifConv downBackendConv, + isNotifForUser delUser + ] + void $ awaitNotification downUser1 downClient1 (Just newMsgNotif) 1 isDelUserLeaveDownConvNotif + + -- FUTUREWORK: Uncomment after fixing this bug: https://wearezeta.atlassian.net/browse/WPB-3664 + -- void $ awaitNotification downUser1 downClient1 (Just newMsgNotif) 1 (allPreds [isConvLeaveNotif, isNotifConv upBackendConv, isNotifForUser otherUser]) + -- void $ awaitNotification downUser1 downClient1 (Just newMsgNotif) 1 (allPreds [isConvLeaveNotif, isNotifConv upBackendConv, isNotifForUser delUser]) + + delUserDeletedNotif <- nPayload $ awaitNotification downUser1 downClient1 (Just newMsgNotif) 1 isDeleteUserNotif + objQid delUserDeletedNotif `shouldMatch` objQid delUser + +allPreds :: (Applicative f) => [a -> f Bool] -> a -> f Bool +allPreds [] _ = pure True +allPreds [p] x = p x +allPreds (p1 : ps) x = (&&) <$> p1 x <*> allPreds ps x + +isDeleteUserNotif :: MakesValue a => a -> App Bool +isDeleteUserNotif n = + nPayload n %. 
"type" `isEqual` "user.delete" + +isNewMessageNotif :: MakesValue a => a -> App Bool +isNewMessageNotif n = fieldEquals n "payload.0.type" "conversation.otr-message-add" + +isMemberJoinNotif :: MakesValue a => a -> App Bool +isMemberJoinNotif n = fieldEquals n "payload.0.type" "conversation.member-join" + +isConvLeaveNotif :: MakesValue a => a -> App Bool +isConvLeaveNotif n = fieldEquals n "payload.0.type" "conversation.member-leave" + +isNotifConv :: (MakesValue conv, MakesValue a) => conv -> a -> App Bool +isNotifConv conv n = fieldEquals n "payload.0.qualified_conversation" (objQidObject conv) + +isNotifForUser :: (MakesValue user, MakesValue a) => user -> a -> App Bool +isNotifForUser user n = fieldEquals n "payload.0.data.qualified_user_ids.0" (objQidObject user) + +fieldEquals :: (MakesValue a, MakesValue b) => a -> String -> b -> App Bool +fieldEquals a fieldSelector b = do + ma <- lookupField a fieldSelector `catchAll` const (pure Nothing) + case ma of + Nothing -> pure False + Just f -> + f `isEqual` b + +mkProteusRecipient :: (HasCallStack, MakesValue user, MakesValue client) => user -> client -> String -> App Proto.QualifiedUserEntry +mkProteusRecipient user client msg = do + userDomain <- objDomain user + userId <- LBS.toStrict . UUID.toByteString . fromJust . UUID.fromString <$> objId user + clientId <- (^?! hex) <$> objId client + pure $ + Proto.defMessage + & #domain .~ fromString userDomain + & #entries + .~ [ Proto.defMessage + & #user . #uuid .~ userId + & #clients + .~ [ Proto.defMessage + & #client . 
#client .~ clientId + & #text .~ fromString msg + ] + ] diff --git a/integration/test/Testlib/Assertions.hs b/integration/test/Testlib/Assertions.hs index 1d7a624d0a2..cbb19fc5c09 100644 --- a/integration/test/Testlib/Assertions.hs +++ b/integration/test/Testlib/Assertions.hs @@ -5,10 +5,13 @@ module Testlib.Assertions where import Control.Exception as E import Control.Monad.Reader import Data.Aeson (Value) +import Data.ByteString.Base64 qualified as B64 import Data.Char import Data.Foldable import Data.List import Data.Map qualified as Map +import Data.Text qualified as Text +import Data.Text.Encoding qualified as Text import GHC.Stack as Stack import System.FilePath import Testlib.JSON @@ -49,6 +52,17 @@ a `shouldMatch` b = do pb <- prettyJSON xb assertFailure $ "Actual:\n" <> pa <> "\nExpected:\n" <> pb +shouldMatchBase64 :: + (MakesValue a, MakesValue b, HasCallStack) => + -- | The actual value, in base64 + a -> + -- | The expected value, in plain text + b -> + App () +a `shouldMatchBase64` b = do + xa <- Text.decodeUtf8 . B64.decodeLenient . Text.encodeUtf8 . 
Text.pack <$> asString a + xa `shouldMatch` b + shouldNotMatch :: (MakesValue a, MakesValue b, HasCallStack) => -- | The actual value diff --git a/integration/test/Testlib/Cannon.hs b/integration/test/Testlib/Cannon.hs index daf14ae4bf6..4fefd3dfe3f 100644 --- a/integration/test/Testlib/Cannon.hs +++ b/integration/test/Testlib/Cannon.hs @@ -22,6 +22,7 @@ module Testlib.Cannon ( WebSocket (..), WSConnect (..), ToWSConnect (..), + AwaitResult (..), withWebSocket, withWebSockets, awaitNMatchesResult, @@ -31,6 +32,7 @@ module Testlib.Cannon awaitAtLeastNMatches, awaitNToMMatchesResult, awaitNToMMatches, + assertAwaitResult, nPayload, printAwaitResult, printAwaitAtLeastResult, @@ -406,10 +408,14 @@ awaitNMatches :: App [Value] awaitNMatches nExpected tSecs checkMatch ws = do res <- awaitNMatchesResult nExpected tSecs checkMatch ws + assertAwaitResult res + +assertAwaitResult :: HasCallStack => AwaitResult -> App [Value] +assertAwaitResult res = do if res.success then pure res.matches else do - let msgHeader = "Expected " <> show nExpected <> " matching events, but got " <> show (length res.matches) <> "." + let msgHeader = "Expected " <> show res.nMatchesExpected <> " matching events, but got " <> show (length res.matches) <> "." 
details <- ("Details:\n" <>) <$> prettyAwaitResult res assertFailure $ unlines [msgHeader, details] diff --git a/integration/test/Testlib/HTTP.hs b/integration/test/Testlib/HTTP.hs index f31bf2a6b4c..4168c709e6a 100644 --- a/integration/test/Testlib/HTTP.hs +++ b/integration/test/Testlib/HTTP.hs @@ -52,12 +52,19 @@ addBody body contentType req = addMLS :: ByteString -> HTTP.Request -> HTTP.Request addMLS bytes req = req - { HTTP.requestBody = HTTP.RequestBodyLBS (L.fromStrict bytes), + { HTTP.requestBody = HTTP.RequestBodyBS bytes, HTTP.requestHeaders = (fromString "Content-Type", fromString "message/mls") : HTTP.requestHeaders req } +addProtobuf :: ByteString -> HTTP.Request -> HTTP.Request +addProtobuf bytes req = + req + { HTTP.requestBody = HTTP.RequestBodyBS bytes, + HTTP.requestHeaders = (fromString "Content-Type", fromString "application/x-protobuf") : HTTP.requestHeaders req + } + addHeader :: String -> String -> HTTP.Request -> HTTP.Request addHeader name value req = req {HTTP.requestHeaders = (CI.mk . 
C8.pack $ name, C8.pack value) : HTTP.requestHeaders req} @@ -93,6 +100,9 @@ getJSON status resp = withResponse resp $ \r -> do r.status `shouldMatch` status r.json +assertSuccess :: Response -> App () +assertSuccess resp = withResponse resp $ \r -> r.status `shouldMatchRange` (200, 299) + onFailureAddResponse :: HasCallStack => Response -> App a -> App a onFailureAddResponse r m = App $ do e <- ask diff --git a/integration/test/Testlib/Types.hs b/integration/test/Testlib/Types.hs index 1e0aa03ab16..25ed9c640ed 100644 --- a/integration/test/Testlib/Types.hs +++ b/integration/test/Testlib/Types.hs @@ -29,6 +29,7 @@ import Network.HTTP.Types qualified as HTTP import Network.URI import Testlib.Env import Testlib.Printing +import UnliftIO (MonadUnliftIO) import Prelude data Response = Response @@ -107,14 +108,11 @@ newtype App a = App {unApp :: ReaderT Env IO a} MonadCatch, MonadThrow, MonadReader Env, - MonadBase IO + MonadBase IO, + MonadUnliftIO, + MonadBaseControl IO ) -instance MonadBaseControl IO App where - type StM App a = StM (ReaderT Env IO) a - liftBaseWith f = App (liftBaseWith (\g -> f (g . unApp))) - restoreM = App . 
restoreM - runAppWithEnv :: Env -> App a -> IO a runAppWithEnv e m = runReaderT (unApp m) e From 93d9d839dba3d8421f6b85cbed2fd1bfc9a0d73b Mon Sep 17 00:00:00 2001 From: Akshay Mankar Date: Mon, 14 Aug 2023 15:17:47 +0200 Subject: [PATCH 06/12] regen nix --- integration/default.nix | 2 ++ 1 file changed, 2 insertions(+) diff --git a/integration/default.nix b/integration/default.nix index 28e47e2f002..e02e43a5c6f 100644 --- a/integration/default.nix +++ b/integration/default.nix @@ -35,6 +35,7 @@ , network-uri , optparse-applicative , process +, proto-lens , random , raw-strings-qq , retry @@ -93,6 +94,7 @@ mkDerivation { network-uri optparse-applicative process + proto-lens random raw-strings-qq retry From 542f4dd2618271773e63b56da59e85f32ee0c991 Mon Sep 17 00:00:00 2001 From: Akshay Mankar Date: Mon, 14 Aug 2023 15:35:59 +0200 Subject: [PATCH 07/12] Little hack --- integration/test/Test/Federation.hs | 4 +++- integration/test/Testlib/Assertions.hs | 13 +++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/integration/test/Test/Federation.hs b/integration/test/Test/Federation.hs index 0a1814a5204..5dcd43d2634 100644 --- a/integration/test/Test/Federation.hs +++ b/integration/test/Test/Federation.hs @@ -63,7 +63,9 @@ testNotificationsForOfflineBackends = do & #recipients .~ [failedMsgForOtherUser, failedMsgForDownUser] & #reportAll .~ Proto.defMessage bindResponse (postProteusMessage delUser downBackendConv failedMsg) $ \resp -> - resp.status `shouldMatchInt` 521 + -- Due to the way federation breaks in local env vs K8s, it can return 521 + -- (local) or 533 (K8s). 
+ resp.status `shouldMatchOneOf` [Number 521, Number 533] -- Conversation creation with people from down backend should fail bindResponse (postConversation delUser (defProteus {qualifiedUsers = [otherUser, downUser1]})) $ \resp -> diff --git a/integration/test/Testlib/Assertions.hs b/integration/test/Testlib/Assertions.hs index cbb19fc5c09..a33f0e6835d 100644 --- a/integration/test/Testlib/Assertions.hs +++ b/integration/test/Testlib/Assertions.hs @@ -121,6 +121,19 @@ shouldMatchSet a b = do lb <- fmap sort (asList b) la `shouldMatch` lb +shouldMatchOneOf :: + (MakesValue a, MakesValue b, HasCallStack) => + a -> + b -> + App () +shouldMatchOneOf a b = do + lb <- asList b + xa <- make a + unless (xa `elem` lb) $ do + pa <- prettyJSON a + pb <- prettyJSON b + assertFailure $ "Expected:\n" <> pa <> "\n to match at least one of:\n" <> pb + shouldContainString :: HasCallStack => -- | The actual value From 3db9c13fb493124c91a1dab0b69607384ae38751 Mon Sep 17 00:00:00 2001 From: Akshay Mankar Date: Mon, 14 Aug 2023 15:51:26 +0200 Subject: [PATCH 08/12] helm-var-integration: Workaround bug with federation --- hack/helm_vars/common.yaml.gotmpl | 4 ++++ hack/helm_vars/wire-server/values.yaml.gotmpl | 7 +++++++ hack/helmfile.yaml | 12 ++++++++++++ 3 files changed, 23 insertions(+) diff --git a/hack/helm_vars/common.yaml.gotmpl b/hack/helm_vars/common.yaml.gotmpl index b5748c96012..010aa42dadb 100644 --- a/hack/helm_vars/common.yaml.gotmpl +++ b/hack/helm_vars/common.yaml.gotmpl @@ -5,3 +5,7 @@ federationDomain2: {{ requiredEnv "FEDERATION_DOMAIN_2" }} ingressChart: {{ requiredEnv "INGRESS_CHART" }} rabbitmqUsername: guest rabbitmqPassword: guest + +dynBackendDomain1: dynamic-backend-1.{{ requiredEnv "NAMESPACE_1" }}.svc.cluster.local +dynBackendDomain2: dynamic-backend-2.{{ requiredEnv "NAMESPACE_1" }}.svc.cluster.local +dynBackendDomain3: dynamic-backend-3.{{ requiredEnv "NAMESPACE_1" }}.svc.cluster.local \ No newline at end of file diff --git 
a/hack/helm_vars/wire-server/values.yaml.gotmpl b/hack/helm_vars/wire-server/values.yaml.gotmpl index a20f2280bb6..4bd06d953fe 100644 --- a/hack/helm_vars/wire-server/values.yaml.gotmpl +++ b/hack/helm_vars/wire-server/values.yaml.gotmpl @@ -85,6 +85,13 @@ brig: search_policy: full_search - domain: federation-test-helper.{{ .Release.Namespace }}.svc.cluster.local search_policy: full_search + # Remove these after fixing https://wearezeta.atlassian.net/browse/WPB-3796 + - domain: dyn-backend-1 + search_policy: full_search + - domain: dyn-backend-2 + search_policy: full_search + - domain: dyn-backend-3 + search_policy: full_search setFederationStrategy: allowAll setFederationDomainConfigsUpdateFreq: 10 set2FACodeGenerationDelaySecs: 5 diff --git a/hack/helmfile.yaml b/hack/helmfile.yaml index d9568f2e5a8..b40cd73d623 100644 --- a/hack/helmfile.yaml +++ b/hack/helmfile.yaml @@ -129,6 +129,12 @@ releases: value: {{ .Values.federationDomain1 }} - name: brig.config.optSettings.setFederationDomainConfigs[0].domain value: {{ .Values.federationDomain2 }} + - name: brig.config.optSettings.setFederationDomainConfigs[2].domain + value: {{ .Values.dynBackendDomain1 }} + - name: brig.config.optSettings.setFederationDomainConfigs[3].domain + value: {{ .Values.dynBackendDomain2 }} + - name: brig.config.optSettings.setFederationDomainConfigs[4].domain + value: {{ .Values.dynBackendDomain3 }} needs: - 'databases-ephemeral' @@ -147,5 +153,11 @@ releases: value: {{ .Values.federationDomain2 }} - name: brig.config.optSettings.setFederationDomainConfigs[0].domain value: {{ .Values.federationDomain1 }} + - name: brig.config.optSettings.setFederationDomainConfigs[2].domain + value: {{ .Values.dynBackendDomain1 }} + - name: brig.config.optSettings.setFederationDomainConfigs[3].domain + value: {{ .Values.dynBackendDomain2 }} + - name: brig.config.optSettings.setFederationDomainConfigs[4].domain + value: {{ .Values.dynBackendDomain3 }} needs: - 'databases-ephemeral' From 
de56d2254c94349a5bfbb8a61cc763041b38c150 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marko=20Dimja=C5=A1evi=C4=87?= Date: Mon, 21 Aug 2023 11:12:57 +0200 Subject: [PATCH 09/12] Integration test: fix asserted status code --- integration/test/Test/Federation.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/integration/test/Test/Federation.hs b/integration/test/Test/Federation.hs index 5dcd43d2634..ee0c66df3d1 100644 --- a/integration/test/Test/Federation.hs +++ b/integration/test/Test/Federation.hs @@ -69,7 +69,7 @@ testNotificationsForOfflineBackends = do -- Conversation creation with people from down backend should fail bindResponse (postConversation delUser (defProteus {qualifiedUsers = [otherUser, downUser1]})) $ \resp -> - resp.status `shouldMatchInt` 503 + resp.status `shouldMatchInt` 533 -- Adding users to an up backend conversation should work even when one of -- the participating backends is down @@ -80,7 +80,7 @@ testNotificationsForOfflineBackends = do -- Adding users from down backend to a conversation should also fail bindResponse (addMembers delUser upBackendConv [downUser2]) $ \resp -> - resp.status `shouldMatchInt` 503 + resp.status `shouldMatchInt` 533 -- Removing users from an up backend conversation should work even when one -- of the participating backends is down. From 54b3587a8099a0dff314312da8b0756c71611d72 Mon Sep 17 00:00:00 2001 From: Akshay Mankar Date: Mon, 21 Aug 2023 11:45:02 +0200 Subject: [PATCH 10/12] integration: Assert about down backend conv after down backend is brought up --- integration/test/Test/Federation.hs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/integration/test/Test/Federation.hs b/integration/test/Test/Federation.hs index ee0c66df3d1..32fda232e01 100644 --- a/integration/test/Test/Federation.hs +++ b/integration/test/Test/Federation.hs @@ -98,11 +98,6 @@ testNotificationsForOfflineBackends = do memberJoinNotif %. 
"payload.0.qualified_conversation" `shouldMatch` objQidObject upBackendConv asListOf objQidObject (memberJoinNotif %. "payload.0.data.users") `shouldMatch` mapM objQidObject [otherUser2] - -- TODO: Broken - -- delUserLeftDownConvNotif <- nPayload $ awaitNotification otherUser otherClient (Just newMsgNotif) 1 (allPreds [isConvLeaveNotif, isNotifConv downBackendConv]) - -- delUserLeftDownConvNotif %. "qualified_conversation" `shouldMatch` objQidObject downBackendConv - -- delUserLeftDownConvNotif %. "data.qualified_user_ids.0" `shouldMatch` objQidObject delUser - delUserDeletedNotif <- nPayload $ awaitNotification otherUser otherClient (Just newMsgNotif) 1 isDeleteUserNotif objQid delUserDeletedNotif `shouldMatch` objQid delUser @@ -123,6 +118,7 @@ testNotificationsForOfflineBackends = do isNotifForUser delUser ] void $ awaitNotification downUser1 downClient1 (Just newMsgNotif) 1 isDelUserLeaveDownConvNotif + void $ awaitNotification otherUser otherClient noValue 1 isDelUserLeaveDownConvNotif -- FUTUREWORK: Uncomment after fixing this bug: https://wearezeta.atlassian.net/browse/WPB-3664 -- void $ awaitNotification downUser1 downClient1 (Just newMsgNotif) 1 (allPreds [isConvLeaveNotif, isNotifConv upBackendConv, isNotifForUser otherUser]) From 370a8e4dc9d14e306b4328dc4673b29b7493e180 Mon Sep 17 00:00:00 2001 From: Akshay Mankar Date: Mon, 21 Aug 2023 12:26:57 +0200 Subject: [PATCH 11/12] integration: Move utility functions in appropriate modules --- integration/test/API/Galley.hs | 28 ++++++++++++++- integration/test/Notifications.hs | 19 ++++++++++ integration/test/Test/Federation.hs | 48 -------------------------- integration/test/Testlib/Assertions.hs | 17 --------- integration/test/Testlib/JSON.hs | 25 ++++++++++++++ 5 files changed, 71 insertions(+), 66 deletions(-) diff --git a/integration/test/API/Galley.hs b/integration/test/API/Galley.hs index 5ecfad1d739..d03ba1322c9 100644 --- a/integration/test/API/Galley.hs +++ b/integration/test/API/Galley.hs @@ -1,8 
+1,16 @@ +{-# LANGUAGE OverloadedLabels #-} + module API.Galley where +import Control.Lens hiding ((.=)) +import Control.Monad.Reader import Data.Aeson qualified as Aeson +import Data.ByteString.Lazy qualified as LBS import Data.ProtoLens qualified as Proto -import Proto.Otr +import Data.ProtoLens.Labels () +import Data.UUID qualified as UUID +import Numeric.Lens +import Proto.Otr as Proto import Testlib.Prelude data CreateConv = CreateConv @@ -162,6 +170,24 @@ postProteusMessage user conv msgs = do req <- baseRequest user Galley Versioned ("/conversations/" <> convDomain <> "/" <> convId <> "/proteus/messages") submit "POST" (addProtobuf bytes req) +mkProteusRecipient :: (HasCallStack, MakesValue user, MakesValue client) => user -> client -> String -> App Proto.QualifiedUserEntry +mkProteusRecipient user client msg = do + userDomain <- objDomain user + userId <- LBS.toStrict . UUID.toByteString . fromJust . UUID.fromString <$> objId user + clientId <- (^?! hex) <$> objId client + pure $ + Proto.defMessage + & #domain .~ fromString userDomain + & #entries + .~ [ Proto.defMessage + & #user . #uuid .~ userId + & #clients + .~ [ Proto.defMessage + & #client . #client .~ clientId + & #text .~ fromString msg + ] + ] + getGroupInfo :: (HasCallStack, MakesValue user, MakesValue conv) => user -> diff --git a/integration/test/Notifications.hs b/integration/test/Notifications.hs index a866d98a945..0364bee7ceb 100644 --- a/integration/test/Notifications.hs +++ b/integration/test/Notifications.hs @@ -51,3 +51,22 @@ awaitNotification :: awaitNotification user client lastNotifId tSecs selector = do since0 <- mapM objId lastNotifId head <$> awaitNotifications user client since0 tSecs 1 selector + +isDeleteUserNotif :: MakesValue a => a -> App Bool +isDeleteUserNotif n = + nPayload n %. 
"type" `isEqual` "user.delete" + +isNewMessageNotif :: MakesValue a => a -> App Bool +isNewMessageNotif n = fieldEquals n "payload.0.type" "conversation.otr-message-add" + +isMemberJoinNotif :: MakesValue a => a -> App Bool +isMemberJoinNotif n = fieldEquals n "payload.0.type" "conversation.member-join" + +isConvLeaveNotif :: MakesValue a => a -> App Bool +isConvLeaveNotif n = fieldEquals n "payload.0.type" "conversation.member-leave" + +isNotifConv :: (MakesValue conv, MakesValue a) => conv -> a -> App Bool +isNotifConv conv n = fieldEquals n "payload.0.qualified_conversation" (objQidObject conv) + +isNotifForUser :: (MakesValue user, MakesValue a) => user -> a -> App Bool +isNotifForUser user n = fieldEquals n "payload.0.data.qualified_user_ids.0" (objQidObject user) diff --git a/integration/test/Test/Federation.hs b/integration/test/Test/Federation.hs index 32fda232e01..2636827be65 100644 --- a/integration/test/Test/Federation.hs +++ b/integration/test/Test/Federation.hs @@ -6,13 +6,10 @@ module Test.Federation where import API.Brig qualified as API import API.Galley import Control.Lens -import Control.Monad.Catch import Control.Monad.Codensity import Control.Monad.Reader -import Data.ByteString.Lazy qualified as LBS import Data.ProtoLens qualified as Proto import Data.ProtoLens.Labels () -import Data.UUID qualified as UUID import Notifications import Numeric.Lens import Proto.Otr qualified as Proto @@ -131,48 +128,3 @@ allPreds :: (Applicative f) => [a -> f Bool] -> a -> f Bool allPreds [] _ = pure True allPreds [p] x = p x allPreds (p1 : ps) x = (&&) <$> p1 x <*> allPreds ps x - -isDeleteUserNotif :: MakesValue a => a -> App Bool -isDeleteUserNotif n = - nPayload n %. 
"type" `isEqual` "user.delete" - -isNewMessageNotif :: MakesValue a => a -> App Bool -isNewMessageNotif n = fieldEquals n "payload.0.type" "conversation.otr-message-add" - -isMemberJoinNotif :: MakesValue a => a -> App Bool -isMemberJoinNotif n = fieldEquals n "payload.0.type" "conversation.member-join" - -isConvLeaveNotif :: MakesValue a => a -> App Bool -isConvLeaveNotif n = fieldEquals n "payload.0.type" "conversation.member-leave" - -isNotifConv :: (MakesValue conv, MakesValue a) => conv -> a -> App Bool -isNotifConv conv n = fieldEquals n "payload.0.qualified_conversation" (objQidObject conv) - -isNotifForUser :: (MakesValue user, MakesValue a) => user -> a -> App Bool -isNotifForUser user n = fieldEquals n "payload.0.data.qualified_user_ids.0" (objQidObject user) - -fieldEquals :: (MakesValue a, MakesValue b) => a -> String -> b -> App Bool -fieldEquals a fieldSelector b = do - ma <- lookupField a fieldSelector `catchAll` const (pure Nothing) - case ma of - Nothing -> pure False - Just f -> - f `isEqual` b - -mkProteusRecipient :: (HasCallStack, MakesValue user, MakesValue client) => user -> client -> String -> App Proto.QualifiedUserEntry -mkProteusRecipient user client msg = do - userDomain <- objDomain user - userId <- LBS.toStrict . UUID.toByteString . fromJust . UUID.fromString <$> objId user - clientId <- (^?! hex) <$> objId client - pure $ - Proto.defMessage - & #domain .~ fromString userDomain - & #entries - .~ [ Proto.defMessage - & #user . #uuid .~ userId - & #clients - .~ [ Proto.defMessage - & #client . 
#client .~ clientId - & #text .~ fromString msg - ] - ] diff --git a/integration/test/Testlib/Assertions.hs b/integration/test/Testlib/Assertions.hs index a33f0e6835d..ef2b2c46eb0 100644 --- a/integration/test/Testlib/Assertions.hs +++ b/integration/test/Testlib/Assertions.hs @@ -4,7 +4,6 @@ module Testlib.Assertions where import Control.Exception as E import Control.Monad.Reader -import Data.Aeson (Value) import Data.ByteString.Base64 qualified as B64 import Data.Char import Data.Foldable @@ -145,22 +144,6 @@ super `shouldContainString` sub = do unless (sub `isInfixOf` super) $ do assertFailure $ "String:\n" <> show super <> "\nDoes not contain:\n" <> show sub -liftP2 :: - (MakesValue a, MakesValue b, HasCallStack) => - (Value -> Value -> c) -> - a -> - b -> - App c -liftP2 f a b = do - f <$> make a <*> make b - -isEqual :: - (MakesValue a, MakesValue b, HasCallStack) => - a -> - b -> - App Bool -isEqual = liftP2 (==) - printFailureDetails :: AssertionFailure -> IO String printFailureDetails (AssertionFailure stack mbResponse msg) = do s <- prettierCallStack stack diff --git a/integration/test/Testlib/JSON.hs b/integration/test/Testlib/JSON.hs index 3c3d2132efa..a5c932d8741 100644 --- a/integration/test/Testlib/JSON.hs +++ b/integration/test/Testlib/JSON.hs @@ -1,6 +1,7 @@ module Testlib.JSON where import Control.Monad +import Control.Monad.Catch import Control.Monad.IO.Class import Control.Monad.Trans.Maybe import Data.Aeson hiding ((.=)) @@ -120,6 +121,30 @@ asBool x = App Value (%.) 
x k = lookupField x k >>= assertField x k +isEqual :: + (MakesValue a, MakesValue b, HasCallStack) => + a -> + b -> + App Bool +isEqual = liftP2 (==) + +liftP2 :: + (MakesValue a, MakesValue b, HasCallStack) => + (Value -> Value -> c) -> + a -> + b -> + App c +liftP2 f a b = do + f <$> make a <*> make b + +fieldEquals :: (MakesValue a, MakesValue b) => a -> String -> b -> App Bool +fieldEquals a fieldSelector b = do + ma <- lookupField a fieldSelector `catchAll` const (pure Nothing) + case ma of + Nothing -> pure False + Just f -> + f `isEqual` b + assertField :: (HasCallStack, MakesValue a) => a -> String -> Maybe Value -> App Value assertField x k Nothing = assertFailureWithJSON x $ "Field \"" <> k <> "\" is missing from object:" assertField _ _ (Just x) = pure x From 6e9c2dd248a26a0426bcd8d7b3f967e0cfbcfa58 Mon Sep 17 00:00:00 2001 From: Akshay Mankar Date: Mon, 21 Aug 2023 13:12:46 +0200 Subject: [PATCH 12/12] integration-test.sh: Run new integration test suite first --- hack/bin/integration-test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hack/bin/integration-test.sh b/hack/bin/integration-test.sh index 27f85d0275e..841a3172fd8 100755 --- a/hack/bin/integration-test.sh +++ b/hack/bin/integration-test.sh @@ -11,7 +11,7 @@ UPLOAD_LOGS=${UPLOAD_LOGS:-0} echo "Running integration tests on wire-server with parallelism=${HELM_PARALLELISM} ..." CHART=wire-server -tests=(stern galley cargohold gundeck federator spar brig integration) +tests=(integration stern galley cargohold gundeck federator spar brig) cleanup() { if (( CLEANUP_LOCAL_FILES > 0 )); then