From 2005c8637548bf27c387faa507e9aa69ca65b073 Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 30 Apr 2024 15:59:41 +0200 Subject: [PATCH 01/25] Added fixture waiting unti SaaS database is running --- doc/changes/changes_0.3.0.md | 4 +++ poetry.lock | 16 +++++++++- pyproject.toml | 1 + test/integration/api_access.py | 48 +++++++++++++++++++++++++++++- test/integration/conftest.py | 7 +++++ test/integration/databases_test.py | 14 +++++++++ 6 files changed, 88 insertions(+), 2 deletions(-) diff --git a/doc/changes/changes_0.3.0.md b/doc/changes/changes_0.3.0.md index 810d185..70be594 100644 --- a/doc/changes/changes_0.3.0.md +++ b/doc/changes/changes_0.3.0.md @@ -8,3 +8,7 @@ This release adds integration tests for the most important calls to SaaS API. * #21: Added integration test for operation "create database" * #23: Added integration test for operation "add IP to whitelist" + +## Feature + +* #14: Added fixture waiting unti SaaS database is running diff --git a/poetry.lock b/poetry.lock index 625aa03..6f2bee5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1606,6 +1606,20 @@ files = [ lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + [[package]] name = "tokenize-rt" version = "5.2.0" @@ -1740,4 +1754,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.8.0,<4.0" -content-hash = "ad4b180534cde9f997bdcf957befe72feec70ee062f48b8099ebfccc5869b165" +content-hash = "af45210362c425328aeef0fcce6ac508c3475f64e655fe874b45f8a48967e57c" diff --git a/pyproject.toml b/pyproject.toml index 3fe57fa..52cbb29 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ python = ">=3.8.0,<4.0" requests = "^2.31.0" types-requests = "^2.31.0.6" ifaddr = "^0.2.0" +tenacity = "^8.2.3" [build-system] diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 5fd3a83..bc83eab 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -1,24 +1,36 @@ from typing import Iterable from contextlib import contextmanager -from datetime import datetime +from datetime import datetime, timedelta +from tenacity.wait import wait_fixed +from tenacity.stop import stop_after_delay from exasol.saas.client import openapi +from exasol.saas.client.openapi.models.status import Status from exasol.saas.client.openapi.api.databases import ( create_database, delete_database, list_databases, + get_database, ) from exasol.saas.client.openapi.api.security import ( list_allowed_i_ps, add_allowed_ip, delete_allowed_ip, ) +from tenacity import retry, TryAgain def timestamp() -> str: return f'{datetime.now().timestamp():.0f}' +class DatabaseStartupFailure(Exception): + """ + If a SaaS database instance during startup reports a status other than + transitional or successful. 
+ """ + + def create_saas_client( host: str, pat: str, @@ -88,6 +100,40 @@ def database( if not keep and db: self.delete_database(db.id, ignore_delete_failure) + def get_database(self, database_id: str): + return get_database.sync( + self._account_id, + database_id, + client=self._client, + ) + + def wait_until_running( + self, + database_id: str, + timeout: timedelta = timedelta(minutes=30), + interval: timedelta = timedelta(minutes=2), + ) -> str: + transitional = [ + Status.TOCREATE, + Status.CREATING, + Status.SCALING, + ] + success = [ + Status.RUNNING, + ] + + @retry(wait=wait_fixed(interval), stop=stop_after_delay(timeout)) + def poll_status(): + db = self.get_database(database_id) + if db.status in transitional: + print(f'status = {db.status}') + raise TryAgain + return db.status + + if poll_status() not in success: + raise DatabaseStartupFailure() + + def list_allowed_ip_ids(self) -> Iterable[openapi.models.allowed_ip.AllowedIP]: ips = list_allowed_i_ps.sync( self._account_id, diff --git a/test/integration/conftest.py b/test/integration/conftest.py index b407910..2a58cbc 100644 --- a/test/integration/conftest.py +++ b/test/integration/conftest.py @@ -33,3 +33,10 @@ def saas_database(api_access) -> openapi.models.database.Database: """ with api_access.database() as db: yield db + + +@pytest.fixture(scope="session") +def operational_saas_database_id(api_access) -> str: + with api_access.database() as db: + api_access.wait_until_running(db.id) + yield db diff --git a/test/integration/databases_test.py b/test/integration/databases_test.py index 80c76d5..597224d 100644 --- a/test/integration/databases_test.py +++ b/test/integration/databases_test.py @@ -1,4 +1,7 @@ +import pytest from exasol.saas.client import openapi +from tenacity import RetryError +from datetime import timedelta def test_lifecycle(api_access): @@ -24,3 +27,14 @@ def test_lifecycle(api_access): # delete database and verify database is not listed anymore testee.delete_database(db.id) assert db.id not in testee.list_database_ids() + + +def test_poll(api_access): + with api_access.database() as db: + print(f'{db.status}') + with pytest.raises(RetryError): + api_access.wait_until_running( + db.id, + timeout=timedelta(seconds=3), + interval=timedelta(seconds=1), + ) From 4e5152288def007f4982059e15a6963371dffc61 Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 30 Apr 2024 16:00:35 +0200 Subject: [PATCH 02/25] fixed typo in changes file --- doc/changes/changes_0.3.0.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/changes/changes_0.3.0.md b/doc/changes/changes_0.3.0.md index 70be594..bd68c9a 100644 --- a/doc/changes/changes_0.3.0.md +++ b/doc/changes/changes_0.3.0.md @@ -11,4 +11,4 @@ This release adds integration tests for the most important calls to SaaS API. ## Feature -* #14: Added fixture waiting unti SaaS database is running +* #14: Added fixture waiting until SaaS database is running From 7e1cb916304bbae8fce554fd2be9182f003eb649 Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 30 Apr 2024 16:11:33 +0200 Subject: [PATCH 03/25] Fixed merge errors --- doc/changes/changes_0.3.0.md | 3 --- pyproject.toml | 3 --- 2 files changed, 6 deletions(-) diff --git a/doc/changes/changes_0.3.0.md b/doc/changes/changes_0.3.0.md index df9495b..8a5249d 100644 --- a/doc/changes/changes_0.3.0.md +++ b/doc/changes/changes_0.3.0.md @@ -8,11 +8,8 @@ This release adds integration tests for the most important calls to SaaS API. 
* #21: Added integration test for operation "create database" * #23: Added integration test for operation "add IP to whitelist" -<<<<<<< HEAD ## Feature * #14: Added fixture waiting until SaaS database is running -======= * #25: Fixed transitive dependencies required by generated API client ->>>>>>> main diff --git a/pyproject.toml b/pyproject.toml index 06ee560..f76796f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,14 +25,11 @@ python = ">=3.8.0,<4.0" requests = "^2.31.0" types-requests = "^2.31.0.6" ifaddr = "^0.2.0" -<<<<<<< HEAD tenacity = "^8.2.3" -======= # generated by openapi-python-client httpx = ">=0.20.0,<0.28.0" attrs = ">=21.3.0" python-dateutil = "^2.8.0" ->>>>>>> main [build-system] From de6a01fe57ade3d3789c722f283e44d68929482c Mon Sep 17 00:00:00 2001 From: ckunki Date: Mon, 6 May 2024 10:17:15 +0200 Subject: [PATCH 04/25] Fixed review findings --- test/integration/api_access.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index bc83eab..52ab399 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -27,7 +27,7 @@ def timestamp() -> str: class DatabaseStartupFailure(Exception): """ If a SaaS database instance during startup reports a status other than - transitional or successful. + successful. """ @@ -100,7 +100,7 @@ def database( if not keep and db: self.delete_database(db.id, ignore_delete_failure) - def get_database(self, database_id: str): + def get_database(self, database_id: str) -> -> openapi.models.database.Database: return get_database.sync( self._account_id, database_id, @@ -113,11 +113,6 @@ def wait_until_running( timeout: timedelta = timedelta(minutes=30), interval: timedelta = timedelta(minutes=2), ) -> str: - transitional = [ - Status.TOCREATE, - Status.CREATING, - Status.SCALING, - ] success = [ Status.RUNNING, ] @@ -125,7 +120,7 @@ def wait_until_running( @retry(wait=wait_fixed(interval), stop=stop_after_delay(timeout)) def poll_status(): db = self.get_database(database_id) - if db.status in transitional: + if db.status not in success: print(f'status = {db.status}') raise TryAgain return db.status From dad0f116fa78795480e98f45ff3bf234a466e910 Mon Sep 17 00:00:00 2001 From: ckunki Date: Mon, 6 May 2024 10:21:48 +0200 Subject: [PATCH 05/25] Fixed review findings --- test/integration/api_access.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 52ab399..96265b9 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -100,7 +100,7 @@ def database( if not keep and db: self.delete_database(db.id, ignore_delete_failure) - def get_database(self, database_id: str) -> -> openapi.models.database.Database: + def get_database(self, database_id: str) -> openapi.models.database.Database: return get_database.sync( self._account_id, database_id, From ebef57c6bcb87d9e2ce59ca53ddc46d456124cec Mon Sep 17 00:00:00 2001 From: ckunki Date: Mon, 6 May 2024 14:03:52 +0200 Subject: [PATCH 06/25] Added user name to resources in Exasol Saas --- test/integration/api_access.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 96265b9..9ace137 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -1,3 +1,4 @@ +import os from typing import Iterable from contextlib import contextmanager from datetime import datetime, 
timedelta @@ -20,8 +21,10 @@ from tenacity import retry, TryAgain -def timestamp() -> str: - return f'{datetime.now().timestamp():.0f}' +def _timestamp_name() -> str: + username = os.login() + timstamp = f'{datetime.now().timestamp():.0f}' + return f"{username}-pytest-{timestamp}" class DatabaseStartupFailure(Exception): @@ -63,7 +66,7 @@ def create_database(self, cluster_size: str = "XS") -> openapi.models.database.D self._account_id, client=self._client, body=openapi.models.CreateDatabase( - name=f"pytest-{timestamp()}", + name=_timestamp_name(), initial_cluster=cluster_spec, provider="aws", region='us-east-1', @@ -144,7 +147,7 @@ def add_allowed_ip(self, cidr_ip: str = "0.0.0.0/0") -> openapi.models.allowed_i * ::/0 = all ipv6 """ rule = openapi.models.create_allowed_ip.CreateAllowedIP( - name=f"pytest-{timestamp()}", + name=_timestamp_name(), cidr_ip=cidr_ip, ) return add_allowed_ip.sync( From 97f44ccead4f5dad831a1dc0aa1de3e070a3ee1f Mon Sep 17 00:00:00 2001 From: ckunki Date: Mon, 6 May 2024 14:13:48 +0200 Subject: [PATCH 07/25] fixed method call --- test/integration/api_access.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 9ace137..334fc7a 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -1,4 +1,5 @@ import os + from typing import Iterable from contextlib import contextmanager from datetime import datetime, timedelta @@ -22,7 +23,7 @@ def _timestamp_name() -> str: - username = os.login() + username = os.getlogin() timstamp = f'{datetime.now().timestamp():.0f}' return f"{username}-pytest-{timestamp}" @@ -100,7 +101,7 @@ def database( db = self.create_database() yield db finally: - if not keep and db: + if db and not keep: self.delete_database(db.id, ignore_delete_failure) def get_database(self, database_id: str) -> openapi.models.database.Database: @@ -173,5 +174,5 @@ def allowed_ip( ip = self.add_allowed_ip(cidr_ip) yield ip finally: - if not keep and ip: + if ip and not keep: self.delete_allowed_ip(ip.id, ignore_delete_failure) From 8a400e4ceb50177998beae8941ccefa9997384eb Mon Sep 17 00:00:00 2001 From: ckunki Date: Mon, 6 May 2024 14:22:19 +0200 Subject: [PATCH 08/25] fixed _timestamp_name() --- test/integration/api_access.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 334fc7a..1956dc1 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -24,7 +24,7 @@ def _timestamp_name() -> str: username = os.getlogin() - timstamp = f'{datetime.now().timestamp():.0f}' + timestamp = f'{datetime.now().timestamp():.0f}' return f"{username}-pytest-{timestamp}" From 0985a0d455b4a81c55094def983951dd60edd14f Mon Sep 17 00:00:00 2001 From: ckunki Date: Mon, 6 May 2024 14:25:51 +0200 Subject: [PATCH 09/25] replaced os.getlogin() by getpass.getuser() --- test/integration/api_access.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 1956dc1..0a0f43d 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -1,4 +1,4 @@ -import os +import getpass from typing import Iterable from contextlib import contextmanager @@ -23,7 +23,7 @@ def _timestamp_name() -> str: - username = os.getlogin() + username = getpass.getuser() timestamp = f'{datetime.now().timestamp():.0f}' return f"{username}-pytest-{timestamp}" From 
bb93ddab782847d09e1a3f8b14d70d076bdfb72d Mon Sep 17 00:00:00 2001 From: ckunki Date: Mon, 6 May 2024 14:32:24 +0200 Subject: [PATCH 10/25] shortened database name --- test/integration/api_access.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 0a0f43d..0f6a15d 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -25,7 +25,7 @@ def _timestamp_name() -> str: username = getpass.getuser() timestamp = f'{datetime.now().timestamp():.0f}' - return f"{username}-pytest-{timestamp}" + return f"{username}-{timestamp}" class DatabaseStartupFailure(Exception): From 6b328bfe4beae42351c11ed6289f9013bd03b4fd Mon Sep 17 00:00:00 2001 From: ckunki Date: Mon, 6 May 2024 14:43:08 +0200 Subject: [PATCH 11/25] Added log messages for deleting the database --- test/integration/api_access.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 0f6a15d..f27370e 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -1,4 +1,5 @@ import getpass +import logging from typing import Iterable from contextlib import contextmanager @@ -22,6 +23,10 @@ from tenacity import retry, TryAgain +LOG = logging.getLogger(__name__) +LOG.setLevel(logging.INFO) + + def _timestamp_name() -> str: username = getpass.getuser() timestamp = f'{datetime.now().timestamp():.0f}' @@ -102,7 +107,16 @@ def database( yield db finally: if db and not keep: + LOG.info( + f"deleting database {db.name}," + f" ignore_delete_failure = {ignore_delete_failure}" + ) self.delete_database(db.id, ignore_delete_failure) + LOG.info(f"deleted database successully") + elif not db: + LOG.warning("cannot delete db None") + else: + LOG.info(f"keeping database {db.name} as keep = {keep}") def get_database(self, database_id: str) -> openapi.models.database.Database: return get_database.sync( From b21c96a560345dd2bf92b90a60f98778aca06242 Mon Sep 17 00:00:00 2001 From: ckunki Date: Mon, 6 May 2024 16:30:17 +0200 Subject: [PATCH 12/25] Make pytest display log output of tests cases in CI build --- .github/workflows/checks.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 9041220..eab5256 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -109,6 +109,7 @@ jobs: SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} + PYTEST_ADDOPTS: -o log_cli=true -o log_cli_level=INFO run: poetry run nox -s coverage -- -- - name: Upload Artifacts From 2da0580cfeafe2ec3aed46908a3c3c1de0061a0c Mon Sep 17 00:00:00 2001 From: ckunki Date: Mon, 6 May 2024 17:27:50 +0200 Subject: [PATCH 13/25] Added sleep before deleting the database --- test/integration/api_access.py | 39 ++++++++++++++++++++++++------ test/integration/databases_test.py | 5 +++- 2 files changed, 35 insertions(+), 9 deletions(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index f27370e..3622725 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -1,5 +1,6 @@ import getpass import logging +import time from typing import Iterable from contextlib import contextmanager @@ -27,6 +28,14 @@ LOG.setLevel(logging.INFO) +# For auto-stopping idle database clusters +MINIMUM_IDLE_TIME = timedelta(minutes=15) + + +# If 
deleting a database too early, then logging and accounting could be invalid. +MINIMUM_LIFETIME = timedelta(seconds=30) + + def _timestamp_name() -> str: username = getpass.getuser() timestamp = f'{datetime.now().timestamp():.0f}' @@ -67,6 +76,10 @@ def create_database(self, cluster_size: str = "XS") -> openapi.models.database.D cluster_spec = openapi.models.CreateCluster( name="my-cluster", size=cluster_size, + auto_stop=openapi.models.AutoStop( + enabled=True, + idle_time=int(MINIMUM_IDLE_TIME.seconds / 60), + ), ) return create_database.sync( self._account_id, @@ -102,21 +115,23 @@ def database( ignore_delete_failure: bool = False, ): db = None + start = datetime.now() try: db = self.create_database() yield db + self.wait_for_delete_clearance(start) finally: if db and not keep: - LOG.info( - f"deleting database {db.name}," - f" ignore_delete_failure = {ignore_delete_failure}" - ) - self.delete_database(db.id, ignore_delete_failure) - LOG.info(f"deleted database successully") + LOG.info(f"Deleting database {db.name}") + response = self.delete_database(db.id, ignore_delete_failure) + if response.status_code == 200: + LOG.info(f"Successfully deleted database {db.name}.") + else: + LOG.info(f"Ignoring status code {response.status_code}.") elif not db: - LOG.warning("cannot delete db None") + LOG.warning("Cannot delete db None") else: - LOG.info(f"keeping database {db.name} as keep = {keep}") + LOG.info(f"Keeping database {db.name} as keep = {keep}") def get_database(self, database_id: str) -> openapi.models.database.Database: return get_database.sync( @@ -154,6 +169,14 @@ def list_allowed_ip_ids(self) -> Iterable[openapi.models.allowed_ip.AllowedIP]: ) return (x.id for x in ips) + def wait_for_delete_clearance(self, start: datetime.time): + lifetime = datetime.now() - start + if lifetime < MINIMUM_LIFETIME: + wait = MINIMUM_LIFETIME - lifetime + LOG.info(f"Waiting {int(wait.seconds)} seconds" + " before deleting the database.") + time.sleep(wait.seconds) + def add_allowed_ip(self, cidr_ip: str = "0.0.0.0/0") -> openapi.models.allowed_ip.AllowedIP: """ Suggested values for cidr_ip: diff --git a/test/integration/databases_test.py b/test/integration/databases_test.py index 597224d..4600bd1 100644 --- a/test/integration/databases_test.py +++ b/test/integration/databases_test.py @@ -1,7 +1,8 @@ import pytest + from exasol.saas.client import openapi from tenacity import RetryError -from datetime import timedelta +from datetime import datetime, timedelta def test_lifecycle(api_access): @@ -17,6 +18,7 @@ def test_lifecycle(api_access): testee = api_access with testee.database(ignore_delete_failure=True) as db: + start = datetime.now() # verify state and clusters of created database assert db.status == openapi.models.Status.TOCREATE and \ db.clusters.total == 1 @@ -25,6 +27,7 @@ def test_lifecycle(api_access): assert db.id in testee.list_database_ids() # delete database and verify database is not listed anymore + testee.wait_for_delete_clearance(start) testee.delete_database(db.id) assert db.id not in testee.list_database_ids() From 66026b011f400da93d22c701159fc2c1b627e8e9 Mon Sep 17 00:00:00 2001 From: ckunki Date: Mon, 6 May 2024 17:45:36 +0200 Subject: [PATCH 14/25] Added log message for creating a database --- test/integration/api_access.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 3622725..4c93ae3 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -81,11 +81,13 @@ def 
create_database(self, cluster_size: str = "XS") -> openapi.models.database.D idle_time=int(MINIMUM_IDLE_TIME.seconds / 60), ), ) + db_name = _timestamp_name() + LOG.info(f"Creating database {db_name}") return create_database.sync( self._account_id, client=self._client, body=openapi.models.CreateDatabase( - name=_timestamp_name(), + name=db_name, initial_cluster=cluster_spec, provider="aws", region='us-east-1', From 26cf3218f5fde78ca10358c1a0e82a859697d8c8 Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 7 May 2024 08:17:36 +0200 Subject: [PATCH 15/25] Excluded generated code from coverage --- pyproject.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index f76796f..f9cc3be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,10 @@ source = [ "exasol", ] +omit = [ + '*/exasol/saas/client/openapi/*', +] + [tool.coverage.report] fail_under = 15 From d822647a41271af12c4ebe384d93f987443fc4d8 Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 7 May 2024 12:47:11 +0200 Subject: [PATCH 16/25] Fixed first batch of review findings --- exasol/saas/client/__init__.py | 18 +++++++++++++++ test/integration/api_access.py | 35 ++++++++++++++---------------- test/integration/databases_test.py | 8 ++++--- 3 files changed, 39 insertions(+), 22 deletions(-) diff --git a/exasol/saas/client/__init__.py b/exasol/saas/client/__init__.py index 6c22d66..78969fb 100644 --- a/exasol/saas/client/__init__.py +++ b/exasol/saas/client/__init__.py @@ -2,4 +2,22 @@ Package openapi contains the API generated from the JSON definition. """ +from datetime import timedelta +from exasol.saas.client.openapi.models.status import Status + + SAAS_HOST = "https://cloud.exasol.com" + +# For auto-stopping idle database clusters +MINIMUM_IDLE_TIME = timedelta(minutes=15) + +# If deleting a database too early, then logging and accounting could be invalid. +MINIMUM_LIFETIME = timedelta(seconds=30) + +PROMISING_STATES = [ + Status.CREATING, + Status.RUNNING, + Status.STARTING, + Status.TOCREATE, + Status.TOSTART, +] diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 4c93ae3..a9cde1f 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -8,7 +8,11 @@ from tenacity.wait import wait_fixed from tenacity.stop import stop_after_delay -from exasol.saas.client import openapi +from exasol.saas.client import ( + openapi, + MINIMUM_IDLE_TIME, + MINIMUM_LIFETIME, +) from exasol.saas.client.openapi.models.status import Status from exasol.saas.client.openapi.api.databases import ( create_database, @@ -28,20 +32,21 @@ LOG.setLevel(logging.INFO) -# For auto-stopping idle database clusters -MINIMUM_IDLE_TIME = timedelta(minutes=15) - - -# If deleting a database too early, then logging and accounting could be invalid. 
-MINIMUM_LIFETIME = timedelta(seconds=30) - - def _timestamp_name() -> str: username = getpass.getuser() timestamp = f'{datetime.now().timestamp():.0f}' return f"{username}-{timestamp}" +def wait_for_delete_clearance(start: datetime.time): + lifetime = datetime.now() - start + if lifetime < MINIMUM_LIFETIME: + wait = MINIMUM_LIFETIME - lifetime + LOG.info(f"Waiting {int(wait.seconds)} seconds" + " before deleting the database.") + time.sleep(wait.seconds) + + class DatabaseStartupFailure(Exception): """ If a SaaS database instance during startup reports a status other than @@ -121,7 +126,7 @@ def database( try: db = self.create_database() yield db - self.wait_for_delete_clearance(start) + wait_for_delete_clearance(start) finally: if db and not keep: LOG.info(f"Deleting database {db.name}") @@ -129,7 +134,7 @@ def database( if response.status_code == 200: LOG.info(f"Successfully deleted database {db.name}.") else: - LOG.info(f"Ignoring status code {response.status_code}.") + LOG.warning(f"Ignoring status code {response.status_code}.") elif not db: LOG.warning("Cannot delete db None") else: @@ -171,14 +176,6 @@ def list_allowed_ip_ids(self) -> Iterable[openapi.models.allowed_ip.AllowedIP]: ) return (x.id for x in ips) - def wait_for_delete_clearance(self, start: datetime.time): - lifetime = datetime.now() - start - if lifetime < MINIMUM_LIFETIME: - wait = MINIMUM_LIFETIME - lifetime - LOG.info(f"Waiting {int(wait.seconds)} seconds" - " before deleting the database.") - time.sleep(wait.seconds) - def add_allowed_ip(self, cidr_ip: str = "0.0.0.0/0") -> openapi.models.allowed_ip.AllowedIP: """ Suggested values for cidr_ip: diff --git a/test/integration/databases_test.py b/test/integration/databases_test.py index 4600bd1..7f64096 100644 --- a/test/integration/databases_test.py +++ b/test/integration/databases_test.py @@ -1,9 +1,11 @@ import pytest -from exasol.saas.client import openapi +from exasol.saas.client import openapi, PROMISING_STATES from tenacity import RetryError from datetime import datetime, timedelta +from api_access import wait_for_delete_clearance + def test_lifecycle(api_access): """ @@ -20,14 +22,14 @@ def test_lifecycle(api_access): with testee.database(ignore_delete_failure=True) as db: start = datetime.now() # verify state and clusters of created database - assert db.status == openapi.models.Status.TOCREATE and \ + assert db.status in PROMISING_STATES and \ db.clusters.total == 1 # verify database is listed assert db.id in testee.list_database_ids() # delete database and verify database is not listed anymore - testee.wait_for_delete_clearance(start) + wait_for_delete_clearance(start) testee.delete_database(db.id) assert db.id not in testee.list_database_ids() From 2b2b4a3474c7adb5287e956cfbb730faae95f506 Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 7 May 2024 13:02:09 +0200 Subject: [PATCH 17/25] Refactored extracting minutes from timedelta for SaaS API --- test/integration/api_access.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index a9cde1f..d5ece8b 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -78,12 +78,15 @@ def __init__(self, client: openapi.Client, account_id: str): self._account_id = account_id def create_database(self, cluster_size: str = "XS") -> openapi.models.database.Database: + def minutes(x: timedelta) -> int: + return x.seconds // 60 + cluster_spec = openapi.models.CreateCluster( name="my-cluster", size=cluster_size, 
auto_stop=openapi.models.AutoStop( enabled=True, - idle_time=int(MINIMUM_IDLE_TIME.seconds / 60), + idle_time=minutes(MINIMUM_IDLE_TIME), ), ) db_name = _timestamp_name() From aace19e049c5bc0387f208ecd0e37534f41c47b3 Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 7 May 2024 13:11:08 +0200 Subject: [PATCH 18/25] Added parameter region for create_database() --- test/integration/api_access.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index d5ece8b..111da65 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -77,7 +77,12 @@ def __init__(self, client: openapi.Client, account_id: str): self._client = client self._account_id = account_id - def create_database(self, cluster_size: str = "XS") -> openapi.models.database.Database: + def create_database( + self, + name: str | None, + cluster_size: str = "XS", + region: str = "eu-central-1", + ) -> openapi.models.database.Database: def minutes(x: timedelta) -> int: return x.seconds // 60 @@ -89,7 +94,7 @@ def minutes(x: timedelta) -> int: idle_time=minutes(MINIMUM_IDLE_TIME), ), ) - db_name = _timestamp_name() + db_name = name or _timestamp_name() LOG.info(f"Creating database {db_name}") return create_database.sync( self._account_id, @@ -98,7 +103,7 @@ def minutes(x: timedelta) -> int: name=db_name, initial_cluster=cluster_spec, provider="aws", - region='us-east-1', + region=region, ) ) @@ -121,13 +126,14 @@ def list_database_ids(self) -> Iterable[str]: @contextmanager def database( self, + name: str = None, keep: bool = False, ignore_delete_failure: bool = False, ): db = None start = datetime.now() try: - db = self.create_database() + db = self.create_database(name) yield db wait_for_delete_clearance(start) finally: From 5af0e1d49b17db9f4bb4fc73454632f401c4f325 Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 7 May 2024 13:23:16 +0200 Subject: [PATCH 19/25] Moved limits into dedicated class --- exasol/saas/client/__init__.py | 20 +++++++++++++------- test/integration/api_access.py | 9 ++++----- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/exasol/saas/client/__init__.py b/exasol/saas/client/__init__.py index 78969fb..8afac4a 100644 --- a/exasol/saas/client/__init__.py +++ b/exasol/saas/client/__init__.py @@ -2,18 +2,13 @@ Package openapi contains the API generated from the JSON definition. """ -from datetime import timedelta +from dataclasses import dataclass +from datetime import datetime, timedelta from exasol.saas.client.openapi.models.status import Status SAAS_HOST = "https://cloud.exasol.com" -# For auto-stopping idle database clusters -MINIMUM_IDLE_TIME = timedelta(minutes=15) - -# If deleting a database too early, then logging and accounting could be invalid. -MINIMUM_LIFETIME = timedelta(seconds=30) - PROMISING_STATES = [ Status.CREATING, Status.RUNNING, @@ -21,3 +16,14 @@ Status.TOCREATE, Status.TOSTART, ] + + +@dataclass +class Limits: + MAX_DATABASE_NAME_LENGTH: int = 20 + MAX_CLUSTER_NAME_LENGTH: int = 40 + AUTOSTOP_MIN_IDLE_TIME: datetime.time = timedelta(minutes=15) + AUTOSTOP_MAX_IDLE_TIME: datetime.time = timedelta(minutes=10000) + AUTOSTOP_DEFAULT_IDLE_TIME: datetime.time = timedelta(minutes=120) + # If deleting a database too early, then logging and accounting could be invalid. 
+ MIN_DATABASE_LIFETIME: datetime.time = timedelta(seconds=30) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 111da65..9715e4d 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -10,8 +10,7 @@ from exasol.saas.client import ( openapi, - MINIMUM_IDLE_TIME, - MINIMUM_LIFETIME, + Limits, ) from exasol.saas.client.openapi.models.status import Status from exasol.saas.client.openapi.api.databases import ( @@ -40,8 +39,8 @@ def _timestamp_name() -> str: def wait_for_delete_clearance(start: datetime.time): lifetime = datetime.now() - start - if lifetime < MINIMUM_LIFETIME: - wait = MINIMUM_LIFETIME - lifetime + if lifetime < Limits.MIN_DATABASE_LIFETIME: + wait = Limits.MIN_DATABASE_LIFETIME - lifetime LOG.info(f"Waiting {int(wait.seconds)} seconds" " before deleting the database.") time.sleep(wait.seconds) @@ -91,7 +90,7 @@ def minutes(x: timedelta) -> int: size=cluster_size, auto_stop=openapi.models.AutoStop( enabled=True, - idle_time=minutes(MINIMUM_IDLE_TIME), + idle_time=minutes(Limits.AUTOSTOP_MIN_IDLE_TIME), ), ) db_name = name or _timestamp_name() From bd0fcc961d5331981f43a1d8384778cb82c6738c Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 7 May 2024 14:32:31 +0200 Subject: [PATCH 20/25] Use project short tag for saas resource --- .github/workflows/checks.yml | 4 +++- noxfile.py | 19 +++++++++++++++++++ test/integration/api_access.py | 21 ++++++++++++--------- test/integration/conftest.py | 25 ++++++++++++++++++++----- test/integration/databases_test.py | 8 ++++---- 5 files changed, 58 insertions(+), 19 deletions(-) diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index eab5256..03b336c 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -110,7 +110,9 @@ jobs: SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} PYTEST_ADDOPTS: -o log_cli=true -o log_cli_level=INFO - run: poetry run nox -s coverage -- -- + run: | + export PROJECT_SHORT_TAG=$(poetry run nox -s get-project-short-tag) + poetry run nox -s coverage -- -- - name: Upload Artifacts uses: actions/upload-artifact@v3 diff --git a/noxfile.py b/noxfile.py index 1396c4a..bae485b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,5 +1,7 @@ import os import nox + +from pathlib import Path from nox import Session from noxconfig import PROJECT_CONFIG from exasol.saas.client import SAAS_HOST @@ -41,3 +43,20 @@ def check_api_outdated(session: Session): """ generate_api(session) session.run("git", "diff", "--exit-code") + + +@nox.session(name="get-project-short-tag", python=False) +def get_project_short_tag(session: Session): + config_file = Path("error_code_config.yml") + content = config_file.read_text() + header = False + for line in content.splitlines(): + line = line.strip() + if header: + print(line.strip().replace(":", "")) + return + if line.startswith("error-tags:"): + header = True + raise RuntimeError( + f"Could not read project short tag from file {config_file}" + ) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 9715e4d..58dcf66 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -31,10 +31,14 @@ LOG.setLevel(logging.INFO) -def _timestamp_name() -> str: - username = getpass.getuser() +def timestamp_name(project_short_tag: str | None) -> str: + """ + project_short_tag: Abbreviation of your project + """ timestamp = f'{datetime.now().timestamp():.0f}' - return 
f"{username}-{timestamp}" + owner = project_short_tag or getpass.getuser() + candidate = f"{timestamp}-{owner}" + return candidate[:Limits.MAX_DATABASE_NAME_LENGTH] def wait_for_delete_clearance(start: datetime.time): @@ -78,7 +82,7 @@ def __init__(self, client: openapi.Client, account_id: str): def create_database( self, - name: str | None, + name: str, cluster_size: str = "XS", region: str = "eu-central-1", ) -> openapi.models.database.Database: @@ -93,13 +97,12 @@ def minutes(x: timedelta) -> int: idle_time=minutes(Limits.AUTOSTOP_MIN_IDLE_TIME), ), ) - db_name = name or _timestamp_name() - LOG.info(f"Creating database {db_name}") + LOG.info(f"Creating database {name}") return create_database.sync( self._account_id, client=self._client, body=openapi.models.CreateDatabase( - name=db_name, + name=name, initial_cluster=cluster_spec, provider="aws", region=region, @@ -125,7 +128,7 @@ def list_database_ids(self) -> Iterable[str]: @contextmanager def database( self, - name: str = None, + name: str, keep: bool = False, ignore_delete_failure: bool = False, ): @@ -192,7 +195,7 @@ def add_allowed_ip(self, cidr_ip: str = "0.0.0.0/0") -> openapi.models.allowed_i * ::/0 = all ipv6 """ rule = openapi.models.create_allowed_ip.CreateAllowedIP( - name=_timestamp_name(), + name=timestamp_name(), cidr_ip=cidr_ip, ) return add_allowed_ip.sync( diff --git a/test/integration/conftest.py b/test/integration/conftest.py index 2a58cbc..bfa611a 100644 --- a/test/integration/conftest.py +++ b/test/integration/conftest.py @@ -1,8 +1,13 @@ import pytest import os +from pathlib import Path from exasol.saas.client import openapi -from api_access import create_saas_client, _OpenApiAccess +from api_access import ( + create_saas_client, + _OpenApiAccess, + timestamp_name, +) @pytest.fixture(scope="session") def saas_host() -> str: @@ -26,17 +31,27 @@ def api_access(saas_host, saas_pat, saas_account_id) -> _OpenApiAccess: @pytest.fixture(scope="session") -def saas_database(api_access) -> openapi.models.database.Database: +def saas_database(api_access, database_name) -> openapi.models.database.Database: """ Note: The SaaS instance database returned by this fixture initially will not be operational. The startup takes about 20 minutes. 
""" - with api_access.database() as db: + with api_access.database(database_name) as db: yield db @pytest.fixture(scope="session") -def operational_saas_database_id(api_access) -> str: - with api_access.database() as db: +def operational_saas_database_id(api_access, database_name) -> str: + with api_access.database(database_name) as db: api_access.wait_until_running(db.id) yield db + + +@pytest.fixture(scope="session") +def project_short_tag(): + return os.environ.get("PROJECT_SHORT_TAG") + + +@pytest.fixture +def database_name(project_short_tag): + return timestamp_name(project_short_tag) diff --git a/test/integration/databases_test.py b/test/integration/databases_test.py index 7f64096..118c315 100644 --- a/test/integration/databases_test.py +++ b/test/integration/databases_test.py @@ -7,7 +7,7 @@ from api_access import wait_for_delete_clearance -def test_lifecycle(api_access): +def test_lifecycle(api_access, database_name): """ This integration test uses the database created and provided by pytest context ``_OpenApiAccess.database()`` to verify @@ -19,7 +19,7 @@ def test_lifecycle(api_access): """ testee = api_access - with testee.database(ignore_delete_failure=True) as db: + with testee.database(database_name, ignore_delete_failure=True) as db: start = datetime.now() # verify state and clusters of created database assert db.status in PROMISING_STATES and \ @@ -34,8 +34,8 @@ def test_lifecycle(api_access): assert db.id not in testee.list_database_ids() -def test_poll(api_access): - with api_access.database() as db: +def test_poll(api_access, database_name): + with api_access.database(database_name) as db: print(f'{db.status}') with pytest.raises(RetryError): api_access.wait_until_running( From 2534a4af5ea4e3ad4b8a1599eb333a01a8dacd63 Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 7 May 2024 14:47:09 +0200 Subject: [PATCH 21/25] Renamed database limits to meet linter requirements --- exasol/saas/client/__init__.py | 18 ++++++++++++------ test/integration/api_access.py | 10 +++++----- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/exasol/saas/client/__init__.py b/exasol/saas/client/__init__.py index 8afac4a..7846ea6 100644 --- a/exasol/saas/client/__init__.py +++ b/exasol/saas/client/__init__.py @@ -20,10 +20,16 @@ @dataclass class Limits: - MAX_DATABASE_NAME_LENGTH: int = 20 - MAX_CLUSTER_NAME_LENGTH: int = 40 - AUTOSTOP_MIN_IDLE_TIME: datetime.time = timedelta(minutes=15) - AUTOSTOP_MAX_IDLE_TIME: datetime.time = timedelta(minutes=10000) - AUTOSTOP_DEFAULT_IDLE_TIME: datetime.time = timedelta(minutes=120) + """ + Constants for Exasol SaaS databases. + """ + max_database_name_length: int = 20 + max_cluster_name_length: int = 40 + autostop_min_idle_time: datetime.time = timedelta(minutes=15) + autostop_max_idle_time: datetime.time = timedelta(minutes=10000) + autostop_default_idle_time: datetime.time = timedelta(minutes=120) # If deleting a database too early, then logging and accounting could be invalid. 
- MIN_DATABASE_LIFETIME: datetime.time = timedelta(seconds=30) + min_database_lifetime: datetime.time = timedelta(seconds=30) + + +DATABASE_LIMITS = Limits() diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 58dcf66..d330e97 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -10,7 +10,7 @@ from exasol.saas.client import ( openapi, - Limits, + DATABASE_LIMITS, ) from exasol.saas.client.openapi.models.status import Status from exasol.saas.client.openapi.api.databases import ( @@ -38,13 +38,13 @@ def timestamp_name(project_short_tag: str | None) -> str: timestamp = f'{datetime.now().timestamp():.0f}' owner = project_short_tag or getpass.getuser() candidate = f"{timestamp}-{owner}" - return candidate[:Limits.MAX_DATABASE_NAME_LENGTH] + return candidate[:DATABASE_LIMITS.max_database_name_length] def wait_for_delete_clearance(start: datetime.time): lifetime = datetime.now() - start - if lifetime < Limits.MIN_DATABASE_LIFETIME: - wait = Limits.MIN_DATABASE_LIFETIME - lifetime + if lifetime < DATABASE_LIMITS.min_database_lifetime: + wait = DATABASE_LIMITS.min_database_lifetime - lifetime LOG.info(f"Waiting {int(wait.seconds)} seconds" " before deleting the database.") time.sleep(wait.seconds) @@ -94,7 +94,7 @@ def minutes(x: timedelta) -> int: size=cluster_size, auto_stop=openapi.models.AutoStop( enabled=True, - idle_time=minutes(Limits.AUTOSTOP_MIN_IDLE_TIME), + idle_time=minutes(DATABASE_LIMITS.autostop_min_idle_time), ), ) LOG.info(f"Creating database {name}") From 381e404116e261df820bf1f6ef4fd027d49b9f2f Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 7 May 2024 15:20:37 +0200 Subject: [PATCH 22/25] Fixed type check --- exasol/saas/client/__init__.py | 17 +++++++---------- test/integration/api_access.py | 8 ++++---- 2 files changed, 11 insertions(+), 14 deletions(-) diff --git a/exasol/saas/client/__init__.py b/exasol/saas/client/__init__.py index 7846ea6..a792713 100644 --- a/exasol/saas/client/__init__.py +++ b/exasol/saas/client/__init__.py @@ -3,6 +3,7 @@ """ from dataclasses import dataclass +from typing import Final from datetime import datetime, timedelta from exasol.saas.client.openapi.models.status import Status @@ -18,18 +19,14 @@ ] -@dataclass class Limits: """ Constants for Exasol SaaS databases. """ - max_database_name_length: int = 20 - max_cluster_name_length: int = 40 - autostop_min_idle_time: datetime.time = timedelta(minutes=15) - autostop_max_idle_time: datetime.time = timedelta(minutes=10000) - autostop_default_idle_time: datetime.time = timedelta(minutes=120) + MAX_DATABASE_NAME_LENGTH: Final[int] = 20 + MAX_CLUSTER_NAME_LENGTH: Final[int] = 40 + AUTOSTOP_MIN_IDLE_TIME: Final[timedelta] = timedelta(minutes=15) + AUTOSTOP_MAX_IDLE_TIME: Final[timedelta] = timedelta(minutes=10000) + AUTOSTOP_DEFAULT_IDLE_TIME: Final[timedelta] = timedelta(minutes=120) # If deleting a database too early, then logging and accounting could be invalid. 
- min_database_lifetime: datetime.time = timedelta(seconds=30) - - -DATABASE_LIMITS = Limits() + MIN_DATABASE_LIFETIME: Final[timedelta] = timedelta(seconds=30) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index d330e97..6fd4b1b 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -38,13 +38,13 @@ def timestamp_name(project_short_tag: str | None) -> str: timestamp = f'{datetime.now().timestamp():.0f}' owner = project_short_tag or getpass.getuser() candidate = f"{timestamp}-{owner}" - return candidate[:DATABASE_LIMITS.max_database_name_length] + return candidate[:Limits.MAX_DATABASE_NAME_LENGTH] def wait_for_delete_clearance(start: datetime.time): lifetime = datetime.now() - start - if lifetime < DATABASE_LIMITS.min_database_lifetime: - wait = DATABASE_LIMITS.min_database_lifetime - lifetime + if lifetime < Limits.MIN_DATABASE_LIFETIME: + wait = Limits.MIN_DATABASE_LIFETIME - lifetime LOG.info(f"Waiting {int(wait.seconds)} seconds" " before deleting the database.") time.sleep(wait.seconds) @@ -94,7 +94,7 @@ def minutes(x: timedelta) -> int: size=cluster_size, auto_stop=openapi.models.AutoStop( enabled=True, - idle_time=minutes(DATABASE_LIMITS.autostop_min_idle_time), + idle_time=minutes(Limits.AUTOSTOP_MIN_IDLE_TIME), ), ) LOG.info(f"Creating database {name}") From fb30fa73590b08f530bb50dd6d2244ae30b3908c Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 7 May 2024 15:21:38 +0200 Subject: [PATCH 23/25] fixed import --- test/integration/api_access.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 6fd4b1b..58dcf66 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -10,7 +10,7 @@ from exasol.saas.client import ( openapi, - DATABASE_LIMITS, + Limits, ) from exasol.saas.client.openapi.models.status import Status from exasol.saas.client.openapi.api.databases import ( From db989b6e8dcfb89054f2ed5c808d9617452ea524 Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 7 May 2024 16:25:27 +0200 Subject: [PATCH 24/25] Made parameter project short tag for timestamp_name optional --- test/integration/api_access.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 58dcf66..23fbd8f 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -31,7 +31,7 @@ LOG.setLevel(logging.INFO) -def timestamp_name(project_short_tag: str | None) -> str: +def timestamp_name(project_short_tag: str | None = None) -> str: """ project_short_tag: Abbreviation of your project """ From 3e23db00a4f1df620ffd0d608d5a37057673aa94 Mon Sep 17 00:00:00 2001 From: ckunki Date: Wed, 8 May 2024 09:57:31 +0200 Subject: [PATCH 25/25] Fixed review finding --- test/integration/api_access.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/integration/api_access.py b/test/integration/api_access.py index 23fbd8f..7449c06 100644 --- a/test/integration/api_access.py +++ b/test/integration/api_access.py @@ -36,8 +36,8 @@ def timestamp_name(project_short_tag: str | None = None) -> str: project_short_tag: Abbreviation of your project """ timestamp = f'{datetime.now().timestamp():.0f}' - owner = project_short_tag or getpass.getuser() - candidate = f"{timestamp}-{owner}" + owner = getpass.getuser() + candidate = f"{timestamp}{project_short_tag or ''}-{owner}" return candidate[:Limits.MAX_DATABASE_NAME_LENGTH]