From 749ffc3fe213623529fda4b82fd92056d58cac54 Mon Sep 17 00:00:00 2001
From: Adriana Lopez Lopez <71252798+dlpzx@users.noreply.github.com>
Date: Wed, 15 Jan 2025 15:06:20 +0100
Subject: [PATCH] 2.6.2 Security features (#1737)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

### Feature or Bugfix
- Security

### Detail

### 🔐 Security
* Update sanitization technique for terms filtering by @noah-paige in https://github.com/data-dot-all/dataall/pull/1692 and in https://github.com/data-dot-all/dataall/pull/1693
* Move access logging to a separate environment logging bucket by @noah-paige in https://github.com/data-dot-all/dataall/pull/1695
* Add explicit token duration config for both JWTs by @noah-paige in https://github.com/data-dot-all/dataall/pull/1698
* Disable GraphQL introspection if prod sizing by @noah-paige in https://github.com/data-dot-all/dataall/pull/1704
* Add snyk workflow on schedule by @noah-paige in https://github.com/data-dot-all/dataall/pull/1705, https://github.com/data-dot-all/dataall/pull/1708, https://github.com/data-dot-all/dataall/pull/1713, https://github.com/data-dot-all/dataall/pull/1745 and in https://github.com/data-dot-all/dataall/pull/1746
* Unify Logger Config for Tasks by @noah-paige in https://github.com/data-dot-all/dataall/pull/1709
* Update overly permissive policies tagged by checkov for the environment role, using least-privilege principles, by @mourya-33 in https://github.com/data-dot-all/dataall/pull/1632

The data.all permission model has been reviewed to ensure all Mutations and Queries have proper permissions:
* Add MANAGE_SHARES permissions by @dlpzx in https://github.com/data-dot-all/dataall/pull/1702
* Add permission check (is tenant) to update SSM parameters API by @dlpzx in https://github.com/data-dot-all/dataall/pull/1714
* Add GET_SHARE_OBJECT permissions to get data filters API by @dlpzx in https://github.com/data-dot-all/dataall/pull/1717
* Add permissions on list datasets for env group + cosmetic S3 Datasets by @dlpzx in https://github.com/data-dot-all/dataall/pull/1718
* Add GET_WORKSHEET permission in RUN_SQL_QUERY by @dlpzx in https://github.com/data-dot-all/dataall/pull/1716
* Add permissions to Quicksight monitoring service layer by @dlpzx in https://github.com/data-dot-all/dataall/pull/1715
* Add LIST_ENVIRONMENT_DATASETS permission for listing shared datasets and clean up unused code by @dlpzx in https://github.com/data-dot-all/dataall/pull/1719
* Add is_owner permissions to Glossary mutations + add new integration tests by @dlpzx in https://github.com/data-dot-all/dataall/pull/1721
* Refactor env permissions + modify getTrustAccount by @dlpzx in https://github.com/data-dot-all/dataall/pull/1712
* Add consistent Feed permissions by @dlpzx in https://github.com/data-dot-all/dataall/pull/1722
* Add consistent Votes permissions by @dlpzx in https://github.com/data-dot-all/dataall/pull/1724
* Consistent get_ permissions - Dashboards by @dlpzx in https://github.com/data-dot-all/dataall/pull/1729

### 🧪 Test improvements
Integration tests are in sync with `main`, without planned 2.7 features. In this PR all core modules, optional modules and submodules are tested; that includes tenant permissions, omics, mlstudio, votes, notifications and backwards compatibility of S3 shares. By @SofiaSazonova, @noah-paige, @petrkalos and @dlpzx.

In addition, the following PR adds functional tests that ensure the permission model of data.all is not corrupted.
* ⭐ Add resource permission checks by @petrkalos in https://github.com/data-dot-all/dataall/pull/1711

### Dependencies
* Update FastAPI by @petrkalos in #1577
* Update FastAPI dependency by @noah-paige in https://github.com/data-dot-all/dataall/pull/1699
* Upgrade "cross-spawn" to "7.0.5" by @dlpzx in https://github.com/data-dot-all/dataall/pull/1701
* Bump Python runtime to bump the CDK Klayers cryptography version by @noah-paige in https://github.com/data-dot-all/dataall/pull/1707

### Relates
- List above

### Security
Please answer the questions below briefly where applicable, or write `N/A`. Based on [OWASP 10](https://owasp.org/Top10/en/).

- Does this PR introduce or modify any input fields or queries - this includes fetching data from storage outside the application (e.g. a database, an S3 bucket)?
  - Is the input sanitized?
  - What precautions are you taking before deserializing the data you consume?
  - Is injection prevented by parametrizing queries?
  - Have you ensured no `eval` or similar functions are used?
- Does this PR introduce any functionality or component that requires authorization?
  - How have you ensured it respects the existing AuthN/AuthZ mechanisms?
  - Are you logging failed auth attempts?
- Are you using or adding any cryptographic features?
  - Do you use standard, proven implementations?
  - Are the keys used controlled by the customer? Where are they stored?
- Are you introducing any new policies/roles/users?
  - Have you used the least-privilege principle? How?

By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

---------

Co-authored-by: mourya-33 <134511711+mourya-33@users.noreply.github.com>
Co-authored-by: Mourya Darivemula
Co-authored-by: Noah Paige <69586985+noah-paige@users.noreply.github.com>
Co-authored-by: Petros Kalos
Co-authored-by: Sofia Sazonova
Co-authored-by: Sofia Sazonova
---
 .checkov.baseline | 71 +- .github/workflows/snyk.yaml | 31 + Makefile | 8 +- backend/api_handler.py | 10 +- backend/dataall/__init__.py | 11 + .../dataall/base/cdkproxy/requirements.txt | 15 +- backend/dataall/base/context.py | 2 +- .../dataall/base/feature_toggle_checker.py | 3 + .../dataall/base/utils/naming_convention.py | 34 +- .../dataall/core/environment/api/queries.py | 1 + .../dataall/core/environment/api/resolvers.py | 32 +- backend/dataall/core/environment/api/types.py | 1 + .../env_role_core_policies/cloudformation.py | 13 +- .../env_role_core_policies/service_policy.py | 14 +- .../core/environment/cdk/environment_stack.py | 89 +- .../core/environment/db/environment_models.py | 1 + .../db/environment_repositories.py | 5 +- .../services/environment_service.py | 153 +- .../environment/tasks/env_stacks_updater.py | 5 +- backend/dataall/core/groups/api/resolvers.py | 3 +- .../db/organization_repositories.py | 6 +- .../services/organization_service.py | 2 +- .../dataall/core/permissions/api/resolvers.py | 18 +- .../services/tenant_policy_service.py | 30 + backend/dataall/core/stacks/api/types.py | 1 + .../stacks/db/target_type_repositories.py | 11 +- .../core/stacks/services/stack_service.py | 34 +- backend/dataall/core/stacks/tasks/cdkproxy.py | 4 - .../dataall/core/vpc/db/vpc_repositories.py | 4 + .../dataall/core/vpc/services/vpc_service.py | 25 +- .../catalog/services/glossaries_service.py | 46 +- .../catalog/tasks/catalog_indexer_task.py | 4 - .../dataall/modules/dashboards/__init__.py | 6 +- .../modules/dashboards/api/mutations.py | 11 - .../dataall/modules/dashboards/api/queries.py | 9 -
.../modules/dashboards/api/resolvers.py | 10 +- .../dataall/modules/dashboards/api/types.py | 13 +- .../services/dashboard_quicksight_service.py | 25 +- .../dashboards/services/dashboard_service.py | 6 +- .../dataall/modules/datapipelines/__init__.py | 13 +- .../modules/datapipelines/api/input_types.py | 10 - .../modules/datapipelines/api/resolvers.py | 1 + .../data_pipeline_blueprint/requirements.txt | 3 +- .../services/datapipelines_service.py | 12 +- .../modules/datasets_base/api/resolvers.py | 4 +- .../datasets_base/db/dataset_repositories.py | 25 +- backend/dataall/modules/feed/api/registry.py | 5 + backend/dataall/modules/feed/api/resolvers.py | 2 +- .../modules/feed/services/feed_service.py | 39 +- backend/dataall/modules/mlstudio/__init__.py | 8 +- .../dataall/modules/mlstudio/api/resolvers.py | 1 + backend/dataall/modules/notebooks/__init__.py | 8 +- .../modules/notebooks/api/resolvers.py | 1 + .../notebooks/db/notebook_repository.py | 8 +- .../modules/notifications/api/mutations.py | 9 +- .../modules/notifications/api/queries.py | 16 - .../modules/notifications/api/resolvers.py | 43 +- .../db/notification_repositories.py | 59 +- .../services/notification_service.py | 71 + .../omics/tasks/omics_workflows_fetcher.py | 4 - .../api/datasets/resolvers.py | 0 .../dataall/modules/s3_datasets/__init__.py | 18 +- .../s3_datasets/api/dataset/resolvers.py | 12 +- .../modules/s3_datasets/api/dataset/types.py | 46 +- .../s3_datasets/api/profiling/input_types.py | 10 - .../s3_datasets/api/profiling/resolvers.py | 8 +- .../api/storage_location/input_types.py | 9 - .../api/storage_location/resolvers.py | 26 +- .../s3_datasets/api/storage_location/types.py | 48 +- .../s3_datasets/api/table/mutations.py | 2 +- .../s3_datasets/api/table/resolvers.py | 6 + .../modules/s3_datasets/api/table/types.py | 17 +- .../cdk/dataset_custom_resources_extension.py | 32 + .../modules/s3_datasets/cdk/dataset_stack.py | 3 +- .../cdk/env_role_dataset_glue_policy.py | 39 +- .../db/dataset_profiling_repositories.py | 11 - .../s3_datasets/db/dataset_repositories.py | 8 +- .../services/dataset_column_service.py | 18 +- .../services/dataset_location_service.py | 10 +- .../services/dataset_profiling_service.py | 22 +- .../s3_datasets/services/dataset_service.py | 43 +- .../dataset_table_data_filter_service.py | 0 .../services/dataset_table_service.py | 24 +- .../s3_datasets/tasks/tables_syncer.py | 4 - .../s3_datasets_shares/api/resolvers.py | 9 +- .../services/s3_share_service.py | 19 +- .../tasks/dataset_subscription_task.py | 4 - .../tasks/subscriptions/sqs_poller.py | 5 +- .../modules/shares_base/api/resolvers.py | 10 +- .../services/share_item_service.py | 15 +- .../services/share_logs_service.py | 8 +- .../services/share_object_service.py | 9 + .../shares_base/services/share_permissions.py | 7 + .../tasks/persistent_email_reminders_task.py | 4 - .../shares_base/tasks/share_manager_task.py | 5 - .../shares_base/tasks/share_reapplier_task.py | 4 - .../shares_base/tasks/share_verifier_task.py | 4 - backend/dataall/modules/vote/api/resolvers.py | 4 - .../modules/vote/services/vote_service.py | 25 +- .../modules/worksheets/api/resolvers.py | 36 +- .../worksheets/db/worksheet_repositories.py | 8 +- .../worksheets/services/worksheet_service.py | 181 +- backend/local_graphql_server.py | 87 +- .../04d92886fabe_add_consumption_roles.py | 10 +- ...8e35e39e1e_invite_env_groups_as_readers.py | 14 +- .../49c6b18ed814_add_env_logs_bucket.py | 55 + ...1ac7a85a2_drop_remove_group_permissions.py | 14 +- 
...2e1362d4cb_add_tenant_share_permissions.py | 67 + backend/requirements.txt | 8 +- deploy/requirements.txt | 8 +- deploy/stacks/cdk_nag_exclusions.py | 4 + deploy/stacks/cognito.py | 3 + deploy/stacks/lambda_api.py | 5 +- deploy/stacks/pipeline.py | 6 +- docker-compose.yaml | 2 +- frontend/package-lock.json | 675 +- frontend/package.json | 30 +- .../src/design/components/UpVoteButton.js | 6 +- .../AdministratorDashboardViewer.js | 54 +- .../services/getPlatformAuthorSession.js | 12 - .../modules/Administration/services/index.js | 1 - .../components/DashboardListItem.js | 4 +- .../components/DashboardOverview.js | 2 +- .../Dashboards/components/DashboardViewer.js | 8 +- .../Dashboards/services/getDashboard.js | 5 +- .../Dashboards/services/searchDashboards.js | 6 +- .../modules/Dashboards/views/DashboardView.js | 27 +- .../modules/DatasetsBase/views/DatasetList.js | 6 +- .../Environments/services/getTrustAccount.js | 12 + .../modules/Environments/services/index.js | 1 + .../views/EnvironmentCreateForm.js | 14 +- .../Folders/components/FolderOverview.js | 12 +- .../Folders/components/FolderS3Properties.js | 10 +- .../services/getDatasetStorageLocation.js | 10 +- .../src/modules/Folders/views/FolderView.js | 9 +- .../modules/Pipelines/views/PipelineView.js | 34 +- .../components/DatasetConsoleAccess.js | 15 +- .../S3_Datasets/components/DatasetFolders.js | 7 +- .../S3_Datasets/components/DatasetOverview.js | 6 +- .../components/DatasetStartCrawlerModal.js | 2 +- .../S3_Datasets/components/DatasetTables.js | 18 +- .../S3_Datasets/components/DatasetUpload.js | 2 +- .../services/listDatasetStorageLocations.js | 3 + .../S3_Datasets/services/startGlueCrawler.js | 2 - .../S3_Datasets/services/syncTables.js | 20 +- .../S3_Datasets/views/DatasetEditForm.js | 2 +- .../modules/S3_Datasets/views/DatasetView.js | 13 +- .../Tables/services/getDatasetTable.js | 11 +- .../src/modules/Tables/views/TableView.js | 6 +- .../modules/Worksheets/views/WorksheetView.js | 12 +- .../services/graphql/Datasets/getDataset.js | 16 +- .../graphql/Datasets/listDatasetTables.js | 9 +- .../Datasets/listS3DatasetsOwnedByEnvGroup.js | 10 +- .../graphql/Environment/getTrustAccount.js | 9 - .../src/services/graphql/Environment/index.js | 1 - .../Notification/archiveNotification.js | 12 - .../Notification/countDeletedNotifications.js | 10 - .../Notification/countReadNotifications.js | 10 - .../services/graphql/Notification/index.js | 3 - .../utils/helpers/emptyPrintUnauthorized.js | 5 + frontend/src/utils/helpers/index.js | 1 + frontend/yarn.lock | 292 +- tests/client.py | 35 +- tests/conftest.py | 42 +- tests/core/permissions/test_tenant.py | 19 +- tests/modules/conftest.py | 1 + tests/modules/dashboards/test_dashboards.py | 35 - tests/modules/omics/test_omics.py | 348 +- tests/modules/s3_datasets/conftest.py | 31 +- tests/modules/s3_datasets/test_dataset.py | 82 +- .../s3_datasets/test_dataset_glossary.py | 6 +- .../s3_datasets/test_dataset_location.py | 8 +- .../modules/s3_datasets/test_dataset_table.py | 8 +- .../test_environment_stack_with_dataset.py | 19 + .../test_import_dataset_check_unit.py | 12 +- tests/modules/s3_datasets_shares/conftest.py | 24 +- tests/permissions.py | 908 ++ tests/requirements.txt | 4 +- tests/test_permissions.py | 146 + tests_new/integration_tests/README.md | 207 +- .../integration_tests/aws_clients/athena.py | 47 + .../integration_tests/aws_clients/iam.py | 81 + tests_new/integration_tests/aws_clients/s3.py | 36 + .../integration_tests/aws_clients/sts.py | 57 + 
tests_new/integration_tests/client.py | 33 +- tests_new/integration_tests/conftest.py | 44 + .../core/environment/global_conftest.py | 200 +- .../core/environment/queries.py | 56 +- .../core/environment/test_environment.py | 26 +- .../core/environment/utils.py | 46 + .../core/organizations/global_conftest.py | 8 +- .../core/organizations/queries.py | 6 +- .../core/organizations/test_organization.py | 1 - .../core/permissions/queries.py | 64 + .../core/permissions/test_permissions.py | 57 + .../integration_tests/core/stack/conftest.py | 31 + .../integration_tests/core/stack/queries.py | 66 + .../core/stack/test_stack.py | 102 + .../integration_tests/core/vpc/conftest.py | 21 + .../integration_tests/core/vpc/queries.py | 84 + .../integration_tests/core/vpc/test_vpc.py | 68 + .../modules/catalog/__init__.py | 0 .../modules/catalog/conftest.py | 113 + .../modules/catalog/queries.py | 421 + .../modules/catalog/test_glossaries.py | 289 + .../modules/dashboards/aws_clients.py | 37 + .../modules/dashboards/conftest.py | 55 + .../modules/dashboards/mutations.py | 108 + .../modules/dashboards/queries.py | 149 + .../modules/dashboards/test_dashboard.py | 116 + .../modules/datasets_base/queries.py | 52 + .../modules/datasets_base/test_dataset.py | 70 + .../integration_tests/modules/feed/queries.py | 64 + .../modules/feed/test_feed.py | 56 + .../modules/mlstudio/conftest.py | 22 + .../modules/mlstudio/mutations.py | 58 + .../modules/mlstudio/queries.py | 152 + .../modules/mlstudio/test_mlstudio.py | 33 + .../modules/notebooks/conftest.py | 3 + .../modules/notifications/queries.py | 56 + .../notifications/test_notifications.py | 46 + .../modules/s3_datasets/aws_clients.py | 141 +- .../modules/s3_datasets/conftest.py | 44 + .../modules/s3_datasets/global_conftest.py | 349 +- .../modules/s3_datasets/queries.py | 417 +- .../sample_data/csv_table/books.csv | 11128 ---------------- .../sample_data/csv_table/csv_sample.csv | 3 + .../sample_data/parquet_table/sample1.parquet | Bin 1308 -> 0 bytes .../modules/s3_datasets/test_s3_dataset.py | 281 +- .../modules/s3_datasets/test_s3_folders.py | 88 + .../modules/s3_datasets/test_s3_tables.py | 143 + .../s3_datasets/test_s3_tables_columns.py | 87 + .../s3_datasets/test_s3_tables_profiling.py | 148 + .../modules/shares/__init__.py | 0 .../modules/shares/queries.py | 301 + .../s3_datasets_shares/global_conftest.py | 523 + .../shared_test_functions.py | 217 + .../test_new_crossacc_s3_share.py | 242 + .../test_persistent_crossacc_share.py | 107 + .../integration_tests/modules/shares/types.py | 74 + .../integration_tests/modules/shares/utils.py | 32 + .../modules/vote/conftest.py | 11 + .../integration_tests/modules/vote/queries.py | 55 + .../modules/vote/test_vote.py | 43 + 244 files changed, 9412 insertions(+), 13591 deletions(-) create mode 100644 .github/workflows/snyk.yaml create mode 100644 backend/dataall/modules/notifications/services/notification_service.py create mode 100644 backend/dataall/modules/redshift_datasets/api/datasets/resolvers.py create mode 100644 backend/dataall/modules/s3_datasets/services/dataset_table_data_filter_service.py create mode 100644 backend/migrations/versions/49c6b18ed814_add_env_logs_bucket.py create mode 100644 backend/migrations/versions/af2e1362d4cb_add_tenant_share_permissions.py delete mode 100644 frontend/src/modules/Administration/services/getPlatformAuthorSession.js create mode 100644 frontend/src/modules/Environments/services/getTrustAccount.js delete mode 100644 frontend/src/services/graphql/Environment/getTrustAccount.js 
delete mode 100644 frontend/src/services/graphql/Notification/archiveNotification.js delete mode 100644 frontend/src/services/graphql/Notification/countDeletedNotifications.js delete mode 100644 frontend/src/services/graphql/Notification/countReadNotifications.js create mode 100644 frontend/src/utils/helpers/emptyPrintUnauthorized.js create mode 100644 tests/permissions.py create mode 100644 tests/test_permissions.py create mode 100644 tests_new/integration_tests/aws_clients/athena.py create mode 100644 tests_new/integration_tests/aws_clients/iam.py create mode 100644 tests_new/integration_tests/aws_clients/s3.py create mode 100644 tests_new/integration_tests/aws_clients/sts.py create mode 100644 tests_new/integration_tests/core/environment/utils.py create mode 100644 tests_new/integration_tests/core/permissions/queries.py create mode 100644 tests_new/integration_tests/core/permissions/test_permissions.py create mode 100644 tests_new/integration_tests/core/stack/conftest.py create mode 100644 tests_new/integration_tests/core/stack/test_stack.py create mode 100644 tests_new/integration_tests/core/vpc/conftest.py create mode 100644 tests_new/integration_tests/core/vpc/queries.py create mode 100644 tests_new/integration_tests/core/vpc/test_vpc.py create mode 100644 tests_new/integration_tests/modules/catalog/__init__.py create mode 100644 tests_new/integration_tests/modules/catalog/conftest.py create mode 100644 tests_new/integration_tests/modules/catalog/queries.py create mode 100644 tests_new/integration_tests/modules/catalog/test_glossaries.py create mode 100644 tests_new/integration_tests/modules/dashboards/aws_clients.py create mode 100644 tests_new/integration_tests/modules/dashboards/conftest.py create mode 100644 tests_new/integration_tests/modules/dashboards/mutations.py create mode 100644 tests_new/integration_tests/modules/dashboards/queries.py create mode 100644 tests_new/integration_tests/modules/dashboards/test_dashboard.py create mode 100644 tests_new/integration_tests/modules/datasets_base/test_dataset.py create mode 100644 tests_new/integration_tests/modules/feed/queries.py create mode 100644 tests_new/integration_tests/modules/feed/test_feed.py create mode 100644 tests_new/integration_tests/modules/mlstudio/conftest.py create mode 100644 tests_new/integration_tests/modules/mlstudio/mutations.py create mode 100644 tests_new/integration_tests/modules/mlstudio/queries.py create mode 100644 tests_new/integration_tests/modules/mlstudio/test_mlstudio.py create mode 100644 tests_new/integration_tests/modules/notifications/queries.py create mode 100644 tests_new/integration_tests/modules/notifications/test_notifications.py create mode 100644 tests_new/integration_tests/modules/s3_datasets/conftest.py delete mode 100644 tests_new/integration_tests/modules/s3_datasets/sample_data/csv_table/books.csv create mode 100644 tests_new/integration_tests/modules/s3_datasets/sample_data/csv_table/csv_sample.csv delete mode 100644 tests_new/integration_tests/modules/s3_datasets/sample_data/parquet_table/sample1.parquet create mode 100644 tests_new/integration_tests/modules/s3_datasets/test_s3_folders.py create mode 100644 tests_new/integration_tests/modules/s3_datasets/test_s3_tables.py create mode 100644 tests_new/integration_tests/modules/s3_datasets/test_s3_tables_columns.py create mode 100644 tests_new/integration_tests/modules/s3_datasets/test_s3_tables_profiling.py create mode 100644 tests_new/integration_tests/modules/shares/__init__.py create mode 100644 
tests_new/integration_tests/modules/shares/queries.py create mode 100644 tests_new/integration_tests/modules/shares/s3_datasets_shares/global_conftest.py create mode 100644 tests_new/integration_tests/modules/shares/s3_datasets_shares/shared_test_functions.py create mode 100644 tests_new/integration_tests/modules/shares/s3_datasets_shares/test_new_crossacc_s3_share.py create mode 100644 tests_new/integration_tests/modules/shares/s3_datasets_shares/test_persistent_crossacc_share.py create mode 100644 tests_new/integration_tests/modules/shares/types.py create mode 100644 tests_new/integration_tests/modules/shares/utils.py create mode 100644 tests_new/integration_tests/modules/vote/conftest.py create mode 100644 tests_new/integration_tests/modules/vote/queries.py create mode 100644 tests_new/integration_tests/modules/vote/test_vote.py diff --git a/.checkov.baseline b/.checkov.baseline index e0bbfce12..796f224f6 100644 --- a/.checkov.baseline +++ b/.checkov.baseline @@ -417,7 +417,7 @@ ] }, { - "file": "/cdk.out/asset.3045cb6b4340be1e173df6dcf6248d565aa849ceda3e2cf2c2f221ccee4bc1d6/pivotRole.yaml", + "file": "/cdk.out/asset.05d71d8b69cd4483d3c9db9120b556b718c72f349debbb79d461c74c4964b350/pivotRole.yaml", "findings": [ { "resource": "AWS::IAM::ManagedPolicy.PivotRolePolicy0", @@ -490,12 +490,6 @@ { "file": "/checkov_environment_synth.json", "findings": [ - { - "resource": "AWS::IAM::ManagedPolicy.dataallanothergroup111111servicespolicy19AC37181", - "check_ids": [ - "CKV_AWS_111" - ] - }, { "resource": "AWS::IAM::ManagedPolicy.dataallanothergroup111111servicespolicy2E85AF510", "check_ids": [ @@ -508,24 +502,6 @@ "CKV_AWS_111" ] }, - { - "resource": "AWS::IAM::ManagedPolicy.dataallanothergroup111111servicespolicy5A19E75CA", - "check_ids": [ - "CKV_AWS_109" - ] - }, - { - "resource": "AWS::IAM::ManagedPolicy.dataallanothergroup111111servicespolicyCC720210", - "check_ids": [ - "CKV_AWS_109" - ] - }, - { - "resource": "AWS::IAM::ManagedPolicy.dataalltestadmins111111servicespolicy1A0C96958", - "check_ids": [ - "CKV_AWS_111" - ] - }, { "resource": "AWS::IAM::ManagedPolicy.dataalltestadmins111111servicespolicy2B12D381A", "check_ids": [ @@ -538,18 +514,6 @@ "CKV_AWS_111" ] }, - { - "resource": "AWS::IAM::ManagedPolicy.dataalltestadmins111111servicespolicy3E3CBA9E", - "check_ids": [ - "CKV_AWS_109" - ] - }, - { - "resource": "AWS::IAM::ManagedPolicy.dataalltestadmins111111servicespolicy56D7DC525", - "check_ids": [ - "CKV_AWS_109" - ] - }, { "resource": "AWS::Lambda::Function.CustomCDKBucketDeployment8693BB64968944B69AAFB0CC9EB8756C81C01536", "check_ids": [ @@ -563,16 +527,14 @@ "resource": "AWS::Lambda::Function.GlueDatabaseLFCustomResourceHandler7FAF0F82", "check_ids": [ "CKV_AWS_115", - "CKV_AWS_117", - "CKV_AWS_173" + "CKV_AWS_117" ] }, { "resource": "AWS::Lambda::Function.LakeformationDefaultSettingsHandler2CBEDB06", "check_ids": [ "CKV_AWS_115", - "CKV_AWS_117", - "CKV_AWS_173" + "CKV_AWS_117" ] }, { @@ -580,8 +542,7 @@ "check_ids": [ "CKV_AWS_115", "CKV_AWS_116", - "CKV_AWS_117", - "CKV_AWS_173" + "CKV_AWS_117" ] }, { @@ -589,12 +550,11 @@ "check_ids": [ "CKV_AWS_115", "CKV_AWS_116", - "CKV_AWS_117", - "CKV_AWS_173" + "CKV_AWS_117" ] }, { - "resource": "AWS::S3::Bucket.EnvironmentDefaultBucket78C3A8B0", + "resource": "AWS::S3::Bucket.EnvironmentDefaultLogBucket7F0EFAB3", "check_ids": [ "CKV_AWS_18" ] @@ -653,6 +613,25 @@ } ] }, + { + "file": "/checkov_pipeline_synth.json", + "findings": [ + { + "resource": "AWS::IAM::Role.PipelineRoleDCFDBB91", + "check_ids": [ + "CKV_AWS_107", + "CKV_AWS_108", + 
"CKV_AWS_111" + ] + }, + { + "resource": "AWS::S3::Bucket.thistableartifactsbucketDB1C8C64", + "check_ids": [ + "CKV_AWS_18" + ] + } + ] + }, { "file": "/frontend/docker/prod/Dockerfile", "findings": [ diff --git a/.github/workflows/snyk.yaml b/.github/workflows/snyk.yaml new file mode 100644 index 000000000..17e04cde7 --- /dev/null +++ b/.github/workflows/snyk.yaml @@ -0,0 +1,31 @@ +name: Snyk + +on: + workflow_dispatch: + + schedule: + - cron: "0 9 * * 1" # runs each Monday at 9:00 UTC + +permissions: + contents: read + security-events: write + +jobs: + security: + strategy: + matrix: + python-version: [3.9] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: snyk/actions/setup@master + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install All Requirements + run: make install + - name: Run Snyk to check for vulnerabilities + run: snyk test --all-projects --detection-depth=5 --severity-threshold=high + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} diff --git a/Makefile b/Makefile index 888927042..8fd5e85cd 100644 --- a/Makefile +++ b/Makefile @@ -16,7 +16,7 @@ venv: @python3 -m venv "venv" @/bin/bash -c "source venv/bin/activate" -install: upgrade-pip install-deploy install-backend install-cdkproxy install-tests +install: upgrade-pip install-deploy install-backend install-cdkproxy install-tests install-integration-tests install-custom-auth install-userguide upgrade-pip: pip install --upgrade pip setuptools @@ -36,6 +36,12 @@ install-tests: install-integration-tests: pip install -r tests_new/integration_tests/requirements.txt +install-custom-auth: + pip install -r deploy/custom_resources/custom_authorizer/requirements.txt + +install-userguide: + pip install -r documentation/userguide/requirements.txt + lint: pip install ruff ruff check --fix diff --git a/backend/api_handler.py b/backend/api_handler.py index e46113546..bbc1f1e03 100644 --- a/backend/api_handler.py +++ b/backend/api_handler.py @@ -23,6 +23,7 @@ from dataall.base.db import get_engine from dataall.base.loader import load_modules, ImportMode +from graphql.pyutils import did_you_mean logger = logging.getLogger() logger.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) @@ -32,6 +33,11 @@ for name in ['boto3', 's3transfer', 'botocore', 'boto']: logging.getLogger(name).setLevel(logging.ERROR) +ALLOW_INTROSPECTION = True if os.getenv('ALLOW_INTROSPECTION') == 'True' else False + +if not ALLOW_INTROSPECTION: + did_you_mean.__globals__['MAX_LENGTH'] = 0 + load_modules(modes={ImportMode.API}) SCHEMA = bootstrap_schema() TYPE_DEFS = gql(SCHEMA.gql(with_directives=False)) @@ -137,7 +143,9 @@ def handler(event, context): else: raise Exception(f'Could not initialize user context from event {event}') - success, response = graphql_sync(schema=executable_schema, data=query, context_value=app_context) + success, response = graphql_sync( + schema=executable_schema, data=query, context_value=app_context, introspection=ALLOW_INTROSPECTION + ) dispose_context() response = json.dumps(response) diff --git a/backend/dataall/__init__.py b/backend/dataall/__init__.py index a6387d880..5a78164b2 100644 --- a/backend/dataall/__init__.py +++ b/backend/dataall/__init__.py @@ -1,2 +1,13 @@ from . 
import core, version from .base import utils, db, api +import logging +import os +import sys + +logging.basicConfig( + level=os.environ.get('LOG_LEVEL', 'INFO'), + handlers=[logging.StreamHandler(sys.stdout)], + format='[%(levelname)s] %(message)s', +) +for name in ['boto3', 's3transfer', 'botocore', 'boto', 'urllib3']: + logging.getLogger(name).setLevel(logging.ERROR) diff --git a/backend/dataall/base/cdkproxy/requirements.txt b/backend/dataall/base/cdkproxy/requirements.txt index f55e149a6..a10ce157a 100644 --- a/backend/dataall/base/cdkproxy/requirements.txt +++ b/backend/dataall/base/cdkproxy/requirements.txt @@ -1,17 +1,12 @@ -aws-cdk-lib==2.99.0 -boto3==1.28.23 -boto3-stubs==1.28.23 -botocore==1.31.23 +aws-cdk-lib==2.160.0 +boto3==1.35.26 +boto3-stubs==1.35.26 cdk-nag==2.7.2 -constructs==10.0.73 -starlette==0.36.3 -fastapi == 0.109.2 -Flask==2.3.2 +fastapi == 0.115.5 PyYAML==6.0 requests==2.32.2 tabulate==0.8.9 uvicorn==0.15.0 -werkzeug==3.0.3 -constructs>=10.0.0,<11.0.0 +werkzeug==3.0.6 git-remote-codecommit==1.16 aws-ddk-core==1.3.0 \ No newline at end of file diff --git a/backend/dataall/base/context.py b/backend/dataall/base/context.py index b271b3e18..3df892e18 100644 --- a/backend/dataall/base/context.py +++ b/backend/dataall/base/context.py @@ -4,7 +4,7 @@ that in the request scope The class uses Flask's approach to handle request: ThreadLocal -That approach should work fine for AWS Lambdas and local server that uses Flask app +That approach should work fine for AWS Lambdas and local server that uses FastApi app """ from dataclasses import dataclass diff --git a/backend/dataall/base/feature_toggle_checker.py b/backend/dataall/base/feature_toggle_checker.py index b6d681646..f8a4651f0 100644 --- a/backend/dataall/base/feature_toggle_checker.py +++ b/backend/dataall/base/feature_toggle_checker.py @@ -2,6 +2,8 @@ Contains decorators that check if a feature has been enabled or not """ +import functools + from dataall.base.config import config from dataall.base.utils.decorator_utls import process_func @@ -10,6 +12,7 @@ def is_feature_enabled(config_property: str): def decorator(f): fn, fn_decorator = process_func(f) + @functools.wraps(fn) def decorated(*args, **kwargs): value = config.get_property(config_property) if not value: diff --git a/backend/dataall/base/utils/naming_convention.py b/backend/dataall/base/utils/naming_convention.py index 178bb1882..3d366b811 100644 --- a/backend/dataall/base/utils/naming_convention.py +++ b/backend/dataall/base/utils/naming_convention.py @@ -1,28 +1,46 @@ from enum import Enum - +import re from .slugify import slugify class NamingConventionPattern(Enum): - S3 = {'regex': '[^a-zA-Z0-9-]', 'separator': '-', 'max_length': 63} + S3 = { + 'regex': '[^a-zA-Z0-9-]', + 'separator': '-', + 'max_length': 63, + 'valid_external_regex': '(?!(^xn--|.+-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$', + } + KMS = {'regex': '[^a-zA-Z0-9-]$', 'separator': '-', 'max_length': 63, 'valid_external_regex': '^[a-zA-Z0-9_-]+$'} IAM = {'regex': '[^a-zA-Z0-9-_]', 'separator': '-', 'max_length': 63} # Role names up to 64 chars IAM_POLICY = {'regex': '[^a-zA-Z0-9-_]', 'separator': '-', 'max_length': 128} # Policy names up to 128 chars - GLUE = {'regex': '[^a-zA-Z0-9_]', 'separator': '_', 'max_length': 240} # Limit 255 - 15 extra chars buffer + GLUE = { + 'regex': '[^a-zA-Z0-9_]', + 'separator': '_', + 'max_length': 240, + 'valid_external_regex': '^[a-zA-Z0-9_]+$', + } # Limit 255 - 15 extra chars buffer GLUE_ETL = {'regex': '[^a-zA-Z0-9-]', 'separator': '-', 'max_length': 52} 
NOTEBOOK = {'regex': '[^a-zA-Z0-9-]', 'separator': '-', 'max_length': 63} MLSTUDIO_DOMAIN = {'regex': '[^a-zA-Z0-9-]', 'separator': '-', 'max_length': 63} DEFAULT = {'regex': '[^a-zA-Z0-9-_]', 'separator': '-', 'max_length': 63} + DEFAULT_SEARCH = {'regex': '[^a-zA-Z0-9-_:. ]'} OPENSEARCH = {'regex': '[^a-z0-9-]', 'separator': '-', 'max_length': 27} OPENSEARCH_SERVERLESS = {'regex': '[^a-z0-9-]', 'separator': '-', 'max_length': 31} + DATA_FILTERS = {'regex': '[^a-z0-9_]', 'separator': '_', 'max_length': 31} + REDSHIFT_DATASHARE = { + 'regex': '[^a-zA-Z0-9_]', + 'separator': '_', + 'max_length': 1000, + } # Maximum length of 2147483647 class NamingConventionService: def __init__( self, target_label: str, - target_uri: str, pattern: NamingConventionPattern, - resource_prefix: str, + target_uri: str = '', + resource_prefix: str = '', ): self.target_label = target_label self.target_uri = target_uri if target_uri else '' @@ -37,4 +55,8 @@ def build_compliant_name(self) -> str: separator = NamingConventionPattern[self.service].value['separator'] max_length = NamingConventionPattern[self.service].value['max_length'] suffix = f'-{self.target_uri}' if len(self.target_uri) else '' - return f"{slugify(self.resource_prefix + '-' + self.target_label[:(max_length- len(self.resource_prefix + self.target_uri))] + suffix, regex_pattern=fr'{regex}', separator=separator, lowercase=True)}" + return f"{slugify(self.resource_prefix + '-' + self.target_label[:(max_length - len(self.resource_prefix + self.target_uri))] + suffix, regex_pattern=fr'{regex}', separator=separator, lowercase=True)}" + + def sanitize(self): + regex = NamingConventionPattern[self.service].value['regex'] + return re.sub(regex, '', self.target_label) diff --git a/backend/dataall/core/environment/api/queries.py b/backend/dataall/core/environment/api/queries.py index a1bd9bc57..3d9e84567 100644 --- a/backend/dataall/core/environment/api/queries.py +++ b/backend/dataall/core/environment/api/queries.py @@ -32,6 +32,7 @@ getTrustAccount = gql.QueryField( name='getTrustAccount', + args=[gql.Argument(name='organizationUri', type=gql.NonNullableType(gql.String))], type=gql.String, resolver=get_trust_account, test_scope='Environment', diff --git a/backend/dataall/core/environment/api/resolvers.py b/backend/dataall/core/environment/api/resolvers.py index 537e6de93..01a3c1bde 100644 --- a/backend/dataall/core/environment/api/resolvers.py +++ b/backend/dataall/core/environment/api/resolvers.py @@ -14,14 +14,11 @@ from dataall.core.organizations.api.resolvers import Context, exceptions, get_organization_simplified - log = logging.getLogger() -def get_trust_account(context: Context, source, **kwargs): - current_account = SessionHelper.get_account() - print('current_account = ', current_account) - return current_account +def get_trust_account(context: Context, source, organizationUri): + return EnvironmentService.get_trust_account(uri=organizationUri) def create_environment(context: Context, source, input={}): @@ -203,8 +200,7 @@ def resolve_user_role(context: Context, source: Environment): def list_environment_group_permissions(context, source, environmentUri: str = None, groupUri: str = None): - with context.engine.scoped_session() as session: - return EnvironmentService.list_group_permissions(session=session, uri=environmentUri, group_uri=groupUri) + return EnvironmentService.list_group_permissions(uri=environmentUri, group_uri=groupUri) @is_feature_enabled('core.features.env_aws_actions') @@ -214,18 +210,19 @@ def get_environment_assume_role_url( 
environmentUri: str = None, groupUri: str = None, ): - return EnvironmentService.get_environment_assume_role_url(environmentUri=environmentUri, groupUri=groupUri) + return EnvironmentService.get_environment_assume_role_url(uri=environmentUri, groupUri=groupUri) @is_feature_enabled('core.features.env_aws_actions') def generate_environment_access_token(context, source, environmentUri: str = None, groupUri: str = None): - credentials = EnvironmentService.generate_environment_access_token(environmentUri=environmentUri, groupUri=groupUri) + credentials = EnvironmentService.generate_environment_access_token(uri=environmentUri, groupUri=groupUri) return json.dumps(credentials) def get_environment_stack(context: Context, source: Environment, **kwargs): return StackService.resolve_parent_obj_stack( targetUri=source.environmentUri, + targetType='environment', environmentUri=source.environmentUri, ) @@ -245,39 +242,40 @@ def delete_environment(context: Context, source, environmentUri: str = None, del def enable_subscriptions(context: Context, source, environmentUri: str = None, input: dict = None): - EnvironmentService.enable_subscriptions(environmentUri, input) + EnvironmentService.enable_subscriptions(uri=environmentUri, input=input) StackService.deploy_stack(targetUri=environmentUri) return True def disable_subscriptions(context: Context, source, environmentUri: str = None): - EnvironmentService.disable_subscriptions(environmentUri) + EnvironmentService.disable_subscriptions(uri=environmentUri) StackService.deploy_stack(targetUri=environmentUri) return True def get_pivot_role_template(context: Context, source, organizationUri=None): - return EnvironmentService.get_template_from_resource_bucket(organizationUri, 'pivot_role_prefix') + return EnvironmentService.get_template_from_resource_bucket(uri=organizationUri, template_name='pivot_role_prefix') def get_cdk_exec_policy_template(context: Context, source, organizationUri=None): - return EnvironmentService.get_template_from_resource_bucket(organizationUri, 'cdk_exec_policy_prefix') + return EnvironmentService.get_template_from_resource_bucket( + uri=organizationUri, template_name='cdk_exec_policy_prefix' + ) def get_external_id(context: Context, source, organizationUri=None): - return EnvironmentService.get_external_id(organizationUri) + return EnvironmentService.get_external_id(uri=organizationUri) def get_pivot_role_name(context: Context, source, organizationUri=None): - return EnvironmentService.get_pivot_role(organizationUri) + return EnvironmentService.get_pivot_role(uri=organizationUri) def resolve_environment(context, source, **kwargs): """Resolves the environment for a environmental resource""" if not source: return None - with context.engine.scoped_session() as session: - return EnvironmentService.get_environment_by_uri(session, source.environmentUri) + return EnvironmentService.find_environment_by_uri(uri=source.environmentUri) def resolve_parameters(context, source: Environment, **kwargs): diff --git a/backend/dataall/core/environment/api/types.py b/backend/dataall/core/environment/api/types.py index 229593dd8..a95d943c4 100644 --- a/backend/dataall/core/environment/api/types.py +++ b/backend/dataall/core/environment/api/types.py @@ -100,6 +100,7 @@ gql.Field('subscriptionsConsumersTopicName', type=gql.String), gql.Field('subscriptionsProducersTopicName', type=gql.String), gql.Field('EnvironmentDefaultBucketName', type=gql.String), + gql.Field('EnvironmentLogsBucketName', type=gql.String), gql.Field('EnvironmentDefaultAthenaWorkGroup', 
type=gql.String), gql.Field( name='networks', diff --git a/backend/dataall/core/environment/cdk/env_role_core_policies/cloudformation.py b/backend/dataall/core/environment/cdk/env_role_core_policies/cloudformation.py index 75be97e9b..0cfbc3f1a 100644 --- a/backend/dataall/core/environment/cdk/env_role_core_policies/cloudformation.py +++ b/backend/dataall/core/environment/cdk/env_role_core_policies/cloudformation.py @@ -24,10 +24,6 @@ def get_statements(self, group_permissions, **kwargs): 'cloudformation:ListImports', 'cloudformation:DescribeAccountLimits', 'cloudformation:DescribeStackDriftDetectionStatus', - 'cloudformation:Cancel*', - 'cloudformation:Continue*', - 'cloudformation:CreateChangeSet', - 'cloudformation:ExecuteChangeSet', 'cloudformation:CreateStackSet', 'cloudformation:Get*', 'cloudformation:Describe*', @@ -36,6 +32,15 @@ def get_statements(self, group_permissions, **kwargs): ], resources=['*'], ), + iam.PolicyStatement( + actions=[ + 'cloudformation:Cancel*', + 'cloudformation:Continue*', + 'cloudformation:CreateChangeSet', + 'cloudformation:ExecuteChangeSet', + ], + resources=[f'arn:aws:cloudformation:*:{self.account}:*/{self.resource_prefix}*'], + ), iam.PolicyStatement( # sid="DeleteTeamCloudFormation", actions=[ diff --git a/backend/dataall/core/environment/cdk/env_role_core_policies/service_policy.py b/backend/dataall/core/environment/cdk/env_role_core_policies/service_policy.py index 0e68dc694..0ad5b67ff 100644 --- a/backend/dataall/core/environment/cdk/env_role_core_policies/service_policy.py +++ b/backend/dataall/core/environment/cdk/env_role_core_policies/service_policy.py @@ -64,14 +64,24 @@ def generate_policies(self) -> [aws_iam.ManagedPolicy]: 'events:ListRuleNamesByTarget', 'iam:list*', 'iam:Get*', - 'iam:CreatePolicy', - 'iam:CreateServiceLinkedRole', 'tag:GetResources', 'tag:GetTagValues', 'tag:GetTagKeys', ], resources=['*'], ), + aws_iam.PolicyStatement( + sid='IAMCreatePolicy', + effect=aws_iam.Effect.ALLOW, + actions=[ + 'iam:CreatePolicy', + 'iam:CreateServiceLinkedRole', + ], + resources=[ + f'arn:aws:iam::{self.account}:policy/{self.resource_prefix}*', + f'arn:aws:iam::{self.account}:role/aws-service-role/*', + ], + ), aws_iam.PolicyStatement( sid='CreateServiceRole', actions=[ diff --git a/backend/dataall/core/environment/cdk/environment_stack.py b/backend/dataall/core/environment/cdk/environment_stack.py index ca4a27190..85694cda1 100644 --- a/backend/dataall/core/environment/cdk/environment_stack.py +++ b/backend/dataall/core/environment/cdk/environment_stack.py @@ -165,30 +165,44 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): f'arn:aws:iam::{self._environment.AwsAccountId}:role/{self.pivot_role_name}', ) - # Environment S3 Bucket - default_environment_bucket = s3.Bucket( + # Environment Logging S3 Bucket + default_environment_log_bucket = s3.Bucket( self, - 'EnvironmentDefaultBucket', - bucket_name=self._environment.EnvironmentDefaultBucketName, + 'EnvironmentDefaultLogBucket', + bucket_name=self._environment.EnvironmentLogsBucketName, encryption=s3.BucketEncryption.S3_MANAGED, removal_policy=RemovalPolicy.RETAIN, block_public_access=s3.BlockPublicAccess.BLOCK_ALL, versioned=True, enforce_ssl=True, ) - default_environment_bucket.policy.apply_removal_policy(RemovalPolicy.RETAIN) - self.default_environment_bucket = default_environment_bucket - - default_environment_bucket.add_to_resource_policy( + default_environment_log_bucket.policy.apply_removal_policy(RemovalPolicy.RETAIN) + 
default_environment_log_bucket.add_to_resource_policy( iam.PolicyStatement( sid='AWSLogDeliveryWrite', effect=iam.Effect.ALLOW, principals=[iam.ServicePrincipal('logging.s3.amazonaws.com')], actions=['s3:PutObject', 's3:PutObjectAcl'], - resources=[f'{default_environment_bucket.bucket_arn}/*'], + resources=[f'{default_environment_log_bucket.bucket_arn}/*'], ) ) + # Environment S3 Bucket + default_environment_bucket = s3.Bucket( + self, + 'EnvironmentDefaultBucket', + bucket_name=self._environment.EnvironmentDefaultBucketName, + encryption=s3.BucketEncryption.S3_MANAGED, + removal_policy=RemovalPolicy.RETAIN, + block_public_access=s3.BlockPublicAccess.BLOCK_ALL, + versioned=True, + enforce_ssl=True, + server_access_logs_bucket=default_environment_log_bucket, + server_access_logs_prefix=f'access_logs/{self._environment.EnvironmentDefaultBucketName}/', + ) + default_environment_bucket.policy.apply_removal_policy(RemovalPolicy.RETAIN) + self.default_environment_bucket = default_environment_bucket + default_environment_bucket.add_lifecycle_rule( abort_incomplete_multipart_upload_after=Duration.days(7), noncurrent_version_transitions=[ @@ -582,12 +596,22 @@ def create_integration_tests_role(self): 's3:CreateBucket', 's3:DeleteBucket', 's3:PutEncryptionConfiguration', - 's3:List*', 's3:GetObject*', 's3:DeleteObject', + 's3:DeleteObjectVersion', ], effect=iam.Effect.ALLOW, - resources=['arn:aws:s3:::dataalltesting*'], + resources=[ + 'arn:aws:s3:::dataalltesting*', + 'arn:aws:s3:::dataalltesting*/*', + 'arn:aws:s3:::dataall-session*', + 'arn:aws:s3:::dataall-session*/*', + 'arn:aws:s3:::dataall-test-session*', + 'arn:aws:s3:::dataall-test-session*/*', + 'arn:aws:s3:::dataall-temp*', + 'arn:aws:s3:::dataall-temp*/*', + 'arn:aws:s3:::dataall-env-access-logs*', + ], ) ) self.test_role.add_to_policy( @@ -606,8 +630,10 @@ def create_integration_tests_role(self): iam.PolicyStatement( actions=[ 'lakeformation:GrantPermissions', + 'lakeformation:RevokePermissions', 'lakeformation:PutDataLakeSettings', 'lakeformation:GetDataLakeSettings', + 'glue:GetDatabase', 'kms:CreateKey', 'kms:CreateAlias', 'kms:DeleteAlias', @@ -616,7 +642,11 @@ def create_integration_tests_role(self): 'kms:PutKeyPolicy', 'kms:ScheduleKeyDeletion', 'kms:TagResource', + 'kms:DescribeKey', 's3:GetBucketVersioning', + 's3:List*', + 's3:ListAccessPoints', + 's3:DeleteAccessPoint', ], effect=iam.Effect.ALLOW, resources=['*'], @@ -653,3 +683,40 @@ def create_integration_tests_role(self): resources=[f'arn:aws:cloudformation:*:{self.account}:stack/*/*'], ), ) + + self.test_role.add_to_policy( + iam.PolicyStatement( + actions=[ + 'iam:GetRole', + 'iam:CreateRole', + 'iam:DeleteRole', + 'iam:PutRolePolicy', + 'iam:DeleteRolePolicy', + 'iam:DetachRolePolicy', + 'iam:ListAttachedRolePolicies', + ], + effect=iam.Effect.ALLOW, + resources=[ + f'arn:aws:iam::{self.account}:role/dataall-test*', + f'arn:aws:iam::{self.account}:role/dataall-session*', + ], + ), + ) + + self.test_role.add_to_policy( + iam.PolicyStatement( + actions=[ + 'quicksight:DescribeAccountSubscription', + ], + effect=iam.Effect.ALLOW, + resources=[f'arn:aws:quicksight:*:{self.account}:*'], + ), + ) + + self.test_role.add_to_policy( + iam.PolicyStatement( + actions=['redshift:DeauthorizeDataShare'], + effect=iam.Effect.ALLOW, + resources=[f'arn:aws:redshift:{self.region}:{self.account}:datashare:*/dataall*'], + ), + ) diff --git a/backend/dataall/core/environment/db/environment_models.py b/backend/dataall/core/environment/db/environment_models.py index e56135e97..c4890850a 100644 
--- a/backend/dataall/core/environment/db/environment_models.py +++ b/backend/dataall/core/environment/db/environment_models.py @@ -25,6 +25,7 @@ class Environment(Resource, Base): EnvironmentDefaultIAMRoleImported = Column(Boolean, default=False) EnvironmentDefaultIAMRoleArn = Column(String, nullable=False) EnvironmentDefaultBucketName = Column(String) + EnvironmentLogsBucketName = Column(String) EnvironmentDefaultAthenaWorkGroup = Column(String) roleCreated = Column(Boolean, nullable=False, default=False) diff --git a/backend/dataall/core/environment/db/environment_repositories.py b/backend/dataall/core/environment/db/environment_repositories.py index 7aca43b40..860cfb6fa 100644 --- a/backend/dataall/core/environment/db/environment_repositories.py +++ b/backend/dataall/core/environment/db/environment_repositories.py @@ -1,3 +1,4 @@ +from dataall.base.utils.naming_convention import NamingConventionPattern, NamingConventionService from dataall.core.environment.db.environment_models import ( EnvironmentParameter, Environment, @@ -281,7 +282,9 @@ def query_user_environments(session, username, groups, filter) -> Query: or_( Environment.label.ilike('%' + term + '%'), Environment.description.ilike('%' + term + '%'), - Environment.tags.contains(f'{{{term}}}'), + Environment.tags.contains( + f'{{{NamingConventionService(pattern=NamingConventionPattern.DEFAULT_SEARCH, target_label=term).sanitize()}}}' + ), Environment.region.ilike('%' + term + '%'), ) ) diff --git a/backend/dataall/core/environment/services/environment_service.py b/backend/dataall/core/environment/services/environment_service.py index 795e3a63b..febaefd42 100644 --- a/backend/dataall/core/environment/services/environment_service.py +++ b/backend/dataall/core/environment/services/environment_service.py @@ -135,7 +135,7 @@ def validate_org_group(org_uri, group, session): class EnvironmentService: @staticmethod - def validate_permissions(session, uri, g_permissions, group): + def _validate_permissions(session, uri, g_permissions, group): """ g_permissions: coming from frontend = ENVIRONMENT_INVITATION_REQUEST @@ -160,7 +160,7 @@ def validate_permissions(session, uri, g_permissions, group): ) @staticmethod - def get_pivot_role_as_part_of_environment(): + def _get_pivot_role_as_part_of_environment(): ssm_param = ParameterStoreManager.get_parameter_value( region=os.getenv('AWS_REGION', 'eu-west-1'), parameter_path=f"/dataall/{os.getenv('envname', 'local')}/pivotRole/enablePivotRoleAutoCreate", @@ -168,7 +168,7 @@ def get_pivot_role_as_part_of_environment(): return ssm_param == 'True' @staticmethod - def check_cdk_resources(account_id, region, data) -> str: + def _check_cdk_resources(account_id, region, data) -> str: """ Check if all necessary cdk resources exists in the account :return : pivot role name @@ -181,7 +181,7 @@ def check_cdk_resources(account_id, region, data) -> str: log.info('Checking cdk resources for environment.') - pivot_role_as_part_of_environment = EnvironmentService.get_pivot_role_as_part_of_environment() + pivot_role_as_part_of_environment = EnvironmentService._get_pivot_role_as_part_of_environment() log.info(f'Pivot role as part of environment = {pivot_role_as_part_of_environment}') cdk_look_up_role_arn = SessionHelper.get_cdk_look_up_role_arn(accountid=account_id, region=region) @@ -216,6 +216,11 @@ def check_cdk_resources(account_id, region, data) -> str: return cdk_role_name + @staticmethod + @ResourcePolicyService.has_resource_permission(LINK_ENVIRONMENT) + def get_trust_account(uri): + return 
SessionHelper.get_account() + @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_ENVIRONMENTS) @ResourcePolicyService.has_resource_permission(LINK_ENVIRONMENT) @@ -223,7 +228,7 @@ def create_environment(uri, data=None): context = get_context() with context.db_engine.scoped_session() as session: EnvironmentRequestValidationService.validate_creation_params(data, uri, session) - cdk_role_name = EnvironmentService.check_cdk_resources(data.get('AwsAccountId'), data.get('region'), data) + cdk_role_name = EnvironmentService._check_cdk_resources(data.get('AwsAccountId'), data.get('region'), data) env = Environment( organizationUri=data.get('organizationUri'), label=data.get('label', 'Unnamed'), @@ -255,6 +260,13 @@ def create_environment(uri, data=None): resource_prefix=env.resourcePrefix, ).build_compliant_name() + env.EnvironmentLogsBucketName = NamingConventionService( + target_uri=env.environmentUri, + target_label='env-access-logs', + pattern=NamingConventionPattern.S3, + resource_prefix=env.resourcePrefix, + ).build_compliant_name() + env.EnvironmentDefaultAthenaWorkGroup = NamingConventionService( target_uri=env.environmentUri, target_label=env.label, @@ -316,7 +328,7 @@ def update_environment(uri, data=None): with get_context().db_engine.scoped_session() as session: environment = EnvironmentService.get_environment_by_uri(session, uri) previous_resource_prefix = environment.resourcePrefix - EnvironmentService.check_cdk_resources( + EnvironmentService._check_cdk_resources( account_id=environment.AwsAccountId, region=environment.region, data=data ) @@ -359,7 +371,7 @@ def invite_group(uri, data=None) -> (Environment, EnvironmentGroup): group: str = data['groupUri'] with get_context().db_engine.scoped_session() as session: - EnvironmentService.validate_permissions(session, uri, data['permissions'], group) + EnvironmentService._validate_permissions(session, uri, data['permissions'], group) environment = EnvironmentService.get_environment_by_uri(session, uri) @@ -486,7 +498,7 @@ def update_group_permissions(uri, data=None): group = data['groupUri'] with get_context().db_engine.scoped_session() as session: - EnvironmentService.validate_permissions(session, uri, data['permissions'], group) + EnvironmentService._validate_permissions(session, uri, data['permissions'], group) environment = EnvironmentService.get_environment_by_uri(session, uri) @@ -514,7 +526,7 @@ def update_group_permissions(uri, data=None): @staticmethod @ResourcePolicyService.has_resource_permission(environment_permissions.LIST_ENVIRONMENT_GROUP_PERMISSIONS) - def list_group_permissions(session, uri, group_uri): + def list_group_permissions(uri, group_uri): # the permission checked with get_context().db_engine.scoped_session() as session: return EnvironmentService.list_group_permissions_internal(session, uri, group_uri) @@ -912,7 +924,7 @@ def get_boolean_env_param(session, env: Environment, param: str) -> bool: return param is not None and param.value.lower() == 'true' @staticmethod - def is_user_invited(uri): + def _is_user_invited(uri): context = get_context() with context.db_engine.scoped_session() as session: return EnvironmentRepository.is_user_invited_to_environment(session=session, groups=context.groups, uri=uri) @@ -923,23 +935,17 @@ def resolve_user_role(environment: Environment): return EnvironmentPermission.Owner.value elif environment.SamlGroupName in get_context().groups: return EnvironmentPermission.Admin.value - elif EnvironmentService.is_user_invited(environment.environmentUri): + elif 
EnvironmentService._is_user_invited(environment.environmentUri): return EnvironmentPermission.Invited.value return EnvironmentPermission.NotInvited.value @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_ENVIRONMENTS) - def enable_subscriptions(environmentUri: str = None, input: dict = None): + @ResourcePolicyService.has_resource_permission(ENABLE_ENVIRONMENT_SUBSCRIPTIONS) + def enable_subscriptions(uri, input: dict = None): context = get_context() with context.db_engine.scoped_session() as session: - ResourcePolicyService.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - permission_name=ENABLE_ENVIRONMENT_SUBSCRIPTIONS, - ) - environment = EnvironmentService.get_environment_by_uri(session, environmentUri) + environment = EnvironmentService.get_environment_by_uri(session, uri) if input.get('producersTopicArn'): environment.subscriptionsProducersTopicName = input.get('producersTopicArn') environment.subscriptionsProducersTopicImported = True @@ -965,17 +971,11 @@ def enable_subscriptions(environmentUri: str = None, input: dict = None): @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_ENVIRONMENTS) - def disable_subscriptions(environment_uri: str = None): + @ResourcePolicyService.has_resource_permission(ENABLE_ENVIRONMENT_SUBSCRIPTIONS) + def disable_subscriptions(uri): context = get_context() with context.db_engine.scoped_session() as session: - ResourcePolicyService.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environment_uri, - permission_name=ENABLE_ENVIRONMENT_SUBSCRIPTIONS, - ) - environment = EnvironmentService.get_environment_by_uri(session, environment_uri) + environment = EnvironmentService.get_environment_by_uri(session, uri) environment.subscriptionsConsumersTopicName = None environment.subscriptionsConsumersTopicImported = False @@ -1027,20 +1027,11 @@ def _get_environment_group_aws_session(session, username, groups, environment, g @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_ENVIRONMENTS) - def get_environment_assume_role_url( - environmentUri: str = None, - groupUri: str = None, - ): + @ResourcePolicyService.has_resource_permission(CREDENTIALS_ENVIRONMENT) + def get_environment_assume_role_url(uri, groupUri): context = get_context() with context.db_engine.scoped_session() as session: - ResourcePolicyService.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - permission_name=CREDENTIALS_ENVIRONMENT, - ) - environment = EnvironmentService.get_environment_by_uri(session, environmentUri) + environment = EnvironmentService.get_environment_by_uri(session, uri) url = SessionHelper.get_console_access_url( EnvironmentService._get_environment_group_aws_session( session=session, @@ -1055,17 +1046,11 @@ def get_environment_assume_role_url( @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_ENVIRONMENTS) - def generate_environment_access_token(environmentUri: str = None, groupUri: str = None): + @ResourcePolicyService.has_resource_permission(CREDENTIALS_ENVIRONMENT) + def generate_environment_access_token(uri, groupUri): context = get_context() with context.db_engine.scoped_session() as session: - ResourcePolicyService.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=environmentUri, - 
permission_name=CREDENTIALS_ENVIRONMENT, - ) - environment = EnvironmentService.get_environment_by_uri(session, environmentUri) + environment = EnvironmentService.get_environment_by_uri(session, uri) c = EnvironmentService._get_environment_group_aws_session( session=session, username=context.username, @@ -1080,16 +1065,8 @@ def generate_environment_access_token(environmentUri: str = None, groupUri: str } @staticmethod - def get_pivot_role(organization_uri): - context = get_context() - with context.db_engine.scoped_session() as session: - ResourcePolicyService.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=organization_uri, - permission_name=GET_ORGANIZATION, - ) + @ResourcePolicyService.has_resource_permission(LINK_ENVIRONMENT) + def get_pivot_role(uri): pivot_role_name = SessionHelper.get_delegation_role_name(region='') if not pivot_role_name: raise exceptions.AWSResourceNotFound( @@ -1099,47 +1076,31 @@ def get_pivot_role(organization_uri): return pivot_role_name @staticmethod - def get_external_id(organization_uri): - context = get_context() - with context.db_engine.scoped_session() as session: - ResourcePolicyService.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=organization_uri, - permission_name=GET_ORGANIZATION, + @ResourcePolicyService.has_resource_permission(LINK_ENVIRONMENT) + def get_external_id(uri): + external_id = SessionHelper.get_external_id_secret() + if not external_id: + raise exceptions.AWSResourceNotFound( + action='GET_EXTERNAL_ID', + message='External Id could not be found on AWS Secretsmanager', ) - external_id = SessionHelper.get_external_id_secret() - if not external_id: - raise exceptions.AWSResourceNotFound( - action='GET_EXTERNAL_ID', - message='External Id could not be found on AWS Secretsmanager', - ) - return external_id + return external_id @staticmethod - def get_template_from_resource_bucket(organization_uri, template_name): - context = get_context() - with context.db_engine.scoped_session() as session: - ResourcePolicyService.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=organization_uri, - permission_name=GET_ORGANIZATION, + @ResourcePolicyService.has_resource_permission(LINK_ENVIRONMENT) + def get_template_from_resource_bucket(uri, template_name): + envname = os.getenv('envname', 'local') + region = os.getenv('AWS_REGION', 'eu-central-1') + + resource_bucket = Parameter().get_parameter(env=envname, path='s3/resources_bucket_name') + template_key = Parameter().get_parameter(env=envname, path=f's3/{template_name}') + if not resource_bucket or not template_key: + raise AWSResourceNotFound( + action='GET_TEMPLATE', + message=f'{template_name} Yaml template file could not be found on Amazon S3 bucket', ) - envname = os.getenv('envname', 'local') - region = os.getenv('AWS_REGION', 'eu-central-1') - - resource_bucket = Parameter().get_parameter(env=envname, path='s3/resources_bucket_name') - template_key = Parameter().get_parameter(env=envname, path=f's3/{template_name}') - if not resource_bucket or not template_key: - raise AWSResourceNotFound( - action='GET_TEMPLATE', - message=f'{template_name} Yaml template file could not be found on Amazon S3 bucket', - ) - return S3_client.get_presigned_url(region, resource_bucket, template_key) + return S3_client.get_presigned_url(region, resource_bucket, template_key) @staticmethod 
@ResourcePolicyService.has_resource_permission(environment_permissions.GET_ENVIRONMENT) diff --git a/backend/dataall/core/environment/tasks/env_stacks_updater.py b/backend/dataall/core/environment/tasks/env_stacks_updater.py index ecf6b72f9..40b9a14c6 100644 --- a/backend/dataall/core/environment/tasks/env_stacks_updater.py +++ b/backend/dataall/core/environment/tasks/env_stacks_updater.py @@ -12,11 +12,8 @@ from dataall.base.db import get_engine from dataall.base.utils import Parameter -root = logging.getLogger() -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) -log.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) + RETRIES = 30 SLEEP_TIME = 30 diff --git a/backend/dataall/core/groups/api/resolvers.py b/backend/dataall/core/groups/api/resolvers.py index ae7c28323..9434db6b5 100644 --- a/backend/dataall/core/groups/api/resolvers.py +++ b/backend/dataall/core/groups/api/resolvers.py @@ -14,8 +14,7 @@ def resolve_group_environment_permissions(context, source, environmentUri): if not source: return None - with context.engine.scoped_session() as session: - return EnvironmentService.list_group_permissions(session=session, uri=environmentUri, group_uri=source.groupUri) + return EnvironmentService.list_group_permissions(uri=environmentUri, group_uri=source.groupUri) def resolve_group_tenant_permissions(context, source): diff --git a/backend/dataall/core/organizations/db/organization_repositories.py b/backend/dataall/core/organizations/db/organization_repositories.py index 0fbf23935..4134e7fc6 100644 --- a/backend/dataall/core/organizations/db/organization_repositories.py +++ b/backend/dataall/core/organizations/db/organization_repositories.py @@ -7,6 +7,8 @@ from dataall.core.organizations.db import organization_models as models from dataall.core.environment.db.environment_models import Environment from dataall.base.context import get_context +from dataall.base.utils.naming_convention import NamingConventionPattern, NamingConventionService + logger = logging.getLogger(__name__) @@ -45,7 +47,9 @@ def query_user_organizations(session, username, groups, filter) -> Query: or_( models.Organization.label.ilike('%' + filter.get('term') + '%'), models.Organization.description.ilike('%' + filter.get('term') + '%'), - models.Organization.tags.contains(f"{{{filter.get('term')}}}"), + models.Organization.tags.contains( + f"{{{NamingConventionService(pattern=NamingConventionPattern.DEFAULT_SEARCH, target_label=filter.get('term')).sanitize()}}}" + ), ) ) return query.order_by(models.Organization.label).distinct() diff --git a/backend/dataall/core/organizations/services/organization_service.py b/backend/dataall/core/organizations/services/organization_service.py index 696ae0881..cbc81a1ce 100644 --- a/backend/dataall/core/organizations/services/organization_service.py +++ b/backend/dataall/core/organizations/services/organization_service.py @@ -305,7 +305,7 @@ def resolve_organization_by_env(uri): context = get_context() with context.db_engine.scoped_session() as session: env = EnvironmentRepository.get_environment_by_uri(session, uri) - return OrganizationRepository.find_organization_by_uri(session, env.organizationUri) + return OrganizationService.get_organization(uri=env.organizationUri) @staticmethod @ResourcePolicyService.has_resource_permission(GET_ORGANIZATION) diff --git a/backend/dataall/core/permissions/api/resolvers.py b/backend/dataall/core/permissions/api/resolvers.py index de35d596b..6cbceee12 100644 --- 
a/backend/dataall/core/permissions/api/resolvers.py +++ b/backend/dataall/core/permissions/api/resolvers.py @@ -1,11 +1,5 @@ import logging -import os - -from dataall.base.aws.sts import SessionHelper -from dataall.base.aws.parameter_store import ParameterStoreManager -from dataall.base.db.exceptions import RequiredParameter -from dataall.core.permissions.services.permission_service import PermissionService -from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService +from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService, TenantActionsService log = logging.getLogger(__name__) @@ -26,12 +20,4 @@ def list_tenant_groups(context, source, filter=None): def update_ssm_parameter(context, source, name: str = None, value: str = None): - current_account = SessionHelper.get_account() - region = os.getenv('AWS_REGION', 'eu-west-1') - response = ParameterStoreManager.update_parameter( - AwsAccountId=current_account, - region=region, - parameter_name=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/{name}', - parameter_value=value, - ) - return response + return TenantActionsService.update_monitoring_ssm_parameter(name, value) diff --git a/backend/dataall/core/permissions/services/tenant_policy_service.py b/backend/dataall/core/permissions/services/tenant_policy_service.py index 2d1d1511b..d8096d248 100644 --- a/backend/dataall/core/permissions/services/tenant_policy_service.py +++ b/backend/dataall/core/permissions/services/tenant_policy_service.py @@ -8,12 +8,19 @@ from dataall.core.permissions.db.tenant.tenant_repositories import TenantRepository from dataall.core.permissions.services.permission_service import PermissionService from dataall.core.permissions.db.tenant.tenant_models import Tenant +from dataall.base.services.service_provider_factory import ServiceProviderFactory +from dataall.base.aws.sts import SessionHelper +from dataall.base.aws.parameter_store import ParameterStoreManager import logging +import os from functools import wraps log = logging.getLogger('Permissions') +ENVNAME = os.getenv('envname', 'local') +REGION = os.getenv('AWS_REGION', 'eu-west-1') + class RequestValidationService: @staticmethod @@ -69,6 +76,9 @@ def validate_update_group_permission_params(data): raise exceptions.RequiredParameter('permissions') if not data.get('groupUri'): raise exceptions.RequiredParameter('groupUri') + groups = ServiceProviderFactory.get_service_provider_instance().list_groups(envname=ENVNAME, region=REGION) + if data.get('groupUri') not in groups: + raise exceptions.InvalidInput('groupUri', data.get('groupUri'), ' a valid group') class TenantPolicyValidationService: @@ -113,6 +123,26 @@ def validate_permissions(session, tenant_name, g_permissions, group): return tenant_group_permissions +class TenantActionsService: + @staticmethod + def update_monitoring_ssm_parameter(name, value): + # raises UnauthorizedOperation exception, if there is no admin access + context = get_context() + TenantPolicyValidationService.validate_admin_access( + context.username, context.groups, 'UPDATE_SSM_PARAMETER_MONITORING' + ) + + current_account = SessionHelper.get_account() + region = os.getenv('AWS_REGION', 'eu-west-1') + response = ParameterStoreManager.update_parameter( + AwsAccountId=current_account, + region=region, + parameter_name=f'/dataall/{os.getenv("envname", "local")}/quicksightmonitoring/{name}', + parameter_value=value, + ) + return response + + class TenantPolicyService: TENANT_NAME = 'dataall' diff --git 
a/backend/dataall/core/stacks/api/types.py b/backend/dataall/core/stacks/api/types.py index 1ddd96184..35a4a92ad 100644 --- a/backend/dataall/core/stacks/api/types.py +++ b/backend/dataall/core/stacks/api/types.py @@ -20,6 +20,7 @@ gql.Field(name='region', type=gql.NonNullableType(gql.String)), gql.Field(name='status', type=gql.String), gql.Field(name='stackid', type=gql.String), + gql.Field(name='updated', type=gql.AWSDateTime), gql.Field(name='link', type=gql.String, resolver=resolve_link), gql.Field(name='outputs', type=gql.String, resolver=resolve_outputs), gql.Field(name='resources', type=gql.String, resolver=resolve_resources), diff --git a/backend/dataall/core/stacks/db/target_type_repositories.py b/backend/dataall/core/stacks/db/target_type_repositories.py index c175ed307..ca5770649 100644 --- a/backend/dataall/core/stacks/db/target_type_repositories.py +++ b/backend/dataall/core/stacks/db/target_type_repositories.py @@ -5,6 +5,7 @@ GET_ENVIRONMENT, UPDATE_ENVIRONMENT, ) +from dataall.core.permissions.services.tenant_permissions import MANAGE_ENVIRONMENTS logger = logging.getLogger(__name__) @@ -14,10 +15,11 @@ class TargetType: _TARGET_TYPES = {} - def __init__(self, name, read_permission, write_permission): + def __init__(self, name, read_permission, write_permission, tenant_permission): self.name = name self.read_permission = read_permission self.write_permission = write_permission + self.tenant_permission = tenant_permission TargetType._TARGET_TYPES[name] = self @@ -31,6 +33,11 @@ def get_resource_read_permission_name(target_type): TargetType.is_supported_target_type(target_type) return TargetType._TARGET_TYPES[target_type].read_permission + @staticmethod + def get_resource_tenant_permission_name(target_type): + TargetType.is_supported_target_type(target_type) + return TargetType._TARGET_TYPES[target_type].tenant_permission + @staticmethod def is_supported_target_type(target_type): if target_type not in TargetType._TARGET_TYPES: @@ -41,4 +48,4 @@ def is_supported_target_type(target_type): ) -TargetType('environment', GET_ENVIRONMENT, UPDATE_ENVIRONMENT) +TargetType('environment', GET_ENVIRONMENT, UPDATE_ENVIRONMENT, MANAGE_ENVIRONMENTS) diff --git a/backend/dataall/core/stacks/services/stack_service.py b/backend/dataall/core/stacks/services/stack_service.py index 5da1c4ba6..2a80fc2a6 100644 --- a/backend/dataall/core/stacks/services/stack_service.py +++ b/backend/dataall/core/stacks/services/stack_service.py @@ -4,6 +4,7 @@ import logging from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService +from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService from dataall.core.stacks.aws.cloudformation import CloudFormation from dataall.core.stacks.services.keyvaluetag_service import KeyValueTagService from dataall.core.tasks.service_handlers import Worker @@ -46,9 +47,16 @@ def verify_target_type_and_uri(target_type, target_uri): class StackService: @staticmethod - def resolve_parent_obj_stack(targetUri: str, environmentUri: str): + def resolve_parent_obj_stack(targetUri: str, targetType: str, environmentUri: str): context = get_context() with context.db_engine.scoped_session() as session: + ResourcePolicyService.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=targetUri, + permission_name=TargetType.get_resource_read_permission_name(targetType), + ) env: Environment = EnvironmentRepository.get_environment_by_uri(session, environmentUri) stack: Stack 
= StackRepository.find_stack_by_target_uri(session, target_uri=targetUri) if not stack: @@ -163,6 +171,13 @@ def update_stack_by_target_uri(target_uri, target_type): StackRequestVerifier.verify_target_type_and_uri(target_uri, target_type) context = get_context() with context.db_engine.scoped_session() as session: + TenantPolicyService.check_user_tenant_permission( + session=session, + username=context.username, + groups=context.groups, + permission_name=TargetType.get_resource_tenant_permission_name(target_type), + tenant_name=TenantPolicyService.TENANT_NAME, + ) ResourcePolicyService.check_user_resource_permission( session=session, username=context.username, @@ -178,6 +193,23 @@ def update_stack_by_target_uri(target_uri, target_type): def update_stack_tags(input): StackRequestVerifier.validate_update_tag_input(input) target_uri = input.get('targetUri') + target_type = input.get('targetType') + context = get_context() + with context.db_engine.scoped_session() as session: + TenantPolicyService.check_user_tenant_permission( + session=session, + username=context.username, + groups=context.groups, + permission_name=TargetType.get_resource_tenant_permission_name(target_type), + tenant_name=TenantPolicyService.TENANT_NAME, + ) + ResourcePolicyService.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=target_uri, + permission_name=TargetType.get_resource_update_permission_name(target_type), + ) kv_tags = KeyValueTagService.update_key_value_tags( uri=target_uri, data=input, diff --git a/backend/dataall/core/stacks/tasks/cdkproxy.py b/backend/dataall/core/stacks/tasks/cdkproxy.py index 198f80081..f0aa7d6d5 100644 --- a/backend/dataall/core/stacks/tasks/cdkproxy.py +++ b/backend/dataall/core/stacks/tasks/cdkproxy.py @@ -5,11 +5,7 @@ from dataall.base.cdkproxy.cdk_cli_wrapper import deploy_cdk_stack from dataall.base.db import get_engine -root = logging.getLogger() -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) logger = logging.getLogger(__name__) -logger.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) if __name__ == '__main__': diff --git a/backend/dataall/core/vpc/db/vpc_repositories.py b/backend/dataall/core/vpc/db/vpc_repositories.py index f5b4865b6..4fbca29bf 100644 --- a/backend/dataall/core/vpc/db/vpc_repositories.py +++ b/backend/dataall/core/vpc/db/vpc_repositories.py @@ -4,6 +4,7 @@ from dataall.base.db import exceptions from dataall.core.vpc.db.vpc_models import Vpc +from dataall.base.utils.naming_convention import NamingConventionPattern, NamingConventionService log = logging.getLogger(__name__) @@ -48,6 +49,9 @@ def query_environment_networks(session, uri, filter): or_( Vpc.label.ilike('%' + term + '%'), Vpc.VpcId.ilike('%' + term + '%'), + Vpc.tags.contains( + f'{{{NamingConventionService(pattern=NamingConventionPattern.DEFAULT_SEARCH, target_label=term).sanitize()}}}' + ), ) ) return query.order_by(Vpc.label) diff --git a/backend/dataall/core/vpc/services/vpc_service.py b/backend/dataall/core/vpc/services/vpc_service.py index 29feb3428..fbdf8f092 100644 --- a/backend/dataall/core/vpc/services/vpc_service.py +++ b/backend/dataall/core/vpc/services/vpc_service.py @@ -1,5 +1,8 @@ +import logging + from dataall.base.context import get_context from dataall.base.db import exceptions +from dataall.base.db.exceptions import ResourceUnauthorized from dataall.core.permissions.services.group_policy_service import GroupPolicyService from dataall.core.environment.db.environment_repositories import 
EnvironmentRepository from dataall.core.activity.db.activity_models import Activity @@ -7,10 +10,12 @@ from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService from dataall.core.vpc.db.vpc_repositories import VpcRepository from dataall.core.vpc.db.vpc_models import Vpc -from dataall.core.permissions.services.network_permissions import NETWORK_ALL, DELETE_NETWORK +from dataall.core.permissions.services.network_permissions import NETWORK_ALL, DELETE_NETWORK, GET_NETWORK from dataall.core.permissions.services.environment_permissions import CREATE_NETWORK from dataall.core.permissions.services.tenant_permissions import MANAGE_ENVIRONMENTS +log = logging.getLogger(__name__) + def _session(): return get_context().db_engine.scoped_session() @@ -44,6 +49,7 @@ def create_network(uri: str, admin_group: str, data: dict): owner=username, label=data['label'], name=data['label'], + tags=data.get('tags', []), default=data.get('default', False), ) VpcRepository.save_network(session, vpc) @@ -89,4 +95,19 @@ def delete_network(uri): @staticmethod def get_environment_networks(environment_uri): with _session() as session: - return VpcRepository.get_environment_networks(session=session, environment_uri=environment_uri) + nets = [] + all_nets = VpcRepository.get_environment_networks(session=session, environment_uri=environment_uri) + for net in all_nets: + try: + ResourcePolicyService.check_user_resource_permission( + session=session, + username=get_context().username, + groups=get_context().groups, + resource_uri=net.vpcUri, + permission_name=GET_NETWORK, + ) + except ResourceUnauthorized as exc: + log.info(exc) + else: + nets.append(net) + return nets diff --git a/backend/dataall/modules/catalog/services/glossaries_service.py b/backend/dataall/modules/catalog/services/glossaries_service.py index 92ba22142..8a7db7d82 100644 --- a/backend/dataall/modules/catalog/services/glossaries_service.py +++ b/backend/dataall/modules/catalog/services/glossaries_service.py @@ -1,12 +1,13 @@ import logging +from functools import wraps from dataall.base.context import get_context +from dataall.base.db import exceptions from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService - -from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository from dataall.modules.catalog.db.glossary_models import GlossaryNode -from dataall.modules.catalog.services.glossaries_permissions import MANAGE_GLOSSARIES +from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository from dataall.modules.catalog.indexers.registry import GlossaryRegistry +from dataall.modules.catalog.services.glossaries_permissions import MANAGE_GLOSSARIES logger = logging.getLogger(__name__) @@ -15,6 +16,39 @@ def _session(): return get_context().db_engine.scoped_session() +class GlossariesResourceAccess: + @staticmethod + def is_owner(): + def decorator(f): + @wraps(f) + def wrapper(*args, **kwargs): + uri = kwargs.get('uri') + if not uri: + raise KeyError(f"{f.__name__} doesn't have parameter uri.") + GlossariesResourceAccess.check_owner(uri) + return f(*args, **kwargs) + + return wrapper + + return decorator + + @staticmethod + def check_owner(uri): + context = get_context() + with context.db_engine.scoped_session() as session: + node = GlossaryRepository.get_node(session=session, uri=uri) + MAX_GLOSSARY_DEPTH = 10 + depth = 0 + while node.nodeType != 'G' and depth <= MAX_GLOSSARY_DEPTH: + node = GlossaryRepository.get_node(session=session, uri=node.parentUri) + depth += 1 + if 
not node or node.admin not in context.groups: + raise exceptions.UnauthorizedOperation( + action='GLOSSARY MUTATION', + message=f'User {context.username} is not the admin of the glossary {node.label}.', + ) + + class GlossariesService: @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_GLOSSARIES) @@ -24,12 +58,14 @@ def create_glossary(data: dict = None) -> GlossaryNode: @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_GLOSSARIES) + @GlossariesResourceAccess.is_owner() def create_category(uri: str, data: dict = None): with _session() as session: return GlossaryRepository.create_category(session=session, uri=uri, data=data) @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_GLOSSARIES) + @GlossariesResourceAccess.is_owner() def create_term(uri: str, data: dict = None): with _session() as session: return GlossaryRepository.create_term(session=session, uri=uri, data=data) @@ -95,12 +131,14 @@ def get_link_target(targetUri: str, targetType: str): @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_GLOSSARIES) + @GlossariesResourceAccess.is_owner() def update_node(uri: str = None, data: dict = None): with _session() as session: return GlossaryRepository.update_node(session=session, uri=uri, data=data) @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_GLOSSARIES) + @GlossariesResourceAccess.is_owner() def delete_node(uri: str = None): with _session() as session: return GlossaryRepository.delete_node(session=session, uri=uri) @@ -108,6 +146,7 @@ def delete_node(uri: str = None): @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_GLOSSARIES) def approve_term_association(linkUri: str): + # is_owner permissions checked in GlossaryRepository.approve_term_association with _session() as session: return GlossaryRepository.approve_term_association( session=session, username=get_context().username, groups=get_context().groups, linkUri=linkUri @@ -116,6 +155,7 @@ def approve_term_association(linkUri: str): @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_GLOSSARIES) def dismiss_term_association(linkUri: str): + # is_owner permissions checked in GlossaryRepository.dismiss_term_association with _session() as session: return GlossaryRepository.dismiss_term_association( session=session, username=get_context().username, groups=get_context().groups, linkUri=linkUri diff --git a/backend/dataall/modules/catalog/tasks/catalog_indexer_task.py b/backend/dataall/modules/catalog/tasks/catalog_indexer_task.py index 032739db9..807b712a3 100644 --- a/backend/dataall/modules/catalog/tasks/catalog_indexer_task.py +++ b/backend/dataall/modules/catalog/tasks/catalog_indexer_task.py @@ -9,11 +9,7 @@ from dataall.base.loader import load_modules, ImportMode from dataall.base.utils.alarm_service import AlarmService -root = logging.getLogger() -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) -log.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) class CatalogIndexerTask: diff --git a/backend/dataall/modules/dashboards/__init__.py b/backend/dataall/modules/dashboards/__init__.py index ffbc8e92d..4d62dbb18 100644 --- a/backend/dataall/modules/dashboards/__init__.py +++ b/backend/dataall/modules/dashboards/__init__.py @@ -5,7 +5,6 @@ from dataall.base.loader import ImportMode, ModuleInterface - log = logging.getLogger(__name__) @@ -33,8 +32,9 @@ def __init__(self): from dataall.modules.catalog.indexers.registry import GlossaryRegistry, GlossaryDefinition from 
dataall.modules.vote.services.vote_service import add_vote_type from dataall.modules.dashboards.indexers.dashboard_indexer import DashboardIndexer + from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD - FeedRegistry.register(FeedDefinition('Dashboard', Dashboard)) + FeedRegistry.register(FeedDefinition('Dashboard', Dashboard, GET_DASHBOARD)) GlossaryRegistry.register( GlossaryDefinition( @@ -42,7 +42,7 @@ def __init__(self): ) ) - add_vote_type('dashboard', DashboardIndexer) + add_vote_type('dashboard', DashboardIndexer, GET_DASHBOARD) EnvironmentResourceManager.register(DashboardRepository()) log.info('Dashboard API has been loaded') diff --git a/backend/dataall/modules/dashboards/api/mutations.py b/backend/dataall/modules/dashboards/api/mutations.py index 93a6e0a02..270c35f6f 100644 --- a/backend/dataall/modules/dashboards/api/mutations.py +++ b/backend/dataall/modules/dashboards/api/mutations.py @@ -35,17 +35,6 @@ resolver=delete_dashboard, ) - -shareDashboard = gql.MutationField( - name='shareDashboard', - type=gql.Ref('DashboardShare'), - args=[ - gql.Argument(name='principalId', type=gql.NonNullableType(gql.String)), - gql.Argument(name='dashboardUri', type=gql.NonNullableType(gql.String)), - ], - resolver=share_dashboard, -) - requestDashboardShare = gql.MutationField( name='requestDashboardShare', type=gql.Ref('DashboardShare'), diff --git a/backend/dataall/modules/dashboards/api/queries.py b/backend/dataall/modules/dashboards/api/queries.py index 9912375b8..a4b005adc 100644 --- a/backend/dataall/modules/dashboards/api/queries.py +++ b/backend/dataall/modules/dashboards/api/queries.py @@ -3,7 +3,6 @@ get_dashboard, get_monitoring_dashboard_id, get_monitoring_vpc_connection_id, - get_quicksight_author_session, get_quicksight_designer_url, get_quicksight_reader_session, get_quicksight_reader_url, @@ -37,14 +36,6 @@ resolver=get_monitoring_vpc_connection_id, ) -getPlatformAuthorSession = gql.QueryField( - name='getPlatformAuthorSession', - args=[ - gql.Argument(name='awsAccount', type=gql.NonNullableType(gql.String)), - ], - type=gql.String, - resolver=get_quicksight_author_session, -) getPlatformReaderSession = gql.QueryField( name='getPlatformReaderSession', diff --git a/backend/dataall/modules/dashboards/api/resolvers.py b/backend/dataall/modules/dashboards/api/resolvers.py index 86d0fb671..1744d71ea 100644 --- a/backend/dataall/modules/dashboards/api/resolvers.py +++ b/backend/dataall/modules/dashboards/api/resolvers.py @@ -48,6 +48,12 @@ def get_dashboard(context: Context, source, dashboardUri: str = None): return DashboardService.get_dashboard(uri=dashboardUri) +def get_dashboard_restricted_information(context: Context, source: Dashboard): + if not source: + return None + return DashboardService.get_dashboard_restricted_information(uri=source.dashboardUri, dashboard=source) + + def resolve_user_role(context: Context, source: Dashboard): if context.username and source.owner == context.username: return DashboardRole.Creator.value @@ -124,10 +130,6 @@ def create_quicksight_data_source_set(context, source, vpcConnectionId: str = No return DashboardQuicksightService.create_quicksight_data_source_set(vpcConnectionId) -def get_quicksight_author_session(context, source, awsAccount: str = None): - return DashboardQuicksightService.get_quicksight_author_session(awsAccount) - - def get_quicksight_reader_session(context, source, dashboardId: str = None): return DashboardQuicksightService.get_quicksight_reader_session(dashboardId) diff --git 
a/backend/dataall/modules/dashboards/api/types.py b/backend/dataall/modules/dashboards/api/types.py index 857cf9333..9e5405891 100644 --- a/backend/dataall/modules/dashboards/api/types.py +++ b/backend/dataall/modules/dashboards/api/types.py @@ -1,7 +1,7 @@ from dataall.base.api import gql from dataall.modules.dashboards.api.resolvers import ( DashboardRole, - get_dashboard_organization, + get_dashboard_restricted_information, resolve_glossary_terms, resolve_upvotes, resolve_user_role, @@ -9,6 +9,11 @@ from dataall.core.environment.api.resolvers import resolve_environment +DashboardRestrictedInformation = gql.ObjectType( + name='DashboardRestrictedInformation', + fields=[gql.Field('AwsAccountId', type=gql.String), gql.Field('region', type=gql.String)], +) + Dashboard = gql.ObjectType( name='Dashboard', fields=[ @@ -19,10 +24,14 @@ gql.Field('DashboardId', type=gql.String), gql.Field('tags', type=gql.ArrayType(gql.String)), gql.Field('created', type=gql.String), - gql.Field('AwsAccountId', type=gql.String), gql.Field('updated', type=gql.String), gql.Field('owner', type=gql.String), gql.Field('SamlGroupName', type=gql.String), + gql.Field( + 'restricted', + type=DashboardRestrictedInformation, + resolver=get_dashboard_restricted_information, + ), gql.Field( 'environment', type=gql.Ref('EnvironmentSimplified'), diff --git a/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py b/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py index 8b7eb9a9e..73a27e9f7 100644 --- a/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py +++ b/backend/dataall/modules/dashboards/services/dashboard_quicksight_service.py @@ -3,16 +3,16 @@ from dataall.base.aws.parameter_store import ParameterStoreManager from dataall.base.aws.sts import SessionHelper from dataall.base.context import get_context -from dataall.core.environment.services.environment_service import EnvironmentService -from dataall.core.permissions.db.tenant.tenant_policy_repositories import TenantPolicyRepository from dataall.base.db.exceptions import UnauthorizedOperation, TenantUnauthorized, AWSResourceNotFound -from dataall.core.permissions.services.tenant_permissions import TENANT_ALL +from dataall.base.utils import Parameter +from dataall.core.environment.services.environment_service import EnvironmentService from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService -from dataall.modules.dashboards.db.dashboard_repositories import DashboardRepository -from dataall.modules.dashboards.db.dashboard_models import Dashboard +from dataall.core.permissions.services.tenant_permissions import TENANT_ALL +from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService, TenantPolicyValidationService from dataall.modules.dashboards.aws.dashboard_quicksight_client import DashboardQuicksightClient -from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD, CREATE_DASHBOARD -from dataall.base.utils import Parameter +from dataall.modules.dashboards.db.dashboard_models import Dashboard +from dataall.modules.dashboards.db.dashboard_repositories import DashboardRepository +from dataall.modules.dashboards.services.dashboard_permissions import GET_DASHBOARD, CREATE_DASHBOARD, MANAGE_DASHBOARDS class DashboardQuicksightService: @@ -58,6 +58,7 @@ def get_quicksight_reader_url(cls, uri): return client.get_anonymous_session(dashboard_id=dash.DashboardId) @classmethod + 
@TenantPolicyService.has_tenant_permission(MANAGE_DASHBOARDS) @ResourcePolicyService.has_resource_permission(CREATE_DASHBOARD) def get_quicksight_designer_url(cls, uri: str): context = get_context() @@ -69,6 +70,7 @@ def get_quicksight_designer_url(cls, uri: str): @staticmethod def get_monitoring_dashboard_id(): + DashboardQuicksightService._check_user_must_be_admin() current_account = SessionHelper.get_account() dashboard_id = ParameterStoreManager.get_parameter_value( AwsAccountId=current_account, @@ -85,6 +87,7 @@ def get_monitoring_dashboard_id(): @staticmethod def get_monitoring_vpc_connection_id(): + DashboardQuicksightService._check_user_must_be_admin() current_account = SessionHelper.get_account() vpc_connection_id = ParameterStoreManager.get_parameter_value( AwsAccountId=current_account, @@ -101,6 +104,7 @@ def get_monitoring_vpc_connection_id(): @classmethod def create_quicksight_data_source_set(cls, vpc_connection_id): + cls._check_user_must_be_admin() client = cls._client() client.register_user_in_group(group_name='dataall', user_role='AUTHOR') @@ -114,11 +118,6 @@ def create_quicksight_data_source_set(cls, vpc_connection_id): return datasource_id - @classmethod - def get_quicksight_author_session(cls, aws_account): - DashboardQuicksightService._check_user_must_be_admin() - return cls._client(aws_account).get_author_session() - @classmethod def get_quicksight_reader_session(cls, dashboard_uri): cls._check_user_must_be_admin() @@ -128,7 +127,7 @@ def get_quicksight_reader_session(cls, dashboard_uri): @staticmethod def _check_user_must_be_admin(): context = get_context() - admin = TenantPolicyRepository.is_tenant_admin(context.groups) + admin = TenantPolicyValidationService.is_tenant_admin(context.groups) if not admin: raise TenantUnauthorized( diff --git a/backend/dataall/modules/dashboards/services/dashboard_service.py b/backend/dataall/modules/dashboards/services/dashboard_service.py index 34d6c3a34..30c205e0f 100644 --- a/backend/dataall/modules/dashboards/services/dashboard_service.py +++ b/backend/dataall/modules/dashboards/services/dashboard_service.py @@ -25,11 +25,15 @@ class DashboardService: """Service that serves request related to dashboard""" @staticmethod - @ResourcePolicyService.has_resource_permission(GET_DASHBOARD) def get_dashboard(uri: str) -> Dashboard: with get_context().db_engine.scoped_session() as session: return DashboardRepository.get_dashboard_by_uri(session, uri) + @staticmethod + @ResourcePolicyService.has_resource_permission(GET_DASHBOARD) + def get_dashboard_restricted_information(uri: str, dashboard: Dashboard): + return dashboard + @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_DASHBOARDS) @ResourcePolicyService.has_resource_permission(CREATE_DASHBOARD) diff --git a/backend/dataall/modules/datapipelines/__init__.py b/backend/dataall/modules/datapipelines/__init__.py index ad96a73c1..171a6a311 100644 --- a/backend/dataall/modules/datapipelines/__init__.py +++ b/backend/dataall/modules/datapipelines/__init__.py @@ -28,14 +28,17 @@ def __init__(self): from dataall.modules.feed.api.registry import FeedRegistry, FeedDefinition from dataall.modules.datapipelines.db.datapipelines_models import DataPipeline from dataall.modules.datapipelines.db.datapipelines_repositories import DatapipelinesRepository - from dataall.modules.datapipelines.services.datapipelines_permissions import GET_PIPELINE, UPDATE_PIPELINE - + from dataall.modules.datapipelines.services.datapipelines_permissions import ( + GET_PIPELINE, + UPDATE_PIPELINE, + 
MANAGE_PIPELINES, + ) import dataall.modules.datapipelines.api - FeedRegistry.register(FeedDefinition('DataPipeline', DataPipeline)) + FeedRegistry.register(FeedDefinition('DataPipeline', DataPipeline, GET_PIPELINE)) - TargetType('pipeline', GET_PIPELINE, UPDATE_PIPELINE) - TargetType('cdkpipeline', GET_PIPELINE, UPDATE_PIPELINE) + TargetType('pipeline', GET_PIPELINE, UPDATE_PIPELINE, MANAGE_PIPELINES) + TargetType('cdkpipeline', GET_PIPELINE, UPDATE_PIPELINE, MANAGE_PIPELINES) EnvironmentResourceManager.register(DatapipelinesRepository()) diff --git a/backend/dataall/modules/datapipelines/api/input_types.py b/backend/dataall/modules/datapipelines/api/input_types.py index 3d6b4556a..0b026f38c 100644 --- a/backend/dataall/modules/datapipelines/api/input_types.py +++ b/backend/dataall/modules/datapipelines/api/input_types.py @@ -45,16 +45,6 @@ ], ) -DataPipelineEnvironmentFilter = gql.InputType( - name='DataPipelineEnvironmentFilter', - arguments=[ - gql.Argument(name='term', type=gql.String), - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - gql.Argument(name='pipelineUri', type=gql.String), - ], -) - DataPipelineBrowseInput = gql.InputType( name='DataPipelineBrowseInput', arguments=[ diff --git a/backend/dataall/modules/datapipelines/api/resolvers.py b/backend/dataall/modules/datapipelines/api/resolvers.py index 3c2bde886..47e676816 100644 --- a/backend/dataall/modules/datapipelines/api/resolvers.py +++ b/backend/dataall/modules/datapipelines/api/resolvers.py @@ -105,5 +105,6 @@ def resolve_stack(context, source: DataPipeline, **kwargs): return None return StackService.resolve_parent_obj_stack( targetUri=source.DataPipelineUri, + targetType='pipeline', environmentUri=source.environmentUri, ) diff --git a/backend/dataall/modules/datapipelines/cdk/blueprints/data_pipeline_blueprint/requirements.txt b/backend/dataall/modules/datapipelines/cdk/blueprints/data_pipeline_blueprint/requirements.txt index 4067e0fd9..7351d1274 100644 --- a/backend/dataall/modules/datapipelines/cdk/blueprints/data_pipeline_blueprint/requirements.txt +++ b/backend/dataall/modules/datapipelines/cdk/blueprints/data_pipeline_blueprint/requirements.txt @@ -1,3 +1,2 @@ -aws-cdk-lib==2.103.1 -constructs>=10.0.0,<11.0.0 +aws-cdk-lib==2.160.0 aws-ddk-core==1.3.0 diff --git a/backend/dataall/modules/datapipelines/services/datapipelines_service.py b/backend/dataall/modules/datapipelines/services/datapipelines_service.py index de277d20d..ed010ef9e 100644 --- a/backend/dataall/modules/datapipelines/services/datapipelines_service.py +++ b/backend/dataall/modules/datapipelines/services/datapipelines_service.py @@ -3,8 +3,9 @@ from dataall.base.aws.sts import SessionHelper from dataall.base.context import get_context -from dataall.core.permissions.services.group_policy_service import GroupPolicyService +from dataall.base.db import exceptions from dataall.core.environment.services.environment_service import EnvironmentService +from dataall.core.permissions.services.group_policy_service import GroupPolicyService from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService from dataall.core.stacks.db.keyvaluetag_repositories import KeyValueTagRepository @@ -12,7 +13,6 @@ from dataall.core.stacks.services.stack_service import StackService from dataall.core.tasks.db.task_models import Task from dataall.core.tasks.service_handlers import Worker -from dataall.base.db import 
exceptions from dataall.modules.datapipelines.db.datapipelines_models import DataPipeline, DataPipelineEnvironment from dataall.modules.datapipelines.db.datapipelines_repositories import DatapipelinesRepository from dataall.modules.datapipelines.services.datapipelines_permissions import ( @@ -25,7 +25,6 @@ UPDATE_PIPELINE, ) - logger = logging.getLogger(__name__) @@ -34,6 +33,10 @@ def _session(): class DataPipelineService: + @staticmethod + def _get_pipeline_uri_from_env_uri(session, envPipelineUri): + return DatapipelinesRepository.get_pipeline_environment_by_uri(session, envPipelineUri).pipelineUri + @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_PIPELINES) @ResourcePolicyService.has_resource_permission(CREATE_PIPELINE) @@ -255,6 +258,9 @@ def _delete_repository(target_uri, accountid, cdk_role_arn, region, repo_name): @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_PIPELINES) + @ResourcePolicyService.has_resource_permission( + UPDATE_PIPELINE, param_name='envPipelineUri', parent_resource=_get_pipeline_uri_from_env_uri + ) def delete_pipeline_environment(envPipelineUri: str): with _session() as session: DatapipelinesRepository.delete_pipeline_environment(session=session, envPipelineUri=envPipelineUri) diff --git a/backend/dataall/modules/datasets_base/api/resolvers.py b/backend/dataall/modules/datasets_base/api/resolvers.py index 73f6539c7..017256ae6 100644 --- a/backend/dataall/modules/datasets_base/api/resolvers.py +++ b/backend/dataall/modules/datasets_base/api/resolvers.py @@ -58,8 +58,7 @@ def get_dataset_organization(context, source: DatasetBase, **kwargs): def get_dataset_environment(context, source: DatasetBase, **kwargs): if not source: return None - with context.engine.scoped_session() as session: - return EnvironmentService.get_environment_by_uri(session, source.environmentUri) + return EnvironmentService.find_environment_by_uri(uri=source.environmentUri) def get_dataset_owners_group(context, source: DatasetBase, **kwargs): @@ -79,5 +78,6 @@ def resolve_dataset_stack(context: Context, source: DatasetBase, **kwargs): return None return StackService.resolve_parent_obj_stack( targetUri=source.datasetUri, + targetType='dataset', environmentUri=source.environmentUri, ) diff --git a/backend/dataall/modules/datasets_base/db/dataset_repositories.py b/backend/dataall/modules/datasets_base/db/dataset_repositories.py index 05dab4c93..41152e394 100644 --- a/backend/dataall/modules/datasets_base/db/dataset_repositories.py +++ b/backend/dataall/modules/datasets_base/db/dataset_repositories.py @@ -6,6 +6,10 @@ from dataall.base.db.exceptions import ObjectNotFound from dataall.core.activity.db.activity_models import Activity from dataall.modules.datasets_base.db.dataset_models import DatasetBase +from dataall.base.utils.naming_convention import ( + NamingConventionService, + NamingConventionPattern, +) logger = logging.getLogger(__name__) @@ -65,9 +69,11 @@ def _query_all_user_datasets(session, username, groups, all_subqueries: List[Que term = filter['term'] query = query.filter( or_( - DatasetBase.description.ilike(term + '%%'), - DatasetBase.label.ilike(term + '%%'), - DatasetBase.tags.contains(f'{{{term}}}'), + DatasetBase.label.ilike('%' + term + '%'), + DatasetBase.description.ilike('%' + term + '%'), + DatasetBase.tags.contains( + f'{{{NamingConventionService(pattern=NamingConventionPattern.DEFAULT_SEARCH, target_label=term).sanitize()}}}' + ), ) ) return query.order_by(DatasetBase.label).distinct(DatasetBase.datasetUri, DatasetBase.label) @@ -90,10 
+96,14 @@ def _query_user_datasets(session, username, groups, filter) -> Query: ) ) if filter and filter.get('term'): + term = filter['term'] query = query.filter( or_( - DatasetBase.description.ilike(filter.get('term') + '%%'), - DatasetBase.label.ilike(filter.get('term') + '%%'), + DatasetBase.label.ilike('%' + term + '%'), + DatasetBase.description.ilike('%' + term + '%'), + DatasetBase.tags.contains( + f'{{{NamingConventionService(pattern=NamingConventionPattern.DEFAULT_SEARCH, target_label=term).sanitize()}}}' + ), ) ) return query.order_by(DatasetBase.label).distinct(DatasetBase.datasetUri, DatasetBase.label) @@ -124,8 +134,9 @@ def _query_environment_datasets(session, uri, filter) -> Query: or_( DatasetBase.label.ilike('%' + term + '%'), DatasetBase.description.ilike('%' + term + '%'), - DatasetBase.tags.contains(f'{{{term}}}'), - DatasetBase.region.ilike('%' + term + '%'), + DatasetBase.tags.contains( + f'{{{NamingConventionService(pattern=NamingConventionPattern.DEFAULT_SEARCH, target_label=term).sanitize()}}}' + ), ) ) return query.order_by(DatasetBase.label) diff --git a/backend/dataall/modules/feed/api/registry.py b/backend/dataall/modules/feed/api/registry.py index 3fe72f245..8db914263 100644 --- a/backend/dataall/modules/feed/api/registry.py +++ b/backend/dataall/modules/feed/api/registry.py @@ -10,6 +10,7 @@ class FeedDefinition: target_type: str model: Type[Resource] + permission: str class FeedRegistry(UnionTypeRegistry): @@ -25,6 +26,10 @@ def register(cls, definition: FeedDefinition): def find_model(cls, target_type: str): return cls._DEFINITIONS[target_type].model + @classmethod + def find_permission(cls, target_type: str): + return cls._DEFINITIONS[target_type].permission + @classmethod def find_target(cls, obj: Resource): for target_type, definition in cls._DEFINITIONS.items(): diff --git a/backend/dataall/modules/feed/api/resolvers.py b/backend/dataall/modules/feed/api/resolvers.py index a3bcca622..e971d90bf 100644 --- a/backend/dataall/modules/feed/api/resolvers.py +++ b/backend/dataall/modules/feed/api/resolvers.py @@ -43,4 +43,4 @@ def resolve_feed_messages(context: Context, source: Feed, filter: dict = None): _required_uri(source.targetUri) if not filter: filter = {} - return FeedService.list_feed_messages(targetUri=source.targetUri, filter=filter) + return FeedService.list_feed_messages(targetUri=source.targetUri, targetType=source.targetType, filter=filter) diff --git a/backend/dataall/modules/feed/services/feed_service.py b/backend/dataall/modules/feed/services/feed_service.py index 364b2a575..69d271186 100644 --- a/backend/dataall/modules/feed/services/feed_service.py +++ b/backend/dataall/modules/feed/services/feed_service.py @@ -6,8 +6,10 @@ import logging from dataall.base.context import get_context +from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService from dataall.modules.feed.db.feed_models import FeedMessage from dataall.modules.feed.db.feed_repository import FeedRepository +from dataall.modules.feed.api.registry import FeedRegistry logger = logging.getLogger(__name__) @@ -27,10 +29,6 @@ def targetType(self): return self._targetType -def _session(): - return get_context().db_engine.scoped_session() - - class FeedService: """ Encapsulate the logic of interactions with Feeds. 
@@ -41,6 +39,15 @@ def get_feed( targetUri: str = None, targetType: str = None, ) -> Feed: + context = get_context() + with context.db_engine.scoped_session() as session: + ResourcePolicyService.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=targetUri, + permission_name=FeedRegistry.find_permission(target_type=targetType), + ) return Feed(targetUri=targetUri, targetType=targetType) @staticmethod @@ -49,17 +56,33 @@ def post_feed_message( targetType: str = None, content: str = None, ): - with _session() as session: + context = get_context() + with context.db_engine.scoped_session() as session: + ResourcePolicyService.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=targetUri, + permission_name=FeedRegistry.find_permission(target_type=targetType), + ) m = FeedMessage( targetUri=targetUri, targetType=targetType, - creator=get_context().username, + creator=context.username, content=content, ) session.add(m) return m @staticmethod - def list_feed_messages(targetUri: str, filter: dict = None): - with _session() as session: + def list_feed_messages(targetUri: str, targetType: str, filter: dict = None): + context = get_context() + with context.db_engine.scoped_session() as session: + ResourcePolicyService.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=targetUri, + permission_name=FeedRegistry.find_permission(target_type=targetType), + ) return FeedRepository(session).paginated_feed_messages(uri=targetUri, filter=filter) diff --git a/backend/dataall/modules/mlstudio/__init__.py b/backend/dataall/modules/mlstudio/__init__.py index e3d1d2f19..2e50fb64e 100644 --- a/backend/dataall/modules/mlstudio/__init__.py +++ b/backend/dataall/modules/mlstudio/__init__.py @@ -20,9 +20,13 @@ def __init__(self): from dataall.core.stacks.db.target_type_repositories import TargetType import dataall.modules.mlstudio.api from dataall.modules.mlstudio.services.mlstudio_service import SagemakerStudioEnvironmentResource - from dataall.modules.mlstudio.services.mlstudio_permissions import GET_SGMSTUDIO_USER, UPDATE_SGMSTUDIO_USER + from dataall.modules.mlstudio.services.mlstudio_permissions import ( + GET_SGMSTUDIO_USER, + UPDATE_SGMSTUDIO_USER, + MANAGE_SGMSTUDIO_USERS, + ) - TargetType('mlstudio', GET_SGMSTUDIO_USER, UPDATE_SGMSTUDIO_USER) + TargetType('mlstudio', GET_SGMSTUDIO_USER, UPDATE_SGMSTUDIO_USER, MANAGE_SGMSTUDIO_USERS) EnvironmentResourceManager.register(SagemakerStudioEnvironmentResource()) diff --git a/backend/dataall/modules/mlstudio/api/resolvers.py b/backend/dataall/modules/mlstudio/api/resolvers.py index e38d72ae6..6d4e07f8b 100644 --- a/backend/dataall/modules/mlstudio/api/resolvers.py +++ b/backend/dataall/modules/mlstudio/api/resolvers.py @@ -122,6 +122,7 @@ def resolve_sagemaker_studio_user_stack(context: Context, source: SagemakerStudi return None return StackService.resolve_parent_obj_stack( targetUri=source.sagemakerStudioUserUri, + targetType='mlstudio', environmentUri=source.environmentUri, ) diff --git a/backend/dataall/modules/notebooks/__init__.py b/backend/dataall/modules/notebooks/__init__.py index 5fd8900da..0fc22ea07 100644 --- a/backend/dataall/modules/notebooks/__init__.py +++ b/backend/dataall/modules/notebooks/__init__.py @@ -17,9 +17,13 @@ def is_supported(modes): def __init__(self): import dataall.modules.notebooks.api from 
dataall.core.stacks.db.target_type_repositories import TargetType - from dataall.modules.notebooks.services.notebook_permissions import GET_NOTEBOOK, UPDATE_NOTEBOOK + from dataall.modules.notebooks.services.notebook_permissions import ( + GET_NOTEBOOK, + UPDATE_NOTEBOOK, + MANAGE_NOTEBOOKS, + ) - TargetType('notebook', GET_NOTEBOOK, UPDATE_NOTEBOOK) + TargetType('notebook', GET_NOTEBOOK, UPDATE_NOTEBOOK, MANAGE_NOTEBOOKS) log.info('API of sagemaker notebooks has been imported') diff --git a/backend/dataall/modules/notebooks/api/resolvers.py b/backend/dataall/modules/notebooks/api/resolvers.py index de6235305..e7e111fc3 100644 --- a/backend/dataall/modules/notebooks/api/resolvers.py +++ b/backend/dataall/modules/notebooks/api/resolvers.py @@ -90,6 +90,7 @@ def resolve_notebook_stack(context: Context, source: SagemakerNotebook, **kwargs return None return StackService.resolve_parent_obj_stack( targetUri=source.notebookUri, + targetType='notebook', environmentUri=source.environmentUri, ) diff --git a/backend/dataall/modules/notebooks/db/notebook_repository.py b/backend/dataall/modules/notebooks/db/notebook_repository.py index f5219fdde..384bc2f67 100644 --- a/backend/dataall/modules/notebooks/db/notebook_repository.py +++ b/backend/dataall/modules/notebooks/db/notebook_repository.py @@ -10,6 +10,10 @@ from dataall.base.db import paginate from dataall.modules.notebooks.db.notebook_models import SagemakerNotebook from dataall.core.environment.services.environment_resource_manager import EnvironmentResource +from dataall.base.utils.naming_convention import ( + NamingConventionService, + NamingConventionPattern, +) class NotebookRepository(EnvironmentResource): @@ -51,7 +55,9 @@ def _query_user_notebooks(self, username, groups, filter) -> Query: or_( SagemakerNotebook.description.ilike(term + '%%'), SagemakerNotebook.label.ilike(term + '%%'), - SagemakerNotebook.tags.contains(f'{{{term}}}'), + SagemakerNotebook.tags.contains( + f'{{{NamingConventionService(pattern=NamingConventionPattern.DEFAULT_SEARCH, target_label=term).sanitize()}}}' + ), ) ) return query.order_by(SagemakerNotebook.label) diff --git a/backend/dataall/modules/notifications/api/mutations.py b/backend/dataall/modules/notifications/api/mutations.py index a38936ae6..991f559f3 100644 --- a/backend/dataall/modules/notifications/api/mutations.py +++ b/backend/dataall/modules/notifications/api/mutations.py @@ -1,5 +1,5 @@ from dataall.base.api import gql -from .resolvers import delete, mark_as_read +from .resolvers import mark_as_read markNotificationAsRead = gql.MutationField( @@ -10,10 +10,3 @@ type=gql.Boolean, resolver=mark_as_read, ) - -deleteNotification = gql.MutationField( - name='deleteNotification', - args=[gql.Argument(name='notificationUri', type=gql.String)], - type=gql.Boolean, - resolver=delete, -) diff --git a/backend/dataall/modules/notifications/api/queries.py b/backend/dataall/modules/notifications/api/queries.py index 95ba57aaa..7b63253a6 100644 --- a/backend/dataall/modules/notifications/api/queries.py +++ b/backend/dataall/modules/notifications/api/queries.py @@ -1,7 +1,5 @@ from dataall.base.api import gql from .resolvers import ( - count_deleted_notifications, - count_read_notifications, count_unread_notifications, list_my_notifications, ) @@ -21,17 +19,3 @@ type=gql.Integer, resolver=count_unread_notifications, ) - -# Not used in frontend -countReadNotifications = gql.QueryField( - name='countReadNotifications', - type=gql.Integer, - resolver=count_read_notifications, -) - -# Not used in frontend 
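The repository hunks above (organizations, VPCs, datasets, notebooks) share one hardening pattern for term search: the raw term stays in the `ilike` comparisons, where SQLAlchemy binds it as a query parameter, while the copy embedded in the `{...}` array literal for `tags.contains` is first passed through `NamingConventionService` with the `DEFAULT_SEARCH` pattern so it cannot distort the array matching. A condensed sketch, assuming a generic `Model` with `label` and `tags` columns; `Model`, `query`, and `filter` stand in for the surrounding repository method's objects:

```python
from sqlalchemy import or_

from dataall.base.utils.naming_convention import (
    NamingConventionPattern,
    NamingConventionService,
)

term = filter.get('term', '')
# Strip characters that could alter the Postgres array-literal comparison below.
sanitized = NamingConventionService(
    pattern=NamingConventionPattern.DEFAULT_SEARCH,
    target_label=term,
).sanitize()
query = query.filter(
    or_(
        Model.label.ilike('%' + term + '%'),      # raw term, bound as a parameter
        Model.tags.contains(f'{{{sanitized}}}'),  # sanitized term inside the array literal
    )
)
```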
-countDeletedNotifications = gql.QueryField( - name='countDeletedNotifications', - type=gql.Integer, - resolver=count_deleted_notifications, -) diff --git a/backend/dataall/modules/notifications/api/resolvers.py b/backend/dataall/modules/notifications/api/resolvers.py index 783a4eefe..b2b1154c4 100644 --- a/backend/dataall/modules/notifications/api/resolvers.py +++ b/backend/dataall/modules/notifications/api/resolvers.py @@ -1,19 +1,11 @@ import logging +from dataall.modules.notifications.services.notification_service import NotificationService from dataall.base.api.context import Context -from dataall.base.context import get_context from dataall.base.db import exceptions -from dataall.modules.notifications.db.notification_repositories import NotificationRepository log = logging.getLogger(__name__) -# For simplicity there is no additional layer for the business logic of notifications as it happens with other more -# complex modules. In the resolvers we check the input and we perform the db calls directly. - - -def _session(): - return get_context().db_engine.scoped_session() - def _required_uri(uri): if not uri: @@ -27,10 +19,7 @@ def list_my_notifications( ): if not filter: filter = {} - with _session() as session: - return NotificationRepository.paginated_notifications( - session=session, username=get_context().username, groups=get_context().groups, filter=filter - ) + return NotificationService.list_my_notifications(filter=filter) def mark_as_read( @@ -39,32 +28,8 @@ def mark_as_read( notificationUri: str = None, ): _required_uri(notificationUri) - with _session() as session: - return NotificationRepository.read_notification(session=session, notificationUri=notificationUri) + return NotificationService.mark_as_read(notificationUri=notificationUri) def count_unread_notifications(context: Context, source): - with _session() as session: - return NotificationRepository.count_unread_notifications( - session=session, username=get_context().username, groups=get_context().groups - ) - - -def count_deleted_notifications(context: Context, source): - with _session() as session: - return NotificationRepository.count_deleted_notifications( - session=session, username=get_context().username, groups=get_context().groups - ) - - -def count_read_notifications(context: Context, source): - with _session() as session: - return NotificationRepository.count_read_notifications( - session=session, username=get_context().username, groups=get_context().groups - ) - - -def delete(context: Context, source, notificationUri): - _required_uri(notificationUri) - with _session() as session: - return NotificationRepository.delete_notification(session=session, notificationUri=notificationUri) + return NotificationService.count_unread_notifications() diff --git a/backend/dataall/modules/notifications/db/notification_repositories.py b/backend/dataall/modules/notifications/db/notification_repositories.py index 4dd3b159a..a090652d4 100644 --- a/backend/dataall/modules/notifications/db/notification_repositories.py +++ b/backend/dataall/modules/notifications/db/notification_repositories.py @@ -1,33 +1,7 @@ -from datetime import datetime - from sqlalchemy import func, and_, or_ from dataall.modules.notifications.db import notification_models as models -from dataall.base.db import paginate, exceptions -from dataall.base.context import get_context -from functools import wraps - - -class NotificationAccess: - @staticmethod - def is_recipient(f): - @wraps(f) - def wrapper(*args, **kwds): - uri = kwds.get('notificationUri') - if not 
uri: - raise KeyError(f"{f.__name__} doesn't have parameter uri.") - context = get_context() - with context.db_engine.scoped_session() as session: - notification = session.query(models.Notification).get(uri) - if notification and (notification.recipient in context.groups + [context.username]): - return f(*args, **kwds) - else: - raise exceptions.UnauthorizedOperation( - action='UPDATE NOTIFICATION', - message=f'User {context.username} is not the recipient user/group of the notification {uri}', - ) - - return wrapper +from dataall.base.db import paginate class NotificationRepository: @@ -91,28 +65,6 @@ def count_unread_notifications(session, username, groups): return int(count) @staticmethod - def count_read_notifications(session, username, groups): - count = ( - session.query(func.count(models.Notification.notificationUri)) - .filter(or_(models.Notification.recipient == username, models.Notification.recipient.in_(groups))) - .filter(models.Notification.is_read == True) - .filter(models.Notification.deleted.is_(None)) - .scalar() - ) - return int(count) - - @staticmethod - def count_deleted_notifications(session, username, groups): - count = ( - session.query(func.count(models.Notification.notificationUri)) - .filter(or_(models.Notification.recipient == username, models.Notification.recipient.in_(groups))) - .filter(models.Notification.deleted.isnot(None)) - .scalar() - ) - return int(count) - - @staticmethod - @NotificationAccess.is_recipient def read_notification(session, notificationUri): notification = session.query(models.Notification).get(notificationUri) notification.is_read = True @@ -120,10 +72,5 @@ def read_notification(session, notificationUri): return True @staticmethod - @NotificationAccess.is_recipient - def delete_notification(session, notificationUri): - notification = session.query(models.Notification).get(notificationUri) - if notification: - notification.deleted = datetime.now() - session.commit() - return True + def get_notification(session, uri): + return session.query(models.Notification).get(uri) diff --git a/backend/dataall/modules/notifications/services/notification_service.py b/backend/dataall/modules/notifications/services/notification_service.py new file mode 100644 index 000000000..c53dacff3 --- /dev/null +++ b/backend/dataall/modules/notifications/services/notification_service.py @@ -0,0 +1,71 @@ +""" +A service layer for Notifications +""" + +import logging +from dataall.base.db import exceptions + +from dataall.base.context import get_context +from dataall.modules.notifications.db import notification_models as models +from functools import wraps + +from dataall.modules.notifications.db.notification_repositories import NotificationRepository + +logger = logging.getLogger(__name__) + + +class NotificationAccess: + @staticmethod + def check_recipient(uri): + context = get_context() + with context.db_engine.scoped_session() as session: + notification = NotificationRepository.get_notification(session=session, uri=uri) + return notification and (notification.recipient in context.groups + [context.username]) + + @staticmethod + def is_recipient(f): + @wraps(f) + def wrapper(*args, **kwds): + uri = kwds.get('notificationUri') + if not uri: + raise KeyError(f"{f.__name__} doesn't have parameter uri.") + + if NotificationAccess.check_recipient(uri): + return f(*args, **kwds) + else: + raise exceptions.UnauthorizedOperation( + action='UPDATE NOTIFICATION', + message=f'User {get_context().username} is not the recipient user/group of the notification {uri}', + ) + + 
return wrapper + + +class NotificationService: + """ + Encapsulate the logic of interactions with notifications. + """ + + @staticmethod + def list_my_notifications(filter: dict = {}): + """List existing user notifications, filtered by the optional filter param""" + context = get_context() + + with context.db_engine.scoped_session() as session: + return NotificationRepository.paginated_notifications( + session=session, username=context.username, groups=context.groups, filter=filter + ) + + @staticmethod + @NotificationAccess.is_recipient + def mark_as_read(notificationUri: str): + with get_context().db_engine.scoped_session() as session: + return NotificationRepository.read_notification(session=session, notificationUri=notificationUri) + + @staticmethod + def count_unread_notifications(): + context = get_context() + with context.db_engine.scoped_session() as session: + return NotificationRepository.count_unread_notifications( + session=session, username=context.username, groups=context.groups + ) diff --git a/backend/dataall/modules/omics/tasks/omics_workflows_fetcher.py b/backend/dataall/modules/omics/tasks/omics_workflows_fetcher.py index ecfff37db..d20d69cfc 100644 --- a/backend/dataall/modules/omics/tasks/omics_workflows_fetcher.py +++ b/backend/dataall/modules/omics/tasks/omics_workflows_fetcher.py @@ -11,11 +11,7 @@ from dataall.modules.omics.db.omics_repository import OmicsRepository -root = logging.getLogger() -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) -log.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) def fetch_omics_workflows(engine): diff --git a/backend/dataall/modules/redshift_datasets/api/datasets/resolvers.py b/backend/dataall/modules/redshift_datasets/api/datasets/resolvers.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/s3_datasets/__init__.py b/backend/dataall/modules/s3_datasets/__init__.py index f0b73a6d0..e8150e1b7 100644 --- a/backend/dataall/modules/s3_datasets/__init__.py +++ b/backend/dataall/modules/s3_datasets/__init__.py @@ -41,13 +41,19 @@ def __init__(self): from dataall.modules.s3_datasets.indexers.table_indexer import DatasetTableIndexer import dataall.modules.s3_datasets.api - from dataall.modules.s3_datasets.services.dataset_permissions import GET_DATASET, UPDATE_DATASET + from dataall.modules.s3_datasets.services.dataset_permissions import ( + GET_DATASET, + UPDATE_DATASET, + GET_DATASET_TABLE, + GET_DATASET_FOLDER, + MANAGE_DATASETS, + ) from dataall.modules.s3_datasets.db.dataset_repositories import DatasetRepository from dataall.modules.s3_datasets.db.dataset_models import DatasetStorageLocation, DatasetTable, S3Dataset - FeedRegistry.register(FeedDefinition('DatasetStorageLocation', DatasetStorageLocation)) - FeedRegistry.register(FeedDefinition('DatasetTable', DatasetTable)) - FeedRegistry.register(FeedDefinition('Dataset', S3Dataset)) + FeedRegistry.register(FeedDefinition('DatasetStorageLocation', DatasetStorageLocation, GET_DATASET_FOLDER)) + FeedRegistry.register(FeedDefinition('DatasetTable', DatasetTable, GET_DATASET_TABLE)) + FeedRegistry.register(FeedDefinition('Dataset', S3Dataset, GET_DATASET)) GlossaryRegistry.register( GlossaryDefinition( @@ -71,9 +77,9 @@ def __init__(self): ) ) - add_vote_type('dataset', DatasetIndexer) + add_vote_type('dataset', DatasetIndexer, GET_DATASET) - TargetType('dataset', GET_DATASET, UPDATE_DATASET) + TargetType('dataset', GET_DATASET, UPDATE_DATASET, MANAGE_DATASETS) 
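`TargetType` registrations now carry a tenant-level permission alongside the read/write pair, and the stack service hunks earlier in this patch check both layers before mutating a stack. A sketch of how the pieces compose; `mytool` and its permission constants are hypothetical placeholders, and `session`, `context`, and `target_uri` come from the surrounding service method, as in the `update_stack_by_target_uri` hunk above:

```python
# Hypothetical registration in a module __init__, mirroring the line above:
TargetType('mytool', GET_MYTOOL, UPDATE_MYTOOL, MANAGE_MYTOOLS)

# Stack mutations then resolve both checks from the target type alone:
TenantPolicyService.check_user_tenant_permission(
    session=session,
    username=context.username,
    groups=context.groups,
    permission_name=TargetType.get_resource_tenant_permission_name('mytool'),
    tenant_name=TenantPolicyService.TENANT_NAME,
)
ResourcePolicyService.check_user_resource_permission(
    session=session,
    username=context.username,
    groups=context.groups,
    resource_uri=target_uri,
    permission_name=TargetType.get_resource_update_permission_name('mytool'),
)
```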
EnvironmentResourceManager.register(DatasetRepository()) diff --git a/backend/dataall/modules/s3_datasets/api/dataset/resolvers.py b/backend/dataall/modules/s3_datasets/api/dataset/resolvers.py index 6f25a6293..491551de0 100644 --- a/backend/dataall/modules/s3_datasets/api/dataset/resolvers.py +++ b/backend/dataall/modules/s3_datasets/api/dataset/resolvers.py @@ -84,8 +84,7 @@ def get_dataset_organization(context, source: S3Dataset, **kwargs): def get_dataset_environment(context, source: S3Dataset, **kwargs): if not source: return None - with context.engine.scoped_session() as session: - return EnvironmentService.get_environment_by_uri(session, source.environmentUri) + return EnvironmentService.find_environment_by_uri(uri=source.environmentUri) def get_dataset_owners_group(context, source: S3Dataset, **kwargs): @@ -110,6 +109,12 @@ def get_dataset_statistics(context: Context, source: S3Dataset, **kwargs): return DatasetService.get_dataset_statistics(source) +def get_dataset_restricted_information(context: Context, source: S3Dataset, **kwargs): + if not source: + return None + return DatasetService.get_dataset_restricted_information(uri=source.datasetUri, dataset=source) + + @is_feature_enabled('modules.s3_datasets.features.aws_actions') def get_dataset_assume_role_url(context: Context, source, datasetUri: str = None): return DatasetService.get_dataset_assume_role_url(uri=datasetUri) @@ -130,6 +135,7 @@ def resolve_dataset_stack(context: Context, source: S3Dataset, **kwargs): return None return StackService.resolve_parent_obj_stack( targetUri=source.datasetUri, + targetType='dataset', environmentUri=source.environmentUri, ) @@ -150,7 +156,7 @@ def list_datasets_owned_by_env_group( ): if not filter: filter = {} - return DatasetService.list_datasets_owned_by_env_group(environmentUri, groupUri, filter) + return DatasetService.list_datasets_owned_by_env_group(uri=environmentUri, group_uri=groupUri, data=filter) class RequestValidator: diff --git a/backend/dataall/modules/s3_datasets/api/dataset/types.py b/backend/dataall/modules/s3_datasets/api/dataset/types.py index 282a29833..82f6f168d 100644 --- a/backend/dataall/modules/s3_datasets/api/dataset/types.py +++ b/backend/dataall/modules/s3_datasets/api/dataset/types.py @@ -11,6 +11,7 @@ get_dataset_statistics, get_dataset_glossary_terms, resolve_dataset_stack, + get_dataset_restricted_information, ) from dataall.core.environment.api.enums import EnvironmentPermission @@ -23,6 +24,23 @@ ], ) +DatasetRestrictedInformation = gql.ObjectType( + name='DatasetRestrictedInformation', + fields=[ + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='region', type=gql.String), + gql.Field(name='S3BucketName', type=gql.String), + gql.Field(name='GlueDatabaseName', type=gql.String), + gql.Field(name='GlueCrawlerName', type=gql.String), + gql.Field(name='IAMDatasetAdminRoleArn', type=gql.String), + gql.Field(name='KmsAlias', type=gql.String), + gql.Field(name='importedS3Bucket', type=gql.Boolean), + gql.Field(name='importedGlueDatabase', type=gql.Boolean), + gql.Field(name='importedKmsKey', type=gql.Boolean), + gql.Field(name='importedAdminRole', type=gql.Boolean), + ], +) + Dataset = gql.ObjectType( name='Dataset', fields=[ @@ -35,29 +53,13 @@ gql.Field(name='created', type=gql.String), gql.Field(name='updated', type=gql.String), gql.Field(name='admins', type=gql.ArrayType(gql.String)), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='S3BucketName', type=gql.String), - 
gql.Field(name='GlueDatabaseName', type=gql.String), - gql.Field(name='GlueCrawlerName', type=gql.String), - gql.Field(name='GlueCrawlerSchedule', type=gql.String), - gql.Field(name='GlueProfilingJobName', type=gql.String), - gql.Field(name='GlueProfilingTriggerSchedule', type=gql.String), - gql.Field(name='IAMDatasetAdminRoleArn', type=gql.String), - gql.Field(name='KmsAlias', type=gql.String), - gql.Field(name='bucketCreated', type=gql.Boolean), - gql.Field(name='glueDatabaseCreated', type=gql.Boolean), - gql.Field(name='iamAdminRoleCreated', type=gql.Boolean), - gql.Field(name='lakeformationLocationCreated', type=gql.Boolean), - gql.Field(name='bucketPolicyCreated', type=gql.Boolean), gql.Field(name='SamlAdminGroupName', type=gql.String), - gql.Field(name='businessOwnerEmail', type=gql.String), - gql.Field(name='businessOwnerDelegationEmails', type=gql.ArrayType(gql.String)), - gql.Field(name='importedS3Bucket', type=gql.Boolean), - gql.Field(name='importedGlueDatabase', type=gql.Boolean), - gql.Field(name='importedKmsKey', type=gql.Boolean), - gql.Field(name='importedAdminRole', type=gql.Boolean), gql.Field(name='imported', type=gql.Boolean), + gql.Field( + name='restricted', + type=DatasetRestrictedInformation, + resolver=get_dataset_restricted_information, + ), gql.Field( name='environment', type=gql.Ref('EnvironmentSimplified'), @@ -126,8 +128,6 @@ name='GlueCrawler', fields=[ gql.Field(name='Name', type=gql.ID), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='region', type=gql.String), gql.Field(name='status', type=gql.String), ], ) diff --git a/backend/dataall/modules/s3_datasets/api/profiling/input_types.py b/backend/dataall/modules/s3_datasets/api/profiling/input_types.py index 52d31e832..655564dc9 100644 --- a/backend/dataall/modules/s3_datasets/api/profiling/input_types.py +++ b/backend/dataall/modules/s3_datasets/api/profiling/input_types.py @@ -8,13 +8,3 @@ gql.Argument('tableUri', gql.String), ], ) - - -DatasetProfilingRunFilter = gql.InputType( - name='DatasetProfilingRunFilter', - arguments=[ - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - gql.Argument(name='term', type=gql.String), - ], -) diff --git a/backend/dataall/modules/s3_datasets/api/profiling/resolvers.py b/backend/dataall/modules/s3_datasets/api/profiling/resolvers.py index b92b8d065..5eae47bb5 100644 --- a/backend/dataall/modules/s3_datasets/api/profiling/resolvers.py +++ b/backend/dataall/modules/s3_datasets/api/profiling/resolvers.py @@ -10,6 +10,11 @@ log = logging.getLogger(__name__) +def _validate_uri(uri): + if not uri: + raise RequiredParameter('URI') + + def resolve_dataset(context, source: DatasetProfilingRun): if not source: return None @@ -17,8 +22,7 @@ def resolve_dataset(context, source: DatasetProfilingRun): def start_profiling_run(context: Context, source, input: dict = None): - if 'datasetUri' not in input: - raise RequiredParameter('datasetUri') + _validate_uri(input.get('datasetUri')) return DatasetProfilingService.start_profiling_run( uri=input['datasetUri'], table_uri=input.get('tableUri'), glue_table_name=input.get('GlueTableName') diff --git a/backend/dataall/modules/s3_datasets/api/storage_location/input_types.py b/backend/dataall/modules/s3_datasets/api/storage_location/input_types.py index 99eb89686..a68ad82c3 100644 --- a/backend/dataall/modules/s3_datasets/api/storage_location/input_types.py +++ b/backend/dataall/modules/s3_datasets/api/storage_location/input_types.py @@ -30,12 +30,3 @@ gql.Argument('pageSize', 
gql.Integer), ], ) - - -DatasetAccessPointFilter = gql.InputType( - name='DatasetAccessPointFilter', - arguments=[ - gql.Argument(name='page', type=gql.Integer), - gql.Argument(name='pageSize', type=gql.Integer), - ], -) diff --git a/backend/dataall/modules/s3_datasets/api/storage_location/resolvers.py b/backend/dataall/modules/s3_datasets/api/storage_location/resolvers.py index 212332652..928d0d4f8 100644 --- a/backend/dataall/modules/s3_datasets/api/storage_location/resolvers.py +++ b/backend/dataall/modules/s3_datasets/api/storage_location/resolvers.py @@ -1,18 +1,22 @@ from dataall.base.api.context import Context -from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository from dataall.base.db.exceptions import RequiredParameter from dataall.base.feature_toggle_checker import is_feature_enabled +from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository +from dataall.modules.s3_datasets.db.dataset_models import DatasetStorageLocation from dataall.modules.s3_datasets.services.dataset_location_service import DatasetLocationService -from dataall.modules.s3_datasets.db.dataset_models import DatasetStorageLocation, S3Dataset +from dataall.modules.s3_datasets.services.dataset_service import DatasetService -@is_feature_enabled('modules.s3_datasets.features.file_actions') -def create_storage_location(context, source, datasetUri: str = None, input: dict = None): - if 'prefix' not in input: - raise RequiredParameter('prefix') +def _validate_input(input: dict): if 'label' not in input: raise RequiredParameter('label') + if 'prefix' not in input: + raise RequiredParameter('prefix') + +@is_feature_enabled('modules.s3_datasets.features.file_actions') +def create_storage_location(context, source, datasetUri: str = None, input: dict = None): + _validate_input(input) return DatasetLocationService.create_storage_location(uri=datasetUri, data=input) @@ -43,9 +47,13 @@ def remove_storage_location(context, source, locationUri: str = None): def resolve_dataset(context, source: DatasetStorageLocation, **kwargs): if not source: return None - with context.engine.scoped_session() as session: - d = session.query(S3Dataset).get(source.datasetUri) - return d + return DatasetService.find_dataset(uri=source.datasetUri) + + +def get_folder_restricted_information(context: Context, source: DatasetStorageLocation, **kwargs): + if not source: + return None + return DatasetLocationService.get_folder_restricted_information(uri=source.locationUri, folder=source) def resolve_glossary_terms(context: Context, source: DatasetStorageLocation, **kwargs): diff --git a/backend/dataall/modules/s3_datasets/api/storage_location/types.py b/backend/dataall/modules/s3_datasets/api/storage_location/types.py index 40070a287..14db04c06 100644 --- a/backend/dataall/modules/s3_datasets/api/storage_location/types.py +++ b/backend/dataall/modules/s3_datasets/api/storage_location/types.py @@ -1,5 +1,9 @@ from dataall.base.api import gql -from dataall.modules.s3_datasets.api.storage_location.resolvers import resolve_glossary_terms, resolve_dataset +from dataall.modules.s3_datasets.api.storage_location.resolvers import ( + resolve_glossary_terms, + resolve_dataset, + get_folder_restricted_information, +) DatasetStorageLocation = gql.ObjectType( name='DatasetStorageLocation', @@ -11,13 +15,15 @@ gql.Field(name='owner', type=gql.String), gql.Field(name='created', type=gql.String), gql.Field(name='updated', type=gql.String), - gql.Field(name='region', type=gql.String), gql.Field(name='tags', 
type=gql.ArrayType(gql.String)), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='S3BucketName', type=gql.String), gql.Field(name='S3Prefix', type=gql.String), gql.Field(name='locationCreated', type=gql.Boolean), gql.Field(name='dataset', type=gql.Ref('Dataset'), resolver=resolve_dataset), + gql.Field( + name='restricted', + type=gql.Ref('DatasetRestrictedInformation'), + resolver=get_folder_restricted_information, + ), gql.Field(name='userRoleForStorageLocation', type=gql.Ref('DatasetRole')), gql.Field(name='environmentEndPoint', type=gql.String), gql.Field( @@ -40,37 +46,3 @@ gql.Field(name='hasPrevious', type=gql.Boolean), ], ) - - -DatasetAccessPoint = gql.ObjectType( - name='DatasetAccessPoint', - fields=[ - gql.Field(name='accessPointUri', type=gql.ID), - gql.Field(name='location', type=DatasetStorageLocation), - gql.Field(name='dataset', type=gql.Ref('Dataset')), - gql.Field(name='name', type=gql.String), - gql.Field(name='description', type=gql.String), - gql.Field(name='owner', type=gql.String), - gql.Field(name='created', type=gql.String), - gql.Field(name='updated', type=gql.String), - gql.Field(name='region', type=gql.String), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='S3BucketName', type=gql.String), - gql.Field(name='S3Prefix', type=gql.String), - gql.Field(name='S3AccessPointName', type=gql.String), - ], -) - - -DatasetAccessPointSearchResult = gql.ObjectType( - name='DatasetAccessPointSearchResult', - fields=[ - gql.Field(name='count', type=gql.Integer), - gql.Field(name='page', type=gql.Integer), - gql.Field(name='pageSize', type=gql.Integer), - gql.Field(name='pages', type=gql.Integer), - gql.Field(name='hasNext', type=gql.Integer), - gql.Field(name='hasPrevious', type=gql.Integer), - gql.Field(name='nodes', type=gql.ArrayType(DatasetAccessPoint)), - ], -) diff --git a/backend/dataall/modules/s3_datasets/api/table/mutations.py b/backend/dataall/modules/s3_datasets/api/table/mutations.py index 08e601409..1b67061e9 100644 --- a/backend/dataall/modules/s3_datasets/api/table/mutations.py +++ b/backend/dataall/modules/s3_datasets/api/table/mutations.py @@ -26,6 +26,6 @@ syncTables = gql.MutationField( name='syncTables', args=[gql.Argument(name='datasetUri', type=gql.NonNullableType(gql.String))], - type=gql.Ref('DatasetTableSearchResult'), + type=gql.Integer, resolver=sync_tables, ) diff --git a/backend/dataall/modules/s3_datasets/api/table/resolvers.py b/backend/dataall/modules/s3_datasets/api/table/resolvers.py index 7e810bdbe..47df9e0e1 100644 --- a/backend/dataall/modules/s3_datasets/api/table/resolvers.py +++ b/backend/dataall/modules/s3_datasets/api/table/resolvers.py @@ -56,3 +56,9 @@ def resolve_glossary_terms(context: Context, source: DatasetTable, **kwargs): return None with context.engine.scoped_session() as session: return GlossaryRepository.get_glossary_terms_links(session, source.tableUri, 'DatasetTable') + + +def get_dataset_table_restricted_information(context: Context, source: DatasetTable, **kwargs): + if not source: + return None + return DatasetTableService.get_table_restricted_information(uri=source.tableUri, table=source) diff --git a/backend/dataall/modules/s3_datasets/api/table/types.py b/backend/dataall/modules/s3_datasets/api/table/types.py index 20911734f..067119c66 100644 --- a/backend/dataall/modules/s3_datasets/api/table/types.py +++ b/backend/dataall/modules/s3_datasets/api/table/types.py @@ -4,6 +4,7 @@ resolve_dataset, get_glue_table_properties, resolve_glossary_terms, + 
get_dataset_table_restricted_information, ) TablePermission = gql.ObjectType( @@ -21,6 +22,15 @@ gql.Field(name='nodes', type=gql.ArrayType(TablePermission)), ], ) +DatasetTableRestrictedInformation = gql.ObjectType( + name='DatasetTableRestrictedInformation', + fields=[ + gql.Field(name='AwsAccountId', type=gql.String), + gql.Field(name='GlueDatabaseName', type=gql.String), + gql.Field(name='GlueTableName', type=gql.String), + gql.Field(name='S3Prefix', type=gql.String), + ], +) DatasetTable = gql.ObjectType( name='DatasetTable', @@ -35,12 +45,11 @@ gql.Field(name='created', type=gql.String), gql.Field(name='updated', type=gql.String), gql.Field(name='admins', type=gql.ArrayType(gql.String)), - gql.Field(name='AwsAccountId', type=gql.String), - gql.Field(name='GlueDatabaseName', type=gql.String), - gql.Field(name='GlueTableName', type=gql.String), gql.Field(name='LastGlueTableStatus', type=gql.String), - gql.Field(name='S3Prefix', type=gql.String), gql.Field(name='GlueTableConfig', type=gql.String), + gql.Field( + name='restricted', type=DatasetTableRestrictedInformation, resolver=get_dataset_table_restricted_information + ), gql.Field( name='GlueTableProperties', type=gql.String, diff --git a/backend/dataall/modules/s3_datasets/cdk/dataset_custom_resources_extension.py b/backend/dataall/modules/s3_datasets/cdk/dataset_custom_resources_extension.py index 2686a3679..3f9b7a57e 100644 --- a/backend/dataall/modules/s3_datasets/cdk/dataset_custom_resources_extension.py +++ b/backend/dataall/modules/s3_datasets/cdk/dataset_custom_resources_extension.py @@ -29,6 +29,34 @@ def extent(setup: EnvironmentSetup): setup=setup, environment=_environment, group_roles=setup.group_roles, default_role=setup.default_role ) + lambda_env_key = kms.Key( + setup, + f'{_environment.resourcePrefix}-ds-cst-lambda-env-var-key', + removal_policy=RemovalPolicy.DESTROY, + alias=f'{_environment.resourcePrefix}-ds-cst-lambda-env-var-key', + enable_key_rotation=True, + policy=iam.PolicyDocument( + statements=[ + iam.PolicyStatement( + resources=['*'], + effect=iam.Effect.ALLOW, + principals=[ + iam.AccountPrincipal(account_id=_environment.AwsAccountId), + ], + actions=['kms:*'], + ), + iam.PolicyStatement( + resources=['*'], + effect=iam.Effect.ALLOW, + principals=[ + iam.ServicePrincipal(service='lambda.amazonaws.com'), + ], + actions=['kms:GenerateDataKey*', 'kms:Decrypt'], + ), + ], + ), + ) + # Lakeformation default settings custom resource # Set PivotRole as Lake Formation data lake admin entry_point = str( @@ -55,6 +83,7 @@ def extent(setup: EnvironmentSetup): 'DEFAULT_ENV_ROLE_ARN': _environment.EnvironmentDefaultIAMRoleArn, 'DEFAULT_CDK_ROLE_ARN': _environment.CDKRoleArn, }, + environment_encryption=lambda_env_key, dead_letter_queue_enabled=True, dead_letter_queue=lakeformation_cr_dlq, on_failure=lambda_destination.SqsDestination(lakeformation_cr_dlq), @@ -64,6 +93,7 @@ def extent(setup: EnvironmentSetup): setup, f'{_environment.resourcePrefix}LakeformationDefaultSettingsProvider', on_event_handler=lf_default_settings_custom_resource, + provider_function_env_encryption=lambda_env_key, ) default_lf_settings = CustomResource( @@ -119,6 +149,7 @@ def extent(setup: EnvironmentSetup): 'DEFAULT_ENV_ROLE_ARN': _environment.EnvironmentDefaultIAMRoleArn, 'DEFAULT_CDK_ROLE_ARN': _environment.CDKRoleArn, }, + environment_encryption=lambda_env_key, dead_letter_queue_enabled=True, dead_letter_queue=gluedb_lf_cr_dlq, on_failure=lambda_destination.SqsDestination(gluedb_lf_cr_dlq), @@ -130,6 +161,7 @@ def extent(setup: 
EnvironmentSetup): setup, f'{_environment.resourcePrefix}GlueDbCustomResourceProvider', on_event_handler=gluedb_lf_custom_resource, + provider_function_env_encryption=lambda_env_key, ) ssm.StringParameter( setup, diff --git a/backend/dataall/modules/s3_datasets/cdk/dataset_stack.py b/backend/dataall/modules/s3_datasets/cdk/dataset_stack.py index bb5b51c6d..ffbff7369 100644 --- a/backend/dataall/modules/s3_datasets/cdk/dataset_stack.py +++ b/backend/dataall/modules/s3_datasets/cdk/dataset_stack.py @@ -11,6 +11,7 @@ Duration, CfnResource, CustomResource, + RemovalPolicy, Tags, ) from aws_cdk.aws_glue import CfnCrawler @@ -192,7 +193,7 @@ def __init__(self, scope, id, target_uri: str = None, **kwargs): server_access_logs_bucket=s3.Bucket.from_bucket_name( self, 'EnvAccessLogsBucket', - f'{env.EnvironmentDefaultBucketName}', + f'{env.EnvironmentLogsBucketName}', ), server_access_logs_prefix=f'access_logs/{dataset.S3BucketName}/', enforce_ssl=True, diff --git a/backend/dataall/modules/s3_datasets/cdk/env_role_dataset_glue_policy.py b/backend/dataall/modules/s3_datasets/cdk/env_role_dataset_glue_policy.py index ca7604e27..18c294ceb 100644 --- a/backend/dataall/modules/s3_datasets/cdk/env_role_dataset_glue_policy.py +++ b/backend/dataall/modules/s3_datasets/cdk/env_role_dataset_glue_policy.py @@ -100,11 +100,48 @@ def get_statements(self, group_permissions, **kwargs): effect=iam.Effect.ALLOW, actions=[ 'glue:Get*', - 'glue:List*', + 'glue:ListDevEndpoints', + 'glue:ListBlueprints', + 'glue:ListRegistries', + 'glue:ListTriggers', + 'glue:ListUsageProfiles', + 'glue:ListCrawlers', + 'glue:ListCrawls', + 'glue:ListJobs', + 'glue:ListCustomEntityTypes', + 'glue:ListSessions', + 'glue:ListWorkflows', 'glue:BatchGet*', ], resources=['*'], ), + iam.PolicyStatement( + effect=iam.Effect.ALLOW, + actions=[ + 'glue:ListDataQualityRuleRecommendationRuns', + 'glue:ListSchemaVersions', + 'glue:QuerySchemaVersionMetadata', + 'glue:ListMLTransforms', + 'glue:ListStatements', + 'glue:ListSchemas', + 'glue:ListDataQualityRulesetEvaluationRuns', + 'glue:ListTableOptimizerRuns', + 'glue:GetMLTaskRuns', + 'glue:ListDataQualityRulesets', + 'glue:ListDataQualityResults', + 'glue:GetMLTransforms', + ], + resources=[ + f'arn:aws:glue:*:{self.account}:schema/*', + f'arn:aws:glue:*:{self.account}:registry/*', + f'arn:aws:glue:*:{self.account}:dataQualityRuleset/*', + f'arn:aws:glue:*:{self.account}:table/*/*', + f'arn:aws:glue:*:{self.account}:database/*', + f'arn:aws:glue:*:{self.account}:mlTransform/*', + f'arn:aws:glue:*:{self.account}:catalog', + f'arn:aws:glue:*:{self.account}:session/*', + ], + ), iam.PolicyStatement( # sid="GlueCreateS3Bucket", effect=iam.Effect.ALLOW, diff --git a/backend/dataall/modules/s3_datasets/db/dataset_profiling_repositories.py b/backend/dataall/modules/s3_datasets/db/dataset_profiling_repositories.py index 001fcb1b6..6676d42ee 100644 --- a/backend/dataall/modules/s3_datasets/db/dataset_profiling_repositories.py +++ b/backend/dataall/modules/s3_datasets/db/dataset_profiling_repositories.py @@ -50,17 +50,6 @@ def get_profiling_run(session, profiling_run_uri=None, glue_job_run_id=None, glu ) return run - @staticmethod - def list_profiling_runs(session, dataset_uri): - # TODO filter is always default - filter = {} - q = ( - session.query(DatasetProfilingRun) - .filter(DatasetProfilingRun.datasetUri == dataset_uri) - .order_by(DatasetProfilingRun.created.desc()) - ) - return paginate(q, page=filter.get('page', 1), page_size=filter.get('pageSize', 20)).to_dict() - @staticmethod def 
list_table_profiling_runs(session, table_uri): # TODO filter is always default diff --git a/backend/dataall/modules/s3_datasets/db/dataset_repositories.py b/backend/dataall/modules/s3_datasets/db/dataset_repositories.py index 4c1283cb2..11eb12d18 100644 --- a/backend/dataall/modules/s3_datasets/db/dataset_repositories.py +++ b/backend/dataall/modules/s3_datasets/db/dataset_repositories.py @@ -219,7 +219,9 @@ def query_environment_group_datasets(session, env_uri, group_uri, filter) -> Que or_( S3Dataset.label.ilike('%' + term + '%'), S3Dataset.description.ilike('%' + term + '%'), - S3Dataset.tags.contains(f'{{{term}}}'), + S3Dataset.tags.contains( + f'{{{NamingConventionService(pattern=NamingConventionPattern.DEFAULT_SEARCH, target_label=term).sanitize()}}}' + ), S3Dataset.region.ilike('%' + term + '%'), ) ) @@ -236,7 +238,9 @@ def query_environment_imported_datasets(session, uri, filter) -> Query: or_( S3Dataset.label.ilike('%' + term + '%'), S3Dataset.description.ilike('%' + term + '%'), - S3Dataset.tags.contains(f'{{{term}}}'), + S3Dataset.tags.contains( + f'{{{NamingConventionService(pattern=NamingConventionPattern.DEFAULT_SEARCH, target_label=term).sanitize()}}}' + ), S3Dataset.region.ilike('%' + term + '%'), ) ) diff --git a/backend/dataall/modules/s3_datasets/services/dataset_column_service.py b/backend/dataall/modules/s3_datasets/services/dataset_column_service.py index eb7c19a00..77d94b271 100644 --- a/backend/dataall/modules/s3_datasets/services/dataset_column_service.py +++ b/backend/dataall/modules/s3_datasets/services/dataset_column_service.py @@ -1,16 +1,17 @@ from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService +from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService from dataall.core.tasks.service_handlers import Worker from dataall.base.aws.sts import SessionHelper from dataall.base.context import get_context +from dataall.base.db import exceptions from dataall.core.tasks.db.task_models import Task from dataall.modules.s3_datasets.aws.glue_table_client import GlueTableClient from dataall.modules.s3_datasets.db.dataset_column_repositories import DatasetColumnRepository from dataall.modules.s3_datasets.db.dataset_table_repositories import DatasetTableRepository -from dataall.modules.s3_datasets.services.dataset_permissions import UPDATE_DATASET_TABLE +from dataall.modules.s3_datasets.services.dataset_permissions import UPDATE_DATASET_TABLE, MANAGE_DATASETS from dataall.modules.s3_datasets.db.dataset_models import DatasetTable, DatasetTableColumn from dataall.modules.s3_datasets.db.dataset_repositories import DatasetRepository from dataall.modules.datasets_base.services.datasets_enums import ConfidentialityClassification -from dataall.modules.s3_datasets.services.dataset_permissions import PREVIEW_DATASET_TABLE class DatasetColumnService: @@ -33,17 +34,15 @@ def paginate_active_columns_for_table(uri: str, filter=None): if ( ConfidentialityClassification.get_confidentiality_level(dataset.confidentiality) != ConfidentialityClassification.Unclassified.value - ): - ResourcePolicyService.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.tableUri, - permission_name=PREVIEW_DATASET_TABLE, + ) and (dataset.SamlAdminGroupName not in context.groups and dataset.stewards not in context.groups): + raise exceptions.UnauthorizedOperation( + action='LIST_DATASET_TABLE_COLUMNS', + message='User is not authorized to view Columns for Confidential 
datasets', ) return DatasetColumnRepository.paginate_active_columns_for_table(session, uri, filter) @classmethod + @TenantPolicyService.has_tenant_permission(MANAGE_DATASETS) @ResourcePolicyService.has_resource_permission( UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri, param_name='table_uri' ) @@ -58,6 +57,7 @@ def sync_table_columns(cls, table_uri: str): return cls.paginate_active_columns_for_table(uri=table_uri, filter={}) @staticmethod + @TenantPolicyService.has_tenant_permission(MANAGE_DATASETS) @ResourcePolicyService.has_resource_permission( UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri_for_column, param_name='column_uri' ) diff --git a/backend/dataall/modules/s3_datasets/services/dataset_location_service.py b/backend/dataall/modules/s3_datasets/services/dataset_location_service.py index ee83d1c5f..13c12d144 100644 --- a/backend/dataall/modules/s3_datasets/services/dataset_location_service.py +++ b/backend/dataall/modules/s3_datasets/services/dataset_location_service.py @@ -3,7 +3,7 @@ from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository -from dataall.base.db.exceptions import ResourceShared, ResourceAlreadyExists +from dataall.base.db.exceptions import ResourceAlreadyExists from dataall.modules.s3_datasets.services.dataset_service import DatasetService from dataall.modules.s3_datasets.aws.s3_location_client import S3LocationClient from dataall.modules.s3_datasets.db.dataset_location_repositories import DatasetLocationRepository @@ -59,7 +59,6 @@ def list_dataset_locations(uri: str, filter: dict = None): return DatasetLocationRepository.list_dataset_locations(session=session, uri=uri, data=filter) @staticmethod - @ResourcePolicyService.has_resource_permission(GET_DATASET_FOLDER) def get_storage_location(uri): with get_context().db_engine.scoped_session() as session: return DatasetLocationRepository.get_location_by_uri(session, uri) @@ -135,3 +134,10 @@ def _delete_dataset_folder_read_permission(session, dataset: S3Dataset, location } for group in permission_group: ResourcePolicyService.delete_resource_policy(session=session, group=group, resource_uri=location_uri) + + @staticmethod + @ResourcePolicyService.has_resource_permission(GET_DATASET_FOLDER) + def get_folder_restricted_information(uri: str, folder: DatasetStorageLocation): + context = get_context() + with context.db_engine.scoped_session() as session: + return DatasetRepository.get_dataset_by_uri(session, folder.datasetUri) diff --git a/backend/dataall/modules/s3_datasets/services/dataset_profiling_service.py b/backend/dataall/modules/s3_datasets/services/dataset_profiling_service.py index be94ce51c..d9241d94b 100644 --- a/backend/dataall/modules/s3_datasets/services/dataset_profiling_service.py +++ b/backend/dataall/modules/s3_datasets/services/dataset_profiling_service.py @@ -1,17 +1,19 @@ import json from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService +from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService from dataall.core.tasks.service_handlers import Worker from dataall.base.context import get_context from dataall.core.environment.db.environment_models import Environment from dataall.core.environment.services.environment_service import EnvironmentService from dataall.core.tasks.db.task_models import Task from 
dataall.base.db.exceptions import ObjectNotFound +from dataall.base.db import exceptions from dataall.modules.s3_datasets.aws.glue_profiler_client import GlueDatasetProfilerClient from dataall.modules.s3_datasets.aws.s3_profiler_client import S3ProfilerClient from dataall.modules.s3_datasets.db.dataset_profiling_repositories import DatasetProfilingRepository from dataall.modules.s3_datasets.db.dataset_table_repositories import DatasetTableRepository -from dataall.modules.s3_datasets.services.dataset_permissions import PROFILE_DATASET_TABLE, GET_DATASET +from dataall.modules.s3_datasets.services.dataset_permissions import PROFILE_DATASET_TABLE, GET_DATASET, MANAGE_DATASETS from dataall.modules.s3_datasets.db.dataset_repositories import DatasetRepository from dataall.modules.datasets_base.services.datasets_enums import ConfidentialityClassification from dataall.modules.s3_datasets.db.dataset_models import DatasetProfilingRun, DatasetTable @@ -20,6 +22,7 @@ class DatasetProfilingService: @staticmethod + @TenantPolicyService.has_tenant_permission(MANAGE_DATASETS) @ResourcePolicyService.has_resource_permission(PROFILE_DATASET_TABLE) def start_profiling_run(uri, table_uri, glue_table_name): context = get_context() @@ -61,12 +64,6 @@ def resolve_profiling_run_status(run_uri): session.add(task) Worker.queue(engine=context.db_engine, task_ids=[task.taskUri]) - @staticmethod - @ResourcePolicyService.has_resource_permission(GET_DATASET) - def list_profiling_runs(uri): - with get_context().db_engine.scoped_session() as session: - return DatasetProfilingRepository.list_profiling_runs(session, uri) - @classmethod def get_dataset_table_profiling_run(cls, uri: str): with get_context().db_engine.scoped_session() as session: @@ -106,12 +103,9 @@ def _check_preview_permissions_if_needed(session, table_uri): if ( ConfidentialityClassification.get_confidentiality_level(dataset.confidentiality) != ConfidentialityClassification.Unclassified.value - ): - ResourcePolicyService.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.tableUri, - permission_name=PREVIEW_DATASET_TABLE, + ) and (dataset.SamlAdminGroupName not in context.groups and dataset.stewards not in context.groups): + raise exceptions.UnauthorizedOperation( + action='GET_TABLE_PROFILING_METRICS', + message='User is not authorized to view Profiling Metrics for Confidential datasets', ) return True diff --git a/backend/dataall/modules/s3_datasets/services/dataset_service.py b/backend/dataall/modules/s3_datasets/services/dataset_service.py index 908e38551..7f59912ea 100644 --- a/backend/dataall/modules/s3_datasets/services/dataset_service.py +++ b/backend/dataall/modules/s3_datasets/services/dataset_service.py @@ -37,7 +37,9 @@ DATASET_ALL, DATASET_READ, IMPORT_DATASET, + GET_DATASET, ) +from dataall.modules.datasets_base.services.dataset_list_permissions import LIST_ENVIRONMENT_DATASETS from dataall.modules.s3_datasets.db.dataset_repositories import DatasetRepository from dataall.modules.datasets_base.db.dataset_repositories import DatasetBaseRepository from dataall.modules.datasets_base.services.datasets_enums import DatasetRole @@ -85,13 +87,13 @@ def _attach_additional_steward_permissions(cls, session, dataset, new_stewards): interface.extend_attach_steward_permissions(session, dataset, new_stewards) @classmethod - def _delete_additional_steward__permissions(cls, session, dataset): + def _delete_additional_steward_permissions(cls, session, dataset): """All permissions from 
other modules that need to be revoked from stewards"""
         for interface in cls._interfaces:
             interface.extend_delete_steward_permissions(session, dataset)
 
     @staticmethod
-    def check_dataset_account(session, environment):
+    def _check_dataset_account(session, environment):
         dashboards_enabled = EnvironmentService.get_boolean_env_param(session, environment, 'dashboardsEnabled')
         if dashboards_enabled:
             quicksight_subscription = QuicksightClient.check_quicksight_enterprise_subscription(
@@ -105,7 +107,7 @@ def check_dataset_account(session, environment):
         return True
 
     @staticmethod
-    def check_imported_resources(dataset: S3Dataset):
+    def _check_imported_resources(dataset: S3Dataset):
         if dataset.importedGlueDatabase:
             if len(dataset.GlueDatabaseName) > NamingConventionPattern.GLUE.value.get('max_length'):
                 raise exceptions.InvalidInput(
@@ -158,11 +160,11 @@ def create_dataset(uri, admin_group, data: dict):
         context = get_context()
         with context.db_engine.scoped_session() as session:
             environment = EnvironmentService.get_environment_by_uri(session, uri)
-            DatasetService.check_dataset_account(session=session, environment=environment)
+            DatasetService._check_dataset_account(session=session, environment=environment)
 
             dataset = DatasetRepository.build_dataset(username=context.username, env=environment, data=data)
 
             if dataset.imported:
-                DatasetService.check_imported_resources(dataset)
+                DatasetService._check_imported_resources(dataset)
             dataset = DatasetRepository.create_dataset(session=session, env=environment, dataset=dataset, data=data)
             DatasetBucketRepository.create_dataset_bucket(session, dataset, data)
@@ -217,6 +219,11 @@ def get_dataset(uri):
                 dataset.userRoleForDataset = DatasetRole.Admin.value
             return dataset
 
+    @classmethod
+    @ResourcePolicyService.has_resource_permission(GET_DATASET)
+    def find_dataset(cls, uri):
+        return DatasetService.get_dataset(uri)
+
     @staticmethod
     @TenantPolicyService.has_tenant_permission(MANAGE_DATASETS)
     @ResourcePolicyService.has_resource_permission(CREDENTIALS_DATASET)
@@ -251,13 +258,13 @@ def update_dataset(uri: str, data: dict):
         with get_context().db_engine.scoped_session() as session:
             dataset = DatasetRepository.get_dataset_by_uri(session, uri)
             environment = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri)
-            DatasetService.check_dataset_account(session=session, environment=environment)
+            DatasetService._check_dataset_account(session=session, environment=environment)
 
             username = get_context().username
             dataset: S3Dataset = DatasetRepository.get_dataset_by_uri(session, uri)
             if data and isinstance(data, dict):
                 if data.get('imported', False):
-                    DatasetService.check_imported_resources(dataset)
+                    DatasetService._check_imported_resources(dataset)
 
                 for k in data.keys():
                     if k not in ['stewards', 'KmsAlias']:
@@ -304,6 +311,11 @@ def get_dataset_statistics(dataset: S3Dataset):
             'upvotes': count_upvotes or 0,
         }
 
+    @staticmethod
+    @ResourcePolicyService.has_resource_permission(GET_DATASET)
+    def get_dataset_restricted_information(uri: str, dataset: S3Dataset):
+        return dataset
+
     @staticmethod
     @TenantPolicyService.has_tenant_permission(MANAGE_DATASETS)
     @ResourcePolicyService.has_resource_permission(CREDENTIALS_DATASET)
@@ -357,8 +369,6 @@ def start_crawler(uri: str, data: dict = None):
 
             return {
                 'Name': dataset.GlueCrawlerName,
-                'AwsAccountId': dataset.AwsAccountId,
-                'region': dataset.region,
                 'status': crawler.get('LastCrawl', {}).get('Status', 'N/A'),
             }
 
@@ -453,11 +463,18 @@ def _create_dataset_stack(session, dataset: S3Dataset) -> Stack:
         )
 
     @staticmethod
-    def 
list_datasets_owned_by_env_group(env_uri: str, group_uri: str, data: dict): - with get_context().db_engine.scoped_session() as session: + @ResourcePolicyService.has_resource_permission(LIST_ENVIRONMENT_DATASETS) + def list_datasets_owned_by_env_group(uri: str, group_uri: str, data: dict): + context = get_context() + if group_uri not in context.groups: + raise exceptions.UnauthorizedOperation( + action='LIST_ENVIRONMENT_GROUP_DATASETS', + message=f'User: {context.username} is not a member of the team {group_uri}', + ) + with context.db_engine.scoped_session() as session: return DatasetRepository.paginated_environment_group_datasets( session=session, - env_uri=env_uri, + env_uri=uri, group_uri=group_uri, data=data, ) @@ -482,7 +499,7 @@ def _transfer_stewardship_to_owners(session, dataset): resource_uri=tableUri, ) - DatasetService._delete_additional_steward__permissions(session, dataset) + DatasetService._delete_additional_steward_permissions(session, dataset) return dataset @staticmethod diff --git a/backend/dataall/modules/s3_datasets/services/dataset_table_data_filter_service.py b/backend/dataall/modules/s3_datasets/services/dataset_table_data_filter_service.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/dataall/modules/s3_datasets/services/dataset_table_service.py b/backend/dataall/modules/s3_datasets/services/dataset_table_service.py index 021bfb37b..5c31da21d 100644 --- a/backend/dataall/modules/s3_datasets/services/dataset_table_service.py +++ b/backend/dataall/modules/s3_datasets/services/dataset_table_service.py @@ -1,6 +1,6 @@ import logging - from dataall.base.context import get_context +from dataall.base.db import exceptions from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService from dataall.modules.catalog.db.glossary_repositories import GlossaryRepository @@ -41,6 +41,11 @@ def get_table(uri: str): with get_context().db_engine.scoped_session() as session: return DatasetTableRepository.get_dataset_table_by_uri(session, uri) + @staticmethod + @ResourcePolicyService.has_resource_permission(GET_DATASET_TABLE) + def get_table_restricted_information(uri: str, table: DatasetTable): + return table + @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_DATASETS) @ResourcePolicyService.has_resource_permission(UPDATE_DATASET_TABLE, parent_resource=_get_dataset_uri) @@ -87,13 +92,10 @@ def preview(uri: str): if ( ConfidentialityClassification.get_confidentiality_level(dataset.confidentiality) != ConfidentialityClassification.Unclassified.value - ): - ResourcePolicyService.check_user_resource_permission( - session=session, - username=context.username, - groups=context.groups, - resource_uri=table.tableUri, - permission_name=PREVIEW_DATASET_TABLE, + ) and (dataset.SamlAdminGroupName not in context.groups and dataset.stewards not in context.groups): + raise exceptions.UnauthorizedOperation( + action=PREVIEW_DATASET_TABLE, + message='User is not authorized to Preview Table for Confidential datasets', ) env = EnvironmentService.get_environment_by_uri(session, dataset.environmentUri) return AthenaTableClient(env, table).get_table(dataset_uri=dataset.datasetUri) @@ -118,11 +120,7 @@ def sync_tables_for_dataset(cls, uri): DatasetTableIndexer.upsert_all(session=session, dataset_uri=dataset.datasetUri) DatasetTableIndexer.remove_all_deleted(session=session, dataset_uri=dataset.datasetUri) DatasetIndexer.upsert(session=session, 
dataset_uri=dataset.datasetUri) - return DatasetRepository.paginated_dataset_tables( - session=session, - uri=uri, - data={'page': 1, 'pageSize': 10}, - ) + return DatasetRepository.count_dataset_tables(session, dataset.datasetUri) @staticmethod def sync_existing_tables(session, uri, glue_tables=None): diff --git a/backend/dataall/modules/s3_datasets/tasks/tables_syncer.py b/backend/dataall/modules/s3_datasets/tasks/tables_syncer.py index bfd4feaad..9bd47dd3c 100644 --- a/backend/dataall/modules/s3_datasets/tasks/tables_syncer.py +++ b/backend/dataall/modules/s3_datasets/tasks/tables_syncer.py @@ -16,11 +16,7 @@ from dataall.modules.s3_datasets.indexers.dataset_indexer import DatasetIndexer from dataall.modules.s3_datasets.services.dataset_alarm_service import DatasetAlarmService -root = logging.getLogger() -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) -log.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) def sync_tables(engine): diff --git a/backend/dataall/modules/s3_datasets_shares/api/resolvers.py b/backend/dataall/modules/s3_datasets_shares/api/resolvers.py index f77076a7a..8f530a9c9 100644 --- a/backend/dataall/modules/s3_datasets_shares/api/resolvers.py +++ b/backend/dataall/modules/s3_datasets_shares/api/resolvers.py @@ -5,7 +5,6 @@ from dataall.base.feature_toggle_checker import is_feature_enabled from dataall.modules.s3_datasets_shares.services.s3_share_service import S3ShareService - log = logging.getLogger(__name__) @@ -41,7 +40,7 @@ def validate_dataset_share_selector_input(data): def list_shared_tables_by_env_dataset(context: Context, source, datasetUri: str, envUri: str): - return S3ShareService.list_shared_tables_by_env_dataset(datasetUri, envUri) + return S3ShareService.list_shared_tables_by_env_dataset(uri=envUri, dataset_uri=datasetUri) @is_feature_enabled('modules.s3_datasets.features.aws_actions') @@ -65,10 +64,8 @@ def get_s3_consumption_data(context: Context, source, shareUri: str): def list_shared_databases_tables_with_env_group(context: Context, source, environmentUri: str, groupUri: str): - return S3ShareService.list_shared_databases_tables_with_env_group(environmentUri=environmentUri, groupUri=groupUri) + return S3ShareService.list_shared_databases_tables_with_env_group(uri=environmentUri, group_uri=groupUri) def resolve_shared_db_name(context: Context, source, **kwargs): - return S3ShareService.resolve_shared_db_name( - source.GlueDatabaseName, source.shareUri, source.targetEnvAwsAccountId, source.targetEnvRegion - ) + return S3ShareService.resolve_shared_db_name(source.GlueDatabaseName, source.shareUri) diff --git a/backend/dataall/modules/s3_datasets_shares/services/s3_share_service.py b/backend/dataall/modules/s3_datasets_shares/services/s3_share_service.py index 255544bc7..8a0f9bf50 100644 --- a/backend/dataall/modules/s3_datasets_shares/services/s3_share_service.py +++ b/backend/dataall/modules/s3_datasets_shares/services/s3_share_service.py @@ -1,7 +1,7 @@ import logging from warnings import warn -from dataall.base.db import utils +from dataall.base.db import utils, exceptions from dataall.base.context import get_context from dataall.base.aws.sts import SessionHelper from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService @@ -10,6 +10,7 @@ from dataall.core.tasks.db.task_models import Task from dataall.core.tasks.service_handlers import Worker from dataall.modules.shares_base.db.share_object_repositories import ShareObjectRepository +from 
dataall.modules.datasets_base.services.dataset_list_permissions import LIST_ENVIRONMENT_DATASETS from dataall.modules.shares_base.db.share_state_machines_repositories import ShareStatusRepository from dataall.modules.shares_base.services.share_item_service import ShareItemService from dataall.modules.shares_base.services.share_permissions import GET_SHARE_OBJECT @@ -164,17 +165,19 @@ def reapply_share_items_for_dataset(uri: str): return True @staticmethod - def list_shared_tables_by_env_dataset(dataset_uri: str, env_uri: str): + @ResourcePolicyService.has_resource_permission(LIST_ENVIRONMENT_DATASETS) + def list_shared_tables_by_env_dataset(uri: str, dataset_uri: str): context = get_context() with context.db_engine.scoped_session() as session: return [ {'tableUri': t.tableUri, 'GlueTableName': t.GlueTableName} for t in S3ShareObjectRepository.query_dataset_tables_shared_with_env( - session, env_uri, dataset_uri, context.username, context.groups + session, uri, dataset_uri, context.username, context.groups ) ] @staticmethod + @TenantPolicyService.has_tenant_permission(MANAGE_DATASETS) @ResourcePolicyService.has_resource_permission(CREDENTIALS_DATASET) def get_dataset_shared_assume_role_url(uri): context = get_context() @@ -246,11 +249,17 @@ def get_s3_consumption_data(uri): } @staticmethod - def list_shared_databases_tables_with_env_group(environmentUri: str, groupUri: str): + @ResourcePolicyService.has_resource_permission(LIST_ENVIRONMENT_DATASETS) + def list_shared_databases_tables_with_env_group(uri: str, group_uri: str): context = get_context() + if group_uri not in context.groups: + raise exceptions.UnauthorizedOperation( + action='LIST_ENVIRONMENT_GROUP_DATASETS', + message=f'User: {context.username} is not a member of the owner team', + ) with context.db_engine.scoped_session() as session: return S3ShareObjectRepository.query_shared_glue_databases( - session=session, groups=context.groups, env_uri=environmentUri, group_uri=groupUri + session=session, groups=context.groups, env_uri=uri, group_uri=group_uri ) @staticmethod diff --git a/backend/dataall/modules/s3_datasets_shares/tasks/dataset_subscription_task.py b/backend/dataall/modules/s3_datasets_shares/tasks/dataset_subscription_task.py index cf6a9ec05..a08fb5cf8 100644 --- a/backend/dataall/modules/s3_datasets_shares/tasks/dataset_subscription_task.py +++ b/backend/dataall/modules/s3_datasets_shares/tasks/dataset_subscription_task.py @@ -22,11 +22,7 @@ from dataall.modules.shares_base.db.share_object_models import ShareObject from dataall.modules.shares_base.services.share_notification_service import DataSharingNotificationType -root = logging.getLogger() -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) -log.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) # TODO: review this task usage and remove if not needed diff --git a/backend/dataall/modules/s3_datasets_shares/tasks/subscriptions/sqs_poller.py b/backend/dataall/modules/s3_datasets_shares/tasks/subscriptions/sqs_poller.py index a122b8915..89497e62a 100644 --- a/backend/dataall/modules/s3_datasets_shares/tasks/subscriptions/sqs_poller.py +++ b/backend/dataall/modules/s3_datasets_shares/tasks/subscriptions/sqs_poller.py @@ -6,11 +6,8 @@ import boto3 from botocore.exceptions import ClientError -root = logging.getLogger() -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) -log.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) + ENVNAME = os.getenv('envname', 
'local') region = os.getenv('AWS_REGION', 'eu-west-1') diff --git a/backend/dataall/modules/shares_base/api/resolvers.py b/backend/dataall/modules/shares_base/api/resolvers.py index dc43640ba..efc427870 100644 --- a/backend/dataall/modules/shares_base/api/resolvers.py +++ b/backend/dataall/modules/shares_base/api/resolvers.py @@ -265,12 +265,10 @@ def list_shares_in_my_outbox(context: Context, source, filter: dict = None): def list_shared_with_environment_data_items(context: Context, source, environmentUri: str = None, filter: dict = None): if not filter: filter = {} - with context.engine.scoped_session() as session: - return ShareItemService.paginated_shared_with_environment_datasets( - session=session, - uri=environmentUri, - data=filter, - ) + return ShareItemService.paginated_shared_with_environment_datasets( + uri=environmentUri, + data=filter, + ) def update_share_request_purpose(context: Context, source, shareUri: str = None, requestPurpose: str = None): diff --git a/backend/dataall/modules/shares_base/services/share_item_service.py b/backend/dataall/modules/shares_base/services/share_item_service.py index 7237e4f4d..86ff79ac6 100644 --- a/backend/dataall/modules/shares_base/services/share_item_service.py +++ b/backend/dataall/modules/shares_base/services/share_item_service.py @@ -1,6 +1,7 @@ import logging from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService +from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService from dataall.core.tasks.service_handlers import Worker from dataall.base.context import get_context from dataall.core.tasks.db.task_models import Task @@ -26,6 +27,7 @@ REMOVE_ITEM, LIST_ENVIRONMENT_SHARED_WITH_OBJECTS, APPROVE_SHARE_OBJECT, + MANAGE_SHARES, ) from dataall.modules.shares_base.services.share_processor_manager import ShareProcessorManager from dataall.modules.datasets_base.db.dataset_repositories import DatasetBaseRepository @@ -41,6 +43,7 @@ def _get_share_uri(session, uri): return share.shareUri @staticmethod + @TenantPolicyService.has_tenant_permission(MANAGE_SHARES) @ResourcePolicyService.has_resource_permission(GET_SHARE_OBJECT) def verify_items_share_object(uri, item_uris): context = get_context() @@ -56,6 +59,7 @@ def verify_items_share_object(uri, item_uris): return True @staticmethod + @TenantPolicyService.has_tenant_permission(MANAGE_SHARES) @ResourcePolicyService.has_resource_permission(APPROVE_SHARE_OBJECT) def reapply_items_share_object(uri, item_uris): context = get_context() @@ -71,6 +75,7 @@ def reapply_items_share_object(uri, item_uris): return True @staticmethod + @TenantPolicyService.has_tenant_permission(MANAGE_SHARES) @ResourcePolicyService.has_resource_permission(GET_SHARE_OBJECT) def revoke_items_share_object(uri, revoked_uris): context = get_context() @@ -123,6 +128,7 @@ def revoke_items_share_object(uri, revoked_uris): return share @staticmethod + @TenantPolicyService.has_tenant_permission(MANAGE_SHARES) @ResourcePolicyService.has_resource_permission(ADD_ITEM) def add_shared_item(uri: str, data: dict = None): context = get_context() @@ -155,6 +161,7 @@ def add_shared_item(uri: str, data: dict = None): return share_item @staticmethod + @TenantPolicyService.has_tenant_permission(MANAGE_SHARES) @ResourcePolicyService.has_resource_permission(REMOVE_ITEM, parent_resource=_get_share_uri) def remove_shared_item(uri: str): with get_context().db_engine.scoped_session() as session: @@ -201,6 +208,8 @@ def list_shareable_objects(share, filter, is_revokable=False): 
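+    # The GraphQL resolver no longer passes a SQLAlchemy session in; the
+    # service opens its own scoped session before querying shared datasets.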
@staticmethod @ResourcePolicyService.has_resource_permission(LIST_ENVIRONMENT_SHARED_WITH_OBJECTS) - def paginated_shared_with_environment_datasets(session, uri, data) -> dict: - share_item_shared_states = ShareStatusRepository.get_share_item_shared_states() - return ShareObjectRepository.paginate_shared_datasets(session, uri, data, share_item_shared_states) + def paginated_shared_with_environment_datasets(uri, data) -> dict: + context = get_context() + with context.db_engine.scoped_session() as session: + share_item_shared_states = ShareStatusRepository.get_share_item_shared_states() + return ShareObjectRepository.paginate_shared_datasets(session, uri, data, share_item_shared_states) diff --git a/backend/dataall/modules/shares_base/services/share_logs_service.py b/backend/dataall/modules/shares_base/services/share_logs_service.py index 6d29e5155..ced1975ed 100644 --- a/backend/dataall/modules/shares_base/services/share_logs_service.py +++ b/backend/dataall/modules/shares_base/services/share_logs_service.py @@ -21,7 +21,7 @@ def check_view_log_permissions(username, groups, shareUri): return ds.stewards in groups or ds.SamlAdminGroupName in groups or username == ds.owner @staticmethod - def get_share_logs_name_query(shareUri): + def _get_share_logs_name_query(shareUri): log.info(f'Get share Logs stream name for share {shareUri}') query = f"""fields @logStream @@ -32,7 +32,7 @@ def get_share_logs_name_query(shareUri): return query @staticmethod - def get_share_logs_query(log_stream_name): + def _get_share_logs_query(log_stream_name): query = f"""fields @timestamp, @message, @logStream, @log as @logGroup | sort @timestamp asc | filter @logStream like "{log_stream_name}" @@ -52,7 +52,7 @@ def get_share_logs(shareUri): envname = os.getenv('envname', 'local') log_group_name = f"/{Parameter().get_parameter(env=envname, path='resourcePrefix')}/{envname}/ecs/share-manager" - query_for_name = ShareLogsService.get_share_logs_name_query(shareUri=shareUri) + query_for_name = ShareLogsService._get_share_logs_name_query(shareUri=shareUri) name_query_result = CloudWatch.run_query( query=query_for_name, log_group_name=log_group_name, @@ -63,7 +63,7 @@ def get_share_logs(shareUri): name = name_query_result[0]['logStream'] - query = ShareLogsService.get_share_logs_query(log_stream_name=name) + query = ShareLogsService._get_share_logs_query(log_stream_name=name) results = CloudWatch.run_query( query=query, log_group_name=log_group_name, diff --git a/backend/dataall/modules/shares_base/services/share_object_service.py b/backend/dataall/modules/shares_base/services/share_object_service.py index 8e191beae..22ee5eced 100644 --- a/backend/dataall/modules/shares_base/services/share_object_service.py +++ b/backend/dataall/modules/shares_base/services/share_object_service.py @@ -4,6 +4,7 @@ from dataall.core.tasks.service_handlers import Worker from dataall.base.context import get_context from dataall.core.activity.db.activity_models import Activity +from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService from dataall.core.environment.db.environment_models import EnvironmentGroup, ConsumptionRole from dataall.core.environment.services.environment_service import EnvironmentService from dataall.core.environment.services.managed_iam_policies import PolicyManager @@ -34,6 +35,7 @@ CREATE_SHARE_OBJECT, DELETE_SHARE_OBJECT, GET_SHARE_OBJECT, + MANAGE_SHARES, ) from dataall.modules.shares_base.services.share_processor_manager import ShareProcessorManager from 
dataall.modules.datasets_base.db.dataset_repositories import DatasetBaseRepository @@ -61,6 +63,7 @@ def get_share_object(uri): return ShareObjectRepository.get_share_by_uri(session, uri) @classmethod + @TenantPolicyService.has_tenant_permission(MANAGE_SHARES) @ResourcePolicyService.has_resource_permission(CREATE_SHARE_OBJECT) def create_share_object( cls, @@ -213,6 +216,7 @@ def create_share_object( return share @classmethod + @TenantPolicyService.has_tenant_permission(MANAGE_SHARES) @ResourcePolicyService.has_resource_permission(SUBMIT_SHARE_OBJECT) def submit_share_object(cls, uri: str): context = get_context() @@ -254,6 +258,7 @@ def submit_share_object(cls, uri: str): return share @classmethod + @TenantPolicyService.has_tenant_permission(MANAGE_SHARES) @ResourcePolicyService.has_resource_permission(APPROVE_SHARE_OBJECT) def approve_share_object(cls, uri: str): context = get_context() @@ -286,6 +291,7 @@ def approve_share_object(cls, uri: str): return share @staticmethod + @TenantPolicyService.has_tenant_permission(MANAGE_SHARES) @ResourcePolicyService.has_resource_permission(SUBMIT_SHARE_OBJECT) def update_share_request_purpose(uri: str, request_purpose) -> bool: with get_context().db_engine.scoped_session() as session: @@ -295,6 +301,7 @@ def update_share_request_purpose(uri: str, request_purpose) -> bool: return True @staticmethod + @TenantPolicyService.has_tenant_permission(MANAGE_SHARES) @ResourcePolicyService.has_resource_permission(REJECT_SHARE_OBJECT) def update_share_reject_purpose(uri: str, reject_purpose) -> bool: with get_context().db_engine.scoped_session() as session: @@ -304,6 +311,7 @@ def update_share_reject_purpose(uri: str, reject_purpose) -> bool: return True @classmethod + @TenantPolicyService.has_tenant_permission(MANAGE_SHARES) @ResourcePolicyService.has_resource_permission(REJECT_SHARE_OBJECT) def reject_share_object(cls, uri: str, reject_purpose: str): context = get_context() @@ -322,6 +330,7 @@ def reject_share_object(cls, uri: str, reject_purpose: str): return share @classmethod + @TenantPolicyService.has_tenant_permission(MANAGE_SHARES) @ResourcePolicyService.has_resource_permission(DELETE_SHARE_OBJECT) def delete_share_object(cls, uri: str): with get_context().db_engine.scoped_session() as session: diff --git a/backend/dataall/modules/shares_base/services/share_permissions.py b/backend/dataall/modules/shares_base/services/share_permissions.py index e2238b68d..a821aba46 100644 --- a/backend/dataall/modules/shares_base/services/share_permissions.py +++ b/backend/dataall/modules/shares_base/services/share_permissions.py @@ -14,6 +14,10 @@ RESOURCES_ALL_WITH_DESC, ) +from dataall.core.permissions.services.tenant_permissions import TENANT_ALL, TENANT_ALL_WITH_DESC + +MANAGE_SHARES = 'MANAGE_SHARES' + ADD_ITEM = 'ADD_ITEM' REMOVE_ITEM = 'REMOVE_ITEM' SUBMIT_SHARE_OBJECT = 'SUBMIT_SHARE_OBJECT' @@ -66,3 +70,6 @@ RESOURCES_ALL_WITH_DESC[CREATE_SHARE_OBJECT] = 'Create dataset Share requests for this environment' RESOURCES_ALL_WITH_DESC[LIST_ENVIRONMENT_SHARED_WITH_OBJECTS] = 'LIST_ENVIRONMENT_SHARED_WITH_OBJECTS' + +TENANT_ALL.append(MANAGE_SHARES) +TENANT_ALL_WITH_DESC[MANAGE_SHARES] = 'Manage Data Share Objects' diff --git a/backend/dataall/modules/shares_base/tasks/persistent_email_reminders_task.py b/backend/dataall/modules/shares_base/tasks/persistent_email_reminders_task.py index e9982c6c7..e120614f1 100644 --- a/backend/dataall/modules/shares_base/tasks/persistent_email_reminders_task.py +++ 
b/backend/dataall/modules/shares_base/tasks/persistent_email_reminders_task.py @@ -10,11 +10,7 @@ from dataall.modules.datasets_base.db.dataset_repositories import DatasetBaseRepository -root = logging.getLogger() -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) -log.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) def persistent_email_reminders(engine): diff --git a/backend/dataall/modules/shares_base/tasks/share_manager_task.py b/backend/dataall/modules/shares_base/tasks/share_manager_task.py index 65da67ca4..c7ae66515 100644 --- a/backend/dataall/modules/shares_base/tasks/share_manager_task.py +++ b/backend/dataall/modules/shares_base/tasks/share_manager_task.py @@ -6,12 +6,7 @@ from dataall.base.db import get_engine from dataall.base.loader import load_modules, ImportMode -root = logging.getLogger() -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) -log.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) - if __name__ == '__main__': try: diff --git a/backend/dataall/modules/shares_base/tasks/share_reapplier_task.py b/backend/dataall/modules/shares_base/tasks/share_reapplier_task.py index 32eebd5ab..225f069bd 100644 --- a/backend/dataall/modules/shares_base/tasks/share_reapplier_task.py +++ b/backend/dataall/modules/shares_base/tasks/share_reapplier_task.py @@ -11,11 +11,7 @@ from dataall.base.loader import load_modules, ImportMode -root = logging.getLogger() -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) -log.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) class EcsBulkShareRepplyService: diff --git a/backend/dataall/modules/shares_base/tasks/share_verifier_task.py b/backend/dataall/modules/shares_base/tasks/share_verifier_task.py index 36c677b33..90948fc9c 100644 --- a/backend/dataall/modules/shares_base/tasks/share_verifier_task.py +++ b/backend/dataall/modules/shares_base/tasks/share_verifier_task.py @@ -9,11 +9,7 @@ from dataall.base.loader import load_modules, ImportMode -root = logging.getLogger() -if not root.hasHandlers(): - root.addHandler(logging.StreamHandler(sys.stdout)) log = logging.getLogger(__name__) -log.setLevel(os.environ.get('LOG_LEVEL', 'INFO')) def verify_shares(engine): diff --git a/backend/dataall/modules/vote/api/resolvers.py b/backend/dataall/modules/vote/api/resolvers.py index a35533159..609f3064c 100644 --- a/backend/dataall/modules/vote/api/resolvers.py +++ b/backend/dataall/modules/vote/api/resolvers.py @@ -1,9 +1,5 @@ -from typing import Dict, Type from dataall.base.db import exceptions from dataall.modules.vote.services.vote_service import VoteService -from dataall.modules.catalog.indexers.base_indexer import BaseIndexer - -_VOTE_TYPES: Dict[str, Type[BaseIndexer]] = {} def _required_param(param, name): diff --git a/backend/dataall/modules/vote/services/vote_service.py b/backend/dataall/modules/vote/services/vote_service.py index 380d9728d..e373c3b76 100644 --- a/backend/dataall/modules/vote/services/vote_service.py +++ b/backend/dataall/modules/vote/services/vote_service.py @@ -4,15 +4,21 @@ """ from typing import Dict, Type + from dataall.base.context import get_context +from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService from dataall.modules.catalog.indexers.base_indexer import BaseIndexer from dataall.modules.vote.db.vote_repositories import VoteRepository -_VOTE_TYPES: Dict[str, Type[BaseIndexer]] = {} +_VOTE_TYPES: Dict[str, 
Dict[Type[BaseIndexer], str]] = {} + +def add_vote_type(target_type: str, indexer: Type[BaseIndexer], permission: str): + _VOTE_TYPES[target_type] = {'indexer': indexer, 'permission': permission} -def add_vote_type(target_type: str, indexer: Type[BaseIndexer]): - _VOTE_TYPES[target_type] = indexer + +def get_vote_type(target_type: str) -> dict[Type[BaseIndexer], str]: + return _VOTE_TYPES[target_type] def _session(): @@ -26,9 +32,18 @@ class VoteService: @staticmethod def upvote(targetUri: str, targetType: str, upvote: bool): - with _session() as session: + context = get_context() + target_type = get_vote_type(targetType) + with context.db_engine.scoped_session() as session: + ResourcePolicyService.check_user_resource_permission( + session=session, + username=context.username, + groups=context.groups, + resource_uri=targetUri, + permission_name=target_type.get('permission'), + ) vote = VoteRepository.upvote(session=session, targetUri=targetUri, targetType=targetType, upvote=upvote) - _VOTE_TYPES[vote.targetType].upsert(session, vote.targetUri) + target_type.get('indexer').upsert(session, vote.targetUri) return vote @staticmethod diff --git a/backend/dataall/modules/worksheets/api/resolvers.py b/backend/dataall/modules/worksheets/api/resolvers.py index 450667217..280cf468c 100644 --- a/backend/dataall/modules/worksheets/api/resolvers.py +++ b/backend/dataall/modules/worksheets/api/resolvers.py @@ -14,27 +14,15 @@ def create_worksheet(context: Context, source, input: dict = None): if not input.get('label'): raise exceptions.RequiredParameter('label') - with context.engine.scoped_session() as session: - return WorksheetService.create_worksheet( - session=session, - username=context.username, - data=input, - ) + return WorksheetService.create_worksheet(data=input) def update_worksheet(context: Context, source, worksheetUri: str = None, input: dict = None): - with context.engine.scoped_session() as session: - return WorksheetService.update_worksheet( - session=session, username=context.username, uri=worksheetUri, data=input - ) + return WorksheetService.update_worksheet(uri=worksheetUri, data=input) def get_worksheet(context: Context, source, worksheetUri: str = None): - with context.engine.scoped_session() as session: - return WorksheetService.get_worksheet( - session=session, - uri=worksheetUri, - ) + return WorksheetService.get_worksheet(uri=worksheetUri) def resolve_user_role(context: Context, source: Worksheet): @@ -48,24 +36,12 @@ def resolve_user_role(context: Context, source: Worksheet): def list_worksheets(context, source, filter: dict = None): if not filter: filter = {} - with context.engine.scoped_session() as session: - return WorksheetRepository.paginated_user_worksheets( - session=session, - username=context.username, - groups=context.groups, - uri=None, - data=filter, - check_perm=True, - ) + return WorksheetService.list_user_worksheets(filter) def run_sql_query(context: Context, source, environmentUri: str = None, worksheetUri: str = None, sqlQuery: str = None): - with context.engine.scoped_session() as session: - return WorksheetService.run_sql_query( - session=session, uri=environmentUri, worksheetUri=worksheetUri, sqlQuery=sqlQuery - ) + return WorksheetService.run_sql_query(uri=environmentUri, worksheetUri=worksheetUri, sqlQuery=sqlQuery) def delete_worksheet(context, source, worksheetUri: str = None): - with context.engine.scoped_session() as session: - return WorksheetService.delete_worksheet(session=session, uri=worksheetUri) + return 
WorksheetService.delete_worksheet(uri=worksheetUri) diff --git a/backend/dataall/modules/worksheets/db/worksheet_repositories.py b/backend/dataall/modules/worksheets/db/worksheet_repositories.py index aea51761b..a2b4b8054 100644 --- a/backend/dataall/modules/worksheets/db/worksheet_repositories.py +++ b/backend/dataall/modules/worksheets/db/worksheet_repositories.py @@ -8,6 +8,10 @@ from dataall.core.environment.services.environment_resource_manager import EnvironmentResource from dataall.base.db import paginate from dataall.modules.worksheets.db.worksheet_models import Worksheet, WorksheetQueryResult +from dataall.base.utils.naming_convention import ( + NamingConventionService, + NamingConventionPattern, +) class WorksheetRepository(EnvironmentResource): @@ -41,7 +45,9 @@ def query_user_worksheets(session, username, groups, filter) -> Query: or_( Worksheet.label.ilike('%' + filter.get('term') + '%'), Worksheet.description.ilike('%' + filter.get('term') + '%'), - Worksheet.tags.contains(f"{{{filter.get('term')}}}"), + Worksheet.tags.contains( + f"{{{NamingConventionService(pattern=NamingConventionPattern.DEFAULT_SEARCH, target_label=filter.get('term')).sanitize()}}}" + ), ) ) return query.order_by(Worksheet.label) diff --git a/backend/dataall/modules/worksheets/services/worksheet_service.py b/backend/dataall/modules/worksheets/services/worksheet_service.py index 128f2af94..ffea373d9 100644 --- a/backend/dataall/modules/worksheets/services/worksheet_service.py +++ b/backend/dataall/modules/worksheets/services/worksheet_service.py @@ -3,6 +3,7 @@ from dataall.core.activity.db.activity_models import Activity from dataall.core.environment.services.environment_service import EnvironmentService from dataall.base.db import exceptions +from dataall.base.context import get_context from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService from dataall.modules.worksheets.aws.athena_client import AthenaClient @@ -23,7 +24,7 @@ class WorksheetService: @staticmethod - def get_worksheet_by_uri(session, uri: str) -> Worksheet: + def _get_worksheet_by_uri(session, uri: str) -> Worksheet: if not uri: raise exceptions.RequiredParameter(param_name='worksheetUri') worksheet = WorksheetRepository.find_worksheet_by_uri(session, uri) @@ -33,94 +34,120 @@ def get_worksheet_by_uri(session, uri: str) -> Worksheet: @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_WORKSHEETS) - def create_worksheet(session, username, data=None) -> Worksheet: - worksheet = Worksheet( - owner=username, - label=data.get('label'), - description=data.get('description', 'No description provided'), - tags=data.get('tags'), - chartConfig={'dimensions': [], 'measures': [], 'chartType': 'bar'}, - SamlAdminGroupName=data['SamlAdminGroupName'], - ) - - session.add(worksheet) - session.commit() - - activity = Activity( - action='WORKSHEET:CREATE', - label='WORKSHEET:CREATE', - owner=username, - summary=f'{username} created worksheet {worksheet.name} ', - targetUri=worksheet.worksheetUri, - targetType='worksheet', - ) - session.add(activity) - - ResourcePolicyService.attach_resource_policy( - session=session, - group=data['SamlAdminGroupName'], - permissions=WORKSHEET_ALL, - resource_uri=worksheet.worksheetUri, - resource_type=Worksheet.__name__, - ) + def create_worksheet(data=None) -> Worksheet: + context = get_context() + if data['SamlAdminGroupName'] not in context.groups: + raise 
exceptions.UnauthorizedOperation( + 'CREATE_WORKSHEET', f"user {context.username} does not belong to group {data['SamlAdminGroupName']}" + ) + with context.db_engine.scoped_session() as session: + worksheet = Worksheet( + owner=context.username, + label=data.get('label'), + description=data.get('description', 'No description provided'), + tags=data.get('tags'), + chartConfig={'dimensions': [], 'measures': [], 'chartType': 'bar'}, + SamlAdminGroupName=data['SamlAdminGroupName'], + ) + + session.add(worksheet) + session.commit() + + activity = Activity( + action='WORKSHEET:CREATE', + label='WORKSHEET:CREATE', + owner=context.username, + summary=f'{context.username} created worksheet {worksheet.name} ', + targetUri=worksheet.worksheetUri, + targetType='worksheet', + ) + session.add(activity) + + ResourcePolicyService.attach_resource_policy( + session=session, + group=data['SamlAdminGroupName'], + permissions=WORKSHEET_ALL, + resource_uri=worksheet.worksheetUri, + resource_type=Worksheet.__name__, + ) return worksheet @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_WORKSHEETS) @ResourcePolicyService.has_resource_permission(UPDATE_WORKSHEET) - def update_worksheet(session, username, uri, data=None): - worksheet = WorksheetService.get_worksheet_by_uri(session, uri) - for field in data.keys(): - setattr(worksheet, field, data.get(field)) - session.commit() - - activity = Activity( - action='WORKSHEET:UPDATE', - label='WORKSHEET:UPDATE', - owner=username, - summary=f'{username} updated worksheet {worksheet.name} ', - targetUri=worksheet.worksheetUri, - targetType='worksheet', - ) - session.add(activity) - return worksheet + def update_worksheet(uri, data=None): + context = get_context() + with context.db_engine.scoped_session() as session: + worksheet = WorksheetService._get_worksheet_by_uri(session, uri) + for field in data.keys(): + setattr(worksheet, field, data.get(field)) + session.commit() + + activity = Activity( + action='WORKSHEET:UPDATE', + label='WORKSHEET:UPDATE', + owner=context.username, + summary=f'{context.username} updated worksheet {worksheet.name} ', + targetUri=worksheet.worksheetUri, + targetType='worksheet', + ) + session.add(activity) + return worksheet @staticmethod @ResourcePolicyService.has_resource_permission(GET_WORKSHEET) - def get_worksheet(session, uri): - worksheet = WorksheetService.get_worksheet_by_uri(session, uri) - return worksheet + def get_worksheet(uri): + with get_context().db_engine.scoped_session() as session: + worksheet = WorksheetService._get_worksheet_by_uri(session, uri) + return worksheet + + @staticmethod + def list_user_worksheets(filter): + context = get_context() + with context.db_engine.scoped_session() as session: + return WorksheetRepository.paginated_user_worksheets( + session=session, + username=context.username, + groups=context.groups, + uri=None, + data=filter, + check_perm=True, + ) @staticmethod @TenantPolicyService.has_tenant_permission(MANAGE_WORKSHEETS) @ResourcePolicyService.has_resource_permission(DELETE_WORKSHEET) - def delete_worksheet(session, uri) -> bool: - worksheet = WorksheetService.get_worksheet_by_uri(session, uri) - session.delete(worksheet) - ResourcePolicyService.delete_resource_policy( - session=session, - group=worksheet.SamlAdminGroupName, - resource_uri=uri, - resource_type=Worksheet.__name__, - ) - return True + def delete_worksheet(uri) -> bool: + with get_context().db_engine.scoped_session() as session: + worksheet = WorksheetService._get_worksheet_by_uri(session, uri) + 
session.delete(worksheet) + ResourcePolicyService.delete_resource_policy( + session=session, + group=worksheet.SamlAdminGroupName, + resource_uri=uri, + resource_type=Worksheet.__name__, + ) + return True @staticmethod + @TenantPolicyService.has_tenant_permission(MANAGE_WORKSHEETS) @ResourcePolicyService.has_resource_permission(RUN_ATHENA_QUERY) - def run_sql_query(session, uri, worksheetUri, sqlQuery): - environment = EnvironmentService.get_environment_by_uri(session, uri) - worksheet = WorksheetService.get_worksheet_by_uri(session, worksheetUri) - - env_group = EnvironmentService.get_environment_group( - session, worksheet.SamlAdminGroupName, environment.environmentUri - ) - - cursor = AthenaClient.run_athena_query( - aws_account_id=environment.AwsAccountId, - env_group=env_group, - s3_staging_dir=f's3://{environment.EnvironmentDefaultBucketName}/athenaqueries/{env_group.environmentAthenaWorkGroup}/', - region=environment.region, - sql=sqlQuery, - ) - - return AthenaClient.convert_query_output(cursor) + @ResourcePolicyService.has_resource_permission(GET_WORKSHEET, param_name='worksheetUri') + def run_sql_query(uri, worksheetUri, sqlQuery): + with get_context().db_engine.scoped_session() as session: + environment = EnvironmentService.get_environment_by_uri(session, uri) + worksheet = WorksheetService._get_worksheet_by_uri(session, worksheetUri) + + env_group = EnvironmentService.get_environment_group( + session, worksheet.SamlAdminGroupName, environment.environmentUri + ) + + cursor = AthenaClient.run_athena_query( + aws_account_id=environment.AwsAccountId, + env_group=env_group, + s3_staging_dir=f's3://{environment.EnvironmentDefaultBucketName}/athenaqueries/{env_group.environmentAthenaWorkGroup}/', + region=environment.region, + sql=sqlQuery, + ) + + return AthenaClient.convert_query_output(cursor) diff --git a/backend/local_graphql_server.py b/backend/local_graphql_server.py index 1ea96a732..40c765896 100644 --- a/backend/local_graphql_server.py +++ b/backend/local_graphql_server.py @@ -1,24 +1,23 @@ +import logging import os import jwt from ariadne import graphql_sync from ariadne.constants import PLAYGROUND_HTML -from flask import Flask, request, jsonify -from flask_cors import CORS +from fastapi import FastAPI, Request from graphql import parse +from starlette.middleware.cors import CORSMiddleware +from starlette.responses import JSONResponse, HTMLResponse from dataall.base.api import get_executable_schema -from dataall.core.tasks.service_handlers import Worker -from dataall.core.permissions.services.tenant_permissions import TENANT_ALL -from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService - -from dataall.base.db import get_engine, Base -from dataall.base.searchproxy import connect, run_query -from dataall.base.loader import load_modules, ImportMode from dataall.base.config import config from dataall.base.context import set_context, dispose_context, RequestContext - -import logging +from dataall.base.db import get_engine, Base +from dataall.base.loader import load_modules, ImportMode +from dataall.base.searchproxy import connect, run_query +from dataall.core.permissions.services.tenant_permissions import TENANT_ALL +from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService +from dataall.core.tasks.service_handlers import Worker logger = logging.getLogger('graphql') logger.propagate = False @@ -45,10 +44,14 @@ def __init__(self, **kwargs): schema = get_executable_schema() -# app = GraphQL(schema, debug=True) - -app = 
Flask(__name__) -CORS(app) +app = FastAPI(debug=True) +app.add_middleware( + CORSMiddleware, + allow_origins=['*'], + allow_credentials=True, + allow_methods=['*'], + allow_headers=['*'], +) def request_context(headers, mock=False): @@ -87,67 +90,61 @@ def request_context(headers, mock=False): return context.__dict__ -@app.route('/graphql', methods=['OPTIONS']) +@app.options('/graphql') def opt(): # On GET request serve GraphQL Playground # You don't need to provide Playground if you don't want to # but keep on mind this will not prohibit clients from # exploring your API using desktop GraphQL Playground app. - return '
<html><body>Hello</body></html>', 200 + return HTMLResponse('<html><body>Hello</body></html>') -@app.route('/esproxy', methods=['OPTIONS']) +@app.options('/esproxy') def esproxyopt(): # On GET request serve GraphQL Playground # You don't need to provide Playground if you don't want to # but keep on mind this will not prohibit clients from # exploring your API using desktop GraphQL Playground app. - return '<html><body>Hello</body></html>', 200 + return HTMLResponse('<html><body>Hello</body></html>
') -@app.route('/graphql', methods=['GET']) +@app.get('/graphql') def graphql_playground(): # On GET request serve GraphQL Playground # You don't need to provide Playground if you don't want to # but keep on mind this will not prohibit clients from # exploring your API using desktop GraphQL Playground app. - return PLAYGROUND_HTML, 200 + return HTMLResponse(PLAYGROUND_HTML) -@app.route('/esproxy', methods=['POST']) -def esproxy(): - body = request.data.decode('utf-8') - print(body) +@app.post('/esproxy') +async def esproxy(request: Request): + body = (await request.body()).decode('utf-8') + logger.info('body %s', body) return run_query(es=es, index='dataall-index', body=body) -@app.route('/graphql', methods=['POST']) -def graphql_server(): - print('.............................') - # GraphQL queries are always sent as POST - logger.debug(request.data) - data = request.get_json() - print('*** Request ***', request.data) - logger.info(data) +@app.post('/graphql') +async def graphql_server(request: Request): + logger.info('.............................') + data = await request.json() + logger.info('Request payload %s', data) # Extract the GraphQL query string from the 'query' key in the data dictionary query_string = data.get('query') if not query_string: - return jsonify({'error': 'GraphQL query not provided'}), 400 + return JSONResponse({'error': 'GraphQL query not provided'}, 400) try: query = parse(query_string) except Exception as e: - return jsonify({'error': str(e)}), 400 + return JSONResponse({'error': str(e)}, 400) - print('***** Printing Query ****** \n\n') - print(query) + logger.info('Request query %s', query.to_dict()) context = request_context(request.headers, mock=True) logger.debug(context) - # Note: Passing the request to the context is optional. 
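
Note on the hunk above: the local development server moves from Flask (WSGI) to FastAPI (ASGI), so it is now started with uvicorn rather than `app.run()` (see the docker-compose change further down in this diff). A minimal sketch of exercising the new `/graphql` endpoint, assuming the server is already running on port 5000 and that `requests` is available in the dev environment:

```python
import requests

# Start the ASGI app first, e.g.:
#   uvicorn local_graphql_server:app --host 0.0.0.0 --port 5000
payload = {'query': '{ __typename }'}  # any valid GraphQL document

# The handler reads the 'query' key and returns a JSONResponse:
# 200 on success, 400 when the query is missing or fails to parse.
response = requests.post('http://localhost:5000/graphql', json=payload)
print(response.status_code, response.json())
```
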
- # In Flask, the current request is always accessible as flask.request success, result = graphql_sync( schema, data, @@ -157,14 +154,4 @@ def graphql_server(): dispose_context() status_code = 200 if success else 400 - return jsonify(result), status_code - - -if __name__ == '__main__': - logger.info('Starting dataall flask local application') - app.run( - debug=True, # nosec - threaded=False, - host='0.0.0.0', - port=5000, - ) + return JSONResponse(result, status_code) diff --git a/backend/migrations/versions/04d92886fabe_add_consumption_roles.py b/backend/migrations/versions/04d92886fabe_add_consumption_roles.py index e15a34972..b7bde887f 100644 --- a/backend/migrations/versions/04d92886fabe_add_consumption_roles.py +++ b/backend/migrations/versions/04d92886fabe_add_consumption_roles.py @@ -12,9 +12,8 @@ from sqlalchemy.dialects import postgresql from sqlalchemy.ext.declarative import declarative_base -from dataall.core.environment.db.environment_models import Environment from dataall.core.environment.services.environment_service import EnvironmentService -from dataall.base.db import utils +from dataall.base.db import utils, Resource from datetime import datetime from dataall.core.permissions.services.permission_service import PermissionService @@ -33,6 +32,11 @@ Base = declarative_base() +class Environment(Resource, Base): + __tablename__ = 'environment' + environmentUri = Column(String, primary_key=True, default=utils.uuid('environment')) + + class EnvironmentGroup(Base): __tablename__ = 'environment_group_permission' groupUri = Column(String, primary_key=True) @@ -123,7 +127,7 @@ def upgrade(): bind = op.get_bind() session = orm.Session(bind=bind) print('Back-filling consumer role permissions for environments...') - envs = EnvironmentService.list_all_active_environments(session=session) + envs = session.query(Environment).filter(Environment.deleted.is_(None)).all() for env in envs: groups = EnvironmentService.get_all_environment_groups(session=session, uri=env.environmentUri, filter=None) for group in groups: diff --git a/backend/migrations/versions/328e35e39e1e_invite_env_groups_as_readers.py b/backend/migrations/versions/328e35e39e1e_invite_env_groups_as_readers.py index 5f243a8ff..43655246e 100644 --- a/backend/migrations/versions/328e35e39e1e_invite_env_groups_as_readers.py +++ b/backend/migrations/versions/328e35e39e1e_invite_env_groups_as_readers.py @@ -7,12 +7,14 @@ """ from alembic import op -from sqlalchemy import orm -from dataall.core.environment.db.environment_models import EnvironmentGroup, Environment +from sqlalchemy import orm, Column, String +from sqlalchemy.ext.declarative import declarative_base +from dataall.core.environment.db.environment_models import EnvironmentGroup from dataall.core.organizations.db.organization_repositories import OrganizationRepository from dataall.core.permissions.services.organization_permissions import GET_ORGANIZATION from dataall.core.organizations.db import organization_models as models from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService +from dataall.base.db import utils, Resource # revision identifiers, used by Alembic. 
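
Note on the migration hunks above and below: instead of importing `Environment` from `dataall.core.environment.db.environment_models`, each migration now declares a minimal local snapshot of the table it touches, so old migrations keep running unchanged as the application model evolves. A minimal sketch of the pattern, assuming only `sqlalchemy`; the column set and helper name are illustrative:

```python
from sqlalchemy import Column, String, orm
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Environment(Base):  # local snapshot, decoupled from the application model
    __tablename__ = 'environment'
    environmentUri = Column(String, primary_key=True)


def list_environments(session: orm.Session):
    # The migration only sees the columns declared above, which is all it needs.
    return session.query(Environment).all()
```
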
revision = '328e35e39e1e' @@ -20,6 +22,14 @@ branch_labels = None depends_on = None +Base = declarative_base() + + +class Environment(Resource, Base): + __tablename__ = 'environment' + environmentUri = Column(String, primary_key=True, default=utils.uuid('environment')) + organizationUri = Column(String, nullable=False) + def get_session(): bind = op.get_bind() diff --git a/backend/migrations/versions/49c6b18ed814_add_env_logs_bucket.py b/backend/migrations/versions/49c6b18ed814_add_env_logs_bucket.py new file mode 100644 index 000000000..f521d3337 --- /dev/null +++ b/backend/migrations/versions/49c6b18ed814_add_env_logs_bucket.py @@ -0,0 +1,55 @@ +"""add_env_logs_bucket + +Revision ID: 49c6b18ed814 +Revises: b21f86882012 +Create Date: 2024-11-13 19:16:18.030415 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy import orm, Column, String +from sqlalchemy.ext.declarative import declarative_base + +from dataall.base.db import Resource, utils +from dataall.base.utils.naming_convention import ( + NamingConventionService, + NamingConventionPattern, +) + +# revision identifiers, used by Alembic. +revision = '49c6b18ed814' +down_revision = '797dd1012be1' +branch_labels = None +depends_on = None + +Base = declarative_base() + + +class Environment(Resource, Base): + __tablename__ = 'environment' + environmentUri = Column(String, primary_key=True, default=utils.uuid('environment')) + resourcePrefix = Column(String, nullable=False, default='dataall') + EnvironmentLogsBucketName = Column(String, nullable=True) + + +def upgrade(): + op.add_column('environment', sa.Column('EnvironmentLogsBucketName', sa.String(), nullable=True)) + bind = op.get_bind() + session = orm.Session(bind=bind) + environments = session.query(Environment).all() + for env in environments: + env.EnvironmentLogsBucketName = NamingConventionService( + target_uri=env.environmentUri, + target_label='env-access-logs', + pattern=NamingConventionPattern.S3, + resource_prefix=env.resourcePrefix, + ).build_compliant_name() + session.commit() + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('environment', 'EnvironmentLogsBucketName') + # ### end Alembic commands ### diff --git a/backend/migrations/versions/a991ac7a85a2_drop_remove_group_permissions.py b/backend/migrations/versions/a991ac7a85a2_drop_remove_group_permissions.py index 64ded7dd5..424e22f8f 100644 --- a/backend/migrations/versions/a991ac7a85a2_drop_remove_group_permissions.py +++ b/backend/migrations/versions/a991ac7a85a2_drop_remove_group_permissions.py @@ -10,9 +10,11 @@ from dataall.core.permissions.services.environment_permissions import REMOVE_ENVIRONMENT_GROUP from dataall.core.permissions.db.resource_policy.resource_policy_models import ResourcePolicy from dataall.core.permissions.services.resource_policy_service import ResourcePolicyService +from dataall.base.db import utils, Resource + from alembic import op -from sqlalchemy import orm -from dataall.core.environment.db.environment_models import Environment +from sqlalchemy import orm, Column, String +from sqlalchemy.ext.declarative import declarative_base # revision identifiers, used by Alembic. 
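
Note on the `49c6b18ed814_add_env_logs_bucket` migration above: the backfill derives each environment's access-logs bucket name with the same `NamingConventionService` used at deploy time, so pre-existing environments get names consistent with newly created ones. A rough standalone approximation of what the S3 pattern produces; the real sanitization lives in `dataall.base.utils.naming_convention` and may differ in detail:

```python
def approximate_s3_bucket_name(resource_prefix: str, target_label: str, target_uri: str) -> str:
    # Illustrative only: S3 names must be lowercase and hyphen-separated;
    # data.all prepends the resource prefix and appends the target uri so
    # that each environment gets a unique, compliant bucket name.
    label = target_label.lower().replace('_', '-')
    return f'{resource_prefix}-{label}-{target_uri}'.lower()


print(approximate_s3_bucket_name('dataall', 'env-access-logs', 'abc123de'))
# e.g. 'dataall-env-access-logs-abc123de' (approximate output)
```
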
revision = 'a991ac7a85a2' @@ -21,6 +23,14 @@ depends_on = None +Base = declarative_base() + + +class Environment(Resource, Base): + __tablename__ = 'environment' + environmentUri = Column(String, primary_key=True, default=utils.uuid('environment')) + + def get_session(): bind = op.get_bind() session = orm.Session(bind=bind) diff --git a/backend/migrations/versions/af2e1362d4cb_add_tenant_share_permissions.py b/backend/migrations/versions/af2e1362d4cb_add_tenant_share_permissions.py new file mode 100644 index 000000000..f0e72db31 --- /dev/null +++ b/backend/migrations/versions/af2e1362d4cb_add_tenant_share_permissions.py @@ -0,0 +1,67 @@ +"""add_tenant_share_permissions + +Revision ID: af2e1362d4cb +Revises: 49c6b18ed814 +Create Date: 2024-11-18 15:23:08.215870 + +""" + +from alembic import op +from sqlalchemy import orm +from sqlalchemy.sql import and_ +from dataall.core.permissions.services.permission_service import PermissionService +from dataall.core.permissions.db.tenant.tenant_models import TenantPolicy +from dataall.core.permissions.db.tenant.tenant_policy_repositories import TenantPolicyRepository +from dataall.modules.shares_base.services.share_permissions import MANAGE_SHARES +from dataall.core.permissions.services.tenant_policy_service import TenantPolicyService + +# revision identifiers, used by Alembic. +revision = 'af2e1362d4cb' +down_revision = '49c6b18ed814' +branch_labels = None +depends_on = None +TENANT_NAME = 'dataall' + + +def upgrade(): + from dataall.core.permissions.db.permission.permission_models import Permission, PermissionType + + # Ensure all permissions including MANAGE_SHARES are created in the db + bind = op.get_bind() + session = orm.Session(bind=bind) + PermissionService.init_permissions(session) + + # listTenantGroups + tenant_groups = ( + session.query( + TenantPolicy.principalId.label('name'), + TenantPolicy.principalId.label('groupUri'), + ) + .filter( + and_( + TenantPolicy.principalType == 'GROUP', + TenantPolicy.principalId != 'DAAdministrators', + ) + ) + .all() + ) + # updateGroupTenantPermissions and add MANAGE_SHARES + for group in tenant_groups: + policy = TenantPolicyRepository.find_tenant_policy( + session=session, + group_uri=group.groupUri, + tenant_name=TENANT_NAME, + ) + already_associated = TenantPolicyRepository.has_group_tenant_permission( + session, + group_uri=group.groupUri, + permission_name=MANAGE_SHARES, + tenant_name=TENANT_NAME, + ) + + if not already_associated: + TenantPolicyService.associate_permission_to_tenant_policy(session, policy, MANAGE_SHARES) + + +def downgrade(): + pass diff --git a/backend/requirements.txt b/backend/requirements.txt index 5fccee041..a744f2fc1 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -1,10 +1,7 @@ ariadne==0.17.0 aws-xray-sdk==2.4.3 -boto3==1.28.23 -botocore==1.31.23 -fastapi == 0.109.2 -Flask==3.0.3 -flask-cors==4.0.1 +boto3==1.35.26 +fastapi == 0.115.5 nanoid==2.0.0 opensearch-py==1.0.0 PyAthena==2.3.0 @@ -14,5 +11,4 @@ PyYAML==6.0 requests==2.32.2 requests_aws4auth==1.1.1 sqlalchemy==1.3.24 -starlette==0.36.3 alembic==1.13.1 \ No newline at end of file diff --git a/deploy/requirements.txt b/deploy/requirements.txt index a67fb2621..11626e22c 100644 --- a/deploy/requirements.txt +++ b/deploy/requirements.txt @@ -1,8 +1,6 @@ -aws-cdk-lib==2.115.0 -boto3-stubs==1.20.20 -boto3==1.28.23 -botocore==1.31.23 +aws-cdk-lib==2.160.0 +boto3==1.35.26 +boto3-stubs==1.35.26 cdk-nag==2.7.2 typeguard==4.2.1 cdk-klayers==0.3.0 -constructs>=10.0.0,<11.0.0 diff --git 
a/deploy/stacks/cdk_nag_exclusions.py b/deploy/stacks/cdk_nag_exclusions.py index a9948088b..e1c80e0d2 100644 --- a/deploy/stacks/cdk_nag_exclusions.py +++ b/deploy/stacks/cdk_nag_exclusions.py @@ -28,6 +28,10 @@ 'id': 'AwsSolutions-CB3', 'reason': 'Access to docker daemon is required to build docker images', }, + { + 'id': 'AwsSolutions-SMG4', + 'reason': 'Database is used for test purposes', + }, ] BACKEND_STACK_CDK_NAG_EXCLUSIONS = [ diff --git a/deploy/stacks/cognito.py b/deploy/stacks/cognito.py index 661dc87a0..3a28225f2 100644 --- a/deploy/stacks/cognito.py +++ b/deploy/stacks/cognito.py @@ -99,6 +99,7 @@ def __init__( domain_prefix=f"{resource_prefix.replace('-', '')}{envname}{self.region.replace('-', '')}{self.account}" ), ) + jwt_token_duration = 180 if with_approval_tests else 60 self.client = cognito.UserPoolClient( self, f'AppClient-{envname}', @@ -106,6 +107,8 @@ def __init__( auth_flows=AuthFlow(user_password=with_approval_tests, user_srp=True, custom=True), prevent_user_existence_errors=True, refresh_token_validity=Duration.minutes(cognito_user_session_timeout_inmins), + id_token_validity=Duration.minutes(jwt_token_duration), + access_token_validity=Duration.minutes(jwt_token_duration), ) if enable_cw_rum: diff --git a/deploy/stacks/lambda_api.py b/deploy/stacks/lambda_api.py index 797a9097f..05109a870 100644 --- a/deploy/stacks/lambda_api.py +++ b/deploy/stacks/lambda_api.py @@ -138,7 +138,8 @@ def __init__( api_handler_env = { 'envname': envname, 'LOG_LEVEL': log_level, - 'REAUTH_TTL': str(reauth_ttl) + 'REAUTH_TTL': str(reauth_ttl), + 'ALLOW_INTROSPECTION': str(not prod_sizing), } # Check if custom domain exists and if it exists email notifications could be enabled. Create a env variable which stores the domain url. This is used for sending data.all share weblinks in the email notifications. 
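
Note on the `lambda_api.py` hunk above: the new `ALLOW_INTROSPECTION` environment variable is derived from `prod_sizing`, so schema introspection is only enabled outside production sizing. A sketch of one way a GraphQL handler can honor such a flag, using graphql-core's `NoSchemaIntrospectionCustomRule`; this illustrates the technique, not the literal `api_handler.py` change, which is outside this excerpt:

```python
import os

from graphql.validation import NoSchemaIntrospectionCustomRule

# 'True'/'False' is how the CDK stack serializes str(not prod_sizing) above.
ALLOW_INTROSPECTION = os.getenv('ALLOW_INTROSPECTION', 'True') == 'True'

# ariadne.graphql_sync accepts a validation_rules argument; adding the rule
# below makes any query touching __schema or __type fail validation.
validation_rules = None if ALLOW_INTROSPECTION else [NoSchemaIntrospectionCustomRule]
```
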
if custom_domain and custom_domain.get('hosted_zone_name', None): @@ -259,7 +260,7 @@ def __init__( ) # Initialize Klayers - runtime = _lambda.Runtime.PYTHON_3_9 + runtime = _lambda.Runtime.PYTHON_3_12 klayers = Klayers(self, python_version=runtime, region=self.region) # get the latest layer version for the cryptography package diff --git a/deploy/stacks/pipeline.py b/deploy/stacks/pipeline.py index 8664ae269..c68879750 100644 --- a/deploy/stacks/pipeline.py +++ b/deploy/stacks/pipeline.py @@ -792,7 +792,7 @@ def set_cloudfront_stage(self, target_env): 'echo "credential_source = EcsContainer" >> ~/.aws/config', 'aws sts get-caller-identity --profile buildprofile', 'export AWS_PROFILE=buildprofile', - 'pip install boto3==1.34.35', + 'pip install boto3==1.35.26', 'pip install beautifulsoup4', 'python deploy/configs/frontend_config.py', 'export AWS_DEFAULT_REGION=us-east-1', @@ -864,7 +864,7 @@ def cw_rum_config_action(self, target_env): 'aws sts get-caller-identity --profile buildprofile', 'export AWS_PROFILE=buildprofile', 'pip install --upgrade pip', - 'pip install boto3==1.34.35', + 'pip install boto3==1.35.26', 'python deploy/configs/rum_config.py', ], role=self.expanded_codebuild_role.without_policy_updates(), @@ -931,7 +931,7 @@ def set_albfront_stage(self, target_env, repository_name): 'echo "credential_source = EcsContainer" >> ~/.aws/config', 'aws sts get-caller-identity --profile buildprofile', 'export AWS_PROFILE=buildprofile', - 'pip install boto3==1.34.35', + 'pip install boto3==1.35.26', 'pip install beautifulsoup4', 'python deploy/configs/frontend_config.py', 'unset AWS_PROFILE', diff --git a/docker-compose.yaml b/docker-compose.yaml index 6b10cfeac..2e9597772 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -37,7 +37,7 @@ services: dockerfile: docker/dev/Dockerfile args: CONTAINER_UID: ${UID} - entrypoint: /bin/bash -c "../build/wait-for-it.sh elasticsearch:9200 -t 30 && python3.9 local_graphql_server.py" + entrypoint: /bin/bash -c "../build/wait-for-it.sh elasticsearch:9200 -t 30 && uvicorn local_graphql_server:app --host 0.0.0.0 --port 5000 --reload" expose: - 5000 ports: diff --git a/frontend/package-lock.json b/frontend/package-lock.json index d40dba0b8..35ced25d0 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -20,6 +20,7 @@ "@mui/styles": "^5.5.1", "@mui/x-data-grid": "^5.17.26", "@mui/x-date-pickers": "^5.0.0", + "@mui/x-tree-view": "^6.17.0", "@reduxjs/toolkit": "^1.8.0", "@testing-library/jest-dom": "^5.16.2", "@testing-library/react": "^12.1.4", @@ -28,7 +29,7 @@ "apexcharts": "^3.33.2", "apollo-boost": "^0.4.9", "aws-amplify": "^5.3.14", - "axios": "^1.6.5", + "axios": "^1.7.4", "braces": "3.0.3", "classnames": "^2.3.1", "date-fns": "^2.28.0", @@ -38,6 +39,7 @@ "graphql-tag": "^2.12.6", "json5": "^2.2.2", "jwt-decode": "^3.1.2", + "nanoid": "^3.3.8", "notistack": "^2.0.3", "nprogress": "^0.2.0", "nth-check": "^2.0.1", @@ -58,7 +60,9 @@ "react-scripts": "^5.0.1", "simplebar": "^5.3.6", "simplebar-react": "^2.3.6", + "uuid": "^10.0.0", "web-vitals": "^2.1.4", + "webpack": "^5.94.0", "yup": "^0.32.11" }, "devDependencies": { @@ -244,6 +248,15 @@ "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" }, + "node_modules/@aws-amplify/analytics/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": 
"sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@aws-amplify/api": { "version": "5.4.12", "resolved": "https://registry.npmjs.org/@aws-amplify/api/-/api-5.4.12.tgz", @@ -275,6 +288,15 @@ "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" }, + "node_modules/@aws-amplify/api-graphql/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@aws-amplify/api-graphql/node_modules/zen-observable-ts": { "version": "0.8.19", "resolved": "https://registry.npmjs.org/zen-observable-ts/-/zen-observable-ts-0.8.19.tgz", @@ -795,6 +817,15 @@ "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" }, + "node_modules/@aws-amplify/datastore/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@aws-amplify/datastore/node_modules/zen-observable-ts": { "version": "0.8.19", "resolved": "https://registry.npmjs.org/zen-observable-ts/-/zen-observable-ts-0.8.19.tgz", @@ -852,6 +883,15 @@ "uuid": "^3.2.1" } }, + "node_modules/@aws-amplify/notifications/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@aws-amplify/predictions": { "version": "5.5.12", "resolved": "https://registry.npmjs.org/@aws-amplify/predictions/-/predictions-5.5.12.tgz", @@ -876,6 +916,15 @@ "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" }, + "node_modules/@aws-amplify/predictions/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. 
Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@aws-amplify/pubsub": { "version": "5.5.12", "resolved": "https://registry.npmjs.org/@aws-amplify/pubsub/-/pubsub-5.5.12.tgz", @@ -897,6 +946,15 @@ "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" }, + "node_modules/@aws-amplify/pubsub/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@aws-amplify/pubsub/node_modules/zen-observable-ts": { "version": "0.8.19", "resolved": "https://registry.npmjs.org/zen-observable-ts/-/zen-observable-ts-0.8.19.tgz", @@ -1147,6 +1205,15 @@ "node": ">=10.0.0" } }, + "node_modules/@aws-sdk/client-comprehend/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@aws-sdk/client-firehose": { "version": "3.6.1", "resolved": "https://registry.npmjs.org/@aws-sdk/client-firehose/-/client-firehose-3.6.1.tgz", @@ -4583,27 +4650,6 @@ "node": ">= 12.0.0" } }, - "node_modules/@aws-sdk/client-sts/node_modules/fast-xml-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz", - "integrity": "sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g==", - "funding": [ - { - "type": "paypal", - "url": "https://paypal.me/naturalintelligence" - }, - { - "type": "github", - "url": "https://github.com/sponsors/NaturalIntelligence" - } - ], - "dependencies": { - "strnum": "^1.0.5" - }, - "bin": { - "fxparser": "src/cli/cli.js" - } - }, "node_modules/@aws-sdk/client-sts/node_modules/uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", @@ -4695,6 +4741,15 @@ "node": ">=10.0.0" } }, + "node_modules/@aws-sdk/client-translate/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@aws-sdk/config-resolver": { "version": "3.6.1", "resolved": "https://registry.npmjs.org/@aws-sdk/config-resolver/-/config-resolver-3.6.1.tgz", @@ -5620,6 +5675,15 @@ "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" }, + "node_modules/@aws-sdk/middleware-retry/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@aws-sdk/middleware-retry/node_modules/ws": { "version": "6.2.3", "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.3.tgz", @@ -11558,6 +11622,36 @@ "node": ">=6" } }, + "node_modules/@mui/x-tree-view": { + "version": "6.17.0", + "resolved": "https://registry.npmjs.org/@mui/x-tree-view/-/x-tree-view-6.17.0.tgz", + "integrity": "sha512-09dc2D+Rjg2z8KOaxbUXyPi0aw7fm2jurEtV8Xw48xJ00joLWd5QJm1/v4CarEvaiyhTQzHImNqdgeJW8ZQB6g==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.23.2", + "@mui/base": "^5.0.0-beta.20", + "@mui/utils": "^5.14.14", + "@types/react-transition-group": "^4.4.8", + "clsx": "^2.0.0", + "prop-types": "^15.8.1", + "react-transition-group": "^4.4.5" + }, + "engines": { + "node": ">=14.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/mui" + }, + "peerDependencies": { + "@emotion/react": "^11.9.0", + "@emotion/styled": "^11.8.1", + "@mui/material": "^5.8.6", + "@mui/system": "^5.8.0", + "react": "^17.0.0 || ^18.0.0", + "react-dom": "^17.0.0 || ^18.0.0" + } + }, "node_modules/@nicolo-ribaudo/eslint-scope-5-internals": { "version": "5.1.1-v1", "resolved": "https://registry.npmjs.org/@nicolo-ribaudo/eslint-scope-5-internals/-/eslint-scope-5-internals-5.1.1-v1.tgz", @@ -14039,15 +14133,6 @@ "@types/json-schema": "*" } }, - "node_modules/@types/eslint-scope": { - "version": "3.7.7", - "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.7.tgz", - "integrity": "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==", - "dependencies": { - "@types/eslint": "*", - "@types/estree": "*" - } - }, "node_modules/@types/estree": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", @@ -14890,10 +14975,11 @@ "node": ">=0.4.0" } }, - "node_modules/acorn-import-assertions": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz", - "integrity": "sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA==", + "node_modules/acorn-import-attributes": { + "version": "1.9.5", + "resolved": "https://registry.npmjs.org/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz", + "integrity": "sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==", + "license": "MIT", "peerDependencies": { "acorn": "^8" } @@ -15776,9 +15862,9 @@ } }, "node_modules/axios": { - "version": "1.7.2", - "resolved": 
"https://registry.npmjs.org/axios/-/axios-1.7.2.tgz", - "integrity": "sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw==", + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz", + "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==", "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.0", @@ -16301,9 +16387,10 @@ "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==" }, "node_modules/body-parser": { - "version": "1.20.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", - "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "license": "MIT", "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", @@ -16313,7 +16400,7 @@ "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.11.0", + "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" @@ -16355,6 +16442,21 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, + "node_modules/body-parser/node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/bonjour-service": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.2.1.tgz", @@ -17159,18 +17261,30 @@ } }, "node_modules/cross-spawn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", - "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.5.tgz", + "integrity": "sha512-ZVJrKKYunU38/76t0RMOulHOnUcbU9GbpWKAOZ0mhjr7CX6FVrH+4FrAapSOekrgFQ3f/8gwMEuIft0aKq6Hug==", "dependencies": { - "nice-try": "^1.0.4", - "path-key": "^2.0.1", - "semver": "^5.5.0", - "shebang-command": "^1.2.0", - "which": "^1.2.9" + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" }, "engines": { - "node": ">=4.8" + "node": ">= 8" + } + }, + "node_modules/cross-spawn/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" } }, "node_modules/crypto-js": { @@ -18197,9 +18311,10 @@ } }, "node_modules/enhanced-resolve": { - "version": "5.17.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.0.tgz", - "integrity": 
"sha512-dwDPwZL0dmye8Txp2gzFmA6sxALaSvdRDjPH0viLcKrtlOL3tw62nWWweVD1SdILDTJrbrL6tdWVN58Wo6U3eA==", + "version": "5.17.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz", + "integrity": "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg==", + "license": "MIT", "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" @@ -18232,65 +18347,6 @@ "node": ">=8.0.0" } }, - "node_modules/env-cmd/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/env-cmd/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/env-cmd/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/env-cmd/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/env-cmd/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/envinfo": { "version": "7.13.0", "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.13.0.tgz", @@ -19084,19 +19140,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/eslint/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/eslint/node_modules/globals": { "version": "13.24.0", "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", @@ -19119,33 +19162,6 @@ "node": ">=8" } }, - "node_modules/eslint/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, - 
"node_modules/eslint/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/eslint/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, "node_modules/eslint/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -19168,20 +19184,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/eslint/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/espree": { "version": "9.6.1", "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", @@ -19314,60 +19316,6 @@ "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, - "node_modules/execa/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/execa/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, - "node_modules/execa/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/execa/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, - "node_modules/execa/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/exit": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", @@ -19392,36 +19340,37 @@ } }, "node_modules/express": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", - 
"integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/express/-/express-4.20.0.tgz", + "integrity": "sha512-pLdae7I6QqShF5PnNTCVn4hI91Dx0Grkn2+IAsMTgMIKuQVte2dN9PeGSSAME2FR8anOhVA62QDIUaWVfEXVLw==", + "license": "MIT", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.2", + "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.6.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.2.0", "fresh": "0.5.2", "http-errors": "2.0.0", - "merge-descriptors": "1.0.1", + "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", + "path-to-regexp": "0.1.10", "proxy-addr": "~2.0.7", "qs": "6.11.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", - "send": "0.18.0", - "serve-static": "1.15.0", + "send": "0.19.0", + "serve-static": "1.16.0", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", @@ -19448,6 +19397,15 @@ "ms": "2.0.0" } }, + "node_modules/express/node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/express/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -19506,9 +19464,9 @@ "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" }, "node_modules/fast-xml-parser": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.0.tgz", - "integrity": "sha512-kLY3jFlwIYwBNDojclKsNAC12sfD6NwW74QB2CoNGPvtVxjliYehVunB3HYyNi+n4Tt1dAcgwYvmKF/Z18flqg==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", + "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", "funding": [ { "type": "github", @@ -19803,46 +19761,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/foreground-child/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/foreground-child/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, - "node_modules/foreground-child/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - 
"node_modules/foreground-child/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, "node_modules/foreground-child/node_modules/signal-exit": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", @@ -19854,20 +19772,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/foreground-child/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/fork-ts-checker-webpack-plugin": { "version": "6.5.3", "resolved": "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.3.tgz", @@ -20773,9 +20677,9 @@ } }, "node_modules/http-proxy-middleware": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz", - "integrity": "sha512-ya/UeJ6HVBYxrgYotAZo1KvPWlgB48kUJLDePFeneHsVujFaW5WNj2NgWCAE//B1Dl02BIfYlpNgBy8Kf8Rjmw==", + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.7.tgz", + "integrity": "sha512-fgVY8AV7qU7z/MmXJ/rxwbrtQH4jBQ9m7kp3llF0liB7glmFeVZFBepQb32T3y8n8k2+AEYuMPCpinYW+/CuRA==", "dependencies": { "@types/http-proxy": "^1.17.8", "http-proxy": "^1.18.1", @@ -26037,9 +25941,13 @@ "dev": true }, "node_modules/merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, "node_modules/merge-stream": { "version": "2.0.0", @@ -26743,15 +26651,6 @@ "integrity": "sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA==", "peer": true }, - "node_modules/metro/node_modules/ws": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.3.tgz", - "integrity": "sha512-jmTjYU0j60B+vHey6TfR3Z7RD61z/hmxBS3VMSGIrroOWXQEneK1zNuotOUrGyBHQj0yrpsLHPWtigEFd13ndA==", - "peer": true, - "dependencies": { - "async-limiter": "~1.0.0" - } - }, "node_modules/metro/node_modules/yargs": { "version": "17.7.2", "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", @@ -26780,9 +26679,9 @@ } }, "node_modules/micromatch": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", - "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dependencies": { "braces": "^3.0.3", 
"picomatch": "^2.3.1" @@ -26938,15 +26837,16 @@ "integrity": "sha512-wynEP02LmIbLpcYw8uBKpcfF6dmg2vcpKqxeH5UcoKEYdExslsdUA4ugFauuaeYdTB76ez6gJW8XAZ6CgkXYxA==" }, "node_modules/nanoid": { - "version": "3.3.7", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", - "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "version": "3.3.8", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", + "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/ai" } ], + "license": "MIT", "bin": { "nanoid": "bin/nanoid.cjs" }, @@ -26977,11 +26877,6 @@ "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==" }, - "node_modules/nice-try": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", - "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==" - }, "node_modules/no-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", @@ -27142,14 +27037,6 @@ "node": ">=8" } }, - "node_modules/npm-run-path/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, "node_modules/nprogress": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/nprogress/-/nprogress-0.2.0.tgz", @@ -27679,11 +27566,11 @@ } }, "node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "engines": { - "node": ">=4" + "node": ">=8" } }, "node_modules/path-parse": { @@ -27715,9 +27602,10 @@ } }, "node_modules/path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", + "license": "MIT" }, "node_modules/path-type": { "version": "4.0.0", @@ -29559,19 +29447,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/react-dev-utils/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, 
"node_modules/react-dev-utils/node_modules/has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -29588,33 +29463,6 @@ "node": ">= 12.13.0" } }, - "node_modules/react-dev-utils/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, - "node_modules/react-dev-utils/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/react-dev-utils/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, "node_modules/react-dev-utils/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -29626,20 +29474,6 @@ "node": ">=8" } }, - "node_modules/react-dev-utils/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/react-devtools-core": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/react-devtools-core/-/react-devtools-core-5.2.0.tgz", @@ -29650,15 +29484,6 @@ "ws": "^7" } }, - "node_modules/react-devtools-core/node_modules/ws": { - "version": "6.2.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.3.tgz", - "integrity": "sha512-jmTjYU0j60B+vHey6TfR3Z7RD61z/hmxBS3VMSGIrroOWXQEneK1zNuotOUrGyBHQj0yrpsLHPWtigEFd13ndA==", - "peer": true, - "dependencies": { - "async-limiter": "~1.0.0" - } - }, "node_modules/react-dom": { "version": "17.0.2", "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz", @@ -30352,14 +30177,16 @@ } }, "node_modules/rollup": { - "version": "2.79.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-2.79.1.tgz", - "integrity": "sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw==", + "version": "3.29.5", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.5.tgz", + "integrity": "sha512-GVsDdsbJzzy4S/v3dqWPJ7EfvZJfCHiDqe80IyrF59LYuP+e6U1LJoUqeuqRbwAWoMNoXivMNeNAOf5E22VA1w==", + "license": "MIT", "bin": { "rollup": "dist/bin/rollup" }, "engines": { - "node": ">=10.0.0" + "node": ">=14.18.0", + "npm": ">=8.0.0" }, "optionalDependencies": { "fsevents": "~2.3.2" @@ -30641,14 +30468,16 @@ "version": "5.7.2", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "peer": true, "bin": { "semver": "bin/semver" } }, "node_modules/send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": 
"sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "license": "MIT", "dependencies": { "debug": "2.6.9", "depd": "2.0.0", @@ -30774,9 +30603,10 @@ } }, "node_modules/serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.0.tgz", + "integrity": "sha512-pDLK8zwl2eKaYrs8mrPZBJua4hMplRWJ1tIFksVC3FtBEBnl8dxgeHtsaMS8DhS9i4fLObaon6ABoc4/hQGdPA==", + "license": "MIT", "dependencies": { "encodeurl": "~1.0.2", "escape-html": "~1.0.3", @@ -30846,22 +30676,22 @@ "integrity": "sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==" }, "node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "dependencies": { - "shebang-regex": "^1.0.0" + "shebang-regex": "^3.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/shell-quote": { @@ -32610,12 +32440,15 @@ } }, "node_modules/uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", - "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], "bin": { - "uuid": "bin/uuid" + "uuid": "dist/bin/uuid" } }, "node_modules/v8-to-istanbul": { @@ -32740,20 +32573,20 @@ } }, "node_modules/webpack": { - "version": "5.91.0", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.91.0.tgz", - "integrity": "sha512-rzVwlLeBWHJbmgTC/8TvAcu5vpJNII+MelQpylD4jNERPwpBJOE2lEcko1zJX3QJeLjTTAnQxn/OJ8bjDzVQaw==", + "version": "5.94.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.94.0.tgz", + "integrity": "sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg==", + "license": "MIT", "dependencies": { - "@types/eslint-scope": "^3.7.3", "@types/estree": "^1.0.5", "@webassemblyjs/ast": "^1.12.1", "@webassemblyjs/wasm-edit": "^1.12.1", "@webassemblyjs/wasm-parser": "^1.12.1", "acorn": "^8.7.1", - "acorn-import-assertions": "^1.9.0", + "acorn-import-attributes": "^1.9.5", "browserslist": "^4.21.10", "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.16.0", + "enhanced-resolve": "^5.17.1", "es-module-lexer": "^1.2.1", "eslint-scope": "5.1.1", "events": "^3.2.0", diff --git a/frontend/package.json b/frontend/package.json index 9afe2219b..ef210e710 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -29,6 +29,7 @@ "@mui/styles": "^5.5.1", "@mui/x-data-grid": "^5.17.26", "@mui/x-date-pickers": "^5.0.0", + "@mui/x-tree-view": "^6.17.0", "@reduxjs/toolkit": "^1.8.0", "@testing-library/jest-dom": "^5.16.2", "@testing-library/react": "^12.1.4", @@ -37,7 +38,8 @@ "apexcharts": "^3.33.2", "apollo-boost": "^0.4.9", "aws-amplify": "^5.3.14", - "axios": "^1.6.5", + "braces": "3.0.3", + "axios": "^1.7.4", "classnames": "^2.3.1", "date-fns": "^2.28.0", "dayjs": "^1.11.0", @@ -66,9 +68,11 @@ "react-scripts": "^5.0.1", "simplebar": "^5.3.6", "simplebar-react": "^2.3.6", + "uuid": "^10.0.0", "web-vitals": "^2.1.4", "yup": "^0.32.11", - "braces": "3.0.3" + "webpack": "^5.94.0", + "nanoid": "^3.3.8" }, "overrides": { "aws-amplify": { @@ -91,8 +95,15 @@ "ip": "1.1.9", "follow-redirects": "1.15.6", "webpack-dev-middleware": "5.3.4", - "express": "4.19.2", - "ejs": "3.1.10" + "express": "4.20.0", + "ejs": "3.1.10", + "fast-xml-parser": "4.4.1", + "path-to-regexp": "0.1.12", + "body-parser": "^1.20.3", + "send": "0.19.0", + "rollup": "3.29.5", + "http-proxy-middleware": "2.0.7", + "cross-spawn": "7.0.5" }, "resolutions": { "react-redux": "^7.2.6", @@ -105,9 +116,16 @@ "ip": "1.1.9", "follow-redirects": "1.15.6", "webpack-dev-middleware": "5.3.4", - "express": "4.19.2", + "express": "4.20.0", "ejs": "3.1.10", - "ws": "^8.17.1" + "ws": "^8.17.1", + "fast-xml-parser": "4.4.1", + "path-to-regexp": "0.1.12", + "body-parser": "^1.20.3", + "send": "0.19.0", + "rollup": "3.29.5", + "http-proxy-middleware": "2.0.7", + "cross-spawn": "7.0.5" }, "devDependencies": { "env-cmd": "^10.1.0", diff --git a/frontend/src/design/components/UpVoteButton.js b/frontend/src/design/components/UpVoteButton.js index 9b99e3913..e4ca2dd13 100644 --- a/frontend/src/design/components/UpVoteButton.js +++ b/frontend/src/design/components/UpVoteButton.js @@ -4,10 +4,11 @@ import * as PropTypes from 'prop-types'; import React from 'react'; export const UpVoteButton = (props) => { - 
const { upVoted, onClick, upVotes } = props;
+ const { upVoted, onClick, upVotes, disabled } = props;
return (