diff --git a/.gitattributes b/.gitattributes index 79f44a6b2637..5f47e6acc8ee 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1,2 @@ docker/**/*.sh text eol=lf +*.svg binary diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS deleted file mode 100644 index 2daab735a5e4..000000000000 --- a/.github/CODEOWNERS +++ /dev/null @@ -1,32 +0,0 @@ -# Notify all committers of DB migration changes, per SIP-59 - -# https://github.com/apache/superset/issues/13351 - -/superset/migrations/ @apache/superset-committers - -# Notify some committers of changes in the components - -/superset-frontend/src/components/Select/ @michael-s-molina @geido @kgabryje -/superset-frontend/src/components/MetadataBar/ @michael-s-molina @geido @kgabryje -/superset-frontend/src/components/DropdownContainer/ @michael-s-molina @geido @kgabryje - -# Notify Helm Chart maintainers about changes in it - -/helm/superset/ @craig-rueda @dpgaspar @villebro - -# Notify E2E test maintainers of changes - -/superset-frontend/cypress-base/ @jinghua-qa @geido @eschutho @rusackas @betodealmeida - -# Notify PMC members of changes to GitHub Actions - -/.github/ @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar - -# Notify PMC members of changes to required Github Actions - -/.asf.yaml @villebro @geido @eschutho @rusackas @betodealmeida @nytai @mistercrunch @craig-rueda @john-bodley @kgabryje @dpgaspar - -# Maps are a finnicky contribution process we care about - -**/*.geojson @villebro @rusackas -/superset-frontend/plugins/legacy-plugin-chart-country-map/ @villebro @rusackas diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml deleted file mode 100644 index 48d26dbd4e9b..000000000000 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ /dev/null @@ -1,104 +0,0 @@ -name: Bug report -description: Report a bug to improve Superset's stability -labels: ["bug"] -body: - - type: markdown - attributes: - value: | - 
Hello Superset Community member! Please keep things tidy by putting your post in the proper place: - - 🚨 Reporting a security issue: send an email to security@superset.apache.org. DO NOT USE GITHUB ISSUES TO REPORT SECURITY PROBLEMS. - 🐛 Reporting a bug: use this form. - 🙏 Asking a question or getting help: post in the [Superset Slack chat](http://bit.ly/join-superset-slack) or [GitHub Discussions](https://github.com/apache/superset/discussions) under "Q&A / Help". - 💡 Requesting a new feature: Search [GitHub Discussions](https://github.com/apache/superset/discussions) to see if it exists already. If not, add a new post there under "Ideas". - - type: textarea - id: bug-description - attributes: - label: Bug description - description: A clear and concise description of what the bug is. - validations: - required: true - - type: textarea - id: repro-steps - attributes: - label: How to reproduce the bug - placeholder: | - 1. Go to '...' - 2. Click on '....' - 3. Scroll down to '....' - 4. See error - validations: - required: true - - type: textarea - id: screenshots-recordings - attributes: - label: Screenshots/recordings - description: If applicable, add screenshots or recordings to help explain your problem. - - type: markdown - attributes: - value: | - ### Environment - - Please specify your environment. If your environment does not match the alternatives, you need to upgrade your environment before submitting the issue as it may have already been fixed. For additional information about the releases, see [Release Process](https://github.com/apache/superset/wiki/Release-Process). 
- - type: dropdown - id: superset-version - attributes: - label: Superset version - options: - - master / latest-dev - - "4.0.2" - - "3.1.3" - validations: - required: true - - type: dropdown - id: python-version - attributes: - label: Python version - options: - - "3.9" - - "3.10" - - "3.11" - - Not applicable - - I don't know - validations: - required: true - - type: dropdown - id: node-version - attributes: - label: Node version - options: - - "16" - - "17" - - "18 or greater" - - Not applicable - - I don't know - validations: - required: true - - type: dropdown - id: browser - attributes: - label: Browser - options: - - Chrome - - Firefox - - Safari - - Not applicable - validations: - required: true - - type: textarea - id: additional-context - attributes: - label: Additional context - description: | - Add any other context about the problem here such as the feature flags that you have enabled, any customizations you have made, the data source you are querying, etc. - - type: checkboxes - id: checklist - attributes: - label: Checklist - description: Make sure to follow these steps before submitting your issue - thank you! - options: - - label: I have searched Superset docs and Slack and didn't find a solution to my problem. - - label: I have searched the GitHub issue tracker and didn't find a similar bug report. - - label: I have checked Superset's logs for errors and if I found a relevant Python stacktrace, I included it here as text in the "additional context" section. 
- validations: - required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml deleted file mode 100644 index 5f465d264857..000000000000 --- a/.github/ISSUE_TEMPLATE/config.yml +++ /dev/null @@ -1,12 +0,0 @@ ---- -blank_issues_enabled: false -contact_links: - - name: Feature Request - url: https://github.com/apache/superset/discussions/new?category=ideas - about: Propose a feature request to the Superset community - - name: Q&A - url: https://github.com/apache/superset/discussions/new?category=q-a-help - about: Open a community Q&A thread on GitHub Discussions - - name: Slack - url: https://bit.ly/join-superset-slack - about: Join the Superset Community on Slack for other discussions and assistance diff --git a/.github/ISSUE_TEMPLATE/cosmetic.md b/.github/ISSUE_TEMPLATE/cosmetic.md deleted file mode 100644 index 1a2e6ea2da02..000000000000 --- a/.github/ISSUE_TEMPLATE/cosmetic.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -name: Cosmetic Issue -about: Describe a cosmetic issue with CSS, positioning, layout, labeling, or similar -labels: "cosmetic-issue" ---- - -## Screenshot - -[drag & drop image(s) here!] - -## Description - -[describe the issue here!] - -## Design input -[describe any input/collaboration you'd like from designers, and -tag accordingly. For design review, add the -label `design:review`. If this includes a design proposal, -include the label `design:suggest`] diff --git a/.github/ISSUE_TEMPLATE/sip.md b/.github/ISSUE_TEMPLATE/sip.md deleted file mode 100644 index d0ca3ef1d940..000000000000 --- a/.github/ISSUE_TEMPLATE/sip.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -name: SIP -about: "Superset Improvement Proposal. See SIP-0 (https://github.com/apache/superset/issues/5602) for details. A SIP introduces any major change into Apache Superset's code or process." 
-labels: sip -title: "[SIP] Your Title Here (do not add SIP number)" -assignees: "apache/superset-committers" ---- - -*Please make sure you are familiar with the SIP process documented* -[here](https://github.com/apache/superset/issues/5602). The SIP will be numbered by a committer upon acceptance. - -## [SIP] Proposal for ... - -### Motivation - -Description of the problem to be solved. - -### Proposed Change - -Describe how the feature will be implemented, or the problem will be solved. If possible, include mocks, screenshots, or screencasts (even if from different tools). - -### New or Changed Public Interfaces - -Describe any new additions to the model, views or `REST` endpoints. Describe any changes to existing visualizations, dashboards and React components. Describe changes that affect the Superset CLI and how Superset is deployed. - -### New dependencies - -Describe any `npm`/`PyPI` packages that are required. Are they actively maintained? What are their licenses? - -### Migration Plan and Compatibility - -Describe any database migrations that are necessary, or updates to stored URLs. - -### Rejected Alternatives - -Describe alternative approaches that were considered and rejected. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index 02de720f7527..000000000000 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,27 +0,0 @@ -<!--- -Please write the PR title following the conventions at https://www.conventionalcommits.org/en/v1.0.0/ -Example: -fix(dashboard): load charts correctly ---> - -### SUMMARY -<!--- Describe the change below, including rationale and design decisions --> - -### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF -<!--- Skip this if not applicable --> - -### TESTING INSTRUCTIONS -<!--- Required! What steps can be taken to manually verify the changes? 
--> - -### ADDITIONAL INFORMATION -<!--- Check any relevant boxes with "x" --> -<!--- HINT: Include "Fixes #nnn" if you are fixing an existing issue --> -- [ ] Has associated issue: -- [ ] Required feature flags: -- [ ] Changes UI -- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351)) - - [ ] Migration is atomic, supports rollback & is backwards-compatible - - [ ] Confirm DB migration upgrade and downgrade tested - - [ ] Runtime estimates and downtime expectations provided -- [ ] Introduces new feature or API -- [ ] Removes existing feature or API diff --git a/.github/SECURITY.md b/.github/SECURITY.md deleted file mode 100644 index 086ff8c0cad0..000000000000 --- a/.github/SECURITY.md +++ /dev/null @@ -1,38 +0,0 @@ -# Security Policy - -This is a project of the [Apache Software Foundation](https://apache.org) and follows the -ASF [vulnerability handling process](https://apache.org/security/#vulnerability-handling). - -## Reporting Vulnerabilities - -**⚠️ Please do not file GitHub issues for security vulnerabilities as they are public! ⚠️** - - -Apache Software Foundation takes a rigorous standpoint in annihilating the security issues -in its software projects. Apache Superset is highly sensitive and forthcoming to issues -pertaining to its features and functionality. -If you have any concern or believe you have found a vulnerability in Apache Superset, -please get in touch with the Apache Superset Security Team privately at -e-mail address [security@superset.apache.org](mailto:security@superset.apache.org). 
- -More details can be found on the ASF website at -[ASF vulnerability reporting process](https://apache.org/security/#reporting-a-vulnerability) - -We kindly ask you to include the following information in your report: -- Apache Superset version that you are using -- A sanitized copy of your `superset_config.py` file or any config overrides -- Detailed steps to reproduce the vulnerability - -Note that Apache Superset is not responsible for any third-party dependencies that may -have security issues. Any vulnerabilities found in third-party dependencies should be -reported to the maintainers of those projects. Results from security scans of Apache -Superset dependencies found on its official Docker image can be remediated at release time -by extending the image itself. - -**Your responsible disclosure and collaboration are invaluable.** - -## Extra Information - - - [Apache Superset documentation](https://superset.apache.org/docs/security) - - [Common Vulnerabilities and Exposures by release](https://superset.apache.org/docs/security/cves) - - [How Security Vulnerabilities are Reported & Handled in Apache Superset (Blog)](https://preset.io/blog/how-security-vulnerabilities-are-reported-and-handled-in-apache-superset/) diff --git a/.github/actions/change-detector/action.yml b/.github/actions/change-detector/action.yml deleted file mode 100644 index d0f356e771db..000000000000 --- a/.github/actions/change-detector/action.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: 'Change Detector' -description: 'Detects file changes for pull request and push events' -inputs: - token: - description: 'GitHub token for authentication' - required: true -outputs: - python: - description: 'Whether Python-related files were changed' - value: ${{ steps.change-detector.outputs.python }} - frontend: - description: 'Whether frontend-related files were changed' - value: ${{ steps.change-detector.outputs.frontend }} - docker: - description: 'Whether docker-related files were changed' - value: ${{ 
steps.change-detector.outputs.docker }} - docs: - description: 'Whether docs-related files were changed' - value: ${{ steps.change-detector.outputs.docs }} -runs: - using: 'composite' - steps: - - name: Detect file changes - id: change-detector - run: | - python --version - python scripts/change_detector.py - shell: bash - env: - GITHUB_TOKEN: ${{ inputs.token }} - GITHUB_OUTPUT: ${{ github.output }} diff --git a/.github/actions/setup-backend/action.yml b/.github/actions/setup-backend/action.yml deleted file mode 100644 index 565b02900f0b..000000000000 --- a/.github/actions/setup-backend/action.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: 'Setup Python Environment' -description: 'Set up Python and install dependencies with optional configurations.' -inputs: - python-version: - description: 'Python version to set up. Accepts a version number, "current", or "next".' - required: true - default: 'current' - cache: - description: 'Cache dependencies. Options: pip' - required: false - default: 'pip' - requirements-type: - description: 'Type of requirements to install. Options: base, development, default' - required: false - default: 'dev' - install-superset: - description: 'Whether to install Superset itself. 
If false, only python is installed' - required: false - default: 'true' - -runs: - using: 'composite' - steps: - - name: Interpret Python Version - id: set-python-version - shell: bash - run: | - if [ "${{ inputs.python-version }}" = "current" ]; then - echo "PYTHON_VERSION=3.10" >> $GITHUB_ENV - elif [ "${{ inputs.python-version }}" = "next" ]; then - echo "PYTHON_VERSION=3.11" >> $GITHUB_ENV - elif [ "${{ inputs.python-version }}" = "previous" ]; then - echo "PYTHON_VERSION=3.9" >> $GITHUB_ENV - else - echo "PYTHON_VERSION=${{ inputs.python-version }}" >> $GITHUB_ENV - fi - - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - cache: ${{ inputs.cache }} - - name: Install dependencies - run: | - if [ "${{ inputs.install-superset }}" = "true" ]; then - sudo apt-get update && sudo apt-get -y install libldap2-dev libsasl2-dev - pip install --upgrade pip setuptools wheel - if [ "${{ inputs.requirements-type }}" = "dev" ]; then - pip install -r requirements/development.txt - elif [ "${{ inputs.requirements-type }}" = "base" ]; then - pip install -r requirements/base.txt - fi - fi - shell: bash diff --git a/.github/actions/setup-supersetbot/action.yml b/.github/actions/setup-supersetbot/action.yml deleted file mode 100644 index c557b9b7e83f..000000000000 --- a/.github/actions/setup-supersetbot/action.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: 'Setup supersetbot' -description: 'Sets up supersetbot npm lib from the repo or npm' -inputs: - from-npm: - description: 'Install from npm instead of local setup' - required: false - default: 'true' # Defaults to using the local setup -runs: - using: 'composite' - steps: - - - name: Setup Node Env - uses: actions/setup-node@v4 - with: - node-version: '20' - - - name: Install supersetbot from npm - if: ${{ inputs.from-npm == 'true' }} - shell: bash - run: npm install -g supersetbot - - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - if: ${{ 
inputs.from-npm == 'false' }} - uses: actions/checkout@v4 - with: - repository: apache-superset/supersetbot - path: supersetbot - - - name: Setup supersetbot from repo - if: ${{ inputs.from-npm == 'false' }} - shell: bash - working-directory: supersetbot - run: | - # simple trick to install globally with dependencies - npm pack - npm install -g ./supersetbot*.tgz - - - name: echo supersetbot version - shell: bash - run: supersetbot version diff --git a/.github/config.yml b/.github/config.yml deleted file mode 100644 index 23c1258d5405..000000000000 --- a/.github/config.yml +++ /dev/null @@ -1,15 +0,0 @@ -# Configuration for request-info - https://github.com/behaviorbot/request-info - -# *Required* Comment to reply with -requestInfoReplyComment: > - We would appreciate it if you could provide us with more info about this issue/pr! - Please do not leave the `title` or `description` empty. - -# *OPTIONAL* default titles to check against for lack of descriptiveness -# MUST BE ALL LOWERCASE -requestInfoDefaultTitles: - - update readme.md - - updates - -# *OPTIONAL* Label to be added to Issues and Pull Requests with insufficient information given -requestInfoLabelToAdd: "need:more-info" diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index 447ffec10a59..000000000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,351 +0,0 @@ -version: 2 -updates: - - - package-ecosystem: "github-actions" - directory: "/" - schedule: - interval: "monthly" - - - package-ecosystem: "npm" - ignore: - # not until node >= 18.12.0 - - dependency-name: "css-minimizer-webpack-plugin" - directory: "/superset-frontend/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 30 - versioning-strategy: increase - - - # - package-ecosystem: "pip" - # NOTE: as dependabot isn't compatible with our python - # dependency setup (pip-compile-multi), we'll be using - # `supersetbot` instead - - - package-ecosystem: "npm" - 
directory: ".github/actions" - schedule: - interval: "monthly" - open-pull-requests-limit: 10 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/docs/" - schedule: - interval: "monthly" - open-pull-requests-limit: 10 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-websocket/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-websocket/utils/client-ws-app/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 10 - versioning-strategy: increase - - # Now for all of our plugins and packages! - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-calendar/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-histogram/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-partition/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-world-map/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/plugin-chart-pivot-table/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-chord/" - schedule: - interval: "monthly" 
- labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-horizon/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-rose/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-preset-chart-deckgl/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/plugin-chart-table/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-country-map/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-map-box/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-sankey/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-preset-chart-nvd3/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: 
"/superset-frontend/plugins/plugin-chart-word-cloud/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-event-flow/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-paired-t-test/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-sankey-loop/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/plugin-chart-echarts/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/preset-chart-xy/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-heatmap/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-parallel-coordinates/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/legacy-plugin-chart-sunburst/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - 
open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/plugins/plugin-chart-handlebars/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/packages/generator-superset/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/packages/superset-ui-chart-controls/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/packages/superset-ui-core/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/packages/superset-ui-demo/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase - - - package-ecosystem: "npm" - directory: "/superset-frontend/packages/superset-ui-switchboard/" - schedule: - interval: "monthly" - labels: - - npm - - dependabot - open-pull-requests-limit: 5 - versioning-strategy: increase diff --git a/.github/issue_label_bot.yaml b/.github/issue_label_bot.yaml deleted file mode 100644 index 8af21514c3d9..000000000000 --- a/.github/issue_label_bot.yaml +++ /dev/null @@ -1,5 +0,0 @@ -# for Issue Label Bot https://github.com/marketplace/issue-label-bot -label-alias: - bug: '#bug' - feature_request: '#enhancement' - question: '#question' diff --git a/.github/labeler.yml b/.github/labeler.yml deleted file mode 100644 index 291516e11b50..000000000000 --- a/.github/labeler.yml +++ /dev/null @@ -1,153 +0,0 @@ -# TODO (if we can) -# - Label PRs in need of codeowner review -# 
- viz:charts:xyz labels -# – component/design system areas -# - storybook(s) -# - f/e and b/e test changes? -# - product areas (SQL Lab, Explore, Dashboard, etc.) -# - database areas (SQLAlchemy, labelind DBs by driver, etc.) - -############################################ -# General workflow warnings -# full list of labels is here: https://github.com/apache/superset/labels -############################################ - -"risk:db-migration": -- changed-files: - - any-glob-to-any-file: - - 'superset/migrations/**' - -############################################ -# Dependencies -############################################ - -"dependencies:python": -- changed-files: - - any-glob-to-any-file: - - 'superset/requirements/**' - - 'superset/translations/requirements.txt' - - 'RELEASING/requirements.txt' - -"dependencies:npm": -- changed-files: - - any-glob-to-any-file: - - 'superset-frontend/package.json' - - 'superset-frontend/package-lock.json' - - 'superset-embedded-sdk/package.json' - - 'superset-embedded-sdk/package-lock.json' - - 'superset-websocket/package.json' - - 'superset-websocket/package-lock.json' - - 'superset-frontend/cypress-base/package.json' - - 'superset-frontend/cypress-base/package-lock.json' - - 'superset-frontend/packages/**/package.json' - - 'superset-frontend/plugins/**/package.json' - -############################################ -# Areas of the main codebase -############################################ - -"doc": -- changed-files: - - any-glob-to-any-file: - - 'docs/**' - -"api": -- changed-files: - - any-glob-to-any-file: - - 'superset/**/api.py' - - 'superset/views/core.py' - -"i18n": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/**' - -"i18n:brazilian": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/pt_BR/**' - -"i18n:chinese": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/zh/**' - -"i18n:traditional-chinese": -- changed-files: - - any-glob-to-any-file: - - 
'superset/translations/zh_TW/**' - -"i18n:dutch": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/nl/**' - -"i18n:french": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/fr/**' - -"i18n:italian": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/it/**' - -"i18n:japanese": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/ja/**' - -"i18n:korean": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/ko/**' - -"i18n:portuguese": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/pt/**' - -"i18n:russian": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/ru/**' - -"i18n:slovak": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/sk/**' - -"i18n:ukrainian": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/uk/**' - -"i18n:spanish": -- changed-files: - - any-glob-to-any-file: - - 'superset/translations/es/**' - -############################################ -# Sub-projects and monorepo packages -############################################ - -"plugins": -- changed-files: - - any-glob-to-any-file: - - 'superset-frontend/plugins/**' - -"packages": -- changed-files: - - any-glob-to-any-file: - - 'superset-frontend/packages/**' - -"embedded": -- changed-files: - - any-glob-to-any-file: - - 'superset-embedded-sdk/**' - -"github_actions": -- changed-files: - - any-glob-to-any-file: - - '.github/actions/**' - - '.github/workflows/**' diff --git a/.github/move.yml b/.github/move.yml deleted file mode 100644 index 3c4320b5cd02..000000000000 --- a/.github/move.yml +++ /dev/null @@ -1,27 +0,0 @@ -# Configuration for Move Issues - https://github.com/dessant/move-issues - -# Delete the command comment when it contains no other content -deleteCommand: true - -# Close the source issue after moving -closeSourceIssue: true - -# Lock the source issue after moving -lockSourceIssue: false - -# Mention 
issue and comment authors -mentionAuthors: true - -# Preserve mentions in the issue content -keepContentMentions: false - -# Move labels that also exist on the target repository -moveLabels: true - -# Set custom aliases for targets -# aliases: -# r: repo -# or: owner/repo - -# Repository to extend settings from -# _extends: repo diff --git a/.github/stale.yml b/.github/stale.yml deleted file mode 100644 index bf5e23ad2fe2..000000000000 --- a/.github/stale.yml +++ /dev/null @@ -1,19 +0,0 @@ -# Number of days of inactivity before an issue becomes stale -daysUntilStale: 60 -# Number of days of inactivity before a stale issue is closed -daysUntilClose: 7 -# Issues with these labels will never be considered stale -exemptLabels: - - "#SIP" - - ".pinned" - - ".security" -# Label to use when marking an issue as stale -staleLabel: inactive -# Comment to post when marking an issue as stale. Set to `false` to disable -markComment: > - This issue has been automatically marked as stale because it has not had - recent activity. It will be closed if no further activity occurs. - Thank you for your contributions. For admin, please label this issue `.pinned` - to prevent stale bot from closing the issue. -# Comment to post when closing a stale issue. Set to `false` to disable -closeComment: false diff --git a/.github/workflows/bashlib.sh b/.github/workflows/bashlib.sh deleted file mode 100644 index 31e7d04b23bc..000000000000 --- a/.github/workflows/bashlib.sh +++ /dev/null @@ -1,202 +0,0 @@ -#!/bin/bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -set -e - -GITHUB_WORKSPACE=${GITHUB_WORKSPACE:-.} -ASSETS_MANIFEST="$GITHUB_WORKSPACE/superset/static/assets/manifest.json" - -# Rounded job start time, used to create a unique Cypress build id for -# parallelization so we can manually rerun a job after 20 minutes -NONCE=$(echo "$(date "+%Y%m%d%H%M") - ($(date +%M)%20)" | bc) - -# Echo only when not in parallel mode -say() { - if [[ $(echo "$INPUT_PARALLEL" | tr '[:lower:]' '[:upper:]') != 'TRUE' ]]; then - echo "$1" - fi -} - -pip-upgrade() { - say "::group::Upgrade pip" - pip install --upgrade pip - say "::endgroup::" -} - -# prepare (lint and build) frontend code -npm-install() { - cd "$GITHUB_WORKSPACE/superset-frontend" - - # cache-restore npm - say "::group::Install npm packages" - echo "npm: $(npm --version)" - echo "node: $(node --version)" - npm ci - say "::endgroup::" - - # cache-save npm -} - -build-assets() { - cd "$GITHUB_WORKSPACE/superset-frontend" - - say "::group::Build static assets" - npm run build - say "::endgroup::" -} - -build-instrumented-assets() { - cd "$GITHUB_WORKSPACE/superset-frontend" - - say "::group::Build static assets with JS instrumented for test coverage" - cache-restore instrumented-assets - if [[ -f "$ASSETS_MANIFEST" ]]; then - echo 'Skip frontend build because instrumented static assets already exist.' 
- else - npm run build-instrumented - cache-save instrumented-assets - fi - say "::endgroup::" -} - -setup-postgres() { - say "::group::Install dependency for unit tests" - sudo apt-get update && sudo apt-get install --yes libecpg-dev - say "::group::Initialize database" - psql "postgresql://superset:superset@127.0.0.1:15432/superset" <<-EOF - DROP SCHEMA IF EXISTS sqllab_test_db CASCADE; - DROP SCHEMA IF EXISTS admin_database CASCADE; - CREATE SCHEMA sqllab_test_db; - CREATE SCHEMA admin_database; -EOF - say "::endgroup::" -} - -setup-mysql() { - say "::group::Initialize database" - mysql -h 127.0.0.1 -P 13306 -u root --password=root <<-EOF - SET GLOBAL transaction_isolation='READ-COMMITTED'; - SET GLOBAL TRANSACTION ISOLATION LEVEL READ COMMITTED; - DROP DATABASE IF EXISTS superset; - CREATE DATABASE superset DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci; - DROP DATABASE IF EXISTS sqllab_test_db; - CREATE DATABASE sqllab_test_db DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci; - DROP DATABASE IF EXISTS admin_database; - CREATE DATABASE admin_database DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci; - CREATE USER 'superset'@'%' IDENTIFIED BY 'superset'; - GRANT ALL ON *.* TO 'superset'@'%'; - FLUSH PRIVILEGES; -EOF - say "::endgroup::" -} - -testdata() { - cd "$GITHUB_WORKSPACE" - say "::group::Load test data" - # must specify PYTHONPATH to make `tests.superset_test_config` importable - export PYTHONPATH="$GITHUB_WORKSPACE" - pip install -e . 
- superset db upgrade - superset load_test_users - superset load_examples --load-test-data - superset init - say "::endgroup::" -} - -celery-worker() { - cd "$GITHUB_WORKSPACE" - say "::group::Start Celery worker" - # must specify PYTHONPATH to make `tests.superset_test_config` importable - export PYTHONPATH="$GITHUB_WORKSPACE" - celery \ - --app=superset.tasks.celery_app:app \ - worker \ - --concurrency=2 \ - --detach \ - --optimization=fair - say "::endgroup::" -} - -cypress-install() { - cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base" - - cache-restore cypress - - say "::group::Install Cypress" - npm ci - say "::endgroup::" - - cache-save cypress -} - -cypress-run-all() { - local USE_DASHBOARD=$1 - cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base" - - # Start Flask and run it in background - # --no-debugger means disable the interactive debugger on the 500 page - # so errors can print to stderr. - local flasklog="${HOME}/flask.log" - local port=8081 - export CYPRESS_BASE_URL="http://localhost:${port}" - - nohup flask run --no-debugger -p $port >"$flasklog" 2>&1 </dev/null & - local flaskProcessId=$! 
- - USE_DASHBOARD_FLAG='' - if [ "$USE_DASHBOARD" = "true" ]; then - USE_DASHBOARD_FLAG='--use-dashboard' - fi - - python ../../scripts/cypress_run.py --parallelism $PARALLELISM --parallelism-id $PARALLEL_ID $USE_DASHBOARD_FLAG - - # After job is done, print out Flask log for debugging - echo "::group::Flask log for default run" - cat "$flasklog" - echo "::endgroup::" - # make sure the program exits - kill $flaskProcessId -} - -eyes-storybook-dependencies() { - say "::group::install eyes-storyook dependencies" - sudo apt-get update -y && sudo apt-get -y install gconf-service ca-certificates libxshmfence-dev fonts-liberation libappindicator3-1 libasound2 libatk-bridge2.0-0 libatk1.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgbm1 libgcc1 libgconf-2-4 libglib2.0-0 libgdk-pixbuf2.0-0 libgtk-3-0 libnspr4 libnss3 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 lsb-release xdg-utils libappindicator1 - say "::endgroup::" -} - -cypress-run-applitools() { - cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base" - - local flasklog="${HOME}/flask.log" - local port=8081 - local cypress="./node_modules/.bin/cypress run" - local browser=${CYPRESS_BROWSER:-chrome} - - export CYPRESS_BASE_URL="http://localhost:${port}" - - nohup flask run --no-debugger -p $port >"$flasklog" 2>&1 </dev/null & - local flaskProcessId=$! 
- - $cypress --spec "cypress/applitools/**/*" --browser "$browser" --headless - - say "::group::Flask log for default run" - cat "$flasklog" - say "::endgroup::" - - # make sure the program exits - kill $flaskProcessId -} diff --git a/.github/workflows/bump-python-package.yml b/.github/workflows/bump-python-package.yml deleted file mode 100644 index 846291828abb..000000000000 --- a/.github/workflows/bump-python-package.yml +++ /dev/null @@ -1,68 +0,0 @@ -name: Bump Python Package - -on: - # Can be triggered manually - workflow_dispatch: - inputs: - package: - required: false - description: The python package to bump (all if empty) - group: - required: false - description: The optional dependency group to bump (as defined in pyproject.toml) - limit: - required: true - description: Max number of PRs to open (0 for no limit) - default: 5 - -jobs: - bump-python-package: - runs-on: ubuntu-22.04 - permissions: - actions: write - contents: write - pull-requests: write - checks: write - steps: - - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: true - ref: master - - - name: Setup supersetbot - uses: ./.github/actions/setup-supersetbot/ - - - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Install pip-compile-multi - run: pip install pip-compile-multi - - - name: supersetbot bump-python -p "${{ github.event.inputs.package }}" - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - git config --global user.email "action@github.com" - git config --global user.name "GitHub Action" - - PACKAGE_OPT="" - if [ -n "${{ github.event.inputs.package }}" ]; then - PACKAGE_OPT="-p ${{ github.event.inputs.package }}" - fi - - GROUP_OPT="" - if [ -n "${{ github.event.inputs.group }}" ]; then - GROUP_OPT="-g ${{ github.event.inputs.group }}" - fi - - supersetbot bump-python \ - --verbose \ - --use-current-repo \ - --include-subpackages \ 
- --limit ${{ github.event.inputs.limit }} \ - $PACKAGE_OPT \ - $GROUP_OPT diff --git a/.github/workflows/caches.js b/.github/workflows/caches.js deleted file mode 100644 index 66fd7ee95933..000000000000 --- a/.github/workflows/caches.js +++ /dev/null @@ -1,58 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -// always use absolute directory -const workspaceDirectory = process.env.GITHUB_WORKSPACE; -const homeDirectory = process.env.HOME; - -const assetsConfig = { - path: [`${workspaceDirectory}/superset/static/assets`], - hashFiles: [ - `${workspaceDirectory}/superset-frontend/src/**/*`, - `${workspaceDirectory}/superset-frontend/packages/**/*`, - `${workspaceDirectory}/superset-frontend/plugins/**/*`, - `${workspaceDirectory}/superset-frontend/*.js`, - `${workspaceDirectory}/superset-frontend/*.json`, - ], - // dont use restore keys as it may give an invalid older build - restoreKeys: '', -}; - -// Multi-layer cache definition -module.exports = { - pip: { - path: [`${homeDirectory}/.cache/pip`], - hashFiles: [`${workspaceDirectory}/requirements/*.txt`], - }, - npm: { - path: [`${homeDirectory}/.npm`], - hashFiles: [`${workspaceDirectory}/superset-frontend/package-lock.json`], - }, - assets: assetsConfig, - // use separate cache for instrumented JS files and regular assets - // one is built with `npm run build`, - // another is built with `npm run build-instrumented` - 'instrumented-assets': assetsConfig, - cypress: { - path: [`${homeDirectory}/.cache/Cypress`], - hashFiles: [ - `${workspaceDirectory}/superset-frontend/cypress-base/package-lock.json`, - ], - }, -}; diff --git a/.github/workflows/cancel_duplicates.yml b/.github/workflows/cancel_duplicates.yml deleted file mode 100644 index a749a2add605..000000000000 --- a/.github/workflows/cancel_duplicates.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Cancel Duplicates -on: - workflow_run: - workflows: - - "Miscellaneous" - types: - - requested - -jobs: - cancel-duplicate-runs: - name: Cancel duplicate workflow runs - runs-on: ubuntu-22.04 - permissions: - actions: write - contents: read - steps: - - name: Check number of queued tasks - id: check_queued - env: - GITHUB_TOKEN: ${{ github.token }} - GITHUB_REPO: ${{ github.repository }} - run: | - get_count() { - echo $(curl -s -H "Authorization: token $GITHUB_TOKEN" 
\ - "https://api.github.com/repos/$GITHUB_REPO/actions/runs?status=$1" | \ - jq ".total_count") - } - count=$(( `get_count queued` + `get_count in_progress` )) - echo "Found $count unfinished jobs." - echo "count=$count" >> $GITHUB_OUTPUT - - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - if: steps.check_queued.outputs.count >= 20 - uses: actions/checkout@v4 - - - name: Cancel duplicate workflow runs - if: steps.check_queued.outputs.count >= 20 - env: - GITHUB_TOKEN: ${{ github.token }} - GITHUB_REPOSITORY: ${{ github.repository }} - run: | - pip install click requests typing_extensions python-dateutil - python ./scripts/cancel_github_workflows.py diff --git a/.github/workflows/check_db_migration_confict.yml b/.github/workflows/check_db_migration_confict.yml deleted file mode 100644 index af291becde6f..000000000000 --- a/.github/workflows/check_db_migration_confict.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: Check DB migration conflict -on: - push: - paths: - - "superset/migrations/**" - branches: - - "master" - - "[0-9].[0-9]*" - pull_request: - paths: - - "superset/migrations/**" - types: [synchronize, opened, reopened, ready_for_review] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - check_db_migration_conflict: - name: Check DB migration conflict - runs-on: ubuntu-22.04 - permissions: - contents: read - pull-requests: write - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - - name: Check and notify - uses: actions/github-script@v7 - with: - github-token: ${{ github.token }} - script: | - // API reference: https://octokit.github.io/rest.js - const currentBranch = context.ref.replace('refs/heads/', ''); - - // Find all pull requests to current branch - const opts = github.rest.pulls.list.endpoint.merge({ - owner: context.repo.owner, - repo: context.repo.repo, - base: 
context.ref, - state: 'open', - sort: 'updated', - per_page: 100, - }); - const pulls = await github.paginate(opts); - if (pulls.length > 0) { - console.log(`Found ${pulls.length} open PRs for base branch "${currentBranch}"`) - } - - for (const pull of pulls) { - const listFilesOpts = await github.rest.pulls.listFiles.endpoint.merge({ - owner: context.repo.owner, - repo: context.repo.repo, - pull_number: pull.number, - }); - const files = await github.paginate(listFilesOpts); - if ( - files.some(x => x.contents_url.includes('/contents/superset/migrations')) - ) { - console.log(`PR #${pull.number} "${pull.title}" also added db migration`) - await github.rest.issues.createComment({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: pull.number, - body: - `# 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️ 🙅‍♂️` + - `❗ @${pull.user.login} Your base branch \`${currentBranch}\` has ` + - 'also updated `superset/migrations`.\n' + - '\n' + - '**Please consider rebasing your branch and [resolving potential db migration conflicts](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#merging-db-migrations).**', - }); - } - } diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index d9d5abdbdb56..000000000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: "CodeQL" - -on: - push: - branches: ["master", "[0-9].[0-9]*"] - pull_request: - # The branches below must be a subset of the branches above - branches: ["master"] - schedule: - - cron: "0 4 * * *" - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-22.04 - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - 
matrix: - language: ["python", "javascript"] - # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v3 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - - # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs - # queries: security-extended,security-and-quality - - - name: Perform CodeQL Analysis - if: steps.check.outputs.python || steps.check.outputs.frontend - uses: github/codeql-action/analyze@v3 - with: - category: "/language:${{matrix.language}}" diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml deleted file mode 100644 index 773e7358345f..000000000000 --- a/.github/workflows/dependency-review.yml +++ /dev/null @@ -1,35 +0,0 @@ -# Dependency Review Action -# -# This Action will scan dependency manifest files that change as part of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging. 
-# -# Source repository: https://github.com/actions/dependency-review-action -# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement -name: "Dependency Review" -on: [pull_request] - -permissions: - contents: read - -jobs: - dependency-review: - runs-on: ubuntu-22.04 - steps: - - name: "Checkout Repository" - uses: actions/checkout@v4 - - name: "Dependency Review" - uses: actions/dependency-review-action@v4 - with: - fail-on-severity: critical - # compatible/incompatible licenses addressed here: https://www.apache.org/legal/resolved.html - # find SPDX identifiers here: https://spdx.org/licenses/ - deny-licenses: MS-LPL, BUSL-1.1, QPL-1.0, Sleepycat, SSPL-1.0, CPOL-1.02, AGPL-3.0, GPL-1.0+, BSD-4-Clause-UC, NPL-1.0, NPL-1.1, JSON - # pkg:npm/store2@2.14.2 - # adding an exception for an ambigious license on store2, which has been resolved in - # the latest version. 
It's MIT: https://github.com/nbubna/store/blob/master/LICENSE-MIT - # pkg:npm/applitools/* - # adding exception for all applitools modules (eyes-cypress and its dependencies), - # which has an explicit OSS license approved by ASF - # license: https://applitools.com/legal/open-source-terms-of-use/ - # pkg:npm/node-forge@1.3.1 - # selecting BSD-3-Clause licensing terms for node-forge to ensure compatibility with Apache - allow-dependencies-licenses: pkg:npm/store2@2.14.2, pkg:npm/applitools/core, pkg:npm/applitools/core-base, pkg:npm/applitools/css-tree, pkg:npm/applitools/ec-client, pkg:npm/applitools/eg-socks5-proxy-server, pkg:npm/applitools/eyes, pkg:npm/applitools/eyes-cypress, pkg:npm/applitools/nml-client, pkg:npm/applitools/tunnel-client, pkg:npm/applitools/utils, pkg:npm/node-forge@1.3.1 diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml deleted file mode 100644 index c8c4756ea543..000000000000 --- a/.github/workflows/docker.yml +++ /dev/null @@ -1,90 +0,0 @@ -name: Build & publish docker images - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - pull_request: - branches: - - "master" - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - setup_matrix: - runs-on: ubuntu-22.04 - outputs: - matrix_config: ${{ steps.set_matrix.outputs.matrix_config }} - steps: - - id: set_matrix - run: | - MATRIX_CONFIG=$(if [ "${{ github.event_name }}" == "pull_request" ]; then echo '["dev"]'; else echo '["dev", "lean", "py310", "websocket", "dockerize"]'; fi) - echo "matrix_config=${MATRIX_CONFIG}" >> $GITHUB_OUTPUT - echo $GITHUB_OUTPUT - - docker-build: - name: docker-build - needs: setup_matrix - runs-on: ubuntu-22.04 - strategy: - matrix: - build_preset: ${{fromJson(needs.setup_matrix.outputs.matrix_config)}} - fail-fast: false - env: - DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} - DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} - - steps: - - - name: 
"Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - - name: Set up QEMU - if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker - uses: docker/setup-buildx-action@v3 - - - name: Try to login to DockerHub - if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker - continue-on-error: true - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USER }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Setup supersetbot - if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker - uses: ./.github/actions/setup-supersetbot/ - - - name: Build Docker Image - if: steps.check.outputs.python || steps.check.outputs.frontend || steps.check.outputs.docker - shell: bash - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - # Single platform builds in pull_request context to speed things up - if [ "${{ github.event_name }}" = "push" ]; then - PLATFORM_ARG="--platform linux/arm64 --platform linux/amd64" - elif [ "${{ github.event_name }}" = "pull_request" ]; then - PLATFORM_ARG="--platform linux/amd64" - fi - - supersetbot docker \ - --preset ${{ matrix.build_preset }} \ - --context "$EVENT" \ - --context-ref "$RELEASE" $FORCE_LATEST \ - $PLATFORM_ARG diff --git a/.github/workflows/ecs-task-definition.json b/.github/workflows/ecs-task-definition.json deleted file mode 100644 index 7ef503681d60..000000000000 --- a/.github/workflows/ecs-task-definition.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "containerDefinitions": [ - { - "name": "superset-ci", - "image": "apache/superset:latest", - 
"cpu": 0, - "links": [], - "portMappings": [ - { - "containerPort": 8080, - "hostPort": 8080, - "protocol": "tcp" - } - ], - "essential": true, - "entryPoint": [], - "command": [], - "environment": [ - { - "name": "SUPERSET_LOAD_EXAMPLES", - "value": "yes" - }, - { - "name": "SUPERSET_PORT", - "value": "8080" - }, - { - "name": "SUPERSET_SECRET_KEY", - "value": "super-secret-for-ephemerals" - }, - { - "name": "TALISMAN_ENABLED", - "value": "False" - } - ], - "mountPoints": [], - "volumesFrom": [], - "logConfiguration": { - "logDriver": "awslogs", - "options": { - "awslogs-group": "/ecs/superset-ci", - "awslogs-region": "us-west-2", - "awslogs-stream-prefix": "ecs" - } - } - } - ], - "family": "superset-ci", - "taskRoleArn": "ecsTaskExecutionRole", - "executionRoleArn": "ecsTaskExecutionRole", - "networkMode": "awsvpc", - "volumes": [], - "placementConstraints": [], - "requiresCompatibilities": [ - "FARGATE" - ], - "cpu": "512", - "memory": "1024" -} diff --git a/.github/workflows/embedded-sdk-release.yml b/.github/workflows/embedded-sdk-release.yml deleted file mode 100644 index b0c75343824d..000000000000 --- a/.github/workflows/embedded-sdk-release.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Embedded SDK Release - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - -jobs: - config: - runs-on: "ubuntu-22.04" - outputs: - has-secrets: ${{ steps.check.outputs.has-secrets }} - steps: - - name: "Check for secrets" - id: check - shell: bash - run: | - if [ -n "${{ (secrets.NPM_TOKEN != '') || '' }}" ]; then - echo "has-secrets=1" >> "$GITHUB_OUTPUT" - fi - - build: - needs: config - if: needs.config.outputs.has-secrets - runs-on: ubuntu-22.04 - defaults: - run: - working-directory: superset-embedded-sdk - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 - with: - node-version: "18" - registry-url: 'https://registry.npmjs.org' - - run: npm ci - - run: npm run ci:release - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git 
a/.github/workflows/embedded-sdk-test.yml b/.github/workflows/embedded-sdk-test.yml deleted file mode 100644 index 50058d0af75f..000000000000 --- a/.github/workflows/embedded-sdk-test.yml +++ /dev/null @@ -1,28 +0,0 @@ -name: Embedded SDK PR Checks - -on: - pull_request: - paths: - - "superset-embedded-sdk/**" - types: [synchronize, opened, reopened, ready_for_review] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - embedded-sdk-test: - runs-on: ubuntu-22.04 - defaults: - run: - working-directory: superset-embedded-sdk - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 - with: - node-version: "18" - registry-url: 'https://registry.npmjs.org' - - run: npm ci - - run: npm test - - run: npm run build diff --git a/.github/workflows/ephemeral-env-pr-close.yml b/.github/workflows/ephemeral-env-pr-close.yml deleted file mode 100644 index 5fc634f6cdf7..000000000000 --- a/.github/workflows/ephemeral-env-pr-close.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: Cleanup ephemeral envs (PR close) - -on: - pull_request_target: - types: [closed] - -jobs: - config: - runs-on: "ubuntu-22.04" - outputs: - has-secrets: ${{ steps.check.outputs.has-secrets }} - steps: - - name: "Check for secrets" - id: check - shell: bash - run: | - if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then - echo "has-secrets=1" >> "$GITHUB_OUTPUT" - fi - - ephemeral-env-cleanup: - needs: config - if: needs.config.outputs.has-secrets - name: Cleanup ephemeral envs - runs-on: ubuntu-22.04 - permissions: - pull-requests: write - steps: - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: us-west-2 - - - name: Describe ECS service - id: 
describe-services - run: | - echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT - - - name: Delete ECS service - if: steps.describe-services.outputs.active == 'true' - id: delete-service - run: | - aws ecs delete-service \ - --cluster superset-ci \ - --service pr-${{ github.event.number }}-service \ - --force - - - name: Login to Amazon ECR - if: steps.describe-services.outputs.active == 'true' - id: login-ecr - uses: aws-actions/amazon-ecr-login@v2 - - - name: Delete ECR image tag - if: steps.describe-services.outputs.active == 'true' - id: delete-image-tag - run: | - aws ecr batch-delete-image \ - --registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \ - --repository-name superset-ci \ - --image-ids imageTag=pr-${{ github.event.number }} - - - name: Comment (success) - if: steps.describe-services.outputs.active == 'true' - uses: actions/github-script@v7 - with: - github-token: ${{github.token}} - script: | - github.rest.issues.createComment({ - issue_number: ${{ github.event.number }}, - owner: context.repo.owner, - repo: context.repo.repo, - body: 'Ephemeral environment shutdown and build artifacts deleted.' 
- }) diff --git a/.github/workflows/ephemeral-env.yml b/.github/workflows/ephemeral-env.yml deleted file mode 100644 index 1cd80282ec24..000000000000 --- a/.github/workflows/ephemeral-env.yml +++ /dev/null @@ -1,283 +0,0 @@ -name: Ephemeral env workflow - -on: - issue_comment: - types: [created] - -jobs: - config: - runs-on: "ubuntu-22.04" - if: github.event.issue.pull_request - outputs: - has-secrets: ${{ steps.check.outputs.has-secrets }} - steps: - - name: "Check for secrets" - id: check - shell: bash - run: | - if [ -n "${{ (secrets.AWS_ACCESS_KEY_ID != '' && secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then - echo "has-secrets=1" >> "$GITHUB_OUTPUT" - fi - - ephemeral-env-comment: - concurrency: - group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-comment - cancel-in-progress: true - needs: config - if: needs.config.outputs.has-secrets - name: Evaluate ephemeral env comment trigger (/testenv) - runs-on: ubuntu-22.04 - permissions: - pull-requests: write - outputs: - slash-command: ${{ steps.eval-body.outputs.result }} - feature-flags: ${{ steps.eval-feature-flags.outputs.result }} - - steps: - - name: Debug - run: | - echo "Comment on PR #${{ github.event.issue.number }} by ${{ github.event.issue.user.login }}, ${{ github.event.comment.author_association }}" - - - name: Eval comment body for /testenv slash command - uses: actions/github-script@v7 - id: eval-body - with: - result-encoding: string - script: | - const pattern = /^\/testenv (up|down)/ - const result = pattern.exec(context.payload.comment.body) - return result === null ? 
'noop' : result[1] - - - name: Eval comment body for feature flags - uses: actions/github-script@v7 - id: eval-feature-flags - with: - script: | - const pattern = /FEATURE_(\w+)=(\w+)/g; - let results = []; - [...context.payload.comment.body.matchAll(pattern)].forEach(match => { - const config = { - name: `SUPERSET_FEATURE_${match[1]}`, - value: match[2], - }; - results.push(config); - }); - return results; - - - name: Limit to committers - if: > - steps.eval-body.outputs.result != 'noop' && - github.event.comment.author_association != 'MEMBER' && - github.event.comment.author_association != 'OWNER' - uses: actions/github-script@v7 - with: - github-token: ${{github.token}} - script: | - const errMsg = '@${{ github.event.comment.user.login }} Ephemeral environment creation is currently limited to committers.' - github.rest.issues.createComment({ - issue_number: ${{ github.event.issue.number }}, - owner: context.repo.owner, - repo: context.repo.repo, - body: errMsg - }) - core.setFailed(errMsg) - - ephemeral-docker-build: - concurrency: - group: ${{ github.workflow }}-${{ github.event.issue.number || github.run_id }}-build - cancel-in-progress: true - needs: ephemeral-env-comment - name: ephemeral-docker-build - runs-on: ubuntu-22.04 - steps: - - name: Get Info from comment - uses: actions/github-script@v7 - id: get-pr-info - with: - script: | - const request = { - owner: context.repo.owner, - repo: context.repo.repo, - pull_number: ${{ github.event.issue.number }}, - } - core.info(`Getting PR #${request.pull_number} from ${request.owner}/${request.repo}`) - const pr = await github.rest.pulls.get(request); - return pr.data; - - - name: Debug - id: get-sha - run: | - echo "sha=${{ fromJSON(steps.get-pr-info.outputs.result).head.sha }}" >> $GITHUB_OUTPUT - - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} : ${{steps.get-sha.outputs.sha}} )" - uses: actions/checkout@v4 - with: - ref: ${{ steps.get-sha.outputs.sha }} - persist-credentials: false - - - name: Set 
up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Build ephemeral env image - run: | - ./scripts/build_docker.py \ - "ci" \ - "pull_request" \ - --build_context_ref ${{ github.event.issue.number }} - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: us-west-2 - - - name: Login to Amazon ECR - id: login-ecr - uses: aws-actions/amazon-ecr-login@v2 - - - name: Load, tag and push image to ECR - id: push-image - env: - ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} - ECR_REPOSITORY: superset-ci - IMAGE_TAG: apache/superset:${{ steps.get-sha.outputs.sha }}-ci - run: | - docker tag $IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:pr-${{ github.event.issue.number }}-ci - docker push -a $ECR_REGISTRY/$ECR_REPOSITORY - - ephemeral-env-up: - needs: [ephemeral-env-comment, ephemeral-docker-build] - if: needs.ephemeral-env-comment.outputs.slash-command == 'up' - name: Spin up an ephemeral environment - runs-on: ubuntu-22.04 - permissions: - contents: read - pull-requests: write - - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v4 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: us-west-2 - - - name: Login to Amazon ECR - id: login-ecr - uses: aws-actions/amazon-ecr-login@v2 - - - name: Check target image exists in ECR - id: check-image - continue-on-error: true - run: | - aws ecr describe-images \ - --registry-id $(echo "${{ steps.login-ecr.outputs.registry }}" | grep -Eo "^[0-9]+") \ - --repository-name superset-ci \ - --image-ids imageTag=pr-${{ github.event.issue.number }}-ci - - - name: Fail on missing container image - if: 
steps.check-image.outcome == 'failure' - uses: actions/github-script@v7 - with: - github-token: ${{github.token}} - script: | - const errMsg = '@${{ github.event.comment.user.login }} Container image not yet published for this PR. Please try again when build is complete.' - github.rest.issues.createComment({ - issue_number: ${{ github.event.issue.number }}, - owner: context.repo.owner, - repo: context.repo.repo, - body: errMsg - }) - core.setFailed(errMsg) - - - name: Fill in the new image ID in the Amazon ECS task definition - id: task-def - uses: aws-actions/amazon-ecs-render-task-definition@v1 - with: - task-definition: .github/workflows/ecs-task-definition.json - container-name: superset-ci - image: ${{ steps.login-ecr.outputs.registry }}/superset-ci:pr-${{ github.event.issue.number }}-ci - - - name: Update env vars in the Amazon ECS task definition - run: | - cat <<< "$(jq '.containerDefinitions[0].environment += ${{ needs.ephemeral-env-comment.outputs.feature-flags }}' < ${{ steps.task-def.outputs.task-definition }})" > ${{ steps.task-def.outputs.task-definition }} - - - name: Describe ECS service - id: describe-services - run: | - echo "active=$(aws ecs describe-services --cluster superset-ci --services pr-${{ github.event.issue.number }}-service | jq '.services[] | select(.status == "ACTIVE") | any')" >> $GITHUB_OUTPUT - - name: Create ECS service - if: steps.describe-services.outputs.active != 'true' - id: create-service - env: - ECR_SUBNETS: subnet-0e15a5034b4121710,subnet-0e8efef4a72224974 - ECR_SECURITY_GROUP: sg-092ff3a6ae0574d91 - run: | - aws ecs create-service \ - --cluster superset-ci \ - --service-name pr-${{ github.event.issue.number }}-service \ - --task-definition superset-ci \ - --launch-type FARGATE \ - --desired-count 1 \ - --platform-version LATEST \ - --network-configuration "awsvpcConfiguration={subnets=[$ECR_SUBNETS],securityGroups=[$ECR_SECURITY_GROUP],assignPublicIp=ENABLED}" \ - --tags key=pr,value=${{ github.event.issue.number }} 
key=github_user,value=${{ github.actor }} - - - name: Deploy Amazon ECS task definition - id: deploy-task - uses: aws-actions/amazon-ecs-deploy-task-definition@v1 - with: - task-definition: ${{ steps.task-def.outputs.task-definition }} - service: pr-${{ github.event.issue.number }}-service - cluster: superset-ci - wait-for-service-stability: true - wait-for-minutes: 10 - - - name: List tasks - id: list-tasks - run: | - echo "task=$(aws ecs list-tasks --cluster superset-ci --service-name pr-${{ github.event.issue.number }}-service | jq '.taskArns | first')" >> $GITHUB_OUTPUT - - - name: Get network interface - id: get-eni - run: | - echo "eni=$(aws ecs describe-tasks --cluster superset-ci --tasks ${{ steps.list-tasks.outputs.task }} | jq '.tasks | .[0] | .attachments | .[0] | .details | map(select(.name=="networkInterfaceId")) | .[0] | .value')" >> $GITHUB_OUTPUT - - - name: Get public IP - id: get-ip - run: | - echo "ip=$(aws ec2 describe-network-interfaces --network-interface-ids ${{ steps.get-eni.outputs.eni }} | jq -r '.NetworkInterfaces | first | .Association.PublicIp')" >> $GITHUB_OUTPUT - - - name: Comment (success) - if: ${{ success() }} - uses: actions/github-script@v7 - with: - github-token: ${{github.token}} - script: | - github.rest.issues.createComment({ - issue_number: ${{ github.event.issue.number }}, - owner: context.repo.owner, - repo: context.repo.repo, - body: '@${{ github.event.comment.user.login }} Ephemeral environment spinning up at http://${{ steps.get-ip.outputs.ip }}:8080. Credentials are `admin`/`admin`. Please allow several minutes for bootstrapping and startup.' - }) - - - name: Comment (failure) - if: ${{ failure() }} - uses: actions/github-script@v7 - with: - github-token: ${{github.token}} - script: | - github.rest.issues.createComment({ - issue_number: ${{ github.event.issue.number }}, - owner: context.repo.owner, - repo: context.repo.repo, - body: '@${{ github.event.comment.user.login }} Ephemeral environment creation failed. 
Please check the Actions logs for details.' - }) diff --git a/.github/workflows/generate-FOSSA-report.yml b/.github/workflows/generate-FOSSA-report.yml deleted file mode 100644 index 807c8ea40fc2..000000000000 --- a/.github/workflows/generate-FOSSA-report.yml +++ /dev/null @@ -1,65 +0,0 @@ -name: Generate FOSSA report - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - -jobs: - config: - runs-on: "ubuntu-22.04" - outputs: - has-secrets: ${{ steps.check.outputs.has-secrets }} - steps: - - name: "Check for secrets" - id: check - shell: bash - run: | - if [ -n "${{ (secrets.FOSSA_API_KEY != '' ) || '' }}" ]; then - echo "has-secrets=1" >> "$GITHUB_OUTPUT" - fi - - license_check: - needs: config - if: needs.config.outputs.has-secrets - name: Generate Report - runs-on: ubuntu-22.04 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Setup Java - uses: actions/setup-java@v4 - with: - distribution: "temurin" - java-version: "11" - - name: Generate fossa report - env: - FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }} - run: | - set -eo pipefail - if [[ "${{github.event_name}}" != "pull_request" ]]; then - ./scripts/fossa.sh - exit 0 - fi - - URL="https://api.github.com/repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files" - FILES=$(curl -s -X GET -G $URL | jq -r '.[] | .filename') - - cat<<EOF - CHANGED FILES: - $FILES - - EOF - - if [[ "${FILES}" =~ (.*package*\.json|requirements\/[a-z_-]+\.txt|setup\.py) ]]; then - echo "Detected dependency changes... running fossa check" - - ./scripts/fossa.sh - else - echo "No dependency changes... 
skiping fossa check" - fi - shell: bash diff --git a/.github/workflows/github-action-validator.sh b/.github/workflows/github-action-validator.sh deleted file mode 100755 index 7904a40259cc..000000000000 --- a/.github/workflows/github-action-validator.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -# Inspired from https://github.com/mpalmer/action-validator?tab=readme-ov-file#pre-commit-hook-example -echo "Running pre-commit hook for GitHub Actions: https://github.com/mpalmer/action-validator" - for action in $(git ls-files .github/ | grep -E '^\.github/(workflows|actions)/.*\.ya?ml$'); do - if action-validator "$action"; then - echo "✅ $action" - else - echo "❌ $action" - exit 1 - fi -done diff --git a/.github/workflows/github-action-validator.yml b/.github/workflows/github-action-validator.yml deleted file mode 100644 index 5acc5e0880e1..000000000000 --- a/.github/workflows/github-action-validator.yml +++ /dev/null @@ -1,28 +0,0 @@ -name: Validate All GitHub Actions - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - pull_request: - types: [synchronize, opened, reopened, ready_for_review] - -jobs: - - validate-all-ghas: - runs-on: ubuntu-22.04 - steps: - - name: Checkout Repository - uses: actions/checkout@v4 - - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: '18' - - - name: Install Dependencies - run: npm install -g @action-validator/core @action-validator/cli --save-dev - - - name: Run Script - run: bash .github/workflows/github-action-validator.sh diff --git a/.github/workflows/issue_creation.yml b/.github/workflows/issue_creation.yml deleted file mode 100644 index fc3b3e45bdf7..000000000000 --- a/.github/workflows/issue_creation.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: supersetbot orglabel based on author - -on: - issues: - types: [created, edited] - - pull_request: - types: [created, edited] - -jobs: - superbot-orglabel: - runs-on: ubuntu-22.04 - permissions: - contents: read - pull-requests: write - issues: write - steps: - - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Setup supersetbot - uses: ./.github/actions/setup-supersetbot/ - - - name: Execute supersetbot orglabel command - env: - GITHUB_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} - run: | - # Label the issue with the appropriate org using supersetbot - # - this requires for the author to be publicly associated with their org - # - and for the org to be listed in `supersetbot/src/metadata.js` - supersetbot orglabel --issue ${{ github.event.number }} --repo ${{ github.repository }} || true diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml deleted file mode 100644 index 5af67547f982..000000000000 --- a/.github/workflows/labeler.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: "Pull Request Labeler" -on: -- pull_request_target - -jobs: - labeler: - permissions: - contents: read - pull-requests: write - runs-on: ubuntu-22.04 - steps: - - uses: actions/labeler@v5 - with: - sync-labels: true - - # TODO: run scripts based on labels! - # - id: run-translation-scripts - # if: contains(steps.label-the-PR.outputs.all-labels, 'i18n') - # run: | - # echo "Running translation scripts" - # # Generate .pot -> .po -> .json files diff --git a/.github/workflows/latest-release-tag.yml b/.github/workflows/latest-release-tag.yml deleted file mode 100644 index 659214af9e09..000000000000 --- a/.github/workflows/latest-release-tag.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: Tags -on: - release: - types: [published] # This makes it run only when a new released is published - -jobs: - latest-release: - name: Add/update tag to new release - runs-on: ubuntu-22.04 - permissions: - contents: write - - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - - name: Check for latest tag - id: latest-tag - run: | - source ./scripts/tag_latest_release.sh $(echo ${{ github.event.release.tag_name }}) --dry-run - - - name: Configure Git - run: | - git config user.name "$GITHUB_ACTOR" - git config user.email "$GITHUB_ACTOR@users.noreply.github.com" - - - name: Run latest-tag - uses: ./.github/actions/latest-tag - if: (! 
${{ steps.latest-tag.outputs.SKIP_TAG }} ) - with: - description: Superset latest release - tag-name: latest - env: - GITHUB_TOKEN: ${{ github.token }} diff --git a/.github/workflows/license-check.yml b/.github/workflows/license-check.yml deleted file mode 100644 index 8974c5ae43ff..000000000000 --- a/.github/workflows/license-check.yml +++ /dev/null @@ -1,28 +0,0 @@ -name: License Template Check - -on: - pull_request: - types: [synchronize, opened, reopened, ready_for_review] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - license_check: - name: License Check - runs-on: ubuntu-22.04 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Setup Java - uses: actions/setup-java@v4 - with: - distribution: 'temurin' - java-version: '11' - - name: Run license check - run: ./scripts/check_license.sh diff --git a/.github/workflows/no-hold-label.yml b/.github/workflows/no-hold-label.yml deleted file mode 100644 index 866650ece4c1..000000000000 --- a/.github/workflows/no-hold-label.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: Hold Label Check - -on: - pull_request: - types: [labeled, unlabeled, opened, reopened, synchronize] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - check-hold-label: - runs-on: ubuntu-22.04 - steps: - - name: Check for 'hold' label - uses: actions/github-script@v7 - with: - github-token: ${{secrets.GITHUB_TOKEN}} - script: | - const payload = context.payload.pull_request - const holdLabelPresent = !!payload.labels.find(label => label.name.includes('hold')) - if (holdLabelPresent) { - core.setFailed('Hold label is present, merge is blocked.') - } diff --git 
a/.github/workflows/pr-lint.yml b/.github/workflows/pr-lint.yml deleted file mode 100644 index 5ba91fee6ebf..000000000000 --- a/.github/workflows/pr-lint.yml +++ /dev/null @@ -1,31 +0,0 @@ -name: PR Lint - -on: - pull_request: - # By default, a workflow only runs when a pull_request's activity type is opened, synchronize, or reopened. We - # explicity override here so that PR titles are re-linted when the PR text content is edited. - # - # Possible values: https://help.github.com/en/actions/reference/events-that-trigger-workflows#pull-request-event-pull_request - types: [opened, edited, reopened, synchronize] - -jobs: - lint-check: - runs-on: ubuntu-22.04 - permissions: - contents: read - pull-requests: write - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - uses: ./.github/actions/pr-lint-action - with: - title-regex: "^(build|chore|ci|docs|feat|fix|perf|refactor|style|test|other)(\\(.+\\))?(\\!)?:\\s.+" - on-failed-regex-fail-action: true - on-failed-regex-request-changes: false - on-failed-regex-create-review: false - on-failed-regex-comment: - "Please format your PR title to match: `%regex%`!" 
- repo-token: "${{ github.token }}" diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml deleted file mode 100644 index af6765019250..000000000000 --- a/.github/workflows/pre-commit.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: pre-commit checks - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - pull_request: - types: [synchronize, opened, reopened, ready_for_review] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - pre-commit: - runs-on: ubuntu-22.04 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Setup Python - uses: ./.github/actions/setup-backend/ - - name: Enable brew and helm-docs - # Add brew to the path - see https://github.com/actions/runner-images/issues/6283 - run: | - echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH - eval "$(/home/linuxbrew/.linuxbrew/bin/brew shellenv)" - echo "HOMEBREW_PREFIX=$HOMEBREW_PREFIX" >>"${GITHUB_ENV}" - echo "HOMEBREW_CELLAR=$HOMEBREW_CELLAR" >>"${GITHUB_ENV}" - echo "HOMEBREW_REPOSITORY=$HOMEBREW_REPOSITORY" >>"${GITHUB_ENV}" - brew install norwoodj/tap/helm-docs - - name: pre-commit - run: | - if ! 
pre-commit run --all-files; then - git status - git diff - exit 1 - fi diff --git a/.github/workflows/prefer-typescript.yml b/.github/workflows/prefer-typescript.yml deleted file mode 100644 index 4739ae8b6bf8..000000000000 --- a/.github/workflows/prefer-typescript.yml +++ /dev/null @@ -1,70 +0,0 @@ -name: Prefer TypeScript - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - paths: - - "superset-frontend/src/**" - pull_request: - types: [synchronize, opened, reopened, ready_for_review] - paths: - - "superset-frontend/src/**" - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - prefer_typescript: - if: github.ref == 'ref/heads/master' && github.event_name == 'pull_request' - name: Prefer TypeScript - runs-on: ubuntu-22.04 - permissions: - contents: read - pull-requests: write - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Get changed files - id: changed - uses: ./.github/actions/file-changes-action - with: - githubToken: ${{ github.token }} - - - name: Determine if a .js or .jsx file was added - id: check - run: | - js_files_added() { - jq -r ' - map( - select( - endswith(".js") or endswith(".jsx") - ) - ) | join("\n") - ' ${HOME}/files_added.json - } - echo "js_files_added=$(js_files_added)" >> $GITHUB_OUTPUT - - - if: steps.check.outputs.js_files_added - name: Add Comment to PR - uses: ./.github/actions/comment-on-pr - continue-on-error: true - env: - GITHUB_TOKEN: ${{ github.token }} - with: - msg: | - ### WARNING: Prefer TypeScript - - Looks like your PR contains new `.js` or `.jsx` files: - - ``` - ${{steps.check.outputs.js_files_added}} - ``` - - As decided in [SIP-36](https://github.com/apache/superset/issues/9101), all new frontend code should be written in TypeScript. 
Please convert above files to TypeScript then re-request review. diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index 4435054a5c7a..000000000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,114 +0,0 @@ -name: release-workflow - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - -jobs: - config: - runs-on: "ubuntu-22.04" - outputs: - has-secrets: ${{ steps.check.outputs.has-secrets }} - steps: - - name: "Check for secrets" - id: check - shell: bash - run: | - if [ -n "${{ (secrets.NPM_TOKEN != '' && secrets.GH_PERSONAL_ACCESS_TOKEN != '') || '' }}" ]; then - echo "has-secrets=1" >> "$GITHUB_OUTPUT" - fi - - build: - needs: config - if: needs.config.outputs.has-secrets - name: Bump version and publish package(s) - - runs-on: ubuntu-22.04 - - strategy: - matrix: - node-version: [18] - - steps: - - uses: actions/checkout@v4 - with: - # pulls all commits (needed for lerna / semantic release to correctly version) - fetch-depth: 0 - - name: Get tags and filter trigger tags - run: | - if ! 
git fetch --depth=1 origin "+refs/tags/*:refs/tags/*"; then - echo "::notice title=Workflow skipped::No tags present in repository" - exit - fi - echo "HAS_TAGS=1" >> $GITHUB_ENV" - git fetch --prune --unshallow - git tag -d `git tag | grep -E '^trigger-'` - - - name: Use Node.js ${{ matrix.node-version }} - if: env.HAS_TAGS - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node-version }} - - - name: Cache npm - if: env.HAS_TAGS - uses: actions/cache@v4 - with: - path: ~/.npm # npm cache files are stored in `~/.npm` on Linux/macOS - key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.OS }}-node- - ${{ runner.OS }}- - - - name: Get npm cache directory path - if: env.HAS_TAGS - id: npm-cache-dir-path - run: echo "dir=$(npm config get cache)" >> $GITHUB_OUTPUT - - name: Cache npm - if: env.HAS_TAGS - uses: actions/cache@v4 - id: npm-cache # use this to check for `cache-hit` (`steps.npm-cache.outputs.cache-hit != 'true'`) - with: - path: ${{ steps.npm-cache-dir-path.outputs.dir }} - key: ${{ runner.os }}-npm-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-npm- - - - name: Install dependencies - if: env.HAS_TAGS - working-directory: ./superset-frontend - run: npm ci - - name: Run unit tests - if: env.HAS_TAGS - working-directory: ./superset-frontend - run: npm run test -- plugins packages - - name: Build packages - if: env.HAS_TAGS - working-directory: ./superset-frontend - run: npm run plugins:build - - - name: Configure npm and git - if: env.HAS_TAGS - run: | - echo "@superset-ui:registry=https://registry.npmjs.org/" > .npmrc - echo "registry=https://registry.npmjs.org/" >> .npmrc - echo "//registry.npmjs.org/:_authToken=\${NPM_TOKEN}" >> $HOME/.npmrc 2> /dev/null - npm whoami - git config --local user.email "action@github.com" - git config --local user.name "GitHub Action" - env: - NPM_TOKEN: ${{ secrets.NPM_TOKEN }} - GITHUB_TOKEN: ${{ github.token }} - - - name: Bump 
version and publish package(s) - if: env.HAS_TAGS - working-directory: ./superset-frontend - run: | - git tag -d `git tag | grep -E '^trigger-'` - npm run plugins:release-from-tag - env: - NPM_TOKEN: ${{ secrets.NPM_TOKEN }} - GITHUB_TOKEN: ${{ github.token }} - GH_TOKEN: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }} diff --git a/.github/workflows/superset-applitool-cypress.yml b/.github/workflows/superset-applitool-cypress.yml deleted file mode 100644 index 72fd1a734321..000000000000 --- a/.github/workflows/superset-applitool-cypress.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: Applitools Cypress - -on: - schedule: - - cron: "0 1 * * *" - -jobs: - config: - runs-on: "ubuntu-22.04" - outputs: - has-secrets: ${{ steps.check.outputs.has-secrets }} - steps: - - name: "Check for secrets" - id: check - shell: bash - run: | - if [ -n "${{ (secrets.APPLITOOLS_API_KEY != '' && secrets.APPLITOOLS_API_KEY != '') || '' }}" ]; then - echo "has-secrets=1" >> "$GITHUB_OUTPUT" - fi - - cypress-applitools: - needs: config - if: needs.config.outputs.has-secrets - runs-on: ubuntu-22.04 - strategy: - fail-fast: false - matrix: - browser: ["chrome"] - node: [18] - env: - SUPERSET_ENV: development - SUPERSET_CONFIG: tests.integration_tests.superset_test_config - SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset - PYTHONPATH: ${{ github.workspace }} - REDIS_PORT: 16379 - GITHUB_TOKEN: ${{ github.token }} - APPLITOOLS_APP_NAME: Superset - APPLITOOLS_API_KEY: ${{ secrets.APPLITOOLS_API_KEY }} - APPLITOOLS_BATCH_ID: ${{ github.sha }} - APPLITOOLS_BATCH_NAME: Superset Cypress - services: - postgres: - image: postgres:15-alpine - env: - POSTGRES_USER: superset - POSTGRES_PASSWORD: superset - ports: - - 15432:5432 - redis: - image: redis:7-alpine - ports: - - 16379:6379 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - ref: master - - name: 
Setup Python - uses: ./.github/actions/setup-backend/ - - name: Import test data - uses: ./.github/actions/cached-dependencies - with: - run: testdata - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node }} - - name: Install npm dependencies - uses: ./.github/actions/cached-dependencies - with: - run: npm-install - - name: Build javascript packages - uses: ./.github/actions/cached-dependencies - with: - run: build-instrumented-assets - - name: Setup Postgres - if: steps.check.outcome == 'failure' - uses: ./.github/actions/cached-dependencies - with: - run: setup-postgres - - name: Install cypress - uses: ./.github/actions/cached-dependencies - with: - run: cypress-install - - name: Run Cypress - uses: ./.github/actions/cached-dependencies - env: - CYPRESS_BROWSER: ${{ matrix.browser }} - with: - run: cypress-run-applitools diff --git a/.github/workflows/superset-applitools-storybook.yml b/.github/workflows/superset-applitools-storybook.yml deleted file mode 100644 index 5382120bc163..000000000000 --- a/.github/workflows/superset-applitools-storybook.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Applitools Storybook - -on: - schedule: - - cron: "0 0 * * *" - -env: - APPLITOOLS_APP_NAME: Superset - APPLITOOLS_API_KEY: ${{ secrets.APPLITOOLS_API_KEY }} - APPLITOOLS_BATCH_ID: ${{ github.sha }} - APPLITOOLS_BATCH_NAME: Superset Storybook - -jobs: - config: - runs-on: "ubuntu-22.04" - outputs: - has-secrets: ${{ steps.check.outputs.has-secrets }} - steps: - - name: "Check for secrets" - id: check - shell: bash - run: | - if [ -n "${{ (secrets.APPLITOOLS_API_KEY != '' && secrets.APPLITOOLS_API_KEY != '') || '' }}" ]; then - echo "has-secrets=1" >> "$GITHUB_OUTPUT" - fi - - cron: - needs: config - if: needs.config.outputs.has-secrets - runs-on: ubuntu-22.04 - strategy: - matrix: - node: [18] - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - 
submodules: recursive - ref: master - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ matrix.node }} - - name: Install eyes-storybook dependencies - uses: ./.github/actions/cached-dependencies - with: - run: eyes-storybook-dependencies - - name: Install NPM dependencies - uses: ./.github/actions/cached-dependencies - with: - run: npm-install - - name: Run Applitools Eyes-Storybook - working-directory: ./superset-frontend - run: npx eyes-storybook -u https://superset-storybook.netlify.app/ diff --git a/.github/workflows/superset-cli.yml b/.github/workflows/superset-cli.yml deleted file mode 100644 index 29dd87adfe17..000000000000 --- a/.github/workflows/superset-cli.yml +++ /dev/null @@ -1,67 +0,0 @@ -name: Superset CLI tests - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - pull_request: - types: [synchronize, opened, reopened, ready_for_review] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - test-load-examples: - runs-on: ubuntu-22.04 - env: - PYTHONPATH: ${{ github.workspace }} - SUPERSET_CONFIG: tests.integration_tests.superset_test_config - REDIS_PORT: 16379 - SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset - services: - postgres: - image: postgres:15-alpine - env: - POSTGRES_USER: superset - POSTGRES_PASSWORD: superset - ports: - # Use custom ports for services to avoid accidentally connecting to - # GitHub action runner's default installations - - 15432:5432 - redis: - image: redis:7-alpine - ports: - - 16379:6379 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup Python - if: 
steps.check.outputs.python - uses: ./.github/actions/setup-backend/ - - name: Setup Postgres - if: steps.check.outputs.python - uses: ./.github/actions/cached-dependencies - with: - run: setup-postgres - - name: superset init - if: steps.check.outputs.python - run: | - pip install -e . - superset db upgrade - superset load_test_users - - name: superset load_examples - if: steps.check.outputs.python - run: | - # load examples without test data - superset load_examples --load-big-data diff --git a/.github/workflows/superset-docs-deploy.yml b/.github/workflows/superset-docs-deploy.yml deleted file mode 100644 index 5ce2251ee526..000000000000 --- a/.github/workflows/superset-docs-deploy.yml +++ /dev/null @@ -1,75 +0,0 @@ -name: Docs Deployment - -on: - push: - paths: - - "docs/**" - - "README.md" - branches: - - "master" - - workflow_dispatch: {} - -jobs: - config: - runs-on: "ubuntu-22.04" - outputs: - has-secrets: ${{ steps.check.outputs.has-secrets }} - steps: - - name: "Check for secrets" - id: check - shell: bash - run: | - if [ -n "${{ (secrets.SUPERSET_SITE_BUILD != '' && secrets.SUPERSET_SITE_BUILD != '') || '' }}" ]; then - echo "has-secrets=1" >> "$GITHUB_OUTPUT" - fi - - build-deploy: - needs: config - if: needs.config.outputs.has-secrets - name: Build & Deploy - runs-on: "ubuntu-22.04" - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Set up Node.js 20 - uses: actions/setup-node@v4 - with: - node-version: '20' - - name: Setup Python - uses: ./.github/actions/setup-backend/ - - uses: actions/setup-java@v4 - with: - distribution: 'zulu' - java-version: '21' - - name: Install Graphviz - run: sudo apt-get install -y graphviz - - name: Compute Entity Relationship diagram (ERD) - env: - SUPERSET_SECRET_KEY: not-a-secret - run: | - python scripts/erd/erd.py - curl -L 
http://sourceforge.net/projects/plantuml/files/1.2023.7/plantuml.1.2023.7.jar/download > ~/plantuml.jar - java -jar ~/plantuml.jar -v -tsvg -r -o "${{ github.workspace }}/docs/static/img/" "${{ github.workspace }}/scripts/erd/erd.puml" - - name: yarn install - working-directory: docs - run: | - yarn install --check-cache - - name: yarn build - working-directory: docs - run: | - yarn build - - name: deploy docs - uses: ./.github/actions/github-action-push-to-another-repository - env: - API_TOKEN_GITHUB: ${{ secrets.SUPERSET_SITE_BUILD }} - with: - source-directory: "./docs/build" - destination-github-username: "apache" - destination-repository-name: "superset-site" - target-branch: "asf-site" - commit-message: "deploying docs: ${{ github.event.head_commit.message }} (apache/superset@${{ github.sha }})" - user-email: dev@superset.apache.org diff --git a/.github/workflows/superset-docs-verify.yml b/.github/workflows/superset-docs-verify.yml deleted file mode 100644 index de82268e046d..000000000000 --- a/.github/workflows/superset-docs-verify.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Docs Testing - -on: - pull_request: - paths: - - "docs/**" - types: [synchronize, opened, reopened, ready_for_review] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - build-deploy: - name: Build & Deploy - runs-on: ubuntu-22.04 - defaults: - run: - working-directory: docs - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Set up Node.js 20 - uses: actions/setup-node@v4 - with: - node-version: '20' - - name: yarn install - run: | - yarn install --check-cache - - name: yarn typecheck - run: | - yarn typecheck - - name: yarn build - run: | - yarn build diff --git a/.github/workflows/superset-e2e.yml b/.github/workflows/superset-e2e.yml deleted 
file mode 100644 index 092864246240..000000000000 --- a/.github/workflows/superset-e2e.yml +++ /dev/null @@ -1,141 +0,0 @@ -name: E2E - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - pull_request: - types: [synchronize, opened, reopened, ready_for_review] - workflow_dispatch: - inputs: - use_dashboard: - description: 'Use Cypress Dashboard (true/false) [paid service - trigger manually when needed]. You MUST provide a branch and/or PR number below for this to work.' - required: false - default: 'false' - ref: - description: 'The branch or tag to checkout' - required: false - default: '' - pr_id: - description: 'The pull request ID to checkout' - required: false - default: '' - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - cypress-matrix: - runs-on: ubuntu-22.04 - permissions: - contents: read - pull-requests: read - strategy: - # when one test fails, DO NOT cancel the other - # parallel_id, because this will kill Cypress processes - # leaving the Dashboard hanging ... 
- # https://github.com/cypress-io/github-action/issues/48 - fail-fast: false - matrix: - parallel_id: [0, 1, 2, 3, 4, 5] - browser: ["chrome"] - env: - SUPERSET_ENV: development - SUPERSET_CONFIG: tests.integration_tests.superset_test_config - SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset - PYTHONPATH: ${{ github.workspace }} - REDIS_PORT: 16379 - GITHUB_TOKEN: ${{ github.token }} - USE_DASHBOARD: ${{ github.event.inputs.use_dashboard || (github.ref == 'refs/heads/master' && 'true') || 'false' }} - services: - postgres: - image: postgres:15-alpine - env: - POSTGRES_USER: superset - POSTGRES_PASSWORD: superset - ports: - - 15432:5432 - redis: - image: redis:7-alpine - ports: - - 16379:6379 - steps: - # ------------------------------------------------------- - # Conditional checkout based on context - - name: Checkout for push or pull_request event - if: github.event_name == 'push' || github.event_name == 'pull_request' - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Checkout using ref (workflow_dispatch) - if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != '' - uses: actions/checkout@v4 - with: - persist-credentials: false - ref: ${{ github.event.inputs.ref }} - submodules: recursive - - name: Checkout using PR ID (workflow_dispatch) - if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != '' - uses: actions/checkout@v4 - with: - persist-credentials: false - ref: refs/pull/${{ github.event.inputs.pr_id }}/merge - submodules: recursive - # ------------------------------------------------------- - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup Python - uses: ./.github/actions/setup-backend/ - if: steps.check.outputs.python || steps.check.outputs.frontend - - name: Setup postgres - if: steps.check.outputs.python || 
steps.check.outputs.frontend - uses: ./.github/actions/cached-dependencies - with: - run: setup-postgres - - name: Import test data - if: steps.check.outputs.python || steps.check.outputs.frontend - uses: ./.github/actions/cached-dependencies - with: - run: testdata - - name: Setup Node.js - if: steps.check.outputs.python || steps.check.outputs.frontend - uses: actions/setup-node@v4 - with: - node-version: "18" - - name: Install npm dependencies - if: steps.check.outputs.python || steps.check.outputs.frontend - uses: ./.github/actions/cached-dependencies - with: - run: npm-install - - name: Build javascript packages - if: steps.check.outputs.python || steps.check.outputs.frontend - uses: ./.github/actions/cached-dependencies - with: - run: build-instrumented-assets - - name: Install cypress - if: steps.check.outputs.python || steps.check.outputs.frontend - uses: ./.github/actions/cached-dependencies - with: - run: cypress-install - - name: Run Cypress - if: steps.check.outputs.python || steps.check.outputs.frontend - uses: ./.github/actions/cached-dependencies - env: - CYPRESS_BROWSER: ${{ matrix.browser }} - PARALLEL_ID: ${{ matrix.parallel_id }} - PARALLELISM: 6 - CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }} - with: - run: cypress-run-all ${{ env.USE_DASHBOARD }} - - name: Upload Artifacts - uses: actions/upload-artifact@v4 - if: github.event_name == 'workflow_dispatch' && (steps.check.outputs.python || steps.check.outputs.frontend) - with: - path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots - name: cypress-artifact-${{ github.run_id }}-${{ github.job }} diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml deleted file mode 100644 index eb405963611a..000000000000 --- a/.github/workflows/superset-frontend.yml +++ /dev/null @@ -1,85 +0,0 @@ -name: Frontend - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - pull_request: - types: [synchronize, opened, reopened, 
ready_for_review] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - frontend-build: - runs-on: ubuntu-22.04 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Check npm lock file version - run: ./scripts/ci_check_npm_lock_version.sh ./superset-frontend/package-lock.json - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup Node.js - if: steps.check.outputs.frontend - uses: actions/setup-node@v4 - with: - node-version: "18" - - name: Install dependencies - if: steps.check.outputs.frontend - uses: ./.github/actions/cached-dependencies - with: - run: npm-install - - name: eslint - if: steps.check.outputs.frontend - working-directory: ./superset-frontend - run: | - npm run eslint -- . 
--quiet - - name: tsc - if: steps.check.outputs.frontend - working-directory: ./superset-frontend - run: | - npm run type - - name: prettier - if: steps.check.outputs.frontend - working-directory: ./superset-frontend - run: | - npm run prettier-check - - name: Build plugins packages - if: steps.check.outputs.frontend - working-directory: ./superset-frontend - run: npm run plugins:build - - name: Build plugins Storybook - if: steps.check.outputs.frontend - working-directory: ./superset-frontend - run: npm run plugins:build-storybook - - name: superset-ui/core coverage - if: steps.check.outputs.frontend - working-directory: ./superset-frontend - run: | - npm run core:cover - - name: unit tests - if: steps.check.outputs.frontend - working-directory: ./superset-frontend - run: | - npm run test -- --coverage --silent - # todo: remove this step when fix generator as a project in root jest.config.js - - name: generator-superset unit tests - if: steps.check.outputs.frontend - working-directory: ./superset-frontend/packages/generator-superset - run: npm run test - - name: Upload code coverage - uses: codecov/codecov-action@v4 - with: - flags: javascript - token: ${{ secrets.CODECOV_TOKEN }} - verbose: true diff --git a/.github/workflows/superset-helm-lint.yml b/.github/workflows/superset-helm-lint.yml deleted file mode 100644 index 5649f491a490..000000000000 --- a/.github/workflows/superset-helm-lint.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Lint and Test Charts - -on: - pull_request: - types: [opened, edited, reopened, synchronize] - paths: - - "helm/**" - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - lint-test: - runs-on: ubuntu-22.04 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - fetch-depth: 0 - - - name: Set up Helm - uses: 
azure/setup-helm@v4 - with: - version: v3.5.4 - - - name: Setup Python - uses: ./.github/actions/setup-backend/ - with: - install-superset: 'false' - - - name: Set up chart-testing - uses: ./.github/actions/chart-testing-action - - - name: Run chart-testing (list-changed) - id: list-changed - run: | - changed=$(ct list-changed --print-config) - if [[ -n "$changed" ]]; then - echo "changed=true" >> $GITHUB_OUTPUT - fi - env: - CT_CHART_DIRS: helm - CT_SINCE: HEAD - - - name: Run chart-testing (lint) - run: ct lint --print-config - env: - CT_CHART_DIRS: helm - CT_LINT_CONF: lintconf.yaml - CT_SINCE: HEAD - CT_CHART_REPOS: bitnami=https://charts.bitnami.com/bitnami diff --git a/.github/workflows/superset-helm-release.yml b/.github/workflows/superset-helm-release.yml deleted file mode 100644 index 242820afce45..000000000000 --- a/.github/workflows/superset-helm-release.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: Release Charts - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - paths: - - "helm/**" - -jobs: - release: - runs-on: ubuntu-22.04 - permissions: - contents: write - - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - fetch-depth: 0 - - - name: Configure Git - run: | - git config user.name "$GITHUB_ACTOR" - git config user.email "$GITHUB_ACTOR@users.noreply.github.com" - - - name: Install Helm - uses: azure/setup-helm@v4 - with: - version: v3.5.4 - - - name: Add bitnami repo dependency - run: helm repo add bitnami https://charts.bitnami.com/bitnami - - - name: Run chart-releaser - uses: helm/chart-releaser-action@v1.6.0 - with: - charts_dir: helm - mark_as_latest: false - env: - CR_TOKEN: "${{ github.token }}" - CR_RELEASE_NAME_TEMPLATE: "superset-helm-chart-{{ .Version }}" diff --git a/.github/workflows/superset-python-integrationtest.yml b/.github/workflows/superset-python-integrationtest.yml deleted file mode 100644 index 
7cd135e55959..000000000000 --- a/.github/workflows/superset-python-integrationtest.yml +++ /dev/null @@ -1,188 +0,0 @@ -# Python integration tests -name: Python-Integration - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - pull_request: - types: [synchronize, opened, reopened, ready_for_review] - -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - test-mysql: - runs-on: ubuntu-22.04 - env: - PYTHONPATH: ${{ github.workspace }} - SUPERSET_CONFIG: tests.integration_tests.superset_test_config - REDIS_PORT: 16379 - SUPERSET__SQLALCHEMY_DATABASE_URI: | - mysql+mysqldb://superset:superset@127.0.0.1:13306/superset?charset=utf8mb4&binary_prefix=true - services: - mysql: - image: mysql:8.0 - env: - MYSQL_ROOT_PASSWORD: root - ports: - - 13306:3306 - options: >- - --health-cmd="mysqladmin ping --silent" - --health-interval=10s - --health-timeout=5s - --health-retries=5 - redis: - image: redis:7-alpine - options: --entrypoint redis-server - ports: - - 16379:6379 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup Python - uses: ./.github/actions/setup-backend/ - if: steps.check.outputs.python - - name: Setup MySQL - if: steps.check.outputs.python - uses: ./.github/actions/cached-dependencies - with: - run: setup-mysql - - name: Start Celery worker - if: steps.check.outputs.python - uses: ./.github/actions/cached-dependencies - with: - run: celery-worker - - name: Python integration tests (MySQL) - if: steps.check.outputs.python - run: | - ./scripts/python_tests.sh - - name: Upload code coverage - uses: codecov/codecov-action@v4 - with: - flags: python,mysql - token: ${{ secrets.CODECOV_TOKEN }} - verbose: true - test-postgres: 
- runs-on: ubuntu-22.04 - strategy: - matrix: - python-version: ["current", "next", "previous"] - env: - PYTHONPATH: ${{ github.workspace }} - SUPERSET_CONFIG: tests.integration_tests.superset_test_config - REDIS_PORT: 16379 - SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset - services: - postgres: - image: postgres:15-alpine - env: - POSTGRES_USER: superset - POSTGRES_PASSWORD: superset - ports: - # Use custom ports for services to avoid accidentally connecting to - # GitHub action runner's default installations - - 15432:5432 - redis: - image: redis:7-alpine - ports: - - 16379:6379 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup Python - uses: ./.github/actions/setup-backend/ - if: steps.check.outputs.python - with: - python-version: ${{ matrix.python-version }} - - name: Setup Postgres - if: steps.check.outputs.python - uses: ./.github/actions/cached-dependencies - with: - run: | - setup-postgres - - name: Start Celery worker - if: steps.check.outputs.python - uses: ./.github/actions/cached-dependencies - with: - run: celery-worker - - name: Python integration tests (PostgreSQL) - if: steps.check.outputs.python - run: | - ./scripts/python_tests.sh - - name: Upload code coverage - uses: codecov/codecov-action@v4 - with: - flags: python,postgres - token: ${{ secrets.CODECOV_TOKEN }} - verbose: true - - test-sqlite: - runs-on: ubuntu-22.04 - env: - PYTHONPATH: ${{ github.workspace }} - SUPERSET_CONFIG: tests.integration_tests.superset_test_config - REDIS_PORT: 16379 - SUPERSET__SQLALCHEMY_DATABASE_URI: | - sqlite:///${{ github.workspace }}/.temp/superset.db?check_same_thread=true - SUPERSET__SQLALCHEMY_EXAMPLES_URI: | - sqlite:///${{ github.workspace 
}}/.temp/examples.db?check_same_thread=true - services: - redis: - image: redis:7-alpine - ports: - - 16379:6379 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup Python - uses: ./.github/actions/setup-backend/ - if: steps.check.outputs.python - - name: Install dependencies - if: steps.check.outputs.python - uses: ./.github/actions/cached-dependencies - with: - run: | - # sqlite needs this working directory - mkdir ${{ github.workspace }}/.temp - - name: Start Celery worker - if: steps.check.outputs.python - uses: ./.github/actions/cached-dependencies - with: - run: celery-worker - - name: Python integration tests (SQLite) - if: steps.check.outputs.python - run: | - ./scripts/python_tests.sh - - name: Upload code coverage - uses: codecov/codecov-action@v4 - with: - flags: python,sqlite - token: ${{ secrets.CODECOV_TOKEN }} - verbose: true diff --git a/.github/workflows/superset-python-misc.yml b/.github/workflows/superset-python-misc.yml deleted file mode 100644 index 12417d147a50..000000000000 --- a/.github/workflows/superset-python-misc.yml +++ /dev/null @@ -1,53 +0,0 @@ -# Python Misc unit tests -name: Python Misc - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - pull_request: - types: [synchronize, opened, reopened, ready_for_review] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - python-lint: - runs-on: ubuntu-22.04 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - 
with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup Python - uses: ./.github/actions/setup-backend/ - if: steps.check.outputs.python - - babel-extract: - runs-on: ubuntu-22.04 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup Python - if: steps.check.outputs.python - uses: ./.github/actions/setup-backend/ - - name: Test babel extraction - if: steps.check.outputs.python - run: scripts/translations/babel_update.sh diff --git a/.github/workflows/superset-python-presto-hive.yml b/.github/workflows/superset-python-presto-hive.yml deleted file mode 100644 index d87a70964cc5..000000000000 --- a/.github/workflows/superset-python-presto-hive.yml +++ /dev/null @@ -1,151 +0,0 @@ -# Python Presto/Hive unit tests -name: Python Presto/Hive - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - pull_request: - types: [synchronize, opened, reopened, ready_for_review] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - test-postgres-presto: - runs-on: ubuntu-22.04 - env: - PYTHONPATH: ${{ github.workspace }} - SUPERSET_CONFIG: tests.integration_tests.superset_test_config - REDIS_PORT: 16379 - SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset - SUPERSET__SQLALCHEMY_EXAMPLES_URI: presto://localhost:15433/memory/default - services: - postgres: - image: postgres:15-alpine - env: - POSTGRES_USER: superset - POSTGRES_PASSWORD: superset - ports: - # Use custom ports for services to avoid accidentally connecting to - # GitHub action runner's default installations - - 15432:5432 - presto: - image: starburstdata/presto:350-e.6 - env: - 
POSTGRES_USER: superset - POSTGRES_PASSWORD: superset - ports: - # Use custom ports for services to avoid accidentally connecting to - # GitHub action runner's default installations - - 15433:8080 - redis: - image: redis:7-alpine - ports: - - 16379:6379 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup Python - uses: ./.github/actions/setup-backend/ - if: steps.check.outputs.python == 'true' - - name: Setup Postgres - if: steps.check.outputs.python - uses: ./.github/actions/cached-dependencies - with: - run: | - echo "${{ steps.check.outputs.python }}" - setup-postgres - - name: Start Celery worker - if: steps.check.outputs.python - uses: ./.github/actions/cached-dependencies - with: - run: celery-worker - - name: Python unit tests (PostgreSQL) - if: steps.check.outputs.python - run: | - ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow' - - name: Upload code coverage - uses: codecov/codecov-action@v4 - with: - flags: python,presto - token: ${{ secrets.CODECOV_TOKEN }} - verbose: true - - test-postgres-hive: - runs-on: ubuntu-22.04 - env: - PYTHONPATH: ${{ github.workspace }} - SUPERSET_CONFIG: tests.integration_tests.superset_test_config - REDIS_PORT: 16379 - SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset - SUPERSET__SQLALCHEMY_EXAMPLES_URI: hive://localhost:10000/default - UPLOAD_FOLDER: /tmp/.superset/uploads/ - services: - postgres: - image: postgres:15-alpine - env: - POSTGRES_USER: superset - POSTGRES_PASSWORD: superset - ports: - # Use custom ports for services to avoid accidentally connecting to - # GitHub action runner's default installations - - 15432:5432 - redis: - image: redis:7-alpine - ports: - - 16379:6379 - steps: - - name: 
"Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Create csv upload directory - if: steps.check.outputs.python - run: sudo mkdir -p /tmp/.superset/uploads - - name: Give write access to the csv upload directory - if: steps.check.outputs.python - run: sudo chown -R $USER:$USER /tmp/.superset - - name: Start hadoop and hive - if: steps.check.outputs.python - run: docker compose -f scripts/databases/hive/docker-compose.yml up -d - - name: Setup Python - uses: ./.github/actions/setup-backend/ - if: steps.check.outputs.python - - name: Setup Postgres - if: steps.check.outputs.python - uses: ./.github/actions/cached-dependencies - with: - run: setup-postgres - - name: Start Celery worker - if: steps.check.outputs.python - uses: ./.github/actions/cached-dependencies - with: - run: celery-worker - - name: Python unit tests (PostgreSQL) - if: steps.check.outputs.python - run: | - ./scripts/python_tests.sh -m 'chart_data_flow or sql_json_flow' - - name: Upload code coverage - uses: codecov/codecov-action@v4 - with: - flags: python,hive - token: ${{ secrets.CODECOV_TOKEN }} - verbose: true diff --git a/.github/workflows/superset-python-unittest.yml b/.github/workflows/superset-python-unittest.yml deleted file mode 100644 index 0f9cfc8aa10e..000000000000 --- a/.github/workflows/superset-python-unittest.yml +++ /dev/null @@ -1,53 +0,0 @@ -# Python unit tests -name: Python-Unit - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - pull_request: - types: [synchronize, opened, reopened, ready_for_review] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - unit-tests: - runs-on: ubuntu-22.04 - strategy: - matrix: - 
python-version: ["current", "next"] - env: - PYTHONPATH: ${{ github.workspace }} - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: Setup Python - uses: ./.github/actions/setup-backend/ - if: steps.check.outputs.python - with: - python-version: ${{ matrix.python-version }} - - name: Python unit tests - if: steps.check.outputs.python - env: - SUPERSET_TESTENV: true - SUPERSET_SECRET_KEY: not-a-secret - run: | - pytest --durations-min=0.5 --cov-report= --cov=superset ./tests/common ./tests/unit_tests --cache-clear - - name: Upload code coverage - uses: codecov/codecov-action@v4 - with: - flags: python,unit - token: ${{ secrets.CODECOV_TOKEN }} - verbose: true diff --git a/.github/workflows/superset-translations.yml b/.github/workflows/superset-translations.yml deleted file mode 100644 index 292a42afdb83..000000000000 --- a/.github/workflows/superset-translations.yml +++ /dev/null @@ -1,67 +0,0 @@ -name: Translations - -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - pull_request: - types: [synchronize, opened, reopened, ready_for_review] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - frontend-check-translations: - runs-on: ubuntu-22.04 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - - name: Setup Node.js - if: steps.check.outputs.frontend - uses: actions/setup-node@v4 - with: - node-version: '18' - - name: Install dependencies - 
if: steps.check.outputs.frontend - uses: ./.github/actions/cached-dependencies - with: - run: npm-install - - name: lint - if: steps.check.outputs.frontend - working-directory: ./superset-frontend - run: | - npm run build-translation - - babel-extract: - runs-on: ubuntu-22.04 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - submodules: recursive - - name: Check for file changes - id: check - uses: ./.github/actions/change-detector/ - with: - token: ${{ secrets.GITHUB_TOKEN }} - - - name: Setup Python - if: steps.check.outputs.python - uses: ./.github/actions/setup-backend/ - - name: Test babel extraction - if: steps.check.outputs.python - run: ./scripts/translations/babel_update.sh diff --git a/.github/workflows/superset-websocket.yml b/.github/workflows/superset-websocket.yml deleted file mode 100644 index f1785a39abe4..000000000000 --- a/.github/workflows/superset-websocket.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: WebSocket server -on: - push: - branches: - - "master" - - "[0-9].[0-9]*" - paths: - - "superset-websocket/**" - pull_request: - paths: - - "superset-websocket/**" - types: [synchronize, opened, reopened, ready_for_review] - -# cancel previous workflow jobs for PRs -concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }} - cancel-in-progress: true - -jobs: - app-checks: - runs-on: ubuntu-22.04 - steps: - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: Install dependencies - working-directory: ./superset-websocket - run: npm ci - - name: eslint - working-directory: ./superset-websocket - run: npm run eslint -- . 
--quiet - - name: typescript checks - working-directory: ./superset-websocket - run: npm run type - - name: prettier - working-directory: ./superset-websocket - run: npm run prettier-check - - name: unit tests - working-directory: ./superset-websocket - run: npm run test - - name: build - working-directory: ./superset-websocket - run: npm run build diff --git a/.github/workflows/supersetbot.yml b/.github/workflows/supersetbot.yml deleted file mode 100644 index f7e106ed9c7a..000000000000 --- a/.github/workflows/supersetbot.yml +++ /dev/null @@ -1,56 +0,0 @@ -name: SupersetBot Workflow - -on: - issue_comment: - types: [created, edited] - - # Making the workflow testable since `issue_comment` only triggers on - # the default branch - workflow_dispatch: - inputs: - comment_body: - description: 'Comment Body' - required: true - type: string - -jobs: - supersetbot: - runs-on: ubuntu-22.04 - if: > - github.event_name == 'workflow_dispatch' || - (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot')) - permissions: - contents: read - pull-requests: write - issues: write - steps: - - name: Quickly add thumbs up! 
- if: github.event_name == 'issue_comment' && contains(github.event.comment.body, '@supersetbot') - uses: actions/github-script@v7 - with: - script: | - const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/') - await github.rest.reactions.createForIssueComment({ - owner, - repo, - comment_id: context.payload.comment.id, - content: '+1' - }); - - - name: "Checkout ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Setup supersetbot - uses: ./.github/actions/setup-supersetbot/ - - - name: Execute custom Node.js script - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GITHUB_ACTOR: ${{ github.actor }} - GITHUB_REPOSITORY: ${{ github.repository }} - GITHUB_ISSUE_NUMBER: ${{ github.event.issue.number }} - COMMENT_BODY: ${{ github.event.comment.body }}${{ github.event.inputs.comment_body }} - run: | - supersetbot run "$COMMENT_BODY" diff --git a/.github/workflows/tag-release.yml b/.github/workflows/tag-release.yml deleted file mode 100644 index 8ee03b3d04c2..000000000000 --- a/.github/workflows/tag-release.yml +++ /dev/null @@ -1,112 +0,0 @@ -name: Publish a Release - -on: - release: - types: [published, edited] - - # Can be triggered manually - workflow_dispatch: - inputs: - release: - required: true - description: The version to generate - git-ref: - required: true - description: The git reference to checkout prior to running the docker build - force-latest: - required: true - type: choice - default: 'false' - description: Whether to force a latest tag on the release - options: - - 'true' - - 'false' -jobs: - config: - runs-on: "ubuntu-22.04" - outputs: - has-secrets: ${{ steps.check.outputs.has-secrets }} - steps: - - name: "Check for secrets" - id: check - shell: bash - run: | - if [ -n "${{ (secrets.DOCKERHUB_USER != '' && secrets.DOCKERHUB_TOKEN != '') || '' }}" ]; then - echo "has-secrets=1" >> "$GITHUB_OUTPUT" - fi - - docker-release: - needs: config - if: needs.config.outputs.has-secrets - name: 
docker-release - runs-on: ubuntu-22.04 - strategy: - matrix: - build_preset: ["dev", "lean", "py310", "websocket", "dockerize"] - fail-fast: false - steps: - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - tags: true - fetch-depth: 0 - - - name: Setup supersetbot - uses: ./.github/actions/setup-supersetbot/ - - - name: Try to login to DockerHub - continue-on-error: true - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USER }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Execute custom Node.js script - env: - DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} - DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - RELEASE="${{ github.event.release.tag_name }}" - FORCE_LATEST="" - EVENT="${{github.event_name}}" - if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then - # in the case of a manually-triggered run, read release from input - RELEASE="${{ github.event.inputs.release }}" - if [ "${{ github.event.inputs.force-latest }}" = "true" ]; then - FORCE_LATEST="--force-latest" - fi - git checkout "${{ github.event.inputs.git-ref }}" - EVENT="release" - fi - - supersetbot docker \ - --preset ${{ matrix.build_preset }} \ - --context "$EVENT" \ - --context-ref "$RELEASE" $FORCE_LATEST \ - --platform "linux/arm64" \ - --platform "linux/amd64" - - # Going back on original branch to allow "post" GHA operations - - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )" - uses: actions/checkout@v4 - with: - persist-credentials: false - - - name: Label the PRs with the right release-related labels - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - RELEASE="${{ github.event.release.tag_name }}" - if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then - # in the case of 
a manually-triggered run, read release from input - RELEASE="${{ github.event.inputs.release }}" - fi - supersetbot release-label $RELEASE diff --git a/.github/workflows/tech-debt.yml b/.github/workflows/tech-debt.yml deleted file mode 100644 index f17e220a402b..000000000000 --- a/.github/workflows/tech-debt.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: Upload Technical Debt Metrics to Google Sheets - -on: - push: - branches: - - master - - "[0-9].[0-9]*" - -jobs: - config: - runs-on: "ubuntu-22.04" - outputs: - has-secrets: ${{ steps.check.outputs.has-secrets }} - steps: - - name: "Check for secrets" - id: check - shell: bash - run: | - if [ -n "${{ (secrets.GSHEET_KEY != '' ) || '' }}" ]; then - echo "has-secrets=1" >> "$GITHUB_OUTPUT" - fi - - process-and-upload: - needs: config - if: needs.config.outputs.has-secrets - runs-on: ubuntu-22.04 - name: Generate Reports - steps: - - name: Checkout Repository - uses: actions/checkout@v4 - - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: '18' - - - name: Install Dependencies - run: npm install - working-directory: ./superset-frontend - - - name: Run Script - env: - SPREADSHEET_ID: "1oABNnzxJYzwUrHjr_c9wfYEq9dFL1ScVof9LlaAdxvo" - SERVICE_ACCOUNT_KEY: ${{ secrets.GSHEET_KEY }} - run: npm run lint-stats - continue-on-error: true - working-directory: ./superset-frontend diff --git a/.github/workflows/welcome-new-users.yml b/.github/workflows/welcome-new-users.yml deleted file mode 100644 index 2c602967770d..000000000000 --- a/.github/workflows/welcome-new-users.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: Welcome New Contributor - -on: - pull_request_target: - types: [opened] - -jobs: - welcome: - runs-on: ubuntu-22.04 - permissions: - pull-requests: write - - steps: - - name: Welcome Message - uses: actions/first-interaction@v1 - continue-on-error: true - with: - repo-token: ${{ github.token }} - pr-message: |- - Congrats on making your first PR and thank you for contributing to Superset! 
:tada: :heart: - - We hope to see you in our [Slack](https://apache-superset.slack.com/) community too! Not signed up? Use our [Slack App](http://bit.ly/join-superset-slack) to self-register. diff --git a/.gitignore b/.gitignore index ba9c2a047ecb..cb2e90e86382 100644 --- a/.gitignore +++ b/.gitignore @@ -104,6 +104,7 @@ ghostdriver.log testCSV.csv .terser-plugin-cache/ apache-superset-*.tar.gz* +apache_superset-*.tar.gz* release.json # Translation-related files diff --git a/CHANGELOG.md b/CHANGELOG.md index 38df8d63134f..f00ba2d39f00 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -43,3 +43,4 @@ under the License. - [4.0.0](./CHANGELOG/4.0.0.md) - [4.0.1](./CHANGELOG/4.0.1.md) - [4.0.2](./CHANGELOG/4.0.2.md) +- [4.1.0](./CHANGELOG/4.1.0.md) diff --git a/CHANGELOG/4.1.0.md b/CHANGELOG/4.1.0.md new file mode 100644 index 000000000000..5f23c946a95d --- /dev/null +++ b/CHANGELOG/4.1.0.md @@ -0,0 +1,995 @@ +<!-- +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. 
+--> + +## Change Log + +### 4.1 (Fri Nov 1 15:24:51 2024 -0700) + +**Database Migrations** + +- [#30275](https://github.com/apache/superset/pull/30275) fix(migration): 87d38ad83218 failing on upgrade (@villebro) +- [#30017](https://github.com/apache/superset/pull/30017) fix: pass if table is already removed on upgrade (@sadpandajoe) +- [#30029](https://github.com/apache/superset/pull/30029) fix(migrations): Fix the time comparison migration (@Antonio-RiveroMartnez) +- [#29625](https://github.com/apache/superset/pull/29625) fix: try to prevent deadlocks when running upgrade (@sadpandajoe) +- [#29906](https://github.com/apache/superset/pull/29906) fix: Error when downgrading add_catalog_perm_to_tables migration (@michael-s-molina) +- [#29799](https://github.com/apache/superset/pull/29799) fix: Downgrade of revision 678eefb4ab44 throws error (@michael-s-molina) +- [#29166](https://github.com/apache/superset/pull/29166) chore: enable ruff lint rule TRY201 and B904 to improve `raise` stack traces (@mistercrunch) +- [#28838](https://github.com/apache/superset/pull/28838) fix: Update downgrade path for migration to remove sl_tables (@sadpandajoe) +- [#28704](https://github.com/apache/superset/pull/28704) chore: remove sl\_ tables (@mistercrunch) +- [#28482](https://github.com/apache/superset/pull/28482) fix: Update migration logic in #27119 (@john-bodley) +- [#28556](https://github.com/apache/superset/pull/28556) fix: db migration revision (@justinpark) +- [#28416](https://github.com/apache/superset/pull/28416) feat: add support for catalogs (@betodealmeida) +- [#27718](https://github.com/apache/superset/pull/27718) refactor(plugins): BigNumber Time Comparison with existing time_offset API (@Antonio-RiveroMartnez) +- [#26327](https://github.com/apache/superset/pull/26327) feat: Customizable email subject name (@puridach-w) +- [#28422](https://github.com/apache/superset/pull/28422) fix: Update migration logic in #27119 (@john-bodley) +- 
[#28394](https://github.com/apache/superset/pull/28394) feat: catalog support for Databricks native (@betodealmeida) +- [#28361](https://github.com/apache/superset/pull/28361) chore: fix master build by merging alembic migration heads (@mistercrunch) +- [#27392](https://github.com/apache/superset/pull/27392) fix: Missing sql_editor_id index (@justinpark) +- [#28317](https://github.com/apache/superset/pull/28317) feat(SIP-95): permissions for catalogs (@betodealmeida) +- [#28192](https://github.com/apache/superset/pull/28192) feat: new Columnar upload form and API (@dpgaspar) +- [#28267](https://github.com/apache/superset/pull/28267) chore: enable ruff's isort equivalent (@mistercrunch) +- [#28122](https://github.com/apache/superset/pull/28122) feat(SIP-95): new endpoint for table metadata (@betodealmeida) +- [#28158](https://github.com/apache/superset/pull/28158) chore: set up ruff as a new linter/formatter (@mistercrunch) +- [#28105](https://github.com/apache/superset/pull/28105) feat: new Excel upload form and API (@dpgaspar) +- [#28106](https://github.com/apache/superset/pull/28106) fix: db migrations on downgrade (@dpgaspar) +- [#27849](https://github.com/apache/superset/pull/27849) feat: Slack Avatar integration (@mistercrunch) +- [#27840](https://github.com/apache/superset/pull/27840) feat: new CSV upload form and API (@dpgaspar) +- [#27631](https://github.com/apache/superset/pull/27631) feat(SIP-85): OAuth2 for databases (@betodealmeida) +- [#27351](https://github.com/apache/superset/pull/27351) fix: Migration for single metric in Big Number with Time Comparison (@kgabryje) + +**Features** + +- [#30614](https://github.com/apache/superset/pull/30614) feat: use dialect when tokenizing (@betodealmeida) +- [#30132](https://github.com/apache/superset/pull/30132) feat(embedded): add hook to allow superset admins to validate guest token parameters (@dmarkey) +- [#29959](https://github.com/apache/superset/pull/29959) feat(sqllab): Add timeout on fetching query 
results (@justinpark) +- [#30177](https://github.com/apache/superset/pull/30177) feat: `is_mutating` method (@betodealmeida) +- [#29088](https://github.com/apache/superset/pull/29088) feat(alert/report): Added optional CC and BCC fields for email notifi… (@nsivarajan) +- [#29264](https://github.com/apache/superset/pull/29264) feat: add slackv2 notification (@eschutho) +- [#29584](https://github.com/apache/superset/pull/29584) feat(frontend/hooks): replace 3rd-party BroadcastChannel with native Web API equivalence (@hainenber) +- [#29590](https://github.com/apache/superset/pull/29590) feat: custom values to sandbox iframe (@dacopan) +- [#29419](https://github.com/apache/superset/pull/29419) feat(build): uplift Lerna + replace insecure shortid with nanoid + uplift Yeoman-related packages + ESM-ize generator-superset (@hainenber) +- [#29225](https://github.com/apache/superset/pull/29225) feat: add connector for CouchbaseDB (@ayush33143314) +- [#29408](https://github.com/apache/superset/pull/29408) feat(build): uplift Storybook to v8 (@hainenber) +- [#29496](https://github.com/apache/superset/pull/29496) feat(database): Add OceanBase support (@yuanoOo) +- [#29384](https://github.com/apache/superset/pull/29384) feat: add support to NOT LIKE operator (@dacopan) +- [#29498](https://github.com/apache/superset/pull/29498) feat: Enable customizing the docker admin password (@c-w) +- [#29187](https://github.com/apache/superset/pull/29187) feat(dashboard): add API endpoints for generating and downloading screenshots (@eulloa10) +- [#27221](https://github.com/apache/superset/pull/27221) feat(CLI command): Apache Superset "Factory Reset" CLI command #27207 (@mknadh) +- [#29328](https://github.com/apache/superset/pull/29328) feat: Add Ant Design 5 Theme (@geido) +- [#29351](https://github.com/apache/superset/pull/29351) feat(e2e): implementing Cypress Dashboard on `master` branch merges (@rusackas) +- [#29361](https://github.com/apache/superset/pull/29361) feat: Adds chart IDs 
option to migrate-viz (@michael-s-molina) +- [#29329](https://github.com/apache/superset/pull/29329) feat: Adds the ECharts Sankey chart (@michael-s-molina) +- [#29118](https://github.com/apache/superset/pull/29118) feat(build): uplift `Jest` to v29 (@hainenber) +- [#29231](https://github.com/apache/superset/pull/29231) feat: add new SQLLAB_FORCE_RUN_ASYNC feature flag (@mistercrunch) +- [#29123](https://github.com/apache/superset/pull/29123) feat(dashboard): Enables pivot table download option at dashboard level (@adimyth) +- [#27962](https://github.com/apache/superset/pull/27962) feat: Dashboard tabs api endpoint (@fisjac) +- [#29242](https://github.com/apache/superset/pull/29242) feat: Improves the Drill By feature (@michael-s-molina) +- [#28057](https://github.com/apache/superset/pull/28057) feat(table): Table with Time Comparison (@Antonio-RiveroMartnez) +- [#29241](https://github.com/apache/superset/pull/29241) feat: Support a dynamic minimum interval for alerts and reports (@Vitor-Avila) +- [#29164](https://github.com/apache/superset/pull/29164) feat(trino): Add functionality to upload data (@john-bodley) +- [#28774](https://github.com/apache/superset/pull/28774) feat(echarts-pie): add string template support for labels (@hexcafe) +- [#24263](https://github.com/apache/superset/pull/24263) feat(formatters): Add custom d3-time-format locale (@matheusbsilva) +- [#29109](https://github.com/apache/superset/pull/29109) feat: OAuth2 client initial work (@betodealmeida) +- [#28637](https://github.com/apache/superset/pull/28637) feat: add Current time-range options for time filter (@pranav1699) +- [#28780](https://github.com/apache/superset/pull/28780) feat: Adds Histogram chart migration logic (@michael-s-molina) +- [#28762](https://github.com/apache/superset/pull/28762) feat(helm): allow removal of Node & Worker replicas for custom HPA solutions (@hanslemm) +- [#28789](https://github.com/apache/superset/pull/28789) feat: Adds the Featured Charts dashboard 
(@michael-s-molina) +- [#28652](https://github.com/apache/superset/pull/28652) feat: Adds the ECharts Histogram chart (@michael-s-molina) +- [#28770](https://github.com/apache/superset/pull/28770) feat: impersonate with email prefix (@betodealmeida) +- [#28483](https://github.com/apache/superset/pull/28483) feat: bake translations as part of the build processes (@mistercrunch) +- [#27851](https://github.com/apache/superset/pull/27851) feat(reports): allowing the email mutator to update recipients (@SkinnyPigeon) +- [#28597](https://github.com/apache/superset/pull/28597) feat: add Nightingale chart support for echarts pie chart (@hexcafe) +- [#28602](https://github.com/apache/superset/pull/28602) feat: Adds Bar chart migration logic (@michael-s-molina) +- [#28521](https://github.com/apache/superset/pull/28521) feat: unpack payload into log function (@mistercrunch) +- [#28629](https://github.com/apache/superset/pull/28629) feat: Data Zoom scrolls using the mouse (mark II) (@hughhhh) +- [#28265](https://github.com/apache/superset/pull/28265) feat(maps): Adding ALL the countries to the Country Map plugin! 
🌎 (@rusackas) +- [#27857](https://github.com/apache/superset/pull/27857) feat(dashboard): Add metadata bar to the header (@justinpark) +- [#28425](https://github.com/apache/superset/pull/28425) feat: clarify that 'Text' supports markdown (@mistercrunch) +- [#27995](https://github.com/apache/superset/pull/27995) feat(explore): Color scheme groups, new color schemes (@kgabryje) +- [#28376](https://github.com/apache/superset/pull/28376) feat(SIP-95): catalogs in SQL Lab and datasets (@betodealmeida) +- [#28176](https://github.com/apache/superset/pull/28176) feat(reports): Set a minimum interval for each report's execution (@Vitor-Avila) +- [#27950](https://github.com/apache/superset/pull/27950) feat: Utility function to render chart tooltips (@michael-s-molina) +- [#28345](https://github.com/apache/superset/pull/28345) feat(docs): uplift Docusaurus to v3 (@hainenber) +- [#28282](https://github.com/apache/superset/pull/28282) feat: accelerate webpack builds with filesystem cache (@mistercrunch) +- [#28035](https://github.com/apache/superset/pull/28035) feat: Add Czech Republic country map. 
(@martinspudich) +- [#27933](https://github.com/apache/superset/pull/27933) feat(country-map): Adds Philippines regional map and updates/cleans existing Philippines provincial map (@jdruii) +- [#28169](https://github.com/apache/superset/pull/28169) feat(translations): Traditional Chinese translation files added (@bestlong) +- [#24449](https://github.com/apache/superset/pull/24449) feat: custom refresh frequency (@Abhishek-kumar-samsung) +- [#27943](https://github.com/apache/superset/pull/27943) feat: improve event logging for queries + refactor (@mistercrunch) +- [#28107](https://github.com/apache/superset/pull/28107) feat: label PR with release tags (@mistercrunch) +- [#28063](https://github.com/apache/superset/pull/28063) feat(SIP-95): new endpoint for extra table metadata (@betodealmeida) +- [#27908](https://github.com/apache/superset/pull/27908) feat(dbview): Add token request button to DuckDB and MotherDuck database modal (@guenp) +- [#27953](https://github.com/apache/superset/pull/27953) feat: optimize docker-compose up for faster boot time (@mistercrunch) +- [#27969](https://github.com/apache/superset/pull/27969) feat: add option to disable rendering of html in sql lab and table chart (@soniagtm) +- [#27773](https://github.com/apache/superset/pull/27773) feat(alert report tabs): adding feature flag (@fisjac) +- [#27863](https://github.com/apache/superset/pull/27863) feat: GHA to bump python packages using supersetbot (@mistercrunch) +- [#27788](https://github.com/apache/superset/pull/27788) feat(explore): Clear temporal filter value (@kgabryje) +- [#26138](https://github.com/apache/superset/pull/26138) feat(accessibility): add tabbing to chart menu in dashboard (@eschutho) +- [#27708](https://github.com/apache/superset/pull/27708) feat(viz picker): Remove some tags, refactor Recommended section (@kgabryje) +- [#27647](https://github.com/apache/superset/pull/27647) feat: move supersetbot out of repo (@mistercrunch) +- 
[#27859](https://github.com/apache/superset/pull/27859) feat: setup a pyproject.toml (@mistercrunch) +- [#27847](https://github.com/apache/superset/pull/27847) feat(db): Adding DB_SQLA_URI_VALIDATOR (@craig-rueda) +- [#27771](https://github.com/apache/superset/pull/27771) feat: Adds Heatmap chart migration logic (@michael-s-molina) +- [#27665](https://github.com/apache/superset/pull/27665) feat(db_engine): Add custom_user_agent when connecting to MotherDuck (@guenp) +- [#25353](https://github.com/apache/superset/pull/25353) feat: Adds the ECharts Heatmap chart (@michael-s-molina) +- [#27615](https://github.com/apache/superset/pull/27615) feat: use the local supersetbot (@mistercrunch) +- [#27582](https://github.com/apache/superset/pull/27582) feat(jinja): metric macro (@Vitor-Avila) +- [#27497](https://github.com/apache/superset/pull/27497) feat(alerts-reports): adding pdf filetype to email and slack reports (@fisjac) +- [#27522](https://github.com/apache/superset/pull/27522) feat: support for KQL in `SQLScript` (@betodealmeida) +- [#27589](https://github.com/apache/superset/pull/27589) feat(bar_chart): Stacked Bar chart with Time comparison in separated stacks (@Antonio-RiveroMartnez) +- [#27536](https://github.com/apache/superset/pull/27536) feat: Adds option to disable drill to detail per database (@michael-s-molina) +- [#27571](https://github.com/apache/superset/pull/27571) feat(supersetbot): label PRs and issues with author's public org (@mistercrunch) +- [#27542](https://github.com/apache/superset/pull/27542) feat(maps): Add Italy regions code to the map generator notebook (@iskenderulgen) +- [#27524](https://github.com/apache/superset/pull/27524) feat(plugins): add color options for big number with time comparison (@lilykuang) +- [#27455](https://github.com/apache/superset/pull/27455) feat: Add Turkey's regions to country map visualization (@iskenderulgen) +- [#27046](https://github.com/apache/superset/pull/27046) feat(supersetbot): introduce `supersetbot` 
as its own npm package, CLI and comment-operated bot (@mistercrunch) +- [#27255](https://github.com/apache/superset/pull/27255) feat: show more information when loading chart (@betodealmeida) +- [#27434](https://github.com/apache/superset/pull/27434) feat: docker-compose to work off repo Dockerfile (@mistercrunch) +- [#27244](https://github.com/apache/superset/pull/27244) feat(translations): Turkish translation files added (@coteli) +- [#27372](https://github.com/apache/superset/pull/27372) feat: Add repo activity stats to README.md (@rusackas) +- [#27375](https://github.com/apache/superset/pull/27375) feat: Responsive UI for Big Number with Time Comparison (@kgabryje) +- [#27370](https://github.com/apache/superset/pull/27370) feat: support to fetch multiple date time in time_range endpoint (@zhaoyongjie) +- [#27368](https://github.com/apache/superset/pull/27368) feat: datediff in datetime_parser (@zhaoyongjie) +- [#24408](https://github.com/apache/superset/pull/24408) feat(embedded-sdk): Add 'urlParams' option to pass query parameters to embedded dashboard (@grvoicu) +- [#27298](https://github.com/apache/superset/pull/27298) feat(logs context): Adding dashboard id to logs context (@Vitor-Avila) +- [#27197](https://github.com/apache/superset/pull/27197) feat(jinja): current_user_email macro (@Vitor-Avila) +- [#27146](https://github.com/apache/superset/pull/27146) feat(ci): no more docker builds on PR-related events (@mistercrunch) +- [#27193](https://github.com/apache/superset/pull/27193) feat: Use standardized controls in Big Number with Time Comparison (@kgabryje) +- [#27176](https://github.com/apache/superset/pull/27176) feat(docs): Adds an "Edit this page on GitHub" button to docs pages (@rusackas) +- [#27163](https://github.com/apache/superset/pull/27163) feat(helm): optionally set pod disruption budgets (@pradasouvanlasy) +- [#27162](https://github.com/apache/superset/pull/27162) feat(adt): add 403 to api response status codes (@anirudh-hegde) + +**Fixes** + 
+- [#30819](https://github.com/apache/superset/pull/30819) fix(plugin-chart-echarts): sort tooltip correctly (@villebro) +- [#30755](https://github.com/apache/superset/pull/30755) fix(Dashboard): Sync/Async Dashboard Screenshot Generation and Default Cache (@geido) +- [#30773](https://github.com/apache/superset/pull/30773) fix: catalog migration w/o connection (@betodealmeida) +- [#30429](https://github.com/apache/superset/pull/30429) fix: CI remove cypress command --headed (@mistercrunch) +- [#30735](https://github.com/apache/superset/pull/30735) fix(Jinja): Extra cache keys for calculated columns and metrics using Jinja (@Vitor-Avila) +- [#30699](https://github.com/apache/superset/pull/30699) fix: Nested transaction is inactive when embedding dashboard (@michael-s-molina) +- [#30675](https://github.com/apache/superset/pull/30675) fix(dashboard): Include `urlParams` in the screenshot generation (@Vitor-Avila) +- [#30715](https://github.com/apache/superset/pull/30715) fix(Jinja): Extra cache keys for Jinja columns (@geido) +- [#30680](https://github.com/apache/superset/pull/30680) fix(chart): Table and page entries misaligned (@justinpark) +- [#30348](https://github.com/apache/superset/pull/30348) fix(explore): Missing markarea component broke annotations in echarts (@kgabryje) +- [#30628](https://github.com/apache/superset/pull/30628) fix: First item hovered on stacked bar (@michael-s-molina) +- [#30617](https://github.com/apache/superset/pull/30617) fix(docs): address two linkinator failures (@sfirke) +- [#30438](https://github.com/apache/superset/pull/30438) fix(Filters): Apply native & cross filters on common columns (@geido) +- [#30581](https://github.com/apache/superset/pull/30581) fix(filters): Adds a fix for saving time range adhoc_filters (@ObservabilityTeam) +- [#30578](https://github.com/apache/superset/pull/30578) fix: `sqlparse` fallback for formatting queries (@betodealmeida) +- [#30565](https://github.com/apache/superset/pull/30565) fix: update html 
rendering to true from false (@sadpandajoe) +- [#30202](https://github.com/apache/superset/pull/30202) fix: adhoc metrics (@betodealmeida) +- [#30549](https://github.com/apache/superset/pull/30549) fix(Jinja): Extra cache keys to consider vars with set (@geido) +- [#30425](https://github.com/apache/superset/pull/30425) fix(dashboard-export): Fixes datasetId is not replaced with datasetUuid in Dashboard export in 4.1.x (@fmannhardt) +- [#30563](https://github.com/apache/superset/pull/30563) fix: Horizon Chart are not working any more (@michael-s-molina) +- [#30564](https://github.com/apache/superset/pull/30564) fix: Incorrect type in config.py (@michael-s-molina) +- [#30560](https://github.com/apache/superset/pull/30560) fix: Unable to parse escaped tables (@michael-s-molina) +- [#30447](https://github.com/apache/superset/pull/30447) fix(explore): don't discard controls on deprecated (@justinpark) +- [#30532](https://github.com/apache/superset/pull/30532) fix(migration): replace unquote with double percentages (@villebro) +- [#30490](https://github.com/apache/superset/pull/30490) fix(Explore): Apply RLS at column values (@geido) +- [#30503](https://github.com/apache/superset/pull/30503) fix(imports): Error when importing charts / dashboards with missing DB credentials (@fisjac) +- [#30350](https://github.com/apache/superset/pull/30350) fix: don't reformat generated queries (@betodealmeida) +- [#30502](https://github.com/apache/superset/pull/30502) fix: Open control with Simple tab selected when there is no column selected (@michael-s-molina) +- [#30491](https://github.com/apache/superset/pull/30491) fix(embedded): sankey charts (@betodealmeida) +- [#30416](https://github.com/apache/superset/pull/30416) fix: Histogram chart not able to use decimal datatype column (@michael-s-molina) +- [#30405](https://github.com/apache/superset/pull/30405) fix: Incorrect hovered items in tooltips (@michael-s-molina) +- [#30393](https://github.com/apache/superset/pull/30393) fix: 
Allows X-Axis Sort By for custom SQL (@michael-s-molina) +- [#30389](https://github.com/apache/superset/pull/30389) fix: Pre-query normalization with custom SQL (@michael-s-molina) +- [#30339](https://github.com/apache/superset/pull/30339) fix: KeyError 'sql' when opening a Trino virtual dataset (@michael-s-molina) +- [#30335](https://github.com/apache/superset/pull/30335) fix(table): Use extras in queries (@Antonio-RiveroMartnez) +- [#30272](https://github.com/apache/superset/pull/30272) fix(dashboard): Invalid owner's name displayed after updates (@justinpark) +- [#30271](https://github.com/apache/superset/pull/30271) fix: unable to disallow csv upload on header menu (@justinpark) +- [#30265](https://github.com/apache/superset/pull/30265) fix(Screenshot): Dashboard screenshot cache key to include state (@geido) +- [#30252](https://github.com/apache/superset/pull/30252) fix(CrossFilters): Do not reload unrelated filters in global scope (@geido) +- [#30215](https://github.com/apache/superset/pull/30215) fix(Fave): Charts and Dashboards fave/unfave do not commit transactions (@geido) +- [#30222](https://github.com/apache/superset/pull/30222) fix(uploads): respect db engine spec's supports_multivalues_insert value for file uploads & enable multi-insert for MSSQL (@sfirke) +- [#30180](https://github.com/apache/superset/pull/30180) fix: filters panel broken due to tabs scroll (@justinpark) +- [#30224](https://github.com/apache/superset/pull/30224) fix(Celery): Pass guest_token as user context is not available in Celery (@geido) +- [#30212](https://github.com/apache/superset/pull/30212) fix(Dashboard download): Download dashboard screenshot/PDF using SupersetClient (@Vitor-Avila) +- [#30200](https://github.com/apache/superset/pull/30200) fix(Embedded): Dashboard screenshot should use GuestUser (@geido) +- [#28706](https://github.com/apache/superset/pull/28706) fix: Chart cache-warmup task fails on Superset 4.0 (@rmasters) +- 
[#30174](https://github.com/apache/superset/pull/30174) fix: set default mysql isolation level to 'READ COMMITTED' (@mistercrunch) +- [#30176](https://github.com/apache/superset/pull/30176) fix: Disable cross filtering on charts with no dimensions (@kgabryje) +- [#30060](https://github.com/apache/superset/pull/30060) fix: Delete modal button with lowercase text (@michael-s-molina) +- [#30171](https://github.com/apache/superset/pull/30171) fix(sqllab): Skip AceEditor in inactive tabs (@justinpark) +- [#30164](https://github.com/apache/superset/pull/30164) fix(native filter): undefined layout type on filterInScope (@justinpark) +- [#30023](https://github.com/apache/superset/pull/30023) fix(plugins): display correct tooltip (fixes #3342) (@jonaschn) +- [#30156](https://github.com/apache/superset/pull/30156) fix: FacePile is requesting avatars when SLACK_ENABLE_AVATARS is false (@michael-s-molina) +- [#30154](https://github.com/apache/superset/pull/30154) fix(sqllab): race condition when updating cursor position (@justinpark) +- [#30139](https://github.com/apache/superset/pull/30139) fix(catalog): Table Schema View with no catalog (@Antonio-RiveroMartnez) +- [#30137](https://github.com/apache/superset/pull/30137) fix: New tooltip inappropriately combines series on mixed chart (@michael-s-molina) +- [#30138](https://github.com/apache/superset/pull/30138) fix: JSON loading logs (@michael-s-molina) +- [#30140](https://github.com/apache/superset/pull/30140) fix: DeckGL legend layout (@michael-s-molina) +- [#30077](https://github.com/apache/superset/pull/30077) fix(accessibility): logo outline on tab navigation, but not on click (@rusackas) +- [#30042](https://github.com/apache/superset/pull/30042) fix: use StrEnum type for GuestTokenResourceType to fix token parsing (@hao-zhuventures) +- [#30073](https://github.com/apache/superset/pull/30073) fix: When hovering Drill By the dashboard is scrolled to the top (@michael-s-molina) +- 
[#30074](https://github.com/apache/superset/pull/30074) fix: Retrieving Slack channels when Slack is disabled (@michael-s-molina) +- [#30019](https://github.com/apache/superset/pull/30019) fix: Partition calls from Jinja context (@michael-s-molina) +- [#30025](https://github.com/apache/superset/pull/30025) fix: Dashboard list row height does not match other lists (@michael-s-molina) +- [#30020](https://github.com/apache/superset/pull/30020) fix(user-dao): return user model instances (@villebro) +- [#29989](https://github.com/apache/superset/pull/29989) fix(screenshots): dashboard screenshots do not capture filter state (@fisjac) +- [#27229](https://github.com/apache/superset/pull/27229) fix: set columns numeric datatypes when exporting to excel (@squalou) +- [#29997](https://github.com/apache/superset/pull/29997) fix(trino): handle missing db in migration (@villebro) +- [#29687](https://github.com/apache/superset/pull/29687) fix: Gamma users shouldn't be able to create roles (@hughhhh) +- [#29884](https://github.com/apache/superset/pull/29884) fix: Security manager incorrect calls (@michael-s-molina) +- [#29993](https://github.com/apache/superset/pull/29993) fix: Duplicated example dataset (@michael-s-molina) +- [#29981](https://github.com/apache/superset/pull/29981) fix: trino thread app missing full context (@dpgaspar) +- [#29978](https://github.com/apache/superset/pull/29978) fix(sqllab): flaky json explore modal due to shallow equality checks for extra data (@justinpark) +- [#29830](https://github.com/apache/superset/pull/29830) fix(ci): remove unused "type: ignore" comment to unblock precommit check in CI (@hainenber) +- [#29956](https://github.com/apache/superset/pull/29956) fix(sqllab): Add abort call on query refresh timeout (@justinpark) +- [#29860](https://github.com/apache/superset/pull/29860) fix: upgrade_catalog_perms and downgrade_catalog_perms implementation (@michael-s-molina) +- [#29953](https://github.com/apache/superset/pull/29953) fix(embedded): 
Remove CSRF requirement for dashboard download API (@Vitor-Avila) +- [#29672](https://github.com/apache/superset/pull/29672) fix(explore): missing column autocomplete in custom SQL (@justinpark) +- [#29840](https://github.com/apache/superset/pull/29840) fix: handle empty catalog when DB supports them (@betodealmeida) +- [#29287](https://github.com/apache/superset/pull/29287) fix: Add user filtering to changed_by. Fixes #27986 (@marre) +- [#29921](https://github.com/apache/superset/pull/29921) fix: add imports back to celery file (@sadpandajoe) +- [#29894](https://github.com/apache/superset/pull/29894) fix(Embedded): Deleting Embedded Dashboards does not commit the transaction (@geido) +- [#29862](https://github.com/apache/superset/pull/29862) fix: update celery config imports (@mistercrunch) +- [#29846](https://github.com/apache/superset/pull/29846) fix: load slack channels earlier (@eschutho) +- [#29805](https://github.com/apache/superset/pull/29805) fix: bump packages to unblock ci (@eschutho) +- [#29802](https://github.com/apache/superset/pull/29802) fix: create permissions on DB import (@betodealmeida) +- [#29780](https://github.com/apache/superset/pull/29780) fix: catalog upgrade/downgrade (@betodealmeida) +- [#29776](https://github.com/apache/superset/pull/29776) fix(Dashboard): Copying a Dashboard does not commit the transaction (@geido) +- [#29721](https://github.com/apache/superset/pull/29721) fix: pass slack recipients correctly (@eschutho) +- [#29681](https://github.com/apache/superset/pull/29681) fix(Database): Refresh catalogs on db update returns database error (@geido) +- [#29669](https://github.com/apache/superset/pull/29669) fix: Use default custom time range time without timezone (@kgabryje) +- [#29667](https://github.com/apache/superset/pull/29667) fix: Dashboard editable title weird behavior when adding spaces (@kgabryje) +- [#29648](https://github.com/apache/superset/pull/29648) fix: Layout of native filters modal with lengthy columns 
(@michael-s-molina) +- [#29647](https://github.com/apache/superset/pull/29647) fix: Loading of native filter column (@michael-s-molina) +- [#29643](https://github.com/apache/superset/pull/29643) fix: Required native filter message wrongfully appearing (@michael-s-molina) +- [#29638](https://github.com/apache/superset/pull/29638) fix(sqllab): prev schema/table options remained on fail (@justinpark) +- [#29567](https://github.com/apache/superset/pull/29567) fix: Add Japanese Translations (@avintonOfficial) +- [#29607](https://github.com/apache/superset/pull/29607) fix(sqllab): Show warning message when deprecated db is selected (@justinpark) +- [#29610](https://github.com/apache/superset/pull/29610) fix: sort schemas when uploading data (@betodealmeida) +- [#29604](https://github.com/apache/superset/pull/29604) fix: schemas for upload API (@betodealmeida) +- [#28496](https://github.com/apache/superset/pull/28496) fix(docs): fix broken indexed link from Google search (@sfirke) +- [#29587](https://github.com/apache/superset/pull/29587) fix(storybook): fix broken Storybook stories during development (@hainenber) +- [#29581](https://github.com/apache/superset/pull/29581) fix: catalog permission check (@betodealmeida) +- [#29579](https://github.com/apache/superset/pull/29579) fix: small fixes to the catalog migration (@betodealmeida) +- [#29566](https://github.com/apache/superset/pull/29566) fix: Trino `get_columns` (@betodealmeida) +- [#29576](https://github.com/apache/superset/pull/29576) fix(dataset import): Support catalog field during dataset import (@Vitor-Avila) +- [#29549](https://github.com/apache/superset/pull/29549) fix: make catalog migration lenient (@betodealmeida) +- [#29412](https://github.com/apache/superset/pull/29412) fix(Tags filter): Filter assets by tag ID (@Vitor-Avila) +- [#29548](https://github.com/apache/superset/pull/29548) fix: babel_update script crash (@CodeWithEmad) +- [#29530](https://github.com/apache/superset/pull/29530) fix: prevent guest 
users from changing columns (@betodealmeida) +- [#29538](https://github.com/apache/superset/pull/29538) fix(websocket): add error handling (@harshit2283) +- [#29330](https://github.com/apache/superset/pull/29330) fix: refactor view error handling into a separate module (@mistercrunch) +- [#29525](https://github.com/apache/superset/pull/29525) fix: Table time comparison breaking after form data update (@kgabryje) +- [#29520](https://github.com/apache/superset/pull/29520) fix(plugins): Big Number with Time Comparison (@Antonio-RiveroMartnez) +- [#29517](https://github.com/apache/superset/pull/29517) fix(plugins): Fix dashboard filter for Table and Big Number with Time Comparison (@Antonio-RiveroMartnez) +- [#29454](https://github.com/apache/superset/pull/29454) fix: add more disallowed pg functions (@dpgaspar) +- [#29470](https://github.com/apache/superset/pull/29470) fix: remove info from datasource access error (@dpgaspar) +- [#28364](https://github.com/apache/superset/pull/28364) fix: Enable explore button on SQL Lab view when connected to Apache Pinot as a database (@soumitra-st) +- [#29456](https://github.com/apache/superset/pull/29456) fix: Dashboard hangs when initial filters cannot be loaded (@michael-s-molina) +- [#29461](https://github.com/apache/superset/pull/29461) fix: OAuth2 in async DBs (@betodealmeida) +- [#29446](https://github.com/apache/superset/pull/29446) fix: re-add missing code from PR #28132 (@sadpandajoe) +- [#29451](https://github.com/apache/superset/pull/29451) fix(metastore-cache): import dao in methods (@villebro) +- [#29420](https://github.com/apache/superset/pull/29420) fix: SQL label missing for non-group-by queries (@hexcafe) +- [#29392](https://github.com/apache/superset/pull/29392) fix(readme): changing video from mp4 to webm format (@rusackas) +- [#29368](https://github.com/apache/superset/pull/29368) fix(tox): Address issue with generative environment variables (@john-bodley) +- 
[#29367](https://github.com/apache/superset/pull/29367) fix(explore): don't respect y-axis formatting (@justinpark) +- [#29321](https://github.com/apache/superset/pull/29321) fix(Query): Parse html string error responses to avoid displaying raw HTML as error message (@rtexelm) +- [#27777](https://github.com/apache/superset/pull/27777) fix: default logging (@jessie-ross) +- [#29352](https://github.com/apache/superset/pull/29352) fix(tests): Ensure fixture is invoked (@john-bodley) +- [#29345](https://github.com/apache/superset/pull/29345) fix(revert 27883): Excess padding in horizontal Bar charts (@michael-s-molina) +- [#14817](https://github.com/apache/superset/pull/14817) fix: actually write changes on "superset import-datasources" (@regisb) +- [#29349](https://github.com/apache/superset/pull/29349) fix(explore): restored hidden field values has discarded (@justinpark) +- [#29346](https://github.com/apache/superset/pull/29346) fix: Cannot delete empty column inside a tab using the dashboard editor (@michael-s-molina) +- [#29314](https://github.com/apache/superset/pull/29314) fix: Remove recursive repr call (@jessie-ross) +- [#28753](https://github.com/apache/superset/pull/28753) fix: don't strip SQL comments in Explore - 2nd try (@mistercrunch) +- [#28429](https://github.com/apache/superset/pull/28429) fix(ui): Disable ability to export data when user does not have the correct permission (@edjannoo) +- [#27439](https://github.com/apache/superset/pull/27439) fix(Dashboard): Color inconsistency on refreshes and conflicts (@geido) +- [#29286](https://github.com/apache/superset/pull/29286) fix(key-value): use flush instead of commit (@villebro) +- [#29301](https://github.com/apache/superset/pull/29301) fix(metastore-cache): prune before add (@villebro) +- [#29279](https://github.com/apache/superset/pull/29279) fix(sqllab): excessive API calls for schemas (@justinpark) +- [#29278](https://github.com/apache/superset/pull/29278) fix(sqllab): invalid empty state on switch 
tab (@justinpark) +- [#29291](https://github.com/apache/superset/pull/29291) fix: filters not updating with force update when caching is enabled (@ka-weihe) +- [#28744](https://github.com/apache/superset/pull/28744) fix(permalink): adding anchor to dashboard permalink generation (@fisjac) +- [#29257](https://github.com/apache/superset/pull/29257) fix: Catalog with restricted permissions produces an error during database connection (@geido) +- [#29260](https://github.com/apache/superset/pull/29260) fix: Custom SQL filter control (@michael-s-molina) +- [#29248](https://github.com/apache/superset/pull/29248) fix(sqllab): Do not strip comments when executing SQL statements (@john-bodley) +- [#29234](https://github.com/apache/superset/pull/29234) fix(Explore): Keep necessary form data to allow query mode switching (@rtexelm) +- [#28755](https://github.com/apache/superset/pull/28755) fix: Workaround for Pandas.DataFrame.to_csv bug (@john-bodley) +- [#29230](https://github.com/apache/superset/pull/29230) fix(sqllab): run previous state query (@justinpark) +- [#29229](https://github.com/apache/superset/pull/29229) fix: Improving handling for tag relationship when deleting assets v2 (@Vitor-Avila) +- [#29170](https://github.com/apache/superset/pull/29170) fix(maps): Load indian map borders correctly (Restores #24927 fixes) (@PushpenderSaini0) +- [#29117](https://github.com/apache/superset/pull/29117) fix: Improving handling for tag relationship when deleting assets (@Vitor-Avila) +- [#29119](https://github.com/apache/superset/pull/29119) fix(mixed-timeseries-plugin): Second query stacks stacked on top of first query series (@kgabryje) +- [#29110](https://github.com/apache/superset/pull/29110) fix: CI failure due to Default React import (@justinpark) +- [#29091](https://github.com/apache/superset/pull/29091) fix(helm): Set priorityClassName to pods (superset, celeryBeat, celeryBeatFlower, celeryBeatWorker, celeryBeatWebsocket, jobs) (@sabyrzhan) +- 
[#28932](https://github.com/apache/superset/pull/28932) fix(embedded): add missing GUEST_TOKEN_HEADER_NAME to bootstrap data (@hexcafe) +- [#29098](https://github.com/apache/superset/pull/29098) fix: Cypress CI process while opening PR from a fork (@mistercrunch) +- [#28572](https://github.com/apache/superset/pull/28572) fix(i18n): improved Russian translation (@goldjee) +- [#29084](https://github.com/apache/superset/pull/29084) fix: Remove BASE_AXIS from pre-query (@john-bodley) +- [#29081](https://github.com/apache/superset/pull/29081) fix(explore): Drill to detail truncates int64 IDs (@justinpark) +- [#29089](https://github.com/apache/superset/pull/29089) fix: CI errors as the result of removing React imports (@michael-s-molina) +- [#27017](https://github.com/apache/superset/pull/27017) fix(embedded-sdk): add accessible title to iframe (@bhaugeea) +- [#28797](https://github.com/apache/superset/pull/28797) fix: use channel id with new slack api for file uploads (@eschutho) +- [#28771](https://github.com/apache/superset/pull/28771) fix(Mixed Chart Filter Control): Allow delete condition for `adhoc_filters_b` (@rtexelm) +- [#28783](https://github.com/apache/superset/pull/28783) fix: use upload v2 for slack (@eschutho) +- [#28772](https://github.com/apache/superset/pull/28772) fix(dashboard): unable to resize due to the overlapped droptarget (@justinpark) +- [#28750](https://github.com/apache/superset/pull/28750) fix: do not close database modal on mask click (@eschutho) +- [#28745](https://github.com/apache/superset/pull/28745) fix(reports): Update the element class to wait for when taking a screenshot (@Vitor-Avila) +- [#28749](https://github.com/apache/superset/pull/28749) fix(sqllab): Sort db selector options by the API order (@justinpark) +- [#28765](https://github.com/apache/superset/pull/28765) fix(docs): fix url typo to fix a broken image (@rusackas) +- [#28639](https://github.com/apache/superset/pull/28639) fix: adds the ability to disallow SQL functions 
per engine (@dpgaspar) +- [#28609](https://github.com/apache/superset/pull/28609) fix: dashboard performance (@dpgaspar) +- [#28653](https://github.com/apache/superset/pull/28653) fix: Handling of column types for Presto, Trino, et al. (@john-bodley) +- [#28633](https://github.com/apache/superset/pull/28633) fix(ci): restrict issue comments to members or owners (@dpgaspar) +- [#28613](https://github.com/apache/superset/pull/28613) fix: revert "fix(presto preview): re-enable schema previsualization for Trino/Presto table/schemas" (@john-bodley) +- [#28568](https://github.com/apache/superset/pull/28568) fix: add listener to repaint on visibility change for canvas (@eschutho) +- [#28566](https://github.com/apache/superset/pull/28566) fix: Fixes workflow Applitools Cypress (@geido) +- [#28349](https://github.com/apache/superset/pull/28349) fix: Add back description column to saved queries #12431 (@imancrsrk) +- [#28567](https://github.com/apache/superset/pull/28567) fix: Revert "fix: don't strip SQL comments in Explore (#28363)" (@michael-s-molina) +- [#28497](https://github.com/apache/superset/pull/28497) fix: Correction translation (@aehanno) +- [#28555](https://github.com/apache/superset/pull/28555) fix(explore): hide a control wrapped with StashFormDataContainer correctly (@justinpark) +- [#28487](https://github.com/apache/superset/pull/28487) fix(i18n): Adding and modifying Japanese translations (@aikawa-ohno) +- [#28550](https://github.com/apache/superset/pull/28550) fix(Dashboard): Prevent scroll when hovering filters (@geido) +- [#28423](https://github.com/apache/superset/pull/28423) fix: move to slack-sdk files_upload_v2 (@mistercrunch) +- [#28486](https://github.com/apache/superset/pull/28486) fix: utf-16 json encoder support (@eyalezer) +- [#28512](https://github.com/apache/superset/pull/28512) fix: improve df to records performance (@dpgaspar) +- [#28507](https://github.com/apache/superset/pull/28507) fix(dashboard): invalid drop item on a tab (@justinpark) 
+- [#28432](https://github.com/apache/superset/pull/28432) fix: Time shifts calculation for ECharts plugins (@michael-s-molina) +- [#28144](https://github.com/apache/superset/pull/28144) fix: bump sqlparse to 0.5.0 (@dpgaspar) +- [#26782](https://github.com/apache/superset/pull/26782) fix(presto preview): re-enable schema previsualization for Trino/Presto table/schemas (@brouberol) +- [#28451](https://github.com/apache/superset/pull/28451) fix: jwt extended broken by flask bump (@dpgaspar) +- [#28409](https://github.com/apache/superset/pull/28409) fix(ar-modal): updateNotificationSettings not updating state (@fisjac) +- [#28457](https://github.com/apache/superset/pull/28457) fix: Color scheme control crashing when dashboardId present (@kgabryje) +- [#28442](https://github.com/apache/superset/pull/28442) fix(ci): fix failed `docker-build` CI job (@hainenber) +- [#28433](https://github.com/apache/superset/pull/28433) fix(docs): add missing link to meta-cross-db feature flag docs (@sfirke) +- [#28395](https://github.com/apache/superset/pull/28395) fix(dashboard): Change class name on last Droppable in a column (@rtexelm) +- [#28419](https://github.com/apache/superset/pull/28419) fix: run some CI tests against previous python version (@mistercrunch) +- [#28415](https://github.com/apache/superset/pull/28415) fix(SIP-95): missing catalog cache key (@justinpark) +- [#28418](https://github.com/apache/superset/pull/28418) fix: set supersetbot orglabel to always succeed (@mistercrunch) +- [#28412](https://github.com/apache/superset/pull/28412) fix(docs): fix typo in development.mdx (@eschutho) +- [#28410](https://github.com/apache/superset/pull/28410) fix: pass catalog when estimating query cost (@betodealmeida) +- [#28413](https://github.com/apache/superset/pull/28413) fix: table autocomplete should pass catalog (@betodealmeida) +- [#28408](https://github.com/apache/superset/pull/28408) fix: export/import catalogs (@betodealmeida) +- 
[#28396](https://github.com/apache/superset/pull/28396) fix: type annotation breaking on py3.9 (@dpgaspar) +- [#28397](https://github.com/apache/superset/pull/28397) fix: tests on database, dataset, saved_queries apis (@dpgaspar) +- [#28312](https://github.com/apache/superset/pull/28312) fix(explore): hide advanced analytics for non temporal xaxis (@justinpark) +- [#28389](https://github.com/apache/superset/pull/28389) fix: update links to reference docs listing Superset issue codes (@jonaschn) +- [#28368](https://github.com/apache/superset/pull/28368) fix: Contribution percentages for ECharts plugins (@michael-s-molina) +- [#28386](https://github.com/apache/superset/pull/28386) fix: Scroll to top when selecting a global dashboard tab (@michael-s-molina) +- [#28384](https://github.com/apache/superset/pull/28384) fix: Revert "chore(build): uplift `webpack`-related packages to v5 (#28342)" (@kgabryje) +- [#28363](https://github.com/apache/superset/pull/28363) fix: don't strip SQL comments in Explore (@mistercrunch) +- [#28341](https://github.com/apache/superset/pull/28341) fix: Remedy logic for UpdateDatasetCommand uniqueness check (@john-bodley) +- [#28334](https://github.com/apache/superset/pull/28334) fix: Small tweaks for Line and Area chart migrations (ECharts) (@michael-s-molina) +- [#28266](https://github.com/apache/superset/pull/28266) fix: use pessimistic json encoder in SQL Lab (@mistercrunch) +- [#28343](https://github.com/apache/superset/pull/28343) fix(ci): correct input type for `allow-dependencies-licenses` in Dependency Review GH action (@hainenber) +- [#28340](https://github.com/apache/superset/pull/28340) fix: database logos look stretched (@mistercrunch) +- [#28333](https://github.com/apache/superset/pull/28333) fix(website): links corrected (@frankzimper) +- [#28113](https://github.com/apache/superset/pull/28113) fix: Rename legacy line and area charts (@john-bodley) +- [#28279](https://github.com/apache/superset/pull/28279) fix(sql_parse): Ignore 
USE SQL keyword when determining SELECT statement (@john-bodley) +- [#28319](https://github.com/apache/superset/pull/28319) fix(docs): prevent browser to download the entire video in first page load + fix empty `controls` attribute (@hainenber) +- [#28322](https://github.com/apache/superset/pull/28322) fix(sql_parse): Add Apache Spark to SQLGlot dialect mapping (@john-bodley) +- [#28205](https://github.com/apache/superset/pull/28205) fix: all_database_access should enable access to all datasets/charts/dashboards (@mistercrunch) +- [#28269](https://github.com/apache/superset/pull/28269) fix(explore): cannot reorder dnd of Metrics (@justinpark) +- [#28283](https://github.com/apache/superset/pull/28283) fix: silence docker-compose useless warnings (@mistercrunch) +- [#28271](https://github.com/apache/superset/pull/28271) fix: % replace in `values_for_column` (@betodealmeida) +- [#28277](https://github.com/apache/superset/pull/28277) fix(ci): adding codecov token (@rusackas) +- [#28225](https://github.com/apache/superset/pull/28225) fix(Dev-Server): Edit ChartPropsConfig reexport to be a type object (@rtexelm) +- [#28232](https://github.com/apache/superset/pull/28232) fix(Webpack dev-server warnings): Add ignoreWarning to webpack config for @data-ui error (@rtexelm) +- [#28242](https://github.com/apache/superset/pull/28242) fix(dashboard): unable to drop tabs in columns (@justinpark) +- [#28229](https://github.com/apache/superset/pull/28229) fix(Webpack dev-server build warning): Create false value alias for `moment-with-locales` (@rtexelm) +- [#28241](https://github.com/apache/superset/pull/28241) fix(explore): temporal column mixin (@justinpark) +- [#28156](https://github.com/apache/superset/pull/28156) fix(sqllab): invalid css scope for ace editor autocomplete (@justinpark) +- [#28222](https://github.com/apache/superset/pull/28222) fix: Dremio alias (@betodealmeida) +- [#28152](https://github.com/apache/superset/pull/28152) fix(sql_parse): Provide more lenient logic 
when extracting latest[_sub]\_partition (@john-bodley) +- [#28226](https://github.com/apache/superset/pull/28226) fix(maps): adds Crimea back to Ukraine 🇺🇦 (@rusackas) +- [#28197](https://github.com/apache/superset/pull/28197) fix: Remove deprecated ignoreTestFiles from Applitools Cypress (@geido) +- [#28189](https://github.com/apache/superset/pull/28189) fix(docs): ERD docs fail on master (@mistercrunch) +- [#27554](https://github.com/apache/superset/pull/27554) fix(AlertsReports): making log retention "None" option valid (@fisjac) +- [#28117](https://github.com/apache/superset/pull/28117) fix(sql_parse): Support Jinja format() filter when extracting latest[_sub]\_partition (@john-bodley) +- [#27195](https://github.com/apache/superset/pull/27195) fix: Upgrade eyes-cypress to latest (@geido) +- [#28061](https://github.com/apache/superset/pull/28061) fix: switch off dependabot for pip/python (@mistercrunch) +- [#28054](https://github.com/apache/superset/pull/28054) fix(Dashboard): Support "Edit chart" click on a new window (@geido) +- [#28036](https://github.com/apache/superset/pull/28036) fix: Dynamic filter does not show all values on blur/clear events (@michael-s-molina) +- [#28018](https://github.com/apache/superset/pull/28018) fix: bump client side chart timeouts to use the SUPERSET_WEBSERVER_TIMEOUT (@eschutho) +- [#28039](https://github.com/apache/superset/pull/28039) fix: support docker/.env-local for docker-compose (@mistercrunch) +- [#28017](https://github.com/apache/superset/pull/28017) fix: Select is accepting unknown pasted values when `allowNewOptions` is false (@michael-s-molina) +- [#27996](https://github.com/apache/superset/pull/27996) fix: Incorrect onChange value when an unloaded value is pasted into AsyncSelect (@michael-s-molina) +- [#27934](https://github.com/apache/superset/pull/27934) fix(time_offset): improved LIMIT-handling in advanced analytics (@Antonio-RiveroMartnez) +- [#27992](https://github.com/apache/superset/pull/27992) fix(docs): 
add missing code formatting, fix broken link (@sfirke) +- [#27941](https://github.com/apache/superset/pull/27941) fix(drillby): Enable DrillBy in charts w/o filters (dimensions) (@sowo) +- [#27994](https://github.com/apache/superset/pull/27994) fix(superset-frontend): remove unused `@superset-ui/plugin-chart-period-over-period-kpi` package (@corocoto) +- [#27239](https://github.com/apache/superset/pull/27239) fix(alerts/reports): removing duplicate notification method options (@fisjac) +- [#27974](https://github.com/apache/superset/pull/27974) fix(node): bump node version in nvmrc files (@rusackas) +- [#27963](https://github.com/apache/superset/pull/27963) fix(asf): removing google hosted analytics and fonts (@rusackas) +- [#27968](https://github.com/apache/superset/pull/27968) fix(Dashboard): Add aria-label to filters and search forms (@geido) +- [#27955](https://github.com/apache/superset/pull/27955) fix(node): missed one bump from node 16 to 18. (@rusackas) +- [#27701](https://github.com/apache/superset/pull/27701) fix: useTruncation infinite loop, reenable dashboard cross links on ChartList (@kgabryje) +- [#27904](https://github.com/apache/superset/pull/27904) fix: improve change detection for GHAs (@mistercrunch) +- [#27942](https://github.com/apache/superset/pull/27942) fix(docs): CSP mods to re-enable Algolia search (@rusackas) +- [#27926](https://github.com/apache/superset/pull/27926) fix: Locale sent to frontend (@michael-s-molina) +- [#27925](https://github.com/apache/superset/pull/27925) fix: docker-release GHA fails with pathspec error (@mistercrunch) +- [#27922](https://github.com/apache/superset/pull/27922) fix: fix-zh-translation-2 (@listeng) +- [#25407](https://github.com/apache/superset/pull/25407) fix(frontend): allow "constructor" property in response data (@SpencerTorres) +- [#27912](https://github.com/apache/superset/pull/27912) fix(docs): restoring search capability with new public key (@rusackas) +- 
[#27919](https://github.com/apache/superset/pull/27919) fix: add mariadb engine spec same as MySQL (@dpgaspar) +- [#27593](https://github.com/apache/superset/pull/27593) fix(Dashboard): Add border to row when hovering HoverMenu in edit mode (@rtexelm) +- [#27794](https://github.com/apache/superset/pull/27794) fix: corrects some inaccuracies zh translation (@listeng) +- [#27889](https://github.com/apache/superset/pull/27889) fix(pylint): Address errors/warnings introduced by #27867 (@john-bodley) +- [#27883](https://github.com/apache/superset/pull/27883) fix(bar-chart): change legend padding for horizontal orientation (@lilykuang) +- [#27861](https://github.com/apache/superset/pull/27861) fix: run pip-compile-multi --no-upgrade (@mistercrunch) +- [#27860](https://github.com/apache/superset/pull/27860) fix: GHA update-monorepo-lockfiles (@mistercrunch) +- [#27700](https://github.com/apache/superset/pull/27700) fix: row limits & row count labels are confusing (@mistercrunch) +- [#27855](https://github.com/apache/superset/pull/27855) fix: pkg-config dependency in Dockerfile (@mistercrunch) +- [#27845](https://github.com/apache/superset/pull/27845) fix(dashboard): missing null check in error extra (@justinpark) +- [#27846](https://github.com/apache/superset/pull/27846) fix: alembic's 'superset db migrate' fails with CompileError (@mistercrunch) +- [#27785](https://github.com/apache/superset/pull/27785) fix: Select's storybook (@michael-s-molina) +- [#27710](https://github.com/apache/superset/pull/27710) fix: Pylint errors on master (@michael-s-molina) +- [#27714](https://github.com/apache/superset/pull/27714) fix: Revert "chore: bump pylint (#27711)" (@michael-s-molina) +- [#27611](https://github.com/apache/superset/pull/27611) fix(dashboard,css): center align 'waiting on database' (@mistercrunch) +- [#27608](https://github.com/apache/superset/pull/27608) fix(docker): error around missing requirements/base.txt (@mistercrunch) +- 
[#27595](https://github.com/apache/superset/pull/27595) fix: skip another Hive test (@betodealmeida) +- [#27523](https://github.com/apache/superset/pull/27523) fix: Hive integration test (@betodealmeida) +- [#27541](https://github.com/apache/superset/pull/27541) fix: typo in configuring-superset.mdx (@armando-fandango) +- [#27502](https://github.com/apache/superset/pull/27502) fix(big-number-chart): number format is not applying to percentage number of the time comparison (@lilykuang) +- [#27515](https://github.com/apache/superset/pull/27515) fix: master build 4th attempt (@mistercrunch) +- [#27514](https://github.com/apache/superset/pull/27514) fix: another attempt at fixing docker master builds (@mistercrunch) +- [#27507](https://github.com/apache/superset/pull/27507) fix: master docker build is broken (@mistercrunch) +- [#27503](https://github.com/apache/superset/pull/27503) fix: docker builds in master fail (@mistercrunch) +- [#27209](https://github.com/apache/superset/pull/27209) fix: Allow only dttm columns in comparison filter in Period over Period chart (@kgabryje) +- [#27312](https://github.com/apache/superset/pull/27312) fix(docs): just a missing backtick (@rusackas) +- [#27303](https://github.com/apache/superset/pull/27303) fix(ci): check file changes for python should include the scripts folders (@dpgaspar) +- [#27296](https://github.com/apache/superset/pull/27296) fix: Revert "chore: Replace deprecated command with environment file (#240… (@eschutho) +- [#27282](https://github.com/apache/superset/pull/27282) fix(ci): docker builds don't work from remote forks (@mistercrunch) +- [#27280](https://github.com/apache/superset/pull/27280) fix(docs): more CSP tweaks (@rusackas) +- [#27279](https://github.com/apache/superset/pull/27279) fix(docs): more csp tweaks (@rusackas) +- [#27278](https://github.com/apache/superset/pull/27278) fix(docs): even more CSP adjustments... 
(@rusackas) +- [#27277](https://github.com/apache/superset/pull/27277) fix(docs): Even more access in CSP policies! (@rusackas) +- [#27275](https://github.com/apache/superset/pull/27275) fix(docs): More CSP touchups (@rusackas) +- [#27274](https://github.com/apache/superset/pull/27274) fix(docs): removing meta tag CSP, poking more holes in htaccess (@rusackas) +- [#27261](https://github.com/apache/superset/pull/27261) fix: docker CI job doesn't trigger on master (@mistercrunch) +- [#27259](https://github.com/apache/superset/pull/27259) fix(docs site): CSP changes, take 2 (@rusackas) +- [#27256](https://github.com/apache/superset/pull/27256) fix(docs site): Opening up CSP for 3rd party frame content. (@rusackas) +- [#27203](https://github.com/apache/superset/pull/27203) fix(plugin-chart-period-over-period-kpi): Blank chart when switching from BigNumberTotal (@kgabryje) +- [#27179](https://github.com/apache/superset/pull/27179) fix: docker-compose point to master tag (@dpgaspar) +- [#27168](https://github.com/apache/superset/pull/27168) fix: CSRF exempt unit_tests (@dpgaspar) + +**Others** + +- [#30729](https://github.com/apache/superset/pull/30729) chore: bump werkzeug to address vulnerability (@dpgaspar) +- [#30733](https://github.com/apache/superset/pull/30733) ci: Add Python 3.11 images to Docker Hub (@padbk) +- [#30397](https://github.com/apache/superset/pull/30397) chore: alter scripts/cypress_run to run one file per command + retry (@mistercrunch) +- [#30354](https://github.com/apache/superset/pull/30354) chore: split cypress files for less memory (@eschutho) +- [#30719](https://github.com/apache/superset/pull/30719) chore(Dashboard): Simplify scoping logic for cross/native filters (@geido) +- [#29937](https://github.com/apache/superset/pull/29937) chore: Update to Dockerfile to get creating releases to work (@sadpandajoe) +- [#29874](https://github.com/apache/superset/pull/29874) perf: Implement Echarts treeshaking (@kgabryje) +- 
[#26257](https://github.com/apache/superset/pull/26257) chore(chart-controls): migrate enzyme to RTL (@justinpark) +- [#30417](https://github.com/apache/superset/pull/30417) chore: improve DML check (@betodealmeida) +- [#30258](https://github.com/apache/superset/pull/30258) chore: organize SQL parsing files (@betodealmeida) +- [#30274](https://github.com/apache/superset/pull/30274) chore: move SLACK_ENABLE_AVATARS from config to feature flag (@mistercrunch) +- [#30173](https://github.com/apache/superset/pull/30173) chore(sqllab): Add shortcuts for switching tabs (@justinpark) +- [#30213](https://github.com/apache/superset/pull/30213) chore: remove duplicate `_process_sql_expression` (@betodealmeida) +- [#30243](https://github.com/apache/superset/pull/30243) chore(docs): note that release-tagged docker images no longer ship with metadata db drivers as of 4.1.0 (@sfirke) +- [#26258](https://github.com/apache/superset/pull/26258) chore(shared components): Migrate enzyme to RTL (@justinpark) +- [#30144](https://github.com/apache/superset/pull/30144) docs: document how docker-compose-image-tag requires -dev suffixed images (@mistercrunch) +- [#29943](https://github.com/apache/superset/pull/29943) chore: improve mask/unmask encrypted_extra (@betodealmeida) +- [#29936](https://github.com/apache/superset/pull/29936) chore: Allow auto pruning of the query table (@michael-s-molina) +- [#29893](https://github.com/apache/superset/pull/29893) chore: Logs the duration of migrations execution (@michael-s-molina) +- [#29262](https://github.com/apache/superset/pull/29262) chore: Add the 4.1 release notes (@sadpandajoe) +- [#29666](https://github.com/apache/superset/pull/29666) refactor(ProgressBar): Upgrade ProgressBar to Antd 5 (@geido) +- [#29631](https://github.com/apache/superset/pull/29631) docs: fix query typo in creating-your-first-dashboard.mdx (@Jaswanth-Sriram-Veturi) +- [#29650](https://github.com/apache/superset/pull/29650) chore: add catalog_access to 
OBJECT_SPEC_PERMISSIONS (@betodealmeida) +- [#29594](https://github.com/apache/superset/pull/29594) refactor: Remove dead code from the Word Cloud plugin (@michael-s-molina) +- [#29637](https://github.com/apache/superset/pull/29637) chore: Adds 4.1.0 RC1 data to CHANGELOG.md and UPDATING.md (@sadpandajoe) +- [#29272](https://github.com/apache/superset/pull/29272) refactor(Dashboard): Fetch dashboard screenshot via dedicated endpoint (@geido) +- [#29593](https://github.com/apache/superset/pull/29593) refactor(Tag): Upgrade Tag and TagsList to Ant Design 5 (@geido) +- [#29612](https://github.com/apache/superset/pull/29612) docs: fix code comment explaining local override (@oscep) +- [#29602](https://github.com/apache/superset/pull/29602) chore: Clear redux localStorage on logout (@geido) +- [#29600](https://github.com/apache/superset/pull/29600) chore: Updates CHANGELOG.md with 4.0.2 data (@michael-s-molina) +- [#28124](https://github.com/apache/superset/pull/28124) docs(Database): Clarify host value expected when running in docker (@Carmageddon) +- [#28481](https://github.com/apache/superset/pull/28481) chore(docs): create architecture page (@sfirke) +- [#29603](https://github.com/apache/superset/pull/29603) docs(contributing): removing old blog post link (@rusackas) +- [#29599](https://github.com/apache/superset/pull/29599) docs: update CVEs for 4.0.2 (@dpgaspar) +- [#29552](https://github.com/apache/superset/pull/29552) chore: cleanup documentation (@CodeWithEmad) +- [#29487](https://github.com/apache/superset/pull/29487) docs: Added Keycloak auth configuration (@lindner-tj) +- [#29436](https://github.com/apache/superset/pull/29436) chore(deps): bump deck.gl from 8.9.22 to 9.0.20 in /superset-frontend (@dependabot[bot]) +- [#29537](https://github.com/apache/superset/pull/29537) docs(intro): Add OceanBase to the Supported Databases section of readme.md. 
(@yuanoOo) +- [#29437](https://github.com/apache/superset/pull/29437) chore(deps): bump regenerator-runtime from 0.13.11 to 0.14.1 in /superset-frontend (@dependabot[bot]) +- [#29529](https://github.com/apache/superset/pull/29529) chore(deps): bump deck.gl from 8.9.22 to 9.0.21 in /superset-frontend (@dependabot[bot]) +- [#29510](https://github.com/apache/superset/pull/29510) docs: Add frontend dependency installation steps (@CodeWithEmad) +- [#29124](https://github.com/apache/superset/pull/29124) refactor: Upgrade Badge component to Ant Design 5 (@geido) +- [#29414](https://github.com/apache/superset/pull/29414) chore(build): sync Jest version across plugins (@hainenber) +- [#29486](https://github.com/apache/superset/pull/29486) docs: Add Vasu and Jamie to the Users List (@vasu-ram) +- [#29511](https://github.com/apache/superset/pull/29511) docs: cleanup markdown warnings (@CodeWithEmad) +- [#29389](https://github.com/apache/superset/pull/29389) refactor: Upgrade Card to Ant Design 5 (@geido) +- [#29493](https://github.com/apache/superset/pull/29493) chore(Home): Avoid firing API requests when a custom Home is used (@Vitor-Avila) +- [#29459](https://github.com/apache/superset/pull/29459) chore(utils): Support select_columns with getUserOwnedObjects and split recentActivityObjs (@Vitor-Avila) +- [#29476](https://github.com/apache/superset/pull/29476) chore: run babel_update.sh to update po files (@mistercrunch) +- [#29377](https://github.com/apache/superset/pull/29377) chore(i18n): Translated charts and filters into Russian (@goldjee) +- [#29468](https://github.com/apache/superset/pull/29468) docs(docker compose): fix step 4 list formatting (@easontm) +- [#29426](https://github.com/apache/superset/pull/29426) chore(deps): bump deck.gl from 9.0.12 to 9.0.20 in /superset-frontend/plugins/legacy-preset-chart-deckgl (@dependabot[bot]) +- [#29425](https://github.com/apache/superset/pull/29425) chore(deps-dev): update @types/lodash requirement from ^4.17.4 to ^4.17.6 in 
/superset-frontend/plugins/plugin-chart-handlebars (@dependabot[bot]) +- [#29434](https://github.com/apache/superset/pull/29434) chore(deps): bump actions/checkout from 2 to 4 (@dependabot[bot]) +- [#29429](https://github.com/apache/superset/pull/29429) chore(deps-dev): bump webpack from 5.91.0 to 5.92.1 in /docs (@dependabot[bot]) +- [#29428](https://github.com/apache/superset/pull/29428) chore(deps): bump @algolia/client-search from 4.23.3 to 4.24.0 in /docs (@dependabot[bot]) +- [#29439](https://github.com/apache/superset/pull/29439) chore(deps): bump react-markdown from 8.0.3 to 8.0.7 in /superset-frontend (@dependabot[bot]) +- [#29447](https://github.com/apache/superset/pull/29447) chore: move all GHAs to ubuntu-22.04 (@mistercrunch) +- [#29442](https://github.com/apache/superset/pull/29442) chore: Added 10Web to the list of organizations that use Apache Superset (@saghatelian) +- [#29344](https://github.com/apache/superset/pull/29344) chore(key-value): convert command to dao (@villebro) +- [#29423](https://github.com/apache/superset/pull/29423) chore(deps-dev): bump ts-jest from 29.1.2 to 29.1.5 in /superset-websocket (@dependabot[bot]) +- [#29435](https://github.com/apache/superset/pull/29435) chore(deps-dev): bump eslint-import-resolver-typescript from 2.5.0 to 3.6.1 in /superset-frontend (@dependabot[bot]) +- [#29433](https://github.com/apache/superset/pull/29433) chore(deps): bump rehype-raw from 6.1.1 to 7.0.0 in /superset-frontend (@dependabot[bot]) +- [#29432](https://github.com/apache/superset/pull/29432) chore(deps-dev): bump typescript from 5.4.5 to 5.5.2 in /docs (@dependabot[bot]) +- [#29431](https://github.com/apache/superset/pull/29431) chore(deps): bump stream from 0.0.2 to 0.0.3 in /docs (@dependabot[bot]) +- [#29413](https://github.com/apache/superset/pull/29413) docs: Update INTHEWILD.md with Aveti Learning (@TheShubhendra) +- [#29399](https://github.com/apache/superset/pull/29399) docs: update INTHEWILD.md with bluquist (@ari-jane) +- 
[#29405](https://github.com/apache/superset/pull/29405) chore(frontend): remove obsolete ESLint rules in tests (@hainenber) +- [#24969](https://github.com/apache/superset/pull/24969) chore(dao/command): Add transaction decorator to try to enforce "unit of work" (@john-bodley) +- [#29380](https://github.com/apache/superset/pull/29380) refactor(src/explore/comp/controls/metricControl): migrate Enzyme test to RTL syntax (@hainenber) +- [#29400](https://github.com/apache/superset/pull/29400) docs: fix typos (@jansule) +- [#28816](https://github.com/apache/superset/pull/28816) chore(deps): bump scroll-into-view-if-needed from 2.2.28 to 3.1.0 in /superset-frontend (@dependabot[bot]) +- [#29391](https://github.com/apache/superset/pull/29391) chore(Table): Add aria-label to Table page size selector (@geido) +- [#29390](https://github.com/apache/superset/pull/29390) docs: fix typo in docker compose doc (@jansule) +- [#29388](https://github.com/apache/superset/pull/29388) ci: remove update repo on issue comment (@dpgaspar) +- [#29386](https://github.com/apache/superset/pull/29386) chore(tests): Remove unnecessary mock (@john-bodley) +- [#29381](https://github.com/apache/superset/pull/29381) chore(security): Clean up session/commit logic (@john-bodley) +- [#29371](https://github.com/apache/superset/pull/29371) chore(ci): Start Celery worker as a background process (@john-bodley) +- [#29366](https://github.com/apache/superset/pull/29366) chore(tests): Mark TestConnectionDatabaseCommand as non-test related (@john-bodley) +- [#29353](https://github.com/apache/superset/pull/29353) refactor(Homepage): Migrate Home.test to RTL (@rtexelm) +- [#29356](https://github.com/apache/superset/pull/29356) chore(tests): Fix MySQL logic (@john-bodley) +- [#29355](https://github.com/apache/superset/pull/29355) chore(tests): Cleanup Celery tests (@john-bodley) +- [#29360](https://github.com/apache/superset/pull/29360) chore: Rename Totals to Summary in table chart (@michael-s-molina) +- 
[#29337](https://github.com/apache/superset/pull/29337) docs: Update INTHEWILD.md with Bluesquare (@madewulf) +- [#29327](https://github.com/apache/superset/pull/29327) chore(e2e): simplify Cypress record key usage (@rusackas) +- [#29325](https://github.com/apache/superset/pull/29325) refactor: Adds the sort_by_metric control to sharedControls (@michael-s-molina) +- [#29313](https://github.com/apache/superset/pull/29313) docs: update CVEs fixed on 4.0.1 and 3.1.3 (@dpgaspar) +- [#28296](https://github.com/apache/superset/pull/28296) build(deps): bump deck.gl from 9.0.6 to 9.0.12 in /superset-frontend/plugins/legacy-preset-chart-deckgl (@dependabot[bot]) +- [#29319](https://github.com/apache/superset/pull/29319) chore(e2e): more instructions for manual test runs. (@rusackas) +- [#28201](https://github.com/apache/superset/pull/28201) chore(applitools): making tests more static for consistent testing (@rusackas) +- [#29302](https://github.com/apache/superset/pull/29302) chore(distributed-lock): refactor tests (@villebro) +- [#29308](https://github.com/apache/superset/pull/29308) build(deps-dev): bump ws from 7.5.7 to 7.5.10 in /superset-embedded-sdk (@dependabot[bot]) +- [#29296](https://github.com/apache/superset/pull/29296) chore(e2e): using updated repo secret, new Cypress project id (@rusackas) +- [#29300](https://github.com/apache/superset/pull/29300) docs: add Agoda to users list (@oBoMBaYo) +- [#29285](https://github.com/apache/superset/pull/29285) chore: use json codec for key value lock (@villebro) +- [#29277](https://github.com/apache/superset/pull/29277) chore: make flask-talisman work with test config (@mistercrunch) +- [#29273](https://github.com/apache/superset/pull/29273) docs: remove comment header in README.md (@mistercrunch) +- [#29275](https://github.com/apache/superset/pull/29275) build(deps): bump ws from 7.5.9 to 7.5.10 in /docs (@dependabot[bot]) +- [#29276](https://github.com/apache/superset/pull/29276) build(deps): bump ws from 8.17.0 to 
8.17.1 in /superset-websocket (@dependabot[bot]) +- [#29274](https://github.com/apache/superset/pull/29274) chore: trigger CI jobs on all release-related branches (@mistercrunch) +- [#29247](https://github.com/apache/superset/pull/29247) chore: translate strings to French (@eschutho) +- [#29233](https://github.com/apache/superset/pull/29233) refactor(sqllab): nonblocking delete query editor (@justinpark) +- [#29249](https://github.com/apache/superset/pull/29249) test(Explorer): Fix minor errors in ExploreViewContainer syntax, add tests (@rtexelm) +- [#28876](https://github.com/apache/superset/pull/28876) chore(sqllab): Add logging for actions (@justinpark) +- [#29245](https://github.com/apache/superset/pull/29245) test(storybook): fix component stories (@msyavuz) +- [#29235](https://github.com/apache/superset/pull/29235) chore: Remove the need for explicit bubble up of certain exceptions (@john-bodley) +- [#28628](https://github.com/apache/superset/pull/28628) chore: Set isolation level to READ COMMITTED for testing et al. 
(@john-bodley) +- [#29108](https://github.com/apache/superset/pull/29108) refactor(sqllab): nonblocking switch query editor (@justinpark) +- [#29232](https://github.com/apache/superset/pull/29232) build(deps-dev): bump braces from 3.0.2 to 3.0.3 in /superset-embedded-sdk (@dependabot[bot]) +- [#29226](https://github.com/apache/superset/pull/29226) chore(intros): Update INTHEWILD.md (@RIS3cz) +- [#29167](https://github.com/apache/superset/pull/29167) build(deps-dev): bump braces from 3.0.2 to 3.0.3 in /superset-websocket (@dependabot[bot]) +- [#28836](https://github.com/apache/superset/pull/28836) chore(deps): bump distributions from 1.1.0 to 2.2.0 in /superset-frontend (@dependabot[bot]) +- [#29168](https://github.com/apache/superset/pull/29168) build(deps): bump braces from 3.0.2 to 3.0.3 in /superset-frontend/cypress-base (@dependabot[bot]) +- [#29169](https://github.com/apache/superset/pull/29169) build(deps): bump braces from 3.0.2 to 3.0.3 in /docs (@dependabot[bot]) +- [#28295](https://github.com/apache/superset/pull/28295) build(deps): update urijs requirement from ^1.19.8 to ^1.19.11 in /superset-frontend/plugins/legacy-preset-chart-deckgl (@dependabot[bot]) +- [#29160](https://github.com/apache/superset/pull/29160) chore: `s/MockFixture/MockerFixture/g` (@betodealmeida) +- [#29142](https://github.com/apache/superset/pull/29142) docs: Add Analytics Aura to INTHEWILD (@visharavana) +- [#29104](https://github.com/apache/superset/pull/29104) docs: Add Gavagai to INTHEWILD (@ninaviereckel) +- [#28786](https://github.com/apache/superset/pull/28786) refactor: Removes the export of QueryFormData (@EnxDev) +- [#28641](https://github.com/apache/superset/pull/28641) chore: change security error level (@eschutho) +- [#29093](https://github.com/apache/superset/pull/29093) docs: various adjustments across the docs (@mholthausen) +- [#29077](https://github.com/apache/superset/pull/29077) chore: only use cypress.io when triggered manually (@mistercrunch) +- 
[#28571](https://github.com/apache/superset/pull/28571) chore: remove React 16.4's obsolete React imports (@hainenber) +- [#28795](https://github.com/apache/superset/pull/28795) refactor(sqllab): nonblocking new query editor (@justinpark) +- [#28822](https://github.com/apache/superset/pull/28822) chore(deps-dev): update @types/lodash requirement from ^4.17.0 to ^4.17.4 in /superset-frontend/plugins/plugin-chart-handlebars (@dependabot[bot]) +- [#28814](https://github.com/apache/superset/pull/28814) chore(deps): bump core-js from 3.8.3 to 3.37.1 in /superset-frontend (@dependabot[bot]) +- [#28812](https://github.com/apache/superset/pull/28812) chore(deps): bump @types/lodash from 4.17.0 to 4.17.4 in /superset-websocket (@dependabot[bot]) +- [#28811](https://github.com/apache/superset/pull/28811) chore(deps): bump react-intersection-observer from 9.8.2 to 9.10.2 in /superset-frontend (@dependabot[bot]) +- [#28808](https://github.com/apache/superset/pull/28808) chore(deps): bump @types/json-bigint from 1.0.1 to 1.0.4 in /superset-frontend (@dependabot[bot]) +- [#28801](https://github.com/apache/superset/pull/28801) chore(deps-dev): bump @docusaurus/tsconfig from 3.3.2 to 3.4.0 in /docs (@dependabot[bot]) +- [#28799](https://github.com/apache/superset/pull/28799) chore(deps): bump @ant-design/icons from 5.3.6 to 5.3.7 in /docs (@dependabot[bot]) +- [#28802](https://github.com/apache/superset/pull/28802) chore(deps-dev): bump @types/react from 18.3.1 to 18.3.3 in /docs (@dependabot[bot]) +- [#28805](https://github.com/apache/superset/pull/28805) chore(deps): bump swagger-ui-react from 5.17.5 to 5.17.14 in /docs (@dependabot[bot]) +- [#28806](https://github.com/apache/superset/pull/28806) chore(deps-dev): bump @docusaurus/module-type-aliases from 3.2.1 to 3.4.0 in /docs (@dependabot[bot]) +- [#28809](https://github.com/apache/superset/pull/28809) chore(deps-dev): bump @types/node from 20.12.7 to 20.13.0 in /superset-websocket (@dependabot[bot]) +- 
[#28817](https://github.com/apache/superset/pull/28817) chore(deps-dev): bump @hot-loader/react-dom from 16.13.0 to 16.14.0 in /superset-frontend (@dependabot[bot]) +- [#28827](https://github.com/apache/superset/pull/28827) chore(deps-dev): bump exports-loader from 0.7.0 to 5.0.0 in /superset-frontend (@dependabot[bot]) +- [#28826](https://github.com/apache/superset/pull/28826) chore(deps-dev): bump imports-loader from 3.1.1 to 5.0.0 in /superset-frontend (@dependabot[bot]) +- [#28824](https://github.com/apache/superset/pull/28824) chore(deps): bump react-window and @types/react-window in /superset-frontend (@dependabot[bot]) +- [#28823](https://github.com/apache/superset/pull/28823) chore(deps): bump debug from 4.3.4 to 4.3.5 in /superset-websocket/utils/client-ws-app (@dependabot[bot]) +- [#28773](https://github.com/apache/superset/pull/28773) chore: make docker-compose use less memory (@mistercrunch) +- [#28654](https://github.com/apache/superset/pull/28654) chore(revert): "add listener to repaint on visibility change for canvas" (@eschutho) +- [#28752](https://github.com/apache/superset/pull/28752) chore: remove duplicate code in `SqlaTable` (@betodealmeida) +- [#28710](https://github.com/apache/superset/pull/28710) chore: updated Dutch translations (@Seboeb) +- [#28471](https://github.com/apache/superset/pull/28471) chore(🦾): bump python celery 5.3.6 -> 5.4.0 (@github-actions[bot]) +- [#28742](https://github.com/apache/superset/pull/28742) chore(deps): bump pug from 3.0.2 to 3.0.3 in /superset-websocket/utils/client-ws-app (@dependabot[bot]) +- [#28716](https://github.com/apache/superset/pull/28716) chore(🦾): bump python importlib-resources 5.12.0 -> 6.4.0 (@github-actions[bot]) +- [#28718](https://github.com/apache/superset/pull/28718) chore(🦾): bump python zipp 3.18.2 -> 3.19.0 (@github-actions[bot]) +- [#28719](https://github.com/apache/superset/pull/28719) chore(🦾): bump python cachetools 5.3.2 -> 5.3.3 (@github-actions[bot]) +- 
[#28720](https://github.com/apache/superset/pull/28720) chore(🦾): bump python markdown-it-py 2.2.0 -> 3.0.0 (@github-actions[bot]) +- [#28721](https://github.com/apache/superset/pull/28721) chore(🦾): bump python slack-sdk 3.21.3 -> 3.27.2 (@github-actions[bot]) +- [#28727](https://github.com/apache/superset/pull/28727) chore(🦾): bump python prompt-toolkit 3.0.38 -> 3.0.44 (@github-actions[bot]) +- [#28729](https://github.com/apache/superset/pull/28729) chore(🦾): bump python attrs 23.1.0 -> 23.2.0 (@github-actions[bot]) +- [#28730](https://github.com/apache/superset/pull/28730) chore(🦾): bump python apsw 3.45.3.0 -> 3.46.0.0 (@github-actions[bot]) +- [#28731](https://github.com/apache/superset/pull/28731) chore(🦾): bump python pytz 2021.3 -> 2024.1 (@github-actions[bot]) +- [#28570](https://github.com/apache/superset/pull/28570) chore(tags): Handle tagging as part of asset update call (@Vitor-Avila) +- [#28722](https://github.com/apache/superset/pull/28722) chore(🦾): bump python wrapt 1.15.0 -> 1.16.0 (@github-actions[bot]) +- [#28717](https://github.com/apache/superset/pull/28717) chore(🦾): bump python limits 3.4.0 -> 3.12.0 (@github-actions[bot]) +- [#28723](https://github.com/apache/superset/pull/28723) chore(🦾): bump python mako 1.3.3 -> 1.3.5 (@github-actions[bot]) +- [#28724](https://github.com/apache/superset/pull/28724) chore(🦾): bump python marshmallow-sqlalchemy 0.23.1 -> 0.28.2 (@github-actions[bot]) +- [#28725](https://github.com/apache/superset/pull/28725) chore(🦾): bump python wcwidth 0.2.5 -> 0.2.13 (@github-actions[bot]) +- [#28726](https://github.com/apache/superset/pull/28726) chore(🦾): bump python pyasn1 0.5.1 -> 0.6.0 (@github-actions[bot]) +- [#28732](https://github.com/apache/superset/pull/28732) chore(🦾): bump python google-auth 2.27.0 -> 2.29.0 (@github-actions[bot]) +- [#28733](https://github.com/apache/superset/pull/28733) chore(🦾): bump python certifi 2023.7.22 -> 2024.2.2 (@github-actions[bot]) +- 
[#28679](https://github.com/apache/superset/pull/28679) chore(🦾): bump python boto3 1.26.130 -> 1.34.112 (@github-actions[bot]) +- [#28703](https://github.com/apache/superset/pull/28703) chore: remove ipython from development dependencies (@mistercrunch) +- [#28661](https://github.com/apache/superset/pull/28661) chore(🦾): bump python stack-data 0.6.2 -> 0.6.3 (@github-actions[bot]) +- [#28663](https://github.com/apache/superset/pull/28663) chore(🦾): bump python googleapis-common-protos 1.59.0 -> 1.63.0 (@github-actions[bot]) +- [#28669](https://github.com/apache/superset/pull/28669) chore(🦾): bump python ruff 0.4.4 -> 0.4.5 (@github-actions[bot]) +- [#28674](https://github.com/apache/superset/pull/28674) chore(🦾): bump python matplotlib 3.7.1 -> 3.9.0 (@github-actions[bot]) +- [#28696](https://github.com/apache/superset/pull/28696) chore(docs): address common docker compose error message in Quickstart (@sfirke) +- [#28681](https://github.com/apache/superset/pull/28681) chore(🦾): bump python requests-oauthlib 1.3.1 -> 2.0.0 (@github-actions[bot]) +- [#28670](https://github.com/apache/superset/pull/28670) chore(🦾): bump python flask-limiter 3.3.1 -> 3.7.0 (@github-actions[bot]) +- [#28655](https://github.com/apache/superset/pull/28655) chore(🦾): bump python marshmallow 3.19.0 -> 3.21.2 (@github-actions[bot]) +- [#28590](https://github.com/apache/superset/pull/28590) chore(🦾): bump python bcrypt 4.0.1 -> 4.1.3 (@github-actions[bot]) +- [#28657](https://github.com/apache/superset/pull/28657) chore(🦾): bump python bottleneck 1.3.7 -> 1.3.8 (@github-actions[bot]) +- [#28658](https://github.com/apache/superset/pull/28658) chore(🦾): bump python cattrs 23.2.1 -> 23.2.3 (@github-actions[bot]) +- [#28659](https://github.com/apache/superset/pull/28659) chore(🦾): bump python typing-extensions 4.11.0 -> 4.12.0 (@github-actions[bot]) +- [#28660](https://github.com/apache/superset/pull/28660) chore(🦾): bump python wheel 0.40.0 -> 0.43.0 (@github-actions[bot]) +- 
[#28662](https://github.com/apache/superset/pull/28662) chore(🦾): bump python pexpect 4.8.0 -> 4.9.0 (@github-actions[bot]) +- [#28665](https://github.com/apache/superset/pull/28665) chore(🦾): bump python traitlets 5.9.0 -> 5.14.3 (@github-actions[bot]) +- [#28666](https://github.com/apache/superset/pull/28666) chore(🦾): bump python freezegun 1.4.0 -> 1.5.1 (@github-actions[bot]) +- [#28668](https://github.com/apache/superset/pull/28668) chore(🦾): bump python babel 2.9.1 -> 2.15.0 (@github-actions[bot]) +- [#28672](https://github.com/apache/superset/pull/28672) chore(🦾): bump python pyproject-api 1.5.2 -> 1.6.1 (@github-actions[bot]) +- [#28671](https://github.com/apache/superset/pull/28671) chore(🦾): bump python click-repl 0.2.0 -> 0.3.0 (@github-actions[bot]) +- [#28675](https://github.com/apache/superset/pull/28675) chore(🦾): bump python kombu 5.3.4 -> 5.3.7 (@github-actions[bot]) +- [#28676](https://github.com/apache/superset/pull/28676) chore(🦾): bump python cffi 1.15.1 -> 1.16.0 (@github-actions[bot]) +- [#28677](https://github.com/apache/superset/pull/28677) chore(🦾): bump python click-didyoumean 0.3.0 -> 0.3.1 (@github-actions[bot]) +- [#28680](https://github.com/apache/superset/pull/28680) chore(🦾): bump python identify 2.5.24 -> 2.5.36 (@github-actions[bot]) +- [#28682](https://github.com/apache/superset/pull/28682) chore(🦾): bump python pydruid 0.6.6 -> 0.6.9 (@github-actions[bot]) +- [#28683](https://github.com/apache/superset/pull/28683) chore(🦾): bump python kiwisolver 1.4.4 -> 1.4.5 (@github-actions[bot]) +- [#28684](https://github.com/apache/superset/pull/28684) chore(🦾): bump python requests 2.31.0 -> 2.32.2 (@github-actions[bot]) +- [#28574](https://github.com/apache/superset/pull/28574) chore(🦾): bump python dnspython 2.1.0 -> 2.6.1 (@github-actions[bot]) +- [#28573](https://github.com/apache/superset/pull/28573) chore(🦾): bump python rich 13.3.4 -> 13.7.1 (@github-actions[bot]) +- [#28535](https://github.com/apache/superset/pull/28535) chore(🦾): 
bump python pygments 2.15.0 -> 2.18.0 (@github-actions[bot]) +- [#28580](https://github.com/apache/superset/pull/28580) chore(🦾): bump python deprecated 1.2.13 -> 1.2.14 (@github-actions[bot]) +- [#28526](https://github.com/apache/superset/pull/28526) chore(🦾): bump python tzlocal 4.3 -> 5.2 (@github-actions[bot]) +- [#28533](https://github.com/apache/superset/pull/28533) chore(🦾): bump python lazy-object-proxy 1.9.0 -> 1.10.0 (@github-actions[bot]) +- [#28527](https://github.com/apache/superset/pull/28527) chore(🦾): bump python jsonlines 3.1.0 -> 4.0.0 (@github-actions[bot]) +- [#28576](https://github.com/apache/superset/pull/28576) chore(🦾): bump python flask-babel 1.0.0 -> 2.0.0 (@github-actions[bot]) +- [#28577](https://github.com/apache/superset/pull/28577) chore(🦾): bump python tqdm 4.65.0 -> 4.66.4 (@github-actions[bot]) +- [#28578](https://github.com/apache/superset/pull/28578) chore(🦾): bump python parso 0.8.3 -> 0.8.4 (@github-actions[bot]) +- [#28579](https://github.com/apache/superset/pull/28579) chore(🦾): bump python tzdata 2023.3 -> 2024.1 (@github-actions[bot]) +- [#28581](https://github.com/apache/superset/pull/28581) chore(🦾): bump python ijson 3.2.0.post0 -> 3.2.3 (@github-actions[bot]) +- [#28582](https://github.com/apache/superset/pull/28582) chore(🦾): bump python apsw 3.42.0.1 -> 3.45.3.0 (@github-actions[bot]) +- [#28583](https://github.com/apache/superset/pull/28583) chore(🦾): bump python distlib 0.3.6 -> 0.3.8 (@github-actions[bot]) +- [#28585](https://github.com/apache/superset/pull/28585) chore(🦾): bump python pycparser 2.20 -> 2.22 (@github-actions[bot]) +- [#28589](https://github.com/apache/superset/pull/28589) chore(🦾): bump python idna 3.2 -> 3.7 (@github-actions[bot]) +- [#28586](https://github.com/apache/superset/pull/28586) chore(🦾): bump python pre-commit 3.7.0 -> 3.7.1 (@github-actions[bot]) +- [#28587](https://github.com/apache/superset/pull/28587) chore(🦾): bump python sqlalchemy-bigquery 1.10.0 -> 1.11.0 (@github-actions[bot]) 
+- [#28588](https://github.com/apache/superset/pull/28588) chore(🦾): bump python google-resumable-media 2.5.0 -> 2.7.0 (@github-actions[bot]) +- [#28591](https://github.com/apache/superset/pull/28591) chore(🦾): bump python zipp 3.18.1 -> 3.18.2 (@github-actions[bot]) +- [#28593](https://github.com/apache/superset/pull/28593) chore(🦾): bump python pip-tools 7.3.0 -> 7.4.1 (@github-actions[bot]) +- [#28584](https://github.com/apache/superset/pull/28584) chore(🦾): bump python ruff 0.4.0 -> 0.4.4 (@github-actions[bot]) +- [#28540](https://github.com/apache/superset/pull/28540) chore(🦾): bump python tomlkit 0.11.8 -> 0.12.5 (@github-actions[bot]) +- [#28541](https://github.com/apache/superset/pull/28541) chore(🦾): bump python db-dtypes 1.1.1 -> 1.2.0 (@github-actions[bot]) +- [#28563](https://github.com/apache/superset/pull/28563) refactor(superset-ui-core): Migrate ChartFrame to RTL (@rtexelm) +- [#28522](https://github.com/apache/superset/pull/28522) refactor: Migration of json utilities from core (@eyalezer) +- [#28532](https://github.com/apache/superset/pull/28532) chore(🦾): bump python nodeenv 1.7.0 -> 1.8.0 (@github-actions[bot]) +- [#28537](https://github.com/apache/superset/pull/28537) chore(🦾): bump python numba 0.57.1 -> 0.59.1 (@github-actions[bot]) +- [#28539](https://github.com/apache/superset/pull/28539) chore(🦾): bump python dill 0.3.6 -> 0.3.8 (@github-actions[bot]) +- [#28531](https://github.com/apache/superset/pull/28531) chore(🦾): bump python charset-normalizer 3.2.0 -> 3.3.2 (@github-actions[bot]) +- [#28530](https://github.com/apache/superset/pull/28530) chore(🦾): bump python jsonschema-spec 0.1.4 -> 0.1.6 (@github-actions[bot]) +- [#28474](https://github.com/apache/superset/pull/28474) chore(🦾): bump python croniter 2.0.3 -> 2.0.5 (@github-actions[bot]) +- [#28536](https://github.com/apache/superset/pull/28536) chore(🦾): bump python amqp 5.1.1 -> 5.2.0 (@github-actions[bot]) +- [#28544](https://github.com/apache/superset/pull/28544) chore(🦾): bump 
python flask-jwt-extended 4.5.3 -> 4.6.0 (@github-actions[bot]) +- [#28542](https://github.com/apache/superset/pull/28542) chore(🦾): bump python requests-cache 1.1.1 -> 1.2.0 (@github-actions[bot]) +- [#28528](https://github.com/apache/superset/pull/28528) chore(🦾): bump python zope-event 4.5.0 -> 5.0 (@github-actions[bot]) +- [#28545](https://github.com/apache/superset/pull/28545) chore(🦾): bump python pyasn1-modules 0.3.0 -> 0.4.0 (@github-actions[bot]) +- [#28500](https://github.com/apache/superset/pull/28500) chore(🦾): bump python fonttools 4.43.0 -> 4.51.0 (@github-actions[bot]) +- [#28503](https://github.com/apache/superset/pull/28503) chore(🦾): bump python email-validator 1.1.3 -> 2.1.1 (@github-actions[bot]) +- [#28506](https://github.com/apache/superset/pull/28506) chore(🦾): bump python numexpr 2.9.0 -> 2.10.0 (@github-actions[bot]) +- [#28508](https://github.com/apache/superset/pull/28508) chore(docker): Reduce image size and update GECKODRIVER_VERSION ,FIRE… (@alekseyolg) +- [#28499](https://github.com/apache/superset/pull/28499) docs: creating a redirect for a legacy link about pre-commit hook (@rusackas) +- [#28520](https://github.com/apache/superset/pull/28520) chore: Adds setActiveTabs back (@michael-s-molina) +- [#27951](https://github.com/apache/superset/pull/27951) chore(docs): updating alerts & reports documentation WEBDRIVER_BASEURL settings for docker compose (@fisjac) +- [#28435](https://github.com/apache/superset/pull/28435) chore(D2D): Add granular permission for dashboard drilling operations (@Vitor-Avila) +- [#28399](https://github.com/apache/superset/pull/28399) chore: deprecate old Dashboard endpoints (@dpgaspar) +- [#28492](https://github.com/apache/superset/pull/28492) chore: deprecate multiple old APIs (@dpgaspar) +- [#28490](https://github.com/apache/superset/pull/28490) chore: bump gunicorn to 22.0.0 (@dpgaspar) +- [#28498](https://github.com/apache/superset/pull/28498) chore: Don't mark Helm releases as latest (@michael-s-molina) 
+- [#28046](https://github.com/apache/superset/pull/28046) refactor: Migrate saveModalActions to TypeScript (@EnxDev) +- [#28484](https://github.com/apache/superset/pull/28484) chore: remove lost file (@betodealmeida) +- [#28309](https://github.com/apache/superset/pull/28309) build(deps): bump ejs from 3.1.8 to 3.1.10 in /superset-frontend (@dependabot[bot]) +- [#28467](https://github.com/apache/superset/pull/28467) chore(🦾): bump python redis subpackage(s) (@github-actions[bot]) +- [#28469](https://github.com/apache/superset/pull/28469) chore(🦾): bump python flask-compress 1.14 -> 1.15 (@github-actions[bot]) +- [#28453](https://github.com/apache/superset/pull/28453) chore: deprecate old Dataset related endpoints (@dpgaspar) +- [#28479](https://github.com/apache/superset/pull/28479) chore(🦾): bump python geopy subpackage(s) (@github-actions[bot]) +- [#28468](https://github.com/apache/superset/pull/28468) chore(🦾): bump python cryptography 42.0.5 -> 42.0.7 (@github-actions[bot]) +- [#28472](https://github.com/apache/superset/pull/28472) chore(🦾): bump python flask-session subpackage(s) (@github-actions[bot]) +- [#28465](https://github.com/apache/superset/pull/28465) chore(🦾): bump python flask-migrate subpackage(s) (@github-actions[bot]) +- [#28464](https://github.com/apache/superset/pull/28464) chore(🦾): bump python markdown subpackage(s) (@github-actions[bot]) +- [#28463](https://github.com/apache/superset/pull/28463) chore(🦾): bump python flask-caching 2.1.0 -> 2.3.0 (@github-actions[bot]) +- [#28436](https://github.com/apache/superset/pull/28436) chore(models): Adding encrypted field checks (@craig-rueda) +- [#28456](https://github.com/apache/superset/pull/28456) chore(helm): bumping app version to 4.0.1 in helm chart (@lodu) +- [#28452](https://github.com/apache/superset/pull/28452) chore: Updates CHANGELOG.md with 4.0.1 data (@michael-s-molina) +- [#28404](https://github.com/apache/superset/pull/28404) chore: deprecate old Database endpoints (@dpgaspar) +- 
[#28421](https://github.com/apache/superset/pull/28421) chore(🦾): bump python werkzeug 3.0.1 -> 3.0.3 (@mistercrunch) +- [#28430](https://github.com/apache/superset/pull/28430) chore(docs): fix two broken Docusaurus redirect links (@sfirke) +- [#28379](https://github.com/apache/superset/pull/28379) chore(build): fix issue that prevent `eslint` displaying type-check report during build (@hainenber) +- [#28393](https://github.com/apache/superset/pull/28393) chore(Databricks): New Databricks driver (@Vitor-Avila) +- [#28406](https://github.com/apache/superset/pull/28406) chore: unit tests for `catalog_access` (@betodealmeida) +- [#28398](https://github.com/apache/superset/pull/28398) chore: Updates CHANGELOG.md with 3.1.3 data (@michael-s-molina) +- [#28358](https://github.com/apache/superset/pull/28358) chore: add a github "action-validator" in CI (@mistercrunch) +- [#28387](https://github.com/apache/superset/pull/28387) chore: remove and deprecate old CSS templates endpoints (@dpgaspar) +- [#28342](https://github.com/apache/superset/pull/28342) chore(build): uplift `webpack`-related packages to v5 (@hainenber) +- [#28373](https://github.com/apache/superset/pull/28373) docs: update CVE list (@dpgaspar) +- [#28359](https://github.com/apache/superset/pull/28359) refactor(superset-ui-core): Migrate FallbackComponent.test to RTL (@rtexelm) +- [#28360](https://github.com/apache/superset/pull/28360) docs: clarifying that config.SQL_QUERY_MUTATOR does not affect cache (@mistercrunch) +- [#28362](https://github.com/apache/superset/pull/28362) build(deps): bump swagger-ui-react from 5.17.2 to 5.17.5 in /docs (@dependabot[bot]) +- [#28344](https://github.com/apache/superset/pull/28344) docs(intro): embed overview video into README.md (@hainenber) +- [#28335](https://github.com/apache/superset/pull/28335) chore: Add Apache Spark Jinja template processor (@john-bodley) +- [#28285](https://github.com/apache/superset/pull/28285) docs: various improvements across the docs 
(@mistercrunch) +- [#28288](https://github.com/apache/superset/pull/28288) build(deps): bump ws from 8.16.0 to 8.17.0 in /superset-websocket (@dependabot[bot]) +- [#23730](https://github.com/apache/superset/pull/23730) docs: add npm publish steps to release/readme (@lilykuang) +- [#28308](https://github.com/apache/superset/pull/28308) refactor(helm): Allow chart operators to exclude the creation of the secret manifest (@asaf400) +- [#28321](https://github.com/apache/superset/pull/28321) chore(dev): remove obsolete image reference to `superset-websocket` + fix minor typo (@hainenber) +- [#28311](https://github.com/apache/superset/pull/28311) chore: Move #26288 from "Database Migration" to "Other" (@john-bodley) +- [#28154](https://github.com/apache/superset/pull/28154) chore(commands): Remove unnecessary commit (@john-bodley) +- [#28298](https://github.com/apache/superset/pull/28298) build(deps): bump markdown-to-jsx from 7.4.1 to 7.4.7 in /superset-frontend (@dependabot[bot]) +- [#28301](https://github.com/apache/superset/pull/28301) build(deps): bump clsx from 2.1.0 to 2.1.1 in /docs (@dependabot[bot]) +- [#28306](https://github.com/apache/superset/pull/28306) build(deps-dev): bump eslint-plugin-testing-library from 6.2.0 to 6.2.2 in /superset-frontend (@dependabot[bot]) +- [#28246](https://github.com/apache/superset/pull/28246) chore: clean up DB create command (@betodealmeida) +- [#28284](https://github.com/apache/superset/pull/28284) chore(docs): video now hosted by ASF instead of GitHub (@rusackas) +- [#28281](https://github.com/apache/superset/pull/28281) docs: merge database config under Configuration section (@mistercrunch) +- [#28278](https://github.com/apache/superset/pull/28278) chore: allow codecov to detect SHA (@mistercrunch) +- [#28276](https://github.com/apache/superset/pull/28276) chore: use depth=1 for cloning (@rantoniuk) +- [#28163](https://github.com/apache/superset/pull/28163) docs(intro): embed overview video into Intro document (@hainenber) 
+- [#28275](https://github.com/apache/superset/pull/28275) docs(upgrading): clarify upgrade process (@SaTae66) +- [#28187](https://github.com/apache/superset/pull/28187) chore(superset-ui-core and NoResultsComponent): Migrate to RTL, add RTL modules to the ui-core (@rtexelm) +- [#27891](https://github.com/apache/superset/pull/27891) chore(AlteredSliceTag): Migrate to functional (@rtexelm) +- [#28247](https://github.com/apache/superset/pull/28247) docs: set up redirects (@mistercrunch) +- [#28240](https://github.com/apache/superset/pull/28240) build(deps): bump polished from 3.7.2 to 4.3.1 in /superset-frontend (@dependabot[bot]) +- [#27003](https://github.com/apache/superset/pull/27003) docs(maps): jupyter notebook now auto-updates docs site (@rusackas) +- [#28220](https://github.com/apache/superset/pull/28220) docs: reorganize the CONTRIBUTING section (@mistercrunch) +- [#28243](https://github.com/apache/superset/pull/28243) chore(docs): Move ::: onto its own line to fix caution formatting (@sfirke) +- [#28236](https://github.com/apache/superset/pull/28236) chore(docs): add closing ::: to caution tag (@sfirke) +- [#28237](https://github.com/apache/superset/pull/28237) chore(docs): reorder pages in the Configuring Superset section (@sfirke) +- [#28153](https://github.com/apache/superset/pull/28153) chore: Add custom keywords for SQL Lab autocomplete (@justinpark) +- [#28223](https://github.com/apache/superset/pull/28223) chore(plugin-chart-country-map): fix broken urls (@villebro) +- [#28217](https://github.com/apache/superset/pull/28217) docs: update README.md to avoid 404 issue (@schuberng) +- [#28137](https://github.com/apache/superset/pull/28137) chore: add pylint to pre-commit hook (@mistercrunch) +- [#28161](https://github.com/apache/superset/pull/28161) docs: Refactor Documentation Structure (@artofcomputing) +- [#28159](https://github.com/apache/superset/pull/28159) chore(tests): Remove unnecessary/problematic app contexts (@john-bodley) +- 
[#28130](https://github.com/apache/superset/pull/28130) docs: add dynamic entity-relationship diagram to docs (@mistercrunch) +- [#27831](https://github.com/apache/superset/pull/27831) build(deps): update @types/fetch-mock requirement from ^7.3.3 to ^7.3.8 in /superset-frontend/packages/superset-ui-core (@dependabot[bot]) +- [#28177](https://github.com/apache/superset/pull/28177) build(deps): bump gh-pages from 3.2.3 to 5.0.0 in /superset-frontend (@dependabot[bot]) +- [#28134](https://github.com/apache/superset/pull/28134) chore: clean up console upon firing up the CLI (@mistercrunch) +- [#28135](https://github.com/apache/superset/pull/28135) chore: get websocket service to start in docker-compose (@mistercrunch) +- [#28164](https://github.com/apache/superset/pull/28164) chore: refactor file upload commands (@dpgaspar) +- [#28019](https://github.com/apache/superset/pull/28019) chore: change deprecation versions post 4.0 (@eschutho) +- [#28129](https://github.com/apache/superset/pull/28129) chore(translations): add Arabic translations stub (@OmarIthawi) +- [#28031](https://github.com/apache/superset/pull/28031) chore(translations): fix translations order (@lscheibel) +- [#28082](https://github.com/apache/superset/pull/28082) build(deps): bump match-sorter from 6.3.3 to 6.3.4 in /superset-frontend (@dependabot[bot]) +- [#28085](https://github.com/apache/superset/pull/28085) build(deps): bump react-virtualized-auto-sizer from 1.0.7 to 1.0.24 in /superset-frontend (@dependabot[bot]) +- [#28069](https://github.com/apache/superset/pull/28069) build(deps): update underscore requirement from ^1.12.1 to ^1.13.6 in /superset-frontend/plugins/legacy-preset-chart-deckgl (@dependabot[bot]) +- [#28075](https://github.com/apache/superset/pull/28075) build(deps): update prop-types requirement from ^15.6.0 to ^15.8.1 in /superset-frontend/plugins/legacy-preset-chart-deckgl (@dependabot[bot]) +- [#28068](https://github.com/apache/superset/pull/28068) build(deps-dev): bump fs-extra 
from 10.1.0 to 11.2.0 in /superset-frontend/packages/generator-superset (@dependabot[bot]) +- [#28083](https://github.com/apache/superset/pull/28083) build(deps): bump @types/node from 18.0.0 to 20.12.7 in /superset-frontend (@dependabot[bot]) +- [#28071](https://github.com/apache/superset/pull/28071) build(deps): update xss requirement from ^1.0.10 to ^1.0.15 in /superset-frontend/plugins/legacy-preset-chart-deckgl (@dependabot[bot]) +- [#27965](https://github.com/apache/superset/pull/27965) build(deps): bump deck.gl from 8.8.27 to 9.0.6 in /superset-frontend/plugins/legacy-preset-chart-deckgl (@dependabot[bot]) +- [#28131](https://github.com/apache/superset/pull/28131) docs: Updated quick start page. Docker compose command had a typo (@jonedmiston) +- [#26746](https://github.com/apache/superset/pull/26746) build(deps): bump chrono-node from 2.2.6 to 2.7.5 in /superset-frontend (@dependabot[bot]) +- [#26896](https://github.com/apache/superset/pull/26896) build(deps): bump d3-interpolate and @types/d3-interpolate in /superset-frontend (@dependabot[bot]) +- [#26564](https://github.com/apache/superset/pull/26564) build(deps-dev): bump babel-plugin-jsx-remove-data-test-id from 2.1.3 to 3.0.0 in /superset-frontend (@dependabot[bot]) +- [#26563](https://github.com/apache/superset/pull/26563) build(deps-dev): bump @types/js-levenshtein from 1.1.0 to 1.1.3 in /superset-frontend (@dependabot[bot]) +- [#28080](https://github.com/apache/superset/pull/28080) build(deps-dev): bump @docusaurus/module-type-aliases from 3.2.0 to 3.2.1 in /docs (@dependabot[bot]) +- [#28084](https://github.com/apache/superset/pull/28084) build(deps-dev): bump @applitools/eyes-storybook from 3.46.0 to 3.49.0 in /superset-frontend (@dependabot[bot]) +- [#28086](https://github.com/apache/superset/pull/28086) build(deps-dev): bump eslint-plugin-storybook from 0.6.15 to 0.8.0 in /superset-frontend (@dependabot[bot]) +- [#28089](https://github.com/apache/superset/pull/28089) build(deps-dev): bump jsdom 
from 20.0.0 to 24.0.0 in /superset-frontend (@dependabot[bot]) +- [#28088](https://github.com/apache/superset/pull/28088) build(deps-dev): bump esbuild-loader from 4.0.3 to 4.1.0 in /superset-frontend (@dependabot[bot]) +- [#28067](https://github.com/apache/superset/pull/28067) build(deps): bump @types/d3-scale from 2.2.10 to 4.0.8 in /superset-frontend/plugins/plugin-chart-word-cloud (@dependabot[bot]) +- [#27340](https://github.com/apache/superset/pull/27340) build(deps): bump azure/setup-helm from 3 to 4 (@dependabot[bot]) +- [#28070](https://github.com/apache/superset/pull/28070) build(deps-dev): bump @types/node from 20.12.4 to 20.12.7 in /superset-websocket (@dependabot[bot]) +- [#28065](https://github.com/apache/superset/pull/28065) build(deps): update dompurify requirement from ^3.0.11 to ^3.1.0 in /superset-frontend/plugins/legacy-preset-chart-nvd3 (@dependabot[bot]) +- [#28066](https://github.com/apache/superset/pull/28066) build(deps): update @types/lodash requirement from ^4.14.149 to ^4.17.0 in /superset-frontend/packages/superset-ui-core (@dependabot[bot]) +- [#26602](https://github.com/apache/superset/pull/26602) refactor: add "button" role to clickable UI elements for improved accessibility (@eulloa10) +- [#28127](https://github.com/apache/superset/pull/28127) chore(Dashboard): Improve accessibility chart descriptions (@geido) +- [#28081](https://github.com/apache/superset/pull/28081) build(deps): bump react-intersection-observer from 9.6.0 to 9.8.2 in /superset-frontend (@dependabot[bot]) +- [#28090](https://github.com/apache/superset/pull/28090) build(deps-dev): bump babel-loader from 8.3.0 to 9.1.3 in /superset-frontend (@dependabot[bot]) +- [#28092](https://github.com/apache/superset/pull/28092) build(deps-dev): bump @types/react-gravatar from 2.6.8 to 2.6.14 in /superset-frontend (@dependabot[bot]) +- [#28102](https://github.com/apache/superset/pull/28102) docs: small fixes and update of README screenshots (@artofcomputing) +- 
[#28059](https://github.com/apache/superset/pull/28059) chore(Dashboard): Improve Table accessibility (@geido) +- [#28099](https://github.com/apache/superset/pull/28099) chore(asf): setting website staging server to point at superset-site's lfs branch (@rusackas) +- [#28016](https://github.com/apache/superset/pull/28016) chore(docs): splitting out "stable" feature flags by intent (config vs feature dev) (@rusackas) +- [#28077](https://github.com/apache/superset/pull/28077) build(deps): bump @algolia/client-search from 4.23.2 to 4.23.3 in /docs (@dependabot[bot]) +- [#28074](https://github.com/apache/superset/pull/28074) build(deps-dev): bump typescript from 5.4.3 to 5.4.5 in /docs (@dependabot[bot]) +- [#28048](https://github.com/apache/superset/pull/28048) chore(asf): disable calendar display by default, click to show (@rusackas) +- [#27921](https://github.com/apache/superset/pull/27921) docs: add more warnings for default secrets and docker-compose (@dpgaspar) +- [#28064](https://github.com/apache/superset/pull/28064) chore(csp): nix bugherd, add githubusercontent (@rusackas) +- [#27998](https://github.com/apache/superset/pull/27998) docs: move mp4 video to superset-site/tree/lfs (@mistercrunch) +- [#27978](https://github.com/apache/superset/pull/27978) chore(ASF): adds DOAP file and bumping apache-rat (@rusackas) +- [#28041](https://github.com/apache/superset/pull/28041) chore: Updates release related assets (@michael-s-molina) +- [#28045](https://github.com/apache/superset/pull/28045) chore(docs): disable bugherd for now (@rusackas) +- [#28028](https://github.com/apache/superset/pull/28028) chore: stabilize MySQL tests by aligning isolation levels (@mistercrunch) +- [#27884](https://github.com/apache/superset/pull/27884) chore: consolidate the Superset python package metadata (@mistercrunch) +- [#28040](https://github.com/apache/superset/pull/28040) docs: Updated NOTICE to 2024 (@esivakumar26) +- [#28015](https://github.com/apache/superset/pull/28015) 
chore(Dashboard): Accessibility filters Popover (@geido) +- [#27999](https://github.com/apache/superset/pull/27999) chore: Revert "chore(ci): make pre-commit step faster by skipping superset install" (@mistercrunch) +- [#28012](https://github.com/apache/superset/pull/28012) refactor: rename get_sqla_engine_with_context (@betodealmeida) +- [#27980](https://github.com/apache/superset/pull/27980) chore: remove no-op.yml as it's not needed anymore (@mistercrunch) +- [#27979](https://github.com/apache/superset/pull/27979) chore(ci): make pre-commit step faster by skipping superset install (@mistercrunch) +- [#27956](https://github.com/apache/superset/pull/27956) docs: deploy docs when merging to master (@mistercrunch) +- [#27906](https://github.com/apache/superset/pull/27906) chore: [proposal] de-matrix python-version in GHAs (@mistercrunch) +- [#27976](https://github.com/apache/superset/pull/27976) chore(docs): remove seemingly unused unpkg domain from CSPs (@rusackas) +- [#27977](https://github.com/apache/superset/pull/27977) chore(docs): removing Superset Community Newsletter archive (@rusackas) +- [#27975](https://github.com/apache/superset/pull/27975) chore(docs): adding ASF Privacy Link. (@rusackas) +- [#27954](https://github.com/apache/superset/pull/27954) docs(k8s): making it clear users MUST update secrets for prod instances. 
(@rusackas) +- [#27810](https://github.com/apache/superset/pull/27810) build(deps-dev): update @types/mapbox\_\_geojson-extent requirement from ^1.0.0 to ^1.0.3 in /superset-frontend/plugins/legacy-preset-chart-deckgl (@dependabot[bot]) +- [#27946](https://github.com/apache/superset/pull/27946) chore(helm): bumping app version to 4.0.0 in helm chart (@lodu) +- [#27149](https://github.com/apache/superset/pull/27149) chore(tests): Remove ineffectual login (@john-bodley) +- [#27937](https://github.com/apache/superset/pull/27937) chore: Adds 4.0.0 data to CHANGELOG.md and UPDATING.md (@michael-s-molina) +- [#27932](https://github.com/apache/superset/pull/27932) docs: fix broken OS Dependencies link in CONTRIBUTING.md (@bgreenlee) +- [#27717](https://github.com/apache/superset/pull/27717) chore(explore): Hide non-droppable metric and column list (@justinpark) +- [#27880](https://github.com/apache/superset/pull/27880) chore(OAuth2): refactor for custom OAuth2 clients (@betodealmeida) +- [#27915](https://github.com/apache/superset/pull/27915) chore(helm): Bumping app version to 3.1.2 in helm chart (@joshkoeneHawking) +- [#27334](https://github.com/apache/superset/pull/27334) build(deps-dev): update @babel/types requirement from ^7.23.9 to ^7.24.0 in /superset-frontend/plugins/plugin-chart-pivot-table (@dependabot[bot]) +- [#27321](https://github.com/apache/superset/pull/27321) build(deps-dev): bump fork-ts-checker-webpack-plugin from 5.2.1 to 9.0.2 in /superset-frontend/packages/superset-ui-demo (@dependabot[bot]) +- [#27322](https://github.com/apache/superset/pull/27322) build(deps): bump memoize-one from 5.2.1 to 6.0.0 in /superset-frontend/packages/superset-ui-demo (@dependabot[bot]) +- [#27319](https://github.com/apache/superset/pull/27319) build(deps): update @types/d3-time requirement from ^3.0.0 to ^3.0.3 in /superset-frontend/packages/superset-ui-core (@dependabot[bot]) +- [#27903](https://github.com/apache/superset/pull/27903) docs: replace broken david badges 
with libraries.io (@10xLaCroixDrinker) +- [#27725](https://github.com/apache/superset/pull/27725) chore(sqllab): Do not strip comments when executing SQL statements (@john-bodley) +- [#27888](https://github.com/apache/superset/pull/27888) build(deps-dev): bump @types/node from 20.11.24 to 20.12.4 in /superset-websocket (@dependabot[bot]) +- [#27805](https://github.com/apache/superset/pull/27805) build(deps): bump @types/lodash from 4.14.202 to 4.17.0 in /superset-websocket (@dependabot[bot]) +- [#27887](https://github.com/apache/superset/pull/27887) build(deps): bump fetch-retry from 4.1.1 to 6.0.0 in /superset-frontend (@dependabot[bot]) +- [#27772](https://github.com/apache/superset/pull/27772) chore: Cleanup table access check naming (@john-bodley) +- [#27804](https://github.com/apache/superset/pull/27804) build(deps): bump winston from 3.11.0 to 3.13.0 in /superset-websocket (@dependabot[bot]) +- [#27800](https://github.com/apache/superset/pull/27800) build(deps-dev): update @types/lodash requirement from ^4.14.202 to ^4.17.0 in /superset-frontend/plugins/plugin-chart-handlebars (@dependabot[bot]) +- [#27318](https://github.com/apache/superset/pull/27318) build(deps): update lodash requirement from ^4.17.15 to ^4.17.21 in /superset-frontend/plugins/legacy-preset-chart-deckgl (@dependabot[bot]) +- [#27317](https://github.com/apache/superset/pull/27317) build(deps): bump bootstrap-slider from 10.6.2 to 11.0.2 in /superset-frontend/plugins/legacy-preset-chart-deckgl (@dependabot[bot]) +- [#26975](https://github.com/apache/superset/pull/26975) build(deps-dev): update @types/jest requirement from ^29.5.11 to ^29.5.12 in /superset-frontend/plugins/plugin-chart-pivot-table (@dependabot[bot]) +- [#27833](https://github.com/apache/superset/pull/27833) build(deps): update @types/react-table requirement from ^7.7.19 to ^7.7.20 in /superset-frontend/plugins/plugin-chart-table (@dependabot[bot]) +- [#27813](https://github.com/apache/superset/pull/27813) build(deps): bump 
@docsearch/react from 3.5.2 to 3.6.0 in /docs (@dependabot[bot]) +- [#27864](https://github.com/apache/superset/pull/27864) chore(🦾): bump python pytest 7.3.1 -> 7.4.4 (@github-actions[bot]) +- [#27343](https://github.com/apache/superset/pull/27343) build(deps-dev): bump @types/underscore from 1.11.6 to 1.11.15 in /superset-frontend (@dependabot[bot]) +- [#27852](https://github.com/apache/superset/pull/27852) refactor: Move fetchTimeRange to core package (@kgabryje) +- [#27843](https://github.com/apache/superset/pull/27843) chore: Default to engine specification regarding using wildcard (@john-bodley) +- [#27878](https://github.com/apache/superset/pull/27878) chore: Updates CHANGELOG.md with 3.1.2 data (@michael-s-molina) +- [#27867](https://github.com/apache/superset/pull/27867) chore(🦾): bump python pylint 2.17.7 -> 3.1.0 (@github-actions[bot]) +- [#27836](https://github.com/apache/superset/pull/27836) build(deps-dev): bump @types/redux-mock-store from 1.0.2 to 1.0.6 in /superset-frontend (@dependabot[bot]) +- [#27858](https://github.com/apache/superset/pull/27858) chore(sql_parse): Provide more meaningful SQLGlot errors (@john-bodley) +- [#27824](https://github.com/apache/superset/pull/27824) build(deps): bump @algolia/client-search from 4.22.1 to 4.23.2 in /docs (@dependabot[bot]) +- [#27816](https://github.com/apache/superset/pull/27816) build(deps): bump dompurify from 2.4.9 to 3.0.11 in /superset-frontend/plugins/legacy-preset-chart-nvd3 (@dependabot[bot]) +- [#27874](https://github.com/apache/superset/pull/27874) chore(🦾): bump python pyfakefs 5.2.2 -> 5.3.5 (@github-actions[bot]) +- [#27872](https://github.com/apache/superset/pull/27872) chore(🦾): bump python grpcio 1.60.1 -> 1.62.1 (@github-actions[bot]) +- [#27868](https://github.com/apache/superset/pull/27868) chore(🦾): bump python google-cloud-bigquery 3.20.0 -> 3.20.1 (@github-actions[bot]) +- [#27866](https://github.com/apache/superset/pull/27866) chore(🦾): bump python pytest-cov 4.0.0 -> 5.0.0 
(@github-actions[bot]) +- [#27871](https://github.com/apache/superset/pull/27871) chore(🦾): bump python sqloxide 0.1.33 -> 0.1.43 (@github-actions[bot]) +- [#27875](https://github.com/apache/superset/pull/27875) chore(🦾): bump python sqlglot 23.2.0 -> 23.6.3 (@github-actions[bot]) +- [#27870](https://github.com/apache/superset/pull/27870) chore(🦾): bump python docker 6.1.1 -> 7.0.0 (@github-actions[bot]) +- [#27869](https://github.com/apache/superset/pull/27869) chore(🦾): bump python freezegun 1.2.2 -> 1.4.0 (@github-actions[bot]) +- [#27873](https://github.com/apache/superset/pull/27873) chore(🦾): bump python pillow 10.2.0 -> 10.3.0 (@github-actions[bot]) +- [#27865](https://github.com/apache/superset/pull/27865) chore(🦾): bump python pre-commit 3.3.3 -> 3.7.0 (@github-actions[bot]) +- [#27791](https://github.com/apache/superset/pull/27791) docs: small cleanup (@artofcomputing) +- [#27835](https://github.com/apache/superset/pull/27835) build(deps): update xss requirement from ^1.0.14 to ^1.0.15 in /superset-frontend/plugins/plugin-chart-table (@dependabot[bot]) +- [#27808](https://github.com/apache/superset/pull/27808) build(deps-dev): bump react-test-renderer from 16.9.0 to 16.14.0 in /superset-frontend (@dependabot[bot]) +- [#27819](https://github.com/apache/superset/pull/27819) build(deps): bump @ant-design/icons from 5.3.1 to 5.3.6 in /docs (@dependabot[bot]) +- [#27842](https://github.com/apache/superset/pull/27842) chore(sql_parse): Strip leading/trailing whitespace in Jinja macro extraction (@john-bodley) +- [#27198](https://github.com/apache/superset/pull/27198) chore(node): bumping Superset to Node 18 (@rusackas) +- [#27814](https://github.com/apache/superset/pull/27814) build(deps-dev): bump typescript from 5.3.3 to 5.4.3 in /docs (@dependabot[bot]) +- [#27818](https://github.com/apache/superset/pull/27818) build(deps-dev): bump @docusaurus/module-type-aliases from 3.1.1 to 3.2.0 in /docs (@dependabot[bot]) +- 
[#27823](https://github.com/apache/superset/pull/27823) build(deps-dev): bump @tsconfig/docusaurus from 2.0.2 to 2.0.3 in /docs (@dependabot[bot]) +- [#24112](https://github.com/apache/superset/pull/24112) chore: Bump to Python3.10 (@EugeneTorap) +- [#27802](https://github.com/apache/superset/pull/27802) build(deps): bump actions/github-script from 5 to 7 (@dependabot[bot]) +- [#27751](https://github.com/apache/superset/pull/27751) chore(🦾): bump python flask-session 0.5.0 -> 0.8.0 (@github-actions[bot]) +- [#27757](https://github.com/apache/superset/pull/27757) chore(🦾): bump python simplejson 3.17.3 -> 3.19.2 (@github-actions[bot]) +- [#27839](https://github.com/apache/superset/pull/27839) chore: Updates translation owners (@michael-s-molina) +- [#27754](https://github.com/apache/superset/pull/27754) chore(🦾): bump python thrift 0.16.0 -> 0.20.0 (@github-actions[bot]) +- [#27612](https://github.com/apache/superset/pull/27612) docs: simplify the Quickstart guide (@mistercrunch) +- [#27750](https://github.com/apache/superset/pull/27750) chore(🦾): bump python pandas-gbq 0.19.1 -> 0.22.0 (@github-actions[bot]) +- [#27747](https://github.com/apache/superset/pull/27747) chore(🦾): bump python xlsxwriter 3.0.7 -> 3.0.9 (@github-actions[bot]) +- [#27758](https://github.com/apache/superset/pull/27758) chore(🦾): bump python google-cloud-bigquery 3.10.0 -> 3.20.0 (@github-actions[bot]) +- [#27759](https://github.com/apache/superset/pull/27759) chore(🦾): bump python python-dotenv 0.19.0 -> 1.0.1 (@github-actions[bot]) +- [#27748](https://github.com/apache/superset/pull/27748) chore(🦾): bump python flask-cors 3.0.10 -> 4.0.0 (@github-actions[bot]) +- [#27746](https://github.com/apache/superset/pull/27746) chore(🦾): bump python cron-descriptor 1.2.24 -> 1.4.3 (@github-actions[bot]) +- [#27749](https://github.com/apache/superset/pull/27749) chore(🦾): bump python sqlglot 23.0.2 -> 23.2.0 (@github-actions[bot]) +- [#27756](https://github.com/apache/superset/pull/27756) chore(🦾): 
bump python humanize 3.11.0 -> 4.9.0 (@github-actions[bot]) +- [#27755](https://github.com/apache/superset/pull/27755) chore(🦾): bump python flask-talisman 1.0.0 -> 1.1.0 (@github-actions[bot]) +- [#27753](https://github.com/apache/superset/pull/27753) chore(🦾): bump python packaging 23.1 -> 23.2 (@github-actions[bot]) +- [#27752](https://github.com/apache/superset/pull/27752) chore(🦾): bump python google-cloud-bigquery 3.10.0 -> 3.20.0 (@github-actions[bot]) +- [#27728](https://github.com/apache/superset/pull/27728) chore(🦾): bump python gevent 23.9.1 -> 24.2.1 (@github-actions[bot]) +- [#27740](https://github.com/apache/superset/pull/27740) chore(🦾): bump python flask-compress 1.13 -> 1.14 (@github-actions[bot]) +- [#27729](https://github.com/apache/superset/pull/27729) chore(🦾): bump python mysqlclient 2.1.0 -> 2.2.4 (@github-actions[bot]) +- [#27727](https://github.com/apache/superset/pull/27727) chore(🦾): bump python sqlalchemy-bigquery 1.6.1 -> 1.10.0 (@github-actions[bot]) +- [#27732](https://github.com/apache/superset/pull/27732) chore(🦾): bump python tableschema 1.20.2 -> 1.20.10 (@github-actions[bot]) +- [#27733](https://github.com/apache/superset/pull/27733) chore(🦾): bump python tabulate 0.8.9 -> 0.8.10 (@github-actions[bot]) +- [#27735](https://github.com/apache/superset/pull/27735) chore(🦾): bump python mako 1.2.4 -> 1.3.2 (@github-actions[bot]) +- [#27736](https://github.com/apache/superset/pull/27736) chore(🦾): bump python python-dateutil 2.8.2 -> 2.9.0.post0 (@github-actions[bot]) +- [#27737](https://github.com/apache/superset/pull/27737) chore(🦾): bump python pyjwt 2.4.0 -> 2.8.0 (@github-actions[bot]) +- [#27741](https://github.com/apache/superset/pull/27741) chore(🦾): bump python click-option-group 0.5.5 -> 0.5.6 (@github-actions[bot]) +- [#27742](https://github.com/apache/superset/pull/27742) chore(🦾): bump python typing-extensions 4.4.0 -> 4.10.0 (@github-actions[bot]) +- [#27726](https://github.com/apache/superset/pull/27726) chore(🦾): bump 
python playwright 1.41.2 -> 1.42.0 (@github-actions[bot]) +- [#27731](https://github.com/apache/superset/pull/27731) chore(🦾): bump python pydruid 0.6.5 -> 0.6.6 (@github-actions[bot]) +- [#27730](https://github.com/apache/superset/pull/27730) chore(🦾): bump python thrift 0.16.0 -> 0.20.0 (@github-actions[bot]) +- [#27695](https://github.com/apache/superset/pull/27695) chore(🦾): bump python "sqlalchemy==1.4.52" (@github-actions[bot]) +- [#27687](https://github.com/apache/superset/pull/27687) chore(🦾): bump python "nh3==0.2.17" (@github-actions[bot]) +- [#27680](https://github.com/apache/superset/pull/27680) chore(🦾): bump python "isodate==0.6.1" (@github-actions[bot]) +- [#27711](https://github.com/apache/superset/pull/27711) chore: bump pylint (@betodealmeida) +- [#27696](https://github.com/apache/superset/pull/27696) chore(🦾): bump python "msgpack==1.0.8" (@github-actions[bot]) +- [#27688](https://github.com/apache/superset/pull/27688) chore(🦾): bump python "wtforms==3.1.2" (@github-actions[bot]) +- [#27634](https://github.com/apache/superset/pull/27634) other: Add TechAuditBI to supersetbot metadata.js (@TechAuditBI) +- [#27699](https://github.com/apache/superset/pull/27699) chore(🦾): bump python "geopy==2.4.1" (@github-actions[bot]) +- [#27698](https://github.com/apache/superset/pull/27698) chore(🦾): bump python "backoff==2.2.1" (@github-actions[bot]) +- [#27692](https://github.com/apache/superset/pull/27692) chore(🦾): bump python "pyparsing==3.1.2" (@github-actions[bot]) +- [#27693](https://github.com/apache/superset/pull/27693) chore(🦾): bump python "croniter==2.0.3" (@github-actions[bot]) +- [#27682](https://github.com/apache/superset/pull/27682) chore(🦾): bump python "click==8.1.7" (@github-actions[bot]) +- [#27681](https://github.com/apache/superset/pull/27681) chore(🦾): bump python "polyline==2.0.2" (@github-actions[bot]) +- [#27684](https://github.com/apache/superset/pull/27684) chore(🦾): bump python "pyarrow==14.0.2" (@github-actions[bot]) +- 
[#27657](https://github.com/apache/superset/pull/27657) chore(🤖): bump python "flask==2.3.3" (@mistercrunch) +- [#27655](https://github.com/apache/superset/pull/27655) chore(🤖): bump python "sqlalchemy==1.4.52" (@mistercrunch) +- [#27641](https://github.com/apache/superset/pull/27641) chore: fix master builds + bump python library "cryptography" (@mistercrunch) +- [#27650](https://github.com/apache/superset/pull/27650) chore(🤖): bump python "alembic==1.13.1" (@github-actions[bot]) +- [#27653](https://github.com/apache/superset/pull/27653) build(deps-dev): bump express from 4.17.3 to 4.19.2 in /superset-frontend (@dependabot[bot]) +- [#27651](https://github.com/apache/superset/pull/27651) build(deps): bump express from 4.18.3 to 4.19.2 in /superset-websocket/utils/client-ws-app (@dependabot[bot]) +- [#27652](https://github.com/apache/superset/pull/27652) build(deps): bump express from 4.18.2 to 4.19.2 in /docs (@dependabot[bot]) +- [#27649](https://github.com/apache/superset/pull/27649) chore(🤖): bump python "markdown==3.6" (@github-actions[bot]) +- [#27498](https://github.com/apache/superset/pull/27498) refactor: Migrate CssEditor to typescript (@EnxDev) +- [#27422](https://github.com/apache/superset/pull/27422) test(Migration to RTL): Refactor ActivityTable.test.tsx from Enzyme to RTL (@rtexelm) +- [#27626](https://github.com/apache/superset/pull/27626) build(deps-dev): bump webpack from 5.90.1 to 5.91.0 in /docs (@dependabot[bot]) +- [#25540](https://github.com/apache/superset/pull/25540) chore: replace "dashboard" -> "report" in chart email report modal (@sfirke) +- [#27596](https://github.com/apache/superset/pull/27596) docs: updates list of countries in country-map-tools.mdx (@jbat) +- [#27609](https://github.com/apache/superset/pull/27609) build(deps): bump webpack-dev-middleware from 5.3.1 to 5.3.4 in /docs (@dependabot[bot]) +- [#27309](https://github.com/apache/superset/pull/27309) refactor: Migrate CopyToClipboard to typescript (@EnxDev) +- 
[#27579](https://github.com/apache/superset/pull/27579) chore(docs): clarifying doc comments about LOGO_TARGET_PATH (@rusackas) +- [#27572](https://github.com/apache/superset/pull/27572) chore(examples): organizing example chart yaml files into dashboard folders (@rusackas) +- [#27610](https://github.com/apache/superset/pull/27610) build(deps-dev): bump webpack-dev-middleware from 5.3.3 to 5.3.4 in /superset-frontend (@dependabot[bot]) +- [#27540](https://github.com/apache/superset/pull/27540) docs: make k8s top item in Installation section (@mistercrunch) +- [#27574](https://github.com/apache/superset/pull/27574) chore: Update required jobs in .asf.yml (@john-bodley) +- [#27569](https://github.com/apache/superset/pull/27569) chore(helm): Bumping app version to 3.1.1 in helm chart (@craig-rueda) +- [#27505](https://github.com/apache/superset/pull/27505) chore: 2nd try - simplify python dependencies (@mistercrunch) +- [#27533](https://github.com/apache/superset/pull/27533) chore(docs): fix last broken Slack join link in docs (@sfirke) +- [#27518](https://github.com/apache/superset/pull/27518) build(deps-dev): bump follow-redirects from 1.15.4 to 1.15.6 in /superset-frontend (@dependabot[bot]) +- [#27516](https://github.com/apache/superset/pull/27516) build(deps-dev): bump follow-redirects from 1.15.4 to 1.15.6 in /superset-embedded-sdk (@dependabot[bot]) +- [#27517](https://github.com/apache/superset/pull/27517) build(deps): bump follow-redirects from 1.15.4 to 1.15.6 in /docs (@dependabot[bot]) +- [#27520](https://github.com/apache/superset/pull/27520) chore: add annotations to `sql_parse.py` (@betodealmeida) +- [#27486](https://github.com/apache/superset/pull/27486) chore(docs): relocating the edit page button a tad. 
(@rusackas) +- [#26767](https://github.com/apache/superset/pull/26767) chore: improve SQL parsing (@betodealmeida) +- [#27480](https://github.com/apache/superset/pull/27480) chore: Add an extension for Home submenu (@kgabryje) +- [#27429](https://github.com/apache/superset/pull/27429) test(Migration to RTL): Refactor ChartTable.test.tsx from Enzyme to RTL (@rtexelm) +- [#27469](https://github.com/apache/superset/pull/27469) chore: add unit test for `values_for_column` (@betodealmeida) +- [#27327](https://github.com/apache/superset/pull/27327) build(deps-dev): bump eslint from 8.56.0 to 8.57.0 in /superset-websocket (@dependabot[bot]) +- [#27326](https://github.com/apache/superset/pull/27326) build(deps-dev): bump @types/node from 20.11.16 to 20.11.24 in /superset-websocket (@dependabot[bot]) +- [#27347](https://github.com/apache/superset/pull/27347) build(deps): bump @storybook/types from 7.6.13 to 7.6.17 in /superset-frontend (@dependabot[bot]) +- [#27405](https://github.com/apache/superset/pull/27405) chore: upgrade setuptools/pip in Dockerfile (@mistercrunch) +- [#27290](https://github.com/apache/superset/pull/27290) docs(import_datasources): Remove legacy documentation and update current use (@ddxv) +- [#27325](https://github.com/apache/superset/pull/27325) build(deps-dev): bump @types/jsonwebtoken from 9.0.5 to 9.0.6 in /superset-websocket (@dependabot[bot]) +- [#27324](https://github.com/apache/superset/pull/27324) build(deps-dev): bump @typescript-eslint/eslint-plugin from 5.61.0 to 5.62.0 in /superset-websocket (@dependabot[bot]) +- [#27328](https://github.com/apache/superset/pull/27328) build(deps-dev): bump prettier from 3.2.4 to 3.2.5 in /superset-websocket (@dependabot[bot]) +- [#27342](https://github.com/apache/superset/pull/27342) build(deps): bump react-lines-ellipsis from 0.15.0 to 0.15.4 in /superset-frontend (@dependabot[bot]) +- [#27337](https://github.com/apache/superset/pull/27337) build(deps): bump express from 4.18.2 to 4.18.3 in 
/superset-websocket/utils/client-ws-app (@dependabot[bot]) +- [#27331](https://github.com/apache/superset/pull/27331) build(deps): bump @ant-design/icons from 5.3.0 to 5.3.1 in /docs (@dependabot[bot]) +- [#27356](https://github.com/apache/superset/pull/27356) chore(docs): remove filterbox section from Exploring docs page (@sfirke) +- [#27250](https://github.com/apache/superset/pull/27250) chore: update redis to >= 4.6.0 (@nigzak) +- [#27304](https://github.com/apache/superset/pull/27304) chore: Replace deprecated command with environment file (@jongwooo) +- [#27297](https://github.com/apache/superset/pull/27297) chore(ci): run unit tests on script changes (@eschutho) +- [#27287](https://github.com/apache/superset/pull/27287) docs: update CVEs for 3.0.4 and 3.1.1 (@dpgaspar) +- [#27219](https://github.com/apache/superset/pull/27219) build(deps): bump re-resizable from 6.6.1 to 6.9.11 in /superset-frontend (@justinpark) +- [#27264](https://github.com/apache/superset/pull/27264) build(deps): bump es5-ext from 0.10.53 to 0.10.63 in /docs (@dependabot[bot]) +- [#24063](https://github.com/apache/superset/pull/24063) chore: Replace deprecated command with environment file (@jongwooo) +- [#26932](https://github.com/apache/superset/pull/26932) build(deps): bump @ant-design/icons from 4.7.0 to 5.3.0 in /docs (@dependabot[bot]) +- [#27145](https://github.com/apache/superset/pull/27145) refactor(plugins): Time Comparison Utils (@Antonio-RiveroMartnez) +- [#26732](https://github.com/apache/superset/pull/26732) build(deps-dev): bump prettier from 3.0.3 to 3.2.4 in /superset-websocket (@dependabot[bot]) +- [#26765](https://github.com/apache/superset/pull/26765) perf(export): export generates unnecessary files content (@Always-prog) +- [#27180](https://github.com/apache/superset/pull/27180) build(deps): bump ip from 1.1.8 to 1.1.9 in /superset-frontend/cypress-base (@dependabot[bot]) +- [#27175](https://github.com/apache/superset/pull/27175) chore(docs): change 'install from 
scratch' to 'install from PyPI' (@sfirke) +- [#27178](https://github.com/apache/superset/pull/27178) build(deps-dev): bump ip from 2.0.0 to 2.0.1 in /superset-frontend (@dependabot[bot]) +- [#27147](https://github.com/apache/superset/pull/27147) chore: Remove obsolete actor (@john-bodley) +- [#27170](https://github.com/apache/superset/pull/27170) chore: Updates CHANGELOG.md with 3.1.1 data (@michael-s-molina) diff --git a/CHANGELOG/4.1.1.md b/CHANGELOG/4.1.1.md new file mode 100644 index 000000000000..89b5c4409a89 --- /dev/null +++ b/CHANGELOG/4.1.1.md @@ -0,0 +1,50 @@ +<!-- +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. 
+--> + +## Change Log + +### 4.1.1 (Fri Nov 15 22:13:57 2024 +0530) + +**Database Migrations** + +**Features** + +**Fixes** + +- [#30886](https://github.com/apache/superset/pull/30886) fix: blocks UI elements on right side (@samarsrivastav) +- [#30859](https://github.com/apache/superset/pull/30859) fix(package.json): Pin luxon version to unblock master (@geido) +- [#30588](https://github.com/apache/superset/pull/30588) fix(explore): column data type tooltip format (@mistercrunch) +- [#29911](https://github.com/apache/superset/pull/29911) fix: Rename database from 'couchbasedb' to 'couchbase' in documentation and db_engine_specs (@ayush-couchbase) +- [#30828](https://github.com/apache/superset/pull/30828) fix(TimezoneSelector): Failing unit tests due to timezone change (@geido) +- [#30875](https://github.com/apache/superset/pull/30875) fix: don't show metadata for embedded dashboards (@sadpandajoe) +- [#30851](https://github.com/apache/superset/pull/30851) fix: Graph chart colors (@michael-s-molina) +- [#29867](https://github.com/apache/superset/pull/29867) fix(capitalization): Capitalizing a button. (@rusackas) +- [#29782](https://github.com/apache/superset/pull/29782) fix(translations): Translate embedded errors (@rusackas) +- [#29772](https://github.com/apache/superset/pull/29772) fix: Fixing incomplete string escaping. 
(@rusackas) +- [#29725](https://github.com/apache/superset/pull/29725) fix(frontend/docker, ci): fix borked Docker build due to Lerna v8 uplift (@hainenber) + +**Others** + +- [#30576](https://github.com/apache/superset/pull/30576) chore: add link to Superset when report error (@eschutho) +- [#29786](https://github.com/apache/superset/pull/29786) refactor(Slider): Upgrade Slider to Antd 5 (@geido) +- [#29674](https://github.com/apache/superset/pull/29674) refactor(ChartCreation): Migrate tests to RTL (@rtexelm) +- [#29843](https://github.com/apache/superset/pull/29843) refactor(controls): Migrate AdhocMetricOption.test to RTL (@rtexelm) +- [#29845](https://github.com/apache/superset/pull/29845) refactor(controls): Migrate MetricDefinitionValue.test to RTL (@rtexelm) +- [#28424](https://github.com/apache/superset/pull/28424) docs: Check markdown files for bad links using linkinator (@rusackas) +- [#29768](https://github.com/apache/superset/pull/29768) docs(contributing): fix broken link to translations sub-section (@sfirke) diff --git a/CHANGELOG/4.1.2.md b/CHANGELOG/4.1.2.md new file mode 100644 index 000000000000..917092b0518e --- /dev/null +++ b/CHANGELOG/4.1.2.md @@ -0,0 +1,83 @@ +<!-- +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. 
+--> + +## Change Log + +### 4.1.2 (Fri Mar 7 13:28:05 2025 -0800) + +**Database Migrations** + +- [#32538](https://github.com/apache/superset/pull/32538) fix(migrations): Handle comparator None in old time comparison migration (@Antonio-RiveroMartnez) +- [#32155](https://github.com/apache/superset/pull/32155) fix(migrations): Handle no params in time comparison migration (@Antonio-RiveroMartnez) +- [#31185](https://github.com/apache/superset/pull/31185) fix: check for column before adding in migrations (@betodealmeida) + +**Features** + +- [#29974](https://github.com/apache/superset/pull/29974) feat(sqllab): Adds refresh button to table metadata in SQL Lab (@Usiel) + +**Fixes** + +- [#32515](https://github.com/apache/superset/pull/32515) fix(sqllab): Allow clear on schema and catalog (@justinpark) +- [#32500](https://github.com/apache/superset/pull/32500) fix: dashboard, chart and dataset import validation (@dpgaspar) +- [#31353](https://github.com/apache/superset/pull/31353) fix(sqllab): duplicate error message (@betodealmeida) +- [#31407](https://github.com/apache/superset/pull/31407) fix: Big Number side cut fixed (@fardin-developer) +- [#31480](https://github.com/apache/superset/pull/31480) fix(sunburst): Use metric label from verbose map (@gerbermichi) +- [#31427](https://github.com/apache/superset/pull/31427) fix(tags): clean up bulk create api and schema (@villebro) +- [#31334](https://github.com/apache/superset/pull/31334) fix(docs): add custom editUrl path for intro page (@dwgrossberg) +- [#31353](https://github.com/apache/superset/pull/31353) fix(sqllab): duplicate error message (@betodealmeida) +- [#31323](https://github.com/apache/superset/pull/31323) fix: Use clickhouse sqlglot dialect for YDB (@vgvoleg) +- [#31198](https://github.com/apache/superset/pull/31198) fix: add more clickhouse disallowed functions on config (@dpgaspar) +- [#31194](https://github.com/apache/superset/pull/31194) fix(embedded): Hide anchor links in embedded mode (@Vitor-Avila) 
+- [#31960](https://github.com/apache/superset/pull/31960) fix(sqllab): Missing allowHTML props in ResultTableExtension (@justinpark) +- [#31332](https://github.com/apache/superset/pull/31332) fix: prevent multiple pvm errors on migration (@eschutho) +- [#31437](https://github.com/apache/superset/pull/31437) fix(database import): Gracefully handle error to get catalog schemas (@Vitor-Avila) +- [#31173](https://github.com/apache/superset/pull/31173) fix: cache-warmup fails (@nsivarajan) +- [#30442](https://github.com/apache/superset/pull/30442) fix(fe/src/dashboard): optional chaining for possibly nullable parent attribute in LayoutItem type (@hainenber) +- [#31639](https://github.com/apache/superset/pull/31639) fix(sqllab): unable to update saved queries (@DamianPendrak) +- [#29898](https://github.com/apache/superset/pull/29898) fix: parse pandas pivot null values (@eschutho) +- [#31414](https://github.com/apache/superset/pull/31414) fix(Pivot Table): Fix column width to respect currency config (@Vitor-Avila) +- [#31335](https://github.com/apache/superset/pull/31335) fix(histogram): axis margin padding consistent with other graphs (@tatiana-cherne) +- [#31301](https://github.com/apache/superset/pull/31301) fix(AllEntitiesTable): show Tags (@alexandrusoare) +- [#31329](https://github.com/apache/superset/pull/31329) fix: pass string to `process_template` (@betodealmeida) +- [#31341](https://github.com/apache/superset/pull/31341) fix(pinot): remove query aliases from SELECT and ORDER BY clauses in Pinot (@yuribogomolov) +- [#31308](https://github.com/apache/superset/pull/31308) fix: annotations on horizontal bar chart (@DamianPendrak) +- [#31294](https://github.com/apache/superset/pull/31294) fix(sqllab): Remove update_saved_query_exec_info to reduce lag (@justinpark) +- [#30897](https://github.com/apache/superset/pull/30897) fix: Exception handling for SQL Lab views (@michael-s-molina) +- [#31199](https://github.com/apache/superset/pull/31199) fix(Databricks): Escape 
catalog and schema names in pre-queries (@Vitor-Avila) +- [#31265](https://github.com/apache/superset/pull/31265) fix(trino): db session error in handle cursor (@justinpark) +- [#31024](https://github.com/apache/superset/pull/31024) fix(dataset): use sqlglot for DML check (@betodealmeida) +- [#29885](https://github.com/apache/superset/pull/29885) fix: add mutator to get_columns_description (@eschutho) +- [#30821](https://github.com/apache/superset/pull/30821) fix: x axis title disappears when editing bar chart (@DamianPendrak) +- [#31181](https://github.com/apache/superset/pull/31181) fix: Time-series Line Chart Display unnecessary total (@michael-s-molina) +- [#31163](https://github.com/apache/superset/pull/31163) fix(Dashboard): Backward compatible shared_label_colors field (@geido) +- [#31156](https://github.com/apache/superset/pull/31156) fix: check orderby (@betodealmeida) +- [#31154](https://github.com/apache/superset/pull/31154) fix: Remove unwanted commit on Trino's handle_cursor (@michael-s-molina) +- [#31151](https://github.com/apache/superset/pull/31151) fix: Revert "feat(trino): Add functionality to upload data (#29164)" (@michael-s-molina) +- [#31031](https://github.com/apache/superset/pull/31031) fix(Dashboard): Ensure shared label colors are updated (@geido) +- [#30967](https://github.com/apache/superset/pull/30967) fix(release validation): scripts now support RSA and EDDSA keys. 
(@rusackas) +- [#30881](https://github.com/apache/superset/pull/30881) fix(Dashboard): Native & Cross-Filters Scoping Performance (@geido) +- [#30887](https://github.com/apache/superset/pull/30887) fix(imports): import query_context for imports with charts (@lindenh) +- [#31008](https://github.com/apache/superset/pull/31008) fix(explore): verified props is not updated (@justinpark) +- [#30646](https://github.com/apache/superset/pull/30646) fix(Dashboard): Retain colors when color scheme not set (@geido) +- [#30962](https://github.com/apache/superset/pull/30962) fix(Dashboard): Exclude edit param in async screenshot (@geido) + +**Others** + +- [#32043](https://github.com/apache/superset/pull/32043) chore: Skip the creation of secondary perms during catalog migrations (@Vitor-Avila) +- [#30865](https://github.com/apache/superset/pull/30865) docs: Updating 4.1 Release Notes (@yousoph) diff --git a/CHANGELOG/4.1.3.md b/CHANGELOG/4.1.3.md new file mode 100644 index 000000000000..d15145487757 --- /dev/null +++ b/CHANGELOG/4.1.3.md @@ -0,0 +1,58 @@ +<!-- +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. 
+--> + +## Change Log + +### 4.1.3 (Thu May 29 02:31:07 2025 -0500) + +**Database Migrations** + +**Features** + +**Fixes** + +- [#33522](https://github.com/apache/superset/pull/33522) fix(Sqllab): Autocomplete got stuck in UI when open it too fast (@rebenitez1802) +- [#33425](https://github.com/apache/superset/pull/33425) fix(table-chart): time shift is not working (@justinpark) +- [#32414](https://github.com/apache/superset/pull/32414) fix(api): Added uuid to list api calls (@withnale) +- [#33354](https://github.com/apache/superset/pull/33354) fix: loading examples from raw.githubusercontent.com fails with 429 errors (@mistercrunch) +- [#32382](https://github.com/apache/superset/pull/32382) fix(pinot): revert join and subquery flags (@yuribogomolov) +- [#32473](https://github.com/apache/superset/pull/32473) fix(plugin-chart-echarts): remove erroneous upper bound value (@villebro) +- [#33048](https://github.com/apache/superset/pull/33048) fix: improve error type on parse error (@justinpark) +- [#32968](https://github.com/apache/superset/pull/32968) fix(pivot-table): Revert "fix(Pivot Table): Fix column width to respect currency config (#31414)" (@justinpark) +- [#32795](https://github.com/apache/superset/pull/32795) fix(log): store navigation path to get correct logging path (@justinpark) +- [#33216](https://github.com/apache/superset/pull/33216) fix: Downgrade to marshmallow<4 (@amotl) +- [#32866](https://github.com/apache/superset/pull/32866) fix: make packages PEP 625 compliant (@sadpandajoe) +- [#32035](https://github.com/apache/superset/pull/32035) fix(fe/dashboard-list): display modifier info for `Last modified` data (@hainenber) +- [#32708](https://github.com/apache/superset/pull/32708) fix(logging): missing path in event data (@justinpark) +- [#32699](https://github.com/apache/superset/pull/32699) fix: Signature of Celery pruner jobs (@michael-s-molina) +- [#32681](https://github.com/apache/superset/pull/32681) fix(log): Update recent_activity by event 
name (@justinpark) +- [#32608](https://github.com/apache/superset/pull/32608) fix(welcome): perf on distinct recent activities (@justinpark) +- [#32572](https://github.com/apache/superset/pull/32572) fix: Log table retention policy (@michael-s-molina) +- [#32406](https://github.com/apache/superset/pull/32406) fix(model/helper): represent RLS filter clause in proper textual SQL string (@hainenber) +- [#32240](https://github.com/apache/superset/pull/32240) fix: upgrade to 3.11.11-slim-bookworm to address critical vulnerabilities (@gpchandran) +- [#30858](https://github.com/apache/superset/pull/30858) fix(chart data): removing query from /chart/data payload when accessing as guest user (@fisjac) + +**Others** + +- [#33612](https://github.com/apache/superset/pull/33612) chore: update Dockerfile - Upgrade to 3.11.12 (@gpchandran) +- [#33435](https://github.com/apache/superset/pull/33435) docs: CVEs fixed on 4.1.2 (@sha174n) +- [#33339](https://github.com/apache/superset/pull/33339) chore(🦾): bump python h11 0.14.0 -> 0.16.0 (@github-actions[bot]) +- [#32745](https://github.com/apache/superset/pull/32745) chore(🦾): bump python sqlglot 26.1.3 -> 26.11.1 (@github-actions[bot]) +- [#32782](https://github.com/apache/superset/pull/32782) chore: Revert "chore: bump base image in Dockerfile with `ARG PY_VER=3.11.11-slim-bookworm`" (@sadpandajoe) +- [#32780](https://github.com/apache/superset/pull/32780) chore: bump base image in Dockerfile with `ARG PY_VER=3.11.11-slim-bookworm` (@gpchandran) diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 59795d5f81a1..c2e504e3a8d1 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -94,9 +94,9 @@ This statement thanks the following, on which it draws for content and inspirati * [CouchDB Project Code of conduct](http://couchdb.apache.org/conduct.html) * [Fedora Project Code of Conduct](http://fedoraproject.org/code-of-conduct) -* [Speak Up! Code of Conduct](http://speakup.io/coc.html) +* [Speak Up! 
Code of Conduct](http://web.archive.org/web/20141109123859/http://speakup.io/coc.html) * [Django Code of Conduct](https://www.djangoproject.com/conduct/) -* [Debian Code of Conduct](http://www.debian.org/vote/2014/vote_002) +* [Debian Code of Conduct](https://www.debian.org/vote/2014/vote_002) * [Twitter Open Source Code of Conduct](https://github.com/twitter/code-of-conduct/blob/master/code-of-conduct.md) * [Mozilla Code of Conduct/Draft](https://wiki.mozilla.org/Code_of_Conduct/Draft#Conflicts_of_Interest) * [Python Diversity Appendix](https://www.python.org/community/diversity/) diff --git a/Dockerfile b/Dockerfile index e83c81471d91..92a0c6694d22 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,7 +18,7 @@ ###################################################################### # Node stage to deal with static asset construction ###################################################################### -ARG PY_VER=3.10-slim-bookworm +ARG PY_VER=3.11.12-slim-bookworm # if BUILDPLATFORM is null, set it to 'amd64' (or leave as is otherwise). 
ARG BUILDPLATFORM=${BUILDPLATFORM:-amd64} @@ -28,22 +28,22 @@ ARG NPM_BUILD_CMD="build" # Somehow we need python3 + build-essential on this side of the house to install node-gyp RUN apt-get update -qq \ - && apt-get install \ - -yqq --no-install-recommends \ - build-essential \ - python3 + && apt-get install \ + -yqq --no-install-recommends \ + build-essential \ + python3 ENV BUILD_CMD=${NPM_BUILD_CMD} \ - PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true + PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true # NPM ci first, as to NOT invalidate previous steps except for when package.json changes RUN --mount=type=bind,target=/frontend-mem-nag.sh,src=./docker/frontend-mem-nag.sh \ - /frontend-mem-nag.sh + /frontend-mem-nag.sh WORKDIR /app/superset-frontend RUN --mount=type=bind,target=./package.json,src=./superset-frontend/package.json \ - --mount=type=bind,target=./package-lock.json,src=./superset-frontend/package-lock.json \ - npm ci + --mount=type=bind,target=./package-lock.json,src=./superset-frontend/package-lock.json \ + npm ci # Runs the webpack build process COPY superset-frontend /app/superset-frontend @@ -64,38 +64,38 @@ FROM python:${PY_VER} AS lean WORKDIR /app ENV LANG=C.UTF-8 \ - LC_ALL=C.UTF-8 \ - SUPERSET_ENV=production \ - FLASK_APP="superset.app:create_app()" \ - PYTHONPATH="/app/pythonpath" \ - SUPERSET_HOME="/app/superset_home" \ - SUPERSET_PORT=8088 + LC_ALL=C.UTF-8 \ + SUPERSET_ENV=production \ + FLASK_APP="superset.app:create_app()" \ + PYTHONPATH="/app/pythonpath" \ + SUPERSET_HOME="/app/superset_home" \ + SUPERSET_PORT=8088 RUN mkdir -p ${PYTHONPATH} superset/static requirements superset-frontend apache_superset.egg-info requirements \ - && useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \ - && apt-get update -qq && apt-get install -yqq --no-install-recommends \ - curl \ - default-libmysqlclient-dev \ - libsasl2-dev \ - libsasl2-modules-gssapi-mit \ - libpq-dev \ - libecpg-dev \ - libldap2-dev \ - && touch 
superset/static/version_info.json \ - && chown -R superset:superset ./* \ - && rm -rf /var/lib/apt/lists/* + && useradd --user-group -d ${SUPERSET_HOME} -m --no-log-init --shell /bin/bash superset \ + && apt-get update -qq && apt-get install -yqq --no-install-recommends \ + curl \ + default-libmysqlclient-dev \ + libsasl2-dev \ + libsasl2-modules-gssapi-mit \ + libpq-dev \ + libecpg-dev \ + libldap2-dev \ + && touch superset/static/version_info.json \ + && chown -R superset:superset ./* \ + && rm -rf /var/lib/apt/lists/* COPY --chown=superset:superset pyproject.toml setup.py MANIFEST.in README.md ./ # setup.py uses the version information in package.json COPY --chown=superset:superset superset-frontend/package.json superset-frontend/ COPY --chown=superset:superset requirements/base.txt requirements/ RUN --mount=type=cache,target=/root/.cache/pip \ - apt-get update -qq && apt-get install -yqq --no-install-recommends \ - build-essential \ - && pip install --upgrade setuptools pip \ - && pip install -r requirements/base.txt \ - && apt-get autoremove -yqq --purge build-essential \ - && rm -rf /var/lib/apt/lists/* + apt-get update -qq && apt-get install -yqq --no-install-recommends \ + build-essential \ + && pip install --upgrade setuptools pip \ + && pip install -r requirements/base.txt \ + && apt-get autoremove -yqq --purge build-essential \ + && rm -rf /var/lib/apt/lists/* # Copy the compiled frontend assets COPY --chown=superset:superset --from=superset-node /app/superset/static/assets superset/static/assets @@ -103,7 +103,7 @@ COPY --chown=superset:superset --from=superset-node /app/superset/static/assets ## Lastly, let's install superset itself COPY --chown=superset:superset superset superset RUN --mount=type=cache,target=/root/.cache/pip \ - pip install -e . + pip install -e . 
# Copy the .json translations from the frontend layer COPY --chown=superset:superset --from=superset-node /app/superset/translations superset/translations @@ -111,9 +111,9 @@ COPY --chown=superset:superset --from=superset-node /app/superset/translations s # Compile translations for the backend - this generates .mo files, then deletes the .po files COPY ./scripts/translations/generate_mo_files.sh ./scripts/translations/ RUN ./scripts/translations/generate_mo_files.sh \ - && chown -R superset:superset superset/translations \ - && rm superset/translations/messages.pot \ - && rm superset/translations/*/LC_MESSAGES/*.po + && chown -R superset:superset superset/translations \ + && rm superset/translations/messages.pot \ + && rm superset/translations/*/LC_MESSAGES/*.po COPY --chmod=755 ./docker/run-server.sh /usr/bin/ USER superset @@ -131,42 +131,42 @@ FROM lean AS dev USER root RUN apt-get update -qq \ - && apt-get install -yqq --no-install-recommends \ - libnss3 \ - libdbus-glib-1-2 \ - libgtk-3-0 \ - libx11-xcb1 \ - libasound2 \ - libxtst6 \ - git \ - pkg-config \ - && rm -rf /var/lib/apt/lists/* + && apt-get install -yqq --no-install-recommends \ + libnss3 \ + libdbus-glib-1-2 \ + libgtk-3-0 \ + libx11-xcb1 \ + libasound2 \ + libxtst6 \ + git \ + pkg-config \ + && rm -rf /var/lib/apt/lists/* RUN --mount=type=cache,target=/root/.cache/pip \ - pip install playwright + pip install playwright RUN playwright install-deps RUN playwright install chromium # Install GeckoDriver WebDriver ARG GECKODRIVER_VERSION=v0.34.0 \ - FIREFOX_VERSION=125.0.3 + FIREFOX_VERSION=125.0.3 RUN apt-get update -qq \ - && apt-get install -yqq --no-install-recommends wget bzip2 \ - && wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \ - # Install Firefox - && wget -q 
https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \ - && ln -s /opt/firefox/firefox /usr/local/bin/firefox \ - && apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/* + && apt-get install -yqq --no-install-recommends wget bzip2 \ + && wget -q https://github.com/mozilla/geckodriver/releases/download/${GECKODRIVER_VERSION}/geckodriver-${GECKODRIVER_VERSION}-linux64.tar.gz -O - | tar xfz - -C /usr/local/bin \ + # Install Firefox + && wget -q https://download-installer.cdn.mozilla.net/pub/firefox/releases/${FIREFOX_VERSION}/linux-x86_64/en-US/firefox-${FIREFOX_VERSION}.tar.bz2 -O - | tar xfj - -C /opt \ + && ln -s /opt/firefox/firefox /usr/local/bin/firefox \ + && apt-get autoremove -yqq --purge wget bzip2 && rm -rf /var/[log,tmp]/* /tmp/* /var/lib/apt/lists/* # Cache everything for dev purposes... COPY --chown=superset:superset requirements/development.txt requirements/ RUN --mount=type=cache,target=/root/.cache/pip \ - apt-get update -qq && apt-get install -yqq --no-install-recommends \ - build-essential \ - && pip install -r requirements/development.txt \ - && apt-get autoremove -yqq --purge build-essential \ - && rm -rf /var/lib/apt/lists/* + apt-get update -qq && apt-get install -yqq --no-install-recommends \ + build-essential \ + && pip install -r requirements/development.txt \ + && apt-get autoremove -yqq --purge build-essential \ + && rm -rf /var/lib/apt/lists/* USER superset ###################################################################### diff --git a/README.md b/README.md index 680f8253a3b3..1e09606c543b 100644 --- a/README.md +++ b/README.md @@ -22,9 +22,9 @@ under the License. 
[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) [![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/apache/superset?sort=semver)](https://github.com/apache/superset/tree/latest) [![Build Status](https://github.com/apache/superset/workflows/Python/badge.svg)](https://github.com/apache/superset/actions) -[![PyPI version](https://badge.fury.io/py/apache-superset.svg)](https://badge.fury.io/py/apache-superset) +[![PyPI version](https://badge.fury.io/py/apache_superset.svg)](https://badge.fury.io/py/apache_superset) [![Coverage Status](https://codecov.io/github/apache/superset/coverage.svg?branch=master)](https://codecov.io/github/apache/superset) -[![PyPI](https://img.shields.io/pypi/pyversions/apache-superset.svg?maxAge=2592000)](https://pypi.python.org/pypi/apache-superset) +[![PyPI](https://img.shields.io/pypi/pyversions/apache_superset.svg?maxAge=2592000)](https://pypi.python.org/pypi/apache_superset) [![Get on Slack](https://img.shields.io/badge/slack-join-orange.svg)](http://bit.ly/join-superset-slack) [![Documentation](https://img.shields.io/badge/docs-apache.org-blue.svg)](https://superset.apache.org) @@ -72,8 +72,10 @@ Superset provides: ## Screenshots & Gifs **Video Overview** + <!-- File hosted here https://github.com/apache/superset-site/raw/lfs/superset-video-4k.mp4 --> -[superset-video-4k.webm](https://github.com/apache/superset/assets/812905/da036bc2-150c-4ee7-80f9-75e63210ff76) + +[superset-video-1080p.webm](https://github.com/user-attachments/assets/b37388f7-a971-409c-96a7-90c4e31322e6) <br/> @@ -151,7 +153,7 @@ Want to add support for your datastore or data engine? Read more [here](https:// and please read our [Slack Community Guidelines](https://github.com/apache/superset/blob/master/CODE_OF_CONDUCT.md#slack-community-guidelines) - [Join our dev@superset.apache.org Mailing list](https://lists.apache.org/list.html?dev@superset.apache.org). 
To join, simply send an email to [dev-subscribe@superset.apache.org](mailto:dev-subscribe@superset.apache.org) - If you want to help troubleshoot GitHub Issues involving the numerous database drivers that Superset supports, please consider adding your name and the databases you have access to on the [Superset Database Familiarity Rolodex](https://docs.google.com/spreadsheets/d/1U1qxiLvOX0kBTUGME1AHHi6Ywel6ECF8xk_Qy-V9R8c/edit#gid=0) -- Join Superset's Town Hall and [Operational Model](https://preset.io/blog/the-superset-operational-model-wants-you/) recurring meetings. Meeting info is available on the [Superset Community Calendar](https://superset.apache.org/community) +- Join Superset's Town Hall and [Operational Model](https://preset.io/blog/the-superset-operational-model-wants-you/) recurring meetings. Meeting info is available on the [Superset Community Calendar](https://superset.apache.org/community) ## Contributor Guide @@ -179,14 +181,16 @@ Understanding the Superset Points of View - [Building New Database Connectors](https://preset.io/blog/building-database-connector/) - [Create Your First Dashboard](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard/) - [Comprehensive Tutorial for Contributing Code to Apache Superset - ](https://preset.io/blog/tutorial-contributing-code-to-apache-superset/) + ](https://preset.io/blog/tutorial-contributing-code-to-apache-superset/) - [Resources to master Superset by Preset](https://preset.io/resources/) - Deploying Superset + - [Official Docker image](https://hub.docker.com/r/apache/superset) - [Helm Chart](https://github.com/apache/superset/tree/master/helm/superset) - Recordings of Past [Superset Community Events](https://preset.io/events) + - [Mixed Time Series Charts](https://preset.io/events/mixed-time-series-visualization-in-superset-workshop/) - [How the Bing Team Customized Superset for the Internal Self-Serve Data & Analytics 
Platform](https://preset.io/events/how-the-bing-team-heavily-customized-superset-for-their-internal-data/) - [Live Demo: Visualizing MongoDB and Pinot Data using Trino](https://preset.io/events/2021-04-13-visualizing-mongodb-and-pinot-data-using-trino/) @@ -194,6 +198,7 @@ Understanding the Superset Points of View - [Building a Database Connector for Superset](https://preset.io/events/2021-02-16-building-a-database-connector-for-superset/) - Visualizations + - [Creating Viz Plugins](https://superset.apache.org/docs/contributing/creating-viz-plugins/) - [Managing and Deploying Custom Viz Plugins](https://medium.com/nmc-techblog/apache-superset-manage-custom-viz-plugins-in-production-9fde1a708e55) - [Why Apache Superset is Betting on Apache ECharts](https://preset.io/blog/2021-4-1-why-echarts/) diff --git a/RELEASING/Dockerfile.from_local_tarball b/RELEASING/Dockerfile.from_local_tarball index 8f5605b25d64..3794ed4c80ae 100644 --- a/RELEASING/Dockerfile.from_local_tarball +++ b/RELEASING/Dockerfile.from_local_tarball @@ -20,7 +20,7 @@ RUN useradd --user-group --create-home --no-log-init --shell /bin/bash superset # Configure environment ENV LANG=C.UTF-8 \ - LC_ALL=C.UTF-8 + LC_ALL=C.UTF-8 RUN apt-get update -y @@ -30,12 +30,15 @@ RUN apt-get install -y apt-transport-https apt-utils # Install superset dependencies # https://superset.apache.org/docs/installation/installing-superset-from-scratch RUN apt-get install -y build-essential libssl-dev \ - libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium + libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium zstd # Install nodejs for custom build # https://nodejs.org/en/download/package-manager/ -RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - \ - && apt-get install -y nodejs +RUN set -eux; \ + curl -sL https://deb.nodesource.com/setup_20.x | bash -; \ + apt-get install -y nodejs; \ + node --version; +RUN if ! 
which npm; then apt-get install -y npm; fi RUN mkdir -p /home/superset RUN chown superset /home/superset @@ -47,21 +50,21 @@ ARG SUPERSET_RELEASE_RC_TARBALL # Can fetch source from svn or copy tarball from local mounted directory COPY $SUPERSET_RELEASE_RC_TARBALL ./ RUN tar -xvf *.tar.gz -WORKDIR /home/superset/apache-superset-$VERSION/superset-frontend +WORKDIR /home/superset/apache_superset-$VERSION/superset-frontend RUN npm ci \ - && npm run build \ - && rm -rf node_modules + && npm run build \ + && rm -rf node_modules -WORKDIR /home/superset/apache-superset-$VERSION +WORKDIR /home/superset/apache_superset-$VERSION RUN pip install --upgrade setuptools pip \ - && pip install -r requirements/base.txt \ - && pip install --no-cache-dir . + && pip install -r requirements/base.txt \ + && pip install --no-cache-dir . RUN flask fab babel-compile --target superset/translations ENV PATH=/home/superset/superset/bin:$PATH \ - PYTHONPATH=/home/superset/superset/:$PYTHONPATH \ - SUPERSET_TESTENV=true + PYTHONPATH=/home/superset/superset/ \ + SUPERSET_TESTENV=true COPY from_tarball_entrypoint.sh /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] diff --git a/RELEASING/Dockerfile.from_svn_tarball b/RELEASING/Dockerfile.from_svn_tarball index 22883552cabf..33d0e9451b0b 100644 --- a/RELEASING/Dockerfile.from_svn_tarball +++ b/RELEASING/Dockerfile.from_svn_tarball @@ -20,7 +20,7 @@ RUN useradd --user-group --create-home --no-log-init --shell /bin/bash superset # Configure environment ENV LANG=C.UTF-8 \ - LC_ALL=C.UTF-8 + LC_ALL=C.UTF-8 RUN apt-get update -y @@ -29,13 +29,16 @@ RUN apt-get install -y apt-transport-https apt-utils # Install superset dependencies # https://superset.apache.org/docs/installation/installing-superset-from-scratch -RUN apt-get install -y build-essential libssl-dev \ - libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev chromium +RUN apt-get install -y subversion build-essential libssl-dev \ + libffi-dev python3-dev libsasl2-dev libldap2-dev libxi-dev 
chromium zstd # Install nodejs for custom build # https://nodejs.org/en/download/package-manager/ -RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - \ - && apt-get install -y nodejs +RUN set -eux; \ + curl -sL https://deb.nodesource.com/setup_20.x | bash -; \ + apt-get install -y nodejs; \ + node --version; +RUN if ! which npm; then apt-get install -y npm; fi RUN mkdir -p /home/superset RUN chown superset /home/superset @@ -46,22 +49,20 @@ ARG VERSION # Can fetch source from svn or copy tarball from local mounted directory RUN svn co https://dist.apache.org/repos/dist/dev/superset/$VERSION ./ RUN tar -xvf *.tar.gz -WORKDIR apache-superset-$VERSION +WORKDIR /home/superset/apache_superset-$VERSION/superset-frontend -RUN cd superset-frontend \ - && npm ci \ - && npm run build \ - && rm -rf node_modules +RUN npm ci \ + && npm run build \ + && rm -rf node_modules - -WORKDIR /home/superset/apache-superset-$VERSION +WORKDIR /home/superset/apache_superset-$VERSION RUN pip install --upgrade setuptools pip \ - && pip install -r requirements/base.txt \ - && pip install --no-cache-dir . + && pip install -r requirements/base.txt \ + && pip install --no-cache-dir . 
RUN flask fab babel-compile --target superset/translations ENV PATH=/home/superset/superset/bin:$PATH \ - PYTHONPATH=/home/superset/superset/:$PYTHONPATH + PYTHONPATH=/home/superset/superset/ COPY from_tarball_entrypoint.sh /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] diff --git a/RELEASING/Dockerfile.make_tarball b/RELEASING/Dockerfile.make_tarball index 3a963723de09..4e701afd172f 100644 --- a/RELEASING/Dockerfile.make_tarball +++ b/RELEASING/Dockerfile.make_tarball @@ -17,7 +17,9 @@ FROM python:3.10-slim-bookworm RUN apt-get update -y -RUN apt-get install -y jq +RUN apt-get install -y \ + git \ + jq COPY make_tarball_entrypoint.sh /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] diff --git a/RELEASING/README.md b/RELEASING/README.md index 39e4177ac008..415be60f3b91 100644 --- a/RELEASING/README.md +++ b/RELEASING/README.md @@ -123,10 +123,10 @@ SUPERSET_RC=1 SUPERSET_GITHUB_BRANCH=1.5 SUPERSET_PGP_FULLNAME=villebro@apache.org SUPERSET_VERSION_RC=1.5.1rc1 -SUPERSET_RELEASE=apache-superset-1.5.1 -SUPERSET_RELEASE_RC=apache-superset-1.5.1rc1 -SUPERSET_RELEASE_TARBALL=apache-superset-1.5.1-source.tar.gz -SUPERSET_RELEASE_RC_TARBALL=apache-superset-1.5.1rc1-source.tar.gz +SUPERSET_RELEASE=apache_superset-1.5.1 +SUPERSET_RELEASE_RC=apache_superset-1.5.1rc1 +SUPERSET_RELEASE_TARBALL=apache_superset-1.5.1-source.tar.gz +SUPERSET_RELEASE_RC_TARBALL=apache_superset-1.5.1rc1-source.tar.gz SUPERSET_TMP_ASF_SITE_PATH=/tmp/incubator-superset-site-1.5.1 ------------------------------- ``` @@ -380,7 +380,7 @@ Official instructions: https://www.apache.org/info/verification.html We now have a handy script for anyone validating a release to use. The core of it is in this very folder, `verify_release.py`. Just make sure you have all three release files in the same directory (`{some version}.tar.gz`, `{some version}.tar.gz.asc` and `{some version}tar.gz.sha512`). 
Then you can pass this script the path to the `.gz` file like so: -`python verify_release.py ~/path/tp/apache-superset-{version/candidate}-source.tar.gz` +`python verify_release.py ~/path/tp/apache_superset-{version/candidate}-source.tar.gz` If all goes well, you will see this result in your terminal: @@ -466,7 +466,8 @@ an account first if you don't have one, and reference your username while requesting access to push packages. ```bash -twine upload dist/apache-superset-${SUPERSET_VERSION}.tar.gz +twine upload dist/apache_superset-${SUPERSET_VERSION}-py3-none-any.whl +twine upload dist/apache_superset-${SUPERSET_VERSION}.tar.gz ``` Set your username to `__token__` @@ -505,7 +506,7 @@ We also need to update the Environment section of [ISSUE_TEMPLATE/bug-report.yml Docker release with proper tags should happen automatically as version tags get pushed to the `apache/superset` GitHub repository through this -[GitHub action](https://github.com/apache/superset/blob/master/.github/workflows/docker-release.yml) +[GitHub action](https://github.com/apache/superset/blob/master/.github/workflows/docker.yml) Note that this GH action implements a `workflow_dispatch` trigger, meaning that it can be triggered manually from the GitHub UI. 
If anything diff --git a/RELEASING/email_templates/announce.j2 b/RELEASING/email_templates/announce.j2 index b12a2ee5b03b..01b6893fb309 100644 --- a/RELEASING/email_templates/announce.j2 +++ b/RELEASING/email_templates/announce.j2 @@ -31,7 +31,7 @@ The official source release: https://downloads.apache.org/{{ project_module }}/{{ version }} The PyPI package: -https://pypi.org/project/apache-superset/{{ version }} +https://pypi.org/project/apache_superset/{{ version }} The CHANGELOG for the release: https://github.com/apache/{{ project_module }}/blob/{{ version }}/CHANGELOG/{{ version }}.md diff --git a/RELEASING/make_tarball.sh b/RELEASING/make_tarball.sh index 47686d440227..c4c53f979e3e 100755 --- a/RELEASING/make_tarball.sh +++ b/RELEASING/make_tarball.sh @@ -32,7 +32,7 @@ else SUPERSET_VERSION="${1}" SUPERSET_RC="${2}" SUPERSET_PGP_FULLNAME="${3}" - SUPERSET_RELEASE_RC_TARBALL="apache-superset-${SUPERSET_VERSION_RC}-source.tar.gz" + SUPERSET_RELEASE_RC_TARBALL="apache_superset-${SUPERSET_VERSION_RC}-source.tar.gz" fi SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${SUPERSET_RC}" diff --git a/RELEASING/make_tarball_entrypoint.sh b/RELEASING/make_tarball_entrypoint.sh index ffbc0ac33ca6..022fca294c22 100755 --- a/RELEASING/make_tarball_entrypoint.sh +++ b/RELEASING/make_tarball_entrypoint.sh @@ -22,7 +22,7 @@ if [ -z "${SUPERSET_VERSION_RC}" ] || [ -z "${SUPERSET_SVN_DEV_PATH}" ] || [ -z exit 1 fi -SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}" +SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}" SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz SUPERSET_RELEASE_RC_BASE_PATH="${SUPERSET_SVN_DEV_PATH}"/"${SUPERSET_VERSION_RC}" SUPERSET_RELEASE_RC_TARBALL_PATH="${SUPERSET_RELEASE_RC_BASE_PATH}"/"${SUPERSET_RELEASE_RC_TARBALL}" diff --git a/RELEASING/release-notes-1-0/README.md b/RELEASING/release-notes-1-0/README.md index ed1eeea0dab0..6379bf099ca4 100644 --- a/RELEASING/release-notes-1-0/README.md +++ 
b/RELEASING/release-notes-1-0/README.md @@ -102,7 +102,7 @@ Some of the new features in this release are disabled by default. Each has a fea This release includes **hundreds** of bugfixes and stability enhancements. Future major releases will have a continued emphasis on providing a stable and bug-free experience for the user. # PR Highlights -Below is a highlight of the PRs included in this update. The full list is much longer, and can be found [here](apache/incubator-superset/CHANGELOG.md). +Below is a highlight of the PRs included in this update. The full list is much longer, and can be found [here](https://github.com/apache/superset/blob/master/CHANGELOG.md). ## User Experience - Revert "refactor: Remove usages of reactable from TimeTable (#11046)" (#[11150](https://github.com/apache/incubator-superset/pull/11150)) @@ -222,4 +222,4 @@ Below is a highlight of the PRs included in this update. The full list is much l ## Complete Changelog Backwards incompatible changes and can be found [here](../../UPDATING.md). -To see the complete changelog, see [apache/incubator-superset/CHANGELOG.md](https://github.com/apache/superset/blob/master/CHANGELOG.md) +To see the complete changelog, see [apache/superset/CHANGELOG.md](https://github.com/apache/superset/blob/master/CHANGELOG.md) diff --git a/RELEASING/release-notes-1-5/README.md b/RELEASING/release-notes-1-5/README.md index 44cba5b89862..74389e8c7691 100644 --- a/RELEASING/release-notes-1-5/README.md +++ b/RELEASING/release-notes-1-5/README.md @@ -137,6 +137,6 @@ when available. **Changelog** To see the complete changelog in this release, head to -[CHANGELOG.MD](https://github.com/apache/superset/blob/1.5/CHANGELOG/1.5.0.md). +[CHANGELOG.MD](https://github.com/apache/superset/blob/master/CHANGELOG/1.5.0.md). As mentioned earlier, this release has a MASSIVE amount of bug fixes. The full changelog lists all of them! 
diff --git a/RELEASING/release-notes-4-1/README.md b/RELEASING/release-notes-4-1/README.md new file mode 100644 index 000000000000..4bdf0615c469 --- /dev/null +++ b/RELEASING/release-notes-4-1/README.md @@ -0,0 +1,140 @@ +<!-- +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +--> + +# Release Notes for Superset 4.1.0 + +Superset 4.1.0 brings a range of new features and quality of life improvements. This release is a minor version, meaning it doesn't include any breaking changes. However, users of basic Superset docker images like `4.1.0` should see the note at the bottom of this file about [changes to those builds](/RELEASING/release-notes-4-1/README.md#change-to-docker-image-builds). + +## Highlights + +Here are some of the highlights of this release. + +### Big Number With Time Period Updates + +We released a [Big Number with Time Period Comparison](https://github.com/apache/superset/pull/26908) chart as part of Superset 4.0. With the latest update, there are now [color options](https://github.com/apache/superset/pull/27524) for comparisons. The chart now also uses [standardize controls](https://github.com/apache/superset/pull/27193) such that when switching charts will maintain the selected metrics. 
To enable the new chart, you'll need to enable the `CHART_PLUGINS_EXPERIMENTAL` feature flag. + +<div> + <img src="media/big_number_chart.png" alt="Image" width="100%"> +</div> + +### Table with Time Comparison +Added functionality to do [table time comparisons](https://github.com/apache/superset/pull/28057). This will help improve and facilitate efficient data analysis. + +<div> + <img src="media/table_with_time.png" alt="Image" width="100%"> +</div> + +### New ECharts Versions + +The new ECharts [Heatmap](https://github.com/apache/superset/pull/25353) has been added. Compared to the legacy Heatmap, it has more accurate percentage calculations, server side sorting to respect row limits, and a more interactive legend control that allows selecting a subset of values. + +<div> + <img src="media/heatmap.png" alt="Image" width="100%"> +</div> + +We also added a new ECharts [Histogram](https://github.com/apache/superset/pull/28652) chart. The new chart will help visualize patterns, clusters, and outliers in the data and provides insights into its shape, central tendency, and spread. + +<div> + <img src="media/histogram.png" alt="Image" width="100%"> +</div> + +A new ECharts [Sankey](https://github.com/apache/superset/pull/29329) chart now exists. The chart visually tracks the movement and transformation of values across system stages. + +<div> + <img src="media/sankey.png" alt="Image" width="100%"> +</div> + +You can use the CLI command to migrate Area, Bubble, Line, Sankey, [Heatmap](https://github.com/apache/superset/pull/27771), and [Histogram](https://github.com/apache/superset/pull/28780) chart types but we'll add more as the ECharts migrations continue. Note that migrations for deprecated charts may be forced in upcoming major versions when the code is removed. Running migrations earlier will allow you to de-risk future upgrades while improving user experience. + +```bash +Usage: superset viz-migrations [OPTIONS] COMMAND [ARGS]... 
+ + Migrates a viz from one type to another. + +Commands: + downgrade Downgrades a viz to the previous version. + upgrade Upgrades a viz to the latest version. +``` + +Note: When migrating dashboards from one Superset instance to another (using import/export features or the Superset CLI), or restoring a backup of prior charts and dashboards, Superset will apply the existing migrations that are used during version upgrades. This will ensure that your charts and dashboards are using the latest and greatest charts that Superset officially supports. For any migration issues, feel free to [open a new issue](https://github.com/apache/superset/issues/new?assignees=&labels=bug&projects=&template=bug-report.yml) in the repo. + +### Improved Upload Forms + +We've made design changes to the [CSV](https://github.com/apache/superset/pull/27840), [Excel](https://github.com/apache/superset/pull/28105), and [Columnar](https://github.com/apache/superset/pull/28192 +) upload modals to improve user experience and to be more performant. The new designs have the following goals: + +- Improved error handling. +- Better backend parameter validation. 
+- More aligned with our other modal dialogs. + +#### CSV +<div> + <img src="media/csv_modal_1.png" alt="Image" width="25%"> + <img src="media/csv_modal_2.png" alt="Image" width="25%"> + <img src="media/csv_modal_3.png" alt="Image" width="25%"> + <img src="media/csv_modal_4.png" alt="Image" width="25%"> +</div> + +#### Excel +<div> + <img src="media/excel_modal_1.png" alt="Image" width="25%"> + <img src="media/excel_modal_2.png" alt="Image" width="25%"> + <img src="media/excel_modal_3.png" alt="Image" width="25%"> + <img src="media/excel_modal_4.png" alt="Image" width="25%"> +</div> + +#### Columnar +<div> + <img src="media/columnar_modal_1.png" alt="Image" width="33%"> + <img src="media/columnar_modal_2.png" alt="Image" width="33%"> + <img src="media/columnar_modal_3.png" alt="Image" width="33%"> +</div> + + +### OAuth2 For Databases + +You now have the ability to enable [OAuth2](https://github.com/apache/superset/pull/27631) for databases like BigQuery, Snowflake, Dremio, Databricks, Google Sheets, etc. When enabled, it will allow users to connect to OAuth2-enabled databases with their own credentials. + +### Catalog Support For Databases + +Added support for the [catalog hierarchy](https://github.com/apache/superset/pull/28317) for databases that support it, such as [BigQuery (projects), Databricks, Presto, Snowflake, and Trino](https://github.com/apache/superset/pull/28416). Once enabled, users will see catalogs when selecting tables in [SQL Lab, datasets](https://github.com/apache/superset/pull/28376), and when setting up Data Access Roles. + +### Slack Upload Files V2 API Updates +As part of [[SIP-138] Proposal for Slack file upload V2 integration for Alerts and Reports](https://github.com/apache/superset/issues/29263) we now have support for the Slack file upload v2 API. This feature is behind the feature flag `ALERT_REPORT_SLACK_V2` and also changes the Slack channel to a selector. 
You may also need to add the following scopes (`channels:read`, `groups:read`) to your Slack bot for this to work. + +<div> + <img src="media/slack_modal.png" alt="Image" width="100%"> +</div> + +### Total and Percentages In Tooltips For ECharts + +Users can now see both the [total and percentage in tooltips](https://github.com/apache/superset/pull/27950) for ECharts. + +<div> + <img src="media/tooltips.png" alt="Image" width="100%"> +</div> + +### Additional Metadata Bar To Dashboards + +There is now a [metadata bar](https://github.com/apache/superset/pull/27857) added to the header of dashboards. This will now show viewers of the dashboard both the owners and last modified time of the dashboard. + + +## Change to Docker image builds + +Starting in 4.1.0, the release's docker image does not ship with drivers needed to operate Superset. Users may need to install a driver for their metadata database (MySQL or Postgres) as well as the driver for their data warehouse. This is a result of changes to the `lean` docker image that official releases come from; see [Docker Build Presets](/docs/installation/docker-builds#build-presets) for more details. 
diff --git a/RELEASING/release-notes-4-1/media/big_number_chart.png b/RELEASING/release-notes-4-1/media/big_number_chart.png new file mode 100644 index 000000000000..962d0c3889eb Binary files /dev/null and b/RELEASING/release-notes-4-1/media/big_number_chart.png differ diff --git a/RELEASING/release-notes-4-1/media/columnar_modal_1.png b/RELEASING/release-notes-4-1/media/columnar_modal_1.png new file mode 100644 index 000000000000..21d201769e69 Binary files /dev/null and b/RELEASING/release-notes-4-1/media/columnar_modal_1.png differ diff --git a/RELEASING/release-notes-4-1/media/columnar_modal_2.png b/RELEASING/release-notes-4-1/media/columnar_modal_2.png new file mode 100644 index 000000000000..692f4549de77 Binary files /dev/null and b/RELEASING/release-notes-4-1/media/columnar_modal_2.png differ diff --git a/RELEASING/release-notes-4-1/media/columnar_modal_3.png b/RELEASING/release-notes-4-1/media/columnar_modal_3.png new file mode 100644 index 000000000000..f7edddfd5c59 Binary files /dev/null and b/RELEASING/release-notes-4-1/media/columnar_modal_3.png differ diff --git a/RELEASING/release-notes-4-1/media/csv_modal_1.png b/RELEASING/release-notes-4-1/media/csv_modal_1.png new file mode 100644 index 000000000000..8529357514e7 Binary files /dev/null and b/RELEASING/release-notes-4-1/media/csv_modal_1.png differ diff --git a/RELEASING/release-notes-4-1/media/csv_modal_2.png b/RELEASING/release-notes-4-1/media/csv_modal_2.png new file mode 100644 index 000000000000..972753587d5b Binary files /dev/null and b/RELEASING/release-notes-4-1/media/csv_modal_2.png differ diff --git a/RELEASING/release-notes-4-1/media/csv_modal_3.png b/RELEASING/release-notes-4-1/media/csv_modal_3.png new file mode 100644 index 000000000000..25acf14a3551 Binary files /dev/null and b/RELEASING/release-notes-4-1/media/csv_modal_3.png differ diff --git a/RELEASING/release-notes-4-1/media/csv_modal_4.png b/RELEASING/release-notes-4-1/media/csv_modal_4.png new file mode 100644 index 
000000000000..9d81b28306e6 Binary files /dev/null and b/RELEASING/release-notes-4-1/media/csv_modal_4.png differ diff --git a/RELEASING/release-notes-4-1/media/excel_modal_1.png b/RELEASING/release-notes-4-1/media/excel_modal_1.png new file mode 100644 index 000000000000..6e9640a94998 Binary files /dev/null and b/RELEASING/release-notes-4-1/media/excel_modal_1.png differ diff --git a/RELEASING/release-notes-4-1/media/excel_modal_2.png b/RELEASING/release-notes-4-1/media/excel_modal_2.png new file mode 100644 index 000000000000..4cc2d451bda7 Binary files /dev/null and b/RELEASING/release-notes-4-1/media/excel_modal_2.png differ diff --git a/RELEASING/release-notes-4-1/media/excel_modal_3.png b/RELEASING/release-notes-4-1/media/excel_modal_3.png new file mode 100644 index 000000000000..64d8fdc561e1 Binary files /dev/null and b/RELEASING/release-notes-4-1/media/excel_modal_3.png differ diff --git a/RELEASING/release-notes-4-1/media/excel_modal_4.png b/RELEASING/release-notes-4-1/media/excel_modal_4.png new file mode 100644 index 000000000000..8f5b45e504aa Binary files /dev/null and b/RELEASING/release-notes-4-1/media/excel_modal_4.png differ diff --git a/RELEASING/release-notes-4-1/media/heatmap.png b/RELEASING/release-notes-4-1/media/heatmap.png new file mode 100644 index 000000000000..52738bf00624 Binary files /dev/null and b/RELEASING/release-notes-4-1/media/heatmap.png differ diff --git a/RELEASING/release-notes-4-1/media/histogram.png b/RELEASING/release-notes-4-1/media/histogram.png new file mode 100644 index 000000000000..61a7fe0318cf Binary files /dev/null and b/RELEASING/release-notes-4-1/media/histogram.png differ diff --git a/RELEASING/release-notes-4-1/media/sankey.png b/RELEASING/release-notes-4-1/media/sankey.png new file mode 100644 index 000000000000..0df200ee9e49 Binary files /dev/null and b/RELEASING/release-notes-4-1/media/sankey.png differ diff --git a/RELEASING/release-notes-4-1/media/slack_modal.png 
b/RELEASING/release-notes-4-1/media/slack_modal.png new file mode 100644 index 000000000000..3804e2a6fc9e Binary files /dev/null and b/RELEASING/release-notes-4-1/media/slack_modal.png differ diff --git a/RELEASING/release-notes-4-1/media/table_with_time.png b/RELEASING/release-notes-4-1/media/table_with_time.png new file mode 100644 index 000000000000..4f7607c21753 Binary files /dev/null and b/RELEASING/release-notes-4-1/media/table_with_time.png differ diff --git a/RELEASING/release-notes-4-1/media/tooltips.png b/RELEASING/release-notes-4-1/media/tooltips.png new file mode 100644 index 000000000000..03f394b75d6f Binary files /dev/null and b/RELEASING/release-notes-4-1/media/tooltips.png differ diff --git a/RELEASING/set_release_env.sh b/RELEASING/set_release_env.sh index 3d04a76d7863..7b297395775b 100755 --- a/RELEASING/set_release_env.sh +++ b/RELEASING/set_release_env.sh @@ -50,8 +50,8 @@ else export SUPERSET_GITHUB_BRANCH="${VERSION_MAJOR}.${VERSION_MINOR}" export SUPERSET_PGP_FULLNAME="${2}" export SUPERSET_VERSION_RC="${SUPERSET_VERSION}rc${VERSION_RC}" - export SUPERSET_RELEASE=apache-superset-"${SUPERSET_VERSION}" - export SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}" + export SUPERSET_RELEASE=apache_superset-"${SUPERSET_VERSION}" + export SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}" export SUPERSET_RELEASE_TARBALL="${SUPERSET_RELEASE}"-source.tar.gz export SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz export SUPERSET_TMP_ASF_SITE_PATH="/tmp/incubator-superset-site-${SUPERSET_VERSION}" diff --git a/RELEASING/test_run_tarball.sh b/RELEASING/test_run_tarball.sh index d4c8a9c706a0..d28c7226bda9 100755 --- a/RELEASING/test_run_tarball.sh +++ b/RELEASING/test_run_tarball.sh @@ -27,7 +27,7 @@ if [ -z "${SUPERSET_SVN_DEV_PATH}" ]; then fi if [[ -n ${1} ]] && [[ ${1} == "local" ]]; then - SUPERSET_RELEASE_RC=apache-superset-"${SUPERSET_VERSION_RC}" + SUPERSET_RELEASE_RC=apache_superset-"${SUPERSET_VERSION_RC}" 
SUPERSET_RELEASE_RC_TARBALL="${SUPERSET_RELEASE_RC}"-source.tar.gz SUPERSET_TARBALL_PATH="${SUPERSET_SVN_DEV_PATH}"/${SUPERSET_VERSION_RC}/${SUPERSET_RELEASE_RC_TARBALL} SUPERSET_TMP_TARBALL_FILENAME=_tmp_"${SUPERSET_VERSION_RC}".tar.gz diff --git a/RELEASING/validate_this_release.sh b/RELEASING/validate_this_release.sh index 98c502be2a24..4942803702cb 100755 --- a/RELEASING/validate_this_release.sh +++ b/RELEASING/validate_this_release.sh @@ -38,7 +38,7 @@ get_pip_command() { PYTHON=$(get_python_command) PIP=$(get_pip_command) -# Get the release directory's path. If you unzip an Apache release and just run the npm script to validate the release, this will be a file name like `apache-superset-x.x.xrcx-source.tar.gz` +# Get the release directory's path. If you unzip an Apache release and just run the npm script to validate the release, this will be a file name like `apache_superset-x.x.xrcx-source.tar.gz` RELEASE_ZIP_PATH="../../$(basename "$(dirname "$(pwd)")")-source.tar.gz" # Install dependencies from requirements.txt if the file exists diff --git a/RELEASING/verify_release.py b/RELEASING/verify_release.py index 546bb308d4c0..350263610057 100755 --- a/RELEASING/verify_release.py +++ b/RELEASING/verify_release.py @@ -65,35 +65,43 @@ def get_gpg_info(filename: str) -> tuple[Optional[str], Optional[str]]: output = result.stderr.decode() rsa_key = re.search(r"RSA key ([0-9A-F]+)", output) + eddsa_key = re.search(r"EDDSA key ([0-9A-F]+)", output) email = re.search(r'issuer "([^"]+)"', output) rsa_key_result = rsa_key.group(1) if rsa_key else None + eddsa_key_result = eddsa_key.group(1) if eddsa_key else None email_result = email.group(1) if email else None - # Debugging: print warnings if rsa_key or email is not found - if rsa_key_result is None: - print("Warning: No RSA key found in GPG verification output.") - if email_result is None: + key_result = rsa_key_result or eddsa_key_result + + # Debugging: + if key_result: + print("RSA or EDDSA Key found") + else: + 
print("Warning: No RSA or EDDSA key found in GPG verification output.") + if email_result: + print("email found") + else: print("Warning: No email address found in GPG verification output.") - return rsa_key_result, email_result + return key_result, email_result -def verify_rsa_key(rsa_key: str, email: Optional[str]) -> str: - """Fetch the KEYS file and verify if the RSA key and email match.""" +def verify_key(key: str, email: Optional[str]) -> str: + """Fetch the KEYS file and verify if the RSA/EDDSA key and email match.""" url = "https://downloads.apache.org/superset/KEYS" response = requests.get(url) if response.status_code == 200: - if rsa_key not in response.text: - return "RSA key not found on KEYS page" + if key not in response.text: + return "RSA/EDDSA key not found on KEYS page" # Check if email is None or not in response.text if email and email in response.text: - return "RSA key and email verified against Apache KEYS file" + return "RSA/EDDSA key and email verified against Apache KEYS file" elif email: - return "RSA key verified, but Email not found on KEYS page" + return "RSA/EDDSA key verified, but Email not found on KEYS page" else: - return "RSA key verified, but Email not available for verification" + return "RSA/EDDSA key verified, but Email not available for verification" else: return "Failed to fetch KEYS file" @@ -103,9 +111,9 @@ def verify_sha512_and_rsa(filename: str) -> None: sha_result = verify_sha512(filename) print(sha_result) - rsa_key, email = get_gpg_info(filename) - if rsa_key: - rsa_result = verify_rsa_key(rsa_key, email) + key, email = get_gpg_info(filename) + if key: + rsa_result = verify_key(key, email) print(rsa_result) else: print("GPG verification failed: RSA key or email not found") diff --git a/RESOURCES/FEATURE_FLAGS.md b/RESOURCES/FEATURE_FLAGS.md index 2c4d4d0c3e01..f985ad725494 100644 --- a/RESOURCES/FEATURE_FLAGS.md +++ b/RESOURCES/FEATURE_FLAGS.md @@ -45,7 +45,7 @@ These features are **finished** but currently being 
tested. They are usable, but - CACHE_IMPERSONATION - CONFIRM_DASHBOARD_DIFF - DRILL_TO_DETAIL -- DYNAMIC_PLUGINS: [(docs)](https://superset.apache.org/docs/configuration/running-on-kubernetes) +- DYNAMIC_PLUGINS - ENABLE_SUPERSET_META_DB: [(docs)](https://superset.apache.org/docs/configuration/databases/#querying-across-databases) - ESTIMATE_QUERY_COST - GLOBAL_ASYNC_QUERIES [(docs)](https://github.com/apache/superset/blob/master/CONTRIBUTING.md#async-chart-queries) @@ -68,9 +68,16 @@ These features flags are **safe for production**. They have been tested and will - DISABLE_LEGACY_DATASOURCE_EDITOR ### Flags retained for runtime configuration + +Currently some of our feature flags act as dynamic configurations that can changed +on the fly. This acts in contradiction with the typical ephemeral feature flag use case, +where the flag is used to mature a feature, and eventually deprecated once the feature is +solid. Eventually we'll likely refactor these under a more formal "dynamic configurations" managed +independently. This new framework will also allow for non-boolean configurations. + - ALERTS_ATTACH_REPORTS - ALLOW_ADHOC_SUBQUERY -- DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/first-dashboard#manage-access-to-dashboards) +- DASHBOARD_RBAC [(docs)](https://superset.apache.org/docs/using-superset/creating-your-first-dashboard#manage-access-to-dashboards) - DATAPANEL_CLOSED_BY_DEFAULT - DRUID_JOINS - EMBEDDABLE_CHARTS @@ -79,6 +86,7 @@ These features flags are **safe for production**. 
They have been tested and will - ESCAPE_MARKDOWN_HTML - LISTVIEWS_DEFAULT_CARD_VIEW - SCHEDULED_QUERIES [(docs)](https://superset.apache.org/docs/configuration/alerts-reports) +- SLACK_ENABLE_AVATARS (see `superset/config.py` for more information) - SQLLAB_BACKEND_PERSISTENCE - SQL_VALIDATORS_BY_ENGINE [(docs)](https://superset.apache.org/docs/configuration/sql-templating) - THUMBNAILS [(docs)](https://superset.apache.org/docs/configuration/cache) diff --git a/RESOURCES/INTHEWILD.md b/RESOURCES/INTHEWILD.md index 67c5e9f07711..1d4d37729544 100644 --- a/RESOURCES/INTHEWILD.md +++ b/RESOURCES/INTHEWILD.md @@ -25,16 +25,17 @@ all you have to do is file a simple PR [like this one](https://github.com/apache the categorization is inaccurate, please file a PR with your correction as well. Join our growing community! - ### Sharing Economy + - [Airbnb](https://github.com/airbnb) - [Faasos](http://faasos.com/) [@shashanksingh] - [Hostnfly](https://www.hostnfly.com/) [@alexisrosuel] -- [Lime](https://www.limebike.com/) [@cxmcc] +- [Lime](https://www.li.me/) [@cxmcc] - [Lyft](https://www.lyft.com/) - [Ontruck](https://www.ontruck.com/) ### Financial Services + - [Aktia Bank plc](https://www.aktia.com) [@villebro] - [American Express](https://www.americanexpress.com) [@TheLastSultan] - [Cape Crypto](https://capecrypto.com) @@ -47,15 +48,16 @@ Join our growing community! 
- [bumper](https://www.bumper.co/) [@vasu-ram, @JamiePercival] ### Gaming -- [Digit Game Studios](https://www.digitgaming.com/) + - [Popoko VM Games Studio](https://popoko.live) ### E-Commerce + - [AiHello](https://www.aihello.com) [@ganeshkrishnan1] - [Bazaar Technologies](https://www.bazaartech.com) [@umair-abro] - [Dragonpass](https://www.dragonpass.com.cn/) [@zhxjdwh] - [Dropit Shopping](https://www.dropit.shop/) [@dropit-dev] -- [Fanatics](https://www.fanatics.com) [@coderfender] +- [Fanatics](https://www.fanatics.com/) [@coderfender] - [Fordeal](http://www.fordeal.com) [@Renkai] - [GFG - Global Fashion Group](https://global-fashion-group.com) [@ksaagariconic] - [HuiShouBao](http://www.huishoubao.com/) [@Yukinoshita-Yukino] @@ -64,14 +66,15 @@ Join our growing community! - [Rakuten Viki](https://www.viki.com) - [Shopee](https://shopee.sg) [@xiaohanyu] - [Shopkick](https://www.shopkick.com) [@LAlbertalli] -- [Tails.com](https://tails.com) [@alanmcruickshank] +- [Tails.com](https://tails.com/gb/) [@alanmcruickshank] - [THE ICONIC](http://theiconic.com.au/) [@ksaagariconic] - [Utair](https://www.utair.ru) [@utair-digital] -- [VkusVill](https://www.vkusvill.ru) [@ETselikov] +- [VkusVill](https://vkusvill.ru/) [@ETselikov] - [Zalando](https://www.zalando.com) [@dmigo] - [Zalora](https://www.zalora.com) [@ksaagariconic] ### Enterprise Technology + - [A3Data](https://a3data.com.br) [@neylsoncrepalde] - [Analytics Aura](https://analyticsaura.com/) [@Analytics-Aura] - [Apollo GraphQL](https://www.apollographql.com/) [@evans] @@ -80,7 +83,7 @@ Join our growing community! 
- [Caizin](https://caizin.com/) [@tejaskatariya] - [Careem](https://www.careem.com/) [@SamraHanifCareem] - [Cloudsmith](https://cloudsmith.io) [@alancarson] -- [CnOvit](http://www.cnovit.com/) [@xieshaohu] +- [CnOvit](https://www.cnovit.com/) [@xieshaohu] - [Cyberhaven](https://www.cyberhaven.com/) [@toliver-ch] - [Deepomatic](https://deepomatic.com/) [@Zanoellia] - [Dial Once](https://www.dial-once.com/) @@ -89,8 +92,7 @@ Join our growing community! - [Endress+Hauser](http://www.endress.com/) [@rumbin] - [FBK - ICT center](http://ict.fbk.eu) - [Gavagai](https://gavagai.io) [@gavagai-corp] -- [GfK Data Lab](http://datalab.gfk.com) [@mherr] -- [GrowthSimple](https://growthsimple.ai/) +- [GfK Data Lab](https://www.gfk.com/home) [@mherr] - [Hydrolix](https://www.hydrolix.io/) - [Intercom](https://www.intercom.com/) [@kate-gallo] - [jampp](https://jampp.com/) @@ -105,17 +107,14 @@ Join our growing community! - [Peak AI](https://www.peak.ai/) [@azhar22k] - [PeopleDoc](https://www.people-doc.com) [@rodo] - [Preset, Inc.](https://preset.io) -- [Pronto Tools](http://www.prontotools.io) [@zkan] - [PubNub](https://pubnub.com) [@jzucker2] - [ReadyTech](https://www.readytech.io) - [Reward Gateway](https://www.rewardgateway.com) - [ScopeAI](https://www.getscopeai.com) [@iloveluce] -- [Showmax](https://tech.showmax.com) [@bobek] -- [source{d}](https://www.sourced.tech) [@marnovo] -- [Steamroot](https://streamroot.io/) +- [Showmax](https://showmax.com) [@bobek] - [TechAudit](https://www.techaudit.info) [@ETselikov] - [Tenable](https://www.tenable.com) [@dflionis] -- [Tentacle](https://public.tentaclecmi.com) [@jdclarke5] +- [Tentacle](https://tentaclecmi.com) [@jdclarke5] - [timbr.ai](https://timbr.ai/) [@semantiDan] - [Tobii](http://www.tobii.com/) [@dwa] - [Tooploox](https://www.tooploox.com/) [@jakubczaplicki] @@ -125,6 +124,7 @@ Join our growing community! 
- [Zeta](https://www.zeta.tech/) [@shaikidris] ### Media & Entertainment + - [6play](https://www.6play.fr) [@CoryChaplin] - [bilibili](https://www.bilibili.com) [@Moinheart] - [BurdaForward](https://www.burda-forward.de/en/) @@ -137,6 +137,7 @@ Join our growing community! - [Zaihang](http://www.zaih.com/) ### Education + - [Aveti Learning](https://avetilearning.com/) [@TheShubhendra] - [Brilliant.org](https://brilliant.org/) - [Platzi.com](https://platzi.com/) @@ -147,6 +148,7 @@ Join our growing community! - [WikiMedia Foundation](https://wikimediafoundation.org) [@vg] ### Energy + - [Airboxlab](https://foobot.io) [@antoine-galataud] - [DouroECI](https://www.douroeci.com/) [@nunohelibeires] - [Safaricom](https://www.safaricom.co.ke/) [@mmutiso] @@ -154,31 +156,35 @@ Join our growing community! - [Wattbewerb](https://wattbewerb.de/) [@wattbewerb] ### Healthcare + - [Amino](https://amino.com) [@shkr] -- [Beans](https://www.beans.fi) [@kakoni] - [Bluesquare](https://www.bluesquarehub.com/) [@madewulf] - [Care](https://www.getcare.io/)[@alandao2021] - [Living Goods](https://www.livinggoods.org) [@chelule] - [Maieutical Labs](https://maieuticallabs.it) [@xrmx] - [QPID Health](http://www.qpidhealth.com/) - [REDCap Cloud](https://www.redcapcloud.com/) -- [TrustMedis](https://trustmedis.com) [@famasya] +- [TrustMedis](https://trustmedis.com/) [@famasya] - [WeSure](https://www.wesure.cn/) ### HR / Staffing + - [Swile](https://www.swile.co/) [@PaoloTerzi] - [Symmetrics](https://www.symmetrics.fyi) - [bluquist](https://bluquist.com/) ### Government + - [City of Ann Arbor, MI](https://www.a2gov.org/) [@sfirke] - [RIS3 Strategy of CZ, MIT CR](https://www.ris3.cz/) [@RIS3CZ] ### Travel + - [Agoda](https://www.agoda.com/) [@lostseaway, @maiake, @obombayo] - [Skyscanner](https://www.skyscanner.net/) [@cleslie, @stanhoucke] ### Others + - [10Web](https://10web.io/) - [AI inside](https://inside.ai/en/) - [Automattic](https://automattic.com/) [@Khrol, @Usiel] diff --git 
a/UPDATING.md b/UPDATING.md index f1ccbbdc087d..30a87cd24848 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -22,7 +22,14 @@ under the License. This file documents any backwards-incompatible changes in Superset and assists people when migrating to a new version. -## Next +## 4.1.2 + +- [31198](https://github.com/apache/superset/pull/31198) Disallows by default the use of the following ClickHouse functions: "version", "currentDatabase", "hostName". +- [31173](https://github.com/apache/superset/pull/31173) Modified `fetch_csrf_token` to align with HTTP standards, particularly regarding how cookies are handled. If you encounter any issues related to CSRF functionality, please report them as a new issue and reference this PR for context. + +### Potential Downtime + +## 4.1.0 - [29274](https://github.com/apache/superset/pull/29274): We made it easier to trigger CI on your forks, whether they are public or private. Simply push to a branch that fits `[0-9].[0-9]*` and @@ -58,6 +65,7 @@ assists people when migrating to a new version. backend, as well as the .json files used by the frontend. If you were doing anything before as part of your bundling to expose translation packages, it's probably not needed anymore. - [29264](https://github.com/apache/superset/pull/29264) Slack has updated its file upload api, and we are now supporting this new api in Superset, although the Slack api is not backward compatible. The original Slack integration is deprecated and we will require a new Slack scope `channels:read` to be added to Slack workspaces in order to use this new api. In an upcoming release, we will make this new Slack scope mandatory and remove the old Slack functionality. +- [30274](https://github.com/apache/superset/pull/30274) Moved SLACK_ENABLE_AVATAR from config.py to the feature flag framework, please adapt your configs ### Potential Downtime @@ -227,7 +235,8 @@ assists people when migrating to a new version. 
- [19273](https://github.com/apache/superset/pull/19273): The `SUPERSET_CELERY_WORKERS` and `SUPERSET_WORKERS` config keys has been removed. Configure Celery directly using `CELERY_CONFIG` on Superset. - [19231](https://github.com/apache/superset/pull/19231): The `ENABLE_REACT_CRUD_VIEWS` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the React views support their use case. - [19230](https://github.com/apache/superset/pull/19230): The `ROW_LEVEL_SECURITY` feature flag has been removed (permanently enabled). Any deployments which had set this flag to false will need to verify that the presence of the Row Level Security feature does not interfere with their use case. -- [19168](https://github.com/apache/superset/pull/19168): Celery upgrade to 5.X resulted in breaking changes to its command line invocation. Please follow [these](https://docs.celeryq.dev/en/stable/whatsnew-5.2.html#step-1-adjust-your-command-line-invocation) instructions for adjustments. Also consider migrating you Celery config per [here](https://docs.celeryq.dev/en/stable/userguide/configuration.html#conf-old-settings-map). +- [19168](https://github.com/apache/superset/pull/19168): Celery upgrade to 5.X resulted in breaking changes to its command line invocation. + html#step-1-adjust-your-command-line-invocation) instructions for adjustments. Also consider migrating you Celery config per [here](https://docs.celeryq.dev/en/stable/userguide/configuration.html#conf-old-settings-map). - [19142](https://github.com/apache/superset/pull/19142): The `VERSIONED_EXPORT` config key is now `True` by default. - [19113](https://github.com/apache/superset/pull/19113): The `ENABLE_JAVASCRIPT_CONTROLS` config key has moved from an app config to a feature flag. Any deployments who overrode this setting will now need to override the feature flag from here onward. 
- [19107](https://github.com/apache/superset/pull/19107): The `SQLLAB_BACKEND_PERSISTENCE` feature flag is now `True` by default, which enables persisting SQL Lab tabs in the backend instead of the browser's `localStorage`. @@ -538,7 +547,7 @@ assists people when migrating to a new version. - [8117](https://github.com/apache/superset/pull/8117): If you are using `ENABLE_PROXY_FIX = True`, review the newly-introduced variable, `PROXY_FIX_CONFIG`, which changes the proxy behavior in accordance with - [Werkzeug](https://werkzeug.palletsprojects.com/en/0.15.x/middleware/proxy_fix/) + Werkzeug. - [8069](https://github.com/apache/superset/pull/8069): introduces [MessagePack](https://github.com/msgpack/msgpack-python) and diff --git a/docker-compose-image-tag.yml b/docker-compose-image-tag.yml index 9309c6d619cf..4ba32d28b3c2 100644 --- a/docker-compose-image-tag.yml +++ b/docker-compose-image-tag.yml @@ -21,7 +21,7 @@ # create you own docker environment file (docker/.env) with your own # unique random secure passwords and SECRET_KEY. 
# ----------------------------------------------------------------------- -x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest} +x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest-dev} x-superset-depends-on: &superset-depends-on - db - redis @@ -30,7 +30,6 @@ x-superset-volumes: - ./docker:/app/docker - superset_home:/app/superset_home -version: "3.7" services: redis: image: redis:7 diff --git a/docker-compose-non-dev.yml b/docker-compose-non-dev.yml index 73de435a0fdd..2fdb4b993f77 100644 --- a/docker-compose-non-dev.yml +++ b/docker-compose-non-dev.yml @@ -35,7 +35,6 @@ x-common-build: &common-build cache_from: - apache/superset-cache:3.10-slim-bookworm -version: "4.0" services: redis: image: redis:7 diff --git a/docker/docker-frontend.sh b/docker/docker-frontend.sh index c69fefd6d0fb..aad6e0a16d5b 100755 --- a/docker/docker-frontend.sh +++ b/docker/docker-frontend.sh @@ -26,7 +26,7 @@ fi if [ "$BUILD_SUPERSET_FRONTEND_IN_DOCKER" = "true" ]; then cd /app/superset-frontend npm install -f --no-optional --global webpack webpack-cli - npm install -f --no-optional + npm install -f echo "Running frontend" npm run dev diff --git a/docker/pythonpath_dev/superset_config.py b/docker/pythonpath_dev/superset_config.py index c78a5041417a..e8223e53584b 100644 --- a/docker/pythonpath_dev/superset_config.py +++ b/docker/pythonpath_dev/superset_config.py @@ -74,7 +74,12 @@ class CeleryConfig: broker_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}" - imports = ("superset.sql_lab",) + imports = ( + "superset.sql_lab", + "superset.tasks.scheduler", + "superset.tasks.thumbnails", + "superset.tasks.cache", + ) result_backend = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_RESULTS_DB}" worker_prefetch_multiplier = 1 task_acks_late = False diff --git a/docs/docs/configuration/databases.mdx b/docs/docs/configuration/databases.mdx index 67734643b9b2..d74819733223 100644 --- 
a/docs/docs/configuration/databases.mdx +++ b/docs/docs/configuration/databases.mdx @@ -54,7 +54,7 @@ are compatible with Superset. | [Azure MS SQL](/docs/configuration/databases#sql-server) | `pip install pymssql` | `mssql+pymssql://UserName@presetSQL:TestPassword@presetSQL.database.windows.net:1433/TestSchema` | | [ClickHouse](/docs/configuration/databases#clickhouse) | `pip install clickhouse-connect` | `clickhousedb://{username}:{password}@{hostname}:{port}/{database}` | | [CockroachDB](/docs/configuration/databases#cockroachdb) | `pip install cockroachdb` | `cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable` | -| [CouchbaseDB](/docs/configuration/databases#couchbaseDB) | `pip install couchbase-sqlalchemy` | `couchbasedb://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` | +| [Couchbase](/docs/configuration/databases#couchbase) | `pip install couchbase-sqlalchemy` | `couchbase://{username}:{password}@{hostname}:{port}?truststorepath={ssl certificate path}` | | [Dremio](/docs/configuration/databases#dremio) | `pip install sqlalchemy_dremio` | `dremio://user:pwd@host:31010/` | | [Elasticsearch](/docs/configuration/databases#elasticsearch) | `pip install elasticsearch-dbapi` | `elasticsearch+http://{user}:{password}@{host}:9200/` | | [Exasol](/docs/configuration/databases#exasol) | `pip install sqlalchemy-exasol` | `exa+pyodbc://{username}:{password}@{hostname}:{port}/my_schema?CONNECTIONLCALL=en_US.UTF-8&driver=EXAODBC` | @@ -70,7 +70,7 @@ are compatible with Superset. 
| [PostgreSQL](/docs/configuration/databases#postgres) | `pip install psycopg2` | `postgresql://<UserName>:<DBPassword>@<Database Host>/<Database Name>` | | [Presto](/docs/configuration/databases#presto) | `pip install pyhive` | `presto://` | | [Rockset](/docs/configuration/databases#rockset) | `pip install rockset-sqlalchemy` | `rockset://<api_key>:@<api_server>` | -| [SAP Hana](/docs/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache-superset[hana]` | `hana://{username}:{password}@{host}:{port}` | +| [SAP Hana](/docs/configuration/databases#hana) | `pip install hdbcli sqlalchemy-hana` or `pip install apache_superset[hana]` | `hana://{username}:{password}@{host}:{port}` | | [StarRocks](/docs/configuration/databases#starrocks) | `pip install starrocks` | `starrocks://<User>:<Password>@<Host>:<Port>/<Catalog>.<Database>` | | [Snowflake](/docs/configuration/databases#snowflake) | `pip install snowflake-sqlalchemy` | `snowflake://{user}:{password}@{account}.{region}/{database}?role={role}&warehouse={warehouse}` | | SQLite | No additional library needed | `sqlite://path/to/file.db?check_same_thread=false` | @@ -375,9 +375,10 @@ cockroachdb://root@{hostname}:{port}/{database}?sslmode=disable -#### CouchbaseDB +#### Couchbase -The recommended connector library for CouchbaseDB is +The Couchbase's Superset connection is designed to support two services: Couchbase Analytics and Couchbase Columnar. +The recommended connector library for couchbase is [couchbase-sqlalchemy](https://github.com/couchbase/couchbase-sqlalchemy). 
``` pip install couchbase-sqlalchemy @@ -386,7 +387,7 @@ pip install couchbase-sqlalchemy The expected connection string is formatted as follows: ``` -couchbasedb://{username}:{password}@{hostname}:{port}?truststorepath={certificate path}?ssl={true/false} +couchbase://{username}:{password}@{hostname}:{port}?truststorepath={certificate path}?ssl={true/false} ``` diff --git a/docs/docs/configuration/networking-settings.mdx b/docs/docs/configuration/networking-settings.mdx index 3993c8bfc46d..bf7884003133 100644 --- a/docs/docs/configuration/networking-settings.mdx +++ b/docs/docs/configuration/networking-settings.mdx @@ -11,7 +11,7 @@ version: 1 To configure CORS, or cross-origin resource sharing, the following dependency must be installed: ```python -pip install apache-superset[cors] +pip install apache-superset[cors] ``` The following keys in `superset_config.py` can be specified to configure CORS: diff --git a/docs/docs/contributing/contributing.mdx b/docs/docs/contributing/contributing.mdx index c170d3eb0bc6..109a3692df96 100644 --- a/docs/docs/contributing/contributing.mdx +++ b/docs/docs/contributing/contributing.mdx @@ -26,9 +26,9 @@ More references: Here's a list of repositories that contain Superset-related packages: - [apache/superset](https://github.com/apache/superset) is the main repository containing the `apache-superset` Python package distributed on - [pypi](https://pypi.org/project/apache-superset/). This repository also includes Superset's main TypeScript/JavaScript bundles and react apps under the [superset-frontend](https://github.com/apache/superset/tree/master/superset-frontend) folder. @@ -86,7 +86,7 @@ text strings from Superset's UI.
You can jump into the existing language dictionaries at `superset/translations/<language_code>/LC_MESSAGES/messages.po`, or even create a dictionary for a new language altogether. -See [Translating](howtos#contribute-translations) for more details. +See [Translating](howtos#contributing-translations) for more details. ### Ask Questions diff --git a/docs/docs/contributing/guidelines.mdx b/docs/docs/contributing/guidelines.mdx index 7cf24954a1cd..1ba1e6af93ce 100644 --- a/docs/docs/contributing/guidelines.mdx +++ b/docs/docs/contributing/guidelines.mdx @@ -12,7 +12,7 @@ A philosophy we would like to strongly encourage is The purpose is to separate problem from possible solutions. -**Bug fixes:** If you’re only fixing a small bug, it’s fine to submit a pull request right away but we highly recommend to file an issue detailing what you’re fixing. This is helpful in case we don’t accept that specific fix but want to keep track of the issue. Please keep in mind that the project maintainers reserve the rights to accept or reject incoming PRs, so it is better to separate the issue and the code to fix it from each other. In some cases, project maintainers may request you to create a separate issue from PR before proceeding. +**Bug fixes:** If you’re only fixing a small bug, it’s fine to submit a pull request right away but we highly recommend filing an issue detailing what you’re fixing. This is helpful in case we don’t accept that specific fix but want to keep track of the issue. Please keep in mind that the project maintainers reserve the rights to accept or reject incoming PRs, so it is better to separate the issue and the code to fix it from each other. In some cases, project maintainers may request you to create a separate issue from PR before proceeding. **Refactor:** For small refactors, it can be a standalone PR itself detailing what you are refactoring and why. 
If there are concerns, project maintainers may request you to create a `#SIP` for the PR before proceeding. diff --git a/docs/docs/installation/docker-builds.mdx b/docs/docs/installation/docker-builds.mdx index fe0ec8d30bf8..e7738aa51a71 100644 --- a/docs/docs/installation/docker-builds.mdx +++ b/docs/docs/installation/docker-builds.mdx @@ -32,8 +32,15 @@ for the build, and/or base image. Here are the build presets that are exposed through the `build_docker.py` script: - `lean`: The default Docker image, including both frontend and backend. Tags -without a build_preset are lean builds, e.g., `latest`. -- `dev`: For development, with a headless browser, dev-related utilities and root access. + without a build_preset are lean builds (ie: `latest`, `4.0.0`, `3.0.0`, ...). `lean` + builds do not contain database + drivers, meaning you need to install your own. That applies to analytics databases **AND + the metadata database**. You'll likely want to layer either `mysqlclient` or `psycopg2-binary` + depending on the metadata database you choose for your installation, plus the required + drivers to connect to your analytics database(s). +- `dev`: For development, with a headless browser, dev-related utilities and root access. This + includes some commonly used database drivers like `mysqlclient`, `psycopg2-binary` and + some others used for development/CI - `py311`, e.g., Py311: Similar to lean but with a different Python version (in this example, 3.11). - `ci`: For certain CI workloads. - `websocket`: For Superset clusters supporting advanced features. diff --git a/docs/docs/installation/docker-compose.mdx b/docs/docs/installation/docker-compose.mdx index e12b25a1ee0a..0baf91a3bca2 100644 --- a/docs/docs/installation/docker-compose.mdx +++ b/docs/docs/installation/docker-compose.mdx @@ -38,7 +38,12 @@ Note that there are 3 major ways we support to run docker-compose: 1.
**docker-compose-image-tag.yml** where we fetch an image from docker-hub say for the `3.0.0` release for instance, and fire it up so you can try it. Here what's in the local branch has no effects on what's running, we just fetch and run - pre-built images from docker-hub + pre-built images from docker-hub. For `docker compose` to work along with the + Postgres image it boots up, you'll want to point to a `-dev`-suffixed TAG, as in + `export TAG=4.0.0-dev` or `export TAG=3.0.0-dev`, with `latest-dev` being the default. + That's because the `dev` builds happen to package the `psycopg2-binary` required to connect + to the Postgres database launched as part of the `docker compose` builds. + More on these two approaches after setting up the requirements for either. diff --git a/docs/docs/installation/pypi.mdx b/docs/docs/installation/pypi.mdx index 564760e52b9f..ca0c48728976 100644 --- a/docs/docs/installation/pypi.mdx +++ b/docs/docs/installation/pypi.mdx @@ -12,7 +12,7 @@ import useBaseUrl from "@docusaurus/useBaseUrl"; <img src={useBaseUrl("/img/pypi.png" )} width="150" /> <br /><br /> -This page describes how to install Superset using the `apache-superset` package [published on PyPI](https://pypi.org/project/apache-superset/). +This page describes how to install Superset using the `apache-superset` package [published on PyPI](https://pypi.org/project/apache-superset/). ## OS Dependencies @@ -128,10 +128,10 @@ command line.
### Installing and Initializing Superset -First, start by installing `apache-superset`: +First, start by installing `apache-superset`: ```bash -pip install apache-superset +pip install apache-superset ``` Then, you need to initialize the database: diff --git a/docs/docs/installation/upgrading-superset.mdx b/docs/docs/installation/upgrading-superset.mdx index 459223385c6a..38e03822dd40 100644 --- a/docs/docs/installation/upgrading-superset.mdx +++ b/docs/docs/installation/upgrading-superset.mdx @@ -32,7 +32,7 @@ docker compose up To upgrade superset in a native installation, run the following commands: ```bash -pip install apache-superset --upgrade +pip install apache-superset --upgrade ``` ## Upgrading the Metadata Database diff --git a/docs/docs/security/cves.mdx b/docs/docs/security/cves.mdx index 4ac0bb529650..e61d8602162d 100644 --- a/docs/docs/security/cves.mdx +++ b/docs/docs/security/cves.mdx @@ -2,6 +2,21 @@ title: CVEs fixed by release sidebar_position: 2 --- +#### Version 4.1.2 + +| CVE | Title | Affected | +|:---------------|:-----------------------------------------------------------------------------------|---------:| +| CVE-2025-27696 | Improper authorization leading to resource ownership takeover | < 4.1.2 | + +#### Version 4.1.0 + +| CVE | Title | Affected | +|:---------------|:-----------------------------------------------------------------------------------|---------:| +| CVE-2024-53947 | Improper SQL authorisation, parse for specific postgres functions | < 4.1.0 | +| CVE-2024-53948 | Error verbosity exposes metadata in analytics databases | < 4.1.0 | +| CVE-2024-53949 | Lower privilege users are able to create Role when FAB_ADD_SECURITY_API is enabled | < 4.1.0 | +| CVE-2024-55633 | SQLLab Improper readonly query validation allows unauthorized write access | < 4.1.0 | + #### Version 4.0.2 | CVE | Title | Affected | diff --git a/docs/docs/using-superset/creating-your-first-dashboard.mdx b/docs/docs/using-superset/creating-your-first-dashboard.mdx
index 95e6512782a2..8a7343d7a833 100644 --- a/docs/docs/using-superset/creating-your-first-dashboard.mdx +++ b/docs/docs/using-superset/creating-your-first-dashboard.mdx @@ -106,7 +106,7 @@ You can also certify metrics if you'd like for your team in this view. 2. Virtual calculated columns: you can write SQL queries that customize the appearance and behavior -of a specific column (e.g. `CAST(recovery_rate) as float`). +of a specific column (e.g. `CAST(recovery_rate as float)`). Aggregate functions aren't allowed in calculated columns. <img src={useBaseUrl("/img/tutorial/tutorial_calculated_column.png" )} /> diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 7cfd903cac11..f27d10e0c6c9 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -203,13 +203,18 @@ const config = { ({ docs: { sidebarPath: require.resolve('./sidebars.js'), - editUrl: 'https://github.com/apache/superset/edit/master/docs', + editUrl: + ({versionDocsDirPath, docPath}) => { + if (docPath === 'intro.md') { + return 'https://github.com/apache/superset/edit/master/README.md' + } + return `https://github.com/apache/superset/edit/master/docs/${versionDocsDirPath}/${docPath}` + } }, blog: { showReadingTime: true, // Please change this to your repo. 
- editUrl: - 'https://github.com/facebook/docusaurus/edit/main/website/blog/', + editUrl: 'https://github.com/facebook/docusaurus/edit/main/website/blog/', }, theme: { customCss: require.resolve('./src/styles/custom.css'), diff --git a/docs/static/resources/openapi.json b/docs/static/resources/openapi.json index a039bd3a2a51..f5a44193ea35 100644 --- a/docs/static/resources/openapi.json +++ b/docs/static/resources/openapi.json @@ -100,8 +100,13 @@ "QUERY_SECURITY_ACCESS_ERROR", "MISSING_OWNERSHIP_ERROR", "USER_ACTIVITY_SECURITY_ACCESS_ERROR", + "DASHBOARD_SECURITY_ACCESS_ERROR", + "CHART_SECURITY_ACCESS_ERROR", + "OAUTH2_REDIRECT", + "OAUTH2_REDIRECT_ERROR", "BACKEND_TIMEOUT_ERROR", "DATABASE_NOT_FOUND_ERROR", + "TABLE_NOT_FOUND_ERROR", "MISSING_TEMPLATE_PARAMS_ERROR", "INVALID_TEMPLATE_PARAMS_ERROR", "RESULTS_BACKEND_NOT_CONFIGURED_ERROR", @@ -112,10 +117,13 @@ "RESULTS_BACKEND_ERROR", "ASYNC_WORKERS_ERROR", "ADHOC_SUBQUERY_NOT_ALLOWED_ERROR", + "INVALID_SQL_ERROR", + "RESULT_TOO_LARGE_ERROR", "GENERIC_COMMAND_ERROR", "GENERIC_BACKEND_ERROR", "INVALID_PAYLOAD_FORMAT_ERROR", "INVALID_PAYLOAD_SCHEMA_ERROR", + "MARSHMALLOW_ERROR", "REPORT_NOTIFICATION_ERROR" ], "type": "string" @@ -458,13 +466,13 @@ "AnnotationRestApi.get_list": { "properties": { "changed_by": { - "$ref": "#/components/schemas/AnnotationRestApi.get_list.User1" + "$ref": "#/components/schemas/AnnotationRestApi.get_list.User" }, "changed_on_delta_humanized": { "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/AnnotationRestApi.get_list.User" + "$ref": "#/components/schemas/AnnotationRestApi.get_list.User1" }, "end_dttm": { "format": "date-time", @@ -580,6 +588,29 @@ }, "type": "object" }, + "AppleHealthResponseSchema": { + "properties": { + "cache_message": { + "description": "Human readable status message for the cache connection", + "type": "string" + }, + "cache_status": { + "description": "The status of the cache", + "enum": ["NA", "HEALTHY", "ERROR"], + "type": "string" + }, + 
"metastore_message": { + "description": "Human readable status message for the metastore connection", + "type": "string" + }, + "metastore_status": { + "description": "The status of the metastore", + "enum": ["NA", "HEALTHY", "ERROR"], + "type": "string" + } + }, + "type": "object" + }, "AvailableDomainsSchema": { "properties": { "domains": { @@ -591,10 +622,127 @@ }, "type": "object" }, + "CSVMetadataUploadFilePostSchema": { + "properties": { + "delimiter": { + "description": "The delimiter of the CSV file", + "type": "string" + }, + "file": { + "description": "The file to upload", + "format": "binary", + "type": "string" + }, + "header_row": { + "description": "Row containing the headers to use as column names(0 is first line of data). Leave empty if there is no header row.", + "type": "integer" + } + }, + "required": ["file"], + "type": "object" + }, + "CSVUploadPostSchema": { + "properties": { + "already_exists": { + "default": "fail", + "description": "What to do if the table already exists accepts: fail, replace, append", + "enum": ["fail", "replace", "append"], + "type": "string" + }, + "column_data_types": { + "description": "A dictionary with column names and their data types if you need to change the defaults. Example: {'user_id':'int'}. Check Python Pandas library for supported data types", + "type": "string" + }, + "column_dates": { + "description": "A list of column names that should be parsed as dates. Example: date,timestamp", + "items": { + "type": "string" + }, + "type": "array" + }, + "columns_read": { + "description": "A List of the column names that should be read", + "items": { + "type": "string" + }, + "type": "array" + }, + "dataframe_index": { + "description": "Write dataframe index as a column.", + "type": "boolean" + }, + "day_first": { + "description": "DD/MM format dates, international and European format", + "type": "boolean" + }, + "decimal_character": { + "description": "Character to recognize as decimal point. 
Default is '.'", + "type": "string" + }, + "delimiter": { + "description": "The delimiter of the CSV file", + "type": "string" + }, + "file": { + "description": "The CSV file to upload", + "format": "text/csv", + "type": "string" + }, + "header_row": { + "description": "Row containing the headers to use as column names(0 is first line of data). Leave empty if there is no header row.", + "type": "integer" + }, + "index_column": { + "description": "Column to use as the row labels of the dataframe. Leave empty if no index column", + "type": "string" + }, + "index_label": { + "description": "Index label for index column.", + "type": "string" + }, + "null_values": { + "description": "A list of strings that should be treated as null. Examples: '' for empty strings, 'None', 'N/A',Warning: Hive database supports only a single value", + "items": { + "type": "string" + }, + "type": "array" + }, + "rows_to_read": { + "description": "Number of rows to read from the file. If None, reads all rows.", + "minimum": 1, + "nullable": true, + "type": "integer" + }, + "schema": { + "description": "The schema to upload the data file to.", + "type": "string" + }, + "skip_blank_lines": { + "description": "Skip blank lines in the CSV file.", + "type": "boolean" + }, + "skip_initial_space": { + "description": "Skip spaces after delimiter.", + "type": "boolean" + }, + "skip_rows": { + "description": "Number of rows to skip at start of file.", + "type": "integer" + }, + "table_name": { + "description": "The name of the table to be created/appended", + "maxLength": 10000, + "minLength": 1, + "type": "string" + } + }, + "required": ["file", "table_name"], + "type": "object" + }, "CacheInvalidationRequestSchema": { "properties": { "datasource_uids": { - "description": "The uid of the dataset/datasource this new chart will use. A complete datasource identification needs `datasouce_uid` ", + "description": "The uid of the dataset/datasource this new chart will use. 
A complete datasource identification needs `datasource_uid` ", "items": { "type": "string" }, @@ -642,6 +790,18 @@ }, "type": "object" }, + "CatalogsResponseSchema": { + "properties": { + "result": { + "items": { + "description": "A database catalog name", + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "ChartCacheScreenshotResponseSchema": { "properties": { "cache_key": { @@ -864,14 +1024,7 @@ }, "type": { "description": "Datasource type", - "enum": [ - "sl_table", - "table", - "dataset", - "query", - "saved_query", - "view" - ], + "enum": ["table", "dataset", "query", "saved_query", "view"], "type": "string" } }, @@ -884,6 +1037,11 @@ "description": "HAVING clause to be added to aggregate queries using AND operator.", "type": "string" }, + "instant_time_comparison_range": { + "description": "This is only set using the new time comparison controls that is made available in some plugins behind the experimental feature flag.", + "nullable": true, + "type": "string" + }, "relative_end": { "description": "End time for relative time deltas. 
Default: `config[\"DEFAULT_RELATIVE_START_TIME\"]`", "enum": ["today", "now"], @@ -915,12 +1073,7 @@ "1969-12-28T00:00:00Z/P1W", "1969-12-29T00:00:00Z/P1W", "P1W/1970-01-03T00:00:00Z", - "P1W/1970-01-04T00:00:00Z", - "PT2H", - "PT4H", - "PT8H", - "PT10H", - "PT12H" + "P1W/1970-01-04T00:00:00Z" ], "example": "P1D", "nullable": true, @@ -958,6 +1111,7 @@ ">=", "<=", "LIKE", + "NOT LIKE", "ILIKE", "IS NULL", "IS NOT NULL", @@ -1098,8 +1252,10 @@ "geodetic_parse", "geohash_decode", "geohash_encode", + "histogram", "pivot", "prophet", + "rank", "rename", "resample", "rolling", @@ -1173,12 +1329,7 @@ "1969-12-28T00:00:00Z/P1W", "1969-12-29T00:00:00Z/P1W", "P1W/1970-01-03T00:00:00Z", - "P1W/1970-01-04T00:00:00Z", - "PT2H", - "PT4H", - "PT8H", - "PT10H", - "PT12H" + "P1W/1970-01-04T00:00:00Z" ], "example": "P1D", "type": "string" @@ -1603,7 +1754,7 @@ "type": "boolean" }, "owners": { - "$ref": "#/components/schemas/ChartDataRestApi.get.User" + "$ref": "#/components/schemas/ChartDataRestApi.get.AppleUser" }, "params": { "nullable": true, @@ -1635,6 +1786,23 @@ }, "type": "object" }, + "ChartDataRestApi.get.AppleUser": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, "ChartDataRestApi.get.Dashboard": { "properties": { "dashboard_title": { @@ -1668,23 +1836,6 @@ }, "type": "object" }, - "ChartDataRestApi.get.User": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" - }, - "id": { - "type": "integer" - }, - "last_name": { - "maxLength": 64, - "type": "string" - } - }, - "required": ["first_name", "last_name"], - "type": "object" - }, "ChartDataRestApi.get_list": { "properties": { "cache_timeout": { @@ -1700,7 +1851,7 @@ "type": "string" }, "changed_by": { - "$ref": "#/components/schemas/ChartDataRestApi.get_list.User3" + "$ref": 
"#/components/schemas/ChartDataRestApi.get_list.User" }, "changed_by_name": { "readOnly": true @@ -1715,7 +1866,7 @@ "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/ChartDataRestApi.get_list.User" + "$ref": "#/components/schemas/ChartDataRestApi.get_list.User1" }, "created_by_name": { "readOnly": true @@ -1766,10 +1917,10 @@ "type": "string" }, "last_saved_by": { - "$ref": "#/components/schemas/ChartDataRestApi.get_list.User2" + "$ref": "#/components/schemas/ChartDataRestApi.get_list.AppleUser" }, "owners": { - "$ref": "#/components/schemas/ChartDataRestApi.get_list.User1" + "$ref": "#/components/schemas/ChartDataRestApi.get_list.AppleUser1" }, "params": { "nullable": true, @@ -1803,6 +1954,40 @@ }, "type": "object" }, + "ChartDataRestApi.get_list.AppleUser": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, + "ChartDataRestApi.get_list.AppleUser1": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, "ChartDataRestApi.get_list.Dashboard": { "properties": { "dashboard_title": { @@ -1880,37 +2065,6 @@ "required": ["first_name", "last_name"], "type": "object" }, - "ChartDataRestApi.get_list.User2": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" - }, - "id": { - "type": "integer" - }, - "last_name": { - "maxLength": 64, - "type": "string" - } - }, - "required": ["first_name", "last_name"], - "type": "object" - }, - "ChartDataRestApi.get_list.User3": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" - }, - "last_name": { - "maxLength": 64, - "type": "string" - } - }, - "required": ["first_name", "last_name"], - 
"type": "object" - }, "ChartDataRestApi.post": { "properties": { "cache_timeout": { @@ -1946,14 +2100,7 @@ }, "datasource_type": { "description": "The type of dataset/datasource identified on `datasource_id`.", - "enum": [ - "sl_table", - "table", - "dataset", - "query", - "saved_query", - "view" - ], + "enum": ["table", "dataset", "query", "saved_query", "view"], "type": "string" }, "description": { @@ -1987,7 +2134,7 @@ "type": "string" }, "query_context_generation": { - "description": "The query context generation represents whether the query_contextis user generated or not so that it does not update user modfiedstate.", + "description": "The query context generation represents whether the query_contextis user generated or not so that it does not update user modifiedstate.", "nullable": true, "type": "boolean" }, @@ -2039,14 +2186,7 @@ }, "datasource_type": { "description": "The type of dataset/datasource identified on `datasource_id`.", - "enum": [ - "sl_table", - "table", - "dataset", - "query", - "saved_query", - "view" - ], + "enum": ["table", "dataset", "query", "saved_query", "view"], "nullable": true, "type": "string" }, @@ -2081,7 +2221,7 @@ "type": "string" }, "query_context_generation": { - "description": "The query context generation represents whether the query_contextis user generated or not so that it does not update user modfiedstate.", + "description": "The query context generation represents whether the query_contextis user generated or not so that it does not update user modifiedstate.", "nullable": true, "type": "boolean" }, @@ -2094,7 +2234,8 @@ }, "tags": { "items": { - "$ref": "#/components/schemas/Tag" + "description": "Tags to be associated with the chart", + "type": "integer" }, "type": "array" }, @@ -2371,7 +2512,7 @@ "type": "boolean" }, "owners": { - "$ref": "#/components/schemas/ChartRestApi.get.User" + "$ref": "#/components/schemas/ChartRestApi.get.AppleUser" }, "params": { "nullable": true, @@ -2403,6 +2544,23 @@ }, "type": 
"object" }, + "ChartRestApi.get.AppleUser": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, "ChartRestApi.get.Dashboard": { "properties": { "dashboard_title": { @@ -2436,23 +2594,6 @@ }, "type": "object" }, - "ChartRestApi.get.User": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" - }, - "id": { - "type": "integer" - }, - "last_name": { - "maxLength": 64, - "type": "string" - } - }, - "required": ["first_name", "last_name"], - "type": "object" - }, "ChartRestApi.get_list": { "properties": { "cache_timeout": { @@ -2468,7 +2609,7 @@ "type": "string" }, "changed_by": { - "$ref": "#/components/schemas/ChartRestApi.get_list.User3" + "$ref": "#/components/schemas/ChartRestApi.get_list.User" }, "changed_by_name": { "readOnly": true @@ -2483,7 +2624,7 @@ "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/ChartRestApi.get_list.User" + "$ref": "#/components/schemas/ChartRestApi.get_list.User1" }, "created_by_name": { "readOnly": true @@ -2534,10 +2675,10 @@ "type": "string" }, "last_saved_by": { - "$ref": "#/components/schemas/ChartRestApi.get_list.User2" + "$ref": "#/components/schemas/ChartRestApi.get_list.AppleUser" }, "owners": { - "$ref": "#/components/schemas/ChartRestApi.get_list.User1" + "$ref": "#/components/schemas/ChartRestApi.get_list.AppleUser1" }, "params": { "nullable": true, @@ -2571,84 +2712,84 @@ }, "type": "object" }, - "ChartRestApi.get_list.Dashboard": { + "ChartRestApi.get_list.AppleUser": { "properties": { - "dashboard_title": { - "maxLength": 500, - "nullable": true, + "first_name": { + "maxLength": 64, "type": "string" }, "id": { "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" } }, + "required": ["first_name", "last_name"], "type": "object" }, - "ChartRestApi.get_list.SqlaTable": { 
+ "ChartRestApi.get_list.AppleUser1": { "properties": { - "default_endpoint": { - "nullable": true, + "first_name": { + "maxLength": 64, "type": "string" }, - "table_name": { - "maxLength": 250, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, "type": "string" } }, - "required": ["table_name"], + "required": ["first_name", "last_name"], "type": "object" }, - "ChartRestApi.get_list.Tag": { + "ChartRestApi.get_list.Dashboard": { "properties": { - "id": { - "type": "integer" - }, - "name": { - "maxLength": 250, + "dashboard_title": { + "maxLength": 500, "nullable": true, "type": "string" }, - "type": { - "enum": [1, 2, 3, 4] + "id": { + "type": "integer" } }, "type": "object" }, - "ChartRestApi.get_list.User": { + "ChartRestApi.get_list.SqlaTable": { "properties": { - "first_name": { - "maxLength": 64, + "default_endpoint": { + "nullable": true, "type": "string" }, - "id": { - "type": "integer" - }, - "last_name": { - "maxLength": 64, + "table_name": { + "maxLength": 250, "type": "string" } }, - "required": ["first_name", "last_name"], + "required": ["table_name"], "type": "object" }, - "ChartRestApi.get_list.User1": { + "ChartRestApi.get_list.Tag": { "properties": { - "first_name": { - "maxLength": 64, - "type": "string" - }, "id": { "type": "integer" }, - "last_name": { - "maxLength": 64, + "name": { + "maxLength": 250, + "nullable": true, "type": "string" + }, + "type": { + "enum": [1, 2, 3, 4] } }, - "required": ["first_name", "last_name"], "type": "object" }, - "ChartRestApi.get_list.User2": { + "ChartRestApi.get_list.User": { "properties": { "first_name": { "maxLength": 64, @@ -2665,12 +2806,15 @@ "required": ["first_name", "last_name"], "type": "object" }, - "ChartRestApi.get_list.User3": { + "ChartRestApi.get_list.User1": { "properties": { "first_name": { "maxLength": 64, "type": "string" }, + "id": { + "type": "integer" + }, "last_name": { "maxLength": 64, "type": "string" @@ -2714,14 +2858,7 @@ }, "datasource_type": { "description": 
"The type of dataset/datasource identified on `datasource_id`.", - "enum": [ - "sl_table", - "table", - "dataset", - "query", - "saved_query", - "view" - ], + "enum": ["table", "dataset", "query", "saved_query", "view"], "type": "string" }, "description": { @@ -2755,7 +2892,7 @@ "type": "string" }, "query_context_generation": { - "description": "The query context generation represents whether the query_contextis user generated or not so that it does not update user modfiedstate.", + "description": "The query context generation represents whether the query_contextis user generated or not so that it does not update user modifiedstate.", "nullable": true, "type": "boolean" }, @@ -2807,14 +2944,7 @@ }, "datasource_type": { "description": "The type of dataset/datasource identified on `datasource_id`.", - "enum": [ - "sl_table", - "table", - "dataset", - "query", - "saved_query", - "view" - ], + "enum": ["table", "dataset", "query", "saved_query", "view"], "nullable": true, "type": "string" }, @@ -2849,7 +2979,7 @@ "type": "string" }, "query_context_generation": { - "description": "The query context generation represents whether the query_contextis user generated or not so that it does not update user modfiedstate.", + "description": "The query context generation represents whether the query_contextis user generated or not so that it does not update user modifiedstate.", "nullable": true, "type": "boolean" }, @@ -2862,7 +2992,8 @@ }, "tags": { "items": { - "$ref": "#/components/schemas/Tag" + "description": "Tags to be associated with the chart", + "type": "integer" }, "type": "array" }, @@ -2877,11 +3008,70 @@ }, "type": "object" }, + "ColumnarMetadataUploadFilePostSchema": { + "properties": { + "file": { + "description": "The file to upload", + "format": "binary", + "type": "string" + } + }, + "required": ["file"], + "type": "object" + }, + "ColumnarUploadPostSchema": { + "properties": { + "already_exists": { + "default": "fail", + "description": "What to do if the 
table already exists accepts: fail, replace, append", + "enum": ["fail", "replace", "append"], + "type": "string" + }, + "columns_read": { + "description": "A List of the column names that should be read", + "items": { + "type": "string" + }, + "type": "array" + }, + "dataframe_index": { + "description": "Write dataframe index as a column.", + "type": "boolean" + }, + "file": { + "description": "The Columnar file to upload", + "format": "binary", + "type": "string" + }, + "index_label": { + "description": "Index label for index column.", + "type": "string" + }, + "schema": { + "description": "The schema to upload the data file to.", + "type": "string" + }, + "table_name": { + "description": "The name of the table to be created/appended", + "maxLength": 10000, + "minLength": 1, + "type": "string" + } + }, + "required": ["file", "table_name"], + "type": "object" + }, "CssTemplateRestApi.get": { "properties": { - "created_by": { + "changed_by": { "$ref": "#/components/schemas/CssTemplateRestApi.get.User" }, + "changed_on_delta_humanized": { + "readOnly": true + }, + "created_by": { + "$ref": "#/components/schemas/CssTemplateRestApi.get.User1" + }, "css": { "nullable": true, "type": "string" @@ -2914,16 +3104,33 @@ "required": ["first_name", "last_name"], "type": "object" }, + "CssTemplateRestApi.get.User1": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, "CssTemplateRestApi.get_list": { "properties": { "changed_by": { - "$ref": "#/components/schemas/CssTemplateRestApi.get_list.User1" + "$ref": "#/components/schemas/CssTemplateRestApi.get_list.User" }, "changed_on_delta_humanized": { "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/CssTemplateRestApi.get_list.User" + "$ref": "#/components/schemas/CssTemplateRestApi.get_list.User1" }, "created_on": { 
"format": "date-time", @@ -3007,6 +3214,23 @@ }, "type": "object" }, + "DashboardCacheScreenshotResponseSchema": { + "properties": { + "cache_key": { + "description": "The cache key", + "type": "string" + }, + "dashboard_url": { + "description": "The url to render the dashboard", + "type": "string" + }, + "image_url": { + "description": "The url to fetch the screenshot", + "type": "string" + } + }, + "type": "object" + }, "DashboardCopySchema": { "properties": { "css": { @@ -3034,12 +3258,21 @@ }, "DashboardDatasetSchema": { "properties": { + "always_filter_main_dttm": { + "type": "boolean" + }, "cache_timeout": { "type": "integer" }, "column_formats": { "type": "object" }, + "column_names": { + "items": { + "type": "string" + }, + "type": "array" + }, "column_types": { "items": { "type": "integer" @@ -3106,6 +3339,9 @@ "name": { "type": "string" }, + "normalize_columns": { + "type": "boolean" + }, "offset": { "type": "integer" }, @@ -3199,6 +3435,12 @@ }, "type": "array" }, + "created_by": { + "$ref": "#/components/schemas/User" + }, + "created_on_delta_humanized": { + "type": "string" + }, "css": { "description": "Override CSS for the dashboard.", "type": "string" @@ -3242,7 +3484,7 @@ }, "tags": { "items": { - "$ref": "#/components/schemas/Tag1" + "$ref": "#/components/schemas/Tag" }, "type": "array" }, @@ -3306,7 +3548,7 @@ "type": "string" }, "changed_by": { - "$ref": "#/components/schemas/DashboardRestApi.get_list.User2" + "$ref": "#/components/schemas/DashboardRestApi.get_list.User" }, "changed_by_name": { "readOnly": true @@ -3318,7 +3560,7 @@ "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/DashboardRestApi.get_list.User" + "$ref": "#/components/schemas/DashboardRestApi.get_list.User1" }, "created_on_delta_humanized": { "readOnly": true @@ -3343,7 +3585,7 @@ "type": "string" }, "owners": { - "$ref": "#/components/schemas/DashboardRestApi.get_list.User1" + "$ref": "#/components/schemas/DashboardRestApi.get_list.AppleUser" }, 
"position_json": { "nullable": true, @@ -3376,6 +3618,23 @@ }, "type": "object" }, + "DashboardRestApi.get_list.AppleUser": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, "DashboardRestApi.get_list.Role": { "properties": { "id": { @@ -3439,23 +3698,6 @@ "required": ["first_name", "last_name"], "type": "object" }, - "DashboardRestApi.get_list.User2": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" - }, - "id": { - "type": "integer" - }, - "last_name": { - "maxLength": 64, - "type": "string" - } - }, - "required": ["first_name", "last_name"], - "type": "object" - }, "DashboardRestApi.post": { "properties": { "certification_details": { @@ -3592,12 +3834,23 @@ "minLength": 0, "nullable": true, "type": "string" + }, + "tags": { + "items": { + "description": "Tags to be associated with the dashboard", + "nullable": true, + "type": "integer" + }, + "type": "array" } }, "type": "object" }, "Database": { "properties": { + "allow_multi_catalog": { + "type": "boolean" + }, "allows_cost_estimate": { "type": "boolean" }, @@ -3685,15 +3938,14 @@ "type": "string" }, "engine_information": { - "additionalProperties": {}, - "type": "object" + "$ref": "#/components/schemas/EngineInformation" }, "expose_in_sqllab": { "description": "Expose this database to SQLLab", "type": "boolean" }, "extra": { - "description": "<p>JSON string containing extra configuration elements.<br>1. 
The <code>engine_params</code> object gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine\" rel=\"noopener noreferrer\">sqlalchemy.create_engine</a> call, while the <code>metadata_params</code> gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html#sqlalchemy.schema.MetaData\" rel=\"noopener noreferrer\">sqlalchemy.MetaData</a> call.<br>2. The <code>metadata_cache_timeout</code> is a cache timeout setting in seconds for metadata fetch of this database. Specify it as <strong>\"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600}</strong>. If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires.<br>3. The <code>schemas_allowed_for_file_upload</code> is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as <strong>\"schemas_allowed_for_file_upload\": [\"public\", \"csv_upload\"]</strong>. If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty<br>4. The <code>version</code> field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct<br>5. The <code>allows_virtual_table_explore</code> field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.<br>6. The <code>disable_data_preview</code> field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.</p>", + "description": "<p>JSON string containing extra configuration elements.<br>1. 
The <code>engine_params</code> object gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine\" rel=\"noopener noreferrer\">sqlalchemy.create_engine</a> call, while the <code>metadata_params</code> gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html#sqlalchemy.schema.MetaData\" rel=\"noopener noreferrer\">sqlalchemy.MetaData</a> call.<br>2. The <code>metadata_cache_timeout</code> is a cache timeout setting in seconds for metadata fetch of this database. Specify it as <strong>\"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600}</strong>. If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires.<br>3. The <code>schemas_allowed_for_file_upload</code> is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as <strong>\"schemas_allowed_for_file_upload\": [\"public\", \"csv_upload\"]</strong>. If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty<br>4. The <code>version</code> field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct<br>5. The <code>allows_virtual_table_explore</code> field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.<br>6. The <code>disable_data_preview</code> field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.7. The <code>disable_drill_to_detail</code> field is a boolean specifying whether or notdrill to detail is disabled for the database.8. 
The <code>allow_multi_catalog</code> indicates if the database allows changing the default catalog when running queries and creating datasets.</p>", "type": "string" }, "force_ctas_schema": { @@ -3930,6 +4182,9 @@ "nullable": true, "type": "boolean" }, + "allow_multi_catalog": { + "readOnly": true + }, "allow_run_async": { "nullable": true, "type": "boolean" @@ -3946,6 +4201,9 @@ "backend": { "readOnly": true }, + "changed_by": { + "$ref": "#/components/schemas/DatabaseRestApi.get_list.User" + }, "changed_on": { "format": "date-time", "nullable": true, @@ -3955,7 +4213,7 @@ "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/DatabaseRestApi.get_list.User" + "$ref": "#/components/schemas/DatabaseRestApi.get_list.User1" }, "database_name": { "maxLength": 250, @@ -3964,6 +4222,9 @@ "disable_data_preview": { "readOnly": true }, + "disable_drill_to_detail": { + "readOnly": true + }, "engine_information": { "readOnly": true }, @@ -4009,6 +4270,20 @@ "required": ["first_name", "last_name"], "type": "object" }, + "DatabaseRestApi.get_list.User1": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, "DatabaseRestApi.post": { "properties": { "allow_ctas": { @@ -4066,7 +4341,7 @@ "type": "string" }, "extra": { - "description": "<p>JSON string containing extra configuration elements.<br>1. The <code>engine_params</code> object gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine\" rel=\"noopener noreferrer\">sqlalchemy.create_engine</a> call, while the <code>metadata_params</code> gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html#sqlalchemy.schema.MetaData\" rel=\"noopener noreferrer\">sqlalchemy.MetaData</a> call.<br>2. 
The <code>metadata_cache_timeout</code> is a cache timeout setting in seconds for metadata fetch of this database. Specify it as <strong>\"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600}</strong>. If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires.<br>3. The <code>schemas_allowed_for_file_upload</code> is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as <strong>\"schemas_allowed_for_file_upload\": [\"public\", \"csv_upload\"]</strong>. If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty<br>4. The <code>version</code> field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct<br>5. The <code>allows_virtual_table_explore</code> field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.<br>6. The <code>disable_data_preview</code> field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.</p>", + "description": "<p>JSON string containing extra configuration elements.<br>1. The <code>engine_params</code> object gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine\" rel=\"noopener noreferrer\">sqlalchemy.create_engine</a> call, while the <code>metadata_params</code> gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html#sqlalchemy.schema.MetaData\" rel=\"noopener noreferrer\">sqlalchemy.MetaData</a> call.<br>2. The <code>metadata_cache_timeout</code> is a cache timeout setting in seconds for metadata fetch of this database. Specify it as <strong>\"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600}</strong>. If unset, cache will not be enabled for the functionality. 
A timeout of 0 indicates that the cache never expires.<br>3. The <code>schemas_allowed_for_file_upload</code> is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as <strong>\"schemas_allowed_for_file_upload\": [\"public\", \"csv_upload\"]</strong>. If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty<br>4. The <code>version</code> field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct<br>5. The <code>allows_virtual_table_explore</code> field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.<br>6. The <code>disable_data_preview</code> field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.7. The <code>disable_drill_to_detail</code> field is a boolean specifying whether or notdrill to detail is disabled for the database.8. The <code>allow_multi_catalog</code> indicates if the database allows changing the default catalog when running queries and creating datasets.</p>", "type": "string" }, "force_ctas_schema": { @@ -4178,7 +4453,7 @@ "type": "string" }, "extra": { - "description": "<p>JSON string containing extra configuration elements.<br>1. The <code>engine_params</code> object gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine\" rel=\"noopener noreferrer\">sqlalchemy.create_engine</a> call, while the <code>metadata_params</code> gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html#sqlalchemy.schema.MetaData\" rel=\"noopener noreferrer\">sqlalchemy.MetaData</a> call.<br>2. The <code>metadata_cache_timeout</code> is a cache timeout setting in seconds for metadata fetch of this database. Specify it as <strong>\"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600}</strong>. 
If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires.<br>3. The <code>schemas_allowed_for_file_upload</code> is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as <strong>\"schemas_allowed_for_file_upload\": [\"public\", \"csv_upload\"]</strong>. If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty<br>4. The <code>version</code> field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct<br>5. The <code>allows_virtual_table_explore</code> field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.<br>6. The <code>disable_data_preview</code> field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.</p>", + "description": "<p>JSON string containing extra configuration elements.<br>1. The <code>engine_params</code> object gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine\" rel=\"noopener noreferrer\">sqlalchemy.create_engine</a> call, while the <code>metadata_params</code> gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html#sqlalchemy.schema.MetaData\" rel=\"noopener noreferrer\">sqlalchemy.MetaData</a> call.<br>2. The <code>metadata_cache_timeout</code> is a cache timeout setting in seconds for metadata fetch of this database. Specify it as <strong>\"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600}</strong>. If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires.<br>3. The <code>schemas_allowed_for_file_upload</code> is a comma separated list of schemas that CSVs are allowed to upload to. 
Specify it as <strong>\"schemas_allowed_for_file_upload\": [\"public\", \"csv_upload\"]</strong>. If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty<br>4. The <code>version</code> field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct<br>5. The <code>allows_virtual_table_explore</code> field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.<br>6. The <code>disable_data_preview</code> field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.7. The <code>disable_drill_to_detail</code> field is a boolean specifying whether or notdrill to detail is disabled for the database.8. The <code>allow_multi_catalog</code> indicates if the database allows changing the default catalog when running queries and creating datasets.</p>", "type": "string" }, "force_ctas_schema": { @@ -4313,7 +4588,7 @@ "type": "string" }, "extra": { - "description": "<p>JSON string containing extra configuration elements.<br>1. The <code>engine_params</code> object gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine\" rel=\"noopener noreferrer\">sqlalchemy.create_engine</a> call, while the <code>metadata_params</code> gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html#sqlalchemy.schema.MetaData\" rel=\"noopener noreferrer\">sqlalchemy.MetaData</a> call.<br>2. The <code>metadata_cache_timeout</code> is a cache timeout setting in seconds for metadata fetch of this database. Specify it as <strong>\"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600}</strong>. If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires.<br>3. 
The <code>schemas_allowed_for_file_upload</code> is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as <strong>\"schemas_allowed_for_file_upload\": [\"public\", \"csv_upload\"]</strong>. If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty<br>4. The <code>version</code> field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct<br>5. The <code>allows_virtual_table_explore</code> field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.<br>6. The <code>disable_data_preview</code> field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.</p>", + "description": "<p>JSON string containing extra configuration elements.<br>1. The <code>engine_params</code> object gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine\" rel=\"noopener noreferrer\">sqlalchemy.create_engine</a> call, while the <code>metadata_params</code> gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html#sqlalchemy.schema.MetaData\" rel=\"noopener noreferrer\">sqlalchemy.MetaData</a> call.<br>2. The <code>metadata_cache_timeout</code> is a cache timeout setting in seconds for metadata fetch of this database. Specify it as <strong>\"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600}</strong>. If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires.<br>3. The <code>schemas_allowed_for_file_upload</code> is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as <strong>\"schemas_allowed_for_file_upload\": [\"public\", \"csv_upload\"]</strong>. 
If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty<br>4. The <code>version</code> field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct<br>5. The <code>allows_virtual_table_explore</code> field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.<br>6. The <code>disable_data_preview</code> field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.7. The <code>disable_drill_to_detail</code> field is a boolean specifying whether or notdrill to detail is disabled for the database.8. The <code>allow_multi_catalog</code> indicates if the database allows changing the default catalog when running queries and creating datasets.</p>", "type": "string" }, "impersonate_user": { @@ -4382,7 +4657,7 @@ "type": "string" }, "extra": { - "description": "<p>JSON string containing extra configuration elements.<br>1. The <code>engine_params</code> object gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine\" rel=\"noopener noreferrer\">sqlalchemy.create_engine</a> call, while the <code>metadata_params</code> gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html#sqlalchemy.schema.MetaData\" rel=\"noopener noreferrer\">sqlalchemy.MetaData</a> call.<br>2. The <code>metadata_cache_timeout</code> is a cache timeout setting in seconds for metadata fetch of this database. Specify it as <strong>\"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600}</strong>. If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires.<br>3. The <code>schemas_allowed_for_file_upload</code> is a comma separated list of schemas that CSVs are allowed to upload to. 
Specify it as <strong>\"schemas_allowed_for_file_upload\": [\"public\", \"csv_upload\"]</strong>. If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty<br>4. The <code>version</code> field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct<br>5. The <code>allows_virtual_table_explore</code> field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.<br>6. The <code>disable_data_preview</code> field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.</p>", + "description": "<p>JSON string containing extra configuration elements.<br>1. The <code>engine_params</code> object gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine\" rel=\"noopener noreferrer\">sqlalchemy.create_engine</a> call, while the <code>metadata_params</code> gets unpacked into the <a href=\"https://docs.sqlalchemy.org/en/rel_1_0/core/metadata.html#sqlalchemy.schema.MetaData\" rel=\"noopener noreferrer\">sqlalchemy.MetaData</a> call.<br>2. The <code>metadata_cache_timeout</code> is a cache timeout setting in seconds for metadata fetch of this database. Specify it as <strong>\"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600}</strong>. If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires.<br>3. The <code>schemas_allowed_for_file_upload</code> is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as <strong>\"schemas_allowed_for_file_upload\": [\"public\", \"csv_upload\"]</strong>. If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty<br>4. The <code>version</code> field is a string specifying the this db's version. 
This should be used with Presto DBs so that the syntax is correct<br>5. The <code>allows_virtual_table_explore</code> field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.<br>6. The <code>disable_data_preview</code> field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.7. The <code>disable_drill_to_detail</code> field is a boolean specifying whether or notdrill to detail is disabled for the database.8. The <code>allow_multi_catalog</code> indicates if the database allows changing the default catalog when running queries and creating datasets.</p>", "type": "string" }, "id": { @@ -4909,12 +5184,21 @@ }, "DatasetRestApi.get": { "properties": { + "always_filter_main_dttm": { + "nullable": true, + "type": "boolean" + }, "cache_timeout": { "nullable": true, "type": "integer" }, + "catalog": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, "changed_by": { - "$ref": "#/components/schemas/DatasetRestApi.get.User2" + "$ref": "#/components/schemas/DatasetRestApi.get.User1" }, "changed_on": { "format": "date-time", @@ -5000,6 +5284,10 @@ "name": { "readOnly": true }, + "normalize_columns": { + "nullable": true, + "type": "boolean" + }, "offset": { "nullable": true, "type": "integer" @@ -5008,7 +5296,7 @@ "readOnly": true }, "owners": { - "$ref": "#/components/schemas/DatasetRestApi.get.User1" + "$ref": "#/components/schemas/DatasetRestApi.get.AppleUser" }, "schema": { "maxLength": 255, @@ -5046,8 +5334,28 @@ "required": ["columns", "database", "metrics", "table_name"], "type": "object" }, + "DatasetRestApi.get.AppleUser": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, "DatasetRestApi.get.Database": { "properties": { + "allow_multi_catalog": { + "readOnly": true 
+ }, "backend": { "readOnly": true }, @@ -5218,9 +5526,6 @@ "maxLength": 64, "type": "string" }, - "id": { - "type": "integer" - }, "last_name": { "maxLength": 64, "type": "string" @@ -5229,24 +5534,15 @@ "required": ["first_name", "last_name"], "type": "object" }, - "DatasetRestApi.get.User2": { + "DatasetRestApi.get_list": { "properties": { - "first_name": { - "maxLength": 64, + "catalog": { + "maxLength": 256, + "nullable": true, "type": "string" }, - "last_name": { - "maxLength": 64, - "type": "string" - } - }, - "required": ["first_name", "last_name"], - "type": "object" - }, - "DatasetRestApi.get_list": { - "properties": { "changed_by": { - "$ref": "#/components/schemas/DatasetRestApi.get_list.User1" + "$ref": "#/components/schemas/DatasetRestApi.get_list.User" }, "changed_by_name": { "readOnly": true @@ -5285,7 +5581,7 @@ "readOnly": true }, "owners": { - "$ref": "#/components/schemas/DatasetRestApi.get_list.User" + "$ref": "#/components/schemas/DatasetRestApi.get_list.AppleUser" }, "schema": { "maxLength": 255, @@ -5304,42 +5600,45 @@ "required": ["database", "table_name"], "type": "object" }, - "DatasetRestApi.get_list.Database": { + "DatasetRestApi.get_list.AppleUser": { "properties": { - "database_name": { - "maxLength": 250, + "first_name": { + "maxLength": 64, "type": "string" }, "id": { "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" } }, - "required": ["database_name"], + "required": ["first_name", "last_name"], "type": "object" }, - "DatasetRestApi.get_list.User": { + "DatasetRestApi.get_list.Database": { "properties": { - "first_name": { - "maxLength": 64, + "database_name": { + "maxLength": 250, "type": "string" }, "id": { "type": "integer" - }, - "last_name": { - "maxLength": 64, - "type": "string" } }, - "required": ["first_name", "last_name"], + "required": ["database_name"], "type": "object" }, - "DatasetRestApi.get_list.User1": { + "DatasetRestApi.get_list.User": { "properties": { "first_name": { "maxLength": 64, 
"type": "string" }, + "id": { + "type": "integer" + }, "last_name": { "maxLength": 64, "type": "string" @@ -5350,6 +5649,16 @@ }, "DatasetRestApi.post": { "properties": { + "always_filter_main_dttm": { + "default": false, + "type": "boolean" + }, + "catalog": { + "maxLength": 250, + "minLength": 0, + "nullable": true, + "type": "string" + }, "database": { "type": "integer" }, @@ -5361,6 +5670,10 @@ "nullable": true, "type": "boolean" }, + "normalize_columns": { + "default": false, + "type": "boolean" + }, "owners": { "items": { "type": "integer" @@ -5388,10 +5701,20 @@ }, "DatasetRestApi.put": { "properties": { + "always_filter_main_dttm": { + "default": false, + "type": "boolean" + }, "cache_timeout": { "nullable": true, "type": "integer" }, + "catalog": { + "maxLength": 250, + "minLength": 0, + "nullable": true, + "type": "string" + }, "columns": { "items": { "$ref": "#/components/schemas/DatasetColumnsPut" @@ -5445,6 +5768,10 @@ }, "type": "array" }, + "normalize_columns": { + "nullable": true, + "type": "boolean" + }, "offset": { "nullable": true, "type": "integer" @@ -5480,6 +5807,11 @@ }, "Datasource": { "properties": { + "catalog": { + "description": "Datasource catalog", + "nullable": true, + "type": "string" + }, "database_name": { "description": "Datasource name", "type": "string" @@ -5490,14 +5822,7 @@ }, "datasource_type": { "description": "The type of dataset/datasource identified on `datasource_id`.", - "enum": [ - "sl_table", - "table", - "dataset", - "query", - "saved_query", - "view" - ], + "enum": ["table", "dataset", "query", "saved_query", "view"], "type": "string" }, "schema": { @@ -5604,8 +5929,34 @@ }, "type": "object" }, + "EngineInformation": { + "properties": { + "disable_ssh_tunneling": { + "description": "SSH tunnel is not available to the database", + "type": "boolean" + }, + "supports_dynamic_catalog": { + "description": "The database supports multiple catalogs in a single connection", + "type": "boolean" + }, + "supports_file_upload": 
{ + "description": "Users can upload files to the database", + "type": "boolean" + }, + "supports_oauth2": { + "description": "The database supports OAuth2", + "type": "boolean" + } + }, + "type": "object" + }, "EstimateQueryCostSchema": { "properties": { + "catalog": { + "description": "The database catalog", + "nullable": true, + "type": "string" + }, "database_id": { "description": "The database id", "type": "integer" @@ -5627,8 +5978,109 @@ "required": ["database_id", "sql"], "type": "object" }, + "ExcelMetadataUploadFilePostSchema": { + "properties": { + "file": { + "description": "The file to upload", + "format": "binary", + "type": "string" + }, + "header_row": { + "description": "Row containing the headers to use as column names(0 is first line of data). Leave empty if there is no header row.", + "type": "integer" + } + }, + "required": ["file"], + "type": "object" + }, + "ExcelUploadPostSchema": { + "properties": { + "already_exists": { + "default": "fail", + "description": "What to do if the table already exists accepts: fail, replace, append", + "enum": ["fail", "replace", "append"], + "type": "string" + }, + "column_dates": { + "description": "A list of column names that should be parsed as dates. Example: date,timestamp", + "items": { + "type": "string" + }, + "type": "array" + }, + "columns_read": { + "description": "A List of the column names that should be read", + "items": { + "type": "string" + }, + "type": "array" + }, + "dataframe_index": { + "description": "Write dataframe index as a column.", + "type": "boolean" + }, + "decimal_character": { + "description": "Character to recognize as decimal point. Default is '.'", + "type": "string" + }, + "file": { + "description": "The Excel file to upload", + "format": "binary", + "type": "string" + }, + "header_row": { + "description": "Row containing the headers to use as column names(0 is first line of data). 
Leave empty if there is no header row.", + "type": "integer" + }, + "index_column": { + "description": "Column to use as the row labels of the dataframe. Leave empty if no index column", + "type": "string" + }, + "index_label": { + "description": "Index label for index column.", + "type": "string" + }, + "null_values": { + "description": "A list of strings that should be treated as null. Examples: '' for empty strings, 'None', 'N/A',Warning: Hive database supports only a single value", + "items": { + "type": "string" + }, + "type": "array" + }, + "rows_to_read": { + "description": "Number of rows to read from the file. If None, reads all rows.", + "minimum": 1, + "nullable": true, + "type": "integer" + }, + "schema": { + "description": "The schema to upload the data file to.", + "type": "string" + }, + "sheet_name": { + "description": "Strings used for sheet names (default is the first sheet).", + "type": "string" + }, + "skip_rows": { + "description": "Number of rows to skip at start of file.", + "type": "integer" + }, + "table_name": { + "description": "The name of the table to be created/appended", + "maxLength": 10000, + "minLength": 1, + "type": "string" + } + }, + "required": ["file", "table_name"], + "type": "object" + }, "ExecutePayloadSchema": { "properties": { + "catalog": { + "nullable": true, + "type": "string" + }, "client_id": { "nullable": true, "type": "string" @@ -5737,14 +6189,7 @@ }, "datasource_type": { "description": "The datasource type", - "enum": [ - "sl_table", - "table", - "dataset", - "query", - "saved_query", - "view" - ], + "enum": ["table", "dataset", "query", "saved_query", "view"], "type": "string" }, "form_data": { @@ -5767,14 +6212,7 @@ }, "datasource_type": { "description": "The datasource type", - "enum": [ - "sl_table", - "table", - "dataset", - "query", - "saved_query", - "view" - ], + "enum": ["table", "dataset", "query", "saved_query", "view"], "type": "string" }, "form_data": { @@ -5799,10 +6237,25 @@ }, 
"GetOrCreateDatasetSchema": { "properties": { + "always_filter_main_dttm": { + "default": false, + "type": "boolean" + }, + "catalog": { + "description": "The catalog the table belongs to", + "maxLength": 250, + "minLength": 0, + "nullable": true, + "type": "string" + }, "database_id": { "description": "ID of database table belongs to", "type": "integer" }, + "normalize_columns": { + "default": false, + "type": "boolean" + }, "schema": { "description": "The schema the table belongs to", "maxLength": 250, @@ -5843,825 +6296,810 @@ "required": ["resources", "rls"], "type": "object" }, - "LogRestApi.get": { + "IASLoginTokenRequestSchema": { "properties": { - "action": { - "maxLength": 512, + "access_token": { + "description": "IAS Access token. Can be requested by adding `token` to the `responseTypes`.", "nullable": true, "type": "string" }, - "dashboard_id": { - "nullable": true, - "type": "integer" - }, - "dttm": { - "format": "date-time", + "client_id": { + "description": "IAS client id. Only needed if `refresh_token` is provided.", "nullable": true, "type": "string" }, - "duration_ms": { - "nullable": true, - "type": "integer" - }, - "json": { + "client_secret": { + "description": "IAS client secret. Only needed if `refresh_token` and `client_id` is provided.", "nullable": true, "type": "string" }, - "referrer": { - "maxLength": 1024, - "nullable": true, + "id_token": { + "description": "IAS ID token. Can be requested by adding `id_token` to the `responseTypes`. The following scopes should be added:`iam:ds:groups`, `corpds:ds:username`, `corpds:ds:firstName`, `corpds:ds:lastName`, `corpds:ds:email`, `corpds:ds:uidNumber`, `iam:ds:explicitgroups`.", "type": "string" }, - "slice_id": { - "nullable": true, - "type": "integer" - }, - "user": { - "$ref": "#/components/schemas/LogRestApi.get.User" - }, - "user_id": { + "refresh_token": { + "description": "IAS refresh token. This can be requested by adding `offline` to `scopes`. 
To be able to refresh the token, the IAS `client_id` and `client_secret` need to be added, as the same client needs to be used for refreshing as logging in.", "nullable": true, - "type": "integer" + "type": "string" } }, + "required": ["id_token"], "type": "object" }, - "LogRestApi.get.User": { + "IASLoginTokenResponseSchema": { "properties": { - "username": { - "maxLength": 64, + "access_token": { + "description": "Access that can be used access the Superset API", + "type": "string" + }, + "refresh_token": { + "description": "Refresh token that can be used to refresh the `access_token`", "type": "string" } }, - "required": ["username"], + "required": ["access_token"], "type": "object" }, - "LogRestApi.get_list": { + "IASProfileResponse": { "properties": { - "action": { - "maxLength": 512, - "nullable": true, - "type": "string" - }, - "dashboard_id": { - "nullable": true, - "type": "integer" - }, - "dttm": { - "format": "date-time", - "nullable": true, + "client_id": { + "description": "The client id", "type": "string" }, - "duration_ms": { - "nullable": true, - "type": "integer" - }, - "json": { - "nullable": true, + "profile": { + "description": "The IAS profile", "type": "string" }, - "referrer": { - "maxLength": 1024, - "nullable": true, + "scope": { + "description": "The scope of the client", "type": "string" - }, - "slice_id": { - "nullable": true, - "type": "integer" - }, - "user": { - "$ref": "#/components/schemas/LogRestApi.get_list.User" - }, - "user_id": { - "nullable": true, - "type": "integer" } }, + "required": ["client_id", "profile"], "type": "object" }, - "LogRestApi.get_list.User": { + "IASProfilesResponseSchema": { "properties": { - "username": { - "maxLength": 64, - "type": "string" + "result": { + "description": "A list of all available IAS profiles", + "items": { + "$ref": "#/components/schemas/IASProfileResponse" + }, + "type": "array" } }, - "required": ["username"], "type": "object" }, - "LogRestApi.post": { + "IASTokenResponse": { 
"properties": { - "id": { - "type": "integer" + "access_token": { + "description": "The access token", + "type": "string" + }, + "has_client_credentials": { + "description": "Are the original client credentials available or not", + "type": "boolean" + }, + "has_refresh_token": { + "description": "Is there a refresh token available or not", + "type": "boolean" + }, + "ias_profile": { + "description": "The IAS profile", + "type": "string" + }, + "id_token": { + "description": "The id token", + "type": "string" } }, + "required": [ + "has_client_credentials", + "has_refresh_token", + "ias_profile", + "id_token" + ], "type": "object" }, - "LogRestApi.put": { + "IASTokensResponseSchema": { "properties": { - "action": { - "maxLength": 512, - "nullable": true, - "type": "string" - }, - "dttm": { - "format": "date-time", - "nullable": true, - "type": "string" - }, - "json": { - "nullable": true, - "type": "string" - }, - "user": { - "nullable": true - } - }, - "type": "object" - }, - "QueryExecutionResponseSchema": { - "properties": { - "columns": { - "items": { - "type": "object" - }, - "type": "array" - }, - "data": { - "items": { - "type": "object" - }, - "type": "array" - }, - "expanded_columns": { - "items": { - "type": "object" - }, - "type": "array" - }, - "query": { - "$ref": "#/components/schemas/QueryResult" - }, - "query_id": { - "type": "integer" - }, - "selected_columns": { + "result": { + "description": "A list of all requested tokens", "items": { - "type": "object" + "$ref": "#/components/schemas/IASTokenResponse" }, "type": "array" - }, - "status": { - "type": "string" } }, "type": "object" }, - "QueryRestApi.get": { + "ImportV1Database": { "properties": { - "changed_on": { - "format": "date-time", - "nullable": true, - "type": "string" + "allow_csv_upload": { + "type": "boolean" }, - "client_id": { - "maxLength": 11, - "type": "string" + "allow_ctas": { + "type": "boolean" }, - "database": { - "$ref": "#/components/schemas/QueryRestApi.get.Database" + 
"allow_cvas": { + "type": "boolean" }, - "end_result_backend_time": { - "nullable": true, - "type": "number" + "allow_dml": { + "type": "boolean" }, - "end_time": { - "nullable": true, - "type": "number" + "allow_run_async": { + "type": "boolean" }, - "error_message": { + "cache_timeout": { "nullable": true, + "type": "integer" + }, + "database_name": { "type": "string" }, - "executed_sql": { + "expose_in_sqllab": { + "type": "boolean" + }, + "external_url": { "nullable": true, "type": "string" }, - "id": { - "type": "integer" + "extra": { + "$ref": "#/components/schemas/ImportV1DatabaseExtra" }, - "limit": { - "nullable": true, - "type": "integer" + "impersonate_user": { + "type": "boolean" }, - "progress": { + "is_managed_externally": { "nullable": true, - "type": "integer" + "type": "boolean" }, - "results_key": { - "maxLength": 64, + "password": { "nullable": true, "type": "string" }, - "rows": { - "nullable": true, - "type": "integer" + "sqlalchemy_uri": { + "type": "string" }, - "schema": { - "maxLength": 256, - "nullable": true, + "ssh_tunnel": { + "allOf": [ + { + "$ref": "#/components/schemas/DatabaseSSHTunnel" + } + ], + "nullable": true + }, + "uuid": { + "format": "uuid", "type": "string" }, - "select_as_cta": { - "nullable": true, + "version": { + "type": "string" + } + }, + "required": ["database_name", "sqlalchemy_uri", "uuid", "version"], + "type": "object" + }, + "ImportV1DatabaseExtra": { + "properties": { + "allow_multi_catalog": { "type": "boolean" }, - "select_as_cta_used": { - "nullable": true, + "allows_virtual_table_explore": { "type": "boolean" }, - "select_sql": { - "nullable": true, - "type": "string" + "cancel_query_on_windows_unload": { + "type": "boolean" }, - "sql": { - "nullable": true, - "type": "string" + "cost_estimate_enabled": { + "type": "boolean" }, - "sql_editor_id": { - "maxLength": 256, - "nullable": true, - "type": "string" + "disable_data_preview": { + "type": "boolean" }, - "start_running_time": { - "nullable": true, - 
"type": "number" + "disable_drill_to_detail": { + "type": "boolean" }, - "start_time": { - "nullable": true, - "type": "number" + "engine_params": { + "additionalProperties": {}, + "type": "object" }, - "status": { - "maxLength": 16, - "nullable": true, - "type": "string" + "metadata_cache_timeout": { + "additionalProperties": { + "type": "integer" + }, + "type": "object" }, - "tab_name": { - "maxLength": 256, - "nullable": true, - "type": "string" + "metadata_params": { + "additionalProperties": {}, + "type": "object" }, - "tmp_schema_name": { - "maxLength": 256, - "nullable": true, - "type": "string" + "schemas_allowed_for_csv_upload": { + "items": { + "type": "string" + }, + "type": "array" }, - "tmp_table_name": { - "maxLength": 256, + "version": { "nullable": true, "type": "string" - }, - "tracking_url": { - "readOnly": true } }, - "required": ["client_id", "database"], "type": "object" }, - "QueryRestApi.get.Database": { + "LakehouseCatalog": { "properties": { - "id": { + "database_id": { + "description": "The numerical id of the database", "type": "integer" - } - }, - "type": "object" - }, - "QueryRestApi.get_list": { - "properties": { - "changed_on": { - "format": "date-time", - "type": "string" - }, - "database": { - "$ref": "#/components/schemas/Database1" - }, - "end_time": { - "type": "number" }, - "executed_sql": { + "database_name": { + "description": "The name of the database", "type": "string" }, "id": { + "description": "The numerical id of the catalog", "type": "integer" }, - "rows": { - "type": "integer" - }, - "schema": { + "name": { + "description": "The name of the catalog", "type": "string" }, - "sql": { + "sqlalchemy_uri": { + "description": "The SQLAlchemy URI", "type": "string" + } + }, + "type": "object" + }, + "LakehouseConnectRequestSchema": { + "properties": { + "catalogs": { + "description": "A list of catalogs to connect to. 
If empty or undefined, connect all available catalogs.", + "example": ["my_catalog"], + "items": { + "type": "string" + }, + "nullable": true, + "type": "array" }, - "sql_tables": { - "readOnly": true + "database_names": { + "description": "Custom catalog-database name mappings. If undefined, the default naming convention will be used", + "example": { + "my_catalog": "my_database" + }, + "nullable": true, + "type": "object" }, - "start_time": { - "type": "number" + "dry_run": { + "description": "Should the task be executed in dry run mode. Useful for testing.", + "example": false, + "type": "boolean" }, - "status": { + "engine_type": { + "description": "The type of the engine (only trino is currently supported)", + "enum": ["trino"], + "example": "trino", "type": "string" }, - "tab_name": { + "engine_url": { + "description": "The URL to the engine", + "example": "https://myengine.corp.apple.com", "type": "string" }, - "tmp_table_name": { + "environment": { + "description": "Prod or Int", + "enum": ["int", "prod"], + "example": "int", "type": "string" }, - "tracking_url": { + "name": { + "description": "The name of the Lakehouse (subject to change over time). 
If left undefined, use lakehouse id", + "example": "My Lakehouse", + "nullable": true, "type": "string" }, - "user": { - "$ref": "#/components/schemas/User" - } - }, - "type": "object" - }, - "QueryRestApi.post": { - "properties": { - "id": { - "type": "integer" + "superset_url": { + "description": "The URL of the Superset instance connecting to", + "example": "http://localhost:8088", + "type": "string" } }, + "required": [ + "engine_type", + "engine_url", + "environment", + "superset_url" + ], "type": "object" }, - "QueryRestApi.put": { + "LakehouseConnectResponseSchema": { "properties": { - "id": { - "type": "integer" + "task_id": { + "description": "The id of the connection task", + "type": "string" } }, "type": "object" }, - "QueryResult": { + "LakehouseConnectResultResponseSchema": { "properties": { - "changed_on": { - "format": "date-time", - "type": "string" - }, - "ctas": { - "type": "boolean" - }, - "db": { - "type": "string" - }, - "dbId": { - "type": "integer" - }, - "endDttm": { - "type": "number" + "existing_catalogs": { + "description": "A list of connected catalogs before the connect operation", + "items": { + "description": "Catalogs", + "items": { + "$ref": "#/components/schemas/LakehouseCatalog" + }, + "type": "array" + }, + "type": "array" }, - "errorMessage": { - "nullable": true, - "type": "string" + "lakehouse": { + "allOf": [ + { + "$ref": "#/components/schemas/LakehouseGetResponseSchema" + } + ], + "description": "The connected Lakehouse", + "nullable": true }, - "executedSql": { + "message": { + "description": "A message to provide additional context on what has happened", "type": "string" }, - "extra": { - "type": "object" + "new_catalogs": { + "description": "A list of newly connected catalogs after the connect operation", + "items": { + "description": "Catalogs", + "items": { + "$ref": "#/components/schemas/LakehouseCatalog" + }, + "type": "array" + }, + "type": "array" }, - "id": { + "status": { + "description": "The status of the 
task", + "enum": [ + "scheduled", + "running", + "completed", + "failed", + "reconnect_failed", + "cancelled" + ], "type": "string" - }, - "limit": { - "type": "integer" - }, - "limitingFactor": { + } + }, + "required": ["lakehouse"], + "type": "object" + }, + "LakehouseConnectStatusResponseSchema": { + "properties": { + "message": { + "description": "A message to provide additional context on what has happened", "type": "string" }, - "progress": { - "type": "integer" - }, - "queryId": { - "type": "integer" - }, - "resultsKey": { + "status": { + "description": "The status of the task", + "enum": [ + "scheduled", + "running", + "completed", + "failed", + "reconnect_failed", + "cancelled" + ], + "type": "string" + } + }, + "type": "object" + }, + "LakehouseDatasetListResponseSchema": { + "properties": { + "name": { + "description": "The name of the dataset", "type": "string" - }, - "rows": { - "type": "integer" }, "schema": { + "description": "The schema", + "nullable": true, "type": "string" }, - "serverId": { - "type": "integer" - }, "sql": { + "description": "The virtual dataset query", "type": "string" - }, - "sqlEditorId": { - "type": "string" - }, - "startDttm": { - "type": "number" - }, - "state": { + } + }, + "required": ["name", "sql"], + "type": "object" + }, + "LakehouseDatasetRequestSchema": { + "properties": { + "catalog": { + "description": "The catalog. 
If left empty, uses an arbitrary connected catalog.", + "nullable": true, "type": "string" }, - "tab": { + "lakehouse_id": { + "description": "Deprecated, not used for anything", "type": "string" }, - "tempSchema": { - "nullable": true, + "name": { + "description": "The name of the dataset to be created", "type": "string" }, - "tempTable": { - "nullable": true, - "type": "string" + "overwrite": { + "default": false, + "description": "Should the virtual dataset be overwritten if it already exists", + "type": "boolean" }, - "trackingUrl": { + "schema": { + "description": "The schema", "nullable": true, "type": "string" }, - "user": { + "sql": { + "description": "The virtual dataset query", "type": "string" - }, - "userId": { - "type": "integer" } }, + "required": ["name", "sql"], "type": "object" }, - "RLSRestApi.get": { + "LakehouseDatasetResponseSchema": { "properties": { - "clause": { - "description": "clause_description", - "type": "string" - }, - "description": { - "description": "description_description", - "type": "string" - }, - "filter_type": { - "description": "filter_type_description", - "enum": ["Regular", "Base"], - "type": "string" - }, - "group_key": { - "description": "group_key_description", - "type": "string" - }, - "id": { - "description": "id_description", - "type": "integer" - }, - "name": { - "description": "name_description", + "url": { + "description": "The url for exploring the virtual dataset", "type": "string" - }, - "roles": { - "items": { - "$ref": "#/components/schemas/Roles1" - }, - "type": "array" - }, - "tables": { + } + }, + "required": ["url"], + "type": "object" + }, + "LakehouseDisconnectResponseSchema": { + "properties": { + "lakehouse": { + "allOf": [ + { + "$ref": "#/components/schemas/LakehouseGetResponseSchema" + } + ], + "description": "A list of disconnected lakehouses" + } + }, + "type": "object" + }, + "LakehouseGetListResponseSchema": { + "properties": { + "lakehouses": { + "description": "A list of connected 
lakehouses", "items": { - "$ref": "#/components/schemas/Tables" + "$ref": "#/components/schemas/LakehouseGetResponseSchema" }, "type": "array" } }, "type": "object" }, - "RLSRestApi.get_list": { + "LakehouseGetResponseSchema": { "properties": { - "changed_on_delta_humanized": { - "readOnly": true - }, - "clause": { - "description": "clause_description", - "type": "string" - }, - "description": { - "description": "description_description", - "type": "string" - }, - "filter_type": { - "description": "filter_type_description", - "enum": ["Regular", "Base"], - "type": "string" + "catalogs": { + "description": "A list of connected catalogs", + "items": { + "description": "Catalogs", + "items": { + "$ref": "#/components/schemas/LakehouseCatalog" + }, + "type": "array" + }, + "type": "array" }, - "group_key": { - "description": "group_key_description", + "engine_url": { + "description": "The URL uf the engine", "type": "string" }, "id": { - "description": "id_description", - "type": "integer" + "description": "The unique id of the Lakehouse", + "type": "string" }, "name": { - "description": "name_description", "type": "string" }, - "roles": { - "items": { - "$ref": "#/components/schemas/Roles1" - }, - "type": "array" - }, - "tables": { - "items": { - "$ref": "#/components/schemas/Tables" - }, - "type": "array" + "superset_url": { + "description": "The URL uf the Superset instance", + "type": "string" } }, "type": "object" }, - "RLSRestApi.post": { + "LogRestApi.get": { "properties": { - "clause": { - "description": "clause_description", + "action": { + "maxLength": 512, + "nullable": true, "type": "string" }, - "description": { - "description": "description_description", + "dashboard_id": { "nullable": true, - "type": "string" + "type": "integer" }, - "filter_type": { - "description": "filter_type_description", - "enum": ["Regular", "Base"], + "dttm": { + "format": "date-time", + "nullable": true, "type": "string" }, - "group_key": { - "description": 
"group_key_description", + "duration_ms": { + "nullable": true, + "type": "integer" + }, + "json": { "nullable": true, "type": "string" }, - "name": { - "description": "name_description", - "maxLength": 255, - "minLength": 1, + "referrer": { + "maxLength": 1024, + "nullable": true, "type": "string" }, - "roles": { - "description": "roles_description", - "items": { - "type": "integer" - }, - "type": "array" + "slice_id": { + "nullable": true, + "type": "integer" }, - "tables": { - "description": "tables_description", - "items": { - "type": "integer" - }, - "minItems": 1, - "type": "array" + "user": { + "$ref": "#/components/schemas/LogRestApi.get.AppleUser" + }, + "user_id": { + "nullable": true, + "type": "integer" } }, - "required": ["clause", "filter_type", "name", "roles", "tables"], "type": "object" }, - "RLSRestApi.put": { + "LogRestApi.get.AppleUser": { "properties": { - "clause": { - "description": "clause_description", + "username": { + "maxLength": 64, "type": "string" - }, - "description": { - "description": "description_description", + } + }, + "required": ["username"], + "type": "object" + }, + "LogRestApi.get_list": { + "properties": { + "action": { + "maxLength": 512, "nullable": true, "type": "string" }, - "filter_type": { - "description": "filter_type_description", - "enum": ["Regular", "Base"], + "dashboard_id": { + "nullable": true, + "type": "integer" + }, + "dttm": { + "format": "date-time", + "nullable": true, "type": "string" }, - "group_key": { - "description": "group_key_description", + "duration_ms": { + "nullable": true, + "type": "integer" + }, + "json": { "nullable": true, "type": "string" }, - "name": { - "description": "name_description", - "maxLength": 255, - "minLength": 1, + "referrer": { + "maxLength": 1024, + "nullable": true, "type": "string" }, - "roles": { - "description": "roles_description", - "items": { - "type": "integer" - }, - "type": "array" + "slice_id": { + "nullable": true, + "type": "integer" }, - "tables": { - 
"description": "tables_description", - "items": { - "type": "integer" - }, - "type": "array" + "user": { + "$ref": "#/components/schemas/LogRestApi.get_list.AppleUser" + }, + "user_id": { + "nullable": true, + "type": "integer" } }, "type": "object" }, - "RecentActivity": { + "LogRestApi.get_list.AppleUser": { "properties": { - "action": { - "description": "Action taken describing type of activity", - "type": "string" - }, - "item_title": { - "description": "Title of item", - "type": "string" - }, - "item_type": { - "description": "Type of item, e.g. slice or dashboard", - "type": "string" - }, - "item_url": { - "description": "URL to item", - "type": "string" - }, - "time": { - "description": "Time of activity, in epoch milliseconds", - "type": "number" - }, - "time_delta_humanized": { - "description": "Human-readable description of how long ago activity took place.", + "username": { + "maxLength": 64, "type": "string" } }, + "required": ["username"], "type": "object" }, - "RecentActivityResponseSchema": { + "LogRestApi.post": { "properties": { - "result": { - "description": "A list of recent activity objects", - "items": { - "$ref": "#/components/schemas/RecentActivity" - }, - "type": "array" + "id": { + "type": "integer" } }, "type": "object" }, - "RecentActivitySchema": { + "LogRestApi.put": { "properties": { "action": { - "description": "Action taken describing type of activity", - "type": "string" - }, - "item_title": { - "description": "Title of item", + "maxLength": 512, + "nullable": true, "type": "string" }, - "item_type": { - "description": "Type of item, e.g. 
slice or dashboard", + "dttm": { + "format": "date-time", + "nullable": true, "type": "string" }, - "item_url": { - "description": "URL to item", + "json": { + "nullable": true, "type": "string" }, - "time": { - "description": "Time of activity, in epoch milliseconds", - "type": "number" - }, - "time_delta_humanized": { - "description": "Human-readable description of how long ago activity took place.", - "type": "string" + "user": { + "nullable": true } }, "type": "object" }, - "RelatedResponseSchema": { + "QueryExecutionResponseSchema": { "properties": { - "count": { - "description": "The total number of related values", - "type": "integer" + "columns": { + "items": { + "type": "object" + }, + "type": "array" }, - "result": { + "data": { "items": { - "$ref": "#/components/schemas/RelatedResultResponse" + "type": "object" }, "type": "array" - } - }, - "type": "object" - }, - "RelatedResultResponse": { - "properties": { - "extra": { - "description": "The extra metadata for related item", - "type": "object" }, - "text": { - "description": "The related item string representation", - "type": "string" + "expanded_columns": { + "items": { + "type": "object" + }, + "type": "array" }, - "value": { - "description": "The related item identifier", + "query": { + "$ref": "#/components/schemas/QueryResult" + }, + "query_id": { "type": "integer" + }, + "selected_columns": { + "items": { + "type": "object" + }, + "type": "array" + }, + "status": { + "type": "string" } }, "type": "object" }, - "ReportExecutionLogRestApi.get": { + "QueryRestApi.get": { "properties": { - "end_dttm": { + "changed_on": { "format": "date-time", "nullable": true, "type": "string" }, - "error_message": { - "nullable": true, + "client_id": { + "maxLength": 11, "type": "string" }, - "id": { - "type": "integer" + "database": { + "$ref": "#/components/schemas/QueryRestApi.get.Database" }, - "scheduled_dttm": { - "format": "date-time", - "type": "string" + "end_result_backend_time": { + "nullable": true, + 
"type": "number" }, - "start_dttm": { - "format": "date-time", + "end_time": { "nullable": true, - "type": "string" + "type": "number" }, - "state": { - "maxLength": 50, + "error_message": { + "nullable": true, "type": "string" }, - "uuid": { - "format": "uuid", + "executed_sql": { "nullable": true, "type": "string" }, - "value": { - "nullable": true, - "type": "number" + "id": { + "type": "integer" }, - "value_row_json": { + "limit": { "nullable": true, - "type": "string" - } - }, - "required": ["scheduled_dttm", "state"], - "type": "object" - }, - "ReportExecutionLogRestApi.get_list": { - "properties": { - "end_dttm": { - "format": "date-time", + "type": "integer" + }, + "progress": { "nullable": true, - "type": "string" + "type": "integer" }, - "error_message": { + "results_key": { + "maxLength": 64, "nullable": true, "type": "string" }, - "id": { + "rows": { + "nullable": true, "type": "integer" }, - "scheduled_dttm": { - "format": "date-time", + "schema": { + "maxLength": 256, + "nullable": true, "type": "string" }, - "start_dttm": { - "format": "date-time", + "select_as_cta": { + "nullable": true, + "type": "boolean" + }, + "select_as_cta_used": { + "nullable": true, + "type": "boolean" + }, + "select_sql": { "nullable": true, "type": "string" }, - "state": { - "maxLength": 50, + "sql": { + "nullable": true, "type": "string" }, - "uuid": { - "format": "uuid", + "sql_editor_id": { + "maxLength": 256, "nullable": true, "type": "string" }, - "value": { + "start_running_time": { "nullable": true, "type": "number" }, - "value_row_json": { + "start_time": { + "nullable": true, + "type": "number" + }, + "status": { + "maxLength": 16, "nullable": true, "type": "string" - } + }, + "tab_name": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, + "tmp_schema_name": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, + "tmp_table_name": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, + "tracking_url": { + "readOnly": true + 
} }, - "required": ["scheduled_dttm", "state"], + "required": ["client_id", "database"], "type": "object" }, - "ReportExecutionLogRestApi.post": { + "QueryRestApi.get.Database": { "properties": { "id": { "type": "integer" @@ -6669,505 +7107,1058 @@ }, "type": "object" }, - "ReportExecutionLogRestApi.put": { + "QueryRestApi.get_list": { "properties": { + "changed_on": { + "format": "date-time", + "type": "string" + }, + "database": { + "$ref": "#/components/schemas/Database1" + }, + "end_time": { + "type": "number" + }, + "executed_sql": { + "type": "string" + }, "id": { "type": "integer" + }, + "rows": { + "type": "integer" + }, + "schema": { + "type": "string" + }, + "sql": { + "type": "string" + }, + "sql_tables": { + "readOnly": true + }, + "start_time": { + "type": "number" + }, + "status": { + "type": "string" + }, + "tab_name": { + "type": "string" + }, + "tmp_table_name": { + "type": "string" + }, + "tracking_url": { + "type": "string" + }, + "user": { + "$ref": "#/components/schemas/User" } }, "type": "object" }, - "ReportRecipient": { + "QueryRestApi.post": { "properties": { - "recipient_config_json": { - "$ref": "#/components/schemas/ReportRecipientConfigJSON" - }, - "type": { - "description": "The recipient type, check spec for valid options", - "enum": ["Email", "Slack"], - "type": "string" + "id": { + "type": "integer" } }, - "required": ["type"], "type": "object" }, - "ReportRecipientConfigJSON": { + "QueryRestApi.put": { "properties": { - "target": { - "type": "string" + "id": { + "type": "integer" } }, "type": "object" }, - "ReportScheduleRestApi.get": { + "QueryResult": { "properties": { - "active": { - "nullable": true, - "type": "boolean" - }, - "chart": { - "$ref": "#/components/schemas/ReportScheduleRestApi.get.Slice" - }, - "context_markdown": { - "nullable": true, + "changed_on": { + "format": "date-time", "type": "string" }, - "creation_method": { - "maxLength": 255, - "nullable": true, - "type": "string" + "ctas": { + "type": "boolean" }, - 
"crontab": { - "maxLength": 1000, + "db": { "type": "string" }, - "custom_width": { - "nullable": true, + "dbId": { "type": "integer" }, - "dashboard": { - "$ref": "#/components/schemas/ReportScheduleRestApi.get.Dashboard" - }, - "database": { - "$ref": "#/components/schemas/ReportScheduleRestApi.get.Database" + "endDttm": { + "type": "number" }, - "description": { + "errorMessage": { "nullable": true, "type": "string" }, + "executedSql": { + "type": "string" + }, "extra": { - "readOnly": true + "type": "object" }, - "force_screenshot": { - "nullable": true, - "type": "boolean" + "id": { + "type": "string" }, - "grace_period": { - "nullable": true, + "limit": { "type": "integer" }, - "id": { + "limitingFactor": { + "type": "string" + }, + "progress": { "type": "integer" }, - "last_eval_dttm": { - "format": "date-time", - "nullable": true, - "type": "string" + "queryId": { + "type": "integer" }, - "last_state": { - "maxLength": 50, - "nullable": true, + "resultsKey": { "type": "string" }, - "last_value": { - "nullable": true, - "type": "number" + "rows": { + "type": "integer" }, - "last_value_row_json": { - "nullable": true, + "schema": { "type": "string" }, - "log_retention": { - "nullable": true, + "serverId": { "type": "integer" }, - "name": { - "maxLength": 150, + "sql": { "type": "string" }, - "owners": { - "$ref": "#/components/schemas/ReportScheduleRestApi.get.User" + "sqlEditorId": { + "type": "string" }, - "recipients": { - "$ref": "#/components/schemas/ReportScheduleRestApi.get.ReportRecipients" + "startDttm": { + "type": "number" }, - "report_format": { - "maxLength": 50, - "nullable": true, + "state": { "type": "string" }, - "sql": { - "nullable": true, + "tab": { "type": "string" }, - "timezone": { - "maxLength": 100, + "tempSchema": { + "nullable": true, "type": "string" }, - "type": { - "maxLength": 50, + "tempTable": { + "nullable": true, "type": "string" }, - "validator_config_json": { + "trackingUrl": { "nullable": true, "type": "string" }, - 
"validator_type": { - "maxLength": 100, - "nullable": true, + "user": { "type": "string" }, - "working_timeout": { - "nullable": true, + "userId": { "type": "integer" } }, - "required": ["crontab", "name", "recipients", "type"], "type": "object" }, - "ReportScheduleRestApi.get.Dashboard": { + "RLSRestApi.get": { "properties": { - "dashboard_title": { - "maxLength": 500, - "nullable": true, + "clause": { + "description": "clause_description", "type": "string" }, - "id": { - "type": "integer" - } - }, - "type": "object" - }, - "ReportScheduleRestApi.get.Database": { - "properties": { - "database_name": { - "maxLength": 250, + "description": { + "description": "description_description", + "type": "string" + }, + "filter_type": { + "description": "filter_type_description", + "enum": ["Regular", "Base"], + "type": "string" + }, + "group_key": { + "description": "group_key_description", "type": "string" }, "id": { - "type": "integer" - } - }, - "required": ["database_name"], - "type": "object" - }, - "ReportScheduleRestApi.get.ReportRecipients": { - "properties": { - "id": { + "description": "id_description", "type": "integer" }, - "recipient_config_json": { - "nullable": true, + "name": { + "description": "name_description", "type": "string" }, - "type": { - "maxLength": 50, - "type": "string" + "roles": { + "items": { + "$ref": "#/components/schemas/Roles1" + }, + "type": "array" + }, + "tables": { + "items": { + "$ref": "#/components/schemas/Tables" + }, + "type": "array" } }, - "required": ["type"], "type": "object" }, - "ReportScheduleRestApi.get.Slice": { + "RLSRestApi.get_list": { "properties": { - "id": { - "type": "integer" + "changed_by": { + "$ref": "#/components/schemas/User" }, - "slice_name": { - "maxLength": 250, - "nullable": true, + "changed_on_delta_humanized": { + "readOnly": true + }, + "clause": { + "description": "clause_description", "type": "string" }, - "viz_type": { - "maxLength": 250, - "nullable": true, + "description": { + "description": 
"description_description", "type": "string" - } - }, - "type": "object" - }, - "ReportScheduleRestApi.get.User": { - "properties": { - "first_name": { - "maxLength": 64, + }, + "filter_type": { + "description": "filter_type_description", + "enum": ["Regular", "Base"], + "type": "string" + }, + "group_key": { + "description": "group_key_description", "type": "string" }, "id": { + "description": "id_description", "type": "integer" }, - "last_name": { - "maxLength": 64, + "name": { + "description": "name_description", "type": "string" + }, + "roles": { + "items": { + "$ref": "#/components/schemas/Roles1" + }, + "type": "array" + }, + "tables": { + "items": { + "$ref": "#/components/schemas/Tables" + }, + "type": "array" } }, - "required": ["first_name", "last_name"], "type": "object" }, - "ReportScheduleRestApi.get_list": { + "RLSRestApi.post": { "properties": { - "active": { - "nullable": true, - "type": "boolean" - }, - "changed_by": { - "$ref": "#/components/schemas/ReportScheduleRestApi.get_list.User2" - }, - "changed_on": { - "format": "date-time", - "nullable": true, + "clause": { + "description": "clause_description", "type": "string" }, - "changed_on_delta_humanized": { - "readOnly": true - }, - "chart_id": { + "description": { + "description": "description_description", "nullable": true, - "type": "integer" - }, - "created_by": { - "$ref": "#/components/schemas/ReportScheduleRestApi.get_list.User" + "type": "string" }, - "created_on": { - "format": "date-time", - "nullable": true, + "filter_type": { + "description": "filter_type_description", + "enum": ["Regular", "Base"], "type": "string" }, - "creation_method": { - "maxLength": 255, + "group_key": { + "description": "group_key_description", "nullable": true, "type": "string" }, - "crontab": { - "maxLength": 1000, + "name": { + "description": "name_description", + "maxLength": 255, + "minLength": 1, "type": "string" }, - "crontab_humanized": { - "readOnly": true + "roles": { + "description": 
"roles_description", + "items": { + "type": "integer" + }, + "type": "array" }, - "dashboard_id": { - "nullable": true, - "type": "integer" + "tables": { + "description": "tables_description", + "items": { + "type": "integer" + }, + "minItems": 1, + "type": "array" + } + }, + "required": ["clause", "filter_type", "name", "roles", "tables"], + "type": "object" + }, + "RLSRestApi.put": { + "properties": { + "clause": { + "description": "clause_description", + "type": "string" }, "description": { + "description": "description_description", "nullable": true, "type": "string" }, - "extra": { - "readOnly": true - }, - "id": { - "type": "integer" - }, - "last_eval_dttm": { - "format": "date-time", - "nullable": true, + "filter_type": { + "description": "filter_type_description", + "enum": ["Regular", "Base"], "type": "string" }, - "last_state": { - "maxLength": 50, + "group_key": { + "description": "group_key_description", "nullable": true, "type": "string" }, "name": { - "maxLength": 150, + "description": "name_description", + "maxLength": 255, + "minLength": 1, "type": "string" }, - "owners": { - "$ref": "#/components/schemas/ReportScheduleRestApi.get_list.User1" - }, - "recipients": { - "$ref": "#/components/schemas/ReportScheduleRestApi.get_list.ReportRecipients" - }, - "timezone": { - "maxLength": 100, - "type": "string" + "roles": { + "description": "roles_description", + "items": { + "type": "integer" + }, + "type": "array" }, - "type": { - "maxLength": 50, - "type": "string" + "tables": { + "description": "tables_description", + "items": { + "type": "integer" + }, + "type": "array" } }, - "required": ["crontab", "name", "recipients", "type"], "type": "object" }, - "ReportScheduleRestApi.get_list.ReportRecipients": { + "RecentActivity": { "properties": { - "id": { - "type": "integer" + "action": { + "description": "Action taken describing type of activity", + "type": "string" }, - "type": { - "maxLength": 50, + "item_title": { + "description": "Title of item", + 
"type": "string" + }, + "item_type": { + "description": "Type of item, e.g. slice or dashboard", + "type": "string" + }, + "item_url": { + "description": "URL to item", + "type": "string" + }, + "time": { + "description": "Time of activity, in epoch milliseconds", + "type": "number" + }, + "time_delta_humanized": { + "description": "Human-readable description of how long ago activity took place.", "type": "string" } }, - "required": ["type"], "type": "object" }, - "ReportScheduleRestApi.get_list.User": { + "RecentActivityResponseSchema": { "properties": { - "first_name": { - "maxLength": 64, - "type": "string" - }, - "last_name": { - "maxLength": 64, - "type": "string" + "result": { + "description": "A list of recent activity objects", + "items": { + "$ref": "#/components/schemas/RecentActivity" + }, + "type": "array" } }, - "required": ["first_name", "last_name"], "type": "object" }, - "ReportScheduleRestApi.get_list.User1": { + "RecentActivitySchema": { "properties": { - "first_name": { - "maxLength": 64, + "action": { + "description": "Action taken describing type of activity", "type": "string" }, - "id": { - "type": "integer" + "item_title": { + "description": "Title of item", + "type": "string" }, - "last_name": { - "maxLength": 64, + "item_type": { + "description": "Type of item, e.g. 
slice or dashboard", + "type": "string" + }, + "item_url": { + "description": "URL to item", + "type": "string" + }, + "time": { + "description": "Time of activity, in epoch milliseconds", + "type": "number" + }, + "time_delta_humanized": { + "description": "Human-readable description of how long ago activity took place.", "type": "string" } }, - "required": ["first_name", "last_name"], "type": "object" }, - "ReportScheduleRestApi.get_list.User2": { + "RelatedResponseSchema": { "properties": { - "first_name": { - "maxLength": 64, - "type": "string" + "count": { + "description": "The total number of related values", + "type": "integer" }, - "last_name": { - "maxLength": 64, - "type": "string" + "result": { + "items": { + "$ref": "#/components/schemas/RelatedResultResponse" + }, + "type": "array" } }, - "required": ["first_name", "last_name"], "type": "object" }, - "ReportScheduleRestApi.post": { + "RelatedResultResponse": { "properties": { - "active": { - "type": "boolean" + "extra": { + "description": "The extra metadata for related item", + "type": "object" }, - "chart": { + "text": { + "description": "The related item string representation", + "type": "string" + }, + "value": { + "description": "The related item identifier", + "type": "integer" + } + }, + "type": "object" + }, + "ReportExecutionLogRestApi.get": { + "properties": { + "end_dttm": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "error_message": { + "nullable": true, + "type": "string" + }, + "id": { + "type": "integer" + }, + "scheduled_dttm": { + "format": "date-time", + "type": "string" + }, + "start_dttm": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "state": { + "maxLength": 50, + "type": "string" + }, + "uuid": { + "format": "uuid", + "nullable": true, + "type": "string" + }, + "value": { + "nullable": true, + "type": "number" + }, + "value_row_json": { + "nullable": true, + "type": "string" + } + }, + "required": ["scheduled_dttm", "state"], 
+ "type": "object" + }, + "ReportExecutionLogRestApi.get_list": { + "properties": { + "end_dttm": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "error_message": { + "nullable": true, + "type": "string" + }, + "id": { + "type": "integer" + }, + "scheduled_dttm": { + "format": "date-time", + "type": "string" + }, + "start_dttm": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "state": { + "maxLength": 50, + "type": "string" + }, + "uuid": { + "format": "uuid", + "nullable": true, + "type": "string" + }, + "value": { "nullable": true, + "type": "number" + }, + "value_row_json": { + "nullable": true, + "type": "string" + } + }, + "required": ["scheduled_dttm", "state"], + "type": "object" + }, + "ReportExecutionLogRestApi.post": { + "properties": { + "id": { + "type": "integer" + } + }, + "type": "object" + }, + "ReportExecutionLogRestApi.put": { + "properties": { + "id": { "type": "integer" + } + }, + "type": "object" + }, + "ReportRecipient": { + "properties": { + "recipient_config_json": { + "$ref": "#/components/schemas/ReportRecipientConfigJSON" + }, + "type": { + "description": "The recipient type, check spec for valid options", + "enum": ["Email", "Slack", "SlackV2"], + "type": "string" + } + }, + "required": ["type"], + "type": "object" + }, + "ReportRecipientConfigJSON": { + "properties": { + "bccTarget": { + "type": "string" + }, + "ccTarget": { + "type": "string" + }, + "target": { + "type": "string" + } + }, + "type": "object" + }, + "ReportScheduleRestApi.get": { + "properties": { + "active": { + "nullable": true, + "type": "boolean" + }, + "chart": { + "$ref": "#/components/schemas/ReportScheduleRestApi.get.Slice" }, "context_markdown": { - "description": "Markdown description", "nullable": true, "type": "string" }, "creation_method": { - "description": "Creation method is used to inform the frontend whether the report/alert was created in the dashboard, chart, or alerts and reports UI.", - "enum": 
["charts", "dashboards", "alerts_reports"] + "maxLength": 255, + "nullable": true, + "type": "string" }, "crontab": { - "description": "A CRON expression.[Crontab Guru](https://crontab.guru/) is a helpful resource that can help you craft a CRON expression.", - "example": "*/5 * * * *", "maxLength": 1000, - "minLength": 1, "type": "string" }, "custom_width": { - "description": "Custom width of the screenshot in pixels", - "example": 1000, "nullable": true, "type": "integer" }, "dashboard": { - "nullable": true, - "type": "integer" + "$ref": "#/components/schemas/ReportScheduleRestApi.get.Dashboard" }, "database": { - "type": "integer" + "$ref": "#/components/schemas/ReportScheduleRestApi.get.Database" }, "description": { - "description": "Use a nice description to give context to this Alert/Report", - "example": "Daily sales dashboard to marketing", + "nullable": true, + "type": "string" + }, + "email_subject": { + "maxLength": 255, "nullable": true, "type": "string" }, "extra": { - "type": "object" + "readOnly": true }, "force_screenshot": { + "nullable": true, "type": "boolean" }, "grace_period": { - "description": "Once an alert is triggered, how long, in seconds, before Superset nags you again. 
(in seconds)", - "example": 14400, - "minimum": 1, + "nullable": true, + "type": "integer" + }, + "id": { "type": "integer" }, + "last_eval_dttm": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "last_state": { + "maxLength": 50, + "nullable": true, + "type": "string" + }, + "last_value": { + "nullable": true, + "type": "number" + }, + "last_value_row_json": { + "nullable": true, + "type": "string" + }, "log_retention": { - "description": "How long to keep the logs around for this report (in days)", - "example": 90, - "minimum": 1, + "nullable": true, "type": "integer" }, "name": { - "description": "The report schedule name.", - "example": "Daily dashboard email", "maxLength": 150, - "minLength": 1, "type": "string" }, "owners": { - "items": { - "description": "Owner are users ids allowed to delete or change this report. If left empty you will be one of the owners of the report.", - "type": "integer" - }, - "type": "array" + "$ref": "#/components/schemas/ReportScheduleRestApi.get.AppleUser" }, "recipients": { - "items": { - "$ref": "#/components/schemas/ReportRecipient" - }, - "type": "array" + "$ref": "#/components/schemas/ReportScheduleRestApi.get.ReportRecipients" }, "report_format": { - "enum": ["PNG", "CSV", "TEXT"], - "type": "string" - }, - "selected_tabs": { - "items": { - "type": "integer" - }, + "maxLength": 50, "nullable": true, - "type": "array" + "type": "string" }, "sql": { - "description": "A SQL statement that defines whether the alert should get triggered or not. 
The query is expected to return either NULL or a number value.", - "example": "SELECT value FROM time_series_table", + "nullable": true, "type": "string" }, "timezone": { - "description": "A timezone string that represents the location of the timezone.", - "enum": [ - "Africa/Abidjan", - "Africa/Accra", - "Africa/Addis_Ababa", - "Africa/Algiers", - "Africa/Asmara", - "Africa/Asmera", - "Africa/Bamako", - "Africa/Bangui", - "Africa/Banjul", - "Africa/Bissau", - "Africa/Blantyre", - "Africa/Brazzaville", - "Africa/Bujumbura", - "Africa/Cairo", - "Africa/Casablanca", - "Africa/Ceuta", - "Africa/Conakry", - "Africa/Dakar", - "Africa/Dar_es_Salaam", - "Africa/Djibouti", - "Africa/Douala", - "Africa/El_Aaiun", - "Africa/Freetown", - "Africa/Gaborone", - "Africa/Harare", - "Africa/Johannesburg", - "Africa/Juba", - "Africa/Kampala", - "Africa/Khartoum", + "maxLength": 100, + "type": "string" + }, + "type": { + "maxLength": 50, + "type": "string" + }, + "validator_config_json": { + "nullable": true, + "type": "string" + }, + "validator_type": { + "maxLength": 100, + "nullable": true, + "type": "string" + }, + "working_timeout": { + "nullable": true, + "type": "integer" + } + }, + "required": ["crontab", "name", "recipients", "type"], + "type": "object" + }, + "ReportScheduleRestApi.get.AppleUser": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, + "ReportScheduleRestApi.get.Dashboard": { + "properties": { + "dashboard_title": { + "maxLength": 500, + "nullable": true, + "type": "string" + }, + "id": { + "type": "integer" + } + }, + "type": "object" + }, + "ReportScheduleRestApi.get.Database": { + "properties": { + "database_name": { + "maxLength": 250, + "type": "string" + }, + "id": { + "type": "integer" + } + }, + "required": ["database_name"], + "type": "object" + }, + 
"ReportScheduleRestApi.get.ReportRecipients": { + "properties": { + "id": { + "type": "integer" + }, + "recipient_config_json": { + "nullable": true, + "type": "string" + }, + "type": { + "maxLength": 50, + "type": "string" + } + }, + "required": ["type"], + "type": "object" + }, + "ReportScheduleRestApi.get.Slice": { + "properties": { + "id": { + "type": "integer" + }, + "slice_name": { + "maxLength": 250, + "nullable": true, + "type": "string" + }, + "viz_type": { + "maxLength": 250, + "nullable": true, + "type": "string" + } + }, + "type": "object" + }, + "ReportScheduleRestApi.get_list": { + "properties": { + "active": { + "nullable": true, + "type": "boolean" + }, + "changed_by": { + "$ref": "#/components/schemas/ReportScheduleRestApi.get_list.User" + }, + "changed_on": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "changed_on_delta_humanized": { + "readOnly": true + }, + "chart_id": { + "nullable": true, + "type": "integer" + }, + "created_by": { + "$ref": "#/components/schemas/ReportScheduleRestApi.get_list.User1" + }, + "created_on": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "creation_method": { + "maxLength": 255, + "nullable": true, + "type": "string" + }, + "crontab": { + "maxLength": 1000, + "type": "string" + }, + "crontab_humanized": { + "readOnly": true + }, + "dashboard_id": { + "nullable": true, + "type": "integer" + }, + "description": { + "nullable": true, + "type": "string" + }, + "extra": { + "readOnly": true + }, + "id": { + "type": "integer" + }, + "last_eval_dttm": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "last_state": { + "maxLength": 50, + "nullable": true, + "type": "string" + }, + "name": { + "maxLength": 150, + "type": "string" + }, + "owners": { + "$ref": "#/components/schemas/ReportScheduleRestApi.get_list.AppleUser" + }, + "recipients": { + "$ref": "#/components/schemas/ReportScheduleRestApi.get_list.ReportRecipients" + }, + "timezone": { + 
"maxLength": 100, + "type": "string" + }, + "type": { + "maxLength": 50, + "type": "string" + } + }, + "required": ["crontab", "name", "recipients", "type"], + "type": "object" + }, + "ReportScheduleRestApi.get_list.AppleUser": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, + "ReportScheduleRestApi.get_list.ReportRecipients": { + "properties": { + "id": { + "type": "integer" + }, + "type": { + "maxLength": 50, + "type": "string" + } + }, + "required": ["type"], + "type": "object" + }, + "ReportScheduleRestApi.get_list.User": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, + "ReportScheduleRestApi.get_list.User1": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, + "ReportScheduleRestApi.post": { + "properties": { + "active": { + "type": "boolean" + }, + "chart": { + "nullable": true, + "type": "integer" + }, + "context_markdown": { + "description": "Markdown description", + "nullable": true, + "type": "string" + }, + "creation_method": { + "description": "Creation method is used to inform the frontend whether the report/alert was created in the dashboard, chart, or alerts and reports UI.", + "enum": ["charts", "dashboards", "alerts_reports"] + }, + "crontab": { + "description": "A CRON expression.[Crontab Guru](https://crontab.guru/) is a helpful resource that can help you craft a CRON expression.", + "example": "*/5 * * * *", + "maxLength": 1000, + "minLength": 1, + "type": "string" + }, + "custom_width": { + "description": "Custom width of the 
screenshot in pixels", + "example": 1000, + "nullable": true, + "type": "integer" + }, + "dashboard": { + "nullable": true, + "type": "integer" + }, + "database": { + "type": "integer" + }, + "description": { + "description": "Use a nice description to give context to this Alert/Report", + "example": "Daily sales dashboard to marketing", + "nullable": true, + "type": "string" + }, + "email_subject": { + "description": "The report schedule subject line", + "example": "[Report] Report name: Dashboard or chart name", + "nullable": true, + "type": "string" + }, + "extra": { + "type": "object" + }, + "force_screenshot": { + "type": "boolean" + }, + "grace_period": { + "description": "Once an alert is triggered, how long, in seconds, before Superset nags you again. (in seconds)", + "example": 14400, + "minimum": 1, + "type": "integer" + }, + "log_retention": { + "description": "How long to keep the logs around for this report (in days)", + "example": 90, + "minimum": 1, + "type": "integer" + }, + "name": { + "description": "The report schedule name.", + "example": "Daily dashboard email", + "maxLength": 150, + "minLength": 1, + "type": "string" + }, + "owners": { + "items": { + "description": "Owner are users ids allowed to delete or change this report. If left empty you will be one of the owners of the report.", + "type": "integer" + }, + "type": "array" + }, + "recipients": { + "items": { + "$ref": "#/components/schemas/ReportRecipient" + }, + "type": "array" + }, + "report_format": { + "enum": ["PDF", "PNG", "CSV", "TEXT"], + "type": "string" + }, + "selected_tabs": { + "items": { + "type": "integer" + }, + "nullable": true, + "type": "array" + }, + "sql": { + "description": "A SQL statement that defines whether the alert should get triggered or not. 
The query is expected to return either NULL or a number value.", + "example": "SELECT value FROM time_series_table", + "type": "string" + }, + "timezone": { + "description": "A timezone string that represents the location of the timezone.", + "enum": [ + "Africa/Abidjan", + "Africa/Accra", + "Africa/Addis_Ababa", + "Africa/Algiers", + "Africa/Asmara", + "Africa/Asmera", + "Africa/Bamako", + "Africa/Bangui", + "Africa/Banjul", + "Africa/Bissau", + "Africa/Blantyre", + "Africa/Brazzaville", + "Africa/Bujumbura", + "Africa/Cairo", + "Africa/Casablanca", + "Africa/Ceuta", + "Africa/Conakry", + "Africa/Dakar", + "Africa/Dar_es_Salaam", + "Africa/Djibouti", + "Africa/Douala", + "Africa/El_Aaiun", + "Africa/Freetown", + "Africa/Gaborone", + "Africa/Harare", + "Africa/Johannesburg", + "Africa/Juba", + "Africa/Kampala", + "Africa/Khartoum", "Africa/Kigali", "Africa/Kinshasa", "Africa/Lagos", @@ -7234,6 +8225,7 @@ "America/Cayman", "America/Chicago", "America/Chihuahua", + "America/Ciudad_Juarez", "America/Coral_Harbour", "America/Cordoba", "America/Costa_Rica", @@ -7586,8 +8578,9 @@ "Europe/Istanbul", "Europe/Jersey", "Europe/Kaliningrad", - "Europe/Kyiv", + "Europe/Kiev", "Europe/Kirov", + "Europe/Kyiv", "Europe/Lisbon", "Europe/Ljubljana", "Europe/London", @@ -7750,7 +8743,7 @@ "type": "string" }, "working_timeout": { - "description": "If an alert is staled at a working state, how long until it's state is reseted to error", + "description": "If an alert is staled at a working state, how long until it's state is reset to error", "example": 3600, "minimum": 1, "type": "integer" @@ -7803,6 +8796,12 @@ "nullable": true, "type": "string" }, + "email_subject": { + "description": "The report schedule subject line", + "example": "[Report] Report name: Dashboard or chart name", + "nullable": true, + "type": "string" + }, "extra": { "type": "object" }, @@ -7818,7 +8817,7 @@ "log_retention": { "description": "How long to keep the logs around for this report (in days)", "example": 
90, - "minimum": 1, + "minimum": 0, "type": "integer" }, "name": { @@ -7841,7 +8840,7 @@ "type": "array" }, "report_format": { - "enum": ["PNG", "CSV", "TEXT"], + "enum": ["PDF", "PNG", "CSV", "TEXT"], "type": "string" }, "sql": { @@ -7948,6 +8947,7 @@ "America/Cayman", "America/Chicago", "America/Chihuahua", + "America/Ciudad_Juarez", "America/Coral_Harbour", "America/Cordoba", "America/Costa_Rica", @@ -8300,8 +9300,9 @@ "Europe/Istanbul", "Europe/Jersey", "Europe/Kaliningrad", - "Europe/Kyiv", + "Europe/Kiev", "Europe/Kirov", + "Europe/Kyiv", "Europe/Lisbon", "Europe/Ljubljana", "Europe/London", @@ -8450,1285 +9451,3199 @@ ], "type": "string" }, - "type": { - "description": "The report schedule type", - "enum": ["Alert", "Report"], - "type": "string" + "type": { + "description": "The report schedule type", + "enum": ["Alert", "Report"], + "type": "string" + }, + "validator_config_json": { + "$ref": "#/components/schemas/ValidatorConfigJSON" + }, + "validator_type": { + "description": "Determines when to trigger alert based off value from alert query. Alerts will be triggered with these validator types:\n- Not Null - When the return value is Not NULL, Empty, or 0\n- Operator - When `sql_return_value comparison_operator threshold` is True e.g. 
`50 <= 75`<br>Supports the comparison operators <, <=, >, >=, ==, and !=", + "enum": ["not null", "operator"], + "nullable": true, + "type": "string" + }, + "working_timeout": { + "description": "If an alert is staled at a working state, how long until it's state is reset to error", + "example": 3600, + "minimum": 1, + "nullable": true, + "type": "integer" + } + }, + "type": "object" + }, + "Resource": { + "properties": { + "id": { + "type": "string" + }, + "type": { + "enum": ["dashboard"] + } + }, + "required": ["id", "type"], + "type": "object" + }, + "RlsRule": { + "properties": { + "clause": { + "type": "string" + }, + "dataset": { + "type": "integer" + } + }, + "required": ["clause"], + "type": "object" + }, + "Roles": { + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + } + }, + "type": "object" + }, + "Roles1": { + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + } + }, + "type": "object" + }, + "SQLLabBootstrapSchema": { + "properties": { + "active_tab": { + "$ref": "#/components/schemas/TabState" + }, + "databases": { + "additionalProperties": { + "$ref": "#/components/schemas/ImportV1Database" + }, + "type": "object" + }, + "queries": { + "additionalProperties": { + "$ref": "#/components/schemas/QueryResult" + }, + "type": "object" + }, + "tab_state_ids": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "SavedQueryRestApi.get": { + "properties": { + "catalog": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, + "changed_by": { + "$ref": "#/components/schemas/SavedQueryRestApi.get.User" + }, + "changed_on": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "changed_on_delta_humanized": { + "readOnly": true + }, + "created_by": { + "$ref": "#/components/schemas/SavedQueryRestApi.get.User1" + }, + "database": { + "$ref": "#/components/schemas/SavedQueryRestApi.get.Database" + }, + "description": { + 
"nullable": true, + "type": "string" + }, + "id": { + "type": "integer" + }, + "label": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, + "schema": { + "maxLength": 128, + "nullable": true, + "type": "string" + }, + "sql": { + "nullable": true, + "type": "string" + }, + "sql_tables": { + "readOnly": true + }, + "template_parameters": { + "nullable": true, + "type": "string" + } + }, + "type": "object" + }, + "SavedQueryRestApi.get.Database": { + "properties": { + "database_name": { + "maxLength": 250, + "type": "string" + }, + "id": { + "type": "integer" + } + }, + "required": ["database_name"], + "type": "object" + }, + "SavedQueryRestApi.get.User": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, + "SavedQueryRestApi.get.User1": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, + "SavedQueryRestApi.get_list": { + "properties": { + "catalog": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, + "changed_by": { + "$ref": "#/components/schemas/SavedQueryRestApi.get_list.User" + }, + "changed_on": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "changed_on_delta_humanized": { + "readOnly": true + }, + "created_by": { + "$ref": "#/components/schemas/SavedQueryRestApi.get_list.User1" + }, + "created_on": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "database": { + "$ref": "#/components/schemas/SavedQueryRestApi.get_list.Database" + }, + "db_id": { + "nullable": true, + "type": "integer" + }, + "description": { + "nullable": true, + "type": "string" + }, + "extra": { + "readOnly": true + }, + "id": 
{ + "type": "integer" + }, + "label": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, + "last_run_delta_humanized": { + "readOnly": true + }, + "rows": { + "nullable": true, + "type": "integer" + }, + "schema": { + "maxLength": 128, + "nullable": true, + "type": "string" + }, + "sql": { + "nullable": true, + "type": "string" + }, + "sql_tables": { + "readOnly": true + }, + "tags": { + "$ref": "#/components/schemas/SavedQueryRestApi.get_list.Tag" + } + }, + "type": "object" + }, + "SavedQueryRestApi.get_list.Database": { + "properties": { + "database_name": { + "maxLength": 250, + "type": "string" + }, + "id": { + "type": "integer" + } + }, + "required": ["database_name"], + "type": "object" + }, + "SavedQueryRestApi.get_list.Tag": { + "properties": { + "id": { + "type": "integer" + }, + "name": { + "maxLength": 250, + "nullable": true, + "type": "string" + }, + "type": { + "enum": [1, 2, 3, 4] + } + }, + "type": "object" + }, + "SavedQueryRestApi.get_list.User": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, + "SavedQueryRestApi.get_list.User1": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, + "SavedQueryRestApi.post": { + "properties": { + "catalog": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, + "db_id": { + "nullable": true, + "type": "integer" + }, + "description": { + "nullable": true, + "type": "string" + }, + "extra_json": { + "nullable": true, + "type": "string" + }, + "label": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, + "schema": { + "maxLength": 128, + "nullable": true, + "type": "string" + }, + 
"sql": { + "nullable": true, + "type": "string" + }, + "template_parameters": { + "nullable": true, + "type": "string" + } + }, + "type": "object" + }, + "SavedQueryRestApi.put": { + "properties": { + "catalog": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, + "db_id": { + "nullable": true, + "type": "integer" + }, + "description": { + "nullable": true, + "type": "string" + }, + "extra_json": { + "nullable": true, + "type": "string" + }, + "label": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, + "schema": { + "maxLength": 128, + "nullable": true, + "type": "string" + }, + "sql": { + "nullable": true, + "type": "string" + }, + "template_parameters": { + "nullable": true, + "type": "string" + } + }, + "type": "object" + }, + "SchemasResponseSchema": { + "properties": { + "result": { + "items": { + "description": "A database schema name", + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "SelectStarResponseSchema": { + "properties": { + "result": { + "description": "SQL select star", + "type": "string" + } + }, + "type": "object" + }, + "Slice": { + "properties": { + "cache_timeout": { + "description": "Duration (in seconds) of the caching timeout for this chart.", + "type": "integer" + }, + "certification_details": { + "description": "Details of the certification.", + "type": "string" + }, + "certified_by": { + "description": "Person or group that has certified this dashboard.", + "type": "string" + }, + "changed_on": { + "description": "Timestamp of the last modification.", + "format": "date-time", + "type": "string" + }, + "changed_on_humanized": { + "description": "Timestamp of the last modification in human readable form.", + "type": "string" + }, + "datasource": { + "description": "Datasource identifier.", + "type": "string" + }, + "description": { + "description": "Slice description.", + "type": "string" + }, + "description_markeddown": { + "description": "Sanitized HTML version of the chart 
description.", + "type": "string" + }, + "edit_url": { + "description": "The URL for editing the slice.", + "type": "string" + }, + "form_data": { + "description": "Form data associated with the slice.", + "type": "object" + }, + "is_managed_externally": { + "description": "If the chart is managed outside externally.", + "type": "boolean" + }, + "modified": { + "description": "Last modification in human readable form.", + "type": "string" + }, + "owners": { + "description": "Owners identifiers.", + "items": { + "type": "integer" + }, + "type": "array" + }, + "query_context": { + "description": "The context associated with the query.", + "type": "object" + }, + "slice_id": { + "description": "The slice ID.", + "type": "integer" + }, + "slice_name": { + "description": "The slice name.", + "type": "string" + }, + "slice_url": { + "description": "The slice URL.", + "type": "string" + } + }, + "type": "object" + }, + "StopQuerySchema": { + "properties": { + "client_id": { + "type": "string" + } + }, + "type": "object" + }, + "Tab": { + "properties": { + "children": { + "items": { + "$ref": "#/components/schemas/Tab" + }, + "type": "array" + }, + "parents": { + "items": { + "type": "string" + }, + "type": "array" + }, + "title": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "type": "object" + }, + "TabState": { + "properties": { + "active": { + "type": "boolean" + }, + "autorun": { + "type": "boolean" + }, + "database_id": { + "type": "integer" + }, + "extra_json": { + "type": "object" + }, + "hide_left_bar": { + "type": "boolean" + }, + "id": { + "type": "string" + }, + "label": { + "type": "string" + }, + "latest_query": { + "$ref": "#/components/schemas/QueryResult" + }, + "query_limit": { + "type": "integer" + }, + "saved_query": { + "nullable": true, + "type": "object" + }, + "schema": { + "type": "string" + }, + "sql": { + "type": "string" + }, + "table_schemas": { + "items": { + "$ref": "#/components/schemas/Table" + }, + "type": "array" + 
}, + "user_id": { + "type": "integer" + } + }, + "type": "object" + }, + "Table": { + "properties": { + "database_id": { + "type": "integer" + }, + "description": { + "type": "string" + }, + "expanded": { + "type": "boolean" + }, + "id": { + "type": "integer" + }, + "schema": { + "type": "string" + }, + "tab_state_id": { + "type": "integer" + }, + "table": { + "type": "string" + } + }, + "type": "object" + }, + "TableExtraMetadataResponseSchema": { + "properties": { + "clustering": { + "type": "object" + }, + "metadata": { + "type": "object" + }, + "partitions": { + "type": "object" + } + }, + "type": "object" + }, + "TableMetadataColumnsResponse": { + "properties": { + "duplicates_constraint": { + "type": "string" + }, + "keys": { + "description": "", + "items": { + "type": "string" + }, + "type": "array" + }, + "longType": { + "description": "The actual backend long type for the column", + "type": "string" + }, + "name": { + "description": "The column name", + "type": "string" + }, + "type": { + "description": "The column type", + "type": "string" + } + }, + "type": "object" + }, + "TableMetadataForeignKeysIndexesResponse": { + "properties": { + "column_names": { + "items": { + "description": "A list of column names that compose the foreign key or index", + "type": "string" + }, + "type": "array" + }, + "name": { + "description": "The name of the foreign key or index", + "type": "string" + }, + "options": { + "$ref": "#/components/schemas/TableMetadataOptionsResponse" + }, + "referred_columns": { + "items": { + "type": "string" + }, + "type": "array" + }, + "referred_schema": { + "type": "string" + }, + "referred_table": { + "type": "string" + }, + "type": { + "type": "string" + } + }, + "type": "object" + }, + "TableMetadataOptionsResponse": { + "properties": { + "deferrable": { + "type": "boolean" + }, + "initially": { + "type": "boolean" + }, + "match": { + "type": "boolean" + }, + "ondelete": { + "type": "boolean" + }, + "onupdate": { + "type": "boolean" + } 
+ }, + "type": "object" + }, + "TableMetadataPrimaryKeyResponse": { + "properties": { + "column_names": { + "items": { + "description": "A list of column names that compose the primary key", + "type": "string" + }, + "type": "array" + }, + "name": { + "description": "The primary key index name", + "type": "string" + }, + "type": { + "type": "string" + } + }, + "type": "object" + }, + "TableMetadataResponseSchema": { + "properties": { + "columns": { + "description": "A list of columns and their metadata", + "items": { + "$ref": "#/components/schemas/TableMetadataColumnsResponse" + }, + "type": "array" + }, + "foreignKeys": { + "description": "A list of foreign keys and their metadata", + "items": { + "$ref": "#/components/schemas/TableMetadataForeignKeysIndexesResponse" + }, + "type": "array" + }, + "indexes": { + "description": "A list of indexes and their metadata", + "items": { + "$ref": "#/components/schemas/TableMetadataForeignKeysIndexesResponse" + }, + "type": "array" + }, + "name": { + "description": "The name of the table", + "type": "string" + }, + "primaryKey": { + "allOf": [ + { + "$ref": "#/components/schemas/TableMetadataPrimaryKeyResponse" + } + ], + "description": "Primary keys metadata" + }, + "selectStar": { + "description": "SQL select star", + "type": "string" + } + }, + "type": "object" + }, + "Tables": { + "properties": { + "id": { + "type": "integer" + }, + "schema": { + "type": "string" + }, + "table_name": { + "type": "string" + } + }, + "type": "object" + }, + "TabsPayloadSchema": { + "properties": { + "all_tabs": { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + "tab_tree": { + "items": { + "$ref": "#/components/schemas/Tab" + }, + "type": "array" + } + }, + "type": "object" + }, + "Tag": { + "properties": { + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "type": { + "enum": [1, 2, 3, 4] + } + }, + "type": "object" + }, + "TagGetResponseSchema": { + "properties": { + "id": { + "type": 
"integer" + }, + "name": { + "type": "string" + }, + "type": { + "type": "string" + } + }, + "type": "object" + }, + "TagObject": { + "properties": { + "description": { + "nullable": true, + "type": "string" + }, + "name": { + "minLength": 1, + "type": "string" + }, + "objects_to_tag": { + "description": "Objects to tag", + "items": {}, + "type": "array" + } + }, + "type": "object" + }, + "TagPostBulkResponseObject": { + "properties": { + "objects_skipped": { + "description": "Objects to tag", + "items": {}, + "type": "array" + }, + "objects_tagged": { + "description": "Objects to tag", + "items": {}, + "type": "array" + } + }, + "type": "object" + }, + "TagPostBulkResponseSchema": { + "properties": { + "result": { + "$ref": "#/components/schemas/TagPostBulkResponseObject" + } + }, + "type": "object" + }, + "TagPostBulkSchema": { + "properties": { + "tags": { + "items": { + "$ref": "#/components/schemas/TagObject" + }, + "type": "array" + } + }, + "type": "object" + }, + "TagRestApi.get": { + "properties": { + "changed_by": { + "$ref": "#/components/schemas/TagRestApi.get.User" + }, + "changed_on_delta_humanized": { + "readOnly": true + }, + "created_by": { + "$ref": "#/components/schemas/TagRestApi.get.User1" + }, + "created_on_delta_humanized": { + "readOnly": true + }, + "description": { + "nullable": true, + "type": "string" + }, + "id": { + "type": "integer" + }, + "name": { + "maxLength": 250, + "nullable": true, + "type": "string" + }, + "type": { + "enum": [1, 2, 3, 4] + } + }, + "type": "object" + }, + "TagRestApi.get.User": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, + "TagRestApi.get.User1": { + "properties": { + "active": { + "nullable": true, + "type": "boolean" + }, + "changed_on": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "created_on": { + "format": 
"date-time", + "nullable": true, + "type": "string" + }, + "email": { + "maxLength": 320, + "type": "string" + }, + "fail_login_count": { + "nullable": true, + "type": "integer" + }, + "first_name": { + "maxLength": 64, + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_login": { + "format": "date-time", + "nullable": true, + "type": "string" + }, + "last_name": { + "maxLength": 64, + "type": "string" + }, + "login_count": { + "nullable": true, + "type": "integer" + }, + "password": { + "maxLength": 256, + "nullable": true, + "type": "string" + }, + "username": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["email", "first_name", "last_name", "username"], + "type": "object" + }, + "TagRestApi.get_list": { + "properties": { + "changed_by": { + "$ref": "#/components/schemas/TagRestApi.get_list.User" + }, + "changed_on_delta_humanized": { + "readOnly": true + }, + "created_by": { + "$ref": "#/components/schemas/TagRestApi.get_list.User1" + }, + "created_on_delta_humanized": { + "readOnly": true + }, + "description": { + "nullable": true, + "type": "string" + }, + "id": { + "type": "integer" + }, + "name": { + "maxLength": 250, + "nullable": true, + "type": "string" + }, + "type": { + "enum": [1, 2, 3, 4] + } + }, + "type": "object" + }, + "TagRestApi.get_list.User": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, + "TagRestApi.get_list.User1": { + "properties": { + "first_name": { + "maxLength": 64, + "type": "string" + }, + "last_name": { + "maxLength": 64, + "type": "string" + } + }, + "required": ["first_name", "last_name"], + "type": "object" + }, + "TagRestApi.post": { + "properties": { + "description": { + "nullable": true, + "type": "string" + }, + "name": { + "minLength": 1, + "type": "string" + }, + "objects_to_tag": { + "description": "Objects to tag", + "items": 
{}, + "type": "array" + } + }, + "type": "object" + }, + "TagRestApi.put": { + "properties": { + "description": { + "nullable": true, + "type": "string" + }, + "name": { + "minLength": 1, + "type": "string" + }, + "objects_to_tag": { + "description": "Objects to tag", + "items": {}, + "type": "array" + } + }, + "type": "object" + }, + "TaggedObjectEntityResponseSchema": { + "properties": { + "changed_on": { + "format": "date-time", + "type": "string" + }, + "created_by": { + "$ref": "#/components/schemas/User" + }, + "creator": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "owners": { + "items": { + "$ref": "#/components/schemas/User1" + }, + "type": "array" + }, + "tags": { + "items": { + "$ref": "#/components/schemas/TagGetResponseSchema" + }, + "type": "array" + }, + "type": { + "type": "string" + }, + "url": { + "type": "string" + } + }, + "type": "object" + }, + "TemporaryCachePostSchema": { + "properties": { + "value": { + "description": "Any type of JSON supported text.", + "type": "string" + } + }, + "required": ["value"], + "type": "object" + }, + "TemporaryCachePutSchema": { + "properties": { + "value": { + "description": "Any type of JSON supported text.", + "type": "string" + } + }, + "required": ["value"], + "type": "object" + }, + "UploadFileMetadata": { + "properties": { + "items": { + "items": { + "$ref": "#/components/schemas/UploadFileMetadataItem" + }, + "type": "array" + } + }, + "type": "object" + }, + "UploadFileMetadataItem": { + "properties": { + "column_names": { + "description": "A list of columns names in the sheet", + "items": { + "type": "string" + }, + "type": "array" + }, + "sheet_name": { + "description": "The name of the sheet", + "type": "string" + } + }, + "type": "object" + }, + "User": { + "properties": { + "first_name": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "type": "string" + } + }, + "type": "object" + }, + "User1": { + "properties": 
{ + "first_name": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "last_name": { + "type": "string" + }, + "username": { + "type": "string" + } + }, + "type": "object" + }, + "User2": { + "properties": { + "first_name": { + "type": "string" + }, + "last_name": { + "type": "string" + }, + "username": { + "type": "string" + } + }, + "type": "object" + }, + "UserResponseSchema": { + "properties": { + "email": { + "type": "string" + }, + "first_name": { + "type": "string" + }, + "id": { + "type": "integer" + }, + "is_active": { + "type": "boolean" + }, + "is_anonymous": { + "type": "boolean" + }, + "last_name": { + "type": "string" + }, + "username": { + "type": "string" + } + }, + "type": "object" + }, + "ValidateSQLRequest": { + "properties": { + "catalog": { + "nullable": true, + "type": "string" + }, + "schema": { + "nullable": true, + "type": "string" + }, + "sql": { + "description": "SQL statement to validate", + "type": "string" + }, + "template_params": { + "nullable": true, + "type": "object" + } + }, + "required": ["sql"], + "type": "object" + }, + "ValidateSQLResponse": { + "properties": { + "end_column": { + "type": "integer" + }, + "line_number": { + "type": "integer" + }, + "message": { + "type": "string" + }, + "start_column": { + "type": "integer" + } + }, + "type": "object" + }, + "ValidatorConfigJSON": { + "properties": { + "op": { + "description": "The operation to compare with a threshold to apply to the SQL output\n", + "enum": ["<", "<=", ">", ">=", "==", "!="], + "type": "string" + }, + "threshold": { + "type": "number" + } + }, + "type": "object" + }, + "advanced_data_type_convert_schema": { + "properties": { + "type": { + "default": "port", + "type": "string" + }, + "values": { + "items": { + "default": "http" + }, + "minItems": 1, + "type": "array" + } + }, + "required": ["type", "values"], + "type": "object" + }, + "database_catalogs_query_schema": { + "properties": { + "force": { + "type": "boolean" + } + }, + "type": "object" 
+ }, + "database_schemas_query_schema": { + "properties": { + "catalog": { + "type": "string" + }, + "force": { + "type": "boolean" + } + }, + "type": "object" + }, + "database_tables_query_schema": { + "properties": { + "catalog_name": { + "type": "string" + }, + "force": { + "type": "boolean" + }, + "schema_name": { + "type": "string" + } + }, + "required": ["schema_name"], + "type": "object" + }, + "delete_tags_schema": { + "items": { + "type": "string" + }, + "type": "array" + }, + "get_delete_ids_schema": { + "items": { + "type": "integer" + }, + "type": "array" + }, + "get_export_ids_schema": { + "items": { + "type": "integer" + }, + "type": "array" + }, + "get_fav_star_ids_schema": { + "items": { + "type": "integer" + }, + "type": "array" + }, + "get_info_schema": { + "properties": { + "add_columns": { + "additionalProperties": { + "properties": { + "page": { + "type": "integer" + }, + "page_size": { + "type": "integer" + } + }, + "type": "object" + }, + "type": "object" + }, + "edit_columns": { + "additionalProperties": { + "properties": { + "page": { + "type": "integer" + }, + "page_size": { + "type": "integer" + } + }, + "type": "object" + }, + "type": "object" + }, + "keys": { + "items": { + "enum": [ + "add_columns", + "edit_columns", + "filters", + "permissions", + "add_title", + "edit_title", + "none" + ], + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "get_item_schema": { + "properties": { + "columns": { + "items": { + "type": "string" + }, + "type": "array" + }, + "keys": { + "items": { + "enum": [ + "show_columns", + "description_columns", + "label_columns", + "show_title", + "none" + ], + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "get_list_schema": { + "properties": { + "columns": { + "items": { + "type": "string" + }, + "type": "array" }, - "validator_config_json": { - "$ref": "#/components/schemas/ValidatorConfigJSON" + "filters": { + "items": { + "properties": { + "col": { + "type": 
"string" + }, + "opr": { + "type": "string" + }, + "value": { + "anyOf": [ + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "boolean" + }, + { + "type": "array" + } + ] + } + }, + "required": ["col", "opr", "value"], + "type": "object" + }, + "type": "array" }, - "validator_type": { - "description": "Determines when to trigger alert based off value from alert query. Alerts will be triggered with these validator types:\n- Not Null - When the return value is Not NULL, Empty, or 0\n- Operator - When `sql_return_value comparison_operator threshold` is True e.g. `50 <= 75`<br>Supports the comparison operators <, <=, >, >=, ==, and !=", - "enum": ["not null", "operator"], - "nullable": true, + "keys": { + "items": { + "enum": [ + "list_columns", + "order_columns", + "label_columns", + "description_columns", + "list_title", + "none" + ], + "type": "string" + }, + "type": "array" + }, + "order_column": { "type": "string" }, - "working_timeout": { - "description": "If an alert is staled at a working state, how long until it's state is reseted to error", - "example": 3600, - "minimum": 1, - "nullable": true, + "order_direction": { + "enum": ["asc", "desc"], + "type": "string" + }, + "page": { + "type": "integer" + }, + "page_size": { "type": "integer" + }, + "select_columns": { + "items": { + "type": "string" + }, + "type": "array" } }, "type": "object" }, - "Resource": { + "get_recent_activity_schema": { "properties": { - "id": { - "type": "string" + "actions": { + "items": { + "type": "string" + }, + "type": "array" }, - "type": { - "enum": ["dashboard"] + "distinct": { + "type": "boolean" + }, + "page": { + "type": "number" + }, + "page_size": { + "type": "number" } }, - "required": ["id", "type"], "type": "object" }, - "RlsRule": { + "get_related_schema": { "properties": { - "clause": { + "filter": { "type": "string" }, - "dataset": { + "include_ids": { + "items": { + "type": "integer" + }, + "type": "array" + }, + "page": { + "type": "integer" + }, 
+ "page_size": { "type": "integer" } }, - "required": ["clause"], "type": "object" }, - "Roles": { + "queries_get_updated_since_schema": { "properties": { - "id": { - "type": "integer" - }, - "name": { - "type": "string" + "last_updated_ms": { + "type": "number" } }, + "required": ["last_updated_ms"], "type": "object" }, - "Roles1": { + "screenshot_query_schema": { "properties": { - "id": { - "type": "integer" + "force": { + "type": "boolean" }, - "name": { - "type": "string" + "thumb_size": { + "items": { + "type": "integer" + }, + "type": "array" + }, + "window_size": { + "items": { + "type": "integer" + }, + "type": "array" } }, "type": "object" }, - "SavedQueryRestApi.get": { + "sql_lab_get_results_schema": { "properties": { - "changed_on_delta_humanized": { - "readOnly": true - }, - "created_by": { - "$ref": "#/components/schemas/SavedQueryRestApi.get.User" - }, - "database": { - "$ref": "#/components/schemas/SavedQueryRestApi.get.Database" - }, - "description": { - "nullable": true, - "type": "string" - }, - "id": { - "type": "integer" - }, - "label": { - "maxLength": 256, - "nullable": true, - "type": "string" - }, - "schema": { - "maxLength": 128, - "nullable": true, - "type": "string" - }, - "sql": { - "nullable": true, - "type": "string" - }, - "sql_tables": { - "readOnly": true - }, - "template_parameters": { - "nullable": true, + "key": { "type": "string" } }, + "required": ["key"], "type": "object" }, - "SavedQueryRestApi.get.Database": { + "thumbnail_query_schema": { "properties": { - "database_name": { - "maxLength": 250, - "type": "string" - }, - "id": { - "type": "integer" + "force": { + "type": "boolean" } }, - "required": ["database_name"], "type": "object" + } + }, + "securitySchemes": { + "jwt": { + "bearerFormat": "JWT", + "scheme": "bearer", + "type": "http" }, - "SavedQueryRestApi.get.User": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" - }, - "id": { - "type": "integer" - }, - "last_name": { - "maxLength": 64, 
- "type": "string" + "jwt_refresh": { + "bearerFormat": "JWT", + "scheme": "bearer", + "type": "http" + } + } + }, + "info": { + "description": "Superset", + "title": "Superset", + "version": "v1" + }, + "openapi": "3.0.2", + "paths": { + "/api/v1/advanced_data_type/convert": { + "get": { + "description": "Returns an AdvancedDataTypeResponse object populated with the passed in args.", + "parameters": [ + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/advanced_data_type_convert_schema" + } + } + }, + "in": "query", + "name": "q" } - }, - "required": ["first_name", "last_name"], - "type": "object" - }, - "SavedQueryRestApi.get_list": { - "properties": { - "changed_on_delta_humanized": { - "readOnly": true - }, - "created_by": { - "$ref": "#/components/schemas/SavedQueryRestApi.get_list.User" + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AdvancedDataTypeSchema" + } + } + }, + "description": "AdvancedDataTypeResponse object has been returned." 
}, - "created_on": { - "format": "date-time", - "nullable": true, - "type": "string" + "400": { + "$ref": "#/components/responses/400" }, - "database": { - "$ref": "#/components/schemas/SavedQueryRestApi.get_list.Database" + "401": { + "$ref": "#/components/responses/401" }, - "db_id": { - "nullable": true, - "type": "integer" + "403": { + "$ref": "#/components/responses/403" }, - "description": { - "nullable": true, - "type": "string" + "404": { + "$ref": "#/components/responses/404" }, - "extra": { - "readOnly": true + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Return an AdvancedDataTypeResponse", + "tags": ["Advanced Data Type"] + } + }, + "/api/v1/advanced_data_type/types": { + "get": { + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "result": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + } + } + }, + "description": "a successful return of the available advanced data types has taken place." 
}, - "id": { - "type": "integer" + "401": { + "$ref": "#/components/responses/401" }, - "label": { - "maxLength": 256, - "nullable": true, - "type": "string" + "403": { + "$ref": "#/components/responses/403" }, - "last_run_delta_humanized": { - "readOnly": true + "404": { + "$ref": "#/components/responses/404" }, - "rows": { - "nullable": true, - "type": "integer" + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Return a list of available advanced data types", + "tags": ["Advanced Data Type"] + } + }, + "/api/v1/annotation_layer/": { + "delete": { + "parameters": [ + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_delete_ids_schema" + } + } + }, + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" + } + } + }, + "description": "CSS templates bulk delete" }, - "schema": { - "maxLength": 128, - "nullable": true, - "type": "string" + "401": { + "$ref": "#/components/responses/401" }, - "sql": { - "nullable": true, - "type": "string" + "404": { + "$ref": "#/components/responses/404" }, - "sql_tables": { - "readOnly": true + "422": { + "$ref": "#/components/responses/422" }, - "tags": { - "$ref": "#/components/schemas/SavedQueryRestApi.get_list.Tag" + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" - }, - "SavedQueryRestApi.get_list.Database": { - "properties": { - "database_name": { - "maxLength": 250, - "type": "string" - }, - "id": { - "type": "integer" + "security": [ + { + "jwt": [] } - }, - "required": ["database_name"], - "type": "object" + ], + "summary": "Delete multiple annotation layers in a bulk operation", + "tags": ["Annotation Layers"] }, - "SavedQueryRestApi.get_list.Tag": { - "properties": { - "id": { - "type": "integer" + "get": { + "description": "Gets a list of annotation 
layers, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", + "parameters": [ + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_list_schema" + } + } + }, + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "count": { + "description": "The total record count on the backend", + "type": "number" + }, + "description_columns": { + "properties": { + "column_name": { + "description": "The description for the column name. Will be translated by babel", + "example": "A Nice description for the column", + "type": "string" + } + }, + "type": "object" + }, + "ids": { + "description": "A list of item ids, useful when you don't know the column id", + "items": { + "type": "string" + }, + "type": "array" + }, + "label_columns": { + "properties": { + "column_name": { + "description": "The label for the column name. Will be translated by babel", + "example": "A Nice label for the column", + "type": "string" + } + }, + "type": "object" + }, + "list_columns": { + "description": "A list of columns", + "items": { + "type": "string" + }, + "type": "array" + }, + "list_title": { + "description": "A title to render. 
Will be translated by babel", + "example": "List Items", + "type": "string" + }, + "order_columns": { + "description": "A list of allowed columns to sort", + "items": { + "type": "string" + }, + "type": "array" + }, + "result": { + "description": "The result from the get list query", + "items": { + "$ref": "#/components/schemas/AnnotationLayerRestApi.get_list" + }, + "type": "array" + } + }, + "type": "object" + } + } + }, + "description": "Items from Model" }, - "name": { - "maxLength": 250, - "nullable": true, - "type": "string" + "400": { + "$ref": "#/components/responses/400" }, - "type": { - "enum": [1, 2, 3, 4] - } - }, - "type": "object" - }, - "SavedQueryRestApi.get_list.User": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" + "401": { + "$ref": "#/components/responses/401" }, - "id": { - "type": "integer" + "422": { + "$ref": "#/components/responses/422" }, - "last_name": { - "maxLength": 64, - "type": "string" + "500": { + "$ref": "#/components/responses/500" } }, - "required": ["first_name", "last_name"], - "type": "object" - }, - "SavedQueryRestApi.post": { - "properties": { - "db_id": { - "nullable": true, - "type": "integer" - }, - "description": { - "nullable": true, - "type": "string" - }, - "label": { - "maxLength": 256, - "nullable": true, - "type": "string" - }, - "schema": { - "maxLength": 128, - "nullable": true, - "type": "string" - }, - "sql": { - "nullable": true, - "type": "string" - }, - "template_parameters": { - "nullable": true, - "type": "string" + "security": [ + { + "jwt": [] } - }, - "type": "object" + ], + "summary": "Get a list of annotation layers", + "tags": ["Annotation Layers"] }, - "SavedQueryRestApi.put": { - "properties": { - "db_id": { - "nullable": true, - "type": "integer" + "post": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AnnotationLayerRestApi.post" + } + } }, - "description": { - "nullable": true, - "type": "string" + 
"description": "Annotation Layer schema", + "required": true + }, + "responses": { + "201": { + "content": { + "application/json": { + "schema": { + "properties": { + "id": { + "type": "number" + }, + "result": { + "$ref": "#/components/schemas/AnnotationLayerRestApi.post" + } + }, + "type": "object" + } + } + }, + "description": "Annotation added" }, - "label": { - "maxLength": 256, - "nullable": true, - "type": "string" + "400": { + "$ref": "#/components/responses/400" }, - "schema": { - "maxLength": 128, - "nullable": true, - "type": "string" + "401": { + "$ref": "#/components/responses/401" }, - "sql": { - "nullable": true, - "type": "string" + "404": { + "$ref": "#/components/responses/404" }, - "template_parameters": { - "nullable": true, - "type": "string" + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" - }, - "SchemasResponseSchema": { - "properties": { - "result": { - "items": { - "description": "A database schema name", - "type": "string" - }, - "type": "array" + "security": [ + { + "jwt": [] } - }, - "type": "object" - }, - "SelectStarResponseSchema": { - "properties": { - "result": { - "description": "SQL select star", - "type": "string" + ], + "summary": "Create an annotation layer", + "tags": ["Annotation Layers"] + } + }, + "/api/v1/annotation_layer/_info": { + "get": { + "description": "Get metadata information about this API resource", + "parameters": [ + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_info_schema" + } + } + }, + "in": "query", + "name": "q" } - }, - "type": "object" - }, - "Slice": { - "properties": { - "cache_timeout": { - "description": "Duration (in seconds) of the caching timeout for this chart.", - "type": "integer" - }, - "certification_details": { - "description": "Details of the certification.", - "type": "string" - }, - "certified_by": { - "description": "Person or group that has certified this dashboard.", - "type": "string" - }, - "changed_on": { - 
"description": "Timestamp of the last modification.", - "format": "date-time", - "type": "string" - }, - "changed_on_humanized": { - "description": "Timestamp of the last modification in human readable form.", - "type": "string" - }, - "datasource": { - "description": "Datasource identifier.", - "type": "string" - }, - "description": { - "description": "Slice description.", - "type": "string" - }, - "description_markeddown": { - "description": "Sanitized HTML version of the chart description.", - "type": "string" + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "add_columns": { + "type": "object" + }, + "edit_columns": { + "type": "object" + }, + "filters": { + "properties": { + "column_name": { + "items": { + "properties": { + "name": { + "description": "The filter name. Will be translated by babel", + "type": "string" + }, + "operator": { + "description": "The filter operation key to use on list filters", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + }, + "permissions": { + "description": "The user permissions for this API resource", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + } + } + }, + "description": "Item from Model" }, - "edit_url": { - "description": "The URL for editing the slice.", - "type": "string" + "400": { + "$ref": "#/components/responses/400" }, - "form_data": { - "description": "Form data associated with the slice.", - "type": "object" + "401": { + "$ref": "#/components/responses/401" }, - "is_managed_externally": { - "description": "If the chart is managed outside externally.", - "type": "boolean" + "422": { + "$ref": "#/components/responses/422" }, - "modified": { - "description": "Last modification in human readable form.", - "type": "string" + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Get metadata information about this API 
resource", + "tags": ["Annotation Layers"] + } + }, + "/api/v1/annotation_layer/related/{column_name}": { + "get": { + "parameters": [ + { + "in": "path", + "name": "column_name", + "required": true, + "schema": { + "type": "string" + } }, - "owners": { - "description": "Owners identifiers.", - "items": { - "type": "integer" + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_related_schema" + } + } }, - "type": "array" + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RelatedResponseSchema" + } + } + }, + "description": "Related column data" }, - "query_context": { - "description": "The context associated with the query.", - "type": "object" + "400": { + "$ref": "#/components/responses/400" }, - "slice_id": { - "description": "The slice ID.", - "type": "integer" + "401": { + "$ref": "#/components/responses/401" }, - "slice_name": { - "description": "The slice name.", - "type": "string" + "404": { + "$ref": "#/components/responses/404" }, - "slice_url": { - "description": "The slice URL.", - "type": "string" + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" - }, - "StopQuerySchema": { - "properties": { - "client_id": { - "type": "string" + "security": [ + { + "jwt": [] } - }, - "type": "object" - }, - "TableExtraMetadataResponseSchema": { - "properties": { - "clustering": { - "type": "object" - }, - "metadata": { - "type": "object" - }, - "partitions": { - "type": "object" + ], + "summary": "Get related fields data", + "tags": ["Annotation Layers"] + } + }, + "/api/v1/annotation_layer/{pk}": { + "delete": { + "parameters": [ + { + "description": "The annotation layer pk for this annotation", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } } - }, - "type": "object" - }, - "TableMetadataColumnsResponse": { - "properties": { - "duplicates_constraint": { - 
"type": "string" - }, - "keys": { - "description": "", - "items": { - "type": "string" + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" + } + } }, - "type": "array" + "description": "Item deleted" }, - "longType": { - "description": "The actual backend long type for the column", - "type": "string" + "404": { + "$ref": "#/components/responses/404" }, - "name": { - "description": "The column name", - "type": "string" + "422": { + "$ref": "#/components/responses/422" }, - "type": { - "description": "The column type", - "type": "string" + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" + "security": [ + { + "jwt": [] + } + ], + "summary": "Delete annotation layer", + "tags": ["Annotation Layers"] }, - "TableMetadataForeignKeysIndexesResponse": { - "properties": { - "column_names": { - "items": { - "description": "A list of column names that compose the foreign key or index", - "type": "string" - }, - "type": "array" + "get": { + "description": "Get an item model", + "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } }, - "name": { - "description": "The name of the foreign key or index", - "type": "string" + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_item_schema" + } + } + }, + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "description_columns": { + "properties": { + "column_name": { + "description": "The description for the column name. 
Will be translated by babel", + "example": "A Nice description for the column", + "type": "string" + } + }, + "type": "object" + }, + "id": { + "description": "The item id", + "type": "string" + }, + "label_columns": { + "properties": { + "column_name": { + "description": "The label for the column name. Will be translated by babel", + "example": "A Nice label for the column", + "type": "string" + } + }, + "type": "object" + }, + "result": { + "$ref": "#/components/schemas/AnnotationLayerRestApi.get" + }, + "show_columns": { + "description": "A list of columns", + "items": { + "type": "string" + }, + "type": "array" + }, + "show_title": { + "description": "A title to render. Will be translated by babel", + "example": "Show Item Details", + "type": "string" + } + }, + "type": "object" + } + } + }, + "description": "Item from Model" }, - "options": { - "$ref": "#/components/schemas/TableMetadataOptionsResponse" + "400": { + "$ref": "#/components/responses/400" }, - "referred_columns": { - "items": { - "type": "string" - }, - "type": "array" + "401": { + "$ref": "#/components/responses/401" }, - "referred_schema": { - "type": "string" + "404": { + "$ref": "#/components/responses/404" }, - "referred_table": { - "type": "string" + "422": { + "$ref": "#/components/responses/422" }, - "type": { - "type": "string" + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" + "security": [ + { + "jwt": [] + } + ], + "summary": "Get an annotation layer", + "tags": ["Annotation Layers"] }, - "TableMetadataOptionsResponse": { - "properties": { - "deferrable": { - "type": "boolean" - }, - "initially": { - "type": "boolean" - }, - "match": { - "type": "boolean" - }, - "ondelete": { - "type": "boolean" - }, - "onupdate": { - "type": "boolean" + "put": { + "parameters": [ + { + "description": "The annotation layer pk for this annotation", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } } + ], + "requestBody": { + "content": { 
+ "application/json": { + "schema": { + "$ref": "#/components/schemas/AnnotationLayerRestApi.put" + } + } + }, + "description": "Annotation schema", + "required": true }, - "type": "object" - }, - "TableMetadataPrimaryKeyResponse": { - "properties": { - "column_names": { - "items": { - "description": "A list of column names that compose the primary key", - "type": "string" + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "id": { + "type": "number" + }, + "result": { + "$ref": "#/components/schemas/AnnotationLayerRestApi.put" + } + }, + "type": "object" + } + } }, - "type": "array" + "description": "Annotation changed" }, - "name": { - "description": "The primary key index name", - "type": "string" + "400": { + "$ref": "#/components/responses/400" }, - "type": { - "type": "string" + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" - }, - "TableMetadataResponseSchema": { - "properties": { - "columns": { - "description": "A list of columns and their metadata", - "items": { - "$ref": "#/components/schemas/TableMetadataColumnsResponse" - }, - "type": "array" + "security": [ + { + "jwt": [] + } + ], + "summary": "Update an annotation layer", + "tags": ["Annotation Layers"] + } + }, + "/api/v1/annotation_layer/{pk}/annotation/": { + "delete": { + "parameters": [ + { + "description": "The annotation layer pk for this annotation", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } }, - "foreignKeys": { - "description": "A list of foreign keys and their metadata", - "items": { - "$ref": "#/components/schemas/TableMetadataForeignKeysIndexesResponse" + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_delete_ids_schema" + } + } }, - "type": "array" - }, - "indexes": { - "description": "A list of indexes and their 
metadata", - "items": { - "$ref": "#/components/schemas/TableMetadataForeignKeysIndexesResponse" + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" + } + } }, - "type": "array" + "description": "Annotations bulk delete" }, - "name": { - "description": "The name of the table", - "type": "string" + "401": { + "$ref": "#/components/responses/401" }, - "primaryKey": { - "allOf": [ - { - "$ref": "#/components/schemas/TableMetadataPrimaryKeyResponse" - } - ], - "description": "Primary keys metadata" + "404": { + "$ref": "#/components/responses/404" }, - "selectStar": { - "description": "SQL select star", - "type": "string" + "422": { + "$ref": "#/components/responses/422" + }, + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" + "security": [ + { + "jwt": [] + } + ], + "summary": "Bulk delete annotation layers", + "tags": ["Annotation Layers"] }, - "Tables": { - "properties": { - "id": { - "type": "integer" + "get": { + "description": "Gets a list of annotation layers, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", + "parameters": [ + { + "description": "The annotation layer id for this annotation", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_list_schema" + } + } + }, + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "count": { + "description": "The total record count on the backend", + "type": "number" + }, + "ids": { + "description": "A list of annotation ids", + "items": { + "type": "string" + }, + "type": "array" + }, + "result": { + "description": "The result from the get list query", + 
"items": { + "$ref": "#/components/schemas/AnnotationRestApi.get_list" + }, + "type": "array" + } + }, + "type": "object" + } + } + }, + "description": "Items from Annotations" }, - "schema": { - "type": "string" + "400": { + "$ref": "#/components/responses/400" }, - "table_name": { - "type": "string" - } - }, - "type": "object" - }, - "Tag": { - "properties": { - "id": { - "type": "integer" + "401": { + "$ref": "#/components/responses/401" }, - "name": { - "type": "string" + "422": { + "$ref": "#/components/responses/422" }, - "type": { - "enum": [1, 2, 3, 4] + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" - }, - "Tag1": { - "properties": { - "id": { - "type": "integer" - }, - "name": { - "type": "string" - }, - "type": { - "enum": [1, 2, 3, 4] + "security": [ + { + "jwt": [] } - }, - "type": "object" + ], + "summary": "Get a list of annotation layers", + "tags": ["Annotation Layers"] }, - "TagGetResponseSchema": { - "properties": { - "id": { - "type": "integer" - }, - "name": { - "type": "string" - }, - "type": { - "type": "string" + "post": { + "parameters": [ + { + "description": "The annotation layer pk for this annotation", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } } - }, - "type": "object" - }, - "TagRestApi.get": { - "properties": { - "changed_by": { - "$ref": "#/components/schemas/TagRestApi.get.User1" + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AnnotationRestApi.post" + } + } }, - "changed_on_delta_humanized": { - "readOnly": true + "description": "Annotation schema", + "required": true + }, + "responses": { + "201": { + "content": { + "application/json": { + "schema": { + "properties": { + "id": { + "type": "number" + }, + "result": { + "$ref": "#/components/schemas/AnnotationRestApi.post" + } + }, + "type": "object" + } + } + }, + "description": "Annotation added" }, - "created_by": { - "$ref": 
"#/components/schemas/TagRestApi.get.User" + "400": { + "$ref": "#/components/responses/400" }, - "id": { - "type": "integer" + "401": { + "$ref": "#/components/responses/401" }, - "name": { - "maxLength": 250, - "nullable": true, - "type": "string" + "404": { + "$ref": "#/components/responses/404" }, - "type": { - "enum": [1, 2, 3, 4] + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" - }, - "TagRestApi.get.User": { - "properties": { - "active": { - "nullable": true, - "type": "boolean" - }, - "changed_on": { - "format": "date-time", - "nullable": true, - "type": "string" - }, - "created_on": { - "format": "date-time", - "nullable": true, - "type": "string" - }, - "email": { - "maxLength": 64, - "type": "string" - }, - "fail_login_count": { - "nullable": true, - "type": "integer" - }, - "first_name": { - "maxLength": 64, - "type": "string" - }, - "id": { - "type": "integer" - }, - "last_login": { - "format": "date-time", - "nullable": true, - "type": "string" + "security": [ + { + "jwt": [] + } + ], + "summary": "Create an annotation layer", + "tags": ["Annotation Layers"] + } + }, + "/api/v1/annotation_layer/{pk}/annotation/{annotation_id}": { + "delete": { + "parameters": [ + { + "description": "The annotation layer pk for this annotation", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } }, - "last_name": { - "maxLength": 64, - "type": "string" + { + "description": "The annotation pk for this annotation", + "in": "path", + "name": "annotation_id", + "required": true, + "schema": { + "type": "integer" + } + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" + } + } + }, + "description": "Item deleted" }, - "login_count": { - "nullable": true, - "type": "integer" + "404": { + "$ref": "#/components/responses/404" }, - "password": { - "maxLength": 256, - "nullable": true, - "type": 
"string" + "422": { + "$ref": "#/components/responses/422" }, - "username": { - "maxLength": 64, - "type": "string" + "500": { + "$ref": "#/components/responses/500" } }, - "required": ["email", "first_name", "last_name", "username"], - "type": "object" + "security": [ + { + "jwt": [] + } + ], + "summary": "Delete annotation layer", + "tags": ["Annotation Layers"] }, - "TagRestApi.get.User1": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" + "get": { + "parameters": [ + { + "description": "The annotation layer pk for this annotation", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } }, - "last_name": { - "maxLength": 64, - "type": "string" + { + "description": "The annotation pk", + "in": "path", + "name": "annotation_id", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_item_schema" + } + } + }, + "in": "query", + "name": "q" } - }, - "required": ["first_name", "last_name"], - "type": "object" - }, - "TagRestApi.get_list": { - "properties": { - "changed_by": { - "$ref": "#/components/schemas/TagRestApi.get_list.User1" + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "id": { + "description": "The item id", + "type": "string" + }, + "result": { + "$ref": "#/components/schemas/AnnotationRestApi.get" + } + }, + "type": "object" + } + } + }, + "description": "Item from Model" }, - "changed_on_delta_humanized": { - "readOnly": true + "400": { + "$ref": "#/components/responses/400" }, - "created_by": { - "$ref": "#/components/schemas/TagRestApi.get_list.User" + "401": { + "$ref": "#/components/responses/401" }, - "id": { - "type": "integer" + "404": { + "$ref": "#/components/responses/404" }, - "name": { - "maxLength": 250, - "nullable": true, - "type": "string" + "422": { + "$ref": "#/components/responses/422" }, - "type": { - "enum": [1, 2, 
3, 4] + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" - }, - "TagRestApi.get_list.User": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" - }, - "last_name": { - "maxLength": 64, - "type": "string" + "security": [ + { + "jwt": [] } - }, - "required": ["first_name", "last_name"], - "type": "object" + ], + "summary": "Get an annotation layer", + "tags": ["Annotation Layers"] }, - "TagRestApi.get_list.User1": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" + "put": { + "parameters": [ + { + "description": "The annotation layer pk for this annotation", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } }, - "last_name": { - "maxLength": 64, - "type": "string" + { + "description": "The annotation pk for this annotation", + "in": "path", + "name": "annotation_id", + "required": true, + "schema": { + "type": "integer" + } } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AnnotationRestApi.put" + } + } + }, + "description": "Annotation schema", + "required": true }, - "required": ["first_name", "last_name"], - "type": "object" - }, - "TagRestApi.post": { - "properties": { - "tags": { - "items": { - "type": "string" + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "id": { + "type": "number" + }, + "result": { + "$ref": "#/components/schemas/AnnotationRestApi.put" + } + }, + "type": "object" + } + } }, - "type": "array" - } - }, - "type": "object" - }, - "TagRestApi.put": { - "properties": { - "id": { - "type": "integer" - } - }, - "type": "object" - }, - "TaggedObjectEntityResponseSchema": { - "properties": { - "changed_on": { - "format": "date-time", - "type": "string" - }, - "created_by": { - "$ref": "#/components/schemas/User" - }, - "creator": { - "type": "string" + "description": "Annotation changed" }, - "id": { - "type": "integer" + 
"400": { + "$ref": "#/components/responses/400" }, - "name": { - "type": "string" + "401": { + "$ref": "#/components/responses/401" }, - "type": { - "type": "string" + "404": { + "$ref": "#/components/responses/404" }, - "url": { - "type": "string" - } - }, - "type": "object" - }, - "TemporaryCachePostSchema": { - "properties": { - "value": { - "description": "Any type of JSON supported text.", - "type": "string" + "500": { + "$ref": "#/components/responses/500" } }, - "required": ["value"], - "type": "object" - }, - "TemporaryCachePutSchema": { - "properties": { - "value": { - "description": "Any type of JSON supported text.", - "type": "string" + "security": [ + { + "jwt": [] } - }, - "required": ["value"], - "type": "object" - }, - "User": { - "properties": { - "first_name": { - "type": "string" + ], + "summary": "Update an annotation layer", + "tags": ["Annotation Layers"] + } + }, + "/api/v1/apple/health": { + "get": { + "description": "Endpoint for checking the health status of the metastore and cache", + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AppleHealthResponseSchema" + } + } + }, + "description": "The" }, - "id": { - "type": "integer" + "400": { + "$ref": "#/components/responses/400" }, - "last_name": { - "type": "string" + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" + }, + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" - }, - "User1": { - "properties": { - "first_name": { - "type": "string" + "summary": "Get the health status of the metastore and cache", + "tags": ["Apple"] + } + }, + "/api/v1/assets/export/": { + "get": { + "description": "Gets a ZIP file with all the Superset assets (databases, datasets, charts, dashboards, saved queries) as YAML files.", + "responses": { + "200": { + "content": { + "application/zip": { + "schema": { + "format": "binary", + "type": "string" + } + } + }, + 
"description": "ZIP file" }, - "id": { - "type": "integer" + "401": { + "$ref": "#/components/responses/401" }, - "last_name": { - "type": "string" + "404": { + "$ref": "#/components/responses/404" }, - "username": { - "type": "string" + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" - }, - "User2": { - "properties": { - "first_name": { - "type": "string" - }, - "last_name": { - "type": "string" - }, - "username": { - "type": "string" + "security": [ + { + "jwt": [] } - }, - "type": "object" - }, - "UserResponseSchema": { - "properties": { - "email": { - "type": "string" - }, - "first_name": { - "type": "string" + ], + "summary": "Export all assets", + "tags": ["Import/export"] + } + }, + "/api/v1/assets/import/": { + "post": { + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "properties": { + "bundle": { + "description": "upload file (ZIP or JSON)", + "format": "binary", + "type": "string" + }, + "passwords": { + "description": "JSON map of passwords for each featured database in the ZIP file. If the ZIP includes a database config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", + "type": "string" + }, + "ssh_tunnel_passwords": { + "description": "JSON map of passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", + "type": "string" + }, + "ssh_tunnel_private_key_passwords": { + "description": "JSON map of private_key_passwords for each ssh_tunnel associated to a featured database in the ZIP file. 
If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key_password\"}`.", + "type": "string" + }, + "ssh_tunnel_private_keys": { + "description": "JSON map of private_keys for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key\"}`.", + "type": "string" + } + }, + "type": "object" + } + } }, - "id": { - "type": "integer" + "required": true + }, + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" + } + } + }, + "description": "Assets import result" }, - "is_active": { - "type": "boolean" + "400": { + "$ref": "#/components/responses/400" }, - "is_anonymous": { - "type": "boolean" + "401": { + "$ref": "#/components/responses/401" }, - "last_name": { - "type": "string" + "422": { + "$ref": "#/components/responses/422" }, - "username": { - "type": "string" + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" - }, - "ValidateSQLRequest": { - "properties": { - "schema": { - "nullable": true, - "type": "string" - }, - "sql": { - "description": "SQL statement to validate", - "type": "string" - }, - "template_params": { - "nullable": true, - "type": "object" + "security": [ + { + "jwt": [] } - }, - "required": ["sql"], - "type": "object" - }, - "ValidateSQLResponse": { - "properties": { - "end_column": { - "type": "integer" - }, - "line_number": { - "type": "integer" + ], + "summary": "Import multiple assets", + "tags": ["Import/export"] + } + }, + "/api/v1/async_event/": { + "get": { + "description": "Reads off of the Redis events stream, using the user's JWT token and optional query params for last event 
received.", + "parameters": [ + { + "description": "Last ID received by the client", + "in": "query", + "name": "last_id", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "result": { + "items": { + "properties": { + "channel_id": { + "type": "string" + }, + "errors": { + "items": { + "type": "object" + }, + "type": "array" + }, + "id": { + "type": "string" + }, + "job_id": { + "type": "string" + }, + "result_url": { + "type": "string" + }, + "status": { + "type": "string" + }, + "user_id": { + "type": "integer" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + } + } + }, + "description": "Async event results" }, - "message": { - "type": "string" + "401": { + "$ref": "#/components/responses/401" }, - "start_column": { - "type": "integer" + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" - }, - "ValidatorConfigJSON": { - "properties": { - "op": { - "description": "The operation to compare with a threshold to apply to the SQL output\n", - "enum": ["<", "<=", ">", ">=", "==", "!="], - "type": "string" - }, - "threshold": { - "type": "number" + "security": [ + { + "jwt": [] } - }, - "type": "object" - }, - "advanced_data_type_convert_schema": { - "properties": { - "type": { - "default": "port", - "type": "string" - }, - "values": { - "items": { - "default": "http" + ], + "summary": "Read off of the Redis events stream", + "tags": ["AsyncEventsRestApi"] + } + }, + "/api/v1/available_domains/": { + "get": { + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "result": { + "$ref": "#/components/schemas/AvailableDomainsSchema" + } + }, + "type": "object" + } + } }, - "minItems": 1, - "type": "array" + "description": "a list of available domains" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" } }, - "required": 
["type", "values"], - "type": "object" - }, - "database_schemas_query_schema": { - "properties": { - "force": { - "type": "boolean" + "security": [ + { + "jwt": [] } + ], + "summary": "Get all available domains", + "tags": ["Available Domains"] + } + }, + "/api/v1/cachekey/invalidate": { + "post": { + "description": "Takes a list of datasources, finds and invalidates the associated cache records and removes the database records.", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CacheInvalidationRequestSchema" + } + } + }, + "description": "A list of datasources uuid or the tuples of database and datasource names", + "required": true }, - "type": "object" - }, - "database_tables_query_schema": { - "properties": { - "force": { - "type": "boolean" + "responses": { + "201": { + "description": "cache was successfully invalidated" }, - "schema_name": { - "type": "string" + "400": { + "$ref": "#/components/responses/400" + }, + "500": { + "$ref": "#/components/responses/500" } }, - "required": ["schema_name"], - "type": "object" - }, - "delete_tags_schema": { - "items": { - "type": "string" - }, - "type": "array" - }, - "get_delete_ids_schema": { - "items": { - "type": "integer" - }, - "type": "array" - }, - "get_export_ids_schema": { - "items": { - "type": "integer" - }, - "type": "array" - }, - "get_fav_star_ids_schema": { - "items": { - "type": "integer" - }, - "type": "array" - }, - "get_info_schema": { - "properties": { - "add_columns": { - "additionalProperties": { - "properties": { - "page": { - "type": "integer" - }, - "page_size": { - "type": "integer" + "security": [ + { + "jwt": [] + } + ], + "summary": "Invalidate cache records and remove the database records", + "tags": ["CacheRestApi"] + } + }, + "/api/v1/chart/": { + "delete": { + "parameters": [ + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_delete_ids_schema" } - }, - "type": "object" + } }, - "type": 
"object" - }, - "edit_columns": { - "additionalProperties": { - "properties": { - "page": { - "type": "integer" - }, - "page_size": { - "type": "integer" + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" } - }, - "type": "object" + } }, - "type": "object" + "description": "Charts bulk delete" }, - "keys": { - "items": { - "enum": [ - "add_columns", - "edit_columns", - "filters", - "permissions", - "add_title", - "edit_title", - "none" - ], - "type": "string" - }, - "type": "array" - } - }, - "type": "object" - }, - "get_item_schema": { - "properties": { - "columns": { - "items": { - "type": "string" - }, - "type": "array" + "401": { + "$ref": "#/components/responses/401" }, - "keys": { - "items": { - "enum": [ - "show_columns", - "description_columns", - "label_columns", - "show_title", - "none" - ], - "type": "string" - }, - "type": "array" + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "422": { + "$ref": "#/components/responses/422" + }, + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" + "security": [ + { + "jwt": [] + } + ], + "summary": "Bulk delete charts", + "tags": ["Charts"] }, - "get_list_schema": { - "properties": { - "columns": { - "items": { - "type": "string" + "get": { + "description": "Gets a list of charts, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", + "parameters": [ + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_list_schema" + } + } }, - "type": "array" - }, - "filters": { - "items": { - "properties": { - "col": { - "type": "string" - }, - "opr": { - "type": "string" - }, - "value": { - "anyOf": [ - { + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + 
"application/json": { + "schema": { + "properties": { + "count": { + "description": "The total record count on the backend", "type": "number" }, - { + "description_columns": { + "properties": { + "column_name": { + "description": "The description for the column name. Will be translated by babel", + "example": "A Nice description for the column", + "type": "string" + } + }, + "type": "object" + }, + "ids": { + "description": "A list of item ids, useful when you don't know the column id", + "items": { + "type": "string" + }, + "type": "array" + }, + "label_columns": { + "properties": { + "column_name": { + "description": "The label for the column name. Will be translated by babel", + "example": "A Nice label for the column", + "type": "string" + } + }, + "type": "object" + }, + "list_columns": { + "description": "A list of columns", + "items": { + "type": "string" + }, + "type": "array" + }, + "list_title": { + "description": "A title to render. Will be translated by babel", + "example": "List Items", "type": "string" }, - { - "type": "boolean" + "order_columns": { + "description": "A list of allowed columns to sort", + "items": { + "type": "string" + }, + "type": "array" }, - { + "result": { + "description": "The result from the get list query", + "items": { + "$ref": "#/components/schemas/ChartRestApi.get_list" + }, "type": "array" } - ] + }, + "type": "object" } - }, - "required": ["col", "opr", "value"], - "type": "object" - }, - "type": "array" - }, - "keys": { - "items": { - "enum": [ - "list_columns", - "order_columns", - "label_columns", - "description_columns", - "list_title", - "none" - ], - "type": "string" + } }, - "type": "array" + "description": "Items from Model" }, - "order_column": { - "type": "string" + "400": { + "$ref": "#/components/responses/400" }, - "order_direction": { - "enum": ["asc", "desc"], - "type": "string" + "401": { + "$ref": "#/components/responses/401" }, - "page": { - "type": "integer" + "422": { + "$ref": 
"#/components/responses/422" }, - "page_size": { - "type": "integer" + "500": { + "$ref": "#/components/responses/500" } }, - "type": "object" - }, - "get_recent_activity_schema": { - "properties": { - "actions": { - "items": { - "type": "string" - }, - "type": "array" - }, - "distinct": { - "type": "boolean" - }, - "page": { - "type": "number" - }, - "page_size": { - "type": "number" + "security": [ + { + "jwt": [] } - }, - "type": "object" + ], + "summary": "Get a list of charts", + "tags": ["Charts"] }, - "get_related_schema": { - "properties": { - "filter": { - "type": "string" + "post": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChartRestApi.post" + } + } }, - "include_ids": { - "items": { - "type": "integer" + "description": "Chart schema", + "required": true + }, + "responses": { + "201": { + "content": { + "application/json": { + "schema": { + "properties": { + "id": { + "type": "number" + }, + "result": { + "$ref": "#/components/schemas/ChartRestApi.post" + } + }, + "type": "object" + } + } }, - "type": "array" + "description": "Chart added" }, - "page": { - "type": "integer" + "400": { + "$ref": "#/components/responses/400" }, - "page_size": { - "type": "integer" - } - }, - "type": "object" - }, - "queries_get_updated_since_schema": { - "properties": { - "last_updated_ms": { - "type": "number" - } - }, - "required": ["last_updated_ms"], - "type": "object" - }, - "screenshot_query_schema": { - "properties": { - "force": { - "type": "boolean" + "401": { + "$ref": "#/components/responses/401" }, - "thumb_size": { - "items": { - "type": "integer" - }, - "type": "array" + "403": { + "$ref": "#/components/responses/403" }, - "window_size": { - "items": { - "type": "integer" - }, - "type": "array" - } - }, - "type": "object" - }, - "sql_lab_get_results_schema": { - "properties": { - "key": { - "type": "string" + "422": { + "$ref": "#/components/responses/422" + }, + "500": { + "$ref": 
"#/components/responses/500" } }, - "required": ["key"], - "type": "object" - }, - "thumbnail_query_schema": { - "properties": { - "force": { - "type": "boolean" + "security": [ + { + "jwt": [] } - }, - "type": "object" + ], + "summary": "Create a new chart", + "tags": ["Charts"] } }, - "securitySchemes": { - "jwt": { - "bearerFormat": "JWT", - "scheme": "bearer", - "type": "http" - }, - "jwt_refresh": { - "bearerFormat": "JWT", - "scheme": "bearer", - "type": "http" - } - } - }, - "info": { - "description": "Superset", - "title": "Superset", - "version": "v1" - }, - "openapi": "3.0.2", - "paths": { - "/api/v1/advanced_data_type/convert": { + "/api/v1/chart/_info": { "get": { + "description": "Get metadata information about this API resource", "parameters": [ { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/advanced_data_type_convert_schema" + "$ref": "#/components/schemas/get_info_schema" } } }, @@ -9741,11 +12656,47 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AdvancedDataTypeSchema" + "properties": { + "add_columns": { + "type": "object" + }, + "edit_columns": { + "type": "object" + }, + "filters": { + "properties": { + "column_name": { + "items": { + "properties": { + "name": { + "description": "The filter name. Will be translated by babel", + "type": "string" + }, + "operator": { + "description": "The filter operation key to use on list filters", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + }, + "permissions": { + "description": "The user permissions for this API resource", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" } } }, - "description": "AdvancedDataTypeResponse object has been returned." 
+ "description": "Item from Model" }, "400": { "$ref": "#/components/responses/400" @@ -9753,8 +12704,8 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" + "422": { + "$ref": "#/components/responses/422" }, "500": { "$ref": "#/components/responses/500" @@ -9765,38 +12716,51 @@ "jwt": [] } ], - "summary": "Returns a AdvancedDataTypeResponse object populated with the passed in args.", - "tags": ["Advanced Data Type"] + "summary": "Get metadata information about this API resource", + "tags": ["Charts"] } }, - "/api/v1/advanced_data_type/types": { - "get": { - "description": "Returns a list of available advanced data types.", + "/api/v1/chart/data": { + "post": { + "description": "Takes a query context constructed in the client and returns payload data response for the given query.", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChartDataQueryContextSchema" + } + } + }, + "description": "A query context consists of a datasource from which to fetch data and one or many query objects.", + "required": true + }, "responses": { "200": { "content": { "application/json": { "schema": { - "properties": { - "result": { - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" + "$ref": "#/components/schemas/ChartDataResponseSchema" } } }, - "description": "a successful return of the available advanced data types has taken place." 
+ "description": "Query result" + }, + "202": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChartDataAsyncResponseSchema" + } + } + }, + "description": "Async job details" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" - }, "500": { "$ref": "#/components/responses/500" } @@ -9806,23 +12770,21 @@ "jwt": [] } ], - "tags": ["Advanced Data Type"] + "summary": "Return payload data response for the given query", + "tags": ["Charts"] } }, - "/api/v1/annotation_layer/": { - "delete": { - "description": "Deletes multiple annotation layers in a bulk operation.", + "/api/v1/chart/data/{cache_key}": { + "get": { + "description": "Takes a query context cache key and returns payload data response for the given query.", "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_delete_ids_schema" - } - } - }, - "in": "query", - "name": "q" + "in": "path", + "name": "cache_key", + "required": true, + "schema": { + "type": "string" + } } ], "responses": { @@ -9830,16 +12792,14 @@ "content": { "application/json": { "schema": { - "properties": { - "message": { - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/ChartDataResponseSchema" } } }, - "description": "CSS templates bulk delete" + "description": "Query result" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" @@ -9859,16 +12819,18 @@ "jwt": [] } ], - "tags": ["Annotation Layers"] - }, + "summary": "Return payload data response for the given query", + "tags": ["Charts"] + } + }, + "/api/v1/chart/export/": { "get": { - "description": "Get a list of Annotation layers, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", "parameters": [ { "content": { "application/json": { "schema": { - 
"$ref": "#/components/schemas/get_list_schema" + "$ref": "#/components/schemas/get_export_ids_schema" } } }, @@ -9879,72 +12841,14 @@ "responses": { "200": { "content": { - "application/json": { + "application/zip": { "schema": { - "properties": { - "count": { - "description": "The total record count on the backend", - "type": "number" - }, - "description_columns": { - "properties": { - "column_name": { - "description": "The description for the column name. Will be translated by babel", - "example": "A Nice description for the column", - "type": "string" - } - }, - "type": "object" - }, - "ids": { - "description": "A list of item ids, useful when you don't know the column id", - "items": { - "type": "string" - }, - "type": "array" - }, - "label_columns": { - "properties": { - "column_name": { - "description": "The label for the column name. Will be translated by babel", - "example": "A Nice label for the column", - "type": "string" - } - }, - "type": "object" - }, - "list_columns": { - "description": "A list of columns", - "items": { - "type": "string" - }, - "type": "array" - }, - "list_title": { - "description": "A title to render. 
Will be translated by babel", - "example": "List Items", - "type": "string" - }, - "order_columns": { - "description": "A list of allowed columns to sort", - "items": { - "type": "string" - }, - "type": "array" - }, - "result": { - "description": "The result from the get list query", - "items": { - "$ref": "#/components/schemas/AnnotationLayerRestApi.get_list" - }, - "type": "array" - } - }, - "type": "object" + "format": "binary", + "type": "string" } } }, - "description": "Items from Model" + "description": "A zip file with chart(s), dataset(s) and database(s) as YAML" }, "400": { "$ref": "#/components/responses/400" @@ -9952,8 +12856,8 @@ "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -9964,39 +12868,35 @@ "jwt": [] } ], - "tags": ["Annotation Layers"] - }, - "post": { - "description": "Create an Annotation layer", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AnnotationLayerRestApi.post" + "summary": "Download multiple charts as YAML files", + "tags": ["Charts"] + } + }, + "/api/v1/chart/favorite_status/": { + "get": { + "parameters": [ + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_fav_star_ids_schema" + } } - } - }, - "description": "Annotation Layer schema", - "required": true - }, + }, + "in": "query", + "name": "q" + } + ], "responses": { - "201": { + "200": { "content": { "application/json": { "schema": { - "properties": { - "id": { - "type": "number" - }, - "result": { - "$ref": "#/components/schemas/AnnotationLayerRestApi.post" - } - }, - "type": "object" + "$ref": "#/components/schemas/GetFavStarIdsSchema" } } }, - "description": "Annotation added" + "description": "None" }, "400": { "$ref": "#/components/responses/400" @@ -10016,71 +12916,64 @@ "jwt": [] } ], - "tags": ["Annotation Layers"] + 
"summary": "Check favorited charts for current user", + "tags": ["Charts"] } }, - "/api/v1/annotation_layer/_info": { - "get": { - "description": "Get metadata information about this API resource", - "parameters": [ - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_info_schema" - } - } - }, - "in": "query", - "name": "q" - } - ], + "/api/v1/chart/import/": { + "post": { + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "properties": { + "formData": { + "description": "upload file (ZIP)", + "format": "binary", + "type": "string" + }, + "overwrite": { + "description": "overwrite existing charts?", + "type": "boolean" + }, + "passwords": { + "description": "JSON map of passwords for each featured database in the ZIP file. If the ZIP includes a database config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", + "type": "string" + }, + "ssh_tunnel_passwords": { + "description": "JSON map of passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", + "type": "string" + }, + "ssh_tunnel_private_key_passwords": { + "description": "JSON map of private_key_passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key_password\"}`.", + "type": "string" + }, + "ssh_tunnel_private_keys": { + "description": "JSON map of private_keys for each ssh_tunnel associated to a featured database in the ZIP file. 
If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key\"}`.",
+                  "type": "string"
+                }
+              },
+              "type": "object"
+            }
+          }
+        },
+        "required": true
+      },
       "responses": {
         "200": {
           "content": {
             "application/json": {
               "schema": {
                 "properties": {
-                  "add_columns": {
-                    "type": "object"
-                  },
-                  "edit_columns": {
-                    "type": "object"
-                  },
-                  "filters": {
-                    "properties": {
-                      "column_name": {
-                        "items": {
-                          "properties": {
-                            "name": {
-                              "description": "The filter name. Will be translated by babel",
-                              "type": "string"
-                            },
-                            "operator": {
-                              "description": "The filter operation key to use on list filters",
-                              "type": "string"
-                            }
-                          },
-                          "type": "object"
-                        },
-                        "type": "array"
-                      }
-                    },
-                    "type": "object"
-                  },
-                  "permissions": {
-                    "description": "The user permissions for this API resource",
-                    "items": {
-                      "type": "string"
-                    },
-                    "type": "array"
+                  "message": {
+                    "type": "string"
                   }
                 },
                 "type": "object"
               }
             }
           },
-          "description": "Item from Model"
+          "description": "Chart import result"
         },
         "400": {
           "$ref": "#/components/responses/400"
@@ -10100,11 +12993,13 @@
           "jwt": []
         }
       ],
-      "tags": ["Annotation Layers"]
+      "summary": "Import chart(s) with associated datasets and databases",
+      "tags": ["Charts"]
     }
   },
-  "/api/v1/annotation_layer/related/{column_name}": {
+  "/api/v1/chart/related/{column_name}": {
     "get": {
+      "description": "Get a list of all possible owners for a chart. Use `owners` as the `column_name` parameter",
       "parameters": [
         {
           "in": "path",
@@ -10155,15 +13050,58 @@
           "jwt": []
         }
       ],
-      "tags": ["Annotation Layers"]
+      "summary": "Get related fields data",
+      "tags": ["Charts"]
     }
   },
-  "/api/v1/annotation_layer/{pk}": {
+  "/api/v1/chart/warm_up_cache": {
+    "put": {
+      "description": "Warms up the cache for the chart. Note for slices a force refresh occurs. 
In terms of the `extra_filters` these can be obtained from records in the JSON encoded `logs.json` column associated with the `explore_json` action.", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChartCacheWarmUpRequestSchema" + } + } + }, + "description": "Identifies the chart to warm up cache for, and any additional dashboard or filter context to use.", + "required": true + }, + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChartCacheWarmUpResponseSchema" + } + } + }, + "description": "Each chart's warmup status" + }, + "400": { + "$ref": "#/components/responses/400" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Warm up the cache for the chart", + "tags": ["Charts"] + } + }, + "/api/v1/chart/{pk}": { "delete": { - "description": "Delete Annotation layer", "parameters": [ { - "description": "The annotation layer pk for this annotation", "in": "path", "name": "pk", "required": true, @@ -10186,7 +13124,13 @@ } } }, - "description": "Item deleted" + "description": "Chart delete" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" }, "404": { "$ref": "#/components/responses/404" @@ -10203,10 +13147,11 @@ "jwt": [] } ], - "tags": ["Annotation Layers"] + "summary": "Delete a chart", + "tags": ["Charts"] }, "get": { - "description": "Get an Annotation layer", + "description": "Get an item model", "parameters": [ { "in": "path", @@ -10259,7 +13204,7 @@ "type": "object" }, "result": { - "$ref": "#/components/schemas/AnnotationLayerRestApi.get" + "$ref": "#/components/schemas/ChartRestApi.get" }, "show_columns": { "description": "A list of columns", @@ -10301,13 +13246,12 @@ "jwt": [] } ], - "tags": ["Annotation Layers"] + "summary": "Get a chart detail information", + 
"tags": ["Charts"] }, "put": { - "description": "Update an Annotation layer", "parameters": [ { - "description": "The annotation layer pk for this annotation", "in": "path", "name": "pk", "required": true, @@ -10320,11 +13264,11 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/AnnotationLayerRestApi.put" + "$ref": "#/components/schemas/ChartRestApi.put" } } }, - "description": "Annotation schema", + "description": "Chart schema", "required": true }, "responses": { @@ -10337,14 +13281,157 @@ "type": "number" }, "result": { - "$ref": "#/components/schemas/AnnotationLayerRestApi.put" + "$ref": "#/components/schemas/ChartRestApi.put" } }, "type": "object" } } }, - "description": "Annotation changed" + "description": "Chart changed" + }, + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "422": { + "$ref": "#/components/responses/422" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Update a chart", + "tags": ["Charts"] + } + }, + "/api/v1/chart/{pk}/cache_screenshot/": { + "get": { + "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/screenshot_query_schema" + } + } + }, + "in": "query", + "name": "q" + } + ], + "responses": { + "202": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChartCacheScreenshotResponseSchema" + } + } + }, + "description": "Chart async result" + }, + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { 
+ "jwt": [] + } + ], + "summary": "Compute and cache a screenshot", + "tags": ["Charts"] + } + }, + "/api/v1/chart/{pk}/data/": { + "get": { + "description": "Takes a chart ID and uses the query context stored when the chart was saved to return payload data response.", + "parameters": [ + { + "description": "The chart ID", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "description": "The format in which the data should be returned", + "in": "query", + "name": "format", + "schema": { + "type": "string" + } + }, + { + "description": "The type in which the data should be returned", + "in": "query", + "name": "type", + "schema": { + "type": "string" + } + }, + { + "description": "Should the queries be forced to load from the source", + "in": "query", + "name": "force", + "schema": { + "type": "boolean" + } + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChartDataResponseSchema" + } + } + }, + "description": "Query result" + }, + "202": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChartDataAsyncResponseSchema" + } + } + }, + "description": "Async job details" }, "400": { "$ref": "#/components/responses/400" @@ -10352,9 +13439,6 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" - }, "500": { "$ref": "#/components/responses/500" } @@ -10364,32 +13448,20 @@ "jwt": [] } ], - "tags": ["Annotation Layers"] + "summary": "Return payload data response for a chart", + "tags": ["Charts"] } }, - "/api/v1/annotation_layer/{pk}/annotation/": { + "/api/v1/chart/{pk}/favorites/": { "delete": { - "description": "Deletes multiple annotation in a bulk operation.", "parameters": [ { - "description": "The annotation layer pk for this annotation", "in": "path", "name": "pk", "required": true, "schema": { "type": "integer" } - }, - { - "content": { - "application/json": { - 
"schema": { - "$ref": "#/components/schemas/get_delete_ids_schema" - } - } - }, - "in": "query", - "name": "q" } ], "responses": { @@ -10398,15 +13470,15 @@ "application/json": { "schema": { "properties": { - "message": { - "type": "string" + "result": { + "type": "object" } }, "type": "object" } } }, - "description": "Annotations bulk delete" + "description": "Chart removed from favorites" }, "401": { "$ref": "#/components/responses/401" @@ -10414,9 +13486,6 @@ "404": { "$ref": "#/components/responses/404" }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -10426,30 +13495,18 @@ "jwt": [] } ], - "tags": ["Annotation Layers"] + "summary": "Remove the chart from the user favorite list", + "tags": ["Charts"] }, - "get": { - "description": "Get a list of Annotation layers, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", + "post": { "parameters": [ { - "description": "The annotation layer id for this annotation", "in": "path", "name": "pk", "required": true, "schema": { "type": "integer" } - }, - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_list_schema" - } - } - }, - "in": "query", - "name": "q" } ], "responses": { @@ -10458,39 +13515,21 @@ "application/json": { "schema": { "properties": { - "count": { - "description": "The total record count on the backend", - "type": "number" - }, - "ids": { - "description": "A list of annotation ids", - "items": { - "type": "string" - }, - "type": "array" - }, "result": { - "description": "The result from the get list query", - "items": { - "$ref": "#/components/schemas/AnnotationRestApi.get_list" - }, - "type": "array" + "type": "object" } }, "type": "object" } } }, - "description": "Items from Annotations" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Chart added to favorites" }, "401": { "$ref": "#/components/responses/401" }, - 
"422": { - "$ref": "#/components/responses/422" + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -10501,50 +13540,41 @@ "jwt": [] } ], - "tags": ["Annotation Layers"] - }, - "post": { - "description": "Create an Annotation layer", + "summary": "Mark the chart as favorite for the current user", + "tags": ["Charts"] + } + }, + "/api/v1/chart/{pk}/screenshot/{digest}/": { + "get": { "parameters": [ { - "description": "The annotation layer pk for this annotation", "in": "path", "name": "pk", "required": true, "schema": { "type": "integer" } + }, + { + "in": "path", + "name": "digest", + "required": true, + "schema": { + "type": "string" + } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AnnotationRestApi.post" - } - } - }, - "description": "Annotation schema", - "required": true - }, "responses": { - "201": { + "200": { "content": { - "application/json": { + "image/*": { "schema": { - "properties": { - "id": { - "type": "number" - }, - "result": { - "$ref": "#/components/schemas/AnnotationRestApi.post" - } - }, - "type": "object" + "format": "binary", + "type": "string" } } }, - "description": "Annotation added" + "description": "Chart thumbnail image" }, "400": { "$ref": "#/components/responses/400" @@ -10564,15 +13594,15 @@ "jwt": [] } ], - "tags": ["Annotation Layers"] + "summary": "Get a computed screenshot from cache", + "tags": ["Charts"] } }, - "/api/v1/annotation_layer/{pk}/annotation/{annotation_id}": { - "delete": { - "description": "Delete Annotation layer", + "/api/v1/chart/{pk}/thumbnail/{digest}/": { + "get": { + "description": "Compute or get already computed chart thumbnail from cache.", "parameters": [ { - "description": "The annotation layer pk for this annotation", "in": "path", "name": "pk", "required": true, @@ -10581,37 +13611,38 @@ } }, { - "description": "The annotation pk for this annotation", "in": "path", - "name": 
"annotation_id", + "name": "digest", "required": true, "schema": { - "type": "integer" + "type": "string" } } ], "responses": { "200": { "content": { - "application/json": { + "image/*": { "schema": { - "properties": { - "message": { - "type": "string" - } - }, - "type": "object" + "format": "binary", + "type": "string" } } }, - "description": "Item deleted" + "description": "Chart thumbnail image" + }, + "302": { + "description": "Redirects to the current digest" + }, + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" }, "404": { "$ref": "#/components/responses/404" }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -10621,34 +13652,18 @@ "jwt": [] } ], - "tags": ["Annotation Layers"] - }, - "get": { - "description": "Get an Annotation layer", + "summary": "Get chart thumbnail", + "tags": ["Charts"] + } + }, + "/api/v1/css_template/": { + "delete": { "parameters": [ - { - "description": "The annotation layer pk for this annotation", - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "description": "The annotation pk", - "in": "path", - "name": "annotation_id", - "required": true, - "schema": { - "type": "integer" - } - }, { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/get_item_schema" + "$ref": "#/components/schemas/get_delete_ids_schema" } } }, @@ -10662,22 +13677,15 @@ "application/json": { "schema": { "properties": { - "id": { - "description": "The item id", + "message": { "type": "string" - }, - "result": { - "$ref": "#/components/schemas/AnnotationRestApi.get" } }, "type": "object" } } }, - "description": "Item from Model" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "CSS templates bulk delete" }, "401": { "$ref": "#/components/responses/401" @@ -10697,59 +13705,93 @@ "jwt": [] } ], - "tags": ["Annotation Layers"] + "summary": "Bulk delete 
CSS templates", + "tags": ["CSS Templates"] }, - "put": { - "description": "Update an Annotation layer", + "get": { + "description": "Gets a list of CSS templates, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", "parameters": [ { - "description": "The annotation layer pk for this annotation", - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "description": "The annotation pk for this annotation", - "in": "path", - "name": "annotation_id", - "required": true, - "schema": { - "type": "integer" - } + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_list_schema" + } + } + }, + "in": "query", + "name": "q" } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/AnnotationRestApi.put" - } - } - }, - "description": "Annotation schema", - "required": true - }, "responses": { "200": { "content": { "application/json": { "schema": { "properties": { - "id": { + "count": { + "description": "The total record count on the backend", "type": "number" }, + "description_columns": { + "properties": { + "column_name": { + "description": "The description for the column name. Will be translated by babel", + "example": "A Nice description for the column", + "type": "string" + } + }, + "type": "object" + }, + "ids": { + "description": "A list of item ids, useful when you don't know the column id", + "items": { + "type": "string" + }, + "type": "array" + }, + "label_columns": { + "properties": { + "column_name": { + "description": "The label for the column name. Will be translated by babel", + "example": "A Nice label for the column", + "type": "string" + } + }, + "type": "object" + }, + "list_columns": { + "description": "A list of columns", + "items": { + "type": "string" + }, + "type": "array" + }, + "list_title": { + "description": "A title to render. 
Will be translated by babel", + "example": "List Items", + "type": "string" + }, + "order_columns": { + "description": "A list of allowed columns to sort", + "items": { + "type": "string" + }, + "type": "array" + }, "result": { - "$ref": "#/components/schemas/AnnotationRestApi.put" + "description": "The result from the get list query", + "items": { + "$ref": "#/components/schemas/CssTemplateRestApi.get_list" + }, + "type": "array" } }, "type": "object" } } }, - "description": "Annotation changed" + "description": "Items from Model" }, "400": { "$ref": "#/components/responses/400" @@ -10757,41 +13799,8 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" - }, - "500": { - "$ref": "#/components/responses/500" - } - }, - "security": [ - { - "jwt": [] - } - ], - "tags": ["Annotation Layers"] - } - }, - "/api/v1/assets/export/": { - "get": { - "description": "Returns a ZIP file with all the Superset assets (databases, datasets, charts, dashboards, saved queries) as YAML files.", - "responses": { - "200": { - "content": { - "application/zip": { - "schema": { - "format": "binary", - "type": "string" - } - } - }, - "description": "ZIP file" - }, - "401": { - "$ref": "#/components/responses/401" - }, - "404": { - "$ref": "#/components/responses/404" + "422": { + "$ref": "#/components/responses/422" }, "500": { "$ref": "#/components/responses/500" @@ -10802,59 +13811,39 @@ "jwt": [] } ], - "tags": ["Import/export"] - } - }, - "/api/v1/assets/import/": { + "summary": "Get a list of CSS templates", + "tags": ["CSS Templates"] + }, "post": { "requestBody": { "content": { - "multipart/form-data": { + "application/json": { "schema": { - "properties": { - "bundle": { - "description": "upload file (ZIP or JSON)", - "format": "binary", - "type": "string" - }, - "passwords": { - "description": "JSON map of passwords for each featured database in the ZIP file. 
If the ZIP includes a database config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", - "type": "string" - }, - "ssh_tunnel_passwords": { - "description": "JSON map of passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", - "type": "string" - }, - "ssh_tunnel_private_key_passwords": { - "description": "JSON map of private_key_passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key_password\"}`.", - "type": "string" - }, - "ssh_tunnel_private_keys": { - "description": "JSON map of private_keys for each ssh_tunnel associated to a featured database in the ZIP file. 
If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key\"}`.", - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/CssTemplateRestApi.post" } } }, + "description": "Model schema", "required": true }, "responses": { - "200": { + "201": { "content": { "application/json": { "schema": { "properties": { - "message": { + "id": { "type": "string" + }, + "result": { + "$ref": "#/components/schemas/CssTemplateRestApi.post" } }, "type": "object" } } }, - "description": "Assets import result" + "description": "Item inserted" }, "400": { "$ref": "#/components/responses/400" @@ -10874,20 +13863,24 @@ "jwt": [] } ], - "tags": ["Import/export"] + "summary": "Create a CSS template", + "tags": ["CSS Templates"] } }, - "/api/v1/async_event/": { + "/api/v1/css_template/_info": { "get": { - "description": "Reads off of the Redis events stream, using the user's JWT token and optional query params for last event received.", + "description": "Get metadata information about this API resource", "parameters": [ { - "description": "Last ID received by the client", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_info_schema" + } + } + }, "in": "query", - "name": "last_id", - "schema": { - "type": "string" - } + "name": "q" } ], "responses": { @@ -10896,35 +13889,37 @@ "application/json": { "schema": { "properties": { - "result": { - "items": { - "properties": { - "channel_id": { - "type": "string" - }, - "errors": { - "items": { - "type": "object" + "add_columns": { + "type": "object" + }, + "edit_columns": { + "type": "object" + }, + "filters": { + "properties": { + "column_name": { + "items": { + "properties": { + "name": { + "description": "The filter name. 
Will be translated by babel", + "type": "string" + }, + "operator": { + "description": "The filter operation key to use on list filters", + "type": "string" + } }, - "type": "array" - }, - "id": { - "type": "string" - }, - "job_id": { - "type": "string" - }, - "result_url": { - "type": "string" - }, - "status": { - "type": "string" + "type": "object" }, - "user_id": { - "type": "integer" - } - }, - "type": "object" + "type": "array" + } + }, + "type": "object" + }, + "permissions": { + "description": "The user permissions for this API resource", + "items": { + "type": "string" }, "type": "array" } @@ -10933,11 +13928,17 @@ } } }, - "description": "Async event results" + "description": "Item from Model" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, + "422": { + "$ref": "#/components/responses/422" + }, "500": { "$ref": "#/components/responses/500" } @@ -10947,64 +13948,53 @@ "jwt": [] } ], - "tags": ["AsyncEventsRestApi"] + "summary": "Get metadata information about this API resource", + "tags": ["CSS Templates"] } }, - "/api/v1/available_domains/": { + "/api/v1/css_template/related/{column_name}": { "get": { - "description": "Get all available domains", - "responses": { - "200": { + "parameters": [ + { + "in": "path", + "name": "column_name", + "required": true, + "schema": { + "type": "string" + } + }, + { "content": { "application/json": { "schema": { - "properties": { - "result": { - "$ref": "#/components/schemas/AvailableDomainsSchema" - } - }, - "type": "object" + "$ref": "#/components/schemas/get_related_schema" } - } - }, - "description": "a list of available domains" - }, - "401": { - "$ref": "#/components/responses/401" - }, - "403": { - "$ref": "#/components/responses/403" - } - }, - "security": [ - { - "jwt": [] - } - ], - "tags": ["Available Domains"] - } - }, - "/api/v1/cachekey/invalidate": { - "post": { - "description": "Takes a list of datasources, finds the associated cache records and 
invalidates them and removes the database records", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CacheInvalidationRequestSchema" - } - } - }, - "description": "A list of datasources uuid or the tuples of database and datasource names", - "required": true - }, + } + }, + "in": "query", + "name": "q" + } + ], "responses": { - "201": { - "description": "cache was successfully invalidated" + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RelatedResponseSchema" + } + } + }, + "description": "Related column data" }, "400": { "$ref": "#/components/responses/400" }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + }, "500": { "$ref": "#/components/responses/500" } @@ -11014,23 +14004,20 @@ "jwt": [] } ], - "tags": ["CacheRestApi"] + "summary": "Get related fields data", + "tags": ["CSS Templates"] } }, - "/api/v1/chart/": { + "/api/v1/css_template/{pk}": { "delete": { - "description": "Deletes multiple Charts in a bulk operation.", "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_delete_ids_schema" - } - } - }, - "in": "query", - "name": "q" + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } } ], "responses": { @@ -11047,13 +14034,7 @@ } } }, - "description": "Charts bulk delete" - }, - "401": { - "$ref": "#/components/responses/401" - }, - "403": { - "$ref": "#/components/responses/403" + "description": "Item deleted" }, "404": { "$ref": "#/components/responses/404" @@ -11070,16 +14051,25 @@ "jwt": [] } ], - "tags": ["Charts"] + "summary": "Delete a CSS template", + "tags": ["CSS Templates"] }, "get": { - "description": "Get a list of charts, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", + "description": "Get an item model", "parameters": [ + 
{ + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/get_list_schema" + "$ref": "#/components/schemas/get_item_schema" } } }, @@ -11093,10 +14083,6 @@ "application/json": { "schema": { "properties": { - "count": { - "description": "The total record count on the backend", - "type": "number" - }, "description_columns": { "properties": { "column_name": { @@ -11107,12 +14093,9 @@ }, "type": "object" }, - "ids": { - "description": "A list of item ids, useful when you don't know the column id", - "items": { - "type": "string" - }, - "type": "array" + "id": { + "description": "The item id", + "type": "string" }, "label_columns": { "properties": { @@ -11124,38 +14107,27 @@ }, "type": "object" }, - "list_columns": { + "result": { + "$ref": "#/components/schemas/CssTemplateRestApi.get" + }, + "show_columns": { "description": "A list of columns", "items": { "type": "string" }, "type": "array" }, - "list_title": { + "show_title": { "description": "A title to render. 
Will be translated by babel", - "example": "List Items", + "example": "Show Item Details", "type": "string" - }, - "order_columns": { - "description": "A list of allowed columns to sort", - "items": { - "type": "string" - }, - "type": "array" - }, - "result": { - "description": "The result from the get list query", - "items": { - "$ref": "#/components/schemas/ChartRestApi.get_list" - }, - "type": "array" } }, "type": "object" } } }, - "description": "Items from Model" + "description": "Item from Model" }, "400": { "$ref": "#/components/responses/400" @@ -11163,6 +14135,9 @@ "401": { "$ref": "#/components/responses/401" }, + "404": { + "$ref": "#/components/responses/404" + }, "422": { "$ref": "#/components/responses/422" }, @@ -11175,39 +14150,46 @@ "jwt": [] } ], - "tags": ["Charts"] + "summary": "Get a CSS template", + "tags": ["CSS Templates"] }, - "post": { - "description": "Create a new Chart.", + "put": { + "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + } + ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ChartRestApi.post" + "$ref": "#/components/schemas/CssTemplateRestApi.put" } } }, - "description": "Chart schema", + "description": "Model schema", "required": true }, "responses": { - "201": { + "200": { "content": { "application/json": { "schema": { "properties": { - "id": { - "type": "number" - }, "result": { - "$ref": "#/components/schemas/ChartRestApi.post" + "$ref": "#/components/schemas/CssTemplateRestApi.put" } }, "type": "object" } } }, - "description": "Chart added" + "description": "Item changed" }, "400": { "$ref": "#/components/responses/400" @@ -11215,8 +14197,8 @@ "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" + "404": { + "$ref": "#/components/responses/404" }, "422": { "$ref": "#/components/responses/422" @@ -11230,18 +14212,73 @@ "jwt": [] } ], - "tags": ["Charts"] + 
"summary": "Update a CSS template", + "tags": ["CSS Templates"] } }, - "/api/v1/chart/_info": { + "/api/v1/dashboard/": { + "delete": { + "parameters": [ + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_delete_ids_schema" + } + } + }, + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" + } + } + }, + "description": "Dashboard bulk delete" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "422": { + "$ref": "#/components/responses/422" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Bulk delete dashboards", + "tags": ["Dashboards"] + }, "get": { - "description": "Several metadata information about chart API endpoints.", + "description": "Gets a list of dashboards, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", "parameters": [ { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/get_info_schema" + "$ref": "#/components/schemas/get_list_schema" } } }, @@ -11255,46 +14292,69 @@ "application/json": { "schema": { "properties": { - "add_columns": { - "type": "object" + "count": { + "description": "The total record count on the backend", + "type": "number" }, - "edit_columns": { + "description_columns": { + "properties": { + "column_name": { + "description": "The description for the column name. 
Will be translated by babel", + "example": "A Nice description for the column", + "type": "string" + } + }, "type": "object" }, - "filters": { + "ids": { + "description": "A list of item ids, useful when you don't know the column id", + "items": { + "type": "string" + }, + "type": "array" + }, + "label_columns": { "properties": { "column_name": { - "items": { - "properties": { - "name": { - "description": "The filter name. Will be translated by babel", - "type": "string" - }, - "operator": { - "description": "The filter operation key to use on list filters", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" + "description": "The label for the column name. Will be translated by babel", + "example": "A Nice label for the column", + "type": "string" } }, "type": "object" }, - "permissions": { - "description": "The user permissions for this API resource", + "list_columns": { + "description": "A list of columns", + "items": { + "type": "string" + }, + "type": "array" + }, + "list_title": { + "description": "A title to render. 
Will be translated by babel", + "example": "List Items", + "type": "string" + }, + "order_columns": { + "description": "A list of allowed columns to sort", "items": { "type": "string" }, "type": "array" + }, + "result": { + "description": "The result from the get list query", + "items": { + "$ref": "#/components/schemas/DashboardRestApi.get_list" + }, + "type": "array" } }, "type": "object" } } }, - "description": "Item from Model" + "description": "Items from Model" }, "400": { "$ref": "#/components/responses/400" @@ -11314,43 +14374,39 @@ "jwt": [] } ], - "tags": ["Charts"] - } - }, - "/api/v1/chart/data": { + "summary": "Get a list of dashboards", + "tags": ["Dashboards"] + }, "post": { - "description": "Takes a query context constructed in the client and returns payload data response for the given query.", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ChartDataQueryContextSchema" + "$ref": "#/components/schemas/DashboardRestApi.post" } } }, - "description": "A query context consists of a datasource from which to fetch data and one or many query objects.", + "description": "Dashboard schema", "required": true }, "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ChartDataResponseSchema" - } - } - }, - "description": "Query result" - }, - "202": { + "201": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ChartDataAsyncResponseSchema" + "properties": { + "id": { + "type": "number" + }, + "result": { + "$ref": "#/components/schemas/DashboardRestApi.post" + } + }, + "type": "object" } } }, - "description": "Async job details" + "description": "Dashboard added" }, "400": { "$ref": "#/components/responses/400" @@ -11358,6 +14414,9 @@ "401": { "$ref": "#/components/responses/401" }, + "404": { + "$ref": "#/components/responses/404" + }, "500": { "$ref": "#/components/responses/500" } @@ -11367,20 +14426,24 @@ "jwt": [] } ], - 
"tags": ["Charts"] + "summary": "Create a new dashboard", + "tags": ["Dashboards"] } }, - "/api/v1/chart/data/{cache_key}": { + "/api/v1/dashboard/_info": { "get": { - "description": "Takes a query context cache key and returns payload data response for the given query.", + "description": "Get metadata information about this API resource", "parameters": [ { - "in": "path", - "name": "cache_key", - "required": true, - "schema": { - "type": "string" - } + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_info_schema" + } + } + }, + "in": "query", + "name": "q" } ], "responses": { @@ -11388,11 +14451,47 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ChartDataResponseSchema" + "properties": { + "add_columns": { + "type": "object" + }, + "edit_columns": { + "type": "object" + }, + "filters": { + "properties": { + "column_name": { + "items": { + "properties": { + "name": { + "description": "The filter name. Will be translated by babel", + "type": "string" + }, + "operator": { + "description": "The filter operation key to use on list filters", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + }, + "permissions": { + "description": "The user permissions for this API resource", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" } } }, - "description": "Query result" + "description": "Item from Model" }, "400": { "$ref": "#/components/responses/400" @@ -11400,9 +14499,6 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" - }, "422": { "$ref": "#/components/responses/422" }, @@ -11415,12 +14511,12 @@ "jwt": [] } ], - "tags": ["Charts"] + "summary": "Get metadata information about this API resource", + "tags": ["Dashboards"] } }, - "/api/v1/chart/export/": { + "/api/v1/dashboard/export/": { "get": { - "description": "Exports multiple charts and downloads them as YAML files", 
"parameters": [ { "content": { @@ -11437,14 +14533,13 @@ "responses": { "200": { "content": { - "application/zip": { + "text/plain": { "schema": { - "format": "binary", "type": "string" } } }, - "description": "A zip file with chart(s), dataset(s) and database(s) as YAML" + "description": "Dashboard export" }, "400": { "$ref": "#/components/responses/400" @@ -11455,6 +14550,9 @@ "404": { "$ref": "#/components/responses/404" }, + "422": { + "$ref": "#/components/responses/422" + }, "500": { "$ref": "#/components/responses/500" } @@ -11464,12 +14562,12 @@ "jwt": [] } ], - "tags": ["Charts"] + "summary": "Download multiple dashboards as YAML files", + "tags": ["Dashboards"] } }, - "/api/v1/chart/favorite_status/": { + "/api/v1/dashboard/favorite_status/": { "get": { - "description": "Check favorited dashboards for current user", "parameters": [ { "content": { @@ -11512,10 +14610,11 @@ "jwt": [] } ], - "tags": ["Charts"] + "summary": "Check favorited dashboards for current user", + "tags": ["Dashboards"] } }, - "/api/v1/chart/import/": { + "/api/v1/dashboard/import/": { "post": { "requestBody": { "content": { @@ -11523,12 +14622,12 @@ "schema": { "properties": { "formData": { - "description": "upload file (ZIP)", + "description": "upload file (ZIP or JSON)", "format": "binary", "type": "string" }, "overwrite": { - "description": "overwrite existing charts?", + "description": "overwrite existing dashboards?", "type": "boolean" }, "passwords": { @@ -11568,7 +14667,7 @@ } } }, - "description": "Chart import result" + "description": "Dashboard import result" }, "400": { "$ref": "#/components/responses/400" @@ -11588,31 +14687,20 @@ "jwt": [] } ], - "tags": ["Charts"] + "summary": "Import dashboard(s) with associated charts/datasets/databases", + "tags": ["Dashboards"] } }, - "/api/v1/chart/related/{column_name}": { + "/api/v1/dashboard/permalink/{key}": { "get": { - "description": "Get a list of all possible owners for a chart. 
Use `owners` has the `column_name` parameter", "parameters": [ { "in": "path", - "name": "column_name", + "name": "key", "required": true, "schema": { "type": "string" } - }, - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_related_schema" - } - } - }, - "in": "query", - "name": "q" } ], "responses": { @@ -11620,11 +14708,17 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/RelatedResponseSchema" + "properties": { + "state": { + "description": "The stored state", + "type": "object" + } + }, + "type": "object" } } }, - "description": "Related column data" + "description": "Returns the stored state." }, "400": { "$ref": "#/components/responses/400" @@ -11635,6 +14729,9 @@ "404": { "$ref": "#/components/responses/404" }, + "422": { + "$ref": "#/components/responses/422" + }, "500": { "$ref": "#/components/responses/500" } @@ -11644,37 +14741,51 @@ "jwt": [] } ], - "tags": ["Charts"] + "summary": "Get dashboard's permanent link state", + "tags": ["Dashboard Permanent Link"] } }, - "/api/v1/chart/warm_up_cache": { - "put": { - "description": "Warms up the cache for the chart. Note for slices a force refresh occurs. 
In terms of the `extra_filters` these can be obtained from records in the JSON encoded `logs.json` column associated with the `explore_json` action.", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ChartCacheWarmUpRequestSchema" - } + "/api/v1/dashboard/related/{column_name}": { + "get": { + "description": "Get a list of all possible owners for a dashboard.", + "parameters": [ + { + "in": "path", + "name": "column_name", + "required": true, + "schema": { + "type": "string" } }, - "description": "Identifies the chart to warm up cache for, and any additional dashboard or filter context to use.", - "required": true - }, + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_related_schema" + } + } + }, + "in": "query", + "name": "q" + } + ], "responses": { "200": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ChartCacheWarmUpResponseSchema" + "$ref": "#/components/schemas/RelatedResponseSchema" } } }, - "description": "Each chart's warmup status" + "description": "Related column data" }, "400": { "$ref": "#/components/responses/400" }, + "401": { + "$ref": "#/components/responses/401" + }, "404": { "$ref": "#/components/responses/404" }, @@ -11687,20 +14798,20 @@ "jwt": [] } ], - "summary": "Warms up the cache for the chart", - "tags": ["Charts"] + "summary": "Get related fields data", + "tags": ["Dashboards"] } }, - "/api/v1/chart/{pk}": { - "delete": { - "description": "Deletes a Chart.", + "/api/v1/dashboard/{id_or_slug}": { + "get": { "parameters": [ { + "description": "Either the id of the dashboard, or its slug", "in": "path", - "name": "pk", + "name": "id_or_slug", "required": true, "schema": { - "type": "integer" + "type": "string" } } ], @@ -11710,15 +14821,18 @@ "application/json": { "schema": { "properties": { - "message": { - "type": "string" + "result": { + "$ref": "#/components/schemas/DashboardGetResponseSchema" } }, "type": 
"object" } } }, - "description": "Chart delete" + "description": "Dashboard" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" @@ -11728,12 +14842,6 @@ }, "404": { "$ref": "#/components/responses/404" - }, - "422": { - "$ref": "#/components/responses/422" - }, - "500": { - "$ref": "#/components/responses/500" } }, "security": [ @@ -11741,29 +14849,20 @@ "jwt": [] } ], - "tags": ["Charts"] - }, - "get": { - "description": "Get a chart detail information.", - "parameters": [ - { - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_item_schema" - } - } - }, - "in": "query", - "name": "q" + "summary": "Get a dashboard detail information", + "tags": ["Dashboards"] + } + }, + "/api/v1/dashboard/{id_or_slug}/charts": { + "get": { + "parameters": [ + { + "in": "path", + "name": "id_or_slug", + "required": true, + "schema": { + "type": "string" + } } ], "responses": { @@ -11772,51 +14871,18 @@ "application/json": { "schema": { "properties": { - "description_columns": { - "properties": { - "column_name": { - "description": "The description for the column name. Will be translated by babel", - "example": "A Nice description for the column", - "type": "string" - } - }, - "type": "object" - }, - "id": { - "description": "The item id", - "type": "string" - }, - "label_columns": { - "properties": { - "column_name": { - "description": "The label for the column name. Will be translated by babel", - "example": "A Nice label for the column", - "type": "string" - } - }, - "type": "object" - }, "result": { - "$ref": "#/components/schemas/ChartRestApi.get" - }, - "show_columns": { - "description": "A list of columns", "items": { - "type": "string" + "$ref": "#/components/schemas/ChartEntityResponseSchema" }, "type": "array" - }, - "show_title": { - "description": "A title to render. 
Will be translated by babel", - "example": "Show Item Details", - "type": "string" } }, "type": "object" } } }, - "description": "Item from Model" + "description": "Dashboard chart definitions" }, "400": { "$ref": "#/components/responses/400" @@ -11824,14 +14890,11 @@ "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" - }, - "422": { - "$ref": "#/components/responses/422" - }, - "500": { - "$ref": "#/components/responses/500" } }, "security": [ @@ -11839,17 +14902,20 @@ "jwt": [] } ], - "tags": ["Charts"] - }, - "put": { - "description": "Changes a Chart.", + "summary": "Get a dashboard's chart definitions.", + "tags": ["Dashboards"] + } + }, + "/api/v1/dashboard/{id_or_slug}/copy/": { + "post": { "parameters": [ { + "description": "The dashboard id or slug", "in": "path", - "name": "pk", + "name": "id_or_slug", "required": true, "schema": { - "type": "integer" + "type": "string" } } ], @@ -11857,11 +14923,10 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ChartRestApi.put" + "$ref": "#/components/schemas/DashboardCopySchema" } } }, - "description": "Chart schema", "required": true }, "responses": { @@ -11873,15 +14938,15 @@ "id": { "type": "number" }, - "result": { - "$ref": "#/components/schemas/ChartRestApi.put" + "last_modified_time": { + "type": "number" } }, "type": "object" } } }, - "description": "Chart changed" + "description": "Id of new dashboard and last modified time" }, "400": { "$ref": "#/components/responses/400" @@ -11895,9 +14960,6 @@ "404": { "$ref": "#/components/responses/404" }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -11907,43 +14969,42 @@ "jwt": [] } ], - "tags": ["Charts"] + "summary": "Create a copy of an existing dashboard", + "tags": ["Dashboards"] } }, - "/api/v1/chart/{pk}/cache_screenshot/": { + "/api/v1/dashboard/{id_or_slug}/datasets": { 
"get": { - "description": "Compute and cache a screenshot.", + "description": "Returns a list of a dashboard's datasets. Each dataset includes only the information necessary to render the dashboard's charts.", "parameters": [ { + "description": "Either the id of the dashboard, or its slug", "in": "path", - "name": "pk", + "name": "id_or_slug", "required": true, "schema": { - "type": "integer" + "type": "string" } - }, - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/screenshot_query_schema" - } - } - }, - "in": "query", - "name": "q" } ], "responses": { - "202": { + "200": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ChartCacheScreenshotResponseSchema" + "properties": { + "result": { + "items": { + "$ref": "#/components/schemas/DashboardDatasetSchema" + }, + "type": "array" + } + }, + "type": "object" } } }, - "description": "Chart async result" + "description": "Dashboard dataset definitions" }, "400": { "$ref": "#/components/responses/400" @@ -11951,11 +15012,11 @@ "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" - }, - "500": { - "$ref": "#/components/responses/500" } }, "security": [ @@ -11963,45 +15024,21 @@ "jwt": [] } ], - "tags": ["Charts"] + "summary": "Get dashboard's datasets", + "tags": ["Dashboards"] } }, - "/api/v1/chart/{pk}/data/": { - "get": { - "description": "Takes a chart ID and uses the query context stored when the chart was saved to return payload data response.", + "/api/v1/dashboard/{id_or_slug}/embedded": { + "delete": { "parameters": [ { - "description": "The chart ID", + "description": "The dashboard id or slug", "in": "path", - "name": "pk", + "name": "id_or_slug", "required": true, - "schema": { - "type": "integer" - } - }, - { - "description": "The format in which the data should be returned", - "in": "query", - "name": "format", - "schema": { - "type": 
"string" - } - }, - { - "description": "The type in which the data should be returned", - "in": "query", - "name": "type", "schema": { "type": "string" } - }, - { - "description": "Should the queries be forced to load from the source", - "in": "query", - "name": "force", - "schema": { - "type": "boolean" - } } ], "responses": { @@ -12009,24 +15046,16 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ChartDataResponseSchema" - } - } - }, - "description": "Query result" - }, - "202": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ChartDataAsyncResponseSchema" + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" } } }, - "description": "Async job details" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Successfully removed the configuration" }, "401": { "$ref": "#/components/responses/401" @@ -12040,19 +15069,18 @@ "jwt": [] } ], - "tags": ["Charts"] - } - }, - "/api/v1/chart/{pk}/favorites/": { - "delete": { - "description": "Remove the chart from the user favorite list", + "summary": "Delete a dashboard's embedded configuration", + "tags": ["Dashboards"] + }, + "get": { "parameters": [ { + "description": "The dashboard id or slug", "in": "path", - "name": "pk", + "name": "id_or_slug", "required": true, "schema": { - "type": "integer" + "type": "string" } } ], @@ -12063,21 +15091,18 @@ "schema": { "properties": { "result": { - "type": "object" + "$ref": "#/components/schemas/EmbeddedDashboardResponseSchema" } }, "type": "object" } } }, - "description": "Chart removed from favorites" + "description": "Result contains the embedded dashboard config" }, "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" - }, "500": { "$ref": "#/components/responses/500" } @@ -12087,20 +15112,32 @@ "jwt": [] } ], - "tags": ["Charts"] + "summary": "Get the dashboard's embedded configuration", + "tags": ["Dashboards"] }, 
"post": { - "description": "Marks the chart as favorite for the current user", "parameters": [ { + "description": "The dashboard id or slug", "in": "path", - "name": "pk", + "name": "id_or_slug", "required": true, "schema": { - "type": "integer" + "type": "string" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EmbeddedDashboardConfig" + } + } + }, + "description": "The embedded configuration to set", + "required": true + }, "responses": { "200": { "content": { @@ -12108,21 +15145,18 @@ "schema": { "properties": { "result": { - "type": "object" + "$ref": "#/components/schemas/EmbeddedDashboardResponseSchema" } }, "type": "object" } } }, - "description": "Chart added to favorites" + "description": "Successfully set the configuration" }, "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" - }, "500": { "$ref": "#/components/responses/500" } @@ -12132,51 +15166,52 @@ "jwt": [] } ], - "tags": ["Charts"] - } - }, - "/api/v1/chart/{pk}/screenshot/{digest}/": { - "get": { - "description": "Get a computed screenshot from cache.", + "summary": "Set a dashboard's embedded configuration", + "tags": ["Dashboards"] + }, + "put": { + "description": "Sets a dashboard's embedded configuration.", "parameters": [ { + "description": "The dashboard id or slug", "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "in": "path", - "name": "digest", + "name": "id_or_slug", "required": true, "schema": { "type": "string" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EmbeddedDashboardConfig" + } + } + }, + "description": "The embedded configuration to set", + "required": true + }, "responses": { "200": { "content": { - "image/*": { + "application/json": { "schema": { - "format": "binary", - "type": "string" + "properties": { + "result": { + "$ref": 
"#/components/schemas/EmbeddedDashboardResponseSchema" + } + }, + "type": "object" } } }, - "description": "Chart thumbnail image" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Successfully set the configuration" }, "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" - }, "500": { "$ref": "#/components/responses/500" } @@ -12186,24 +15221,17 @@ "jwt": [] } ], - "tags": ["Charts"] + "tags": ["Dashboards"] } }, - "/api/v1/chart/{pk}/thumbnail/{digest}/": { + "/api/v1/dashboard/{id_or_slug}/tabs": { "get": { - "description": "Compute or get already computed chart thumbnail from cache.", + "description": "Returns a list of a dashboard's tabs and dashboard's nested tree structure for associated tabs.", "parameters": [ { + "description": "Either the id of the dashboard, or its slug", "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "in": "path", - "name": "digest", + "name": "id_or_slug", "required": true, "schema": { "type": "string" @@ -12213,17 +15241,21 @@ "responses": { "200": { "content": { - "image/*": { + "application/json": { "schema": { - "format": "binary", - "type": "string" + "properties": { + "result": { + "items": { + "$ref": "#/components/schemas/TabsPayloadSchema" + }, + "type": "object" + } + }, + "type": "object" } } }, - "description": "Chart thumbnail image" - }, - "302": { - "description": "Redirects to the current digest" + "description": "Dashboard tabs" }, "400": { "$ref": "#/components/responses/400" @@ -12231,11 +15263,11 @@ "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" - }, - "500": { - "$ref": "#/components/responses/500" } }, "security": [ @@ -12243,23 +15275,20 @@ "jwt": [] } ], - "tags": ["Charts"] + "summary": "Get dashboard's tabs", + "tags": ["Dashboards"] } }, - "/api/v1/css_template/": { + 
"/api/v1/dashboard/{pk}": { "delete": { - "description": "Deletes multiple css templates in a bulk operation.", "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_delete_ids_schema" - } - } - }, - "in": "query", - "name": "q" + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } } ], "responses": { @@ -12276,11 +15305,14 @@ } } }, - "description": "CSS templates bulk delete" + "description": "Dashboard deleted" }, "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" }, @@ -12296,92 +15328,52 @@ "jwt": [] } ], - "tags": ["CSS Templates"] + "summary": "Delete a dashboard", + "tags": ["Dashboards"] }, - "get": { - "description": "Get a list of CSS templates, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", + "put": { "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_list_schema" - } - } - }, - "in": "query", - "name": "q" + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DashboardRestApi.put" + } + } + }, + "description": "Dashboard schema", + "required": true + }, "responses": { "200": { "content": { "application/json": { "schema": { "properties": { - "count": { - "description": "The total record count on the backend", + "id": { "type": "number" }, - "description_columns": { - "properties": { - "column_name": { - "description": "The description for the column name. 
Will be translated by babel", - "example": "A Nice description for the column", - "type": "string" - } - }, - "type": "object" - }, - "ids": { - "description": "A list of item ids, useful when you don't know the column id", - "items": { - "type": "string" - }, - "type": "array" - }, - "label_columns": { - "properties": { - "column_name": { - "description": "The label for the column name. Will be translated by babel", - "example": "A Nice label for the column", - "type": "string" - } - }, - "type": "object" - }, - "list_columns": { - "description": "A list of columns", - "items": { - "type": "string" - }, - "type": "array" - }, - "list_title": { - "description": "A title to render. Will be translated by babel", - "example": "List Items", - "type": "string" - }, - "order_columns": { - "description": "A list of allowed columns to sort", - "items": { - "type": "string" - }, - "type": "array" + "last_modified_time": { + "type": "number" }, "result": { - "description": "The result from the get list query", - "items": { - "$ref": "#/components/schemas/CssTemplateRestApi.get_list" - }, - "type": "array" + "$ref": "#/components/schemas/DashboardRestApi.put" } }, "type": "object" } } }, - "description": "Items from Model" + "description": "Dashboard changed" }, "400": { "$ref": "#/components/responses/400" @@ -12389,6 +15381,12 @@ "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, "422": { "$ref": "#/components/responses/422" }, @@ -12401,39 +15399,41 @@ "jwt": [] } ], - "tags": ["CSS Templates"] - }, + "summary": "Update a dashboard", + "tags": ["Dashboards"] + } + }, + "/api/v1/dashboard/{pk}/cache_dashboard_screenshot/": { "post": { - "description": "Create a CSS template", + "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + } + ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/CssTemplateRestApi.post" + "$ref": "#/components/schemas/DashboardScreenshotPostSchema" } } - }, - "description": "Model schema", - "required": true + } }, "responses": { - "201": { + "202": { "content": { "application/json": { "schema": { - "properties": { - "id": { - "type": "string" - }, - "result": { - "$ref": "#/components/schemas/CssTemplateRestApi.post" - } - }, - "type": "object" + "$ref": "#/components/schemas/DashboardCacheScreenshotResponseSchema" } } }, - "description": "Item inserted" + "description": "Dashboard async result" }, "400": { "$ref": "#/components/responses/400" @@ -12441,8 +15441,8 @@ "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -12453,23 +15453,20 @@ "jwt": [] } ], - "tags": ["CSS Templates"] + "summary": "Compute and cache a screenshot", + "tags": ["Dashboards"] } }, - "/api/v1/css_template/_info": { - "get": { - "description": "Get metadata information about this API resource", + "/api/v1/dashboard/{pk}/favorites/": { + "delete": { "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_info_schema" - } - } - }, - "in": "query", - "name": "q" + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } } ], "responses": { @@ -12478,55 +15475,21 @@ "application/json": { "schema": { "properties": { - "add_columns": { - "type": "object" - }, - "edit_columns": { - "type": "object" - }, - "filters": { - "properties": { - "column_name": { - "items": { - "properties": { - "name": { - "description": "The filter name. 
Will be translated by babel", - "type": "string" - }, - "operator": { - "description": "The filter operation key to use on list filters", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - } - }, + "result": { "type": "object" - }, - "permissions": { - "description": "The user permissions for this API resource", - "items": { - "type": "string" - }, - "type": "array" } }, "type": "object" } } }, - "description": "Item from Model" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Dashboard removed from favorites" }, "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -12537,30 +15500,18 @@ "jwt": [] } ], - "tags": ["CSS Templates"] - } - }, - "/api/v1/css_template/related/{column_name}": { - "get": { + "summary": "Remove the dashboard from the user favorite list", + "tags": ["Dashboards"] + }, + "post": { "parameters": [ { "in": "path", - "name": "column_name", + "name": "pk", "required": true, "schema": { - "type": "string" + "type": "integer" } - }, - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_related_schema" - } - } - }, - "in": "query", - "name": "q" } ], "responses": { @@ -12568,14 +15519,16 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/RelatedResponseSchema" + "properties": { + "result": { + "type": "object" + } + }, + "type": "object" } } }, - "description": "Related column data" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Dashboard added to favorites" }, "401": { "$ref": "#/components/responses/401" @@ -12592,12 +15545,12 @@ "jwt": [] } ], - "tags": ["CSS Templates"] + "summary": "Mark the dashboard as favorite for the current user", + "tags": ["Dashboards"] } }, - "/api/v1/css_template/{pk}": { - "delete": { - "description": "Delete CSS template", + 
"/api/v1/dashboard/{pk}/filter_state": { + "post": { "parameters": [ { "in": "path", @@ -12606,15 +15559,33 @@ "schema": { "type": "integer" } + }, + { + "in": "query", + "name": "tab_id", + "schema": { + "type": "integer" + } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TemporaryCachePostSchema" + } + } + }, + "required": true + }, "responses": { - "200": { + "201": { "content": { "application/json": { "schema": { "properties": { - "message": { + "key": { + "description": "The key to retrieve the value.", "type": "string" } }, @@ -12622,10 +15593,13 @@ } } }, - "description": "Item deleted" + "description": "The value was stored successfully." }, - "404": { - "$ref": "#/components/responses/404" + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" }, "422": { "$ref": "#/components/responses/422" @@ -12639,10 +15613,12 @@ "jwt": [] } ], - "tags": ["CSS Templates"] - }, - "get": { - "description": "Get a CSS template", + "summary": "Create a dashboard's filter state", + "tags": ["Dashboard Filter State"] + } + }, + "/api/v1/dashboard/{pk}/filter_state/{key}": { + "delete": { "parameters": [ { "in": "path", @@ -12653,15 +15629,13 @@ } }, { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_item_schema" - } - } - }, - "in": "query", - "name": "q" + "description": "The value key.", + "in": "path", + "name": "key", + "required": true, + "schema": { + "type": "string" + } } ], "responses": { @@ -12670,43 +15644,8 @@ "application/json": { "schema": { "properties": { - "description_columns": { - "properties": { - "column_name": { - "description": "The description for the column name. 
Will be translated by babel", - "example": "A Nice description for the column", - "type": "string" - } - }, - "type": "object" - }, - "id": { - "description": "The item id", - "type": "string" - }, - "label_columns": { - "properties": { - "column_name": { - "description": "The label for the column name. Will be translated by babel", - "example": "A Nice label for the column", - "type": "string" - } - }, - "type": "object" - }, - "result": { - "$ref": "#/components/schemas/CssTemplateRestApi.get" - }, - "show_columns": { - "description": "A list of columns", - "items": { - "type": "string" - }, - "type": "array" - }, - "show_title": { - "description": "A title to render. Will be translated by babel", - "example": "Show Item Details", + "message": { + "description": "The result of the operation", "type": "string" } }, @@ -12714,7 +15653,7 @@ } } }, - "description": "Item from Model" + "description": "Deleted the stored value." }, "400": { "$ref": "#/components/responses/400" @@ -12737,10 +15676,10 @@ "jwt": [] } ], - "tags": ["CSS Templates"] + "summary": "Delete a dashboard's filter state value", + "tags": ["Dashboard Filter State"] }, - "put": { - "description": "Update a CSS template", + "get": { "parameters": [ { "in": "path", @@ -12749,34 +15688,32 @@ "schema": { "type": "integer" } + }, + { + "in": "path", + "name": "key", + "required": true, + "schema": { + "type": "string" + } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/CssTemplateRestApi.put" - } - } - }, - "description": "Model schema", - "required": true - }, "responses": { "200": { "content": { "application/json": { "schema": { "properties": { - "result": { - "$ref": "#/components/schemas/CssTemplateRestApi.put" + "value": { + "description": "The stored value", + "type": "string" } }, "type": "object" } } }, - "description": "Item changed" + "description": "Returns the stored value." 
}, "400": { "$ref": "#/components/responses/400" @@ -12799,32 +15736,53 @@ "jwt": [] } ], - "tags": ["CSS Templates"] - } - }, - "/api/v1/dashboard/": { - "delete": { - "description": "Deletes multiple Dashboards in a bulk operation.", + "summary": "Get a dashboard's filter state value", + "tags": ["Dashboard Filter State"] + }, + "put": { "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_delete_ids_schema" - } - } - }, + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "in": "path", + "name": "key", + "required": true, + "schema": { + "type": "string" + } + }, + { "in": "query", - "name": "q" + "name": "tab_id", + "schema": { + "type": "integer" + } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TemporaryCachePutSchema" + } + } + }, + "required": true + }, "responses": { "200": { "content": { "application/json": { "schema": { "properties": { - "message": { + "key": { + "description": "The key to retrieve the value.", "type": "string" } }, @@ -12832,14 +15790,14 @@ } } }, - "description": "Dashboard bulk delete" + "description": "The value was stored successfully." 
+ }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, "404": { "$ref": "#/components/responses/404" }, @@ -12855,84 +15813,40 @@ "jwt": [] } ], - "tags": ["Dashboards"] - }, - "get": { - "description": "Get a list of dashboards, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", + "summary": "Update a dashboard's filter state value", + "tags": ["Dashboard Filter State"] + } + }, + "/api/v1/dashboard/{pk}/filters": { + "put": { "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_list_schema" - } - } - }, - "in": "query", - "name": "q" + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DashboardNativeFiltersConfigUpdateSchema" + } + } + }, + "description": "Native filters configuration", + "required": true + }, "responses": { "200": { "content": { "application/json": { "schema": { "properties": { - "count": { - "description": "The total record count on the backend", - "type": "number" - }, - "description_columns": { - "properties": { - "column_name": { - "description": "The description for the column name. Will be translated by babel", - "example": "A Nice description for the column", - "type": "string" - } - }, - "type": "object" - }, - "ids": { - "description": "A list of item ids, useful when you don't know the column id", - "items": { - "type": "string" - }, - "type": "array" - }, - "label_columns": { - "properties": { - "column_name": { - "description": "The label for the column name. 
Will be translated by babel", - "example": "A Nice label for the column", - "type": "string" - } - }, - "type": "object" - }, - "list_columns": { - "description": "A list of columns", - "items": { - "type": "string" - }, - "type": "array" - }, - "list_title": { - "description": "A title to render. Will be translated by babel", - "example": "List Items", - "type": "string" - }, - "order_columns": { - "description": "A list of allowed columns to sort", - "items": { - "type": "string" - }, - "type": "array" - }, "result": { - "description": "The result from the get list query", - "items": { - "$ref": "#/components/schemas/DashboardRestApi.get_list" - }, "type": "array" } }, @@ -12940,7 +15854,7 @@ } } }, - "description": "Items from Model" + "description": "Dashboard native filters updated" }, "400": { "$ref": "#/components/responses/400" @@ -12948,6 +15862,12 @@ "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, "422": { "$ref": "#/components/responses/422" }, @@ -12960,19 +15880,30 @@ "jwt": [] } ], + "summary": "Update native filters configuration for a dashboard.", "tags": ["Dashboards"] - }, + } + }, + "/api/v1/dashboard/{pk}/permalink": { "post": { - "description": "Create a new Dashboard.", + "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "string" + } + } + ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DashboardRestApi.post" + "$ref": "#/components/schemas/DashboardPermalinkStateSchema" } } }, - "description": "Dashboard schema", "required": true }, "responses": { @@ -12981,18 +15912,20 @@ "application/json": { "schema": { "properties": { - "id": { - "type": "number" + "key": { + "description": "The key to retrieve the permanent link data.", + "type": "string" }, - "result": { - "$ref": "#/components/schemas/DashboardRestApi.post" + "url": { + 
"description": "permanent link.", + "type": "string" } }, "type": "object" } } }, - "description": "Dashboard added" + "description": "The permanent link was stored successfully." }, "400": { "$ref": "#/components/responses/400" @@ -13000,8 +15933,8 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" + "422": { + "$ref": "#/components/responses/422" }, "500": { "$ref": "#/components/responses/500" @@ -13012,71 +15945,41 @@ "jwt": [] } ], - "tags": ["Dashboards"] + "summary": "Create a new dashboard's permanent link", + "tags": ["Dashboard Permanent Link"] } }, - "/api/v1/dashboard/_info": { + "/api/v1/dashboard/{pk}/screenshot/{digest}/": { "get": { - "description": "Several metadata information about dashboard API endpoints.", "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_info_schema" - } - } - }, - "in": "query", - "name": "q" + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "in": "path", + "name": "digest", + "required": true, + "schema": { + "type": "string" + } } ], "responses": { "200": { "content": { - "application/json": { + "image/*": { "schema": { - "properties": { - "add_columns": { - "type": "object" - }, - "edit_columns": { - "type": "object" - }, - "filters": { - "properties": { - "column_name": { - "items": { - "properties": { - "name": { - "description": "The filter name. 
Will be translated by babel", - "type": "string" - }, - "operator": { - "description": "The filter operation key to use on list filters", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - } - }, - "type": "object" - }, - "permissions": { - "description": "The user permissions for this API resource", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" + "format": "binary", + "type": "string" } } }, - "description": "Item from Model" + "description": "Dashboard thumbnail image" }, "400": { "$ref": "#/components/responses/400" @@ -13084,8 +15987,8 @@ "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -13096,18 +15999,36 @@ "jwt": [] } ], + "summary": "Get a computed screenshot from cache", "tags": ["Dashboards"] } }, - "/api/v1/dashboard/export/": { + "/api/v1/dashboard/{pk}/thumbnail/{digest}/": { "get": { - "description": "Exports multiple Dashboards and downloads them as YAML files.", + "description": "Computes async or get already computed dashboard thumbnail from cache.", "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "description": "A hex digest that makes this dashboard unique", + "in": "path", + "name": "digest", + "required": true, + "schema": { + "type": "string" + } + }, { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/get_export_ids_schema" + "$ref": "#/components/schemas/thumbnail_query_schema" } } }, @@ -13118,16 +16039,32 @@ "responses": { "200": { "content": { - "text/plain": { + "image/*": { "schema": { + "format": "binary", "type": "string" } } }, - "description": "Dashboard export" + "description": "Dashboard thumbnail image" }, - "400": { - "$ref": "#/components/responses/400" + "202": { + "content": { + "application/json": { + "schema": { + 
"properties": { + "message": { + "type": "string" + } + }, + "type": "object" + } + } + }, + "description": "Thumbnail does not exist on cache, fired async to compute" + }, + "302": { + "description": "Redirects to the current digest" }, "401": { "$ref": "#/components/responses/401" @@ -13147,18 +16084,19 @@ "jwt": [] } ], + "summary": "Get dashboard's thumbnail", "tags": ["Dashboards"] } }, - "/api/v1/dashboard/favorite_status/": { + "/api/v1/database/": { "get": { - "description": "Check favorited dashboards for current user", + "description": "Gets a list of databases, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", "parameters": [ { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/get_fav_star_ids_schema" + "$ref": "#/components/schemas/get_list_schema" } } }, @@ -13171,11 +16109,70 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GetFavStarIdsSchema" + "properties": { + "count": { + "description": "The total record count on the backend", + "type": "number" + }, + "description_columns": { + "properties": { + "column_name": { + "description": "The description for the column name. Will be translated by babel", + "example": "A Nice description for the column", + "type": "string" + } + }, + "type": "object" + }, + "ids": { + "description": "A list of item ids, useful when you don't know the column id", + "items": { + "type": "string" + }, + "type": "array" + }, + "label_columns": { + "properties": { + "column_name": { + "description": "The label for the column name. Will be translated by babel", + "example": "A Nice label for the column", + "type": "string" + } + }, + "type": "object" + }, + "list_columns": { + "description": "A list of columns", + "items": { + "type": "string" + }, + "type": "array" + }, + "list_title": { + "description": "A title to render. 
Will be translated by babel", + "example": "List Items", + "type": "string" + }, + "order_columns": { + "description": "A list of allowed columns to sort", + "items": { + "type": "string" + }, + "type": "array" + }, + "result": { + "description": "The result from the get list query", + "items": { + "$ref": "#/components/schemas/DatabaseRestApi.get_list" + }, + "type": "array" + } + }, + "type": "object" } } }, - "description": "None" + "description": "Items from Model" }, "400": { "$ref": "#/components/responses/400" @@ -13183,8 +16180,8 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" + "422": { + "$ref": "#/components/responses/422" }, "500": { "$ref": "#/components/responses/500" @@ -13195,63 +16192,39 @@ "jwt": [] } ], - "tags": ["Dashboards"] - } - }, - "/api/v1/dashboard/import/": { + "summary": "Get a list of databases", + "tags": ["Database"] + }, "post": { "requestBody": { "content": { - "multipart/form-data": { + "application/json": { "schema": { - "properties": { - "formData": { - "description": "upload file (ZIP or JSON)", - "format": "binary", - "type": "string" - }, - "overwrite": { - "description": "overwrite existing dashboards?", - "type": "boolean" - }, - "passwords": { - "description": "JSON map of passwords for each featured database in the ZIP file. If the ZIP includes a database config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", - "type": "string" - }, - "ssh_tunnel_passwords": { - "description": "JSON map of passwords for each ssh_tunnel associated to a featured database in the ZIP file. 
If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", - "type": "string" - }, - "ssh_tunnel_private_key_passwords": { - "description": "JSON map of private_key_passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key_password\"}`.", - "type": "string" - }, - "ssh_tunnel_private_keys": { - "description": "JSON map of private_keys for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key\"}`.", - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/DatabaseRestApi.post" } } }, + "description": "Database schema", "required": true }, "responses": { - "200": { + "201": { "content": { "application/json": { "schema": { "properties": { - "message": { - "type": "string" + "id": { + "type": "number" + }, + "result": { + "$ref": "#/components/schemas/DatabaseRestApi.post" } }, "type": "object" } } }, - "description": "Dashboard import result" + "description": "Database added" }, "400": { "$ref": "#/components/responses/400" @@ -13259,8 +16232,8 @@ "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -13271,20 +16244,24 @@ "jwt": [] } ], - "tags": ["Dashboards"] + "summary": "Create a new database", + "tags": ["Database"] } }, - "/api/v1/dashboard/permalink/{key}": { + "/api/v1/database/_info": { "get": { - "description": "Retrives dashboard state associated 
with a permanent link.", + "description": "Get metadata information about this API resource", "parameters": [ { - "in": "path", - "name": "key", - "required": true, - "schema": { - "type": "string" - } + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_info_schema" + } + } + }, + "in": "query", + "name": "q" } ], "responses": { @@ -13293,16 +16270,46 @@ "application/json": { "schema": { "properties": { - "state": { - "description": "The stored state", + "add_columns": { + "type": "object" + }, + "edit_columns": { + "type": "object" + }, + "filters": { + "properties": { + "column_name": { + "items": { + "properties": { + "name": { + "description": "The filter name. Will be translated by babel", + "type": "string" + }, + "operator": { + "description": "The filter operation key to use on list filters", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + } + }, "type": "object" + }, + "permissions": { + "description": "The user permissions for this API resource", + "items": { + "type": "string" + }, + "type": "array" } }, "type": "object" } } }, - "description": "Returns the stored state." 
+ "description": "Item from Model" }, "400": { "$ref": "#/components/responses/400" @@ -13310,9 +16317,6 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" - }, "422": { "$ref": "#/components/responses/422" }, @@ -13325,53 +16329,76 @@ "jwt": [] } ], - "tags": ["Dashboard Permanent Link"] + "summary": "Get metadata information about this API resource", + "tags": ["Database"] } }, - "/api/v1/dashboard/related/{column_name}": { + "/api/v1/database/available/": { "get": { - "description": "Get a list of all possible owners for a dashboard.", - "parameters": [ - { - "in": "path", - "name": "column_name", - "required": true, - "schema": { - "type": "string" - } - }, - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_related_schema" - } - } - }, - "in": "query", - "name": "q" - } - ], "responses": { "200": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/RelatedResponseSchema" + "items": { + "properties": { + "available_drivers": { + "description": "Installed drivers for the engine", + "items": { + "type": "string" + }, + "type": "array" + }, + "default_driver": { + "description": "Default driver for the engine", + "type": "string" + }, + "engine": { + "description": "Name of the SQLAlchemy engine", + "type": "string" + }, + "engine_information": { + "description": "Dict with public properties form the DB Engine", + "properties": { + "disable_ssh_tunneling": { + "description": "Whether the engine supports SSH Tunnels", + "type": "boolean" + }, + "supports_file_upload": { + "description": "Whether the engine supports file uploads", + "type": "boolean" + } + }, + "type": "object" + }, + "name": { + "description": "Name of the database", + "type": "string" + }, + "parameters": { + "description": "JSON schema defining the needed parameters", + "type": "object" + }, + "preferred": { + "description": "Is the database preferred?", + "type": "boolean" + }, 
+ "sqlalchemy_uri_placeholder": { + "description": "Example placeholder for the SQLAlchemy URI", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" } } }, - "description": "Related column data" + "description": "Database names" }, "400": { "$ref": "#/components/responses/400" }, - "401": { - "$ref": "#/components/responses/401" - }, - "404": { - "$ref": "#/components/responses/404" - }, "500": { "$ref": "#/components/responses/500" } @@ -13381,23 +16408,22 @@ "jwt": [] } ], - "tags": ["Dashboards"] + "summary": "Get names of databases currently available", + "tags": ["Database"] } }, - "/api/v1/dashboard/{id_or_slug}": { - "get": { - "description": "Get a dashboard detail information.", - "parameters": [ - { - "description": "Either the id of the dashboard, or its slug", - "in": "path", - "name": "id_or_slug", - "required": true, - "schema": { - "type": "string" + "/api/v1/database/columnar_metadata/": { + "post": { + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/ColumnarMetadataUploadFilePostSchema" + } } - } - ], + }, + "required": true + }, "responses": { "200": { "content": { @@ -13405,14 +16431,14 @@ "schema": { "properties": { "result": { - "$ref": "#/components/schemas/DashboardGetResponseSchema" + "$ref": "#/components/schemas/UploadFileMetadata" } }, "type": "object" } } }, - "description": "Dashboard" + "description": "Columnar upload response" }, "400": { "$ref": "#/components/responses/400" @@ -13420,11 +16446,11 @@ "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, "404": { "$ref": "#/components/responses/404" + }, + "500": { + "$ref": "#/components/responses/500" } }, "security": [ @@ -13432,22 +16458,22 @@ "jwt": [] } ], - "tags": ["Dashboards"] + "summary": "Upload a Columnar file and returns file metadata", + "tags": ["Database"] } }, - "/api/v1/dashboard/{id_or_slug}/charts": { - "get": { - "description": "Get the 
chart definitions for a given dashboard", - "parameters": [ - { - "in": "path", - "name": "id_or_slug", - "required": true, - "schema": { - "type": "string" + "/api/v1/database/csv_metadata/": { + "post": { + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/CSVMetadataUploadFilePostSchema" + } } - } - ], + }, + "required": true + }, "responses": { "200": { "content": { @@ -13455,17 +16481,14 @@ "schema": { "properties": { "result": { - "items": { - "$ref": "#/components/schemas/ChartEntityResponseSchema" - }, - "type": "array" + "$ref": "#/components/schemas/UploadFileMetadata" } }, "type": "object" } } }, - "description": "Dashboard chart definitions" + "description": "Columnar upload response" }, "400": { "$ref": "#/components/responses/400" @@ -13473,11 +16496,11 @@ "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, "404": { "$ref": "#/components/responses/404" + }, + "500": { + "$ref": "#/components/responses/500" } }, "security": [ @@ -13485,27 +16508,17 @@ "jwt": [] } ], - "tags": ["Dashboards"] + "summary": "Upload an CSV file and returns file metadata", + "tags": ["Database"] } }, - "/api/v1/dashboard/{id_or_slug}/copy/": { + "/api/v1/database/excel_metadata/": { "post": { - "parameters": [ - { - "description": "The dashboard id or slug", - "in": "path", - "name": "id_or_slug", - "required": true, - "schema": { - "type": "string" - } - } - ], "requestBody": { "content": { - "application/json": { + "multipart/form-data": { "schema": { - "$ref": "#/components/schemas/DashboardCopySchema" + "$ref": "#/components/schemas/ExcelMetadataUploadFilePostSchema" } } }, @@ -13517,18 +16530,15 @@ "application/json": { "schema": { "properties": { - "id": { - "type": "number" - }, - "last_modified_time": { - "type": "number" + "result": { + "$ref": "#/components/schemas/UploadFileMetadata" } }, "type": "object" } } }, - "description": "Id of new dashboard and last 
modified time" + "description": "Columnar upload response" }, "400": { "$ref": "#/components/responses/400" @@ -13536,9 +16546,6 @@ "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, "404": { "$ref": "#/components/responses/404" }, @@ -13551,42 +16558,110 @@ "jwt": [] } ], - "summary": "Makes a copy of an existing dashboard", - "tags": ["Dashboards"] + "summary": "Upload an Excel file and returns file metadata", + "tags": ["Database"] } }, - "/api/v1/dashboard/{id_or_slug}/datasets": { + "/api/v1/database/export/": { "get": { - "description": "Returns a list of a dashboard's datasets. Each dataset includes only the information necessary to render the dashboard's charts.", "parameters": [ { - "description": "Either the id of the dashboard, or its slug", - "in": "path", - "name": "id_or_slug", - "required": true, - "schema": { - "type": "string" - } + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_export_ids_schema" + } + } + }, + "in": "query", + "name": "q" } ], + "responses": { + "200": { + "content": { + "application/zip": { + "schema": { + "format": "binary", + "type": "string" + } + } + }, + "description": "A zip file with database(s) and dataset(s) as YAML" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Download database(s) and associated dataset(s) as a zip file", + "tags": ["Database"] + } + }, + "/api/v1/database/import/": { + "post": { + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "properties": { + "formData": { + "description": "upload file (ZIP)", + "format": "binary", + "type": "string" + }, + "overwrite": { + "description": "overwrite existing databases?", + "type": "boolean" + }, + "passwords": { + "description": "JSON map of passwords for each 
featured database in the ZIP file. If the ZIP includes a database config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", + "type": "string" + }, + "ssh_tunnel_passwords": { + "description": "JSON map of passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", + "type": "string" + }, + "ssh_tunnel_private_key_passwords": { + "description": "JSON map of private_key_passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key_password\"}`.", + "type": "string" + }, + "ssh_tunnel_private_keys": { + "description": "JSON map of private_keys for each ssh_tunnel associated to a featured database in the ZIP file. 
If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key\"}`.", + "type": "string" + } + }, + "type": "object" + } + } + }, + "required": true + }, "responses": { "200": { "content": { "application/json": { "schema": { "properties": { - "result": { - "items": { - "$ref": "#/components/schemas/DashboardDatasetSchema" - }, - "type": "array" + "message": { + "type": "string" } }, "type": "object" } } }, - "description": "Dashboard dataset definitions" + "description": "Database import result" }, "400": { "$ref": "#/components/responses/400" @@ -13594,11 +16669,11 @@ "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" + "422": { + "$ref": "#/components/responses/422" }, - "404": { - "$ref": "#/components/responses/404" + "500": { + "$ref": "#/components/responses/500" } }, "security": [ @@ -13606,18 +16681,38 @@ "jwt": [] } ], - "tags": ["Dashboards"] + "summary": "Import database(s) with associated datasets", + "tags": ["Database"] } }, - "/api/v1/dashboard/{id_or_slug}/embedded": { - "delete": { - "description": "Removes a dashboard's embedded configuration.", + "/api/v1/database/oauth2/": { + "get": { + "description": "-> Receive and store personal access tokens from OAuth for user-level authorization", "parameters": [ { - "description": "The dashboard id or slug", - "in": "path", - "name": "id_or_slug", - "required": true, + "in": "query", + "name": "state", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "code", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "scope", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "error", "schema": { "type": "string" } @@ -13626,21 +16721,19 @@ "responses": { "200": { "content": { - "application/json": { + "text/html": { "schema": { - "properties": { - "message": { - "type": 
"string" - } - }, - "type": "object" + "type": "string" } } }, - "description": "Successfully removed the configuration" + "description": "A dummy self-closing HTML page" }, - "401": { - "$ref": "#/components/responses/401" + "400": { + "$ref": "#/components/responses/400" + }, + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -13651,19 +16744,31 @@ "jwt": [] } ], - "tags": ["Dashboards"] - }, + "summary": "Receive personal access tokens from OAuth2", + "tags": ["Database"] + } + }, + "/api/v1/database/related/{column_name}": { "get": { - "description": "Returns the dashboard's embedded configuration", "parameters": [ { - "description": "The dashboard id or slug", "in": "path", - "name": "id_or_slug", + "name": "column_name", "required": true, "schema": { "type": "string" } + }, + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_related_schema" + } + } + }, + "in": "query", + "name": "q" } ], "responses": { @@ -13671,20 +16776,21 @@ "content": { "application/json": { "schema": { - "properties": { - "result": { - "$ref": "#/components/schemas/EmbeddedDashboardResponseSchema" - } - }, - "type": "object" + "$ref": "#/components/schemas/RelatedResponseSchema" } } }, - "description": "Result contains the embedded dashboard config" + "description": "Related column data" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, + "404": { + "$ref": "#/components/responses/404" + }, "500": { "$ref": "#/components/responses/500" } @@ -13694,30 +16800,21 @@ "jwt": [] } ], - "tags": ["Dashboards"] - }, + "summary": "Get related fields data", + "tags": ["Database"] + } + }, + "/api/v1/database/test_connection/": { "post": { - "description": "Sets a dashboard's embedded configuration.", - "parameters": [ - { - "description": "The dashboard id or slug", - "in": "path", - "name": "id_or_slug", - "required": true, - "schema": { - "type": 
"string" - } - } - ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EmbeddedDashboardConfig" + "$ref": "#/components/schemas/DatabaseTestConnectionSchema" } } }, - "description": "The embedded configuration to set", + "description": "Database schema", "required": true }, "responses": { @@ -13726,18 +16823,21 @@ "application/json": { "schema": { "properties": { - "result": { - "$ref": "#/components/schemas/EmbeddedDashboardResponseSchema" + "message": { + "type": "string" } }, "type": "object" } } }, - "description": "Successfully set the configuration" + "description": "Database Test Connection" }, - "401": { - "$ref": "#/components/responses/401" + "400": { + "$ref": "#/components/responses/400" + }, + "422": { + "$ref": "#/components/responses/422" }, "500": { "$ref": "#/components/responses/500" @@ -13748,30 +16848,21 @@ "jwt": [] } ], - "tags": ["Dashboards"] - }, - "put": { - "description": "Sets a dashboard's embedded configuration.", - "parameters": [ - { - "description": "The dashboard id or slug", - "in": "path", - "name": "id_or_slug", - "required": true, - "schema": { - "type": "string" - } - } - ], + "summary": "Test a database connection", + "tags": ["Database"] + } + }, + "/api/v1/database/validate_parameters/": { + "post": { "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EmbeddedDashboardConfig" + "$ref": "#/components/schemas/DatabaseValidateParametersSchema" } } }, - "description": "The embedded configuration to set", + "description": "DB-specific parameters", "required": true }, "responses": { @@ -13780,18 +16871,21 @@ "application/json": { "schema": { "properties": { - "result": { - "$ref": "#/components/schemas/EmbeddedDashboardResponseSchema" + "message": { + "type": "string" } }, "type": "object" } } }, - "description": "Successfully set the configuration" + "description": "Database Test Connection" }, - "401": { - "$ref": 
"#/components/responses/401" + "400": { + "$ref": "#/components/responses/400" + }, + "422": { + "$ref": "#/components/responses/422" }, "500": { "$ref": "#/components/responses/500" @@ -13802,12 +16896,12 @@ "jwt": [] } ], - "tags": ["Dashboards"] + "summary": "Validate database connection parameters", + "tags": ["Database"] } }, - "/api/v1/dashboard/{pk}": { + "/api/v1/database/{pk}": { "delete": { - "description": "Deletes a Dashboard.", "parameters": [ { "in": "path", @@ -13832,7 +16926,7 @@ } } }, - "description": "Dashboard deleted" + "description": "Database deleted" }, "401": { "$ref": "#/components/responses/401" @@ -13855,12 +16949,13 @@ "jwt": [] } ], - "tags": ["Dashboards"] + "summary": "Delete a database", + "tags": ["Database"] }, - "put": { - "description": "Changes a Dashboard.", + "get": { "parameters": [ { + "description": "The database id", "in": "path", "name": "pk", "required": true, @@ -13869,38 +16964,16 @@ } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DashboardRestApi.put" - } - } - }, - "description": "Dashboard schema", - "required": true - }, "responses": { "200": { "content": { "application/json": { "schema": { - "properties": { - "id": { - "type": "number" - }, - "last_modified_time": { - "type": "number" - }, - "result": { - "$ref": "#/components/schemas/DashboardRestApi.put" - } - }, "type": "object" } } }, - "description": "Dashboard changed" + "description": "Database" }, "400": { "$ref": "#/components/responses/400" @@ -13908,12 +16981,6 @@ "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, - "404": { - "$ref": "#/components/responses/404" - }, "422": { "$ref": "#/components/responses/422" }, @@ -13926,12 +16993,10 @@ "jwt": [] } ], - "tags": ["Dashboards"] - } - }, - "/api/v1/dashboard/{pk}/favorites/": { - "delete": { - "description": "Remove the dashboard from the user favorite list", + "summary": "Get a 
database", + "tags": ["Database"] + }, + "put": { "parameters": [ { "in": "path", @@ -13942,28 +17007,51 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DatabaseRestApi.put" + } + } + }, + "description": "Database schema", + "required": true + }, "responses": { "200": { "content": { "application/json": { "schema": { "properties": { + "id": { + "type": "number" + }, "result": { - "type": "object" + "$ref": "#/components/schemas/DatabaseRestApi.put" } }, "type": "object" } } }, - "description": "Dashboard removed from favorites" + "description": "Database changed" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" }, + "422": { + "$ref": "#/components/responses/422" + }, "500": { "$ref": "#/components/responses/500" } @@ -13973,18 +17061,32 @@ "jwt": [] } ], - "tags": ["Dashboards"] - }, - "post": { - "description": "Marks the dashboard as favorite for the current user", + "summary": "Change a database", + "tags": ["Database"] + } + }, + "/api/v1/database/{pk}/catalogs/": { + "get": { "parameters": [ { + "description": "The database id", "in": "path", "name": "pk", "required": true, "schema": { "type": "integer" } + }, + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/database_catalogs_query_schema" + } + } + }, + "in": "query", + "name": "q" } ], "responses": { @@ -13992,16 +17094,14 @@ "content": { "application/json": { "schema": { - "properties": { - "result": { - "type": "object" - } - }, - "type": "object" + "$ref": "#/components/schemas/CatalogsResponseSchema" } } }, - "description": "Dashboard added to favorites" + "description": "A List of all catalogs from the database" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" @@ -14018,12 +17118,12 @@ 
"jwt": [] } ], - "tags": ["Dashboards"] + "summary": "Get all catalogs from a database", + "tags": ["Database"] } }, - "/api/v1/dashboard/{pk}/filter_state": { + "/api/v1/database/{pk}/columnar_upload/": { "post": { - "description": "Stores a new value.", "parameters": [ { "in": "path", @@ -14032,20 +17132,13 @@ "schema": { "type": "integer" } - }, - { - "in": "query", - "name": "tab_id", - "schema": { - "type": "integer" - } } ], "requestBody": { "content": { - "application/json": { + "multipart/form-data": { "schema": { - "$ref": "#/components/schemas/TemporaryCachePostSchema" + "$ref": "#/components/schemas/ColumnarUploadPostSchema" } } }, @@ -14053,72 +17146,11 @@ }, "responses": { "201": { - "content": { - "application/json": { - "schema": { - "properties": { - "key": { - "description": "The key to retrieve the value.", - "type": "string" - } - }, - "type": "object" - } - } - }, - "description": "The value was stored successfully." - }, - "400": { - "$ref": "#/components/responses/400" - }, - "401": { - "$ref": "#/components/responses/401" - }, - "422": { - "$ref": "#/components/responses/422" - }, - "500": { - "$ref": "#/components/responses/500" - } - }, - "security": [ - { - "jwt": [] - } - ], - "tags": ["Dashboard Filter State"] - } - }, - "/api/v1/dashboard/{pk}/filter_state/{key}": { - "delete": { - "description": "Deletes a value.", - "parameters": [ - { - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "description": "The value key.", - "in": "path", - "name": "key", - "required": true, - "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { "content": { "application/json": { "schema": { "properties": { "message": { - "description": "The result of the operation", "type": "string" } }, @@ -14126,7 +17158,7 @@ } } }, - "description": "Deleted the stored value." 
+ "description": "Columnar upload response" }, "400": { "$ref": "#/components/responses/400" @@ -14149,26 +17181,21 @@ "jwt": [] } ], - "tags": ["Dashboard Filter State"] - }, + "summary": "Upload a Columnar file to a database table", + "tags": ["Database"] + } + }, + "/api/v1/database/{pk}/connection": { "get": { - "description": "Retrives a value.", "parameters": [ { + "description": "The database id", "in": "path", "name": "pk", "required": true, "schema": { "type": "integer" } - }, - { - "in": "path", - "name": "key", - "required": true, - "schema": { - "type": "string" - } } ], "responses": { @@ -14176,17 +17203,11 @@ "content": { "application/json": { "schema": { - "properties": { - "value": { - "description": "The stored value", - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/DatabaseConnectionSchema" } } }, - "description": "Returns the stored value." + "description": "Database with connection info" }, "400": { "$ref": "#/components/responses/400" @@ -14194,9 +17215,6 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" - }, "422": { "$ref": "#/components/responses/422" }, @@ -14209,10 +17227,12 @@ "jwt": [] } ], - "tags": ["Dashboard Filter State"] - }, - "put": { - "description": "Updates an existing value.", + "summary": "Get a database connection info", + "tags": ["Database"] + } + }, + "/api/v1/database/{pk}/csv_upload/": { + "post": { "parameters": [ { "in": "path", @@ -14221,41 +17241,25 @@ "schema": { "type": "integer" } - }, - { - "in": "path", - "name": "key", - "required": true, - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "tab_id", - "schema": { - "type": "integer" - } } ], "requestBody": { "content": { - "application/json": { + "multipart/form-data": { "schema": { - "$ref": "#/components/schemas/TemporaryCachePutSchema" + "$ref": "#/components/schemas/CSVUploadPostSchema" } } }, "required": true }, "responses": { - "200": { + "201": { 
"content": { "application/json": { "schema": { "properties": { - "key": { - "description": "The key to retrieve the value.", + "message": { "type": "string" } }, @@ -14263,7 +17267,7 @@ } } }, - "description": "The value was stored successfully." + "description": "CSV upload response" }, "400": { "$ref": "#/components/responses/400" @@ -14286,27 +17290,27 @@ "jwt": [] } ], - "tags": ["Dashboard Filter State"] + "summary": "Upload a CSV file to a database table", + "tags": ["Database"] } }, - "/api/v1/dashboard/{pk}/permalink": { + "/api/v1/database/{pk}/excel_upload/": { "post": { - "description": "Stores a new permanent link.", "parameters": [ { "in": "path", "name": "pk", "required": true, "schema": { - "type": "string" + "type": "integer" } } ], "requestBody": { "content": { - "application/json": { + "multipart/form-data": { "schema": { - "$ref": "#/components/schemas/DashboardPermalinkStateSchema" + "$ref": "#/components/schemas/ExcelUploadPostSchema" } } }, @@ -14318,12 +17322,7 @@ "application/json": { "schema": { "properties": { - "key": { - "description": "The key to retrieve the permanent link data.", - "type": "string" - }, - "url": { - "description": "permanent link.", + "message": { "type": "string" } }, @@ -14331,7 +17330,7 @@ } } }, - "description": "The permanent link was stored successfully." 
+ "description": "Excel upload response" }, "400": { "$ref": "#/components/responses/400" @@ -14339,6 +17338,9 @@ "401": { "$ref": "#/components/responses/401" }, + "404": { + "$ref": "#/components/responses/404" + }, "422": { "$ref": "#/components/responses/422" }, @@ -14351,12 +17353,12 @@ "jwt": [] } ], - "tags": ["Dashboard Permanent Link"] + "summary": "Upload an Excel file to a database table", + "tags": ["Database"] } }, - "/api/v1/dashboard/{pk}/thumbnail/{digest}/": { + "/api/v1/database/{pk}/function_names/": { "get": { - "description": "Compute async or get already computed dashboard thumbnail from cache.", "parameters": [ { "in": "path", @@ -14365,57 +17367,18 @@ "schema": { "type": "integer" } - }, - { - "description": "A hex digest that makes this dashboard unique", - "in": "path", - "name": "digest", - "required": true, - "schema": { - "type": "string" - } - }, - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/thumbnail_query_schema" - } - } - }, - "in": "query", - "name": "q" } ], "responses": { "200": { - "content": { - "image/*": { - "schema": { - "format": "binary", - "type": "string" - } - } - }, - "description": "Dashboard thumbnail image" - }, - "202": { "content": { "application/json": { "schema": { - "properties": { - "message": { - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/DatabaseFunctionNamesResponse" } } }, - "description": "Thumbnail does not exist on cache, fired async to compute" - }, - "302": { - "description": "Redirects to the current digest" + "description": "Query result" }, "401": { "$ref": "#/components/responses/401" @@ -14423,9 +17386,6 @@ "404": { "$ref": "#/components/responses/404" }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -14435,23 +17395,20 @@ "jwt": [] } ], - "tags": ["Dashboards"] + "summary": "Get function names supported by a database", + "tags": ["Database"] } }, - 
"/api/v1/database/": { + "/api/v1/database/{pk}/related_objects/": { "get": { - "description": "Get a list of models", "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_list_schema" - } - } - }, - "in": "query", - "name": "q" + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } } ], "responses": { @@ -14459,79 +17416,17 @@ "content": { "application/json": { "schema": { - "properties": { - "count": { - "description": "The total record count on the backend", - "type": "number" - }, - "description_columns": { - "properties": { - "column_name": { - "description": "The description for the column name. Will be translated by babel", - "example": "A Nice description for the column", - "type": "string" - } - }, - "type": "object" - }, - "ids": { - "description": "A list of item ids, useful when you don't know the column id", - "items": { - "type": "string" - }, - "type": "array" - }, - "label_columns": { - "properties": { - "column_name": { - "description": "The label for the column name. Will be translated by babel", - "example": "A Nice label for the column", - "type": "string" - } - }, - "type": "object" - }, - "list_columns": { - "description": "A list of columns", - "items": { - "type": "string" - }, - "type": "array" - }, - "list_title": { - "description": "A title to render. 
Will be translated by babel", - "example": "List Items", - "type": "string" - }, - "order_columns": { - "description": "A list of allowed columns to sort", - "items": { - "type": "string" - }, - "type": "array" - }, - "result": { - "description": "The result from the get list query", - "items": { - "$ref": "#/components/schemas/DatabaseRestApi.get_list" - }, - "type": "array" - } - }, - "type": "object" + "$ref": "#/components/schemas/DatabaseRelatedObjectsResponse" } } }, - "description": "Items from Model" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Query result" }, "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -14542,39 +17437,44 @@ "jwt": [] } ], + "summary": "Get charts and dashboards count associated to a database", "tags": ["Database"] - }, - "post": { - "description": "Create a new Database.", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DatabaseRestApi.post" - } + } + }, + "/api/v1/database/{pk}/schemas/": { + "get": { + "parameters": [ + { + "description": "The database id", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" } }, - "description": "Database schema", - "required": true - }, - "responses": { - "201": { + { "content": { "application/json": { "schema": { - "properties": { - "id": { - "type": "number" - }, - "result": { - "$ref": "#/components/schemas/DatabaseRestApi.post" - } - }, - "type": "object" + "$ref": "#/components/schemas/database_schemas_query_schema" + } + } + }, + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SchemasResponseSchema" } } }, - "description": "Database added" + "description": "A List of all schemas from the database" }, "400": { "$ref": 
"#/components/responses/400" @@ -14594,23 +17494,20 @@ "jwt": [] } ], + "summary": "Get all schemas from a database", "tags": ["Database"] } }, - "/api/v1/database/_info": { + "/api/v1/database/{pk}/schemas_access_for_file_upload/": { "get": { - "description": "Get metadata information about this API resource", "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_info_schema" - } - } - }, - "in": "query", - "name": "q" + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } } ], "responses": { @@ -14618,56 +17515,17 @@ "content": { "application/json": { "schema": { - "properties": { - "add_columns": { - "type": "object" - }, - "edit_columns": { - "type": "object" - }, - "filters": { - "properties": { - "column_name": { - "items": { - "properties": { - "name": { - "description": "The filter name. Will be translated by babel", - "type": "string" - }, - "operator": { - "description": "The filter operation key to use on list filters", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - } - }, - "type": "object" - }, - "permissions": { - "description": "The user permissions for this API resource", - "items": { - "type": "string" - }, - "type": "array" - } - }, - "type": "object" + "$ref": "#/components/schemas/DatabaseSchemaAccessForFileUploadResponse" } } }, - "description": "Item from Model" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "The list of the database schemas where to upload information" }, "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -14678,76 +17536,64 @@ "jwt": [] } ], + "summary": "The list of the database schemas where to upload information", "tags": ["Database"] } }, - "/api/v1/database/available/": { + "/api/v1/database/{pk}/select_star/{table_name}/": { "get": { - 
"description": "Get names of databases currently available", + "parameters": [ + { + "description": "The database id", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "description": "Table name", + "in": "path", + "name": "table_name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "description": "Table schema", + "in": "path", + "name": "schema_name", + "required": true, + "schema": { + "type": "string" + } + } + ], "responses": { "200": { "content": { "application/json": { "schema": { - "items": { - "properties": { - "available_drivers": { - "description": "Installed drivers for the engine", - "items": { - "type": "string" - }, - "type": "array" - }, - "default_driver": { - "description": "Default driver for the engine", - "type": "string" - }, - "engine": { - "description": "Name of the SQLAlchemy engine", - "type": "string" - }, - "engine_information": { - "description": "Dict with public properties form the DB Engine", - "properties": { - "disable_ssh_tunneling": { - "description": "Whether the engine supports SSH Tunnels", - "type": "boolean" - }, - "supports_file_upload": { - "description": "Whether the engine supports file uploads", - "type": "boolean" - } - }, - "type": "object" - }, - "name": { - "description": "Name of the database", - "type": "string" - }, - "parameters": { - "description": "JSON schema defining the needed parameters", - "type": "object" - }, - "preferred": { - "description": "Is the database preferred?", - "type": "boolean" - }, - "sqlalchemy_uri_placeholder": { - "description": "Example placeholder for the SQLAlchemy URI", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" + "$ref": "#/components/schemas/SelectStarResponseSchema" } } }, - "description": "Database names" + "description": "SQL statement for a select star for table" }, "400": { "$ref": "#/components/responses/400" }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": 
{ + "$ref": "#/components/responses/404" + }, + "422": { + "$ref": "#/components/responses/422" + }, "500": { "$ref": "#/components/responses/500" } @@ -14757,36 +17603,54 @@ "jwt": [] } ], + "summary": "Get database select star for table", "tags": ["Database"] } }, - "/api/v1/database/export/": { + "/api/v1/database/{pk}/select_star/{table_name}/{schema_name}/": { "get": { - "description": "Download database(s) and associated dataset(s) as a zip file", "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_export_ids_schema" - } - } - }, - "in": "query", - "name": "q" + "description": "The database id", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "description": "Table name", + "in": "path", + "name": "table_name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "description": "Table schema", + "in": "path", + "name": "schema_name", + "required": true, + "schema": { + "type": "string" + } } ], "responses": { "200": { "content": { - "application/zip": { + "application/json": { "schema": { - "format": "binary", - "type": "string" + "$ref": "#/components/schemas/SelectStarResponseSchema" } } }, - "description": "A zip file with database(s) and dataset(s) as YAML" + "description": "SQL statement for a select star for table" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" @@ -14794,6 +17658,9 @@ "404": { "$ref": "#/components/responses/404" }, + "422": { + "$ref": "#/components/responses/422" + }, "500": { "$ref": "#/components/responses/500" } @@ -14803,48 +17670,22 @@ "jwt": [] } ], + "summary": "Get database select star for table", "tags": ["Database"] } - }, - "/api/v1/database/import/": { - "post": { - "requestBody": { - "content": { - "multipart/form-data": { - "schema": { - "properties": { - "formData": { - "description": "upload file (ZIP)", - "format": "binary", - "type": 
"string" - }, - "overwrite": { - "description": "overwrite existing databases?", - "type": "boolean" - }, - "passwords": { - "description": "JSON map of passwords for each featured database in the ZIP file. If the ZIP includes a database config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", - "type": "string" - }, - "ssh_tunnel_passwords": { - "description": "JSON map of passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", - "type": "string" - }, - "ssh_tunnel_private_key_passwords": { - "description": "JSON map of private_key_passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key_password\"}`.", - "type": "string" - }, - "ssh_tunnel_private_keys": { - "description": "JSON map of private_keys for each ssh_tunnel associated to a featured database in the ZIP file. 
If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key\"}`.", - "type": "string" - } - }, - "type": "object" - } + }, + "/api/v1/database/{pk}/ssh_tunnel/": { + "delete": { + "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" } - }, - "required": true - }, + } + ], "responses": { "200": { "content": { @@ -14859,14 +17700,17 @@ } } }, - "description": "Database import result" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "SSH Tunnel deleted" }, "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, "422": { "$ref": "#/components/responses/422" }, @@ -14879,42 +17723,61 @@ "jwt": [] } ], + "summary": "Delete a SSH tunnel", "tags": ["Database"] } }, - "/api/v1/database/test_connection/": { - "post": { - "description": "Tests a database connection", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DatabaseTestConnectionSchema" - } + "/api/v1/database/{pk}/table/{table_name}/{schema_name}/": { + "get": { + "parameters": [ + { + "description": "The database id", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" } }, - "description": "Database schema", - "required": true - }, + { + "description": "Table name", + "in": "path", + "name": "table_name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "description": "Table schema", + "in": "path", + "name": "schema_name", + "required": true, + "schema": { + "type": "string" + } + } + ], "responses": { "200": { "content": { "application/json": { "schema": { - "properties": { - "message": { - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/TableMetadataResponseSchema" } } }, - 
"description": "Database Test Connection" + "description": "Table metadata information" }, "400": { "$ref": "#/components/responses/400" }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + }, "422": { "$ref": "#/components/responses/422" }, @@ -14927,42 +17790,62 @@ "jwt": [] } ], + "summary": "Get database table metadata", "tags": ["Database"] } }, - "/api/v1/database/validate_parameters/": { - "post": { - "description": "Validates parameters used to connect to a database", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DatabaseValidateParametersSchema" - } + "/api/v1/database/{pk}/table_extra/{table_name}/{schema_name}/": { + "get": { + "description": "Response depends on each DB engine spec normally focused on partitions.", + "parameters": [ + { + "description": "The database id", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" } }, - "description": "DB-specific parameters", - "required": true - }, + { + "description": "Table name", + "in": "path", + "name": "table_name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "description": "Table schema", + "in": "path", + "name": "schema_name", + "required": true, + "schema": { + "type": "string" + } + } + ], "responses": { "200": { "content": { "application/json": { "schema": { - "properties": { - "message": { - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/TableExtraMetadataResponseSchema" } } }, - "description": "Database Test Connection" + "description": "Table extra metadata information" }, "400": { "$ref": "#/components/responses/400" }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + }, "422": { "$ref": "#/components/responses/422" }, @@ -14975,20 +17858,47 @@ "jwt": [] } ], + "summary": "Get table extra metadata", "tags": ["Database"] } }, - 
"/api/v1/database/{pk}": { - "delete": { - "description": "Deletes a Database.", + "/api/v1/database/{pk}/table_metadata/": { + "get": { + "description": "Metadata associated with the table (columns, indexes, etc.)", "parameters": [ { + "description": "The database id", "in": "path", "name": "pk", "required": true, "schema": { "type": "integer" } + }, + { + "description": "Table name", + "in": "query", + "name": "table", + "required": true, + "schema": { + "type": "string" + } + }, + { + "description": "Optional table schema, if not passed default schema will be used", + "in": "query", + "name": "schema", + "schema": { + "type": "string" + } + }, + { + "description": "Optional table catalog, if not passed default catalog will be used", + "in": "query", + "name": "catalog", + "schema": { + "type": "string" + } } ], "responses": { @@ -14996,29 +17906,18 @@ "content": { "application/json": { "schema": { - "properties": { - "message": { - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/TableExtraMetadataResponseSchema" } } }, - "description": "Database deleted" + "description": "Table metadata information" }, "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, "404": { "$ref": "#/components/responses/404" }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -15028,10 +17927,13 @@ "jwt": [] } ], + "summary": "Get table metadata", "tags": ["Database"] - }, + } + }, + "/api/v1/database/{pk}/table_metadata/extra/": { "get": { - "description": "Get a database", + "description": "Extra metadata associated with the table (partitions, description, etc.)", "parameters": [ { "description": "The database id", @@ -15041,6 +17943,31 @@ "schema": { "type": "integer" } + }, + { + "description": "Table name", + "in": "query", + "name": "name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "description": "Optional table schema, 
if not passed the schema configured in the database will be used", + "in": "query", + "name": "schema", + "schema": { + "type": "string" + } + }, + { + "description": "Optional table catalog, if not passed the catalog configured in the database will be used", + "in": "query", + "name": "catalog", + "schema": { + "type": "string" + } } ], "responses": { @@ -15048,20 +17975,17 @@ "content": { "application/json": { "schema": { - "type": "object" + "$ref": "#/components/schemas/TableExtraMetadataResponseSchema" } } }, - "description": "Database" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Table extra metadata information" }, "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -15072,58 +17996,62 @@ "jwt": [] } ], + "summary": "Get table extra metadata", "tags": ["Database"] - }, - "put": { - "description": "Changes a Database.", + } + }, + "/api/v1/database/{pk}/tables/": { + "get": { "parameters": [ { + "description": "The database id", "in": "path", "name": "pk", "required": true, "schema": { "type": "integer" } + }, + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/database_tables_query_schema" + } + } + }, + "in": "query", + "name": "q" } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DatabaseRestApi.put" - } - } - }, - "description": "Database schema", - "required": true - }, "responses": { "200": { "content": { "application/json": { "schema": { "properties": { - "id": { - "type": "number" + "count": { + "type": "integer" }, "result": { - "$ref": "#/components/schemas/DatabaseRestApi.put" + "description": "A List of tables for given database", + "items": { + "$ref": "#/components/schemas/DatabaseTablesResponse" + }, + "type": "array" } }, "type": "object" } } }, - "description": "Database 
changed" + "description": "Tables list" }, "400": { "$ref": "#/components/responses/400" }, "401": { - "$ref": "#/components/responses/401" - }, - "403": { - "$ref": "#/components/responses/403" + "$ref": "#/components/responses/401" }, "404": { "$ref": "#/components/responses/404" @@ -15140,14 +18068,15 @@ "jwt": [] } ], + "summary": "Get a list of tables for given database", "tags": ["Database"] } }, - "/api/v1/database/{pk}/connection": { - "get": { + "/api/v1/database/{pk}/validate_sql/": { + "post": { + "description": "Validates that arbitrary SQL is acceptable for the given database.", "parameters": [ { - "description": "The database id", "in": "path", "name": "pk", "required": true, @@ -15156,16 +18085,36 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ValidateSQLRequest" + } + } + }, + "description": "Validate SQL request", + "required": true + }, "responses": { "200": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DatabaseConnectionSchema" + "properties": { + "result": { + "description": "A List of SQL errors found on the statement", + "items": { + "$ref": "#/components/schemas/ValidateSQLResponse" + }, + "type": "array" + } + }, + "type": "object" } } }, - "description": "Database with connection info" + "description": "Validation result" }, "400": { "$ref": "#/components/responses/400" @@ -15173,8 +18122,8 @@ "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -15185,21 +18134,23 @@ "jwt": [] } ], - "summary": "Get a database connection info", + "summary": "Validate arbitrary SQL", "tags": ["Database"] } }, - "/api/v1/database/{pk}/function_names/": { - "get": { - "description": "Get function names supported by a database", + "/api/v1/dataset/": { + "delete": { "parameters": [ { - "in": "path", - "name": "pk", 
- "required": true, - "schema": { - "type": "integer" - } + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_delete_ids_schema" + } + } + }, + "in": "query", + "name": "q" } ], "responses": { @@ -15207,18 +18158,32 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DatabaseFunctionNamesResponse" + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" } } }, - "description": "Query result" + "description": "Dataset bulk delete" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" }, + "422": { + "$ref": "#/components/responses/422" + }, "500": { "$ref": "#/components/responses/500" } @@ -15228,20 +18193,22 @@ "jwt": [] } ], - "tags": ["Database"] - } - }, - "/api/v1/database/{pk}/related_objects/": { + "summary": "Bulk delete datasets", + "tags": ["Datasets"] + }, "get": { - "description": "Get charts and dashboards count associated to a database", + "description": "Gets a list of datasets, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", "parameters": [ { - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_list_schema" + } + } + }, + "in": "query", + "name": "q" } ], "responses": { @@ -15249,17 +18216,79 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DatabaseRelatedObjectsResponse" + "properties": { + "count": { + "description": "The total record count on the backend", + "type": "number" + }, + "description_columns": { + "properties": { + "column_name": { + "description": "The description for the column name. 
Will be translated by babel", + "example": "A Nice description for the column", + "type": "string" + } + }, + "type": "object" + }, + "ids": { + "description": "A list of item ids, useful when you don't know the column id", + "items": { + "type": "string" + }, + "type": "array" + }, + "label_columns": { + "properties": { + "column_name": { + "description": "The label for the column name. Will be translated by babel", + "example": "A Nice label for the column", + "type": "string" + } + }, + "type": "object" + }, + "list_columns": { + "description": "A list of columns", + "items": { + "type": "string" + }, + "type": "array" + }, + "list_title": { + "description": "A title to render. Will be translated by babel", + "example": "List Items", + "type": "string" + }, + "order_columns": { + "description": "A list of allowed columns to sort", + "items": { + "type": "string" + }, + "type": "array" + }, + "result": { + "description": "The result from the get list query", + "items": { + "$ref": "#/components/schemas/DatasetRestApi.get_list" + }, + "type": "array" + } + }, + "type": "object" } } }, - "description": "Query result" + "description": "Items from Model" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" + "422": { + "$ref": "#/components/responses/422" }, "500": { "$ref": "#/components/responses/500" @@ -15270,44 +18299,39 @@ "jwt": [] } ], - "tags": ["Database"] - } - }, - "/api/v1/database/{pk}/schemas/": { - "get": { - "description": "Get all schemas from a database", - "parameters": [ - { - "description": "The database id", - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" + "summary": "Get a list of datasets", + "tags": ["Datasets"] + }, + "post": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DatasetRestApi.post" + } } }, - { - "content": { - "application/json": 
{ - "schema": { - "$ref": "#/components/schemas/database_schemas_query_schema" - } - } - }, - "in": "query", - "name": "q" - } - ], + "description": "Dataset schema", + "required": true + }, "responses": { - "200": { + "201": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SchemasResponseSchema" + "properties": { + "id": { + "type": "number" + }, + "result": { + "$ref": "#/components/schemas/DatasetRestApi.post" + } + }, + "type": "object" } } }, - "description": "A List of all schemas from the database" + "description": "Dataset added" }, "400": { "$ref": "#/components/responses/400" @@ -15315,8 +18339,8 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" + "422": { + "$ref": "#/components/responses/422" }, "500": { "$ref": "#/components/responses/500" @@ -15327,19 +18351,24 @@ "jwt": [] } ], - "tags": ["Database"] + "summary": "Create a new dataset", + "tags": ["Datasets"] } }, - "/api/v1/database/{pk}/schemas_access_for_file_upload/": { + "/api/v1/dataset/_info": { "get": { + "description": "Get metadata information about this API resource", "parameters": [ { - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_info_schema" + } + } + }, + "in": "query", + "name": "q" } ], "responses": { @@ -15347,17 +18376,56 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DatabaseSchemaAccessForFileUploadResponse" + "properties": { + "add_columns": { + "type": "object" + }, + "edit_columns": { + "type": "object" + }, + "filters": { + "properties": { + "column_name": { + "items": { + "properties": { + "name": { + "description": "The filter name. 
Will be translated by babel", + "type": "string" + }, + "operator": { + "description": "The filter operation key to use on list filters", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + }, + "permissions": { + "description": "The user permissions for this API resource", + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" } } }, - "description": "The list of the database schemas where to upload information" + "description": "Item from Model" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" + "422": { + "$ref": "#/components/responses/422" }, "500": { "$ref": "#/components/responses/500" @@ -15368,40 +18436,31 @@ "jwt": [] } ], - "summary": "The list of the database schemas where to upload information", - "tags": ["Database"] + "summary": "Get metadata information about this API resource", + "tags": ["Datasets"] } }, - "/api/v1/database/{pk}/select_star/{table_name}/": { + "/api/v1/dataset/distinct/{column_name}": { "get": { - "description": "Get database select star for table", "parameters": [ { - "description": "The database id", - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "description": "Table name", "in": "path", - "name": "table_name", + "name": "column_name", "required": true, "schema": { "type": "string" } }, { - "description": "Table schema", - "in": "path", - "name": "schema_name", - "required": true, - "schema": { - "type": "string" - } + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_related_schema" + } + } + }, + "in": "query", + "name": "q" } ], "responses": { @@ -15409,11 +18468,11 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SelectStarResponseSchema" + "$ref": "#/components/schemas/DistincResponseSchema" } } }, - "description": "SQL 
statement for a select star for table" + "description": "Distinct field data" }, "400": { "$ref": "#/components/responses/400" @@ -15424,9 +18483,6 @@ "404": { "$ref": "#/components/responses/404" }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -15436,51 +18492,41 @@ "jwt": [] } ], - "tags": ["Database"] + "summary": "Get distinct values from field data", + "tags": ["Datasets"] } }, - "/api/v1/database/{pk}/select_star/{table_name}/{schema_name}/": { - "get": { - "description": "Get database select star for table", - "parameters": [ - { - "description": "The database id", - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "description": "Table name", - "in": "path", - "name": "table_name", - "required": true, - "schema": { - "type": "string" + "/api/v1/dataset/duplicate": { + "post": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DatasetDuplicateSchema" + } } }, - { - "description": "Table schema", - "in": "path", - "name": "schema_name", - "required": true, - "schema": { - "type": "string" - } - } - ], + "description": "Dataset schema", + "required": true + }, "responses": { - "200": { + "201": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SelectStarResponseSchema" + "properties": { + "id": { + "type": "number" + }, + "result": { + "$ref": "#/components/schemas/DatasetDuplicateSchema" + } + }, + "type": "object" } } }, - "description": "SQL statement for a select star for table" + "description": "Dataset duplicated" }, "400": { "$ref": "#/components/responses/400" @@ -15488,6 +18534,9 @@ "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" }, @@ -15503,50 +18552,45 @@ "jwt": [] } ], - "tags": ["Database"] + "summary": "Duplicate a dataset", + "tags": ["Datasets"] } }, 
- "/api/v1/database/{pk}/ssh_tunnel/": { - "delete": { - "description": "Deletes a SSH Tunnel.", + "/api/v1/dataset/export/": { + "get": { "parameters": [ { - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_export_ids_schema" + } + } + }, + "in": "query", + "name": "q" } ], "responses": { "200": { "content": { - "application/json": { + "text/plain": { "schema": { - "properties": { - "message": { - "type": "string" - } - }, - "type": "object" + "type": "string" } } }, - "description": "SSH Tunnel deleted" + "description": "Dataset export" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, "404": { "$ref": "#/components/responses/404" }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -15556,51 +18600,42 @@ "jwt": [] } ], - "tags": ["Database"] + "summary": "Download multiple datasets as YAML files", + "tags": ["Datasets"] } }, - "/api/v1/database/{pk}/table/{table_name}/{schema_name}/": { - "get": { - "description": "Get database table metadata", - "parameters": [ - { - "description": "The database id", - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "description": "Table name", - "in": "path", - "name": "table_name", - "required": true, - "schema": { - "type": "string" + "/api/v1/dataset/get_or_create/": { + "post": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetOrCreateDatasetSchema" + } } }, - { - "description": "Table schema", - "in": "path", - "name": "schema_name", - "required": true, - "schema": { - "type": "string" - } - } - ], + "required": true + }, "responses": { "200": { "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/TableMetadataResponseSchema" + "properties": { + "result": { + "properties": { + "table_id": { + "type": "integer" + } + }, + "type": "object" + } + }, + "type": "object" } } }, - "description": "Table metadata information" + "description": "The ID of the table" }, "400": { "$ref": "#/components/responses/400" @@ -15608,9 +18643,6 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" - }, "422": { "$ref": "#/components/responses/422" }, @@ -15620,54 +18652,75 @@ }, "security": [ { - "jwt": [] - } - ], - "tags": ["Database"] - } - }, - "/api/v1/database/{pk}/table_extra/{table_name}/{schema_name}/": { - "get": { - "description": "Response depends on each DB engine spec normally focused on partitions", - "parameters": [ - { - "description": "The database id", - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "description": "Table name", - "in": "path", - "name": "table_name", - "required": true, - "schema": { - "type": "string" - } - }, - { - "description": "Table schema", - "in": "path", - "name": "schema_name", - "required": true, - "schema": { - "type": "string" - } + "jwt": [] } ], + "summary": "Retrieve a table by name, or create it if it does not exist", + "tags": ["Datasets"] + } + }, + "/api/v1/dataset/import/": { + "post": { + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "properties": { + "formData": { + "description": "upload file (ZIP or YAML)", + "format": "binary", + "type": "string" + }, + "overwrite": { + "description": "overwrite existing datasets?", + "type": "boolean" + }, + "passwords": { + "description": "JSON map of passwords for each featured database in the ZIP file. 
If the ZIP includes a database config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", + "type": "string" + }, + "ssh_tunnel_passwords": { + "description": "JSON map of passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", + "type": "string" + }, + "ssh_tunnel_private_key_passwords": { + "description": "JSON map of private_key_passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key_password\"}`.", + "type": "string" + }, + "ssh_tunnel_private_keys": { + "description": "JSON map of private_keys for each ssh_tunnel associated to a featured database in the ZIP file. 
If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key\"}`.", + "type": "string" + }, + "sync_columns": { + "description": "sync columns?", + "type": "boolean" + }, + "sync_metrics": { + "description": "sync metrics?", + "type": "boolean" + } + }, + "type": "object" + } + } + }, + "required": true + }, "responses": { "200": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TableExtraMetadataResponseSchema" + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" } } }, - "description": "Table extra metadata information" + "description": "Dataset import result" }, "400": { "$ref": "#/components/responses/400" @@ -15675,9 +18728,6 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" - }, "422": { "$ref": "#/components/responses/422" }, @@ -15690,27 +18740,26 @@ "jwt": [] } ], - "summary": "Get table extra metadata", - "tags": ["Database"] + "summary": "Import dataset(s) with associated databases", + "tags": ["Datasets"] } }, - "/api/v1/database/{pk}/tables/": { + "/api/v1/dataset/related/{column_name}": { "get": { "parameters": [ { - "description": "The database id", "in": "path", - "name": "pk", + "name": "column_name", "required": true, "schema": { - "type": "integer" + "type": "string" } }, { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/database_tables_query_schema" + "$ref": "#/components/schemas/get_related_schema" } } }, @@ -15723,23 +18772,11 @@ "content": { "application/json": { "schema": { - "properties": { - "count": { - "type": "integer" - }, - "result": { - "description": "A List of tables for given database", - "items": { - "$ref": "#/components/schemas/DatabaseTablesResponse" - }, - "type": "array" - } - }, - "type": "object" + "$ref": "#/components/schemas/RelatedResponseSchema" } } 
}, - "description": "Tables list" + "description": "Related column data" }, "400": { "$ref": "#/components/responses/400" @@ -15750,9 +18787,6 @@ "404": { "$ref": "#/components/responses/404" }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -15762,32 +18796,22 @@ "jwt": [] } ], - "summary": "Get a list of tables for given database", - "tags": ["Database"] + "summary": "Get related fields data", + "tags": ["Datasets"] } }, - "/api/v1/database/{pk}/validate_sql/": { - "post": { - "description": "Validates arbitrary SQL.", - "parameters": [ - { - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - } - ], + "/api/v1/dataset/warm_up_cache": { + "put": { + "description": "Warms up the cache for the table. Note for slices a force refresh occurs. In terms of the `extra_filters` these can be obtained from records in the JSON encoded `logs.json` column associated with the `explore_json` action.", "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ValidateSQLRequest" + "$ref": "#/components/schemas/DatasetCacheWarmUpRequestSchema" } } }, - "description": "Validate SQL request", + "description": "Identifies the database and table to warm up cache for, and any additional dashboard or filter context to use.", "required": true }, "responses": { @@ -15795,27 +18819,15 @@ "content": { "application/json": { "schema": { - "properties": { - "result": { - "description": "A List of SQL errors found on the statement", - "items": { - "$ref": "#/components/schemas/ValidateSQLResponse" - }, - "type": "array" - } - }, - "type": "object" + "$ref": "#/components/schemas/DatasetCacheWarmUpResponseSchema" } } }, - "description": "Validation result" + "description": "Each chart's warmup status" }, "400": { "$ref": "#/components/responses/400" }, - "401": { - "$ref": "#/components/responses/401" - }, "404": { "$ref": "#/components/responses/404" }, @@ 
-15828,24 +18840,20 @@ "jwt": [] } ], - "summary": "Validates that arbitrary sql is acceptable for the given database", - "tags": ["Database"] + "summary": "Warm up the cache for each chart powered by the given table", + "tags": ["Datasets"] } }, - "/api/v1/dataset/": { + "/api/v1/dataset/{pk}": { "delete": { - "description": "Deletes multiple Datasets in a bulk operation.", "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_delete_ids_schema" - } - } - }, - "in": "query", - "name": "q" + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } } ], "responses": { @@ -15862,10 +18870,7 @@ } } }, - "description": "Dataset bulk delete" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Dataset delete" }, "401": { "$ref": "#/components/responses/401" @@ -15888,21 +18893,39 @@ "jwt": [] } ], + "summary": "Delete a dataset", "tags": ["Datasets"] }, "get": { - "description": "Get a list of models", + "description": "Get a dataset by ID", "parameters": [ + { + "description": "The dataset ID", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/get_list_schema" + "$ref": "#/components/schemas/get_item_schema" } } }, "in": "query", "name": "q" + }, + { + "description": "Should Jinja macros from sql, metrics and columns be rendered and included in the response", + "in": "query", + "name": "include_rendered_sql", + "schema": { + "type": "boolean" + } } ], "responses": { @@ -15911,69 +18934,19 @@ "application/json": { "schema": { "properties": { - "count": { - "description": "The total record count on the backend", - "type": "number" - }, - "description_columns": { - "properties": { - "column_name": { - "description": "The description for the column name. 
Will be translated by babel", - "example": "A Nice description for the column", - "type": "string" - } - }, - "type": "object" - }, - "ids": { - "description": "A list of item ids, useful when you don't know the column id", - "items": { - "type": "string" - }, - "type": "array" - }, - "label_columns": { - "properties": { - "column_name": { - "description": "The label for the column name. Will be translated by babel", - "example": "A Nice label for the column", - "type": "string" - } - }, - "type": "object" - }, - "list_columns": { - "description": "A list of columns", - "items": { - "type": "string" - }, - "type": "array" - }, - "list_title": { - "description": "A title to render. Will be translated by babel", - "example": "List Items", + "id": { + "description": "The item id", "type": "string" }, - "order_columns": { - "description": "A list of allowed columns to sort", - "items": { - "type": "string" - }, - "type": "array" - }, "result": { - "description": "The result from the get list query", - "items": { - "$ref": "#/components/schemas/DatasetRestApi.get_list" - }, - "type": "array" + "$ref": "#/components/schemas/DatasetRestApi.get" } }, "type": "object" } } }, - "description": "Items from Model" + "description": "Dataset object has been returned." 
}, "400": { "$ref": "#/components/responses/400" @@ -15993,15 +18966,32 @@ "jwt": [] } ], - "tags": ["Datasets"] - }, - "post": { - "description": "Create a new Dataset", + "summary": "Get a dataset", + "tags": ["Datasets"] + }, + "put": { + "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "in": "query", + "name": "override_columns", + "schema": { + "type": "boolean" + } + } + ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DatasetRestApi.post" + "$ref": "#/components/schemas/DatasetRestApi.put" } } }, @@ -16009,7 +18999,7 @@ "required": true }, "responses": { - "201": { + "200": { "content": { "application/json": { "schema": { @@ -16018,14 +19008,14 @@ "type": "number" }, "result": { - "$ref": "#/components/schemas/DatasetRestApi.post" + "$ref": "#/components/schemas/DatasetRestApi.put" } }, "type": "object" } } }, - "description": "Dataset added" + "description": "Dataset changed" }, "400": { "$ref": "#/components/responses/400" @@ -16033,6 +19023,12 @@ "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, "422": { "$ref": "#/components/responses/422" }, @@ -16045,23 +19041,30 @@ "jwt": [] } ], + "summary": "Update a dataset", "tags": ["Datasets"] } }, - "/api/v1/dataset/_info": { - "get": { - "description": "Get metadata information about this API resource", + "/api/v1/dataset/{pk}/column/{column_id}": { + "delete": { "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_info_schema" - } - } - }, - "in": "query", - "name": "q" + "description": "The dataset pk for this column", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "description": "The column id for this dataset", + "in": "path", + "name": "column_id", + "required": true, + 
"schema": { + "type": "integer" + } } ], "responses": { @@ -16070,53 +19073,25 @@ "application/json": { "schema": { "properties": { - "add_columns": { - "type": "object" - }, - "edit_columns": { - "type": "object" - }, - "filters": { - "properties": { - "column_name": { - "items": { - "properties": { - "name": { - "description": "The filter name. Will be translated by babel", - "type": "string" - }, - "operator": { - "description": "The filter operation key to use on list filters", - "type": "string" - } - }, - "type": "object" - }, - "type": "array" - } - }, - "type": "object" - }, - "permissions": { - "description": "The user permissions for this API resource", - "items": { - "type": "string" - }, - "type": "array" + "message": { + "type": "string" } }, "type": "object" } } }, - "description": "Item from Model" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Column deleted" }, "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, "422": { "$ref": "#/components/responses/422" }, @@ -16129,30 +19104,30 @@ "jwt": [] } ], + "summary": "Delete a dataset column", "tags": ["Datasets"] } }, - "/api/v1/dataset/distinct/{column_name}": { - "get": { + "/api/v1/dataset/{pk}/metric/{metric_id}": { + "delete": { "parameters": [ { + "description": "The dataset pk for this column", "in": "path", - "name": "column_name", + "name": "pk", "required": true, "schema": { - "type": "string" + "type": "integer" } }, { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_related_schema" - } - } - }, - "in": "query", - "name": "q" + "description": "The metric id for this dataset", + "in": "path", + "name": "metric_id", + "required": true, + "schema": { + "type": "integer" + } } ], "responses": { @@ -16160,21 +19135,29 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DistincResponseSchema" + 
"properties": { + "message": { + "type": "string" + } + }, + "type": "object" } } }, - "description": "Distinct field data" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Metric deleted" }, "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" }, + "422": { + "$ref": "#/components/responses/422" + }, "500": { "$ref": "#/components/responses/500" } @@ -16184,44 +19167,37 @@ "jwt": [] } ], + "summary": "Delete a dataset metric", "tags": ["Datasets"] } }, - "/api/v1/dataset/duplicate": { - "post": { - "description": "Duplicates a Dataset", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DatasetDuplicateSchema" - } + "/api/v1/dataset/{pk}/refresh": { + "put": { + "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" } - }, - "description": "Dataset schema", - "required": true - }, + } + ], "responses": { - "201": { + "200": { "content": { "application/json": { "schema": { "properties": { - "id": { - "type": "number" - }, - "result": { - "$ref": "#/components/schemas/DatasetDuplicateSchema" + "message": { + "type": "string" } }, "type": "object" } } }, - "description": "Dataset duplicated" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Dataset delete" }, "401": { "$ref": "#/components/responses/401" @@ -16244,38 +19220,32 @@ "jwt": [] } ], + "summary": "Refresh and update columns of a dataset", "tags": ["Datasets"] } }, - "/api/v1/dataset/export/": { + "/api/v1/dataset/{pk}/related_objects": { "get": { - "description": "Exports multiple datasets and downloads them as YAML files", "parameters": [ { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_export_ids_schema" - } - } - }, - "in": "query", - "name": "q" + "in": "path", + "name": "pk", + "required": true, + "schema": { + 
"type": "integer" + } } ], "responses": { "200": { "content": { - "text/plain": { + "application/json": { "schema": { - "type": "string" + "$ref": "#/components/schemas/DatasetRelatedObjectsResponse" } } }, - "description": "Dataset export" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Query result" }, "401": { "$ref": "#/components/responses/401" @@ -16292,21 +19262,41 @@ "jwt": [] } ], + "summary": "Get charts and dashboards count associated to a dataset", "tags": ["Datasets"] } }, - "/api/v1/dataset/get_or_create/": { - "post": { - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/GetOrCreateDatasetSchema" - } + "/api/v1/datasource/{datasource_type}/{datasource_id}/column/{column_name}/values/": { + "get": { + "parameters": [ + { + "description": "The type of datasource", + "in": "path", + "name": "datasource_type", + "required": true, + "schema": { + "type": "string" } }, - "required": true - }, + { + "description": "The id of the datasource", + "in": "path", + "name": "datasource_id", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "description": "The name of the column to get values for", + "in": "path", + "name": "column_name", + "required": true, + "schema": { + "type": "string" + } + } + ], "responses": { "200": { "content": { @@ -16314,19 +19304,33 @@ "schema": { "properties": { "result": { - "properties": { - "table_id": { - "type": "integer" - } + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "number" + }, + { + "type": "boolean" + }, + { + "type": "object" + } + ] }, - "type": "object" + "type": "array" } }, "type": "object" } } }, - "description": "The ID of the table" + "description": "A List of distinct values for the column" }, "400": { "$ref": "#/components/responses/400" @@ -16334,8 +19338,11 @@ "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + 
"403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -16346,81 +19353,44 @@ "jwt": [] } ], - "summary": "Retrieve a table by name, or create it if it does not exist", - "tags": ["Datasets"] + "summary": "Get possible values for a datasource column", + "tags": ["Datasources"] } }, - "/api/v1/dataset/import/": { - "post": { - "requestBody": { - "content": { - "multipart/form-data": { - "schema": { - "properties": { - "formData": { - "description": "upload file (ZIP or YAML)", - "format": "binary", - "type": "string" - }, - "overwrite": { - "description": "overwrite existing datasets?", - "type": "boolean" - }, - "passwords": { - "description": "JSON map of passwords for each featured database in the ZIP file. If the ZIP includes a database config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", - "type": "string" - }, - "ssh_tunnel_passwords": { - "description": "JSON map of passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the password should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_password\"}`.", - "type": "string" - }, - "ssh_tunnel_private_key_passwords": { - "description": "JSON map of private_key_passwords for each ssh_tunnel associated to a featured database in the ZIP file. If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key_password\"}`.", - "type": "string" - }, - "ssh_tunnel_private_keys": { - "description": "JSON map of private_keys for each ssh_tunnel associated to a featured database in the ZIP file. 
If the ZIP includes a ssh_tunnel config in the path `databases/MyDatabase.yaml`, the private_key should be provided in the following format: `{\"databases/MyDatabase.yaml\": \"my_private_key\"}`.", - "type": "string" - }, - "sync_columns": { - "description": "sync columns?", - "type": "boolean" - }, - "sync_metrics": { - "description": "sync metrics?", - "type": "boolean" - } - }, - "type": "object" - } + "/api/v1/embedded_dashboard/{uuid}": { + "get": { + "parameters": [ + { + "description": "The embedded configuration uuid", + "in": "path", + "name": "uuid", + "required": true, + "schema": { + "type": "string" } - }, - "required": true - }, + } + ], "responses": { "200": { "content": { "application/json": { "schema": { "properties": { - "message": { - "type": "string" + "result": { + "$ref": "#/components/schemas/EmbeddedDashboardResponseSchema" } }, "type": "object" } } }, - "description": "Dataset import result" - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Result contains the embedded dashboard configuration" }, "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + "404": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -16431,30 +19401,48 @@ "jwt": [] } ], - "tags": ["Datasets"] + "summary": "Get a report schedule log", + "tags": ["Embedded Dashboard"] } }, - "/api/v1/dataset/related/{column_name}": { + "/api/v1/explore/": { "get": { + "description": "Assembles Explore related information (form_data, slice, dataset) in a single endpoint.<br/><br/> The information can be assembled from:<br/> - The cache using a form_data_key<br/> - The metadata database using a permalink_key<br/> - Build from scratch using dataset or slice identifiers.", "parameters": [ { - "in": "path", - "name": "column_name", - "required": true, + "in": "query", + "name": "form_data_key", "schema": { "type": "string" } }, { - "content": { - "application/json": { - "schema": 
{ - "$ref": "#/components/schemas/get_related_schema" - } - } - }, "in": "query", - "name": "q" + "name": "permalink_key", + "schema": { + "type": "string" + } + }, + { + "in": "query", + "name": "slice_id", + "schema": { + "type": "integer" + } + }, + { + "in": "query", + "name": "datasource_id", + "schema": { + "type": "integer" + } + }, + { + "in": "query", + "name": "datasource_type", + "schema": { + "type": "string" + } } ], "responses": { @@ -16462,11 +19450,11 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/RelatedResponseSchema" + "$ref": "#/components/schemas/ExploreContextSchema" } } }, - "description": "Related column data" + "description": "Returns the initial context." }, "400": { "$ref": "#/components/responses/400" @@ -16477,6 +19465,9 @@ "404": { "$ref": "#/components/responses/404" }, + "422": { + "$ref": "#/components/responses/422" + }, "500": { "$ref": "#/components/responses/500" } @@ -16486,39 +19477,56 @@ "jwt": [] } ], - "tags": ["Datasets"] + "summary": "Assemble Explore related information in a single endpoint", + "tags": ["Explore"] } }, - "/api/v1/dataset/warm_up_cache": { - "put": { - "description": "Warms up the cache for the table. Note for slices a force refresh occurs. 
In terms of the `extra_filters` these can be obtained from records in the JSON encoded `logs.json` column associated with the `explore_json` action.", + "/api/v1/explore/form_data": { + "post": { + "parameters": [ + { + "in": "query", + "name": "tab_id", + "schema": { + "type": "integer" + } + } + ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DatasetCacheWarmUpRequestSchema" + "$ref": "#/components/schemas/FormDataPostSchema" } } }, - "description": "Identifies the database and table to warm up cache for, and any additional dashboard or filter context to use.", "required": true }, "responses": { - "200": { + "201": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DatasetCacheWarmUpResponseSchema" + "properties": { + "key": { + "description": "The key to retrieve the form_data.", + "type": "string" + } + }, + "type": "object" } } }, - "description": "Each chart's warmup status" + "description": "The form_data was stored successfully." 
}, "400": { "$ref": "#/components/responses/400" }, - "404": { - "$ref": "#/components/responses/404" + "401": { + "$ref": "#/components/responses/401" + }, + "422": { + "$ref": "#/components/responses/422" }, "500": { "$ref": "#/components/responses/500" @@ -16529,20 +19537,20 @@ "jwt": [] } ], - "summary": "Warms up the cache for each chart powered by the given table", - "tags": ["Datasets"] + "summary": "Create a new form_data", + "tags": ["Explore Form Data"] } }, - "/api/v1/dataset/{pk}": { + "/api/v1/explore/form_data/{key}": { "delete": { - "description": "Deletes a Dataset", "parameters": [ { + "description": "The form_data key.", "in": "path", - "name": "pk", + "name": "key", "required": true, "schema": { - "type": "integer" + "type": "string" } } ], @@ -16553,6 +19561,7 @@ "schema": { "properties": { "message": { + "description": "The result of the operation", "type": "string" } }, @@ -16560,14 +19569,14 @@ } } }, - "description": "Dataset delete" + "description": "Deleted the stored form_data." + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, "404": { "$ref": "#/components/responses/404" }, @@ -16583,74 +19592,28 @@ "jwt": [] } ], - "tags": ["Datasets"] + "summary": "Delete a form_data", + "tags": ["Explore Form Data"] }, "get": { - "description": "Get an item model", "parameters": [ { "in": "path", - "name": "pk", + "name": "key", "required": true, "schema": { - "type": "integer" + "type": "string" } - }, - { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/get_item_schema" - } - } - }, - "in": "query", - "name": "q" } ], "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "properties": { - "description_columns": { - "properties": { - "column_name": { - "description": "The description for the column name. 
Will be translated by babel", - "example": "A Nice description for the column", - "type": "string" - } - }, - "type": "object" - }, - "id": { - "description": "The item id", - "type": "string" - }, - "label_columns": { - "properties": { - "column_name": { - "description": "The label for the column name. Will be translated by babel", - "example": "A Nice label for the column", - "type": "string" - } - }, - "type": "object" - }, - "result": { - "$ref": "#/components/schemas/DatasetRestApi.get" - }, - "show_columns": { - "description": "A list of columns", - "items": { - "type": "string" - }, - "type": "array" - }, - "show_title": { - "description": "A title to render. Will be translated by babel", - "example": "Show Item Details", + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "form_data": { + "description": "The stored form_data", "type": "string" } }, @@ -16658,7 +19621,7 @@ } } }, - "description": "Item from Model" + "description": "Returns the stored form_data." 
}, "400": { "$ref": "#/components/responses/400" @@ -16681,24 +19644,24 @@ "jwt": [] } ], - "tags": ["Datasets"] + "summary": "Get a form_data", + "tags": ["Explore Form Data"] }, "put": { - "description": "Changes a Dataset", "parameters": [ { "in": "path", - "name": "pk", + "name": "key", "required": true, "schema": { - "type": "integer" + "type": "string" } }, { "in": "query", - "name": "override_columns", + "name": "tab_id", "schema": { - "type": "boolean" + "type": "integer" } } ], @@ -16706,11 +19669,10 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DatasetRestApi.put" + "$ref": "#/components/schemas/FormDataPutSchema" } } }, - "description": "Dataset schema", "required": true }, "responses": { @@ -16719,18 +19681,16 @@ "application/json": { "schema": { "properties": { - "id": { - "type": "number" - }, - "result": { - "$ref": "#/components/schemas/DatasetRestApi.put" + "key": { + "description": "The key to retrieve the form_data.", + "type": "string" } }, "type": "object" } } }, - "description": "Dataset changed" + "description": "The form_data was stored successfully." 
}, "400": { "$ref": "#/components/responses/400" @@ -16738,9 +19698,6 @@ "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, "404": { "$ref": "#/components/responses/404" }, @@ -16756,39 +19713,34 @@ "jwt": [] } ], - "tags": ["Datasets"] + "summary": "Update an existing form_data", + "tags": ["Explore Form Data"] } }, - "/api/v1/dataset/{pk}/column/{column_id}": { - "delete": { - "description": "Delete a Dataset column", - "parameters": [ - { - "description": "The dataset pk for this column", - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" + "/api/v1/explore/permalink": { + "post": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ExplorePermalinkStateSchema" + } } }, - { - "description": "The column id for this dataset", - "in": "path", - "name": "column_id", - "required": true, - "schema": { - "type": "integer" - } - } - ], + "required": true + }, "responses": { - "200": { + "201": { "content": { "application/json": { "schema": { "properties": { - "message": { + "key": { + "description": "The key to retrieve the permanent link data.", + "type": "string" + }, + "url": { + "description": "permanent link.", "type": "string" } }, @@ -16796,17 +19748,14 @@ } } }, - "description": "Column deleted" + "description": "The permanent link was stored successfully." 
+ }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, - "404": { - "$ref": "#/components/responses/404" - }, "422": { "$ref": "#/components/responses/422" }, @@ -16819,29 +19768,19 @@ "jwt": [] } ], - "tags": ["Datasets"] + "summary": "Create a new permanent link", + "tags": ["Explore Permanent Link"] } }, - "/api/v1/dataset/{pk}/metric/{metric_id}": { - "delete": { - "description": "Delete a Dataset metric", + "/api/v1/explore/permalink/{key}": { + "get": { "parameters": [ { - "description": "The dataset pk for this column", - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "description": "The metric id for this dataset", "in": "path", - "name": "metric_id", + "name": "key", "required": true, "schema": { - "type": "integer" + "type": "string" } } ], @@ -16851,22 +19790,23 @@ "application/json": { "schema": { "properties": { - "message": { - "type": "string" + "state": { + "description": "The stored state", + "type": "object" } }, "type": "object" } } }, - "description": "Metric deleted" + "description": "Returns the stored form_data." + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, "404": { "$ref": "#/components/responses/404" }, @@ -16882,72 +19822,93 @@ "jwt": [] } ], - "tags": ["Datasets"] + "summary": "Get chart's permanent link state", + "tags": ["Explore Permanent Link"] } }, - "/api/v1/dataset/{pk}/refresh": { - "put": { - "description": "Refreshes and updates columns of a dataset", - "parameters": [ - { - "in": "path", - "name": "pk", - "required": true, - "schema": { - "type": "integer" + "/api/v1/ias/login_token": { + "post": { + "description": "Endpoint for exchanging IAS tokens for Superset API tokens. 
IAS Tokens should be requested from the `acssuperset` IAS application, typically using [client credential flow](https://docs.aci.apple.com/ias/tutorials/client_credentials/index.html). At a minimum, an `id_token` needs to be provided, but by providing a `refresh_token` Superset will be able to automatically refresh the token after the initial one expires. However, to be able to refresh the token, `client_id` and `client_secret` must be provided, as the token needs to be refreshed with the same client that was used for logging in. Unless `refresh_token`, `client_id` and `client_secret` are provided, a `refresh_token` will not be returned.\\nThe following scope should be requested in the token request: `offline openid iam:ds:groups corpds:ds:username corpds:ds:firstName corpds:ds:lastName corpds:ds:email iam:ds:explicitgroups`", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IASLoginTokenRequestSchema" + } } - } - ], + }, + "description": "Payload containing IAS tokens.", + "required": true + }, "responses": { "200": { "content": { "application/json": { "schema": { - "properties": { - "message": { - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/IASLoginTokenResponseSchema" } } }, - "description": "Dataset delete" + "description": "Query result" + }, + "400": { + "$ref": "#/components/responses/400" }, "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, - "404": { - "$ref": "#/components/responses/404" - }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } }, + "tags": ["IAS"] + } + }, + "/api/v1/ias/profiles": { + "get": { + "description": "Endpoint for retrieving available IAS profiles.", + "parameters": [ + { + "description": "The name of the IAS profile", + "in": "query", + "name": "profile", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { 
+ "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IASProfilesResponseSchema" + } + } + }, + "description": "IAS Tokens" + } + }, "security": [ { "jwt": [] } ], - "tags": ["Datasets"] + "tags": ["IAS"] } }, - "/api/v1/dataset/{pk}/related_objects": { + "/api/v1/ias/tokens": { "get": { - "description": "Get charts and dashboards count associated to a dataset", + "description": "Endpoint for retrieving IAS tokens for the logged in user. If no token is provided, all tokens are returned.", "parameters": [ { - "in": "path", - "name": "pk", - "required": true, + "description": "The name of the IAS profile", + "in": "query", + "name": "profile", + "required": false, "schema": { - "type": "integer" + "type": "string" } } ], @@ -16956,20 +19917,14 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DatasetRelatedObjectsResponse" + "$ref": "#/components/schemas/IASTokensResponseSchema" } } }, - "description": "Query result" - }, - "401": { - "$ref": "#/components/responses/401" + "description": "IAS Tokens" }, - "404": { - "$ref": "#/components/responses/404" - }, - "500": { - "$ref": "#/components/responses/500" + "400": { + "$ref": "#/components/responses/400" } }, "security": [ @@ -16977,77 +19932,67 @@ "jwt": [] } ], - "tags": ["Datasets"] + "tags": ["IAS"] } }, - "/api/v1/datasource/{datasource_type}/{datasource_id}/column/{column_name}/values/": { + "/api/v1/ias/tokens/{pk}": { "get": { + "description": "Endpoint for retrieving IAS tokens for a specific user. If no token is provided, all tokens are returned. 
Only Admin users are able to view other users' tokens.", "parameters": [ { - "description": "The type of datasource", "in": "path", - "name": "datasource_type", + "name": "pk", "required": true, "schema": { "type": "string" } }, { - "description": "The id of the datasource", - "in": "path", - "name": "datasource_id", - "required": true, - "schema": { - "type": "integer" - } - }, - { - "description": "The name of the column to get values for", - "in": "path", - "name": "column_name", - "required": true, + "description": "The name of the IAS profile", + "in": "query", + "name": "profile", + "required": false, "schema": { - "type": "string" - } - } - ], - "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "properties": { - "result": { - "items": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "object" - } - ] - }, - "type": "array" - } - }, - "type": "object" + "type": "string" + } + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/IASTokensResponseSchema" } } }, - "description": "A List of distinct values for the column" + "description": "IAS Tokens" }, "400": { "$ref": "#/components/responses/400" + } + }, + "security": [ + { + "jwt": [] + } + ], + "tags": ["IAS"] + } + }, + "/api/v1/lakehouse": { + "get": { + "description": "Disconnect a Lakehouse.", + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LakehouseGetListResponseSchema" + } + } + }, + "description": "Lakehouse has been deleted" }, "401": { "$ref": "#/components/responses/401" @@ -17055,9 +20000,6 @@ "403": { "$ref": "#/components/responses/403" }, - "404": { - "$ref": "#/components/responses/404" - }, "500": { "$ref": "#/components/responses/500" } @@ -17067,18 +20009,18 @@ "jwt": [] } ], - "summary": "Get possible values for a datasource column", - 
"tags": ["Datasources"] + "summary": "Get a list of all connected Lakehouses", + "tags": ["Lakehouse"] } }, - "/api/v1/embedded_dashboard/{uuid}": { - "get": { - "description": "Get a report schedule log", + "/api/v1/lakehouse/{lakehouse_id}": { + "delete": { + "description": "Disconnect a Lakehouse.", "parameters": [ { - "description": "The embedded configuration uuid", + "description": "The unique id of the lakehouse", "in": "path", - "name": "uuid", + "name": "lakehouse_id", "required": true, "schema": { "type": "string" @@ -17090,20 +20032,18 @@ "content": { "application/json": { "schema": { - "properties": { - "result": { - "$ref": "#/components/schemas/EmbeddedDashboardResponseSchema" - } - }, - "type": "object" + "$ref": "#/components/schemas/LakehouseDisconnectResponseSchema" } } }, - "description": "Result contains the embedded dashboard configuration" + "description": "Lakehouse has been deleted" }, "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" }, @@ -17116,44 +20056,17 @@ "jwt": [] } ], - "tags": ["Embedded Dashboard"] - } - }, - "/api/v1/explore/": { + "summary": "Disconnect a Lakehouse and all catalogs", + "tags": ["Lakehouse"] + }, "get": { - "description": "Assembles Explore related information (form_data, slice, dataset)\\n in a single endpoint.<br/><br/>\\nThe information can be assembled from:<br/> - The cache using a form_data_key<br/> - The metadata database using a permalink_key<br/> - Build from scratch using dataset or slice identifiers.", + "description": "Get information regarding a connected Lakehouse", "parameters": [ { - "in": "query", - "name": "form_data_key", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "permalink_key", - "schema": { - "type": "string" - } - }, - { - "in": "query", - "name": "slice_id", - "schema": { - "type": "integer" - } - }, - { - "in": "query", - "name": "datasource_id", - "schema": 
{ - "type": "integer" - } - }, - { - "in": "query", - "name": "datasource_type", + "description": "The unique id of the lakehouse", + "in": "path", + "name": "lakehouse_id", + "required": true, "schema": { "type": "string" } @@ -17164,24 +20077,21 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ExploreContextSchema" + "$ref": "#/components/schemas/LakehouseGetResponseSchema" } } }, - "description": "Returns the initial context." - }, - "400": { - "$ref": "#/components/responses/400" + "description": "Lakehouses" }, "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -17191,19 +20101,19 @@ "jwt": [] } ], - "summary": "Assembles Explore related information (form_data, slice, dataset)\\n in a single endpoint.", - "tags": ["Explore"] - } - }, - "/api/v1/explore/form_data": { + "summary": "Get information about a specific Lakehouse", + "tags": ["Lakehouse"] + }, "post": { - "description": "Stores a new form_data.", + "description": "Endpoint for connecting a Lakehouse and associated catalogs to the ACS Superset instance.", "parameters": [ { - "in": "query", - "name": "tab_id", + "description": "The unique id of the lakehouse", + "in": "path", + "name": "lakehouse_id", + "required": true, "schema": { - "type": "integer" + "type": "string" } } ], @@ -17211,28 +20121,23 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/FormDataPostSchema" + "$ref": "#/components/schemas/LakehouseConnectRequestSchema" } } }, + "description": "Payload containing connection parameters.", "required": true }, "responses": { - "201": { + "200": { "content": { "application/json": { "schema": { - "properties": { - "key": { - "description": "The key to retrieve the form_data.", - "type": "string" - } - }, - "type": "object" + 
"$ref": "#/components/schemas/LakehouseConnectResponseSchema" } } }, - "description": "The form_data was stored successfully." + "description": "The connection task has been scheduled." }, "400": { "$ref": "#/components/responses/400" @@ -17240,8 +20145,8 @@ "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + "403": { + "$ref": "#/components/responses/403" }, "500": { "$ref": "#/components/responses/500" @@ -17252,21 +20157,38 @@ "jwt": [] } ], - "tags": ["Explore Form Data"] + "summary": "Schedule a task to connect a Lakehouse and catalogs", + "tags": ["Lakehouse"] } }, - "/api/v1/explore/form_data/{key}": { - "delete": { - "description": "Deletes a form_data.", + "/api/v1/lakehouse/{lakehouse_id}/dataset": { + "get": { + "description": "Endpoint for listing all virtual datasets on a Lakehouse", "parameters": [ { - "description": "The form_data key.", + "description": "The unique id of the lakehouse", "in": "path", - "name": "key", + "name": "lakehouse_id", "required": true, "schema": { "type": "string" } + }, + { + "description": "The catalog. Leave empty for all catalogs", + "in": "query", + "name": "catalog", + "schema": { + "type": "string" + } + }, + { + "description": "The schema. Leave empty for all schemas", + "in": "query", + "name": "schema", + "schema": { + "type": "string" + } } ], "responses": { @@ -17274,17 +20196,11 @@ "content": { "application/json": { "schema": { - "properties": { - "message": { - "description": "The result of the operation", - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/LakehouseDatasetListResponseSchema" } } }, - "description": "Deleted the stored form_data." + "description": "All virtual datasets satisfying the query params." 
}, "400": { "$ref": "#/components/responses/400" @@ -17292,11 +20208,8 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" - }, - "422": { - "$ref": "#/components/responses/422" + "403": { + "$ref": "#/components/responses/403" }, "500": { "$ref": "#/components/responses/500" @@ -17307,36 +20220,43 @@ "jwt": [] } ], - "tags": ["Explore Form Data"] + "summary": "Get a list of virtual datasets", + "tags": ["Lakehouse"] }, - "get": { - "description": "Retrives a form_data.", + "post": { + "description": "Endpoint for creating a virtual dataset from a query", "parameters": [ { + "description": "The unique id of the lakehouse", "in": "path", - "name": "key", + "name": "lakehouse_id", "required": true, "schema": { "type": "string" } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LakehouseDatasetRequestSchema" + } + } + }, + "description": "Payload containing virtual dataset parameters.", + "required": true + }, "responses": { "200": { "content": { "application/json": { "schema": { - "properties": { - "form_data": { - "description": "The stored form_data", - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/LakehouseDatasetResponseSchema" } } }, - "description": "Returns the stored form_data." + "description": "The virtual dataset has been created." 
}, "400": { "$ref": "#/components/responses/400" @@ -17344,8 +20264,8 @@ "401": { "$ref": "#/components/responses/401" }, - "404": { - "$ref": "#/components/responses/404" + "403": { + "$ref": "#/components/responses/403" }, "422": { "$ref": "#/components/responses/422" @@ -17359,53 +20279,43 @@ "jwt": [] } ], - "tags": ["Explore Form Data"] - }, - "put": { - "description": "Updates an existing form_data.", + "summary": "Create a virtual dataset out of a query", + "tags": ["Lakehouse"] + } + }, + "/api/v1/lakehouse/{lakehouse_id}/task/{task_id}/cancel": { + "post": { + "description": "Endpoint for cancelling a scheduled connection request.", "parameters": [ { + "description": "The unique id of the lakehouse", "in": "path", - "name": "key", + "name": "lakehouse_id", "required": true, "schema": { "type": "string" } }, { - "in": "query", - "name": "tab_id", + "description": "The id of the connection task", + "in": "path", + "name": "task_id", + "required": true, "schema": { - "type": "integer" + "type": "string" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/FormDataPutSchema" - } - } - }, - "required": true - }, "responses": { "200": { "content": { "application/json": { "schema": { - "properties": { - "key": { - "description": "The key to retrieve the form_data.", - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/LakehouseConnectStatusResponseSchema" } } }, - "description": "The form_data was stored successfully." + "description": "The status of the task." 
}, "400": { "$ref": "#/components/responses/400" @@ -17413,12 +20323,12 @@ "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -17428,42 +20338,43 @@ "jwt": [] } ], - "tags": ["Explore Form Data"] + "summary": "Cancel a running task", + "tags": ["Lakehouse"] } }, - "/api/v1/explore/permalink": { - "post": { - "description": "Stores a new permanent link.", - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ExplorePermalinkStateSchema" - } + "/api/v1/lakehouse/{lakehouse_id}/task/{task_id}/result": { + "get": { + "description": "Endpoint for retrieving the payload of a scheduled connection request.", + "parameters": [ + { + "description": "The unique id of the lakehouse", + "in": "path", + "name": "lakehouse_id", + "required": true, + "schema": { + "type": "string" } }, - "required": true - }, + { + "description": "The id of the connection task", + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "type": "string" + } + } + ], "responses": { - "201": { + "200": { "content": { "application/json": { "schema": { - "properties": { - "key": { - "description": "The key to retrieve the permanent link data.", - "type": "string" - }, - "url": { - "description": "permanent link.", - "type": "string" - } - }, - "type": "object" + "$ref": "#/components/schemas/LakehouseConnectResultResponseSchema" } } }, - "description": "The permanent link was stored successfully." + "description": "The status of the task." 
}, "400": { "$ref": "#/components/responses/400" @@ -17471,8 +20382,8 @@ "401": { "$ref": "#/components/responses/401" }, - "422": { - "$ref": "#/components/responses/422" + "403": { + "$ref": "#/components/responses/404" }, "500": { "$ref": "#/components/responses/500" @@ -17483,16 +20394,27 @@ "jwt": [] } ], - "tags": ["Explore Permanent Link"] + "summary": "Get the payload of a completed connection task", + "tags": ["Lakehouse"] } }, - "/api/v1/explore/permalink/{key}": { + "/api/v1/lakehouse/{lakehouse_id}/task/{task_id}/status": { "get": { - "description": "Retrives chart state associated with a permanent link.", + "description": "Endpoint for checking what the status of a scheduled connection request is.", "parameters": [ { + "description": "The unique id of the lakehouse", "in": "path", - "name": "key", + "name": "lakehouse_id", + "required": true, + "schema": { + "type": "string" + } + }, + { + "description": "The id of the connection task", + "in": "path", + "name": "task_id", "required": true, "schema": { "type": "string" @@ -17504,17 +20426,11 @@ "content": { "application/json": { "schema": { - "properties": { - "state": { - "description": "The stored state", - "type": "object" - } - }, - "type": "object" + "$ref": "#/components/schemas/LakehouseConnectStatusResponseSchema" } } }, - "description": "Returns the stored form_data." + "description": "The status of the task." 
}, "400": { "$ref": "#/components/responses/400" @@ -17522,12 +20438,12 @@ "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -17537,12 +20453,13 @@ "jwt": [] } ], - "tags": ["Explore Permanent Link"] + "summary": "Check the status of a connection task", + "tags": ["Lakehouse"] } }, "/api/v1/log/": { "get": { - "description": "Get a list of models", + "description": "Gets a list of logs, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", "parameters": [ { "content": { @@ -17644,6 +20561,7 @@ "jwt": [] } ], + "summary": "Get a list of logs", "tags": ["LogRestApi"] }, "post": { @@ -17852,12 +20770,13 @@ "jwt": [] } ], + "summary": "Get a log detail information", "tags": ["LogRestApi"] } }, "/api/v1/me/": { "get": { - "description": "Returns the user object corresponding to the agent making the request, or returns a 401 error if the user is unauthenticated.", + "description": "Gets the user object corresponding to the agent making the request, or returns a 401 error if the user is unauthenticated.", "responses": { "200": { "content": { @@ -17878,12 +20797,13 @@ "$ref": "#/components/responses/401" } }, + "summary": "Get the user object", "tags": ["Current User"] } }, "/api/v1/me/roles/": { "get": { - "description": "Returns the user roles corresponding to the agent making the request, or returns a 401 error if the user is unauthenticated.", + "description": "Gets the user roles corresponding to the agent making the request, or returns a 401 error if the user is unauthenticated.", "responses": { "200": { "content": { @@ -17904,6 +20824,7 @@ "$ref": "#/components/responses/401" } }, + "summary": "Get the user roles", "tags": ["Current User"] } }, @@ -17968,7 +20889,7 @@ }, "/api/v1/query/": { 
"get": { - "description": "Get a list of queries, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", + "description": "Gets a list of queries, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", "parameters": [ { "content": { @@ -18070,6 +20991,7 @@ "jwt": [] } ], + "summary": "Get a list of queries", "tags": ["Queries"] } }, @@ -18125,6 +21047,7 @@ "jwt": [] } ], + "summary": "Get distinct values from field data", "tags": ["Queries"] } }, @@ -18180,6 +21103,7 @@ "jwt": [] } ], + "summary": "Get related fields data", "tags": ["Queries"] } }, @@ -18293,7 +21217,7 @@ }, "/api/v1/query/{pk}": { "get": { - "description": "Get query detail information.", + "description": "Get an item model", "parameters": [ { "in": "path", @@ -18388,12 +21312,12 @@ "jwt": [] } ], + "summary": "Get query detail information", "tags": ["Queries"] } }, "/api/v1/report/": { "delete": { - "description": "Deletes multiple report schedules in a bulk operation.", "parameters": [ { "content": { @@ -18444,10 +21368,11 @@ "jwt": [] } ], + "summary": "Bulk delete report schedules", "tags": ["Report Schedules"] }, "get": { - "description": "Get a list of report schedules, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", + "description": "Gets a list of report schedules, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", "parameters": [ { "content": { @@ -18549,10 +21474,10 @@ "jwt": [] } ], + "summary": "Get a list of report schedules", "tags": ["Report Schedules"] }, "post": { - "description": "Create a report schedule", "requestBody": { "content": { "application/json": { @@ -18604,6 +21529,7 @@ "jwt": [] } ], + "summary": "Create a report schedule", "tags": ["Report Schedules"] } }, @@ -18688,6 +21614,7 @@ "jwt": [] } ], + 
"summary": "Get metadata information about this API resource", "tags": ["Report Schedules"] } }, @@ -18743,12 +21670,80 @@ "jwt": [] } ], + "summary": "Get related fields data", + "tags": ["Report Schedules"] + } + }, + "/api/v1/report/slack_channels/": { + "get": { + "description": "Get slack channels", + "parameters": [ + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/get_slack_channels_schema" + } + } + }, + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "result": { + "items": { + "properties": { + "id": { + "type": "string" + }, + "name": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + } + } + }, + "description": "Slack channels" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "422": { + "$ref": "#/components/responses/422" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Get slack channels", "tags": ["Report Schedules"] } }, "/api/v1/report/{pk}": { "delete": { - "description": "Delete a report schedule", "parameters": [ { "description": "The report schedule pk", @@ -18794,10 +21789,11 @@ "jwt": [] } ], + "summary": "Delete a report schedule", "tags": ["Report Schedules"] }, "get": { - "description": "Get a report schedule", + "description": "Get an item model", "parameters": [ { "in": "path", @@ -18892,10 +21888,10 @@ "jwt": [] } ], + "summary": "Get a report schedule", "tags": ["Report Schedules"] }, "put": { - "description": "Update a report schedule", "parameters": [ { "description": "The Report Schedule pk", @@ -18961,12 +21957,13 @@ "jwt": [] } ], + "summary": "Update a report schedule", "tags": ["Report Schedules"] } }, "/api/v1/report/{pk}/log/": { "get": { - "description": "Get 
a list of report schedule logs, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", + "description": "Gets a list of report schedule logs, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", "parameters": [ { "description": "The report schedule id for these logs", @@ -19038,12 +22035,12 @@ "jwt": [] } ], + "summary": "Get a list of report schedule logs", "tags": ["Report Schedules"] } }, "/api/v1/report/{pk}/log/{log_id}": { "get": { - "description": "Get a report schedule log", "parameters": [ { "description": "The report schedule pk for log", @@ -19116,12 +22113,12 @@ "jwt": [] } ], + "summary": "Get a report schedule log", "tags": ["Report Schedules"] } }, "/api/v1/rowlevelsecurity/": { "delete": { - "description": "Deletes multiple RLS rules in a bulk operation.", "parameters": [ { "content": { @@ -19172,10 +22169,11 @@ "jwt": [] } ], + "summary": "Bulk delete RLS rules", "tags": ["Row Level Security"] }, "get": { - "description": "Get a list of models", + "description": "Gets a list of RLS, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", "parameters": [ { "content": { @@ -19277,10 +22275,10 @@ "jwt": [] } ], + "summary": "Get a list of RLS", "tags": ["Row Level Security"] }, "post": { - "description": "Create a new RLS Rule", "requestBody": { "content": { "application/json": { @@ -19332,6 +22330,7 @@ "jwt": [] } ], + "summary": "Create a new RLS rule", "tags": ["Row Level Security"] } }, @@ -19416,6 +22415,7 @@ "jwt": [] } ], + "summary": "Get metadata information about this API resource", "tags": ["Row Level Security"] } }, @@ -19471,6 +22471,7 @@ "jwt": [] } ], + "summary": "Get related fields data", "tags": ["Row Level Security"] } }, @@ -19517,6 +22518,7 @@ "jwt": [] } ], + "summary": "Delete an RLS", "tags": ["Row Level Security"] }, "get": { @@ 
-19615,10 +22617,10 @@ "jwt": [] } ], + "summary": "Get an RLS", "tags": ["Row Level Security"] }, "put": { - "description": "Updates an RLS Rule", "parameters": [ { "description": "The Rule pk", @@ -19684,12 +22686,12 @@ "jwt": [] } ], + "summary": "Update an RLS rule", "tags": ["Row Level Security"] } }, "/api/v1/saved_query/": { "delete": { - "description": "Deletes multiple saved queries in a bulk operation.", "parameters": [ { "content": { @@ -19737,10 +22739,11 @@ "jwt": [] } ], + "summary": "Bulk delete saved queries", "tags": ["Queries"] }, "get": { - "description": "Get a list of saved queries, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", + "description": "Gets a list of saved queries, use Rison or JSON query parameters for filtering, sorting, pagination and for selecting specific columns and metadata.", "parameters": [ { "content": { @@ -19842,10 +22845,10 @@ "jwt": [] } ], + "summary": "Get a list of saved queries", "tags": ["Queries"] }, "post": { - "description": "Create a saved query", "requestBody": { "content": { "application/json": { @@ -19894,6 +22897,7 @@ "jwt": [] } ], + "summary": "Create a saved query", "tags": ["Queries"] } }, @@ -19978,6 +22982,7 @@ "jwt": [] } ], + "summary": "Get metadata information about this API resource", "tags": ["Queries"] } }, @@ -20033,12 +23038,12 @@ "jwt": [] } ], + "summary": "Get distinct values from field data", "tags": ["Queries"] } }, "/api/v1/saved_query/export/": { "get": { - "description": "Exports multiple saved queries and downloads them as YAML files", "parameters": [ { "content": { @@ -20082,6 +23087,7 @@ "jwt": [] } ], + "summary": "Download multiple saved queries as YAML files", "tags": ["Queries"] } }, @@ -20158,6 +23164,7 @@ "jwt": [] } ], + "summary": "Import saved queries with associated databases", "tags": ["Queries"] } }, @@ -20213,12 +23220,12 @@ "jwt": [] } ], + "summary": "Get related fields data", "tags": 
["Queries"] } }, "/api/v1/saved_query/{pk}": { "delete": { - "description": "Delete saved query", "parameters": [ { "in": "path", @@ -20260,10 +23267,11 @@ "jwt": [] } ], + "summary": "Delete a saved query", "tags": ["Queries"] }, "get": { - "description": "Get a saved query", + "description": "Get an item model", "parameters": [ { "in": "path", @@ -20358,10 +23366,10 @@ "jwt": [] } ], + "summary": "Get a saved query", "tags": ["Queries"] }, "put": { - "description": "Update a saved query", "parameters": [ { "in": "path", @@ -20420,12 +23428,12 @@ "jwt": [] } ], + "summary": "Update a saved query", "tags": ["Queries"] } }, "/api/v1/security/csrf_token/": { "get": { - "description": "Fetch the CSRF token", "responses": { "200": { "content": { @@ -20454,12 +23462,12 @@ "jwt": [] } ], + "summary": "Get the CSRF token", "tags": ["Security"] } }, "/api/v1/security/guest_token/": { "post": { - "description": "Fetches a guest token", "requestBody": { "content": { "application/json": { @@ -20502,6 +23510,7 @@ "jwt": [] } ], + "summary": "Get a guest token", "tags": ["Security"] } }, @@ -20608,6 +23617,42 @@ "tags": ["Security"] } }, + "/api/v1/sqllab/": { + "get": { + "description": "Assembles SQLLab bootstrap data (active_tab, databases, queries, tab_state_ids) in a single endpoint. 
The data can be assembled from the current user's id.", + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SQLLabBootstrapSchema" + } + } + }, + "description": "Returns the initial bootstrap data for SqlLab" + }, + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Get the bootstrap data for SqlLab page", + "tags": ["SQL Lab"] + } + }, "/api/v1/sqllab/estimate/": { "post": { "requestBody": { @@ -20655,13 +23700,12 @@ "jwt": [] } ], - "summary": "Estimates the SQL query execution cost", + "summary": "Estimate the SQL query execution cost", "tags": ["SQL Lab"] } }, "/api/v1/sqllab/execute/": { "post": { - "description": "Starts the execution of a SQL query", "requestBody": { "content": { "application/json": { @@ -20715,6 +23759,7 @@ "jwt": [] } ], + "summary": "Execute a SQL query", "tags": ["SQL Lab"] } }, @@ -20763,7 +23808,58 @@ "jwt": [] } ], - "summary": "Exports the SQL query results to a CSV", + "summary": "Export the SQL query results to a CSV", + "tags": ["SQL Lab"] + } + }, + "/api/v1/sqllab/format_sql/": { + "post": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/FormatQueryPayloadSchema" + } + } + }, + "description": "SQL query", + "required": true + }, + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "result": { + "type": "string" + } + }, + "type": "object" + } + } + }, + "description": "Format SQL result" + }, + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] 
+ } + ], + "summary": "Format SQL code", "tags": ["SQL Lab"] } }, @@ -20817,13 +23913,13 @@ "jwt": [] } ], - "summary": "Gets the result of a SQL query execution", + "summary": "Get the result of a SQL query execution", "tags": ["SQL Lab"] } }, "/api/v1/tag/": { "delete": { - "description": "Deletes multiple Tags. This will remove all tagged objects with this tag", + "description": "Bulk deletes tags. This will remove all tagged objects with this tag.", "parameters": [ { "content": { @@ -20874,6 +23970,7 @@ "jwt": [] } ], + "summary": "Bulk delete tags", "tags": ["Tags"] }, "get": { @@ -20979,9 +24076,11 @@ "jwt": [] } ], + "summary": "Get a list of tags", "tags": ["Tags"] }, "post": { + "description": "Create a new Tag", "requestBody": { "content": { "application/json": { @@ -20990,7 +24089,7 @@ } } }, - "description": "Model schema", + "description": "Tag schema", "required": true }, "responses": { @@ -21000,7 +24099,7 @@ "schema": { "properties": { "id": { - "type": "string" + "type": "number" }, "result": { "$ref": "#/components/schemas/TagRestApi.post" @@ -21010,7 +24109,7 @@ } } }, - "description": "Item inserted" + "description": "Tag added" }, "400": { "$ref": "#/components/responses/400" @@ -21030,12 +24129,13 @@ "jwt": [] } ], + "summary": "Create a tag", "tags": ["Tags"] } }, "/api/v1/tag/_info": { "get": { - "description": "Several metadata information about tag API endpoints.", + "description": "Get metadata information about this API resource", "parameters": [ { "content": { @@ -21114,12 +24214,62 @@ "jwt": [] } ], + "summary": "Get metadata information about tag API endpoints", + "tags": ["Tags"] + } + }, + "/api/v1/tag/bulk_create": { + "post": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TagPostBulkSchema" + } + } + }, + "description": "Tag schema", + "required": true + }, + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/TagPostBulkResponseSchema" + } + } + }, + "description": "Bulk created tags and tagged objects" + }, + "302": { + "description": "Redirects to the current digest" + }, + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Bulk create tags and tagged objects", "tags": ["Tags"] } }, "/api/v1/tag/favorite_status/": { "get": { - "description": "Check favorited dashboards for current user", + "description": "Get favorited tags for current user", "parameters": [ { "content": { @@ -21167,7 +24317,6 @@ }, "/api/v1/tag/get_objects/": { "get": { - "description": "Gets all objects associated with a Tag.", "parameters": [ { "in": "path", @@ -21218,6 +24367,7 @@ "jwt": [] } ], + "summary": "Get all objects associated with a tag", "tags": ["Tags"] } }, @@ -21273,12 +24423,13 @@ "jwt": [] } ], + "summary": "Get related fields data", "tags": ["Tags"] } }, "/api/v1/tag/{object_type}/{object_id}/": { "post": { - "description": "Add tags to an object..", + "description": "Adds tags to an object. 
Creates new tags if they do not already exist.", "parameters": [ { "in": "path", @@ -21342,12 +24493,12 @@ "jwt": [] } ], + "summary": "Add tags to an object", "tags": ["Tags"] } }, "/api/v1/tag/{object_type}/{object_id}/{tag}/": { "delete": { - "description": "Deletes a Tagged Object.", "parameters": [ { "in": "path", @@ -21411,6 +24562,7 @@ "jwt": [] } ], + "summary": "Delete a tagged object", "tags": ["Tags"] } }, @@ -21457,10 +24609,11 @@ "jwt": [] } ], + "summary": "Delete a tag", "tags": ["Tags"] }, "get": { - "description": "Get a tag detail information.", + "description": "Get an item model", "parameters": [ { "in": "path", @@ -21555,9 +24708,11 @@ "jwt": [] } ], + "summary": "Get a tag detail information", "tags": ["Tags"] }, "put": { + "description": "Changes a Tag.", "parameters": [ { "in": "path", @@ -21576,7 +24731,7 @@ } } }, - "description": "Model schema", + "description": "Chart schema", "required": true }, "responses": { @@ -21585,6 +24740,9 @@ "application/json": { "schema": { "properties": { + "id": { + "type": "number" + }, "result": { "$ref": "#/components/schemas/TagRestApi.put" } @@ -21593,7 +24751,7 @@ } } }, - "description": "Item changed" + "description": "Tag changed" }, "400": { "$ref": "#/components/responses/400" @@ -21601,6 +24759,9 @@ "401": { "$ref": "#/components/responses/401" }, + "403": { + "$ref": "#/components/responses/403" + }, "404": { "$ref": "#/components/responses/404" }, @@ -21616,6 +24777,7 @@ "jwt": [] } ], + "summary": "Update a tag", "tags": ["Tags"] } }, @@ -21717,6 +24879,35 @@ "tags": ["Tags"] } }, + "/api/v1/user/{user_id}/avatar.png": { + "get": { + "description": "Gets the avatar URL for the user with the given ID, or returns a 401 error if the user is unauthenticated.", + "parameters": [ + { + "description": "The ID of the user", + "in": "path", + "name": "user_id", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "301": { + "description": "A redirect to the user's avatar 
URL" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + } + }, + "summary": "Get the user avatar", + "tags": ["User"] + } + }, "/api/{version}/_openapi": { "get": { "description": "Get the OpenAPI spec for a specific API version", diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 000000000000..bf116ca204ed --- /dev/null +++ b/package-lock.json @@ -0,0 +1,6 @@ +{ + "name": "superset", + "lockfileVersion": 3, + "requires": true, + "packages": {} +} diff --git a/pyproject.toml b/pyproject.toml index c6e0c16a21af..07803def91ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,7 +20,7 @@ requires = ["setuptools>=40.9.0", "wheel"] build-backend = "setuptools.build_meta" [project] -name = "apache-superset" +name = "apache_superset" description = "A modern, enterprise-ready business intelligence web application" readme = "README.md" dynamic = ["version", "scripts", "entry-points"] @@ -61,8 +61,11 @@ dependencies = [ "humanize", "importlib_metadata", "isodate", + "jsonpath-ng>=1.6.1, <2", "Mako>=1.2.2", "markdown>=3.0", + # marshmallow>=4 has issues: https://github.com/apache/superset/issues/33162 + "marshmallow<4", "msgpack>=1.0.0, <1.1", "nh3>=0.2.11, <0.3", "numpy==1.23.5", @@ -88,7 +91,7 @@ dependencies = [ "slack_sdk>=3.19.0, <4", "sqlalchemy>=1.4, <2", "sqlalchemy-utils>=0.38.3, <0.39", - "sqlglot>=23.0.2,<24", + "sqlglot>=25.24.0,<27", "sqlparse>=0.5.0", "tabulate>=0.8.9, <0.9", "typing-extensions>=4, <5", @@ -132,7 +135,6 @@ gevent = ["gevent>=23.9.1"] gsheets = ["shillelagh[gsheetsapi]>=1.2.18, <2"] hana = ["hdbcli==2.4.162", "sqlalchemy_hana==0.4.0"] hive = [ - "boto3", "pyhive[hive]>=0.6.5;python_version<'3.11'", "pyhive[hive_pure_sasl]>=0.7.0", "tableschema", @@ -155,7 +157,7 @@ pinot = ["pinotdb>=5.0.0, <6.0.0"] playwright = ["playwright>=1.37.0, <2"] postgres = ["psycopg2-binary==2.9.6"] presto = ["pyhive[presto]>=0.6.5"] -trino = ["boto3", "trino>=0.328.0"] +trino = 
["trino>=0.328.0"] prophet = ["prophet>=1.1.5, <2"] redshift = ["sqlalchemy-redshift>=0.8.1, <0.9"] rockset = ["rockset-sqlalchemy>=0.0.1, <1"] @@ -230,6 +232,7 @@ module = "tests.*" check_untyped_defs = false disallow_untyped_calls = false disallow_untyped_defs = false +disable_error_code = "annotation-unchecked" [tool.tox] legacy_tox_ini = """ diff --git a/requirements/base.txt b/requirements/base.txt index 1b19d3a9205f..7131c890c565 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -144,11 +144,11 @@ geopy==2.4.1 google-auth==2.29.0 # via shillelagh greenlet==3.0.3 - # via - # shillelagh - # sqlalchemy + # via shillelagh gunicorn==22.0.0 # via apache-superset +h11==0.16.0 + # via wsproto hashids==1.3.1 # via apache-superset holidays==0.25 @@ -173,6 +173,8 @@ jinja2==3.1.4 # via # flask # flask-babel +jsonpath-ng==1.6.1 + # via apache-superset jsonschema==4.17.3 # via flask-appbuilder kombu==5.3.7 @@ -249,6 +251,8 @@ pgsanity==0.2.9 # via apache-superset platformdirs==3.8.1 # via requests-cache +ply==3.11 + # via jsonpath-ng polyline==2.0.2 # via apache-superset prison==0.2.1 @@ -344,7 +348,7 @@ sqlalchemy-utils==0.38.3 # via # apache-superset # flask-appbuilder -sqlglot==23.6.3 +sqlglot==26.11.1 # via apache-superset sqlparse==0.5.0 # via apache-superset @@ -378,7 +382,7 @@ vine==5.1.0 # kombu wcwidth==0.2.13 # via prompt-toolkit -werkzeug==3.0.3 +werkzeug==3.0.6 # via # -r requirements/base.in # flask diff --git a/requirements/development.txt b/requirements/development.txt index 5b99fd81b615..559b451d4acf 100644 --- a/requirements/development.txt +++ b/requirements/development.txt @@ -10,12 +10,12 @@ # via # -r requirements/base.in # -r requirements/development.in -appnope==0.1.4 - # via ipython astroid==3.1.0 # via pylint boto3==1.34.112 - # via dataflows-tabulator + # via + # apache-superset + # dataflows-tabulator botocore==1.34.112 # via # boto3 @@ -101,6 +101,10 @@ grpcio==1.62.1 # grpcio-status grpcio-status==1.60.1 # via google-api-core 
+h11==0.16.0 + # via + # -c requirements/base.txt + # wsproto identify==2.5.36 # via pre-commit ijson==3.2.3 @@ -177,9 +181,7 @@ protobuf==4.23.0 psycopg2-binary==2.9.6 # via apache-superset pure-sasl==0.6.2 - # via - # pyhive - # thrift-sasl + # via thrift-sasl pydata-google-auth==1.7.0 # via pandas-gbq pydruid==0.6.9 @@ -223,6 +225,10 @@ s3transfer==0.10.1 # via boto3 sqlalchemy-bigquery==1.11.0 # via apache-superset +sqlglot==26.11.1 + # via + # -c requirements/base.txt + # apache-superset sqloxide==0.1.43 # via apache-superset statsd==4.0.1 @@ -232,18 +238,9 @@ tableschema==1.20.10 thrift==0.16.0 # via # apache-superset - # pyhive # thrift-sasl thrift-sasl==0.4.3 - # via - # build - # coverage - # pip-tools - # pylint - # pyproject-api - # pyproject-hooks - # pytest - # tox + # via apache-superset tomlkit==0.12.5 # via pylint toposort==1.10 @@ -254,9 +251,6 @@ tqdm==4.66.4 # via # cmdstanpy # prophet -traitlets==5.14.3 - # via - # matplotlib-inline trino==0.328.0 # via apache-superset tzlocal==5.2 diff --git a/scripts/change_detector.py b/scripts/change_detector.py index 39e4a5c8ccd2..f52cd59fec45 100755 --- a/scripts/change_detector.py +++ b/scripts/change_detector.py @@ -52,7 +52,7 @@ def fetch_files_github_api(url: str): # type: ignore """Fetches data using GitHub API.""" req = Request(url) - req.add_header("Authorization", f"token {GITHUB_TOKEN}") + req.add_header("Authorization", f"Bearer {GITHUB_TOKEN}") req.add_header("Accept", "application/vnd.github.v3+json") print(f"Fetching from {url}") diff --git a/scripts/cypress_run.py b/scripts/cypress_run.py index 1154a3661dda..cef760e815b3 100644 --- a/scripts/cypress_run.py +++ b/scripts/cypress_run.py @@ -16,26 +16,18 @@ # under the License. 
import argparse -import hashlib import os import subprocess from datetime import datetime XVFB_PRE_CMD = "xvfb-run --auto-servernum --server-args='-screen 0, 1024x768x24' " REPO = os.getenv("GITHUB_REPOSITORY") or "apache/superset" -GITHUB_EVENT_NAME = os.getenv("GITHUB_REPOSITORY") or "push" +GITHUB_EVENT_NAME = os.getenv("GITHUB_EVENT_NAME") or "push" CYPRESS_RECORD_KEY = os.getenv("CYPRESS_RECORD_KEY") or "" -def compute_hash(file_path: str) -> str: - return hashlib.md5(file_path.encode()).hexdigest() - - -def compute_group_index(hash_value: str, num_groups: int) -> int: - return int(hash_value, 16) % num_groups - - def generate_build_id() -> str: + """Generates a build ID based on the current timestamp.""" now = datetime.now() rounded_minute = now.minute - (now.minute % 20) rounded_time = now.replace(minute=rounded_minute, second=0, microsecond=0) @@ -44,42 +36,70 @@ def generate_build_id() -> str: ) -def get_cypress_cmd( - spec_list: list[str], _filter: str, group: str, use_dashboard: bool -) -> str: +def run_cypress_for_test_file( + test_file: str, retries: int, use_dashboard: bool, group: str, dry_run: bool +) -> int: + """Runs Cypress for a single test file and retries upon failure.""" cypress_cmd = "./node_modules/.bin/cypress run" - os.environ["TERM"] = "xterm" os.environ["ELECTRON_DISABLE_GPU"] = "true" build_id = generate_build_id() browser = os.getenv("CYPRESS_BROWSER", "chrome") + chrome_flags = "--disable-dev-shm-usage" + # Create Cypress command for a single test file if use_dashboard: - # Run using cypress.io service - spec: str = "cypress/e2e/*/**/*" cmd = ( f"{XVFB_PRE_CMD} " - f'{cypress_cmd} --spec "{spec}" --browser {browser} ' + f'{cypress_cmd} --spec "{test_file}" --browser {browser} ' f"--record --group {group} --tag {REPO},{GITHUB_EVENT_NAME} " - f"--parallel --ci-build-id {build_id}" + f"--parallel --ci-build-id {build_id} " + f"-- {chrome_flags}" ) else: - # Run local, but split the execution os.environ.pop("CYPRESS_RECORD_KEY", None) - 
spec_list_str = ",".join(sorted(spec_list)) - if _filter: - spec_list_str = ",".join(sorted([s for s in spec_list if _filter in s])) cmd = ( f"{XVFB_PRE_CMD} " f"{cypress_cmd} --browser {browser} " - f'--spec "{spec_list_str}" ' + f'--spec "{test_file}" ' + f"-- {chrome_flags}" + ) + + if dry_run: + # Print the command instead of executing it + print(f"DRY RUN: {cmd}") + return 0 + + for attempt in range(retries): + print(f"RUN: {cmd} (Attempt {attempt + 1}/{retries})") + process = subprocess.Popen( + cmd, + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, ) - return cmd + + # Stream stdout in real-time + if process.stdout: + for stdout_line in iter(process.stdout.readline, ""): + print(stdout_line, end="") + + process.wait() + + if process.returncode == 0: + print(f"Test {test_file} succeeded on attempt {attempt + 1}") + return 0 + else: + print(f"Test {test_file} failed on attempt {attempt + 1}") + + print(f"Test {test_file} failed after {retries} retries.") + return process.returncode def main() -> None: parser = argparse.ArgumentParser( - description="Generate Cypress commands based on test file hash" + description="Run Cypress tests with retries per test file" ) parser.add_argument( "--use-dashboard", @@ -93,9 +113,12 @@ def main() -> None: "--parallelism-id", type=int, required=True, help="ID of the parallelism group" ) parser.add_argument( - "--filter", type=str, required=False, default=None, help="filter to test" + "--filter", type=str, required=False, default=None, help="Filter to test" ) parser.add_argument("--group", type=str, default="Default", help="Group name") + parser.add_argument( + "--retries", type=int, default=3, help="Number of retries per test file" + ) parser.add_argument( "--dry-run", action="store_true", @@ -109,14 +132,17 @@ def main() -> None: cypress_tests_path = os.path.join(cypress_base_full_path, "cypress/e2e") test_files = [] + file_count = 0 for root, _, files in 
os.walk(cypress_tests_path): for file in files: if file.endswith("test.ts") or file.endswith("test.js"): + file_count += 1 test_files.append( os.path.join(root, file).replace(cypress_base_full_path, "") ) + print(f"Found {file_count} test files.") - # Initialize groups + # Initialize groups for round-robin distribution groups: dict[int, list[str]] = {i: [] for i in range(args.parallelism)} # Sort test files to ensure deterministic distribution @@ -127,12 +153,21 @@ def main() -> None: group_index = index % args.parallelism groups[group_index].append(test_file) + # Only run tests for the group that matches the parallelism ID group_id = args.parallelism_id spec_list = groups[group_id] - cmd = get_cypress_cmd(spec_list, args.filter, args.group, args.use_dashboard) - print(f"RUN: {cmd}") - if not args.dry_run: - subprocess.run(cmd, shell=True, check=True, stdout=None, stderr=None) + + # Run each test file independently with retry logic or dry-run + processed_file_count: int = 0 + for test_file in spec_list: + result = run_cypress_for_test_file( + test_file, args.retries, args.use_dashboard, args.group, args.dry_run + ) + if result != 0: + print(f"Exiting due to failure in {test_file}") + exit(result) + processed_file_count += 1 + print(f"Ran {processed_file_count} test files successfully.") if __name__ == "__main__": diff --git a/scripts/tests/run.sh b/scripts/tests/run.sh index bf8431caeb91..7ba4c5e448fe 100755 --- a/scripts/tests/run.sh +++ b/scripts/tests/run.sh @@ -53,6 +53,9 @@ function test_init() { echo Superset init echo -------------------- superset init + echo Load test users + echo -------------------- + superset load-test-users } # diff --git a/scripts/translations/babel_update.sh b/scripts/translations/babel_update.sh index 9b2464d16103..73c0ecfacc1c 100755 --- a/scripts/translations/babel_update.sh +++ b/scripts/translations/babel_update.sh @@ -15,6 +15,7 @@ # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. + CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd ../.. && pwd )" LICENSE_TMP=$(mktemp) @@ -47,6 +48,10 @@ pybabel extract \ --copyright-holder=Superset \ --project=Superset \ -k _ -k __ -k t -k tn:1,2 -k tct . + +# Normalize .pot file +msgcat --sort-by-msgid --no-wrap --no-location superset/translations/messages.pot -o superset/translations/messages.pot + cat $LICENSE_TMP superset/translations/messages.pot > messages.pot.tmp \ && mv messages.pot.tmp superset/translations/messages.pot diff --git a/setup.py b/setup.py index 00b8d22e2a4f..4ccdeb3f00c0 100644 --- a/setup.py +++ b/setup.py @@ -52,6 +52,7 @@ def get_git_sha() -> str: version_string = version_string.replace("-dev", ".dev0") setup( + name="apache_superset", version=version_string, packages=find_packages(), include_package_data=True, diff --git a/superset-frontend/cypress-base/cypress/e2e/dashboard/editmode.test.ts b/superset-frontend/cypress-base/cypress/e2e/dashboard/editmode.test.ts index 4f9863071c75..bab9cc55aadc 100644 --- a/superset-frontend/cypress-base/cypress/e2e/dashboard/editmode.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/dashboard/editmode.test.ts @@ -16,10 +16,19 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { SAMPLE_DASHBOARD_1, TABBED_DASHBOARD } from 'cypress/utils/urls'; +import { + SAMPLE_DASHBOARD_1, + SUPPORTED_CHARTS_DASHBOARD, + TABBED_DASHBOARD, +} from 'cypress/utils/urls'; import { drag, resize, waitForChartLoad } from 'cypress/utils'; import * as ace from 'brace'; -import { interceptGet, interceptUpdate, openTab } from './utils'; +import { + interceptExploreUpdate, + interceptGet, + interceptUpdate, + openTab, +} from './utils'; import { interceptExploreJson, interceptFiltering as interceptCharts, @@ -42,15 +51,37 @@ function openProperties() { cy.getBySel('header-actions-menu') .contains('Edit properties') .click({ force: true }); - cy.wait(500); + cy.get('.ant-modal-body').should('be.visible'); }); } +function openExploreProperties() { + cy.getBySel('actions-trigger').click({ force: true }); + cy.get('.ant-dropdown-menu') + .contains('Edit chart properties') + .click({ force: true }); + cy.get('.ant-modal-body').should('be.visible'); +} + +function assertMetadata(text: string) { + const regex = new RegExp(text); + cy.get('#json_metadata') + .should('be.visible') + .then(() => { + const metadata = cy.$$('#json_metadata')[0]; + + // cypress can read this locally, but not in ci + // so we have to use the ace module directly to fetch the value + expect(ace.edit(metadata).getValue()).to.match(regex); + }); +} + function openAdvancedProperties() { cy.get('.ant-modal-body') .contains('Advanced') .should('be.visible') .click({ force: true }); + cy.get('#json_metadata').should('be.visible'); } function dragComponent( @@ -83,20 +114,36 @@ function visitEdit(sampleDashboard = SAMPLE_DASHBOARD_1) { cy.visit(sampleDashboard); cy.wait('@get'); editDashboard(); + cy.get('.grid-container').should('exist'); cy.wait('@filtering'); cy.wait(500); } -function resetTabbedDashboard(go = false) { +function visit(sampleDashboard = SAMPLE_DASHBOARD_1) { + interceptCharts(); + interceptGet(); + + if (sampleDashboard === SAMPLE_DASHBOARD_1) { + 
cy.createSampleDashboards([0]); + } + + cy.visit(sampleDashboard); + cy.wait('@get'); + cy.get('.grid-container').should('exist'); + cy.wait(500); +} + +function resetDashboardColors(dashboard = 'tabbed_dash') { // eslint-disable-next-line @typescript-eslint/no-explicit-any - cy.getDashboard('tabbed_dash').then((r: Record<string, any>) => { + cy.getDashboard(dashboard).then((r: Record<string, any>) => { const jsonMetadata = r?.json_metadata || '{}'; const metadata = JSON.parse(jsonMetadata); const resetMetadata = JSON.stringify({ ...metadata, color_scheme: '', label_colors: {}, - shared_label_colors: {}, + shared_label_colors: [], + map_label_colors: {}, }); cy.updateDashboard(r.id, { certification_details: r.certification_details, @@ -106,27 +153,37 @@ function resetTabbedDashboard(go = false) { json_metadata: resetMetadata, owners: r.owners, slug: r.slug, - }).then(() => { - if (go) { - visitEdit(TABBED_DASHBOARD); - } }); }); } -function visitResetTabbedDashboard() { - resetTabbedDashboard(true); -} - -function selectColorScheme(color: string) { - cy.get( - '[data-test="dashboard-edit-properties-form"] [aria-label="Select color scheme"]', - ) +function selectColorScheme( + color: string, + target = 'dashboard-edit-properties-form', +) { + cy.get(`[data-test="${target}"] input[aria-label="Select color scheme"]`) .first() - .click(); + .then($input => { + cy.wrap($input).click({ force: true }); + cy.wrap($input).type(color.slice(0, 5), { force: true }); + }); cy.getBySel(color).click({ force: true }); } +function saveAndGo(dashboard = 'Tabbed Dashboard') { + interceptExploreUpdate(); + cy.getBySel('query-save-button').click(); + cy.getBySel('save-modal-body').then($modal => { + cy.wrap($modal) + .find("div[aria-label='Select a dashboard'] .ant-select-selection-item") + .should('have.text', dashboard); + cy.getBySel('save-overwrite-radio').should('not.be.disabled'); + cy.getBySel('save-overwrite-radio').click(); + cy.get('#btn_modal_save_goto_dash').click(); + 
cy.wait('@chartUpdate'); + }); +} + function applyChanges() { cy.getBySel('properties-modal-apply-button').click({ force: true }); } @@ -137,37 +194,37 @@ function saveChanges() { cy.wait('@update'); } -function assertMetadata(text: string) { - const regex = new RegExp(text); - cy.get('#json_metadata') - .should('be.visible') - .then(() => { - const metadata = cy.$$('#json_metadata')[0]; - - // cypress can read this locally, but not in ci - // so we have to use the ace module directly to fetch the value - expect(ace.edit(metadata).getValue()).to.match(regex); - }); -} function clearMetadata() { cy.get('#json_metadata').then($jsonmetadata => { - cy.wrap($jsonmetadata).find('.ace_content').click(); + cy.wrap($jsonmetadata).find('.ace_content').click({ force: true }); cy.wrap($jsonmetadata) .find('.ace_text-input') - .type('{selectall} {backspace}', { force: true }); + .then($ace => { + cy.wrap($ace).focus(); + cy.wrap($ace).should('have.focus'); + cy.wrap($ace).type('{selectall}', { force: true }); + cy.wrap($ace).type('{backspace}', { force: true }); + }); }); } function writeMetadata(metadata: string) { - cy.get('#json_metadata').then($jsonmetadata => - cy - .wrap($jsonmetadata) + cy.get('#json_metadata').then($jsonmetadata => { + cy.wrap($jsonmetadata).find('.ace_content').click({ force: true }); + cy.wrap($jsonmetadata) .find('.ace_text-input') - .type(metadata, { parseSpecialCharSequences: false, force: true }), - ); + .then($ace => { + cy.wrap($ace).focus(); + cy.wrap($ace).should('have.focus'); + cy.wrap($ace).type(metadata, { + parseSpecialCharSequences: false, + force: true, + }); + }); + }); } -function openExplore(chartName: string) { +function openExploreWithDashboardContext(chartName: string) { interceptExploreJson(); interceptGet(); @@ -181,21 +238,91 @@ function openExplore(chartName: string) { .should('contain', 'Edit chart') .click(); cy.wait('@getJson'); + cy.get('.chart-container').should('exist'); +} + +function saveExploreColorScheme( + chart = 
'Top 10 California Names Timeseries', + colorScheme = 'supersetColors', +) { + interceptExploreUpdate(); + openExploreWithDashboardContext(chart); + openTab(0, 1, 'control-tabs'); + selectColorScheme(colorScheme, 'control-item'); + cy.getBySel('query-save-button').click(); + cy.getBySel('save-overwrite-radio').click(); + cy.getBySel('btn-modal-save').click(); + cy.wait('@chartUpdate'); } describe('Dashboard edit', () => { describe('Color consistency', () => { beforeEach(() => { - visitResetTabbedDashboard(); + resetDashboardColors(); }); - after(() => { - resetTabbedDashboard(); + it('should not allow to change color scheme of a chart when dashboard has one', () => { + visitEdit(TABBED_DASHBOARD); + openProperties(); + selectColorScheme('blueToGreen'); + applyChanges(); + saveChanges(); + + // open nested tab + openTab(1, 1); + waitForChartLoad({ + name: 'Top 10 California Names Timeseries', + viz: 'line', + }); + + openExploreWithDashboardContext('Top 10 California Names Timeseries'); + + // label Anthony + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .first() + .should('have.css', 'fill', 'rgb(50, 0, 167)'); + + openTab(0, 1, 'control-tabs'); + + cy.get('[aria-label="Select color scheme"]').should('be.disabled'); }); - it('should respect chart color scheme when none is set for the dashboard', () => { + it('should not allow to change color scheme of a chart when dashboard has no scheme but chart has shared labels', () => { + visit(TABBED_DASHBOARD); + + // open nested tab + openTab(1, 1); + waitForChartLoad({ + name: 'Top 10 California Names Timeseries', + viz: 'line', + }); + + // open second top tab to catch shared labels + openTab(0, 1); + waitForChartLoad({ + name: 'Trends', + viz: 'line', + }); + + openTab(0, 0); + openExploreWithDashboardContext('Top 10 California Names Timeseries'); + + // label Anthony + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .first() + .should('have.css', 'fill', 'rgb(31, 168, 201)'); + + 
openTab(0, 1, 'control-tabs'); + + cy.get('[aria-label="Select color scheme"]').should('be.disabled'); + }); + + it('should allow to change color scheme of a chart when dashboard has no scheme but only custom label colors', () => { + visitEdit(TABBED_DASHBOARD); openProperties(); - cy.get('[aria-label="Select color scheme"]').should('have.value', ''); + openAdvancedProperties(); + clearMetadata(); + writeMetadata('{"color_scheme":"","label_colors":{"Anthony":"red"}}'); applyChanges(); saveChanges(); @@ -206,17 +333,93 @@ describe('Dashboard edit', () => { viz: 'line', }); + // label Anthony + cy.get( + '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', + ) + .first() + .should('have.css', 'fill', 'rgb(255, 0, 0)'); + + openExploreWithDashboardContext('Top 10 California Names Timeseries'); + + // label Anthony + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .first() + .should('have.css', 'fill', 'rgb(255, 0, 0)'); + + openTab(0, 1, 'control-tabs'); + selectColorScheme('blueToGreen', 'control-item'); + + // label Anthony + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .first() + .should('have.css', 'fill', 'rgb(255, 0, 0)'); + + // label Christopher + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .eq(1) + .should('have.css', 'fill', 'rgb(50, 0, 167)'); + + // label Daniel + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .eq(2) + .should('have.css', 'fill', 'rgb(0, 76, 218)'); + + // label David + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .eq(3) + .should('have.css', 'fill', 'rgb(0, 116, 241)'); + }); + + it('should allow to change color scheme of a chart when dashboard has no scheme and show the change', () => { + visit(TABBED_DASHBOARD); + + // open nested tab + openTab(1, 1); + waitForChartLoad({ + name: 'Top 10 California Names Timeseries', + viz: 'line', + }); + // label Anthony cy.get( '[data-test-chart-name="Top 10 
California Names Timeseries"] .line .nv-legend-symbol', ) .first() .should('have.css', 'fill', 'rgb(31, 168, 201)'); + + openExploreWithDashboardContext('Top 10 California Names Timeseries'); + + // label Anthony + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .first() + .should('have.css', 'fill', 'rgb(31, 168, 201)'); + + openTab(0, 1, 'control-tabs'); + selectColorScheme('blueToGreen', 'control-item'); + + // label Anthony + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .first() + .should('have.css', 'fill', 'rgb(50, 0, 167)'); + + saveAndGo(); + + // label Anthony + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .first() + .should('have.css', 'fill', 'rgb(50, 0, 167)'); + + // reset original scheme + saveExploreColorScheme(); }); - it('should apply same color to same labels with color scheme set', () => { + it('should allow to change color scheme of a chart when dashboard has no scheme but custom label colors and show the change', () => { + visitEdit(TABBED_DASHBOARD); openProperties(); - selectColorScheme('blueToGreen'); + openAdvancedProperties(); + clearMetadata(); + writeMetadata('{"color_scheme":"","label_colors":{"Anthony":"red"}}'); applyChanges(); saveChanges(); @@ -232,21 +435,174 @@ describe('Dashboard edit', () => { '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', ) .first() + .should('have.css', 'fill', 'rgb(255, 0, 0)'); + + openExploreWithDashboardContext('Top 10 California Names Timeseries'); + + // label Anthony + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .first() + .should('have.css', 'fill', 'rgb(255, 0, 0)'); + + openTab(0, 1, 'control-tabs'); + selectColorScheme('blueToGreen', 'control-item'); + + // label Anthony + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .first() + .should('have.css', 'fill', 'rgb(255, 0, 0)'); + + // label Christopher + cy.get('[data-test="chart-container"] .line 
.nv-legend-symbol') + .eq(1) + .should('have.css', 'fill', 'rgb(50, 0, 167)'); + + saveAndGo(); + + // label Anthony + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .first() + .should('have.css', 'fill', 'rgb(255, 0, 0)'); + + // label Christopher + cy.get('[data-test="chart-container"] .line .nv-legend-symbol') + .eq(1) .should('have.css', 'fill', 'rgb(50, 0, 167)'); + // reset original scheme + saveExploreColorScheme(); + }); + + it('should not change colors on refreshes with no color scheme set', () => { + visit(TABBED_DASHBOARD); + + // open nested tab + openTab(1, 1); + waitForChartLoad({ + name: 'Top 10 California Names Timeseries', + viz: 'line', + }); + + // label Anthony + cy.get( + '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', + ) + .first() + .should('have.css', 'fill', 'rgb(31, 168, 201)'); + // open 2nd main tab openTab(0, 1); waitForChartLoad({ name: 'Trends', viz: 'line' }); + // label Andrew + cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') + .eq(1) + .should('have.css', 'fill', 'rgb(69, 78, 124)'); + + visit(TABBED_DASHBOARD); + + // open nested tab + openTab(1, 1); + waitForChartLoad({ + name: 'Top 10 California Names Timeseries', + viz: 'line', + }); + // label Anthony + cy.get( + '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', + ) + .first() + .should('have.css', 'fill', 'rgb(31, 168, 201)'); + + // open 2nd main tab + openTab(0, 1); + waitForChartLoad({ name: 'Trends', viz: 'line' }); + + // label Andrew cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') - .eq(2) + .eq(1) + .should('have.css', 'fill', 'rgb(69, 78, 124)'); + }); + + it('should not change colors on refreshes with color scheme set', () => { + visitEdit(TABBED_DASHBOARD); + openProperties(); + selectColorScheme('blueToGreen'); + applyChanges(); + saveChanges(); + + // open nested tab + openTab(1, 1); + waitForChartLoad({ + name: 'Top 10 California 
Names Timeseries', + viz: 'line', + }); + + // label Anthony + cy.get( + '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', + ) + .first() + .should('have.css', 'fill', 'rgb(50, 0, 167)'); + + // open 2nd main tab + openTab(0, 1); + waitForChartLoad({ name: 'Trends', viz: 'line' }); + + // label Andrew + cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') + .eq(1) + .should('have.css', 'fill', 'rgb(0, 76, 218)'); + + visit(TABBED_DASHBOARD); + + // open nested tab + openTab(1, 1); + waitForChartLoad({ + name: 'Top 10 California Names Timeseries', + viz: 'line', + }); + + // label Anthony + cy.get( + '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', + ) + .first() .should('have.css', 'fill', 'rgb(50, 0, 167)'); + + // open 2nd main tab + openTab(0, 1); + waitForChartLoad({ name: 'Trends', viz: 'line' }); + + // label Andrew + cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') + .eq(1) + .should('have.css', 'fill', 'rgb(0, 76, 218)'); + }); + + it('should respect chart color scheme when none is set for the dashboard', () => { + visit(TABBED_DASHBOARD); + + // open nested tab + openTab(1, 1); + waitForChartLoad({ + name: 'Top 10 California Names Timeseries', + viz: 'line', + }); + + // label Anthony + cy.get( + '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', + ) + .first() + .should('have.css', 'fill', 'rgb(31, 168, 201)'); }); - it('should apply same color to same labels with no color scheme set', () => { + it('should apply same color to same labels with color scheme set on refresh', () => { + visitEdit(TABBED_DASHBOARD); openProperties(); - cy.get('[aria-label="Select color scheme"]').should('have.value', ''); + selectColorScheme('blueToGreen'); applyChanges(); saveChanges(); @@ -257,6 +613,82 @@ describe('Dashboard edit', () => { viz: 'line', }); + // label Anthony + cy.get( + '[data-test-chart-name="Top 10 California Names 
Timeseries"] .line .nv-legend-symbol', + ) + .first() + .should('have.css', 'fill', 'rgb(50, 0, 167)'); + + // open 2nd main tab + openTab(0, 1); + waitForChartLoad({ name: 'Trends', viz: 'line' }); + + // label Anthony + cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') + .eq(2) + .should('have.css', 'fill', 'rgb(50, 0, 167)'); + + visit(TABBED_DASHBOARD); + // open nested tab + openTab(1, 1); + waitForChartLoad({ + name: 'Top 10 California Names Timeseries', + viz: 'line', + }); + + // label Anthony + cy.get( + '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', + ) + .first() + .should('have.css', 'fill', 'rgb(50, 0, 167)'); + + // open 2nd main tab + openTab(0, 1); + waitForChartLoad({ name: 'Trends', viz: 'line' }); + + // label Anthony + cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') + .eq(2) + .should('have.css', 'fill', 'rgb(50, 0, 167)'); + }); + + it('should apply same color to same labels with no color scheme set on refresh', () => { + visit(TABBED_DASHBOARD); + + // open nested tab + openTab(1, 1); + waitForChartLoad({ + name: 'Top 10 California Names Timeseries', + viz: 'line', + }); + + // label Anthony + cy.get( + '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', + ) + .first() + .should('have.css', 'fill', 'rgb(31, 168, 201)'); + + // open 2nd main tab + openTab(0, 1); + waitForChartLoad({ name: 'Trends', viz: 'line' }); + + // label Anthony + cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') + .eq(2) + .should('have.css', 'fill', 'rgb(31, 168, 201)'); + + visit(TABBED_DASHBOARD); + + // open nested tab + openTab(1, 1); + waitForChartLoad({ + name: 'Top 10 California Names Timeseries', + viz: 'line', + }); + // label Anthony cy.get( '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', @@ -275,6 +707,7 @@ describe('Dashboard edit', () => { }); it('custom label colors should take the precedence in 
nested tabs', () => { + visitEdit(TABBED_DASHBOARD); openProperties(); openAdvancedProperties(); clearMetadata(); @@ -305,6 +738,7 @@ describe('Dashboard edit', () => { }); it('label colors should take the precedence for rendered charts in nested tabs', () => { + visitEdit(TABBED_DASHBOARD); // open the tab first time and let chart load openTab(1, 1); waitForChartLoad({ @@ -333,6 +767,7 @@ describe('Dashboard edit', () => { }); it('should re-apply original color after removing custom label color with color scheme set', () => { + visitEdit(TABBED_DASHBOARD); openProperties(); openAdvancedProperties(); clearMetadata(); @@ -375,6 +810,7 @@ describe('Dashboard edit', () => { }); it('should re-apply original color after removing custom label color with no color scheme set', () => { + visitEdit(TABBED_DASHBOARD); // open nested tab openTab(1, 1); waitForChartLoad({ @@ -438,6 +874,7 @@ describe('Dashboard edit', () => { }); it('should show the same colors in Explore', () => { + visitEdit(TABBED_DASHBOARD); openProperties(); openAdvancedProperties(); clearMetadata(); @@ -461,7 +898,7 @@ describe('Dashboard edit', () => { .first() .should('have.css', 'fill', 'rgb(255, 0, 0)'); - openExplore('Top 10 California Names Timeseries'); + openExploreWithDashboardContext('Top 10 California Names Timeseries'); // label Anthony cy.get('[data-test="chart-container"] .line .nv-legend-symbol') @@ -469,7 +906,8 @@ describe('Dashboard edit', () => { .should('have.css', 'fill', 'rgb(255, 0, 0)'); }); - it.skip('should change color scheme multiple times', () => { + it('should change color scheme multiple times', () => { + visitEdit(TABBED_DASHBOARD); openProperties(); selectColorScheme('blueToGreen'); applyChanges(); @@ -487,7 +925,7 @@ describe('Dashboard edit', () => { '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', ) .first() - .should('have.css', 'fill', 'rgb(234, 11, 140)'); + .should('have.css', 'fill', 'rgb(50, 0, 167)'); // open 2nd main tab 
openTab(0, 1); @@ -496,7 +934,7 @@ describe('Dashboard edit', () => { // label Anthony cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') .eq(2) - .should('have.css', 'fill', 'rgb(234, 11, 140)'); + .should('have.css', 'fill', 'rgb(50, 0, 167)'); editDashboard(); openProperties(); @@ -507,7 +945,7 @@ describe('Dashboard edit', () => { // label Anthony cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') .eq(2) - .should('have.css', 'fill', 'rgb(41, 105, 107)'); + .should('have.css', 'fill', 'rgb(0, 128, 246)'); // open main tab and nested tab openTab(0, 0); @@ -518,10 +956,11 @@ describe('Dashboard edit', () => { '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', ) .first() - .should('have.css', 'fill', 'rgb(41, 105, 107)'); + .should('have.css', 'fill', 'rgb(0, 128, 246)'); }); - it.skip('should apply the color scheme across main tabs', () => { + it('should apply the color scheme across main tabs', () => { + visitEdit(TABBED_DASHBOARD); openProperties(); selectColorScheme('blueToGreen'); applyChanges(); @@ -533,10 +972,11 @@ describe('Dashboard edit', () => { cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') .first() - .should('have.css', 'fill', 'rgb(234, 11, 140)'); + .should('have.css', 'fill', 'rgb(50, 0, 167)'); }); - it.skip('should apply the color scheme across main tabs for rendered charts', () => { + it('should apply the color scheme across main tabs for rendered charts', () => { + visitEdit(TABBED_DASHBOARD); waitForChartLoad({ name: 'Treemap', viz: 'treemap_v2' }); openProperties(); selectColorScheme('blueToGreen'); @@ -549,7 +989,7 @@ describe('Dashboard edit', () => { cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') .first() - .should('have.css', 'fill', 'rgb(41, 105, 107)'); + .should('have.css', 'fill', 'rgb(50, 0, 167)'); // change scheme now that charts are rendered across the main tabs editDashboard(); @@ -560,10 +1000,11 @@ describe('Dashboard edit', 
() => { cy.get('[data-test-chart-name="Trends"] .line .nv-legend-symbol') .first() - .should('have.css', 'fill', 'rgb(234, 11, 140)'); + .should('have.css', 'fill', 'rgb(0, 128, 246)'); }); - it.skip('should apply the color scheme in nested tabs', () => { + it('should apply the color scheme in nested tabs', () => { + visitEdit(TABBED_DASHBOARD); openProperties(); selectColorScheme('blueToGreen'); applyChanges(); @@ -579,17 +1020,18 @@ describe('Dashboard edit', () => { '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', ) .first() - .should('have.css', 'fill', 'rgb(234, 11, 140)'); + .should('have.css', 'fill', 'rgb(50, 0, 167)'); // open another nested tab openTab(2, 1); waitForChartLoad({ name: 'Growth Rate', viz: 'line' }); cy.get('[data-test-chart-name="Growth Rate"] .line .nv-legend-symbol') .first() - .should('have.css', 'fill', 'rgb(234, 11, 140)'); + .should('have.css', 'fill', 'rgb(50, 0, 167)'); }); - it.skip('should apply a valid color scheme for rendered charts in nested tabs', () => { + it('should apply a valid color scheme for rendered charts in nested tabs', () => { + visitEdit(TABBED_DASHBOARD); // open the tab first time and let chart load openTab(1, 1); waitForChartLoad({ @@ -611,7 +1053,7 @@ describe('Dashboard edit', () => { '[data-test-chart-name="Top 10 California Names Timeseries"] .line .nv-legend-symbol', ) .first() - .should('have.css', 'fill', 'rgb(234, 11, 140)'); + .should('have.css', 'fill', 'rgb(50, 0, 167)'); }); }); @@ -623,9 +1065,10 @@ describe('Dashboard edit', () => { beforeEach(() => { cy.createSampleDashboards([0]); openProperties(); + selectColorScheme('supersetColors'); }); - it.skip('should accept a valid color scheme', () => { + it('should accept a valid color scheme', () => { openAdvancedProperties(); clearMetadata(); writeMetadata('{"color_scheme":"lyftColors"}'); @@ -636,21 +1079,21 @@ describe('Dashboard edit', () => { applyChanges(); }); - it.skip('should overwrite the color scheme 
when advanced is closed', () => { + it('should overwrite the color scheme when advanced is closed', () => { selectColorScheme('blueToGreen'); openAdvancedProperties(); assertMetadata('blueToGreen'); applyChanges(); }); - it.skip('should overwrite the color scheme when advanced is open', () => { + it('should overwrite the color scheme when advanced is open', () => { openAdvancedProperties(); selectColorScheme('modernSunset'); assertMetadata('modernSunset'); applyChanges(); }); - it.skip('should not accept an invalid color scheme', () => { + it('should not accept an invalid color scheme', () => { openAdvancedProperties(); clearMetadata(); // allow console error @@ -714,7 +1157,7 @@ describe('Dashboard edit', () => { visitEdit(); }); - it.skip('should add charts', () => { + it('should add charts', () => { cy.get('[role="checkbox"]').click(); dragComponent(); cy.getBySel('dashboard-component-chart-holder').should('have.length', 1); @@ -762,7 +1205,7 @@ describe('Dashboard edit', () => { visitEdit(); }); - it.skip('should save', () => { + it('should save', () => { cy.get('[role="checkbox"]').click(); dragComponent(); cy.getBySel('header-save-button').should('be.enabled'); diff --git a/superset-frontend/cypress-base/cypress/e2e/dashboard/horizontalFilterBar.test.ts b/superset-frontend/cypress-base/cypress/e2e/dashboard/horizontalFilterBar.test.ts new file mode 100644 index 000000000000..d9ed21258f20 --- /dev/null +++ b/superset-frontend/cypress-base/cypress/e2e/dashboard/horizontalFilterBar.test.ts @@ -0,0 +1,248 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { nativeFilters } from 'cypress/support/directories'; + +import { + addCountryNameFilter, + applyNativeFilterValueWithIndex, + enterNativeFilterEditModal, + inputNativeFilterDefaultValue, + saveNativeFilterSettings, + validateFilterNameOnDashboard, + testItems, + interceptFilterState, +} from './utils'; +import { + prepareDashboardFilters, + SAMPLE_CHART, + visitDashboard, +} from './shared_dashboard_functions'; + +function openMoreFilters(waitFilterState = true) { + interceptFilterState(); + cy.getBySel('dropdown-container-btn').click(); + + if (waitFilterState) { + cy.wait('@postFilterState'); + } +} + +function openVerticalFilterBar() { + cy.getBySel('dashboard-filters-panel').should('exist'); + cy.getBySel('filter-bar__expand-button').click(); +} + +function setFilterBarOrientation(orientation: 'vertical' | 'horizontal') { + cy.getBySel('filterbar-orientation-icon').click(); + cy.wait(250); + cy.getBySel('dropdown-selectable-icon-submenu') + .contains('Orientation of filter bar') + .should('exist') + .trigger('mouseover'); + + if (orientation === 'vertical') { + cy.get('.ant-dropdown-menu-item-selected') + .contains('Horizontal (Top)') + .should('exist'); + cy.get('.ant-dropdown-menu-item').contains('Vertical (Left)').click(); + cy.getBySel('dashboard-filters-panel').should('exist'); + } else { + cy.get('.ant-dropdown-menu-item-selected') + .contains('Vertical (Left)') + .should('exist'); + cy.get('.ant-dropdown-menu-item').contains('Horizontal (Top)').click(); + cy.getBySel('loading-indicator').should('exist'); + 
cy.getBySel('filter-bar').should('exist'); + cy.getBySel('dashboard-filters-panel').should('not.exist'); + } +} + +describe('Horizontal FilterBar', () => { + it('should go from vertical to horizontal and the opposite', () => { + visitDashboard(); + openVerticalFilterBar(); + setFilterBarOrientation('horizontal'); + setFilterBarOrientation('vertical'); + }); + + it('should show all default actions in horizontal mode', () => { + visitDashboard(); + openVerticalFilterBar(); + setFilterBarOrientation('horizontal'); + cy.getBySel('horizontal-filterbar-empty') + .contains('No filters are currently added to this dashboard.') + .should('exist'); + cy.getBySel('filter-bar__create-filter').should('exist'); + cy.getBySel('filterbar-action-buttons').should('exist'); + }); + + it('should stay in horizontal mode when reloading', () => { + visitDashboard(); + openVerticalFilterBar(); + setFilterBarOrientation('horizontal'); + cy.reload(); + cy.getBySel('dashboard-filters-panel').should('not.exist'); + }); + + it('should show all filters in available space on load', () => { + prepareDashboardFilters([ + { name: 'test_1', column: 'country_name', datasetId: 2 }, + { name: 'test_2', column: 'country_code', datasetId: 2 }, + { name: 'test_3', column: 'region', datasetId: 2 }, + ]); + setFilterBarOrientation('horizontal'); + cy.get('.filter-item-wrapper').should('have.length', 3); + }); + + it('should show "more filters" on window resizing up and down', () => { + prepareDashboardFilters([ + { name: 'test_1', column: 'country_name', datasetId: 2 }, + { name: 'test_2', column: 'country_code', datasetId: 2 }, + { name: 'test_3', column: 'region', datasetId: 2 }, + ]); + setFilterBarOrientation('horizontal'); + + cy.getBySel('form-item-value').should('have.length', 3); + cy.viewport(768, 1024); + cy.getBySel('form-item-value').should('have.length', 0); + openMoreFilters(false); + cy.getBySel('form-item-value').should('have.length', 3); + + cy.getBySel('filter-bar').click(); + 
cy.viewport(1000, 1024); + openMoreFilters(false); + cy.getBySel('form-item-value').should('have.length', 3); + + cy.getBySel('filter-bar').click(); + cy.viewport(1300, 1024); + cy.getBySel('form-item-value').should('have.length', 3); + cy.getBySel('dropdown-container-btn').should('not.exist'); + }); + + it('should show "more filters" and scroll', () => { + prepareDashboardFilters([ + { name: 'test_1', column: 'country_name', datasetId: 2 }, + { name: 'test_2', column: 'country_code', datasetId: 2 }, + { name: 'test_3', column: 'region', datasetId: 2 }, + { name: 'test_4', column: 'year', datasetId: 2 }, + { name: 'test_5', column: 'country_name', datasetId: 2 }, + { name: 'test_6', column: 'country_code', datasetId: 2 }, + { name: 'test_7', column: 'region', datasetId: 2 }, + { name: 'test_8', column: 'year', datasetId: 2 }, + { name: 'test_9', column: 'country_name', datasetId: 2 }, + { name: 'test_10', column: 'country_code', datasetId: 2 }, + { name: 'test_11', column: 'region', datasetId: 2 }, + { name: 'test_12', column: 'year', datasetId: 2 }, + ]); + setFilterBarOrientation('horizontal'); + cy.get('.filter-item-wrapper').should('have.length', 3); + openMoreFilters(); + cy.getBySel('form-item-value').should('have.length', 12); + cy.getBySel('filter-control-name').contains('test_10').should('be.visible'); + cy.getBySel('filter-control-name') + .contains('test_12') + .should('not.be.visible'); + cy.get('.ant-popover-inner-content').scrollTo('bottom'); + cy.getBySel('filter-control-name').contains('test_12').should('be.visible'); + }); + + it('should display newly added filter', () => { + visitDashboard(); + openVerticalFilterBar(); + setFilterBarOrientation('horizontal'); + + enterNativeFilterEditModal(false); + addCountryNameFilter(); + saveNativeFilterSettings([]); + validateFilterNameOnDashboard(testItems.topTenChart.filterColumn); + }); + + it('should spot changes in "more filters" and apply their values', () => { + 
cy.intercept(`/api/v1/chart/data?form_data=**`).as('chart'); + prepareDashboardFilters([ + { name: 'test_1', column: 'country_name', datasetId: 2 }, + { name: 'test_2', column: 'country_code', datasetId: 2 }, + { name: 'test_3', column: 'region', datasetId: 2 }, + { name: 'test_4', column: 'year', datasetId: 2 }, + { name: 'test_5', column: 'country_name', datasetId: 2 }, + { name: 'test_6', column: 'country_code', datasetId: 2 }, + { name: 'test_7', column: 'region', datasetId: 2 }, + { name: 'test_8', column: 'year', datasetId: 2 }, + { name: 'test_9', column: 'country_name', datasetId: 2 }, + { name: 'test_10', column: 'country_code', datasetId: 2 }, + { name: 'test_11', column: 'region', datasetId: 2 }, + { name: 'test_12', column: 'year', datasetId: 2 }, + ]); + setFilterBarOrientation('horizontal'); + openMoreFilters(); + applyNativeFilterValueWithIndex(8, testItems.filterDefaultValue); + cy.get(nativeFilters.applyFilter).click({ force: true }); + cy.wait('@chart'); + cy.get('.antd5-scroll-number.antd5-badge-count').should( + 'have.attr', + 'title', + '1', + ); + }); + + it('should focus filter and open "more filters" programmatically', () => { + prepareDashboardFilters([ + { name: 'test_1', column: 'country_name', datasetId: 2 }, + { name: 'test_2', column: 'country_code', datasetId: 2 }, + { name: 'test_3', column: 'region', datasetId: 2 }, + { name: 'test_4', column: 'year', datasetId: 2 }, + { name: 'test_5', column: 'country_name', datasetId: 2 }, + { name: 'test_6', column: 'country_code', datasetId: 2 }, + { name: 'test_7', column: 'region', datasetId: 2 }, + { name: 'test_8', column: 'year', datasetId: 2 }, + { name: 'test_9', column: 'country_name', datasetId: 2 }, + { name: 'test_10', column: 'country_code', datasetId: 2 }, + { name: 'test_11', column: 'region', datasetId: 2 }, + { name: 'test_12', column: 'year', datasetId: 2 }, + ]); + setFilterBarOrientation('horizontal'); + openMoreFilters(); + applyNativeFilterValueWithIndex(8, 
testItems.filterDefaultValue); + cy.get(nativeFilters.applyFilter).click({ force: true }); + cy.getBySel('slice-header').within(() => { + cy.get('.filter-counts').trigger('mouseover'); + }); + cy.get('.filterStatusPopover').contains('test_9').click(); + cy.getBySel('dropdown-content').should('be.visible'); + cy.get('.ant-select-focused').should('be.visible'); + }); + + it('should show tag count and one plain tag on focus and only count on blur in select ', () => { + prepareDashboardFilters([ + { name: 'test_1', column: 'country_name', datasetId: 2 }, + ]); + setFilterBarOrientation('horizontal'); + enterNativeFilterEditModal(); + inputNativeFilterDefaultValue('Albania'); + cy.get('.ant-select-selection-search-input').clear({ force: true }); + inputNativeFilterDefaultValue('Algeria', true); + saveNativeFilterSettings([SAMPLE_CHART]); + cy.getBySel('filter-bar').within(() => { + cy.get(nativeFilters.filterItem).contains('Albania').should('be.visible'); + cy.get(nativeFilters.filterItem).contains('+ 1 ...').should('be.visible'); + cy.get('.ant-select-selection-search-input').click(); + cy.get(nativeFilters.filterItem).contains('+ 2 ...').should('be.visible'); + }); + }); +}); diff --git a/superset-frontend/cypress-base/cypress/e2e/dashboard/nativeFilters.test.ts b/superset-frontend/cypress-base/cypress/e2e/dashboard/nativeFilters.test.ts index f2e88b1390c2..1ef02fb2fe40 100644 --- a/superset-frontend/cypress-base/cypress/e2e/dashboard/nativeFilters.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/dashboard/nativeFilters.test.ts @@ -16,15 +16,19 @@ * specific language governing permissions and limitations * under the License. 
*/ -import qs from 'querystring'; import { dashboardView, - nativeFilters, - exploreView, dataTestChartName, + exploreView, + nativeFilters, } from 'cypress/support/directories'; -import { SAMPLE_DASHBOARD_1 } from 'cypress/utils/urls'; +import qs from 'querystring'; +import { + prepareDashboardFilters, + SAMPLE_CHART, + visitDashboard, +} from './shared_dashboard_functions'; import { addCountryNameFilter, addParentFilterWithValue, @@ -40,147 +44,16 @@ import { fillNativeFilterForm, getNativeFilterPlaceholderWithIndex, inputNativeFilterDefaultValue, - saveNativeFilterSettings, nativeFilterTooltips, + saveNativeFilterSettings, + testItems, undoDeleteNativeFilter, validateFilterContentOnDashboard, - valueNativeFilterOptions, validateFilterNameOnDashboard, - testItems, + valueNativeFilterOptions, WORLD_HEALTH_CHARTS, - interceptGet, - interceptCharts, - interceptDatasets, - interceptFilterState, } from './utils'; -const SAMPLE_CHART = { name: 'Most Populated Countries', viz: 'table' }; - -function visitDashboard(createSample = true) { - interceptCharts(); - interceptGet(); - interceptDatasets(); - - if (createSample) { - cy.createSampleDashboards([0]); - } - - cy.visit(SAMPLE_DASHBOARD_1); - cy.wait('@get'); - cy.wait('@getCharts'); - cy.wait('@getDatasets'); - cy.url().should('contain', 'native_filters_key'); -} - -function prepareDashboardFilters( - filters: { name: string; column: string; datasetId: number }[], -) { - cy.createSampleDashboards([0]); - cy.request({ - method: 'GET', - url: `api/v1/dashboard/1-sample-dashboard`, - }).then(res => { - const { body } = res; - const dashboardId = body.result.id; - const allFilters: Record<string, unknown>[] = []; - filters.forEach((f, i) => { - allFilters.push({ - id: `NATIVE_FILTER-fLH0pxFQ${i}`, - controlValues: { - enableEmptyFilter: false, - defaultToFirstItem: false, - multiSelect: true, - searchAllOptions: false, - inverseSelection: false, - }, - name: f.name, - filterType: 'filter_select', - targets: [ - { - 
datasetId: f.datasetId, - column: { name: f.column }, - }, - ], - defaultDataMask: { - extraFormData: {}, - filterState: {}, - ownState: {}, - }, - cascadeParentIds: [], - scope: { - rootPath: ['ROOT_ID'], - excluded: [], - }, - type: 'NATIVE_FILTER', - description: '', - chartsInScope: [5], - tabsInScope: [], - }); - }); - if (dashboardId) { - const jsonMetadata = { - native_filter_configuration: allFilters, - timed_refresh_immune_slices: [], - expanded_slices: {}, - refresh_frequency: 0, - color_scheme: '', - label_colors: {}, - shared_label_colors: {}, - color_scheme_domain: [], - cross_filters_enabled: false, - positions: { - DASHBOARD_VERSION_KEY: 'v2', - ROOT_ID: { type: 'ROOT', id: 'ROOT_ID', children: ['GRID_ID'] }, - GRID_ID: { - type: 'GRID', - id: 'GRID_ID', - children: ['ROW-0rHnUz4nMA'], - parents: ['ROOT_ID'], - }, - HEADER_ID: { - id: 'HEADER_ID', - type: 'HEADER', - meta: { text: '1 - Sample dashboard' }, - }, - 'CHART-DF6EfI55F-': { - type: 'CHART', - id: 'CHART-DF6EfI55F-', - children: [], - parents: ['ROOT_ID', 'GRID_ID', 'ROW-0rHnUz4nMA'], - meta: { - width: 4, - height: 50, - chartId: 5, - sliceName: 'Most Populated Countries', - }, - }, - 'ROW-0rHnUz4nMA': { - type: 'ROW', - id: 'ROW-0rHnUz4nMA', - children: ['CHART-DF6EfI55F-'], - parents: ['ROOT_ID', 'GRID_ID'], - meta: { background: 'BACKGROUND_TRANSPARENT' }, - }, - }, - default_filters: '{}', - filter_scopes: {}, - chart_configuration: {}, - }; - - return cy - .request({ - method: 'PUT', - url: `api/v1/dashboard/${dashboardId}`, - body: { - json_metadata: JSON.stringify(jsonMetadata), - }, - }) - .then(() => visitDashboard(false)); - } - return cy; - }); -} - function selectFilter(index: number) { cy.get("[data-test='filter-title-container'] [draggable='true']") .eq(index) @@ -195,219 +68,6 @@ function closeFilterModal() { }); } -function openVerticalFilterBar() { - cy.getBySel('dashboard-filters-panel').should('exist'); - cy.getBySel('filter-bar__expand-button').click(); -} - -function 
setFilterBarOrientation(orientation: 'vertical' | 'horizontal') { - cy.getBySel('filterbar-orientation-icon').click(); - cy.wait(250); - cy.getBySel('dropdown-selectable-icon-submenu') - .contains('Orientation of filter bar') - .should('exist') - .trigger('mouseover'); - - if (orientation === 'vertical') { - cy.get('.ant-dropdown-menu-item-selected') - .contains('Horizontal (Top)') - .should('exist'); - cy.get('.ant-dropdown-menu-item').contains('Vertical (Left)').click(); - cy.getBySel('dashboard-filters-panel').should('exist'); - } else { - cy.get('.ant-dropdown-menu-item-selected') - .contains('Vertical (Left)') - .should('exist'); - cy.get('.ant-dropdown-menu-item').contains('Horizontal (Top)').click(); - cy.getBySel('loading-indicator').should('exist'); - cy.getBySel('filter-bar').should('exist'); - cy.getBySel('dashboard-filters-panel').should('not.exist'); - } -} - -function openMoreFilters(intercetFilterState = true) { - interceptFilterState(); - cy.getBySel('dropdown-container-btn').click(); - - if (intercetFilterState) { - cy.wait('@postFilterState'); - } -} - -describe('Horizontal FilterBar', () => { - it('should go from vertical to horizontal and the opposite', () => { - visitDashboard(); - openVerticalFilterBar(); - setFilterBarOrientation('horizontal'); - setFilterBarOrientation('vertical'); - }); - - it('should show all default actions in horizontal mode', () => { - visitDashboard(); - openVerticalFilterBar(); - setFilterBarOrientation('horizontal'); - cy.getBySel('horizontal-filterbar-empty') - .contains('No filters are currently added to this dashboard.') - .should('exist'); - cy.getBySel('filter-bar__create-filter').should('exist'); - cy.getBySel('filterbar-action-buttons').should('exist'); - }); - - it('should stay in horizontal mode when reloading', () => { - visitDashboard(); - openVerticalFilterBar(); - setFilterBarOrientation('horizontal'); - cy.reload(); - cy.getBySel('dashboard-filters-panel').should('not.exist'); - }); - - it('should show 
all filters in available space on load', () => { - prepareDashboardFilters([ - { name: 'test_1', column: 'country_name', datasetId: 2 }, - { name: 'test_2', column: 'country_code', datasetId: 2 }, - { name: 'test_3', column: 'region', datasetId: 2 }, - ]); - setFilterBarOrientation('horizontal'); - cy.get('.filter-item-wrapper').should('have.length', 3); - }); - - it('should show "more filters" on window resizing up and down', () => { - prepareDashboardFilters([ - { name: 'test_1', column: 'country_name', datasetId: 2 }, - { name: 'test_2', column: 'country_code', datasetId: 2 }, - { name: 'test_3', column: 'region', datasetId: 2 }, - ]); - setFilterBarOrientation('horizontal'); - - cy.getBySel('form-item-value').should('have.length', 3); - cy.viewport(768, 1024); - cy.getBySel('form-item-value').should('have.length', 0); - openMoreFilters(false); - cy.getBySel('form-item-value').should('have.length', 3); - - cy.getBySel('filter-bar').click(); - cy.viewport(1000, 1024); - openMoreFilters(false); - cy.getBySel('form-item-value').should('have.length', 3); - - cy.getBySel('filter-bar').click(); - cy.viewport(1300, 1024); - cy.getBySel('form-item-value').should('have.length', 3); - cy.getBySel('dropdown-container-btn').should('not.exist'); - }); - - it('should show "more filters" and scroll', () => { - prepareDashboardFilters([ - { name: 'test_1', column: 'country_name', datasetId: 2 }, - { name: 'test_2', column: 'country_code', datasetId: 2 }, - { name: 'test_3', column: 'region', datasetId: 2 }, - { name: 'test_4', column: 'year', datasetId: 2 }, - { name: 'test_5', column: 'country_name', datasetId: 2 }, - { name: 'test_6', column: 'country_code', datasetId: 2 }, - { name: 'test_7', column: 'region', datasetId: 2 }, - { name: 'test_8', column: 'year', datasetId: 2 }, - { name: 'test_9', column: 'country_name', datasetId: 2 }, - { name: 'test_10', column: 'country_code', datasetId: 2 }, - { name: 'test_11', column: 'region', datasetId: 2 }, - { name: 'test_12', 
column: 'year', datasetId: 2 }, - ]); - setFilterBarOrientation('horizontal'); - cy.get('.filter-item-wrapper').should('have.length', 3); - openMoreFilters(); - cy.getBySel('form-item-value').should('have.length', 12); - cy.getBySel('filter-control-name').contains('test_10').should('be.visible'); - cy.getBySel('filter-control-name') - .contains('test_12') - .should('not.be.visible'); - cy.get('.ant-popover-inner-content').scrollTo('bottom'); - cy.getBySel('filter-control-name').contains('test_12').should('be.visible'); - }); - - it('should display newly added filter', () => { - visitDashboard(); - openVerticalFilterBar(); - setFilterBarOrientation('horizontal'); - - enterNativeFilterEditModal(false); - addCountryNameFilter(); - saveNativeFilterSettings([]); - validateFilterNameOnDashboard(testItems.topTenChart.filterColumn); - }); - - it('should spot changes in "more filters" and apply their values', () => { - cy.intercept(`/api/v1/chart/data?form_data=**`).as('chart'); - prepareDashboardFilters([ - { name: 'test_1', column: 'country_name', datasetId: 2 }, - { name: 'test_2', column: 'country_code', datasetId: 2 }, - { name: 'test_3', column: 'region', datasetId: 2 }, - { name: 'test_4', column: 'year', datasetId: 2 }, - { name: 'test_5', column: 'country_name', datasetId: 2 }, - { name: 'test_6', column: 'country_code', datasetId: 2 }, - { name: 'test_7', column: 'region', datasetId: 2 }, - { name: 'test_8', column: 'year', datasetId: 2 }, - { name: 'test_9', column: 'country_name', datasetId: 2 }, - { name: 'test_10', column: 'country_code', datasetId: 2 }, - { name: 'test_11', column: 'region', datasetId: 2 }, - { name: 'test_12', column: 'year', datasetId: 2 }, - ]); - setFilterBarOrientation('horizontal'); - openMoreFilters(); - applyNativeFilterValueWithIndex(8, testItems.filterDefaultValue); - cy.get(nativeFilters.applyFilter).click({ force: true }); - cy.wait('@chart'); - cy.get('.antd5-scroll-number.antd5-badge-count').should( - 'have.attr', - 'title', - 
'1', - ); - }); - - it('should focus filter and open "more filters" programmatically', () => { - prepareDashboardFilters([ - { name: 'test_1', column: 'country_name', datasetId: 2 }, - { name: 'test_2', column: 'country_code', datasetId: 2 }, - { name: 'test_3', column: 'region', datasetId: 2 }, - { name: 'test_4', column: 'year', datasetId: 2 }, - { name: 'test_5', column: 'country_name', datasetId: 2 }, - { name: 'test_6', column: 'country_code', datasetId: 2 }, - { name: 'test_7', column: 'region', datasetId: 2 }, - { name: 'test_8', column: 'year', datasetId: 2 }, - { name: 'test_9', column: 'country_name', datasetId: 2 }, - { name: 'test_10', column: 'country_code', datasetId: 2 }, - { name: 'test_11', column: 'region', datasetId: 2 }, - { name: 'test_12', column: 'year', datasetId: 2 }, - ]); - setFilterBarOrientation('horizontal'); - openMoreFilters(); - applyNativeFilterValueWithIndex(8, testItems.filterDefaultValue); - cy.get(nativeFilters.applyFilter).click({ force: true }); - cy.getBySel('slice-header').within(() => { - cy.get('.filter-counts').trigger('mouseover'); - }); - cy.get('.filterStatusPopover').contains('test_9').click(); - cy.getBySel('dropdown-content').should('be.visible'); - cy.get('.ant-select-focused').should('be.visible'); - }); - - it('should show tag count and one plain tag on focus and only count on blur in select ', () => { - prepareDashboardFilters([ - { name: 'test_1', column: 'country_name', datasetId: 2 }, - ]); - setFilterBarOrientation('horizontal'); - enterNativeFilterEditModal(); - inputNativeFilterDefaultValue('Albania'); - cy.get('.ant-select-selection-search-input').clear({ force: true }); - inputNativeFilterDefaultValue('Algeria', true); - saveNativeFilterSettings([SAMPLE_CHART]); - cy.getBySel('filter-bar').within(() => { - cy.get(nativeFilters.filterItem).contains('Albania').should('be.visible'); - cy.get(nativeFilters.filterItem).contains('+ 1 ...').should('be.visible'); - 
cy.get('.ant-select-selection-search-input').click(); - cy.get(nativeFilters.filterItem).contains('+ 2 ...').should('be.visible'); - }); - }); -}); - describe('Native filters', () => { describe('Nativefilters tests initial state required', () => { beforeEach(() => { diff --git a/superset-frontend/cypress-base/cypress/e2e/dashboard/shared_dashboard_functions.ts b/superset-frontend/cypress-base/cypress/e2e/dashboard/shared_dashboard_functions.ts new file mode 100644 index 000000000000..b0f7853e94ba --- /dev/null +++ b/superset-frontend/cypress-base/cypress/e2e/dashboard/shared_dashboard_functions.ts @@ -0,0 +1,148 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { SAMPLE_DASHBOARD_1 } from 'cypress/utils/urls'; +import { interceptCharts, interceptDatasets, interceptGet } from './utils'; + +export const SAMPLE_CHART = { name: 'Most Populated Countries', viz: 'table' }; + +export function visitDashboard(createSample = true) { + interceptCharts(); + interceptGet(); + interceptDatasets(); + + if (createSample) { + cy.createSampleDashboards([0]); + } + + cy.visit(SAMPLE_DASHBOARD_1); + cy.wait('@get'); + cy.wait('@getCharts'); + cy.wait('@getDatasets'); + cy.url().should('contain', 'native_filters_key'); +} + +export function prepareDashboardFilters( + filters: { name: string; column: string; datasetId: number }[], +) { + cy.createSampleDashboards([0]); + cy.request({ + method: 'GET', + url: `api/v1/dashboard/1-sample-dashboard`, + }).then(res => { + const { body } = res; + const dashboardId = body.result.id; + const allFilters: Record<string, unknown>[] = []; + filters.forEach((f, i) => { + allFilters.push({ + id: `NATIVE_FILTER-fLH0pxFQ${i}`, + controlValues: { + enableEmptyFilter: false, + defaultToFirstItem: false, + multiSelect: true, + searchAllOptions: false, + inverseSelection: false, + }, + name: f.name, + filterType: 'filter_select', + targets: [ + { + datasetId: f.datasetId, + column: { name: f.column }, + }, + ], + defaultDataMask: { + extraFormData: {}, + filterState: {}, + ownState: {}, + }, + cascadeParentIds: [], + scope: { + rootPath: ['ROOT_ID'], + excluded: [], + }, + type: 'NATIVE_FILTER', + description: '', + chartsInScope: [5], + tabsInScope: [], + }); + }); + if (dashboardId) { + const jsonMetadata = { + native_filter_configuration: allFilters, + timed_refresh_immune_slices: [], + expanded_slices: {}, + refresh_frequency: 0, + color_scheme: '', + label_colors: {}, + shared_label_colors: [], + color_scheme_domain: [], + cross_filters_enabled: false, + positions: { + DASHBOARD_VERSION_KEY: 'v2', + ROOT_ID: { type: 'ROOT', id: 'ROOT_ID', children: ['GRID_ID'] }, + GRID_ID: { + type: 'GRID', + 
id: 'GRID_ID', + children: ['ROW-0rHnUz4nMA'], + parents: ['ROOT_ID'], + }, + HEADER_ID: { + id: 'HEADER_ID', + type: 'HEADER', + meta: { text: '1 - Sample dashboard' }, + }, + 'CHART-DF6EfI55F-': { + type: 'CHART', + id: 'CHART-DF6EfI55F-', + children: [], + parents: ['ROOT_ID', 'GRID_ID', 'ROW-0rHnUz4nMA'], + meta: { + width: 4, + height: 50, + chartId: 5, + sliceName: 'Most Populated Countries', + }, + }, + 'ROW-0rHnUz4nMA': { + type: 'ROW', + id: 'ROW-0rHnUz4nMA', + children: ['CHART-DF6EfI55F-'], + parents: ['ROOT_ID', 'GRID_ID'], + meta: { background: 'BACKGROUND_TRANSPARENT' }, + }, + }, + default_filters: '{}', + filter_scopes: {}, + chart_configuration: {}, + }; + + return cy + .request({ + method: 'PUT', + url: `api/v1/dashboard/${dashboardId}`, + body: { + json_metadata: JSON.stringify(jsonMetadata), + }, + }) + .then(() => visitDashboard(false)); + } + return cy; + }); +} diff --git a/superset-frontend/cypress-base/cypress/e2e/dashboard/utils.ts b/superset-frontend/cypress-base/cypress/e2e/dashboard/utils.ts index d892c080923d..f4dcc1cccf50 100644 --- a/superset-frontend/cypress-base/cypress/e2e/dashboard/utils.ts +++ b/superset-frontend/cypress-base/cypress/e2e/dashboard/utils.ts @@ -125,7 +125,7 @@ export const valueNativeFilterOptions = [ ]; export function interceptGet() { - cy.intercept('/api/v1/dashboard/*').as('get'); + cy.intercept('GET', '/api/v1/dashboard/*').as('get'); } export function interceptFiltering() { @@ -144,6 +144,10 @@ export function interceptUpdate() { cy.intercept('PUT', `/api/v1/dashboard/*`).as('update'); } +export function interceptExploreUpdate() { + cy.intercept('PUT', `/api/v1/chart/*`).as('chartUpdate'); +} + export function interceptPost() { cy.intercept('POST', `/api/v1/dashboard/`).as('post'); } @@ -374,7 +378,7 @@ export function cancelNativeFilterSettings() { .should('be.visible') .should('have.text', 'There are unsaved changes.'); cy.get(nativeFilters.modal.footer) - .find(nativeFilters.modal.yesCancelButton) + 
.find(nativeFilters.modal.confirmCancelButton) .contains('cancel') .click({ force: true }); cy.get(nativeFilters.modal.container).should('not.exist'); @@ -520,13 +524,17 @@ export function addCountryNameFilter() { ); } -export function openTab(tabComponentIndex: number, tabIndex: number) { - return cy - .getBySel('dashboard-component-tabs') +export function openTab( + tabComponentIndex: number, + tabIndex: number, + target = 'dashboard-component-tabs', +) { + cy.getBySel(target) .eq(tabComponentIndex) .find('[role="tab"]') .eq(tabIndex) .click(); + cy.wait(500); } export const openTopLevelTab = (tabName: string) => { diff --git a/superset-frontend/cypress-base/cypress/e2e/explore/AdhocMetrics.test.ts b/superset-frontend/cypress-base/cypress/e2e/explore/AdhocMetrics.test.ts index b1c0fd56cf6f..e97ac74c3f2a 100644 --- a/superset-frontend/cypress-base/cypress/e2e/explore/AdhocMetrics.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/explore/AdhocMetrics.test.ts @@ -25,7 +25,7 @@ describe('AdhocMetrics', () => { }); it('Clear metric and set simple adhoc metric', () => { - const metric = 'SUM(num_girls)'; + const metric = 'sum(num_girls)'; const metricName = 'Sum Girls'; cy.get('[data-test=metrics]') .find('[data-test="remove-control-button"]') diff --git a/superset-frontend/cypress-base/cypress/e2e/explore/chart.test.js b/superset-frontend/cypress-base/cypress/e2e/explore/chart.test.js index 14c386e0ea62..5644cda37eb2 100644 --- a/superset-frontend/cypress-base/cypress/e2e/explore/chart.test.js +++ b/superset-frontend/cypress-base/cypress/e2e/explore/chart.test.js @@ -147,7 +147,7 @@ describe('No Results', () => { cy.visitChartByParams(formData); cy.wait('@getJson').its('response.statusCode').should('eq', 200); cy.get('div.chart-container').contains( - 'No results were returned for this query', + 'No data', ); }); }); diff --git a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/table.test.ts 
b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/table.test.ts index 425e5e694b48..330d7fdce01f 100644 --- a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/table.test.ts +++ b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/table.test.ts @@ -100,7 +100,7 @@ describe('Visualization > Table', () => { }); cy.verifySliceSuccess({ waitAlias: '@chartData', - querySubstring: /group by\n.*name/i, + querySubstring: /GROUP BY.*name/i, chartSelector: 'table', }); }); @@ -246,7 +246,7 @@ describe('Visualization > Table', () => { cy.visitChartByParams(formData); cy.verifySliceSuccess({ waitAlias: '@chartData', - querySubstring: /group by\n.*state/i, + querySubstring: /GROUP BY.*state/i, chartSelector: 'table', }); cy.get('td').contains(/\d*%/); diff --git a/superset-frontend/cypress-base/cypress/support/directories.ts b/superset-frontend/cypress-base/cypress/support/directories.ts index 77268a5e0473..b59aa1bf8196 100644 --- a/superset-frontend/cypress-base/cypress/support/directories.ts +++ b/superset-frontend/cypress-base/cypress/support/directories.ts @@ -322,7 +322,9 @@ export const nativeFilters = { footer: '.ant-modal-footer', saveButton: dataTestLocator('native-filter-modal-save-button'), cancelButton: dataTestLocator('native-filter-modal-cancel-button'), - yesCancelButton: '[type="button"]', + confirmCancelButton: dataTestLocator( + 'native-filter-modal-confirm-cancel-button', + ), alertXUnsavedFilters: '.ant-alert-message', tabsList: { filterItemsContainer: dataTestLocator('filter-title-container'), diff --git a/superset-frontend/jest.config.js b/superset-frontend/jest.config.js index efe28bdc1421..053b9f12c8af 100644 --- a/superset-frontend/jest.config.js +++ b/superset-frontend/jest.config.js @@ -56,7 +56,7 @@ module.exports = { moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json'], snapshotSerializers: ['@emotion/jest/enzyme-serializer'], transformIgnorePatterns: [ - 
'node_modules/(?!d3-(interpolate|color)|remark-gfm|markdown-table|micromark-*.|decode-named-character-reference|character-entities|mdast-util-*.|unist-util-*.|ccount|escape-string-regexp|nanoid)', + 'node_modules/(?!d3-(interpolate|color)|remark-gfm|markdown-table|micromark-*.|decode-named-character-reference|character-entities|mdast-util-*.|unist-util-*.|ccount|escape-string-regexp|nanoid|echarts|zrender)', ], globals: { __DEV__: true, diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index 6316286ab017..a49fde591b1a 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -1,12 +1,12 @@ { "name": "superset", - "version": "0.0.0-dev", + "version": "4.1.3", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "superset", - "version": "0.0.0-dev", + "version": "4.1.3", "license": "Apache-2.0", "workspaces": [ "packages/*", @@ -67,6 +67,7 @@ "core-js": "^3.37.1", "d3-scale": "^2.1.2", "dom-to-image-more": "^3.2.0", + "dom-to-pdf": "^0.3.2", "emotion-rgba": "0.0.12", "fast-glob": "^3.2.7", "fs-extra": "^10.0.0", @@ -82,6 +83,7 @@ "json-bigint": "^1.0.0", "json-stringify-pretty-compact": "^2.0.0", "lodash": "^4.17.21", + "luxon": "^3.5.0", "mapbox-gl": "^2.10.0", "markdown-to-jsx": "^7.4.7", "match-sorter": "^6.3.4", @@ -92,7 +94,7 @@ "mustache": "^2.2.1", "nanoid": "^5.0.7", "polished": "^4.3.1", - "prop-types": "^15.7.2", + "prop-types": "^15.8.1", "query-string": "^6.13.7", "rc-trigger": "^5.3.4", "re-resizable": "^6.9.11", @@ -141,7 +143,7 @@ "yargs": "^17.7.2" }, "devDependencies": { - "@applitools/eyes-storybook": "^3.49.0", + "@applitools/eyes-storybook": "^3.50.7", "@babel/cli": "^7.22.6", "@babel/compat-data": "^7.22.6", "@babel/core": "^7.23.9", @@ -186,6 +188,7 @@ "@types/jquery": "^3.5.8", "@types/js-levenshtein": "^1.1.3", "@types/json-bigint": "^1.0.4", + "@types/luxon": "^3.7.1", "@types/mousetrap": "^1.6.15", "@types/react": "^16.9.53", "@types/react-dom": 
"^16.9.8", @@ -409,32 +412,32 @@ } }, "node_modules/@applitools/core": { - "version": "4.12.2", - "resolved": "https://registry.npmjs.org/@applitools/core/-/core-4.12.2.tgz", - "integrity": "sha512-Q3yPuFMr3QpV1mBnYF0D3OwNP46JxraFVzn1QNth0pQ/UR4XLdB2a3vtDNFrLD583T3OYiZamu0zouq3+bOClg==", - "dev": true, - "dependencies": { - "@applitools/core-base": "1.11.1", - "@applitools/dom-capture": "11.2.6", - "@applitools/dom-snapshot": "4.10.0", - "@applitools/driver": "1.16.6", - "@applitools/ec-client": "1.8.1", - "@applitools/logger": "2.0.15", - "@applitools/nml-client": "1.8.1", - "@applitools/req": "1.6.6", - "@applitools/screenshoter": "3.8.28", - "@applitools/snippets": "2.4.25", - "@applitools/socket": "1.1.15", - "@applitools/spec-driver-webdriver": "1.1.4", - "@applitools/ufg-client": "1.11.1", - "@applitools/utils": "1.7.1", + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@applitools/core/-/core-4.18.0.tgz", + "integrity": "sha512-GCW9pwPwXIieKLF5cdA2ezuwwzWHFFnq9mGNAfsvWc1/o2rchj7VRxMRo2566esaniOGVtY7klf9HzJaaZQubQ==", + "dev": true, + "dependencies": { + "@applitools/core-base": "1.16.0", + "@applitools/dom-capture": "11.3.0", + "@applitools/dom-snapshot": "4.11.3", + "@applitools/driver": "1.18.0", + "@applitools/ec-client": "1.9.3", + "@applitools/logger": "2.0.18", + "@applitools/nml-client": "1.8.9", + "@applitools/req": "1.7.2", + "@applitools/screenshoter": "3.8.35", + "@applitools/snippets": "2.4.27", + "@applitools/socket": "1.1.18", + "@applitools/spec-driver-webdriver": "1.1.11", + "@applitools/ufg-client": "1.12.3", + "@applitools/utils": "1.7.4", "@types/ws": "8.5.5", "abort-controller": "3.0.0", "chalk": "4.1.2", "node-fetch": "2.6.7", - "semver": "7.5.4", + "semver": "7.6.2", "webdriver": "7.31.1", - "ws": "8.13.0", + "ws": "8.17.1", "yargs": "17.7.2" }, "bin": { @@ -446,15 +449,15 @@ } }, "node_modules/@applitools/core-base": { - "version": "1.11.1", - "resolved": 
"https://registry.npmjs.org/@applitools/core-base/-/core-base-1.11.1.tgz", - "integrity": "sha512-PRkqjyBE+H/WPBpJp5JlluWb2Cl6POCm1+GyeKYeobekXD+uhZAQSv6C1kiY7KBv1mwuvqmvtBbbR8fYCfKrKw==", + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/@applitools/core-base/-/core-base-1.16.0.tgz", + "integrity": "sha512-6v5box6DqmvyfVNe0tjRSCIZpfkn6fc0DZMZI4+jKLczh4zm+Tlfey1ECavP3fRZayh79SGCpeIDqBNI9Ll7dA==", "dev": true, "dependencies": { - "@applitools/image": "1.1.10", - "@applitools/logger": "2.0.15", - "@applitools/req": "1.6.6", - "@applitools/utils": "1.7.1", + "@applitools/image": "1.1.13", + "@applitools/logger": "2.0.18", + "@applitools/req": "1.7.2", + "@applitools/utils": "1.7.4", "abort-controller": "3.0.0", "throat": "6.0.2" }, @@ -462,26 +465,11 @@ "node": ">=12.13.0" } }, - "node_modules/@applitools/core/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/@applitools/core/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", + "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -490,9 +478,9 @@ } }, "node_modules/@applitools/core/node_modules/ws": { - "version": "8.13.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", - "integrity": 
"sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", + "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", "dev": true, "engines": { "node": ">=10.0.0" @@ -510,16 +498,10 @@ } } }, - "node_modules/@applitools/core/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/@applitools/css-tree": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@applitools/css-tree/-/css-tree-1.1.2.tgz", - "integrity": "sha512-+DBY7Rf/PorHniPYNNG9rDbQcjuXvrVTx3vXXMz7h4m8h8wjUDq5afIUQ9QSTj3H8awXKHRg1o9XJl5yA6cxOg==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@applitools/css-tree/-/css-tree-1.1.4.tgz", + "integrity": "sha512-rH3aq/dkTweEUgS/MKuthD79CZDqpQVJlqmxqVxLZVAzbeFxYdTG/gnfG0zj6YJ025jzcPH2ktdW16Rl3QLutg==", "dev": true, "dependencies": { "mdn-data": "2.1.0", @@ -539,12 +521,12 @@ } }, "node_modules/@applitools/dom-capture": { - "version": "11.2.6", - "resolved": "https://registry.npmjs.org/@applitools/dom-capture/-/dom-capture-11.2.6.tgz", - "integrity": "sha512-USNpYDaj+L8GcPX0pJFHbDpaHc/IFWJVvFiGrOWylgPPinBWtco52mj7lv5urSX9rVyxEF41awszA2BOFOIV3Q==", + "version": "11.3.0", + "resolved": "https://registry.npmjs.org/@applitools/dom-capture/-/dom-capture-11.3.0.tgz", + "integrity": "sha512-LGcNSPgzvlL/afQGUyykTfuPR6N+GYYQ5EaA/f5j4lgfYVxEyG/6t1W62GTImR86ZVHLEsKAQUKVE7jbKAZmVw==", "dev": true, "dependencies": { - "@applitools/dom-shared": "1.0.13", + "@applitools/dom-shared": "1.0.15", "@applitools/functional-commons": "1.6.0" }, "engines": { @@ -552,22 +534,22 @@ } }, "node_modules/@applitools/dom-shared": { - "version": "1.0.13", - "resolved": 
"https://registry.npmjs.org/@applitools/dom-shared/-/dom-shared-1.0.13.tgz", - "integrity": "sha512-FcZKhdnPcV42IT9tPK80Tlzs6Xxsv11hgfgMqKscOOtgZ02xK9d8w1tuSMRO9VFDzCLaEFe/QSLk8/FgrDMy7w==", + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/@applitools/dom-shared/-/dom-shared-1.0.15.tgz", + "integrity": "sha512-XN77SPfzXriU1x6gTcublSe0yUJHxlYwHesOnWQov2dMVfHx7y3qp0yrjdVC7LO2bDIJIzDlPJRhfg2otlbxig==", "dev": true, "engines": { "node": ">=12.13.0" } }, "node_modules/@applitools/dom-snapshot": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/@applitools/dom-snapshot/-/dom-snapshot-4.10.0.tgz", - "integrity": "sha512-ZoHVtcPOtZTItaB7vWFiKNLJAbWniFCICcaM5HExsF7VTbFViyh57ExE6OqRryLvSQSjeFLFcsjqio41aQkfBQ==", + "version": "4.11.3", + "resolved": "https://registry.npmjs.org/@applitools/dom-snapshot/-/dom-snapshot-4.11.3.tgz", + "integrity": "sha512-jdEWSbEOmD9LbzashTQ/YzYDdIKrhSBwNqNTIk8qjV8YtbQfZ+NtgCtW7nOsbknAMk95CfYEUV3R1rxCXs1XfA==", "dev": true, "dependencies": { - "@applitools/css-tree": "1.1.2", - "@applitools/dom-shared": "1.0.13", + "@applitools/css-tree": "1.1.4", + "@applitools/dom-shared": "1.0.15", "@applitools/functional-commons": "1.6.0", "pako": "1.0.11" }, @@ -576,40 +558,25 @@ } }, "node_modules/@applitools/driver": { - "version": "1.16.6", - "resolved": "https://registry.npmjs.org/@applitools/driver/-/driver-1.16.6.tgz", - "integrity": "sha512-a3xgpIaOP+8VdIEEx8GdNDu21nY9VyanO9zrMpgXUiwX5AzQJ56O2UjaVnewUAU9kD31Tbn6apofMUMNo4gEXQ==", + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/@applitools/driver/-/driver-1.18.0.tgz", + "integrity": "sha512-wJYPZ2oEzRtyxne518GgdQbE+JF7S6yZEZX6SJWpVwrv/MPBKD9byxRi89XZcSpyxweFt7Ud7yJskBbubXu7QQ==", "dev": true, "dependencies": { - "@applitools/logger": "2.0.15", - "@applitools/snippets": "2.4.25", - "@applitools/utils": "1.7.1", - "semver": "7.5.4" + "@applitools/logger": "2.0.18", + "@applitools/snippets": "2.4.27", + "@applitools/utils": "1.7.4", + "semver": "7.6.2" }, 
"engines": { "node": ">=12.13.0" } }, - "node_modules/@applitools/driver/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/@applitools/driver/node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", + "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -617,26 +584,20 @@ "node": ">=10" } }, - "node_modules/@applitools/driver/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/@applitools/ec-client": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/@applitools/ec-client/-/ec-client-1.8.1.tgz", - "integrity": "sha512-KeuF82FzmBd2CNuupc4fjzt928Ow3FeGOhvtsx7l27QohwEO/HB3+Src7e5k/p6BwrDXqxYhxIPDgHe9en8ZiA==", - "dev": true, - "dependencies": { - "@applitools/core-base": "1.11.1", - "@applitools/driver": "1.16.6", - "@applitools/logger": "2.0.15", - "@applitools/req": "1.6.6", - "@applitools/socket": "1.1.15", - "@applitools/spec-driver-webdriver": "1.1.4", - "@applitools/tunnel-client": "1.5.1", - "@applitools/utils": "1.7.1", + "version": "1.9.3", + "resolved": 
"https://registry.npmjs.org/@applitools/ec-client/-/ec-client-1.9.3.tgz", + "integrity": "sha512-fnsnQpyDi3rltFEeDeUnNIRULpoWBsSf4L5F7g08LBpuAR5MTpY2WArn1nzD12rfQRoTsO7/5H0DYv/+Mr5w3A==", + "dev": true, + "dependencies": { + "@applitools/core-base": "1.16.0", + "@applitools/driver": "1.18.0", + "@applitools/logger": "2.0.18", + "@applitools/req": "1.7.2", + "@applitools/socket": "1.1.18", + "@applitools/spec-driver-webdriver": "1.1.11", + "@applitools/tunnel-client": "1.5.7", + "@applitools/utils": "1.7.4", "abort-controller": "3.0.0", "webdriver": "7.31.1", "yargs": "^17.7.2" @@ -672,9 +633,9 @@ } }, "node_modules/@applitools/execution-grid-tunnel": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@applitools/execution-grid-tunnel/-/execution-grid-tunnel-3.0.3.tgz", - "integrity": "sha512-t2tXM2dJcXezAOpOZQ69GeSHvEpWwQDkncSN7OEQwWQ2q4eo4yFXYCrl/fQCrkYGYyjU3aCB1RjPCqvvK2tnRA==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@applitools/execution-grid-tunnel/-/execution-grid-tunnel-3.0.5.tgz", + "integrity": "sha512-Kp8Sgb5sS/+0CEo0ytvQONzJdmru3vu8BcNwvLyJoqPNf7zSDTr3AR60p9l4hh11nsBzJyi3+Uh8oR968J+mng==", "dev": true, "dependencies": { "@applitools/eg-frpc": "1.0.5", @@ -768,26 +729,27 @@ } }, "node_modules/@applitools/eyes-storybook": { - "version": "3.49.0", - "resolved": "https://registry.npmjs.org/@applitools/eyes-storybook/-/eyes-storybook-3.49.0.tgz", - "integrity": "sha512-h9FUhIWHakBHyT/pbu9KWSxfa2hOQGdQdFnfGrMHOgkWBDkNEw7iHDJaVdLeZHu8953iRZEzrKDnUqQNNMr+vA==", + "version": "3.50.7", + "resolved": "https://registry.npmjs.org/@applitools/eyes-storybook/-/eyes-storybook-3.50.7.tgz", + "integrity": "sha512-V8WN9gMkfKLE9O2a3cTz/t7TFuxbAWiQ6PW/Z4zGVFi2NaXMD04i47f/QFR0qscK7VDhkAGfd6HWw6e18UaPkg==", "dev": true, "hasInstallScript": true, "dependencies": { - "@applitools/core": "4.12.2", - "@applitools/driver": "1.16.6", + "@applitools/core": "4.18.0", + "@applitools/driver": "1.18.0", "@applitools/functional-commons": "1.6.0", - 
"@applitools/logger": "2.0.15", + "@applitools/logger": "2.0.18", "@applitools/monitoring-commons": "1.0.19", - "@applitools/spec-driver-puppeteer": "1.4.4", - "@applitools/ufg-client": "1.11.1", - "@applitools/utils": "1.7.1", + "@applitools/spec-driver-puppeteer": "1.4.11", + "@applitools/ufg-client": "1.12.3", + "@applitools/utils": "1.7.4", "boxen": "4.2.0", "chalk": "3.0.0", "detect-port": "1.3.0", "lodash": "4.17.21", "ora": "3.4.0", "puppeteer": "21.11.0", + "semver": "7.6.2", "strip-ansi": "6.0.0", "throat": "6.0.2", "yargs": "17.7.2" @@ -799,15 +761,6 @@ "node": ">=16.0.0" } }, - "node_modules/@applitools/eyes-storybook/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/@applitools/eyes-storybook/node_modules/boxen": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/boxen/-/boxen-4.2.0.tgz", @@ -1024,6 +977,18 @@ "node": ">=4" } }, + "node_modules/@applitools/eyes-storybook/node_modules/semver": { + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", + "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/@applitools/eyes-storybook/node_modules/string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", @@ -1072,12 +1037,12 @@ } }, "node_modules/@applitools/image": { - "version": "1.1.10", - "resolved": "https://registry.npmjs.org/@applitools/image/-/image-1.1.10.tgz", - "integrity": "sha512-F3wT2WibcVRzaU4T7VjVusqfAn84kfrY37RljXjBq3r0QxOcG84tiXhKu4x3Av/XBwVILEUXnCRZ9H/iNcPEVg==", + "version": "1.1.13", + "resolved": 
"https://registry.npmjs.org/@applitools/image/-/image-1.1.13.tgz", + "integrity": "sha512-oeSnsTJxhD6juNlWufeWsiWV9dbS0a3OL75/r/Bo2yauAi6AsRMDeh+McXJfYlf1NVZbrVG0+vNXn52mDVEIyw==", "dev": true, "dependencies": { - "@applitools/utils": "1.7.1", + "@applitools/utils": "1.7.4", "bmpimagejs": "1.0.4", "jpeg-js": "0.4.4", "omggif": "1.0.10", @@ -1088,12 +1053,12 @@ } }, "node_modules/@applitools/logger": { - "version": "2.0.15", - "resolved": "https://registry.npmjs.org/@applitools/logger/-/logger-2.0.15.tgz", - "integrity": "sha512-urP41ZGhoRLc/XQatcja4+YySy73i+7qug9KLZCzfchAF2LGFl/SDGcqNLu5wui/ieBftu7EGz16wlAMVVUM7Q==", + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/@applitools/logger/-/logger-2.0.18.tgz", + "integrity": "sha512-d54OTreCXE+G9qUxiPDHHBzwof3EnXPrADdZ7ToB9AoI+kOgs/v6wjMx0ghAoXyyOiLvlvJnmdHSyJssRdv5GA==", "dev": true, "dependencies": { - "@applitools/utils": "1.7.1", + "@applitools/utils": "1.7.4", "chalk": "4.1.2", "debug": "4.3.4" }, @@ -1160,26 +1125,26 @@ "dev": true }, "node_modules/@applitools/nml-client": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/@applitools/nml-client/-/nml-client-1.8.1.tgz", - "integrity": "sha512-l+OHSUN3C6y/AlN1AIr+knbT3sClw3W3ZBaprmBHA5mhKRv7lz0jHUSiK9On3n4H55QgLeVx/y/tFBnFoGOJSg==", + "version": "1.8.9", + "resolved": "https://registry.npmjs.org/@applitools/nml-client/-/nml-client-1.8.9.tgz", + "integrity": "sha512-Jwz42oRVnu46V2lgj0eTfKaOu3eYo8T2Z2QhsN/5xleKISJQ8B86954JuZy9Rwx75+9T+ddmYqWfjSBWfhmVhg==", "dev": true, "dependencies": { - "@applitools/logger": "2.0.15", - "@applitools/req": "1.6.6", - "@applitools/utils": "1.7.1" + "@applitools/logger": "2.0.18", + "@applitools/req": "1.7.2", + "@applitools/utils": "1.7.4" }, "engines": { "node": ">=12.13.0" } }, "node_modules/@applitools/req": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/@applitools/req/-/req-1.6.6.tgz", - "integrity": 
"sha512-RJ2fQn5YuRocPVxFMqUTdjaj0oD2eDhtrtTk+wbS1t9g5/5LY7vkvgZuW0ehY/E7yN7RQf093c+VNxyabsshhg==", + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/@applitools/req/-/req-1.7.2.tgz", + "integrity": "sha512-L0tjPFGEJFAEGaifqtmtCghjkG7M0wnEwfzbHi6O+ThtTCbg4JSDRTaNvA+PLXQoS0mFvajG40/t5a4EgAG7QQ==", "dev": true, "dependencies": { - "@applitools/utils": "1.7.1", + "@applitools/utils": "1.7.4", "abort-controller": "3.0.0", "http-proxy-agent": "5.0.0", "https-proxy-agent": "5.0.1", @@ -1189,15 +1154,6 @@ "node": ">=16.13.0" } }, - "node_modules/@applitools/req/node_modules/@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", - "dev": true, - "engines": { - "node": ">= 10" - } - }, "node_modules/@applitools/req/node_modules/data-uri-to-buffer": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", @@ -1207,43 +1163,6 @@ "node": ">= 12" } }, - "node_modules/@applitools/req/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/@applitools/req/node_modules/http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", - "dev": true, - "dependencies": { - "@tootallnate/once": "2", - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, - 
"node_modules/@applitools/req/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, "node_modules/@applitools/req/node_modules/node-fetch": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.1.tgz", @@ -1263,50 +1182,50 @@ } }, "node_modules/@applitools/screenshoter": { - "version": "3.8.28", - "resolved": "https://registry.npmjs.org/@applitools/screenshoter/-/screenshoter-3.8.28.tgz", - "integrity": "sha512-Tod/JNuRi4ibY8vHbn1I5Ppbh7e7qRRJ4ZnKPq0YoaCR0As2jI4xQMFwJRYSNME3GgfD7pZjoLQEWOx3kuL0iQ==", + "version": "3.8.35", + "resolved": "https://registry.npmjs.org/@applitools/screenshoter/-/screenshoter-3.8.35.tgz", + "integrity": "sha512-1jos00VVJOU5uxgh9cVhj7nq9akMFvBIdfQRR9KkUFeylDxt8vRpkmO6zyfbxeK2jyiboPOZXPa0PvL7M0WNLQ==", "dev": true, "dependencies": { - "@applitools/image": "1.1.10", - "@applitools/logger": "2.0.15", - "@applitools/snippets": "2.4.25", - "@applitools/utils": "1.7.1" + "@applitools/image": "1.1.13", + "@applitools/logger": "2.0.18", + "@applitools/snippets": "2.4.27", + "@applitools/utils": "1.7.4" }, "engines": { "node": ">=12.13.0" } }, "node_modules/@applitools/snippets": { - "version": "2.4.25", - "resolved": "https://registry.npmjs.org/@applitools/snippets/-/snippets-2.4.25.tgz", - "integrity": "sha512-vnU9qq1IGkNpvh7Qy0m196t1u3mpx7NNUeHyJRVnJ53Ok4sb9s/KKrkrU9xYkKYY+T3AEvoN0Rp5LVVrKBHGQw==", + "version": "2.4.27", + "resolved": "https://registry.npmjs.org/@applitools/snippets/-/snippets-2.4.27.tgz", + "integrity": "sha512-n6ckwbXWyJ+/DoV1T6bRiGXITgTgjayV0j4AzHiBx+HF3JdzygxIkWtn7yl1dJfzeqEGyrtBK6Sq1tTG2GoQcA==", "dev": true, "engines": { "node": ">=12.13.0" } }, "node_modules/@applitools/socket": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/@applitools/socket/-/socket-1.1.15.tgz", - "integrity": 
"sha512-NxFY+cWGjvDph1AUx0A2qSOLmObeKgHrPo59DBIOQr+Q6Rf9vZCrmeSNoi+q8VMGsNDaWMvRUDv3JAhiQOry9w==", + "version": "1.1.18", + "resolved": "https://registry.npmjs.org/@applitools/socket/-/socket-1.1.18.tgz", + "integrity": "sha512-EMI/MMfVH38ucuZhFWOTUR8cPvuoP9b+xi5yBJF8uLlJjxQEmGnvm+Pm3s9o3mfxQzDRddYGtpIo3TTZhMVZdQ==", "dev": true, "dependencies": { - "@applitools/logger": "2.0.15", - "@applitools/utils": "1.7.1" + "@applitools/logger": "2.0.18", + "@applitools/utils": "1.7.4" }, "engines": { "node": ">=12.13.0" } }, "node_modules/@applitools/spec-driver-puppeteer": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/@applitools/spec-driver-puppeteer/-/spec-driver-puppeteer-1.4.4.tgz", - "integrity": "sha512-Bj8dftrzQvjmHOLZy7ERixjoPP8tVCls07iA5pRd8wUziSVT7E71Id40oehef6jsdpgPRLoJ3EnuSabkusKNCA==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@applitools/spec-driver-puppeteer/-/spec-driver-puppeteer-1.4.11.tgz", + "integrity": "sha512-txxjl4jNWNppXLfA0yQNu9qOH/BpYEkK7idTessEONlJLYOcbH9sT2KFX0TfWAVyHRuin35cdux1o9lfGxapLQ==", "dev": true, "dependencies": { - "@applitools/driver": "1.16.6", - "@applitools/utils": "1.7.1" + "@applitools/driver": "1.18.0", + "@applitools/utils": "1.7.4" }, "engines": { "node": ">=12.13.0" @@ -1316,13 +1235,13 @@ } }, "node_modules/@applitools/spec-driver-webdriver": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/@applitools/spec-driver-webdriver/-/spec-driver-webdriver-1.1.4.tgz", - "integrity": "sha512-d8T9K+69Q9sF0h02Em+YOPqtkwH8VVPa5nsajC34t5N1RPJAE0asT3xB7gMPdPqWe88T7qTVMTHnFyHyL6Hzkw==", + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@applitools/spec-driver-webdriver/-/spec-driver-webdriver-1.1.11.tgz", + "integrity": "sha512-xeVeqiK+Oyi2xGRME54J3yTXUGR9d2NgcOCkXTdZ+QOj8iPzypelyeHkX4nKJNsLw4Ddh9uvaiFJmKppqGZ1Mg==", "dev": true, "dependencies": { - "@applitools/driver": "1.16.6", - "@applitools/utils": "1.7.1", + "@applitools/driver": "1.18.0", + 
"@applitools/utils": "1.7.4", "http-proxy-agent": "5.0.0", "https-proxy-agent": "5.0.1" }, @@ -1333,63 +1252,17 @@ "webdriver": ">=6.0.0" } }, - "node_modules/@applitools/spec-driver-webdriver/node_modules/@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", - "dev": true, - "engines": { - "node": ">= 10" - } - }, - "node_modules/@applitools/spec-driver-webdriver/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/@applitools/spec-driver-webdriver/node_modules/http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", - "dev": true, - "dependencies": { - "@tootallnate/once": "2", - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/@applitools/spec-driver-webdriver/node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, "node_modules/@applitools/tunnel-client": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/@applitools/tunnel-client/-/tunnel-client-1.5.1.tgz", - "integrity": "sha512-0ZcWS++S/t6MXuqHk9h003DCCymmRSX6hz4YhVXIcwSQjppWJzebvBITA8mhBdFPKEMhG8StDmO18bXYBRVkCQ==", + "version": "1.5.7", + 
"resolved": "https://registry.npmjs.org/@applitools/tunnel-client/-/tunnel-client-1.5.7.tgz", + "integrity": "sha512-h2/U2ZTDQp67Q/sU72eNx7dQms54yzfmM/Cordp2ZSQN9FAxt/NN22cUr8Qf+r71Uuu/VYlvzZUdMGl42MuKmA==", "dev": true, "dependencies": { - "@applitools/execution-grid-tunnel": "3.0.3", - "@applitools/logger": "2.0.15", - "@applitools/req": "1.6.6", - "@applitools/socket": "1.1.15", - "@applitools/utils": "1.7.1", + "@applitools/execution-grid-tunnel": "3.0.5", + "@applitools/logger": "2.0.18", + "@applitools/req": "1.7.2", + "@applitools/socket": "1.1.18", + "@applitools/utils": "1.7.4", "abort-controller": "3.0.0", "yargs": "17.7.2" }, @@ -1401,16 +1274,16 @@ } }, "node_modules/@applitools/ufg-client": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/@applitools/ufg-client/-/ufg-client-1.11.1.tgz", - "integrity": "sha512-hEWuFtv8mucRg+b4HAhGuNrzNJnMYz2J4f36vkyzxvw23+E4QvpIgMzNV7mZcvSAibRuGU7HkScH/ZCooujlUA==", + "version": "1.12.3", + "resolved": "https://registry.npmjs.org/@applitools/ufg-client/-/ufg-client-1.12.3.tgz", + "integrity": "sha512-bSxLqxzAuc+ldum/nGoiM/iCcf97uku3bABxB90ilzUYT1DOu9vEGmaPxxGLDc+GRRVYlOYGNdIJF+DQP4dFTg==", "dev": true, "dependencies": { - "@applitools/css-tree": "1.1.2", - "@applitools/image": "1.1.10", - "@applitools/logger": "2.0.15", - "@applitools/req": "1.6.6", - "@applitools/utils": "1.7.1", + "@applitools/css-tree": "1.1.4", + "@applitools/image": "1.1.13", + "@applitools/logger": "2.0.18", + "@applitools/req": "1.7.2", + "@applitools/utils": "1.7.4", "@xmldom/xmldom": "0.8.10", "abort-controller": "3.0.0", "throat": "6.0.2" @@ -1420,9 +1293,9 @@ } }, "node_modules/@applitools/utils": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/@applitools/utils/-/utils-1.7.1.tgz", - "integrity": "sha512-GrusNmsUhletygtGCrlTolkGD4oYGN9E7y+hdX5laeUSAkJ1kDrfbxqUZTuEaK5TDwL4vpWzF8cnOZeCA59+Zg==", + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/@applitools/utils/-/utils-1.7.4.tgz", + 
"integrity": "sha512-qgJqx2yjlJBf79YyFehf1nSp4AXOdzJn3POQyg8CMWV0YH6HsjAfJjYaNrbXFcGYCSpPEJGhGehxC7GVKHX3YA==", "dev": true, "engines": { "node": ">=12.13.0" @@ -6459,16 +6332,6 @@ "@types/istanbul-lib-report": "*" } }, - "node_modules/@jest/core/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/core/node_modules/ansi-styles": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", @@ -6673,19 +6536,6 @@ "node": ">=8" } }, - "node_modules/@jest/core/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/core/node_modules/supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", @@ -7006,16 +6856,6 @@ "@types/istanbul-lib-report": "*" } }, - "node_modules/@jest/reporters/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/reporters/node_modules/anymatch": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", @@ -7199,19 +7039,6 @@ "node": ">=8" } }, - "node_modules/@jest/reporters/node_modules/strip-ansi": { - "version": "6.0.1", - 
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@jest/reporters/node_modules/supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", @@ -8301,16 +8128,6 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, - "node_modules/@lerna/create/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/@lerna/create/node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -9252,19 +9069,6 @@ "node": ">=8" } }, - "node_modules/@lerna/create/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@lerna/create/node_modules/strip-bom": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", @@ -10680,30 +10484,31 @@ } }, "node_modules/@puppeteer/browsers": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-1.9.1.tgz", - "integrity": "sha512-PuvK6xZzGhKPvlx3fpfdM2kYY3P/hB1URtK8wA7XUJ6prn6pp22zvJHu48th0SGcHL9SutbPHrFuQgfXTFobWA==", + "version": "2.3.0", + "resolved": 
"https://registry.npmjs.org/@puppeteer/browsers/-/browsers-2.3.0.tgz", + "integrity": "sha512-ioXoq9gPxkss4MYhD+SFaU9p1IHFUX0ILAWFPyjGaBdjLsYAlZw6j1iLA0N/m12uVHLFDfSYNF7EQccjinIMDA==", "dev": true, "dependencies": { - "debug": "4.3.4", - "extract-zip": "2.0.1", - "progress": "2.0.3", - "proxy-agent": "6.3.1", - "tar-fs": "3.0.4", - "unbzip2-stream": "1.4.3", - "yargs": "17.7.2" + "debug": "^4.3.5", + "extract-zip": "^2.0.1", + "progress": "^2.0.3", + "proxy-agent": "^6.4.0", + "semver": "^7.6.3", + "tar-fs": "^3.0.6", + "unbzip2-stream": "^1.4.3", + "yargs": "^17.7.2" }, "bin": { "browsers": "lib/cjs/main-cli.js" }, "engines": { - "node": ">=16.3.0" + "node": ">=18" } }, "node_modules/@puppeteer/browsers/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -10723,6 +10528,18 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, + "node_modules/@puppeteer/browsers/node_modules/semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/@radix-ui/primitive": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.0.tgz", @@ -15258,6 +15075,15 @@ "@testing-library/dom": ">=7.21.4" } }, + "node_modules/@tootallnate/once": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, "node_modules/@tootallnate/quickjs-emscripten": { "version": "0.23.0", "resolved": "https://registry.npmjs.org/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz", @@ -15974,6 +15800,13 @@ "@types/lodash": "*" } }, + "node_modules/@types/luxon": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@types/luxon/-/luxon-3.7.1.tgz", + "integrity": "sha512-H3iskjFIAn5SlJU7OuxUmTEpebK6TKB8rxZShDslBMZJ5u9S//KM1sbdAisiSrqwLQncVjnpi2OK2J51h+4lsg==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/mapbox-gl": { "version": "2.7.6", "resolved": "https://registry.npmjs.org/@types/mapbox-gl/-/mapbox-gl-2.7.6.tgz", @@ -16064,6 +15897,13 @@ "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" }, + "node_modules/@types/raf": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/@types/raf/-/raf-3.4.3.tgz", + "integrity": "sha512-c4YAvMedbPZ5tEyxzQdMoOhhJ4RD3rngZIdwC2/qDN3d7JpEhB6fiBRKVY1lg5B7Wk+uPBjn5f39j1/2MY1oOw==", + "license": "MIT", + "optional": true + }, "node_modules/@types/range-parser": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", @@ -17848,6 +17688,7 @@ "version": "8.1.0", "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, "dependencies": { "fs.realpath": "^1.0.0", @@ -17890,27 +17731,6 @@ "node": ">=12.0.0" } }, - "node_modules/@wdio/logger/node_modules/ansi-regex": { - "version": "5.0.1", - 
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/@wdio/logger/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/@wdio/protocols": { "version": "7.27.0", "resolved": "https://registry.npmjs.org/@wdio/protocols/-/protocols-7.27.0.tgz", @@ -17942,9 +17762,9 @@ } }, "node_modules/@wdio/types/node_modules/@types/node": { - "version": "18.19.31", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.31.tgz", - "integrity": "sha512-ArgCD39YpyyrtFKIqMDvjz79jto5fcI/SVUs2HwB+f0dAzq68yqOdyaSivLiLugSziTpNXLQrVb7RZFmdZzbhA==", + "version": "18.19.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.42.tgz", + "integrity": "sha512-d2ZFc/3lnK2YCYhos8iaNIYu9Vfhr92nHiyJHRltXWjXUBjEE+A4I58Tdbnw4VhggSW+2j5y5gTrLs4biNnubg==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -18527,15 +18347,6 @@ "string-width": "^4.1.0" } }, - "node_modules/ansi-align/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/ansi-align/node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -18559,18 +18370,6 @@ "node": ">=8" } }, - "node_modules/ansi-align/node_modules/strip-ansi": { - 
"version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/ansi-escapes": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", @@ -18610,6 +18409,14 @@ "ansi-html": "bin/ansi-html" } }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, "node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -19792,9 +19599,7 @@ }, "node_modules/atob": { "version": "2.1.2", - "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", - "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==", - "dev": true, + "license": "(MIT OR Apache-2.0)", "bin": { "atob": "bin/atob.js" }, @@ -19835,9 +19640,9 @@ } }, "node_modules/avvio/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -19924,8 +19729,7 @@ "node_modules/b4a": { "version": "1.6.6", "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz", - "integrity": 
"sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==", - "dev": true + "integrity": "sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==" }, "node_modules/babel-core": { "version": "7.0.0-bridge.0", @@ -20564,6 +20368,45 @@ "integrity": "sha512-h7z00dWdG0PYOQEvChhOSWvOfkIKsdZGkWr083FgN/HyoQuebSew/cgirYqh9SCuy/hRvxc5Vy6Fw8xAmYHLkQ==", "optional": true }, + "node_modules/bare-fs": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-2.3.1.tgz", + "integrity": "sha512-W/Hfxc/6VehXlsgFtbB5B4xFcsCl+pAh30cYhoFyXErf6oGrwjh8SwiPAdHgpmWonKuYpZgGywN0SXt7dgsADA==", + "dev": true, + "optional": true, + "dependencies": { + "bare-events": "^2.0.0", + "bare-path": "^2.0.0", + "bare-stream": "^2.0.0" + } + }, + "node_modules/bare-os": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-2.4.0.tgz", + "integrity": "sha512-v8DTT08AS/G0F9xrhyLtepoo9EJBJ85FRSMbu1pQUlAf6A8T0tEEQGMVObWeqpjhSPXsE0VGlluFBJu2fdoTNg==", + "dev": true, + "optional": true + }, + "node_modules/bare-path": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/bare-path/-/bare-path-2.1.3.tgz", + "integrity": "sha512-lh/eITfU8hrj9Ru5quUp0Io1kJWIk1bTjzo7JH1P5dWmQ2EL4hFUlfI8FonAhSlgIfhn63p84CDY/x+PisgcXA==", + "dev": true, + "optional": true, + "dependencies": { + "bare-os": "^2.1.0" + } + }, + "node_modules/bare-stream": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/bare-stream/-/bare-stream-2.1.3.tgz", + "integrity": "sha512-tiDAH9H/kP+tvNO5sczyn9ZAA7utrSMobyDchsnyyXBuUe2FSQWbxhtuHB8jwpHYYevVo2UJpcmvvjrbHboUUQ==", + "dev": true, + "optional": true, + "dependencies": { + "streamx": "^2.18.0" + } + }, "node_modules/base": { "version": "0.11.2", "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", @@ -20646,6 +20489,16 @@ "resolved": "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz", "integrity": 
"sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ==" }, + "node_modules/base64-arraybuffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", + "integrity": "sha512-I3yl4r9QB5ZRY3XuJVEPfc2XhZO6YweFPI+UovAzn+8/hb3oJ6lnysaFcjVpkCPfVWFUDvoZ8kmVDP7WyRtYtQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.6.0" + } + }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -21080,6 +20933,18 @@ "node-int64": "^0.4.0" } }, + "node_modules/btoa": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/btoa/-/btoa-1.2.1.tgz", + "integrity": "sha512-SB4/MIGlsiVkMcHmT+pSmIPoNDoHg+7cMzmt3Uxt628MTz2487DKSqK/fuhFBrkuqrYv5UCEnACpF4dTFNKc/g==", + "license": "(MIT OR Apache-2.0)", + "bin": { + "btoa": "bin/btoa.js" + }, + "engines": { + "node": ">= 0.4.0" + } + }, "node_modules/buf-compare": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buf-compare/-/buf-compare-1.0.1.tgz", @@ -21639,6 +21504,33 @@ } ] }, + "node_modules/canvg": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/canvg/-/canvg-3.0.10.tgz", + "integrity": "sha512-qwR2FRNO9NlzTeKIPIKpnTY6fqwuYSequ8Ru8c0YkYU7U0oW+hLUvWadLvAu1Rl72OMNiFhoLu4f8eUjQ7l/+Q==", + "license": "MIT", + "optional": true, + "dependencies": { + "@babel/runtime": "^7.12.5", + "@types/raf": "^3.4.0", + "core-js": "^3.8.3", + "raf": "^3.4.1", + "regenerator-runtime": "^0.13.7", + "rgbcolor": "^1.0.1", + "stackblur-canvas": "^2.0.0", + "svg-pathdata": "^6.0.3" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/canvg/node_modules/regenerator-runtime": { + "version": "0.13.11", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", + "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==", + 
"license": "MIT", + "optional": true + }, "node_modules/capture-exit": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/capture-exit/-/capture-exit-2.0.0.tgz", @@ -21948,13 +21840,14 @@ } }, "node_modules/chromium-bidi": { - "version": "0.5.8", - "resolved": "https://registry.npmjs.org/chromium-bidi/-/chromium-bidi-0.5.8.tgz", - "integrity": "sha512-blqh+1cEQbHBKmok3rVJkBlBxt9beKBgOsxbFgs7UJcoVbbeZ+K7+6liAsjgpc8l1Xd55cQUy14fXZdGSb4zIw==", + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/chromium-bidi/-/chromium-bidi-0.6.3.tgz", + "integrity": "sha512-qXlsCmpCZJAnoTYI83Iu6EdYQpMYdVkCfq08KDh2pmlVqK5t5IA9mGs4/LwCwp4fqisSOMXZxP3HIh8w8aRn0A==", "dev": true, "dependencies": { "mitt": "3.0.1", - "urlpattern-polyfill": "10.0.0" + "urlpattern-polyfill": "10.0.0", + "zod": "3.23.8" }, "peerDependencies": { "devtools-protocol": "*" @@ -22115,15 +22008,6 @@ "colors": "1.4.0" } }, - "node_modules/cli-table3/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/cli-table3/node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -22147,18 +22031,6 @@ "node": ">=8" } }, - "node_modules/cli-table3/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/cli-truncate": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz", @@ -22176,16 +22048,6 @@ 
"url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/cli-truncate/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, "node_modules/cli-truncate/node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -22226,19 +22088,6 @@ "node": ">=8" } }, - "node_modules/cli-truncate/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "peer": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/cli-width": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", @@ -22261,14 +22110,6 @@ "node": ">=12" } }, - "node_modules/cliui/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/cliui/node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -22290,17 +22131,6 @@ "node": ">=8" } }, - "node_modules/cliui/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - 
"dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/cliui/node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -22484,29 +22314,6 @@ "node": ">=8.0.0" } }, - "node_modules/columnify/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/columnify/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -23388,57 +23195,6 @@ "node": ">=4.0" } }, - "node_modules/cross-fetch": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-4.0.0.tgz", - "integrity": "sha512-e4a5N8lVvuLgAWgnCrLr2PP0YyDOTHa9H/Rj54dirp61qXnNq46m82bRhNqIA5VccJtWBvPTFRV3TtvHUKPB1g==", - "dev": true, - "dependencies": { - "node-fetch": "^2.6.12" - } - }, - "node_modules/cross-fetch/node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "dev": true, - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - 
"optional": true - } - } - }, - "node_modules/cross-fetch/node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "dev": true - }, - "node_modules/cross-fetch/node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "dev": true - }, - "node_modules/cross-fetch/node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dev": true, - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, "node_modules/cross-spawn": { "version": "6.0.5", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", @@ -23476,6 +23232,16 @@ "isobject": "^3.0.1" } }, + "node_modules/css-line-break": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/css-line-break/-/css-line-break-2.1.0.tgz", + "integrity": "sha512-FHcKFCZcAha3LwfVBhCQbW2nCNbkZXn7KVUJcsT5/P8YmfsVja0FMPJr0B903j/E69HUphKiV9iQArX8SDYA4w==", + "license": "MIT", + "optional": true, + "dependencies": { + "utrie": "^1.0.2" + } + }, "node_modules/css-loader": { "version": "6.8.1", "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.8.1.tgz", @@ -24623,9 +24389,9 @@ } }, "node_modules/dayjs": { - "version": "1.11.11", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.11.tgz", - "integrity": "sha512-okzr3f11N6WuqYtZSvm+F776mB41wRZMhKP+hc34YdW+KmtYYK9iqvHSwo2k9FEH3fhGXvOPV6yz2IcSrfRUDg==" + "version": "1.11.13", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.13.tgz", + "integrity": 
"sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==" }, "node_modules/deasync": { "version": "0.1.29", @@ -25207,9 +24973,9 @@ } }, "node_modules/degenerator/node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", "dev": true }, "node_modules/delaunator": { @@ -25459,9 +25225,9 @@ } }, "node_modules/devtools-protocol": { - "version": "0.0.1232444", - "resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1232444.tgz", - "integrity": "sha512-pM27vqEfxSxRkTMnF+XCmxSEb6duO5R+t8A9DEEJgy4Wz2RVanje2mmj99B6A3zv2r/qGfYlOvYznUhuokizmg==", + "version": "0.0.1312386", + "resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1312386.tgz", + "integrity": "sha512-DPnhUXvmvKT2dFA/j7B+riVLUt9Q6RKJlcppojL5CoRywJJKLDYnRlw0gTFKfgDPHP5E04UoB71SxoJlVZy8FA==", "dev": true }, "node_modules/diff": { @@ -25597,11 +25363,26 @@ "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" } }, + "node_modules/dom-to-image": { + "version": "2.6.0", + "resolved": "git+ssh://git@github.com/dmapper/dom-to-image.git#a7c386a8ea813930f05449ac71ab4be0c262dff3", + "license": "MIT" + }, "node_modules/dom-to-image-more": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/dom-to-image-more/-/dom-to-image-more-3.2.0.tgz", "integrity": "sha512-2bGQTB6m17MBseVhIjShwZqqqCyVS9GgTykWqvVXMqr56fSgHhXnEvZfZkaSuHJYW3ICZQ3sZwAu+UY5tfsF9Q==" }, + "node_modules/dom-to-pdf": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/dom-to-pdf/-/dom-to-pdf-0.3.2.tgz", + "integrity": 
"sha512-eHLQ/IK+2PQlRjybQ9UHYwpiTd/YZFKqGFyRCjVvi6CPlH58drWQnxf7HBCVRUyAjOtI3RG0kvLidPhC7dOhcQ==", + "license": "MIT", + "dependencies": { + "dom-to-image": "git+https://github.com/dmapper/dom-to-image.git", + "jspdf": "^2.5.1" + } + }, "node_modules/dom-walk": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.1.tgz", @@ -25632,6 +25413,13 @@ "url": "https://github.com/fb55/domhandler?sponsor=1" } }, + "node_modules/dompurify": { + "version": "2.5.7", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.5.7.tgz", + "integrity": "sha512-2q4bEI+coQM8f5ez7kt2xclg1XsecaV9ASJk/54vwlfRRNQfDqJz2pzQ8t0Ix/ToBpXlVjrRIx7pFC/o8itG2Q==", + "license": "(MPL-2.0 OR Apache-2.0)", + "optional": true + }, "node_modules/domutils": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", @@ -27198,15 +26986,6 @@ "url": "https://opencollective.com/eslint" } }, - "node_modules/eslint/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/eslint/node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -27465,18 +27244,6 @@ "node": ">=8" } }, - "node_modules/eslint/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/eslint/node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -28379,6 +28146,12 @@ "resolved": 
"https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" }, + "node_modules/fast-uri": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.1.tgz", + "integrity": "sha512-MWipKbbYiYI0UC7cl8m/i/IWTqfC8YXsqjzybjddLsFjStroQzsHXkc73JutMvBiXmOvapk+axIl79ig5t55Bw==", + "dev": true + }, "node_modules/fast-xml-parser": { "version": "4.2.7", "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.7.tgz", @@ -28436,26 +28209,11 @@ "integrity": "sha512-qKcDXmuZadJqdTm6vlCqioEbyewF60b/0LOFCcYN1B6BIZGlYJumWWOYs70SFYLDAH4YqdE1cxH/RKMG7rFxgA==", "dev": true }, - "node_modules/fastify/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/fastify/node_modules/semver": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", - "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -28463,12 +28221,6 @@ "node": ">=10" } }, - "node_modules/fastify/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, 
"node_modules/fastq": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.8.0.tgz", @@ -28597,6 +28349,12 @@ "resolved": "https://registry.npmjs.org/fetch-retry/-/fetch-retry-6.0.0.tgz", "integrity": "sha512-BUFj1aMubgib37I3v4q78fYo63Po7t4HUPTpQ6/QE6yK6cIQrP+W43FYToeTEyg5m2Y7eFUtijUuAv/PDlWuag==" }, + "node_modules/fflate": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", + "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==", + "license": "MIT" + }, "node_modules/figures": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", @@ -28824,9 +28582,9 @@ } }, "node_modules/find-process/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -30615,16 +30373,6 @@ "node": ">=6.9.0" } }, - "node_modules/get-pkg-repo/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/get-pkg-repo/node_modules/cliui": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", @@ -30688,19 +30436,6 @@ "node": ">=8" } }, - "node_modules/get-pkg-repo/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": 
"sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/get-pkg-repo/node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -30834,9 +30569,9 @@ } }, "node_modules/get-uri/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -32538,6 +32273,20 @@ "node": ">=6" } }, + "node_modules/html2canvas": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/html2canvas/-/html2canvas-1.4.1.tgz", + "integrity": "sha512-fPU6BHNpsyIhr8yyMpTLLxAbkaK8ArIBcmZIRiBLiDhjeqvXolaEmDGmELFuX9I4xDcaKKcJl+TKZLqruBbmWA==", + "license": "MIT", + "optional": true, + "dependencies": { + "css-line-break": "^2.1.0", + "text-segmentation": "^1.0.3" + }, + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/htmlparser2": { "version": "8.0.2", "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz", @@ -32602,6 +32351,43 @@ "node": ">=8.0.0" } }, + "node_modules/http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dev": true, + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/http-proxy-agent/node_modules/debug": 
{ + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/http-proxy-agent/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, "node_modules/http-proxy-middleware": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz", @@ -33530,15 +33316,6 @@ "node": ">=12.0.0" } }, - "node_modules/inquirer/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/inquirer/node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -33571,18 +33348,6 @@ "node": ">=8" } }, - "node_modules/inquirer/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/inquirer/node_modules/tslib": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.1.tgz", @@ -36367,16 +36132,6 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, - 
"node_modules/jest-environment-jsdom/node_modules/@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 10" - } - }, "node_modules/jest-environment-jsdom/node_modules/@types/istanbul-reports": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", @@ -36430,24 +36185,6 @@ "node": ">=12" } }, - "node_modules/jest-environment-jsdom/node_modules/debug": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", - "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, "node_modules/jest-environment-jsdom/node_modules/domexception": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/domexception/-/domexception-4.0.0.tgz", @@ -36536,21 +36273,6 @@ "node": ">=12" } }, - "node_modules/jest-environment-jsdom/node_modules/http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", - "dev": true, - "license": "MIT", - "dependencies": { - "@tootallnate/once": "2", - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/jest-environment-jsdom/node_modules/iconv-lite": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", @@ -36610,13 +36332,6 @@ } } }, - "node_modules/jest-environment-jsdom/node_modules/ms": { - 
"version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true, - "license": "MIT" - }, "node_modules/jest-environment-jsdom/node_modules/saxes": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz", @@ -37014,15 +36729,6 @@ "@types/istanbul-lib-report": "*" } }, - "node_modules/jest-html-reporter/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-html-reporter/node_modules/dateformat": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.2.tgz", @@ -37044,18 +36750,6 @@ "node": ">=10" } }, - "node_modules/jest-html-reporter/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/jest-leak-detector": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", @@ -39400,6 +39094,24 @@ "node": "*" } }, + "node_modules/jspdf": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jspdf/-/jspdf-2.5.2.tgz", + "integrity": "sha512-myeX9c+p7znDWPk0eTrujCzNjT+CXdXyk7YmJq5nD5V7uLLKmSXnlQ/Jn/kuo3X09Op70Apm0rQSnFWyGK8uEQ==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.23.2", + "atob": "^2.1.2", + "btoa": "^1.2.1", + "fflate": "^0.8.1" + }, + "optionalDependencies": { + "canvg": "^3.0.6", + "core-js": "^3.6.0", + 
"dompurify": "^2.5.4", + "html2canvas": "^1.0.0-rc.5" + } + }, "node_modules/jsprim": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", @@ -40108,16 +39820,6 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, - "node_modules/lerna/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/lerna/node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -41034,19 +40736,6 @@ "node": ">=8" } }, - "node_modules/lerna/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/lerna/node_modules/strip-bom": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", @@ -41568,15 +41257,15 @@ } }, "node_modules/light-my-request/node_modules/ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", "dev": true, "dependencies": { - "fast-deep-equal": "^3.1.1", + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" + 
"require-from-string": "^2.0.2" }, "funding": { "type": "github", @@ -41625,16 +41314,6 @@ "enquirer": ">= 2.3.0 < 3" } }, - "node_modules/listr2/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, "node_modules/listr2/node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -41660,19 +41339,6 @@ "node": ">=8" } }, - "node_modules/listr2/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "peer": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/listr2/node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -42005,11 +41671,10 @@ } }, "node_modules/luxon": { - "version": "3.4.4", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.4.tgz", - "integrity": "sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==", - "optional": true, - "peer": true, + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.6.1.tgz", + "integrity": "sha512-tJLxrKJhO2ukZ5z0gyjY1zPh3Rh88Ej9P7jNrZiHMUXHae1yvI2imgOZtL1TO8TW6biMMKfTtAOoEJANgtWBMQ==", + "license": "MIT", "engines": { "node": ">=12" } @@ -49013,16 +48678,6 @@ } } }, - "node_modules/nx/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": 
"sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/nx/node_modules/brace-expansion": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", @@ -49187,19 +48842,6 @@ "node": ">=8" } }, - "node_modules/nx/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/nx/node_modules/tsconfig-paths": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz", @@ -49264,16 +48906,6 @@ "node": ">=8.9" } }, - "node_modules/nyc/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "peer": true, - "engines": { - "node": ">=8" - } - }, "node_modules/nyc/node_modules/cliui": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", @@ -49467,19 +49099,6 @@ "node": ">=8" } }, - "node_modules/nyc/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "peer": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/nyc/node_modules/yargs": { "version": "15.4.1", "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", @@ -50096,27 +49715,6 @@ 
"url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/ora/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "devOptional": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/ora/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "devOptional": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/os-tmpdir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", @@ -50301,9 +49899,9 @@ } }, "node_modules/pac-proxy-agent": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/pac-proxy-agent/-/pac-proxy-agent-7.0.1.tgz", - "integrity": "sha512-ASV8yU4LLKBAjqIPMbrgtaKIvxQri/yh2OpI+S6hVa9JRkUI3Y3NPFbfngDtY7oFtSMD3w31Xns89mDa3Feo5A==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/pac-proxy-agent/-/pac-proxy-agent-7.0.2.tgz", + "integrity": "sha512-BFi3vZnO9X5Qt6NRz7ZOaPja3ic0PhlsmCRYLOpN11+mWBCR6XJDqW5RF3j8jm4WGGQZtBA+bTfxYzeKW73eHg==", "dev": true, "dependencies": { "@tootallnate/quickjs-emscripten": "^0.23.0", @@ -50311,9 +49909,9 @@ "debug": "^4.3.4", "get-uri": "^6.0.1", "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.2", - "pac-resolver": "^7.0.0", - "socks-proxy-agent": "^8.0.2" + "https-proxy-agent": "^7.0.5", + "pac-resolver": "^7.0.1", + "socks-proxy-agent": "^8.0.4" }, "engines": { "node": ">= 14" @@ -50332,9 +49930,9 @@ } }, "node_modules/pac-proxy-agent/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": 
"sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -50362,9 +49960,9 @@ } }, "node_modules/pac-proxy-agent/node_modules/https-proxy-agent": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", - "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", + "integrity": "sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", "dev": true, "dependencies": { "agent-base": "^7.0.2", @@ -50984,8 +50582,7 @@ }, "node_modules/performance-now": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + "license": "MIT" }, "node_modules/periscopic": { "version": "3.1.0", @@ -51847,14 +51444,6 @@ "node": ">= 10" } }, - "node_modules/pretty-format/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/pretty-format/node_modules/react-is": { "version": "17.0.2", "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", @@ -52095,15 +51684,15 @@ } }, "node_modules/proxy-agent": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/proxy-agent/-/proxy-agent-6.3.1.tgz", - "integrity": 
"sha512-Rb5RVBy1iyqOtNl15Cw/llpeLH8bsb37gM1FUfKQ+Wck6xHlbAhWGUFiTRHtkjqGTA5pSHz6+0hrPW/oECihPQ==", + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/proxy-agent/-/proxy-agent-6.4.0.tgz", + "integrity": "sha512-u0piLU+nCOHMgGjRbimiXmA9kM/L9EHh3zL81xCdp7m+Y2pHIsnmbdDoEDoAz5geaonNR6q6+yOPQs6n4T6sBQ==", "dev": true, "dependencies": { "agent-base": "^7.0.2", "debug": "^4.3.4", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.2", + "http-proxy-agent": "^7.0.1", + "https-proxy-agent": "^7.0.3", "lru-cache": "^7.14.1", "pac-proxy-agent": "^7.0.1", "proxy-from-env": "^1.1.0", @@ -52126,9 +51715,9 @@ } }, "node_modules/proxy-agent/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -52156,9 +51745,9 @@ } }, "node_modules/proxy-agent/node_modules/https-proxy-agent": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", - "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", + "integrity": "sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", "dev": true, "dependencies": { "agent-base": "^7.0.2", @@ -52248,44 +51837,44 @@ } }, "node_modules/puppeteer": { - "version": "21.11.0", - "resolved": "https://registry.npmjs.org/puppeteer/-/puppeteer-21.11.0.tgz", - "integrity": 
"sha512-9jTHuYe22TD3sNxy0nEIzC7ZrlRnDgeX3xPkbS7PnbdwYjl2o/z/YuCrRBwezdKpbTDTJ4VqIggzNyeRcKq3cg==", + "version": "22.15.0", + "resolved": "https://registry.npmjs.org/puppeteer/-/puppeteer-22.15.0.tgz", + "integrity": "sha512-XjCY1SiSEi1T7iSYuxS82ft85kwDJUS7wj1Z0eGVXKdtr5g4xnVcbjwxhq5xBnpK/E7x1VZZoJDxpjAOasHT4Q==", "dev": true, "hasInstallScript": true, "dependencies": { - "@puppeteer/browsers": "1.9.1", - "cosmiconfig": "9.0.0", - "puppeteer-core": "21.11.0" + "@puppeteer/browsers": "2.3.0", + "cosmiconfig": "^9.0.0", + "devtools-protocol": "0.0.1312386", + "puppeteer-core": "22.15.0" }, "bin": { "puppeteer": "lib/esm/puppeteer/node/cli.js" }, "engines": { - "node": ">=16.13.2" + "node": ">=18" } }, "node_modules/puppeteer-core": { - "version": "21.11.0", - "resolved": "https://registry.npmjs.org/puppeteer-core/-/puppeteer-core-21.11.0.tgz", - "integrity": "sha512-ArbnyA3U5SGHokEvkfWjW+O8hOxV1RSJxOgriX/3A4xZRqixt9ZFHD0yPgZQF05Qj0oAqi8H/7stDorjoHY90Q==", + "version": "22.15.0", + "resolved": "https://registry.npmjs.org/puppeteer-core/-/puppeteer-core-22.15.0.tgz", + "integrity": "sha512-cHArnywCiAAVXa3t4GGL2vttNxh7GqXtIYGym99egkNJ3oG//wL9LkvO4WE8W1TJe95t1F1ocu9X4xWaGsOKOA==", "dev": true, "dependencies": { - "@puppeteer/browsers": "1.9.1", - "chromium-bidi": "0.5.8", - "cross-fetch": "4.0.0", - "debug": "4.3.4", - "devtools-protocol": "0.0.1232444", - "ws": "8.16.0" + "@puppeteer/browsers": "2.3.0", + "chromium-bidi": "0.6.3", + "debug": "^4.3.6", + "devtools-protocol": "0.0.1312386", + "ws": "^8.18.0" }, "engines": { - "node": ">=16.13.2" + "node": ">=18" } }, "node_modules/puppeteer-core/node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": 
"sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -52306,9 +51895,9 @@ "dev": true }, "node_modules/puppeteer-core/node_modules/ws": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz", - "integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==", + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", "dev": true, "engines": { "node": ">=10.0.0" @@ -52389,9 +51978,9 @@ } }, "node_modules/puppeteer/node_modules/typescript": { - "version": "5.4.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", - "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", + "version": "5.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", + "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", "dev": true, "optional": true, "peer": true, @@ -52530,8 +52119,7 @@ }, "node_modules/raf": { "version": "3.4.1", - "resolved": "https://registry.npmjs.org/raf/-/raf-3.4.1.tgz", - "integrity": "sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA==", + "license": "MIT", "dependencies": { "performance-now": "^2.1.0" } @@ -56014,14 +55602,6 @@ "strip-ansi": "^6.0.1" } }, - "node_modules/renderkid/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/renderkid/node_modules/css-select": { "version": "4.3.0", "resolved": 
"https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz", @@ -56103,17 +55683,6 @@ "entities": "^2.0.0" } }, - "node_modules/renderkid/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/repeat-element": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.3.tgz", @@ -56400,11 +55969,21 @@ } }, "node_modules/rfdc": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.1.tgz", - "integrity": "sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg==", + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", + "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", "dev": true }, + "node_modules/rgbcolor": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/rgbcolor/-/rgbcolor-1.0.1.tgz", + "integrity": "sha512-9aZLIrhRaD97sgVhtJOW6ckOEh6/GnvQtdVNfdZ6s67+3/XwLS9lBcQYzEEhYVeUowN7pRzMLsyGhK2i/xvWbw==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">= 0.8.15" + } + }, "node_modules/rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", @@ -57956,6 +57535,16 @@ "node": ">=8" } }, + "node_modules/stackblur-canvas": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/stackblur-canvas/-/stackblur-canvas-2.7.0.tgz", + "integrity": "sha512-yf7OENo23AGJhBriGx0QivY5JP6Y1HbrrDI6WLt6C5auYZXlQrheoY8hD4ibekFKz1HOfE48Ww8kMWMnJD/zcQ==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=0.1.14" + } + }, "node_modules/static-eval": { "version": "2.1.0", "resolved": 
"https://registry.npmjs.org/static-eval/-/static-eval-2.1.0.tgz", @@ -58174,12 +57763,13 @@ "dev": true }, "node_modules/streamx": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.16.1.tgz", - "integrity": "sha512-m9QYj6WygWyWa3H1YY69amr4nVgy61xfjys7xO7kviL5rfIEc2naf+ewFiOA+aEJD7y0JO3h2GoiUv4TDwEGzQ==", + "version": "2.18.0", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.18.0.tgz", + "integrity": "sha512-LLUC1TWdjVdn1weXGcSxyTR3T4+acB6tVGXT95y0nGbca4t4o/ng1wKAGTljm9VicuCVLvRlqFYXYy5GwgM7sQ==", "dependencies": { - "fast-fifo": "^1.1.0", - "queue-tick": "^1.0.1" + "fast-fifo": "^1.3.2", + "queue-tick": "^1.0.1", + "text-decoder": "^1.1.0" }, "optionalDependencies": { "bare-events": "^2.2.0" @@ -58224,27 +57814,6 @@ "node": ">=10" } }, - "node_modules/string-length/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-length/node_modules/strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/string-similarity": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/string-similarity/-/string-similarity-4.0.4.tgz", @@ -58279,14 +57848,6 @@ "node": ">=8" } }, - "node_modules/string-width-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - 
} - }, "node_modules/string-width-cjs/node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -58295,17 +57856,6 @@ "node": ">=8" } }, - "node_modules/string-width-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/string-width/node_modules/ansi-regex": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz", @@ -58413,8 +57963,7 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", + "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", @@ -58425,10 +57974,14 @@ "node": ">=8" } }, - "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, "engines": { "node": ">=8" } @@ -58619,6 +58172,16 @@ "integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==", "dev": true }, + "node_modules/svg-pathdata": { + "version": 
"6.0.3", + "resolved": "https://registry.npmjs.org/svg-pathdata/-/svg-pathdata-6.0.3.tgz", + "integrity": "sha512-qsjeeq5YjBZ5eMdFuUa4ZosMLxgr5RZ+F+Y1OrDhuOCEInRMA3x74XdBtggJcj9kOeInz0WE+LgCPDkZFlBYJw==", + "license": "MIT", + "optional": true, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/svgo": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/svgo/-/svgo-3.2.0.tgz", @@ -58759,14 +58322,17 @@ } }, "node_modules/tar-fs": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.4.tgz", - "integrity": "sha512-5AFQU8b9qLfZCX9zp2duONhPmZv0hGYiBPJsyUdqMjzq/mqVpy/rEUSeHk1+YitmxugaptgBh5oDGU3VsAJq4w==", + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.6.tgz", + "integrity": "sha512-iokBDQQkUyeXhgPYaZxmczGPhnhXZ0CmrqI+MOb/WFGS9DW5wnfrLgtjUJBvz50vQ3qfRwJ62QVoCFu8mPVu5w==", "dev": true, "dependencies": { - "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^3.1.5" + }, + "optionalDependencies": { + "bare-fs": "^2.1.1", + "bare-path": "^2.1.0" } }, "node_modules/tar-fs/node_modules/tar-stream": { @@ -59046,6 +58612,14 @@ "node": ">=8" } }, + "node_modules/text-decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.1.1.tgz", + "integrity": "sha512-8zll7REEv4GDD3x4/0pW+ppIxSNs7H1J10IKFZsuOMscumCdM2a+toDGLPA3T+1+fLBql4zbt5z83GEQGGV5VA==", + "dependencies": { + "b4a": "^1.6.4" + } + }, "node_modules/text-extensions": { "version": "1.9.0", "resolved": "https://registry.npmjs.org/text-extensions/-/text-extensions-1.9.0.tgz", @@ -59056,6 +58630,16 @@ "node": ">=0.10" } }, + "node_modules/text-segmentation": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/text-segmentation/-/text-segmentation-1.0.3.tgz", + "integrity": "sha512-iOiPUo/BGnZ6+54OsWxZidGCsdU8YbE4PSpdPinp7DeMtUJNJBoJ/ouUSTJjHkh1KntHaltHl/gDs2FC4i5+Nw==", + "license": "MIT", + "optional": true, + "dependencies": { + "utrie": "^1.0.2" + } + }, 
"node_modules/text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -60486,6 +60070,16 @@ "node": ">= 0.4.0" } }, + "node_modules/utrie": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/utrie/-/utrie-1.0.2.tgz", + "integrity": "sha512-1MLa5ouZiOmQzUbjbu9VmjLzn1QLXBhwpUa7kdLUQK+KQ5KA9I1vk5U4YHe/X2Ch7PYnJfWuWT+VbuxbGwljhw==", + "license": "MIT", + "optional": true, + "dependencies": { + "base64-arraybuffer": "^1.0.2" + } + }, "node_modules/uuid": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", @@ -60861,9 +60455,9 @@ } }, "node_modules/webdriver/node_modules/@types/node": { - "version": "18.19.31", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.31.tgz", - "integrity": "sha512-ArgCD39YpyyrtFKIqMDvjz79jto5fcI/SVUs2HwB+f0dAzq68yqOdyaSivLiLugSziTpNXLQrVb7RZFmdZzbhA==", + "version": "18.19.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.42.tgz", + "integrity": "sha512-d2ZFc/3lnK2YCYhos8iaNIYu9Vfhr92nHiyJHRltXWjXUBjEE+A4I58Tdbnw4VhggSW+2j5y5gTrLs4biNnubg==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -61396,27 +60990,6 @@ "strip-ansi": "^6.0.0" } }, - "node_modules/webpack-hot-middleware/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/webpack-hot-middleware/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/webpack-manifest-plugin": { 
"version": "4.1.1", "resolved": "https://registry.npmjs.org/webpack-manifest-plugin/-/webpack-manifest-plugin-4.1.1.tgz", @@ -62203,15 +61776,6 @@ "node": ">=8" } }, - "node_modules/widest-line/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, "node_modules/widest-line/node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -62235,18 +61799,6 @@ "node": ">=8" } }, - "node_modules/widest-line/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/wildcard": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", @@ -62289,14 +61841,6 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/wrap-ansi-cjs/node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -62318,26 +61862,6 @@ "node": ">=8" } }, - "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "devOptional": true, - "engines": { - "node": ">=8" - } - }, "node_modules/wrap-ansi/node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -62361,18 +61885,6 @@ "node": ">=8" } }, - "node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "devOptional": true, - "dependencies": { - "ansi-regex": "^5.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", @@ -62664,14 +62176,6 @@ "node": ">=12" } }, - "node_modules/yargs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/yargs/node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -62693,17 +62197,6 @@ "node": ">=8" } }, - "node_modules/yargs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/yargs/node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", @@ -62756,6 +62249,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/zod": { + "version": "3.23.8", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz", + "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, "node_modules/zrender": { "version": "5.4.1", "resolved": "https://registry.npmjs.org/zrender/-/zrender-5.4.1.tgz", @@ -63080,16 +62582,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "packages/generator-superset/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "devOptional": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "packages/generator-superset/node_modules/ansi-styles": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", @@ -64141,19 +63633,6 @@ "node": ">=8" } }, - "packages/generator-superset/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, 
"packages/generator-superset/node_modules/strip-bom-buf": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-bom-buf/-/strip-bom-buf-3.0.1.tgz", @@ -66531,8 +66010,7 @@ }, "plugins/legacy-preset-chart-nvd3/node_modules/dompurify": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.0.tgz", - "integrity": "sha512-yoU4rhgPKCo+p5UrWWWNKiIq+ToGqmVVhk0PmMYBK4kRsR3/qhemNFL8f6CFmBd4gMwm3F4T7HBoydP5uY07fA==" + "license": "(MPL-2.0 OR Apache-2.0)" }, "plugins/plugin-chart-echarts": { "name": "@superset-ui/plugin-chart-echarts", @@ -66884,86 +66362,68 @@ } }, "@applitools/core": { - "version": "4.12.2", - "resolved": "https://registry.npmjs.org/@applitools/core/-/core-4.12.2.tgz", - "integrity": "sha512-Q3yPuFMr3QpV1mBnYF0D3OwNP46JxraFVzn1QNth0pQ/UR4XLdB2a3vtDNFrLD583T3OYiZamu0zouq3+bOClg==", - "dev": true, - "requires": { - "@applitools/core-base": "1.11.1", - "@applitools/dom-capture": "11.2.6", - "@applitools/dom-snapshot": "4.10.0", - "@applitools/driver": "1.16.6", - "@applitools/ec-client": "1.8.1", - "@applitools/logger": "2.0.15", - "@applitools/nml-client": "1.8.1", - "@applitools/req": "1.6.6", - "@applitools/screenshoter": "3.8.28", - "@applitools/snippets": "2.4.25", - "@applitools/socket": "1.1.15", - "@applitools/spec-driver-webdriver": "1.1.4", - "@applitools/ufg-client": "1.11.1", - "@applitools/utils": "1.7.1", + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@applitools/core/-/core-4.18.0.tgz", + "integrity": "sha512-GCW9pwPwXIieKLF5cdA2ezuwwzWHFFnq9mGNAfsvWc1/o2rchj7VRxMRo2566esaniOGVtY7klf9HzJaaZQubQ==", + "dev": true, + "requires": { + "@applitools/core-base": "1.16.0", + "@applitools/dom-capture": "11.3.0", + "@applitools/dom-snapshot": "4.11.3", + "@applitools/driver": "1.18.0", + "@applitools/ec-client": "1.9.3", + "@applitools/logger": "2.0.18", + "@applitools/nml-client": "1.8.9", + "@applitools/req": "1.7.2", + "@applitools/screenshoter": "3.8.35", + "@applitools/snippets": 
"2.4.27", + "@applitools/socket": "1.1.18", + "@applitools/spec-driver-webdriver": "1.1.11", + "@applitools/ufg-client": "1.12.3", + "@applitools/utils": "1.7.4", "@types/ws": "8.5.5", "abort-controller": "3.0.0", "chalk": "4.1.2", "node-fetch": "2.6.7", - "semver": "7.5.4", + "semver": "7.6.2", "webdriver": "7.31.1", - "ws": "8.13.0", + "ws": "8.17.1", "yargs": "17.7.2" }, "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, "semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", + "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "dev": true }, "ws": { - "version": "8.13.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", - "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", + "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", "dev": true, "requires": {} - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true } } }, "@applitools/core-base": { - "version": "1.11.1", - "resolved": 
"https://registry.npmjs.org/@applitools/core-base/-/core-base-1.11.1.tgz", - "integrity": "sha512-PRkqjyBE+H/WPBpJp5JlluWb2Cl6POCm1+GyeKYeobekXD+uhZAQSv6C1kiY7KBv1mwuvqmvtBbbR8fYCfKrKw==", + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/@applitools/core-base/-/core-base-1.16.0.tgz", + "integrity": "sha512-6v5box6DqmvyfVNe0tjRSCIZpfkn6fc0DZMZI4+jKLczh4zm+Tlfey1ECavP3fRZayh79SGCpeIDqBNI9Ll7dA==", "dev": true, "requires": { - "@applitools/image": "1.1.10", - "@applitools/logger": "2.0.15", - "@applitools/req": "1.6.6", - "@applitools/utils": "1.7.1", + "@applitools/image": "1.1.13", + "@applitools/logger": "2.0.18", + "@applitools/req": "1.7.2", + "@applitools/utils": "1.7.4", "abort-controller": "3.0.0", "throat": "6.0.2" } }, "@applitools/css-tree": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@applitools/css-tree/-/css-tree-1.1.2.tgz", - "integrity": "sha512-+DBY7Rf/PorHniPYNNG9rDbQcjuXvrVTx3vXXMz7h4m8h8wjUDq5afIUQ9QSTj3H8awXKHRg1o9XJl5yA6cxOg==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/@applitools/css-tree/-/css-tree-1.1.4.tgz", + "integrity": "sha512-rH3aq/dkTweEUgS/MKuthD79CZDqpQVJlqmxqVxLZVAzbeFxYdTG/gnfG0zj6YJ025jzcPH2ktdW16Rl3QLutg==", "dev": true, "requires": { "mdn-data": "2.1.0", @@ -66979,85 +66439,67 @@ } }, "@applitools/dom-capture": { - "version": "11.2.6", - "resolved": "https://registry.npmjs.org/@applitools/dom-capture/-/dom-capture-11.2.6.tgz", - "integrity": "sha512-USNpYDaj+L8GcPX0pJFHbDpaHc/IFWJVvFiGrOWylgPPinBWtco52mj7lv5urSX9rVyxEF41awszA2BOFOIV3Q==", + "version": "11.3.0", + "resolved": "https://registry.npmjs.org/@applitools/dom-capture/-/dom-capture-11.3.0.tgz", + "integrity": "sha512-LGcNSPgzvlL/afQGUyykTfuPR6N+GYYQ5EaA/f5j4lgfYVxEyG/6t1W62GTImR86ZVHLEsKAQUKVE7jbKAZmVw==", "dev": true, "requires": { - "@applitools/dom-shared": "1.0.13", + "@applitools/dom-shared": "1.0.15", "@applitools/functional-commons": "1.6.0" } }, "@applitools/dom-shared": { - "version": "1.0.13", - 
"resolved": "https://registry.npmjs.org/@applitools/dom-shared/-/dom-shared-1.0.13.tgz", - "integrity": "sha512-FcZKhdnPcV42IT9tPK80Tlzs6Xxsv11hgfgMqKscOOtgZ02xK9d8w1tuSMRO9VFDzCLaEFe/QSLk8/FgrDMy7w==", + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/@applitools/dom-shared/-/dom-shared-1.0.15.tgz", + "integrity": "sha512-XN77SPfzXriU1x6gTcublSe0yUJHxlYwHesOnWQov2dMVfHx7y3qp0yrjdVC7LO2bDIJIzDlPJRhfg2otlbxig==", "dev": true }, "@applitools/dom-snapshot": { - "version": "4.10.0", - "resolved": "https://registry.npmjs.org/@applitools/dom-snapshot/-/dom-snapshot-4.10.0.tgz", - "integrity": "sha512-ZoHVtcPOtZTItaB7vWFiKNLJAbWniFCICcaM5HExsF7VTbFViyh57ExE6OqRryLvSQSjeFLFcsjqio41aQkfBQ==", + "version": "4.11.3", + "resolved": "https://registry.npmjs.org/@applitools/dom-snapshot/-/dom-snapshot-4.11.3.tgz", + "integrity": "sha512-jdEWSbEOmD9LbzashTQ/YzYDdIKrhSBwNqNTIk8qjV8YtbQfZ+NtgCtW7nOsbknAMk95CfYEUV3R1rxCXs1XfA==", "dev": true, "requires": { - "@applitools/css-tree": "1.1.2", - "@applitools/dom-shared": "1.0.13", + "@applitools/css-tree": "1.1.4", + "@applitools/dom-shared": "1.0.15", "@applitools/functional-commons": "1.6.0", "pako": "1.0.11" } }, "@applitools/driver": { - "version": "1.16.6", - "resolved": "https://registry.npmjs.org/@applitools/driver/-/driver-1.16.6.tgz", - "integrity": "sha512-a3xgpIaOP+8VdIEEx8GdNDu21nY9VyanO9zrMpgXUiwX5AzQJ56O2UjaVnewUAU9kD31Tbn6apofMUMNo4gEXQ==", + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/@applitools/driver/-/driver-1.18.0.tgz", + "integrity": "sha512-wJYPZ2oEzRtyxne518GgdQbE+JF7S6yZEZX6SJWpVwrv/MPBKD9byxRi89XZcSpyxweFt7Ud7yJskBbubXu7QQ==", "dev": true, "requires": { - "@applitools/logger": "2.0.15", - "@applitools/snippets": "2.4.25", - "@applitools/utils": "1.7.1", - "semver": "7.5.4" + "@applitools/logger": "2.0.18", + "@applitools/snippets": "2.4.27", + "@applitools/utils": "1.7.4", + "semver": "7.6.2" }, "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, "semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", + "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", "dev": true } } }, "@applitools/ec-client": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/@applitools/ec-client/-/ec-client-1.8.1.tgz", - "integrity": "sha512-KeuF82FzmBd2CNuupc4fjzt928Ow3FeGOhvtsx7l27QohwEO/HB3+Src7e5k/p6BwrDXqxYhxIPDgHe9en8ZiA==", - "dev": true, - "requires": { - "@applitools/core-base": "1.11.1", - "@applitools/driver": "1.16.6", - "@applitools/logger": "2.0.15", - "@applitools/req": "1.6.6", - "@applitools/socket": "1.1.15", - "@applitools/spec-driver-webdriver": "1.1.4", - "@applitools/tunnel-client": "1.5.1", - "@applitools/utils": "1.7.1", + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/@applitools/ec-client/-/ec-client-1.9.3.tgz", + "integrity": "sha512-fnsnQpyDi3rltFEeDeUnNIRULpoWBsSf4L5F7g08LBpuAR5MTpY2WArn1nzD12rfQRoTsO7/5H0DYv/+Mr5w3A==", + "dev": true, + "requires": { + "@applitools/core-base": "1.16.0", + "@applitools/driver": "1.18.0", + "@applitools/logger": "2.0.18", + "@applitools/req": "1.7.2", + "@applitools/socket": "1.1.18", + 
"@applitools/spec-driver-webdriver": "1.1.11", + "@applitools/tunnel-client": "1.5.7", + "@applitools/utils": "1.7.4", "abort-controller": "3.0.0", "webdriver": "7.31.1", "yargs": "^17.7.2" @@ -67080,9 +66522,9 @@ } }, "@applitools/execution-grid-tunnel": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@applitools/execution-grid-tunnel/-/execution-grid-tunnel-3.0.3.tgz", - "integrity": "sha512-t2tXM2dJcXezAOpOZQ69GeSHvEpWwQDkncSN7OEQwWQ2q4eo4yFXYCrl/fQCrkYGYyjU3aCB1RjPCqvvK2tnRA==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@applitools/execution-grid-tunnel/-/execution-grid-tunnel-3.0.5.tgz", + "integrity": "sha512-Kp8Sgb5sS/+0CEo0ytvQONzJdmru3vu8BcNwvLyJoqPNf7zSDTr3AR60p9l4hh11nsBzJyi3+Uh8oR968J+mng==", "dev": true, "requires": { "@applitools/eg-frpc": "1.0.5", @@ -67148,36 +66590,31 @@ } }, "@applitools/eyes-storybook": { - "version": "3.49.0", - "resolved": "https://registry.npmjs.org/@applitools/eyes-storybook/-/eyes-storybook-3.49.0.tgz", - "integrity": "sha512-h9FUhIWHakBHyT/pbu9KWSxfa2hOQGdQdFnfGrMHOgkWBDkNEw7iHDJaVdLeZHu8953iRZEzrKDnUqQNNMr+vA==", + "version": "3.50.7", + "resolved": "https://registry.npmjs.org/@applitools/eyes-storybook/-/eyes-storybook-3.50.7.tgz", + "integrity": "sha512-V8WN9gMkfKLE9O2a3cTz/t7TFuxbAWiQ6PW/Z4zGVFi2NaXMD04i47f/QFR0qscK7VDhkAGfd6HWw6e18UaPkg==", "dev": true, "requires": { - "@applitools/core": "4.12.2", - "@applitools/driver": "1.16.6", + "@applitools/core": "4.18.0", + "@applitools/driver": "1.18.0", "@applitools/functional-commons": "1.6.0", - "@applitools/logger": "2.0.15", + "@applitools/logger": "2.0.18", "@applitools/monitoring-commons": "1.0.19", - "@applitools/spec-driver-puppeteer": "1.4.4", - "@applitools/ufg-client": "1.11.1", - "@applitools/utils": "1.7.1", + "@applitools/spec-driver-puppeteer": "1.4.11", + "@applitools/ufg-client": "1.12.3", + "@applitools/utils": "1.7.4", "boxen": "4.2.0", "chalk": "3.0.0", "detect-port": "1.3.0", "lodash": "4.17.21", "ora": "3.4.0", - 
"puppeteer": "21.11.0", + "puppeteer": "^22.4.1", + "semver": "7.6.2", "strip-ansi": "6.0.0", "throat": "6.0.2", "yargs": "17.7.2" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "boxen": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/boxen/-/boxen-4.2.0.tgz", @@ -67344,6 +66781,12 @@ "signal-exit": "^3.0.2" } }, + "semver": { + "version": "7.6.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", + "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "dev": true + }, "string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", @@ -67384,12 +66827,12 @@ "dev": true }, "@applitools/image": { - "version": "1.1.10", - "resolved": "https://registry.npmjs.org/@applitools/image/-/image-1.1.10.tgz", - "integrity": "sha512-F3wT2WibcVRzaU4T7VjVusqfAn84kfrY37RljXjBq3r0QxOcG84tiXhKu4x3Av/XBwVILEUXnCRZ9H/iNcPEVg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/@applitools/image/-/image-1.1.13.tgz", + "integrity": "sha512-oeSnsTJxhD6juNlWufeWsiWV9dbS0a3OL75/r/Bo2yauAi6AsRMDeh+McXJfYlf1NVZbrVG0+vNXn52mDVEIyw==", "dev": true, "requires": { - "@applitools/utils": "1.7.1", + "@applitools/utils": "1.7.4", "bmpimagejs": "1.0.4", "jpeg-js": "0.4.4", "omggif": "1.0.10", @@ -67397,12 +66840,12 @@ } }, "@applitools/logger": { - "version": "2.0.15", - "resolved": "https://registry.npmjs.org/@applitools/logger/-/logger-2.0.15.tgz", - "integrity": "sha512-urP41ZGhoRLc/XQatcja4+YySy73i+7qug9KLZCzfchAF2LGFl/SDGcqNLu5wui/ieBftu7EGz16wlAMVVUM7Q==", + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/@applitools/logger/-/logger-2.0.18.tgz", + "integrity": 
"sha512-d54OTreCXE+G9qUxiPDHHBzwof3EnXPrADdZ7ToB9AoI+kOgs/v6wjMx0ghAoXyyOiLvlvJnmdHSyJssRdv5GA==", "dev": true, "requires": { - "@applitools/utils": "1.7.1", + "@applitools/utils": "1.7.4", "chalk": "4.1.2", "debug": "4.3.4" }, @@ -67451,67 +66894,35 @@ } }, "@applitools/nml-client": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/@applitools/nml-client/-/nml-client-1.8.1.tgz", - "integrity": "sha512-l+OHSUN3C6y/AlN1AIr+knbT3sClw3W3ZBaprmBHA5mhKRv7lz0jHUSiK9On3n4H55QgLeVx/y/tFBnFoGOJSg==", + "version": "1.8.9", + "resolved": "https://registry.npmjs.org/@applitools/nml-client/-/nml-client-1.8.9.tgz", + "integrity": "sha512-Jwz42oRVnu46V2lgj0eTfKaOu3eYo8T2Z2QhsN/5xleKISJQ8B86954JuZy9Rwx75+9T+ddmYqWfjSBWfhmVhg==", "dev": true, "requires": { - "@applitools/logger": "2.0.15", - "@applitools/req": "1.6.6", - "@applitools/utils": "1.7.1" + "@applitools/logger": "2.0.18", + "@applitools/req": "1.7.2", + "@applitools/utils": "1.7.4" } }, "@applitools/req": { - "version": "1.6.6", - "resolved": "https://registry.npmjs.org/@applitools/req/-/req-1.6.6.tgz", - "integrity": "sha512-RJ2fQn5YuRocPVxFMqUTdjaj0oD2eDhtrtTk+wbS1t9g5/5LY7vkvgZuW0ehY/E7yN7RQf093c+VNxyabsshhg==", + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/@applitools/req/-/req-1.7.2.tgz", + "integrity": "sha512-L0tjPFGEJFAEGaifqtmtCghjkG7M0wnEwfzbHi6O+ThtTCbg4JSDRTaNvA+PLXQoS0mFvajG40/t5a4EgAG7QQ==", "dev": true, "requires": { - "@applitools/utils": "1.7.1", + "@applitools/utils": "1.7.4", "abort-controller": "3.0.0", "http-proxy-agent": "5.0.0", "https-proxy-agent": "5.0.1", "node-fetch": "3.3.1" }, "dependencies": { - "@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", - "dev": true - }, "data-uri-to-buffer": { "version": "4.0.1", "resolved": 
"https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", "dev": true }, - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "requires": { - "ms": "2.1.2" - } - }, - "http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", - "dev": true, - "requires": { - "@tootallnate/once": "2", - "agent-base": "6", - "debug": "4" - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, "node-fetch": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.1.tgz", @@ -67526,124 +66937,90 @@ } }, "@applitools/screenshoter": { - "version": "3.8.28", - "resolved": "https://registry.npmjs.org/@applitools/screenshoter/-/screenshoter-3.8.28.tgz", - "integrity": "sha512-Tod/JNuRi4ibY8vHbn1I5Ppbh7e7qRRJ4ZnKPq0YoaCR0As2jI4xQMFwJRYSNME3GgfD7pZjoLQEWOx3kuL0iQ==", + "version": "3.8.35", + "resolved": "https://registry.npmjs.org/@applitools/screenshoter/-/screenshoter-3.8.35.tgz", + "integrity": "sha512-1jos00VVJOU5uxgh9cVhj7nq9akMFvBIdfQRR9KkUFeylDxt8vRpkmO6zyfbxeK2jyiboPOZXPa0PvL7M0WNLQ==", "dev": true, "requires": { - "@applitools/image": "1.1.10", - "@applitools/logger": "2.0.15", - "@applitools/snippets": "2.4.25", - "@applitools/utils": "1.7.1" + "@applitools/image": "1.1.13", + "@applitools/logger": "2.0.18", + "@applitools/snippets": "2.4.27", + "@applitools/utils": "1.7.4" } }, 
"@applitools/snippets": { - "version": "2.4.25", - "resolved": "https://registry.npmjs.org/@applitools/snippets/-/snippets-2.4.25.tgz", - "integrity": "sha512-vnU9qq1IGkNpvh7Qy0m196t1u3mpx7NNUeHyJRVnJ53Ok4sb9s/KKrkrU9xYkKYY+T3AEvoN0Rp5LVVrKBHGQw==", + "version": "2.4.27", + "resolved": "https://registry.npmjs.org/@applitools/snippets/-/snippets-2.4.27.tgz", + "integrity": "sha512-n6ckwbXWyJ+/DoV1T6bRiGXITgTgjayV0j4AzHiBx+HF3JdzygxIkWtn7yl1dJfzeqEGyrtBK6Sq1tTG2GoQcA==", "dev": true }, "@applitools/socket": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/@applitools/socket/-/socket-1.1.15.tgz", - "integrity": "sha512-NxFY+cWGjvDph1AUx0A2qSOLmObeKgHrPo59DBIOQr+Q6Rf9vZCrmeSNoi+q8VMGsNDaWMvRUDv3JAhiQOry9w==", + "version": "1.1.18", + "resolved": "https://registry.npmjs.org/@applitools/socket/-/socket-1.1.18.tgz", + "integrity": "sha512-EMI/MMfVH38ucuZhFWOTUR8cPvuoP9b+xi5yBJF8uLlJjxQEmGnvm+Pm3s9o3mfxQzDRddYGtpIo3TTZhMVZdQ==", "dev": true, "requires": { - "@applitools/logger": "2.0.15", - "@applitools/utils": "1.7.1" + "@applitools/logger": "2.0.18", + "@applitools/utils": "1.7.4" } }, "@applitools/spec-driver-puppeteer": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/@applitools/spec-driver-puppeteer/-/spec-driver-puppeteer-1.4.4.tgz", - "integrity": "sha512-Bj8dftrzQvjmHOLZy7ERixjoPP8tVCls07iA5pRd8wUziSVT7E71Id40oehef6jsdpgPRLoJ3EnuSabkusKNCA==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@applitools/spec-driver-puppeteer/-/spec-driver-puppeteer-1.4.11.tgz", + "integrity": "sha512-txxjl4jNWNppXLfA0yQNu9qOH/BpYEkK7idTessEONlJLYOcbH9sT2KFX0TfWAVyHRuin35cdux1o9lfGxapLQ==", "dev": true, "requires": { - "@applitools/driver": "1.16.6", - "@applitools/utils": "1.7.1" + "@applitools/driver": "1.18.0", + "@applitools/utils": "1.7.4" } }, "@applitools/spec-driver-webdriver": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/@applitools/spec-driver-webdriver/-/spec-driver-webdriver-1.1.4.tgz", - 
"integrity": "sha512-d8T9K+69Q9sF0h02Em+YOPqtkwH8VVPa5nsajC34t5N1RPJAE0asT3xB7gMPdPqWe88T7qTVMTHnFyHyL6Hzkw==", + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@applitools/spec-driver-webdriver/-/spec-driver-webdriver-1.1.11.tgz", + "integrity": "sha512-xeVeqiK+Oyi2xGRME54J3yTXUGR9d2NgcOCkXTdZ+QOj8iPzypelyeHkX4nKJNsLw4Ddh9uvaiFJmKppqGZ1Mg==", "dev": true, "requires": { - "@applitools/driver": "1.16.6", - "@applitools/utils": "1.7.1", + "@applitools/driver": "1.18.0", + "@applitools/utils": "1.7.4", "http-proxy-agent": "5.0.0", "https-proxy-agent": "5.0.1" - }, - "dependencies": { - "@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", - "dev": true - }, - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "requires": { - "ms": "2.1.2" - } - }, - "http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", - "dev": true, - "requires": { - "@tootallnate/once": "2", - "agent-base": "6", - "debug": "4" - } - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - } } }, "@applitools/tunnel-client": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/@applitools/tunnel-client/-/tunnel-client-1.5.1.tgz", - "integrity": "sha512-0ZcWS++S/t6MXuqHk9h003DCCymmRSX6hz4YhVXIcwSQjppWJzebvBITA8mhBdFPKEMhG8StDmO18bXYBRVkCQ==", + "version": 
"1.5.7", + "resolved": "https://registry.npmjs.org/@applitools/tunnel-client/-/tunnel-client-1.5.7.tgz", + "integrity": "sha512-h2/U2ZTDQp67Q/sU72eNx7dQms54yzfmM/Cordp2ZSQN9FAxt/NN22cUr8Qf+r71Uuu/VYlvzZUdMGl42MuKmA==", "dev": true, "requires": { - "@applitools/execution-grid-tunnel": "3.0.3", - "@applitools/logger": "2.0.15", - "@applitools/req": "1.6.6", - "@applitools/socket": "1.1.15", - "@applitools/utils": "1.7.1", + "@applitools/execution-grid-tunnel": "3.0.5", + "@applitools/logger": "2.0.18", + "@applitools/req": "1.7.2", + "@applitools/socket": "1.1.18", + "@applitools/utils": "1.7.4", "abort-controller": "3.0.0", "yargs": "17.7.2" } }, "@applitools/ufg-client": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/@applitools/ufg-client/-/ufg-client-1.11.1.tgz", - "integrity": "sha512-hEWuFtv8mucRg+b4HAhGuNrzNJnMYz2J4f36vkyzxvw23+E4QvpIgMzNV7mZcvSAibRuGU7HkScH/ZCooujlUA==", + "version": "1.12.3", + "resolved": "https://registry.npmjs.org/@applitools/ufg-client/-/ufg-client-1.12.3.tgz", + "integrity": "sha512-bSxLqxzAuc+ldum/nGoiM/iCcf97uku3bABxB90ilzUYT1DOu9vEGmaPxxGLDc+GRRVYlOYGNdIJF+DQP4dFTg==", "dev": true, "requires": { - "@applitools/css-tree": "1.1.2", - "@applitools/image": "1.1.10", - "@applitools/logger": "2.0.15", - "@applitools/req": "1.6.6", - "@applitools/utils": "1.7.1", + "@applitools/css-tree": "1.1.4", + "@applitools/image": "1.1.13", + "@applitools/logger": "2.0.18", + "@applitools/req": "1.7.2", + "@applitools/utils": "1.7.4", "@xmldom/xmldom": "0.8.10", "abort-controller": "3.0.0", "throat": "6.0.2" } }, "@applitools/utils": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/@applitools/utils/-/utils-1.7.1.tgz", - "integrity": "sha512-GrusNmsUhletygtGCrlTolkGD4oYGN9E7y+hdX5laeUSAkJ1kDrfbxqUZTuEaK5TDwL4vpWzF8cnOZeCA59+Zg==", + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/@applitools/utils/-/utils-1.7.4.tgz", + "integrity": 
"sha512-qgJqx2yjlJBf79YyFehf1nSp4AXOdzJn3POQyg8CMWV0YH6HsjAfJjYaNrbXFcGYCSpPEJGhGehxC7GVKHX3YA==", "dev": true }, "@aw-web-design/x-default-browser": { @@ -71309,12 +70686,6 @@ "@types/istanbul-lib-report": "*" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "ansi-styles": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", @@ -71450,15 +70821,6 @@ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "dev": true }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, "supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", @@ -71716,12 +71078,6 @@ "@types/istanbul-lib-report": "*" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "anymatch": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", @@ -71847,15 +71203,6 @@ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "dev": true }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - 
"ansi-regex": "^5.0.1" - } - }, "supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", @@ -72698,12 +72045,6 @@ "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==", "dev": true }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -73328,15 +72669,6 @@ "strip-ansi": "^6.0.1" } }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, "strip-bom": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", @@ -74478,24 +73810,25 @@ } }, "@puppeteer/browsers": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-1.9.1.tgz", - "integrity": "sha512-PuvK6xZzGhKPvlx3fpfdM2kYY3P/hB1URtK8wA7XUJ6prn6pp22zvJHu48th0SGcHL9SutbPHrFuQgfXTFobWA==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@puppeteer/browsers/-/browsers-2.3.0.tgz", + "integrity": "sha512-ioXoq9gPxkss4MYhD+SFaU9p1IHFUX0ILAWFPyjGaBdjLsYAlZw6j1iLA0N/m12uVHLFDfSYNF7EQccjinIMDA==", "dev": true, "requires": { - "debug": "4.3.4", - "extract-zip": "2.0.1", - "progress": "2.0.3", - "proxy-agent": "6.3.1", - "tar-fs": "3.0.4", - "unbzip2-stream": "1.4.3", - "yargs": "17.7.2" + "debug": "^4.3.5", + "extract-zip": "^2.0.1", + "progress": "^2.0.3", + "proxy-agent": "^6.4.0", + "semver": "^7.6.3", + "tar-fs": "^3.0.6", + "unbzip2-stream": "^1.4.3", + "yargs": 
"^17.7.2" }, "dependencies": { "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "requires": { "ms": "2.1.2" @@ -74506,6 +73839,12 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true + }, + "semver": { + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "dev": true } } }, @@ -75029,7 +74368,7 @@ "@storybook/react-dom-shim": "8.1.11", "@storybook/theming": "8.1.11", "@storybook/types": "8.1.11", - "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "@types/react": "^16.9.53", "fs-extra": "^11.1.0", "react": "^16.8.0 || ^17.0.0 || ^18.0.0", "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0", @@ -77961,12 +77300,6 @@ "type-fest": "^0.21.3" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "devOptional": true - }, "ansi-styles": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", @@ -78605,15 +77938,6 @@ "strip-ansi": "^6.0.1" } }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "devOptional": true, - 
"requires": { - "ansi-regex": "^5.0.1" - } - }, "strip-bom-buf": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-bom-buf/-/strip-bom-buf-3.0.1.tgz", @@ -79521,9 +78845,7 @@ }, "dependencies": { "dompurify": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.0.tgz", - "integrity": "sha512-yoU4rhgPKCo+p5UrWWWNKiIq+ToGqmVVhk0PmMYBK4kRsR3/qhemNFL8f6CFmBd4gMwm3F4T7HBoydP5uY07fA==" + "version": "3.1.0" } } }, @@ -80142,7 +79464,7 @@ "integrity": "sha512-UdEUtlQapQ579NEcXDAUE275u+KUsPtxW7NmFrNt0bE6lW8lqNCyxDK0RSuECmNZ/S0/fgP00W9RWRhVKO/hRg==", "requires": { "@babel/runtime": "^7.12.5", - "@types/react": ">=16.9.0", + "@types/react": "^16.9.53", "@types/react-dom": ">=16.9.0", "@types/react-test-renderer": ">=16.9.0", "filter-console": "^0.1.1", @@ -80167,6 +79489,12 @@ "@babel/runtime": "^7.12.5" } }, + "@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", + "dev": true + }, "@tootallnate/quickjs-emscripten": { "version": "0.23.0", "resolved": "https://registry.npmjs.org/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz", @@ -80542,7 +79870,7 @@ "integrity": "sha512-RaO/TyyHZvXkpzinbMTZmd/S5biU4zxkvDsn22ujC29t9FMSzq8tnn8f2MxQ2P8GVhFRG5jTAL05DXKyTtpEQQ==", "requires": { "@types/cheerio": "*", - "@types/react": "^16" + "@types/react": "^16.9.53" } }, "@types/enzyme-adapter-react-16": { @@ -80682,7 +80010,7 @@ "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz", "integrity": "sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA==", "requires": { - "@types/react": "*", + "@types/react": "^16.9.53", "hoist-non-react-statics": "^3.3.0" } }, @@ -80856,6 +80184,12 @@ "@types/lodash": "*" } }, + "@types/luxon": { + "version": 
"3.7.1", + "resolved": "https://registry.npmjs.org/@types/luxon/-/luxon-3.7.1.tgz", + "integrity": "sha512-H3iskjFIAn5SlJU7OuxUmTEpebK6TKB8rxZShDslBMZJ5u9S//KM1sbdAisiSrqwLQncVjnpi2OK2J51h+4lsg==", + "dev": true + }, "@types/mapbox-gl": { "version": "2.7.6", "resolved": "https://registry.npmjs.org/@types/mapbox-gl/-/mapbox-gl-2.7.6.tgz", @@ -80944,6 +80278,12 @@ "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" }, + "@types/raf": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/@types/raf/-/raf-3.4.3.tgz", + "integrity": "sha512-c4YAvMedbPZ5tEyxzQdMoOhhJ4RD3rngZIdwC2/qDN3d7JpEhB6fiBRKVY1lg5B7Wk+uPBjn5f39j1/2MY1oOw==", + "optional": true + }, "@types/range-parser": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", @@ -80964,7 +80304,7 @@ "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-16.9.8.tgz", "integrity": "sha512-ykkPQ+5nFknnlU6lDd947WbQ6TE3NNzbQAkInC2EKY1qeYdTKp7onFusmYZb+ityzx2YviqT6BXSu+LyWWJwcA==", "requires": { - "@types/react": "*" + "@types/react": "^16.9.53" } }, "@types/react-gravatar": { @@ -80973,7 +80313,7 @@ "integrity": "sha512-esbOXcvdGEJAsu1X8yHmArQ28Jo1gUmRZNVyA8MlEn7Z1mjj+9daHKiRoDQk61Y0kqbFGl75C4DOfUhb9uk5Tw==", "dev": true, "requires": { - "@types/react": "*" + "@types/react": "^16.9.53" } }, "@types/react-json-tree": { @@ -80982,7 +80322,7 @@ "integrity": "sha512-HP0Sf0ZHjCi1FHLJxh/pLaxaevEW6ILlV2C5Dn3EZFTkLjWkv+EVf/l/zvtmoU9ZwuO/3TKVeWK/700UDxunTw==", "dev": true, "requires": { - "@types/react": "*" + "@types/react": "^16.9.53" } }, "@types/react-jsonschema-form": { @@ -80992,7 +80332,7 @@ "dev": true, "requires": { "@types/json-schema": "*", - "@types/react": "*" + "@types/react": "^16.9.53" } }, "@types/react-loadable": { @@ -81000,7 +80340,7 @@ "resolved": 
"https://registry.npmjs.org/@types/react-loadable/-/react-loadable-5.5.6.tgz", "integrity": "sha512-2M7xH/wawZxNybbs/a76JkpUsMk4z6AxBh92cUtIBy2vK7EYYuitQbC4laY0hGz0e05R+mQ44YeHMtH2U+gMsw==", "requires": { - "@types/react": "*", + "@types/react": "^16.9.53", "@types/webpack": "^4" } }, @@ -81010,7 +80350,7 @@ "integrity": "sha512-bAGh4e+w5D8dajd6InASVIyCo4pZLJ66oLb80F9OBLO1gKESbZcRCJpTT6uLXX+HAB57zw1WTdwJdAsewuTweg==", "requires": { "@types/hoist-non-react-statics": "^3.3.0", - "@types/react": "*", + "@types/react": "^16.9.53", "hoist-non-react-statics": "^3.3.0", "redux": "^4.0.0" } @@ -81022,7 +80362,7 @@ "dev": true, "requires": { "@types/history": "*", - "@types/react": "*" + "@types/react": "^16.9.53" } }, "@types/react-router-dom": { @@ -81032,7 +80372,7 @@ "dev": true, "requires": { "@types/history": "^4.7.11", - "@types/react": "*", + "@types/react": "^16.9.53", "@types/react-router": "*" } }, @@ -81042,7 +80382,7 @@ "integrity": "sha512-ZqIJl+Pg8kD+47kxUjvrlElrraSUrYa4h0dauY/U/FTUuprSCqvUj+9PNQNQzVc6AJgIWUUxn87/gqsMHNbRjw==", "dev": true, "requires": { - "@types/react": "*" + "@types/react": "^16.9.53" } }, "@types/react-table": { @@ -81051,7 +80391,7 @@ "integrity": "sha512-47jMa1Pai7ily6BXJCW33IL5ghqmCWs2VM9s+h1D4mCaK5P4uNkZOW3RMMg8MCXBvAJ0v9+sPqKjhid0PaJPQA==", "dev": true, "requires": { - "@types/react": "*" + "@types/react": "^16.9.53" } }, "@types/react-test-renderer": { @@ -81059,7 +80399,7 @@ "resolved": "https://registry.npmjs.org/@types/react-test-renderer/-/react-test-renderer-17.0.1.tgz", "integrity": "sha512-3Fi2O6Zzq/f3QR9dRnlnHso9bMl7weKCviFmfF6B4LS1Uat6Hkm15k0ZAQuDz+UBq6B3+g+NM6IT2nr5QgPzCw==", "requires": { - "@types/react": "*" + "@types/react": "^16.9.53" } }, "@types/react-transition-group": { @@ -81068,7 +80408,7 @@ "integrity": "sha512-hT/+s0VQs2ojCX823m60m5f0sL5idt9SO6Tj6Dg+rdphGPIeJbJ6CxvBYkgkGKrYeDjvIpKTR38UzmtHJOGW3Q==", "dev": true, "requires": { - "@types/react": "*" + "@types/react": "^16.9.53" } }, 
"@types/react-ultimate-pagination": { @@ -81077,7 +80417,7 @@ "integrity": "sha512-xFyJn6Jl26Q0bi+QTnLo4W5tCDKOGNU5Gn9iCg+Y6J+VqtuKuJ1wcP1Ax+nXAu5HF9qTgApI/hRn7ceCDC6TAA==", "dev": true, "requires": { - "@types/react": "*" + "@types/react": "^16.9.53" } }, "@types/react-virtualized-auto-sizer": { @@ -81086,7 +80426,7 @@ "integrity": "sha512-nhYwlFiYa8M3S+O2T9QO/e1FQUYMr/wJENUdf/O0dhRi1RS/93rjrYQFYdbUqtdFySuhrtnEDX29P6eKOttY+A==", "dev": true, "requires": { - "@types/react": "*" + "@types/react": "^16.9.53" } }, "@types/react-window": { @@ -81095,7 +80435,7 @@ "integrity": "sha512-8Ls660bHR1AUA2kuRvVG9D/4XpRC6wjAaPT9dil7Ckc76eP9TKWZwwmgfq8Q1LANX3QNDnoU4Zp48A3w+zK69Q==", "dev": true, "requires": { - "@types/react": "*" + "@types/react": "^16.9.53" } }, "@types/redux-localstorage": { @@ -81692,7 +81032,7 @@ "resolved": "https://registry.npmjs.org/@visx/annotation/-/annotation-3.3.0.tgz", "integrity": "sha512-v0htpd/sT1kdU1N7frqmj078UByJXUwPQJT9LENv0ypssjGyRgvZERjkgSUuMKMjZquOBs/f6XOzxF4mLV57sA==", "requires": { - "@types/react": "*", + "@types/react": "^16.9.53", "@visx/drag": "3.3.0", "@visx/group": "3.3.0", "@visx/text": "3.3.0", @@ -81706,7 +81046,7 @@ "resolved": "https://registry.npmjs.org/@visx/axis/-/axis-3.8.0.tgz", "integrity": "sha512-CFIxPnRlIWIz8N+5n4DTSOQQ2Yb0D35YPylEkmk/c7J4haLCEhyI44JaOg6OYOk6ofCOsu9Fqe6dFAOP+MP1IQ==", "requires": { - "@types/react": "*", + "@types/react": "^16.9.53", "@visx/group": "3.3.0", "@visx/point": "3.3.0", "@visx/scale": "3.5.0", @@ -81721,7 +81061,7 @@ "resolved": "https://registry.npmjs.org/@visx/bounds/-/bounds-3.0.0.tgz", "integrity": "sha512-YQaSSER9erxlhppzRms6cvYdKqcIwk6eksrGdbJkBoHobhPo1JCIUXlmrA4qgrEnXInPJpueGE+PE5F+Dk12DA==", "requires": { - "@types/react": "*", + "@types/react": "^16.9.53", "@types/react-dom": "*", "prop-types": "^15.5.10" } @@ -81740,7 +81080,7 @@ "resolved": "https://registry.npmjs.org/@visx/drag/-/drag-3.3.0.tgz", "integrity": 
"sha512-fLNsorq6GyANCqAE/dToG0q7YoGVxihGC9FZQUp0MCV1wMJIJ45ximhrl5NDng2ytbpWnBmXu8M8hdsdFuvIXw==", "requires": { - "@types/react": "*", + "@types/react": "^16.9.53", "@visx/event": "3.3.0", "@visx/point": "3.3.0", "prop-types": "^15.5.10" @@ -81751,7 +81091,7 @@ "resolved": "https://registry.npmjs.org/@visx/event/-/event-3.3.0.tgz", "integrity": "sha512-fKalbNgNz2ooVOTXhvcOx5IlEQDgVfX66rI7bgZhBxI2/scy+5rWcXJXpwkheRF68SMx9R93SjKW6tmiD0h+jA==", "requires": { - "@types/react": "*", + "@types/react": "^16.9.53", "@visx/point": "3.3.0" } }, @@ -81761,7 +81101,7 @@ "integrity": "sha512-U2r1rFLpim3afKuuAmrbxXGSDCaLwXHmjXxWN8PiIQPMxpS7eaa/V5g2TRd/+x0KCkaf3Ismk4VKMl8ZlrmxIQ==", "requires": { "@types/d3-shape": "^1.3.1", - "@types/react": "*", + "@types/react": "^16.9.53", "@visx/group": "3.3.0", "classnames": "^2.3.1", "d3-shape": "^1.2.0", @@ -81773,7 +81113,7 @@ "resolved": "https://registry.npmjs.org/@visx/grid/-/grid-3.5.0.tgz", "integrity": "sha512-i1pdobTE223ItMiER3q4ojIaZWja3vg46TkS6FotnBZ4c0VRDHSrALQPdi0na+YEgppASWCQ2WrI/vD6mIkhSg==", "requires": { - "@types/react": "*", + "@types/react": "^16.9.53", "@visx/curve": "3.3.0", "@visx/group": "3.3.0", "@visx/point": "3.3.0", @@ -81788,7 +81128,7 @@ "resolved": "https://registry.npmjs.org/@visx/group/-/group-3.3.0.tgz", "integrity": "sha512-yKepDKwJqlzvnvPS0yDuW13XNrYJE4xzT6xM7J++441nu6IybWWwextyap8ey+kU651cYDb+q1Oi6aHvQwyEyw==", "requires": { - "@types/react": "*", + "@types/react": "^16.9.53", "classnames": "^2.3.1", "prop-types": "^15.6.2" } @@ -81803,7 +81143,7 @@ "resolved": "https://registry.npmjs.org/@visx/react-spring/-/react-spring-3.8.0.tgz", "integrity": "sha512-swWhgUd/7DoZPnqlJg5BDO/sv424swo26ZwAn64F8TuPjoBlqq/ZoqYxAtx1sFo//3VIVSl3rqICvM3X6tmTQg==", "requires": { - "@types/react": "*", + "@types/react": "^16.9.53", "@visx/axis": "3.8.0", "@visx/grid": "3.5.0", "@visx/scale": "3.5.0", @@ -81818,7 +81158,7 @@ "integrity": 
"sha512-Y3Bgrh6cJ760lG6yXsxJRNCmYZAHKQqSmTG2qxJ8yImledieGEqI0ZizXJgFkxoBaZK5gSMvFsmFWKtf7a86kQ==", "requires": { "@types/lodash": "^4.14.172", - "@types/react": "*", + "@types/react": "^16.9.53", "lodash": "^4.17.21", "prop-types": "^15.6.1" } @@ -81839,7 +81179,7 @@ "@types/d3-path": "^1.0.8", "@types/d3-shape": "^1.3.1", "@types/lodash": "^4.14.172", - "@types/react": "*", + "@types/react": "^16.9.53", "@visx/curve": "3.3.0", "@visx/group": "3.3.0", "@visx/scale": "3.5.0", @@ -81856,7 +81196,7 @@ "integrity": "sha512-fOimcsf0GtQE9whM5MdA/xIkHMaV29z7qNqNXysUDE8znSMKsN+ott7kSg2ljAEE89CQo3WKHkPNettoVsa84w==", "requires": { "@types/lodash": "^4.14.172", - "@types/react": "*", + "@types/react": "^16.9.53", "classnames": "^2.3.1", "lodash": "^4.17.21", "prop-types": "^15.7.2", @@ -81868,7 +81208,7 @@ "resolved": "https://registry.npmjs.org/@visx/tooltip/-/tooltip-3.0.0.tgz", "integrity": "sha512-a+ZzlE/vVxQgW83k/Ypj721K09IKG4JRHVb7YDxiQnAawkJe9rkTxGoAIXD6PrqvERa+rSISgUWHAxuee5MnhA==", "requires": { - "@types/react": "*", + "@types/react": "^16.9.53", "@visx/bounds": "3.0.0", "classnames": "^2.3.1", "prop-types": "^15.5.10", @@ -81991,7 +81331,7 @@ "integrity": "sha512-twVovwdcEZZRkFV+x6yXGcyohf7gYHz0y/dFoS0SfGtEwT+CMEQgfO3ZDNQjU8X/m8je/wu9qtd35Zur+0RYOQ==", "requires": { "@types/d3-voronoi": "^1.1.9", - "@types/react": "*", + "@types/react": "^16.9.53", "classnames": "^2.3.1", "d3-voronoi": "^1.1.2", "prop-types": "^15.6.1" @@ -82003,7 +81343,7 @@ "integrity": "sha512-dbg1sV/mzNbUZGz7JFpWW/NP+LYnCX1Evhijv7FbinryWkeSu9vOl3ErkyhvRC/CnMxUcu6lsLcTLncTifEXFw==", "requires": { "@types/lodash": "^4.14.172", - "@types/react": "*", + "@types/react": "^16.9.53", "@visx/annotation": "3.3.0", "@visx/axis": "3.8.0", "@visx/event": "3.3.0", @@ -82030,7 +81370,7 @@ "resolved": "https://registry.npmjs.org/@visx/tooltip/-/tooltip-3.3.0.tgz", "integrity": "sha512-0ovbxnvAphEU/RVJprWHdOJT7p3YfBDpwXclXRuhIY2EkH59g8sDHatDcYwiNPeqk61jBh1KACRZxqToMuutlg==", "requires": { - "@types/react": 
"*", + "@types/react": "^16.9.53", "@visx/bounds": "3.3.0", "classnames": "^2.3.1", "prop-types": "^15.5.10", @@ -82042,7 +81382,7 @@ "resolved": "https://registry.npmjs.org/@visx/bounds/-/bounds-3.3.0.tgz", "integrity": "sha512-gESmN+4N2NkeUzqQEDZaS63umkGfMp9XjQcKBqtOR64mjjQtamh3lNVRWvKjJ2Zb421RbYHWq22Wv9nay6ZUOg==", "requires": { - "@types/react": "*", + "@types/react": "^16.9.53", "@types/react-dom": "*", "prop-types": "^15.5.10" } @@ -82180,7 +81520,7 @@ "integrity": "sha512-ONrmLUAG+8wzD3cn/EmsuZh6JHeyejqup3ZsV25t04VaVJAVQAJukAfNdH8YiwSJu0zSo+txkBTfrnOmFyQLOw==", "requires": { "@types/lodash": "^4.14.146", - "@types/react": "*", + "@types/react": "^16.9.53", "lodash": "^4.17.10", "prop-types": "^15.6.1", "resize-observer-polyfill": "1.5.1" @@ -82456,23 +81796,6 @@ "loglevel": "^1.6.0", "loglevel-plugin-prefix": "^0.8.4", "strip-ansi": "^6.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - } } }, "@wdio/protocols": { @@ -82492,9 +81815,9 @@ }, "dependencies": { "@types/node": { - "version": "18.19.31", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.31.tgz", - "integrity": "sha512-ArgCD39YpyyrtFKIqMDvjz79jto5fcI/SVUs2HwB+f0dAzq68yqOdyaSivLiLugSziTpNXLQrVb7RZFmdZzbhA==", + "version": "18.19.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.42.tgz", + "integrity": "sha512-d2ZFc/3lnK2YCYhos8iaNIYu9Vfhr92nHiyJHRltXWjXUBjEE+A4I58Tdbnw4VhggSW+2j5y5gTrLs4biNnubg==", "dev": true, "requires": { "undici-types": 
"~5.26.4" @@ -82942,12 +82265,6 @@ "string-width": "^4.1.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -82964,15 +82281,6 @@ "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } } } }, @@ -82999,6 +82307,11 @@ "integrity": "sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==", "dev": true }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -83897,10 +83210,7 @@ "dev": true }, "atob": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", - "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==", - "dev": true + "version": "2.1.2" }, "atomic-sleep": { "version": "1.0.0", @@ -83926,9 +83236,9 @@ }, "dependencies": { "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + 
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "requires": { "ms": "2.1.2" @@ -83999,8 +83309,7 @@ "b4a": { "version": "1.6.6", "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz", - "integrity": "sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==", - "dev": true + "integrity": "sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg==" }, "babel-core": { "version": "7.0.0-bridge.0", @@ -84496,6 +83805,45 @@ "integrity": "sha512-h7z00dWdG0PYOQEvChhOSWvOfkIKsdZGkWr083FgN/HyoQuebSew/cgirYqh9SCuy/hRvxc5Vy6Fw8xAmYHLkQ==", "optional": true }, + "bare-fs": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-2.3.1.tgz", + "integrity": "sha512-W/Hfxc/6VehXlsgFtbB5B4xFcsCl+pAh30cYhoFyXErf6oGrwjh8SwiPAdHgpmWonKuYpZgGywN0SXt7dgsADA==", + "dev": true, + "optional": true, + "requires": { + "bare-events": "^2.0.0", + "bare-path": "^2.0.0", + "bare-stream": "^2.0.0" + } + }, + "bare-os": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-2.4.0.tgz", + "integrity": "sha512-v8DTT08AS/G0F9xrhyLtepoo9EJBJ85FRSMbu1pQUlAf6A8T0tEEQGMVObWeqpjhSPXsE0VGlluFBJu2fdoTNg==", + "dev": true, + "optional": true + }, + "bare-path": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/bare-path/-/bare-path-2.1.3.tgz", + "integrity": "sha512-lh/eITfU8hrj9Ru5quUp0Io1kJWIk1bTjzo7JH1P5dWmQ2EL4hFUlfI8FonAhSlgIfhn63p84CDY/x+PisgcXA==", + "dev": true, + "optional": true, + "requires": { + "bare-os": "^2.1.0" + } + }, + "bare-stream": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/bare-stream/-/bare-stream-2.1.3.tgz", + "integrity": "sha512-tiDAH9H/kP+tvNO5sczyn9ZAA7utrSMobyDchsnyyXBuUe2FSQWbxhtuHB8jwpHYYevVo2UJpcmvvjrbHboUUQ==", + "dev": true, + "optional": true, + "requires": 
{ + "streamx": "^2.18.0" + } + }, "base": { "version": "0.11.2", "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", @@ -84562,6 +83910,12 @@ "resolved": "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz", "integrity": "sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ==" }, + "base64-arraybuffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz", + "integrity": "sha512-I3yl4r9QB5ZRY3XuJVEPfc2XhZO6YweFPI+UovAzn+8/hb3oJ6lnysaFcjVpkCPfVWFUDvoZ8kmVDP7WyRtYtQ==", + "optional": true + }, "base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -84900,6 +84254,11 @@ "node-int64": "^0.4.0" } }, + "btoa": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/btoa/-/btoa-1.2.1.tgz", + "integrity": "sha512-SB4/MIGlsiVkMcHmT+pSmIPoNDoHg+7cMzmt3Uxt628MTz2487DKSqK/fuhFBrkuqrYv5UCEnACpF4dTFNKc/g==" + }, "buf-compare": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/buf-compare/-/buf-compare-1.0.1.tgz", @@ -85283,6 +84642,30 @@ "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001639.tgz", "integrity": "sha512-eFHflNTBIlFwP2AIKaYuBQN/apnUoKNhBdza8ZnW/h2di4LCZ4xFqYlxUxo+LQ76KFI1PGcC1QDxMbxTZpSCAg==" }, + "canvg": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/canvg/-/canvg-3.0.10.tgz", + "integrity": "sha512-qwR2FRNO9NlzTeKIPIKpnTY6fqwuYSequ8Ru8c0YkYU7U0oW+hLUvWadLvAu1Rl72OMNiFhoLu4f8eUjQ7l/+Q==", + "optional": true, + "requires": { + "@babel/runtime": "^7.12.5", + "@types/raf": "^3.4.0", + "core-js": "^3.8.3", + "raf": "^3.4.1", + "regenerator-runtime": "^0.13.7", + "rgbcolor": "^1.0.1", + "stackblur-canvas": "^2.0.0", + "svg-pathdata": "^6.0.3" + }, + "dependencies": { + "regenerator-runtime": { + "version": "0.13.11", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", + "integrity": 
"sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==", + "optional": true + } + } + }, "capture-exit": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/capture-exit/-/capture-exit-2.0.0.tgz", @@ -85520,13 +84903,14 @@ } }, "chromium-bidi": { - "version": "0.5.8", - "resolved": "https://registry.npmjs.org/chromium-bidi/-/chromium-bidi-0.5.8.tgz", - "integrity": "sha512-blqh+1cEQbHBKmok3rVJkBlBxt9beKBgOsxbFgs7UJcoVbbeZ+K7+6liAsjgpc8l1Xd55cQUy14fXZdGSb4zIw==", + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/chromium-bidi/-/chromium-bidi-0.6.3.tgz", + "integrity": "sha512-qXlsCmpCZJAnoTYI83Iu6EdYQpMYdVkCfq08KDh2pmlVqK5t5IA9mGs4/LwCwp4fqisSOMXZxP3HIh8w8aRn0A==", "dev": true, "requires": { "mitt": "3.0.1", - "urlpattern-polyfill": "10.0.0" + "urlpattern-polyfill": "10.0.0", + "zod": "3.23.8" }, "dependencies": { "mitt": { @@ -85652,12 +85036,6 @@ "string-width": "^4.2.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -85674,15 +85052,6 @@ "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } } } }, @@ -85697,13 +85066,6 @@ "string-width": "^4.2.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": 
"sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "peer": true - }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -85734,16 +85096,6 @@ "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "peer": true, - "requires": { - "ansi-regex": "^5.0.1" - } } } }, @@ -85763,11 +85115,6 @@ "wrap-ansi": "^7.0.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -85783,14 +85130,6 @@ "strip-ansi": "^6.0.1" } }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - }, "wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -85938,23 +85277,6 @@ "requires": { "strip-ansi": "^6.0.1", "wcwidth": "^1.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, - "strip-ansi": { - "version": "6.0.1", - 
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - } } }, "combined-stream": { @@ -86621,48 +85943,6 @@ "cross-spawn": "^6.0.5" } }, - "cross-fetch": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/cross-fetch/-/cross-fetch-4.0.0.tgz", - "integrity": "sha512-e4a5N8lVvuLgAWgnCrLr2PP0YyDOTHa9H/Rj54dirp61qXnNq46m82bRhNqIA5VccJtWBvPTFRV3TtvHUKPB1g==", - "dev": true, - "requires": { - "node-fetch": "^2.6.12" - }, - "dependencies": { - "node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "dev": true, - "requires": { - "whatwg-url": "^5.0.0" - } - }, - "tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "dev": true - }, - "webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "dev": true - }, - "whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dev": true, - "requires": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - } - } - }, "cross-spawn": { "version": "6.0.5", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", @@ -86692,6 +85972,15 @@ "isobject": "^3.0.1" } }, + "css-line-break": { + "version": "2.1.0", 
+ "resolved": "https://registry.npmjs.org/css-line-break/-/css-line-break-2.1.0.tgz", + "integrity": "sha512-FHcKFCZcAha3LwfVBhCQbW2nCNbkZXn7KVUJcsT5/P8YmfsVja0FMPJr0B903j/E69HUphKiV9iQArX8SDYA4w==", + "optional": true, + "requires": { + "utrie": "^1.0.2" + } + }, "css-loader": { "version": "6.8.1", "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-6.8.1.tgz", @@ -87603,9 +86892,9 @@ "dev": true }, "dayjs": { - "version": "1.11.11", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.11.tgz", - "integrity": "sha512-okzr3f11N6WuqYtZSvm+F776mB41wRZMhKP+hc34YdW+KmtYYK9iqvHSwo2k9FEH3fhGXvOPV6yz2IcSrfRUDg==" + "version": "1.11.13", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.13.tgz", + "integrity": "sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==" }, "deasync": { "version": "0.1.29", @@ -88034,9 +87323,9 @@ "optional": true }, "tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==", + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", + "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==", "dev": true } } @@ -88215,9 +87504,9 @@ } }, "devtools-protocol": { - "version": "0.0.1232444", - "resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1232444.tgz", - "integrity": "sha512-pM27vqEfxSxRkTMnF+XCmxSEb6duO5R+t8A9DEEJgy4Wz2RVanje2mmj99B6A3zv2r/qGfYlOvYznUhuokizmg==", + "version": "0.0.1312386", + "resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1312386.tgz", + "integrity": "sha512-DPnhUXvmvKT2dFA/j7B+riVLUt9Q6RKJlcppojL5CoRywJJKLDYnRlw0gTFKfgDPHP5E04UoB71SxoJlVZy8FA==", "dev": true }, "diff": { @@ -88333,11 +87622,24 @@ "entities": "^4.2.0" } }, + "dom-to-image": { + "version": 
"git+ssh://git@github.com/dmapper/dom-to-image.git#a7c386a8ea813930f05449ac71ab4be0c262dff3", + "from": "dom-to-image@git+https://github.com/dmapper/dom-to-image.git" + }, "dom-to-image-more": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/dom-to-image-more/-/dom-to-image-more-3.2.0.tgz", "integrity": "sha512-2bGQTB6m17MBseVhIjShwZqqqCyVS9GgTykWqvVXMqr56fSgHhXnEvZfZkaSuHJYW3ICZQ3sZwAu+UY5tfsF9Q==" }, + "dom-to-pdf": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/dom-to-pdf/-/dom-to-pdf-0.3.2.tgz", + "integrity": "sha512-eHLQ/IK+2PQlRjybQ9UHYwpiTd/YZFKqGFyRCjVvi6CPlH58drWQnxf7HBCVRUyAjOtI3RG0kvLidPhC7dOhcQ==", + "requires": { + "dom-to-image": "git+https://github.com/dmapper/dom-to-image.git", + "jspdf": "^2.5.1" + } + }, "dom-walk": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.1.tgz", @@ -88356,6 +87658,12 @@ "domelementtype": "^2.3.0" } }, + "dompurify": { + "version": "2.5.7", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.5.7.tgz", + "integrity": "sha512-2q4bEI+coQM8f5ez7kt2xclg1XsecaV9ASJk/54vwlfRRNQfDqJz2pzQ8t0Ix/ToBpXlVjrRIx7pFC/o8itG2Q==", + "optional": true + }, "domutils": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", @@ -89146,12 +88454,6 @@ "text-table": "^0.2.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -89327,15 +88629,6 @@ "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "dev": true }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": 
"sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, "type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -90471,6 +89764,12 @@ "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" }, + "fast-uri": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.1.tgz", + "integrity": "sha512-MWipKbbYiYI0UC7cl8m/i/IWTqfC8YXsqjzybjddLsFjStroQzsHXkc73JutMvBiXmOvapk+axIl79ig5t55Bw==", + "dev": true + }, "fast-xml-parser": { "version": "4.2.7", "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.7.tgz", @@ -90509,28 +89808,10 @@ "tiny-lru": "^8.0.1" }, "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, "semver": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", - "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "dev": true } } @@ -90642,6 +89923,11 @@ 
"resolved": "https://registry.npmjs.org/fetch-retry/-/fetch-retry-6.0.0.tgz", "integrity": "sha512-BUFj1aMubgib37I3v4q78fYo63Po7t4HUPTpQ6/QE6yK6cIQrP+W43FYToeTEyg5m2Y7eFUtijUuAv/PDlWuag==" }, + "fflate": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz", + "integrity": "sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==" + }, "figures": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", @@ -90820,9 +90106,9 @@ "dev": true }, "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "requires": { "ms": "2.1.2" @@ -92111,12 +91397,6 @@ "yargs": "^16.2.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "cliui": { "version": "7.0.4", "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", @@ -92163,15 +91443,6 @@ "strip-ansi": "^6.0.1" } }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, "wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -92265,9 +91536,9 @@ }, "dependencies": { "debug": { - "version": "4.3.4", - "resolved": 
"https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "requires": { "ms": "2.1.2" @@ -93542,6 +92813,16 @@ } } }, + "html2canvas": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/html2canvas/-/html2canvas-1.4.1.tgz", + "integrity": "sha512-fPU6BHNpsyIhr8yyMpTLLxAbkaK8ArIBcmZIRiBLiDhjeqvXolaEmDGmELFuX9I4xDcaKKcJl+TKZLqruBbmWA==", + "optional": true, + "requires": { + "css-line-break": "^2.1.0", + "text-segmentation": "^1.0.3" + } + }, "htmlparser2": { "version": "8.0.2", "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz", @@ -93593,6 +92874,34 @@ "requires-port": "^1.0.0" } }, + "http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "dev": true, + "requires": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "dependencies": { + "debug": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, "http-proxy-middleware": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.6.tgz", @@ -94230,12 
+93539,6 @@ "wrap-ansi": "^7.0.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -94262,15 +93565,6 @@ "strip-ansi": "^6.0.1" } }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, "tslib": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.1.tgz", @@ -96347,12 +95641,6 @@ "chalk": "^4.0.0" } }, - "@tootallnate/once": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", - "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==", - "dev": true - }, "@types/istanbul-reports": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", @@ -96395,15 +95683,6 @@ "whatwg-url": "^11.0.0" } }, - "debug": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", - "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", - "dev": true, - "requires": { - "ms": "2.1.2" - } - }, "domexception": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/domexception/-/domexception-4.0.0.tgz", @@ -96457,17 +95736,6 @@ "whatwg-encoding": "^2.0.0" } }, - "http-proxy-agent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", - 
"integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", - "dev": true, - "requires": { - "@tootallnate/once": "2", - "agent-base": "6", - "debug": "4" - } - }, "iconv-lite": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", @@ -96511,12 +95779,6 @@ "xml-name-validator": "^4.0.0" } }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, "saxes": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz", @@ -96804,12 +96066,6 @@ "@types/istanbul-lib-report": "*" } }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "dateformat": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.2.tgz", @@ -96821,15 +96077,6 @@ "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", "dev": true - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } } } }, @@ -98543,6 +97790,21 @@ "through": ">=2.2.7 <3" } }, + "jspdf": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jspdf/-/jspdf-2.5.2.tgz", + "integrity": "sha512-myeX9c+p7znDWPk0eTrujCzNjT+CXdXyk7YmJq5nD5V7uLLKmSXnlQ/Jn/kuo3X09Op70Apm0rQSnFWyGK8uEQ==", + "requires": { + "@babel/runtime": "^7.23.2", + "atob": "^2.1.2", 
+ "btoa": "^1.2.1", + "canvg": "^3.0.6", + "core-js": "^3.6.0", + "dompurify": "^2.5.4", + "fflate": "^0.8.1", + "html2canvas": "^1.0.0-rc.5" + } + }, "jsprim": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", @@ -99102,12 +98364,6 @@ "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==", "dev": true }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -99732,15 +98988,6 @@ "strip-ansi": "^6.0.1" } }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, "strip-bom": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", @@ -100095,15 +99342,15 @@ }, "dependencies": { "ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", "dev": true, "requires": { - "fast-deep-equal": "^3.1.1", + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" + "require-from-string": "^2.0.2" } }, "json-schema-traverse": { @@ -100141,13 +99388,6 @@ "wrap-ansi": "^7.0.0" }, 
"dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "peer": true - }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -100167,16 +99407,6 @@ "strip-ansi": "^6.0.1" } }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "peer": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, "wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -100457,11 +99687,9 @@ } }, "luxon": { - "version": "3.4.4", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.4.4.tgz", - "integrity": "sha512-zobTr7akeGHnv7eBOXcRgMeCP6+uyYsczwmeRCauvpvaAltgNyTbLH/+VaEAPUeWBT+1GuNmz4wC/6jtQzbbVA==", - "optional": true, - "peer": true + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.6.1.tgz", + "integrity": "sha512-tJLxrKJhO2ukZ5z0gyjY1zPh3Rh88Ej9P7jNrZiHMUXHae1yvI2imgOZtL1TO8TW6biMMKfTtAOoEJANgtWBMQ==" }, "lz-string": { "version": "1.5.0", @@ -104925,12 +104153,6 @@ "yargs-parser": "21.1.1" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "brace-expansion": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", @@ -105035,15 +104257,6 @@ "strip-ansi": "^6.0.1" } }, - "strip-ansi": { - "version": "6.0.1", - "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, "tsconfig-paths": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz", @@ -105099,13 +104312,6 @@ "yargs": "^15.0.2" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true, - "peer": true - }, "cliui": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", @@ -105248,16 +104454,6 @@ "strip-ansi": "^6.0.1" } }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "peer": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, "yargs": { "version": "15.4.1", "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", @@ -105702,23 +104898,6 @@ "log-symbols": "^4.1.0", "strip-ansi": "^6.0.0", "wcwidth": "^1.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "devOptional": true - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "devOptional": true, - "requires": { - "ansi-regex": "^5.0.1" - } - } } }, "os-tmpdir": { @@ -105842,9 +105021,9 @@ } }, 
"pac-proxy-agent": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/pac-proxy-agent/-/pac-proxy-agent-7.0.1.tgz", - "integrity": "sha512-ASV8yU4LLKBAjqIPMbrgtaKIvxQri/yh2OpI+S6hVa9JRkUI3Y3NPFbfngDtY7oFtSMD3w31Xns89mDa3Feo5A==", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/pac-proxy-agent/-/pac-proxy-agent-7.0.2.tgz", + "integrity": "sha512-BFi3vZnO9X5Qt6NRz7ZOaPja3ic0PhlsmCRYLOpN11+mWBCR6XJDqW5RF3j8jm4WGGQZtBA+bTfxYzeKW73eHg==", "dev": true, "requires": { "@tootallnate/quickjs-emscripten": "^0.23.0", @@ -105852,9 +105031,9 @@ "debug": "^4.3.4", "get-uri": "^6.0.1", "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.2", - "pac-resolver": "^7.0.0", - "socks-proxy-agent": "^8.0.2" + "https-proxy-agent": "^7.0.5", + "pac-resolver": "^7.0.1", + "socks-proxy-agent": "^8.0.4" }, "dependencies": { "agent-base": { @@ -105867,9 +105046,9 @@ } }, "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "requires": { "ms": "2.1.2" @@ -105886,9 +105065,9 @@ } }, "https-proxy-agent": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", - "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", + "integrity": "sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", "dev": true, "requires": { "agent-base": "^7.0.2", @@ -106332,9 +105511,7 @@ "dev": true }, "performance-now": { - "version": "2.1.0", - 
"resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" + "version": "2.1.0" }, "periscopic": { "version": "3.1.0", @@ -106892,11 +106069,6 @@ "react-is": "^17.0.1" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "react-is": { "version": "17.0.2", "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", @@ -107087,15 +106259,15 @@ } }, "proxy-agent": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/proxy-agent/-/proxy-agent-6.3.1.tgz", - "integrity": "sha512-Rb5RVBy1iyqOtNl15Cw/llpeLH8bsb37gM1FUfKQ+Wck6xHlbAhWGUFiTRHtkjqGTA5pSHz6+0hrPW/oECihPQ==", + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/proxy-agent/-/proxy-agent-6.4.0.tgz", + "integrity": "sha512-u0piLU+nCOHMgGjRbimiXmA9kM/L9EHh3zL81xCdp7m+Y2pHIsnmbdDoEDoAz5geaonNR6q6+yOPQs6n4T6sBQ==", "dev": true, "requires": { "agent-base": "^7.0.2", "debug": "^4.3.4", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.2", + "http-proxy-agent": "^7.0.1", + "https-proxy-agent": "^7.0.3", "lru-cache": "^7.14.1", "pac-proxy-agent": "^7.0.1", "proxy-from-env": "^1.1.0", @@ -107112,9 +106284,9 @@ } }, "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "requires": { "ms": "2.1.2" @@ -107131,9 +106303,9 @@ } }, "https-proxy-agent": { - "version": "7.0.4", - "resolved": 
"https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.4.tgz", - "integrity": "sha512-wlwpilI7YdjSkWaQ/7omYBMTliDcmCN8OLihO6I9B86g06lMyAoqgoDpV0XqoaPOKj+0DIdAvnsWfyAAhmimcg==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", + "integrity": "sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", "dev": true, "requires": { "agent-base": "^7.0.2", @@ -107218,14 +106390,15 @@ "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==" }, "puppeteer": { - "version": "21.11.0", - "resolved": "https://registry.npmjs.org/puppeteer/-/puppeteer-21.11.0.tgz", - "integrity": "sha512-9jTHuYe22TD3sNxy0nEIzC7ZrlRnDgeX3xPkbS7PnbdwYjl2o/z/YuCrRBwezdKpbTDTJ4VqIggzNyeRcKq3cg==", + "version": "22.15.0", + "resolved": "https://registry.npmjs.org/puppeteer/-/puppeteer-22.15.0.tgz", + "integrity": "sha512-XjCY1SiSEi1T7iSYuxS82ft85kwDJUS7wj1Z0eGVXKdtr5g4xnVcbjwxhq5xBnpK/E7x1VZZoJDxpjAOasHT4Q==", "dev": true, "requires": { - "@puppeteer/browsers": "1.9.1", - "cosmiconfig": "9.0.0", - "puppeteer-core": "21.11.0" + "@puppeteer/browsers": "2.3.0", + "cosmiconfig": "^9.0.0", + "devtools-protocol": "0.0.1312386", + "puppeteer-core": "22.15.0" }, "dependencies": { "argparse": { @@ -107268,9 +106441,9 @@ } }, "typescript": { - "version": "5.4.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", - "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", + "version": "5.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", + "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", "dev": true, "optional": true, "peer": true @@ -107278,23 +106451,22 @@ } }, "puppeteer-core": { - "version": "21.11.0", - "resolved": 
"https://registry.npmjs.org/puppeteer-core/-/puppeteer-core-21.11.0.tgz", - "integrity": "sha512-ArbnyA3U5SGHokEvkfWjW+O8hOxV1RSJxOgriX/3A4xZRqixt9ZFHD0yPgZQF05Qj0oAqi8H/7stDorjoHY90Q==", + "version": "22.15.0", + "resolved": "https://registry.npmjs.org/puppeteer-core/-/puppeteer-core-22.15.0.tgz", + "integrity": "sha512-cHArnywCiAAVXa3t4GGL2vttNxh7GqXtIYGym99egkNJ3oG//wL9LkvO4WE8W1TJe95t1F1ocu9X4xWaGsOKOA==", "dev": true, "requires": { - "@puppeteer/browsers": "1.9.1", - "chromium-bidi": "0.5.8", - "cross-fetch": "4.0.0", - "debug": "4.3.4", - "devtools-protocol": "0.0.1232444", - "ws": "8.16.0" + "@puppeteer/browsers": "2.3.0", + "chromium-bidi": "0.6.3", + "debug": "^4.3.6", + "devtools-protocol": "0.0.1312386", + "ws": "^8.18.0" }, "dependencies": { "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "requires": { "ms": "2.1.2" @@ -107307,9 +106479,9 @@ "dev": true }, "ws": { - "version": "8.16.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz", - "integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==", + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", + "integrity": "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==", "dev": true, "requires": {} } @@ -107397,8 +106569,6 @@ }, "raf": { "version": "3.4.1", - "resolved": "https://registry.npmjs.org/raf/-/raf-3.4.1.tgz", - "integrity": "sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA==", "requires": { "performance-now": "^2.1.0" } @@ -109924,11 
+109094,6 @@ "strip-ansi": "^6.0.1" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "css-select": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.3.0.tgz", @@ -109984,14 +109149,6 @@ "domutils": "^2.5.2", "entities": "^2.0.0" } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } } } }, @@ -110213,11 +109370,17 @@ "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" }, "rfdc": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.1.tgz", - "integrity": "sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg==", + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", + "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==", "dev": true }, + "rgbcolor": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/rgbcolor/-/rgbcolor-1.0.1.tgz", + "integrity": "sha512-9aZLIrhRaD97sgVhtJOW6ckOEh6/GnvQtdVNfdZ6s67+3/XwLS9lBcQYzEEhYVeUowN7pRzMLsyGhK2i/xvWbw==", + "optional": true + }, "rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", @@ -111447,6 +110610,12 @@ } } }, + "stackblur-canvas": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/stackblur-canvas/-/stackblur-canvas-2.7.0.tgz", + "integrity": "sha512-yf7OENo23AGJhBriGx0QivY5JP6Y1HbrrDI6WLt6C5auYZXlQrheoY8hD4ibekFKz1HOfE48Ww8kMWMnJD/zcQ==", + "optional": true + }, 
"static-eval": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/static-eval/-/static-eval-2.1.0.tgz", @@ -111604,13 +110773,14 @@ "dev": true }, "streamx": { - "version": "2.16.1", - "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.16.1.tgz", - "integrity": "sha512-m9QYj6WygWyWa3H1YY69amr4nVgy61xfjys7xO7kviL5rfIEc2naf+ewFiOA+aEJD7y0JO3h2GoiUv4TDwEGzQ==", + "version": "2.18.0", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.18.0.tgz", + "integrity": "sha512-LLUC1TWdjVdn1weXGcSxyTR3T4+acB6tVGXT95y0nGbca4t4o/ng1wKAGTljm9VicuCVLvRlqFYXYy5GwgM7sQ==", "requires": { "bare-events": "^2.2.0", - "fast-fifo": "^1.1.0", - "queue-tick": "^1.0.1" + "fast-fifo": "^1.3.2", + "queue-tick": "^1.0.1", + "text-decoder": "^1.1.0" } }, "strict-uri-encode": { @@ -111644,23 +110814,6 @@ "requires": { "char-regex": "^1.0.2", "strip-ansi": "^6.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, - "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.0" - } - } } }, "string-similarity": { @@ -111706,23 +110859,10 @@ "strip-ansi": "^6.0.1" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } } } }, @@ -111791,19 +110931,20 @@ } } }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "requires": { + "ansi-regex": "^5.0.1" + } + }, "strip-ansi-cjs": { "version": "npm:strip-ansi@6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "requires": { "ansi-regex": "^5.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - } } }, "strip-bom": { @@ -111938,6 +111079,12 @@ "integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==", "dev": true }, + "svg-pathdata": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/svg-pathdata/-/svg-pathdata-6.0.3.tgz", + "integrity": "sha512-qsjeeq5YjBZ5eMdFuUa4ZosMLxgr5RZ+F+Y1OrDhuOCEInRMA3x74XdBtggJcj9kOeInz0WE+LgCPDkZFlBYJw==", + "optional": true + }, "svgo": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/svgo/-/svgo-3.2.0.tgz", @@ -112068,12 +111215,13 @@ } }, "tar-fs": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.4.tgz", - "integrity": 
"sha512-5AFQU8b9qLfZCX9zp2duONhPmZv0hGYiBPJsyUdqMjzq/mqVpy/rEUSeHk1+YitmxugaptgBh5oDGU3VsAJq4w==", + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.0.6.tgz", + "integrity": "sha512-iokBDQQkUyeXhgPYaZxmczGPhnhXZ0CmrqI+MOb/WFGS9DW5wnfrLgtjUJBvz50vQ3qfRwJ62QVoCFu8mPVu5w==", "dev": true, "requires": { - "mkdirp-classic": "^0.5.2", + "bare-fs": "^2.1.1", + "bare-path": "^2.1.0", "pump": "^3.0.0", "tar-stream": "^3.1.5" }, @@ -112253,12 +111401,29 @@ "minimatch": "^3.0.4" } }, + "text-decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.1.1.tgz", + "integrity": "sha512-8zll7REEv4GDD3x4/0pW+ppIxSNs7H1J10IKFZsuOMscumCdM2a+toDGLPA3T+1+fLBql4zbt5z83GEQGGV5VA==", + "requires": { + "b4a": "^1.6.4" + } + }, "text-extensions": { "version": "1.9.0", "resolved": "https://registry.npmjs.org/text-extensions/-/text-extensions-1.9.0.tgz", "integrity": "sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ==", "dev": true }, + "text-segmentation": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/text-segmentation/-/text-segmentation-1.0.3.tgz", + "integrity": "sha512-iOiPUo/BGnZ6+54OsWxZidGCsdU8YbE4PSpdPinp7DeMtUJNJBoJ/ouUSTJjHkh1KntHaltHl/gDs2FC4i5+Nw==", + "optional": true, + "requires": { + "utrie": "^1.0.2" + } + }, "text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -113305,6 +112470,15 @@ "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=", "dev": true }, + "utrie": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/utrie/-/utrie-1.0.2.tgz", + "integrity": "sha512-1MLa5ouZiOmQzUbjbu9VmjLzn1QLXBhwpUa7kdLUQK+KQ5KA9I1vk5U4YHe/X2Ch7PYnJfWuWT+VbuxbGwljhw==", + "optional": true, + "requires": { + "base64-arraybuffer": "^1.0.2" + } + }, "uuid": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", @@ -113600,9 +112774,9 @@ }, "dependencies": 
{ "@types/node": { - "version": "18.19.31", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.31.tgz", - "integrity": "sha512-ArgCD39YpyyrtFKIqMDvjz79jto5fcI/SVUs2HwB+f0dAzq68yqOdyaSivLiLugSziTpNXLQrVb7RZFmdZzbhA==", + "version": "18.19.42", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.42.tgz", + "integrity": "sha512-d2ZFc/3lnK2YCYhos8iaNIYu9Vfhr92nHiyJHRltXWjXUBjEE+A4I58Tdbnw4VhggSW+2j5y5gTrLs4biNnubg==", "dev": true, "requires": { "undici-types": "~5.26.4" @@ -114109,23 +113283,6 @@ "ansi-html-community": "0.0.8", "html-entities": "^2.1.0", "strip-ansi": "^6.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - } } }, "webpack-manifest-plugin": { @@ -114575,12 +113732,6 @@ "string-width": "^4.0.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -114597,15 +113748,6 @@ "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": 
"sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } } } }, @@ -114631,12 +113773,6 @@ "strip-ansi": "^6.0.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "devOptional": true - }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -114653,15 +113789,6 @@ "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.0" } - }, - "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", - "devOptional": true, - "requires": { - "ansi-regex": "^5.0.0" - } } } }, @@ -114675,11 +113802,6 @@ "strip-ansi": "^6.0.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -114694,14 +113816,6 @@ "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } } } }, @@ -114917,11 +114031,6 @@ "yargs-parser": "^21.1.1" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - 
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -114937,14 +114046,6 @@ "strip-ansi": "^6.0.1" } }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - }, "y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", @@ -114985,6 +114086,12 @@ "integrity": "sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==", "devOptional": true }, + "zod": { + "version": "3.23.8", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz", + "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==", + "dev": true + }, "zrender": { "version": "5.4.1", "resolved": "https://registry.npmjs.org/zrender/-/zrender-5.4.1.tgz", diff --git a/superset-frontend/package.json b/superset-frontend/package.json index bb7a2face829..00a6b483319c 100644 --- a/superset-frontend/package.json +++ b/superset-frontend/package.json @@ -1,6 +1,6 @@ { "name": "superset", - "version": "0.0.0-dev", + "version": "4.1.3", "description": "Superset is a data exploration platform designed to be visual, intuitive, and interactive.", "keywords": [ "big", @@ -132,6 +132,7 @@ "core-js": "^3.37.1", "d3-scale": "^2.1.2", "dom-to-image-more": "^3.2.0", + "dom-to-pdf": "^0.3.2", "emotion-rgba": "0.0.12", "fast-glob": "^3.2.7", "fs-extra": "^10.0.0", @@ -147,6 +148,7 @@ "json-bigint": "^1.0.0", "json-stringify-pretty-compact": "^2.0.0", "lodash": "^4.17.21", + "luxon": 
"^3.5.0", "mapbox-gl": "^2.10.0", "markdown-to-jsx": "^7.4.7", "match-sorter": "^6.3.4", @@ -155,8 +157,9 @@ "moment-timezone": "^0.5.44", "mousetrap": "^1.6.5", "mustache": "^2.2.1", + "nanoid": "^5.0.7", "polished": "^4.3.1", - "prop-types": "^15.7.2", + "prop-types": "^15.8.1", "query-string": "^6.13.7", "rc-trigger": "^5.3.4", "re-resizable": "^6.9.11", @@ -197,7 +200,6 @@ "rimraf": "^3.0.2", "rison": "^0.1.1", "scroll-into-view-if-needed": "^3.1.0", - "nanoid": "^5.0.7", "tinycolor2": "^1.4.2", "urijs": "^1.19.8", "use-event-callback": "^0.1.0", @@ -206,7 +208,7 @@ "yargs": "^17.7.2" }, "devDependencies": { - "@applitools/eyes-storybook": "^3.49.0", + "@applitools/eyes-storybook": "^3.50.7", "@babel/cli": "^7.22.6", "@babel/compat-data": "^7.22.6", "@babel/core": "^7.23.9", @@ -226,13 +228,14 @@ "@emotion/jest": "^11.11.0", "@hot-loader/react-dom": "^16.14.0", "@istanbuljs/nyc-config-typescript": "^1.0.1", + "@mihkeleidast/storybook-addon-source": "^1.0.1", "@storybook/addon-actions": "^8.1.11", "@storybook/addon-controls": "^8.1.11", "@storybook/addon-essentials": "^8.1.11", "@storybook/addon-links": "^8.1.11", "@storybook/addon-mdx-gfm": "^8.1.11", - "@storybook/preview-api": "^8.1.11", "@storybook/components": "^8.1.11", + "@storybook/preview-api": "^8.1.11", "@storybook/react": "^8.1.11", "@storybook/react-webpack5": "^8.1.11", "@svgr/webpack": "^8.0.1", @@ -250,6 +253,7 @@ "@types/jquery": "^3.5.8", "@types/js-levenshtein": "^1.1.3", "@types/json-bigint": "^1.0.4", + "@types/luxon": "^3.7.1", "@types/mousetrap": "^1.6.15", "@types/react": "^16.9.53", "@types/react-dom": "^16.9.8", @@ -338,7 +342,6 @@ "source-map-support": "^0.5.21", "speed-measure-webpack-plugin": "^1.5.0", "storybook": "^8.1.11", - "@mihkeleidast/storybook-addon-source": "^1.0.1", "style-loader": "^3.3.4", "thread-loader": "^3.0.4", "transform-loader": "^0.2.4", @@ -361,7 +364,9 @@ "d3-color": "^3.1.0", "yosay": { "ansi-regex": "^4.1.1" - } + }, + "puppeteer": "^22.4.1", + 
"@types/react": "^16.9.53" }, "readme": "ERROR: No README data found!", "scarfSettings": { diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/components/ColumnOption.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/components/ColumnOption.tsx index 96cb6ab13d10..db449b3450ef 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/components/ColumnOption.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/src/components/ColumnOption.tsx @@ -17,7 +17,7 @@ * under the License. */ import { useState, ReactNode, useLayoutEffect, RefObject } from 'react'; -import { css, styled, SupersetTheme } from '@superset-ui/core'; +import { css, SafeMarkdown, styled, SupersetTheme } from '@superset-ui/core'; import { Tooltip } from './Tooltip'; import { ColumnTypeLabel } from './ColumnTypeLabel/ColumnTypeLabel'; import CertifiedIconWithTooltip from './CertifiedIconWithTooltip'; @@ -28,6 +28,7 @@ import { getColumnTypeTooltipNode, } from './labelUtils'; import { SQLPopover } from './SQLPopover'; +import InfoTooltipWithTrigger from './InfoTooltipWithTrigger'; export type ColumnOptionProps = { column: ColumnMeta; @@ -50,6 +51,8 @@ export function ColumnOption({ }: ColumnOptionProps) { const { expression, column_name, type_generic } = column; const hasExpression = expression && expression !== column_name; + const warningMarkdown = + column.warning_markdown || column.warning_text || column.error_text; const type = hasExpression ? 
'expression' : type_generic; const [tooltipText, setTooltipText] = useState<ReactNode>(column.column_name); const [columnTypeTooltipText, setcolumnTypeTooltipText] = useState<ReactNode>( @@ -94,6 +97,19 @@ export function ColumnOption({ details={column.certification_details} /> )} + {warningMarkdown && ( + <InfoTooltipWithTrigger + className="text-warning" + icon="warning" + tooltip={<SafeMarkdown source={warningMarkdown} />} + label={`warn-${column.column_name}`} + iconsStyle={{ marginLeft: 0 }} + {...(column.error_text && { + className: 'text-danger', + icon: 'exclamation-circle', + })} + /> + )} </StyleOverrides> ); } diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/components/MetricOption.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/components/MetricOption.tsx index b558a0987ab0..c424cde518e2 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/components/MetricOption.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/src/components/MetricOption.tsx @@ -81,7 +81,8 @@ export function MetricOption({ </span> ); - const warningMarkdown = metric.warning_markdown || metric.warning_text; + const warningMarkdown = + metric.warning_markdown || metric.warning_text || metric.error_text; const [tooltipText, setTooltipText] = useState<ReactNode>(metric.metric_name); @@ -116,6 +117,10 @@ export function MetricOption({ tooltip={<SafeMarkdown source={warningMarkdown} />} label={`warn-${metric.metric_name}`} iconsStyle={{ marginLeft: 0 }} + {...(metric.error_text && { + className: 'text-danger', + icon: 'exclamation-circle', + })} /> )} </FlexRowContainer> diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/components/labelUtils.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/components/labelUtils.tsx index 66b25416f8b5..03af5c13e869 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/components/labelUtils.tsx +++ 
b/superset-frontend/packages/superset-ui-chart-controls/src/components/labelUtils.tsx @@ -55,8 +55,7 @@ const TooltipSection = ({ text: ReactNode; }) => ( <TooltipSectionWrapper> - <TooltipSectionLabel>{label}</TooltipSectionLabel> - <span>{text}</span> + <TooltipSectionLabel>{label}</TooltipSectionLabel>: <span>{text}</span> </TooltipSectionWrapper> ); @@ -71,12 +70,7 @@ export const getColumnTypeTooltipNode = (column: ColumnMeta): ReactNode => { return null; } - return ( - <TooltipSection - label={t('Column datatype')} - text={column.type.toLowerCase()} - /> - ); + return <TooltipSection label={t('Column type')} text={column.type} />; }; export const getColumnTooltipNode = ( diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/customControls.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/customControls.tsx index 7fb4f9a8b9e4..d25273c08e9b 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/customControls.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/customControls.tsx @@ -39,6 +39,7 @@ import { SORT_SERIES_CHOICES, } from '../constants'; import { checkColumnType } from '../utils/checkColumnType'; +import { isSortable } from '../utils/isSortable'; export const contributionModeControl = { name: 'contributionMode', @@ -55,27 +56,6 @@ export const contributionModeControl = { }, }; -function isForcedCategorical(controls: ControlStateMapping): boolean { - return ( - checkColumnType( - getColumnLabel(controls?.x_axis?.value as QueryFormColumn), - controls?.datasource?.datasource, - [GenericDataType.Numeric], - ) && !!controls?.xAxisForceCategorical?.value - ); -} - -function isSortable(controls: ControlStateMapping): boolean { - return ( - isForcedCategorical(controls) || - checkColumnType( - getColumnLabel(controls?.x_axis?.value as QueryFormColumn), - controls?.datasource?.datasource, - [GenericDataType.String, 
GenericDataType.Boolean], - ) - ); -} - const xAxisSortVisibility = ({ controls }: { controls: ControlStateMapping }) => isSortable(controls) && ensureIsArray(controls?.groupby?.value).length === 0 && diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx index eaf0f172b45c..198c14163d30 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx @@ -50,6 +50,7 @@ import { import { formatSelectOptions, displayTimeRelatedControls, + getColorControlsProps, D3_FORMAT_OPTIONS, D3_FORMAT_DOCS, D3_TIME_FORMAT_OPTIONS, @@ -142,9 +143,7 @@ const linear_color_scheme: SharedControlConfig<'ColorSchemeControl'> = { renderTrigger: true, schemes: () => sequentialSchemeRegistry.getMap(), isLinear: true, - mapStateToProps: state => ({ - dashboardId: state?.form_data?.dashboardId, - }), + mapStateToProps: state => getColorControlsProps(state), }; const granularity: SharedControlConfig<'SelectControl'> = { @@ -333,9 +332,7 @@ const color_scheme: SharedControlConfig<'ColorSchemeControl'> = { choices: () => categoricalSchemeRegistry.keys().map(s => [s, s]), description: t('The color scheme for rendering chart'), schemes: () => categoricalSchemeRegistry.getMap(), - mapStateToProps: state => ({ - dashboardId: state?.form_data?.dashboardId, - }), + mapStateToProps: state => getColorControlsProps(state), }; const truncate_metric: SharedControlConfig<'CheckboxControl'> = { @@ -352,6 +349,14 @@ const show_empty_columns: SharedControlConfig<'CheckboxControl'> = { description: t('Show empty columns'), }; +const enable_ai_insights: SharedControlConfig<'CheckboxControl'> = { + type: 'CheckboxControl', + label: t('Enable AI insights'), + default: false, + description: t('Include AI-generated insights for this 
chart'), + renderTrigger: true, +}; + const temporal_columns_lookup: SharedControlConfig<'HiddenControl'> = { type: 'HiddenControl', initialValue: (control: ControlState, state: ControlPanelState | null) => @@ -409,4 +414,5 @@ export default { temporal_columns_lookup, currency_format, sort_by_metric, + enable_ai_insights, }; diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/types.ts b/superset-frontend/packages/superset-ui-chart-controls/src/types.ts index d4170be17c62..e9606c6ba40a 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/types.ts +++ b/superset-frontend/packages/superset-ui-chart-controls/src/types.ts @@ -83,9 +83,13 @@ export interface Dataset { owners?: Owner[]; filter_select?: boolean; filter_select_enabled?: boolean; + column_names?: string[]; } export interface ControlPanelState { + slice: { + slice_id: number; + }; form_data: QueryFormData; datasource: Dataset | QueryResponse | null; controls: ControlStateMapping; @@ -258,6 +262,7 @@ export interface BaseControlConfig< props: ControlPanelsContainerProps, controlData: AnyDict, ) => boolean; + disableStash?: boolean; hidden?: | boolean | ((props: ControlPanelsContainerProps, controlData: AnyDict) => boolean); diff --git a/superset-frontend/plugins/plugin-chart-word-cloud/src/legacyPlugin/index.ts b/superset-frontend/packages/superset-ui-chart-controls/src/utils/colorControls.ts similarity index 53% rename from superset-frontend/plugins/plugin-chart-word-cloud/src/legacyPlugin/index.ts rename to superset-frontend/packages/superset-ui-chart-controls/src/utils/colorControls.ts index 527dcba50a86..b9400d9d4152 100644 --- a/superset-frontend/plugins/plugin-chart-word-cloud/src/legacyPlugin/index.ts +++ b/superset-frontend/packages/superset-ui-chart-controls/src/utils/colorControls.ts @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information @@ -16,28 +16,17 @@ * specific language governing permissions and limitations * under the License. */ - -import { t, ChartMetadata, ChartPlugin } from '@superset-ui/core'; -import transformProps from './transformProps'; -import buildQuery from '../plugin/buildQuery'; -import thumbnail from '../images/thumbnail.png'; -import { LegacyWordCloudFormData } from './types'; - -const metadata = new ChartMetadata({ - credits: ['https://github.com/jasondavies/d3-cloud'], - description: '', - name: t('Word Cloud'), - thumbnail, - useLegacyApi: true, -}); - -export default class LegacyWordCloudChartPlugin extends ChartPlugin<LegacyWordCloudFormData> { - constructor() { - super({ - buildQuery, - loadChart: () => import('../chart/WordCloud'), - metadata, - transformProps, - }); - } -} +export const getColorControlsProps = (state: Record<string, any>) => { + const dashboardId = state?.form_data?.dashboardId; + return { + chartId: state?.slice?.slice_id, + dashboardId, + hasDashboardColorScheme: + !!dashboardId && !!state?.form_data?.dashboard_color_scheme, + hasCustomLabelsColor: + Object.keys(state?.form_data?.label_colors || {}).length > 0, + colorNamespace: state?.form_data?.color_namespace, + mapLabelsColors: state?.form_data?.map_label_colors || {}, + sharedLabelsColors: state?.form_data?.shared_label_colors || [], + }; +}; diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/utils/index.ts b/superset-frontend/packages/superset-ui-chart-controls/src/utils/index.ts index 77e883cafca9..48e551987180 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/utils/index.ts +++ b/superset-frontend/packages/superset-ui-chart-controls/src/utils/index.ts @@ -27,3 +27,4 @@ export * from './defineSavedMetrics'; export * from './getStandardizedControls'; export * from './getTemporalColumns'; export { default as displayTimeRelatedControls } from './displayTimeRelatedControls'; 
+export * from './colorControls'; diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/utils/isSortable.ts b/superset-frontend/packages/superset-ui-chart-controls/src/utils/isSortable.ts new file mode 100644 index 000000000000..65b07ec91dc5 --- /dev/null +++ b/superset-frontend/packages/superset-ui-chart-controls/src/utils/isSortable.ts @@ -0,0 +1,51 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { + GenericDataType, + getColumnLabel, + isPhysicalColumn, + QueryFormColumn, +} from '@superset-ui/core'; +import { checkColumnType, ControlStateMapping } from '..'; + +export function isSortable(controls: ControlStateMapping): boolean { + const isForcedCategorical = + checkColumnType( + getColumnLabel(controls?.x_axis?.value as QueryFormColumn), + controls?.datasource?.datasource, + [GenericDataType.Numeric], + ) && !!controls?.xAxisForceCategorical?.value; + + const xAxisValue = controls?.x_axis?.value as QueryFormColumn; + + // Given that we don't know the type of a custom SQL column, + // we treat it as sortable and give the responsibility to the + // user to provide a sortable result. 
+ const isCustomSQL = !isPhysicalColumn(xAxisValue); + + return ( + isForcedCategorical || + isCustomSQL || + checkColumnType( + getColumnLabel(xAxisValue), + controls?.datasource?.datasource, + [GenericDataType.String, GenericDataType.Boolean], + ) + ); +} diff --git a/superset-frontend/packages/superset-ui-chart-controls/test/components/ColumnOption.test.tsx b/superset-frontend/packages/superset-ui-chart-controls/test/components/ColumnOption.test.tsx index 2f3f8f7069f1..c5b369aa2dbe 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/test/components/ColumnOption.test.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/test/components/ColumnOption.test.tsx @@ -16,87 +16,115 @@ * specific language governing permissions and limitations * under the License. */ -import { isValidElement } from 'react'; -import { shallow, ShallowWrapper } from 'enzyme'; -import { GenericDataType } from '@superset-ui/core'; +import '@testing-library/jest-dom'; +import { render } from '@testing-library/react'; +import { + ThemeProvider, + supersetTheme, + GenericDataType, +} from '@superset-ui/core'; -import { ColumnOption, ColumnOptionProps, ColumnTypeLabel } from '../../src'; -import { SQLPopover } from '../../src/components/SQLPopover'; +import { ColumnOption, ColumnOptionProps } from '../../src'; -describe('ColumnOption', () => { - const defaultProps: ColumnOptionProps = { +jest.mock('../../src/components/SQLPopover', () => ({ + SQLPopover: () => <div data-test="mock-sql-popover" />, +})); +jest.mock('../../src/components/ColumnTypeLabel/ColumnTypeLabel', () => ({ + ColumnTypeLabel: ({ type }: { type: string }) => ( + <div data-test="mock-column-type-label">{type}</div> + ), +})); +jest.mock('../../src/components/InfoTooltipWithTrigger', () => () => ( + <div data-test="mock-info-tooltip-with-trigger" /> +)); + +const defaultProps: ColumnOptionProps = { + column: { + column_name: 'foo', + verbose_name: 'Foo', + expression: 'SUM(foo)', + description: 'Foo is 
the greatest column of all', + }, + showType: false, +}; + +const setup = (props: Partial<ColumnOptionProps> = {}) => + render( + <ThemeProvider theme={supersetTheme}> + <ColumnOption {...defaultProps} {...props} /> + </ThemeProvider>, + ); +test('shows a label with verbose_name', () => { + const { container } = setup(); + const lbl = container.getElementsByClassName('option-label'); + expect(lbl).toHaveLength(1); + expect(`${lbl[0].textContent}`).toEqual(defaultProps.column.verbose_name); +}); +test('shows SQL Popover trigger', () => { + const { getByTestId } = setup(); + expect(getByTestId('mock-sql-popover')).toBeInTheDocument(); +}); +test('shows a label with column_name when no verbose_name', () => { + const { getByText } = setup({ column: { - column_name: 'foo', - verbose_name: 'Foo', - expression: 'SUM(foo)', - description: 'Foo is the greatest column of all', + ...defaultProps.column, + verbose_name: undefined, }, - showType: false, - }; - - let wrapper: ShallowWrapper; - let props: ColumnOptionProps; - const factory = (o: ColumnOptionProps) => <ColumnOption {...o} />; - beforeEach(() => { - wrapper = shallow(factory(defaultProps)); - props = { ...defaultProps }; - }); - it('is a valid element', () => { - expect(isValidElement(<ColumnOption {...defaultProps} />)).toBe(true); }); - it('shows a label with verbose_name', () => { - const lbl = wrapper.find('.option-label'); - expect(lbl).toHaveLength(1); - expect(lbl.first().text()).toBe('Foo'); - }); - it('shows SQL Popover trigger', () => { - expect(wrapper.find(SQLPopover)).toHaveLength(1); - }); - it('shows a label with column_name when no verbose_name', () => { - delete props.column.verbose_name; - wrapper = shallow(factory(props)); - expect(wrapper.find('.option-label').first().text()).toBe('foo'); + expect(getByText(defaultProps.column.column_name)).toBeInTheDocument(); +}); +test('shows a column type label when showType is true', () => { + const { getByTestId } = setup({ + showType: true, + column: { + 
column_name: 'foo', + type: 'VARCHAR', + type_generic: GenericDataType.String, + }, }); - it('shows a column type label when showType is true', () => { - wrapper = shallow( - factory({ - ...props, - showType: true, - column: { - column_name: 'foo', - type: 'VARCHAR', - type_generic: GenericDataType.String, - }, - }), - ); - expect(wrapper.find(ColumnTypeLabel)).toHaveLength(1); + expect(getByTestId('mock-column-type-label')).toBeInTheDocument(); +}); +test('column with expression has correct column label if showType is true', () => { + const { getByTestId } = setup({ + showType: true, }); - it('column with expression has correct column label if showType is true', () => { - props.showType = true; - wrapper = shallow(factory(props)); - expect(wrapper.find(ColumnTypeLabel)).toHaveLength(1); - expect(wrapper.find(ColumnTypeLabel).props().type).toBe('expression'); + expect(getByTestId('mock-column-type-label')).toBeInTheDocument(); + expect(getByTestId('mock-column-type-label')).toHaveTextContent('expression'); +}); +test('shows no column type label when type is null', () => { + const { queryByTestId } = setup({ + showType: true, + column: { + column_name: 'foo', + }, }); - it('shows no column type label when type is null', () => { - wrapper = shallow( - factory({ - ...props, - showType: true, - column: { - column_name: 'foo', - }, - }), - ); - expect(wrapper.find(ColumnTypeLabel)).toHaveLength(0); + expect(queryByTestId('mock-column-type-label')).not.toBeInTheDocument(); +}); +test('dttm column has correct column label if showType is true', () => { + const { getByTestId } = setup({ + showType: true, + column: { + ...defaultProps.column, + expression: undefined, + type_generic: GenericDataType.Temporal, + }, }); - it('dttm column has correct column label if showType is true', () => { - props.showType = true; - props.column.expression = undefined; - props.column.type_generic = GenericDataType.Temporal; - wrapper = shallow(factory(props)); - 
expect(wrapper.find(ColumnTypeLabel)).toHaveLength(1); - expect(wrapper.find(ColumnTypeLabel).props().type).toBe( - GenericDataType.Temporal, - ); + expect(getByTestId('mock-column-type-label')).toBeInTheDocument(); + expect(getByTestId('mock-column-type-label')).toHaveTextContent( + String(GenericDataType.Temporal), + ); +}); +test('doesnt show InfoTooltipWithTrigger when no warning', () => { + const { queryByText } = setup(); + expect(queryByText('mock-info-tooltip-with-trigger')).not.toBeInTheDocument(); +}); +test('shows a warning with InfoTooltipWithTrigger when it contains warning', () => { + const { getByTestId } = setup({ + ...defaultProps, + column: { + ...defaultProps.column, + warning_text: 'This is a warning', + }, }); + expect(getByTestId('mock-info-tooltip-with-trigger')).toBeInTheDocument(); }); diff --git a/superset-frontend/packages/superset-ui-chart-controls/test/components/InfoTooltipWithTrigger.test.tsx b/superset-frontend/packages/superset-ui-chart-controls/test/components/InfoTooltipWithTrigger.test.tsx index 33e2d8c7f256..0011f862b29a 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/test/components/InfoTooltipWithTrigger.test.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/test/components/InfoTooltipWithTrigger.test.tsx @@ -16,42 +16,69 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { shallow } from 'enzyme'; -import { Tooltip } from '../../src/components/Tooltip'; -import { InfoTooltipWithTrigger } from '../../src'; +import '@testing-library/jest-dom'; +import { fireEvent, render } from '@testing-library/react'; +import { ThemeProvider, supersetTheme } from '@superset-ui/core'; +import { InfoTooltipWithTrigger, InfoTooltipWithTriggerProps } from '../../src'; -describe('InfoTooltipWithTrigger', () => { - it('renders a tooltip', () => { - const wrapper = shallow( - <InfoTooltipWithTrigger label="test" tooltip="this is a test" />, - ); - expect(wrapper.find(Tooltip)).toHaveLength(1); - }); +jest.mock('../../src/components/Tooltip', () => ({ + Tooltip: ({ children }: { children: React.ReactNode }) => ( + <div data-test="mock-tooltip">{children}</div> + ), +})); + +const defaultProps = {}; + +const setup = (props: Partial<InfoTooltipWithTriggerProps> = {}) => + render( + <ThemeProvider theme={supersetTheme}> + <InfoTooltipWithTrigger {...defaultProps} {...props} /> + </ThemeProvider>, + ); - it('renders an info icon', () => { - const wrapper = shallow(<InfoTooltipWithTrigger />); - expect(wrapper.find('.fa-info-circle')).toHaveLength(1); +test('renders a tooltip', () => { + const { getAllByTestId } = setup({ + label: 'test', + tooltip: 'this is a test', }); + expect(getAllByTestId('mock-tooltip').length).toEqual(1); +}); + +test('renders an info icon', () => { + const { container } = setup(); + expect(container.getElementsByClassName('fa-info-circle')).toHaveLength(1); +}); - it('responds to keypresses', () => { - const clickHandler = jest.fn(); - const wrapper = shallow( - <InfoTooltipWithTrigger - label="test" - tooltip="this is a test" - onClick={clickHandler} - />, - ); - wrapper.find('.fa-info-circle').simulate('keypress', { key: 'Tab' }); - expect(clickHandler).toHaveBeenCalledTimes(0); - wrapper.find('.fa-info-circle').simulate('keypress', { key: 'Enter' }); - expect(clickHandler).toHaveBeenCalledTimes(1); - 
wrapper.find('.fa-info-circle').simulate('keypress', { key: ' ' }); - expect(clickHandler).toHaveBeenCalledTimes(2); +test('responds to keypresses', () => { + const clickHandler = jest.fn(); + const { getByRole } = setup({ + label: 'test', + tooltip: 'this is a test', + onClick: clickHandler, + }); + fireEvent.keyPress(getByRole('button'), { + key: 'Tab', + code: 9, + charCode: 9, + }); + expect(clickHandler).toHaveBeenCalledTimes(0); + fireEvent.keyPress(getByRole('button'), { + key: 'Enter', + code: 13, + charCode: 13, }); + expect(clickHandler).toHaveBeenCalledTimes(1); + fireEvent.keyPress(getByRole('button'), { + key: ' ', + code: 32, + charCode: 32, + }); + expect(clickHandler).toHaveBeenCalledTimes(2); +}); - it('has a bsStyle', () => { - const wrapper = shallow(<InfoTooltipWithTrigger bsStyle="something" />); - expect(wrapper.find('.text-something')).toHaveLength(1); +test('has a bsStyle', () => { + const { container } = setup({ + bsStyle: 'something', }); + expect(container.getElementsByClassName('text-something')).toHaveLength(1); }); diff --git a/superset-frontend/packages/superset-ui-chart-controls/test/components/MetricOption.test.tsx b/superset-frontend/packages/superset-ui-chart-controls/test/components/MetricOption.test.tsx index 929d6db8aca2..49b78159f374 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/test/components/MetricOption.test.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/test/components/MetricOption.test.tsx @@ -16,72 +16,97 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { isValidElement } from 'react'; -import { shallow, ShallowWrapper } from 'enzyme'; +import '@testing-library/jest-dom'; +import { render } from '@testing-library/react'; +import { ThemeProvider, supersetTheme } from '@superset-ui/core'; import { MetricOption, MetricOptionProps } from '../../src'; -describe('MetricOption', () => { - const defaultProps = { +jest.mock('../../src/components/InfoTooltipWithTrigger', () => () => ( + <div data-test="mock-info-tooltip-with-trigger" /> +)); +jest.mock('../../src/components/ColumnTypeLabel/ColumnTypeLabel', () => ({ + ColumnTypeLabel: () => <div data-test="mock-column-type-label" />, +})); +jest.mock( + '../../src/components/Tooltip', + () => + ({ children }: { children: React.ReactNode }) => ( + <div data-test="mock-tooltip">{children}</div> + ), +); +jest.mock('../../src/components/SQLPopover', () => ({ + SQLPopover: () => <div data-test="mock-sql-popover" />, +})); + +const defaultProps = { + metric: { + metric_name: 'foo', + verbose_name: 'Foo', + expression: 'SUM(foo)', + label: 'test', + description: 'Foo is the greatest metric of all', + warning_text: 'Be careful when using foo', + }, + openInNewWindow: false, + showFormula: true, + showType: true, + url: '', +}; + +const setup = (props: Partial<MetricOptionProps> = {}) => + render( + <ThemeProvider theme={supersetTheme}> + <MetricOption {...defaultProps} {...props} /> + </ThemeProvider>, + ); +test('shows a label with verbose_name', () => { + const { container } = setup(); + const lbl = container.getElementsByClassName('option-label'); + expect(lbl).toHaveLength(1); + expect(`${lbl[0].textContent}`).toEqual(defaultProps.metric.verbose_name); +}); +test('shows a InfoTooltipWithTrigger', () => { + const { getByTestId } = setup(); + expect(getByTestId('mock-info-tooltip-with-trigger')).toBeInTheDocument(); +}); +test('shows SQL Popover trigger', () => { + const { getByTestId } = setup(); + expect(getByTestId('mock-sql-popover')).toBeInTheDocument(); +}); 
+test('shows a label with metric_name when no verbose_name', () => { + const { getByText } = setup({ metric: { - metric_name: 'foo', - verbose_name: 'Foo', - expression: 'SUM(foo)', - label: 'test', - description: 'Foo is the greatest metric of all', - warning_text: 'Be careful when using foo', + ...defaultProps.metric, + verbose_name: '', }, - openInNewWindow: false, - showFormula: true, - showType: true, - url: '', - }; - - let wrapper: ShallowWrapper; - let props: MetricOptionProps; - const factory = (o: MetricOptionProps) => <MetricOption {...o} />; - beforeEach(() => { - wrapper = shallow(factory(defaultProps)); - props = { ...defaultProps }; - }); - it('is a valid element', () => { - expect(isValidElement(<MetricOption {...defaultProps} />)).toBe(true); - }); - it('shows a label with verbose_name', () => { - const lbl = wrapper.find('.option-label'); - expect(lbl).toHaveLength(1); - expect(lbl.first().text()).toBe('Foo'); - }); - it('shows a InfoTooltipWithTrigger', () => { - expect(wrapper.find('InfoTooltipWithTrigger')).toHaveLength(1); - }); - it('shows SQL Popover trigger', () => { - expect(wrapper.find('SQLPopover')).toHaveLength(1); }); - it('shows a label with metric_name when no verbose_name', () => { - props.metric.verbose_name = ''; - wrapper = shallow(factory(props)); - expect(wrapper.find('.option-label').first().text()).toBe('foo'); - }); - it('doesnt show InfoTooltipWithTrigger when no warning', () => { - props.metric.warning_text = ''; - wrapper = shallow(factory(props)); - expect(wrapper.find('InfoTooltipWithTrigger')).toHaveLength(0); - }); - it('sets target="_blank" when openInNewWindow is true', () => { - props.url = 'https://github.com/apache/incubator-superset'; - wrapper = shallow(factory(props)); - expect(wrapper.find('a').prop('target')).toBe(''); - - props.openInNewWindow = true; - wrapper = shallow(factory(props)); - expect(wrapper.find('a').prop('target')).toBe('_blank'); + 
expect(getByText(defaultProps.metric.metric_name)).toBeInTheDocument(); +}); +test('doesnt show InfoTooltipWithTrigger when no warning', () => { + const { queryByText } = setup({ + metric: { + ...defaultProps.metric, + warning_text: '', + }, }); - it('shows a metric type label when showType is true', () => { - props.showType = true; - wrapper = shallow(factory(props)); - expect(wrapper.find('ColumnTypeLabel')).toHaveLength(1); + expect(queryByText('mock-info-tooltip-with-trigger')).not.toBeInTheDocument(); +}); +test('sets target="_blank" when openInNewWindow is true', () => { + const { getByRole } = setup({ + url: 'https://github.com/apache/incubator-superset', + openInNewWindow: true, }); - it('shows a Tooltip for the verbose metric name', () => { - expect(wrapper.find('Tooltip')).toHaveLength(1); + expect( + getByRole('link', { name: defaultProps.metric.verbose_name }), + ).toHaveAttribute('target', '_blank'); +}); +test('shows a metric type label when showType is true', () => { + const { getByTestId } = setup({ + showType: true, }); + expect(getByTestId('mock-column-type-label')).toBeInTheDocument(); +}); +test('shows a Tooltip for the verbose metric name', () => { + const { getByTestId } = setup(); + expect(getByTestId('mock-tooltip')).toBeInTheDocument(); }); diff --git a/superset-frontend/packages/superset-ui-chart-controls/test/components/labelUtils.test.tsx b/superset-frontend/packages/superset-ui-chart-controls/test/components/labelUtils.test.tsx index d32081b8988b..9b5b760f7915 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/test/components/labelUtils.test.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/test/components/labelUtils.test.tsx @@ -90,7 +90,7 @@ test('should get column datatype rendered as tooltip when column has a type', () </>, ); - expect(screen.getByText('Column datatype')).toBeVisible(); + expect(screen.getByText('Column type')).toBeVisible(); expect(screen.getByText('text')).toBeVisible(); }); diff 
--git a/superset-frontend/packages/superset-ui-chart-controls/test/utils/colorControls.test.ts b/superset-frontend/packages/superset-ui-chart-controls/test/utils/colorControls.test.ts new file mode 100644 index 000000000000..deadc3eedef3 --- /dev/null +++ b/superset-frontend/packages/superset-ui-chart-controls/test/utils/colorControls.test.ts @@ -0,0 +1,112 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { getColorControlsProps } from '../../src'; + +describe('getColorControlsProps', () => { + it('should return default values when state is empty', () => { + const state = {}; + const result = getColorControlsProps(state); + expect(result).toEqual({ + chartId: undefined, + dashboardId: undefined, + hasDashboardColorScheme: false, + hasCustomLabelsColor: false, + colorNamespace: undefined, + mapLabelsColors: {}, + sharedLabelsColors: [], + }); + }); + + it('should return correct values when state has form_data with dashboardId and color scheme', () => { + const state = { + form_data: { + dashboardId: 123, + dashboard_color_scheme: 'blueScheme', + label_colors: {}, + }, + slice: { slice_id: 456 }, + }; + const result = getColorControlsProps(state); + expect(result).toEqual({ + chartId: 456, + dashboardId: 123, + hasDashboardColorScheme: true, + hasCustomLabelsColor: false, + colorNamespace: undefined, + mapLabelsColors: {}, + sharedLabelsColors: [], + }); + }); + + it('should detect custom label colors correctly', () => { + const state = { + form_data: { + dashboardId: 123, + label_colors: { label1: '#000000' }, + }, + slice: { slice_id: 456 }, + }; + const result = getColorControlsProps(state); + expect(result).toEqual({ + chartId: 456, + dashboardId: 123, + hasDashboardColorScheme: false, + hasCustomLabelsColor: true, + colorNamespace: undefined, + mapLabelsColors: {}, + sharedLabelsColors: [], + }); + }); + + it('should return shared label colors when available', () => { + const state = { + form_data: { + shared_label_colors: ['#FF5733', '#33FF57'], + }, + }; + const result = getColorControlsProps(state); + expect(result).toEqual({ + chartId: undefined, + dashboardId: undefined, + hasDashboardColorScheme: false, + hasCustomLabelsColor: false, + sharedLabelsColors: ['#FF5733', '#33FF57'], + colorNamespace: undefined, + mapLabelsColors: {}, + }); + }); + + it('should handle missing form_data and slice properties', () => { + const state = { + form_data: { 
+ dashboardId: 789, + }, + }; + const result = getColorControlsProps(state); + expect(result).toEqual({ + chartId: undefined, + dashboardId: 789, + hasDashboardColorScheme: false, + hasCustomLabelsColor: false, + colorNamespace: undefined, + mapLabelsColors: {}, + sharedLabelsColors: [], + }); + }); +}); diff --git a/superset-frontend/packages/superset-ui-chart-controls/test/utils/isSortable.test.ts b/superset-frontend/packages/superset-ui-chart-controls/test/utils/isSortable.test.ts new file mode 100644 index 000000000000..0ef9844f9950 --- /dev/null +++ b/superset-frontend/packages/superset-ui-chart-controls/test/utils/isSortable.test.ts @@ -0,0 +1,70 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { ControlStateMapping } from '@superset-ui/chart-controls'; +import { GenericDataType } from '@superset-ui/core'; +import { isSortable } from '../../src/utils/isSortable'; + +const controls: ControlStateMapping = { + datasource: { + datasource: { + columns: [ + { column_name: 'a', type_generic: GenericDataType.String }, + { column_name: 'b', type_generic: GenericDataType.Numeric }, + { column_name: 'c', type_generic: GenericDataType.Boolean }, + ], + }, + type: 'Select', + }, +}; + +test('should return true if the column is forced to be categorical', () => { + const c: ControlStateMapping = { + ...controls, + x_axis: { value: 'b', type: 'Select' }, + xAxisForceCategorical: { value: true, type: 'Checkbox' }, + }; + expect(isSortable(c)).toBe(true); +}); + +test('should return true if the column is a custom SQL column', () => { + const c: ControlStateMapping = { + ...controls, + x_axis: { + value: { label: 'custom_sql', sqlExpression: 'MAX(ID)' }, + type: 'Select', + }, + }; + expect(isSortable(c)).toBe(true); +}); + +test('should return true if the column type is String or Boolean', () => { + const c: ControlStateMapping = { + ...controls, + x_axis: { value: 'c', type: 'Checkbox' }, + }; + expect(isSortable(c)).toBe(true); +}); + +test('should return false if none of the conditions are met', () => { + const c: ControlStateMapping = { + ...controls, + x_axis: { value: 'b', type: 'Input' }, + }; + expect(isSortable(c)).toBe(false); +}); diff --git a/superset-frontend/packages/superset-ui-core/src/color/CategoricalColorNamespace.ts b/superset-frontend/packages/superset-ui-core/src/color/CategoricalColorNamespace.ts index 9c56d5114b9d..9389ad549fd5 100644 --- a/superset-frontend/packages/superset-ui-core/src/color/CategoricalColorNamespace.ts +++ b/superset-frontend/packages/superset-ui-core/src/color/CategoricalColorNamespace.ts @@ -17,6 +17,7 @@ * under the License. 
*/ +import { cloneDeep } from 'lodash'; import CategoricalColorScale from './CategoricalColorScale'; import { ColorsLookup } from './types'; import getCategoricalSchemeRegistry from './CategoricalSchemeRegistrySingleton'; @@ -37,10 +38,21 @@ export default class CategoricalColorNamespace { this.forcedItems = {}; } - getScale(schemeId?: string) { - const id = schemeId ?? getCategoricalSchemeRegistry().getDefaultKey() ?? ''; + /** + * A new CategoricalColorScale instance is created for each chart. + * + * @param colorScheme - the color scheme to use + * @returns a new instance of a color scale + */ + getScale(colorScheme?: string) { + const id = + colorScheme ?? getCategoricalSchemeRegistry().getDefaultKey() ?? ''; const scheme = getCategoricalSchemeRegistry().get(id); - return new CategoricalColorScale(scheme?.colors ?? [], this.forcedItems); + return new CategoricalColorScale( + scheme?.colors ?? [], + this.forcedItems, + colorScheme, + ); } /** @@ -59,6 +71,17 @@ export default class CategoricalColorNamespace { resetColors() { this.forcedItems = {}; } + + resetColorsForLabels(labels: string[] = []) { + const updatedForcedItems = cloneDeep(this.forcedItems); + labels.forEach(label => { + if (updatedForcedItems.hasOwnProperty(label)) { + delete updatedForcedItems[label]; + } + }); + + this.forcedItems = { ...updatedForcedItems }; + } } const namespaces: { @@ -80,16 +103,19 @@ export function getNamespace(name: string = DEFAULT_NAMESPACE) { export function getColor( value?: string, - schemeId?: string, + colorScheme?: string, namespace?: string, ) { - return getNamespace(namespace).getScale(schemeId).getColor(value); + return getNamespace(namespace).getScale(colorScheme).getColor(value); } /* Returns a new scale instance within the same namespace. 
Especially useful when a chart is booting for the first time + + @param scheme - the applied color scheme + @param namespace - the namespace */ -export function getScale(scheme?: string, namespace?: string) { - return getNamespace(namespace).getScale(scheme); +export function getScale(colorScheme?: string, namespace?: string) { + return getNamespace(namespace).getScale(colorScheme); } diff --git a/superset-frontend/packages/superset-ui-core/src/color/CategoricalColorScale.ts b/superset-frontend/packages/superset-ui-core/src/color/CategoricalColorScale.ts index f97f84cdec48..707ae3d4afd6 100644 --- a/superset-frontend/packages/superset-ui-core/src/color/CategoricalColorScale.ts +++ b/superset-frontend/packages/superset-ui-core/src/color/CategoricalColorScale.ts @@ -21,14 +21,16 @@ import { scaleOrdinal, ScaleOrdinal } from 'd3-scale'; import { ExtensibleFunction } from '../models'; import { ColorsInitLookup, ColorsLookup } from './types'; import stringifyAndTrim from './stringifyAndTrim'; -import getLabelsColorMap from './LabelsColorMapSingleton'; +import getLabelsColorMap, { + LabelsColorMapSource, +} from './LabelsColorMapSingleton'; import { getAnalogousColors } from './utils'; import { FeatureFlag, isFeatureEnabled } from '../utils'; // Use type augmentation to correct the fact that // an instance of CategoricalScale is also a function interface CategoricalColorScale { - (x: { toString(): string }, y?: number, w?: string): string; + (x: { toString(): string }, y?: number): string; } class CategoricalColorScale extends ExtensibleFunction { @@ -50,11 +52,16 @@ class CategoricalColorScale extends ExtensibleFunction { * Constructor * @param {*} colors an array of colors * @param {*} forcedColors optional parameter that comes from parent - * (usually CategoricalColorNamespace) + * @param {*} appliedColorScheme the color scheme applied to the chart + * */ - constructor(colors: string[], forcedColors: ColorsInitLookup = {}) { - super((value: string, sliceId?: number, 
colorScheme?: string) => - this.getColor(value, sliceId, colorScheme), + constructor( + colors: string[], + forcedColors: ColorsInitLookup = {}, + appliedColorScheme?: string, + ) { + super((value: string, sliceId?: number) => + this.getColor(value, sliceId, appliedColorScheme), ); // holds original color scheme colors this.originColors = colors; @@ -107,15 +114,28 @@ class CategoricalColorScale extends ExtensibleFunction { * * @param value the value of a label to get the color for * @param sliceId the ID of the current chart - * @param colorScheme the original color scheme of the chart + * @param appliedColorScheme the color scheme applied to the chart * @returns the color or the next available color */ - getColor(value?: string, sliceId?: number, colorScheme?: string): string { + getColor( + value?: string, + sliceId?: number, + appliedColorScheme?: string, + ): string { const cleanedValue = stringifyAndTrim(value); - // priority: forced color (i.e. custom label colors) > shared color > scale color + // priority: forced color (aka custom label colors) > shared color > scale color const forcedColor = this.forcedColors?.[cleanedValue]; - const isExistingLabel = this.chartLabelsColorMap.has(cleanedValue); - let color = forcedColor || this.scale(cleanedValue); + const { source } = this.labelsColorMapInstance; + const currentColorMap = + source === LabelsColorMapSource.Dashboard + ? this.labelsColorMapInstance.getColorMap() + : this.chartLabelsColorMap; + const isExistingLabel = currentColorMap.has(cleanedValue); + let color = + forcedColor || + (isExistingLabel + ? 
(currentColorMap.get(cleanedValue) as string) + : this.scale(cleanedValue)); // a forced color will always be used independently of the usage count if (!forcedColor && !isExistingLabel) { @@ -128,7 +148,7 @@ class CategoricalColorScale extends ExtensibleFunction { this.isColorUsed(color) ) { // fallback to least used color - color = this.getNextAvailableColor(color); + color = this.getNextAvailableColor(cleanedValue, color); } } @@ -141,7 +161,7 @@ class CategoricalColorScale extends ExtensibleFunction { cleanedValue, color, sliceId, - colorScheme, + appliedColorScheme, ); } return color; @@ -164,48 +184,76 @@ class CategoricalColorScale extends ExtensibleFunction { * @param color the color to check * @returns the count of the color usage in this slice */ - getColorUsageCount(currentColor: string): number { - let count = 0; - this.chartLabelsColorMap.forEach(color => { - if (color === currentColor) { - count += 1; - } - }); - return count; + getColorUsageCount(color: string): number { + return Array.from(this.chartLabelsColorMap.values()).filter( + value => value === color, + ).length; } /** - * Lower chances of color collision by returning the least used color - * Checks across colors of current slice within LabelsColorMapSingleton + * Lower chances of color collision by returning the least used color. + * Checks across colors of current slice within chartLabelsColorMap. 
* + * @param currentLabel the current label * @param currentColor the current color - * @returns the least used color that is not the excluded color + * @returns the least used color that is not the current color */ - getNextAvailableColor(currentColor: string) { - const colorUsageArray = this.colors.map(color => ({ - color, - count: this.getColorUsageCount(color), - })); - const currentColorCount = this.getColorUsageCount(currentColor); - const otherColors = colorUsageArray.filter( - colorEntry => colorEntry.color !== currentColor, - ); - // all other colors are used as much or more than currentColor - const hasNoneAvailable = otherColors.every( - colorEntry => colorEntry.count >= currentColorCount, + getNextAvailableColor(currentLabel: string, currentColor: string): string { + // Precompute color usage counts for all colors + const colorUsageCounts = new Map( + this.colors.map(color => [color, this.getColorUsageCount(color)]), ); - // fallback to currentColor color - if (!otherColors.length || hasNoneAvailable) { - return currentColor; + // Get an ordered array of labels from the map + const orderedLabels = Array.from(this.chartLabelsColorMap.keys()); + const currentLabelIndex = orderedLabels.indexOf(currentLabel); + + // Helper to infer "previous" and "next" labels based on index + const getAdjacentLabelsColors = (): string[] => { + const previousLabel = + currentLabelIndex > 0 ? orderedLabels[currentLabelIndex - 1] : null; + const nextLabel = + currentLabelIndex < orderedLabels.length - 1 + ? orderedLabels[currentLabelIndex + 1] + : null; + + const previousColor = previousLabel + ? this.chartLabelsColorMap.get(previousLabel) + : null; + const nextColor = nextLabel + ? 
this.chartLabelsColorMap.get(nextLabel) + : null; + + return [previousColor, nextColor].filter(color => color) as string[]; + }; + + const adjacentColors = getAdjacentLabelsColors(); + + // Determine adjusted score (usage count + penalties) + const calculateScore = (color: string): number => { + /* istanbul ignore next */ + const usageCount = colorUsageCounts.get(color) || 0; + const adjacencyPenalty = adjacentColors.includes(color) ? 100 : 0; + return usageCount + adjacencyPenalty; + }; + + // If there is any color that has never been used, prioritize it + const unusedColor = this.colors.find( + color => (colorUsageCounts.get(color) || 0) === 0, + ); + if (unusedColor) { + return unusedColor; } - // Finding the least used color - const leastUsedColor = otherColors.reduce((min, entry) => - entry.count < min.count ? entry : min, - ).color; + // If all colors are used, calculate scores and choose the best one + const otherColors = this.colors.filter(color => color !== currentColor); - return leastUsedColor; + // Find the color with the minimum score, defaulting to currentColor + return otherColors.reduce((bestColor, color) => { + const bestScore = calculateScore(bestColor); + const currentScore = calculateScore(color); + return currentScore < bestScore ? 
color : bestColor; + }, currentColor); } /** diff --git a/superset-frontend/packages/superset-ui-core/src/color/LabelsColorMapSingleton.ts b/superset-frontend/packages/superset-ui-core/src/color/LabelsColorMapSingleton.ts index 59d3f8cc5de7..cf50d6f6a252 100644 --- a/superset-frontend/packages/superset-ui-core/src/color/LabelsColorMapSingleton.ts +++ b/superset-frontend/packages/superset-ui-core/src/color/LabelsColorMapSingleton.ts @@ -18,6 +18,7 @@ */ import { makeSingleton } from '../utils'; +import CategoricalColorNamespace from './CategoricalColorNamespace'; export enum LabelsColorMapSource { Dashboard, @@ -25,7 +26,10 @@ export enum LabelsColorMapSource { } export class LabelsColorMap { - chartsLabelsMap: Map<number, { labels: string[]; scheme?: string }>; + chartsLabelsMap: Map< + number, + { labels: string[]; scheme?: string; ownScheme?: string } + >; colorMap: Map<string, string>; @@ -38,17 +42,38 @@ export class LabelsColorMap { this.source = LabelsColorMapSource.Dashboard; } - updateColorMap(categoricalNamespace: any, colorScheme?: string) { - const newColorMap = new Map(); - this.colorMap.clear(); + /** + * Wipes out the color map and updates it with the new color scheme. 
+ * + * @param categoricalNamespace - the namespace to use for color mapping + * @param colorScheme - color scheme + */ + updateColorMap( + categoricalNamespace: CategoricalColorNamespace, + colorScheme?: string, + merge = false, + ) { + const newColorMap = this.colorMap; + + if (!merge) { + newColorMap.clear(); + } + this.chartsLabelsMap.forEach((chartConfig, sliceId) => { - const { labels, scheme: originalChartColorScheme } = chartConfig; - const currentColorScheme = colorScheme || originalChartColorScheme; - const colorScale = categoricalNamespace.getScale(currentColorScheme); + const { labels, ownScheme } = chartConfig; + const appliedColorScheme = colorScheme || ownScheme; + const colorScale = categoricalNamespace.getScale(appliedColorScheme); labels.forEach(label => { - const newColor = colorScale.getColor(label, sliceId); - newColorMap.set(label, newColor); + // if merge, apply the scheme only to new labels in the map + if (!merge || !this.colorMap.has(label)) { + const newColor = colorScale.getColor( + label, + sliceId, + appliedColorScheme, + ); + newColorMap.set(label, newColor); + } }); }); this.colorMap = newColorMap; @@ -58,29 +83,63 @@ export class LabelsColorMap { return this.colorMap; } + /** + * + * Called individually by each plugin via getColor fn. 
+ * + * @param label - the label name + * @param color - the color + * @param sliceId - the chart id + * @param colorScheme - the color scheme + * + */ addSlice( label: string, color: string, sliceId: number, colorScheme?: string, ) { - if (this.source !== LabelsColorMapSource.Dashboard) return; - const chartConfig = this.chartsLabelsMap.get(sliceId) || { labels: [], - scheme: '', + scheme: undefined, + ownScheme: undefined, }; + const { labels } = chartConfig; if (!labels.includes(label)) { labels.push(label); this.chartsLabelsMap.set(sliceId, { labels, scheme: colorScheme, + ownScheme: chartConfig.ownScheme, + }); + } + if (this.source === LabelsColorMapSource.Dashboard) { + this.colorMap.set(label, color); + } + } + + /** + * Used to make sure all slices respect their original scheme. + * + * @param sliceId - the chart id + * @param ownScheme - the color scheme + */ + setOwnColorScheme(sliceId: number, ownScheme: string) { + const chartConfig = this.chartsLabelsMap.get(sliceId); + if (chartConfig) { + this.chartsLabelsMap.set(sliceId, { + ...chartConfig, + ownScheme, }); } - this.colorMap.set(label, color); } + /** + * Remove a slice from the color map. + * + * @param sliceId - the chart + */ removeSlice(sliceId: number) { if (this.source !== LabelsColorMapSource.Dashboard) return; @@ -96,10 +155,20 @@ export class LabelsColorMap { this.colorMap = newColorMap; } + /** + * Clear the shared labels color map. 
+ */ clear() { - this.chartsLabelsMap.clear(); this.colorMap.clear(); } + + /** + * Clears all maps + */ + reset() { + this.clear(); + this.chartsLabelsMap.clear(); + } } const getInstance = makeSingleton(LabelsColorMap); diff --git a/superset-frontend/packages/superset-ui-core/src/connection/callApi/parseResponse.ts b/superset-frontend/packages/superset-ui-core/src/connection/callApi/parseResponse.ts index 52dc34808415..4ee81b80b3aa 100644 --- a/superset-frontend/packages/superset-ui-core/src/connection/callApi/parseResponse.ts +++ b/superset-frontend/packages/superset-ui-core/src/connection/callApi/parseResponse.ts @@ -57,11 +57,21 @@ export default async function parseResponse<T extends ParseMethod = 'json'>( const json = JSONbig.parse(rawData); const result: JsonResponse = { response, - // `json-bigint` could not handle floats well, see sidorares/json-bigint#62 - // TODO: clean up after json-bigint>1.0.1 is released - json: cloneDeepWith(json, (value: any) => - value?.isInteger?.() === false ? 
Number(value) : undefined, - ), + json: cloneDeepWith(json, (value: any) => { + if ( + value?.isInteger?.() === true && + (value?.isGreaterThan?.(Number.MAX_SAFE_INTEGER) || + value?.isLessThan?.(Number.MIN_SAFE_INTEGER)) + ) { + return BigInt(value); + } + // // `json-bigint` could not handle floats well, see sidorares/json-bigint#62 + // // TODO: clean up after json-bigint>1.0.1 is released + if (value?.isNaN?.() === false) { + return value?.toNumber?.(); + } + return undefined; + }), }; return result as ReturnType; } diff --git a/superset-frontend/packages/superset-ui-core/src/currency-format/CurrencyFormatter.ts b/superset-frontend/packages/superset-ui-core/src/currency-format/CurrencyFormatter.ts index a5f215fe9556..9540f44aafc5 100644 --- a/superset-frontend/packages/superset-ui-core/src/currency-format/CurrencyFormatter.ts +++ b/superset-frontend/packages/superset-ui-core/src/currency-format/CurrencyFormatter.ts @@ -31,13 +31,36 @@ interface CurrencyFormatter { (value: number | null | undefined): string; } -export const getCurrencySymbol = (currency: Partial<Currency>) => - new Intl.NumberFormat('en-US', { +const ZEN_COUNTRY_CODE_PARAM = 'currency_code'; +const CURRENCY_SYMBOL_MAPPING = { + IDR: 'Rp', +}; + +export const getCurrencySymbol = (currency: Partial<Currency>) => { + const urlParams = new URLSearchParams(window.location.search); + const zenCurrency = urlParams.get(ZEN_COUNTRY_CODE_PARAM)?.toUpperCase() as + | keyof typeof CURRENCY_SYMBOL_MAPPING + | undefined; + + if (zenCurrency) { + if (CURRENCY_SYMBOL_MAPPING[zenCurrency]) { + return CURRENCY_SYMBOL_MAPPING[zenCurrency]; + } + return new Intl.NumberFormat('en-US', { + style: 'currency', + currency: zenCurrency, + }) + .formatToParts(1) + .find(x => x.type === 'currency')?.value; + } + + return new Intl.NumberFormat('en-US', { style: 'currency', - currency: currency.symbol, + currency: currency.symbol || 'INR', }) .formatToParts(1) .find(x => x.type === 'currency')?.value; +}; class 
CurrencyFormatter extends ExtensibleFunction { d3Format: string; diff --git a/superset-frontend/packages/superset-ui-core/src/query/types/Dashboard.ts b/superset-frontend/packages/superset-ui-core/src/query/types/Dashboard.ts index cb299c3ac96c..ac4b19cae55a 100644 --- a/superset-frontend/packages/superset-ui-core/src/query/types/Dashboard.ts +++ b/superset-frontend/packages/superset-ui-core/src/query/types/Dashboard.ts @@ -80,6 +80,24 @@ export type Filter = { description: string; }; +export type AppliedFilter = { + values: { + filters: Record<string, any>[]; + } | null; +}; + +export type AppliedCrossFilterType = { + filterType: undefined; + targets: number[]; + scope: number[]; +} & AppliedFilter; + +export type AppliedNativeFilterType = { + filterType: 'filter_select'; + scope: number[]; + targets: Partial<NativeFilterTarget>[]; +} & AppliedFilter; + export type FilterWithDataMask = Filter & { dataMask: DataMaskWithId }; export type Divider = Partial<Omit<Filter, 'id' | 'type'>> & { @@ -89,6 +107,24 @@ export type Divider = Partial<Omit<Filter, 'id' | 'type'>> & { type: typeof NativeFilterType.Divider; }; +export function isAppliedCrossFilterType( + filterElement: AppliedCrossFilterType | AppliedNativeFilterType | Filter, +): filterElement is AppliedCrossFilterType { + return ( + filterElement.filterType === undefined && + filterElement.hasOwnProperty('values') + ); +} + +export function isAppliedNativeFilterType( + filterElement: AppliedCrossFilterType | AppliedNativeFilterType | Filter, +): filterElement is AppliedNativeFilterType { + return ( + filterElement.filterType === 'filter_select' && + filterElement.hasOwnProperty('values') + ); +} + export function isNativeFilter( filterElement: Filter | Divider, ): filterElement is Filter { diff --git a/superset-frontend/packages/superset-ui-core/src/query/types/Metric.ts b/superset-frontend/packages/superset-ui-core/src/query/types/Metric.ts index ac6523bedb35..227ca6e71d56 100644 --- 
a/superset-frontend/packages/superset-ui-core/src/query/types/Metric.ts +++ b/superset-frontend/packages/superset-ui-core/src/query/types/Metric.ts @@ -71,6 +71,7 @@ export interface Metric { verbose_name?: string; warning_markdown?: Maybe<string>; warning_text?: Maybe<string>; + error_text?: string; } export function isSavedMetric(metric: any): metric is SavedMetric { diff --git a/superset-frontend/packages/superset-ui-core/src/time-comparison/customTimeRangeDecode.ts b/superset-frontend/packages/superset-ui-core/src/time-comparison/customTimeRangeDecode.ts index d99c78281c92..bb5c3d485680 100644 --- a/superset-frontend/packages/superset-ui-core/src/time-comparison/customTimeRangeDecode.ts +++ b/superset-frontend/packages/superset-ui-core/src/time-comparison/customTimeRangeDecode.ts @@ -39,14 +39,14 @@ export const ISO8601_AND_CONSTANT = RegExp( ); const DATETIME_CONSTANT = ['now', 'today']; const SEVEN_DAYS_AGO = new Date(); -SEVEN_DAYS_AGO.setUTCHours(0, 0, 0, 0); +SEVEN_DAYS_AGO.setHours(0, 0, 0, 0); const MIDNIGHT = new Date(); -MIDNIGHT.setUTCHours(0, 0, 0, 0); +MIDNIGHT.setHours(0, 0, 0, 0); const defaultCustomRange: CustomRangeType = { - sinceDatetime: SEVEN_DAYS_AGO.setUTCDate( - SEVEN_DAYS_AGO.getUTCDate() - 7, + sinceDatetime: SEVEN_DAYS_AGO.setDate( + SEVEN_DAYS_AGO.getDate() - 7, ).toString(), sinceMode: 'relative', sinceGrain: 'day', diff --git a/superset-frontend/packages/superset-ui-core/src/ui-overrides/types.ts b/superset-frontend/packages/superset-ui-core/src/ui-overrides/types.ts index 775e2c129ad1..3f0ff82b2fcb 100644 --- a/superset-frontend/packages/superset-ui-core/src/ui-overrides/types.ts +++ b/superset-frontend/packages/superset-ui-core/src/ui-overrides/types.ts @@ -134,13 +134,14 @@ export interface SQLFormExtensionProps { startQuery: (ctasArg?: any, ctas_method?: any) => void; } -export interface SQLResultTableExtentionProps { +export interface SQLResultTableExtensionProps { queryId: string; orderedColumnKeys: string[]; data: 
Record<string, unknown>[]; height: number; filterText?: string; expandedColumns?: string[]; + allowHTML?: boolean; } /** @@ -223,7 +224,7 @@ export type Extensions = Partial<{ 'database.delete.related': ComponentType<DatabaseDeleteRelatedExtensionProps>; 'dataset.delete.related': ComponentType<DatasetDeleteRelatedExtensionProps>; 'sqleditor.extension.form': ComponentType<SQLFormExtensionProps>; - 'sqleditor.extension.resultTable': ComponentType<SQLResultTableExtentionProps>; + 'sqleditor.extension.resultTable': ComponentType<SQLResultTableExtensionProps>; 'dashboard.slice.header': ComponentType<SliceHeaderExtension>; 'sqleditor.extension.customAutocomplete': ( args: CustomAutoCompleteArgs, diff --git a/superset-frontend/packages/superset-ui-core/src/utils/featureFlags.ts b/superset-frontend/packages/superset-ui-core/src/utils/featureFlags.ts index 67f3785ab60a..d741909d6451 100644 --- a/superset-frontend/packages/superset-ui-core/src/utils/featureFlags.ts +++ b/superset-frontend/packages/superset-ui-core/src/utils/featureFlags.ts @@ -22,6 +22,7 @@ import logger from './logging'; // check into source control. We're hardcoding the supported flags for now. 
export enum FeatureFlag { // PLEASE KEEP THE LIST SORTED ALPHABETICALLY + AiSummaryOnChart = 'AI_SUMMARY_ON_CHART', AlertsAttachReports = 'ALERTS_ATTACH_REPORTS', AlertReports = 'ALERT_REPORTS', AlertReportTabs = 'ALERT_REPORT_TABS', @@ -59,6 +60,9 @@ export enum FeatureFlag { Thumbnails = 'THUMBNAILS', UseAnalagousColors = 'USE_ANALAGOUS_COLORS', ForceSqlLabRunAsync = 'SQLLAB_FORCE_RUN_ASYNC', + SlackEnableAvatars = 'SLACK_ENABLE_AVATARS', + EnableDashboardScreenshotEndpoints = 'ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS', + EnableDashboardDownloadWebDriverScreenshot = 'ENABLE_DASHBOARD_DOWNLOAD_WEBDRIVER_SCREENSHOT', } export type ScheduleQueriesProps = { @@ -93,7 +97,8 @@ export function initFeatureFlags(featureFlags?: FeatureFlagMap) { export function isFeatureEnabled(feature: FeatureFlag): boolean { try { - return !!window.featureFlags[feature]; + const flags = window.featureFlags as Record<string, unknown>; + return !!flags[feature]; } catch (error) { logger.error(`Failed to query feature flag ${feature}`); } diff --git a/superset-frontend/packages/superset-ui-core/src/utils/lruCache.ts b/superset-frontend/packages/superset-ui-core/src/utils/lruCache.ts index f6785850c22a..e92005986aa3 100644 --- a/superset-frontend/packages/superset-ui-core/src/utils/lruCache.ts +++ b/superset-frontend/packages/superset-ui-core/src/utils/lruCache.ts @@ -67,6 +67,10 @@ class LRUCache<T> { public get size() { return this.cache.size; } + + public values(): T[] { + return [...this.cache.values()]; + } } export function lruCache<T>(capacity = 100) { diff --git a/superset-frontend/packages/superset-ui-core/test/color/CategoricalColorNameSpace.test.ts b/superset-frontend/packages/superset-ui-core/test/color/CategoricalColorNameSpace.test.ts index 69fb38eea3f3..014be2548644 100644 --- a/superset-frontend/packages/superset-ui-core/test/color/CategoricalColorNameSpace.test.ts +++ b/superset-frontend/packages/superset-ui-core/test/color/CategoricalColorNameSpace.test.ts @@ -161,4 +161,63 @@ 
describe('CategoricalColorNamespace', () => { expect(color).toBe(color2); }); }); + describe('statis resetColorsForLabels(labels)', () => { + it('removes specified labels from forcedItems', () => { + const namespace = getNamespace('test-reset-individual'); + namespace.setColor('label1', 'red'); + namespace.setColor('label2', 'blue'); + namespace.resetColorsForLabels(['label1']); + + expect(namespace.forcedItems).toMatchObject({ label2: 'blue' }); + }); + it('does not modify forcedItems if no labels are provided', () => { + const namespace = getNamespace('test-reset-individual'); + namespace.setColor('label1', 'red'); + namespace.setColor('label2', 'blue'); + namespace.resetColorsForLabels(); + + expect(namespace.forcedItems).toMatchObject({ + label1: 'red', + label2: 'blue', + }); + }); + it('does nothing if the label is not in forcedItems', () => { + const namespace = getNamespace('test-reset-individual'); + namespace.setColor('label1', 'red'); + namespace.resetColorsForLabels(['label2']); // label2 doesn't exist + + expect(namespace.forcedItems).toMatchObject({ label1: 'red' }); + }); + it('removes all labels when all are provided', () => { + const namespace = getNamespace('test-reset-individual'); + namespace.setColor('label1', 'red'); + namespace.setColor('label2', 'blue'); + namespace.resetColorsForLabels(['label1', 'label2']); + + expect(namespace.forcedItems).toMatchObject({}); + }); + it('creates a deep copy of forcedItems before modifying', () => { + const namespace = getNamespace('test-reset-individual'); + namespace.setColor('label1', 'red'); + + const originalForcedItems = namespace.forcedItems; + namespace.resetColorsForLabels(['label1']); + + expect(originalForcedItems).not.toBe(namespace.forcedItems); + }); + it('removes the label if it exists in updatedForcedItems', () => { + const namespace = getNamespace('test-reset-individual'); + namespace.setColor('label1', 'red'); + namespace.resetColorsForLabels(['label1']); + + 
expect(namespace.forcedItems).toEqual({}); + }); + it('does nothing for a label not in updatedForcedItems', () => { + const namespace = getNamespace('test-reset-individual'); + namespace.setColor('label1', 'red'); + namespace.resetColorsForLabels(['label2']); // label2 doesn't exist + + expect(namespace.forcedItems).toEqual({ label1: 'red' }); + }); + }); }); diff --git a/superset-frontend/packages/superset-ui-core/test/color/CategoricalColorScale.test.ts b/superset-frontend/packages/superset-ui-core/test/color/CategoricalColorScale.test.ts index 97d756cb0432..9ba4bcc5b01a 100644 --- a/superset-frontend/packages/superset-ui-core/test/color/CategoricalColorScale.test.ts +++ b/superset-frontend/packages/superset-ui-core/test/color/CategoricalColorScale.test.ts @@ -18,7 +18,11 @@ */ import { ScaleOrdinal } from 'd3-scale'; -import { CategoricalColorScale, FeatureFlag } from '@superset-ui/core'; +import { + CategoricalColorScale, + FeatureFlag, + LabelsColorMapSource, +} from '@superset-ui/core'; describe('CategoricalColorScale', () => { beforeEach(() => { @@ -43,7 +47,6 @@ describe('CategoricalColorScale', () => { expect(scale).toBeInstanceOf(CategoricalColorScale); expect(scale.forcedColors).toBe(forcedColors); }); - it('can refer to colors based on their index', () => { const forcedColors = { pig: 1, horse: 5 }; const scale = new CategoricalColorScale( @@ -67,7 +70,7 @@ describe('CategoricalColorScale', () => { >; let getNextAvailableColorSpy: jest.SpyInstance< string, - [currentColor: string] + [currentLabel: string, currentColor: string] >; beforeEach(() => { @@ -83,6 +86,36 @@ describe('CategoricalColorScale', () => { jest.restoreAllMocks(); }); + it('uses labelsColorMapInstance color map when source is Dashboard, otherwise uses chartLabelsColorMap', () => { + const sliceId = 123; + const colorScheme = 'preset'; + + // Mock chartLabelsColorMap and labelsColorMapInstance's getColorMap + const chartColorMap = new Map([['testValueChart', 'chartColor']]); + const 
dashboardColorMap = new Map([['testValueDash', 'dashboardColor']]); + scale.chartLabelsColorMap = chartColorMap; + jest + .spyOn(scale.labelsColorMapInstance, 'getColorMap') + .mockReturnValue(dashboardColorMap); + + // Test when source is Dashboard + scale.labelsColorMapInstance.source = LabelsColorMapSource.Dashboard; + const colorFromDashboard = scale.getColor( + 'testValueDash', + sliceId, + colorScheme, + ); + expect(colorFromDashboard).toBe('dashboardColor'); + + // Test when source is not Dashboard + scale.labelsColorMapInstance.source = LabelsColorMapSource.Explore; + const colorFromChart = scale.getColor( + 'testValueChart', + sliceId, + colorScheme, + ); + expect(colorFromChart).toBe('chartColor'); + }); it('returns same color for same value', () => { const scale = new CategoricalColorScale(['blue', 'red', 'green'], { pig: 'red', @@ -177,7 +210,10 @@ describe('CategoricalColorScale', () => { scale.getColor('testValue3'); scale.getColor('testValue4'); - expect(getNextAvailableColorSpy).toHaveBeenCalledWith('blue'); + expect(getNextAvailableColorSpy).toHaveBeenCalledWith( + 'testValue4', + 'blue', + ); getNextAvailableColorSpy.mockClear(); @@ -289,23 +325,25 @@ describe('CategoricalColorScale', () => { }); }); - describe('.getNextAvailableColor(currentColor)', () => { + describe('.getNextAvailableColor(currentLabel, currentColor)', () => { it('returns the current color if it is the least used or equally used among colors', () => { const scale = new CategoricalColorScale(['blue', 'red', 'green']); scale.getColor('cat'); scale.getColor('dog'); // Since 'green' hasn't been used, it's considered the least used. 
- expect(scale.getNextAvailableColor('blue')).toBe('green'); + expect(scale.getNextAvailableColor('fish', 'blue')).toBe('green'); }); - it('handles cases where all colors are equally used and returns the current color', () => { + it('returns the least used color among all', () => { const scale = new CategoricalColorScale(['blue', 'red', 'green']); scale.getColor('cat'); // blue scale.getColor('dog'); // red scale.getColor('fish'); // green - // All colors used once, so the function should return the current color - expect(scale.getNextAvailableColor('red')).toBe('red'); + scale.getColor('puppy'); // blue + scale.getColor('teddy'); // red + // All colors used, so the function should return least used + expect(scale.getNextAvailableColor('darling', 'red')).toBe('green'); }); it('returns the least used color accurately even when some colors are used more frequently', () => { @@ -324,7 +362,57 @@ describe('CategoricalColorScale', () => { scale.getColor('pony'); // green // Yellow is the least used color, so it should be returned. - expect(scale.getNextAvailableColor('blue')).toBe('yellow'); + expect(scale.getNextAvailableColor('pony', 'blue')).toBe('yellow'); + }); + it('does not return adjacent colors if a non-adjacent color is equally used', () => { + const scale = new CategoricalColorScale(['blue', 'red', 'green']); + scale.chartLabelsColorMap.set('label1', 'red'); // Adjacent + scale.chartLabelsColorMap.set('label2', 'blue'); // currentLabel + scale.chartLabelsColorMap.set('label3', 'green'); // Adjacent + + // Green and blue are equally used, but green is adjacent and penalized. 
+ expect(scale.getNextAvailableColor('label2', 'blue')).toBe('blue'); + }); + it('prioritizes a color that has never been used, even if there are adjacent colors', () => { + const scale = new CategoricalColorScale(['blue', 'red', 'green']); + scale.getColor('cat'); // blue + scale.getColor('dog'); // red + + scale.chartLabelsColorMap.set('label1', 'red'); + scale.chartLabelsColorMap.set('label2', 'blue'); // currentLabel + + // Green has never been used, so it is prioritized. + expect(scale.getNextAvailableColor('label2', 'blue')).toBe('green'); + }); + it('returns the least used or unused color when there are no adjacent labels', () => { + const scale = new CategoricalColorScale(['blue', 'red', 'green']); + scale.getColor('cat'); // blue + scale.getColor('dog'); // red + + // No adjacent labels are defined in chartLabelsColorMap. + expect(scale.getNextAvailableColor('label2', 'green')).toBe('green'); + }); + it('handles colors that have never been used (fallback to usage count 0)', () => { + const scale = new CategoricalColorScale(['blue', 'red', 'green']); + + // Do not use "green" at all + scale.getColor('cat'); // blue + scale.getColor('dog'); // red + + // "green" has never been used, so usageCount for "green" should fallback to 0 + expect(scale.getNextAvailableColor('label2', 'red')).toBe('green'); + }); + it('handles a color with an explicit usage count of 0', () => { + const scale = new CategoricalColorScale(['blue', 'red', 'green']); + + // Mock or override getColorUsageCount to return 0 for "blue" + jest.spyOn(scale, 'getColorUsageCount').mockImplementation(color => { + if (color === 'blue') return 0; // Explicitly return 0 for "blue" + return 1; // Return 1 for other colors + }); + + // "blue" should still be a valid option with a usage count of 0 + expect(scale.getNextAvailableColor('label1', 'red')).toBe('blue'); }); }); diff --git a/superset-frontend/packages/superset-ui-core/test/color/LabelsColorMapSingleton.test.ts 
b/superset-frontend/packages/superset-ui-core/test/color/LabelsColorMapSingleton.test.ts index b93a416e7ffa..d794f16162a9 100644 --- a/superset-frontend/packages/superset-ui-core/test/color/LabelsColorMapSingleton.test.ts +++ b/superset-frontend/packages/superset-ui-core/test/color/LabelsColorMapSingleton.test.ts @@ -53,7 +53,7 @@ describe('LabelsColorMap', () => { beforeEach(() => { getLabelsColorMap().source = LabelsColorMapSource.Dashboard; - getLabelsColorMap().clear(); + getLabelsColorMap().reset(); }); it('has default value out-of-the-box', () => { @@ -92,11 +92,17 @@ describe('LabelsColorMap', () => { expect(Object.fromEntries(colorMap)).toEqual({ b: 'green' }); }); - it('should do nothing when source is not dashboard', () => { + it('should set a new color only when source is dashboard', () => { const labelsColorMap = getLabelsColorMap(); labelsColorMap.source = LabelsColorMapSource.Explore; labelsColorMap.addSlice('a', 'red', 1); - expect(Object.fromEntries(labelsColorMap.chartsLabelsMap)).toEqual({}); + const colorMap = labelsColorMap.getColorMap(); + expect(Object.fromEntries(colorMap)).toEqual({}); + + labelsColorMap.source = LabelsColorMapSource.Dashboard; + labelsColorMap.addSlice('a', 'red', 1); + const colorMap2 = labelsColorMap.getColorMap(); + expect(Object.fromEntries(colorMap2)).toEqual({ a: 'red' }); }); }); @@ -126,7 +132,7 @@ describe('LabelsColorMap', () => { }); }); - describe('.updateColorMap(namespace, scheme)', () => { + describe('.updateColorMap(namespace, scheme, merge)', () => { let categoricalNamespace: any; let mockedNamespace: any; let labelsColorMap: any; @@ -141,18 +147,24 @@ describe('LabelsColorMap', () => { }; }); + it('should clear color map when not merge', () => { + labelsColorMap.addSlice('a', 'red', 1); + labelsColorMap.updateColorMap(mockedNamespace, 'testColors2', false); + expect(labelsColorMap.colorMap).toEqual(new Map([['a', 'mockColor']])); + }); + + it('should not clear color map when merge', () => { + 
labelsColorMap.addSlice('a', 'red', 1); + labelsColorMap.updateColorMap(mockedNamespace, 'testColors2', true); + expect(labelsColorMap.colorMap).not.toEqual(new Map()); + }); + it('should use provided color scheme', () => { labelsColorMap.addSlice('a', 'red', 1); labelsColorMap.updateColorMap(mockedNamespace, 'testColors2'); expect(mockedNamespace.getScale).toHaveBeenCalledWith('testColors2'); }); - it('should fallback to original chart color scheme if no color scheme is provided', () => { - labelsColorMap.addSlice('a', 'red', 1, 'originalScheme'); - labelsColorMap.updateColorMap(mockedNamespace); - expect(mockedNamespace.getScale).toHaveBeenCalledWith('originalScheme'); - }); - it('should fallback to undefined if no color scheme is provided', () => { labelsColorMap.addSlice('a', 'red', 1); labelsColorMap.addSlice('b', 'blue', 2); @@ -181,6 +193,23 @@ describe('LabelsColorMap', () => { }); }); + it('should update only new labels in the color map when merge', () => { + labelsColorMap.colorMap = new Map(); + + labelsColorMap.addSlice('a', 'yellow', 1); + labelsColorMap.addSlice('b', 'green', 1); + labelsColorMap.addSlice('c', 'purple', 1); + + labelsColorMap.updateColorMap(categoricalNamespace, 'testColors2', true); + + const mergedColorMap = labelsColorMap.getColorMap(); + expect(Object.fromEntries(mergedColorMap)).toEqual({ + a: 'yellow', + b: 'green', + c: 'purple', + }); + }); + it('should use recycle colors', () => { window.featureFlags = { [FeatureFlag.UseAnalagousColors]: false, @@ -231,4 +260,47 @@ describe('LabelsColorMap', () => { expect(Object.fromEntries(colorMap)).toEqual({}); }); }); + + describe('setOwnColorScheme(sliceId, ownScheme)', () => { + it('should update the scheme in the config', () => { + const labelsColorMap = getLabelsColorMap(); + labelsColorMap.source = LabelsColorMapSource.Explore; + const sliceId = 1; + const initialConfig = { labels: ['initial config'] }; + + labelsColorMap.chartsLabelsMap = new Map(); + 
labelsColorMap.chartsLabelsMap.set(sliceId, initialConfig); + + labelsColorMap.setOwnColorScheme(sliceId, 'newScheme'); + + expect(labelsColorMap.chartsLabelsMap.get(sliceId)).toEqual({ + ...initialConfig, + ownScheme: 'newScheme', + }); + }); + it('should update ownScheme when source is not Explore', () => { + const labelsColorMap = getLabelsColorMap(); + labelsColorMap.source = LabelsColorMapSource.Dashboard; + const sliceId = 1; + const initialConfig = { labels: ['initial config'] }; + + labelsColorMap.chartsLabelsMap = new Map(); + labelsColorMap.chartsLabelsMap.set(sliceId, initialConfig); + + labelsColorMap.setOwnColorScheme(sliceId, 'newScheme'); + + expect(labelsColorMap.chartsLabelsMap.get(sliceId)).toEqual({ + ...initialConfig, + ownScheme: 'newScheme', + }); + }); + it('should do nothing when chart config does not exist', () => { + const labelsColorMap = getLabelsColorMap(); + labelsColorMap.source = LabelsColorMapSource.Explore; + const sliceId = 1; + + labelsColorMap.setOwnColorScheme(sliceId, 'newScheme'); + expect(labelsColorMap.chartsLabelsMap.get(sliceId)).toEqual(undefined); + }); + }); }); diff --git a/superset-frontend/packages/superset-ui-core/test/connection/callApi/parseResponse.test.ts b/superset-frontend/packages/superset-ui-core/test/connection/callApi/parseResponse.test.ts index b08b5b8cb80c..29f3ab4fe574 100644 --- a/superset-frontend/packages/superset-ui-core/test/connection/callApi/parseResponse.test.ts +++ b/superset-frontend/packages/superset-ui-core/test/connection/callApi/parseResponse.test.ts @@ -141,7 +141,7 @@ describe('parseResponse()', () => { const mockBigIntUrl = '/mock/get/bigInt'; const mockGetBigIntPayload = `{ "value": 9223372036854775807, "minus": { "value": -483729382918228373892, "str": "something" }, - "number": 1234, "floatValue": { "plus": 0.3452211361231223, "minus": -0.3452211361231223 }, + "number": 1234, "floatValue": { "plus": 0.3452211361231223, "minus": -0.3452211361231223, "even": 1234567890123456.0000000 
}, "string.constructor": "data.constructor", "constructor": "constructor" }`; @@ -159,6 +159,7 @@ describe('parseResponse()', () => { expect(responseBigNumber.json.floatValue.minus).toEqual( -0.3452211361231223, ); + expect(responseBigNumber.json.floatValue.even).toEqual(1234567890123456); expect( responseBigNumber.json.floatValue.plus + responseBigNumber.json.floatValue.minus, diff --git a/superset-frontend/packages/superset-ui-core/test/query/types/Dashboard.test.ts b/superset-frontend/packages/superset-ui-core/test/query/types/Dashboard.test.ts index 79d798094086..c1c714395779 100644 --- a/superset-frontend/packages/superset-ui-core/test/query/types/Dashboard.test.ts +++ b/superset-frontend/packages/superset-ui-core/test/query/types/Dashboard.test.ts @@ -24,6 +24,10 @@ import { FilterWithDataMask, Divider, isNativeFilterWithDataMask, + isAppliedCrossFilterType, + isAppliedNativeFilterType, + AppliedCrossFilterType, + AppliedNativeFilterType, } from '@superset-ui/core'; const filter: Filter = { @@ -51,6 +55,20 @@ const filterDivider: Divider = { description: 'Divider description.', }; +const appliedCrossFilter: AppliedCrossFilterType = { + filterType: undefined, + targets: [1, 2], + scope: [1, 2], + values: null, +}; + +const appliedNativeFilter: AppliedNativeFilterType = { + filterType: 'filter_select', + scope: [1, 2], + targets: [{}], + values: null, +}; + test('filter type guard', () => { expect(isNativeFilter(filter)).toBeTruthy(); expect(isNativeFilter(filterWithDataMask)).toBeTruthy(); @@ -68,3 +86,13 @@ test('filter divider type guard', () => { expect(isFilterDivider(filterWithDataMask)).toBeFalsy(); expect(isFilterDivider(filterDivider)).toBeTruthy(); }); + +test('applied cross filter type guard', () => { + expect(isAppliedCrossFilterType(appliedCrossFilter)).toBeTruthy(); + expect(isAppliedCrossFilterType(appliedNativeFilter)).toBeFalsy(); +}); + +test('applied native filter type guard', () => { + 
expect(isAppliedNativeFilterType(appliedNativeFilter)).toBeTruthy(); + expect(isAppliedNativeFilterType(appliedCrossFilter)).toBeFalsy(); +}); diff --git a/superset-frontend/packages/superset-ui-core/test/time-comparison/customTimeRangeDecode.test.ts b/superset-frontend/packages/superset-ui-core/test/time-comparison/customTimeRangeDecode.test.ts index b3e86166e826..d5342098a577 100644 --- a/superset-frontend/packages/superset-ui-core/test/time-comparison/customTimeRangeDecode.test.ts +++ b/superset-frontend/packages/superset-ui-core/test/time-comparison/customTimeRangeDecode.test.ts @@ -151,14 +151,14 @@ describe('customTimeRangeDecode', () => { it('7) default', () => { const SEVEN_DAYS_AGO = new Date(); const MIDNIGHT = new Date(); - SEVEN_DAYS_AGO.setUTCHours(0, 0, 0, 0); - MIDNIGHT.setUTCHours(0, 0, 0, 0); + SEVEN_DAYS_AGO.setHours(0, 0, 0, 0); + MIDNIGHT.setHours(0, 0, 0, 0); expect( customTimeRangeDecode('now : DATEADD(DATETIME("TODAY"), -7, day)'), ).toEqual({ customRange: { - sinceDatetime: SEVEN_DAYS_AGO.setUTCDate( - SEVEN_DAYS_AGO.getUTCDate() - 7, + sinceDatetime: SEVEN_DAYS_AGO.setDate( + SEVEN_DAYS_AGO.getDate() - 7, ).toString(), sinceMode: 'relative', sinceGrain: 'day', @@ -176,18 +176,18 @@ describe('customTimeRangeDecode', () => { it('8) relative : relative return default', () => { const SEVEN_DAYS_AGO = new Date(); - SEVEN_DAYS_AGO.setUTCHours(0, 0, 0, 0); + SEVEN_DAYS_AGO.setHours(0, 0, 0, 0); const MIDNIGHT = new Date(); - MIDNIGHT.setUTCHours(0, 0, 0, 0); + MIDNIGHT.setHours(0, 0, 0, 0); expect( customTimeRangeDecode( 'DATEADD(DATETIME("2021-01-26T00:00:00"), -55, day) : DATEADD(DATETIME("2021-01-27T00:00:00"), 7, day)', ), ).toEqual({ customRange: { - sinceDatetime: SEVEN_DAYS_AGO.setUTCDate( - SEVEN_DAYS_AGO.getUTCDate() - 7, + sinceDatetime: SEVEN_DAYS_AGO.setDate( + SEVEN_DAYS_AGO.getDate() - 7, ).toString(), sinceMode: 'relative', sinceGrain: 'day', diff --git a/superset-frontend/packages/superset-ui-core/test/utils/lruCache.test.ts 
b/superset-frontend/packages/superset-ui-core/test/utils/lruCache.test.ts index f8a077eba031..2c7f1fafa404 100644 --- a/superset-frontend/packages/superset-ui-core/test/utils/lruCache.test.ts +++ b/superset-frontend/packages/superset-ui-core/test/utils/lruCache.test.ts @@ -35,8 +35,11 @@ test('LRU operations', () => { expect(cache.size).toBe(3); expect(cache.has('1')).toBeFalsy(); expect(cache.get('1')).toBeUndefined(); + expect(cache.values()).toEqual(['b', 'c', 'd']); cache.get('2'); + expect(cache.values()).toEqual(['c', 'd', 'b']); cache.set('5', 'e'); + expect(cache.values()).toEqual(['d', 'b', 'e']); expect(cache.has('2')).toBeTruthy(); expect(cache.has('3')).toBeFalsy(); // @ts-expect-error @@ -44,6 +47,7 @@ test('LRU operations', () => { // @ts-expect-error expect(() => cache.get(0)).toThrow(TypeError); expect(cache.size).toBe(3); + expect(cache.values()).toEqual(['d', 'b', 'e']); cache.clear(); expect(cache.size).toBe(0); expect(cache.capacity).toBe(3); diff --git a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/plugin-chart-word-cloud/WordCloud.stories.tsx b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/plugin-chart-word-cloud/WordCloud.stories.tsx index 82f6ffedda4e..ca6d36357e20 100644 --- a/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/plugin-chart-word-cloud/WordCloud.stories.tsx +++ b/superset-frontend/packages/superset-ui-demo/storybook/stories/plugins/plugin-chart-word-cloud/WordCloud.stories.tsx @@ -17,26 +17,12 @@ * under the License. 
*/ -import { SuperChart, getChartTransformPropsRegistry } from '@superset-ui/core'; -import { - WordCloudChartPlugin, - LegacyWordCloudChartPlugin, - WordCloudTransformProps, -} from '@superset-ui/plugin-chart-word-cloud'; +import { SuperChart } from '@superset-ui/core'; +import { WordCloudChartPlugin } from '@superset-ui/plugin-chart-word-cloud'; import { withResizableChartDemo } from '../../../shared/components/ResizableChartDemo'; import data from './data'; new WordCloudChartPlugin().configure({ key: 'word-cloud2' }).register(); -new LegacyWordCloudChartPlugin() - .configure({ key: 'legacy-word-cloud2' }) - .register(); - -// Enable the new WordCloud Props to show case its full features -// if the control panel is updated to be able to pass formData in the new format. -getChartTransformPropsRegistry().registerValue( - 'word-cloud2', - WordCloudTransformProps, -); export default { title: 'Chart Plugins/plugin-chart-word-cloud', diff --git a/superset-frontend/plugins/legacy-plugin-chart-chord/src/Chord.js b/superset-frontend/plugins/legacy-plugin-chart-chord/src/Chord.js index 2daed05f47f1..1d5ed45683fb 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-chord/src/Chord.js +++ b/superset-frontend/plugins/legacy-plugin-chart-chord/src/Chord.js @@ -93,7 +93,7 @@ function Chord(element, props) { .append('path') .attr('id', (d, i) => `group${i}`) .attr('d', arc) - .style('fill', (d, i) => colorFn(nodes[i], sliceId, colorScheme)); + .style('fill', (d, i) => colorFn(nodes[i], sliceId)); // Add a text label. const groupText = group.append('text').attr('x', 6).attr('dy', 15); @@ -121,7 +121,7 @@ function Chord(element, props) { .on('mouseover', d => { chord.classed('fade', p => p !== d); }) - .style('fill', d => colorFn(nodes[d.source.index], sliceId, colorScheme)) + .style('fill', d => colorFn(nodes[d.source.index], sliceId)) .attr('d', path); // Add an elaborate mouseover title for each chord. 
@@ -130,9 +130,9 @@ function Chord(element, props) { .text( d => `${nodes[d.source.index]} → ${nodes[d.target.index]}: ${f( - d.source.value, - )}\n${nodes[d.target.index]} → ${nodes[d.source.index]}: ${f( d.target.value, + )}\n${nodes[d.target.index]} → ${nodes[d.source.index]}: ${f( + d.source.value, )}`, ); } diff --git a/superset-frontend/plugins/legacy-plugin-chart-country-map/src/CountryMap.js b/superset-frontend/plugins/legacy-plugin-chart-country-map/src/CountryMap.js index 61ca6cc2fe76..b1cf5016dcf7 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-country-map/src/CountryMap.js +++ b/superset-frontend/plugins/legacy-plugin-chart-country-map/src/CountryMap.js @@ -37,6 +37,7 @@ const propTypes = { width: PropTypes.number, height: PropTypes.number, country: PropTypes.string, + colorScheme: PropTypes.string, linearColorScheme: PropTypes.string, mapBaseUrl: PropTypes.string, numberFormat: PropTypes.string, diff --git a/superset-frontend/plugins/legacy-plugin-chart-country-map/src/transformProps.js b/superset-frontend/plugins/legacy-plugin-chart-country-map/src/transformProps.js index 8789c3d2f34f..8d8d67fd9e65 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-country-map/src/transformProps.js +++ b/superset-frontend/plugins/legacy-plugin-chart-country-map/src/transformProps.js @@ -26,11 +26,38 @@ export default function transformProps(chartProps) { sliceId, } = formData; + // Minimal URL override: allow ?country_code=IN|US|UK|GB to override selected country + let countryFromForm = selectCountry + ? 
String(selectCountry).toLowerCase() + : null; + + try { + const params = new URLSearchParams(window.location.search); + const urlCountryCode = params.get('country_code'); + if (urlCountryCode) { + const cc = String(urlCountryCode).toUpperCase(); + const alpha2ToPluginKey = { + US: 'usa', + IN: 'india', + ID: 'indonesia', + AE: 'united_arab_emirates', + UK: 'uk', + }; + if (alpha2ToPluginKey[cc]) { + countryFromForm = alpha2ToPluginKey[cc]; + } + } + } catch (e) { + // no-op if URL parsing fails or window is unavailable + } + + // countryFromForm contains final selection, optionally overridden via URL + return { width, height, data: queriesData[0].data, - country: selectCountry ? String(selectCountry).toLowerCase() : null, + country: countryFromForm, linearColorScheme, numberFormat, colorScheme, diff --git a/superset-frontend/plugins/legacy-plugin-chart-histogram/src/Histogram.jsx b/superset-frontend/plugins/legacy-plugin-chart-histogram/src/Histogram.jsx index 0af5f8bf776b..c14b83c1ca84 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-histogram/src/Histogram.jsx +++ b/superset-frontend/plugins/legacy-plugin-chart-histogram/src/Histogram.jsx @@ -73,12 +73,11 @@ class CustomHistogram extends PureComponent { showLegend, sliceId, } = this.props; - const colorFn = CategoricalColorNamespace.getScale(colorScheme); const keys = data.map(d => d.key); const colorScale = scaleOrdinal({ domain: keys, - range: keys.map(x => colorFn(x, sliceId, colorScheme)), + range: keys.map(x => colorFn(x, sliceId)), }); return ( diff --git a/superset-frontend/plugins/legacy-plugin-chart-horizon/src/controlPanel.ts b/superset-frontend/plugins/legacy-plugin-chart-horizon/src/controlPanel.ts index c96b2f0ed7f3..51a43a450e11 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-horizon/src/controlPanel.ts +++ b/superset-frontend/plugins/legacy-plugin-chart-horizon/src/controlPanel.ts @@ -24,6 +24,12 @@ import { const config: ControlPanelConfig = { controlPanelSections: [ + { + 
label: t('Time'), + expanded: true, + description: t('Time related form attributes'), + controlSetRows: [['granularity_sqla'], ['time_range']], + }, { label: t('Query'), expanded: true, diff --git a/superset-frontend/plugins/legacy-plugin-chart-partition/src/Partition.js b/superset-frontend/plugins/legacy-plugin-chart-partition/src/Partition.js index e0ebd559a95d..22470616680b 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-partition/src/Partition.js +++ b/superset-frontend/plugins/legacy-plugin-chart-partition/src/Partition.js @@ -384,7 +384,7 @@ function Icicle(element, props) { // Apply color scheme g.selectAll('rect').style('fill', d => { - d.color = colorFn(d.name, sliceId, colorScheme); + d.color = colorFn(d.name, sliceId); return d.color; }); diff --git a/superset-frontend/plugins/legacy-plugin-chart-rose/src/Rose.js b/superset-frontend/plugins/legacy-plugin-chart-rose/src/Rose.js index e54fc0b6c542..93d402cb61be 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-rose/src/Rose.js +++ b/superset-frontend/plugins/legacy-plugin-chart-rose/src/Rose.js @@ -46,6 +46,7 @@ const propTypes = { numberFormat: PropTypes.string, useRichTooltip: PropTypes.bool, useAreaProportions: PropTypes.bool, + colorScheme: PropTypes.string, }; function copyArc(d) { @@ -120,14 +121,14 @@ function Rose(element, props) { .map(v => ({ key: v.name, value: v.value, - color: colorFn(v.name, sliceId, colorScheme), + color: colorFn(v.name, sliceId), highlight: v.id === d.arcId, })) : [ { key: d.name, value: d.val, - color: colorFn(d.name, sliceId, colorScheme), + color: colorFn(d.name, sliceId), }, ]; @@ -138,7 +139,7 @@ function Rose(element, props) { }; } - legend.width(width).color(d => colorFn(d.key, sliceId, colorScheme)); + legend.width(width).color(d => colorFn(d.key, sliceId)); legendWrap.datum(legendData(datum)).call(legend); tooltip.headerFormatter(timeFormat).valueFormatter(format); @@ -385,7 +386,7 @@ function Rose(element, props) { const arcs = ae 
.append('path') .attr('class', 'arc') - .attr('fill', d => colorFn(d.name, sliceId, colorScheme)) + .attr('fill', d => colorFn(d.name, sliceId)) .attr('d', arc); function mousemove() { diff --git a/superset-frontend/plugins/legacy-plugin-chart-sankey-loop/src/SankeyLoop.js b/superset-frontend/plugins/legacy-plugin-chart-sankey-loop/src/SankeyLoop.js index 00f47ada2666..c9fe27eb2351 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-sankey-loop/src/SankeyLoop.js +++ b/superset-frontend/plugins/legacy-plugin-chart-sankey-loop/src/SankeyLoop.js @@ -83,7 +83,7 @@ function computeGraph(links) { function SankeyLoop(element, props) { const { data, width, height, colorScheme, sliceId } = props; - const color = CategoricalColorNamespace.getScale(colorScheme); + const colorFn = CategoricalColorNamespace.getScale(colorScheme); const margin = { ...defaultMargin, ...props.margin }; const innerWidth = width - margin.left - margin.right; const innerHeight = height - margin.top - margin.bottom; @@ -107,7 +107,7 @@ function SankeyLoop(element, props) { value / sValue, )})`, ) - .linkColor(d => color(d.source.name, sliceId)); + .linkColor(d => colorFn(d.source.name, sliceId)); const div = select(element); div.selectAll('*').remove(); diff --git a/superset-frontend/plugins/legacy-plugin-chart-sankey/src/Sankey.js b/superset-frontend/plugins/legacy-plugin-chart-sankey/src/Sankey.js index 0639edad45c0..a38142c56468 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-sankey/src/Sankey.js +++ b/superset-frontend/plugins/legacy-plugin-chart-sankey/src/Sankey.js @@ -67,7 +67,6 @@ function Sankey(element, props) { .attr('height', innerHeight + margin.top + margin.bottom) .append('g') .attr('transform', `translate(${margin.left},${margin.top})`); - const colorFn = CategoricalColorNamespace.getScale(colorScheme); const sankey = d3Sankey() @@ -219,7 +218,7 @@ function Sankey(element, props) { .attr('width', sankey.nodeWidth()) .style('fill', d => { const name = d.name || 'N/A'; 
- d.color = colorFn(name, sliceId, colorScheme); + d.color = colorFn(name, sliceId); return d.color; }) diff --git a/superset-frontend/plugins/legacy-plugin-chart-world-map/src/WorldMap.js b/superset-frontend/plugins/legacy-plugin-chart-world-map/src/WorldMap.js index 6b69c6b2d8c3..03ea4ea9c78b 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-world-map/src/WorldMap.js +++ b/superset-frontend/plugins/legacy-plugin-chart-world-map/src/WorldMap.js @@ -43,6 +43,7 @@ const propTypes = { showBubbles: PropTypes.bool, linearColorScheme: PropTypes.string, color: PropTypes.string, + colorScheme: PropTypes.string, setDataMask: PropTypes.func, onContextMenu: PropTypes.func, emitCrossFilters: PropTypes.bool, @@ -85,24 +86,24 @@ function WorldMap(element, props) { .range([1, maxBubbleSize]); let processedData; - let colorScale; + let colorFn; if (colorBy === ColorBy.Country) { - colorScale = CategoricalColorNamespace.getScale(colorScheme); + colorFn = CategoricalColorNamespace.getScale(colorScheme); processedData = filteredData.map(d => ({ ...d, radius: radiusScale(Math.sqrt(d.m2)), - fillColor: colorScale(d.name, sliceId), + fillColor: colorFn(d.name, sliceId), })); } else { - colorScale = getSequentialSchemeRegistry() + colorFn = getSequentialSchemeRegistry() .get(linearColorScheme) .createLinearScale(d3Extent(filteredData, d => d.m1)); processedData = filteredData.map(d => ({ ...d, radius: radiusScale(Math.sqrt(d.m2)), - fillColor: colorScale(d.m1), + fillColor: colorFn(d.m1), })); } diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/CategoricalDeckGLContainer.tsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/CategoricalDeckGLContainer.tsx index 7dff2af2214a..208ca6e0f5f3 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/CategoricalDeckGLContainer.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/CategoricalDeckGLContainer.tsx @@ -52,16 +52,14 @@ const { getScale } = CategoricalColorNamespace; 
function getCategories(fd: QueryFormData, data: JsonObject[]) { const c = fd.color_picker || { r: 0, g: 0, b: 0, a: 1 }; const fixedColor = [c.r, c.g, c.b, 255 * c.a]; - const colorFn = getScale(fd.color_scheme); + const appliedScheme = fd.color_scheme; + const colorFn = getScale(appliedScheme); const categories = {}; data.forEach(d => { if (d.cat_color != null && !categories.hasOwnProperty(d.cat_color)) { let color; if (fd.dimension) { - color = hexToRGB( - colorFn(d.cat_color, fd.sliceId, fd.color_scheme), - c.a * 255, - ); + color = hexToRGB(colorFn(d.cat_color, fd.sliceId), c.a * 255); } else { color = fixedColor; } @@ -132,15 +130,13 @@ const CategoricalDeckGLContainer = (props: CategoricalDeckGLContainerProps) => { const addColor = useCallback((data: JsonObject[], fd: QueryFormData) => { const c = fd.color_picker || { r: 0, g: 0, b: 0, a: 1 }; - const colorFn = getScale(fd.color_scheme); + const appliedScheme = fd.color_scheme; + const colorFn = getScale(appliedScheme); return data.map(d => { let color; if (fd.dimension) { - color = hexToRGB( - colorFn(d.cat_color, fd.sliceId, fd.color_scheme), - c.a * 255, - ); + color = hexToRGB(colorFn(d.cat_color, fd.sliceId), c.a * 255); return { ...d, color }; } diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/components/Legend.tsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/components/Legend.tsx index 2b0a9b40da50..ab8ae78b73ef 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/components/Legend.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/components/Legend.tsx @@ -40,11 +40,13 @@ const StyledLegend = styled.div` margin: 0; & li a { + display: flex; color: ${theme.colors.grayscale.base}; text-decoration: none; + padding: ${theme.gridUnit}px 0; & span { - margin-right: ${theme.gridUnit * 3}px; + margin-right: ${theme.gridUnit}px; } } } diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Grid/Grid.tsx 
b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Grid/Grid.tsx index ee5ae6c85fe0..242a8e065024 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Grid/Grid.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Grid/Grid.tsx @@ -55,7 +55,8 @@ export function getLayer( setTooltip: (tooltip: TooltipProps['tooltip']) => void, ) { const fd = formData; - const colorScale = CategoricalColorNamespace.getScale(fd.color_scheme); + const appliedScheme = fd.color_scheme; + const colorScale = CategoricalColorNamespace.getScale(appliedScheme); const colorRange = colorScale .range() .map(color => hexToRGB(color)) as Color[]; diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Hex/Hex.tsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Hex/Hex.tsx index 84100da7586d..93df7f0b1d26 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Hex/Hex.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Hex/Hex.tsx @@ -54,7 +54,8 @@ export function getLayer( setTooltip: (tooltip: TooltipProps['tooltip']) => void, ) { const fd = formData; - const colorScale = CategoricalColorNamespace.getScale(fd.color_scheme); + const appliedScheme = fd.color_scheme; + const colorScale = CategoricalColorNamespace.getScale(appliedScheme); const colorRange = colorScale .range() .map(color => hexToRGB(color)) as Color[]; diff --git a/superset-frontend/plugins/legacy-preset-chart-nvd3/src/NVD3Vis.js b/superset-frontend/plugins/legacy-preset-chart-nvd3/src/NVD3Vis.js index 06455f16d8b2..a454fe28b464 100644 --- a/superset-frontend/plugins/legacy-preset-chart-nvd3/src/NVD3Vis.js +++ b/superset-frontend/plugins/legacy-preset-chart-nvd3/src/NVD3Vis.js @@ -658,9 +658,7 @@ function nvd3Vis(element, props) { } else if (vizType !== 'bullet') { const colorFn = getScale(colorScheme); chart.color( - d => - d.color || - colorFn(cleanColorInput(d[colorKey]), sliceId, 
colorScheme), + d => d.color || colorFn(cleanColorInput(d[colorKey]), sliceId), ); } diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberTotal/controlPanel.ts b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberTotal/controlPanel.ts index 981fd9fc0adf..b21220dda7f4 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberTotal/controlPanel.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberTotal/controlPanel.ts @@ -27,6 +27,43 @@ import { import { headerFontSize, subheaderFontSize } from '../sharedControls'; export default { + sectionOverrides: { + datasourceAndVizType: { + controlSetRows: [ + ['datasource'], + ['viz_type'], + [ + { + name: 'slice_id', + config: { + type: 'HiddenControl', + label: t('Chart ID'), + hidden: true, + description: t('The id of the active chart'), + }, + }, + { + name: 'cache_timeout', + config: { + type: 'HiddenControl', + label: t('Cache Timeout (seconds)'), + hidden: true, + description: t('The number of seconds before expiring the cache'), + }, + }, + { + name: 'url_params', + config: { + type: 'HiddenControl', + label: t('URL parameters'), + hidden: true, + description: t('Extra parameters for use in jinja templated queries'), + }, + }, + ], + ], + }, + }, controlPanelSections: [ { label: t('Query'), diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberViz.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberViz.tsx index 04c6e74c69e3..82cfa90e8828 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberViz.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberViz.tsx @@ -26,6 +26,7 @@ import { BRAND_COLOR, styled, BinaryQueryObjectFilterClause, + getCurrencySymbol, } from '@superset-ui/core'; import Echart from '../components/Echart'; import { BigNumberVizProps } from './types'; @@ -120,7 +121,7 @@ class BigNumberVis extends 
PureComponent<BigNumberVizProps> { className="kicker" style={{ fontSize, - height: maxHeight, + height: 'auto', }} > {text} @@ -132,7 +133,7 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> { const { bigNumber, headerFormatter, width, colorThresholdFormatters } = this.props; // @ts-ignore - const text = bigNumber === null ? t('No data') : headerFormatter(bigNumber); + const text = bigNumber === null ? '0' : headerFormatter(bigNumber); const hasThresholdColorFormatter = Array.isArray(colorThresholdFormatters) && @@ -156,7 +157,7 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> { document.body.append(container); const fontSize = computeMaxFontSize({ text, - maxWidth: width - 8, // Decrease 8px for more precise font size + maxWidth: width * 0.9, // reduced it's max width maxHeight, className: 'header-line', container, @@ -174,8 +175,10 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> { <div className="header-line" style={{ + display: 'flex', + alignItems: 'center', fontSize, - height: maxHeight, + height: 'auto', color: numberColor, }} onContextMenu={onContextMenu} @@ -189,9 +192,7 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> { const { bigNumber, subheader, width, bigNumberFallback } = this.props; let fontSize = 0; - const NO_DATA_OR_HASNT_LANDED = t( - 'No data after filtering or data is NULL for the latest time record', - ); + const NO_DATA_OR_HASNT_LANDED = t('NO_DATA_OR_HASNT_LANDED'); const NO_DATA = t( 'Try applying different filters or ensuring your datasource has data', ); @@ -204,14 +205,14 @@ class BigNumberVis extends PureComponent<BigNumberVizProps> { document.body.append(container); fontSize = computeMaxFontSize({ text, - maxWidth: width, + maxWidth: width * 0.9, // max width reduced maxHeight, className: 'subheader-line', container, }); container.remove(); - return ( + return text === 'NO_DATA_OR_HASNT_LANDED' ? 
null : ( <div className="subheader-line" style={{ @@ -356,6 +357,7 @@ export default styled(BigNumberVis)` position: relative; line-height: 1em; white-space: nowrap; + margin-bottom:${theme.gridUnit * 2}px; span { position: absolute; bottom: 0; diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/controlPanel.tsx index 869583f847bb..da36b21e6b09 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/controlPanel.tsx @@ -28,6 +28,43 @@ import { import { headerFontSize, subheaderFontSize } from '../sharedControls'; const config: ControlPanelConfig = { + sectionOverrides: { + datasourceAndVizType: { + controlSetRows: [ + ['datasource'], + ['viz_type'], + [ + { + name: 'slice_id', + config: { + type: 'HiddenControl', + label: t('Chart ID'), + hidden: true, + description: t('The id of the active chart'), + }, + }, + { + name: 'cache_timeout', + config: { + type: 'HiddenControl', + label: t('Cache Timeout (seconds)'), + hidden: true, + description: t('The number of seconds before expiring the cache'), + }, + }, + { + name: 'url_params', + config: { + type: 'HiddenControl', + label: t('URL parameters'), + hidden: true, + description: t('Extra parameters for use in jinja templated queries'), + }, + }, + ], + ], + }, + }, controlPanelSections: [ { label: t('Query'), diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/transformProps.ts index 015f0feee55b..d285a551b136 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/transformProps.ts +++ 
b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/BigNumberWithTrendline/transformProps.ts @@ -28,7 +28,7 @@ import { t, tooltipHtml, } from '@superset-ui/core'; -import { EChartsCoreOption, graphic } from 'echarts'; +import { EChartsCoreOption, graphic } from 'echarts/core'; import { BigNumberVizProps, BigNumberDatum, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/types.ts b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/types.ts index 2081460ad1c8..7c4908adac1c 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/types.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BigNumber/types.ts @@ -17,7 +17,7 @@ * under the License. */ -import { EChartsCoreOption } from 'echarts'; +import type { EChartsCoreOption } from 'echarts/core'; import { ChartDataResponseResult, ContextMenuFilters, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/BoxPlot/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/BoxPlot/transformProps.ts index 4f5c8f323f95..fc29f7afe57b 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/BoxPlot/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/BoxPlot/transformProps.ts @@ -23,8 +23,9 @@ import { getNumberFormatter, getTimeFormatter, } from '@superset-ui/core'; -import { EChartsCoreOption, BoxplotSeriesOption } from 'echarts'; -import { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { EChartsCoreOption } from 'echarts/core'; +import type { BoxplotSeriesOption } from 'echarts/charts'; +import type { CallbackDataParams } from 'echarts/types/src/util/types'; import { BoxPlotChartTransformedProps, BoxPlotQueryFormData, @@ -108,9 +109,9 @@ export default function transformProps( datum[`${metric}__outliers`], ], itemStyle: { - color: colorFn(groupbyLabel, sliceId, colorScheme), + color: colorFn(groupbyLabel, sliceId), opacity: isFiltered ? 
OpacityEnum.SemiTransparent : 0.6, - borderColor: colorFn(groupbyLabel, sliceId, colorScheme), + borderColor: colorFn(groupbyLabel, sliceId), }, }; }); @@ -149,7 +150,7 @@ export default function transformProps( }, }, itemStyle: { - color: colorFn(groupbyLabel, sliceId, colorScheme), + color: colorFn(groupbyLabel, sliceId), opacity: isFiltered ? OpacityEnum.SemiTransparent : OpacityEnum.NonTransparent, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/index.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/index.ts index e3761f207e88..e8880dac1a4c 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/index.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/index.ts @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import { Behavior, ChartMetadata, ChartPlugin, t } from '@superset-ui/core'; +import { ChartMetadata, ChartPlugin, t } from '@superset-ui/core'; import thumbnail from './images/thumbnail.png'; import transformProps from './transformProps'; import buildQuery from './buildQuery'; @@ -25,6 +25,7 @@ import example1 from './images/example1.png'; import example2 from './images/example2.png'; import { EchartsBubbleChartProps, EchartsBubbleFormData } from './types'; +// TODO: Implement cross filtering export default class EchartsBubbleChartPlugin extends ChartPlugin< EchartsBubbleFormData, EchartsBubbleChartProps @@ -35,7 +36,6 @@ export default class EchartsBubbleChartPlugin extends ChartPlugin< controlPanel, loadChart: () => import('./EchartsBubble'), metadata: new ChartMetadata({ - behaviors: [Behavior.InteractiveChart], category: t('Correlation'), credits: ['https://echarts.apache.org'], description: t( diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/transformProps.ts index b60ad99161a2..081a48f2a601 100644 --- 
a/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Bubble/transformProps.ts @@ -16,7 +16,8 @@ * specific language governing permissions and limitations * under the License. */ -import { EChartsCoreOption, ScatterSeriesOption } from 'echarts'; +import type { EChartsCoreOption } from 'echarts/core'; +import type { ScatterSeriesOption } from 'echarts/charts'; import { extent } from 'd3-array'; import { CategoricalColorNamespace, @@ -107,8 +108,8 @@ export default function transformProps(chartProps: EchartsBubbleChartProps) { legendOrientation, legendMargin, legendType, + sliceId, }: EchartsBubbleFormData = { ...DEFAULT_FORM_DATA, ...formData }; - const colorFn = CategoricalColorNamespace.getScale(colorScheme as string); const legends = new Set<string>(); @@ -137,7 +138,10 @@ export default function transformProps(chartProps: EchartsBubbleChartProps) { ], ], type: 'scatter', - itemStyle: { color: colorFn(name), opacity }, + itemStyle: { + color: colorFn(name, sliceId), + opacity, + }, }); legends.add(name); }); diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Funnel/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Funnel/transformProps.ts index a0b056933724..cd2a3a0ffbfb 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Funnel/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Funnel/transformProps.ts @@ -27,8 +27,9 @@ import { tooltipHtml, ValueFormatter, } from '@superset-ui/core'; -import { CallbackDataParams } from 'echarts/types/src/util/types'; -import { EChartsCoreOption, FunnelSeriesOption } from 'echarts'; +import type { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { EChartsCoreOption } from 'echarts/core'; +import type { FunnelSeriesOption } from 'echarts/charts'; import { DEFAULT_FORM_DATA as DEFAULT_FUNNEL_FORM_DATA, EchartsFunnelChartProps, @@ -144,7 +145,6 @@ export default 
function transformProps( }, {}); const { setDataMask = () => {}, onContextMenu } = hooks; - const colorFn = CategoricalColorNamespace.getScale(colorScheme as string); const numberFormatter = getValueFormatter( metric, @@ -174,7 +174,7 @@ export default function transformProps( value, name, itemStyle: { - color: colorFn(name, sliceId, colorScheme), + color: colorFn(name, sliceId), opacity: isFiltered ? OpacityEnum.SemiTransparent : OpacityEnum.NonTransparent, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Gauge/constants.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Gauge/constants.ts index 2e72350f7a9b..676b1f08d48c 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Gauge/constants.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Gauge/constants.ts @@ -17,7 +17,7 @@ * under the License. */ import { SupersetTheme } from '@superset-ui/core'; -import { GaugeSeriesOption } from 'echarts'; +import type { GaugeSeriesOption } from 'echarts/charts'; export const defaultGaugeSeriesOption = ( theme: SupersetTheme, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Gauge/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Gauge/transformProps.ts index 187ce67dcb12..e2dd9d488090 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Gauge/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Gauge/transformProps.ts @@ -26,9 +26,10 @@ import { getValueFormatter, tooltipHtml, } from '@superset-ui/core'; -import { EChartsCoreOption, GaugeSeriesOption } from 'echarts'; -import { GaugeDataItemOption } from 'echarts/types/src/chart/gauge/GaugeSeries'; -import { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { EChartsCoreOption } from 'echarts/core'; +import type { GaugeSeriesOption } from 'echarts/charts'; +import type { GaugeDataItemOption } from 'echarts/types/src/chart/gauge/GaugeSeries'; +import type { CallbackDataParams } from 
'echarts/types/src/util/types'; import { range } from 'lodash'; import { parseNumbersList } from '../utils/controls'; import { @@ -165,6 +166,7 @@ export default function transformProps( const name = groupbyLabels .map(column => `${verboseMap[column] || column}: ${data_point[column]}`) .join(', '); + const colorLabel = groupbyLabels.map(col => data_point[col] as string); columnsLabelMap.set( name, groupbyLabels.map(col => data_point[col] as string), @@ -173,7 +175,7 @@ export default function transformProps( value: data_point[metricLabel] as number, name, itemStyle: { - color: colorFn(index, sliceId, colorScheme), + color: colorFn(colorLabel, sliceId), }, title: { offsetCenter: [ @@ -201,7 +203,7 @@ export default function transformProps( item = { ...item, itemStyle: { - color: colorFn(index, sliceId, colorScheme), + color: colorFn(index, sliceId), opacity: OpacityEnum.SemiTransparent, }, detail: { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Graph/constants.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Graph/constants.ts index f7dea8013b35..c193be38eed4 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Graph/constants.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Graph/constants.ts @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { GraphSeriesOption } from 'echarts'; +import type { GraphSeriesOption } from 'echarts/charts'; export const DEFAULT_GRAPH_SERIES_OPTION: GraphSeriesOption = { zoom: 0.7, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Graph/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Graph/transformProps.ts index 2cc3aff92f14..c98d6160921b 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Graph/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Graph/transformProps.ts @@ -23,9 +23,10 @@ import { DataRecordValue, tooltipHtml, } from '@superset-ui/core'; -import { EChartsCoreOption, GraphSeriesOption } from 'echarts'; +import type { EChartsCoreOption } from 'echarts/core'; +import type { GraphSeriesOption } from 'echarts/charts'; +import type { GraphEdgeItemOption } from 'echarts/types/src/chart/graph/GraphSeries'; import { extent as d3Extent } from 'd3-array'; -import { GraphEdgeItemOption } from 'echarts/types/src/chart/graph/GraphSeries'; import { EchartsGraphFormData, EChartGraphNode, @@ -197,6 +198,7 @@ export default function transformProps( const refs: Refs = {}; const metricLabel = getMetricLabel(metric); const colorFn = CategoricalColorNamespace.getScale(colorScheme as string); + const firstColor = colorFn.range()[0]; const nodes: { [name: string]: number } = {}; const categories: Set<string> = new Set(); const echartNodes: EChartGraphNode[] = []; @@ -206,7 +208,12 @@ export default function transformProps( * Get the node id of an existing node, * or create a new node if it doesn't exist. 
*/ - function getOrCreateNode(name: string, col: string, category?: string) { + function getOrCreateNode( + name: string, + col: string, + category?: string, + color?: string, + ) { if (!(name in nodes)) { nodes[name] = echartNodes.length; echartNodes.push({ @@ -220,6 +227,7 @@ export default function transformProps( ...getDefaultTooltip(refs), ...DEFAULT_GRAPH_SERIES_OPTION.tooltip, }, + itemStyle: { color }, }); } const node = echartNodes[nodes[name]]; @@ -247,8 +255,25 @@ export default function transformProps( const targetCategoryName = targetCategory ? getCategoryName(targetCategory, link[targetCategory]) : undefined; - const sourceNode = getOrCreateNode(sourceName, source, sourceCategoryName); - const targetNode = getOrCreateNode(targetName, target, targetCategoryName); + const sourceNodeColor = sourceCategoryName + ? colorFn(sourceCategoryName) + : firstColor; + const targetNodeColor = targetCategoryName + ? colorFn(targetCategoryName) + : firstColor; + + const sourceNode = getOrCreateNode( + sourceName, + source, + sourceCategoryName, + sourceNodeColor, + ); + const targetNode = getOrCreateNode( + targetName, + target, + targetCategoryName, + targetNodeColor, + ); sourceNode.value += value; targetNode.value += value; @@ -257,7 +282,9 @@ export default function transformProps( source: sourceNode.id, target: targetNode.id, value, - lineStyle: {}, + lineStyle: { + color: sourceNodeColor, + }, emphasis: {}, select: {}, }); @@ -270,14 +297,15 @@ export default function transformProps( }); const categoryList = [...categories]; - const series: GraphSeriesOption[] = [ { zoom: DEFAULT_GRAPH_SERIES_OPTION.zoom, type: 'graph', categories: categoryList.map(c => ({ name: c, - itemStyle: { color: colorFn(c, sliceId, colorScheme) }, + itemStyle: { + color: colorFn(c, sliceId), + }, })), layout, force: { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Graph/types.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Graph/types.ts index 
4a45f79c4157..2c7deae3f69b 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Graph/types.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Graph/types.ts @@ -17,8 +17,8 @@ * under the License. */ import { QueryFormData } from '@superset-ui/core'; -import { GraphNodeItemOption } from 'echarts/types/src/chart/graph/GraphSeries'; -import { SeriesTooltipOption } from 'echarts/types/src/util/types'; +import type { GraphNodeItemOption } from 'echarts/types/src/chart/graph/GraphSeries'; +import type { SeriesTooltipOption } from 'echarts/types/src/util/types'; import { BaseChartProps, BaseTransformedProps, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Heatmap/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Heatmap/transformProps.ts index 90524d12d0b8..d9b433b06a9b 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Heatmap/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Heatmap/transformProps.ts @@ -29,8 +29,9 @@ import { } from '@superset-ui/core'; import memoizeOne from 'memoize-one'; import { maxBy, minBy } from 'lodash'; -import { EChartsOption, HeatmapSeriesOption } from 'echarts'; -import { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { ComposeOption } from 'echarts/core'; +import type { HeatmapSeriesOption } from 'echarts/charts'; +import type { CallbackDataParams } from 'echarts/types/src/util/types'; import { HeatmapChartProps, HeatmapTransformedProps } from './types'; import { getDefaultTooltip } from '../utils/tooltip'; import { Refs } from '../types'; @@ -38,6 +39,8 @@ import { parseAxisBound } from '../utils/controls'; import { NULL_STRING } from '../constants'; import { getPercentFormatter } from '../utils/formatters'; +type EChartsOption = ComposeOption<HeatmapSeriesOption>; + const DEFAULT_ECHARTS_BOUNDS = [0, 200]; // Calculated totals per x and y categories plus total @@ -213,7 +216,7 @@ export default function transformProps( 
top: 0, itemHeight: legendType === 'continuous' ? 300 : 14, itemWidth: 15, - formatter: min => valueFormatter(min as number), + formatter: (min: number) => valueFormatter(min), inRange: { color: colors, }, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/buildQuery.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/buildQuery.ts index aed4492bd784..4afcb1e4af3c 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/buildQuery.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/buildQuery.ts @@ -25,7 +25,6 @@ export default function buildQuery(formData: HistogramFormData) { return buildQueryContext(formData, baseQueryObject => [ { ...baseQueryObject, - extras: { where: `${column} IS NOT NULL` }, columns: [...groupby, column], post_processing: [histogramOperator(formData, baseQueryObject)], metrics: undefined, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/controlPanel.tsx index 59a7de2825c5..a347694f1bfb 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/controlPanel.tsx @@ -27,6 +27,7 @@ import { formatSelectOptionsForRange, dndGroupByControl, columnsByType, + sections, } from '@superset-ui/chart-controls'; import { showLegendControl, showValueControl } from '../controls'; @@ -104,6 +105,7 @@ const config: ControlPanelConfig = { ], ], }, + sections.titleControls, { label: t('Chart Options'), expanded: true, @@ -111,28 +113,6 @@ const config: ControlPanelConfig = { ['color_scheme'], [showValueControl], [showLegendControl], - [ - { - name: 'x_axis_title', - config: { - type: 'TextControl', - label: t('X Axis Title'), - renderTrigger: true, - default: '', - }, - }, - ], - [ - { - name: 'y_axis_title', - config: { - type: 'TextControl', - label: t('Y Axis Title'), - renderTrigger: 
true, - default: '', - }, - }, - ], ], }, ], diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/index.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/index.ts index 6e732d35e259..15e6e84dddd4 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/index.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/index.ts @@ -17,7 +17,7 @@ * specific language governing permissions and limitations * under the License. */ -import { Behavior, ChartMetadata, ChartPlugin, t } from '@superset-ui/core'; +import { ChartMetadata, ChartPlugin, t } from '@superset-ui/core'; import buildQuery from './buildQuery'; import controlPanel from './controlPanel'; import transformProps from './transformProps'; @@ -26,6 +26,7 @@ import example1 from './images/example1.png'; import example2 from './images/example2.png'; import { HistogramChartProps, HistogramFormData } from './types'; +// TODO: Implement cross filtering export default class EchartsHistogramChartPlugin extends ChartPlugin< HistogramFormData, HistogramChartProps @@ -46,7 +47,6 @@ export default class EchartsHistogramChartPlugin extends ChartPlugin< controlPanel, loadChart: () => import('./Histogram'), metadata: new ChartMetadata({ - behaviors: [Behavior.InteractiveChart], credits: ['https://echarts.apache.org'], category: t('Distribution'), description: t( diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/transformProps.ts index cdb4182cfd1b..df8fe3b1569f 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Histogram/transformProps.ts @@ -16,8 +16,10 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { BarSeriesOption, EChartsOption } from 'echarts'; -import { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { ComposeOption } from 'echarts/core'; +import type { BarSeriesOption } from 'echarts/charts'; +import type { GridComponentOption } from 'echarts/components'; +import type { CallbackDataParams } from 'echarts/types/src/util/types'; import { isEmpty } from 'lodash'; import { CategoricalColorNamespace, @@ -133,6 +135,8 @@ export default function transformProps( focusedSeries = index; }; + type EChartsOption = ComposeOption<GridComponentOption | BarSeriesOption>; + const echartOptions: EChartsOption = { grid: { ...defaultGrid, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/EchartsMixedTimeseries.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/EchartsMixedTimeseries.tsx index a0b0d93ffba6..c3fda189c727 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/EchartsMixedTimeseries.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/EchartsMixedTimeseries.tsx @@ -110,13 +110,25 @@ export default function EchartsMixedTimeseries({ const handleChange = useCallback( (seriesName: string, seriesIndex: number) => { - if (!emitCrossFilters) { + const isFirst = isFirstQuery(seriesIndex); + if ( + !emitCrossFilters || + (isFirst && groupby.length === 0) || + (!isFirst && groupbyB.length === 0) + ) { return; } setDataMask(getCrossFilterDataMask(seriesName, seriesIndex).dataMask); }, - [emitCrossFilters, setDataMask, getCrossFilterDataMask], + [ + isFirstQuery, + emitCrossFilters, + groupby.length, + groupbyB.length, + setDataMask, + getCrossFilterDataMask, + ], ); const eventHandlers: EventHandlers = { @@ -140,7 +152,7 @@ export default function EchartsMixedTimeseries({ const isFirst = isFirstQuery(seriesIndex); const values = [ ...(eventParams.name ? [eventParams.name] : []), - ...(isFirst ? 
labelMap : labelMapB)[eventParams.seriesName], + ...((isFirst ? labelMap : labelMapB)[eventParams.seriesName] || []), ]; if (data && xAxis.type === AxisType.Time) { drillToDetailFilters.push({ @@ -179,9 +191,14 @@ export default function EchartsMixedTimeseries({ }), }), ); + const hasCrossFilter = + (isFirst && groupby.length > 0) || (!isFirst && groupbyB.length > 0); + onContextMenu(pointerEvent.clientX, pointerEvent.clientY, { drillToDetail: drillToDetailFilters, - crossFilter: getCrossFilterDataMask(seriesName, seriesIndex), + crossFilter: hasCrossFilter + ? getCrossFilterDataMask(seriesName, seriesIndex) + : undefined, drillBy: { filters: drillByFilters, groupbyFieldName: isFirst ? 'groupby' : 'groupby_b', diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/transformProps.ts index 29741f545c70..841d8f44225f 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/transformProps.ts @@ -35,7 +35,6 @@ import { isIntervalAnnotationLayer, isPhysicalColumn, isTimeseriesAnnotationLayer, - NumberFormats, QueryFormData, QueryFormMetric, TimeseriesChartDataResponseResult, @@ -44,7 +43,8 @@ import { ValueFormatter, } from '@superset-ui/core'; import { getOriginalSeries } from '@superset-ui/chart-controls'; -import { EChartsCoreOption, SeriesOption } from 'echarts'; +import type { EChartsCoreOption } from 'echarts/core'; +import type { SeriesOption } from 'echarts'; import { DEFAULT_FORM_DATA, EchartsMixedTimeseriesChartTransformedProps, @@ -62,6 +62,7 @@ import { extractDataTotalValues, extractSeries, extractShowValueIndexes, + extractTooltipKeys, getAxisType, getColtypesMapping, getLegendProps, @@ -91,7 +92,6 @@ import { import { TIMEGRAIN_TO_TIMESTAMP, TIMESERIES_CONSTANTS } from '../constants'; import { getDefaultTooltip } from 
'../utils/tooltip'; import { - getPercentFormatter, getTooltipTimeFormatter, getXAxisFormatter, getYAxisFormatter, @@ -234,7 +234,6 @@ export default function transformProps( const xAxisDataType = dataTypes?.[xAxisLabel] ?? dataTypes?.[xAxisOrig]; const xAxisType = getAxisType(stack, xAxisForceCategorical, xAxisDataType); const series: SeriesOption[] = []; - const percentFormatter = getPercentFormatter(NumberFormats.PERCENT_2_POINT); const formatter = contributionMode ? getNumberFormatter(',.0%') : currencyFormat?.symbol @@ -583,76 +582,61 @@ export default function transformProps( : params.value[0]; const forecastValue: any[] = richTooltip ? params : [params]; - if (richTooltip && tooltipSortByMetric) { - forecastValue.sort((a, b) => b.data[1] - a.data[1]); - } + const sortedKeys = extractTooltipKeys( + forecastValue, + // horizontal mode is not supported in mixed series chart + 1, + richTooltip, + tooltipSortByMetric, + ); const rows: string[][] = []; const forecastValues = extractForecastValuesFromTooltipParams(forecastValue); - const isForecast = Object.values(forecastValues).some( - value => - value.forecastTrend || value.forecastLower || value.forecastUpper, - ); - - const total = Object.values(forecastValues).reduce( - (acc, value) => - value.observation !== undefined ? acc + value.observation : acc, - 0, - ); - const showTotal = richTooltip && !isForecast; const keys = Object.keys(forecastValues); - keys.forEach(key => { - const value = forecastValues[key]; - // if there are no dimensions, key is a verbose name of a metric, - // otherwise it is a comma separated string where the first part is metric name - let formatterKey; - if (primarySeries.has(key)) { - formatterKey = - groupby.length === 0 ? inverted[key] : labelMap[key]?.[0]; - } else { - formatterKey = - groupbyB.length === 0 ? 
inverted[key] : labelMapB[key]?.[0]; - } - const tooltipFormatter = getFormatter( - customFormatters, - formatter, - metrics, - formatterKey, - !!contributionMode, - ); - const tooltipFormatterSecondary = getFormatter( - customFormattersSecondary, - formatterSecondary, - metricsB, - formatterKey, - !!contributionMode, - ); - const row = formatForecastTooltipSeries({ - ...value, - seriesName: key, - formatter: primarySeries.has(key) - ? tooltipFormatter - : tooltipFormatterSecondary, + let focusedRow; + sortedKeys + .filter(key => keys.includes(key)) + .forEach(key => { + const value = forecastValues[key]; + // if there are no dimensions, key is a verbose name of a metric, + // otherwise it is a comma separated string where the first part is metric name + let formatterKey; + if (primarySeries.has(key)) { + formatterKey = + groupby.length === 0 ? inverted[key] : labelMap[key]?.[0]; + } else { + formatterKey = + groupbyB.length === 0 ? inverted[key] : labelMapB[key]?.[0]; + } + const tooltipFormatter = getFormatter( + customFormatters, + formatter, + metrics, + formatterKey, + !!contributionMode, + ); + const tooltipFormatterSecondary = getFormatter( + customFormattersSecondary, + formatterSecondary, + metricsB, + formatterKey, + !!contributionMode, + ); + const row = formatForecastTooltipSeries({ + ...value, + seriesName: key, + formatter: primarySeries.has(key) + ? 
tooltipFormatter + : tooltipFormatterSecondary, + }); + rows.push(row); + if (key === focusedSeries) { + focusedRow = rows.length - 1; + } }); - if (showTotal && value.observation !== undefined) { - row.push(percentFormatter.format(value.observation / (total || 1))); - } - rows.push(row); - }); - if (showTotal) { - rows.push([ - 'Total', - formatter.format(total), - percentFormatter.format(1), - ]); - } - return tooltipHtml( - rows, - tooltipFormatter(xValue), - keys.findIndex(key => key === focusedSeries), - ); + return tooltipHtml(rows, tooltipFormatter(xValue), focusedRow); }, }, legend: { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Pie/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Pie/transformProps.ts index 1b3898d8fd3b..b02f86b23274 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Pie/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Pie/transformProps.ts @@ -28,8 +28,9 @@ import { getValueFormatter, tooltipHtml, } from '@superset-ui/core'; -import { CallbackDataParams } from 'echarts/types/src/util/types'; -import { EChartsCoreOption, PieSeriesOption } from 'echarts'; +import type { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { EChartsCoreOption } from 'echarts/core'; +import type { PieSeriesOption } from 'echarts/charts'; import { DEFAULT_FORM_DATA as DEFAULT_PIE_FORM_DATA, EchartsPieChartProps, @@ -190,7 +191,6 @@ export default function transformProps( }, {}); const { setDataMask = () => {}, onContextMenu } = hooks; - const colorFn = CategoricalColorNamespace.getScale(colorScheme as string); const numberFormatter = getValueFormatter( metric, @@ -222,7 +222,7 @@ export default function transformProps( value, name, itemStyle: { - color: colorFn(name, sliceId, colorScheme), + color: colorFn(name, sliceId), opacity: isFiltered ? 
OpacityEnum.SemiTransparent : OpacityEnum.NonTransparent, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Radar/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Radar/transformProps.ts index dd49a1b87282..106323cc2586 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Radar/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Radar/transformProps.ts @@ -25,9 +25,10 @@ import { getTimeFormatter, NumberFormatter, } from '@superset-ui/core'; -import { CallbackDataParams } from 'echarts/types/src/util/types'; -import { RadarSeriesDataItemOption } from 'echarts/types/src/chart/radar/RadarSeries'; -import { EChartsCoreOption, RadarSeriesOption } from 'echarts'; +import type { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { RadarSeriesDataItemOption } from 'echarts/types/src/chart/radar/RadarSeries'; +import type { EChartsCoreOption } from 'echarts/core'; +import type { RadarSeriesOption } from 'echarts/charts'; import { DEFAULT_FORM_DATA as DEFAULT_RADAR_FORM_DATA, EchartsRadarChartProps, @@ -108,7 +109,6 @@ export default function transformProps( ...formData, }; const { setDataMask = () => {}, onContextMenu } = hooks; - const colorFn = CategoricalColorNamespace.getScale(colorScheme as string); const numberFormatter = getNumberFormatter(numberFormat); const formatter = (params: CallbackDataParams) => @@ -165,7 +165,7 @@ export default function transformProps( value: metricLabels.map(metricLabel => datum[metricLabel]), name: joinedName, itemStyle: { - color: colorFn(joinedName, sliceId, colorScheme), + color: colorFn(joinedName, sliceId), opacity: isFiltered ? 
OpacityEnum.Transparent : OpacityEnum.NonTransparent, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Sankey/index.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Sankey/index.ts index 77348bdab717..8ff658dd0ac2 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Sankey/index.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Sankey/index.ts @@ -17,7 +17,7 @@ * specific language governing permissions and limitations * under the License. */ -import { Behavior, ChartMetadata, ChartPlugin, t } from '@superset-ui/core'; +import { ChartMetadata, ChartPlugin, t } from '@superset-ui/core'; import buildQuery from './buildQuery'; import controlPanel from './controlPanel'; import transformProps from './transformProps'; @@ -26,6 +26,7 @@ import example1 from './images/example1.png'; import example2 from './images/example2.png'; import { SankeyChartProps, SankeyFormData } from './types'; +// TODO: Implement cross filtering export default class EchartsSankeyChartPlugin extends ChartPlugin< SankeyFormData, SankeyChartProps @@ -46,7 +47,6 @@ export default class EchartsSankeyChartPlugin extends ChartPlugin< controlPanel, loadChart: () => import('./Sankey'), metadata: new ChartMetadata({ - behaviors: [Behavior.InteractiveChart], credits: ['https://echarts.apache.org'], category: t('Flow'), description: t( diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Sankey/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Sankey/transformProps.ts index 96be18c8988b..c3db5052bf12 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Sankey/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Sankey/transformProps.ts @@ -16,8 +16,9 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { EChartsOption, SankeySeriesOption } from 'echarts'; -import { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { ComposeOption } from 'echarts/core'; +import type { SankeySeriesOption } from 'echarts/charts'; +import type { CallbackDataParams } from 'echarts/types/src/util/types'; import { CategoricalColorNamespace, NumberFormats, @@ -32,6 +33,7 @@ import { getDefaultTooltip } from '../utils/tooltip'; import { getPercentFormatter } from '../utils/formatters'; type Link = { source: string; target: string; value: number }; +type EChartsOption = ComposeOption<SankeySeriesOption>; export default function transformProps( chartProps: SankeyChartProps, @@ -39,7 +41,7 @@ export default function transformProps( const refs: Refs = {}; const { formData, height, hooks, queriesData, width } = chartProps; const { onLegendStateChanged } = hooks; - const { colorScheme, metric, source, target } = formData; + const { colorScheme, metric, source, target, sliceId } = formData; const { data } = queriesData[0]; const colorFn = CategoricalColorNamespace.getScale(colorScheme); const metricLabel = getMetricLabel(metric); @@ -66,7 +68,7 @@ export default function transformProps( ).map(name => ({ name, itemStyle: { - color: colorFn(name), + color: colorFn(name, sliceId), }, })); diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Sunburst/EchartsSunburst.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Sunburst/EchartsSunburst.tsx index 5d74c50c4bbf..eee7bc8426f3 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Sunburst/EchartsSunburst.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Sunburst/EchartsSunburst.tsx @@ -95,7 +95,7 @@ export default function EchartsSunburst(props: SunburstTransformedProps) { const handleChange = useCallback( (treePathInfo: TreePathInfo[]) => { - if (!emitCrossFilters) { + if (!emitCrossFilters || !columns?.length) { return; } @@ -142,7 +142,9 @@ export default function 
EchartsSunburst(props: SunburstTransformedProps) { } onContextMenu(pointerEvent.clientX, pointerEvent.clientY, { drillToDetail: drillToDetailFilters, - crossFilter: getCrossFilterDataMask(treePathInfo), + crossFilter: columns?.length + ? getCrossFilterDataMask(treePathInfo) + : undefined, drillBy: { filters: drillByFilters, groupbyFieldName: 'columns' }, }); } diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Sunburst/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Sunburst/transformProps.ts index 7006e2178db3..853721b2926c 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Sunburst/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Sunburst/transformProps.ts @@ -30,8 +30,8 @@ import { tooltipHtml, ValueFormatter, } from '@superset-ui/core'; -import { EChartsCoreOption } from 'echarts'; -import { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { EChartsCoreOption } from 'echarts/core'; +import type { CallbackDataParams } from 'echarts/types/src/util/types'; import { NULL_STRING, OpacityEnum } from '../constants'; import { defaultGrid } from '../defaults'; import { Refs } from '../types'; @@ -188,7 +188,11 @@ export default function transformProps( showTotal, sliceId, } = formData; - const { currencyFormats = {}, columnFormats = {} } = datasource; + const { + currencyFormats = {}, + columnFormats = {}, + verboseMap = {}, + } = datasource; const refs: Refs = {}; const primaryValueFormatter = getValueFormatter( metric, @@ -334,8 +338,10 @@ export default function transformProps( secondaryValueFormatter, colorByCategory, totalValue, - metricLabel, - secondaryMetricLabel, + metricLabel: verboseMap[metricLabel] || metricLabel, + secondaryMetricLabel: secondaryMetricLabel + ? 
verboseMap[secondaryMetricLabel] || secondaryMetricLabel + : undefined, }), }, series: [ diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Sunburst/types.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Sunburst/types.ts index 37844addea49..8fc8385d1760 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Sunburst/types.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Sunburst/types.ts @@ -25,7 +25,7 @@ import { QueryFormData, QueryFormMetric, } from '@superset-ui/core'; -import { SunburstSeriesNodeItemOption } from 'echarts/types/src/chart/sunburst/SunburstSeries'; +import type { SunburstSeriesNodeItemOption } from 'echarts/types/src/chart/sunburst/SunburstSeries'; import { BaseTransformedProps, ContextMenuTransformedProps, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/EchartsTimeseries.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/EchartsTimeseries.tsx index e9cc9f687b75..b91db0b4c1b8 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/EchartsTimeseries.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/EchartsTimeseries.tsx @@ -27,9 +27,9 @@ import { LegendState, ensureIsArray, } from '@superset-ui/core'; -import { ViewRootGroup } from 'echarts/types/src/util/types'; -import GlobalModel from 'echarts/types/src/model/Global'; -import ComponentModel from 'echarts/types/src/model/Component'; +import type { ViewRootGroup } from 'echarts/types/src/util/types'; +import type GlobalModel from 'echarts/types/src/model/Global'; +import type ComponentModel from 'echarts/types/src/model/Component'; import { EchartsHandler, EventHandlers } from '../types'; import Echart from '../components/Echart'; import { TimeseriesChartTransformedProps } from './types'; @@ -70,6 +70,8 @@ export default function EchartsTimeseries({ setExtraControlHeight(updatedHeight); }, [formData.showExtraControls]); + const hasDimensions = 
ensureIsArray(groupby).length > 0; + const getModelInfo = (target: ViewRootGroup, globalModel: GlobalModel) => { let el = target; let model: ComponentModel | null = null; @@ -139,6 +141,9 @@ export default function EchartsTimeseries({ const eventHandlers: EventHandlers = { click: props => { + if (!hasDimensions) { + return; + } if (clickTimer.current) { clearTimeout(clickTimer.current); } @@ -215,8 +220,10 @@ export default function EchartsTimeseries({ onContextMenu(pointerEvent.clientX, pointerEvent.clientY, { drillToDetail: drillToDetailFilters, - crossFilter: getCrossFilterDataMask(seriesName), drillBy: { filters: drillByFilters, groupbyFieldName: 'groupby' }, + crossFilter: hasDimensions + ? getCrossFilterDataMask(seriesName) + : undefined, }); } }, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx index 7482c7a16b01..568cf07a8424 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx @@ -73,6 +73,8 @@ function createAxisTitleControl(axis: 'x' | 'y'): ControlSetRow[] { description: t('Changing this control takes effect instantly'), visibility: ({ controls }: ControlPanelsContainerProps) => isXAxis ? isVertical(controls) : isHorizontal(controls), + disableStash: true, + resetOnHide: false, }, }, ], @@ -90,6 +92,8 @@ function createAxisTitleControl(axis: 'x' | 'y'): ControlSetRow[] { description: t('Changing this control takes effect instantly'), visibility: ({ controls }: ControlPanelsContainerProps) => isXAxis ? 
isVertical(controls) : isHorizontal(controls), + disableStash: true, + resetOnHide: false, }, }, ], @@ -104,6 +108,8 @@ function createAxisTitleControl(axis: 'x' | 'y'): ControlSetRow[] { description: t('Changing this control takes effect instantly'), visibility: ({ controls }: ControlPanelsContainerProps) => isXAxis ? isHorizontal(controls) : isVertical(controls), + disableStash: true, + resetOnHide: false, }, }, ], @@ -121,6 +127,8 @@ function createAxisTitleControl(axis: 'x' | 'y'): ControlSetRow[] { description: t('Changing this control takes effect instantly'), visibility: ({ controls }: ControlPanelsContainerProps) => isXAxis ? isHorizontal(controls) : isVertical(controls), + disableStash: true, + resetOnHide: false, }, }, ], @@ -138,6 +146,8 @@ function createAxisTitleControl(axis: 'x' | 'y'): ControlSetRow[] { description: t('Changing this control takes effect instantly'), visibility: ({ controls }: ControlPanelsContainerProps) => isXAxis ? isHorizontal(controls) : isVertical(controls), + disableStash: true, + resetOnHide: false, }, }, ], @@ -160,6 +170,8 @@ function createAxisControl(axis: 'x' | 'y'): ControlSetRow[] { description: `${D3_TIME_FORMAT_DOCS}. ${TIME_SERIES_DESCRIPTION_TEXT}`, visibility: ({ controls }: ControlPanelsContainerProps) => isXAxis ? isVertical(controls) : isHorizontal(controls), + disableStash: true, + resetOnHide: false, }, }, ], @@ -170,6 +182,8 @@ function createAxisControl(axis: 'x' | 'y'): ControlSetRow[] { ...xAxisLabelRotation.config, visibility: ({ controls }: ControlPanelsContainerProps) => isXAxis ? isVertical(controls) : isHorizontal(controls), + disableStash: true, + resetOnHide: false, }, }, ], @@ -181,6 +195,8 @@ function createAxisControl(axis: 'x' | 'y'): ControlSetRow[] { label: t('Axis Format'), visibility: ({ controls }: ControlPanelsContainerProps) => isXAxis ? 
isHorizontal(controls) : isVertical(controls), + disableStash: true, + resetOnHide: false, }, }, ], @@ -196,6 +212,8 @@ function createAxisControl(axis: 'x' | 'y'): ControlSetRow[] { description: t('Logarithmic axis'), visibility: ({ controls }: ControlPanelsContainerProps) => isXAxis ? isHorizontal(controls) : isVertical(controls), + disableStash: true, + resetOnHide: false, }, }, ], @@ -210,6 +228,8 @@ function createAxisControl(axis: 'x' | 'y'): ControlSetRow[] { description: t('Draw split lines for minor axis ticks'), visibility: ({ controls }: ControlPanelsContainerProps) => isXAxis ? isHorizontal(controls) : isVertical(controls), + disableStash: true, + resetOnHide: false, }, }, ], @@ -226,6 +246,8 @@ function createAxisControl(axis: 'x' | 'y'): ControlSetRow[] { description: t('It’s not recommended to truncate axis in Bar chart.'), visibility: ({ controls }: ControlPanelsContainerProps) => isXAxis ? isHorizontal(controls) : isVertical(controls), + disableStash: true, + resetOnHide: false, }, }, ], @@ -246,6 +268,8 @@ function createAxisControl(axis: 'x' | 'y'): ControlSetRow[] { visibility: ({ controls }: ControlPanelsContainerProps) => Boolean(controls?.truncateYAxis?.value) && (isXAxis ? 
isHorizontal(controls) : isVertical(controls)), + disableStash: true, + resetOnHide: false, }, }, ], diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformProps.ts index 9dae4385e973..4f2437a19123 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformProps.ts @@ -47,8 +47,9 @@ import { isDerivedSeries, getTimeOffset, } from '@superset-ui/chart-controls'; -import { EChartsCoreOption, SeriesOption } from 'echarts'; -import { LineStyleOption } from 'echarts/types/src/util/types'; +import type { EChartsCoreOption } from 'echarts/core'; +import type { LineStyleOption } from 'echarts/types/src/util/types'; +import type { SeriesOption } from 'echarts'; import { EchartsTimeseriesChartProps, EchartsTimeseriesFormData, @@ -64,6 +65,7 @@ import { extractDataTotalValues, extractSeries, extractShowValueIndexes, + extractTooltipKeys, getAxisType, getColtypesMapping, getLegendProps, @@ -167,6 +169,8 @@ export default function transformProps( stack, tooltipTimeFormat, tooltipSortByMetric, + showTooltipTotal, + showTooltipPercentage, truncateXAxis, truncateYAxis, xAxis: xAxisOrig, @@ -188,7 +192,9 @@ export default function transformProps( }: EchartsTimeseriesFormData = { ...DEFAULT_FORM_DATA, ...formData }; const refs: Refs = {}; const groupBy = ensureIsArray(groupby); - const labelMap = Object.entries(label_map).reduce((acc, entry) => { + const labelMap: { [key: string]: string[] } = Object.entries( + label_map, + ).reduce((acc, entry) => { if ( entry[1].length > groupBy.length && Array.isArray(timeCompare) && @@ -198,7 +204,6 @@ export default function transformProps( } return { ...acc, [entry[0]]: entry[1] }; }, {}); - const colorScale = CategoricalColorNamespace.getScale(colorScheme as string); const rebasedData = rebaseForecastDatum(data, verboseMap); 
let xAxisLabel = getXAxisLabel(chartProps.rawFormData) as string; @@ -248,7 +253,7 @@ export default function transformProps( legendState, }); const seriesContexts = extractForecastSeriesContexts( - Object.values(rawSeries).map(series => series.name as string), + rawSeries.map(series => series.name as string), ); const isAreaExpand = stack === StackControlsValue.Expand; const xAxisDataType = dataTypes?.[xAxisLabel] ?? dataTypes?.[xAxisOrig]; @@ -375,6 +380,7 @@ export default function transformProps( xAxisType, colorScale, sliceId, + orientation, ), ); else if (isIntervalAnnotationLayer(layer)) { @@ -386,6 +392,7 @@ export default function transformProps( colorScale, theme, sliceId, + orientation, ), ); } else if (isEventAnnotationLayer(layer)) { @@ -397,6 +404,7 @@ export default function transformProps( colorScale, theme, sliceId, + orientation, ), ); } else if (isTimeseriesAnnotationLayer(layer)) { @@ -408,6 +416,7 @@ export default function transformProps( annotationData, colorScale, sliceId, + orientation, ), ); } @@ -482,7 +491,9 @@ export default function transformProps( minorTick: { show: minorTicks }, minInterval: xAxisType === AxisType.Time && timeGrainSqla - ? TIMEGRAIN_TO_TIMESTAMP[timeGrainSqla] + ? TIMEGRAIN_TO_TIMESTAMP[ + timeGrainSqla as keyof typeof TIMEGRAIN_TO_TIMESTAMP + ] : 0, ...getMinAndMaxFromBounds( xAxisType, @@ -538,11 +549,12 @@ export default function transformProps( ? params[0].value[xIndex] : params.value[xIndex]; const forecastValue: any[] = richTooltip ? params : [params]; - - if (richTooltip && tooltipSortByMetric) { - forecastValue.sort((a, b) => b.data[yIndex] - a.data[yIndex]); - } - + const sortedKeys = extractTooltipKeys( + forecastValue, + yIndex, + richTooltip, + tooltipSortByMetric, + ); const forecastValues: Record<string, ForecastValue> = extractForecastValuesFromTooltipParams(forecastValue, isHorizontal); @@ -561,40 +573,47 @@ export default function transformProps( value.observation !== undefined ? 
acc + value.observation : acc, 0, ); - const showTotal = Boolean(isMultiSeries) && richTooltip && !isForecast; - const showPercentage = showTotal && !forcePercentFormatter; + const allowTotal = Boolean(isMultiSeries) && richTooltip && !isForecast; + const showPercentage = + allowTotal && !forcePercentFormatter && showTooltipPercentage; const keys = Object.keys(forecastValues); - keys.forEach(key => { - const value = forecastValues[key]; - if (value.observation === 0 && stack) { - return; - } - const row = formatForecastTooltipSeries({ - ...value, - seriesName: key, - formatter, + let focusedRow; + sortedKeys + .filter(key => keys.includes(key)) + .forEach(key => { + const value = forecastValues[key]; + if (value.observation === 0 && stack) { + return; + } + const row = formatForecastTooltipSeries({ + ...value, + seriesName: key, + formatter, + }); + if (showPercentage && value.observation !== undefined) { + row.push( + percentFormatter.format(value.observation / (total || 1)), + ); + } + rows.push(row); + if (key === focusedSeries) { + focusedRow = rows.length - 1; + } }); - if (showPercentage && value.observation !== undefined) { - row.push(percentFormatter.format(value.observation / (total || 1))); - } - rows.push(row); - }); if (stack) { - keys.reverse(); rows.reverse(); + if (focusedRow !== undefined) { + focusedRow = rows.length - focusedRow - 1; + } } - if (showTotal) { + if (allowTotal && showTooltipTotal) { const totalRow = ['Total', formatter.format(total)]; if (showPercentage) { totalRow.push(percentFormatter.format(1)); } rows.push(totalRow); } - return tooltipHtml( - rows, - tooltipFormatter(xValue), - keys.findIndex(key => key === focusedSeries), - ); + return tooltipHtml(rows, tooltipFormatter(xValue), focusedRow); }, }, legend: { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformers.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformers.ts index 91649ecd55b7..a594cc926659 100644 --- 
a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformers.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/transformers.ts @@ -32,8 +32,7 @@ import { TimeseriesDataRecord, ValueFormatter, } from '@superset-ui/core'; -import { SeriesOption } from 'echarts'; -import { +import type { CallbackDataParams, DefaultStatesMixin, ItemStyleOption, @@ -43,16 +42,18 @@ import { SeriesLineLabelOption, ZRLineType, } from 'echarts/types/src/util/types'; -import { +import type { SeriesOption } from 'echarts'; +import type { MarkArea1DDataItemOption, MarkArea2DDataItemOption, } from 'echarts/types/src/component/marker/MarkAreaModel'; -import { MarkLine1DDataItemOption } from 'echarts/types/src/component/marker/MarkLineModel'; +import type { MarkLine1DDataItemOption } from 'echarts/types/src/component/marker/MarkLineModel'; import { extractForecastSeriesContext } from '../utils/forecast'; import { EchartsTimeseriesSeriesType, ForecastSeriesEnum, LegendOrientation, + OrientationType, StackType, } from '../types'; @@ -222,7 +223,7 @@ export function transformSeries( stackId = forecastSeries.name; } else if (stack && isObservation) { // the suffix of the observation series is '' (falsy), which disables - // stacking. Therefore we need to set something that is truthy. + // stacking. Therefore, we need to set something that is truthy. stackId = getTimeCompareStackId('obs', timeCompare, name); } else if (stack && isTrend) { stackId = getTimeCompareStackId(forecastSeries.type, timeCompare, name); @@ -315,6 +316,15 @@ export function transformSeries( show: !!showValue, position: isHorizontal ? 
'right' : 'top', formatter: (params: any) => { + // don't show confidence band value labels, as they're already visible on the tooltip + if ( + [ + ForecastSeriesEnum.ForecastUpper, + ForecastSeriesEnum.ForecastLower, + ].includes(forecastSeries.type) + ) { + return ''; + } const { value, dataIndex, seriesIndex, seriesName } = params; const numericValue = isHorizontal ? value[0] : value[1]; const isSelectedLegend = !legendState || legendState[seriesName]; @@ -350,8 +360,11 @@ export function transformFormulaAnnotation( xAxisType: AxisType, colorScale: CategoricalColorScale, sliceId?: number, + orientation?: OrientationType, ): SeriesOption { const { name, color, opacity, width, style } = layer; + const isHorizontal = orientation === OrientationType.Horizontal; + return { name, id: name, @@ -365,7 +378,9 @@ export function transformFormulaAnnotation( }, type: 'line', smooth: true, - data: evalFormula(layer, data, xAxisCol, xAxisType), + data: evalFormula(layer, data, xAxisCol, xAxisType).map(([x, y]) => + isHorizontal ? [y, x] : [x, y], + ), symbolSize: 0, }; } @@ -377,6 +392,7 @@ export function transformIntervalAnnotation( colorScale: CategoricalColorScale, theme: SupersetTheme, sliceId?: number, + orientation?: OrientationType, ): SeriesOption[] { const series: SeriesOption[] = []; const annotations = extractRecordAnnotations(layer, annotationData); @@ -384,6 +400,7 @@ export function transformIntervalAnnotation( const { name, color, opacity, showLabel } = layer; const { descriptions, intervalEnd, time, title } = annotation; const label = formatAnnotationLabel(name, title, descriptions); + const isHorizontal = orientation === OrientationType.Horizontal; const intervalData: ( | MarkArea1DDataItemOption | MarkArea2DDataItemOption @@ -391,11 +408,9 @@ export function transformIntervalAnnotation( [ { name: label, - xAxis: time, - }, - { - xAxis: intervalEnd, + ...(isHorizontal ? { yAxis: time } : { xAxis: time }), }, + isHorizontal ? 
{ yAxis: intervalEnd } : { xAxis: intervalEnd }, ], ]; const intervalLabel: SeriesLabelOption = showLabel @@ -452,6 +467,7 @@ export function transformEventAnnotation( colorScale: CategoricalColorScale, theme: SupersetTheme, sliceId?: number, + orientation?: OrientationType, ): SeriesOption[] { const series: SeriesOption[] = []; const annotations = extractRecordAnnotations(layer, annotationData); @@ -459,10 +475,11 @@ export function transformEventAnnotation( const { name, color, opacity, style, width, showLabel } = layer; const { descriptions, time, title } = annotation; const label = formatAnnotationLabel(name, title, descriptions); + const isHorizontal = orientation === OrientationType.Horizontal; const eventData: MarkLine1DDataItemOption[] = [ { name: label, - xAxis: time, + ...(isHorizontal ? { yAxis: time } : { xAxis: time }), }, ]; @@ -525,10 +542,12 @@ export function transformTimeseriesAnnotation( annotationData: AnnotationData, colorScale: CategoricalColorScale, sliceId?: number, + orientation?: OrientationType, ): SeriesOption[] { const series: SeriesOption[] = []; const { hideLine, name, opacity, showMarkers, style, width, color } = layer; const result = annotationData[name]; + const isHorizontal = orientation === OrientationType.Horizontal; if (isTimeseriesAnnotationResult(result)) { result.forEach(annotation => { const { key, values } = annotation; @@ -536,7 +555,11 @@ export function transformTimeseriesAnnotation( type: 'line', id: key, name: key, - data: values.map(row => [row.x, row.y] as [OptionName, number]), + data: values.map(({ x, y }) => + isHorizontal + ? ([y, x] as [number, OptionName]) + : ([x, y] as [OptionName, number]), + ), symbolSize: showMarkers ? 
markerSize : 0, lineStyle: { opacity: parseAnnotationOpacity(opacity), diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/types.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/types.ts index 6ca9650db62e..788317aaf7a2 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/types.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/types.ts @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import { OptionName } from 'echarts/types/src/util/types'; +import type { OptionName } from 'echarts/types/src/util/types'; import { AnnotationLayer, AxisType, @@ -75,6 +75,8 @@ export type EchartsTimeseriesFormData = QueryFormData & { stack: StackType; timeCompare?: string[]; tooltipTimeFormat?: string; + showTooltipTotal?: boolean; + showTooltipPercentage?: boolean; truncateXAxis: boolean; truncateYAxis: boolean; yAxisFormat?: string; diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Tree/constants.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Tree/constants.ts index 35567c3fc593..79cea342a2c4 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Tree/constants.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Tree/constants.ts @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { TreeSeriesOption } from 'echarts'; +import type { TreeSeriesOption } from 'echarts/charts'; import { EchartsTreeFormData } from './types'; export const DEFAULT_TREE_SERIES_OPTION: TreeSeriesOption = { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Tree/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Tree/transformProps.ts index a0b08816db5d..e0dc20ff542c 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Tree/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Tree/transformProps.ts @@ -21,12 +21,13 @@ import { DataRecordValue, tooltipHtml, } from '@superset-ui/core'; -import { EChartsCoreOption, TreeSeriesOption } from 'echarts'; -import { +import type { EChartsCoreOption } from 'echarts/core'; +import type { TreeSeriesOption } from 'echarts/charts'; +import type { TreeSeriesCallbackDataParams, TreeSeriesNodeItemOption, } from 'echarts/types/src/chart/tree/TreeSeries'; -import { OptionName } from 'echarts/types/src/util/types'; +import type { OptionName } from 'echarts/types/src/util/types'; import { EchartsTreeChartProps, EchartsTreeFormData, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Tree/types.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Tree/types.ts index 0fde0cde2a17..394837cb60ae 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Tree/types.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Tree/types.ts @@ -16,9 +16,9 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { OptionName } from 'echarts/types/src/util/types'; +import type { OptionName } from 'echarts/types/src/util/types'; +import type { TreeSeriesNodeItemOption } from 'echarts/types/src/chart/tree/TreeSeries'; import { ChartDataResponseResult, QueryFormData } from '@superset-ui/core'; -import { TreeSeriesNodeItemOption } from 'echarts/types/src/chart/tree/TreeSeries'; import { BaseChartProps, BaseTransformedProps } from '../types'; export type EchartsTreeFormData = QueryFormData & { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/EchartsTreemap.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/EchartsTreemap.tsx index 343c8cf72cfe..e00f9cb52b18 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/EchartsTreemap.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/EchartsTreemap.tsx @@ -97,7 +97,7 @@ export default function EchartsTreemap({ const handleChange = useCallback( (data, treePathInfo) => { - if (!emitCrossFilters) { + if (!emitCrossFilters || groupby.length === 0) { return; } @@ -144,7 +144,10 @@ export default function EchartsTreemap({ }); onContextMenu(pointerEvent.clientX, pointerEvent.clientY, { drillToDetail: drillToDetailFilters, - crossFilter: getCrossFilterDataMask(data, treePathInfo), + crossFilter: + groupby.length > 0 + ? 
getCrossFilterDataMask(data, treePathInfo) + : undefined, drillBy: { filters: drillByFilters, groupbyFieldName: 'groupby' }, }); } diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/transformProps.ts index 73feda2b2e8b..70755574fba7 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/transformProps.ts @@ -27,8 +27,9 @@ import { getValueFormatter, tooltipHtml, } from '@superset-ui/core'; -import { TreemapSeriesNodeItemOption } from 'echarts/types/src/chart/treemap/TreemapSeries'; -import { EChartsCoreOption, TreemapSeriesOption } from 'echarts'; +import type { TreemapSeriesNodeItemOption } from 'echarts/types/src/chart/treemap/TreemapSeries'; +import type { EChartsCoreOption } from 'echarts/core'; +import type { TreemapSeriesOption } from 'echarts/charts'; import { DEFAULT_FORM_DATA as DEFAULT_TREEMAP_FORM_DATA, EchartsTreemapChartProps, @@ -175,18 +176,18 @@ export default function transformProps( let item: TreemapSeriesNodeItemOption = { name, value, + colorSaturation: COLOR_SATURATION, + itemStyle: { + borderColor: BORDER_COLOR, + color: colorFn(name, sliceId), + borderWidth: BORDER_WIDTH, + gapWidth: GAP_WIDTH, + }, }; if (treeNode.children?.length) { item = { ...item, children: traverse(treeNode.children, newPath), - colorSaturation: COLOR_SATURATION, - itemStyle: { - borderColor: BORDER_COLOR, - color: colorFn(name, sliceId, colorScheme), - borderWidth: BORDER_WIDTH, - gapWidth: GAP_WIDTH, - }, }; } else { const joinedName = newPath.join(','); @@ -216,7 +217,7 @@ export default function transformProps( colorSaturation: COLOR_SATURATION, itemStyle: { borderColor: BORDER_COLOR, - color: colorFn(`${metricLabel}`, sliceId, colorScheme), + color: colorFn(`${metricLabel}`, sliceId), borderWidth: BORDER_WIDTH, gapWidth: GAP_WIDTH, }, diff --git 
a/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/types.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/types.ts index 1d4298838918..104cc913babe 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/types.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Treemap/types.ts @@ -23,7 +23,7 @@ import { QueryFormData, QueryFormMetric, } from '@superset-ui/core'; -import { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { CallbackDataParams } from 'echarts/types/src/util/types'; import { BaseTransformedProps, ContextMenuTransformedProps, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/index.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/index.ts index cf7639e5e1be..acaccada18be 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/index.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/index.ts @@ -17,7 +17,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { Behavior, ChartMetadata, ChartPlugin, t } from '@superset-ui/core'; +import { ChartMetadata, ChartPlugin, t } from '@superset-ui/core'; import buildQuery from './buildQuery'; import controlPanel from './controlPanel'; import transformProps from './transformProps'; @@ -27,6 +27,7 @@ import example2 from './images/example2.png'; import example3 from './images/example3.png'; import { EchartsWaterfallChartProps, EchartsWaterfallFormData } from './types'; +// TODO: Implement cross filtering export default class EchartsWaterfallChartPlugin extends ChartPlugin< EchartsWaterfallFormData, EchartsWaterfallChartProps @@ -47,7 +48,6 @@ export default class EchartsWaterfallChartPlugin extends ChartPlugin< controlPanel, loadChart: () => import('./EchartsWaterfall'), metadata: new ChartMetadata({ - behaviors: [Behavior.InteractiveChart], credits: ['https://echarts.apache.org'], category: t('Evolution'), description: t( diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/transformProps.ts b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/transformProps.ts index f9ff6c68dc8f..ab21e7b37cca 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/transformProps.ts @@ -29,7 +29,8 @@ import { rgbToHex, tooltipHtml, } from '@superset-ui/core'; -import { EChartsOption, BarSeriesOption } from 'echarts'; +import type { ComposeOption } from 'echarts/core'; +import type { BarSeriesOption } from 'echarts/charts'; import { EchartsWaterfallChartProps, ISeriesData, @@ -43,6 +44,8 @@ import { getColtypesMapping } from '../utils/series'; import { Refs } from '../types'; import { NULL_STRING } from '../constants'; +type EChartsOption = ComposeOption<BarSeriesOption>; + function formatTooltip({ params, breakdownName, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/types.ts 
b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/types.ts index 4386501199c8..71a28dd9f757 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/types.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Waterfall/types.ts @@ -24,8 +24,8 @@ import { QueryFormMetric, RgbaColor, } from '@superset-ui/core'; -import { BarDataItemOption } from 'echarts/types/src/chart/bar/BarSeries'; -import { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { BarDataItemOption } from 'echarts/types/src/chart/bar/BarSeries'; +import type { CallbackDataParams } from 'echarts/types/src/util/types'; import { BaseTransformedProps, LegendFormData } from '../types'; export type WaterfallFormXTicksLayout = diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/components/Echart.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/components/Echart.tsx index 5d85fac317ff..995e3a535134 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/components/Echart.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/components/Echart.tsx @@ -28,7 +28,37 @@ import { } from 'react'; import { styled } from '@superset-ui/core'; -import { ECharts, init } from 'echarts'; +import { use, init, EChartsType } from 'echarts/core'; +import { + SankeyChart, + PieChart, + BarChart, + FunnelChart, + GaugeChart, + GraphChart, + LineChart, + ScatterChart, + RadarChart, + BoxplotChart, + TreeChart, + TreemapChart, + HeatmapChart, + SunburstChart, +} from 'echarts/charts'; +import { CanvasRenderer } from 'echarts/renderers'; +import { + TooltipComponent, + GridComponent, + VisualMapComponent, + LegendComponent, + DataZoomComponent, + ToolboxComponent, + GraphicComponent, + AriaComponent, + MarkAreaComponent, + MarkLineComponent, +} from 'echarts/components'; +import { LabelLayout } from 'echarts/features'; import { EchartsHandler, EchartsProps, EchartsStylesProps } from '../types'; const Styles = styled.div<EchartsStylesProps>` 
@@ -36,6 +66,35 @@ const Styles = styled.div<EchartsStylesProps>` width: ${({ width }) => width}; `; +use([ + CanvasRenderer, + BarChart, + BoxplotChart, + FunnelChart, + GaugeChart, + GraphChart, + HeatmapChart, + LineChart, + PieChart, + RadarChart, + SankeyChart, + ScatterChart, + SunburstChart, + TreeChart, + TreemapChart, + AriaComponent, + DataZoomComponent, + GraphicComponent, + GridComponent, + MarkAreaComponent, + MarkLineComponent, + LegendComponent, + ToolboxComponent, + TooltipComponent, + VisualMapComponent, + LabelLayout, +]); + function Echart( { width, @@ -53,7 +112,7 @@ function Echart( // eslint-disable-next-line no-param-reassign refs.divRef = divRef; } - const chartRef = useRef<ECharts>(); + const chartRef = useRef<EChartsType>(); const currentSelection = useMemo( () => Object.keys(selectedValues) || [], [selectedValues], diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/controls.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/controls.tsx index d6e9d6c68841..52651356b14d 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/controls.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/controls.tsx @@ -210,9 +210,40 @@ const tooltipSortByMetricControl: ControlSetItem = { }, }; +const tooltipTotalControl: ControlSetItem = { + name: 'showTooltipTotal', + config: { + type: 'CheckboxControl', + label: t('Show total'), + renderTrigger: true, + default: true, + description: t('Whether to display the total value in the tooltip'), + visibility: ({ controls, form_data }: ControlPanelsContainerProps) => + Boolean(controls?.rich_tooltip?.value) && + form_data.viz_type !== 'mixed_timeseries', + }, +}; + +const tooltipPercentageControl: ControlSetItem = { + name: 'showTooltipPercentage', + config: { + type: 'CheckboxControl', + label: t('Show percentage'), + renderTrigger: true, + default: true, + description: t('Whether to display the percentage value in the tooltip'), + visibility: ({ controls, form_data }: 
ControlPanelsContainerProps) => + Boolean(controls?.rich_tooltip?.value) && + !controls?.contributionMode?.value && + form_data.viz_type !== 'mixed_timeseries', + }, +}; + export const richTooltipSection: ControlSetRow[] = [ [<ControlSubSectionHeader>{t('Tooltip')}</ControlSubSectionHeader>], [richTooltipControl], + [tooltipTotalControl], + [tooltipPercentageControl], [tooltipSortByMetricControl], [tooltipTimeFormatControl], ]; diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/types.ts b/superset-frontend/plugins/plugin-chart-echarts/src/types.ts index 4126aaeda742..02adce8cc577 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/types.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/types.ts @@ -32,8 +32,8 @@ import { SqlaFormData, ChartMetadata, } from '@superset-ui/core'; -import { EChartsCoreOption, ECharts } from 'echarts'; -import { TooltipMarker } from 'echarts/types/src/util/format'; +import type { EChartsCoreOption, EChartsType } from 'echarts/core'; +import type { TooltipMarker } from 'echarts/types/src/util/format'; import { StackControlsValue } from './constants'; export type EchartsStylesProps = { @@ -58,7 +58,7 @@ export interface EchartsProps { } export interface EchartsHandler { - getEchartInstance: () => ECharts | undefined; + getEchartInstance: () => EChartsType | undefined; } export enum ForecastSeriesEnum { diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/utils/eventHandlers.ts b/superset-frontend/plugins/plugin-chart-echarts/src/utils/eventHandlers.ts index 98e14d59ed0b..9afa2fcdef39 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/utils/eventHandlers.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/utils/eventHandlers.ts @@ -26,6 +26,7 @@ import { getNumberFormatter, getTimeFormatter, } from '@superset-ui/core'; +import { noop } from 'lodash'; import { BaseTransformedProps, @@ -137,7 +138,8 @@ export const contextMenuEventHandler = } 
onContextMenu(pointerEvent.clientX, pointerEvent.clientY, { drillToDetail: drillFilters, - crossFilter: getCrossFilterDataMask(e.name), + crossFilter: + groupby.length > 0 ? getCrossFilterDataMask(e.name) : undefined, drillBy: { filters: drillFilters, groupbyFieldName: 'groupby' }, }); } @@ -157,11 +159,14 @@ export const allEventHandlers = ( formData, } = transformedProps; const eventHandlers: EventHandlers = { - click: clickEventHandler( - getCrossFilterDataMask(selectedValues, groupby, labelMap), - setDataMask, - emitCrossFilters, - ), + click: + groupby.length > 0 + ? clickEventHandler( + getCrossFilterDataMask(selectedValues, groupby, labelMap), + setDataMask, + emitCrossFilters, + ) + : noop, contextmenu: contextMenuEventHandler( groupby, onContextMenu, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/utils/forecast.ts b/superset-frontend/plugins/plugin-chart-echarts/src/utils/forecast.ts index a68dafe8d4e7..c7244baf48d1 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/utils/forecast.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/utils/forecast.ts @@ -18,8 +18,8 @@ */ import { isNumber } from 'lodash'; import { DataRecord, DTTM_ALIAS, ValueFormatter } from '@superset-ui/core'; -import { OptionName } from 'echarts/types/src/util/types'; -import { TooltipMarker } from 'echarts/types/src/util/format'; +import type { OptionName } from 'echarts/types/src/util/types'; +import type { TooltipMarker } from 'echarts/types/src/util/format'; import { ForecastSeriesContext, ForecastSeriesEnum, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/utils/series.ts b/superset-frontend/plugins/plugin-chart-echarts/src/utils/series.ts index cb97dff93a39..57edbdfb0900 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/utils/series.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/utils/series.ts @@ -34,7 +34,9 @@ import { ValueFormatter, } from '@superset-ui/core'; import { SortSeriesType } from 
'@superset-ui/chart-controls'; -import { format, LegendComponentOption, SeriesOption } from 'echarts'; +import { format } from 'echarts/core'; +import type { LegendComponentOption } from 'echarts/components'; +import type { SeriesOption } from 'echarts'; import { isEmpty, maxBy, meanBy, minBy, orderBy, sumBy } from 'lodash'; import { NULL_STRING, @@ -433,7 +435,7 @@ export function getLegendProps( show, type, selected: legendState, - selector: ['all', 'inverse'], + selector: false, selectorLabel: { fontFamily: theme.typography.families.sansSerif, fontSize: theme.typography.sizes.s, @@ -640,3 +642,22 @@ export function getTimeCompareStackId( }) || defaultId ); } + +const TOOLTIP_SERIES_KEY = 'seriesId'; +export function extractTooltipKeys( + forecastValue: any[], + yIndex: number, + richTooltip?: boolean, + tooltipSortByMetric?: boolean, +): string[] { + if (richTooltip && tooltipSortByMetric) { + return forecastValue + .slice() + .sort((a, b) => b.data[yIndex] - a.data[yIndex]) + .map(value => value[TOOLTIP_SERIES_KEY]); + } + if (richTooltip) { + return forecastValue.map(s => s[TOOLTIP_SERIES_KEY]); + } + return [forecastValue[0][TOOLTIP_SERIES_KEY]]; +} diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/utils/tooltip.ts b/superset-frontend/plugins/plugin-chart-echarts/src/utils/tooltip.ts index 7110cae6d5cd..e630f1142ff9 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/utils/tooltip.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/src/utils/tooltip.ts @@ -17,7 +17,7 @@ * under the License. 
*/ -import { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { CallbackDataParams } from 'echarts/types/src/util/types'; import { TOOLTIP_OVERFLOW_MARGIN, TOOLTIP_POINTER_MARGIN } from '../constants'; import { Refs } from '../types'; @@ -25,6 +25,8 @@ export function getDefaultTooltip(refs: Refs) { return { appendToBody: true, borderColor: 'transparent', + // CSS hack applied on this class to resolve https://github.com/apache/superset/issues/30058 + className: 'echarts-tooltip', position: ( canvasMousePos: [number, number], params: CallbackDataParams, diff --git a/superset-frontend/plugins/plugin-chart-echarts/test/Graph/transformProps.test.ts b/superset-frontend/plugins/plugin-chart-echarts/test/Graph/transformProps.test.ts index 3c2e6e238445..7a55ec23c3a2 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/test/Graph/transformProps.test.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/test/Graph/transformProps.test.ts @@ -74,6 +74,9 @@ describe('EchartsGraph transformProps', () => { col: 'source_column', category: undefined, id: '0', + itemStyle: { + color: '#1f77b4', + }, label: { show: true }, name: 'source_value_1', select: { @@ -88,6 +91,9 @@ describe('EchartsGraph transformProps', () => { col: 'target_column', category: undefined, id: '1', + itemStyle: { + color: '#1f77b4', + }, label: { show: true }, name: 'target_value_1', select: { @@ -102,6 +108,9 @@ describe('EchartsGraph transformProps', () => { col: 'source_column', category: undefined, id: '2', + itemStyle: { + color: '#1f77b4', + }, label: { show: true }, name: 'source_value_2', select: { @@ -116,6 +125,9 @@ describe('EchartsGraph transformProps', () => { col: 'target_column', category: undefined, id: '3', + itemStyle: { + color: '#1f77b4', + }, label: { show: true }, name: 'target_value_2', select: { @@ -132,7 +144,7 @@ describe('EchartsGraph transformProps', () => { links: [ { emphasis: { lineStyle: { width: 12 } }, - lineStyle: { width: 6 }, + lineStyle: { 
width: 6, color: '#1f77b4' }, select: { lineStyle: { opacity: 1, width: 9.600000000000001 }, }, @@ -142,7 +154,7 @@ describe('EchartsGraph transformProps', () => { }, { emphasis: { lineStyle: { width: 5 } }, - lineStyle: { width: 1.5 }, + lineStyle: { width: 1.5, color: '#1f77b4' }, select: { lineStyle: { opacity: 1, width: 5 } }, source: '2', target: '3', @@ -217,6 +229,9 @@ describe('EchartsGraph transformProps', () => { data: [ { id: '0', + itemStyle: { + color: '#1f77b4', + }, col: 'source_column', name: 'source_value', value: 11, @@ -228,6 +243,9 @@ describe('EchartsGraph transformProps', () => { }, { id: '1', + itemStyle: { + color: '#ff7f0e', + }, col: 'target_column', name: 'target_value', value: 11, diff --git a/superset-frontend/plugins/plugin-chart-echarts/test/Pie/transformProps.test.ts b/superset-frontend/plugins/plugin-chart-echarts/test/Pie/transformProps.test.ts index e0c199257429..34f8e5fd5636 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/test/Pie/transformProps.test.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/test/Pie/transformProps.test.ts @@ -22,8 +22,11 @@ import { SqlaFormData, supersetTheme, } from '@superset-ui/core'; -import { LabelFormatterCallback, PieSeriesOption } from 'echarts'; -import { CallbackDataParams } from 'echarts/types/src/util/types'; +import type { PieSeriesOption } from 'echarts/charts'; +import type { + LabelFormatterCallback, + CallbackDataParams, +} from 'echarts/types/src/util/types'; import transformProps, { parseParams } from '../../src/Pie/transformProps'; import { EchartsPieChartProps } from '../../src/Pie/types'; diff --git a/superset-frontend/plugins/plugin-chart-echarts/test/utils/series.test.ts b/superset-frontend/plugins/plugin-chart-echarts/test/utils/series.test.ts index efc0ac745aed..7054f6019ad3 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/test/utils/series.test.ts +++ b/superset-frontend/plugins/plugin-chart-echarts/test/utils/series.test.ts @@ -31,6 +31,7 @@ 
import { extractGroupbyLabel, extractSeries, extractShowValueIndexes, + extractTooltipKeys, formatSeriesName, getAxisType, getChartPadding, @@ -1072,3 +1073,29 @@ describe('getTimeCompareStackId', () => { expect(result).toEqual('123'); }); }); + +const forecastValue = [ + { + data: [0, 1], + seriesId: 'foo', + }, + { + data: [0, 2], + seriesId: 'bar', + }, +]; + +test('extractTooltipKeys with rich tooltip', () => { + const result = extractTooltipKeys(forecastValue, 1, true, false); + expect(result).toEqual(['foo', 'bar']); +}); + +test('extractTooltipKeys with rich tooltip and sorting by metrics', () => { + const result = extractTooltipKeys(forecastValue, 1, true, true); + expect(result).toEqual(['bar', 'foo']); +}); + +test('extractTooltipKeys with non-rich tooltip', () => { + const result = extractTooltipKeys(forecastValue, 1, false, false); + expect(result).toEqual(['foo']); +}); diff --git a/superset-frontend/plugins/plugin-chart-echarts/test/utils/transformers.test.ts b/superset-frontend/plugins/plugin-chart-echarts/test/utils/transformers.test.ts new file mode 100644 index 000000000000..113b416f9c5b --- /dev/null +++ b/superset-frontend/plugins/plugin-chart-echarts/test/utils/transformers.test.ts @@ -0,0 +1,349 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { + AnnotationData, + AnnotationSourceType, + AnnotationStyle, + AnnotationType, + AxisType, + CategoricalColorNamespace, + EventAnnotationLayer, + FormulaAnnotationLayer, + IntervalAnnotationLayer, + supersetTheme, + TimeseriesAnnotationLayer, + TimeseriesDataRecord, +} from '@superset-ui/core'; +import { OrientationType } from '@superset-ui/plugin-chart-echarts'; +import { + transformEventAnnotation, + transformFormulaAnnotation, + transformIntervalAnnotation, + transformTimeseriesAnnotation, +} from '../../src/Timeseries/transformers'; + +const mockData: TimeseriesDataRecord[] = [ + { + __timestamp: 10, + }, + { + __timestamp: 20, + }, +]; + +const mockFormulaAnnotationLayer: FormulaAnnotationLayer = { + annotationType: AnnotationType.Formula as const, + name: 'My Formula', + show: true, + style: AnnotationStyle.Solid, + value: '50', + showLabel: true, +}; + +describe('transformFormulaAnnotation', () => { + it('should transform data correctly', () => { + expect( + transformFormulaAnnotation( + mockFormulaAnnotationLayer, + mockData, + '__timestamp', + AxisType.Value, + CategoricalColorNamespace.getScale(''), + undefined, + ).data, + ).toEqual([ + [10, 50], + [20, 50], + ]); + }); + + it('should swap x and y for horizontal chart', () => { + expect( + transformFormulaAnnotation( + mockFormulaAnnotationLayer, + mockData, + '__timestamp', + AxisType.Value, + CategoricalColorNamespace.getScale(''), + undefined, + OrientationType.Horizontal, + ).data, + ).toEqual([ + [50, 10], + [50, 20], + ]); + }); +}); + +const mockIntervalAnnotationLayer: IntervalAnnotationLayer = { + name: 'Interval annotation layer', + annotationType: AnnotationType.Interval as const, + sourceType: AnnotationSourceType.Native as const, + color: null, + style: AnnotationStyle.Solid, + width: 1, + show: true, + showLabel: false, + value: 1, +}; + +const 
mockIntervalAnnotationData: AnnotationData = { + 'Interval annotation layer': { + records: [ + { + start_dttm: 10, + end_dttm: 12, + short_descr: 'Timeseries 1', + long_descr: '', + json_metadata: '', + }, + { + start_dttm: 13, + end_dttm: 15, + short_descr: 'Timeseries 2', + long_descr: '', + json_metadata: '', + }, + ], + }, +}; + +describe('transformIntervalAnnotation', () => { + it('should transform data correctly', () => { + expect( + transformIntervalAnnotation( + mockIntervalAnnotationLayer, + mockData, + mockIntervalAnnotationData, + CategoricalColorNamespace.getScale(''), + supersetTheme, + ) + .map(annotation => annotation.markArea) + .map(markArea => markArea.data), + ).toEqual([ + [ + [ + { name: 'Interval annotation layer - Timeseries 1', xAxis: 10 }, + { xAxis: 12 }, + ], + ], + [ + [ + { name: 'Interval annotation layer - Timeseries 2', xAxis: 13 }, + { xAxis: 15 }, + ], + ], + ]); + }); + + it('should use yAxis for horizontal chart data', () => { + expect( + transformIntervalAnnotation( + mockIntervalAnnotationLayer, + mockData, + mockIntervalAnnotationData, + CategoricalColorNamespace.getScale(''), + supersetTheme, + undefined, + OrientationType.Horizontal, + ) + .map(annotation => annotation.markArea) + .map(markArea => markArea.data), + ).toEqual([ + [ + [ + { name: 'Interval annotation layer - Timeseries 1', yAxis: 10 }, + { yAxis: 12 }, + ], + ], + [ + [ + { name: 'Interval annotation layer - Timeseries 2', yAxis: 13 }, + { yAxis: 15 }, + ], + ], + ]); + }); +}); + +const mockEventAnnotationLayer: EventAnnotationLayer = { + annotationType: AnnotationType.Event, + color: null, + name: 'Event annotation layer', + show: true, + showLabel: false, + sourceType: AnnotationSourceType.Native, + style: AnnotationStyle.Solid, + value: 1, + width: 1, +}; + +const mockEventAnnotationData: AnnotationData = { + 'Event annotation layer': { + records: [ + { + start_dttm: 10, + end_dttm: 12, + short_descr: 'Test annotation', + long_descr: '', + json_metadata: 
'', + }, + { + start_dttm: 13, + end_dttm: 15, + short_descr: 'Test annotation 2', + long_descr: '', + json_metadata: '', + }, + ], + }, +}; + +describe('transformEventAnnotation', () => { + it('should transform data correctly', () => { + expect( + transformEventAnnotation( + mockEventAnnotationLayer, + mockData, + mockEventAnnotationData, + CategoricalColorNamespace.getScale(''), + supersetTheme, + ) + .map(annotation => annotation.markLine) + .map(markLine => markLine.data), + ).toEqual([ + [ + { + name: 'Event annotation layer - Test annotation', + xAxis: 10, + }, + ], + [{ name: 'Event annotation layer - Test annotation 2', xAxis: 13 }], + ]); + }); + + it('should use yAxis for horizontal chart data', () => { + expect( + transformEventAnnotation( + mockEventAnnotationLayer, + mockData, + mockEventAnnotationData, + CategoricalColorNamespace.getScale(''), + supersetTheme, + undefined, + OrientationType.Horizontal, + ) + .map(annotation => annotation.markLine) + .map(markLine => markLine.data), + ).toEqual([ + [ + { + name: 'Event annotation layer - Test annotation', + yAxis: 10, + }, + ], + [{ name: 'Event annotation layer - Test annotation 2', yAxis: 13 }], + ]); + }); +}); + +const mockTimeseriesAnnotationLayer: TimeseriesAnnotationLayer = { + annotationType: AnnotationType.Timeseries, + color: null, + hideLine: false, + name: 'Timeseries annotation layer', + overrides: { + time_range: null, + }, + show: true, + showLabel: false, + showMarkers: false, + sourceType: AnnotationSourceType.Line, + style: AnnotationStyle.Solid, + value: 1, + width: 1, +}; + +const mockTimeseriesAnnotationData: AnnotationData = { + 'Timeseries annotation layer': [ + { + key: 'Key 1', + values: [ + { + x: 10, + y: 12, + }, + ], + }, + { + key: 'Key 2', + values: [ + { + x: 12, + y: 15, + }, + { + x: 15, + y: 20, + }, + ], + }, + ], +}; + +describe('transformTimeseriesAnnotation', () => { + it('should transform data correctly', () => { + expect( + transformTimeseriesAnnotation( + 
mockTimeseriesAnnotationLayer, + 1, + mockData, + mockTimeseriesAnnotationData, + CategoricalColorNamespace.getScale(''), + ).map(annotation => annotation.data), + ).toEqual([ + [[10, 12]], + [ + [12, 15], + [15, 20], + ], + ]); + }); + + it('should swap x and y for horizontal chart', () => { + expect( + transformTimeseriesAnnotation( + mockTimeseriesAnnotationLayer, + 1, + mockData, + mockTimeseriesAnnotationData, + CategoricalColorNamespace.getScale(''), + undefined, + OrientationType.Horizontal, + ).map(annotation => annotation.data), + ).toEqual([ + [[12, 10]], + [ + [15, 12], + [20, 15], + ], + ]); + }); +}); diff --git a/superset-frontend/plugins/plugin-chart-handlebars/src/plugin/controls/metrics.tsx b/superset-frontend/plugins/plugin-chart-handlebars/src/plugin/controls/metrics.tsx index dd1516098a53..11b02cac0454 100644 --- a/superset-frontend/plugins/plugin-chart-handlebars/src/plugin/controls/metrics.tsx +++ b/superset-frontend/plugins/plugin-chart-handlebars/src/plugin/controls/metrics.tsx @@ -104,9 +104,7 @@ export const showTotalsControlSetItem: ControlSetItem = { type: 'CheckboxControl', label: t('Show summary'), default: false, - description: t( - 'Show total aggregations of selected metrics. Note that row limit does not apply to the result.', - ), + description: 'Shows total aggregation', visibility: isAggMode, resetOnHide: false, }, diff --git a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx index a87371f440f0..06fb84b4eb02 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx @@ -983,9 +983,7 @@ export default function TableChart<D extends DataRecord = DataRecord>( > {t('Summary')} <Tooltip - overlay={t( - 'Show total aggregations of selected metrics. 
Note that row limit does not apply to the result.', - )} + overlay={'Shows total aggregation'} > <InfoCircleOutlined /> </Tooltip> diff --git a/superset-frontend/plugins/plugin-chart-table/src/buildQuery.ts b/superset-frontend/plugins/plugin-chart-table/src/buildQuery.ts index 554914053cb8..df7a8943f54f 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/buildQuery.ts +++ b/superset-frontend/plugins/plugin-chart-table/src/buildQuery.ts @@ -125,8 +125,8 @@ const buildQuery: BuildQuery<TableChartFormData> = ( : [], ); - let temporalColumAdded = false; - let temporalColum = null; + let temporalColumnAdded = false; + let temporalColumn = null; if (queryMode === QueryMode.Aggregate) { metrics = metrics || []; @@ -180,23 +180,23 @@ const buildQuery: BuildQuery<TableChartFormData> = ( time_grain_sqla && temporalColumnsLookup?.[col]; - if (shouldBeAdded && !temporalColumAdded) { - temporalColum = { + if (shouldBeAdded && !temporalColumnAdded) { + temporalColumn = { timeGrain: time_grain_sqla, columnType: 'BASE_AXIS', sqlExpression: col, label: col, expressionType: 'SQL', } as AdhocColumn; - temporalColumAdded = true; + temporalColumnAdded = true; return false; // Do not include this in the output; it's added separately } return true; }); // So we ensure the temporal column is added first - if (temporalColum) { - columns = [temporalColum, ...columns]; + if (temporalColumn) { + columns = [temporalColumn, ...columns]; } } @@ -209,10 +209,15 @@ const buildQuery: BuildQuery<TableChartFormData> = ( (ownState.currentPage ?? 0) * (ownState.pageSize ?? 0); } + if (!temporalColumn) { + // This query is not using temporal column, so it doesn't need time grain + extras.time_grain_sqla = undefined; + } + let queryObject = { ...baseQueryObject, columns, - extras: !isEmpty(timeOffsets) && !temporalColum ? 
{} : extras, + extras, orderby, metrics, post_processing: postProcessing, @@ -250,7 +255,6 @@ const buildQuery: BuildQuery<TableChartFormData> = ( row_limit: 0, row_offset: 0, post_processing: [], - extras: undefined, // we don't need time grain here order_desc: undefined, // we don't need orderby stuff here, orderby: undefined, // because this query will be used for get total aggregation. }); diff --git a/superset-frontend/plugins/plugin-chart-table/src/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-table/src/controlPanel.tsx index 91e2c5143a5b..5166e028e9e3 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/controlPanel.tsx @@ -370,9 +370,7 @@ const config: ControlPanelConfig = { type: 'CheckboxControl', label: t('Show summary'), default: false, - description: t( - 'Show total aggregations of selected metrics. Note that row limit does not apply to the result.', - ), + description: 'Shows total aggregation', visibility: isAggMode, resetOnHide: false, }, diff --git a/superset-frontend/plugins/plugin-chart-table/test/buildQuery.test.ts b/superset-frontend/plugins/plugin-chart-table/test/buildQuery.test.ts index a86f7d181baf..f3eb2d0955c3 100644 --- a/superset-frontend/plugins/plugin-chart-table/test/buildQuery.test.ts +++ b/superset-frontend/plugins/plugin-chart-table/test/buildQuery.test.ts @@ -25,6 +25,28 @@ const basicFormData: TableChartFormData = { datasource: '11__table', }; +const extraQueryFormData: TableChartFormData = { + ...basicFormData, + time_grain_sqla: TimeGranularity.MONTH, + groupby: ['col1'], + query_mode: QueryMode.Aggregate, + show_totals: true, + metrics: ['aaa', 'aaa'], + adhoc_filters: [ + { + expressionType: 'SQL', + sqlExpression: "status IN ('In Process')", + clause: 'WHERE', + subject: null, + operator: null, + comparator: null, + isExtra: false, + isNew: false, + datasourceWarning: false, + filterOptionName: 'filter_v8m9t9oq5re_ndzk6g5am7', + } 
as any, + ], +}; describe('plugin-chart-table', () => { describe('buildQuery', () => { it('should add post-processing and ignore duplicate metrics', () => { @@ -114,5 +136,26 @@ describe('plugin-chart-table', () => { expressionType: 'SQL', }); }); + it('should include time_grain_sqla in extras if temporal colum is used and keep the rest', () => { + const { queries } = buildQuery({ + ...extraQueryFormData, + temporal_columns_lookup: { col1: true }, + }); + // Extras in regular query + expect(queries[0].extras?.time_grain_sqla).toEqual(TimeGranularity.MONTH); + expect(queries[0].extras?.where).toEqual("(status IN ('In Process'))"); + // Extras in summary query + expect(queries[1].extras?.time_grain_sqla).toEqual(TimeGranularity.MONTH); + expect(queries[1].extras?.where).toEqual("(status IN ('In Process'))"); + }); + it('should not include time_grain_sqla in extras if temporal colum is not used and keep the rest', () => { + const { queries } = buildQuery(extraQueryFormData); + // Extras in regular query + expect(queries[0].extras?.time_grain_sqla).toBeUndefined(); + expect(queries[0].extras?.where).toEqual("(status IN ('In Process'))"); + // Extras in summary query + expect(queries[1].extras?.time_grain_sqla).toBeUndefined(); + expect(queries[1].extras?.where).toEqual("(status IN ('In Process'))"); + }); }); }); diff --git a/superset-frontend/plugins/plugin-chart-word-cloud/src/chart/WordCloud.tsx b/superset-frontend/plugins/plugin-chart-word-cloud/src/chart/WordCloud.tsx index 53bf98fe90af..6bdda4a3d11a 100644 --- a/superset-frontend/plugins/plugin-chart-word-cloud/src/chart/WordCloud.tsx +++ b/superset-frontend/plugins/plugin-chart-word-cloud/src/chart/WordCloud.tsx @@ -29,7 +29,7 @@ import { SupersetThemeProps, withTheme, seed, - CategoricalColorScale, + CategoricalColorNamespace, } from '@superset-ui/core'; import { isEqual } from 'lodash'; @@ -230,7 +230,7 @@ class WordCloud extends PureComponent<FullWordCloudProps, WordCloudState> { 
encoder.channels.color.setDomainFromDataset(words); const { getValueFromDatum } = encoder.channels.color; - const colorFn = encoder.channels.color.scale as CategoricalColorScale; + const colorFn = CategoricalColorNamespace.getScale(colorScheme); const viewBoxWidth = width * scaleFactor; const viewBoxHeight = height * scaleFactor; @@ -250,11 +250,7 @@ class WordCloud extends PureComponent<FullWordCloudProps, WordCloudState> { fontSize={`${w.size}px`} fontWeight={w.weight} fontFamily={w.font} - fill={colorFn( - getValueFromDatum(w) as string, - sliceId, - colorScheme, - )} + fill={colorFn(getValueFromDatum(w) as string, sliceId)} textAnchor="middle" transform={`translate(${w.x}, ${w.y}) rotate(${w.rotate})`} > diff --git a/superset-frontend/plugins/plugin-chart-word-cloud/src/index.ts b/superset-frontend/plugins/plugin-chart-word-cloud/src/index.ts index 76168eaba77b..c265e837a464 100644 --- a/superset-frontend/plugins/plugin-chart-word-cloud/src/index.ts +++ b/superset-frontend/plugins/plugin-chart-word-cloud/src/index.ts @@ -18,7 +18,5 @@ */ export { default as WordCloudChartPlugin } from './plugin'; -export { default as WordCloudTransformProps } from './plugin/transformProps'; -export { default as LegacyWordCloudChartPlugin } from './legacyPlugin'; export * from './types'; export { default as configureEncodable } from './configureEncodable'; diff --git a/superset-frontend/plugins/plugin-chart-word-cloud/src/legacyPlugin/transformProps.ts b/superset-frontend/plugins/plugin-chart-word-cloud/src/legacyPlugin/transformProps.ts deleted file mode 100644 index 5685edd92129..000000000000 --- a/superset-frontend/plugins/plugin-chart-word-cloud/src/legacyPlugin/transformProps.ts +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import { ChartProps, getColumnLabel } from '@superset-ui/core'; -import { WordCloudProps, WordCloudEncoding } from '../chart/WordCloud'; -import { LegacyWordCloudFormData } from './types'; - -function getMetricLabel( - metric: LegacyWordCloudFormData['metric'], -): string | undefined { - if (typeof metric === 'string' || typeof metric === 'undefined') { - return metric; - } - if (Array.isArray(metric)) { - return metric.length > 0 ? getMetricLabel(metric[0]) : undefined; - } - - return metric.label; -} - -export default function transformProps(chartProps: ChartProps): WordCloudProps { - const { width, height, formData, queriesData } = chartProps; - const { - colorScheme, - metric, - rotation, - series, - sizeFrom = 0, - sizeTo, - sliceId, - } = formData as LegacyWordCloudFormData; - - const metricLabel = getMetricLabel(metric); - const seriesLabel = getColumnLabel(series); - - const encoding: Partial<WordCloudEncoding> = { - color: { - field: seriesLabel, - scale: { - scheme: colorScheme, - }, - type: 'nominal', - }, - fontSize: - typeof metricLabel === 'undefined' - ? 
undefined - : { - field: metricLabel, - scale: { - range: [sizeFrom, sizeTo], - zero: true, - }, - type: 'quantitative', - }, - text: { - field: seriesLabel, - }, - }; - - return { - data: queriesData[0].data, - encoding, - height, - rotation, - width, - sliceId, - colorScheme, - }; -} diff --git a/superset-frontend/plugins/plugin-chart-word-cloud/src/plugin/index.ts b/superset-frontend/plugins/plugin-chart-word-cloud/src/plugin/index.ts index f50064bfe4ef..371727bc9a48 100644 --- a/superset-frontend/plugins/plugin-chart-word-cloud/src/plugin/index.ts +++ b/superset-frontend/plugins/plugin-chart-word-cloud/src/plugin/index.ts @@ -18,7 +18,7 @@ */ import { t, ChartMetadata, ChartPlugin } from '@superset-ui/core'; -import transformProps from '../legacyPlugin/transformProps'; +import transformProps from './transformProps'; import buildQuery from './buildQuery'; import { WordCloudFormData } from '../types'; import thumbnail from '../images/thumbnail.png'; diff --git a/superset-frontend/plugins/plugin-chart-word-cloud/src/plugin/transformProps.ts b/superset-frontend/plugins/plugin-chart-word-cloud/src/plugin/transformProps.ts index 59f625853863..a7fdcdde6f58 100644 --- a/superset-frontend/plugins/plugin-chart-word-cloud/src/plugin/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-word-cloud/src/plugin/transformProps.ts @@ -17,14 +17,61 @@ * under the License. */ -import { ChartProps } from '@superset-ui/core'; -import { WordCloudProps } from '../chart/WordCloud'; +import { ChartProps, getColumnLabel } from '@superset-ui/core'; +import { WordCloudProps, WordCloudEncoding } from '../chart/WordCloud'; import { WordCloudFormData } from '../types'; +function getMetricLabel( + metric: WordCloudFormData['metric'], +): string | undefined { + if (typeof metric === 'string' || typeof metric === 'undefined') { + return metric; + } + if (Array.isArray(metric)) { + return metric.length > 0 ? 
getMetricLabel(metric[0]) : undefined; + } + + return metric.label; +} + export default function transformProps(chartProps: ChartProps): WordCloudProps { const { width, height, formData, queriesData } = chartProps; - const { encoding, rotation, sliceId, colorScheme } = - formData as WordCloudFormData; + const { + colorScheme, + metric, + rotation, + series, + sizeFrom = 0, + sizeTo, + sliceId, + } = formData as WordCloudFormData; + + const metricLabel = getMetricLabel(metric); + const seriesLabel = getColumnLabel(series); + + const encoding: Partial<WordCloudEncoding> = { + color: { + field: seriesLabel, + scale: { + scheme: colorScheme, + }, + type: 'nominal', + }, + fontSize: + typeof metricLabel === 'undefined' + ? undefined + : { + field: metricLabel, + scale: { + range: [sizeFrom, sizeTo], + zero: true, + }, + type: 'quantitative', + }, + text: { + field: seriesLabel, + }, + }; return { data: queriesData[0].data, diff --git a/superset-frontend/plugins/plugin-chart-word-cloud/test/plugin/buildQuery.test.ts b/superset-frontend/plugins/plugin-chart-word-cloud/test/buildQuery.test.ts similarity index 92% rename from superset-frontend/plugins/plugin-chart-word-cloud/test/plugin/buildQuery.test.ts rename to superset-frontend/plugins/plugin-chart-word-cloud/test/buildQuery.test.ts index bd75f9a5e935..6c236742697c 100644 --- a/superset-frontend/plugins/plugin-chart-word-cloud/test/plugin/buildQuery.test.ts +++ b/superset-frontend/plugins/plugin-chart-word-cloud/test/buildQuery.test.ts @@ -17,8 +17,8 @@ * under the License. 
*/ -import { WordCloudFormData } from '../../src'; -import buildQuery from '../../src/plugin/buildQuery'; +import { WordCloudFormData } from '../src'; +import buildQuery from '../src/plugin/buildQuery'; describe('WordCloud buildQuery', () => { const formData: WordCloudFormData = { diff --git a/superset-frontend/plugins/plugin-chart-word-cloud/test/legacyPlugin/transformProps.test.ts b/superset-frontend/plugins/plugin-chart-word-cloud/test/legacyPlugin/transformProps.test.ts deleted file mode 100644 index a61e7408de75..000000000000 --- a/superset-frontend/plugins/plugin-chart-word-cloud/test/legacyPlugin/transformProps.test.ts +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import { ChartProps, supersetTheme } from '@superset-ui/core'; -import transformProps from '../../src/legacyPlugin/transformProps'; - -describe('WordCloud transformProps', () => { - const formData = { - colorScheme: 'bnbColors', - datasource: '3__table', - granularity_sqla: 'ds', - metric: 'sum__num', - rotation: 'square', - series: 'name', - sizeFrom: 10, - sizeTo: 70, - }; - const chartProps = new ChartProps({ - formData, - width: 800, - height: 600, - queriesData: [ - { - data: [{ name: 'Hulk', sum__num: 1 }], - }, - ], - theme: supersetTheme, - }); - - it('should transform chart props for word cloud viz', () => { - expect(transformProps(chartProps)).toEqual({ - width: 800, - height: 600, - encoding: { - color: { - field: 'name', - scale: { - scheme: 'bnbColors', - }, - type: 'nominal', - }, - fontSize: { - field: 'sum__num', - scale: { - range: [10, 70], - zero: true, - }, - type: 'quantitative', - }, - text: { - field: 'name', - }, - }, - rotation: 'square', - colorScheme: 'bnbColors', - data: [{ name: 'Hulk', sum__num: 1 }], - }); - }); -}); diff --git a/superset-frontend/spec/fixtures/mockDashboardFormData.ts b/superset-frontend/spec/fixtures/mockDashboardFormData.ts index a1adb18a7e72..3f089802509f 100644 --- a/superset-frontend/spec/fixtures/mockDashboardFormData.ts +++ b/superset-frontend/spec/fixtures/mockDashboardFormData.ts @@ -26,10 +26,7 @@ export const getDashboardFormData = (overrides: JsonObject = {}) => ({ girl: '#FF69B4', boy: '#ADD8E6', }, - shared_label_colors: { - boy: '#ADD8E6', - girl: '#FF69B4', - }, + shared_label_colors: ['boy', 'girl'], color_scheme: 'd3Category20b', extra_filters: [ { diff --git a/superset-frontend/spec/fixtures/mockDashboardInfo.js b/superset-frontend/spec/fixtures/mockDashboardInfo.js index 2f747fd07b55..a046a554d096 100644 --- a/superset-frontend/spec/fixtures/mockDashboardInfo.js +++ b/superset-frontend/spec/fixtures/mockDashboardInfo.js @@ -26,6 +26,7 @@ export default { { id: 'DefaultsID', filterType: 
'filter_select', + chartsInScope: [], targets: [{}], cascadeParentIds: [], }, diff --git a/superset-frontend/spec/fixtures/mockDashboardState.js b/superset-frontend/spec/fixtures/mockDashboardState.js index 737e38aef59e..42360cdc7142 100644 --- a/superset-frontend/spec/fixtures/mockDashboardState.js +++ b/superset-frontend/spec/fixtures/mockDashboardState.js @@ -113,6 +113,6 @@ export const overwriteConfirmMetadata = { slug: null, owners: [], json_metadata: - '{"timed_refresh_immune_slices":[],"expanded_slices":{},"refresh_frequency":0,"default_filters":"{}","color_scheme":"supersetColors","label_colors":{"0":"#FCC700","1":"#A868B7","15":"#3CCCCB","30":"#A38F79","45":"#8FD3E4","age":"#1FA8C9","Yes,":"#1FA8C9","Female":"#454E7C","Prefer":"#5AC189","No,":"#FF7F44","Male":"#666666","Prefer not to say":"#E04355","Ph.D.":"#FCC700","associate\'s degree":"#A868B7","bachelor\'s degree":"#3CCCCB","high school diploma or equivalent (GED)":"#A38F79","master\'s degree (non-professional)":"#8FD3E4","no high school (secondary school)":"#A1A6BD","professional degree (MBA, MD, JD, etc.)":"#ACE1C4","some college credit, no degree":"#FEC0A1","some high school":"#B2B2B2","trade, technical, or vocational training":"#EFA1AA","No, not an ethnic minority":"#1FA8C9","Yes, an ethnic minority":"#454E7C","<NULL>":"#5AC189","Yes":"#FF7F44","No":"#666666","last_yr_income":"#E04355","More":"#A1A6BD","Less":"#ACE1C4","I":"#FEC0A1","expected_earn":"#B2B2B2","Yes: Willing To":"#EFA1AA","No: Not Willing to":"#FDE380","No Answer":"#D3B3DA","In an Office (with Other Developers)":"#9EE5E5","No Preference":"#D1C6BC","From Home":"#1FA8C9"},"color_scheme_domain":["#1FA8C9","#454E7C","#5AC189","#FF7F44","#666666","#E04355","#FCC700","#A868B7","#3CCCCB","#A38F79","#8FD3E4","#A1A6BD","#ACE1C4","#FEC0A1","#B2B2B2","#EFA1AA","#FDE380","#D3B3DA","#9EE5E5","#D1C6BC"],"shared_label_colors":{"Male":"#5ac19e","Female":"#1f86c9","<NULL>":"#5AC189","Prefer not to say":"#47457c","No Answer":"#e05043","Yes, an ethnic 
minority":"#666666","No, not an ethnic minority":"#ffa444","age":"#1FA8C9"},"cross_filters_enabled":false,"filter_scopes":{},"chart_configuration":{},"positions":{}}', + '{"timed_refresh_immune_slices":[],"expanded_slices":{},"refresh_frequency":0,"default_filters":"{}","color_scheme":"supersetColors","label_colors":{"0":"#FCC700","1":"#A868B7","15":"#3CCCCB","30":"#A38F79","45":"#8FD3E4","age":"#1FA8C9","Yes,":"#1FA8C9","Female":"#454E7C","Prefer":"#5AC189","No,":"#FF7F44","Male":"#666666","Prefer not to say":"#E04355","Ph.D.":"#FCC700","associate\'s degree":"#A868B7","bachelor\'s degree":"#3CCCCB","high school diploma or equivalent (GED)":"#A38F79","master\'s degree (non-professional)":"#8FD3E4","no high school (secondary school)":"#A1A6BD","professional degree (MBA, MD, JD, etc.)":"#ACE1C4","some college credit, no degree":"#FEC0A1","some high school":"#B2B2B2","trade, technical, or vocational training":"#EFA1AA","No, not an ethnic minority":"#1FA8C9","Yes, an ethnic minority":"#454E7C","<NULL>":"#5AC189","Yes":"#FF7F44","No":"#666666","last_yr_income":"#E04355","More":"#A1A6BD","Less":"#ACE1C4","I":"#FEC0A1","expected_earn":"#B2B2B2","Yes: Willing To":"#EFA1AA","No: Not Willing to":"#FDE380","No Answer":"#D3B3DA","In an Office (with Other Developers)":"#9EE5E5","No Preference":"#D1C6BC","From Home":"#1FA8C9"},"color_scheme_domain":["#1FA8C9","#454E7C","#5AC189","#FF7F44","#666666","#E04355","#FCC700","#A868B7","#3CCCCB","#A38F79","#8FD3E4","#A1A6BD","#ACE1C4","#FEC0A1","#B2B2B2","#EFA1AA","#FDE380","#D3B3DA","#9EE5E5","#D1C6BC"],"shared_label_colors":["Male", "Female","<NULL>","Prefer not to say","No Answer","Yes, an ethnic minority","No, not an ethnic minority","age"],"cross_filters_enabled":false,"filter_scopes":{},"chart_configuration":{},"positions":{}}', }, }; diff --git a/superset-frontend/spec/fixtures/mockNativeFilters.ts b/superset-frontend/spec/fixtures/mockNativeFilters.ts index 070f48ab06bd..b83cdcc8dccd 100644 --- 
a/superset-frontend/spec/fixtures/mockNativeFilters.ts +++ b/superset-frontend/spec/fixtures/mockNativeFilters.ts @@ -133,6 +133,7 @@ export const singleNativeFiltersState = { id: [NATIVE_FILTER_ID], name: 'eth', type: 'text', + filterType: 'filter_select', targets: [{ datasetId: 13, column: { name: 'ethnic_minority' } }], defaultDataMask: { filterState: { diff --git a/superset-frontend/src/GlobalStyles.tsx b/superset-frontend/src/GlobalStyles.tsx index bd882e39732b..16712bf451bd 100644 --- a/superset-frontend/src/GlobalStyles.tsx +++ b/superset-frontend/src/GlobalStyles.tsx @@ -34,6 +34,11 @@ export const GlobalStyles = () => ( th { font-weight: ${theme.typography.weights.bold}; } + // CSS hack to resolve the issue caused by the invisible echart tooltip on + // https://github.com/apache/superset/issues/30058 + .echarts-tooltip[style*='visibility: hidden'] { + display: none !important; + } // TODO: Remove when on Ant Design 5. // Check src/components/Modal for more info. .modal-functions-ok-button { diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.js b/superset-frontend/src/SqlLab/actions/sqlLab.js index 63fcfee25b09..e7cb6e0419e4 100644 --- a/superset-frontend/src/SqlLab/actions/sqlLab.js +++ b/superset-frontend/src/SqlLab/actions/sqlLab.js @@ -294,21 +294,25 @@ export function requestQueryResults(query) { return { type: REQUEST_QUERY_RESULTS, query }; } -export function fetchQueryResults(query, displayLimit) { - return function (dispatch) { +export function fetchQueryResults(query, displayLimit, timeoutInMs) { + return function (dispatch, getState) { + const { SQLLAB_QUERY_RESULT_TIMEOUT } = getState().common?.conf ?? {}; dispatch(requestQueryResults(query)); const queryParams = rison.encode({ key: query.resultsKey, rows: displayLimit || null, }); - + const timeout = timeoutInMs ?? 
SQLLAB_QUERY_RESULT_TIMEOUT; + const controller = new AbortController(); return SupersetClient.get({ endpoint: `/api/v1/sqllab/results/?q=${queryParams}`, parseMethod: 'json-bigint', + ...(timeout && { timeout, signal: controller.signal }), }) .then(({ json }) => dispatch(querySuccess(query, json))) - .catch(response => + .catch(response => { + controller.abort(); getClientErrorObject(response).then(error => { const message = error.error || @@ -318,8 +322,8 @@ export function fetchQueryResults(query, displayLimit) { return dispatch( queryFailed(query, message, error.link, error.errors), ); - }), - ); + }); + }); }; } @@ -627,6 +631,21 @@ export function setActiveQueryEditor(queryEditor) { }; } +export function switchQueryEditor(goBackward = false) { + return function (dispatch, getState) { + const { sqlLab } = getState(); + const { queryEditors, tabHistory } = sqlLab; + const qeid = tabHistory[tabHistory.length - 1]; + const currentIndex = queryEditors.findIndex(qe => qe.id === qeid); + const nextIndex = goBackward + ? 
currentIndex - 1 + queryEditors.length + : currentIndex + 1; + const newQueryEditor = queryEditors[nextIndex % queryEditors.length]; + + dispatch(setActiveQueryEditor(newQueryEditor)); + }; +} + export function loadQueryEditor(queryEditor) { return { type: LOAD_QUERY_EDITOR, queryEditor }; } @@ -1187,6 +1206,7 @@ export function popSavedQuery(saveQueryId) { schema: queryEditorProps.schema, sql: queryEditorProps.sql, templateParams: queryEditorProps.templateParams, + remoteId: queryEditorProps.remoteId, }; return dispatch(addQueryEditor(tmpAdaptedProps)); }) diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.test.js b/superset-frontend/src/SqlLab/actions/sqlLab.test.js index 1c800283ea93..7591abfaea7f 100644 --- a/superset-frontend/src/SqlLab/actions/sqlLab.test.js +++ b/superset-frontend/src/SqlLab/actions/sqlLab.test.js @@ -30,6 +30,9 @@ import { initialState, queryId, } from 'src/SqlLab/fixtures'; +import { SupersetClient } from '@superset-ui/core'; +import { ADD_TOAST } from 'src/components/MessageToasts/actions'; +import { ToastType } from '../../components/MessageToasts/types'; const middlewares = [thunk]; const mockStore = configureMockStore(middlewares); @@ -174,8 +177,9 @@ describe('async actions', () => { describe('fetchQueryResults', () => { const makeRequest = () => { + const store = mockStore(initialState); const request = actions.fetchQueryResults(query); - return request(dispatch); + return request(dispatch, store.getState); }; it('makes the fetch request', () => { @@ -452,6 +456,112 @@ describe('async actions', () => { }); }); + describe('popSavedQuery', () => { + const supersetClientGetSpy = jest.spyOn(SupersetClient, 'get'); + const store = mockStore({}); + + const mockSavedQueryApiResponse = { + catalog: null, + changed_by: { + first_name: 'Superset', + id: 1, + last_name: 'Admin', + }, + changed_on: '2024-12-28T20:06:14.246743', + changed_on_delta_humanized: '8 days ago', + created_by: { + first_name: 'Superset', + id: 1, + last_name: 
'Admin', + }, + database: { + database_name: 'examples', + id: 2, + }, + description: '', + id: 1, + label: 'Query 1', + schema: 'public', + sql: 'SELECT * FROM channels', + sql_tables: [ + { + catalog: null, + schema: null, + table: 'channels', + }, + ], + template_parameters: null, + }; + + const makeRequest = id => { + const request = actions.popSavedQuery(id); + const { dispatch } = store; + + return request(dispatch, () => initialState); + }; + + beforeEach(() => { + supersetClientGetSpy.mockClear(); + store.clearActions(); + }); + + afterAll(() => { + supersetClientGetSpy.mockRestore(); + }); + + it('calls API endpint with correct params', async () => { + supersetClientGetSpy.mockResolvedValue({ + json: { result: mockSavedQueryApiResponse }, + }); + + await makeRequest(123); + + expect(supersetClientGetSpy).toHaveBeenCalledWith({ + endpoint: '/api/v1/saved_query/123', + }); + }); + + it('dispatches addQueryEditor with correct params on successful API call', async () => { + supersetClientGetSpy.mockResolvedValue({ + json: { result: mockSavedQueryApiResponse }, + }); + + const expectedParams = { + name: 'Query 1', + dbId: 2, + catalog: null, + schema: 'public', + sql: 'SELECT * FROM channels', + templateParams: null, + remoteId: 1, + }; + + await makeRequest(1); + + const addQueryEditorAction = store + .getActions() + .find(action => action.type === actions.ADD_QUERY_EDITOR); + + expect(addQueryEditorAction).toBeTruthy(); + expect(addQueryEditorAction?.queryEditor).toEqual( + expect.objectContaining(expectedParams), + ); + }); + + it('should dispatch addDangerToast on API error', async () => { + supersetClientGetSpy.mockResolvedValue(new Error()); + + await makeRequest(1); + + const addToastAction = store + .getActions() + .find(action => action.type === ADD_TOAST); + + expect(addToastAction).toBeTruthy(); + expect(addToastAction?.payload?.toastType).toBe(ToastType.Danger); + }); + }); + describe('addQueryEditor', () => { it('creates new query editor', () => { 
expect.assertions(1); @@ -525,6 +635,85 @@ describe('async actions', () => { expect(store.getActions()).toEqual(expectedActions); }); + describe('swithQueryEditor', () => { + it('switch to the next tab editor', () => { + const store = mockStore(initialState); + const expectedActions = [ + { + type: actions.SET_ACTIVE_QUERY_EDITOR, + queryEditor: initialState.sqlLab.queryEditors[1], + }, + ]; + store.dispatch(actions.switchQueryEditor()); + + expect(store.getActions()).toEqual(expectedActions); + }); + + it('switch to the first tab editor once it reaches the rightmost tab', () => { + const store = mockStore({ + ...initialState, + sqlLab: { + ...initialState.sqlLab, + tabHistory: [ + initialState.sqlLab.queryEditors[ + initialState.sqlLab.queryEditors.length - 1 + ].id, + ], + }, + }); + const expectedActions = [ + { + type: actions.SET_ACTIVE_QUERY_EDITOR, + queryEditor: initialState.sqlLab.queryEditors[0], + }, + ]; + store.dispatch(actions.switchQueryEditor()); + + expect(store.getActions()).toEqual(expectedActions); + }); + + it('switch to the previous tab editor', () => { + const store = mockStore({ + ...initialState, + sqlLab: { + ...initialState.sqlLab, + tabHistory: [initialState.sqlLab.queryEditors[1].id], + }, + }); + const expectedActions = [ + { + type: actions.SET_ACTIVE_QUERY_EDITOR, + queryEditor: initialState.sqlLab.queryEditors[0], + }, + ]; + store.dispatch(actions.switchQueryEditor(true)); + + expect(store.getActions()).toEqual(expectedActions); + }); + + it('switch to the last tab editor once it reaches the leftmost tab', () => { + const store = mockStore({ + ...initialState, + sqlLab: { + ...initialState.sqlLab, + tabHistory: [initialState.sqlLab.queryEditors[0].id], + }, + }); + const expectedActions = [ + { + type: actions.SET_ACTIVE_QUERY_EDITOR, + queryEditor: + initialState.sqlLab.queryEditors[ + initialState.sqlLab.queryEditors.length - 1 + ], + }, + ]; + store.dispatch(actions.switchQueryEditor(true)); + + 
expect(store.getActions()).toEqual(expectedActions); + }); + }); + describe('backend sync', () => { const updateTabStateEndpoint = 'glob:*/tabstateview/*'; fetchMock.put(updateTabStateEndpoint, {}); diff --git a/superset-frontend/src/SqlLab/components/AceEditorWrapper/AceEditorWrapper.test.tsx b/superset-frontend/src/SqlLab/components/AceEditorWrapper/AceEditorWrapper.test.tsx index ea3c21640a87..e2abec9f8c76 100644 --- a/superset-frontend/src/SqlLab/components/AceEditorWrapper/AceEditorWrapper.test.tsx +++ b/superset-frontend/src/SqlLab/components/AceEditorWrapper/AceEditorWrapper.test.tsx @@ -18,16 +18,29 @@ */ import configureStore from 'redux-mock-store'; import thunk from 'redux-thunk'; -import { render, waitFor } from 'spec/helpers/testing-library'; +import reducerIndex from 'spec/helpers/reducerIndex'; +import { render, waitFor, createStore } from 'spec/helpers/testing-library'; import { QueryEditor } from 'src/SqlLab/types'; import { Store } from 'redux'; import { initialState, defaultQueryEditor } from 'src/SqlLab/fixtures'; import AceEditorWrapper from 'src/SqlLab/components/AceEditorWrapper'; -import { AsyncAceEditorProps } from 'src/components/AsyncAceEditor'; +import { + AsyncAceEditorProps, + FullSQLEditor, +} from 'src/components/AsyncAceEditor'; +import { + queryEditorSetCursorPosition, + queryEditorSetDb, +} from 'src/SqlLab/actions/sqlLab'; +import fetchMock from 'fetch-mock'; const middlewares = [thunk]; const mockStore = configureStore(middlewares); +fetchMock.get('glob:*/api/v1/database/*/function_names/', { + function_names: [], +}); + jest.mock('src/components/Select/Select', () => () => ( <div data-test="mock-deprecated-select-select" /> )); @@ -36,9 +49,11 @@ jest.mock('src/components/Select/AsyncSelect', () => () => ( )); jest.mock('src/components/AsyncAceEditor', () => ({ - FullSQLEditor: (props: AsyncAceEditorProps) => ( - <div data-test="react-ace">{JSON.stringify(props)}</div> - ), + FullSQLEditor: jest + .fn() + 
.mockImplementation((props: AsyncAceEditorProps) => ( + <div data-test="react-ace">{JSON.stringify(props)}</div> + )), })); const setup = (queryEditor: QueryEditor, store?: Store) => @@ -59,6 +74,10 @@ const setup = (queryEditor: QueryEditor, store?: Store) => ); describe('AceEditorWrapper', () => { + beforeEach(() => { + (FullSQLEditor as any as jest.Mock).mockClear(); + }); + it('renders ace editor including sql value', async () => { const { getByTestId } = setup(defaultQueryEditor, mockStore(initialState)); await waitFor(() => expect(getByTestId('react-ace')).toBeInTheDocument()); @@ -91,4 +110,19 @@ describe('AceEditorWrapper', () => { JSON.stringify({ value: defaultQueryEditor.sql }).slice(1, -1), ); }); + + it('skips rerendering for updating cursor position', () => { + const store = createStore(initialState, reducerIndex); + setup(defaultQueryEditor, store); + + expect(FullSQLEditor).toHaveBeenCalled(); + const renderCount = (FullSQLEditor as any as jest.Mock).mock.calls.length; + const updatedCursorPosition = { row: 1, column: 9 }; + store.dispatch( + queryEditorSetCursorPosition(defaultQueryEditor, updatedCursorPosition), + ); + expect(FullSQLEditor).toHaveBeenCalledTimes(renderCount); + store.dispatch(queryEditorSetDb(defaultQueryEditor, 1)); + expect(FullSQLEditor).toHaveBeenCalledTimes(renderCount + 1); + }); }); diff --git a/superset-frontend/src/SqlLab/components/AceEditorWrapper/index.tsx b/superset-frontend/src/SqlLab/components/AceEditorWrapper/index.tsx index 80ccc6ed1c07..ef2379c15797 100644 --- a/superset-frontend/src/SqlLab/components/AceEditorWrapper/index.tsx +++ b/superset-frontend/src/SqlLab/components/AceEditorWrapper/index.tsx @@ -18,7 +18,7 @@ */ import { useState, useEffect, useRef } from 'react'; import type { IAceEditor } from 'react-ace/lib/types'; -import { useDispatch } from 'react-redux'; +import { shallowEqual, useDispatch, useSelector } from 'react-redux'; import { css, styled, usePrevious, useTheme } from '@superset-ui/core'; 
import { Global } from '@emotion/react'; @@ -27,7 +27,7 @@ import { queryEditorSetSelectedText } from 'src/SqlLab/actions/sqlLab'; import { FullSQLEditor as AceEditor } from 'src/components/AsyncAceEditor'; import type { KeyboardShortcut } from 'src/SqlLab/components/KeyboardShortcutButton'; import useQueryEditor from 'src/SqlLab/hooks/useQueryEditor'; -import type { CursorPosition } from 'src/SqlLab/types'; +import { SqlLabRootState, type CursorPosition } from 'src/SqlLab/types'; import { useAnnotations } from './useAnnotations'; import { useKeywords } from './useKeywords'; @@ -77,11 +77,20 @@ const AceEditorWrapper = ({ 'catalog', 'schema', 'templateParams', - 'cursorPosition', ]); + // Prevent a maximum update depth exceeded error + // by skipping access the unsaved query editor state + const cursorPosition = useSelector<SqlLabRootState, CursorPosition>( + ({ sqlLab: { queryEditors } }) => { + const { cursorPosition } = { + ...queryEditors.find(({ id }) => id === queryEditorId), + }; + return cursorPosition ?? { row: 0, column: 0 }; + }, + shallowEqual, + ); const currentSql = queryEditor.sql ?? ''; - const cursorPosition = queryEditor.cursorPosition ?? 
{ row: 0, column: 0 }; const [sql, setSql] = useState(currentSql); // The editor changeSelection is called multiple times in a row, diff --git a/superset-frontend/src/SqlLab/components/AceEditorWrapper/useAnnotations.ts b/superset-frontend/src/SqlLab/components/AceEditorWrapper/useAnnotations.ts index f640e3077961..c64605c39583 100644 --- a/superset-frontend/src/SqlLab/components/AceEditorWrapper/useAnnotations.ts +++ b/superset-frontend/src/SqlLab/components/AceEditorWrapper/useAnnotations.ts @@ -24,9 +24,12 @@ import { VALIDATION_DEBOUNCE_MS } from 'src/SqlLab/constants'; import { FetchValidationQueryParams, useQueryValidationsQuery, + ValidationResult, } from 'src/hooks/apiResources'; import { useDebounceValue } from 'src/hooks/useDebounceValue'; +const EMPTY = [] as ValidationResult[]; + export function useAnnotations(params: FetchValidationQueryParams) { const { sql, dbId, schema, templateParams } = params; const debouncedSql = useDebounceValue(sql, VALIDATION_DEBOUNCE_MS); @@ -73,7 +76,7 @@ export function useAnnotations(params: FetchValidationQueryParams) { text: `The server failed to validate your query.\n${message}`, }, ] - : [], + : EMPTY, }; }, }, diff --git a/superset-frontend/src/SqlLab/components/AceEditorWrapper/useKeywords.ts b/superset-frontend/src/SqlLab/components/AceEditorWrapper/useKeywords.ts index df45290f6cde..e02145f963a1 100644 --- a/superset-frontend/src/SqlLab/components/AceEditorWrapper/useKeywords.ts +++ b/superset-frontend/src/SqlLab/components/AceEditorWrapper/useKeywords.ts @@ -77,7 +77,7 @@ export function useKeywords( // skipFetch is used to prevent re-evaluating memoized keywords // due to updated api results by skip flag const skipFetch = hasFetchedKeywords && skip; - const { data: schemaOptions } = useSchemasQueryState( + const { currentData: schemaOptions } = useSchemasQueryState( { dbId, catalog: catalog || undefined, @@ -85,7 +85,7 @@ export function useKeywords( }, { skip: skipFetch || !dbId }, ); - const { data: tableData 
} = useTablesQueryState( + const { currentData: tableData } = useTablesQueryState( { dbId, catalog, @@ -95,7 +95,7 @@ export function useKeywords( { skip: skipFetch || !dbId || !schema }, ); - const { data: functionNames, isError } = useDatabaseFunctionsQuery( + const { currentData: functionNames, isError } = useDatabaseFunctionsQuery( { dbId }, { skip: skipFetch || !dbId }, ); diff --git a/superset-frontend/src/SqlLab/components/KeyboardShortcutButton/index.tsx b/superset-frontend/src/SqlLab/components/KeyboardShortcutButton/index.tsx index d3c54933126d..fbc71065bcfc 100644 --- a/superset-frontend/src/SqlLab/components/KeyboardShortcutButton/index.tsx +++ b/superset-frontend/src/SqlLab/components/KeyboardShortcutButton/index.tsx @@ -38,6 +38,8 @@ export enum KeyboardShortcut { CtrlF = 'ctrl+f', CtrlH = 'ctrl+h', CtrlShiftF = 'ctrl+shift+f', + CtrlLeft = 'ctrl+[', + CtrlRight = 'ctrl+]', } export const KEY_MAP = { @@ -51,6 +53,8 @@ export const KEY_MAP = { [KeyboardShortcut.CtrlT]: userOS !== 'Windows' ? t('New tab') : undefined, [KeyboardShortcut.CtrlP]: t('Previous Line'), [KeyboardShortcut.CtrlShiftF]: t('Format SQL'), + [KeyboardShortcut.CtrlLeft]: t('Switch to the previous tab'), + [KeyboardShortcut.CtrlRight]: t('Switch to the next tab'), // default ace editor shortcuts [KeyboardShortcut.CmdF]: userOS === 'MacOS' ? t('Find') : undefined, [KeyboardShortcut.CtrlF]: userOS !== 'MacOS' ? 
t('Find') : undefined, diff --git a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.tsx b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.tsx index f4808f52fdad..0c5f55054928 100644 --- a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.tsx +++ b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.tsx @@ -76,28 +76,35 @@ function QueryAutoRefresh({ last_updated_ms: queriesLastUpdate - QUERY_UPDATE_BUFFER_MS, }); + const controller = new AbortController(); pendingRequestRef.current = true; SupersetClient.get({ endpoint: `/api/v1/query/updated_since?q=${params}`, timeout: QUERY_TIMEOUT_LIMIT, parseMethod: 'json-bigint', + signal: controller.signal, }) .then(({ json }) => { if (json) { const jsonPayload = json as { result?: QueryResponse[] }; if (jsonPayload?.result?.length) { const queries = - jsonPayload?.result?.reduce((acc, current) => { - acc[current.id] = current; - return acc; - }, {}) ?? {}; + jsonPayload?.result?.reduce( + (acc: Record<string, QueryResponse>, current) => { + acc[current.id] = current; + return acc; + }, + {}, + ) ?? 
{}; dispatch(refreshQueries(queries)); } else { dispatch(clearInactiveQueries(QUERY_UPDATE_FREQ)); } } }) - .catch(() => {}) + .catch(() => { + controller.abort(); + }) .finally(() => { pendingRequestRef.current = false; }); diff --git a/superset-frontend/src/SqlLab/components/QueryHistory/index.tsx b/superset-frontend/src/SqlLab/components/QueryHistory/index.tsx index 90b905c5a48c..4093a8feb39a 100644 --- a/superset-frontend/src/SqlLab/components/QueryHistory/index.tsx +++ b/superset-frontend/src/SqlLab/components/QueryHistory/index.tsx @@ -70,7 +70,11 @@ const QueryHistory = ({ ({ sqlLab: { queries } }: SqlLabRootState) => queries, shallowEqual, ); - const { data, isLoading, isFetching } = useEditorQueriesQuery( + const { + currentData: data, + isLoading, + isFetching, + } = useEditorQueriesQuery( { editorId: `${queryEditorId}`, pageIndex }, { skip: !isFeatureEnabled(FeatureFlag.SqllabBackendPersistence), diff --git a/superset-frontend/src/SqlLab/components/ResultSet/ResultSet.test.tsx b/superset-frontend/src/SqlLab/components/ResultSet/ResultSet.test.tsx index 9c04fee8e718..bce0b95bb8cc 100644 --- a/superset-frontend/src/SqlLab/components/ResultSet/ResultSet.test.tsx +++ b/superset-frontend/src/SqlLab/components/ResultSet/ResultSet.test.tsx @@ -32,6 +32,7 @@ import { initialState, user, queryWithNoQueryLimit, + failedQueryWithFrontendTimeoutErrors, } from 'src/SqlLab/fixtures'; const mockedProps = { @@ -104,6 +105,16 @@ const failedQueryWithErrorsState = { }, }, }; +const failedQueryWithTimeoutState = { + ...initialState, + sqlLab: { + ...initialState.sqlLab, + queries: { + [failedQueryWithFrontendTimeoutErrors.id]: + failedQueryWithFrontendTimeoutErrors, + }, + }, +}; const newProps = { displayLimit: 1001, @@ -319,6 +330,18 @@ describe('ResultSet', () => { expect(screen.getByText('Database error')).toBeInTheDocument(); }); + test('should render a timeout error with a retrial button', async () => { + await waitFor(() => { + setup( + { ...mockedProps, queryId: 
failedQueryWithFrontendTimeoutErrors.id }, + mockStore(failedQueryWithTimeoutState), + ); + }); + expect( + screen.getByRole('button', { name: /Retry fetching results/i }), + ).toBeInTheDocument(); + }); + test('renders if there is no limit in query.results but has queryLimit', async () => { const query = { ...queries[0], diff --git a/superset-frontend/src/SqlLab/components/ResultSet/index.tsx b/superset-frontend/src/SqlLab/components/ResultSet/index.tsx index 152d0b9208c7..470a05cd0555 100644 --- a/superset-frontend/src/SqlLab/components/ResultSet/index.tsx +++ b/superset-frontend/src/SqlLab/components/ResultSet/index.tsx @@ -42,6 +42,7 @@ import { css, getNumberFormatter, getExtensionsRegistry, + ErrorTypeEnum, } from '@superset-ui/core'; import ErrorMessageWithStackTrace from 'src/components/ErrorMessage/ErrorMessageWithStackTrace'; import { @@ -225,8 +226,8 @@ const ResultSet = ({ reRunQueryIfSessionTimeoutErrorOnMount(); }, [reRunQueryIfSessionTimeoutErrorOnMount]); - const fetchResults = (q: typeof query) => { - dispatch(fetchQueryResults(q, displayLimit)); + const fetchResults = (q: typeof query, timeout?: number) => { + dispatch(fetchQueryResults(q, displayLimit, timeout)); }; const prevQuery = usePrevious(query); @@ -549,7 +550,18 @@ const ResultSet = ({ link={query.link} source="sqllab" /> - {trackingUrl} + {(query?.extra?.errors?.[0] || query?.errors?.[0])?.error_type === + ErrorTypeEnum.FRONTEND_TIMEOUT_ERROR ? 
( + <Button + className="sql-result-track-job" + buttonSize="small" + onClick={() => fetchResults(query, 0)} + > + {t('Retry fetching results')} + </Button> + ) : ( + trackingUrl + )} </ResultlessStyles> ); } @@ -618,7 +630,7 @@ const ResultSet = ({ : []; const allowHTML = getItem( LocalStorageKeys.SqllabIsRenderHtmlEnabled, - false, + true, ); return ( <ResultContainer> diff --git a/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.tsx b/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.tsx index d24c7e9d8bb6..b6af5d8f0e83 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.tsx +++ b/superset-frontend/src/SqlLab/components/SqlEditor/SqlEditor.test.tsx @@ -189,6 +189,18 @@ describe('SqlEditor', () => { expect(await findByTestId('react-ace')).toBeInTheDocument(); }); + it('skip rendering an AceEditorWrapper when the current tab is inactive', async () => { + const { findByTestId, queryByTestId } = setup( + { + ...mockedProps, + queryEditor: initialState.sqlLab.queryEditors[1], + }, + store, + ); + expect(await findByTestId('mock-sql-editor-left-bar')).toBeInTheDocument(); + expect(queryByTestId('react-ace')).not.toBeInTheDocument(); + }); + it('avoids rerendering EditorLeftBar and ResultSet while typing', async () => { const { findByTestId } = setup(mockedProps, store); const editor = await findByTestId('react-ace'); diff --git a/superset-frontend/src/SqlLab/components/SqlEditor/index.tsx b/superset-frontend/src/SqlLab/components/SqlEditor/index.tsx index c17ac9324bdc..f9d745e37799 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditor/index.tsx +++ b/superset-frontend/src/SqlLab/components/SqlEditor/index.tsx @@ -80,6 +80,7 @@ import { updateSavedQuery, formatQuery, fetchQueryEditor, + switchQueryEditor, } from 'src/SqlLab/actions/sqlLab'; import { STATE_TYPE_MAP, @@ -307,7 +308,7 @@ const SqlEditor: FC<Props> = ({ getItem(LocalStorageKeys.SqllabIsAutocompleteEnabled, true), ); const [renderHTMLEnabled, 
setRenderHTMLEnabled] = useState( - getItem(LocalStorageKeys.SqllabIsRenderHtmlEnabled, false), + getItem(LocalStorageKeys.SqllabIsRenderHtmlEnabled, true), ); const [showCreateAsModal, setShowCreateAsModal] = useState(false); const [createAs, setCreateAs] = useState(''); @@ -321,6 +322,8 @@ const SqlEditor: FC<Props> = ({ const SqlFormExtension = extensionsRegistry.get('sqleditor.extension.form'); + const isTempId = (value: unknown): boolean => Number.isNaN(Number(value)); + const startQuery = useCallback( (ctasArg = false, ctas_method = CtasEnum.Table) => { if (!database) { @@ -445,6 +448,22 @@ const SqlEditor: FC<Props> = ({ formatCurrentQuery(true); }, }, + { + name: 'switchTabToLeft', + key: KeyboardShortcut.CtrlLeft, + descr: KEY_MAP[KeyboardShortcut.CtrlLeft], + func: () => { + dispatch(switchQueryEditor(true)); + }, + }, + { + name: 'switchTabToRight', + key: KeyboardShortcut.CtrlRight, + descr: KEY_MAP[KeyboardShortcut.CtrlRight], + func: () => { + dispatch(switchQueryEditor(false)); + }, + }, ]; }, [dispatch, queryEditor.sql, startQuery, stopQuery, formatCurrentQuery]); @@ -880,15 +899,17 @@ const SqlEditor: FC<Props> = ({ startQuery={startQuery} /> )} - <AceEditorWrapper - autocomplete={autocompleteEnabled} - onBlur={onSqlChanged} - onChange={onSqlChanged} - queryEditorId={queryEditor.id} - onCursorPositionChange={handleCursorPositionChange} - height={`${aceEditorHeight}px`} - hotkeys={hotkeys} - /> + {isActive && ( + <AceEditorWrapper + autocomplete={autocompleteEnabled && !isTempId(queryEditor.id)} + onBlur={onSqlChanged} + onChange={onSqlChanged} + queryEditorId={queryEditor.id} + onCursorPositionChange={handleCursorPositionChange} + height={`${aceEditorHeight}px`} + hotkeys={hotkeys} + /> + )} {renderEditorBottomBar(showEmptyState)} </div> <SouthPane diff --git a/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/SqlEditorLeftBar.test.tsx b/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/SqlEditorLeftBar.test.tsx index 
d322b8cf3723..3abb5b400e9e 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/SqlEditorLeftBar.test.tsx +++ b/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/SqlEditorLeftBar.test.tsx @@ -47,7 +47,14 @@ beforeEach(() => { count: 0, result: [], }); - fetchMock.get('glob:*/api/v1/database/*/schemas/?*', { + fetchMock.get('glob:*/api/v1/database/3/schemas/?*', { + error: 'Unauthorized', + }); + fetchMock.get('glob:*/api/v1/database/1/schemas/?*', { + count: 2, + result: ['main', 'db1_schema', 'db1_schema2'], + }); + fetchMock.get('glob:*/api/v1/database/2/schemas/?*', { count: 2, result: ['main', 'new_schema'], }); @@ -198,7 +205,7 @@ test('should toggle the table when the header is clicked', async () => { ); }); -test('When changing database the table list must be updated', async () => { +test('When changing database the schema and table list must be updated', async () => { const { rerender } = await renderAndWait(mockedProps, undefined, { ...initialState, sqlLab: { @@ -245,6 +252,32 @@ test('When changing database the table list must be updated', async () => { expect(updatedDbSelector[0]).toBeInTheDocument(); const updatedTableSelector = await screen.findAllByText(/new_table/i); expect(updatedTableSelector[0]).toBeInTheDocument(); + + const select = screen.getByRole('combobox', { + name: 'Select schema or type to search schemas', + }); + userEvent.click(select); + expect( + await screen.findByRole('option', { name: 'main' }), + ).toBeInTheDocument(); + expect( + await screen.findByRole('option', { name: 'new_schema' }), + ).toBeInTheDocument(); + rerender( + <SqlEditorLeftBar + {...mockedProps} + database={{ + id: 3, + database_name: 'unauth_db', + backend: 'minervasql', + }} + queryEditorId={extraQueryEditor1.id} + />, + ); + userEvent.click(select); + expect( + await screen.findByText('No compatible schema found'), + ).toBeInTheDocument(); }); test('ignore schema api when current schema is deprecated', async () => { diff --git 
a/superset-frontend/src/SqlLab/components/TableElement/TableElement.test.tsx b/superset-frontend/src/SqlLab/components/TableElement/TableElement.test.tsx index 30f41e5ee358..2f391c269bef 100644 --- a/superset-frontend/src/SqlLab/components/TableElement/TableElement.test.tsx +++ b/superset-frontend/src/SqlLab/components/TableElement/TableElement.test.tsx @@ -51,11 +51,13 @@ const getTableMetadataEndpoint = /\/api\/v1\/database\/\d+\/table_metadata\/(?:\?.*)?$/; const getExtraTableMetadataEndpoint = /\/api\/v1\/database\/\d+\/table_metadata\/extra\/(?:\?.*)?$/; -const updateTableSchemaEndpoint = 'glob:*/tableschemaview/*/expanded'; +const updateTableSchemaExpandedEndpoint = 'glob:*/tableschemaview/*/expanded'; +const updateTableSchemaEndpoint = 'glob:*/tableschemaview/'; beforeEach(() => { fetchMock.get(getTableMetadataEndpoint, table); fetchMock.get(getExtraTableMetadataEndpoint, {}); + fetchMock.post(updateTableSchemaExpandedEndpoint, {}); fetchMock.post(updateTableSchemaEndpoint, {}); }); @@ -84,7 +86,7 @@ test('has 4 IconTooltip elements', async () => { initialState, }); await waitFor(() => - expect(getAllByTestId('mock-icon-tooltip')).toHaveLength(4), + expect(getAllByTestId('mock-icon-tooltip')).toHaveLength(5), ); }); @@ -104,7 +106,7 @@ test('fades table', async () => { initialState, }); await waitFor(() => - expect(getAllByTestId('mock-icon-tooltip')).toHaveLength(4), + expect(getAllByTestId('mock-icon-tooltip')).toHaveLength(5), ); const style = window.getComputedStyle(getAllByTestId('fade')[0]); expect(style.opacity).toBe('0'); @@ -125,7 +127,7 @@ test('sorts columns', async () => { }, ); await waitFor(() => - expect(getAllByTestId('mock-icon-tooltip')).toHaveLength(4), + expect(getAllByTestId('mock-icon-tooltip')).toHaveLength(5), ); expect( getAllByTestId('mock-column-element').map(el => el.textContent), @@ -154,7 +156,7 @@ test('removes the table', async () => { }, ); await waitFor(() => - expect(getAllByTestId('mock-icon-tooltip')).toHaveLength(4), + 
expect(getAllByTestId('mock-icon-tooltip')).toHaveLength(5), ); expect(fetchMock.calls(updateTableSchemaEndpoint)).toHaveLength(0); fireEvent.click(getByText('Remove table preview')); @@ -174,6 +176,29 @@ test('fetches table metadata when expanded', async () => { await waitFor(() => expect(fetchMock.calls(getTableMetadataEndpoint)).toHaveLength(1), ); - expect(fetchMock.calls(updateTableSchemaEndpoint)).toHaveLength(0); + expect(fetchMock.calls(updateTableSchemaExpandedEndpoint)).toHaveLength(0); expect(fetchMock.calls(getExtraTableMetadataEndpoint)).toHaveLength(1); }); + +test('refreshes table metadata when triggered', async () => { + const { getAllByTestId, getByText } = render( + <TableElement {...mockedProps} />, + { + useRedux: true, + initialState, + }, + ); + await waitFor(() => + expect(getAllByTestId('mock-icon-tooltip')).toHaveLength(5), + ); + expect(fetchMock.calls(updateTableSchemaEndpoint)).toHaveLength(0); + expect(fetchMock.calls(getTableMetadataEndpoint)).toHaveLength(1); + + fireEvent.click(getByText('Refresh table schema')); + await waitFor(() => + expect(fetchMock.calls(getTableMetadataEndpoint)).toHaveLength(2), + ); + await waitFor(() => + expect(fetchMock.calls(updateTableSchemaEndpoint)).toHaveLength(1), + ); +}); diff --git a/superset-frontend/src/SqlLab/components/TableElement/index.tsx b/superset-frontend/src/SqlLab/components/TableElement/index.tsx index 87c2821f635e..824c9ec3c4e1 100644 --- a/superset-frontend/src/SqlLab/components/TableElement/index.tsx +++ b/superset-frontend/src/SqlLab/components/TableElement/index.tsx @@ -32,6 +32,7 @@ import { syncTable, } from 'src/SqlLab/actions/sqlLab'; import { + tableApiUtil, useTableExtendedMetadataQuery, useTableMetadataQuery, } from 'src/hooks/apiResources'; @@ -105,9 +106,9 @@ const TableElement = ({ table, ...props }: TableElementProps) => { const theme = useTheme(); const dispatch = useDispatch(); const { - data: tableMetadata, + currentData: tableMetadata, isSuccess: isMetadataSuccess, 
- isLoading: isMetadataLoading, + isFetching: isMetadataFetching, isError: hasMetadataError, } = useTableMetadataQuery( { @@ -119,7 +120,7 @@ const TableElement = ({ table, ...props }: TableElementProps) => { { skip: !expanded }, ); const { - data: tableExtendedMetadata, + currentData: tableExtendedMetadata, isSuccess: isExtraMetadataSuccess, isLoading: isExtraMetadataLoading, isError: hasExtendedMetadataError, @@ -177,6 +178,13 @@ const TableElement = ({ table, ...props }: TableElementProps) => { setSortColumns(prevState => !prevState); }; + const refreshTableMetadata = () => { + dispatch( + tableApiUtil.invalidateTags([{ type: 'TableMetadatas', id: name }]), + ); + dispatch(syncTable(table, tableData)); + }; + const renderWell = () => { let partitions; let metadata; @@ -268,6 +276,11 @@ const TableElement = ({ table, ...props }: TableElementProps) => { } `} > + <IconTooltip + className="fa fa-refresh pull-left m-l-2 pointer" + onClick={refreshTableMetadata} + tooltip={t('Refresh table schema')} + /> {keyLink} <IconTooltip className={ @@ -341,7 +354,7 @@ const TableElement = ({ table, ...props }: TableElementProps) => { </Tooltip> <div className="pull-right header-right-side"> - {isMetadataLoading || isExtraMetadataLoading ? ( + {isMetadataFetching || isExtraMetadataLoading ? 
( <Loading position="inline" /> ) : ( <Fade diff --git a/superset-frontend/src/SqlLab/fixtures.ts b/superset-frontend/src/SqlLab/fixtures.ts index 257391654482..7c43a02b6c7f 100644 --- a/superset-frontend/src/SqlLab/fixtures.ts +++ b/superset-frontend/src/SqlLab/fixtures.ts @@ -22,6 +22,7 @@ import { ColumnKeyTypeType } from 'src/SqlLab/components/ColumnElement'; import { DatasourceType, denormalizeTimestamp, + ErrorTypeEnum, GenericDataType, QueryResponse, QueryState, @@ -546,6 +547,20 @@ export const failedQueryWithErrors = { tempTable: '', }; +export const failedQueryWithFrontendTimeoutErrors = { + ...failedQueryWithErrorMessage, + errors: [ + { + error_type: ErrorTypeEnum.FRONTEND_TIMEOUT_ERROR, + message: 'Request timed out', + level: 'error', + extra: { + timeout: 10, + }, + }, + ], +}; + const baseQuery: QueryResponse = { queryId: 567, dbId: 1, diff --git a/superset-frontend/src/SqlLab/reducers/sqlLab.js b/superset-frontend/src/SqlLab/reducers/sqlLab.js index 5f0b27e97a41..5741ad8878b7 100644 --- a/superset-frontend/src/SqlLab/reducers/sqlLab.js +++ b/superset-frontend/src/SqlLab/reducers/sqlLab.js @@ -17,6 +17,8 @@ * under the License. */ import { normalizeTimestamp, QueryState, t } from '@superset-ui/core'; +import { isEqual, omit } from 'lodash'; +import { shallowEqual } from 'react-redux'; import * as actions from '../actions/sqlLab'; import { now } from '../../utils/dates'; import { @@ -696,7 +698,17 @@ export default function sqlLabReducer(state = {}, action) { ? 
prevState : currentState, }; - change = true; + if ( + shallowEqual( + omit(newQueries[id], ['extra']), + omit(state.queries[id], ['extra']), + ) && + isEqual(newQueries[id].extra, state.queries[id].extra) + ) { + newQueries[id] = state.queries[id]; + } else { + change = true; + } } }); if (!change) { diff --git a/superset-frontend/src/SqlLab/reducers/sqlLab.test.js b/superset-frontend/src/SqlLab/reducers/sqlLab.test.js index c3b603667b9b..d805b2cf1ef7 100644 --- a/superset-frontend/src/SqlLab/reducers/sqlLab.test.js +++ b/superset-frontend/src/SqlLab/reducers/sqlLab.test.js @@ -449,6 +449,35 @@ describe('sqlLabReducer', () => { expect(newState.queries.abcd.endDttm).toBe(Number(endDttmInStr)); expect(newState.queriesLastUpdate).toBe(CHANGED_ON_TIMESTAMP); }); + it('should skip refreshing queries when polling contains existing results', () => { + const completedQuery = { + ...query, + extra: { + columns: [], + progress: null, + }, + }; + newState = sqlLabReducer( + { + ...newState, + queries: { abcd: query, def: completedQuery }, + }, + actions.refreshQueries({ + abcd: { + ...query, + }, + def: { + ...completedQuery, + extra: { + columns: [], + progress: null, + }, + }, + }), + ); + expect(newState.queries.abcd).toBe(query); + expect(newState.queries.def).toBe(completedQuery); + }); it('should refresh queries when polling returns empty', () => { newState = sqlLabReducer(newState, actions.refreshQueries({})); }); diff --git a/superset-frontend/src/assets/images/icons/ai.svg b/superset-frontend/src/assets/images/icons/ai.svg new file mode 100644 index 000000000000..1decfd963d53 --- /dev/null +++ b/superset-frontend/src/assets/images/icons/ai.svg @@ -0,0 +1 @@ +<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 16 16" fill="none"> <path d="M10.7489 9.60187L9.67552 10.0219C8.47552 10.4885 7.54219 11.4485 7.12219 12.6485L6.70885 13.8219C6.62219 14.0619 6.28219 14.0619 6.20219 13.8219L5.78885 12.6485C5.36885 11.4485 4.43552 10.4885 3.23552 
10.0219L2.16219 9.60187C1.93552 9.5152 1.93552 9.1952 2.16219 9.10853L3.23552 8.68853C4.43552 8.22187 5.36885 7.26187 5.78885 6.06187L6.20219 4.88854C6.28885 4.64854 6.62885 4.64854 6.70885 4.88854L7.12219 6.06187C7.54219 7.26187 8.47552 8.22187 9.67552 8.68853L10.7489 9.10853C10.9755 9.1952 10.9755 9.5152 10.7489 9.60187ZM13.9222 4.22187L13.3822 4.00853C12.7755 3.7752 12.3022 3.28854 12.0955 2.68187L11.8889 2.08854C11.8489 1.96854 11.6755 1.96854 11.6289 2.08854L11.4222 2.68187C11.2089 3.28854 10.7355 3.7752 10.1355 4.00853L9.59552 4.22187C9.48219 4.26854 9.48219 4.42853 9.59552 4.46853L10.1355 4.68187C10.7422 4.9152 11.2155 5.40187 11.4222 6.00853L11.6289 6.60187C11.6689 6.72187 11.8422 6.72187 11.8889 6.60187L12.0955 6.00853C12.3089 5.40187 12.7822 4.9152 13.3822 4.68187L13.9222 4.46853C14.0355 4.42187 14.0355 4.26187 13.9222 4.22187Z" fill="url(#paint0_linear_8553_71512)"/> <defs> <linearGradient id="paint0_linear_8553_71512" x1="-0.124091" y1="8.0002" x2="16.0939" y2="7.30833" gradientUnits="userSpaceOnUse"> <stop stop-color="#1ECCB0"/> <stop offset="1" stop-color="#3535F3"/> </linearGradient> </defs> </svg> \ No newline at end of file diff --git a/superset-frontend/src/assets/images/loading.gif b/superset-frontend/src/assets/images/loading.gif index d82fc5d9244e..79f4761e418e 100644 Binary files a/superset-frontend/src/assets/images/loading.gif and b/superset-frontend/src/assets/images/loading.gif differ diff --git a/superset-frontend/src/components/AsyncAceEditor/Tooltip.test.tsx b/superset-frontend/src/components/AsyncAceEditor/Tooltip.test.tsx new file mode 100644 index 000000000000..8365bd6b5991 --- /dev/null +++ b/superset-frontend/src/components/AsyncAceEditor/Tooltip.test.tsx @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { render, screen } from 'spec/helpers/testing-library'; +import Tooltip, { getTooltipHTML } from './Tooltip'; + +test('should render a tooltip', () => { + const expected = { + title: 'tooltip title', + icon: <div>icon</div>, + body: <div>body</div>, + meta: 'meta', + footer: <div>footer</div>, + }; + render(<Tooltip {...expected} />); + expect(screen.getByText(expected.title)).toBeInTheDocument(); + expect(screen.getByText(expected.meta)).toBeInTheDocument(); + expect(screen.getByText('icon')).toBeInTheDocument(); + expect(screen.getByText('body')).toBeInTheDocument(); +}); + +test('returns the tooltip HTML', () => { + const html = getTooltipHTML({ + title: 'tooltip title', + icon: <div>icon</div>, + body: <div>body</div>, + meta: 'meta', + footer: <div>footer</div>, + }); + expect(html).toContain('tooltip title'); +}); diff --git a/superset-frontend/src/components/AsyncAceEditor/Tooltip.tsx b/superset-frontend/src/components/AsyncAceEditor/Tooltip.tsx new file mode 100644 index 000000000000..bc504587a635 --- /dev/null +++ b/superset-frontend/src/components/AsyncAceEditor/Tooltip.tsx @@ -0,0 +1,57 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { renderToStaticMarkup } from 'react-dom/server'; +import { Tag } from 'src/components'; + +type Props = { + title: string; + icon?: React.ReactNode; + body?: React.ReactNode; + meta?: string; + footer?: React.ReactNode; +}; + +export const Tooltip: React.FC<Props> = ({ + title, + icon, + body, + meta, + footer, +}) => ( + <div className="tooltip-detail"> + <div className="tooltip-detail-head"> + <div className="tooltip-detail-title"> + {icon} + {title} + </div> + {meta && ( + <span className="tooltip-detail-meta"> + <Tag color="default">{meta}</Tag> + </span> + )} + </div> + {body && <div className="tooltip-detail-body">{body ?? 
title}</div>} + {footer && <div className="tooltip-detail-footer">{footer}</div>} + </div> +); + +export const getTooltipHTML = (props: Props) => + `${renderToStaticMarkup(<Tooltip {...props} />)}`; + +export default Tooltip; diff --git a/superset-frontend/src/components/AsyncAceEditor/index.tsx b/superset-frontend/src/components/AsyncAceEditor/index.tsx index 32e5a687fdf2..f599438c7daf 100644 --- a/superset-frontend/src/components/AsyncAceEditor/index.tsx +++ b/superset-frontend/src/components/AsyncAceEditor/index.tsx @@ -32,6 +32,10 @@ import AsyncEsmComponent, { } from 'src/components/AsyncEsmComponent'; import useEffectEvent from 'src/hooks/useEffectEvent'; import cssWorkerUrl from 'ace-builds/src-noconflict/worker-css'; +import { useTheme, css } from '@superset-ui/core'; +import { Global } from '@emotion/react'; + +export { getTooltipHTML } from './Tooltip'; config.setModuleUrl('ace/mode/css_worker', cssWorkerUrl); @@ -135,6 +139,7 @@ export default function AsyncAceEditor( }, ref, ) { + const supersetTheme = useTheme(); const langTools = acequire('ace/ext/language_tools'); const setCompleters = useEffectEvent( (keywords: AceCompleterKeyword[]) => { @@ -167,15 +172,66 @@ export default function AsyncAceEditor( }, [keywords, setCompleters]); return ( - <ReactAceEditor - ref={ref} - mode={mode} - theme={theme} - tabSize={tabSize} - defaultValue={defaultValue} - setOptions={{ fontFamily }} - {...props} - /> + <> + <Global + styles={css` + .ace_tooltip { + margin-left: ${supersetTheme.gridUnit * 2}px; + padding: 0px; + border: 1px solid ${supersetTheme.colors.grayscale.light1}; + } + + & .tooltip-detail { + background-color: ${supersetTheme.colors.grayscale.light5}; + white-space: pre-wrap; + word-break: break-all; + min-width: ${supersetTheme.gridUnit * 50}px; + max-width: ${supersetTheme.gridUnit * 100}px; + & .tooltip-detail-head { + background-color: ${supersetTheme.colors.grayscale.light4}; + color: ${supersetTheme.colors.grayscale.dark1}; + display: flex; + 
column-gap: ${supersetTheme.gridUnit}px; + align-items: baseline; + justify-content: space-between; + } + & .tooltip-detail-title { + display: flex; + column-gap: ${supersetTheme.gridUnit}px; + } + & .tooltip-detail-body { + word-break: break-word; + } + & .tooltip-detail-head, + & .tooltip-detail-body { + padding: ${supersetTheme.gridUnit}px + ${supersetTheme.gridUnit * 2}px; + } + & .tooltip-detail-footer { + border-top: 1px ${supersetTheme.colors.grayscale.light2} + solid; + padding: 0 ${supersetTheme.gridUnit * 2}px; + color: ${supersetTheme.colors.grayscale.dark1}; + font-size: ${supersetTheme.typography.sizes.xs}px; + } + & .tooltip-detail-meta { + & > .ant-tag { + margin-right: 0px; + } + } + } + `} + /> + <ReactAceEditor + ref={ref} + mode={mode} + theme={theme} + tabSize={tabSize} + defaultValue={defaultValue} + setOptions={{ fontFamily }} + {...props} + /> + </> ); }, ); diff --git a/superset-frontend/src/components/Button/Button.test.tsx b/superset-frontend/src/components/Button/Button.test.tsx index f65267f9f1d4..bf037f104e75 100644 --- a/superset-frontend/src/components/Button/Button.test.tsx +++ b/superset-frontend/src/components/Button/Button.test.tsx @@ -16,10 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ - -import { isValidElement } from 'react'; -import { ReactWrapper } from 'enzyme'; -import { styledMount as mount } from 'spec/helpers/theming'; +import { fireEvent, render } from 'spec/helpers/testing-library'; import Button from '.'; import { ButtonGallery, @@ -27,36 +24,27 @@ import { STYLES as buttonStyles, } from './Button.stories'; -describe('Button', () => { - let wrapper: ReactWrapper; - - // test the basic component - it('renders the base component', () => { - expect(isValidElement(<Button />)).toBe(true); - }); - - it('works with an onClick handler', () => { - const mockAction = jest.fn(); - wrapper = mount(<Button onClick={mockAction} />); - wrapper.find('Button').first().simulate('click'); - expect(mockAction).toHaveBeenCalled(); - }); +test('works with an onClick handler', () => { + const mockAction = jest.fn(); + const { getByRole } = render(<Button onClick={mockAction} />); + fireEvent.click(getByRole('button')); + expect(mockAction).toHaveBeenCalled(); +}); - it('does not handle onClicks when disabled', () => { - const mockAction = jest.fn(); - wrapper = mount(<Button onClick={mockAction} disabled />); - wrapper.find('Button').first().simulate('click'); - expect(mockAction).toHaveBeenCalledTimes(0); - }); +test('does not handle onClicks when disabled', () => { + const mockAction = jest.fn(); + const { getByRole } = render(<Button onClick={mockAction} disabled />); + fireEvent.click(getByRole('button')); + expect(mockAction).toHaveBeenCalledTimes(0); +}); - // test stories from the storybook! - it('All the sorybook gallery variants mount', () => { - wrapper = mount(<ButtonGallery />); +// test stories from the storybook! 
+test('All the sorybook gallery variants mount', () => { + const { getAllByRole } = render(<ButtonGallery />); - const permutationCount = - Object.values(buttonStyles.options).filter(o => o).length * - Object.values(buttonSizes.options).length; + const permutationCount = + Object.values(buttonStyles.options).filter(o => o).length * + Object.values(buttonSizes.options).length; - expect(wrapper.find(Button).length).toEqual(permutationCount); - }); + expect(getAllByRole('button')).toHaveLength(permutationCount); }); diff --git a/superset-frontend/src/components/Chart/AISummary.integration.test.tsx b/superset-frontend/src/components/Chart/AISummary.integration.test.tsx new file mode 100644 index 000000000000..68d27e4a1170 --- /dev/null +++ b/superset-frontend/src/components/Chart/AISummary.integration.test.tsx @@ -0,0 +1,337 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { render, screen, waitFor } from 'spec/helpers/testing-library'; +import { FeatureFlag } from '@superset-ui/core'; +import { ThemeProvider } from '@emotion/react'; +import { supersetTheme } from '@superset-ui/core'; +import ChartRenderer from './ChartRenderer'; + +// Mock fetch globally +global.fetch = jest.fn(); +const mockFetch = fetch as jest.MockedFunction<typeof fetch>; + +// Mock SuperChart +jest.mock('@superset-ui/core', () => ({ + ...jest.requireActual('@superset-ui/core'), + SuperChart: ({ formData }) => ( + <div data-test="mock-super-chart">{JSON.stringify(formData)}</div> + ), + isFeatureEnabled: jest.fn(), +})); + +jest.mock('./ChartContextMenu/ChartContextMenu', () => () => ( + <div data-test="mock-chart-context-menu" /> +)); + +const mockIsFeatureEnabled = require('@superset-ui/core').isFeatureEnabled; + +// Mock window.location +Object.defineProperty(window, 'location', { + value: { + search: '?currency_code=USD&timezone=America/New_York', + }, + writable: true, +}); + +const baseProps = { + chartId: 123, + datasource: { id: 1, type: 'table' }, + formData: { + viz_type: 'line', + time_range: 'Last 90 days', + adhoc_filters: [ + { column: 'region', operator: '==', value: 'North America' }, + ], + }, + vizType: 'line', + height: 400, + width: 600, + title: 'Sales Performance Dashboard', + description: 'A comprehensive analysis of sales performance trends across different regions and time periods', + queriesResponse: [ + { + data: [ + { date: '2024-01-01', sales: 10000, region: 'North America' }, + { date: '2024-01-02', sales: 12000, region: 'North America' }, + { date: '2024-01-03', sales: 11500, region: 'North America' }, + ], + }, + ], +}; + +const mockSuccessResponse = { + ok: true, + json: async () => ({ + data: { + result: { + insight: 'This line chart displays sales performance over the last 90 days for North America, showing a positive growth trend with sales increasing from $10,000 to $12,000.', + }, + }, + }), +}; + 
+describe('AI Summary Integration Tests', () => { + beforeEach(() => { + jest.clearAllMocks(); + mockFetch.mockClear(); + + // Enable AI summary feature flag + mockIsFeatureEnabled.mockImplementation((flag) => { + if (flag === FeatureFlag.AiSummary) return true; + return false; + }); + }); + + afterEach(() => { + mockIsFeatureEnabled.mockReset(); + }); + + it('should complete end-to-end flow from ChartRenderer to AI API with title and description', async () => { + mockFetch.mockResolvedValueOnce(mockSuccessResponse as any); + + render( + <ThemeProvider theme={supersetTheme}> + <ChartRenderer {...baseProps} /> + </ThemeProvider>, + ); + + // Wait for the AI summary to be generated and displayed + await waitFor( + () => { + expect(screen.getByText(/This line chart displays sales performance/)).toBeInTheDocument(); + }, + { timeout: 5000 } + ); + + // Verify the API was called with correct payload + expect(mockFetch).toHaveBeenCalledWith( + 'https://api.intelligence.fynd.com/service/panel/analytics/ai/sql-helper/explain-chart', + expect.objectContaining({ + method: 'POST', + headers: { + accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + chart_data: { + vizType: 'line', + dataSample: [ + { date: '2024-01-01', sales: 10000, region: 'North America' }, + { date: '2024-01-02', sales: 12000, region: 'North America' }, + { date: '2024-01-03', sales: 11500, region: 'North America' }, + ], + title: 'Sales Performance Dashboard', + description: 'A comprehensive analysis of sales performance trends across different regions and time periods', + currency_code: 'USD', + timezone: 'America/New_York', + }, + }), + credentials: 'include', + }), + ); + + // Verify the AI summary is displayed with proper styling + const aiSummary = screen.getByText(/This line chart displays sales performance/); + expect(aiSummary).toBeInTheDocument(); + + // Verify AI icon is present + expect(screen.getByLabelText('AI')).toBeInTheDocument(); + }); + + 
it('should handle missing description gracefully', async () => { + mockFetch.mockResolvedValueOnce(mockSuccessResponse as any); + + const propsWithoutDescription = { ...baseProps }; + delete propsWithoutDescription.description; + + render( + <ThemeProvider theme={supersetTheme}> + <ChartRenderer {...propsWithoutDescription} /> + </ThemeProvider>, + ); + + await waitFor(() => { + expect(mockFetch).toHaveBeenCalled(); + }); + + // Verify API was called with undefined description + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + body: JSON.stringify({ + chart_data: expect.objectContaining({ + title: 'Sales Performance Dashboard', + description: undefined, + }), + }), + }), + ); + }); + + it('should handle missing title correctly', async () => { + mockFetch.mockResolvedValueOnce(mockSuccessResponse as any); + + const propsWithoutTitle = { + ...baseProps, + title: undefined, + }; + + render( + <ThemeProvider theme={supersetTheme}> + <ChartRenderer {...propsWithoutTitle} /> + </ThemeProvider>, + ); + + await waitFor(() => { + expect(mockFetch).toHaveBeenCalled(); + }); + + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + body: JSON.stringify({ + chart_data: expect.objectContaining({ + title: undefined, + }), + }), + }), + ); + }); + + it('should handle API errors gracefully without breaking the chart', async () => { + mockFetch.mockRejectedValueOnce(new Error('API Error')); + + render( + <ThemeProvider theme={supersetTheme}> + <ChartRenderer {...baseProps} /> + </ThemeProvider>, + ); + + // Chart should still render even if AI summary fails + await waitFor(() => { + expect(screen.getByTestId('mock-super-chart')).toBeInTheDocument(); + }); + + // AI summary should not be displayed on error + expect(screen.queryByText(/This line chart displays/)).not.toBeInTheDocument(); + }); + + it('should handle large datasets by limiting to 200 rows', async () => { + 
mockFetch.mockResolvedValueOnce(mockSuccessResponse as any); + + // Create a large dataset (300 rows) + const largeDataset = Array.from({ length: 300 }, (_, i) => ({ + date: `2024-01-${String(i + 1).padStart(2, '0')}`, + sales: 10000 + i * 100, + region: 'North America', + })); + + const propsWithLargeDataset = { + ...baseProps, + queriesResponse: [{ data: largeDataset }], + }; + + render( + <ThemeProvider theme={supersetTheme}> + <ChartRenderer {...propsWithLargeDataset} /> + </ThemeProvider>, + ); + + await waitFor(() => { + expect(mockFetch).toHaveBeenCalled(); + }); + + // Verify only 200 rows were sent to API + const call = mockFetch.mock.calls[0]; + const payload = JSON.parse(call[1].body); + expect(payload.chart_data.dataSample).toHaveLength(200); + }); + + it('should regenerate summary when description prop changes', async () => { + mockFetch.mockResolvedValue(mockSuccessResponse as any); + + const { rerender } = render( + <ThemeProvider theme={supersetTheme}> + <ChartRenderer {...baseProps} /> + </ThemeProvider>, + ); + + await waitFor(() => { + expect(mockFetch).toHaveBeenCalledTimes(1); + }); + + // Change description and rerender + const updatedProps = { + ...baseProps, + description: 'Updated description with new insights about the data', + }; + + rerender( + <ThemeProvider theme={supersetTheme}> + <ChartRenderer {...updatedProps} /> + </ThemeProvider>, + ); + + await waitFor(() => { + expect(mockFetch).toHaveBeenCalledTimes(2); + }); + + // Verify the new description was sent + const secondCall = mockFetch.mock.calls[1]; + const payload = JSON.parse(secondCall[1].body); + expect(payload.chart_data.description).toBe('Updated description with new insights about the data'); + }); + + it('should include all URL parameters in API call', async () => { + mockFetch.mockResolvedValueOnce(mockSuccessResponse as any); + + // Update window location with more parameters + Object.defineProperty(window, 'location', { + value: { + search: 
'?currency_code=EUR&timezone=Europe/London&country_code=GB&country=United Kingdom', + }, + writable: true, + }); + + render( + <ThemeProvider theme={supersetTheme}> + <ChartRenderer {...baseProps} /> + </ThemeProvider>, + ); + + await waitFor(() => { + expect(mockFetch).toHaveBeenCalled(); + }); + + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + body: JSON.stringify({ + chart_data: expect.objectContaining({ + currency_code: 'EUR', + timezone: 'Europe/London', + country_code: 'GB', + country: 'United Kingdom', + }), + }), + }), + ); + }); +}); diff --git a/superset-frontend/src/components/Chart/AISummaryBox.test.tsx b/superset-frontend/src/components/Chart/AISummaryBox.test.tsx new file mode 100644 index 000000000000..409a7757ffea --- /dev/null +++ b/superset-frontend/src/components/Chart/AISummaryBox.test.tsx @@ -0,0 +1,307 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { render, screen, waitFor, fireEvent } from 'spec/helpers/testing-library'; +import { ThemeProvider } from '@emotion/react'; +import { supersetTheme } from '@superset-ui/core'; +import AISummaryBox from './AISummaryBox'; +import * as aiSummary from '../../utils/aiSummary'; + +// Mock the aiSummary module +jest.mock('../../utils/aiSummary', () => ({ + generateSummary: jest.fn(), + extractRawDataSample: jest.fn(), +})); + +const mockGenerateSummary = aiSummary.generateSummary as jest.MockedFunction< + typeof aiSummary.generateSummary +>; +const mockExtractRawDataSample = aiSummary.extractRawDataSample as jest.MockedFunction< + typeof aiSummary.extractRawDataSample +>; + +const defaultProps = { + chartDomId: 'chart-id-123', + vizType: 'line', + title: 'Sales Trends', + description: 'Monthly sales data showing growth trends', + queriesData: [ + { + data: [ + { month: 'Jan', sales: 1000 }, + { month: 'Feb', sales: 1200 }, + { month: 'Mar', sales: 1500 }, + ], + }, + ], + timeRange: 'Last 3 months', + filters: { region: 'North America' }, +}; + +const renderComponent = (props = {}) => { + return render( + <ThemeProvider theme={supersetTheme}> + <AISummaryBox {...defaultProps} {...props} /> + </ThemeProvider>, + ); +}; + +describe('AISummaryBox', () => { + beforeEach(() => { + jest.clearAllMocks(); + mockExtractRawDataSample.mockReturnValue([ + { month: 'Jan', sales: 1000 }, + { month: 'Feb', sales: 1200 }, + { month: 'Mar', sales: 1500 }, + ]); + }); + + it('should show loading state initially', () => { + mockGenerateSummary.mockImplementation(() => new Promise(() => {})); // Never resolves + renderComponent(); + + expect(screen.getByLabelText('AI')).toBeInTheDocument(); + expect(screen.getAllByRole('generic')).toHaveLength(6); // Skeleton lines + }); + + it('should call generateSummary with title and description', async () => { + const mockSummary = 'This chart shows strong sales growth over the first quarter.'; + 
mockGenerateSummary.mockResolvedValue(mockSummary); + + renderComponent(); + + await waitFor(() => { + expect(mockGenerateSummary).toHaveBeenCalledWith( + { + vizType: 'line', + title: 'Sales Trends', + description: 'Monthly sales data showing growth trends', + dataSample: [ + { month: 'Jan', sales: 1000 }, + { month: 'Feb', sales: 1200 }, + { month: 'Mar', sales: 1500 }, + ], + imageBase64: undefined, + timeRange: 'Last 3 months', + filters: { region: 'North America' }, + }, + { + mode: 'data', + signal: expect.any(AbortSignal), + }, + ); + }); + }); + + it('should display AI summary when successful', async () => { + const mockSummary = 'This chart shows strong sales growth over the first quarter.'; + mockGenerateSummary.mockResolvedValue(mockSummary); + + renderComponent(); + + await waitFor(() => { + expect(screen.getByText(mockSummary)).toBeInTheDocument(); + }); + + expect(screen.getByLabelText('AI')).toBeInTheDocument(); + }); + + it('should handle undefined title and description', async () => { + const mockSummary = 'Chart analysis without title or description.'; + mockGenerateSummary.mockResolvedValue(mockSummary); + + renderComponent({ title: undefined, description: undefined }); + + await waitFor(() => { + expect(mockGenerateSummary).toHaveBeenCalledWith( + expect.objectContaining({ + title: undefined, + description: undefined, + }), + expect.any(Object), + ); + }); + }); + + it('should handle empty description gracefully', async () => { + const mockSummary = 'Chart analysis with empty description.'; + mockGenerateSummary.mockResolvedValue(mockSummary); + + renderComponent({ description: '' }); + + await waitFor(() => { + expect(mockGenerateSummary).toHaveBeenCalledWith( + expect.objectContaining({ + description: '', + }), + expect.any(Object), + ); + }); + }); + + it('should not display anything on error', async () => { + mockGenerateSummary.mockRejectedValue(new Error('API Error')); + + renderComponent(); + + await waitFor(() => { + 
expect(mockGenerateSummary).toHaveBeenCalled(); + }); + + // Component should not render anything when there's an error + expect(screen.queryByText(/chart/i)).not.toBeInTheDocument(); + }); + + it('should regenerate summary when description changes', async () => { + const mockSummary1 = 'First summary'; + const mockSummary2 = 'Updated summary with new description'; + mockGenerateSummary + .mockResolvedValueOnce(mockSummary1) + .mockResolvedValueOnce(mockSummary2); + + const { rerender } = renderComponent(); + + await waitFor(() => { + expect(screen.getByText(mockSummary1)).toBeInTheDocument(); + }); + + // Update description + rerender( + <ThemeProvider theme={supersetTheme}> + <AISummaryBox + {...defaultProps} + description="Updated description with more details" + /> + </ThemeProvider>, + ); + + await waitFor(() => { + expect(mockGenerateSummary).toHaveBeenCalledTimes(2); + expect(mockGenerateSummary).toHaveBeenLastCalledWith( + expect.objectContaining({ + description: 'Updated description with more details', + }), + expect.any(Object), + ); + }); + }); + + it('should regenerate summary when title changes', async () => { + const mockSummary = 'Summary with updated title'; + mockGenerateSummary.mockResolvedValue(mockSummary); + + const { rerender } = renderComponent(); + + await waitFor(() => { + expect(mockGenerateSummary).toHaveBeenCalledTimes(1); + }); + + // Update title + rerender( + <ThemeProvider theme={supersetTheme}> + <AISummaryBox {...defaultProps} title="Updated Chart Title" /> + </ThemeProvider>, + ); + + await waitFor(() => { + expect(mockGenerateSummary).toHaveBeenCalledTimes(2); + expect(mockGenerateSummary).toHaveBeenLastCalledWith( + expect.objectContaining({ + title: 'Updated Chart Title', + }), + expect.any(Object), + ); + }); + }); + + it('should handle long summaries with expand/collapse', async () => { + const longSummary = 'This is a very long summary that should be truncated. 
'.repeat(20); + mockGenerateSummary.mockResolvedValue(longSummary); + + renderComponent(); + + await waitFor(() => { + expect(screen.getByText(longSummary)).toBeInTheDocument(); + }); + + // Should show expand button for long text + const expandButton = screen.getByTitle('view more'); + expect(expandButton).toBeInTheDocument(); + + // Click expand + fireEvent.click(expandButton); + + // Should show collapse button + expect(screen.getByTitle('view less')).toBeInTheDocument(); + }); + + it('should use image mode when no data sample available', async () => { + mockExtractRawDataSample.mockReturnValue(null); + const mockSummary = 'Summary based on chart image'; + mockGenerateSummary.mockResolvedValue(mockSummary); + + renderComponent(); + + await waitFor(() => { + expect(mockGenerateSummary).toHaveBeenCalledWith( + expect.objectContaining({ + dataSample: null, + }), + expect.objectContaining({ + mode: 'image', + }), + ); + }); + }); + + it('should call onHeightChange when provided', async () => { + const mockOnHeightChange = jest.fn(); + const mockSummary = 'Test summary'; + mockGenerateSummary.mockResolvedValue(mockSummary); + + renderComponent({ onHeightChange: mockOnHeightChange }); + + await waitFor(() => { + expect(mockOnHeightChange).toHaveBeenCalled(); + }); + }); + + it('should abort previous requests when props change', async () => { + let abortController: AbortController; + mockGenerateSummary.mockImplementation((input, options) => { + if (options?.signal) { + abortController = { abort: jest.fn() } as any; + return new Promise(() => {}); // Never resolves + } + return Promise.resolve('summary'); + }); + + const { rerender } = renderComponent(); + + // Change a prop to trigger new request + rerender( + <ThemeProvider theme={supersetTheme}> + <AISummaryBox {...defaultProps} title="New Title" /> + </ThemeProvider>, + ); + + // First request should be aborted + expect(mockGenerateSummary).toHaveBeenCalledTimes(2); + }); +}); diff --git 
a/superset-frontend/src/components/Chart/AISummaryBox.tsx b/superset-frontend/src/components/Chart/AISummaryBox.tsx new file mode 100644 index 000000000000..9b5202299972 --- /dev/null +++ b/superset-frontend/src/components/Chart/AISummaryBox.tsx @@ -0,0 +1,504 @@ +/** + * AISummaryBox + * + * Renders a compact, two-line AI-generated summary under a chart. + * + * Behavior + * - Prefers summarizing lightweight structured data extracted from queries. + * - Falls back to a low-resolution PNG snapshot of the chart if data sample is unavailable. + * - Calls generateSummary(), which prefers a backend endpoint and gracefully degrades to + * a generic 2-line placeholder when the endpoint is missing or returns an error. + * + * Styling + * - Shows an animated gradient border ("running color") around a lightly translucent container. + * + * Integration + * - Place directly beneath SuperChart; pass `chartDomId` of the rendered chart container + * (e.g., `chart-id-<chartId>`) so snapshots can be captured when needed. 
+ */ +import { useEffect, useMemo, useRef, useState } from 'react'; +import styled from '@emotion/styled'; +import { keyframes } from '@emotion/react'; +import { rgba } from 'polished'; +import Icons from 'src/components/Icons'; +import { generateSummary, extractRawDataSample } from '../../utils/aiSummary'; + +type Props = { + chartDomId: string; // DOM id of the chart container (e.g., `chart-id-<id>`) + vizType: string; + title?: string; + description?: string; + queriesData?: unknown; + timeRange?: string | null; + filters?: Record<string, unknown>; + onHeightChange?: (height: number) => void; +}; + +const shimmer = keyframes` + 0% { background-position: 0% 50%; } + 100% { background-position: 200% 50%; } +`; + +const Border = styled('div')` + position: relative; + border-radius: 10px; + padding: 1px; + background: ${({ theme }) => + `linear-gradient(90deg, ${rgba(theme.colors.primary.base, 0.25)}, ${rgba( + theme.colors.success.base, + 0.25, + )}, ${rgba(theme.colors.primary.base, 0.25)})`}; + background-size: 200% 200%; + animation: ${shimmer} 6s linear infinite; + box-shadow: none; +`; + +// background: ${({ theme }) => rgba(theme.colors.grayscale.light5, 0.25)}; +const Container = styled('div')<{ hasActionButton: boolean }>` + border-radius: 9px; + color: inherit; + padding: 8px 12px + ${({ hasActionButton }) => (hasActionButton ? 
'22px' : '8px')}; + font-size: 12px; + line-height: 1.45; + backdrop-filter: none; + box-shadow: none; + overflow: visible; + margin-bottom: 4px; + display: grid; + grid-template-columns: 18px 1fr; + grid-auto-rows: auto; + column-gap: 8px; + user-select: none; +`; +const SkeletonLine = styled('div')` + height: 12px; + width: 100%; + margin: 6px 0; + border-radius: 6px; + background: ${({ theme }) => + `linear-gradient(90deg, ${theme.colors.grayscale.light3}, ${theme.colors.grayscale.light2}, ${theme.colors.grayscale.light3})`}; + background-size: 200% 200%; + animation: ${shimmer} 1.5s ease-in-out infinite; +`; + +const TextWrapper = styled('div')` + grid-column: 2; +`; + +const FullText = styled('div')` + white-space: normal; + word-break: break-word; +`; + +const CollapsedLine = styled('div')` + white-space: normal; + word-break: break-word; + display: block; +`; + +const HiddenMeasure = styled('div')` + position: absolute; + left: 0; + top: 0; + visibility: hidden; + pointer-events: none; + white-space: normal; + width: 100%; + height: auto; + overflow: visible; +`; + +const ToggleLink = styled('span')` + font-weight: 500; + color: ${({ theme }) => theme.colors.primary.base}; + text-decoration: underline; + cursor: pointer; +`; + +// Fallback-safe AI icon +function AIGlyph(props: { + width?: number; + height?: number; + 'aria-label'?: string; +}) { + const anyIcons = Icons as any; + const Icon = + anyIcons?.Ai || + anyIcons?.Bot || + anyIcons?.Robot || + anyIcons?.Bulb || + anyIcons?.Lightbulb || + anyIcons?.Thunderbolt || + anyIcons?.QuestionCircleOutlined || + Icons.InfoSolid; + return <Icon {...props} />; +} + +export default function AISummaryBox({ + chartDomId, + vizType, + title, + description, + queriesData, + timeRange, + filters, + onHeightChange, +}: Props) { + // Summary state lifecycle: loading -> text OR error + const [summary, setSummary] = useState<string>(''); + const [expanded, setExpanded] = useState<boolean>(false); + const [loading, 
setLoading] = useState<boolean>(false); + const [error, setError] = useState<string | null>(null); + + const abortRef = useRef<AbortController | null>(null); + const borderRef = useRef<HTMLDivElement | null>(null); + const textWrapperRef = useRef<HTMLDivElement | null>(null); + const measureRef = useRef<HTMLDivElement | null>(null); + + // Detect when the chart container is actually rendered to avoid layout jump + const [isChartReady, setIsChartReady] = useState<boolean>(false); + useEffect(() => { + setIsChartReady(false); + + const getChartElement = () => + document.getElementById(chartDomId) as HTMLElement | null; + + const isReady = () => { + const el = getChartElement(); + if (!el) return false; + + // Check for actual chart graphics/content + const hasGraphics = el.querySelector( + 'canvas, svg, .nvd3, .chart-container, [data-test="chart-container"]', + ); + + // Get element dimensions + const rect = el.getBoundingClientRect(); + const hasValidDimensions = rect.height > 0 && rect.width > 0; + + // For more robust detection, require both graphics AND meaningful dimensions + // Also check if the chart has actual rendered content (not just loading states) + const hasContent = hasGraphics && hasValidDimensions; + + // Additional check: ensure the chart isn't just showing loading/empty state + const isNotLoading = !el.querySelector( + '.loading, [data-test="loading"], .chart-shimmer, .empty-state', + ); + + return hasContent && isNotLoading; + }; + + const el = getChartElement(); + if (isReady()) { + setIsChartReady(true); + return () => {}; + } + + let mutationObserver: MutationObserver | undefined; + let resizeObserver: ResizeObserver | undefined; + + if (el) { + mutationObserver = new MutationObserver(() => { + if (isReady()) { + // Add a small delay to ensure chart is fully rendered + window.setTimeout(() => { + setIsChartReady(true); + mutationObserver?.disconnect(); + if (resizeObserver) { + resizeObserver.disconnect(); + } + }, 100); + } + }); + 
mutationObserver.observe(el, { childList: true, subtree: true }); + + if ((window as any).ResizeObserver) { + resizeObserver = new (window as any).ResizeObserver(() => { + if (isReady()) { + // Add a small delay to ensure chart is fully rendered + window.setTimeout(() => { + setIsChartReady(true); + mutationObserver?.disconnect(); + if (resizeObserver) { + resizeObserver.disconnect(); + } + }, 100); + } + }); + if (resizeObserver) { + resizeObserver.observe(el); + } + } + } + + const timeoutId = window.setTimeout(() => { + if (isReady()) { + // Add a small additional delay to ensure chart is fully rendered + window.setTimeout(() => setIsChartReady(true), 100); + } + }, 300); + + return () => { + window.clearTimeout(timeoutId); + mutationObserver?.disconnect(); + if (resizeObserver) { + resizeObserver.disconnect(); + } + }; + }, [chartDomId]); + + // Extract a compact data sample for the model; avoids sending full results + // Use raw sample rows for the custom API contract + const dataSample = useMemo( + () => extractRawDataSample(queriesData), + // eslint-disable-next-line react-hooks/exhaustive-deps + [JSON.stringify(queriesData)], + ); + + // Kick off AI summary generation, with abort support and graceful error fallback + async function run() { + abortRef.current?.abort(); + const controller = new AbortController(); + abortRef.current = controller; + setLoading(true); + setError(null); + try { + const imageBase64 = undefined; + const text = await generateSummary( + { + vizType, + title, + description, + dataSample, + imageBase64, + timeRange: timeRange ?? null, + filters: filters ?? {}, + }, + { + mode: dataSample ? 
'data' : 'image', + signal: controller.signal, + }, + ); + setSummary(text); + } catch (e: any) { + setError(e?.message || 'Failed to summarize'); + } finally { + setLoading(false); + } + } + + // Memoize filters string to avoid re-renders + const filtersString = useMemo(() => JSON.stringify(filters), [filters]); + + useEffect(() => { + if (!isChartReady) return; + run(); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [ + isChartReady, + vizType, + chartDomId, + title, + description, + dataSample, + timeRange, + filtersString, + ]); + + // Always collapse when the page/tab becomes hidden OR the component leaves viewport + useEffect(() => { + const handleVisibility = () => { + if (document.hidden) setExpanded(false); + }; + document.addEventListener('visibilitychange', handleVisibility); + return () => + document.removeEventListener('visibilitychange', handleVisibility); + }, []); + + useEffect(() => { + const node = borderRef.current; + if (!node || !(window as any).IntersectionObserver) return undefined; + const observer = new (window as any).IntersectionObserver( + (entries: IntersectionObserverEntry[]) => { + entries.forEach(entry => { + if (entry.intersectionRatio === 0) { + setExpanded(false); + } + }); + }, + { threshold: 0 }, + ); + observer.observe(node); + return () => observer.disconnect(); + }, [borderRef]); + + // Also collapse when switching charts/containers + useEffect(() => { + setExpanded(false); + }, [chartDomId]); + + const shouldShow = + !loading && !error && Boolean(summary && summary.trim().length > 0); + + const fullText = useMemo(() => summary || '', [summary]); + + // Inline truncation with trailing "…" + const [collapsedText, setCollapsedText] = useState<string>(''); + const [needsMore, setNeedsMore] = useState<boolean>(false); + + useEffect(() => { + if (!shouldShow || expanded) return; + const node = measureRef.current; + if (!node) return; + + // Measure full text height first to decide if truncation is needed + 
node.innerHTML = fullText; + const lineHeight = parseFloat( + window.getComputedStyle(node).lineHeight || '0', + ); + const maxHeight = Math.ceil(lineHeight * 2); + const fullHeight = Math.ceil(node.getBoundingClientRect().height); + + if (fullHeight <= maxHeight) { + setCollapsedText(fullText); + setNeedsMore(false); + return; + } + + setNeedsMore(true); + const suffix = + ' … <span style="font-weight:600;text-decoration:underline;">view more</span>'; + + // Binary search best cut index + let low = 0; + let high = fullText.length; + let best = 0; + while (low <= high) { + const mid = Math.floor((low + high) / 2); + node.innerHTML = `${fullText.slice(0, mid)}${suffix}`; + const h = Math.ceil(node.getBoundingClientRect().height); + if (h <= maxHeight) { + best = mid; + low = mid + 1; + } else { + high = mid - 1; + } + } + + // Prefer word boundary near best + let cut = best; + const lookback = 20; + const spaceIdx = fullText + .slice(Math.max(0, best - lookback), best) + .lastIndexOf(' '); + if (spaceIdx > -1) { + cut = Math.max(0, best - (lookback - spaceIdx)); + } + + setCollapsedText(fullText.slice(0, cut).trimEnd()); + }, [shouldShow, expanded, fullText]); + + // Recompute on resize when collapsed + useEffect(() => { + if (!shouldShow || expanded) return undefined; + const handler = () => { + // force recompute by updating dependency-like state + setCollapsedText(prev => prev); + }; + window.addEventListener('resize', handler); + return () => window.removeEventListener('resize', handler); + }, [shouldShow, expanded]); + + // Report height to parent during loading and when shown/hidden or expand/collapse/content changes + useEffect(() => { + if (!onHeightChange) return undefined; + const node = borderRef.current; + const id = window.setTimeout(() => { + if (node) { + const rect = node.getBoundingClientRect(); + onHeightChange(Math.ceil(rect.height)); + } else { + onHeightChange(0); + } + }, 0); + return () => window.clearTimeout(id); + }, [onHeightChange, 
loading, shouldShow, expanded, fullText, collapsedText]); + + // Do not render anything until the chart is ready to avoid centering/jumping + if (!isChartReady) return null; + + if (loading) { + return ( + <Border ref={borderRef}> + <Container hasActionButton={false}> + <div style={{ gridColumn: 1, alignSelf: 'start' }}> + <AIGlyph width={14} height={14} aria-label="AI" /> + </div> + <div style={{ gridColumn: 2 }}> + <SkeletonLine /> + <SkeletonLine style={{ width: '92%' }} /> + <div style={{ height: 8 }} /> + </div> + </Container> + </Border> + ); + } + + if (!shouldShow) return null; + + return ( + <Border ref={borderRef}> + <Container hasActionButton={false}> + <div style={{ gridColumn: 1, alignSelf: 'start' }}> + <AIGlyph width={14} height={14} aria-label="AI" /> + </div> + <TextWrapper ref={textWrapperRef}> + {expanded ? ( + <FullText> + {fullText}{' '} + <ToggleLink + role="button" + tabIndex={0} + onClick={() => setExpanded(false)} + onKeyDown={e => { + if (e.key === 'Enter' || e.key === ' ') setExpanded(false); + }} + aria-label="view less" + title="view less" + > + view less + </ToggleLink> + </FullText> + ) : ( + <> + <CollapsedLine> + {collapsedText} + {needsMore ? 
( + <> + {' '} + …{' '} + <ToggleLink + role="button" + tabIndex={0} + onClick={() => setExpanded(true)} + onKeyDown={e => { + if (e.key === 'Enter' || e.key === ' ') + setExpanded(true); + }} + aria-label="view more" + title="view more" + > + view more + </ToggleLink> + </> + ) : null} + </CollapsedLine> + <HiddenMeasure ref={measureRef} /> + </> + )} + </TextWrapper> + </Container> + </Border> + ); +} diff --git a/superset-frontend/src/components/Chart/Chart.jsx b/superset-frontend/src/components/Chart/Chart.jsx index ea1a2d968c99..4e83e25efcd6 100644 --- a/superset-frontend/src/components/Chart/Chart.jsx +++ b/superset-frontend/src/components/Chart/Chart.jsx @@ -28,6 +28,7 @@ import { } from '@superset-ui/core'; import { PLACEHOLDER_DATASOURCE } from 'src/dashboard/constants'; import Loading from 'src/components/Loading'; +import ChartShimmer from 'src/components/Chart/ChartShimmer'; import { EmptyStateBig } from 'src/components/EmptyState'; import ErrorBoundary from 'src/components/ErrorBoundary'; import { Logger, LOG_ACTIONS_RENDER_CHART } from 'src/logger/LogUtils'; @@ -82,6 +83,8 @@ const propTypes = { datasetsStatus: PropTypes.oneOf(['loading', 'error', 'complete']), isInView: PropTypes.bool, emitCrossFilters: PropTypes.bool, + description: PropTypes.string, + title: PropTypes.string, }; const BLANK = {}; @@ -105,7 +108,6 @@ const defaultProps = { const Styles = styled.div` min-height: ${p => p.height}px; position: relative; - text-align: center; .chart-tooltip { opacity: 0.75; @@ -133,17 +135,10 @@ const LoadingDiv = styled.div` position: absolute; left: 50%; top: 50%; - width: 80%; + width: 100%; transform: translate(-50%, -50%); `; -const MessageSpan = styled.span` - display: block; - margin: ${({ theme }) => theme.gridUnit * 4}px auto; - width: fit-content; - color: ${({ theme }) => theme.colors.grayscale.base}; -`; - const MonospaceDiv = styled.div` font-family: ${({ theme }) => theme.typography.families.monospace}; word-break: break-word; @@ -229,7 
+224,7 @@ class Chart extends PureComponent { data-test="chart-container" height={height} > - <Loading /> + <ChartShimmer height={height} /> </Styles> ); } @@ -248,15 +243,10 @@ class Chart extends PureComponent { ); } - renderSpinner(databaseName) { - const message = databaseName - ? t('Waiting on %s', databaseName) - : t('Waiting on database...'); - + renderSpinner() { return ( <LoadingDiv> - <Loading position="inline-centered" /> - <MessageSpan>{message}</MessageSpan> + <ChartShimmer height={this.props.height} /> </LoadingDiv> ); } @@ -273,7 +263,7 @@ class Chart extends PureComponent { data-test={this.props.vizType} /> ) : ( - <Loading /> + <ChartShimmer height={this.props.height} /> )} </div> ); @@ -284,13 +274,13 @@ class Chart extends PureComponent { height, chartAlert, chartStatus, - datasource, + // datasource, errorMessage, chartIsStale, queriesResponse = [], width, } = this.props; - const databaseName = datasource?.database?.name; + // const databaseName = datasource?.database?.name; const isLoading = chartStatus === 'loading'; this.renderContainerStartTime = Logger.getTimestamp(); @@ -346,9 +336,7 @@ class Chart extends PureComponent { height={height} width={width} > - {isLoading - ? this.renderSpinner(databaseName) - : this.renderChartContainer()} + {isLoading ? this.renderSpinner() : this.renderChartContainer()} </Styles> </ErrorBoundary> ); diff --git a/superset-frontend/src/components/Chart/ChartRenderer.description.test.jsx b/superset-frontend/src/components/Chart/ChartRenderer.description.test.jsx new file mode 100644 index 000000000000..00cc9271f751 --- /dev/null +++ b/superset-frontend/src/components/Chart/ChartRenderer.description.test.jsx @@ -0,0 +1,201 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { render, screen, waitFor } from 'spec/helpers/testing-library'; +import { FeatureFlag } from '@superset-ui/core'; +import { ThemeProvider } from '@emotion/react'; +import { supersetTheme } from '@superset-ui/core'; +import ChartRenderer from './ChartRenderer'; + +// Mock dependencies +jest.mock('@superset-ui/core', () => ({ + ...jest.requireActual('@superset-ui/core'), + SuperChart: ({ formData }) => ( + <div data-test="mock-super-chart">{JSON.stringify(formData)}</div> + ), + isFeatureEnabled: jest.fn(), +})); + +jest.mock('./ChartContextMenu/ChartContextMenu', () => () => ( + <div data-test="mock-chart-context-menu" /> +)); + +// Mock AISummaryBox to verify props +jest.mock('./AISummaryBox', () => { + return function MockAISummaryBox(props) { + return ( + <div data-test="mock-ai-summary-box"> + <div data-test="ai-title">{props.title}</div> + <div data-test="ai-description">{props.description}</div> + <div data-test="ai-viztype">{props.vizType}</div> + </div> + ); + }; +}); + +const mockIsFeatureEnabled = require('@superset-ui/core').isFeatureEnabled; + +const baseProps = { + chartId: 1, + datasource: { id: 1, type: 'table' }, + formData: { + viz_type: 'line', + }, + vizType: 'line', + height: 400, + width: 600, + title: 'Test Chart Title', + description: 'Test Chart Description', + queriesResponse: [ + { + data: [ + { month: 'Jan', sales: 1000 }, + { month: 'Feb', 
sales: 1200 }, + ], + }, + ], +}; + +const renderChartRenderer = (props = {}) => { + // Enable AI summary feature flag + mockIsFeatureEnabled.mockImplementation((flag) => { + if (flag === FeatureFlag.AiSummary) return true; + return false; + }); + + return render( + <ThemeProvider theme={supersetTheme}> + <ChartRenderer {...baseProps} {...props} /> + </ThemeProvider>, + ); +}; + +describe('ChartRenderer - Description Integration', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + afterEach(() => { + mockIsFeatureEnabled.mockReset(); + }); + + it('should pass title and description to AISummaryBox when provided', async () => { + renderChartRenderer(); + + await waitFor(() => { + expect(screen.getByTestId('mock-ai-summary-box')).toBeInTheDocument(); + }); + + expect(screen.getByTestId('ai-description')).toHaveTextContent('Test Chart Description'); + expect(screen.getByTestId('ai-title')).toHaveTextContent('Test Chart Title'); + expect(screen.getByTestId('ai-viztype')).toHaveTextContent('line'); + }); + + it('should handle undefined title and description gracefully', async () => { + renderChartRenderer({ title: undefined, description: undefined }); + + await waitFor(() => { + expect(screen.getByTestId('mock-ai-summary-box')).toBeInTheDocument(); + }); + + expect(screen.getByTestId('ai-description')).toBeEmptyDOMElement(); + expect(screen.getByTestId('ai-title')).toBeEmptyDOMElement(); + }); + + it('should handle null title and description gracefully', async () => { + renderChartRenderer({ title: null, description: null }); + + await waitFor(() => { + expect(screen.getByTestId('mock-ai-summary-box')).toBeInTheDocument(); + }); + + expect(screen.getByTestId('ai-description')).toBeEmptyDOMElement(); + expect(screen.getByTestId('ai-title')).toBeEmptyDOMElement(); + }); + + it('should handle empty string title and description', async () => { + renderChartRenderer({ title: '', description: '' }); + + await waitFor(() => { + 
expect(screen.getByTestId('mock-ai-summary-box')).toBeInTheDocument(); + }); + + expect(screen.getByTestId('ai-description')).toBeEmptyDOMElement(); + expect(screen.getByTestId('ai-title')).toBeEmptyDOMElement(); + }); + + it('should pass provided title correctly', async () => { + renderChartRenderer({ title: 'Custom Chart Title' }); + + await waitFor(() => { + expect(screen.getByTestId('ai-title')).toHaveTextContent('Custom Chart Title'); + }); + }); + + it('should not render AISummaryBox when feature flag is disabled', () => { + mockIsFeatureEnabled.mockImplementation(() => false); + + renderChartRenderer({ description: 'Test description' }); + + expect(screen.queryByTestId('mock-ai-summary-box')).not.toBeInTheDocument(); + }); + + it('should pass all required props to AISummaryBox', async () => { + const description = 'Detailed chart description'; + const timeRange = 'Last 30 days'; + const filters = [{ column: 'region', value: 'US' }]; + + renderChartRenderer({ + description, + formData: { + ...baseProps.formData, + time_range: timeRange, + adhoc_filters: filters, + }, + }); + + await waitFor(() => { + expect(screen.getByTestId('mock-ai-summary-box')).toBeInTheDocument(); + }); + + // Verify the mock component received the props + const mockAISummaryBox = screen.getByTestId('mock-ai-summary-box'); + expect(mockAISummaryBox).toBeInTheDocument(); + }); + + it('should handle very long descriptions', async () => { + const longDescription = 'This is a very long description. 
'.repeat(100); + + renderChartRenderer({ description: longDescription }); + + await waitFor(() => { + expect(screen.getByTestId('ai-description')).toHaveTextContent(longDescription); + }); + }); + + it('should handle special characters in description', async () => { + const specialDescription = 'Description with special chars: <>&"\'🚀💡📊'; + + renderChartRenderer({ description: specialDescription }); + + await waitFor(() => { + expect(screen.getByTestId('ai-description')).toHaveTextContent(specialDescription); + }); + }); +}); diff --git a/superset-frontend/src/components/Chart/ChartRenderer.jsx b/superset-frontend/src/components/Chart/ChartRenderer.jsx index b9cd6caf4fb9..e5e92e0a01fa 100644 --- a/superset-frontend/src/components/Chart/ChartRenderer.jsx +++ b/superset-frontend/src/components/Chart/ChartRenderer.jsx @@ -32,6 +32,7 @@ import { Logger, LOG_ACTIONS_RENDER_CHART } from 'src/logger/LogUtils'; import { EmptyStateBig, EmptyStateSmall } from 'src/components/EmptyState'; import { ChartSource } from 'src/types/ChartSource'; import ChartContextMenu from './ChartContextMenu/ChartContextMenu'; +import AISummaryBox from './AISummaryBox'; const propTypes = { annotationData: PropTypes.object, @@ -63,6 +64,8 @@ const propTypes = { postTransformProps: PropTypes.func, source: PropTypes.oneOf([ChartSource.Dashboard, ChartSource.Explore]), emitCrossFilters: PropTypes.bool, + description: PropTypes.string, + title: PropTypes.string, }; const BLANK = {}; @@ -106,6 +109,8 @@ class ChartRenderer extends Component { this.handleLegendStateChanged = this.handleLegendStateChanged.bind(this); this.onContextMenuFallback = this.onContextMenuFallback.bind(this); + this.aiHeightUpdateId = null; + this.hooks = { onAddFilter: this.handleAddFilter, onContextMenu: this.state.showContextMenu @@ -293,7 +298,7 @@ class ChartRenderer extends Component { : ''; let noResultsComponent; - const noResultTitle = t('No results were returned for this query'); + const noResultTitle = t('No data'); 
const noResultDescription = this.props.source === ChartSource.Explore ? t( @@ -323,6 +328,19 @@ class ChartRenderer extends Component { ? { inContextMenu: this.state.inContextMenu } : {}; + // Reserve vertical room for the AI summary to avoid clipping + const showAISummary = + isFeatureEnabled(FeatureFlag.AiSummaryOnChart) && + vizType !== 'big_number_total' && + vizType !== 'big_number' && + Boolean(currentFormData && currentFormData.enable_ai_insights); + const defaultReserved = 64; + const bufferPx = 8; + const reserved = showAISummary + ? (this.state.aiBoxHeight || defaultReserved) + bufferPx + : 0; + const innerChartHeight = Math.max(50, (height || 0) - reserved); + return ( <> {this.state.showContextMenu && ( @@ -346,7 +364,7 @@ class ChartRenderer extends Component { className={chartClassName} chartType={vizType} width={width} - height={height} + height={innerChartHeight} annotationData={annotationData} datasource={datasource} initialValues={initialValues} @@ -364,6 +382,36 @@ class ChartRenderer extends Component { legendState={this.state.legendState} {...drillToDetailProps} /> + {showAISummary && ( + <div + style={{ + marginTop: vizType === 'country_map' ? 0 : 8, + marginBottom: vizType === 'country_map' ? 
0 : 12, + }} + > + <AISummaryBox + chartDomId={`chart-id-${chartId}`} + vizType={vizType} + title={this.props.title} + description={this.props.description} + queriesData={this.mutableQueriesResponse} + timeRange={formData?.time_range} + filters={formData?.adhoc_filters} + onHeightChange={h => { + const prev = this.state.aiBoxHeight || 0; + const threshold = 4; // ignore tiny changes to avoid jitter + if (Math.abs((h || 0) - prev) > threshold) { + if (this.aiHeightUpdateId) + window.cancelAnimationFrame(this.aiHeightUpdateId); + this.aiHeightUpdateId = window.requestAnimationFrame(() => { + if (this.state.aiBoxHeight !== h) + this.setState({ aiBoxHeight: h }); + }); + } + }} + /> + </div> + )} </div> </> ); diff --git a/superset-frontend/src/components/Chart/ChartRenderer.test.jsx b/superset-frontend/src/components/Chart/ChartRenderer.test.jsx index 5f8a32bd4f05..53b35b6cf415 100644 --- a/superset-frontend/src/components/Chart/ChartRenderer.test.jsx +++ b/superset-frontend/src/components/Chart/ChartRenderer.test.jsx @@ -16,11 +16,22 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { shallow } from 'enzyme'; -import { SuperChart } from '@superset-ui/core'; +import { render } from 'spec/helpers/testing-library'; import ChartRenderer from 'src/components/Chart/ChartRenderer'; +jest.mock('@superset-ui/core', () => ({ + ...jest.requireActual('@superset-ui/core'), + SuperChart: ({ formData }) => ( + <div data-test="mock-super-chart">{JSON.stringify(formData)}</div> + ), +})); + +jest.mock( + 'src/components/Chart/ChartContextMenu/ChartContextMenu', + () => () => <div data-test="mock-chart-context-menu" />, +); + const requiredProps = { chartId: 1, datasource: {}, @@ -31,18 +42,18 @@ const requiredProps = { vizType: 'table', }; -describe('ChartRenderer', () => { - it('should render SuperChart', () => { - const wrapper = shallow( - <ChartRenderer {...requiredProps} chartIsStale={false} />, - ); - expect(wrapper.find(SuperChart)).toExist(); - }); +test('should render SuperChart', () => { + const { getByTestId } = render( + <ChartRenderer {...requiredProps} chartIsStale={false} />, + ); + expect(getByTestId('mock-super-chart')).toBeInTheDocument(); +}); - it('should use latestQueryFormData instead of formData when chartIsStale is true', () => { - const wrapper = shallow(<ChartRenderer {...requiredProps} chartIsStale />); - expect(wrapper.find(SuperChart).prop('formData')).toEqual({ - testControl: 'bar', - }); - }); +test('should use latestQueryFormData instead of formData when chartIsStale is true', () => { + const { getByTestId } = render( + <ChartRenderer {...requiredProps} chartIsStale />, + ); + expect(getByTestId('mock-super-chart')).toHaveTextContent( + JSON.stringify({ testControl: 'bar' }), + ); }); diff --git a/superset-frontend/src/components/Chart/ChartShimmer.tsx b/superset-frontend/src/components/Chart/ChartShimmer.tsx new file mode 100644 index 000000000000..5d136cdc6428 --- /dev/null +++ b/superset-frontend/src/components/Chart/ChartShimmer.tsx @@ -0,0 +1,72 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under 
one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { CSSProperties } from 'react'; +import { keyframes, styled, useTheme } from '@superset-ui/core'; + +export interface ChartShimmerProps { + height?: number; + width?: number; + className?: string; + style?: CSSProperties; +} + +const shimmer = keyframes` + 0% { background-position: 0% 50%; } + 100% { background-position: 200% 50%; } +`; + +const Container = styled.div` + position: relative; + width: 100%; + height: 100%; +`; + +const BodyBlock = styled.div` + border-radius: ${({ theme }) => theme.borderRadius}px; + width: 100%; + background: ${({ theme }) => + `linear-gradient(90deg, ${theme.colors.grayscale.light3}, ${theme.colors.grayscale.light2}, ${theme.colors.grayscale.light3})`}; + background-size: 200% 200%; + animation: ${shimmer} 1.5s ease-in-out infinite; + border-radius: 6px; +`; + +export default function ChartShimmer({ + height, + width, + className, + style, +}: ChartShimmerProps) { + const theme = useTheme(); + const effectiveHeight = + typeof height === 'number' && height > 0 ? 
height : 220; + const reservedHeader = 16 + theme.gridUnit * 2; // header height + margin + const bodyHeight = Math.max(50, effectiveHeight - reservedHeader); + + return ( + <Container + className={className} + style={{ ...style, minHeight: effectiveHeight, width: width || '100%' }} + data-test="chart-shimmer" + > + <BodyBlock style={{ height: bodyHeight }} /> + </Container> + ); +} diff --git a/superset-frontend/src/components/Chart/DrillBy/DrillByMenuItems.tsx b/superset-frontend/src/components/Chart/DrillBy/DrillByMenuItems.tsx index f694db3d272e..b80417a13007 100644 --- a/superset-frontend/src/components/Chart/DrillBy/DrillByMenuItems.tsx +++ b/superset-frontend/src/components/Chart/DrillBy/DrillByMenuItems.tsx @@ -138,7 +138,7 @@ export const DrillByMenuItems = ({ useEffect(() => { if (open) { - ref.current?.input.focus(); + ref.current?.input.focus({ preventScroll: true }); } else { // Reset search input when menu is closed ref.current?.setValue(''); diff --git a/superset-frontend/src/components/Chart/DrillDetail/DrillDetailPane.tsx b/superset-frontend/src/components/Chart/DrillDetail/DrillDetailPane.tsx index bcb0f53ee11e..5382eb4062dd 100644 --- a/superset-frontend/src/components/Chart/DrillDetail/DrillDetailPane.tsx +++ b/superset-frontend/src/components/Chart/DrillDetail/DrillDetailPane.tsx @@ -281,7 +281,7 @@ export default function DrillDetailPane({ </pre> ); } else if (bootstrapping) { - // Render loading if first page hasn't loaded + // Render shimmer if first page hasn't loaded tableContent = <Loading />; } else if (resultsPage?.total === 0) { // Render empty state if no results are returned for page diff --git a/superset-frontend/src/components/Chart/chartAction.js b/superset-frontend/src/components/Chart/chartAction.js index 8a9124818424..f93abd96eb51 100644 --- a/superset-frontend/src/components/Chart/chartAction.js +++ b/superset-frontend/src/components/Chart/chartAction.js @@ -43,6 +43,7 @@ import { allowCrossDomain as domainShardingEnabled } 
from 'src/utils/hostNamesCo import { updateDataMask } from 'src/dataMask/actions'; import { waitForAsyncData } from 'src/middleware/asyncEvent'; import { safeStringify } from 'src/utils/safeStringify'; +import { convertFormDataForAPI, convertAnnotationFormDataForAPI } from './timezoneChartActions'; export const CHART_UPDATE_STARTED = 'CHART_UPDATE_STARTED'; export function chartUpdateStarted(queryController, latestQueryFormData, key) { @@ -264,6 +265,10 @@ export function runAnnotationQuery({ ...(formData || charts[sliceKey].latestQueryFormData), }; + // Convert annotation and form data for timezone-aware API calls + const { annotation: convertedAnnotation, formData: convertedFd } = + convertAnnotationFormDataForAPI(annotation, fd); + if (!annotation.sourceType) { return Promise.resolve(); } @@ -271,42 +276,42 @@ export function runAnnotationQuery({ // In the original formData the `granularity` attribute represents the time grain (eg // `P1D`), but in the request payload it corresponds to the name of the column where // the time grain should be applied (eg, `Date`), so we need to move things around. 
- fd.time_grain_sqla = fd.time_grain_sqla || fd.granularity; - fd.granularity = fd.granularity_sqla; + convertedFd.time_grain_sqla = convertedFd.time_grain_sqla || convertedFd.granularity; + convertedFd.granularity = convertedFd.granularity_sqla; - const overridesKeys = Object.keys(annotation.overrides); + const overridesKeys = Object.keys(convertedAnnotation.overrides || {}); if (overridesKeys.includes('since') || overridesKeys.includes('until')) { - annotation.overrides = { - ...annotation.overrides, + convertedAnnotation.overrides = { + ...convertedAnnotation.overrides, time_range: null, }; } - const sliceFormData = Object.keys(annotation.overrides).reduce( + const sliceFormData = Object.keys(convertedAnnotation.overrides || {}).reduce( (d, k) => ({ ...d, - [k]: annotation.overrides[k] || fd[k], + [k]: convertedAnnotation.overrides[k] || convertedFd[k], }), {}, ); - if (!isDashboardRequest && fd) { - const hasExtraFilters = fd.extra_filters && fd.extra_filters.length > 0; + if (!isDashboardRequest && convertedFd) { + const hasExtraFilters = convertedFd.extra_filters && convertedFd.extra_filters.length > 0; sliceFormData.extra_filters = hasExtraFilters - ? fd.extra_filters + ? 
convertedFd.extra_filters : undefined; } - const url = getAnnotationJsonUrl(annotation.value, force); + const url = getAnnotationJsonUrl(convertedAnnotation.value, force); const controller = new AbortController(); const { signal } = controller; - dispatch(annotationQueryStarted(annotation, controller, sliceKey)); + dispatch(annotationQueryStarted(convertedAnnotation, controller, sliceKey)); - const annotationIndex = fd?.annotation_layers?.findIndex( - it => it.name === annotation.name, + const annotationIndex = convertedFd?.annotation_layers?.findIndex( + it => it.name === convertedAnnotation.name, ); if (annotationIndex >= 0) { - fd.annotation_layers[annotationIndex].overrides = sliceFormData; + convertedFd.annotation_layers[annotationIndex].overrides = sliceFormData; } return SupersetClient.post({ @@ -315,7 +320,7 @@ export function runAnnotationQuery({ timeout: queryTimeout * 1000, headers: { 'Content-Type': 'application/json' }, jsonPayload: buildV1ChartDataPayload({ - formData: fd, + formData: convertedFd, force, resultFormat: 'json', resultType: 'full', diff --git a/superset-frontend/src/components/Chart/timezoneChartActions.js b/superset-frontend/src/components/Chart/timezoneChartActions.js new file mode 100644 index 000000000000..196e7cec3c84 --- /dev/null +++ b/superset-frontend/src/components/Chart/timezoneChartActions.js @@ -0,0 +1,139 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { getCurrentTimezone } from 'src/utils/dateUtils'; +import { convertRequestDatesToUTC } from 'src/utils/timezoneApiUtils'; + +/** + * Convert form data dates to UTC before sending to API + * This ensures all datetime values are in UTC as expected by the backend + */ +export function convertFormDataForAPI(formData) { + const timezone = getCurrentTimezone(); + + // NOTE: We should NOT skip conversion just because user's viewing timezone is UTC! + // The data might come from a different timezone and still needs conversion. + // Only skip if we can determine the data is already in UTC format. + // For now, always attempt conversion - the convertRequestDatesToUTC function + // will handle the actual timezone detection and conversion logic. 
+ + // Create a copy to avoid mutating the original + const converted = { ...formData }; + + try { + const dateFieldsFound = []; + Object.keys(converted).forEach(key => { + if (converted[key] && typeof converted[key] === 'string') { + // Check if it looks like a date + if (/\d{4}-\d{2}-\d{2}/.test(converted[key])) { + dateFieldsFound.push({ field: key, value: converted[key] }); + } + } + }); + + // Convert timezone-aware date fields to UTC + const result = convertRequestDatesToUTC(converted, { + convertRequestDates: true, + additionalDateFields: [ + // Chart-specific date fields that commonly contain dates + 'time_range', + 'time_range_endpoints', + 'since', + 'until', + 'start_date_offset', + 'end_date_offset', + 'x_ticks_layout', + 'datetime_format', + 'granularity', + 'granularity_sqla', + 'time_grain_sqla', + // Add more common date fields found in form data + 'extra_filters', + 'adhoc_filters', + ], + }); + + + return result; + } catch (error) { + console.error('❌ [TIMEZONE CONVERSION ERROR]:', error); + return formData; // Return original if conversion fails + } +} + +/** + * Timezone-aware wrapper for chart data requests + * This should be used instead of the original getChartDataRequest when timezone conversion is needed + */ +export async function getTimezoneAwareChartDataRequest(originalRequest) { + return async function timezoneAwareRequest({ + formData, + setDataMask = () => {}, + resultFormat = 'json', + resultType = 'full', + force = false, + method = 'POST', + requestParams = {}, + ownState = {}, + }) { + // Convert form data dates to UTC before sending to API + const convertedFormData = convertFormDataForAPI(formData); + + // Call the original request with converted form data + return originalRequest({ + formData: convertedFormData, + setDataMask, + resultFormat, + resultType, + force, + method, + requestParams, + ownState, + }); + }; +} + +/** + * Convert annotation query form data for timezone-aware requests + */ +export function 
convertAnnotationFormDataForAPI(annotation, formData) { + const timezone = getCurrentTimezone(); + + // NOTE: Same fix as above - don't skip conversion just because user timezone is UTC + // The annotation data might still need timezone conversion regardless of user's viewing timezone + + try { + // Convert annotation overrides + const convertedAnnotation = { ...annotation }; + if (annotation.overrides) { + convertedAnnotation.overrides = convertRequestDatesToUTC(annotation.overrides); + } + + // Convert form data + const convertedFormData = convertFormDataForAPI(formData); + + return { + annotation: convertedAnnotation, + formData: convertedFormData, + }; + } catch (error) { + console.warn('[Timezone] Failed to convert annotation form data dates to UTC:', error); + return { annotation, formData }; + } +} diff --git a/superset-frontend/src/components/Checkbox/Checkbox.test.tsx b/superset-frontend/src/components/Checkbox/Checkbox.test.tsx index 14edb4de989b..f16eeae88ad9 100644 --- a/superset-frontend/src/components/Checkbox/Checkbox.test.tsx +++ b/superset-frontend/src/components/Checkbox/Checkbox.test.tsx @@ -16,62 +16,46 @@ * specific language governing permissions and limitations * under the License. 
*/ +import { fireEvent, render } from 'spec/helpers/testing-library'; -import { isValidElement } from 'react'; -import { ReactWrapper } from 'enzyme'; -import { - styledMount as mount, - styledShallow as shallow, -} from 'spec/helpers/theming'; +import Checkbox from 'src/components/Checkbox'; -import Checkbox, { - CheckboxChecked, - CheckboxUnchecked, -} from 'src/components/Checkbox'; +jest.mock('src/components/Checkbox/CheckboxIcons', () => ({ + CheckboxChecked: () => <div data-test="mock-CheckboxChecked" />, + CheckboxUnchecked: () => <div data-test="mock-CheckboxUnchecked" />, +})); -describe('Checkbox', () => { - let wrapper: ReactWrapper; - - it('renders the base component', () => { - expect( - isValidElement( - <Checkbox style={{}} checked={false} onChange={() => true} />, - ), - ).toBe(true); - }); - - describe('when unchecked', () => { - it('renders the unchecked component', () => { - const shallowWrapper = shallow( - <Checkbox style={{}} checked={false} onChange={() => true} />, - ); - expect(shallowWrapper.dive().find(CheckboxUnchecked)).toExist(); - }); - }); - - describe('when checked', () => { - it('renders the checked component', () => { - const shallowWrapper = shallow( - <Checkbox style={{}} checked onChange={() => true} />, - ); - expect(shallowWrapper.dive().find(CheckboxChecked)).toExist(); - }); - }); - - it('works with an onChange handler', () => { - const mockAction = jest.fn(); - wrapper = mount( - <Checkbox style={{}} checked={false} onChange={mockAction} />, +describe('when unchecked', () => { + test('renders the unchecked component', () => { + const { getByTestId } = render( + <Checkbox style={{}} checked={false} onChange={() => true} />, ); - wrapper.find('Checkbox').first().simulate('click'); - expect(mockAction).toHaveBeenCalled(); + expect(getByTestId('mock-CheckboxUnchecked')).toBeInTheDocument(); }); +}); - it('renders custom Checkbox styles without melting', () => { - wrapper = mount( - <Checkbox onChange={() => true} 
checked={false} style={{ opacity: 1 }} />, +describe('when checked', () => { + test('renders the checked component', () => { + const { getByTestId } = render( + <Checkbox style={{}} checked onChange={() => true} />, ); - expect(wrapper.find('Checkbox')).toExist(); - expect(wrapper.find('Checkbox')).toHaveStyle({ opacity: 1 }); + expect(getByTestId('mock-CheckboxChecked')).toBeInTheDocument(); }); }); + +test('works with an onChange handler', () => { + const mockAction = jest.fn(); + const { getByRole } = render( + <Checkbox style={{}} checked={false} onChange={mockAction} />, + ); + fireEvent.click(getByRole('checkbox')); + expect(mockAction).toHaveBeenCalled(); +}); + +test('renders custom Checkbox styles without melting', () => { + const { getByRole } = render( + <Checkbox onChange={() => true} checked={false} style={{ opacity: 1 }} />, + ); + expect(getByRole('checkbox')).toBeInTheDocument(); + expect(getByRole('checkbox')).toHaveStyle({ opacity: 1 }); +}); diff --git a/superset-frontend/src/components/ConfirmStatusChange/ConfirmStatusChange.test.jsx b/superset-frontend/src/components/ConfirmStatusChange/ConfirmStatusChange.test.jsx index fea20af988ad..bb8da13af164 100644 --- a/superset-frontend/src/components/ConfirmStatusChange/ConfirmStatusChange.test.jsx +++ b/superset-frontend/src/components/ConfirmStatusChange/ConfirmStatusChange.test.jsx @@ -16,48 +16,51 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { mount } from 'enzyme'; +import { fireEvent, render, waitFor } from 'spec/helpers/testing-library'; import Button from 'src/components/Button'; -import { act } from 'react-dom/test-utils'; -import { supersetTheme, ThemeProvider } from '@superset-ui/core'; import ConfirmStatusChange from 'src/components/ConfirmStatusChange'; -import Modal from 'src/components/Modal'; - -describe('ConfirmStatusChange', () => { - const mockedProps = { - title: 'please confirm', - description: 'are you sure?', - onConfirm: jest.fn(), - }; - const wrapper = mount( + +const mockedProps = { + title: 'please confirm', + description: 'are you sure?', + onConfirm: jest.fn(), +}; + +test('opens a confirm modal', () => { + const { getByTestId } = render( <ConfirmStatusChange {...mockedProps}> {confirm => ( <> - <Button id="btn1" onClick={confirm} /> + <Button data-test="btn1" onClick={confirm} /> </> )} </ConfirmStatusChange>, - { - wrappingComponent: ThemeProvider, - wrappingComponentProps: { theme: supersetTheme }, - }, ); - it('opens a confirm modal', () => { - act(() => { - wrapper.find('#btn1').first().props().onClick('foo'); - }); + fireEvent.click(getByTestId('btn1')); + + expect(getByTestId(`${mockedProps.title}-modal`)).toBeInTheDocument(); +}); + +test('calls the function on confirm', async () => { + const { getByTestId, getByRole } = render( + <ConfirmStatusChange {...mockedProps}> + {confirm => ( + <> + <Button data-test="btn1" onClick={() => confirm('foo')} /> + </> + )} + </ConfirmStatusChange>, + ); - wrapper.update(); + fireEvent.click(getByTestId('btn1')); - expect(wrapper.find(Modal)).toExist(); - }); + const confirmInput = getByTestId('delete-modal-input'); + fireEvent.change(confirmInput, { target: { value: 'DELETE' } }); - it('calls the function on confirm', () => { - act(() => { - wrapper.find(Button).last().props().onClick(); - }); + const confirmButton = getByRole('button', { name: 'Delete' }); + fireEvent.click(confirmButton); - 
expect(mockedProps.onConfirm).toHaveBeenCalledWith('foo'); - }); + await waitFor(() => expect(mockedProps.onConfirm).toHaveBeenCalledTimes(1)); + expect(mockedProps.onConfirm).toHaveBeenCalledWith('foo'); }); diff --git a/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx b/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx index bebdf01936c1..aa9cac748b06 100644 --- a/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx +++ b/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx @@ -56,7 +56,7 @@ const fakeDatabaseApiResult = { allow_file_upload: 'Allow Csv Upload', allow_ctas: 'Allow Ctas', allow_cvas: 'Allow Cvas', - allow_dml: 'Allow Dml', + allow_dml: 'Allow DDL and DML', allow_run_async: 'Allow Run Async', allows_cost_estimate: 'Allows Cost Estimate', allows_subquery: 'Allows Subquery', @@ -272,7 +272,6 @@ test('should display options in order of the api response', async () => { }); test('Should fetch the search keyword when total count exceeds initial options', async () => { - fetchMock.reset(); fetchMock.get( databaseApiRoute, { @@ -365,7 +364,7 @@ test('Sends the correct schema when changing the schema', async () => { }); await waitFor(() => expect(fetchMock.calls(databaseApiRoute).length).toBe(1)); rerender(<DatabaseSelector {...props} />); - expect(props.onSchemaChange).toBeCalledTimes(0); + expect(props.onSchemaChange).toHaveBeenCalledTimes(0); const select = screen.getByRole('combobox', { name: 'Select schema or type to search schemas', }); @@ -376,5 +375,5 @@ test('Sends the correct schema when changing the schema', async () => { await waitFor(() => expect(props.onSchemaChange).toHaveBeenCalledWith('information_schema'), ); - expect(props.onSchemaChange).toBeCalledTimes(1); + expect(props.onSchemaChange).toHaveBeenCalledTimes(1); }); diff --git a/superset-frontend/src/components/DatabaseSelector/index.tsx 
b/superset-frontend/src/components/DatabaseSelector/index.tsx index 4cf84115683e..0f1b6aa30c1f 100644 --- a/superset-frontend/src/components/DatabaseSelector/index.tsx +++ b/superset-frontend/src/components/DatabaseSelector/index.tsx @@ -260,7 +260,7 @@ export default function DatabaseSelector({ } const { - data: schemaData, + currentData: schemaData, isFetching: loadingSchemas, refetch: refetchSchemas, } = useSchemas({ @@ -392,6 +392,7 @@ export default function DatabaseSelector({ options={catalogOptions} showSearch value={currentCatalog || undefined} + allowClear />, refreshIcon, ); @@ -418,6 +419,7 @@ export default function DatabaseSelector({ options={schemaOptions} showSearch value={currentSchema} + allowClear />, refreshIcon, ); diff --git a/superset-frontend/src/components/Datasource/ChangeDatasourceModal.test.jsx b/superset-frontend/src/components/Datasource/ChangeDatasourceModal.test.jsx index 2d30debf4670..64a9716ef427 100644 --- a/superset-frontend/src/components/Datasource/ChangeDatasourceModal.test.jsx +++ b/superset-frontend/src/components/Datasource/ChangeDatasourceModal.test.jsx @@ -16,16 +16,12 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { mount } from 'enzyme'; +import { waitFor, render, fireEvent } from 'spec/helpers/testing-library'; import configureStore from 'redux-mock-store'; import fetchMock from 'fetch-mock'; import thunk from 'redux-thunk'; -import { act } from 'react-dom/test-utils'; import sinon from 'sinon'; -import { supersetTheme, ThemeProvider } from '@superset-ui/core'; -import Modal from 'src/components/Modal'; import { ChangeDatasourceModal } from 'src/components/Datasource'; -import waitForComponentToPaint from 'spec/helpers/waitForComponentToPaint'; import mockDatasource from 'spec/fixtures/mockDatasource'; const mockStore = configureStore([thunk]); @@ -57,60 +53,40 @@ fetchMock.get(DATASOURCES_ENDPOINT, { result: [mockDatasource['7__table']] }); fetchMock.get(DATASOURCE_ENDPOINT, DATASOURCE_PAYLOAD); fetchMock.get(INFO_ENDPOINT, {}); -async function mountAndWait(props = mockedProps) { - const mounted = mount(<ChangeDatasourceModal store={store} {...props} />, { - wrappingComponent: ThemeProvider, - wrappingComponentProps: { theme: supersetTheme }, - }); - await waitForComponentToPaint(mounted); - - return mounted; -} - -describe('ChangeDatasourceModal', () => { - let wrapper; - - beforeEach(async () => { - wrapper = await mountAndWait(); - }); - - it('renders', () => { - expect(wrapper.find(ChangeDatasourceModal)).toHaveLength(1); - }); - - it('renders a Modal', () => { - expect(wrapper.find(Modal)).toExist(); - }); - - it('fetches datasources', async () => { - expect(fetchMock.calls(INFO_ENDPOINT)).toHaveLength(3); - }); - - it('renders confirmation message', async () => { - await waitForComponentToPaint(wrapper, 1000); - - act(() => { - wrapper.find('[data-test="datasource-link"]').at(0).props().onClick(); - }); - - await waitForComponentToPaint(wrapper); +afterEach(() => { + fetchMock.resetHistory(); +}); - expect(wrapper.find('.proceed-btn')).toExist(); +const setup = (props = mockedProps) => + render(<ChangeDatasourceModal {...props} />, { + useRedux: true, + 
store, }); - it('changes the datasource', async () => { - await waitForComponentToPaint(wrapper, 1000); +test('renders', () => { + const { getByTestId } = setup(); + expect(getByTestId('Swap dataset-modal')).toBeInTheDocument(); +}); - act(() => { - wrapper.find('[data-test="datasource-link"]').at(0).props().onClick(); - }); - await waitForComponentToPaint(wrapper); +test('fetches datasources', async () => { + setup(); + await waitFor(() => expect(fetchMock.calls(INFO_ENDPOINT)).toHaveLength(1)); +}); - act(() => { - wrapper.find('.proceed-btn').at(0).props().onClick(datasourceData); - }); - await waitForComponentToPaint(wrapper); +test('renders confirmation message', async () => { + const { findByTestId, getByRole } = setup(); + const confirmLink = await findByTestId('datasource-link'); + fireEvent.click(confirmLink); + expect(getByRole('button', { name: 'Proceed' })).toBeInTheDocument(); +}); - expect(fetchMock.calls(/api\/v1\/dataset\/7/)).toHaveLength(1); - }); +test('changes the datasource', async () => { + const { findByTestId, getByRole } = setup(); + const confirmLink = await findByTestId('datasource-link'); + fireEvent.click(confirmLink); + const proceedButton = getByRole('button', { name: 'Proceed' }); + fireEvent.click(proceedButton); + await waitFor(() => + expect(fetchMock.calls(/api\/v1\/dataset\/7/)).toHaveLength(1), + ); }); diff --git a/superset-frontend/src/components/Datasource/CollectionTable.test.jsx b/superset-frontend/src/components/Datasource/CollectionTable.test.jsx index 6ed5b7125037..ad4d7351ecf1 100644 --- a/superset-frontend/src/components/Datasource/CollectionTable.test.jsx +++ b/superset-frontend/src/components/Datasource/CollectionTable.test.jsx @@ -16,8 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { isValidElement } from 'react'; -import { shallow } from 'enzyme'; +import { render } from 'spec/helpers/testing-library'; import mockDatasource from 'spec/fixtures/mockDatasource'; import CollectionTable from './CollectionTable'; @@ -27,22 +26,13 @@ const props = { tableColumns: ['column_name', 'type', 'groupby'], }; -describe('CollectionTable', () => { - let wrapper; - let el; - - beforeEach(() => { - el = <CollectionTable {...props} />; - wrapper = shallow(el); - }); - - it('is valid', () => { - expect(isValidElement(el)).toBe(true); - }); - - it('renders a table', () => { - const { length } = mockDatasource['7__table'].columns; - expect(wrapper.find('table')).toExist(); - expect(wrapper.find('tbody tr.row')).toHaveLength(length); - }); +test('renders a table', () => { + const { length } = mockDatasource['7__table'].columns; + const { getByRole } = render(<CollectionTable {...props} />); + expect(getByRole('table')).toBeInTheDocument(); + expect( + getByRole('table') + .getElementsByTagName('tbody')[0] + .getElementsByClassName('row'), + ).toHaveLength(length); }); diff --git a/superset-frontend/src/components/Datasource/DatasourceEditor.jsx b/superset-frontend/src/components/Datasource/DatasourceEditor.jsx index 9282fda4a9a6..22c80d4c6934 100644 --- a/superset-frontend/src/components/Datasource/DatasourceEditor.jsx +++ b/superset-frontend/src/components/Datasource/DatasourceEditor.jsx @@ -1419,6 +1419,13 @@ class DatasourceEditor extends PureComponent { </> } /> + <Alert + css={theme => ({ marginBottom: theme.gridUnit * 4 })} + type="info" + message={t( + 'Upon updating the dataset query, navigate to the columns tab and click `SYNC COLUMNS FROM SOURCE` to get the updated columns.', + )} + /> <StyledTableTabs fullWidth={false} id="table-tabs" diff --git a/superset-frontend/src/components/Datasource/Field.test.tsx b/superset-frontend/src/components/Datasource/Field.test.tsx index 3e8fdefeaec4..2e696dca8cd3 100644 --- 
a/superset-frontend/src/components/Datasource/Field.test.tsx +++ b/superset-frontend/src/components/Datasource/Field.test.tsx @@ -16,40 +16,34 @@ * specific language governing permissions and limitations * under the License. */ +import { fireEvent, render, screen } from 'spec/helpers/testing-library'; -import { render, screen } from 'spec/helpers/testing-library'; -import { shallow } from 'enzyme'; -import TextAreaControl from 'src/explore/components/controls/TextAreaControl'; import Field from './Field'; -describe('Field', () => { - const defaultProps = { - fieldKey: 'mock', - value: '', - label: 'mock', - description: 'description', - control: <TextAreaControl />, - onChange: jest.fn(), - compact: false, - inline: false, - }; +const defaultProps = { + fieldKey: 'mock', + value: '', + label: 'mock', + description: 'description', + control: <input type="text" data-test="mock-text-control" />, + onChange: jest.fn(), + compact: false, + inline: false, +}; - it('should render', () => { - const { container } = render(<Field {...defaultProps} />); - expect(container).toBeInTheDocument(); - }); +test('should render', () => { + const { container } = render(<Field {...defaultProps} />); + expect(container).toBeInTheDocument(); +}); - it('should call onChange', () => { - const wrapper = shallow(<Field {...defaultProps} />); - const textArea = wrapper.find(TextAreaControl); - textArea.simulate('change', { target: { value: 'x' } }); - expect(defaultProps.onChange).toHaveBeenCalled(); - }); +test('should call onChange', () => { + const { getByTestId } = render(<Field {...defaultProps} />); + const textArea = getByTestId('mock-text-control'); + fireEvent.change(textArea, { target: { value: 'x' } }); + expect(defaultProps.onChange).toHaveBeenCalled(); +}); - it('should render compact', () => { - render(<Field {...defaultProps} compact />); - expect( - screen.queryByText(defaultProps.description), - ).not.toBeInTheDocument(); - }); +test('should render compact', () => { + 
render(<Field {...defaultProps} compact />); + expect(screen.queryByText(defaultProps.description)).not.toBeInTheDocument(); }); diff --git a/superset-frontend/src/components/DeleteModal/DeleteModal.test.tsx b/superset-frontend/src/components/DeleteModal/DeleteModal.test.tsx index 165a83537871..ae966f48ed26 100644 --- a/superset-frontend/src/components/DeleteModal/DeleteModal.test.tsx +++ b/superset-frontend/src/components/DeleteModal/DeleteModal.test.tsx @@ -77,12 +77,12 @@ test('Calling "onConfirm" only after typing "delete" in the input', () => { expect(props.onConfirm).toBeCalledTimes(0); // do not execute "onConfirm" if you have not typed "delete" - userEvent.click(screen.getByText('delete')); + userEvent.click(screen.getByText('Delete')); expect(props.onConfirm).toBeCalledTimes(0); // execute "onConfirm" if you have typed "delete" userEvent.type(screen.getByTestId('delete-modal-input'), 'delete'); - userEvent.click(screen.getByText('delete')); + userEvent.click(screen.getByText('Delete')); expect(props.onConfirm).toBeCalledTimes(1); // confirm input has been cleared diff --git a/superset-frontend/src/components/DeleteModal/index.tsx b/superset-frontend/src/components/DeleteModal/index.tsx index 647e683a4763..7c1847a81740 100644 --- a/superset-frontend/src/components/DeleteModal/index.tsx +++ b/superset-frontend/src/components/DeleteModal/index.tsx @@ -81,7 +81,7 @@ export default function DeleteModal({ disablePrimaryButton={disableChange} onHide={hide} onHandledPrimaryAction={confirm} - primaryButtonName={t('delete')} + primaryButtonName={t('Delete')} primaryButtonType="danger" show={open} title={title} diff --git a/superset-frontend/src/components/DynamicEditableTitle/index.tsx b/superset-frontend/src/components/DynamicEditableTitle/index.tsx index 6f2d6333a9f7..dd439194383a 100644 --- a/superset-frontend/src/components/DynamicEditableTitle/index.tsx +++ b/superset-frontend/src/components/DynamicEditableTitle/index.tsx @@ -20,6 +20,7 @@ import { ChangeEvent, 
KeyboardEvent, + memo, useCallback, useEffect, useLayoutEffect, @@ -72,144 +73,154 @@ const titleStyles = (theme: SupersetTheme) => css` position: absolute; left: -9999px; display: inline-block; + white-space: pre; } `; -export const DynamicEditableTitle = ({ - title, - placeholder, - onSave, - canEdit, - label, -}: DynamicEditableTitleProps) => { - const [isEditing, setIsEditing] = useState(false); - const [currentTitle, setCurrentTitle] = useState(title || ''); - const contentRef = useRef<HTMLInputElement>(null); - const [showTooltip, setShowTooltip] = useState(false); - - const { width: inputWidth, ref: sizerRef } = useResizeDetector(); - const { width: containerWidth, ref: containerRef } = useResizeDetector({ - refreshMode: 'debounce', - }); - - useEffect(() => { - setCurrentTitle(title); - }, [title]); - - useEffect(() => { - if (isEditing && contentRef?.current) { - contentRef.current.focus(); - // move cursor and scroll to the end - if (contentRef.current.setSelectionRange) { - const { length } = contentRef.current.value; - contentRef.current.setSelectionRange(length, length); - contentRef.current.scrollLeft = contentRef.current.scrollWidth; +export const DynamicEditableTitle = memo( + ({ + title, + placeholder, + onSave, + canEdit, + label, + }: DynamicEditableTitleProps) => { + const [isEditing, setIsEditing] = useState(false); + const [currentTitle, setCurrentTitle] = useState(title || ''); + const contentRef = useRef<HTMLInputElement>(null); + const [showTooltip, setShowTooltip] = useState(false); + + const { width: inputWidth, ref: sizerRef } = useResizeDetector(); + const { width: containerWidth, ref: containerRef } = useResizeDetector({ + refreshMode: 'debounce', + }); + + useEffect(() => { + setCurrentTitle(title); + }, [title]); + + useEffect(() => { + if (isEditing && contentRef?.current) { + contentRef.current.focus(); + // move cursor and scroll to the end + if (contentRef.current.setSelectionRange) { + const { length } = 
contentRef.current.value; + contentRef.current.setSelectionRange(length, length); + contentRef.current.scrollLeft = contentRef.current.scrollWidth; + } } - } - }, [isEditing]); - - // a trick to make the input grow when user types text - // we make additional span component, place it somewhere out of view and copy input - // then we can measure the width of that span to resize the input element - useLayoutEffect(() => { - if (sizerRef?.current) { - sizerRef.current.textContent = currentTitle || placeholder; - } - }, [currentTitle, placeholder, sizerRef]); - - useEffect(() => { - if ( - contentRef.current && - contentRef.current.scrollWidth > contentRef.current.clientWidth - ) { - setShowTooltip(true); - } else { - setShowTooltip(false); - } - }, [inputWidth, containerWidth]); - - const handleClick = useCallback(() => { - if (!canEdit || isEditing) { - return; - } - setIsEditing(true); - }, [canEdit, isEditing]); - - const handleBlur = useCallback(() => { - if (!canEdit) { - return; - } - const formattedTitle = currentTitle.trim(); - setCurrentTitle(formattedTitle); - if (title !== formattedTitle) { - onSave(formattedTitle); - } - setIsEditing(false); - }, [canEdit, currentTitle, onSave, title]); + }, [isEditing]); + + // a trick to make the input grow when user types text + // we make additional span component, place it somewhere out of view and copy input + // then we can measure the width of that span to resize the input element + useLayoutEffect(() => { + if (sizerRef?.current) { + sizerRef.current.textContent = currentTitle || placeholder; + } + }, [currentTitle, placeholder, sizerRef]); + + useEffect(() => { + if ( + contentRef.current && + contentRef.current.scrollWidth > contentRef.current.clientWidth + ) { + setShowTooltip(true); + } else { + setShowTooltip(false); + } + }, [inputWidth, containerWidth]); - const handleChange = useCallback( - (ev: ChangeEvent<HTMLInputElement>) => { - if (!canEdit || !isEditing) { + const handleClick = useCallback(() => { + 
if (!canEdit || isEditing) { return; } - setCurrentTitle(ev.target.value); - }, - [canEdit, isEditing], - ); + setIsEditing(true); + }, [canEdit, isEditing]); - const handleKeyPress = useCallback( - (ev: KeyboardEvent<HTMLInputElement>) => { + const handleBlur = useCallback(() => { if (!canEdit) { return; } - if (ev.key === 'Enter') { - ev.preventDefault(); - contentRef.current?.blur(); + const formattedTitle = currentTitle.trim(); + setCurrentTitle(formattedTitle); + if (title !== formattedTitle) { + onSave(formattedTitle); } - }, - [canEdit], - ); - - return ( - <div css={titleStyles} ref={containerRef}> - <Tooltip - id="title-tooltip" - title={showTooltip && currentTitle && !isEditing ? currentTitle : null} - > - {canEdit ? ( - <input - data-test="editable-title-input" - className="dynamic-title-input" - aria-label={label ?? t('Title')} - ref={contentRef} - onChange={handleChange} - onBlur={handleBlur} - onClick={handleClick} - onKeyPress={handleKeyPress} - placeholder={placeholder} - value={currentTitle} - css={css` - cursor: ${isEditing ? 'text' : 'pointer'}; - - ${inputWidth && - inputWidth > 0 && - css` - width: ${inputWidth + 1}px; + setIsEditing(false); + }, [canEdit, currentTitle, onSave, title]); + + const handleChange = useCallback( + (ev: ChangeEvent<HTMLInputElement>) => { + if (!canEdit || !isEditing) { + return; + } + setCurrentTitle(ev.target.value); + }, + [canEdit, isEditing], + ); + + const handleKeyPress = useCallback( + (ev: KeyboardEvent<HTMLInputElement>) => { + if (!canEdit) { + return; + } + if (ev.key === 'Enter') { + ev.preventDefault(); + contentRef.current?.blur(); + } + }, + [canEdit], + ); + + return ( + <div css={titleStyles} ref={containerRef}> + <Tooltip + id="title-tooltip" + title={ + showTooltip && currentTitle && !isEditing ? currentTitle : null + } + > + {canEdit ? ( + <input + data-test="editable-title-input" + className="dynamic-title-input" + aria-label={label ?? 
t('Title')} + ref={contentRef} + onChange={handleChange} + onBlur={handleBlur} + onClick={handleClick} + onKeyPress={handleKeyPress} + placeholder={placeholder} + value={currentTitle} + css={css` + cursor: ${isEditing ? 'text' : 'pointer'}; + + ${inputWidth && + inputWidth > 0 && + css` + width: ${inputWidth + 1}px; + `} `} - `} - /> - ) : ( - <span - className="dynamic-title" - aria-label={label ?? t('Title')} - ref={contentRef} - data-test="editable-title" - > - {currentTitle} - </span> - )} - </Tooltip> - <span ref={sizerRef} className="input-sizer" aria-hidden tabIndex={-1} /> - </div> - ); -}; + /> + ) : ( + <span + className="dynamic-title" + aria-label={label ?? t('Title')} + ref={contentRef} + data-test="editable-title" + > + {currentTitle} + </span> + )} + </Tooltip> + <span + ref={sizerRef} + className="input-sizer" + aria-hidden + tabIndex={-1} + /> + </div> + ); + }, +); diff --git a/superset-frontend/src/components/EditableTitle/EditableTitle.test.tsx b/superset-frontend/src/components/EditableTitle/EditableTitle.test.tsx index 56402502e9c6..12ecfacf780e 100644 --- a/superset-frontend/src/components/EditableTitle/EditableTitle.test.tsx +++ b/superset-frontend/src/components/EditableTitle/EditableTitle.test.tsx @@ -16,88 +16,98 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { isValidElement } from 'react'; -import { shallow } from 'enzyme'; -import sinon from 'sinon'; +import { fireEvent, getByRole, render } from 'spec/helpers/testing-library'; import EditableTable from 'src/components/EditableTitle'; -describe('EditableTitle', () => { - const callback = sinon.spy(); - const mockProps = { - title: 'my title', - canEdit: true, - onSaveTitle: callback, - }; - const mockEvent = { - target: { - value: 'new title', - }, - }; - let editableWrapper = shallow(<EditableTable {...mockProps} />); - const notEditableWrapper = shallow( - <EditableTable title="my title" onSaveTitle={callback} />, +const mockEvent = { + target: { + value: 'new title', + }, +}; +const mockProps = { + title: 'my title', + canEdit: true, + onSaveTitle: jest.fn(), +}; + +test('should render title', () => { + const { getByRole } = render(<EditableTable {...mockProps} />); + expect(getByRole('button')).toBeInTheDocument(); + expect(getByRole('button')).toHaveValue(mockProps.title); +}); +test('should not render an input if it is not editable', () => { + const { queryByRole } = render( + <EditableTable title="my title" onSaveTitle={jest.fn()} />, ); - it('is valid', () => { - expect(isValidElement(<EditableTable {...mockProps} />)).toBe(true); - }); - it('should render title', () => { - const titleElement = editableWrapper.find('input'); - expect(titleElement.props().value).toBe('my title'); - expect(titleElement.props().type).toBe('button'); - }); - it('should not render an input if it is not editable', () => { - expect(notEditableWrapper.find('input')).not.toExist(); + expect(queryByRole('button')).not.toBeInTheDocument(); +}); + +describe('should handle click', () => { + test('should change title', () => { + const { getByRole, container } = render(<EditableTable {...mockProps} />); + fireEvent.click(getByRole('button')); + expect(container.querySelector('input')?.getAttribute('type')).toEqual( + 'text', + ); }); +}); - describe('should handle click', () => { 
- it('should change title', () => { - editableWrapper.find('input').simulate('click'); - expect(editableWrapper.find('input').props().type).toBe('text'); - }); +describe('should handle change', () => { + test('should change title', () => { + const { getByTestId, container } = render( + <EditableTable {...mockProps} editing />, + ); + fireEvent.change(getByTestId('editable-title-input'), mockEvent); + expect(container.querySelector('input')).toHaveValue('new title'); }); +}); - describe('should handle change', () => { - afterEach(() => { - editableWrapper = shallow(<EditableTable {...mockProps} />); - }); - it('should change title', () => { - editableWrapper.find('input').simulate('change', mockEvent); - expect(editableWrapper.find('input').props().value).toBe('new title'); - }); +describe('should handle blur', () => { + const setup = (overrides: Partial<typeof mockProps> = {}) => { + const selectors = render(<EditableTable {...mockProps} {...overrides} />); + fireEvent.click(selectors.getByRole('button')); + return selectors; + }; + + test('default input type should be text', () => { + const { container } = setup(); + expect(container.querySelector('input')?.getAttribute('type')).toEqual( + 'text', + ); }); - describe('should handle blur', () => { - beforeEach(() => { - editableWrapper.find('input').simulate('click'); - }); - afterEach(() => { - callback.resetHistory(); - editableWrapper = shallow(<EditableTable {...mockProps} />); - }); + test('should trigger callback', () => { + const callback = jest.fn(); + const { getByTestId, container } = setup({ onSaveTitle: callback }); + fireEvent.change(getByTestId('editable-title-input'), mockEvent); + fireEvent.blur(getByTestId('editable-title-input')); + expect(callback).toHaveBeenCalledTimes(1); + expect(callback).toHaveBeenCalledWith('new title'); + expect(container.querySelector('input')?.getAttribute('type')).toEqual( + 'button', + ); + }); - it('default input type should be text', () => { - 
expect(editableWrapper.find('input').props().type).toBe('text'); - }); + test('should not trigger callback', () => { + const callback = jest.fn(); + const { getByTestId, container } = setup({ onSaveTitle: callback }); + fireEvent.blur(getByTestId('editable-title-input')); + expect(container.querySelector('input')?.getAttribute('type')).toEqual( + 'button', + ); + // no change + expect(callback).not.toHaveBeenCalled(); + }); - it('should trigger callback', () => { - editableWrapper.find('input').simulate('change', mockEvent); - editableWrapper.find('input').simulate('blur'); - expect(editableWrapper.find('input').props().type).toBe('button'); - expect(callback.callCount).toBe(1); - expect(callback.getCall(0).args[0]).toBe('new title'); - }); - it('should not trigger callback', () => { - editableWrapper.find('input').simulate('blur'); - expect(editableWrapper.find('input').props().type).toBe('button'); - // no change - expect(callback.callCount).toBe(0); - }); - it('should not save empty title', () => { - editableWrapper.find('input').simulate('blur'); - expect(editableWrapper.find('input').props().type).toBe('button'); - expect(editableWrapper.find('input').props().value).toBe('my title'); - expect(callback.callCount).toBe(0); - }); + test('should not save empty title', () => { + const callback = jest.fn(); + const { getByTestId, container } = setup({ onSaveTitle: callback }); + fireEvent.blur(getByTestId('editable-title-input')); + expect(container.querySelector('input')?.getAttribute('type')).toEqual( + 'button', + ); + expect(getByRole(container, 'button')).toHaveValue(mockProps.title); + expect(callback).not.toHaveBeenCalled(); }); }); diff --git a/superset-frontend/src/components/FacePile/FacePile.test.tsx b/superset-frontend/src/components/FacePile/FacePile.test.tsx index 0e7b4516493f..ce8acdc1af3b 100644 --- a/superset-frontend/src/components/FacePile/FacePile.test.tsx +++ b/superset-frontend/src/components/FacePile/FacePile.test.tsx @@ -16,9 +16,10 @@ * 
specific language governing permissions and limitations * under the License. */ +import { Provider } from 'react-redux'; import { styledMount as mount } from 'spec/helpers/theming'; - import { Avatar } from 'src/components'; +import { store } from 'src/views/store'; import FacePile from '.'; import { getRandomColor } from './utils'; @@ -29,7 +30,11 @@ const users = [...new Array(10)].map((_, i) => ({ })); describe('FacePile', () => { - const wrapper = mount(<FacePile users={users} />); + const wrapper = mount( + <Provider store={store}> + <FacePile users={users} /> + </Provider>, + ); it('is a valid element', () => { expect(wrapper.find(FacePile)).toExist(); diff --git a/superset-frontend/src/components/FacePile/index.tsx b/superset-frontend/src/components/FacePile/index.tsx index 526babbc3ef3..b5586304d364 100644 --- a/superset-frontend/src/components/FacePile/index.tsx +++ b/superset-frontend/src/components/FacePile/index.tsx @@ -16,17 +16,21 @@ * specific language governing permissions and limitations * under the License. 
*/ +import type Owner from 'src/types/Owner'; import { getCategoricalSchemeRegistry, styled, + isFeatureEnabled, + FeatureFlag, SupersetTheme, } from '@superset-ui/core'; +import getOwnerName from 'src/utils/getOwnerName'; import { Tooltip } from 'src/components/Tooltip'; import { Avatar } from 'src/components'; import { getRandomColor } from './utils'; interface FacePileProps { - users: { first_name: string; last_name: string; id: number }[]; + users: Owner[]; maxCount?: number; } @@ -55,11 +59,14 @@ const StyledGroup = styled(Avatar.Group)` export default function FacePile({ users, maxCount = 4 }: FacePileProps) { return ( <StyledGroup maxCount={maxCount}> - {users.map(({ first_name, last_name, id }) => { - const name = `${first_name} ${last_name}`; + {users.map(user => { + const { first_name, last_name, id } = user; + const name = getOwnerName(user); const uniqueKey = `${id}-${first_name}-${last_name}`; const color = getRandomColor(uniqueKey, colorList); - const avatarUrl = `/api/v1/user/${id}/avatar.png`; + const avatarUrl = isFeatureEnabled(FeatureFlag.SlackEnableAvatars) + ? `/api/v1/user/${id}/avatar.png` + : undefined; return ( <Tooltip key={name} title={name} placement="top"> <StyledAvatar diff --git a/superset-frontend/src/components/FormRow/FormRow.test.jsx b/superset-frontend/src/components/FormRow/FormRow.test.jsx index 68d9c5691dcf..62853a13b108 100644 --- a/superset-frontend/src/components/FormRow/FormRow.test.jsx +++ b/superset-frontend/src/components/FormRow/FormRow.test.jsx @@ -16,42 +16,44 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { shallow } from 'enzyme'; +import { render } from 'spec/helpers/testing-library'; -import { InfoTooltipWithTrigger } from '@superset-ui/chart-controls'; -import { Row, Col } from 'src/components'; import TextControl from 'src/explore/components/controls/TextControl'; import FormRow from 'src/components/FormRow'; +jest.mock('@superset-ui/chart-controls', () => ({ + ...jest.requireActual('@superset-ui/chart-controls'), + InfoTooltipWithTrigger: () => <div data-test="mock-info-tooltip" />, +})); +jest.mock('src/components', () => ({ + ...jest.requireActual('src/components'), + Row: ({ children }) => <div data-test="mock-row">{children}</div>, + Col: ({ children }) => <div data-test="mock-col">{children}</div>, +})); + const defaultProps = { label: 'Hello', tooltip: 'A tooltip', control: <TextControl label="test_cbox" />, }; -describe('FormRow', () => { - let wrapper; - - const getWrapper = (overrideProps = {}) => { - const props = { - ...defaultProps, - ...overrideProps, - }; - return shallow(<FormRow {...props} />); +const setup = (overrideProps = {}) => { + const props = { + ...defaultProps, + ...overrideProps, }; + return render(<FormRow {...props} />); +}; - beforeEach(() => { - wrapper = getWrapper(); - }); - - it('renders an InfoTooltipWithTrigger only if needed', () => { - expect(wrapper.find(InfoTooltipWithTrigger)).toExist(); - wrapper = getWrapper({ tooltip: null }); - expect(wrapper.find(InfoTooltipWithTrigger)).not.toExist(); - }); +test('renders an InfoTooltipWithTrigger only if needed', () => { + const { getByTestId, queryByTestId, rerender } = setup(); + expect(getByTestId('mock-info-tooltip')).toBeInTheDocument(); + rerender(<FormRow {...defaultProps} tooltip={null} />); + expect(queryByTestId('mock-info-tooltip')).not.toBeInTheDocument(); +}); - it('renders a Row and 2 Cols', () => { - expect(wrapper.find(Row)).toExist(); - expect(wrapper.find(Col)).toHaveLength(2); - }); +test('renders a Row and 2 Cols', () => { + const { getByTestId, 
getAllByTestId } = setup(); + expect(getByTestId('mock-row')).toBeInTheDocument(); + expect(getAllByTestId('mock-col')).toHaveLength(2); }); diff --git a/superset-frontend/src/components/IconTooltip/IconTooltip.test.jsx b/superset-frontend/src/components/IconTooltip/IconTooltip.test.jsx index e75bfcc4c142..6aac65dd5851 100644 --- a/superset-frontend/src/components/IconTooltip/IconTooltip.test.jsx +++ b/superset-frontend/src/components/IconTooltip/IconTooltip.test.jsx @@ -16,25 +16,29 @@ * specific language governing permissions and limitations * under the License. */ -import { isValidElement } from 'react'; -import { shallow } from 'enzyme'; -import { Tooltip } from 'src/components/Tooltip'; +import { render } from 'spec/helpers/testing-library'; import { IconTooltip } from 'src/components/IconTooltip'; -describe('IconTooltip', () => { - const mockedProps = { - tooltip: 'This is a tooltip', - }; - it('renders', () => { - expect(isValidElement(<IconTooltip>TEST</IconTooltip>)).toBe(true); - }); - it('renders with props', () => { - expect( - isValidElement(<IconTooltip {...mockedProps}>TEST</IconTooltip>), - ).toBe(true); - }); - it('renders a tooltip', () => { - const wrapper = shallow(<IconTooltip {...mockedProps}>TEST</IconTooltip>); - expect(wrapper.find(Tooltip)).toExist(); - }); +jest.mock('src/components/Tooltip', () => ({ + Tooltip: () => <div data-test="mock-tooltip" />, +})); + +const mockedProps = { + tooltip: 'This is a tooltip', +}; +test('renders', () => { + const { container } = render(<IconTooltip>TEST</IconTooltip>); + expect(container).toBeInTheDocument(); +}); +test('renders with props', () => { + const { container } = render( + <IconTooltip {...mockedProps}>TEST</IconTooltip>, + ); + expect(container).toBeInTheDocument(); +}); +test('renders a tooltip', () => { + const { getByTestId } = render( + <IconTooltip {...mockedProps}>TEST</IconTooltip>, + ); + expect(getByTestId('mock-tooltip')).toBeInTheDocument(); }); diff --git 
a/superset-frontend/src/components/Icons/index.tsx b/superset-frontend/src/components/Icons/index.tsx index ee09aceb10c5..bcd75873676d 100644 --- a/superset-frontend/src/components/Icons/index.tsx +++ b/superset-frontend/src/components/Icons/index.tsx @@ -105,6 +105,7 @@ const IconFileNames = [ 'keyboard', 'layers', 'lightbulb', + 'ai', 'line-chart-tile', 'link', 'list', diff --git a/superset-frontend/src/components/ImportModal/ImportModal.test.tsx b/superset-frontend/src/components/ImportModal/ImportModal.test.tsx index 3998890e3da5..69bd1a8e1aa5 100644 --- a/superset-frontend/src/components/ImportModal/ImportModal.test.tsx +++ b/superset-frontend/src/components/ImportModal/ImportModal.test.tsx @@ -16,18 +16,14 @@ * specific language governing permissions and limitations * under the License. */ -import { act } from 'react-dom/test-utils'; import thunk from 'redux-thunk'; import configureStore from 'redux-mock-store'; -import { styledMount as mount } from 'spec/helpers/theming'; -import { ReactWrapper } from 'enzyme'; +import { fireEvent, render, waitFor } from 'spec/helpers/testing-library'; import fetchMock from 'fetch-mock'; -import waitForComponentToPaint from 'spec/helpers/waitForComponentToPaint'; -import { Upload } from 'src/components'; -import Button from 'src/components/Button'; import { ImportResourceName } from 'src/views/CRUD/types'; -import ImportModelsModal from 'src/components/ImportModal'; -import Modal from 'src/components/Modal'; +import ImportModelsModal, { + ImportModelsModalProps, +} from 'src/components/ImportModal'; const mockStore = configureStore([thunk]); const store = mockStore({}); @@ -48,148 +44,98 @@ const requiredProps = { onHide: () => {}, }; -describe('ImportModelsModal', () => { - let wrapper: ReactWrapper; - - beforeEach(() => { - wrapper = mount(<ImportModelsModal {...requiredProps} />, { - context: { store }, - }); - }); - - afterEach(() => { - jest.clearAllMocks(); - }); +afterEach(() => { + jest.clearAllMocks(); +}); - 
it('renders', () => { - expect(wrapper.find(ImportModelsModal)).toExist(); - }); +const setup = (overrides: Partial<ImportModelsModalProps> = {}) => + render(<ImportModelsModal {...requiredProps} {...overrides} />, { store }); - it('renders a Modal', () => { - expect(wrapper.find(Modal)).toExist(); - }); +test('renders', () => { + const { container } = setup(); + expect(container).toBeInTheDocument(); +}); - it('renders "Import database" header', () => { - expect(wrapper.find('h4').text()).toEqual('Import database'); - }); +test('renders a Modal', () => { + const { getByTestId } = setup(); + expect(getByTestId('model-modal')).toBeInTheDocument(); +}); - it('renders a file input field', () => { - expect(wrapper.find('input[type="file"]')).toExist(); - }); +test('renders "Import database" header', () => { + const { getByText } = setup(); + expect(getByText('Import database')).toBeInTheDocument(); +}); - it('should render the close, file, import and cancel buttons', () => { - expect(wrapper.find('button')).toHaveLength(4); - }); +test('renders a file input field', () => { + setup(); + expect(document.querySelector('input[type="file"]')).toBeInTheDocument(); +}); - it('should render the import button initially disabled', () => { - expect(wrapper.find(Button).at(2).prop('disabled')).toBe(true); - }); +test('should render the close, file, import and cancel buttons', () => { + setup(); + expect(document.querySelectorAll('button')).toHaveLength(4); +}); - it('should render the import button enabled when a file is selected', () => { - const file = new File([new ArrayBuffer(1)], 'model_export.zip'); - act(() => { - const handler = wrapper.find(Upload).prop('onChange'); - if (handler) { - handler({ - fileList: [], - file: { - name: 'model_export.zip', - originFileObj: file, - uid: '-1', - size: 0, - type: 'zip', - }, - }); - } - }); - wrapper.update(); - expect(wrapper.find(Button).at(2).prop('disabled')).toBe(false); - }); +test('should render the import button initially 
disabled', () => { + const { getByRole } = setup(); + expect(getByRole('button', { name: 'Import' })).toBeDisabled(); +}); - it('should POST with request header `Accept: application/json`', async () => { - const file = new File([new ArrayBuffer(1)], 'model_export.zip'); - act(() => { - const handler = wrapper.find(Upload).prop('onChange'); - if (handler) { - handler({ - fileList: [], - file: { - name: 'model_export.zip', - originFileObj: file, - uid: '-1', - size: 0, - type: 'zip', - }, - }); - } - }); - wrapper.update(); - - wrapper.find(Button).at(2).simulate('click'); - await waitForComponentToPaint(wrapper); - expect(fetchMock.calls(DATABASE_IMPORT_URL)[0][1]?.headers).toStrictEqual({ - Accept: 'application/json', - 'X-CSRFToken': '1234', - }); - }); +test('should render the import button enabled when a file is selected', async () => { + const file = new File([new ArrayBuffer(1)], 'model_export.zip'); + const { getByTestId, getByRole } = setup(); + await waitFor(() => + fireEvent.change(getByTestId('model-file-input'), { + target: { + files: [file], + }, + }), + ); + expect(getByRole('button', { name: 'Import' })).toBeEnabled(); +}); - it('should render password fields when needed for import', () => { - const wrapperWithPasswords = mount( - <ImportModelsModal - {...requiredProps} - passwordFields={['databases/examples.yaml']} - />, - { - context: { store }, +test('should POST with request header `Accept: application/json`', async () => { + const file = new File([new ArrayBuffer(1)], 'model_export.zip'); + const { getByTestId, getByRole } = setup(); + await waitFor(() => + fireEvent.change(getByTestId('model-file-input'), { + target: { + files: [file], }, - ); - expect(wrapperWithPasswords.find('input[type="password"]')).toExist(); + }), + ); + fireEvent.click(getByRole('button', { name: 'Import' })); + await waitFor(() => + expect(fetchMock.calls(DATABASE_IMPORT_URL)).toHaveLength(1), + ); + 
expect(fetchMock.calls(DATABASE_IMPORT_URL)[0][1]?.headers).toStrictEqual({ + Accept: 'application/json', + 'X-CSRFToken': '1234', }); +}); - it('should render ssh_tunnel password fields when needed for import', () => { - const wrapperWithPasswords = mount( - <ImportModelsModal - {...requiredProps} - sshTunnelPasswordFields={['databases/examples.yaml']} - />, - { - context: { store }, - }, - ); - expect( - wrapperWithPasswords.find('[data-test="ssh_tunnel_password"]'), - ).toExist(); +test('should render password fields when needed for import', () => { + setup({ passwordFields: ['databases/examples.yaml'] }); + expect(document.querySelector('input[type="password"]')).toBeInTheDocument(); +}); + +test('should render ssh_tunnel password fields when needed for import', () => { + const { getByTestId } = setup({ + sshTunnelPasswordFields: ['databases/examples.yaml'], }); + expect(getByTestId('ssh_tunnel_password')).toBeInTheDocument(); +}); - it('should render ssh_tunnel private_key fields when needed for import', () => { - const wrapperWithPasswords = mount( - <ImportModelsModal - {...requiredProps} - sshTunnelPrivateKeyFields={['databases/examples.yaml']} - />, - { - context: { store }, - }, - ); - expect( - wrapperWithPasswords.find('[data-test="ssh_tunnel_private_key"]'), - ).toExist(); +test('should render ssh_tunnel private_key fields when needed for import', () => { + const { getByTestId } = setup({ + sshTunnelPrivateKeyFields: ['databases/examples.yaml'], }); + expect(getByTestId('ssh_tunnel_private_key')).toBeInTheDocument(); +}); - it('should render ssh_tunnel private_key_password fields when needed for import', () => { - const wrapperWithPasswords = mount( - <ImportModelsModal - {...requiredProps} - sshTunnelPrivateKeyPasswordFields={['databases/examples.yaml']} - />, - { - context: { store }, - }, - ); - expect( - wrapperWithPasswords.find( - '[data-test="ssh_tunnel_private_key_password"]', - ), - ).toExist(); +test('should render ssh_tunnel 
private_key_password fields when needed for import', () => { + const { getByTestId } = setup({ + sshTunnelPrivateKeyPasswordFields: ['databases/examples.yaml'], }); + expect(getByTestId('ssh_tunnel_private_key_password')).toBeInTheDocument(); }); diff --git a/superset-frontend/src/components/Label/Label.test.tsx b/superset-frontend/src/components/Label/Label.test.tsx index 36758b5a3dbd..fdf85b9c9336 100644 --- a/superset-frontend/src/components/Label/Label.test.tsx +++ b/superset-frontend/src/components/Label/Label.test.tsx @@ -16,42 +16,28 @@ * specific language governing permissions and limitations * under the License. */ +import { fireEvent, render } from 'spec/helpers/testing-library'; -import { isValidElement } from 'react'; -import { ReactWrapper } from 'enzyme'; -import { styledMount as mount } from 'spec/helpers/theming'; import Label from '.'; import { LabelGallery, options } from './Label.stories'; -describe('Label', () => { - let wrapper: ReactWrapper; - - // test the basic component - it('renders the base component (no onClick)', () => { - expect(isValidElement(<Label />)).toBe(true); - }); - - it('renders with role=undefined when onClick is not present', () => { - wrapper = mount(<Label />); - expect(wrapper.find('span').prop('role')).toBeUndefined(); - }); - - it('renders with role="button" when onClick is present', () => { - const mockAction = jest.fn(); - wrapper = mount(<Label onClick={mockAction} />); - expect(wrapper.find('span').prop('role')).toBe('button'); - }); +// test the basic component +test('renders the base component (no onClick)', () => { + const { container } = render(<Label />); + expect(container).toBeInTheDocument(); +}); - it('works with an onClick handler', () => { - const mockAction = jest.fn(); - wrapper = mount(<Label onClick={mockAction} />); - wrapper.find(Label).simulate('click'); - expect(mockAction).toHaveBeenCalled(); - }); +test('works with an onClick handler', () => { + const mockAction = jest.fn(); + const { getByText 
} = render(<Label onClick={mockAction}>test</Label>); + fireEvent.click(getByText('test')); + expect(mockAction).toHaveBeenCalled(); +}); - // test stories from the storybook! - it('renders all the storybook gallery variants', () => { - wrapper = mount(<LabelGallery />); - expect(wrapper.find(Label).length).toEqual(options.length * 2); - }); +// test stories from the storybook! +test('renders all the storybook gallery variants', () => { + const { container } = render(<LabelGallery />); + expect(container.querySelectorAll('.ant-tag')).toHaveLength( + options.length * 2, + ); }); diff --git a/superset-frontend/src/components/LastUpdated/LastUpdated.test.tsx b/superset-frontend/src/components/LastUpdated/LastUpdated.test.tsx index a7a2b2c837d4..f020a47448c6 100644 --- a/superset-frontend/src/components/LastUpdated/LastUpdated.test.tsx +++ b/superset-frontend/src/components/LastUpdated/LastUpdated.test.tsx @@ -16,30 +16,22 @@ * specific language governing permissions and limitations * under the License. 
*/ +import { fireEvent, render } from 'spec/helpers/testing-library'; -import { MouseEvent } from 'react'; -import { ReactWrapper } from 'enzyme'; -import { styledMount as mount } from 'spec/helpers/theming'; -import waitForComponentToPaint from 'spec/helpers/waitForComponentToPaint'; import LastUpdated from '.'; -describe('LastUpdated', () => { - let wrapper: ReactWrapper; - const updatedAt = new Date('Sat Dec 12 2020 00:00:00 GMT-0800'); +const updatedAt = new Date('Sat Dec 12 2020 00:00:00 GMT-0800'); - it('renders the base component (no refresh)', () => { - const wrapper = mount(<LastUpdated updatedAt={updatedAt} />); - expect(/^Last Updated .+$/.test(wrapper.text())).toBe(true); - }); +test('renders the base component (no refresh)', () => { + const { getByText } = render(<LastUpdated updatedAt={updatedAt} />); + expect(getByText(/^Last Updated .+$/)).toBeInTheDocument(); +}); - it('renders a refresh action', async () => { - const mockAction = jest.fn(); - wrapper = mount(<LastUpdated updatedAt={updatedAt} update={mockAction} />); - await waitForComponentToPaint(wrapper); - const props = wrapper.find('[aria-label="refresh"]').first().props(); - if (props.onClick) { - props.onClick({} as MouseEvent); - } - expect(mockAction).toHaveBeenCalled(); - }); +test('renders a refresh action', async () => { + const mockAction = jest.fn(); + const { getByLabelText } = render( + <LastUpdated updatedAt={updatedAt} update={mockAction} />, + ); + fireEvent.click(getByLabelText('refresh')); + expect(mockAction).toHaveBeenCalled(); }); diff --git a/superset-frontend/src/components/ListView/ListView.tsx b/superset-frontend/src/components/ListView/ListView.tsx index c9a39e71754f..8e96f0641087 100644 --- a/superset-frontend/src/components/ListView/ListView.tsx +++ b/superset-frontend/src/components/ListView/ListView.tsx @@ -457,7 +457,7 @@ function ListView<T extends object = any>({ ) : ( <EmptyStateBig {...emptyState} - title={emptyState?.title || t('No Data')} + 
title={emptyState?.title || t('No Data')} image={emptyState?.image || 'filter-results.svg'} /> )} diff --git a/superset-frontend/src/components/MessageToasts/Toast.test.jsx b/superset-frontend/src/components/MessageToasts/Toast.test.jsx index 810c6a32baa2..130eaf156fb7 100644 --- a/superset-frontend/src/components/MessageToasts/Toast.test.jsx +++ b/superset-frontend/src/components/MessageToasts/Toast.test.jsx @@ -16,10 +16,8 @@ * specific language governing permissions and limitations * under the License. */ -import { mount } from 'enzyme'; -import { ThemeProvider, supersetTheme } from '@superset-ui/core'; +import { fireEvent, render, waitFor } from 'spec/helpers/testing-library'; import Toast from 'src/components/MessageToasts/Toast'; -import { act } from 'react-dom/test-utils'; import mockMessageToasts from './mockMessageToasts'; const props = { @@ -27,35 +25,22 @@ const props = { onCloseToast() {}, }; -const setup = overrideProps => - mount(<Toast {...props} {...overrideProps} />, { - wrappingComponent: ThemeProvider, - wrappingComponentProps: { theme: supersetTheme }, - }); +const setup = overrideProps => render(<Toast {...props} {...overrideProps} />); -describe('Toast', () => { - it('should render', () => { - const wrapper = setup(); - expect(wrapper.find('[data-test="toast-container"]')).toExist(); - }); - - it('should render toastText within the div', () => { - const wrapper = setup(); - const container = wrapper.find('[data-test="toast-container"]'); - expect(container.hostNodes().childAt(1).text()).toBe(props.toast.text); - }); +test('should render', () => { + const { getByTestId } = setup(); + expect(getByTestId('toast-container')).toBeInTheDocument(); +}); - it('should call onCloseToast upon toast dismissal', async () => - act( - () => - new Promise(done => { - const onCloseToast = id => { - expect(id).toBe(props.toast.id); - done(); - }; +test('should render toastText within the div', () => { + const { getByTestId } = setup(); + 
expect(getByTestId('toast-container')).toHaveTextContent(props.toast.text); +}); - const wrapper = setup({ onCloseToast }); - wrapper.find('[data-test="close-button"]').props().onClick(); - }), - )); +test('should call onCloseToast upon toast dismissal', async () => { + const onCloseToast = jest.fn(); + const { getByTestId } = setup({ onCloseToast }); + fireEvent.click(getByTestId('close-button')); + await waitFor(() => expect(onCloseToast).toHaveBeenCalledTimes(1)); + expect(onCloseToast).toHaveBeenCalledWith(props.toast.id); }); diff --git a/superset-frontend/src/components/MessageToasts/ToastPresenter.test.jsx b/superset-frontend/src/components/MessageToasts/ToastPresenter.test.jsx index 73a9ac4b06e5..f0b4ba738c0d 100644 --- a/superset-frontend/src/components/MessageToasts/ToastPresenter.test.jsx +++ b/superset-frontend/src/components/MessageToasts/ToastPresenter.test.jsx @@ -16,35 +16,33 @@ * specific language governing permissions and limitations * under the License. */ -import { shallow } from 'enzyme'; -import Toast from 'src/components/MessageToasts/Toast'; +import { fireEvent, render, waitFor } from 'spec/helpers/testing-library'; + import ToastPresenter from 'src/components/MessageToasts/ToastPresenter'; import mockMessageToasts from './mockMessageToasts'; -describe('ToastPresenter', () => { - const props = { - toasts: mockMessageToasts, - removeToast() {}, - }; +const props = { + toasts: mockMessageToasts, + removeToast() {}, +}; - function setup(overrideProps) { - const wrapper = shallow(<ToastPresenter {...props} {...overrideProps} />); - return wrapper; - } +function setup(overrideProps) { + return render(<ToastPresenter {...props} {...overrideProps} />); +} - it('should render a div with id toast-presenter', () => { - const wrapper = setup(); - expect(wrapper.find('#toast-presenter')).toExist(); - }); +test('should render a div with id toast-presenter', () => { + const { container } = setup(); + 
expect(container.querySelector('#toast-presenter')).toBeInTheDocument(); +}); - it('should render a Toast for each toast object', () => { - const wrapper = setup(); - expect(wrapper.find(Toast)).toHaveLength(props.toasts.length); - }); +test('should render a Toast for each toast object', () => { + const { getAllByRole } = setup(); + expect(getAllByRole('alert')).toHaveLength(props.toasts.length); +}); - it('should pass removeToast to the Toast component', () => { - const removeToast = () => {}; - const wrapper = setup({ removeToast }); - expect(wrapper.find(Toast).first().prop('onCloseToast')).toBe(removeToast); - }); +test('should pass removeToast to the Toast component', async () => { + const removeToast = jest.fn(); + const { getAllByTestId } = setup({ removeToast }); + fireEvent.click(getAllByTestId('close-button')[0]); + await waitFor(() => expect(removeToast).toHaveBeenCalledTimes(1)); }); diff --git a/superset-frontend/src/components/ProgressBar/ProgressBar.stories.tsx b/superset-frontend/src/components/ProgressBar/ProgressBar.stories.tsx index ffab33725530..62ad8ae203e5 100644 --- a/superset-frontend/src/components/ProgressBar/ProgressBar.stories.tsx +++ b/superset-frontend/src/components/ProgressBar/ProgressBar.stories.tsx @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ +import { AntdThemeProvider } from 'src/components/AntdThemeProvider'; import ProgressBar, { ProgressBarProps } from '.'; export default { @@ -24,37 +25,67 @@ export default { }; export const InteractiveProgressBar = (args: ProgressBarProps) => ( - <ProgressBar {...args} /> + <AntdThemeProvider> + <ProgressBar {...args} type="line" /> + </AntdThemeProvider> ); -InteractiveProgressBar.args = { +export const InteractiveProgressCircle = (args: ProgressBarProps) => ( + <AntdThemeProvider> + <ProgressBar {...args} type="circle" /> + </AntdThemeProvider> +); + +export const InteractiveProgressDashboard = (args: ProgressBarProps) => ( + <AntdThemeProvider> + <ProgressBar {...args} type="dashboard" /> + </AntdThemeProvider> +); + +const commonArgs = { striped: true, percent: 90, showInfo: true, - status: 'normal', strokeColor: '#FF0000', trailColor: '#000', strokeLinecap: 'round', type: 'line', }; -InteractiveProgressBar.argTypes = { - status: { +const commonArgTypes = { + strokeLinecap: { control: { type: 'select', }, - options: ['normal', 'success', 'exception', 'active'], + options: ['round', 'butt', 'square'], }, - strokeLinecap: { + type: { control: { type: 'select', }, - options: ['round', 'square'], + options: ['line', 'circle', 'dashboard'], }, - type: { +}; + +InteractiveProgressBar.args = { + ...commonArgs, + status: 'normal', +}; + +InteractiveProgressBar.argTypes = { + ...commonArgTypes, + status: { control: { type: 'select', }, - options: ['line', 'circle', 'dashboard'], + options: ['normal', 'success', 'exception', 'active'], }, }; + +InteractiveProgressCircle.args = commonArgs; + +InteractiveProgressCircle.argTypes = commonArgTypes; + +InteractiveProgressDashboard.args = commonArgs; + +InteractiveProgressDashboard.argTypes = commonArgTypes; diff --git a/superset-frontend/src/components/ProgressBar/index.tsx b/superset-frontend/src/components/ProgressBar/index.tsx index 097cbc563026..1fd2324cf582 100644 --- 
a/superset-frontend/src/components/ProgressBar/index.tsx +++ b/superset-frontend/src/components/ProgressBar/index.tsx @@ -17,8 +17,8 @@ * under the License. */ import { styled } from '@superset-ui/core'; -import { Progress as AntdProgress } from 'antd'; -import { ProgressProps } from 'antd/lib/progress/progress'; +import { Progress as AntdProgress } from 'antd-v5'; +import { ProgressProps } from 'antd-v5/lib/progress/progress'; export interface ProgressBarProps extends ProgressProps { striped?: boolean; @@ -28,18 +28,11 @@ export interface ProgressBarProps extends ProgressProps { const ProgressBar = styled(({ striped, ...props }: ProgressBarProps) => ( <AntdProgress data-test="progress-bar" {...props} /> ))` - line-height: 0; position: static; - .ant-progress-inner { + .antd5-progress-inner { position: static; } - .ant-progress-outer { - ${({ percent }) => !percent && `display: none;`} - } - .ant-progress-text { - font-size: ${({ theme }) => theme.typography.sizes.s}px; - } - .ant-progress-bg { + .antd5-progress-bg { position: static; ${({ striped }) => striped && diff --git a/superset-frontend/src/components/Slider/Slider.stories.tsx b/superset-frontend/src/components/Slider/Slider.stories.tsx index 3f8abeacebe3..34ac64404153 100644 --- a/superset-frontend/src/components/Slider/Slider.stories.tsx +++ b/superset-frontend/src/components/Slider/Slider.stories.tsx @@ -16,15 +16,58 @@ * specific language governing permissions and limitations * under the License. 
*/ -import Slider, { SliderSingleProps } from '.'; +import Slider, { SliderSingleProps, SliderRangeProps } from '.'; export default { title: 'Slider', component: Slider, }; -export const InteractiveSlider = (args: SliderSingleProps) => ( - <Slider {...args} style={{ width: 400, height: 400 }} /> +const tooltipPlacement = [ + 'top', + 'left', + 'bottom', + 'right', + 'topLeft', + 'topRight', + 'bottomLeft', + 'bottomRight', + 'leftTop', + 'leftBottom', + 'rightTop', + 'rightBottom', +] as const; + +export const InteractiveSlider = ({ + tooltipOpen, + tooltipPosition, + ...args +}: SliderSingleProps & { + tooltipOpen: boolean; + tooltipPosition: (typeof tooltipPlacement)[number]; +}) => ( + <Slider + {...args} + tooltip={{ + ...args.tooltip, + open: tooltipOpen, + placement: tooltipPosition, + }} + style={{ width: 400, height: 400 }} + /> +); + +export const InteractiveRangeSlider = ({ + tooltipOpen, + draggableTrack, + ...args +}: SliderRangeProps & { tooltipOpen: boolean; draggableTrack: boolean }) => ( + <Slider + {...args} + tooltip={{ open: tooltipOpen }} + range={{ draggableTrack }} + style={{ width: 400, height: 400 }} + /> ); InteractiveSlider.args = { @@ -32,17 +75,33 @@ InteractiveSlider.args = { max: 100, defaultValue: 70, step: 1, + marks: {}, + disabled: false, + reverse: false, + vertical: false, + autoFocus: false, + keyboard: true, + dots: false, + included: true, + tooltipPosition: 'bottom', }; InteractiveSlider.argTypes = { onChange: { action: 'onChange' }, - disabled: { + onChangeComplete: { action: 'onChangeComplete' }, + tooltipOpen: { control: { type: 'boolean' }, }, - reverse: { - control: { type: 'boolean' }, - }, - vertical: { - control: { type: 'boolean' }, + tooltipPosition: { + options: tooltipPlacement, + control: { type: 'select' }, }, }; + +InteractiveRangeSlider.args = { + ...InteractiveSlider.args, + defaultValue: [50, 70], + draggableTrack: false, +}; + +InteractiveRangeSlider.argTypes = InteractiveSlider.argTypes; diff --git 
a/superset-frontend/plugins/plugin-chart-word-cloud/src/legacyPlugin/types.ts b/superset-frontend/src/components/Slider/Slider.test.tsx similarity index 61% rename from superset-frontend/plugins/plugin-chart-word-cloud/src/legacyPlugin/types.ts rename to superset-frontend/src/components/Slider/Slider.test.tsx index 2cdc348126c3..8413e0ebc67a 100644 --- a/superset-frontend/plugins/plugin-chart-word-cloud/src/legacyPlugin/types.ts +++ b/superset-frontend/src/components/Slider/Slider.test.tsx @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -16,14 +16,24 @@ * specific language governing permissions and limitations * under the License. */ +import { render, screen } from 'spec/helpers/testing-library'; +import Slider from '.'; -import { QueryFormColumn, QueryFormData } from '@superset-ui/core'; -import { RotationType } from '../chart/WordCloud'; - -export type LegacyWordCloudFormData = QueryFormData & { - colorScheme: string; - rotation?: RotationType; - series: QueryFormColumn; - sizeFrom?: number; - sizeTo: number; +const mockedProps = { + defaultValue: 90, + tooltip: { + open: true, + }, }; + +test('should render', () => { + const { container } = render(<Slider {...mockedProps} />); + expect(container).toBeInTheDocument(); +}); + +test('should render with default value on tooltip', () => { + render(<Slider {...mockedProps} />); + expect( + screen.getAllByText(`${mockedProps.defaultValue}`)[0], + ).toBeInTheDocument(); +}); diff --git a/superset-frontend/src/components/Slider/index.tsx b/superset-frontend/src/components/Slider/index.tsx index 27a3d3d90c09..05f21b0bb9ed 100644 --- a/superset-frontend/src/components/Slider/index.tsx +++ b/superset-frontend/src/components/Slider/index.tsx @@ -16,13 +16,11 @@ * specific language governing permissions and limitations * under the License. 
*/ -import AntdSlider, { - SliderSingleProps, - SliderRangeProps, -} from 'antd/lib/slider'; +import { SliderSingleProps, SliderRangeProps } from 'antd-v5/lib/slider'; +import { Slider as AntdSlider } from 'antd-v5'; export type { SliderSingleProps, SliderRangeProps }; export default function Slider(props: SliderSingleProps | SliderRangeProps) { - return <AntdSlider {...props} css={{ marginLeft: 0, marginRight: 0 }} />; + return <AntdSlider {...props} />; } diff --git a/superset-frontend/src/components/Table/cell-renderers/NumericCell/index.tsx b/superset-frontend/src/components/Table/cell-renderers/NumericCell/index.tsx index 00fa54615052..327b7ba39bb3 100644 --- a/superset-frontend/src/components/Table/cell-renderers/NumericCell/index.tsx +++ b/superset-frontend/src/components/Table/cell-renderers/NumericCell/index.tsx @@ -254,7 +254,7 @@ export enum CurrencyCode { HRK = 'HRK', HTG = 'HTG', HUF = 'HUF', - IDR = 'IDR', + IDR = 'IDR', ILS = 'ILS', INR = 'INR', IQD = 'IQD', diff --git a/superset-frontend/src/components/TableSelector/index.tsx b/superset-frontend/src/components/TableSelector/index.tsx index a80c9418a702..940f42cb3dc8 100644 --- a/superset-frontend/src/components/TableSelector/index.tsx +++ b/superset-frontend/src/components/TableSelector/index.tsx @@ -188,7 +188,7 @@ const TableSelector: FunctionComponent<TableSelectorProps> = ({ SelectValue | undefined >(undefined); const { - data, + currentData: data, isFetching: loadingTables, refetch, } = useTables({ diff --git a/superset-frontend/src/components/TimezoneContext/README.md b/superset-frontend/src/components/TimezoneContext/README.md new file mode 100644 index 000000000000..09a733d1ad85 --- /dev/null +++ b/superset-frontend/src/components/TimezoneContext/README.md @@ -0,0 +1,156 @@ +# Timezone Context + +This document explains how to use the global timezone functionality in Superset frontend. 
+ +## Overview + +The timezone context allows all dates in the application to respect a timezone specified via URL parameter. This provides a consistent way to display dates and times across the entire application based on the user's specified timezone. + +## URL Parameter + +Add a `timezone` parameter to any URL to set the application timezone: + +``` +https://your-superset.com/dashboard/1/?timezone=Asia/Kolkata +https://your-superset.com/dashboard/1/?timezone=Asia/Dubai +https://your-superset.com/dashboard/1/?timezone=America/New_York +``` + +Valid timezone values are any timezone name from the [IANA Time Zone Database](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones), such as: +- `Asia/Kolkata` +- `Asia/Dubai` +- `Europe/London` +- `America/New_York` +- `UTC` + +If no timezone is specified or an invalid timezone is provided, the application defaults to `UTC`. + +## Usage in Components + +### Using the Timezone Context Hook + +```tsx +import { useTimezone } from 'src/components/TimezoneContext'; + +function MyComponent() { + const { timezone, formatDate, formatDateTime } = useTimezone(); + + const currentTime = new Date(); + + return ( + <div> + <p>Current timezone: {timezone}</p> + <p>Date: {formatDate(currentTime)}</p> + <p>DateTime: {formatDateTime(currentTime)}</p> + </div> + ); +} +``` + +### Using Utility Functions + +```tsx +import { formatDate, formatDateTime, getCurrentTimezone } from 'src/utils/dateUtils'; + +function MyComponent() { + const currentTime = new Date(); + + return ( + <div> + <p>Current timezone: {getCurrentTimezone()}</p> + <p>Date: {formatDate(currentTime)}</p> + <p>DateTime: {formatDateTime(currentTime)}</p> + <p>Custom format: {formatDateTime(currentTime, 'MMM DD, YYYY HH:mm')}</p> + </div> + ); +} +``` + +## API Reference + +### TimezoneContext Hook + +#### `useTimezone()` + +Returns an object with timezone utilities: + +- `timezone: string` - Current timezone (e.g., 'Asia/Kolkata') +- `setTimezone: (timezone: string) => 
void` - Function to update timezone +- `formatDate: (date: MomentInput, format?: string) => string` - Format date in current timezone +- `formatDateTime: (date: MomentInput, format?: string) => string` - Format datetime in current timezone + +### Utility Functions + +#### `getCurrentTimezone(): string` +Returns the current timezone from URL parameter or default (UTC). + +#### `formatDate(date: MomentInput, format?: string, timezone?: string): string` +Formats a date in the specified timezone (or current timezone if not provided). +- Default format: `'YYYY-MM-DD'` + +#### `formatDateTime(date: MomentInput, format?: string, timezone?: string): string` +Formats a datetime in the specified timezone (or current timezone if not provided). +- Default format: `'YYYY-MM-DD HH:mm:ss'` + +#### `createMomentInTimezone(date: MomentInput, timezone?: string): moment.Moment` +Creates a moment object in the specified timezone. + +#### `parseAndConvertToTimezone(dateString: string, inputFormat?: string, timezone?: string): moment.Moment` +Parses a date string and converts it to the specified timezone. + +#### `getTimezoneDisplayName(timezone?: string): string` +Returns a user-friendly timezone display name (e.g., "Asia/Kolkata (UTC+05:30)"). + +#### `isValidTimezone(timezone: string): boolean` +Checks if a timezone string is valid. 
+ +## Examples + +### Setting Timezone via URL + +``` +# Indian Standard Time +/?timezone=Asia/Kolkata + +# UAE Time +/?timezone=Asia/Dubai + +# Eastern Time +/?timezone=America/New_York + +# UTC (default) +/?timezone=UTC +``` + +### Custom Date Formatting + +```tsx +import { formatDateTime } from 'src/utils/dateUtils'; + +// Format in different timezones +const timestamp = new Date('2023-12-25T12:00:00Z'); + +const kolkataTime = formatDateTime(timestamp, 'YYYY-MM-DD HH:mm:ss', 'Asia/Kolkata'); +// Result: "2023-12-25 17:30:00" + +const dubaiTime = formatDateTime(timestamp, 'YYYY-MM-DD HH:mm:ss', 'Asia/Dubai'); +// Result: "2023-12-25 16:00:00" + +const utcTime = formatDateTime(timestamp, 'YYYY-MM-DD HH:mm:ss', 'UTC'); +// Result: "2023-12-25 12:00:00" +``` + +## Implementation Details + +- The timezone context is provided at the root level via `RootContextProviders` +- URL parameter changes are automatically detected and applied +- The `LastUpdated` component has been updated to use timezone context +- All date formatting throughout the application should use the timezone context or utility functions + +## Testing + +The timezone functionality includes comprehensive tests. Run them with: + +```bash +npm test -- --testPathPattern="TimezoneContext.test.tsx" +``` \ No newline at end of file diff --git a/superset-frontend/src/components/TimezoneContext/TimezoneContext.test.tsx b/superset-frontend/src/components/TimezoneContext/TimezoneContext.test.tsx new file mode 100644 index 000000000000..d91522a977f1 --- /dev/null +++ b/superset-frontend/src/components/TimezoneContext/TimezoneContext.test.tsx @@ -0,0 +1,81 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { render, screen } from '@testing-library/react'; +import { TimezoneProvider, useTimezone } from './index'; + +// Mock the URL parameter utility +jest.mock('src/utils/urlUtils', () => ({ + getUrlParam: jest.fn(() => null), +})); + +function TestComponent() { + const { timezone, formatDate, formatDateTime } = useTimezone(); + const testDate = new Date('2023-12-25T12:00:00Z'); + + return ( + <div> + <div data-test="timezone">{timezone}</div> + <div data-test="formatted-date">{formatDate(testDate)}</div> + <div data-test="formatted-datetime">{formatDateTime(testDate)}</div> + </div> + ); +} + +describe('TimezoneContext', () => { + it('should provide default timezone when no URL parameter is set', () => { + render( + <TimezoneProvider> + <TestComponent /> + </TimezoneProvider> + ); + + const timezoneElement = screen.getByTestId('timezone'); + expect(timezoneElement).toBeTruthy(); + expect(timezoneElement.textContent).toBe('UTC'); + }); + + it('should format dates correctly', () => { + render( + <TimezoneProvider> + <TestComponent /> + </TimezoneProvider> + ); + + const formattedDate = screen.getByTestId('formatted-date'); + const formattedDateTime = screen.getByTestId('formatted-datetime'); + + expect(formattedDate).toBeTruthy(); + expect(formattedDateTime).toBeTruthy(); + + // Check that the formatted values contain expected patterns + 
expect(formattedDate.textContent).toMatch(/^\d{4}-\d{2}-\d{2}$/); + expect(formattedDateTime.textContent).toMatch(/^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$/); + }); + + it('should throw error when useTimezone is used outside provider', () => { + const spy = jest.spyOn(console, 'error').mockImplementation(() => {}); + + expect(() => { + render(<TestComponent />); + }).toThrow('useTimezone must be used within a TimezoneProvider'); + + spy.mockRestore(); + }); +}); \ No newline at end of file diff --git a/superset-frontend/src/components/TimezoneContext/index.tsx b/superset-frontend/src/components/TimezoneContext/index.tsx new file mode 100644 index 000000000000..c82d6d5f88e4 --- /dev/null +++ b/superset-frontend/src/components/TimezoneContext/index.tsx @@ -0,0 +1,126 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { createContext, useContext, useEffect, useState, ReactNode } from 'react'; +import moment from 'moment-timezone'; +import { URL_PARAMS } from 'src/constants'; +import { getCurrentTimezone as getCurrentTimezoneUtil, isValidTimezone } from 'src/utils/dateUtils'; + +interface TimezoneContextType { + timezone: string; + setTimezone: (timezone: string) => void; + formatDate: (date: moment.MomentInput, format?: string) => string; + formatDateTime: (date: moment.MomentInput, format?: string) => string; + convertToUTC: (date: moment.MomentInput) => moment.Moment; + convertFromUTC: (utcDate: moment.MomentInput) => moment.Moment; +} + +const TimezoneContext = createContext<TimezoneContextType | undefined>(undefined); + +const DEFAULT_TIMEZONE = 'Asia/Kolkata'; +const DEFAULT_DATE_FORMAT = 'YYYY-MM-DD'; +const DEFAULT_DATETIME_FORMAT = 'YYYY-MM-DD HH:mm:ss'; + +interface TimezoneProviderProps { + children: ReactNode; +} + +export function TimezoneProvider({ children }: TimezoneProviderProps) { + // Initialize timezone strictly from URL param or default via getCurrentTimezone + const [timezone, setTimezoneState] = useState<string>(() => { + const initial = getCurrentTimezoneUtil(); + return initial; + }); + + // Function to update timezone + const setTimezone = (newTimezone: string) => { + const targetTz = isValidTimezone(newTimezone) ? newTimezone : DEFAULT_TIMEZONE; + if (!isValidTimezone(newTimezone)) { + console.warn(`Invalid timezone: ${newTimezone}. 
Falling back to default: ${DEFAULT_TIMEZONE}`); + } + // Sync URL param so UI always reflects URL or default + try { + const url = new URL(window.location.href); + url.searchParams.set(URL_PARAMS.timezone.name, targetTz); + // Use replaceState to avoid polluting history + window.history.replaceState({}, '', url.toString()); + } catch (e) { + console.warn('Failed to sync timezone to URL param:', e); + } + setTimezoneState(targetTz); + }; + + // Function to format date in the current timezone + const formatDate = (date: moment.MomentInput, format = DEFAULT_DATE_FORMAT): string => { + return moment.tz(date, timezone).format(format); + }; + + // Function to format datetime in the current timezone + const formatDateTime = (date: moment.MomentInput, format = DEFAULT_DATETIME_FORMAT): string => { + return moment.tz(date, timezone).format(format); + }; + + // Convert a date from current timezone to UTC for API calls + const convertToUTC = (date: moment.MomentInput): moment.Moment => { + // First parse the date in the current timezone, then convert to UTC + return moment.tz(date, timezone).utc(); + }; + + // Convert a UTC date to the current timezone for display + const convertFromUTC = (utcDate: moment.MomentInput): moment.Moment => { + // Parse as UTC, then convert to current timezone + return moment.utc(utcDate).tz(timezone); + }; + + // Watch for URL parameter changes to always reflect URL or default + useEffect(() => { + const handlePopState = () => { + const current = getCurrentTimezoneUtil(); + setTimezoneState(current); + }; + + window.addEventListener('popstate', handlePopState); + return () => window.removeEventListener('popstate', handlePopState); + }, []); + + const contextValue: TimezoneContextType = { + timezone, + setTimezone, + formatDate, + formatDateTime, + convertToUTC, + convertFromUTC, + }; + + return ( + <TimezoneContext.Provider value={contextValue}> + {children} + </TimezoneContext.Provider> + ); +} + +export function useTimezone(): 
TimezoneContextType { + const context = useContext(TimezoneContext); + if (context === undefined) { + throw new Error('useTimezone must be used within a TimezoneProvider'); + } + return context; +} + +export { TimezoneContext }; \ No newline at end of file diff --git a/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.DaylightSavingTime.test.tsx b/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.DaylightSavingTime.test.tsx index 81dbc6fdcd7e..395d48ad99d5 100644 --- a/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.DaylightSavingTime.test.tsx +++ b/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.DaylightSavingTime.test.tsx @@ -30,10 +30,13 @@ const loadComponent = (mockCurrentTime?: string) => { return new Promise<FC<TimezoneSelectorProps>>(resolve => { const { default: TimezoneSelector } = module.require('./index'); resolve(TimezoneSelector); - jest.useRealTimers(); }); }; +afterEach(() => { + jest.useRealTimers(); +}); + test('render timezones in correct order for daylight saving time', async () => { const TimezoneSelector = await loadComponent('2022-07-01'); const onTimezoneChange = jest.fn(); diff --git a/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.test.tsx b/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.test.tsx index 173118c7e84c..08ed5e9749f2 100644 --- a/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.test.tsx +++ b/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.test.tsx @@ -16,10 +16,10 @@ * specific language governing permissions and limitations * under the License. 
*/ +import userEvent from '@testing-library/user-event'; import moment from 'moment-timezone'; import { FC } from 'react'; import { render, screen, waitFor } from 'spec/helpers/testing-library'; -import userEvent from '@testing-library/user-event'; import type { TimezoneSelectorProps } from './index'; const loadComponent = (mockCurrentTime?: string) => { @@ -30,7 +30,6 @@ const loadComponent = (mockCurrentTime?: string) => { return new Promise<FC<TimezoneSelectorProps>>(resolve => { const { default: TimezoneSelector } = module.require('./index'); resolve(TimezoneSelector); - jest.useRealTimers(); }); }; @@ -44,6 +43,10 @@ const openSelectMenu = () => { jest.spyOn(moment.tz, 'guess').mockReturnValue('America/New_York'); +afterEach(() => { + jest.useRealTimers(); +}); + test('use the timezone from `moment` if no timezone provided', async () => { const TimezoneSelector = await loadComponent('2022-01-01'); const onTimezoneChange = jest.fn(); @@ -89,7 +92,6 @@ test('render timezones in correct oder for standard time', async () => { expect(options[0]).toHaveTextContent('GMT -05:00 (Eastern Standard Time)'); expect(options[1]).toHaveTextContent('GMT -11:00 (Pacific/Pago_Pago)'); expect(options[2]).toHaveTextContent('GMT -10:00 (Hawaii Standard Time)'); - expect(options[3]).toHaveTextContent('GMT -10:00 (America/Adak)'); }); test('can select a timezone values and returns canonical timezone name', async () => { @@ -106,7 +108,7 @@ test('can select a timezone values and returns canonical timezone name', async ( const searchInput = screen.getByRole('combobox'); // search for mountain time - await userEvent.type(searchInput, 'mou', { delay: 10 }); + await userEvent.type(searchInput, 'mou'); const findTitle = 'GMT -07:00 (Mountain Standard Time)'; const selectOption = await screen.findByTitle(findTitle); userEvent.click(selectOption); diff --git a/superset-frontend/src/constants.ts b/superset-frontend/src/constants.ts index b7d34ad73869..3bb4e5b4d939 100644 --- 
a/superset-frontend/src/constants.ts +++ b/superset-frontend/src/constants.ts @@ -111,6 +111,10 @@ export const URL_PARAMS = { name: 'focused_chart', type: 'number', }, + timezone: { + name: 'timezone', + type: 'string', + }, } as const; export const RESERVED_CHART_URL_PARAMS: string[] = [ diff --git a/superset-frontend/src/dashboard/actions/dashboardState.js b/superset-frontend/src/dashboard/actions/dashboardState.js index 9b0c1818212a..6f46ffe60e1d 100644 --- a/superset-frontend/src/dashboard/actions/dashboardState.js +++ b/superset-frontend/src/dashboard/actions/dashboardState.js @@ -28,6 +28,7 @@ import { t, getClientErrorObject, getCategoricalSchemeRegistry, + promiseTimeout, } from '@superset-ui/core'; import { addChart, @@ -55,22 +56,25 @@ import { getActiveFilters } from 'src/dashboard/util/activeDashboardFilters'; import { safeStringify } from 'src/utils/safeStringify'; import { logEvent } from 'src/logger/actions'; import { LOG_ACTIONS_CONFIRM_OVERWRITE_DASHBOARD_METADATA } from 'src/logger/LogUtils'; +import { isEqual } from 'lodash'; import { UPDATE_COMPONENTS_PARENTS_LIST } from './dashboardLayout'; import { saveChartConfiguration, dashboardInfoChanged, SAVE_CHART_CONFIG_COMPLETE, } from './dashboardInfo'; -import { fetchDatasourceMetadata } from './datasources'; +import { fetchDatasourceMetadata, setDatasources } from './datasources'; import { updateDirectPathToFilter } from './dashboardFilters'; import { SET_FILTER_CONFIG_COMPLETE } from './nativeFilters'; import getOverwriteItems from '../util/getOverwriteItems'; import { applyColors, + enforceSharedLabelsColorsArray, isLabelsColorMapSynced, - getLabelsColorMapEntries, getColorSchemeDomain, getColorNamespace, + getLabelsColorMapEntries, + getFreshSharedLabels, } from '../../utils/colorScheme'; export const SET_UNSAVED_CHANGES = 'SET_UNSAVED_CHANGES'; @@ -224,6 +228,41 @@ export function saveDashboardFinished() { return { type: SAVE_DASHBOARD_FINISHED }; } +export const 
SET_DASHBOARD_LABELS_COLORMAP_SYNCABLE = + 'SET_DASHBOARD_LABELS_COLORMAP_SYNCABLE'; +export const SET_DASHBOARD_LABELS_COLORMAP_SYNCED = + 'SET_DASHBOARD_LABELS_COLORMAP_SYNCED'; +export const SET_DASHBOARD_SHARED_LABELS_COLORS_SYNCABLE = + 'SET_DASHBOARD_SHARED_LABELS_COLORS_SYNCABLE'; +export const SET_DASHBOARD_SHARED_LABELS_COLORS_SYNCED = + 'SET_DASHBOARD_SHARED_LABELS_COLORS_SYNCED'; + +export function setDashboardLabelsColorMapSync() { + return { type: SET_DASHBOARD_LABELS_COLORMAP_SYNCABLE }; +} + +export function setDashboardLabelsColorMapSynced() { + return { type: SET_DASHBOARD_LABELS_COLORMAP_SYNCED }; +} + +export function setDashboardSharedLabelsColorsSync() { + return { type: SET_DASHBOARD_SHARED_LABELS_COLORS_SYNCABLE }; +} + +export function setDashboardSharedLabelsColorsSynced() { + return { type: SET_DASHBOARD_SHARED_LABELS_COLORS_SYNCED }; +} + +export const setDashboardMetadata = updatedMetadata => async dispatch => { + dispatch( + dashboardInfoChanged({ + metadata: { + ...updatedMetadata, + }, + }), + ); +}; + export function saveDashboardRequest(data, id, saveType) { return (dispatch, getState) => { dispatch({ type: UPDATE_COMPONENTS_PARENTS_LIST }); @@ -254,7 +293,11 @@ export function saveDashboardRequest(data, id, saveType) { const hasId = item => item.id !== undefined; const metadataCrossFiltersEnabled = data.metadata?.cross_filters_enabled; - // making sure the data is what the backend expects + const colorScheme = data.metadata?.color_scheme; + const customLabelsColor = data.metadata?.label_colors || {}; + const sharedLabelsColor = enforceSharedLabelsColorsArray( + data.metadata?.shared_label_colors, + ); const cleanedData = { ...data, certified_by: certified_by || '', @@ -270,11 +313,14 @@ export function saveDashboardRequest(data, id, saveType) { metadata: { ...data.metadata, color_namespace: getColorNamespace(data.metadata?.color_namespace), - color_scheme: data.metadata?.color_scheme || '', - color_scheme_domain: 
data.metadata?.color_scheme_domain || [], + color_scheme: colorScheme || '', + color_scheme_domain: colorScheme + ? getColorSchemeDomain(colorScheme) + : [], expanded_slices: data.metadata?.expanded_slices || {}, - label_colors: data.metadata?.label_colors || {}, - shared_label_colors: data.metadata?.shared_label_colors || {}, + label_colors: customLabelsColor, + shared_label_colors: getFreshSharedLabels(sharedLabelsColor), + map_label_colors: getLabelsColorMapEntries(customLabelsColor), refresh_frequency: data.metadata?.refresh_frequency || 0, timed_refresh_immune_slices: data.metadata?.timed_refresh_immune_slices || [], @@ -324,11 +370,7 @@ export function saveDashboardRequest(data, id, saveType) { // syncing with the backend transformations of the metadata if (updatedDashboard.json_metadata) { const metadata = JSON.parse(updatedDashboard.json_metadata); - dispatch( - dashboardInfoChanged({ - metadata, - }), - ); + dispatch(setDashboardMetadata(metadata)); if (metadata.chart_configuration) { dispatch({ type: SAVE_CHART_CONFIG_COMPLETE, @@ -341,6 +383,17 @@ export function saveDashboardRequest(data, id, saveType) { filterConfig: metadata.native_filter_configuration, }); } + + // fetch datasets to make sure they are up to date + SupersetClient.get({ + endpoint: `/api/v1/dashboard/${id}/datasets`, + headers: { 'Content-Type': 'application/json' }, + }).then(({ json }) => { + const datasources = json?.result ?? 
[]; + if (datasources.length) { + dispatch(setDatasources(datasources)); + } + }); } if (lastModifiedTime) { dispatch(saveDashboardRequestSuccess(lastModifiedTime)); @@ -666,68 +719,259 @@ export function setDatasetsStatus(status) { }; } -const updateDashboardMetadata = async (id, metadata, dispatch) => { - await SupersetClient.put({ +const storeDashboardMetadata = async (id, metadata) => + SupersetClient.put({ endpoint: `/api/v1/dashboard/${id}`, headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ json_metadata: JSON.stringify(metadata) }), }); - dispatch(dashboardInfoChanged({ metadata })); -}; -export const updateDashboardLabelsColor = () => async (dispatch, getState) => { +/** + * + * Persists the label colors maps in the dashboard metadata. + * It runs when outdated color info are detected in stored metadata. + * + * @returns void + */ +export const persistDashboardLabelsColor = () => async (dispatch, getState) => { const { dashboardInfo: { id, metadata }, + dashboardState: { labelsColorMapMustSync, sharedLabelsColorsMustSync }, } = getState(); - const categoricalSchemes = getCategoricalSchemeRegistry(); - const colorScheme = metadata?.color_scheme; - const colorSchemeRegistry = categoricalSchemes.get( - metadata?.color_scheme, - true, - ); - const defaultScheme = categoricalSchemes.defaultKey; - const fallbackScheme = defaultScheme?.toString() || 'supersetColors'; - const colorSchemeDomain = metadata?.color_scheme_domain || []; + if (labelsColorMapMustSync || sharedLabelsColorsMustSync) { + dispatch(setDashboardLabelsColorMapSynced()); + dispatch(setDashboardSharedLabelsColorsSynced()); + storeDashboardMetadata(id, metadata); + } +}; + +/** + * Checks the stored dashboard metadata for inconsistencies. + * Update the current metadata with validated color information. + * It runs only on Dashboard page load. 
+ * + * @param {*} metadata - the stored dashboard metadata + * @returns void + */ +export const applyDashboardLabelsColorOnLoad = metadata => async dispatch => { try { const updatedMetadata = { ...metadata }; - let updatedScheme = metadata?.color_scheme; + const customLabelsColor = metadata.label_colors || {}; + let hasChanged = false; + + // backward compatibility of shared_label_colors + const sharedLabels = metadata.shared_label_colors || []; + if (!Array.isArray(sharedLabels) && Object.keys(sharedLabels).length > 0) { + hasChanged = true; + updatedMetadata.shared_label_colors = []; + } + // backward compatibility of map_label_colors + const hasMapLabelColors = + Object.keys(metadata.map_label_colors || {}).length > 0; + + let updatedScheme = metadata.color_scheme; + const categoricalSchemes = getCategoricalSchemeRegistry(); + const colorSchemeRegistry = categoricalSchemes.get(updatedScheme, true); + const hasInvalidColorScheme = !!updatedScheme && !colorSchemeRegistry; + + // color scheme might not exist any longer + if (hasInvalidColorScheme) { + const defaultScheme = categoricalSchemes.defaultKey; + const fallbackScheme = defaultScheme?.toString() || 'supersetColors'; + hasChanged = true; - // Color scheme does not exist anymore, fallback to default - if (colorScheme && !colorSchemeRegistry) { updatedScheme = fallbackScheme; updatedMetadata.color_scheme = updatedScheme; - updatedMetadata.color_scheme_domain = getColorSchemeDomain(colorScheme); dispatch(setColorScheme(updatedScheme)); - // must re-apply colors from fresh labels color map - applyColors(updatedMetadata, true); - } - - // stored labels color map and applied might differ - const isMapSynced = isLabelsColorMapSynced(metadata); - if (!isMapSynced) { - // re-apply a fresh labels color map - applyColors(updatedMetadata, true); - // pull and store the just applied labels color map - updatedMetadata.shared_label_colors = getLabelsColorMapEntries(); } // the stored color domain registry and fresh might 
differ at this point - const freshColorSchemeDomain = getColorSchemeDomain(colorScheme); - const isRegistrySynced = - colorSchemeDomain.toString() !== freshColorSchemeDomain.toString(); - if (colorScheme && !isRegistrySynced) { + const freshColorSchemeDomain = updatedScheme + ? getColorSchemeDomain(updatedScheme) + : []; + const currentColorSchemeDomain = metadata.color_scheme_domain || []; + + if (!isEqual(freshColorSchemeDomain, currentColorSchemeDomain)) { + hasChanged = true; updatedMetadata.color_scheme_domain = freshColorSchemeDomain; } - if ( - (colorScheme && (!colorSchemeRegistry || !isRegistrySynced)) || - !isMapSynced - ) { - await updateDashboardMetadata(id, updatedMetadata, dispatch); + // if color scheme is invalid or map is missing, apply a fresh color map + // if valid, apply the stored map to keep consistency across refreshes + const shouldGoFresh = !hasMapLabelColors || hasInvalidColorScheme; + applyColors(updatedMetadata, shouldGoFresh); + + if (shouldGoFresh) { + // a fresh color map has been applied + // needs to be stored for consistency + hasChanged = true; + updatedMetadata.map_label_colors = + getLabelsColorMapEntries(customLabelsColor); } - } catch (error) { - console.error('Failed to update dashboard color settings:', error); + + if (hasChanged) { + dispatch(setDashboardMetadata(updatedMetadata)); + dispatch(setDashboardLabelsColorMapSync()); + } + } catch (e) { + console.error('Failed to update dashboard color on load:', e); } }; + +/** + * + * Ensure that the stored color map matches fresh map. 
+ * + * @param {*} metadata - the dashboard metadata + * @returns void + */ +export const ensureSyncedLabelsColorMap = metadata => (dispatch, getState) => { + const syncLabelsColorMap = () => { + const { + dashboardState: { labelsColorMapMustSync }, + } = getState(); + const updatedMetadata = { ...metadata }; + const customLabelsColor = metadata.label_colors || {}; + const isMapSynced = isLabelsColorMapSynced(metadata); + const mustSync = !isMapSynced; + + if (mustSync) { + const freshestColorMapEntries = + getLabelsColorMapEntries(customLabelsColor); + updatedMetadata.map_label_colors = freshestColorMapEntries; + dispatch(setDashboardMetadata(updatedMetadata)); + } + + if (mustSync && !labelsColorMapMustSync) { + // prepare to persist the just applied labels color map + dispatch(setDashboardLabelsColorMapSync()); + } + }; + promiseTimeout(syncLabelsColorMap, 500); +}; + +/** + * + * Ensure that the stored shared labels colors match current. + * + * @param {*} metadata - the dashboard metadata + * @param {*} forceFresh - when true it will use the fresh shared labels ignoring stored ones + * @returns void + */ +export const ensureSyncedSharedLabelsColors = + (metadata, forceFresh = false) => + (dispatch, getState) => { + const syncSharedLabelsColors = () => { + const { + dashboardState: { sharedLabelsColorsMustSync }, + } = getState(); + const updatedMetadata = { ...metadata }; + const sharedLabelsColors = enforceSharedLabelsColorsArray( + metadata.shared_label_colors, + ); + const freshLabelsColors = getFreshSharedLabels( + forceFresh ? 
[] : sharedLabelsColors, + ); + const isSharedLabelsColorsSynced = isEqual( + sharedLabelsColors, + freshLabelsColors, + ); + + const mustSync = !isSharedLabelsColorsSynced; + + if (mustSync) { + updatedMetadata.shared_label_colors = freshLabelsColors; + dispatch(setDashboardMetadata(updatedMetadata)); + } + + if (mustSync && !sharedLabelsColorsMustSync) { + // prepare to persist the shared labels colors + dispatch(setDashboardSharedLabelsColorsSync()); + } + }; + promiseTimeout(syncSharedLabelsColors, 500); + }; + +/** + * + * Updates the color map with new labels and colors as they appear. + * + * @param {*} renderedChartIds - the charts that have finished rendering + * @returns void + */ +export const updateDashboardLabelsColor = + renderedChartIds => (dispatch, getState) => { + try { + const { + dashboardInfo: { metadata }, + charts, + } = getState(); + const colorScheme = metadata.color_scheme; + const labelsColorMapInstance = getLabelsColorMap(); + const fullLabelsColors = metadata.map_label_colors || {}; + const sharedLabelsColors = enforceSharedLabelsColorsArray( + metadata.shared_label_colors, + ); + const customLabelsColors = metadata.label_colors || {}; + + // for dashboards with no color scheme, the charts should always use their individual schemes + // this logic looks for unique labels (not shared across multiple charts) of each rendered chart + // it applies a new color to those unique labels when the applied scheme is not up to date + // while leaving shared label colors and custom label colors intact for color consistency + const shouldReset = []; + if (renderedChartIds.length > 0) { + const sharedLabelsSet = new Set(sharedLabelsColors); + renderedChartIds.forEach(id => { + const chart = charts[id]; + const formData = chart.form_data || chart.latestQueryFormData; + // ensure charts have their original color scheme always available + labelsColorMapInstance.setOwnColorScheme( + formData.slice_id, + formData.color_scheme, + ); + + // if dashboard has 
a scheme, charts should ignore individual schemes + // thus following logic is inapplicable if a dashboard color scheme exists + if (colorScheme) return; + + const chartColorScheme = formData.color_scheme; + const currentChartConfig = labelsColorMapInstance.chartsLabelsMap.get( + formData.slice_id, + ); + const currentChartLabels = currentChartConfig?.labels || []; + const uniqueChartLabels = currentChartLabels.filter( + l => + !sharedLabelsSet.has(l) && !customLabelsColors.hasOwnProperty(l), + ); + + // Map unique labels to colors + const uniqueChartLabelsColor = new Set( + uniqueChartLabels.map(l => fullLabelsColors[l]).filter(Boolean), + ); + + const expectedColorsForChartScheme = new Set( + getColorSchemeDomain(chartColorScheme), + ); + + // Check if any unique label color is not in the expected colors set + const shouldResetColors = [...uniqueChartLabelsColor].some( + color => !expectedColorsForChartScheme.has(color), + ); + + // Only push uniqueChartLabels if they require resetting + if (shouldResetColors) shouldReset.push(...uniqueChartLabels); + }); + } + + // an existing map is available, use mrge option + // to only apply colors to newly found labels + const shouldGoFresh = shouldReset.length > 0 ? 
shouldReset : false; + const shouldMerge = !shouldGoFresh; + // re-apply the color map first to get fresh maps accordingly + applyColors(metadata, shouldGoFresh, shouldMerge); + } catch (e) { + console.error('Failed to update colors for new charts and labels:', e); + } + }; diff --git a/superset-frontend/src/dashboard/components/AnchorLink/index.tsx b/superset-frontend/src/dashboard/components/AnchorLink/index.tsx index a0162baef2e8..837444412aa7 100644 --- a/superset-frontend/src/dashboard/components/AnchorLink/index.tsx +++ b/superset-frontend/src/dashboard/components/AnchorLink/index.tsx @@ -64,7 +64,7 @@ export default function AnchorLink({ }, [id, scrollIntoView]); return ( - <span className="anchor-link-container" id={id}> + <span className="anchor-link-container" id={id} data-test="anchor-link"> {showShortLinkButton && dashboardId && ( <URLShortLinkButton anchorLinkId={id} diff --git a/superset-frontend/src/dashboard/components/Dashboard.jsx b/superset-frontend/src/dashboard/components/Dashboard.jsx index 8cc0d8103026..ddbc2441a523 100644 --- a/superset-frontend/src/dashboard/components/Dashboard.jsx +++ b/superset-frontend/src/dashboard/components/Dashboard.jsx @@ -41,6 +41,7 @@ import { areObjectsEqual } from '../../reduxUtils'; import getLocationHash from '../util/getLocationHash'; import isDashboardEmpty from '../util/isDashboardEmpty'; import { getAffectedOwnDataCharts } from '../util/charts/getOwnDataCharts'; +import { getRelatedCharts } from '../util/getRelatedCharts'; const propTypes = { actions: PropTypes.shape({ @@ -211,9 +212,10 @@ class Dashboard extends PureComponent { applyFilters() { const { appliedFilters } = this; - const { activeFilters, ownDataCharts } = this.props; + const { activeFilters, ownDataCharts, slices } = this.props; // refresh charts if a filter was removed, added, or changed + const currFilterKeys = Object.keys(activeFilters); const appliedFilterKeys = Object.keys(appliedFilters); @@ -222,16 +224,21 @@ class Dashboard extends 
PureComponent { ownDataCharts, this.appliedOwnDataCharts, ); + [...allKeys].forEach(filterKey => { if ( !currFilterKeys.includes(filterKey) && appliedFilterKeys.includes(filterKey) ) { // filterKey is removed? - affectedChartIds.push(...appliedFilters[filterKey].scope); + affectedChartIds.push( + ...getRelatedCharts(filterKey, appliedFilters[filterKey], slices), + ); } else if (!appliedFilterKeys.includes(filterKey)) { // filterKey is newly added? - affectedChartIds.push(...activeFilters[filterKey].scope); + affectedChartIds.push( + ...getRelatedCharts(filterKey, activeFilters[filterKey], slices), + ); } else { // if filterKey changes value, // update charts in its scope @@ -244,7 +251,9 @@ class Dashboard extends PureComponent { }, ) ) { - affectedChartIds.push(...activeFilters[filterKey].scope); + affectedChartIds.push( + ...getRelatedCharts(filterKey, activeFilters[filterKey], slices), + ); } // if filterKey changes scope, diff --git a/superset-frontend/src/dashboard/components/Dashboard.test.jsx b/superset-frontend/src/dashboard/components/Dashboard.test.jsx index b33fe9f6388c..e3421ee04057 100644 --- a/superset-frontend/src/dashboard/components/Dashboard.test.jsx +++ b/superset-frontend/src/dashboard/components/Dashboard.test.jsx @@ -37,6 +37,9 @@ import { dashboardLayout } from 'spec/fixtures/mockDashboardLayout'; import dashboardState from 'spec/fixtures/mockDashboardState'; import { sliceEntitiesForChart as sliceEntities } from 'spec/fixtures/mockSliceEntities'; import { getAllActiveFilters } from 'src/dashboard/util/activeAllDashboardFilters'; +import { getRelatedCharts } from 'src/dashboard/util/getRelatedCharts'; + +jest.mock('src/dashboard/util/getRelatedCharts'); describe('Dashboard', () => { const props = { @@ -130,6 +133,7 @@ describe('Dashboard', () => { afterEach(() => { refreshSpy.restore(); + jest.clearAllMocks(); }); it('should not call refresh when is editMode', () => { @@ -153,6 +157,7 @@ describe('Dashboard', () => { }); it('should call 
refresh when native filters changed', () => { + getRelatedCharts.mockReturnValue([230]); wrapper.setProps({ activeFilters: { ...OVERRIDE_FILTERS, @@ -170,11 +175,21 @@ describe('Dashboard', () => { [NATIVE_FILTER_ID]: { scope: [230], values: extraFormData, + filterType: 'filter_select', + targets: [ + { + datasetId: 13, + column: { + name: 'ethnic_minority', + }, + }, + ], }, }); }); it('should call refresh if a filter is added', () => { + getRelatedCharts.mockReturnValue([1]); const newFilter = { gender: { values: ['boy', 'girl'], scope: [1] }, }; @@ -186,6 +201,7 @@ describe('Dashboard', () => { }); it('should call refresh if a filter is removed', () => { + getRelatedCharts.mockReturnValue([]); wrapper.setProps({ activeFilters: {}, }); @@ -194,6 +210,7 @@ describe('Dashboard', () => { }); it('should call refresh if a filter is changed', () => { + getRelatedCharts.mockReturnValue([1]); const newFilters = { ...OVERRIDE_FILTERS, '1_region': { values: ['Canada'], scope: [1] }, @@ -207,6 +224,7 @@ describe('Dashboard', () => { }); it('should call refresh with multiple chart ids', () => { + getRelatedCharts.mockReturnValue([1, 2]); const newFilters = { ...OVERRIDE_FILTERS, '2_country_name': { values: ['New Country'], scope: [1, 2] }, @@ -233,6 +251,7 @@ describe('Dashboard', () => { }); it('should call refresh with empty [] if a filter is changed but scope is not applicable', () => { + getRelatedCharts.mockReturnValue([]); const newFilters = { ...OVERRIDE_FILTERS, '3_country_name': { values: ['CHINA'], scope: [] }, diff --git a/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardBuilder.test.tsx b/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardBuilder.test.tsx index 68992fa72459..a72912f0d0a5 100644 --- a/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardBuilder.test.tsx +++ b/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardBuilder.test.tsx @@ -36,6 +36,8 @@ import { DASHBOARD_ROOT_ID } from 
'src/dashboard/util/constants'; fetchMock.get('glob:*/csstemplateasyncmodelview/api/read', {}); +fetchMock.put('glob:*/api/v1/dashboard/*', {}); + jest.mock('src/dashboard/actions/dashboardState', () => ({ ...jest.requireActual('src/dashboard/actions/dashboardState'), fetchFaveStar: jest.fn(), diff --git a/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardBuilder.tsx b/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardBuilder.tsx index c48463b98a29..03d993288f4f 100644 --- a/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardBuilder.tsx +++ b/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardBuilder.tsx @@ -675,31 +675,33 @@ const DashboardBuilder: FC<DashboardBuilderProps> = () => { editMode={editMode} marginLeft={dashboardContentMarginLeft} > - {missingInitialFilters.length > 0 ? ( - <div - css={css` - display: flex; - flex-direction: row; - align-items: center; - justify-content: center; - flex: 1; - & div { - width: 500px; - } - `} - > - <BasicErrorAlert - title={t('Unable to load dashboard')} - body={t( - `The following filters have the 'Select first filter value by default' + {showDashboard ? ( + missingInitialFilters.length > 0 ? ( + <div + css={css` + display: flex; + flex-direction: row; + align-items: center; + justify-content: center; + flex: 1; + & div { + width: 500px; + } + `} + > + <BasicErrorAlert + title={t('Unable to load dashboard')} + body={t( + `The following filters have the 'Select first filter value by default' option checked and could not be loaded, which is preventing the dashboard from rendering: %s`, - missingInitialFilters.join(', '), - )} - /> - </div> - ) : showDashboard ? 
( - <DashboardContainer topLevelTabs={topLevelTabs} /> + missingInitialFilters.join(', '), + )} + /> + </div> + ) : ( + <DashboardContainer topLevelTabs={topLevelTabs} /> + ) ) : ( <Loading /> )} diff --git a/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardContainer.tsx b/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardContainer.tsx index 7c27a9f1f788..f36520ba317f 100644 --- a/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardContainer.tsx +++ b/superset-frontend/src/dashboard/components/DashboardBuilder/DashboardContainer.tsx @@ -18,7 +18,7 @@ */ // ParentSize uses resize observer so the dashboard will update size // when its container size changes, due to e.g., builder side panel opening -import { FC, useEffect, useMemo, useRef } from 'react'; +import { FC, useCallback, useEffect, useMemo, useRef, useState } from 'react'; import { useDispatch, useSelector } from 'react-redux'; import { Filter, @@ -43,12 +43,14 @@ import { import { getChartIdsInFilterScope } from 'src/dashboard/util/getChartIdsInFilterScope'; import findTabIndexByComponentId from 'src/dashboard/util/findTabIndexByComponentId'; import { setInScopeStatusOfFilters } from 'src/dashboard/actions/nativeFilters'; -import { updateDashboardLabelsColor } from 'src/dashboard/actions/dashboardState'; import { - applyColors, - getColorNamespace, - resetColors, -} from 'src/utils/colorScheme'; + applyDashboardLabelsColorOnLoad, + updateDashboardLabelsColor, + persistDashboardLabelsColor, + ensureSyncedSharedLabelsColors, + ensureSyncedLabelsColorMap, +} from 'src/dashboard/actions/dashboardState'; +import { getColorNamespace, resetColors } from 'src/utils/colorScheme'; import { NATIVE_FILTER_DIVIDER_PREFIX } from '../nativeFilters/FiltersConfigModal/utils'; import { findTabsWithChartsInScope } from '../nativeFilters/utils'; import { getRootLevelTabsComponent } from './utils'; @@ -88,7 +90,14 @@ const DashboardContainer: FC<DashboardContainerProps> = 
({ topLevelTabs }) => { const chartIds = useSelector<RootState, number[]>(state => Object.values(state.charts).map(chart => chart.id), ); - + const renderedChartIds = useSelector<RootState, number[]>(state => + Object.values(state.charts) + .filter(chart => chart.chartStatus === 'rendered') + .map(chart => chart.id), + ); + const [dashboardLabelsColorInitiated, setDashboardLabelsColorInitiated] = + useState(false); + const prevRenderedChartIds = useRef<number[]>([]); const prevTabIndexRef = useRef(); const tabIndex = useMemo(() => { const nextTabIndex = findTabIndexByComponentId({ @@ -102,6 +111,18 @@ const DashboardContainer: FC<DashboardContainerProps> = ({ topLevelTabs }) => { prevTabIndexRef.current = nextTabIndex; return nextTabIndex; }, [dashboardLayout, directPathToChild]); + // when all charts have rendered, enforce fresh shared labels + const shouldForceFreshSharedLabelsColors = + dashboardLabelsColorInitiated && + renderedChartIds.length > 0 && + chartIds.length === renderedChartIds.length && + prevRenderedChartIds.current.length < renderedChartIds.length; + + const onBeforeUnload = useCallback(() => { + dispatch(persistDashboardLabelsColor()); + resetColors(getColorNamespace(dashboardInfo?.metadata?.color_namespace)); + prevRenderedChartIds.current = []; + }, [dashboardInfo?.metadata?.color_namespace, dispatch]); useEffect(() => { if (nativeFilterScopes.length === 0) { @@ -141,27 +162,72 @@ const DashboardContainer: FC<DashboardContainerProps> = ({ topLevelTabs }) => { const TOP_OF_PAGE_RANGE = 220; useEffect(() => { - // verify freshness of color map on tab change - // and when loading for first time - setTimeout(() => { - dispatch(updateDashboardLabelsColor()); - }, 500); - }, [directPathToChild, dispatch]); + if (shouldForceFreshSharedLabelsColors) { + // all available charts have rendered, enforce freshest shared label colors + dispatch(ensureSyncedSharedLabelsColors(dashboardInfo.metadata, true)); + } + }, [dashboardInfo.metadata, dispatch, 
shouldForceFreshSharedLabelsColors]); + + useEffect(() => { + // verify freshness of color map + // when charts render to catch new labels + const numRenderedCharts = renderedChartIds.length; + + if ( + dashboardLabelsColorInitiated && + numRenderedCharts > 0 && + prevRenderedChartIds.current.length < numRenderedCharts + ) { + const newRenderedChartIds = renderedChartIds.filter( + id => !prevRenderedChartIds.current.includes(id), + ); + prevRenderedChartIds.current = renderedChartIds; + dispatch(updateDashboardLabelsColor(newRenderedChartIds)); + // new data may have appeared in the map (data changes) + // or new slices may have appeared while changing tabs + dispatch(ensureSyncedLabelsColorMap(dashboardInfo.metadata)); + + if (!shouldForceFreshSharedLabelsColors) { + dispatch(ensureSyncedSharedLabelsColors(dashboardInfo.metadata)); + } + } + }, [ + renderedChartIds, + dispatch, + dashboardLabelsColorInitiated, + dashboardInfo.metadata, + shouldForceFreshSharedLabelsColors, + ]); useEffect(() => { const labelsColorMap = getLabelsColorMap(); - const colorNamespace = getColorNamespace( - dashboardInfo?.metadata?.color_namespace, - ); labelsColorMap.source = LabelsColorMapSource.Dashboard; - // apply labels color as dictated by stored metadata - applyColors(dashboardInfo.metadata); + + if (dashboardInfo?.id && !dashboardLabelsColorInitiated) { + dispatch(applyDashboardLabelsColorOnLoad(dashboardInfo.metadata)); + // apply labels color as dictated by stored metadata (if any) + setDashboardLabelsColorInitiated(true); + } return () => { - resetColors(getColorNamespace(colorNamespace)); + onBeforeUnload(); }; + // eslint-disable-next-line react-hooks/exhaustive-deps - }, [dashboardInfo.id, dispatch]); + }, [dashboardInfo?.id, dispatch]); + + useEffect(() => { + // 'beforeunload' event interferes with Cypress data cleanup process. + // This code prevents 'beforeunload' from triggering in Cypress tests, + // as it is not required for end-to-end testing scenarios. 
+ if (!(window as any).Cypress) { + window.addEventListener('beforeunload', onBeforeUnload); + } + + return () => { + window.removeEventListener('beforeunload', onBeforeUnload); + }; + }, [onBeforeUnload]); return ( <div className="grid-container" data-test="grid-container"> diff --git a/superset-frontend/src/dashboard/components/FiltersBadge/index.tsx b/superset-frontend/src/dashboard/components/FiltersBadge/index.tsx index 485879e95954..8f41b7b8e238 100644 --- a/superset-frontend/src/dashboard/components/FiltersBadge/index.tsx +++ b/superset-frontend/src/dashboard/components/FiltersBadge/index.tsx @@ -288,38 +288,39 @@ export const FiltersBadge = ({ chartId }: FiltersBadgeProps) => { return null; } - return ( - <DetailsPanelPopover - appliedCrossFilterIndicators={appliedCrossFilterIndicators} - appliedIndicators={appliedIndicators} - onHighlightFilterSource={onHighlightFilterSource} - setPopoverVisible={setPopoverVisible} - popoverVisible={popoverVisible} - popoverContentRef={popoverContentRef} - popoverTriggerRef={popoverTriggerRef} - > - <StyledFilterCount - aria-label={t('Applied filters (%s)', filterCount)} - aria-haspopup="true" - role="button" - ref={popoverTriggerRef} - className={cx( - 'filter-counts', - !!appliedCrossFilterIndicators.length && 'has-cross-filters', - )} - tabIndex={0} - onKeyDown={handleKeyDown} - > - <Icons.Filter iconSize="m" /> - <StyledBadge - data-test="applied-filter-count" - className="applied-count" - count={filterCount} - showZero - /> - </StyledFilterCount> - </DetailsPanelPopover> - ); + // return ( + // <DetailsPanelPopover + // appliedCrossFilterIndicators={appliedCrossFilterIndicators} + // appliedIndicators={appliedIndicators} + // onHighlightFilterSource={onHighlightFilterSource} + // setPopoverVisible={setPopoverVisible} + // popoverVisible={popoverVisible} + // popoverContentRef={popoverContentRef} + // popoverTriggerRef={popoverTriggerRef} + // > + // <StyledFilterCount + // aria-label={t('Applied filters (%s)', 
filterCount)} + // aria-haspopup="true" + // role="button" + // ref={popoverTriggerRef} + // className={cx( + // 'filter-counts', + // !!appliedCrossFilterIndicators.length && 'has-cross-filters', + // )} + // tabIndex={0} + // onKeyDown={handleKeyDown} + // > + // <Icons.Filter iconSize="m" /> + // <StyledBadge + // data-test="applied-filter-count" + // className="applied-count" + // count={filterCount} + // showZero + // /> + // </StyledFilterCount> + // </DetailsPanelPopover> + // ); + return null; }; export default memo(FiltersBadge); diff --git a/superset-frontend/src/dashboard/components/Header/Header.test.tsx b/superset-frontend/src/dashboard/components/Header/Header.test.tsx index 79f9cdf30882..e6783e82952e 100644 --- a/superset-frontend/src/dashboard/components/Header/Header.test.tsx +++ b/superset-frontend/src/dashboard/components/Header/Header.test.tsx @@ -373,3 +373,48 @@ test('should render an extension component if one is supplied', () => { screen.getByText('dashboard.nav.right extension component'), ).toBeInTheDocument(); }); + +test('should NOT render MetadataBar when in edit mode', () => { + const mockedProps = { + ...createProps(), + editMode: true, + dashboardInfo: { + ...createProps().dashboardInfo, + userId: '123', + }, + }; + setup(mockedProps); + expect( + screen.queryByText(mockedProps.dashboardInfo.changed_on_delta_humanized), + ).not.toBeInTheDocument(); +}); + +test('should NOT render MetadataBar when embedded', () => { + const mockedProps = { + ...createProps(), + editMode: false, + dashboardInfo: { + ...createProps().dashboardInfo, + userId: undefined, + }, + }; + setup(mockedProps); + expect( + screen.queryByText(mockedProps.dashboardInfo.changed_on_delta_humanized), + ).not.toBeInTheDocument(); +}); + +test('should render MetadataBar when not in edit mode and not embedded', () => { + const mockedProps = { + ...createProps(), + editMode: false, + dashboardInfo: { + ...createProps().dashboardInfo, + userId: '123', + }, + }; + 
setup(mockedProps); + expect( + screen.getByText(mockedProps.dashboardInfo.changed_on_delta_humanized), + ).toBeInTheDocument(); +}); diff --git a/superset-frontend/src/dashboard/components/Header/index.jsx b/superset-frontend/src/dashboard/components/Header/index.jsx index d9f5dbd6d074..b0caea3ceb77 100644 --- a/superset-frontend/src/dashboard/components/Header/index.jsx +++ b/superset-frontend/src/dashboard/components/Header/index.jsx @@ -461,7 +461,6 @@ class Header extends PureComponent { customCss, colorNamespace, dataMask, - setColorScheme, setUnsavedChanges, colorScheme, onUndo, @@ -496,11 +495,11 @@ class Header extends PureComponent { const refreshWarning = dashboardInfo.common?.conf ?.SUPERSET_DASHBOARD_PERIODICAL_REFRESH_WARNING_MESSAGE; + const isEmbedded = !dashboardInfo?.userId; const handleOnPropertiesChange = updates => { const { dashboardInfoChanged, dashboardTitleChanged } = this.props; - setColorScheme(updates.colorScheme); dashboardInfoChanged({ slug: updates.slug, metadata: JSON.parse(updates.jsonMetadata || '{}'), @@ -553,7 +552,7 @@ class Header extends PureComponent { visible={!editMode} /> ), - !editMode && ( + !editMode && !isEmbedded && ( <MetadataBar items={this.getMetadataItems()} tooltipPlacement="bottom" diff --git a/superset-frontend/src/dashboard/components/OverwriteConfirm/OverwriteConfirmModal.test.tsx b/superset-frontend/src/dashboard/components/OverwriteConfirm/OverwriteConfirmModal.test.tsx index 01e9e825bf22..5d93fd6900b0 100644 --- a/superset-frontend/src/dashboard/components/OverwriteConfirm/OverwriteConfirmModal.test.tsx +++ b/superset-frontend/src/dashboard/components/OverwriteConfirm/OverwriteConfirmModal.test.tsx @@ -51,6 +51,11 @@ test('renders diff viewer when it contains overwriteConfirmMetadata', async () = test('requests update dashboard api when save button is clicked', async () => { const updateDashboardEndpoint = `glob:*/api/v1/dashboard/${overwriteConfirmMetadata.dashboardId}`; + const fetchDatasetsEndpoint = 
`glob:*/api/v1/dashboard/${overwriteConfirmMetadata.dashboardId}/datasets`; + + // mock fetch datasets + fetchMock.get(fetchDatasetsEndpoint, []); + fetchMock.put(updateDashboardEndpoint, { id: overwriteConfirmMetadata.dashboardId, last_modified_time: +new Date(), diff --git a/superset-frontend/src/dashboard/components/PropertiesModal/index.tsx b/superset-frontend/src/dashboard/components/PropertiesModal/index.tsx index 8fef96509761..998e8540dcef 100644 --- a/superset-frontend/src/dashboard/components/PropertiesModal/index.tsx +++ b/superset-frontend/src/dashboard/components/PropertiesModal/index.tsx @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -import { useCallback, useEffect, useMemo, useState } from 'react'; +import { useCallback, useEffect, useMemo, useRef, useState } from 'react'; import { omit } from 'lodash'; import { Input } from 'src/components/Input'; import { FormItem } from 'src/components/Form'; @@ -44,7 +44,19 @@ import withToasts from 'src/components/MessageToasts/withToasts'; import TagType from 'src/types/TagType'; import { fetchTags, OBJECT_TYPES } from 'src/features/tags/tags'; import { loadTags } from 'src/components/Tags/utils'; -import { applyColors, getColorNamespace } from 'src/utils/colorScheme'; +import { + applyColors, + getColorNamespace, + getLabelsColorMapEntries, +} from 'src/utils/colorScheme'; +import getOwnerName from 'src/utils/getOwnerName'; +import Owner from 'src/types/Owner'; +import { useDispatch } from 'react-redux'; +import { + setColorScheme, + setDashboardMetadata, +} from 'src/dashboard/actions/dashboardState'; +import { areObjectsEqual } from 'src/reduxUtils'; const StyledFormItem = styled(FormItem)` margin-bottom: 0; @@ -82,6 +94,7 @@ type DashboardInfo = { certifiedBy: string; certificationDetails: string; isManagedExternally: boolean; + metadata: Record<string, any>; }; const PropertiesModal = ({ @@ -96,10 +109,11 @@ const PropertiesModal = ({ onSubmit = () => 
{}, show = false, }: PropertiesModalProps) => { + const dispatch = useDispatch(); const [form] = AntdForm.useForm(); const [isLoading, setIsLoading] = useState(false); const [isAdvancedOpen, setIsAdvancedOpen] = useState(false); - const [colorScheme, setColorScheme] = useState(currentColorScheme); + const [colorScheme, setCurrentColorScheme] = useState(currentColorScheme); const [jsonMetadata, setJsonMetadata] = useState(''); const [dashboardInfo, setDashboardInfo] = useState<DashboardInfo>(); const [owners, setOwners] = useState<Owners>([]); @@ -107,6 +121,7 @@ const PropertiesModal = ({ const saveLabel = onlyApply ? t('Apply') : t('Save'); const [tags, setTags] = useState<TagType[]>([]); const categoricalSchemeRegistry = getCategoricalSchemeRegistry(); + const originalDashboardMetadata = useRef<Record<string, any>>({}); const tagsAsSelectValues = useMemo(() => { const selectTags = tags.map((tag: { id: number; name: string }) => ({ @@ -180,21 +195,24 @@ const PropertiesModal = ({ certifiedBy: certified_by || '', certificationDetails: certification_details || '', isManagedExternally: is_managed_externally || false, + metadata, }; form.setFieldsValue(dashboardInfo); setDashboardInfo(dashboardInfo); setOwners(owners); setRoles(roles); - setColorScheme(metadata.color_scheme); + setCurrentColorScheme(metadata.color_scheme); const metaDataCopy = omit(metadata, [ 'positions', 'shared_label_colors', + 'map_label_colors', 'color_scheme_domain', ]); setJsonMetadata(metaDataCopy ? 
jsonStringify(metaDataCopy) : ''); + originalDashboardMetadata.current = metadata; }, [form], ); @@ -250,17 +268,10 @@ const PropertiesModal = ({ }; const handleOwnersSelectValue = () => { - const parsedOwners = (owners || []).map( - (owner: { - id: number; - first_name?: string; - last_name?: string; - full_name?: string; - }) => ({ - value: owner.id, - label: owner.full_name || `${owner.first_name} ${owner.last_name}`, - }), - ); + const parsedOwners = (owners || []).map((owner: Owner) => ({ + value: owner.id, + label: getOwnerName(owner), + })); return parsedOwners; }; @@ -274,6 +285,8 @@ const PropertiesModal = ({ return parsedRoles; }; + const handleOnCancel = () => onHide(); + const onColorSchemeChange = ( colorScheme = '', { updateMetadata = true } = {}, @@ -292,20 +305,21 @@ const PropertiesModal = ({ throw new Error('A valid color scheme is required'); } + jsonMetadataObj.color_scheme = colorScheme; + jsonMetadataObj.label_colors = jsonMetadataObj.label_colors || {}; + + setCurrentColorScheme(colorScheme); + dispatch(setColorScheme(colorScheme)); + // update metadata to match selection if (updateMetadata) { - jsonMetadataObj.color_scheme = colorScheme; - jsonMetadataObj.label_colors = jsonMetadataObj.label_colors || {}; - setJsonMetadata(jsonStringify(jsonMetadataObj)); } - setColorScheme(colorScheme); }; const onFinish = () => { const { title, slug, certifiedBy, certificationDetails } = form.getFieldsValue(); - let currentColorScheme = colorScheme; let currentJsonMetadata = jsonMetadata; // validate currentJsonMetadata @@ -323,29 +337,48 @@ const PropertiesModal = ({ return; } - const copyMetadata = { ...metadata }; const colorNamespace = getColorNamespace(metadata?.color_namespace); - // color scheme in json metadata has precedence over selection - currentColorScheme = metadata?.color_scheme || colorScheme; - - // remove information from user facing input - if (metadata?.shared_label_colors) { - delete metadata.shared_label_colors; - } - if 
(metadata?.color_scheme_domain) { - delete metadata.color_scheme_domain; - } - - // only apply colors, the user has not saved yet - applyColors(copyMetadata, true); + const updatedColorScheme = metadata?.color_scheme || colorScheme; + const shouldGoFresh = + updatedColorScheme !== originalDashboardMetadata.current.color_scheme; + const shouldResetCustomLabels = !areObjectsEqual( + originalDashboardMetadata.current.label_colors || {}, + metadata?.label_colors || {}, + ); + const currentCustomLabels = Object.keys(metadata?.label_colors || {}); + const prevCustomLabels = Object.keys( + originalDashboardMetadata.current.label_colors || {}, + ); + const resettableCustomLabels = + currentCustomLabels.length > 0 ? currentCustomLabels : prevCustomLabels; + const freshCustomLabels = + shouldResetCustomLabels && resettableCustomLabels.length > 0 + ? resettableCustomLabels + : false; + const jsonMetadataObj = getJsonMetadata(); + const customLabelColors = jsonMetadataObj.label_colors || {}; + const updatedDashboardMetadata = { + ...originalDashboardMetadata.current, + label_colors: customLabelColors, + color_scheme: updatedColorScheme, + }; - currentJsonMetadata = jsonStringify(metadata); + originalDashboardMetadata.current = updatedDashboardMetadata; + applyColors(updatedDashboardMetadata, shouldGoFresh || freshCustomLabels); + dispatch( + setDashboardMetadata({ + ...updatedDashboardMetadata, + map_label_colors: getLabelsColorMapEntries(customLabelColors), + }), + ); - onColorSchemeChange(currentColorScheme, { + onColorSchemeChange(updatedColorScheme, { updateMetadata: false, }); + currentJsonMetadata = jsonStringify(metadata); + const moreOnSubmitProps: { roles?: Roles } = {}; const morePutProps: { roles?: number[]; tags?: (number | undefined)[] } = {}; @@ -562,14 +595,14 @@ const PropertiesModal = ({ return ( <Modal show={show} - onHide={onHide} + onHide={handleOnCancel} title={t('Dashboard properties')} footer={ <> <Button htmlType="button" buttonSize="small" - 
onClick={onHide} + onClick={handleOnCancel} data-test="properties-modal-cancel-button" cta > diff --git a/superset-frontend/src/dashboard/components/SliceHeader/index.tsx b/superset-frontend/src/dashboard/components/SliceHeader/index.tsx index 0ffa82756c9a..7afece2b9c85 100644 --- a/superset-frontend/src/dashboard/components/SliceHeader/index.tsx +++ b/superset-frontend/src/dashboard/components/SliceHeader/index.tsx @@ -17,11 +17,18 @@ * under the License. */ import { FC, ReactNode, useContext, useEffect, useRef, useState } from 'react'; -import { css, getExtensionsRegistry, styled, t } from '@superset-ui/core'; +import { + css, + getExtensionsRegistry, + styled, + t, + keyframes, +} from '@superset-ui/core'; import { useUiConfig } from 'src/components/UiConfigContext'; import { Tooltip } from 'src/components/Tooltip'; import { useSelector } from 'react-redux'; import EditableTitle from 'src/components/EditableTitle'; +import { Skeleton } from 'src/components'; import SliceHeaderControls, { SliceHeaderControlsProps, } from 'src/dashboard/components/SliceHeaderControls'; @@ -57,6 +64,23 @@ const CrossFilterIcon = styled(Icons.ApartmentOutlined)` `} `; +const shimmer = keyframes` + 0% { background-position: 0% 50%; } + 100% { background-position: 200% 50%; } +`; + +const HeaderBar = styled.div` + border-radius: ${({ theme }) => theme.borderRadius}px; + height: 16px; + width: 32%; + margin-bottom: ${({ theme }) => theme.gridUnit * 2}px; + background: ${({ theme }) => + `linear-gradient(90deg, ${theme.colors.grayscale.light3}, ${theme.colors.grayscale.light2}, ${theme.colors.grayscale.light3})`}; + background-size: 200% 200%; + animation: ${shimmer} 1.5s ease-in-out infinite; + border-radius: 6px; +`; + const ChartHeaderStyles = styled.div` ${({ theme }) => css` font-size: ${theme.typography.sizes.l}px; @@ -75,6 +99,8 @@ const ChartHeaderStyles = styled.div` display: -webkit-box; -webkit-line-clamp: 2; -webkit-box-orient: vertical; + display: flex; + align-items: 
center; & > span.ant-tooltip-open { display: inline; @@ -189,6 +215,25 @@ const SliceHeader: FC<SliceHeaderProps> = ({ const exploreUrl = `/explore/?dashboard_page_id=${dashboardPageId}&slice_id=${slice.slice_id}`; + if (chartStatus === 'loading') { + return ( + <> + <ChartHeaderStyles data-test="slice-header" ref={innerRef}> + <div className="header-title" ref={headerRef}> + <div + css={theme => css` + width: 60%; + min-width: 160px; + `} + > + {/* <Skeleton.Input active size="small" /> */} + <HeaderBar /> + </div> + </div> + </ChartHeaderStyles> + </> + ); + } return ( <ChartHeaderStyles data-test="slice-header" ref={innerRef}> <div className="header-title" ref={headerRef}> @@ -206,6 +251,45 @@ const SliceHeader: FC<SliceHeaderProps> = ({ url={canExplore ? exploreUrl : undefined} /> </Tooltip> + <div + css={theme => css` + display: flex; + align-items: center; + justify-content: space-between; + gap: ${theme.gridUnit * 1.5}px; + margin-top: ${theme.gridUnit / 2}px; + margin-left: ${theme.gridUnit * 1.5}px; + transform: scale(0.95); + `} + > + <h3 + css={theme => css` + margin: 0; + font-size: ${theme.typography.sizes.m}px; + display: flex; + align-items: center; + gap: ${theme.gridUnit}px; + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + line-height: 0; + `} + > + {slice.description?.trim() && ( + <Tooltip title={slice.description}> + <Icons.InfoCircleOutlined + style={{ + fontSize: '14px', + color: '#999', + cursor: 'pointer', + flexShrink: 0, + lineHeight: '0 !important', + }} + /> + </Tooltip> + )} + </h3> + </div> {!!Object.values(annotationQuery).length && ( <Tooltip id="annotations-loading-tooltip" diff --git a/superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx b/superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx index 5f8eaa7a9fe2..f15801e4d4ba 100644 --- a/superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx +++ 
b/superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx @@ -1,3 +1,5 @@ +/* eslint-disable import/no-unresolved */ +/* eslint-disable theme-colors/no-literal-colors */ /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file @@ -757,13 +759,13 @@ const SliceHeaderControls = (props: SliceHeaderControlsPropsWithRouter) => { <Menu.Divider /> - {slice.description && ( + {/* {slice.description && ( <Menu.Item key={MenuKeys.ToggleChartDescription}> {props.isDescriptionExpanded ? t('Hide chart description') : t('Show chart description')} </Menu.Item> - )} + )} */} {canExplore && ( <Menu.Item key={MenuKeys.ExploreChart}> @@ -927,6 +929,7 @@ const SliceHeaderControls = (props: SliceHeaderControlsPropsWithRouter) => { }} /> )} + <NoAnimationDropdown overlay={menu} overlayStyle={dropdownOverlayStyle} diff --git a/superset-frontend/src/dashboard/components/SyncDashboardState/index.tsx b/superset-frontend/src/dashboard/components/SyncDashboardState/index.tsx index 293b4d6f0ad0..fab9b9672d2c 100644 --- a/superset-frontend/src/dashboard/components/SyncDashboardState/index.tsx +++ b/superset-frontend/src/dashboard/components/SyncDashboardState/index.tsx @@ -28,6 +28,7 @@ import { } from 'src/utils/localStorageHelpers'; import { RootState } from 'src/dashboard/types'; import { getActiveFilters } from 'src/dashboard/util/activeDashboardFilters'; +import { enforceSharedLabelsColorsArray } from 'src/utils/colorScheme'; type Props = { dashboardPageId: string }; @@ -66,8 +67,10 @@ const SyncDashboardState: FC<Props> = ({ dashboardPageId }) => { >( ({ dashboardInfo, dashboardState, nativeFilters, dataMask }) => ({ labelsColor: dashboardInfo.metadata?.label_colors || EMPTY_OBJECT, - labelsColorMap: - dashboardInfo.metadata?.shared_label_colors || EMPTY_OBJECT, + labelsColorMap: dashboardInfo.metadata?.map_label_colors || EMPTY_OBJECT, + sharedLabelsColors: enforceSharedLabelsColorsArray( + 
dashboardInfo.metadata?.shared_label_colors, + ), colorScheme: dashboardState?.colorScheme, chartConfiguration: dashboardInfo.metadata?.chart_configuration || EMPTY_OBJECT, diff --git a/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx b/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx index 6b79d3182654..1f6fb5658e63 100644 --- a/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx +++ b/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx @@ -197,11 +197,19 @@ class Chart extends Component { } } } else if ( - // chart should re-render if color scheme or label color was changed + // chart should re-render if color scheme or label colors were changed nextProps.formData?.color_scheme !== this.props.formData?.color_scheme || !areObjectsEqual( - nextProps.formData?.label_colors, - this.props.formData?.label_colors, + nextProps.formData?.label_colors || {}, + this.props.formData?.label_colors || {}, + ) || + !areObjectsEqual( + nextProps.formData?.map_label_colors || {}, + this.props.formData?.map_label_colors || {}, + ) || + !isEqual( + nextProps.formData?.shared_label_colors || [], + this.props.formData?.shared_label_colors || [], ) ) { return true; @@ -528,6 +536,8 @@ class Chart extends Component { datasetsStatus={datasetsStatus} isInView={isInView} emitCrossFilters={emitCrossFilters} + description={slice.description} + title={slice.slice_name} /> </ChartWrapper> </SliceContainer> diff --git a/superset-frontend/src/dashboard/components/gridComponents/ChartHolder.test.tsx b/superset-frontend/src/dashboard/components/gridComponents/ChartHolder.test.tsx index 990647b2c365..30e6ce6652e6 100644 --- a/superset-frontend/src/dashboard/components/gridComponents/ChartHolder.test.tsx +++ b/superset-frontend/src/dashboard/components/gridComponents/ChartHolder.test.tsx @@ -109,9 +109,7 @@ describe('ChartHolder', () => { it('should render empty state', async () => { renderWrapper(); - expect( - 
screen.getByText('No results were returned for this query'), - ).toBeVisible(); + expect(screen.getByText('No data')).toBeVisible(); expect( screen.queryByText( 'Make sure that the controls are configured properly and the datasource contains data for the selected time range', diff --git a/superset-frontend/src/dashboard/components/gridComponents/ChartHolder.tsx b/superset-frontend/src/dashboard/components/gridComponents/ChartHolder.tsx index 2f4a77c39b07..89e4f9ecca54 100644 --- a/superset-frontend/src/dashboard/components/gridComponents/ChartHolder.tsx +++ b/superset-frontend/src/dashboard/components/gridComponents/ChartHolder.tsx @@ -21,7 +21,7 @@ import { useState, useMemo, useCallback, useEffect } from 'react'; import { ResizeCallback, ResizeStartCallback } from 're-resizable'; import cx from 'classnames'; import { useSelector } from 'react-redux'; -import { css } from '@superset-ui/core'; +import { css, useTheme } from '@superset-ui/core'; import { LayoutItem, RootState } from 'src/dashboard/types'; import AnchorLink from 'src/dashboard/components/AnchorLink'; import Chart from 'src/dashboard/containers/Chart'; @@ -70,15 +70,6 @@ interface ChartHolderProps { isInView: boolean; } -const fullSizeStyle = css` - && { - position: fixed; - z-index: 3000; - left: 0; - top: 0; - } -`; - const ChartHolder: React.FC<ChartHolderProps> = ({ id, parentId, @@ -102,9 +93,35 @@ const ChartHolder: React.FC<ChartHolderProps> = ({ setFullSizeChartId, isInView, }) => { + const theme = useTheme(); + const fullSizeStyle = css` + && { + position: fixed; + z-index: 3000; + left: 0; + top: 0; + padding: ${theme.gridUnit * 2}px; + } + `; const { chartId } = component.meta; const isFullSize = fullSizeChartId === chartId; + // Control HTML root overflow when in fullscreen mode + useEffect(() => { + const htmlElement = document.documentElement; + if (isFullSize) { + // Store original overflow value + const originalOverflow = htmlElement.style.overflow; + htmlElement.style.overflow = 
'hidden'; + + // Cleanup function to restore original overflow + return () => { + htmlElement.style.overflow = originalOverflow; + }; + } + return undefined; + }, [isFullSize]); + const focusHighlightStyles = useFilterFocusHighlightStyles(chartId); const dashboardState = useSelector( (state: RootState) => state.dashboardState, diff --git a/superset-frontend/src/dashboard/components/gridComponents/Header.jsx b/superset-frontend/src/dashboard/components/gridComponents/Header.jsx index e643cb071b27..c786a9cf5c19 100644 --- a/superset-frontend/src/dashboard/components/gridComponents/Header.jsx +++ b/superset-frontend/src/dashboard/components/gridComponents/Header.jsx @@ -47,6 +47,7 @@ const propTypes = { parentComponent: componentShape.isRequired, index: PropTypes.number.isRequired, editMode: PropTypes.bool.isRequired, + embeddedMode: PropTypes.bool.isRequired, // redux handleComponentDrop: PropTypes.func.isRequired, @@ -166,6 +167,7 @@ class Header extends PureComponent { index, handleComponentDrop, editMode, + embeddedMode, } = this.props; const headerStyle = headerStyleOptions.find( @@ -234,7 +236,7 @@ class Header extends PureComponent { onSaveTitle={this.handleChangeText} showTooltip={false} /> - {!editMode && ( + {!editMode && !embeddedMode && ( <AnchorLink id={component.id} dashboardId={dashboardId} /> )} </HeaderStyles> diff --git a/superset-frontend/src/dashboard/components/gridComponents/Header.test.jsx b/superset-frontend/src/dashboard/components/gridComponents/Header.test.jsx index ddf9edd7cf02..0446eb13afb5 100644 --- a/superset-frontend/src/dashboard/components/gridComponents/Header.test.jsx +++ b/superset-frontend/src/dashboard/components/gridComponents/Header.test.jsx @@ -45,6 +45,7 @@ describe('Header', () => { parentComponent: newComponentFactory(DASHBOARD_GRID_TYPE), index: 0, editMode: false, + embeddedMode: false, filters: {}, handleComponentDrop() {}, deleteComponent() {}, @@ -118,4 +119,19 @@ describe('Header', () => { 
expect(deleteComponent.callCount).toBe(1); }); + + it('should render the AnchorLink in view mode', () => { + const wrapper = setup(); + expect(wrapper.find('AnchorLink')).toExist(); + }); + + it('should not render the AnchorLink in edit mode', () => { + const wrapper = setup({ editMode: true }); + expect(wrapper.find('AnchorLink')).not.toExist(); + }); + + it('should not render the AnchorLink in embedded mode', () => { + const wrapper = setup({ embeddedMode: true }); + expect(wrapper.find('AnchorLink')).not.toExist(); + }); }); diff --git a/superset-frontend/src/dashboard/components/gridComponents/Tab.jsx b/superset-frontend/src/dashboard/components/gridComponents/Tab.jsx index eaa8ca00ab4b..a8a1317661e0 100644 --- a/superset-frontend/src/dashboard/components/gridComponents/Tab.jsx +++ b/superset-frontend/src/dashboard/components/gridComponents/Tab.jsx @@ -16,23 +16,23 @@ * specific language governing permissions and limitations * under the License. */ -import { PureComponent, Fragment } from 'react'; -import PropTypes from 'prop-types'; +import { styled, t } from '@superset-ui/core'; import classNames from 'classnames'; -import { bindActionCreators } from 'redux'; +import PropTypes from 'prop-types'; +import { Fragment, PureComponent } from 'react'; import { connect } from 'react-redux'; -import { styled, t } from '@superset-ui/core'; +import { bindActionCreators } from 'redux'; -import { EmptyStateMedium } from 'src/components/EmptyState'; import EditableTitle from 'src/components/EditableTitle'; +import { EmptyStateMedium } from 'src/components/EmptyState'; import { setEditMode } from 'src/dashboard/actions/dashboardState'; -import DashboardComponent from 'src/dashboard/containers/DashboardComponent'; import AnchorLink from 'src/dashboard/components/AnchorLink'; import DragDroppable, { Droppable, } from 'src/dashboard/components/dnd/DragDroppable'; -import { componentShape } from 'src/dashboard/util/propShapes'; +import DashboardComponent from 
'src/dashboard/containers/DashboardComponent'; import { TAB_TYPE } from 'src/dashboard/util/componentTypes'; +import { componentShape } from 'src/dashboard/util/propShapes'; export const RENDER_TAB = 'RENDER_TAB'; export const RENDER_TAB_CONTENT = 'RENDER_TAB_CONTENT'; @@ -50,6 +50,7 @@ const propTypes = { onHoverTab: PropTypes.func, editMode: PropTypes.bool.isRequired, canEdit: PropTypes.bool.isRequired, + embeddedMode: PropTypes.bool, // grid related availableColumnCount: PropTypes.number, @@ -268,6 +269,7 @@ class Tab extends PureComponent { editMode, isFocused, isHighlighted, + embeddedMode, } = this.props; return ( @@ -297,7 +299,7 @@ class Tab extends PureComponent { showTooltip={false} editing={editMode && isFocused} /> - {!editMode && ( + {!editMode && !embeddedMode && ( <AnchorLink id={component.id} dashboardId={this.props.dashboardId} diff --git a/superset-frontend/src/dashboard/components/gridComponents/Tab.test.tsx b/superset-frontend/src/dashboard/components/gridComponents/Tab.test.tsx index 35f47de04b3b..6279d320a2fc 100644 --- a/superset-frontend/src/dashboard/components/gridComponents/Tab.test.tsx +++ b/superset-frontend/src/dashboard/components/gridComponents/Tab.test.tsx @@ -90,6 +90,7 @@ const createProps = () => ({ type: 'TABS', }, editMode: false, + embeddedMode: false, undoLength: 0, redoLength: 0, filters: {}, @@ -400,3 +401,41 @@ test('Render tab content with no children, editMode: true, canEdit: true', () => screen.getByRole('link', { name: 'create a new chart' }), ).toHaveAttribute('href', '/chart/add?dashboard_id=23'); }); + +test('AnchorLink renders in view mode', () => { + const props = createProps(); + props.renderType = 'RENDER_TAB'; + + render(<Tab {...props} />, { + useRedux: true, + useDnd: true, + }); + + expect(screen.queryByTestId('anchor-link')).toBeInTheDocument(); +}); + +test('AnchorLink does not render in edit mode', () => { + const props = createProps(); + props.editMode = true; + props.renderType = 'RENDER_TAB'; + + 
render(<Tab {...props} />, { + useRedux: true, + useDnd: true, + }); + + expect(screen.queryByTestId('anchor-link')).not.toBeInTheDocument(); +}); + +test('AnchorLink does not render in embedded mode', () => { + const props = createProps(); + props.embeddedMode = true; + props.renderType = 'RENDER_TAB'; + + render(<Tab {...props} />, { + useRedux: true, + useDnd: true, + }); + + expect(screen.queryByTestId('anchor-link')).not.toBeInTheDocument(); +}); diff --git a/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsImage.test.tsx b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsImage.test.tsx index 0242bd009157..9b01372bd52a 100644 --- a/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsImage.test.tsx +++ b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsImage.test.tsx @@ -16,20 +16,27 @@ * specific language governing permissions and limitations * under the License. */ +import userEvent from '@testing-library/user-event'; import { SyntheticEvent } from 'react'; import { render, screen, waitFor } from 'spec/helpers/testing-library'; -import userEvent from '@testing-library/user-event'; import { Menu } from 'src/components/Menu'; import downloadAsImage from 'src/utils/downloadAsImage'; import DownloadAsImage from './DownloadAsImage'; +const mockAddDangerToast = jest.fn(); + jest.mock('src/utils/downloadAsImage', () => ({ __esModule: true, default: jest.fn(() => (_e: SyntheticEvent) => {}), })); +jest.mock('src/components/MessageToasts/withToasts', () => ({ + useToasts: () => ({ + addDangerToast: mockAddDangerToast, + }), +})); + const createProps = () => ({ - addDangerToast: jest.fn(), text: 'Download as Image', dashboardTitle: 'Test Dashboard', logEvent: jest.fn(), @@ -40,22 +47,24 @@ const renderComponent = () => { <Menu> <DownloadAsImage {...createProps()} /> </Menu>, + { + useRedux: true, + }, ); }; test('Should call download image on click', async () => { - 
const props = createProps(); renderComponent(); await waitFor(() => { expect(downloadAsImage).toBeCalledTimes(0); - expect(props.addDangerToast).toBeCalledTimes(0); + expect(mockAddDangerToast).toBeCalledTimes(0); }); userEvent.click(screen.getByRole('button', { name: 'Download as Image' })); await waitFor(() => { expect(downloadAsImage).toBeCalledTimes(1); - expect(props.addDangerToast).toBeCalledTimes(0); + expect(mockAddDangerToast).toBeCalledTimes(0); }); }); diff --git a/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsImage.tsx b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsImage.tsx index 0cb3f1fbb4f4..505a9b8184ae 100644 --- a/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsImage.tsx +++ b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsImage.tsx @@ -21,20 +21,20 @@ import { logging, t } from '@superset-ui/core'; import { Menu } from 'src/components/Menu'; import { LOG_ACTIONS_DASHBOARD_DOWNLOAD_AS_IMAGE } from 'src/logger/LogUtils'; import downloadAsImage from 'src/utils/downloadAsImage'; +import { useToasts } from 'src/components/MessageToasts/withToasts'; export default function DownloadAsImage({ text, logEvent, dashboardTitle, - addDangerToast, ...rest }: { text: string; - addDangerToast: Function; dashboardTitle: string; logEvent?: Function; }) { const SCREENSHOT_NODE_SELECTOR = '.dashboard'; + const { addDangerToast } = useToasts(); const onDownloadImage = async (e: SyntheticEvent) => { try { downloadAsImage(SCREENSHOT_NODE_SELECTOR, dashboardTitle, true)(e); diff --git a/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsPdf.test.tsx b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsPdf.test.tsx new file mode 100644 index 000000000000..56916f4b6476 --- /dev/null +++ b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsPdf.test.tsx @@ -0,0 +1,73 @@ +/** + * 
Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { SyntheticEvent } from 'react'; +import { render, screen, waitFor } from 'spec/helpers/testing-library'; +import userEvent from '@testing-library/user-event'; +import { Menu } from 'src/components/Menu'; +import downloadAsPdf from 'src/utils/downloadAsPdf'; +import DownloadAsPdf from './DownloadAsPdf'; + +const mockAddDangerToast = jest.fn(); + +jest.mock('src/utils/downloadAsPdf', () => ({ + __esModule: true, + default: jest.fn(() => (_e: SyntheticEvent) => {}), +})); + +jest.mock('src/components/MessageToasts/withToasts', () => ({ + useToasts: () => ({ + addDangerToast: mockAddDangerToast, + }), +})); + +const createProps = () => ({ + text: 'Export as PDF', + dashboardTitle: 'Test Dashboard', + logEvent: jest.fn(), +}); + +const renderComponent = () => { + render( + <Menu> + <DownloadAsPdf {...createProps()} /> + </Menu>, + { useRedux: true }, + ); +}; + +test('Should call download pdf on click', async () => { + renderComponent(); + await waitFor(() => { + expect(downloadAsPdf).toHaveBeenCalledTimes(0); + expect(mockAddDangerToast).toHaveBeenCalledTimes(0); + }); + + userEvent.click(screen.getByRole('button', { name: 'Export as PDF' })); + + await waitFor(() 
=> { + expect(downloadAsPdf).toHaveBeenCalledTimes(1); + expect(mockAddDangerToast).toHaveBeenCalledTimes(0); + }); +}); + +test('Component is rendered with role="button"', async () => { + renderComponent(); + const button = screen.getByRole('button', { name: 'Export as PDF' }); + expect(button).toBeInTheDocument(); +}); diff --git a/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsPdf.tsx b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsPdf.tsx new file mode 100644 index 000000000000..a07a2e232c6d --- /dev/null +++ b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadAsPdf.tsx @@ -0,0 +1,55 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { SyntheticEvent } from 'react'; +import { logging, t } from '@superset-ui/core'; +import { Menu } from 'src/components/Menu'; +import downloadAsPdf from 'src/utils/downloadAsPdf'; +import { LOG_ACTIONS_DASHBOARD_DOWNLOAD_AS_PDF } from 'src/logger/LogUtils'; +import { useToasts } from 'src/components/MessageToasts/withToasts'; + +export default function DownloadAsPdf({ + text, + logEvent, + dashboardTitle, + ...rest +}: { + text: string; + dashboardTitle: string; + logEvent?: Function; +}) { + const SCREENSHOT_NODE_SELECTOR = '.dashboard'; + const { addDangerToast } = useToasts(); + const onDownloadPdf = async (e: SyntheticEvent) => { + try { + downloadAsPdf(SCREENSHOT_NODE_SELECTOR, dashboardTitle, true)(e); + } catch (error) { + logging.error(error); + addDangerToast(t('Sorry, something went wrong. Try again later.')); + } + logEvent?.(LOG_ACTIONS_DASHBOARD_DOWNLOAD_AS_PDF); + }; + + return ( + <Menu.Item key="download-pdf" {...rest}> + <div onClick={onDownloadPdf} role="button" tabIndex={0}> + {text} + </div> + </Menu.Item> + ); +} diff --git a/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadScreenshot.test.tsx b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadScreenshot.test.tsx index 21bd7022eb36..9943727ac1ed 100644 --- a/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadScreenshot.test.tsx +++ b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadScreenshot.test.tsx @@ -84,7 +84,7 @@ describe('DownloadScreenshot component', () => { const props = defaultProps(); fetchMock.post( - `glob:*/api/v1/dashboard/${props.dashboardId}/cache_dashboard_screenshot`, + `glob:*/api/v1/dashboard/${props.dashboardId}/cache_dashboard_screenshot/`, { status: 400, body: {}, @@ -105,19 +105,23 @@ describe('DownloadScreenshot component', () => { test('displays success message when API call succeeds', async () => { const props = defaultProps(); fetchMock.post( - 
`glob:*/api/v1/dashboard/${props.dashboardId}/cache_dashboard_screenshot`, + `glob:*/api/v1/dashboard/${props.dashboardId}/cache_dashboard_screenshot/`, { status: 200, body: { image_url: 'mocked_image_url', + cache_key: 'mocked_cache_key', }, }, ); - fetchMock.get('glob:*/mocked_image_url?download_format=pdf', { - status: 200, - body: {}, - }); + fetchMock.get( + `glob:*/api/v1/dashboard/${props.dashboardId}/screenshot/mocked_cache_key/?download_format=pdf`, + { + status: 200, + body: {}, + }, + ); renderComponent(); @@ -126,18 +130,21 @@ describe('DownloadScreenshot component', () => { await waitFor(() => { expect(mockAddInfoToast).toHaveBeenCalledWith( 'The screenshot is being generated. Please, do not leave the page.', + { + noDuplicate: true, + }, ); }); }); - test('throws error when no image URL is provided', async () => { + test('throws error when no image cache key is provided', async () => { const props = defaultProps(); fetchMock.post( - `glob:*/api/v1/dashboard/${props.dashboardId}/cache_dashboard_screenshot`, + `glob:*/api/v1/dashboard/${props.dashboardId}/cache_dashboard_screenshot/`, { status: 200, body: { - image_url: '', + cache_key: '', }, }, ); @@ -156,24 +163,27 @@ describe('DownloadScreenshot component', () => { test('displays success message when image retrieval succeeds', async () => { const props = defaultProps(); - const imageUrl = 'glob:*/mocked_image_url?download_format=pdf'; fetchMock.post( - `glob:*/api/v1/dashboard/${props.dashboardId}/cache_dashboard_screenshot`, + `glob:*/api/v1/dashboard/${props.dashboardId}/cache_dashboard_screenshot/`, { status: 200, body: { image_url: 'mocked_image_url', + cache_key: 'mocked_cache_key', }, }, ); - fetchMock.get(imageUrl, { - status: 200, - headers: { - 'Content-Type': 'image/png', + fetchMock.get( + `glob:*/api/v1/dashboard/${props.dashboardId}/screenshot/mocked_cache_key/?download_format=pdf`, + { + status: 200, + headers: { + 'Content-Type': 'application/pdf', + }, + body: new Blob([], { type: 
'application/pdf' }), }, - body: new Blob([], { type: 'image/png' }), - }); + ); global.URL.createObjectURL = jest.fn(() => 'mockedObjectURL'); global.URL.revokeObjectURL = jest.fn(); @@ -185,13 +195,17 @@ describe('DownloadScreenshot component', () => { userEvent.click(screen.getByRole('button', { name: 'Download' })); await waitFor(() => { - expect(fetchMock.calls(imageUrl).length).toBe(1); + expect( + fetchMock.calls( + `glob:*/api/v1/dashboard/${props.dashboardId}/screenshot/mocked_cache_key/?download_format=pdf`, + ).length, + ).toBe(1); }); // Wait for the successful image retrieval message await waitFor(() => { expect(mockAddSuccessToast).toHaveBeenCalledWith( - 'The screenshot is now being downloaded.', + 'The screenshot has been downloaded.', ); }); }); diff --git a/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadScreenshot.tsx b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadScreenshot.tsx index 3410accd1d6d..c61d263dd59b 100644 --- a/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadScreenshot.tsx +++ b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/DownloadScreenshot.tsx @@ -17,7 +17,12 @@ * under the License. 
*/ -import { logging, t, SupersetClient } from '@superset-ui/core'; +import { + logging, + t, + SupersetClient, + SupersetApiError, +} from '@superset-ui/core'; import { Menu } from 'src/components/Menu'; import { LOG_ACTIONS_DASHBOARD_DOWNLOAD_AS_IMAGE, @@ -27,6 +32,8 @@ import { RootState } from 'src/dashboard/types'; import { useSelector } from 'react-redux'; import { useToasts } from 'src/components/MessageToasts/withToasts'; import { last } from 'lodash'; +import { getDashboardUrlParams } from 'src/utils/urlUtils'; +import { useCallback, useEffect, useRef } from 'react'; import { DownloadScreenshotFormat } from './types'; const RETRY_INTERVAL = 3000; @@ -44,23 +51,77 @@ export default function DownloadScreenshot({ logEvent?: Function; format: string; }) { + const activeTabs = useSelector( + (state: RootState) => state.dashboardState.activeTabs || undefined, + ); const anchor = useSelector( - (state: RootState) => last(state.dashboardState.activeTabs) || undefined, + (state: RootState) => + last(state.dashboardState.directPathToChild) || undefined, + ); + const dataMask = useSelector( + (state: RootState) => state.dataMask || undefined, ); const { addDangerToast, addSuccessToast, addInfoToast } = useToasts(); + const currentIntervalIds = useRef<NodeJS.Timeout[]>([]); + + const printLoadingToast = () => + addInfoToast( + t('The screenshot is being generated. Please, do not leave the page.'), + { + noDuplicate: true, + }, + ); + + const printFailureToast = useCallback( + () => + addDangerToast( + t('The screenshot could not be downloaded. 
Please, try again later.'), + ), + [addDangerToast], + ); + + const printSuccessToast = useCallback( + () => addSuccessToast(t('The screenshot has been downloaded.')), + [addSuccessToast], + ); + + const stopIntervals = useCallback( + (message?: 'success' | 'failure') => { + currentIntervalIds.current.forEach(clearInterval); + + if (message === 'failure') { + printFailureToast(); + } + if (message === 'success') { + printSuccessToast(); + } + }, + [printFailureToast, printSuccessToast], + ); const onDownloadScreenshot = () => { let retries = 0; + const toastIntervalId = setInterval( + () => printLoadingToast(), + RETRY_INTERVAL, + ); + + currentIntervalIds.current = [ + ...(currentIntervalIds.current || []), + toastIntervalId, + ]; + + printLoadingToast(); + // this function checks if the image is ready - const checkImageReady = (imageUrl: string) => - fetch(`${imageUrl}?download_format=${format}`) - .then(response => { - if (response.status === 404) { - throw new Error('Image not ready'); - } - return response.blob(); - }) + const checkImageReady = (cacheKey: string) => + SupersetClient.get({ + endpoint: `/api/v1/dashboard/${dashboardId}/screenshot/${cacheKey}/?download_format=${format}`, + headers: { Accept: 'application/pdf, image/png' }, + parseMethod: 'raw', + }) + .then((response: Response) => response.blob()) .then(blob => { const url = window.URL.createObjectURL(blob); const a = document.createElement('a'); @@ -70,61 +131,48 @@ export default function DownloadScreenshot({ a.click(); document.body.removeChild(a); window.URL.revokeObjectURL(url); - }); - - // this is the functions that handles the retries - const fetchImageWithRetry = (imageUrl: string) => { - checkImageReady(imageUrl) - .then(() => { - addSuccessToast(t('The screenshot is now being downloaded.')); + stopIntervals('success'); }) - .catch(error => { - // we check how many retries have been made - if (retries < MAX_RETRIES) { - retries += 1; - addInfoToast( - t( - 'The screenshot is being 
generated. Please, do not leave the page.', - ), - { - noDuplicate: true, - }, - ); - setTimeout(() => fetchImageWithRetry(imageUrl), RETRY_INTERVAL); - } else { - addDangerToast( - t( - 'The screenshot could not be downloaded. Please, try again later.', - ), - ); - logging.error(error); + .catch(err => { + if ((err as SupersetApiError).status === 404) { + throw new Error('Image not ready'); } }); + + const fetchImageWithRetry = (cacheKey: string) => { + if (retries >= MAX_RETRIES) { + stopIntervals('failure'); + logging.error('Max retries reached'); + return; + } + checkImageReady(cacheKey).catch(() => { + retries += 1; + }); }; SupersetClient.post({ - endpoint: `/api/v1/dashboard/${dashboardId}/cache_dashboard_screenshot`, + endpoint: `/api/v1/dashboard/${dashboardId}/cache_dashboard_screenshot/`, jsonPayload: { anchor, + activeTabs, + dataMask, + urlParams: getDashboardUrlParams(['edit']), }, }) .then(({ json }) => { - const imageUrl = json?.image_url; - if (!imageUrl) { + const cacheKey = json?.cache_key; + if (!cacheKey) { throw new Error('No image URL in response'); } - addInfoToast( - t( - 'The screenshot is being generated. Please, do not leave the page.', - ), - ); - fetchImageWithRetry(imageUrl); + const retryIntervalId = setInterval(() => { + fetchImageWithRetry(cacheKey); + }, RETRY_INTERVAL); + currentIntervalIds.current.push(retryIntervalId); + fetchImageWithRetry(cacheKey); }) .catch(error => { logging.error(error); - addDangerToast( - t('The screenshot could not be downloaded. 
Please, try again later.'), - ); + stopIntervals('failure'); }) .finally(() => { logEvent?.( @@ -135,6 +183,16 @@ export default function DownloadScreenshot({ }); }; + useEffect( + () => () => { + if (currentIntervalIds.current.length > 0) { + stopIntervals(); + } + currentIntervalIds.current = []; + }, + [stopIntervals], + ); + return ( <Menu.Item key={format} {...rest}> <div onClick={onDownloadScreenshot} role="button" tabIndex={0}> diff --git a/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/index.tsx b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/index.tsx index c17fc0d0265d..089ccb9179b5 100644 --- a/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/index.tsx +++ b/superset-frontend/src/dashboard/components/menu/DownloadMenuItems/index.tsx @@ -17,8 +17,11 @@ * under the License. */ import { Menu } from 'src/components/Menu'; +import { FeatureFlag, isFeatureEnabled } from '@superset-ui/core'; import DownloadScreenshot from './DownloadScreenshot'; import { DownloadScreenshotFormat } from './types'; +import DownloadAsPdf from './DownloadAsPdf'; +import DownloadAsImage from './DownloadAsImage'; export interface DownloadMenuItemProps { pdfMenuItemTitle: string; @@ -34,25 +37,48 @@ const DownloadMenuItems = (props: DownloadMenuItemProps) => { imageMenuItemTitle, logEvent, dashboardId, + dashboardTitle, ...rest } = props; + const isWebDriverScreenshotEnabled = + isFeatureEnabled(FeatureFlag.EnableDashboardScreenshotEndpoints) && + isFeatureEnabled(FeatureFlag.EnableDashboardDownloadWebDriverScreenshot); return ( <Menu selectable={false}> - <DownloadScreenshot - text={pdfMenuItemTitle} - dashboardId={dashboardId} - logEvent={logEvent} - format={DownloadScreenshotFormat.PDF} - {...rest} - /> - <DownloadScreenshot - text={imageMenuItemTitle} - dashboardId={dashboardId} - logEvent={logEvent} - format={DownloadScreenshotFormat.PNG} - {...rest} - /> + {isWebDriverScreenshotEnabled ? 
( + <> + <DownloadScreenshot + text={pdfMenuItemTitle} + dashboardId={dashboardId} + logEvent={logEvent} + format={DownloadScreenshotFormat.PDF} + {...rest} + /> + <DownloadScreenshot + text={imageMenuItemTitle} + dashboardId={dashboardId} + logEvent={logEvent} + format={DownloadScreenshotFormat.PNG} + {...rest} + /> + </> + ) : ( + <> + <DownloadAsPdf + text={pdfMenuItemTitle} + dashboardTitle={dashboardTitle} + logEvent={logEvent} + {...rest} + /> + <DownloadAsImage + text={imageMenuItemTitle} + dashboardTitle={dashboardTitle} + logEvent={logEvent} + {...rest} + /> + </> + )} </Menu> ); }; diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/ActionButtons/ActionButtons.test.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/ActionButtons/ActionButtons.test.tsx index 91c6354326e1..f77839a1d11d 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/ActionButtons/ActionButtons.test.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/ActionButtons/ActionButtons.test.tsx @@ -22,7 +22,6 @@ import { render, screen } from 'spec/helpers/testing-library'; import ActionButtons from './index'; const createProps = () => ({ - onApply: jest.fn(), onClearAll: jest.fn(), dataMaskSelected: { DefaultsID: { @@ -39,14 +38,6 @@ const createProps = () => ({ }, }, }, - isApplyDisabled: false, -}); - -test('should render the "Apply" button', () => { - const mockedProps = createProps(); - render(<ActionButtons {...mockedProps} />, { useRedux: true }); - expect(screen.getByText('Apply filters')).toBeInTheDocument(); - expect(screen.getByText('Apply filters').parentElement).toBeEnabled(); }); test('should render the "Clear all" button as disabled', () => { @@ -56,26 +47,31 @@ test('should render the "Clear all" button as disabled', () => { expect(clearBtn.parentElement).toBeDisabled(); }); -test('should render the "Apply" button as disabled', () => { - const mockedProps = createProps(); - 
const applyDisabledProps = { - ...mockedProps, - isApplyDisabled: true, +test('should call onClearAll when clear all button is clicked', () => { + const mockedProps = { + ...createProps(), + dataMaskSelected: { + DefaultsID: { + filterState: { + value: 'some_value', // Make clear button enabled + }, + }, + }, + dataMaskApplied: { + DefaultsID: { + id: 'DefaultsID', + filterState: { + value: 'some_value', // Make clear button enabled + }, + }, + }, }; - render(<ActionButtons {...applyDisabledProps} />, { useRedux: true }); - const applyBtn = screen.getByText('Apply filters'); - expect(applyBtn.parentElement).toBeDisabled(); - userEvent.click(applyBtn); - expect(mockedProps.onApply).not.toHaveBeenCalled(); -}); - -test('should apply', () => { - const mockedProps = createProps(); render(<ActionButtons {...mockedProps} />, { useRedux: true }); - const applyBtn = screen.getByText('Apply filters'); - expect(mockedProps.onApply).not.toHaveBeenCalled(); - userEvent.click(applyBtn); - expect(mockedProps.onApply).toHaveBeenCalled(); + const clearBtn = screen.getByText('Clear all'); + expect(clearBtn.parentElement).toBeEnabled(); + expect(mockedProps.onClearAll).not.toHaveBeenCalled(); + userEvent.click(clearBtn); + expect(mockedProps.onClearAll).toHaveBeenCalled(); }); describe('custom width', () => { diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/ActionButtons/index.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/ActionButtons/index.tsx index 297a495573ba..5da14591edc7 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/ActionButtons/index.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/ActionButtons/index.tsx @@ -33,11 +33,9 @@ import { getFilterBarTestId } from '../utils'; interface ActionButtonsProps { width?: number; - onApply: () => void; onClearAll: () => void; dataMaskSelected: DataMaskState; dataMaskApplied: DataMaskStateWithId; - isApplyDisabled: 
boolean; filterBarOrientation?: FilterBarOrientation; } @@ -80,10 +78,6 @@ const verticalStyle = (theme: SupersetTheme, width: number) => css` & > button { pointer-events: auto; } - - & > .filter-apply-button { - margin-bottom: ${theme.gridUnit * 3}px; - } `; const horizontalStyle = (theme: SupersetTheme) => css` @@ -93,22 +87,13 @@ const horizontalStyle = (theme: SupersetTheme) => css` text-transform: capitalize; font-weight: ${theme.typography.weights.normal}; } - & > .filter-apply-button { - &[disabled], - &[disabled]:hover { - color: ${theme.colors.grayscale.light1}; - background: ${theme.colors.grayscale.light3}; - } - } `; const ActionButtons = ({ width = OPEN_FILTER_BAR_WIDTH, - onApply, onClearAll, dataMaskApplied, dataMaskSelected, - isApplyDisabled, filterBarOrientation = FilterBarOrientation.Vertical, }: ActionButtonsProps) => { const isClearAllEnabled = useMemo( @@ -131,16 +116,6 @@ const ActionButtons = ({ ]} data-test="filterbar-action-buttons" > - <Button - disabled={isApplyDisabled} - buttonStyle="primary" - htmlType="submit" - className="filter-apply-button" - onClick={onApply} - {...getFilterBarTestId('apply-button')} - > - {isVertical ? 
t('Apply filters') : t('Apply')} - </Button> <Button disabled={!isClearAllEnabled} buttonStyle="link" diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/CrossFilters/selectors.ts b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/CrossFilters/selectors.ts index 4f61972db8c1..c8fd8e2841c7 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/CrossFilters/selectors.ts +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/CrossFilters/selectors.ts @@ -33,7 +33,7 @@ export const crossFiltersSelector = (props: { verboseMaps: { [key: string]: Record<string, string> }; }): CrossFilterIndicator[] => { const { dataMask, chartConfiguration, dashboardLayout, verboseMaps } = props; - const chartsIds = Object.keys(chartConfiguration); + const chartsIds = Object.keys(chartConfiguration || {}); return chartsIds .map(chartId => { diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterBar.test.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterBar.test.tsx index 1a6f92c0c243..3ada02f1f666 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterBar.test.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterBar.test.tsx @@ -190,10 +190,7 @@ describe('FilterBar', () => { expect(screen.getByText('Clear all')).toBeInTheDocument(); }); - it('should render the "Apply filters" option', () => { - renderWrapper(); - expect(screen.getByText('Apply filters')).toBeInTheDocument(); - }); + it('should render the collapse icon', () => { renderWrapper(); @@ -247,7 +244,6 @@ describe('FilterBar', () => { renderWrapper(openedBarProps, stateWithoutNativeFilters); expect(screen.getByTestId(getTestId('clear-button'))).toBeDisabled(); - expect(screen.getByTestId(getTestId('apply-button'))).toBeDisabled(); }); it('renders dividers', async () => { @@ -285,15 +281,11 @@ describe('FilterBar', () => { 
expect(description.tagName).toBe('P'); // Do not enable buttons if there are not filters expect(screen.getByTestId(getTestId('clear-button'))).toBeDisabled(); - expect(screen.getByTestId(getTestId('apply-button'))).toBeDisabled(); }); it('create filter and apply it flow', async () => { renderWrapper(openedBarProps, stateWithoutNativeFilters); - expect(screen.getByTestId(getTestId('apply-button'))).toBeDisabled(); await addFilterFlow(); - - expect(screen.getByTestId(getTestId('apply-button'))).toBeDisabled(); }); }); diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterBarSettings/FilterBarSettings.test.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterBarSettings/FilterBarSettings.test.tsx index ee7a8b6310f8..0e52e4def60f 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterBarSettings/FilterBarSettings.test.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterBarSettings/FilterBarSettings.test.tsx @@ -41,7 +41,8 @@ const initialState: { dashboardInfo: DashboardInfo } = { color_namespace: '', color_scheme_domain: [], label_colors: {}, - shared_label_colors: {}, + shared_label_colors: [], + map_label_colors: {}, cross_filters_enabled: false, }, json_metadata: '', diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/Horizontal.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/Horizontal.tsx index 2b96d9963fc4..de66feadc403 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/Horizontal.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/Horizontal.tsx @@ -105,6 +105,14 @@ const HorizontalFilterBar: FC<HorizontalBarProps> = ({ isInitialized, onSelectionChange, }) => { + + const overallState = useSelector<RootState>(state => state) as any; + const isIframe = window.self !== window.top; + const dashboardInfo = useSelector<RootState, any>( + state 
=> state.dashboardInfo, + ); + + const dataMask = useSelector<RootState, DataMaskStateWithId>( state => state.dataMask, ); @@ -129,10 +137,14 @@ const HorizontalFilterBar: FC<HorizontalBarProps> = ({ : []; const hasFilters = filterValues.length > 0 || selectedCrossFilters.length > 0; + if (isIframe && !hasFilters) { + return null; + } + return ( <HorizontalBar {...getFilterBarTestId()}> <HorizontalBarContent> - {!isInitialized ? ( + {/* {!isInitialized ? ( <Loading position="inline-centered" /> ) : ( <> @@ -160,7 +172,34 @@ const HorizontalFilterBar: FC<HorizontalBarProps> = ({ )} {actions} </> - )} + )} */} + + <> + <FilterBarSettings /> + {canEdit && ( + <FiltersLinkContainer hasFilters={hasFilters}> + <FilterConfigurationLink + dashboardId={dashboardId} + createNewOnOpen={filterValues.length === 0} + > + <Icons.PlusSmall /> {t('Add/Edit Filters')} + </FilterConfigurationLink> + </FiltersLinkContainer> + )} + {!hasFilters && ( + <FilterBarEmptyStateContainer data-test="horizontal-filterbar-empty"> + {t('No filters are currently added to this dashboard.')} + </FilterBarEmptyStateContainer> + )} + {hasFilters && ( + <FilterControls + dataMaskSelected={dataMaskSelected} + onFilterSelectionChange={onSelectionChange} + /> + )} + {actions} + </> + </HorizontalBarContent> </HorizontalBar> ); diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/Vertical.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/Vertical.tsx index 9fd9a769b6d1..fd0c127ba3d3 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/Vertical.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/Vertical.tsx @@ -31,6 +31,7 @@ import { } from 'react'; import cx from 'classnames'; import { FeatureFlag, isFeatureEnabled, styled, t } from '@superset-ui/core'; +// import { t } from '@superset-ui/core'; import Icons from 'src/components/Icons'; import Loading from 'src/components/Loading'; import { 
EmptyStateSmall } from 'src/components/EmptyState'; @@ -135,6 +136,8 @@ const VerticalFilterBar: FC<VerticalBarProps> = ({ const [isScrolling, setIsScrolling] = useState(false); const timeout = useRef<any>(); + const isIframe = window.self !== window.top; + const openFiltersBar = useCallback( () => toggleFiltersBar(true), [toggleFiltersBar], @@ -166,7 +169,8 @@ const VerticalFilterBar: FC<VerticalBarProps> = ({ const filterControls = useMemo( () => - filterValues.length === 0 ? ( + filterValues.length === 0 && isIframe ? null : filterValues.length === + 0 ? ( <FilterBarEmptyStateContainer> <EmptyStateSmall title={t('No global filters are currently added')} diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx index c25134715d83..0fb3c8bc73ee 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx @@ -30,6 +30,7 @@ import { import { useDispatch, useSelector } from 'react-redux'; import { + DataMaskState, DataMaskStateWithId, DataMaskWithId, Filter, @@ -51,7 +52,7 @@ import { logEvent } from 'src/logger/actions'; import { LOG_ACTIONS_CHANGE_DASHBOARD_FILTER } from 'src/logger/LogUtils'; import { FilterBarOrientation, RootState } from 'src/dashboard/types'; import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes'; -import { checkIsApplyDisabled } from './utils'; + import { FiltersBarProps } from './types'; import { useNativeFiltersDataMask, @@ -189,7 +190,7 @@ const FilterBar: FC<FiltersBarProps> = ({ useEffect(() => { if (previousFilters && dashboardId === previousDashboardId) { - const updates = {}; + const updates: DataMaskState = {}; Object.values(filters).forEach(currentFilter => { const previousFilter = previousFilters?.[currentFilter.id]; if (!previousFilter) { @@ -248,43 +249,55 @@ const FilterBar: FC<FiltersBarProps> 
= ({ const handleClearAll = useCallback(() => { const clearDataMaskIds: string[] = []; - let dispatchAllowed = false; + filtersInScope.filter(isNativeFilter).forEach(filter => { const { id } = filter; - if (dataMaskSelected[id]) { - if (filter.controlValues?.enableEmptyFilter) { - dispatchAllowed = false; - } + if (dataMaskSelected[id] || dataMaskApplied[id]) { clearDataMaskIds.push(id); setDataMaskSelected(draft => { - if (draft[id].filterState?.value !== undefined) { - draft[id].filterState!.value = undefined; - } + // Clear the filter from selected state + delete draft[id]; }); } }); - if (dispatchAllowed) { - clearDataMaskIds.forEach(id => dispatch(clearDataMask(id))); - } - }, [dataMaskSelected, dispatch, filtersInScope, setDataMaskSelected]); - useFilterUpdates(dataMaskSelected, setDataMaskSelected); - const isApplyDisabled = checkIsApplyDisabled( + // Clear applied filters immediately + clearDataMaskIds.forEach(id => dispatch(clearDataMask(id))); + }, [ dataMaskSelected, dataMaskApplied, - filtersInScope.filter(isNativeFilter), - ); + dispatch, + filtersInScope, + setDataMaskSelected, + ]); + + useFilterUpdates(dataMaskSelected, setDataMaskSelected); const isInitialized = useInitialization(); + // Auto-apply filters when dataMaskSelected changes + const previousDataMaskSelected = usePrevious(dataMaskSelected); + useEffect(() => { + // Only auto-apply if: + // 1. Component is initialized + // 2. dataMaskSelected has actually changed + // 3. 
There are filters selected + if ( + isInitialized && + previousDataMaskSelected && + !isEqual(previousDataMaskSelected, dataMaskSelected) && + Object.keys(dataMaskSelected).length > 0 + ) { + handleApply(); + } + }, [dataMaskSelected, isInitialized, previousDataMaskSelected, handleApply]); + const actions = ( <ActionButtons filterBarOrientation={orientation} width={verticalConfig?.width} - onApply={handleApply} onClearAll={handleClearAll} dataMaskSelected={dataMaskSelected} dataMaskApplied={dataMaskApplied} - isApplyDisabled={isApplyDisabled} /> ); diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/useFilterScope.ts b/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/useFilterScope.ts index 63b01a891726..f06dc680eaa1 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/useFilterScope.ts +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/useFilterScope.ts @@ -16,13 +16,13 @@ * specific language governing permissions and limitations * under the License. 
*/ +import { Filter, t } from '@superset-ui/core'; import { useMemo } from 'react'; import { useSelector } from 'react-redux'; -import { Filter, t } from '@superset-ui/core'; import { Layout, LayoutItem, RootState } from 'src/dashboard/types'; -import { DASHBOARD_ROOT_ID } from 'src/dashboard/util/constants'; -import { CHART_TYPE } from 'src/dashboard/util/componentTypes'; import { useChartIds } from 'src/dashboard/util/charts/useChartIds'; +import { CHART_TYPE } from 'src/dashboard/util/componentTypes'; +import { DASHBOARD_ROOT_ID } from 'src/dashboard/util/constants'; const extractTabLabel = (tab?: LayoutItem) => tab?.meta?.text || tab?.meta?.defaultText || ''; @@ -95,10 +95,11 @@ export const useFilterScope = (filter: Filter) => { if (topLevelTabs) { // We start assuming that all charts are in scope for all tabs in the root path const topLevelTabsInFullScope = [...filter.scope.rootPath]; - const layoutChartElementsInTabsInScope = layoutCharts.filter(element => - element.parents.some(parent => - topLevelTabsInFullScope.includes(parent), - ), + const layoutChartElementsInTabsInScope = layoutCharts.filter( + element => + element.parents?.some(parent => + topLevelTabsInFullScope.includes(parent), + ), ); // Exclude the tabs that contain excluded charts filter.scope.excluded.forEach(chartId => { @@ -106,7 +107,7 @@ export const useFilterScope = (filter: Filter) => { tabId => layoutChartElementsInTabsInScope .find(chart => chart.meta.chartId === chartId) - ?.parents.includes(tabId), + ?.parents?.includes(tabId), ); if (excludedIndex > -1) { topLevelTabsInFullScope.splice(excludedIndex, 1); @@ -120,7 +121,7 @@ export const useFilterScope = (filter: Filter) => { layoutChartElementsInTabsInScope.find( element => element.meta.chartId === chartId && - element.parents.every( + element.parents?.every( parent => !topLevelTabsInFullScope.includes(parent), ), ); diff --git 
a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/ColumnSelect.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/ColumnSelect.tsx index e83f111972c2..08cf69fd740e 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/ColumnSelect.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/ColumnSelect.tsx @@ -56,6 +56,7 @@ export function ColumnSelect({ mode, }: ColumnSelectProps) { const [columns, setColumns] = useState<Column[]>(); + const [loading, setLoading] = useState(false); const { addDangerToast } = useToasts(); const resetColumnField = useCallback(() => { form.setFields([ @@ -87,9 +88,11 @@ export function ColumnSelect({ useChangeEffect(datasetId, previous => { if (previous != null) { + setColumns([]); resetColumnField(); } if (datasetId != null) { + setLoading(true); cachedSupersetGet({ endpoint: `/api/v1/dataset/${datasetId}?q=${rison.encode({ columns: [ @@ -98,26 +101,30 @@ export function ColumnSelect({ 'columns.type_generic', ], })}`, - }).then( - ({ json: { result } }) => { - const lookupValue = Array.isArray(value) ? value : [value]; - const valueExists = result.columns.some( - (column: Column) => lookupValue?.includes(column.column_name), - ); - if (!valueExists) { - resetColumnField(); - } - setColumns(result.columns); - }, - async badResponse => { - const { error, message } = await getClientErrorObject(badResponse); - let errorText = message || error || t('An error has occurred'); - if (message === 'Forbidden') { - errorText = t('You do not have permission to edit this dashboard'); - } - addDangerToast(errorText); - }, - ); + }) + .then( + ({ json: { result } }) => { + const lookupValue = Array.isArray(value) ? 
value : [value]; + const valueExists = result.columns.some( + (column: Column) => lookupValue?.includes(column.column_name), + ); + if (!valueExists) { + resetColumnField(); + } + setColumns(result.columns); + }, + async badResponse => { + const { error, message } = await getClientErrorObject(badResponse); + let errorText = message || error || t('An error has occurred'); + if (message === 'Forbidden') { + errorText = t( + 'You do not have permission to edit this dashboard', + ); + } + addDangerToast(errorText); + }, + ) + .finally(() => setLoading(false)); } }); @@ -126,6 +133,7 @@ export function ColumnSelect({ mode={mode} value={mode === 'multiple' ? value || [] : value} ariaLabel={t('Column select')} + loading={loading} onChange={onChange} options={options} placeholder={t('Select a column')} diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/FilterScope.test.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/FilterScope.test.tsx index ce8ff0245ee1..93f3dd6ec1c8 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/FilterScope.test.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/FilterScope.test.tsx @@ -34,6 +34,7 @@ describe('FilterScope', () => { const save = jest.fn(); let form: FormInstance<NativeFiltersForm>; const mockedProps = { + expanded: false, filterId: 'DefaultFilterId', dependencies: [], setErroredFilters: jest.fn(), diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/FilterScope.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/FilterScope.tsx index d5b554a2f5a0..682894acc294 100644 --- 
a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/FilterScope.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/FilterScope.tsx @@ -17,13 +17,8 @@ * under the License. */ -import { FC, useCallback, useRef, useState } from 'react'; -import { - NativeFilterScope, - styled, - t, - useComponentDidUpdate, -} from '@superset-ui/core'; +import { FC, useCallback, useEffect, useMemo, useRef, useState } from 'react'; +import { NativeFilterScope, styled, t } from '@superset-ui/core'; import { Radio } from 'src/components/Radio'; import { AntdForm, Typography } from 'src/components'; import { ScopingType } from './types'; @@ -32,7 +27,7 @@ import { getDefaultScopeValue, isScopingAll } from './utils'; type FilterScopeProps = { pathToFormValue?: string[]; - updateFormValues: (values: any) => void; + updateFormValues: (values: any, triggerFormChange?: boolean) => void; formFilterScope?: NativeFilterScope; forceUpdate: Function; filterScope?: NativeFilterScope; @@ -64,17 +59,19 @@ const FilterScope: FC<FilterScopeProps> = ({ chartId, initiallyExcludedCharts, }) => { - const [initialFilterScope] = useState( - filterScope || getDefaultScopeValue(chartId, initiallyExcludedCharts), + const initialFilterScope = useMemo( + () => filterScope || getDefaultScopeValue(chartId, initiallyExcludedCharts), + [chartId, filterScope, initiallyExcludedCharts], ); const lastSpecificScope = useRef(initialFilterScope); - const [initialScopingType] = useState( - isScopingAll(initialFilterScope, chartId) - ? ScopingType.All - : ScopingType.Specific, + const initialScopingType = useMemo( + () => + isScopingAll(initialFilterScope, chartId) + ? 
ScopingType.All + : ScopingType.Specific, + [chartId, initialFilterScope], ); - const [hasScopeBeenModified, setHasScopeBeenModified] = - useState(!!filterScope); + const [hasScopeBeenModified, setHasScopeBeenModified] = useState(false); const onUpdateFormValues = useCallback( (formValues: any) => { @@ -87,26 +84,24 @@ const FilterScope: FC<FilterScopeProps> = ({ [formScopingType, updateFormValues], ); - const updateScopes = useCallback(() => { - if (filterScope || hasScopeBeenModified) { - return; - } + const updateScopes = useCallback( + updatedFormValues => { + if (hasScopeBeenModified) { + return; + } - const newScope = getDefaultScopeValue(chartId, initiallyExcludedCharts); - updateFormValues({ - scope: newScope, - scoping: isScopingAll(newScope, chartId) - ? ScopingType.All - : ScopingType.Specific, - }); - }, [ - chartId, - filterScope, - hasScopeBeenModified, - initiallyExcludedCharts, - updateFormValues, - ]); - useComponentDidUpdate(updateScopes); + updateFormValues(updatedFormValues, false); + }, + [hasScopeBeenModified, updateFormValues], + ); + + useEffect(() => { + const updatedFormValues = { + scope: initialFilterScope, + scoping: initialScopingType, + }; + updateScopes(updatedFormValues); + }, [initialFilterScope, initialScopingType, updateScopes]); return ( <Wrapper> diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/utils.test.ts b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/utils.test.ts index 60ae8b8e5f45..0657e4574cac 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/utils.test.ts +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FilterScope/utils.test.ts @@ -485,7 +485,7 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in Sports: '#8FD3E4', Strategy: '#A1A6BD', }, - 
shared_label_colors: {}, + shared_label_colors: [], color_scheme: 'supersetColors', extra_filters: [ { @@ -710,7 +710,7 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in queryFields: { metric: 'metrics', }, - shared_label_colors: {}, + shared_label_colors: [], color_scheme: 'supersetColors', extra_filters: [ { @@ -759,7 +759,7 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in queryFields: { metric: 'metrics', }, - shared_label_colors: {}, + shared_label_colors: [], color_scheme: 'supersetColors', dashboardId: 9, applied_time_extras: {}, @@ -3046,7 +3046,7 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in groupby: 'groupby', metrics: 'metrics', }, - shared_label_colors: {}, + shared_label_colors: [], extra_filters: [ { col: '__time_range', @@ -3125,7 +3125,7 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in groupby: 'groupby', metrics: 'metrics', }, - shared_label_colors: {}, + shared_label_colors: [], dashboardId: 9, applied_time_extras: {}, where: '', @@ -16514,7 +16514,7 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in groupby: 'groupby', metrics: 'metrics', }, - shared_label_colors: {}, + shared_label_colors: [], extra_filters: [ { col: '__time_range', @@ -16661,7 +16661,7 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in groupby: 'groupby', metrics: 'metrics', }, - shared_label_colors: {}, + shared_label_colors: [], dashboardId: 9, applied_time_extras: {}, where: '', @@ -17523,7 +17523,7 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in Sports: '#8FD3E4', Strategy: '#A1A6BD', }, - shared_label_colors: {}, + shared_label_colors: [], extra_filters: [ { col: '__time_range', @@ -17678,7 +17678,7 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in groupby: 'groupby', metrics: 'metrics', }, - 
shared_label_colors: {}, + shared_label_colors: [], extra_filters: [ { col: '__time_range', @@ -17715,7 +17715,7 @@ describe('Ensure buildTree does not throw runtime errors when encountering an in groupby: 'groupby', metrics: 'metrics', }, - shared_label_colors: {}, + shared_label_colors: [], dashboardId: 9, applied_time_extras: {}, where: '', diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FiltersConfigForm.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FiltersConfigForm.tsx index 154cdbf36841..7e151eec42a3 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FiltersConfigForm.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/FiltersConfigForm.tsx @@ -105,6 +105,8 @@ import { import { FILTER_SUPPORTED_TYPES, INPUT_WIDTH } from './constants'; import DependencyList from './DependencyList'; +const FORM_ITEM_WIDTH = 260; + const TabPane = styled(Tabs.TabPane)` padding: ${({ theme }) => theme.gridUnit * 4}px 0px; `; @@ -136,8 +138,8 @@ const controlsOrder: ControlKey[] = [ 'inverseSelection', ]; -export const StyledFormItem = styled(FormItem)` - width: 49%; +export const StyledFormItem = styled(FormItem)<{ expanded: boolean }>` + width: ${({ expanded }) => (expanded ? '49%' : `${FORM_ITEM_WIDTH}px`)}; margin-bottom: ${({ theme }) => theme.gridUnit * 4}px; & .ant-form-item-label { @@ -149,10 +151,10 @@ export const StyledFormItem = styled(FormItem)` } `; -export const StyledRowFormItem = styled(FormItem)` +export const StyledRowFormItem = styled(FormItem)<{ expanded: boolean }>` margin-bottom: 0; padding-bottom: 0; - min-width: 50%; + min-width: ${({ expanded }) => (expanded ? 
'50%' : `${FORM_ITEM_WIDTH}px`)}; & .ant-form-item-label { padding-bottom: 0; @@ -167,8 +169,8 @@ export const StyledRowFormItem = styled(FormItem)` } `; -export const StyledRowSubFormItem = styled(FormItem)` - min-width: 50%; +export const StyledRowSubFormItem = styled(FormItem)<{ expanded: boolean }>` + min-width: ${({ expanded }) => (expanded ? '50%' : `${FORM_ITEM_WIDTH}px`)}; & .ant-form-item-label { padding-bottom: 0; @@ -264,9 +266,9 @@ const StyledAsterisk = styled.span` } `; -const FilterTypeInfo = styled.div` - ${({ theme }) => ` - width: 49%; +const FilterTypeInfo = styled.div<{ expanded: boolean }>` + ${({ theme, expanded }) => ` + width: ${expanded ? '49%' : `${FORM_ITEM_WIDTH}px`}; font-size: ${theme.typography.sizes.s}px; color: ${theme.colors.grayscale.light1}; margin: @@ -300,6 +302,7 @@ export const FilterPanels = { }; export interface FiltersConfigFormProps { + expanded: boolean; filterId: string; filterToEdit?: Filter; removedFilters: Record<string, FilterRemoval>; @@ -334,6 +337,7 @@ const FILTER_TYPE_NAME_MAPPING = { */ const FiltersConfigForm = ( { + expanded, filterId, filterToEdit, removedFilters, @@ -376,7 +380,7 @@ const FiltersConfigForm = ( const nativeFilterVizTypes = Object.entries(nativeFilterItems) // @ts-ignore .filter(([, { value }]) => value.behaviors?.includes(Behavior.NativeFilter)) - .map(([key]) => key); + .map(([key]) => key as keyof typeof FILTER_SUPPORTED_TYPES); const loadedDatasets = useSelector<RootState, DatasourcesState>( ({ datasources }) => datasources, @@ -411,6 +415,7 @@ const FiltersConfigForm = ( const { controlItems = {}, mainControlItems = {} } = formFilter ? 
getControlItemsMap({ + expanded, datasetId, disabled: false, forceUpdate, @@ -563,9 +568,9 @@ const FiltersConfigForm = ( }, [form]); const updateFormValues = useCallback( - (values: any) => { + (values: any, triggerFormChange = true) => { setNativeFilterFieldValues(form, filterId, values); - formChanged(); + if (triggerFormChange) formChanged(); }, [filterId, form, formChanged], ); @@ -760,6 +765,7 @@ const FiltersConfigForm = ( const timeColumn = ( <StyledRowFormItem + expanded={expanded} name={['filters', filterId, 'granularity_sqla']} label={ <> @@ -807,6 +813,7 @@ const FiltersConfigForm = ( > <StyledContainer> <StyledFormItem + expanded={expanded} name={['filters', filterId, 'type']} hidden initialValue={NativeFilterType.NativeFilter} @@ -814,6 +821,7 @@ const FiltersConfigForm = ( <Input /> </StyledFormItem> <StyledFormItem + expanded={expanded} name={['filters', filterId, 'name']} label={<StyledLabel>{t('Filter name')}</StyledLabel>} initialValue={filterToEdit?.name} @@ -822,6 +830,7 @@ const FiltersConfigForm = ( <Input {...getFiltersConfigModalTestId('name-input')} /> </StyledFormItem> <StyledFormItem + expanded={expanded} name={['filters', filterId, 'filterType']} rules={[{ required: !isRemoved, message: t('Name is required') }]} initialValue={filterToEdit?.filterType || 'filter_select'} @@ -867,7 +876,7 @@ const FiltersConfigForm = ( </StyledFormItem> </StyledContainer> {formFilter?.filterType === 'filter_time' && ( - <FilterTypeInfo> + <FilterTypeInfo expanded={expanded}> {t(`Dashboard time range filters apply to temporal columns defined in the filter section of each chart. Add temporal columns to the chart filters to have this dashboard filter impact those charts.`)} @@ -877,6 +886,7 @@ const FiltersConfigForm = ( <StyledRowContainer> {showDataset ? 
( <StyledFormItem + expanded={expanded} name={['filters', filterId, 'dataset']} label={<StyledLabel>{t('Dataset')}</StyledLabel>} initialValue={ @@ -915,7 +925,10 @@ const FiltersConfigForm = ( /> </StyledFormItem> ) : ( - <StyledFormItem label={<StyledLabel>{t('Dataset')}</StyledLabel>}> + <StyledFormItem + expanded={expanded} + label={<StyledLabel>{t('Dataset')}</StyledLabel>} + > <Loading position="inline-centered" /> </StyledFormItem> )} @@ -941,6 +954,7 @@ const FiltersConfigForm = ( > {canDependOnOtherFilters && hasAvailableFilters && ( <StyledRowFormItem + expanded={expanded} name={['filters', filterId, 'dependencies']} initialValue={dependencies} > @@ -981,6 +995,7 @@ const FiltersConfigForm = ( }} > <StyledRowSubFormItem + expanded={expanded} name={['filters', filterId, 'adhoc_filters']} css={{ width: INPUT_WIDTH }} initialValue={filterToEdit?.adhoc_filters} @@ -1016,6 +1031,7 @@ const FiltersConfigForm = ( </StyledRowSubFormItem> {showTimeRangePicker && ( <StyledRowFormItem + expanded={expanded} name={['filters', filterId, 'time_range']} label={<StyledLabel>{t('Time range')}</StyledLabel>} initialValue={ @@ -1057,6 +1073,7 @@ const FiltersConfigForm = ( }} > <StyledRowFormItem + expanded={expanded} name={[ 'filters', filterId, @@ -1077,6 +1094,7 @@ const FiltersConfigForm = ( </StyledRowFormItem> {hasMetrics && ( <StyledRowSubFormItem + expanded={expanded} name={['filters', filterId, 'sortMetric']} initialValue={filterToEdit?.sortMetric} label={ @@ -1126,6 +1144,7 @@ const FiltersConfigForm = ( }} > <StyledRowFormItem + expanded={expanded} name={[ 'filters', filterId, @@ -1164,6 +1183,7 @@ const FiltersConfigForm = ( key={`${filterId}-${FilterPanels.settings.key}`} > <StyledFormItem + expanded={expanded} name={['filters', filterId, 'description']} initialValue={filterToEdit?.description} label={<StyledLabel>{t('Description')}</StyledLabel>} @@ -1194,6 +1214,7 @@ const FiltersConfigForm = ( > {!isRemoved && ( <StyledRowSubFormItem + expanded={expanded} 
name={['filters', filterId, 'defaultDataMask']} initialValue={initialDefaultValue} data-test="default-input" diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/getControlItemsMap.test.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/getControlItemsMap.test.tsx index d34abf268795..368df7088b5d 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/getControlItemsMap.test.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/getControlItemsMap.test.tsx @@ -64,6 +64,7 @@ const filterMock: Filter = { }; const createProps: () => ControlItemsProps = () => ({ + expanded: false, datasetId: 1, disabled: false, forceUpdate: jest.fn(), diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/getControlItemsMap.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/getControlItemsMap.tsx index 937862094b04..62fd4917ec8c 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/getControlItemsMap.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigForm/getControlItemsMap.tsx @@ -44,6 +44,7 @@ import { import { ColumnSelect } from './ColumnSelect'; export interface ControlItemsProps { + expanded: boolean; datasetId: number; disabled: boolean; forceUpdate: Function; @@ -60,6 +61,7 @@ const CleanFormItem = styled(FormItem)` `; export default function getControlItemsMap({ + expanded, datasetId, disabled, forceUpdate, @@ -104,6 +106,7 @@ export default function getControlItemsMap({ } /> <StyledFormItem + expanded={expanded} // don't show the column select unless we have a dataset name={['filters', filterId, 'column']} initialValue={initColumn} @@ -174,6 +177,7 @@ export default function 
getControlItemsMap({ } > <StyledRowFormItem + expanded={expanded} key={controlItem.name} name={['filters', filterId, 'controlValues', controlItem.name]} initialValue={initialValue} diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx index bfb93a8da1fb..5e5391952148 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx @@ -16,28 +16,29 @@ * specific language governing permissions and limitations * under the License. */ -import { memo, useEffect, useCallback, useMemo, useState, useRef } from 'react'; -import { uniq, isEqual, sortBy, debounce, isEmpty } from 'lodash'; import { + Divider, Filter, FilterConfiguration, NativeFilterType, - Divider, - styled, SLOW_DEBOUNCE, - t, css, + styled, + t, useTheme, } from '@superset-ui/core'; +import { debounce, isEmpty, isEqual, sortBy, uniq } from 'lodash'; +import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'; import { useDispatch } from 'react-redux'; import { AntdForm } from 'src/components'; -import Icons from 'src/components/Icons'; import ErrorBoundary from 'src/components/ErrorBoundary'; +import Icons from 'src/components/Icons'; import { StyledModal } from 'src/components/Modal'; -import { testWithId } from 'src/utils/testUtils'; import { updateCascadeParentIds } from 'src/dashboard/actions/nativeFilters'; import useEffectEvent from 'src/hooks/useEffectEvent'; +import { testWithId } from 'src/utils/testUtils'; import { useFilterConfigMap, useFilterConfiguration } from '../state'; +import DividerConfigForm from './DividerConfigForm'; import FilterConfigurePane from './FilterConfigurePane'; import FiltersConfigForm, { FilterPanels, @@ -46,15 +47,14 @@ import Footer from 
'./Footer/Footer'; import { useOpenModal, useRemoveCurrentFilter } from './state'; import { FilterRemoval, NativeFiltersForm } from './types'; import { - createHandleSave, + NATIVE_FILTER_DIVIDER_PREFIX, createHandleRemoveItem, + createHandleSave, generateFilterId, getFilterIds, - validateForm, - NATIVE_FILTER_DIVIDER_PREFIX, hasCircularDependency, + validateForm, } from './utils'; -import DividerConfigForm from './DividerConfigForm'; const MODAL_MARGIN = 16; const MIN_WIDTH = 880; @@ -435,7 +435,8 @@ function FiltersConfigModal({ unsavedFiltersIds.length > 0 || form.isFieldsTouched() || changed || - didChangeOrder + didChangeOrder || + Object.values(removedFilters).some(f => f?.isPending) ) { setSaveAlertVisible(true); } else { @@ -581,6 +582,7 @@ function FiltersConfigModal({ /> ) : ( <FiltersConfigForm + expanded={expanded} ref={configFormRef} form={form} filterId={id} @@ -613,6 +615,7 @@ function FiltersConfigModal({ validateDependencies, getDependencySuggestion, handleActiveFilterPanelChange, + expanded, ], ); diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/Footer/CancelConfirmationAlert.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/Footer/CancelConfirmationAlert.tsx index b36769ada561..16d15a06b0d7 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/Footer/CancelConfirmationAlert.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/Footer/CancelConfirmationAlert.tsx @@ -61,6 +61,7 @@ export function CancelConfirmationAlert({ buttonSize="small" buttonStyle="primary" onClick={onConfirm} + data-test="native-filter-modal-confirm-cancel-button" > {t('Yes, cancel')} </Button> diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/utils.ts b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/utils.ts index ffe53378ab13..fa15d2e63884 100644 --- 
a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/utils.ts +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/utils.ts @@ -16,18 +16,18 @@ * specific language governing permissions and limitations * under the License. */ -import { FormInstance } from 'src/components'; -import { nanoid } from 'nanoid'; -import { getInitialDataMask } from 'src/dataMask/reducer'; import { + Divider, Filter, FilterConfiguration, - NativeFilterType, - Divider, NativeFilterTarget, + NativeFilterType, logging, } from '@superset-ui/core'; +import { nanoid } from 'nanoid'; +import { FormInstance } from 'src/components'; import { DASHBOARD_ROOT_ID } from 'src/dashboard/util/constants'; +import { getInitialDataMask } from 'src/dataMask/reducer'; import { FilterRemoval, NativeFiltersForm } from './types'; export const REMOVAL_DELAY_SECS = 5; diff --git a/superset-frontend/src/dashboard/components/nativeFilters/state.ts b/superset-frontend/src/dashboard/components/nativeFilters/state.ts index 38a50c7b06b7..5bf71116c358 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/state.ts +++ b/superset-frontend/src/dashboard/components/nativeFilters/state.ts @@ -83,8 +83,8 @@ function useSelectChartTabParents() { const chartLayoutItem = Object.values(dashboardLayout).find( layoutItem => layoutItem.meta?.chartId === chartId, ); - return chartLayoutItem?.parents.filter( - (parent: string) => dashboardLayout[parent].type === TAB_TYPE, + return chartLayoutItem?.parents?.filter( + (parent: string) => dashboardLayout[parent]?.type === TAB_TYPE, ); }; } diff --git a/superset-frontend/src/dashboard/containers/Chart.jsx b/superset-frontend/src/dashboard/containers/Chart.jsx index 6451cf200480..9f00bd0bf709 100644 --- a/superset-frontend/src/dashboard/containers/Chart.jsx +++ b/superset-frontend/src/dashboard/containers/Chart.jsx @@ -38,6 +38,7 @@ import { import getFormDataWithExtraFilters from 
'src/dashboard/util/charts/getFormDataWithExtraFilters'; import Chart from 'src/dashboard/components/gridComponents/Chart'; import { PLACEHOLDER_DATASOURCE } from 'src/dashboard/constants'; +import { enforceSharedLabelsColorsArray } from 'src/utils/colorScheme'; const EMPTY_OBJECT = {}; @@ -59,17 +60,26 @@ function mapStateToProps( const datasource = (chart && chart.form_data && datasources[chart.form_data.datasource]) || PLACEHOLDER_DATASOURCE; - const { colorScheme, colorNamespace, datasetsStatus } = dashboardState; + const { + colorScheme: appliedColorScheme, + colorNamespace, + datasetsStatus, + } = dashboardState; const labelsColor = dashboardInfo?.metadata?.label_colors || {}; - const labelsColorMap = dashboardInfo?.metadata?.shared_label_colors || {}; + const labelsColorMap = dashboardInfo?.metadata?.map_label_colors || {}; + const sharedLabelsColors = enforceSharedLabelsColorsArray( + dashboardInfo?.metadata?.shared_label_colors, + ); + const ownColorScheme = chart.form_data?.color_scheme; // note: this method caches filters if possible to prevent render cascades const formData = getFormDataWithExtraFilters({ chart, chartConfiguration: dashboardInfo.metadata?.chart_configuration, charts: chartQueries, filters: getAppliedFilterValues(id), - colorScheme, colorNamespace, + colorScheme: appliedColorScheme, + ownColorScheme, sliceId: id, nativeFilters: nativeFilters?.filters, allSliceIds: dashboardState.sliceIds, @@ -77,6 +87,7 @@ function mapStateToProps( extraControls, labelsColor, labelsColorMap, + sharedLabelsColors, }); formData.dashboardId = dashboardInfo.id; diff --git a/superset-frontend/src/dashboard/containers/DashboardComponent.jsx b/superset-frontend/src/dashboard/containers/DashboardComponent.jsx index bf92c5dcecae..b811f398abb5 100644 --- a/superset-frontend/src/dashboard/containers/DashboardComponent.jsx +++ b/superset-frontend/src/dashboard/containers/DashboardComponent.jsx @@ -80,6 +80,7 @@ function mapStateToProps( dashboardId: 
dashboardInfo.id, dashboardInfo, fullSizeChartId: dashboardState.fullSizeChartId, + embeddedMode: !dashboardInfo?.userId, }; // rows and columns need more data about their child dimensions diff --git a/superset-frontend/src/dashboard/fixtures/mockNativeFilters.ts b/superset-frontend/src/dashboard/fixtures/mockNativeFilters.ts index d1d37cbf6aec..32f54cbe269d 100644 --- a/superset-frontend/src/dashboard/fixtures/mockNativeFilters.ts +++ b/superset-frontend/src/dashboard/fixtures/mockNativeFilters.ts @@ -39,6 +39,7 @@ export const nativeFiltersInfo: NativeFiltersState = { id: 'DefaultsID', name: 'test', filterType: 'filter_select', + chartsInScope: [], targets: [ { datasetId: 0, diff --git a/superset-frontend/src/dashboard/reducers/dashboardState.js b/superset-frontend/src/dashboard/reducers/dashboardState.js index 015cb9822c58..771a56301659 100644 --- a/superset-frontend/src/dashboard/reducers/dashboardState.js +++ b/superset-frontend/src/dashboard/reducers/dashboardState.js @@ -46,6 +46,10 @@ import { SET_OVERRIDE_CONFIRM, SAVE_DASHBOARD_STARTED, SAVE_DASHBOARD_FINISHED, + SET_DASHBOARD_LABELS_COLORMAP_SYNCABLE, + SET_DASHBOARD_LABELS_COLORMAP_SYNCED, + SET_DASHBOARD_SHARED_LABELS_COLORS_SYNCABLE, + SET_DASHBOARD_SHARED_LABELS_COLORS_SYNCED, } from '../actions/dashboardState'; import { HYDRATE_DASHBOARD } from '../actions/hydrate'; @@ -101,6 +105,30 @@ export default function dashboardStateReducer(state = {}, action) { updatedColorScheme: true, }; }, + [SET_DASHBOARD_LABELS_COLORMAP_SYNCABLE]() { + return { + ...state, + labelsColorMapMustSync: true, + }; + }, + [SET_DASHBOARD_LABELS_COLORMAP_SYNCED]() { + return { + ...state, + labelsColorMapMustSync: false, + }; + }, + [SET_DASHBOARD_SHARED_LABELS_COLORS_SYNCABLE]() { + return { + ...state, + sharedLabelsColorsMustSync: true, + }; + }, + [SET_DASHBOARD_SHARED_LABELS_COLORS_SYNCED]() { + return { + ...state, + sharedLabelsColorsMustSync: false, + }; + }, [TOGGLE_EXPAND_SLICE]() { const updatedExpandedSlices = { 
...state.expandedSlices }; const { sliceId } = action; diff --git a/superset-frontend/src/dashboard/types.ts b/superset-frontend/src/dashboard/types.ts index 8ed1405cdf89..f1b1c47b5dae 100644 --- a/superset-frontend/src/dashboard/types.ts +++ b/superset-frontend/src/dashboard/types.ts @@ -52,6 +52,8 @@ export type Chart = ChartState & { form_data: { viz_type: string; datasource: string; + color_scheme: string; + slice_id: number; }; }; @@ -131,7 +133,8 @@ export type DashboardInfo = { color_namespace: string; color_scheme_domain: string[]; label_colors: JsonObject; - shared_label_colors: JsonObject; + shared_label_colors: string[]; + map_label_colors: JsonObject; cross_filters_enabled: boolean; }; crossFiltersEnabled: boolean; @@ -177,7 +180,7 @@ export type ComponentType = (typeof componentTypes)[ComponentTypesKeys]; /** State of dashboardLayout item in redux */ export type LayoutItem = { children: string[]; - parents: string[]; + parents?: string[]; type: ComponentType; id: string; meta: { diff --git a/superset-frontend/src/dashboard/util/activeAllDashboardFilters.ts b/superset-frontend/src/dashboard/util/activeAllDashboardFilters.ts index f9e98e85365c..3bde1d2e6f85 100644 --- a/superset-frontend/src/dashboard/util/activeAllDashboardFilters.ts +++ b/superset-frontend/src/dashboard/util/activeAllDashboardFilters.ts @@ -54,9 +54,13 @@ export const getAllActiveFilters = ({ chartConfiguration?.[filterId]?.crossFilters?.chartsInScope ?? allSliceIds ?? []; + const filterType = nativeFilters?.[filterId]?.filterType; + const targets = nativeFilters?.[filterId]?.targets ?? 
scope; // Iterate over all roots to find all affected charts activeFilters[filterId] = { scope, + filterType, + targets, values: extraFormData, }; }); diff --git a/superset-frontend/src/dashboard/util/charts/getFormDataWithExtraFilters.ts b/superset-frontend/src/dashboard/util/charts/getFormDataWithExtraFilters.ts index 9160d21e58a1..2190318547ef 100644 --- a/superset-frontend/src/dashboard/util/charts/getFormDataWithExtraFilters.ts +++ b/superset-frontend/src/dashboard/util/charts/getFormDataWithExtraFilters.ts @@ -25,6 +25,7 @@ import { import { ChartConfiguration, ChartQueryPayload } from 'src/dashboard/types'; import { getExtraFormData } from 'src/dashboard/components/nativeFilters/utils'; import { areObjectsEqual } from 'src/reduxUtils'; +import { isEqual } from 'lodash'; import getEffectiveExtraFilters from './getEffectiveExtraFilters'; import { getAllActiveFilters } from '../activeAllDashboardFilters'; @@ -38,6 +39,7 @@ export interface GetFormDataWithExtraFiltersArguments { chart: ChartQueryPayload; filters: DataRecordFilters; colorScheme?: string; + ownColorScheme?: string; colorNamespace?: string; sliceId: number; dataMask: DataMaskStateWithId; @@ -45,6 +47,7 @@ export interface GetFormDataWithExtraFiltersArguments { extraControls: Record<string, string | boolean | null>; labelsColor?: Record<string, string>; labelsColorMap?: Record<string, string>; + sharedLabelsColors?: string[]; allSliceIds: number[]; } @@ -57,30 +60,32 @@ export default function getFormDataWithExtraFilters({ nativeFilters, chartConfiguration, colorScheme, + ownColorScheme, colorNamespace, sliceId, dataMask, extraControls, labelsColor, labelsColorMap, + sharedLabelsColors, allSliceIds, }: GetFormDataWithExtraFiltersArguments) { // if dashboard metadata + filters have not changed, use cache if possible const cachedFormData = cachedFormdataByChart[sliceId]; if ( cachedFiltersByChart[sliceId] === filters && - areObjectsEqual(cachedFormData?.color_scheme, colorScheme, { - ignoreUndefined: 
true, - }) && + areObjectsEqual(cachedFormData?.own_color_scheme, ownColorScheme) && + areObjectsEqual(cachedFormData?.color_scheme, colorScheme) && areObjectsEqual(cachedFormData?.color_namespace, colorNamespace, { ignoreUndefined: true, }) && areObjectsEqual(cachedFormData?.label_colors, labelsColor, { ignoreUndefined: true, }) && - areObjectsEqual(cachedFormData?.shared_label_colors, labelsColorMap, { + areObjectsEqual(cachedFormData?.map_label_colors, labelsColorMap, { ignoreUndefined: true, }) && + isEqual(cachedFormData?.shared_label_colors, sharedLabelsColors) && !!cachedFormData && areObjectsEqual(cachedFormData?.dataMask, dataMask, { ignoreUndefined: true, @@ -110,9 +115,14 @@ export default function getFormDataWithExtraFilters({ const formData = { ...chart.form_data, + chart_id: chart.id, label_colors: labelsColor, - shared_label_colors: labelsColorMap, + shared_label_colors: sharedLabelsColors, + map_label_colors: labelsColorMap, ...(colorScheme && { color_scheme: colorScheme }), + ...(ownColorScheme && { + own_color_scheme: ownColorScheme, + }), extra_filters: getEffectiveExtraFilters(filters), ...extraData, ...extraControls, diff --git a/superset-frontend/src/dashboard/util/crossFilters.test.ts b/superset-frontend/src/dashboard/util/crossFilters.test.ts index b88579cc843a..96281ee22abe 100644 --- a/superset-frontend/src/dashboard/util/crossFilters.test.ts +++ b/superset-frontend/src/dashboard/util/crossFilters.test.ts @@ -58,6 +58,7 @@ const CHARTS = { datasource: '2__table', viz_type: 'echarts_timeseries_line', slice_id: 1, + color_scheme: 'supersetColors', }, chartAlert: null, chartStatus: 'rendered' as const, @@ -76,6 +77,7 @@ const CHARTS = { '2': { id: 2, form_data: { + color_scheme: 'supersetColors', datasource: '2__table', viz_type: 'echarts_timeseries_line', slice_id: 2, @@ -262,8 +264,10 @@ test('Recalculate charts in global filter scope when charts change', () => { '3': { id: 3, form_data: { + slice_id: 3, datasource: '3__table', viz_type: 
'echarts_timeseries_line', + color_scheme: 'supersetColors', }, chartAlert: null, chartStatus: 'rendered' as const, diff --git a/superset-frontend/src/dashboard/util/getRelatedCharts.test.ts b/superset-frontend/src/dashboard/util/getRelatedCharts.test.ts new file mode 100644 index 000000000000..a1ba74a20ce5 --- /dev/null +++ b/superset-frontend/src/dashboard/util/getRelatedCharts.test.ts @@ -0,0 +1,107 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { + AppliedCrossFilterType, + Filter, + NativeFilterType, +} from '@superset-ui/core'; +import { getRelatedCharts } from './getRelatedCharts'; + +const slices = { + '1': { datasource: 'ds1', slice_id: 1 }, + '2': { datasource: 'ds2', slice_id: 2 }, + '3': { datasource: 'ds1', slice_id: 3 }, +} as any; + +test('Return all chart ids in global scope with native filters', () => { + const filters = { + filterKey1: { + filterType: 'filter_select', + chartsInScope: [1, 2, 3], + scope: { + excluded: [], + rootPath: [], + }, + targets: [ + { + column: { name: 'column1' }, + datasetId: 100, + }, + ], + type: NativeFilterType.NativeFilter, + } as unknown as Filter, + }; + + const result = getRelatedCharts('filterKey1', filters.filterKey1, slices); + expect(result).toEqual([1, 2, 3]); +}); + +test('Return only chart ids in specific scope with native filters', () => { + const filters = { + filterKey1: { + filterType: 'filter_select', + chartsInScope: [1, 3], + scope: { + excluded: [], + rootPath: [], + }, + targets: [ + { + column: { name: 'column1' }, + datasetId: 100, + }, + ], + type: NativeFilterType.NativeFilter, + } as unknown as Filter, + }; + + const result = getRelatedCharts('filterKey1', filters.filterKey1, slices); + expect(result).toEqual([1, 3]); +}); + +test('Return all chart ids with cross filter in global scope', () => { + const filters = { + '3': { + filterType: undefined, + scope: [1, 2, 3], + targets: [], + values: null, + } as AppliedCrossFilterType, + }; + + const result = getRelatedCharts('3', filters['3'], slices); + expect(result).toEqual([1, 2]); +}); + +test('Return only chart ids in specific scope with cross filter', () => { + const filters = { + '1': { + filterType: undefined, + scope: [1, 2], + targets: [], + values: { + filters: [{ col: 'column3' }], + }, + } as AppliedCrossFilterType, + }; + + const result = getRelatedCharts('1', filters['1'], slices); + expect(result).toEqual([2]); +}); diff --git 
a/superset-frontend/src/dashboard/util/getRelatedCharts.ts b/superset-frontend/src/dashboard/util/getRelatedCharts.ts new file mode 100644 index 000000000000..6d5cabfc9b94 --- /dev/null +++ b/superset-frontend/src/dashboard/util/getRelatedCharts.ts @@ -0,0 +1,113 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { + AppliedCrossFilterType, + AppliedNativeFilterType, + Filter, + isAppliedCrossFilterType, + isAppliedNativeFilterType, + isNativeFilter, +} from '@superset-ui/core'; +import { Slice } from 'src/types/Chart'; + +function isGlobalScope(scope: number[], slices: Record<string, Slice>) { + return scope.length === Object.keys(slices).length; +} + +function getRelatedChartsForSelectFilter( + slices: Record<string, Slice>, + chartsInScope: number[], +): number[] { + // all have been selected, always apply + if (isGlobalScope(chartsInScope, slices)) { + return Object.keys(slices).map(Number); + } + + const chartsInScopeSet = new Set(chartsInScope); + + return Object.values(slices).reduce((result: number[], slice) => { + if (chartsInScopeSet.has(slice.slice_id)) { + result.push(slice.slice_id); + } + return result; + }, []); +} +function getRelatedChartsForCrossFilter( + filterKey: string, + slices: Record<string, Slice>, + scope: number[], +): number[] { + const sourceSlice = slices[filterKey]; + + if (!sourceSlice) return []; + + const fullScope = [ + ...scope.filter(s => String(s) !== filterKey), + Number(filterKey), + ]; + const scopeSet = new Set(scope); + + return Object.values(slices).reduce((result: number[], slice) => { + if (slice.slice_id === Number(filterKey)) { + return result; + } + // Check if it's in the global scope + if (isGlobalScope(fullScope, slices)) { + result.push(slice.slice_id); + return result; + } + // Check if it's hand-picked in scope + if (scopeSet.has(slice.slice_id)) { + result.push(slice.slice_id); + } + return result; + }, []); +} + +export function getRelatedCharts( + filterKey: string, + filter: AppliedNativeFilterType | AppliedCrossFilterType | Filter, + slices: Record<string, Slice>, +) { + let related: number[] = []; + const isCrossFilter = + Object.keys(slices).includes(filterKey) && isAppliedCrossFilterType(filter); + + const chartsInScope = Array.isArray(filter.scope) + ? 
filter.scope + : (filter as Filter).chartsInScope ?? []; + + if (isCrossFilter) { + related = getRelatedChartsForCrossFilter(filterKey, slices, chartsInScope); + } + + const nativeFilter = filter as AppliedNativeFilterType | Filter; + // on highlight, a standard native filter is passed + // on apply, an applied native filter is passed + if ( + !isCrossFilter || + isAppliedNativeFilterType(nativeFilter) || + isNativeFilter(nativeFilter) + ) { + related = getRelatedChartsForSelectFilter(slices, chartsInScope); + } + + return related; +} diff --git a/superset-frontend/src/dashboard/util/useFilterFocusHighlightStyles.test.tsx b/superset-frontend/src/dashboard/util/useFilterFocusHighlightStyles.test.tsx index d3010376da21..80e6038c0726 100644 --- a/superset-frontend/src/dashboard/util/useFilterFocusHighlightStyles.test.tsx +++ b/superset-frontend/src/dashboard/util/useFilterFocusHighlightStyles.test.tsx @@ -24,6 +24,9 @@ import reducerIndex from 'spec/helpers/reducerIndex'; import { screen, render } from 'spec/helpers/testing-library'; import { initialState } from 'src/SqlLab/fixtures'; import useFilterFocusHighlightStyles from './useFilterFocusHighlightStyles'; +import { getRelatedCharts } from './getRelatedCharts'; + +jest.mock('./getRelatedCharts'); const TestComponent = ({ chartId }: { chartId: number }) => { const styles = useFilterFocusHighlightStyles(chartId); @@ -38,6 +41,7 @@ describe('useFilterFocusHighlightStyles', () => { { ...mockState, ...(initialState as any), ...customState }, compose(applyMiddleware(thunk)), ); + const mockGetRelatedCharts = getRelatedCharts as jest.Mock; const renderWrapper = (chartId: number, store = createMockStore()) => render(<TestComponent chartId={chartId} />, { @@ -57,6 +61,7 @@ describe('useFilterFocusHighlightStyles', () => { }); it('should return unfocused styles if chart is not in scope of focused native filter', async () => { + mockGetRelatedCharts.mockReturnValue([]); const store = createMockStore({ nativeFilters: { 
focusedFilterId: 'test-filter', @@ -76,6 +81,7 @@ describe('useFilterFocusHighlightStyles', () => { }); it('should return unfocused styles if chart is not in scope of hovered native filter', async () => { + mockGetRelatedCharts.mockReturnValue([]); const store = createMockStore({ nativeFilters: { hoveredFilterId: 'test-filter', @@ -96,6 +102,7 @@ describe('useFilterFocusHighlightStyles', () => { it('should return focused styles if chart is in scope of focused native filter', async () => { const chartId = 18; + mockGetRelatedCharts.mockReturnValue([chartId]); const store = createMockStore({ nativeFilters: { focusedFilterId: 'testFilter', @@ -116,6 +123,7 @@ describe('useFilterFocusHighlightStyles', () => { it('should return focused styles if chart is in scope of hovered native filter', async () => { const chartId = 18; + mockGetRelatedCharts.mockReturnValue([chartId]); const store = createMockStore({ nativeFilters: { hoveredFilterId: 'testFilter', @@ -136,6 +144,7 @@ describe('useFilterFocusHighlightStyles', () => { it('should return unfocused styles if focusedFilterField is targeting a different chart', async () => { const chartId = 18; + mockGetRelatedCharts.mockReturnValue([]); const store = createMockStore({ dashboardState: { focusedFilterField: { @@ -159,6 +168,7 @@ describe('useFilterFocusHighlightStyles', () => { it('should return focused styles if focusedFilterField chart equals our own', async () => { const chartId = 18; + mockGetRelatedCharts.mockReturnValue([chartId]); const store = createMockStore({ dashboardState: { focusedFilterField: { diff --git a/superset-frontend/src/dashboard/util/useFilterFocusHighlightStyles.ts b/superset-frontend/src/dashboard/util/useFilterFocusHighlightStyles.ts index f1f428240c16..aa636cb1ee55 100644 --- a/superset-frontend/src/dashboard/util/useFilterFocusHighlightStyles.ts +++ b/superset-frontend/src/dashboard/util/useFilterFocusHighlightStyles.ts @@ -16,11 +16,12 @@ * specific language governing permissions and 
limitations * under the License. */ -import { useTheme } from '@superset-ui/core'; +import { Filter, useTheme } from '@superset-ui/core'; import { useSelector } from 'react-redux'; import { getChartIdsInFilterScope } from 'src/dashboard/util/activeDashboardFilters'; import { DashboardState, RootState } from 'src/dashboard/types'; +import { getRelatedCharts } from './getRelatedCharts'; const selectFocusedFilterScope = ( dashboardState: DashboardState, @@ -41,6 +42,7 @@ const useFilterFocusHighlightStyles = (chartId: number) => { const dashboardState = useSelector( (state: RootState) => state.dashboardState, ); + const dashboardFilters = useSelector( (state: RootState) => state.dashboardFilters, ); @@ -49,12 +51,22 @@ const useFilterFocusHighlightStyles = (chartId: number) => { dashboardFilters, ); + const slices = + useSelector((state: RootState) => state.sliceEntities.slices) || {}; + const highlightedFilterId = nativeFilters?.focusedFilterId || nativeFilters?.hoveredFilterId; + if (!(focusedFilterScope || highlightedFilterId)) { return {}; } + const relatedCharts = getRelatedCharts( + highlightedFilterId as string, + nativeFilters.filters[highlightedFilterId as string] as Filter, + slices, + ); + // we use local styles here instead of a conditionally-applied class, // because adding any conditional class to this container // causes performance issues in Chrome. 
@@ -69,11 +81,7 @@ const useFilterFocusHighlightStyles = (chartId: number) => { }; if (highlightedFilterId) { - if ( - nativeFilters.filters[highlightedFilterId]?.chartsInScope?.includes( - chartId, - ) - ) { + if (relatedCharts.includes(chartId)) { return focusedChartStyles; } } else if ( diff --git a/superset-frontend/src/embedded/index.tsx b/superset-frontend/src/embedded/index.tsx index 735a057fc6de..5145052c8be9 100644 --- a/superset-frontend/src/embedded/index.tsx +++ b/superset-frontend/src/embedded/index.tsx @@ -80,7 +80,9 @@ function showFailureMessage(message: string) { if (!window.parent || window.parent === window) { showFailureMessage( - 'This page is intended to be embedded in an iframe, but it looks like that is not the case.', + t( + 'This page is intended to be embedded in an iframe, but it looks like that is not the case.', + ), ); } @@ -141,7 +143,9 @@ function start() { // something is most likely wrong with the guest token logging.error(err); showFailureMessage( - 'Something went wrong with embedded authentication. Check the dev console for details.', + t( + 'Something went wrong with embedded authentication. 
Check the dev console for details.', + ), ); }, ); diff --git a/superset-frontend/src/explore/actions/saveModalActions.test.ts b/superset-frontend/src/explore/actions/saveModalActions.test.ts index 062379e5d322..0368693f856d 100644 --- a/superset-frontend/src/explore/actions/saveModalActions.test.ts +++ b/superset-frontend/src/explore/actions/saveModalActions.test.ts @@ -20,7 +20,11 @@ import sinon from 'sinon'; import fetchMock from 'fetch-mock'; import { Dispatch } from 'redux'; import { ADD_TOAST } from 'src/components/MessageToasts/actions'; -import { DatasourceType, QueryFormData } from '@superset-ui/core'; +import { + DatasourceType, + QueryFormData, + SimpleAdhocFilter, +} from '@superset-ui/core'; import { createDashboard, createSlice, @@ -31,6 +35,7 @@ import { getSlicePayload, PayloadSlice, } from './saveModalActions'; +import { Operators } from '../constants'; // Define test constants and mock data using imported types const sliceId = 10; @@ -594,6 +599,7 @@ describe('getSlicePayload', () => { }, ], }; + const formDataWithAdhocFiltersWithExtra: QueryFormData = { ...formDataWithNativeFilters, viz_type: 'mixed_timeseries', @@ -625,11 +631,61 @@ describe('getSlicePayload', () => { owners as [], formDataFromSliceWithAdhocFilterB, ); + expect(JSON.parse(result.params as string).adhoc_filters).toEqual( formDataFromSliceWithAdhocFilterB.adhoc_filters, ); - expect(JSON.parse(result.params as string).adhoc_filters).toEqual( + expect(JSON.parse(result.params as string).adhoc_filters_b).toEqual( formDataFromSliceWithAdhocFilterB.adhoc_filters_b, ); }); + + test('should return the correct payload when formDataFromSliceWithAdhocFilter has no time range filters in mixed chart', () => { + const formDataFromSliceWithAdhocFilterB: QueryFormData = { + ...formDataFromSlice, + adhoc_filters: [], + adhoc_filters_b: [], + }; + + const formDataWithAdhocFiltersWithExtra: QueryFormData = { + ...formDataWithNativeFilters, + viz_type: 'mixed_timeseries', + adhoc_filters: [ + { + 
clause: 'WHERE', + subject: 'year', + operator: 'TEMPORAL_RANGE', + comparator: 'No filter', + expressionType: 'SIMPLE', + isExtra: true, + }, + ], + adhoc_filters_b: [ + { + clause: 'WHERE', + subject: 'year', + operator: 'TEMPORAL_RANGE', + comparator: 'No filter', + expressionType: 'SIMPLE', + isExtra: true, + }, + ], + }; + const result = getSlicePayload( + sliceName, + formDataWithAdhocFiltersWithExtra, + dashboards, + owners as [], + formDataFromSliceWithAdhocFilterB, + ); + + const hasTemporalRange = ( + JSON.parse(result.params as string).adhoc_filters_b || [] + ).some( + (filter: SimpleAdhocFilter) => + filter.operator === Operators.TemporalRange, + ); + + expect(hasTemporalRange).toBe(true); + }); }); diff --git a/superset-frontend/src/explore/actions/saveModalActions.ts b/superset-frontend/src/explore/actions/saveModalActions.ts index a66dba3a7b68..42f2c0686c85 100644 --- a/superset-frontend/src/explore/actions/saveModalActions.ts +++ b/superset-frontend/src/explore/actions/saveModalActions.ts @@ -119,19 +119,24 @@ export const getSlicePayload = ( } if (!hasTemporalRangeFilter(adhocFilters)) { - formDataWithNativeFilters.adhoc_filters?.forEach( - (filter: SimpleAdhocFilter) => { - if (filter.operator === Operators.TemporalRange && filter.isExtra) { - if (!adhocFilters.adhoc_filters) { - adhocFilters.adhoc_filters = []; - } - adhocFilters.adhoc_filters.push({ - ...filter, - comparator: 'No filter', - }); - } - }, + const adhocFiltersKeys = Object.keys(formDataWithNativeFilters).filter( + key => ADHOC_FILTER_REGEX.test(key), ); + adhocFiltersKeys?.forEach(filtersKey => { + formDataWithNativeFilters[filtersKey]?.forEach( + (filter: SimpleAdhocFilter) => { + if (filter.operator === Operators.TemporalRange && filter.isExtra) { + if (!adhocFilters[filtersKey]) { + adhocFilters[filtersKey] = []; + } + adhocFilters[filtersKey].push({ + ...filter, + comparator: 'No filter', + }); + } + }, + ); + }); } const formData = { ...formDataWithNativeFilters, diff --git 
a/superset-frontend/src/explore/components/ControlPanelsContainer.tsx b/superset-frontend/src/explore/components/ControlPanelsContainer.tsx index 27bf09d79d6b..4a3bae20ea88 100644 --- a/superset-frontend/src/explore/components/ControlPanelsContainer.tsx +++ b/superset-frontend/src/explore/components/ControlPanelsContainer.tsx @@ -448,7 +448,7 @@ export const ControlPanelsContainer = (props: ControlPanelsContainerProps) => { const renderControl = ({ name, config }: CustomControlItem) => { const { controls, chart, exploreState } = props; - const { visibility, hidden, ...restConfig } = config; + const { visibility, hidden, disableStash, ...restConfig } = config; // If the control item is not an object, we have to look up the control data from // the centralized controls file. @@ -529,7 +529,7 @@ export const ControlPanelsContainer = (props: ControlPanelsContainerProps) => { return ( <StashFormDataContainer - shouldStash={isVisible === false} + shouldStash={isVisible === false && disableStash !== true} fieldNames={[name]} key={`control-container-${name}`} > diff --git a/superset-frontend/src/explore/components/DataTablesPane/components/useResultsPane.tsx b/superset-frontend/src/explore/components/DataTablesPane/components/useResultsPane.tsx index fe262fe72078..19b828d97a42 100644 --- a/superset-frontend/src/explore/components/DataTablesPane/components/useResultsPane.tsx +++ b/superset-frontend/src/explore/components/DataTablesPane/components/useResultsPane.tsx @@ -134,7 +134,7 @@ export const useResultsPane = ({ } if (resultResp.length === 0) { - const title = t('No results were returned for this query'); + const title = t('No data'); return Array(queryCount).fill( <EmptyStateMedium image="document.svg" title={title} />, ); diff --git a/superset-frontend/src/explore/components/DataTablesPane/test/ResultsPaneOnDashboard.test.tsx b/superset-frontend/src/explore/components/DataTablesPane/test/ResultsPaneOnDashboard.test.tsx index a2cb3156cbe6..2576a912d107 100644 --- 
a/superset-frontend/src/explore/components/DataTablesPane/test/ResultsPaneOnDashboard.test.tsx +++ b/superset-frontend/src/explore/components/DataTablesPane/test/ResultsPaneOnDashboard.test.tsx @@ -98,9 +98,7 @@ describe('ResultsPaneOnDashboard', () => { const { findByText } = render(<ResultsPaneOnDashboard {...props} />, { useRedux: true, }); - expect( - await findByText('No results were returned for this query'), - ).toBeVisible(); + expect(await findByText('No data')).toBeVisible(); }); test('render errorMessage', async () => { diff --git a/superset-frontend/src/explore/components/EmbedCodeContent.jsx b/superset-frontend/src/explore/components/EmbedCodeContent.jsx index 19ccfeb54ce2..504ef78ed633 100644 --- a/superset-frontend/src/explore/components/EmbedCodeContent.jsx +++ b/superset-frontend/src/explore/components/EmbedCodeContent.jsx @@ -22,6 +22,7 @@ import { Input, TextArea } from 'src/components/Input'; import CopyToClipboard from 'src/components/CopyToClipboard'; import { URL_PARAMS } from 'src/constants'; import { getChartPermalink } from 'src/utils/urlUtils'; +import { useTimezone } from 'src/components/TimezoneContext'; import { CopyButton } from './DataTableControl'; const CopyButtonEmbedCode = styled(CopyButton)` @@ -31,6 +32,7 @@ const CopyButtonEmbedCode = styled(CopyButton)` `; const EmbedCodeContent = ({ formData, addDangerToast }) => { + const { timezone } = useTimezone(); const [height, setHeight] = useState('400'); const [width, setWidth] = useState('600'); const [url, setUrl] = useState(''); @@ -65,7 +67,8 @@ const EmbedCodeContent = ({ formData, addDangerToast }) => { const html = useMemo(() => { if (!url) return ''; - const srcLink = `${url}?${URL_PARAMS.standalone.name}=1&height=${height}`; + const timezoneParam = timezone !== 'UTC' ? 
`&${URL_PARAMS.timezone.name}=${encodeURIComponent(timezone)}` : ''; + const srcLink = `${url}?${URL_PARAMS.standalone.name}=1&height=${height}${timezoneParam}`; return ( '<iframe\n' + ` width="${width}"\n` + @@ -77,7 +80,7 @@ const EmbedCodeContent = ({ formData, addDangerToast }) => { '>\n' + '</iframe>' ); - }, [height, url, width]); + }, [height, url, width, timezone]); const text = errorMessage || html || t('Generating link, please wait..'); return ( diff --git a/superset-frontend/src/explore/components/ExploreChartHeader/index.jsx b/superset-frontend/src/explore/components/ExploreChartHeader/index.jsx index 988b11f8259c..c7df2af40f66 100644 --- a/superset-frontend/src/explore/components/ExploreChartHeader/index.jsx +++ b/superset-frontend/src/explore/components/ExploreChartHeader/index.jsx @@ -39,6 +39,7 @@ const propTypes = { canOverwrite: PropTypes.bool.isRequired, canDownload: PropTypes.bool.isRequired, dashboardId: PropTypes.number, + colorScheme: PropTypes.string, isStarred: PropTypes.bool.isRequired, slice: PropTypes.object, sliceName: PropTypes.string, @@ -68,6 +69,7 @@ const additionalItemsStyles = theme => css` export const ExploreChartHeader = ({ dashboardId, + colorScheme: dashboardColorScheme, slice, actions, formData, @@ -84,17 +86,15 @@ export const ExploreChartHeader = ({ const dispatch = useDispatch(); const { latestQueryFormData, sliceFormData } = chart; const [isPropertiesModalOpen, setIsPropertiesModalOpen] = useState(false); - const updateCategoricalNamespace = async () => { const { dashboards } = metadata || {}; const dashboard = dashboardId && dashboards && dashboards.find(d => d.id === dashboardId); - if (!dashboard) { + if (!dashboard || !dashboardColorScheme) { // clean up color namespace and shared color maps // to avoid colors spill outside of dashboard context resetColors(metadata?.color_namespace); - return; } if (dashboard) { @@ -108,6 +108,7 @@ export const ExploreChartHeader = ({ // setting the chart to use the dashboard custom 
label colors if any const dashboardMetadata = JSON.parse(result.json_metadata); + // ensure consistency with the dashboard applyColors(dashboardMetadata); } catch (error) { logging.info(t('Unable to retrieve dashboard colors')); diff --git a/superset-frontend/src/explore/components/ExploreChartPanel/index.jsx b/superset-frontend/src/explore/components/ExploreChartPanel/index.jsx index 189177019c70..1a9a9ebf3319 100644 --- a/superset-frontend/src/explore/components/ExploreChartPanel/index.jsx +++ b/superset-frontend/src/explore/components/ExploreChartPanel/index.jsx @@ -270,6 +270,8 @@ const ExploreChartPanel = ({ timeout={timeout} triggerQuery={chart.triggerQuery} vizType={vizType} + description={slice?.description} + title={slice?.slice_name} /> )} </div> @@ -294,6 +296,8 @@ const ExploreChartPanel = ({ formData, onQuery, ownState, + slice?.description, + slice?.slice_name, timeout, triggerRender, vizType, diff --git a/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx b/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx index 3d263671c1b1..4f06231bc659 100644 --- a/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx +++ b/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx @@ -76,6 +76,9 @@ const propTypes = { actions: PropTypes.object.isRequired, datasource_type: PropTypes.string.isRequired, dashboardId: PropTypes.number, + colorScheme: PropTypes.string, + ownColorScheme: PropTypes.string, + dashboardColorScheme: PropTypes.string, isDatasourceMetaLoading: PropTypes.bool.isRequired, chart: chartPropShape.isRequired, slice: PropTypes.object, @@ -356,7 +359,14 @@ function ExploreViewContainer(props) { } useComponentDidMount(() => { - props.actions.logEvent(LOG_ACTIONS_MOUNT_EXPLORER); + props.actions.logEvent( + LOG_ACTIONS_MOUNT_EXPLORER, + props.slice?.slice_id + ? 
{ + slice_id: props.slice.slice_id, + } + : undefined, + ); }); useChangeEffect(tabId, (previous, current) => { @@ -563,6 +573,7 @@ function ExploreViewContainer(props) { canOverwrite={props.can_overwrite} canDownload={props.can_download} dashboardId={props.dashboardId} + colorScheme={props.dashboardColorScheme} isStarred={props.isStarred} slice={props.slice} sliceName={props.sliceName} @@ -741,6 +752,9 @@ function mapStateToProps(state) { }, ); const chart = charts[slice_id]; + const colorScheme = explore.form_data?.color_scheme; + const ownColorScheme = explore.form_data?.own_color_scheme; + const dashboardColorScheme = explore.form_data?.dashboard_color_scheme; let dashboardId = Number(explore.form_data?.dashboardId); if (Number.isNaN(dashboardId)) { @@ -753,6 +767,9 @@ function mapStateToProps(state) { datasource_type: datasource.type, datasourceId: datasource.datasource_id, dashboardId, + colorScheme, + ownColorScheme, + dashboardColorScheme, controls: explore.controls, can_add: !!explore.can_add, can_download: !!explore.can_download, diff --git a/superset-frontend/src/explore/components/controls/ColorSchemeControl/ColorSchemeControl.test.tsx b/superset-frontend/src/explore/components/controls/ColorSchemeControl/ColorSchemeControl.test.tsx index 0f380b3910c2..264e6968c8d2 100644 --- a/superset-frontend/src/explore/components/controls/ColorSchemeControl/ColorSchemeControl.test.tsx +++ b/superset-frontend/src/explore/components/controls/ColorSchemeControl/ColorSchemeControl.test.tsx @@ -29,6 +29,7 @@ import ColorSchemeControl, { ColorSchemes } from '.'; const defaultProps = () => ({ hasCustomLabelsColor: false, + sharedLabelsColors: [], label: 'Color scheme', labelMargin: 0, name: 'color', @@ -128,8 +129,8 @@ test('displays color scheme options', async () => { }); }); -test('Renders control with dashboard id', () => { - setup({ dashboardId: 1 }); +test('Renders control with dashboard id and dashboard color scheme', () => { + setup({ dashboardId: 1, 
hasDashboardColorScheme: true }); expect(screen.getByText('Dashboard scheme')).toBeInTheDocument(); expect( screen.getByLabelText('Select color scheme', { selector: 'input' }), diff --git a/superset-frontend/src/explore/components/controls/ColorSchemeControl/index.tsx b/superset-frontend/src/explore/components/controls/ColorSchemeControl/index.tsx index e43bb32b7252..fd9dc85f0fd3 100644 --- a/superset-frontend/src/explore/components/controls/ColorSchemeControl/index.tsx +++ b/superset-frontend/src/explore/components/controls/ColorSchemeControl/index.tsx @@ -26,6 +26,8 @@ import { styled, t, useTheme, + getLabelsColorMap, + CategoricalColorNamespace, } from '@superset-ui/core'; import AntdSelect from 'antd/lib/select'; import { isFunction, sortBy } from 'lodash'; @@ -35,6 +37,7 @@ import Icons from 'src/components/Icons'; import { SelectOptionsType } from 'src/components/Select/types'; import { StyledSelect } from 'src/components/Select/styles'; import { handleFilterOptionHelper } from 'src/components/Select/utils'; +import { getColorNamespace } from 'src/utils/colorScheme'; import ColorSchemeLabel from './ColorSchemeLabel'; const { Option, OptGroup } = AntdSelect; @@ -47,6 +50,12 @@ export interface ColorSchemes { export interface ColorSchemeControlProps { hasCustomLabelsColor: boolean; + hasDashboardColorScheme?: boolean; + hasSharedLabelsColor?: boolean; + sharedLabelsColors?: string[]; + mapLabelsColors?: Record<string, any>; + colorNamespace?: string; + chartId?: number; dashboardId?: number; label: string; name: string; @@ -64,8 +73,8 @@ const StyledAlert = styled(Icons.AlertSolid)` `; const CUSTOM_LABEL_ALERT = t( - `This color scheme is being overridden by custom label colors. - Check the JSON metadata in the Advanced settings`, + `The colors of this chart might be overridden by custom label colors of the related dashboard. 
+ Check the JSON metadata in the Advanced settings.`, ); const DASHBOARD_ALERT = t( @@ -73,18 +82,38 @@ const DASHBOARD_ALERT = t( Edit the color scheme in the dashboard properties.`, ); +const DASHBOARD_CONTEXT_ALERT = t( + `You are viewing this chart in a dashboard context with labels shared across multiple charts. + The color scheme selection is disabled.`, +); + +const DASHBOARD_CONTEXT_TOOLTIP = t( + `You are viewing this chart in the context of a dashboard that is directly affecting its colors. + To edit the color scheme, open this chart outside of the dashboard.`, +); + const Label = ({ label, - hasCustomLabelsColor, dashboardId, + hasSharedLabelsColor, + hasCustomLabelsColor, + hasDashboardColorScheme, }: Pick< ColorSchemeControlProps, - 'label' | 'hasCustomLabelsColor' | 'dashboardId' + | 'label' + | 'dashboardId' + | 'hasCustomLabelsColor' + | 'hasSharedLabelsColor' + | 'hasDashboardColorScheme' >) => { - if (hasCustomLabelsColor || dashboardId) { - const alertTitle = hasCustomLabelsColor - ? CUSTOM_LABEL_ALERT - : DASHBOARD_ALERT; + if (hasSharedLabelsColor || hasCustomLabelsColor || hasDashboardColorScheme) { + const alertTitle = + hasCustomLabelsColor && !hasSharedLabelsColor + ? CUSTOM_LABEL_ALERT + : dashboardId && hasDashboardColorScheme + ? DASHBOARD_ALERT + : DASHBOARD_CONTEXT_ALERT; + return ( <> {label}{' '} @@ -99,7 +128,12 @@ const Label = ({ const ColorSchemeControl = ({ hasCustomLabelsColor = false, + hasDashboardColorScheme = false, + mapLabelsColors = {}, + sharedLabelsColors = [], dashboardId, + colorNamespace, + chartId, label = t('Color scheme'), onChange = () => {}, value, @@ -110,9 +144,21 @@ const ColorSchemeControl = ({ isLinear, ...rest }: ColorSchemeControlProps) => { + const countSharedLabelsColor = sharedLabelsColors.length; + const colorMapInstance = getLabelsColorMap(); + const chartLabels = chartId + ? 
colorMapInstance.chartsLabelsMap.get(chartId)?.labels || [] + : []; + const hasSharedLabelsColor = !!( + dashboardId && + countSharedLabelsColor > 0 && + chartLabels.some(label => sharedLabelsColors.includes(label)) + ); + const hasDashboardScheme = dashboardId && hasDashboardColorScheme; + const showDashboardLockedOption = hasDashboardScheme || hasSharedLabelsColor; const theme = useTheme(); const currentScheme = useMemo(() => { - if (dashboardId) { + if (showDashboardLockedOption) { return 'dashboard'; } let result = value || defaultScheme; @@ -121,13 +167,15 @@ const ColorSchemeControl = ({ result = schemesObject?.SUPERSET_DEFAULT?.id; } return result; - }, [dashboardId, defaultScheme, schemes, value]); + }, [defaultScheme, schemes, showDashboardLockedOption, value]); const options = useMemo(() => { - if (dashboardId) { + if (showDashboardLockedOption) { return [ - <Option value="dashboard" label={t('dashboard')} key="dashboard"> - <Tooltip title={DASHBOARD_ALERT}>{t('Dashboard scheme')}</Tooltip> + <Option value="dashboard" label={t('Dashboard')} key="dashboard"> + <Tooltip title={DASHBOARD_CONTEXT_TOOLTIP}> + {t('Dashboard scheme')} + </Tooltip> </Option>, ]; } @@ -218,11 +266,29 @@ const ColorSchemeControl = ({ ))} </OptGroup> )); - }, [choices, dashboardId, isLinear, schemes]); + }, [choices, hasDashboardScheme, hasSharedLabelsColor, isLinear, schemes]); // We can't pass on change directly because it receives a second // parameter and it would be interpreted as the error parameter - const handleOnChange = (value: string) => onChange(value); + const handleOnChange = (value: string) => { + if (chartId) { + colorMapInstance.setOwnColorScheme(chartId, value); + if (dashboardId) { + const colorNameSpace = getColorNamespace(colorNamespace); + const categoricalNamespace = + CategoricalColorNamespace.getNamespace(colorNameSpace); + + const sharedLabelsSet = new Set(sharedLabelsColors); + // reset colors except shared and custom labels to keep dashboard consistency + 
const resettableLabels = Object.keys(mapLabelsColors).filter( + l => !sharedLabelsSet.has(l), + ); + categoricalNamespace.resetColorsForLabels(resettableLabels); + } + } + + onChange(value); + }; return ( <> @@ -231,8 +297,10 @@ const ColorSchemeControl = ({ label={ <Label label={label} - hasCustomLabelsColor={hasCustomLabelsColor} dashboardId={dashboardId} + hasCustomLabelsColor={hasCustomLabelsColor} + hasDashboardColorScheme={hasDashboardColorScheme} + hasSharedLabelsColor={hasSharedLabelsColor} /> } /> @@ -249,7 +317,7 @@ const ColorSchemeControl = ({ `} aria-label={t('Select color scheme')} allowClear={clearable} - disabled={!!dashboardId} + disabled={hasDashboardScheme || hasSharedLabelsColor} onChange={handleOnChange} placeholder={t('Select scheme')} value={currentScheme} diff --git a/superset-frontend/src/explore/components/controls/DateFilterControl/DateFilterLabel.tsx b/superset-frontend/src/explore/components/controls/DateFilterControl/DateFilterLabel.tsx index e5ef55326796..c1441fc2f34b 100644 --- a/superset-frontend/src/explore/components/controls/DateFilterControl/DateFilterLabel.tsx +++ b/superset-frontend/src/explore/components/controls/DateFilterControl/DateFilterLabel.tsx @@ -17,6 +17,7 @@ * under the License. */ import { ReactNode, useState, useEffect, useMemo } from 'react'; +import { DateTime } from 'luxon'; import { css, styled, @@ -151,6 +152,101 @@ const getTooltipTitle = ( range || null ); +/** + * --- Timezone helpers (Luxon-based) --- + * If ?timezone=XYZ is present, use that; otherwise default to 'Asia/Kolkata'. + * We convert backend-evaluated UTC ranges into this timezone for display. + */ + +// Read timezone from URL (?timezone=Asia/Kolkata). Defaults to Asia/Kolkata. 
+function getTimezoneFromUrl(): string { + try { + const urlParams = new URLSearchParams(window.location.search); + const tz = urlParams.get('timezone'); + const fallback = 'Asia/Kolkata'; + return tz?.trim() || fallback; + } catch { + return 'Asia/Kolkata'; + } +} + +function convertRangeTZ(s: string, toTZ = 'Asia/Kolkata') { + const isoRe = + /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:Z|[+-]\d{2}:\d{2})?/g; + + const matches = [...s.matchAll(isoRe)].map(m => m[0]); + if (matches.length < 2) return s; + + const convert = (iso: string) => { + // If no offset, assume UTC + const src = /Z|[+-]\d{2}:\d{2}$/.test(iso) ? iso : `${iso}Z`; + const dt = new Date(src); + + // Format as ISO-like string in target zone + const parts = Object.fromEntries( + new Intl.DateTimeFormat('en-CA', { + timeZone: toTZ, + year: 'numeric', + month: '2-digit', + day: '2-digit', + hour: '2-digit', + minute: '2-digit', + second: '2-digit', + hour12: false, + }) + .formatToParts(dt) + .filter(p => + ['year', 'month', 'day', 'hour', 'minute', 'second'].includes(p.type), + ) + .map(p => [p.type, p.value]), + ); + + return `${parts.year}-${parts.month}-${parts.day}T${parts.hour}:${parts.minute}:${parts.second}`; + }; + + const a = convert(matches[0]); + const b = convert(matches[1]); + + return s.replace(matches[0], a).replace(matches[1], b); +} + +export const getDateFromTimezone = (timezone: string) => + DateTime.now().setZone(timezone); +export const getEzTimezoneDate = ( + timezone: string, + fn: 'startOf' | 'endOf', + unit: 'day' | 'month' | 'year', +): string => { + const date = getDateFromTimezone(timezone); + // Convert boundary in TZ to UTC ISO string + return date[fn](unit).toUTC().toISO() as string; +}; + +// Try to parse a naive "YYYY-MM-DD HH:mm:ss" or ISO-like string as UTC using Luxon +function parseNaiveUTC(input: string): DateTime | null { + if (!input) return null; + let iso = input.trim().replace(' ', 'T'); + const hasTZ = /[zZ]|[+-]\d{2}:?\d{2}$/.test(iso); + if (!hasTZ) 
iso += 'Z'; + const dt = DateTime.fromISO(iso, { zone: 'utc' }); + return dt.isValid ? dt : null; +} + +// Convert a range string "start to end" (or "start : end") from UTC -> tz using Luxon +function convertRangeUTCToTZ(range: string, tz: string): string { + if (!range) return range; + const parts = range.split(/\s+(?:to|:)\s+/i); + if (parts.length !== 2) return range; + + const startUTC = parseNaiveUTC(parts[0]); + const endUTC = parseNaiveUTC(parts[1]); + if (!startUTC || !endUTC) return range; + + const startStr = startUTC.setZone(tz).toFormat('yyyy-MM-dd HH:mm:ss ZZ'); + const endStr = endUTC.setZone(tz).toFormat('yyyy-MM-dd HH:mm:ss ZZ'); + return `${startStr} to ${endStr}`; +} + export default function DateFilterLabel(props: DateFilterControlProps) { const { onChange, @@ -175,6 +271,9 @@ export default function DateFilterLabel(props: DateFilterControlProps) { const theme = useTheme(); const [labelRef, labelIsTruncated] = useCSSTextTruncation<HTMLSpanElement>(); + // Backend evaluates to UTC; we convert to the URL tz or default 'Asia/Kolkata' for display. + const urlTZ = getTimezoneFromUrl(); + useEffect(() => { if (value === NO_TIME_RANGE) { setActualTimeRange(NO_TIME_RANGE); @@ -188,6 +287,10 @@ export default function DateFilterLabel(props: DateFilterControlProps) { setValidTimeRange(false); setTooltipTitle(value || null); } else { + const convertedADR = actualRange + ? 
convertRangeUTCToTZ(actualRange, urlTZ) + : actualRange; + /* HRT == human readable text ADR == actual datetime range @@ -205,22 +308,27 @@ export default function DateFilterLabel(props: DateFilterControlProps) { guessedFrame === 'Current' || guessedFrame === 'No filter' ) { + // Pill shows HRT (value); tooltip shows ADR (converted) setActualTimeRange(value); setTooltipTitle( - getTooltipTitle(labelIsTruncated, value, actualRange), + getTooltipTitle(labelIsTruncated, value, convertedADR), ); } else { - setActualTimeRange(actualRange || ''); + // Pill shows ADR (converted); tooltip shows HRT (value) + setActualTimeRange(convertedADR || ''); setTooltipTitle( - getTooltipTitle(labelIsTruncated, actualRange, value), + getTooltipTitle(labelIsTruncated, convertedADR, value), ); } setValidTimeRange(true); } setLastFetchedTimeRange(value); - setEvalResponse(actualRange || value); + const previewADR = actualRange + ? convertRangeUTCToTZ(actualRange, urlTZ) + : actualRange; + setEvalResponse(previewADR || value); }); - }, [guessedFrame, labelIsTruncated, labelRef, value]); + }, [guessedFrame, labelIsTruncated, labelRef, value, urlTZ]); useDebouncedEffect( () => { @@ -236,7 +344,10 @@ export default function DateFilterLabel(props: DateFilterControlProps) { setEvalResponse(error || ''); setValidTimeRange(false); } else { - setEvalResponse(actualRange || ''); + const previewADR = actualRange + ? 
convertRangeUTCToTZ(actualRange, urlTZ) + : actualRange; + setEvalResponse(previewADR || ''); setValidTimeRange(true); } setLastFetchedTimeRange(timeRangeValue); @@ -244,7 +355,7 @@ export default function DateFilterLabel(props: DateFilterControlProps) { } }, SLOW_DEBOUNCE, - [timeRangeValue], + [timeRangeValue, lastFetchedTimeRange, urlTZ], ); function onSave() { @@ -314,11 +425,7 @@ export default function DateFilterLabel(props: DateFilterControlProps) { <Divider /> <div> <div className="section-title">{t('Actual time range')}</div> - {validTimeRange && ( - <div> - {evalResponse === 'No filter' ? t('No filter') : evalResponse} - </div> - )} + {validTimeRange && <div>{convertRangeTZ(evalResponse, urlTZ)}</div>} {!validTimeRange && ( <IconWrapper className="warning"> <Icons.ErrorSolidSmall iconColor={theme.colors.error.base} /> diff --git a/superset-frontend/src/explore/components/controls/DateFilterControl/components/CustomFrame.tsx b/superset-frontend/src/explore/components/controls/DateFilterControl/components/CustomFrame.tsx index d3448d2a56f8..162905112460 100644 --- a/superset-frontend/src/explore/components/controls/DateFilterControl/components/CustomFrame.tsx +++ b/superset-frontend/src/explore/components/controls/DateFilterControl/components/CustomFrame.tsx @@ -19,6 +19,8 @@ import { useSelector } from 'react-redux'; import { t, customTimeRangeDecode } from '@superset-ui/core'; import { Moment } from 'moment'; +import moment from 'moment-timezone'; +import { useTimezone } from 'src/components/TimezoneContext'; import { isInteger } from 'lodash'; // @ts-ignore import { locales } from 'antd/dist/antd-with-locales'; @@ -46,6 +48,7 @@ import { import { ExplorePageState } from 'src/explore/types'; export function CustomFrame(props: FrameComponentProps) { + const { timezone } = useTimezone(); const { customRange, matchedFlag } = customTimeRangeDecode(props.value); if (!matchedFlag) { props.onChange(customTimeRangeEncode(customRange)); @@ -118,6 +121,17 @@ export 
function CustomFrame(props: FrameComponentProps) { const datePickerLocale = locales[LOCALE_MAPPING[localFromFlaskBabel]]?.DatePicker; + // Helper functions for timezone-aware date handling + const convertToTimezone = (datetime: string): Moment => { + const converted = moment.tz(datetime, timezone); + return converted; + }; + + const convertFromTimezone = (momentDate: Moment): string => { + const result = momentDate.clone().tz(timezone).format(MOMENT_FORMAT); + return result; + }; + return ( <div data-test="custom-frame"> <div className="section-title">{t('Configure custom time range')}</div> @@ -140,9 +154,9 @@ export function CustomFrame(props: FrameComponentProps) { <Row> <DatePicker showTime - defaultValue={dttmToMoment(sinceDatetime)} + defaultValue={convertToTimezone(sinceDatetime)} onChange={(datetime: Moment) => - onChange('sinceDatetime', datetime.format(MOMENT_FORMAT)) + onChange('sinceDatetime', convertFromTimezone(datetime)) } allowClear={false} locale={datePickerLocale} @@ -193,9 +207,9 @@ export function CustomFrame(props: FrameComponentProps) { <Row> <DatePicker showTime - defaultValue={dttmToMoment(untilDatetime)} + defaultValue={convertToTimezone(untilDatetime)} onChange={(datetime: Moment) => - onChange('untilDatetime', datetime.format(MOMENT_FORMAT)) + onChange('untilDatetime', convertFromTimezone(datetime)) } allowClear={false} locale={datePickerLocale} @@ -250,9 +264,9 @@ export function CustomFrame(props: FrameComponentProps) { <Col> <DatePicker showTime - defaultValue={dttmToMoment(anchorValue)} + defaultValue={convertToTimezone(anchorValue)} onChange={(datetime: Moment) => - onChange('anchorValue', datetime.format(MOMENT_FORMAT)) + onChange('anchorValue', convertFromTimezone(datetime)) } allowClear={false} className="control-anchor-to-datetime" diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.test.tsx 
b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.test.tsx index 395b943dc61c..2657e5429531 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.test.tsx +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.test.tsx @@ -23,54 +23,102 @@ import { Provider } from 'react-redux'; import configureMockStore from 'redux-mock-store'; import thunk from 'redux-thunk'; import { supersetTheme, ThemeProvider } from '@superset-ui/core'; -import ColumnSelectPopover from 'src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover'; +import ColumnSelectPopover, { + ColumnSelectPopoverProps, +} from 'src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover'; const middlewares = [thunk]; const mockStore = configureMockStore(middlewares); -describe('ColumnSelectPopover - onTabChange function', () => { - it('updates adhocColumn when switching to sqlExpression tab with custom label', () => { - const mockColumns = [{ column_name: 'year' }]; - const mockOnClose = jest.fn(); - const mockOnChange = jest.fn(); - const mockGetCurrentTab = jest.fn(); - const mockSetDatasetModal = jest.fn(); - const mockSetLabel = jest.fn(); +const renderPopover = ( + props: Pick< + ColumnSelectPopoverProps, + 'columns' | 'editedColumn' | 'getCurrentTab' | 'onChange' + >, +) => { + const store = mockStore({ explore: { datasource: { type: 'table' } } }); - const store = mockStore({ explore: { datasource: { type: 'table' } } }); + return render( + <Provider store={store}> + <ThemeProvider theme={supersetTheme}> + <ColumnSelectPopover + hasCustomLabel + isTemporal + label="Custom Label" + onClose={jest.fn()} + setDatasetModal={jest.fn()} + setLabel={jest.fn()} + {...props} + /> + </ThemeProvider> + </Provider>, + ); +}; - const { container, getByText } = render( - <Provider store={store}> - <ThemeProvider theme={supersetTheme}> - 
<ColumnSelectPopover - columns={mockColumns} - editedColumn={mockColumns[0]} - getCurrentTab={mockGetCurrentTab} - hasCustomLabel - isTemporal - label="Custom Label" - onChange={mockOnChange} - onClose={mockOnClose} - setDatasetModal={mockSetDatasetModal} - setLabel={mockSetLabel} - /> - </ThemeProvider> - </Provider>, - ); +test('updates adhocColumn when switching to sqlExpression tab with custom label', () => { + const mockColumns = [{ column_name: 'year' }]; + const mockOnChange = jest.fn(); + const mockGetCurrentTab = jest.fn(); - const sqlExpressionTab = container.querySelector( - '#adhoc-metric-edit-tabs-tab-sqlExpression', - ); - expect(sqlExpressionTab).not.toBeNull(); - fireEvent.click(sqlExpressionTab!); - expect(mockGetCurrentTab).toHaveBeenCalledWith('sqlExpression'); + const { container, getByText } = renderPopover({ + columns: mockColumns, + editedColumn: mockColumns[0], + getCurrentTab: mockGetCurrentTab, + onChange: mockOnChange, + }); + + const sqlExpressionTab = container.querySelector( + '#adhoc-metric-edit-tabs-tab-sqlExpression', + ); + expect(sqlExpressionTab).not.toBeNull(); + fireEvent.click(sqlExpressionTab!); + expect(mockGetCurrentTab).toHaveBeenCalledWith('sqlExpression'); + + const saveButton = getByText('Save'); + fireEvent.click(saveButton); + expect(mockOnChange).toHaveBeenCalledWith({ + label: 'Custom Label', + sqlExpression: 'year', + expressionType: 'SQL', + }); +}); + +test('open with Simple tab selected when there is no column selected', () => { + const { getByText } = renderPopover({ + columns: [{ column_name: 'year' }], + editedColumn: undefined, + getCurrentTab: jest.fn(), + onChange: jest.fn(), + }); + expect(getByText('Saved')).toHaveAttribute('aria-selected', 'false'); + expect(getByText('Simple')).toHaveAttribute('aria-selected', 'true'); + expect(getByText('Custom SQL')).toHaveAttribute('aria-selected', 'false'); +}); + +test('open with Saved tab selected when there is a saved column selected', () => { + const { 
getByText } = renderPopover({ + columns: [{ column_name: 'year' }], + editedColumn: { column_name: 'year', expression: 'year - 1' }, + getCurrentTab: jest.fn(), + onChange: jest.fn(), + }); + expect(getByText('Saved')).toHaveAttribute('aria-selected', 'true'); + expect(getByText('Simple')).toHaveAttribute('aria-selected', 'false'); + expect(getByText('Custom SQL')).toHaveAttribute('aria-selected', 'false'); +}); - const saveButton = getByText('Save'); - fireEvent.click(saveButton); - expect(mockOnChange).toHaveBeenCalledWith({ - label: 'Custom Label', - sqlExpression: 'year', - expressionType: 'SQL', - }); +test('open with Custom SQL tab selected when there is a custom SQL selected', () => { + const { getByText } = renderPopover({ + columns: [{ column_name: 'year' }], + editedColumn: { + column_name: 'year', + label: 'Custom SQL', + sqlExpression: 'year - 1', + }, + getCurrentTab: jest.fn(), + onChange: jest.fn(), }); + expect(getByText('Saved')).toHaveAttribute('aria-selected', 'false'); + expect(getByText('Simple')).toHaveAttribute('aria-selected', 'false'); + expect(getByText('Custom SQL')).toHaveAttribute('aria-selected', 'true'); }); diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.tsx b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.tsx index ed274d0899d8..711cad392de2 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.tsx +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/ColumnSelectPopover.tsx @@ -41,8 +41,10 @@ import Button from 'src/components/Button'; import { Select } from 'src/components'; import { Form, FormItem } from 'src/components/Form'; +import sqlKeywords from 'src/SqlLab/utils/sqlKeywords'; import { SQLEditor } from 'src/components/AsyncAceEditor'; import { EmptyStateSmall } from 'src/components/EmptyState'; +import { getColumnKeywords } from 
'src/explore/controlUtils/getColumnKeywords'; import { StyledColumnOption } from 'src/explore/components/optionRenderers'; import { POPOVER_INITIAL_HEIGHT, @@ -63,7 +65,7 @@ const StyledSelect = styled(Select)` } `; -interface ColumnSelectPopoverProps { +export interface ColumnSelectPopoverProps { columns: ColumnMeta[]; editedColumn?: ColumnMeta | AdhocColumn; onChange: (column: ColumnMeta | AdhocColumn) => void; @@ -187,9 +189,9 @@ const ColumnSelectPopover = ({ const defaultActiveTabKey = initialAdhocColumn ? 'sqlExpression' - : initialSimpleColumn || calculatedColumns.length === 0 - ? 'simple' - : 'saved'; + : selectedCalculatedColumn + ? 'saved' + : 'simple'; useEffect(() => { getCurrentTab(defaultActiveTabKey); @@ -287,6 +289,10 @@ const ColumnSelectPopover = ({ const savedExpressionsLabel = t('Saved expressions'); const simpleColumnsLabel = t('Column'); + const keywords = useMemo( + () => sqlKeywords.concat(getColumnKeywords(columns)), + [columns], + ); return ( <Form layout="vertical" id="metrics-edit-popover"> @@ -451,6 +457,7 @@ const ColumnSelectPopover = ({ className="filter-sql-editor" wrapEnabled ref={sqlEditorRef} + keywords={keywords} /> </Tabs.TabPane> </Tabs> diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndColumnSelect.test.tsx b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndColumnSelect.test.tsx index b19803931d90..a1ce875c01d8 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndColumnSelect.test.tsx +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndColumnSelect.test.tsx @@ -16,7 +16,8 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { render, screen } from 'spec/helpers/testing-library'; +import userEvent from '@testing-library/user-event'; +import { render, screen, within } from 'spec/helpers/testing-library'; import { DndColumnSelect, DndColumnSelectProps, @@ -63,3 +64,52 @@ test('renders adhoc column', async () => { expect(await screen.findByText('adhoc column')).toBeVisible(); expect(screen.getByLabelText('calculator')).toBeVisible(); }); + +test('warn selected custom metric when metric gets removed from dataset', async () => { + const columnValues = ['column1', 'column2']; + + const { rerender, container } = render( + <DndColumnSelect + {...defaultProps} + options={[ + { + column_name: 'column1', + }, + { + column_name: 'column2', + }, + ]} + value={columnValues} + />, + { + useDnd: true, + useRedux: true, + }, + ); + + rerender( + <DndColumnSelect + {...defaultProps} + options={[ + { + column_name: 'column3', + }, + { + column_name: 'column2', + }, + ]} + value={columnValues} + />, + ); + expect(screen.getByText('column2')).toBeVisible(); + expect(screen.queryByText('column1')).toBeInTheDocument(); + const warningIcon = within( + screen.getByText('column1').parentElement ?? 
container, + ).getByRole('button'); + expect(warningIcon).toBeInTheDocument(); + userEvent.hover(warningIcon); + const warningTooltip = await screen.findByText( + 'This column might be incompatible with current dataset', + ); + expect(warningTooltip).toBeInTheDocument(); +}); diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndColumnSelect.tsx b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndColumnSelect.tsx index 455e1bf1b2e1..faceb68336e1 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndColumnSelect.tsx +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndColumnSelect.tsx @@ -109,6 +109,8 @@ function DndColumnSelect(props: DndColumnSelectProps) { isAdhocColumn(column) && column.datasourceWarning ? t('This column might be incompatible with current dataset') : undefined; + const withCaret = isAdhocColumn(column) || !column.error_text; + return ( <ColumnSelectPopoverTrigger key={idx} @@ -134,7 +136,7 @@ function DndColumnSelect(props: DndColumnSelectProps) { canDelete={canDelete} column={column} datasourceWarningMessage={datasourceWarningMessage} - withCaret + withCaret={withCaret} /> </ColumnSelectPopoverTrigger> ); diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndFilterSelect.test.tsx b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndFilterSelect.test.tsx index 733b20e86493..80c60ca859d0 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndFilterSelect.test.tsx +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndFilterSelect.test.tsx @@ -48,6 +48,7 @@ import { DndItemType } from '../../DndItemType'; import DatasourcePanelDragOption from '../../DatasourcePanel/DatasourcePanelDragOption'; jest.mock('src/components/AsyncAceEditor', () => ({ + ...jest.requireActual('src/components/AsyncAceEditor'), 
SQLEditor: (props: AsyncAceEditorProps) => ( <div data-test="react-ace">{props.value}</div> ), diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndMetricSelect.test.tsx b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndMetricSelect.test.tsx index 5a0264665a39..c00b50397dd5 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndMetricSelect.test.tsx +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndMetricSelect.test.tsx @@ -92,13 +92,13 @@ test('render selected metrics correctly', () => { expect(screen.getByText('SUM(Column B)')).toBeVisible(); }); -test('remove selected custom metric when metric gets removed from dataset', () => { +test('warn selected custom metric when metric gets removed from dataset', async () => { let metricValues = ['metric_a', 'metric_b', adhocMetricA, adhocMetricB]; const onChange = (val: any[]) => { metricValues = val; }; - const { rerender } = render( + const { rerender, container } = render( <DndMetricSelect {...defaultProps} value={metricValues} @@ -129,19 +129,28 @@ test('remove selected custom metric when metric gets removed from dataset', () = ); expect(screen.getByText('metric_a')).toBeVisible(); expect(screen.queryByText('Metric B')).not.toBeInTheDocument(); - expect(screen.queryByText('metric_b')).not.toBeInTheDocument(); + expect(screen.queryByText('metric_b')).toBeInTheDocument(); + const warningIcon = within( + screen.getByText('metric_b').parentElement ?? 
container, + ).getByRole('button'); + expect(warningIcon).toBeInTheDocument(); + userEvent.hover(warningIcon); + const warningTooltip = await screen.findByText( + 'This metric might be incompatible with current dataset', + ); + expect(warningTooltip).toBeInTheDocument(); expect(screen.getByText('SUM(column_a)')).toBeVisible(); expect(screen.getByText('SUM(Column B)')).toBeVisible(); }); -test('remove selected custom metric when metric gets removed from dataset for single-select metric control', () => { +test('warn selected custom metric when metric gets removed from dataset for single-select metric control', async () => { let metricValue = 'metric_b'; const onChange = (val: any) => { metricValue = val; }; - const { rerender } = render( + const { rerender, container } = render( <DndMetricSelect {...defaultProps} value={metricValue} @@ -178,7 +187,19 @@ test('remove selected custom metric when metric gets removed from dataset for si ); expect(screen.queryByText('Metric B')).not.toBeInTheDocument(); - expect(screen.getByText('Drop a column/metric here or click')).toBeVisible(); + expect( + screen.queryByText('Drop a column/metric here or click'), + ).not.toBeInTheDocument(); + expect(screen.queryByText('metric_b')).toBeInTheDocument(); + const warningIcon = within( + screen.getByText('metric_b').parentElement ?? 
container, + ).getByRole('button'); + expect(warningIcon).toBeInTheDocument(); + userEvent.hover(warningIcon); + const warningTooltip = await screen.findByText( + 'This metric might be incompatible with current dataset', + ); + expect(warningTooltip).toBeInTheDocument(); }); test('remove selected adhoc metric when column gets removed from dataset', async () => { diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndMetricSelect.tsx b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndMetricSelect.tsx index 214334a5ac27..c6586bb97e53 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndMetricSelect.tsx +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndMetricSelect.tsx @@ -60,11 +60,6 @@ const coerceMetrics = ( } const metricsCompatibleWithDataset = ensureIsArray(addedMetrics).filter( metric => { - if (isSavedMetric(metric)) { - return savedMetrics.some( - savedMetric => savedMetric.metric_name === metric, - ); - } if (isAdhocMetricSimple(metric)) { return columns.some( column => column.column_name === metric.column.column_name, @@ -75,6 +70,15 @@ const coerceMetrics = ( ); return metricsCompatibleWithDataset.map(metric => { + if ( + isSavedMetric(metric) && + !savedMetrics.some(savedMetric => savedMetric.metric_name === metric) + ) { + return { + metric_name: metric, + error_text: t('This metric might be incompatible with current dataset'), + }; + } if (!isDictionaryForAdhocMetric(metric)) { return metric; } diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/utils/optionSelector.ts b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/utils/optionSelector.ts index 433b449be212..70e22b31ed20 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/utils/optionSelector.ts +++ 
b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/utils/optionSelector.ts @@ -22,6 +22,7 @@ import { ensureIsArray, QueryFormColumn, isPhysicalColumn, + t, } from '@superset-ui/core'; const getColumnNameOrAdhocColumn = ( @@ -55,7 +56,13 @@ export class OptionSelector { if (!isPhysicalColumn(value)) { return value; } - return null; + return { + type_generic: 'UNKNOWN', + column_name: value, + error_text: t( + 'This column might be incompatible with current dataset', + ), + }; }) .filter(Boolean) as ColumnMeta[]; } diff --git a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSqlTabContent/index.jsx b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSqlTabContent/index.jsx index f24dff5aabc8..dfbc19db2918 100644 --- a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSqlTabContent/index.jsx +++ b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterEditPopoverSqlTabContent/index.jsx @@ -23,6 +23,7 @@ import { styled, t } from '@superset-ui/core'; import { SQLEditor } from 'src/components/AsyncAceEditor'; import sqlKeywords from 'src/SqlLab/utils/sqlKeywords'; +import { getColumnKeywords } from 'src/explore/controlUtils/getColumnKeywords'; import adhocMetricType from 'src/explore/components/controls/MetricControl/adhocMetricType'; import columnType from 'src/explore/components/controls/FilterControl/columnType'; import AdhocFilter from 'src/explore/components/controls/FilterControl/AdhocFilter'; @@ -91,19 +92,7 @@ export default class AdhocFilterEditPopoverSqlTabContent extends Component { const { adhocFilter, height, options } = this.props; const keywords = sqlKeywords.concat( - options - .map(option => { - if (option.column_name) { - return { - name: option.column_name, - value: option.column_name, - score: 50, - meta: 'option', - }; - } - return null; - }) - .filter(Boolean), + 
getColumnKeywords(options.filter(option => option.column_name)), ); const selectOptions = Object.values(Clauses).map(clause => ({ label: clause, diff --git a/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopover/index.jsx b/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopover/index.jsx index af1ddc4489c9..c527aa582543 100644 --- a/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopover/index.jsx +++ b/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricEditPopover/index.jsx @@ -49,6 +49,7 @@ import { StyledMetricOption, StyledColumnOption, } from 'src/explore/components/optionRenderers'; +import { getColumnKeywords } from 'src/explore/controlUtils/getColumnKeywords'; const propTypes = { onChange: PropTypes.func.isRequired, @@ -304,14 +305,7 @@ export default class AdhocMetricEditPopover extends PureComponent { ...popoverProps } = this.props; const { adhocMetric, savedMetric } = this.state; - const keywords = sqlKeywords.concat( - columns.map(column => ({ - name: column.column_name, - value: column.column_name, - score: 50, - meta: 'column', - })), - ); + const keywords = sqlKeywords.concat(getColumnKeywords(columns)); const columnValue = (adhocMetric.column && adhocMetric.column.column_name) || diff --git a/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricOption.jsx b/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricOption.jsx index 700d31d7d57e..7ca0f7065fb8 100644 --- a/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricOption.jsx +++ b/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricOption.jsx @@ -67,6 +67,7 @@ class AdhocMetricOption extends PureComponent { multi, datasourceWarningMessage, } = this.props; + const withCaret = !savedMetric.error_text; return ( <AdhocMetricPopoverTrigger @@ -86,7 +87,7 @@ class AdhocMetricOption extends 
PureComponent { onDropLabel={onDropLabel} index={index} type={type ?? DndItemType.AdhocMetricOption} - withCaret + withCaret={withCaret} isFunction multi={multi} datasourceWarningMessage={datasourceWarningMessage} diff --git a/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricOption.test.jsx b/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricOption.test.jsx index 1ef070de8119..b7b644980839 100644 --- a/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricOption.test.jsx +++ b/superset-frontend/src/explore/components/controls/MetricControl/AdhocMetricOption.test.jsx @@ -16,13 +16,11 @@ * specific language governing permissions and limitations * under the License. */ -import sinon from 'sinon'; -import { shallow } from 'enzyme'; - +import { render, screen, fireEvent } from 'spec/helpers/testing-library'; import { AGGREGATES } from 'src/explore/constants'; import AdhocMetricOption from 'src/explore/components/controls/MetricControl/AdhocMetricOption'; import AdhocMetric from 'src/explore/components/controls/MetricControl/AdhocMetric'; -import ControlPopover from '../ControlPopover/ControlPopover'; +import userEvent from '@testing-library/user-event'; const columns = [ { type: 'VARCHAR(255)', column_name: 'source' }, @@ -35,49 +33,73 @@ const sumValueAdhocMetric = new AdhocMetric({ aggregate: AGGREGATES.SUM, }); +const datasource = { + type: 'table', + id: 1, + uid: '1__table', + columnFormats: {}, + verboseMap: {}, +}; + +const defaultProps = { + adhocMetric: sumValueAdhocMetric, + savedMetric: {}, + savedMetricsOptions: [], + onMetricEdit: jest.fn(), + columns, + datasource, + onMoveLabel: jest.fn(), + onDropLabel: jest.fn(), + index: 0, +}; + function setup(overrides) { - const onMetricEdit = sinon.spy(); const props = { - adhocMetric: sumValueAdhocMetric, - savedMetric: {}, - savedMetrics: [], - onMetricEdit, - columns, - onMoveLabel: () => {}, - onDropLabel: () => {}, - index: 0, + 
...defaultProps, ...overrides, }; - const wrapper = shallow(<AdhocMetricOption {...props} />) - .find('AdhocMetricPopoverTrigger') - .shallow(); - return { wrapper, onMetricEdit }; + return render(<AdhocMetricOption {...props} />, { useDnd: true }); } -describe('AdhocMetricOption', () => { - it('renders an overlay trigger wrapper for the label', () => { - const { wrapper } = setup(); - expect(wrapper.find(ControlPopover)).toExist(); - expect(wrapper.find('OptionControlLabel')).toExist(); - }); +test('renders an overlay trigger wrapper for the label', () => { + setup(); + expect(screen.getByText('SUM(value)')).toBeInTheDocument(); +}); - it('overwrites the adhocMetric in state with onLabelChange', () => { - const { wrapper } = setup(); - wrapper.instance().onLabelChange({ target: { value: 'new label' } }); - expect(wrapper.state('title').label).toBe('new label'); - expect(wrapper.state('title').hasCustomLabel).toBe(true); +test('overwrites the adhocMetric in state with onLabelChange', async () => { + setup(); + userEvent.click(screen.getByText('SUM(value)')); + userEvent.click(screen.getByTestId(/AdhocMetricEditTitle#trigger/i)); + const labelInput = await screen.findByTestId(/AdhocMetricEditTitle#input/i); + userEvent.clear(labelInput); + userEvent.type(labelInput, 'new label'); + expect(labelInput).toHaveValue('new label'); + fireEvent.keyPress(labelInput, { + key: 'Enter', + charCode: 13, }); + expect(screen.getByText(/new label/i)).toBeInTheDocument(); +}); - it('returns to default labels when the custom label is cleared', () => { - const { wrapper } = setup(); - expect(wrapper.state('title').label).toBe('SUM(value)'); - - wrapper.instance().onLabelChange({ target: { value: 'new label' } }); - expect(wrapper.state('title').label).toBe('new label'); - - wrapper.instance().onLabelChange({ target: { value: '' } }); - - expect(wrapper.state('title').label).toBe('SUM(value)'); - expect(wrapper.state('title').hasCustomLabel).toBe(false); +test('returns to default 
labels when the custom label is cleared', async () => { + setup(); + userEvent.click(screen.getByText('SUM(value)')); + userEvent.click(screen.getByTestId(/AdhocMetricEditTitle#trigger/i)); + const labelInput = await screen.findByTestId(/AdhocMetricEditTitle#input/i); + userEvent.clear(labelInput); + userEvent.type(labelInput, 'new label'); + fireEvent.keyPress(labelInput, { + key: 'Enter', + charCode: 13, + }); + expect(labelInput).not.toBeInTheDocument(); + expect(screen.getByText(/new label/i)).toBeInTheDocument(); + userEvent.click(screen.getByTestId(/AdhocMetricEditTitle#trigger/i)); + expect(screen.getByPlaceholderText(/new label/i)).toBeInTheDocument(); + userEvent.clear(labelInput); + fireEvent.keyPress(labelInput, { + key: 'Enter', + charCode: 13, }); + expect(screen.getByPlaceholderText('SUM(value)')).toBeInTheDocument(); }); diff --git a/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionValue.test.jsx b/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionValue.test.jsx index 3b51fcc5083a..d55bfa1d83f4 100644 --- a/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionValue.test.jsx +++ b/superset-frontend/src/explore/components/controls/MetricControl/MetricDefinitionValue.test.jsx @@ -16,38 +16,40 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { shallow } from 'enzyme'; - -import { AGGREGATES } from 'src/explore/constants'; +import { render, screen } from 'spec/helpers/testing-library'; import MetricDefinitionValue from 'src/explore/components/controls/MetricControl/MetricDefinitionValue'; -import AdhocMetricOption from 'src/explore/components/controls/MetricControl/AdhocMetricOption'; import AdhocMetric from 'src/explore/components/controls/MetricControl/AdhocMetric'; +import { AGGREGATES } from 'src/explore/constants'; const sumValueAdhocMetric = new AdhocMetric({ column: { type: 'DOUBLE', column_name: 'value' }, aggregate: AGGREGATES.SUM, }); -describe('MetricDefinitionValue', () => { - it('renders a MetricOption given a saved metric', () => { - const wrapper = shallow( - <MetricDefinitionValue - onMetricEdit={() => {}} - option={{ metric_name: 'a_saved_metric', expression: 'COUNT(*)' }} - index={1} - />, - ); - expect(wrapper.find('AdhocMetricOption')).toExist(); - }); +const setup = propOverrides => { + const props = { + onMetricEdit: jest.fn(), + option: sumValueAdhocMetric, + index: 1, + columns: [], + savedMetrics: [], + savedMetricsOptions: [], + datasource: {}, + onMoveLabel: jest.fn(), + onDropLabel: jest.fn(), + ...propOverrides, + }; + return render(<MetricDefinitionValue {...props} />, { useDnd: true }); +}; - it('renders an AdhocMetricOption given an adhoc metric', () => { - const wrapper = shallow( - <MetricDefinitionValue - onMetricEdit={() => {}} - option={sumValueAdhocMetric} - index={1} - />, - ); - expect(wrapper.find(AdhocMetricOption)).toExist(); +test('renders a MetricOption given a saved metric', () => { + setup({ + option: { metric_name: 'a_saved_metric', expression: 'COUNT(*)' }, }); + expect(screen.getByText('a_saved_metric')).toBeInTheDocument(); +}); + +test('renders an AdhocMetricOption given an adhoc metric', () => { + setup(); + expect(screen.getByText('SUM(value)')).toBeInTheDocument(); }); diff --git 
a/superset-frontend/src/explore/components/controls/withAsyncVerification.tsx b/superset-frontend/src/explore/components/controls/withAsyncVerification.tsx index 39e3021e72cb..5ed60aea4d35 100644 --- a/superset-frontend/src/explore/components/controls/withAsyncVerification.tsx +++ b/superset-frontend/src/explore/components/controls/withAsyncVerification.tsx @@ -24,6 +24,7 @@ import { import { JsonArray, JsonValue, t } from '@superset-ui/core'; import { ControlProps } from 'src/explore/components/Control'; import builtInControlComponents from 'src/explore/components/controls'; +import useEffectEvent from 'src/hooks/useEffectEvent'; /** * Full control component map. @@ -72,7 +73,7 @@ export type AsyncVerify = ( * Whether the extra props will update the original props. */ function hasUpdates( - props: ControlPropsWithExtras, + props: Partial<ControlPropsWithExtras>, newProps: ExtraControlProps, ) { return ( @@ -165,17 +166,17 @@ export default function withAsyncVerification({ [basicOnChange, otherProps, verifiedProps], ); - useEffect(() => { - if (needAsyncVerification && verify) { + const verifyProps = useEffectEvent( + (verifyFunc: AsyncVerify, props: typeof otherProps) => { if (showLoadingState) { setIsLoading(true); } - verify(otherProps) + verifyFunc(props) .then(updatedProps => { if (showLoadingState) { setIsLoading(false); } - if (updatedProps && hasUpdates(otherProps, updatedProps)) { + if (updatedProps && hasUpdates(verifiedProps, updatedProps)) { setVerifiedProps({ // save isLoading in combination with other props to avoid // rendering twice. 
@@ -198,14 +199,14 @@ export default function withAsyncVerification({ ); } }); + }, + ); + + useEffect(() => { + if (needAsyncVerification && verify) { + verifyProps(verify, otherProps); } - }, [ - needAsyncVerification, - showLoadingState, - verify, - otherProps, - addWarningToast, - ]); + }, [needAsyncVerification, verify, otherProps, verifyProps]); return ( <ControlComponent diff --git a/superset-frontend/src/explore/controlPanels/sections.tsx b/superset-frontend/src/explore/controlPanels/sections.tsx index 6ff7dd9facdf..1058caae0528 100644 --- a/superset-frontend/src/explore/controlPanels/sections.tsx +++ b/superset-frontend/src/explore/controlPanels/sections.tsx @@ -26,6 +26,7 @@ export const datasourceAndVizType: ControlPanelSectionConfig = { controlSetRows: [ ['datasource'], ['viz_type'], + ['enable_ai_insights'], [ { name: 'slice_id', diff --git a/superset-frontend/src/explore/controlUtils/controlUtils.test.tsx b/superset-frontend/src/explore/controlUtils/controlUtils.test.tsx index c18873460a92..16e9cee85044 100644 --- a/superset-frontend/src/explore/controlUtils/controlUtils.test.tsx +++ b/superset-frontend/src/explore/controlUtils/controlUtils.test.tsx @@ -61,6 +61,9 @@ describe('controlUtils', () => { controls: {}, form_data: { datasource: '1__table', viz_type: 'table' }, common: {}, + slice: { + slice_id: 1, + }, }; beforeAll(() => { diff --git a/superset-frontend/plugins/plugin-chart-word-cloud/src/chart/Encoder.ts b/superset-frontend/src/explore/controlUtils/getColumnKeywords.test.tsx similarity index 60% rename from superset-frontend/plugins/plugin-chart-word-cloud/src/chart/Encoder.ts rename to superset-frontend/src/explore/controlUtils/getColumnKeywords.test.tsx index 042f3ce1f396..59782ba94dfa 100644 --- a/superset-frontend/plugins/plugin-chart-word-cloud/src/chart/Encoder.ts +++ b/superset-frontend/src/explore/controlUtils/getColumnKeywords.test.tsx @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more 
contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -16,3 +16,23 @@ * specific language governing permissions and limitations * under the License. */ + +import { getColumnKeywords } from './getColumnKeywords'; + +test('returns HTML for a column tooltip', () => { + const expected = { + column_name: 'test column1', + verbose_name: null, + is_certified: false, + certified_by: null, + description: 'test description', + type: 'VARCHAR', + }; + expect(getColumnKeywords([expected])).toContainEqual({ + name: expected.column_name, + value: expected.column_name, + docHTML: expect.stringContaining(expected.description), + score: 50, + meta: 'column', + }); +}); diff --git a/superset-frontend/src/explore/controlUtils/getColumnKeywords.tsx b/superset-frontend/src/explore/controlUtils/getColumnKeywords.tsx new file mode 100644 index 000000000000..0ef134b1a2e8 --- /dev/null +++ b/superset-frontend/src/explore/controlUtils/getColumnKeywords.tsx @@ -0,0 +1,49 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { ColumnMeta } from '@superset-ui/chart-controls'; +import { t } from '@superset-ui/core'; +import { getTooltipHTML } from 'src/components/AsyncAceEditor'; +import { COLUMN_AUTOCOMPLETE_SCORE } from 'src/SqlLab/constants'; + +export function getColumnKeywords(columns: ColumnMeta[]) { + return columns.map( + ({ + column_name, + verbose_name, + is_certified, + certified_by, + description, + type, + }) => ({ + name: verbose_name || column_name, + value: column_name, + docHTML: getTooltipHTML({ + title: column_name, + meta: type ? `column: ${type}` : 'column', + body: `${description ?? ''}`, + footer: is_certified ? ( + <>{t('Certified by %s', certified_by)}</> + ) : undefined, + }), + score: COLUMN_AUTOCOMPLETE_SCORE, + meta: 'column', + }), + ); +} diff --git a/superset-frontend/src/explore/controlUtils/getFormDataFromDashboardContext.test.ts b/superset-frontend/src/explore/controlUtils/getFormDataFromDashboardContext.test.ts index fd120355c8ee..28450d49c453 100644 --- a/superset-frontend/src/explore/controlUtils/getFormDataFromDashboardContext.test.ts +++ b/superset-frontend/src/explore/controlUtils/getFormDataFromDashboardContext.test.ts @@ -151,10 +151,9 @@ const getExpectedResultFormData = (overrides: JsonObject = {}) => ({ girl: '#FF69B4', boy: '#ADD8E6', }, - shared_label_colors: { - boy: '#ADD8E6', - girl: '#FF69B4', - }, + shared_label_colors: ['boy', 'girl'], + own_color_scheme: 'supersetColors', + dashboard_color_scheme: 'd3Category20b', extra_filters: [ { col: '__time_range', diff --git a/superset-frontend/src/explore/controlUtils/getFormDataWithDashboardContext.ts b/superset-frontend/src/explore/controlUtils/getFormDataWithDashboardContext.ts index 3018491ef8cc..bc4da46e99de 100644 --- a/superset-frontend/src/explore/controlUtils/getFormDataWithDashboardContext.ts +++ b/superset-frontend/src/explore/controlUtils/getFormDataWithDashboardContext.ts @@ -218,11 +218,18 @@ export const getFormDataWithDashboardContext = ( {}, ); + const 
ownColorScheme = exploreFormData.color_scheme; + const dashboardColorScheme = dashboardContextFormData.color_scheme; + const appliedColorScheme = dashboardColorScheme || ownColorScheme; + return { ...exploreFormData, ...dashboardContextFormData, ...filterBoxData, ...nativeFiltersData, ...adhocFilters, + own_color_scheme: ownColorScheme, + color_scheme: appliedColorScheme, + dashboard_color_scheme: dashboardColorScheme, }; }; diff --git a/superset-frontend/src/explore/exploreUtils/index.js b/superset-frontend/src/explore/exploreUtils/index.js index 4fc8147a1896..a3483c887607 100644 --- a/superset-frontend/src/explore/exploreUtils/index.js +++ b/superset-frontend/src/explore/exploreUtils/index.js @@ -37,6 +37,7 @@ import { UNSAVED_CHART_ID, } from 'src/explore/constants'; import { DashboardStandaloneMode } from 'src/dashboard/util/constants'; +import { convertFormDataForAPI } from 'src/components/Chart/timezoneChartActions'; export function getChartKey(explore) { const { slice, form_data } = explore; @@ -210,17 +211,21 @@ export const buildV1ChartDataPayload = ({ setDataMask, ownState, }) => { + // Convert timezone-aware dates to UTC BEFORE building queries + const convertedFormData = convertFormDataForAPI(formData); + const buildQuery = - getChartBuildQueryRegistry().get(formData.viz_type) ?? + getChartBuildQueryRegistry().get(convertedFormData.viz_type) ?? (buildQueryformData => buildQueryContext(buildQueryformData, baseQueryObject => [ { ...baseQueryObject, }, ])); - return buildQuery( + + const payload = buildQuery( { - ...formData, + ...convertedFormData, force, result_format: resultFormat, result_type: resultType, @@ -232,6 +237,8 @@ export const buildV1ChartDataPayload = ({ }, }, ); + + return payload; }; export const getLegacyEndpointType = ({ resultType, resultFormat }) => @@ -316,7 +323,7 @@ export const getSimpleSQLExpression = (subject, operator, comparator) => { .map(val => optionLabel(val)) .map( val => - `${quote}${isString ? 
String(val).replace("'", "''") : val}${quote}`, + `${quote}${isString ? String(val).replace(/'/g, "''") : val}${quote}`, ); if (comparatorArray.length > 0) { expression += ` ${prefix}${formattedComparators.join(', ')}${suffix}`; diff --git a/superset-frontend/src/features/alerts/AlertReportModal.tsx b/superset-frontend/src/features/alerts/AlertReportModal.tsx index 8c71a7f6a7f4..aad229955212 100644 --- a/superset-frontend/src/features/alerts/AlertReportModal.tsx +++ b/superset-frontend/src/features/alerts/AlertReportModal.tsx @@ -98,6 +98,7 @@ export interface AlertReportModalProps { const DEFAULT_WORKING_TIMEOUT = 3600; const DEFAULT_CRON_VALUE = '0 0 * * *'; // every day const DEFAULT_RETENTION = 90; +const EMAIL_REGEX = /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$/; const DEFAULT_NOTIFICATION_METHODS: NotificationMethodOption[] = [ NotificationMethodOption.Email, @@ -372,6 +373,7 @@ export const TRANSLATIONS = { WORKING_TIMEOUT_ERROR_TEXT: t('working timeout'), RECIPIENTS_ERROR_TEXT: t('recipients'), EMAIL_SUBJECT_ERROR_TEXT: t('email subject'), + EMAIL_VALIDATION_ERROR_TEXT: t('invalid email'), ERROR_TOOLTIP_MESSAGE: t( 'Not all required fields are complete. 
Please provide the following:', ), @@ -621,6 +623,8 @@ const AlertReportModal: FunctionComponent<AlertReportModalProps> = ({ recipients.push({ recipient_config_json: { target: setting.recipients, + ccTarget: setting.cc, + bccTarget: setting.bcc, }, type: setting.method, }); @@ -1014,6 +1018,31 @@ const AlertReportModal: FunctionComponent<AlertReportModalProps> = ({ return hasInfo; }; + const checkEmailFormat = () => { + if (!notificationSettings.length) { + return true; + } + + const validateEmails = (emails: string): boolean => { + if (!emails) return true; // No emails to validate + return emails + .split(/[,;]/) + .every(email => EMAIL_REGEX.test(email.trim())); + }; + + // Use array method to check conditions + return notificationSettings.every(setting => { + if (!!setting.method && setting.method === 'Email') { + return ( + (!setting.recipients?.length || validateEmails(setting.recipients)) && + (!setting.cc || validateEmails(setting.cc)) && + (!setting.bcc || validateEmails(setting.bcc)) + ); + } + return true; // Non-Email methods are considered valid + }); + }; + const validateGeneralSection = () => { const errors = []; if (!currentAlert?.name?.length) { @@ -1069,13 +1098,24 @@ const AlertReportModal: FunctionComponent<AlertReportModalProps> = ({ }; const validateNotificationSection = () => { + const errors = []; const hasErrors = !checkNotificationSettings(); - const errors = hasErrors ? 
[TRANSLATIONS.RECIPIENTS_ERROR_TEXT] : []; + + if (hasErrors) { + errors.push(TRANSLATIONS.RECIPIENTS_ERROR_TEXT); + } else { + // Check for email format errors + const hasValidationErrors = !checkEmailFormat(); + if (hasValidationErrors) { + errors.push(TRANSLATIONS.EMAIL_VALIDATION_ERROR_TEXT); + } + } if (emailError) { errors.push(TRANSLATIONS.EMAIL_SUBJECT_ERROR_TEXT); } + // Update validation status with combined errors updateValidationStatus(Sections.Notification, errors); }; @@ -1132,6 +1172,8 @@ const AlertReportModal: FunctionComponent<AlertReportModalProps> = ({ setNotificationSettings([ { recipients: '', + cc: '', + bcc: '', options: allowedNotificationMethods, method: NotificationMethodOption.Email, }, @@ -1153,6 +1195,8 @@ const AlertReportModal: FunctionComponent<AlertReportModalProps> = ({ // @ts-ignore: Type not assignable recipients: config.target || setting.recipient_config_json, options: allowedNotificationMethods, + cc: config.ccTarget || '', + bcc: config.bccTarget || '', }; }); diff --git a/superset-frontend/src/features/alerts/components/NotificationMethod.test.tsx b/superset-frontend/src/features/alerts/components/NotificationMethod.test.tsx index 4b4e46c69e27..39d9b2625012 100644 --- a/superset-frontend/src/features/alerts/components/NotificationMethod.test.tsx +++ b/superset-frontend/src/features/alerts/components/NotificationMethod.test.tsx @@ -16,9 +16,21 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { fireEvent, render, screen } from 'spec/helpers/testing-library'; +import { + cleanup, + fireEvent, + render, + screen, + waitFor, +} from 'spec/helpers/testing-library'; import userEvent from '@testing-library/user-event'; +import { + FeatureFlag, + JsonResponse, + SupersetClient, + TextResponse, +} from '@superset-ui/core'; import { NotificationMethod, mapSlackValues } from './NotificationMethod'; import { NotificationMethodOption, NotificationSetting } from '../types'; @@ -43,6 +55,7 @@ const mockDefaultSubject = 'Default Subject'; describe('NotificationMethod', () => { beforeEach(() => { jest.clearAllMocks(); + cleanup(); }); it('should render the component', () => { @@ -80,8 +93,8 @@ describe('NotificationMethod', () => { />, ); - const deleteButton = screen.getByRole('button'); - userEvent.click(deleteButton); + const deleteButton = document.querySelector('.delete-button'); + if (deleteButton) userEvent.click(deleteButton); expect(mockOnRemove).toHaveBeenCalledWith(1); }); @@ -180,4 +193,291 @@ describe('NotificationMethod', () => { { label: 'User Two', value: 'user2' }, ]); }); + it('should render CC and BCC fields when method is Email and visibility flags are true', () => { + const defaultProps = { + setting: { + method: NotificationMethodOption.Email, + recipients: 'recipient1@example.com, recipient2@example.com', + cc: 'cc1@example.com', + bcc: 'bcc1@example.com', + options: [ + NotificationMethodOption.Email, + NotificationMethodOption.Slack, + ], + }, + index: 0, + onUpdate: jest.fn(), + onRemove: jest.fn(), + onInputChange: jest.fn(), + email_subject: 'Test Subject', + defaultSubject: 'Default Subject', + setErrorSubject: jest.fn(), + }; + + const { getByTestId } = render(<NotificationMethod {...defaultProps} />); + + // Check if CC and BCC fields are rendered + expect(getByTestId('cc')).toBeInTheDocument(); + expect(getByTestId('bcc')).toBeInTheDocument(); + }); + it('should render CC and BCC fields with correct values when method is 
Email', () => { + const defaultProps = { + setting: { + method: NotificationMethodOption.Email, + recipients: 'recipient1@example.com, recipient2@example.com', + cc: 'cc1@example.com', + bcc: 'bcc1@example.com', + options: [ + NotificationMethodOption.Email, + NotificationMethodOption.Slack, + ], + }, + index: 0, + onUpdate: jest.fn(), + onRemove: jest.fn(), + onInputChange: jest.fn(), + email_subject: 'Test Subject', + defaultSubject: 'Default Subject', + setErrorSubject: jest.fn(), + }; + + const { getByTestId } = render(<NotificationMethod {...defaultProps} />); + + // Check if CC and BCC fields are rendered with correct values + expect(getByTestId('cc')).toHaveValue('cc1@example.com'); + expect(getByTestId('bcc')).toHaveValue('bcc1@example.com'); + }); + it('should not render CC and BCC fields when method is not Email', () => { + const defaultProps = { + setting: { + method: NotificationMethodOption.Slack, + recipients: 'recipient1@example.com, recipient2@example.com', + cc: 'cc1@example.com', + bcc: 'bcc1@example.com', + options: [ + NotificationMethodOption.Email, + NotificationMethodOption.Slack, + ], + }, + index: 0, + onUpdate: jest.fn(), + onRemove: jest.fn(), + onInputChange: jest.fn(), + email_subject: 'Test Subject', + defaultSubject: 'Default Subject', + setErrorSubject: jest.fn(), + }; + + const { queryByTestId } = render(<NotificationMethod {...defaultProps} />); + + // Check if CC and BCC fields are not rendered + expect(queryByTestId('cc')).not.toBeInTheDocument(); + expect(queryByTestId('bcc')).not.toBeInTheDocument(); + }); + // Handle empty recipients list gracefully + it('should handle empty recipients list gracefully', () => { + const defaultProps = { + setting: { + method: NotificationMethodOption.Email, + recipients: '', + cc: '', + bcc: '', + options: [ + NotificationMethodOption.Email, + NotificationMethodOption.Slack, + ], + }, + index: 0, + onUpdate: jest.fn(), + onRemove: jest.fn(), + onInputChange: jest.fn(), + email_subject: 'Test 
Subject', + defaultSubject: 'Default Subject', + setErrorSubject: jest.fn(), + }; + + const { queryByTestId } = render(<NotificationMethod {...defaultProps} />); + + // Check if CC and BCC fields are not rendered + expect(queryByTestId('cc')).not.toBeInTheDocument(); + expect(queryByTestId('bcc')).not.toBeInTheDocument(); + }); + it('shows the right combo when ff is false', async () => { + /* should show the div with "Recipients are separated by" + when FeatureFlag.AlertReportSlackV2 is false and fetchSlackChannels errors + */ + // Mock the feature flag to be false + window.featureFlags = { [FeatureFlag.AlertReportSlackV2]: false }; + + // Mock the SupersetClient.get to simulate an error + jest.spyOn(SupersetClient, 'get').mockImplementation(() => { + throw new Error('Error fetching Slack channels'); + }); + + render( + <NotificationMethod + setting={{ + ...mockSetting, + method: NotificationMethodOption.Slack, + }} + index={0} + onUpdate={mockOnUpdate} + onRemove={mockOnRemove} + onInputChange={mockOnInputChange} + email_subject={mockEmailSubject} + defaultSubject={mockDefaultSubject} + setErrorSubject={mockSetErrorSubject} + />, + ); + + // Wait for the component to handle the error and render the expected div + await waitFor(() => { + expect( + screen.getByText('Recipients are separated by "," or ";"'), + ).toBeInTheDocument(); + }); + }); + it('shows the textbox when the fetch fails', async () => { + /* should show the div with "Recipients are separated by" + when FeatureFlag.AlertReportSlackV2 is true and fetchSlackChannels errors + */ + + // Mock the feature flag to be false + window.featureFlags = { [FeatureFlag.AlertReportSlackV2]: false }; + + // Mock the SupersetClient.get to simulate an error + jest.spyOn(SupersetClient, 'get').mockImplementation(() => { + throw new Error('Error fetching Slack channels'); + }); + + render( + <NotificationMethod + setting={{ + ...mockSetting, + method: NotificationMethodOption.Slack, + }} + index={0} + 
onUpdate={mockOnUpdate} + onRemove={mockOnRemove} + onInputChange={mockOnInputChange} + email_subject={mockEmailSubject} + defaultSubject={mockDefaultSubject} + setErrorSubject={mockSetErrorSubject} + />, + ); + + // Wait for the component to handle the error and render the expected div + await waitFor(() => { + expect( + screen.getByText('Recipients are separated by "," or ";"'), + ).toBeInTheDocument(); + }); + }); + it('shows the dropdown when ff is true and slackChannels succeed', async () => { + /* should show the Select channels dropdown + when FeatureFlag.AlertReportSlackV2 is true and fetchSlackChannels succeeds + */ + // Mock the feature flag to be false + window.featureFlags = { [FeatureFlag.AlertReportSlackV2]: true }; + + // Mock the SupersetClient.get to simulate an error + jest + .spyOn(SupersetClient, 'get') + .mockImplementation( + () => + Promise.resolve({ json: { result: [] } }) as unknown as Promise< + Response | JsonResponse | TextResponse + >, + ); + + render( + <NotificationMethod + setting={{ + ...mockSetting, + method: NotificationMethodOption.SlackV2, + recipients: 'slack-channel', + }} + index={0} + onUpdate={mockOnUpdate} + onRemove={mockOnRemove} + onInputChange={mockOnInputChange} + email_subject={mockEmailSubject} + defaultSubject={mockDefaultSubject} + setErrorSubject={mockSetErrorSubject} + />, + ); + + // Wait for the component to handle the error and render the expected div + await waitFor(() => { + expect(screen.getByTitle('Slack')).toBeInTheDocument(); + }); + }); + it('shows the textarea when ff is true and slackChannels fail', async () => { + /* should show the Select channels dropdown + when FeatureFlag.AlertReportSlackV2 is true and fetchSlackChannels succeeds + */ + // Mock the feature flag to be false + window.featureFlags = { [FeatureFlag.AlertReportSlackV2]: true }; + + // Mock the SupersetClient.get to simulate an error + jest.spyOn(SupersetClient, 'get').mockImplementation(() => { + throw new Error('Error fetching Slack 
channels'); + }); + + render( + <NotificationMethod + setting={{ + ...mockSetting, + method: NotificationMethodOption.Slack, + recipients: 'slack-channel', + }} + index={0} + onUpdate={mockOnUpdate} + onRemove={mockOnRemove} + onInputChange={mockOnInputChange} + email_subject={mockEmailSubject} + defaultSubject={mockDefaultSubject} + setErrorSubject={mockSetErrorSubject} + />, + ); + + // Wait for the component to handle the error and render the expected div + expect( + screen.getByText('Recipients are separated by "," or ";"'), + ).toBeInTheDocument(); + }); + it('shows the textarea when ff is true and slackChannels fail and slack is selected', async () => { + /* should show the Select channels dropdown + when FeatureFlag.AlertReportSlackV2 is true and fetchSlackChannels succeeds + */ + // Mock the feature flag to be false + window.featureFlags = { [FeatureFlag.AlertReportSlackV2]: true }; + + // Mock the SupersetClient.get to simulate an error + jest.spyOn(SupersetClient, 'get').mockImplementation(() => { + throw new Error('Error fetching Slack channels'); + }); + + render( + <NotificationMethod + setting={{ + ...mockSetting, + method: NotificationMethodOption.Slack, + recipients: 'slack-channel', + }} + index={0} + onUpdate={mockOnUpdate} + onRemove={mockOnRemove} + onInputChange={mockOnInputChange} + email_subject={mockEmailSubject} + defaultSubject={mockDefaultSubject} + setErrorSubject={mockSetErrorSubject} + />, + ); + + // Wait for the component to handle the error and render the expected div + expect( + screen.getByText('Recipients are separated by "," or ";"'), + ).toBeInTheDocument(); + }); }); diff --git a/superset-frontend/src/features/alerts/components/NotificationMethod.tsx b/superset-frontend/src/features/alerts/components/NotificationMethod.tsx index 85e26f777ab2..17a78697c9a5 100644 --- a/superset-frontend/src/features/alerts/components/NotificationMethod.tsx +++ b/superset-frontend/src/features/alerts/components/NotificationMethod.tsx @@ -44,34 
+44,75 @@ import { import { StyledInputContainer } from '../AlertReportModal'; const StyledNotificationMethod = styled.div` - margin-bottom: 10px; + ${({ theme }) => ` + margin-bottom: ${theme.gridUnit * 3}px; - .input-container { - textarea { - height: auto; + .input-container { + textarea { + height: auto; + } + + &.error { + input { + border-color: ${theme.colors.error.base}; + } + } + + .helper { + margin-top: ${theme.gridUnit * 2}px; + font-size: ${theme.typography.sizes.s}px; + color: ${theme.colors.grayscale.base}; + } } - &.error { - input { - border-color: ${({ theme }) => theme.colors.error.base}; + .inline-container { + margin-bottom: ${theme.gridUnit * 2}px; + + > div { + margin: 0px; + } + + .delete-button { + margin-left: ${theme.gridUnit * 2}px; + padding-top: ${theme.gridUnit}px; } } - } - .inline-container { - margin-bottom: 10px; + .ghost-button { + color: ${theme.colors.primary.dark1}; + display: inline-flex; + align-items: center; + font-size: ${theme.typography.sizes.s}px; + cursor: pointer; + margin-top: ${theme.gridUnit}px; + + .icon { + width: ${theme.gridUnit * 3}px; + height: ${theme.gridUnit * 3}px; + font-size: ${theme.typography.sizes.s}px; + margin-right: ${theme.gridUnit}px; + } + } - > div { - margin: 0; + .ghost-button + .ghost-button { + margin-left: ${theme.gridUnit * 4}px; } - .delete-button { - margin-left: 10px; - padding-top: 3px; + .ghost-button:first-child[style*='none'] + .ghost-button { + margin-left: 0px; /* Remove margin when the first button is hidden */ } - } + `} `; +const TRANSLATIONS = { + EMAIL_CC_NAME: t('CC recipients'), + EMAIL_BCC_NAME: t('BCC recipients'), + EMAIL_SUBJECT_NAME: t('Email subject name (optional)'), + EMAIL_SUBJECT_ERROR_TEXT: t( + 'Please enter valid text. 
Spaces alone are not permitted.', + ), +}; + interface NotificationMethodProps { setting?: NotificationSetting | null; index: number; @@ -85,13 +126,6 @@ interface NotificationMethodProps { setErrorSubject: (hasError: boolean) => void; } -const TRANSLATIONS = { - EMAIL_SUBJECT_NAME: t('Email subject name (optional)'), - EMAIL_SUBJECT_ERROR_TEXT: t( - 'Please enter valid text. Spaces alone are not permitted.', - ), -}; - export const mapSlackValues = ({ method, recipientValue, @@ -164,7 +198,7 @@ export const NotificationMethod: FunctionComponent<NotificationMethodProps> = ({ defaultSubject, setErrorSubject, }) => { - const { method, recipients, options } = setting || {}; + const { method, recipients, cc, bcc, options } = setting || {}; const [recipientValue, setRecipientValue] = useState<string>( recipients || '', ); @@ -172,7 +206,13 @@ export const NotificationMethod: FunctionComponent<NotificationMethodProps> = ({ { label: string; value: string }[] >([]); const [error, setError] = useState(false); + const [ccVisible, setCcVisible] = useState<boolean>(!!cc); + const [bccVisible, setBccVisible] = useState<boolean>(!!bcc); + const [ccValue, setCcValue] = useState<string>(cc || ''); + const [bccValue, setBccValue] = useState<string>(bcc || ''); const theme = useTheme(); + const [methodOptionsLoading, setMethodOptionsLoading] = + useState<boolean>(true); const [slackOptions, setSlackOptions] = useState<SlackOptionsType>([ { label: '', @@ -188,11 +228,16 @@ export const NotificationMethod: FunctionComponent<NotificationMethodProps> = ({ }) => { // Since we're swapping the method, reset the recipients setRecipientValue(''); + setCcValue(''); + setBccValue(''); + if (onUpdate && setting) { const updatedSetting = { ...setting, method: selected.value, recipients: '', + cc: '', + bcc: '', }; onUpdate(index, updatedSetting); @@ -214,51 +259,52 @@ export const NotificationMethod: FunctionComponent<NotificationMethodProps> = ({ }; useEffect(() => { - if ( - method && - [ - 
NotificationMethodOption.Slack, - NotificationMethodOption.SlackV2, - ].includes(method) && - !slackOptions[0]?.options.length - ) { + const slackEnabled = options?.some( + option => + option === NotificationMethodOption.Slack || + option === NotificationMethodOption.SlackV2, + ); + if (slackEnabled && !slackOptions[0]?.options.length) { fetchSlackChannels({ types: ['public_channel', 'private_channel'] }) .then(({ json }) => { const { result } = json; - const options: SlackOptionsType = mapChannelsToOptions(result); setSlackOptions(options); if (isFeatureEnabled(FeatureFlag.AlertReportSlackV2)) { - // map existing ids to names for display + // for edit mode, map existing ids to names for display if slack v2 // or names to ids if slack v1 const [publicOptions, privateOptions] = options; - - setSlackRecipients( - mapSlackValues({ - method, - recipientValue, - slackOptions: [ - ...publicOptions.options, - ...privateOptions.options, - ], - }), - ); - if (method === NotificationMethodOption.Slack) { - onMethodChange({ - label: NotificationMethodOption.Slack, - value: NotificationMethodOption.SlackV2, - }); + if ( + method && + [ + NotificationMethodOption.SlackV2, + NotificationMethodOption.Slack, + ].includes(method) + ) { + setSlackRecipients( + mapSlackValues({ + method, + recipientValue, + slackOptions: [ + ...publicOptions.options, + ...privateOptions.options, + ], + }), + ); } } }) - .catch(() => { + .catch(e => { // Fallback to slack v1 if slack v2 is not compatible setUseSlackV1(true); + }) + .finally(() => { + setMethodOptionsLoading(false); }); } - }, [method]); + }, []); const methodOptions = useMemo( () => @@ -280,7 +326,7 @@ export const NotificationMethod: FunctionComponent<NotificationMethodProps> = ({ : method, value: method, })), - [options], + [options, useSlackV1], ); if (!setting) { @@ -333,11 +379,49 @@ export const NotificationMethod: FunctionComponent<NotificationMethodProps> = ({ } }; + const onCcChange = (event: 
React.ChangeEvent<HTMLTextAreaElement>) => { + const { target } = event; + + setCcValue(target.value); + + if (onUpdate) { + const updatedSetting = { + ...setting, + cc: target.value, + }; + + onUpdate(index, updatedSetting); + } + }; + + const onBccChange = (event: React.ChangeEvent<HTMLTextAreaElement>) => { + const { target } = event; + + setBccValue(target.value); + + if (onUpdate) { + const updatedSetting = { + ...setting, + bcc: target.value, + }; + + onUpdate(index, updatedSetting); + } + }; + // Set recipients if (!!recipients && recipientValue !== recipients) { setRecipientValue(recipients); } + if (!!cc && ccValue !== cc) { + setCcValue(cc); + } + + if (!!bcc && bccValue !== bcc) { + setBccValue(bcc); + } + return ( <StyledNotificationMethod> <div className="inline-container"> @@ -353,8 +437,10 @@ export const NotificationMethod: FunctionComponent<NotificationMethodProps> = ({ options={methodOptions} showSearch value={methodOptions.find(option => option.value === method)} + loading={methodOptionsLoading} /> {index !== 0 && !!onRemove ? 
( + // eslint-disable-next-line jsx-a11y/control-has-associated-label <span role="button" tabIndex={0} @@ -418,14 +504,16 @@ export const NotificationMethod: FunctionComponent<NotificationMethodProps> = ({ <> <div className="input-container"> <textarea - name="recipients" + name="To" data-test="recipients" value={recipientValue} onChange={onRecipientsChange} /> </div> - <div className="helper"> - {t('Recipients are separated by "," or ";"')} + <div className="input-container"> + <div className="helper"> + {t('Recipients are separated by "," or ";"')} + </div> </div> </> ) : ( @@ -446,6 +534,75 @@ export const NotificationMethod: FunctionComponent<NotificationMethodProps> = ({ </div> </StyledInputContainer> </div> + {method === NotificationMethodOption.Email && ( + <StyledInputContainer> + {/* Render "CC" input field if ccVisible is true */} + {ccVisible && ( + <> + <div className="control-label"> + {TRANSLATIONS.EMAIL_CC_NAME} + </div> + <div className="input-container"> + <textarea + name="CC" + data-test="cc" + value={ccValue} + onChange={onCcChange} + /> + </div> + <div className="input-container"> + <div className="helper"> + {t('Recipients are separated by "," or ";"')} + </div> + </div> + </> + )} + {/* Render "BCC" input field if bccVisible is true */} + {bccVisible && ( + <> + <div className="control-label"> + {TRANSLATIONS.EMAIL_BCC_NAME} + </div> + <div className="input-container"> + <textarea + name="BCC" + data-test="bcc" + value={bccValue} + onChange={onBccChange} + /> + </div> + <div className="input-container"> + <div className="helper"> + {t('Recipients are separated by "," or ";"')} + </div> + </div> + </> + )} + {/* New buttons container */} + <div className="ghost-button"> + <span + className="ghost-button" + role="button" + tabIndex={0} + onClick={() => setCcVisible(true)} + style={{ display: ccVisible ? 
'none' : 'inline-flex' }} + > + <Icons.Email className="icon" /> + {t('Add CC Recipients')} + </span> + <span + className="ghost-button" + role="button" + tabIndex={0} + onClick={() => setBccVisible(true)} + style={{ display: bccVisible ? 'none' : 'inline-flex' }} + > + <Icons.Email className="icon" /> + {t('Add BCC Recipients')} + </span> + </div> + </StyledInputContainer> + )} </> ) : null} </StyledNotificationMethod> diff --git a/superset-frontend/src/features/alerts/types.ts b/superset-frontend/src/features/alerts/types.ts index 9c6a2e9c8bf0..2726fca38dbf 100644 --- a/superset-frontend/src/features/alerts/types.ts +++ b/superset-frontend/src/features/alerts/types.ts @@ -50,6 +50,8 @@ export enum NotificationMethodOption { export type NotificationSetting = { method?: NotificationMethodOption; recipients: string; + cc?: string; + bcc?: string; options: NotificationMethodOption[]; }; @@ -63,6 +65,8 @@ export type SlackChannel = { export type Recipient = { recipient_config_json: { target: string; + ccTarget?: string; + bccTarget?: string; }; type: NotificationMethodOption; }; diff --git a/superset-frontend/src/features/allEntities/AllEntitiesTable.test.tsx b/superset-frontend/src/features/allEntities/AllEntitiesTable.test.tsx new file mode 100644 index 000000000000..9ddd30a74588 --- /dev/null +++ b/superset-frontend/src/features/allEntities/AllEntitiesTable.test.tsx @@ -0,0 +1,131 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { render } from 'spec/helpers/testing-library'; +import { screen } from '@testing-library/react'; +import * as useQueryParamsModule from 'use-query-params'; +import AllEntitiesTable from './AllEntitiesTable'; + +describe('AllEntitiesTable', () => { + const mockSetShowTagModal = jest.fn(); + + const mockObjects = { + dashboard: [], + chart: [], + query: [], + }; + + const mockObjectsWithTags = { + dashboard: [ + { + id: 1, + type: 'dashboard', + name: 'Sales Dashboard', + url: '/dashboard/1', + changed_on: '2023-11-20T12:34:56Z', + created_by: 1, + creator: 'John Doe', + owners: [{ id: 1, first_name: 'John', last_name: 'Doe' }], + tags: [ + { id: 101, name: 'Sales', type: 'TagType.custom' }, + { id: 42, name: 'Current Tag', type: 'TagType.custom' }, + ], + }, + ], + chart: [ + { + id: 2, + type: 'chart', + name: 'Monthly Revenue', + url: '/chart/2', + changed_on: '2023-11-19T12:00:00Z', + created_by: 2, + creator: 'Jane Smith', + owners: [{ id: 2, first_name: 'Jane', last_name: 'Smith' }], + tags: [ + { id: 102, name: 'Revenue', type: 'TagType.custom' }, + { id: 42, name: 'Current Tag', type: 'TagType.custom' }, + ], + }, + ], + query: [ + { + id: 3, + type: 'query', + name: 'User Engagement', + url: '/query/3', + changed_on: '2023-11-18T09:30:00Z', + created_by: 3, + creator: 'Alice Brown', + owners: [{ id: 3, first_name: 'Alice', last_name: 'Brown' }], + tags: [ + { id: 103, name: 'Engagement', type: 'TagType.custom' }, + { id: 42, name: 'Current Tag', type: 'TagType.custom' }, + ], + }, + ], + }; + + beforeEach(() => { + jest + 
.spyOn(useQueryParamsModule, 'useQueryParam') + .mockReturnValue([42, jest.fn()]); + }); + + afterEach(() => { + jest.restoreAllMocks(); + }); + + it('renders when empty', () => { + render( + <AllEntitiesTable + search="" + setShowTagModal={mockSetShowTagModal} + objects={mockObjects} + />, + ); + + expect( + screen.getByText('No entities have this tag currently assigned'), + ).toBeInTheDocument(); + + expect(screen.getByText('Add tag to entities')).toBeInTheDocument(); + }); + + it('renders the correct tags for each object type, excluding the current tag', () => { + render( + <AllEntitiesTable + search="" + setShowTagModal={mockSetShowTagModal} + objects={mockObjectsWithTags} + />, + ); + + expect(screen.getByText('Sales Dashboard')).toBeInTheDocument(); + expect(screen.getByText('Sales')).toBeInTheDocument(); + + expect(screen.getByText('Monthly Revenue')).toBeInTheDocument(); + expect(screen.getByText('Revenue')).toBeInTheDocument(); + + expect(screen.getByText('User Engagement')).toBeInTheDocument(); + expect(screen.getByText('Engagement')).toBeInTheDocument(); + + expect(screen.queryByText('Current Tag')).not.toBeInTheDocument(); + }); +}); diff --git a/superset-frontend/src/features/allEntities/AllEntitiesTable.tsx b/superset-frontend/src/features/allEntities/AllEntitiesTable.tsx index ad5e307bf244..be68047defe4 100644 --- a/superset-frontend/src/features/allEntities/AllEntitiesTable.tsx +++ b/superset-frontend/src/features/allEntities/AllEntitiesTable.tsx @@ -24,6 +24,7 @@ import FacePile from 'src/components/FacePile'; import Tag from 'src/types/TagType'; import Owner from 'src/types/Owner'; import { EmptyStateBig } from 'src/components/EmptyState'; +import { NumberParam, useQueryParam } from 'use-query-params'; const MAX_TAGS_TO_SHOW = 3; const PAGE_SIZE = 10; @@ -79,6 +80,7 @@ export default function AllEntitiesTable({ }: AllEntitiesTableProps) { type objectType = 'dashboard' | 'chart' | 'query'; + const [tagId] = useQueryParam('id', NumberParam); const 
showListViewObjs = objects.dashboard.length > 0 || objects.chart.length > 0 || @@ -119,7 +121,9 @@ export default function AllEntitiesTable({ <TagsList tags={tags.filter( (tag: Tag) => - tag.type === 'TagTypes.custom' || tag.type === 1, + tag.type !== undefined && + ['TagType.custom', 1].includes(tag.type) && + tag.id !== tagId, )} maxTags={MAX_TAGS_TO_SHOW} /> diff --git a/superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx b/superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx index 6cf1deaac038..e4de2df22230 100644 --- a/superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx +++ b/superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx @@ -172,11 +172,11 @@ const ExtraOptions = ({ indeterminate={false} checked={!!db?.allow_dml} onChange={onInputChange} - labelText={t('Allow DML')} + labelText={t('Allow DDL and DML')} /> <InfoTooltip tooltip={t( - 'Allow manipulation of the database using non-SELECT statements such as UPDATE, DELETE, CREATE, etc.', + 'Allow the execution of DDL (Data Definition Language: CREATE, DROP, TRUNCATE, etc.) 
and DML (Data Modification Language: INSERT, UPDATE, DELETE, etc)', )} /> </div> diff --git a/superset-frontend/src/features/databases/DatabaseModal/index.test.tsx b/superset-frontend/src/features/databases/DatabaseModal/index.test.tsx index acf6528edde4..910ad9339a94 100644 --- a/superset-frontend/src/features/databases/DatabaseModal/index.test.tsx +++ b/superset-frontend/src/features/databases/DatabaseModal/index.test.tsx @@ -700,9 +700,9 @@ describe('DatabaseModal', () => { /force all tables and views to be created in this schema when clicking ctas or cvas in sql lab\./i, ); const allowDMLCheckbox = screen.getByRole('checkbox', { - name: /allow dml/i, + name: /allow ddl and dml/i, }); - const allowDMLText = screen.getByText(/allow dml/i); + const allowDMLText = screen.getByText(/allow ddl and dml/i); const enableQueryCostEstimationCheckbox = screen.getByRole('checkbox', { name: /enable query cost estimation/i, }); diff --git a/superset-frontend/src/features/datasets/AddDataset/LeftPanel/LeftPanel.test.tsx b/superset-frontend/src/features/datasets/AddDataset/LeftPanel/LeftPanel.test.tsx index 88836d2c25d5..e66c9785dbfe 100644 --- a/superset-frontend/src/features/datasets/AddDataset/LeftPanel/LeftPanel.test.tsx +++ b/superset-frontend/src/features/datasets/AddDataset/LeftPanel/LeftPanel.test.tsx @@ -35,7 +35,7 @@ beforeEach(() => { allow_file_upload: 'Allow Csv Upload', allow_ctas: 'Allow Ctas', allow_cvas: 'Allow Cvas', - allow_dml: 'Allow Dml', + allow_dml: 'Allow DDL and DML', allow_multi_schema_metadata_fetch: 'Allow Multi Schema Metadata Fetch', allow_run_async: 'Allow Run Async', allows_cost_estimate: 'Allows Cost Estimate', diff --git a/superset-frontend/src/features/home/ActivityTable.tsx b/superset-frontend/src/features/home/ActivityTable.tsx index e518f2fa0562..254e05154ec8 100644 --- a/superset-frontend/src/features/home/ActivityTable.tsx +++ b/superset-frontend/src/features/home/ActivityTable.tsx @@ -33,19 +33,7 @@ import { Chart } from 
'src/types/Chart'; import Icons from 'src/components/Icons'; import SubMenu from './SubMenu'; import EmptyState from './EmptyState'; -import { WelcomeTable } from './types'; - -/** - * Return result from /api/v1/log/recent_activity/ - */ -interface RecentActivity { - action: string; - item_type: 'slice' | 'dashboard'; - item_url: string; - item_title: string; - time: number; - time_delta_humanized?: string; -} +import { WelcomeTable, RecentActivity } from './types'; interface RecentSlice extends RecentActivity { item_type: 'slice'; diff --git a/superset-frontend/src/features/home/Menu.tsx b/superset-frontend/src/features/home/Menu.tsx index 7afc7fb7ad85..3d83aded7c0b 100644 --- a/superset-frontend/src/features/home/Menu.tsx +++ b/superset-frontend/src/features/home/Menu.tsx @@ -68,6 +68,12 @@ const StyledHeader = styled.header` height: 100%; object-fit: contain; } + &:focus { + border-color: transparent; + } + &:focus-visible { + border-color: ${theme.colors.primary.dark1}; + } } .navbar-brand-text { border-left: 1px solid ${theme.colors.grayscale.light2}; @@ -306,11 +312,7 @@ export function Menu({ arrowPointAtCenter > {isFrontendRoute(window.location.pathname) ? 
( - <GenericLink - className="navbar-brand" - to={brand.path} - tabIndex={-1} - > + <GenericLink className="navbar-brand" to={brand.path}> <img src={brand.icon} alt={brand.alt} /> </GenericLink> ) : ( diff --git a/superset-frontend/src/features/home/RightMenu.test.tsx b/superset-frontend/src/features/home/RightMenu.test.tsx index 50c0ca3259c2..ba50a5256c37 100644 --- a/superset-frontend/src/features/home/RightMenu.test.tsx +++ b/superset-frontend/src/features/home/RightMenu.test.tsx @@ -308,10 +308,13 @@ test('If there is a DB with allow_file_upload set as True the option should be e userEvent.hover(dropdown); const dataMenu = await screen.findByText(dropdownItems[0].label); userEvent.hover(dataMenu); - expect(await screen.findByText('Upload CSV to database')).toBeInTheDocument(); + const csvMenu = await screen.findByText('Upload CSV to database'); + expect(csvMenu).toBeInTheDocument(); expect( await screen.findByText('Upload Excel to database'), ).toBeInTheDocument(); + + expect(csvMenu).not.toHaveAttribute('aria-disabled', 'true'); }); test('If there is NOT a DB with allow_file_upload set as True the option should be disabled', async () => { @@ -341,10 +344,11 @@ test('If there is NOT a DB with allow_file_upload set as True the option should userEvent.hover(dropdown); const dataMenu = await screen.findByText(dropdownItems[0].label); userEvent.hover(dataMenu); - expect(await screen.findByText('Upload CSV to database')).toBeInTheDocument(); - expect( - (await screen.findByText('Upload CSV to database')).closest('a'), - ).not.toBeInTheDocument(); + const csvMenu = await screen.findByRole('menuitem', { + name: 'Upload CSV to database', + }); + expect(csvMenu).toBeInTheDocument(); + expect(csvMenu).toHaveAttribute('aria-disabled', 'true'); }); test('Logs out and clears local storage item redux', async () => { diff --git a/superset-frontend/src/features/home/RightMenu.tsx b/superset-frontend/src/features/home/RightMenu.tsx index 99a139836684..589433764a06 100644 --- 
a/superset-frontend/src/features/home/RightMenu.tsx +++ b/superset-frontend/src/features/home/RightMenu.tsx @@ -313,7 +313,7 @@ const RightMenu = ({ const buildMenuItem = (item: MenuObjectChildProps) => item.disable ? ( - <Menu.Item key={item.name} css={styledDisabled}> + <Menu.Item key={item.name} css={styledDisabled} disabled> <Tooltip placement="top" title={tooltipText}> {item.label} </Tooltip> diff --git a/superset-frontend/src/features/home/SavedQueries.tsx b/superset-frontend/src/features/home/SavedQueries.tsx index 55535fc501ae..7c6a5979f59c 100644 --- a/superset-frontend/src/features/home/SavedQueries.tsx +++ b/superset-frontend/src/features/home/SavedQueries.tsx @@ -278,7 +278,7 @@ const SavedQueries = ({ url={`/sqllab?savedQueryId=${q.id}`} title={q.label} imgFallbackURL="/static/assets/images/empty-query.svg" - description={t('Ran %s', q.changed_on_delta_humanized)} + description={t('Modified %s', q.changed_on_delta_humanized)} cover={ q?.sql?.length && showThumbnails && featureFlag ? 
( <QueryContainer> diff --git a/superset-frontend/src/features/home/types.ts b/superset-frontend/src/features/home/types.ts index 105cded78f72..a59e9fcd8586 100644 --- a/superset-frontend/src/features/home/types.ts +++ b/superset-frontend/src/features/home/types.ts @@ -55,3 +55,15 @@ export enum GlobalMenuDataOptions { ExcelUpload = 'excelUpload', ColumnarUpload = 'columnarUpload', } + +/** + * Return result from /api/v1/log/recent_activity/ + */ +export interface RecentActivity { + action: string; + item_type: 'slice' | 'dashboard'; + item_url: string; + item_title: string; + time: number; + time_delta_humanized?: string; +} diff --git a/superset-frontend/src/features/reports/ReportModal/index.tsx b/superset-frontend/src/features/reports/ReportModal/index.tsx index 0452ac909bf4..8843bc4835ca 100644 --- a/superset-frontend/src/features/reports/ReportModal/index.tsx +++ b/superset-frontend/src/features/reports/ReportModal/index.tsx @@ -73,6 +73,8 @@ interface ReportProps { show: boolean; userId: number; userEmail: string; + ccEmail: string; + bccEmail: string; chart?: ChartState; chartName?: string; dashboardId?: number; @@ -109,6 +111,8 @@ function ReportModal({ chart, userId, userEmail, + ccEmail, + bccEmail, creationMethod, dashboardName, chartName, @@ -184,7 +188,11 @@ function ReportModal({ owners: [userId], recipients: [ { - recipient_config_json: { target: userEmail }, + recipient_config_json: { + target: userEmail, + ccTarget: ccEmail, + bccTarget: bccEmail, + }, type: 'Email', }, ], diff --git a/superset-frontend/src/features/reports/types.ts b/superset-frontend/src/features/reports/types.ts index 9a3f055b49fe..637e324ca522 100644 --- a/superset-frontend/src/features/reports/types.ts +++ b/superset-frontend/src/features/reports/types.ts @@ -46,7 +46,14 @@ export interface ReportObject { name: string; owners: number[]; recipients: [ - { recipient_config_json: { target: string }; type: ReportRecipientType }, + { + recipient_config_json: { + target: string; + 
ccTarget: string; + bccTarget: string; + }; + type: ReportRecipientType; + }, ]; report_format: string; timezone: string; diff --git a/superset-frontend/src/hooks/apiResources/queryValidations.ts b/superset-frontend/src/hooks/apiResources/queryValidations.ts index df88d6afca9b..048ce3098c64 100644 --- a/superset-frontend/src/hooks/apiResources/queryValidations.ts +++ b/superset-frontend/src/hooks/apiResources/queryValidations.ts @@ -26,7 +26,7 @@ export type FetchValidationQueryParams = { templateParams?: string; }; -type ValidationResult = { +export type ValidationResult = { end_column: number | null; line_number: number | null; message: string | null; diff --git a/superset-frontend/src/hooks/apiResources/sqlLab.ts b/superset-frontend/src/hooks/apiResources/sqlLab.ts index 45f7c83a3808..54e24ae13648 100644 --- a/superset-frontend/src/hooks/apiResources/sqlLab.ts +++ b/superset-frontend/src/hooks/apiResources/sqlLab.ts @@ -69,7 +69,7 @@ export type InitialState = { }[]; }; -const queryValidationApi = api.injectEndpoints({ +const initialStateApi = api.injectEndpoints({ endpoints: builder => ({ sqlLabInitialState: builder.query<InitialState, void>({ providesTags: ['SqlLabInitialState'], @@ -83,4 +83,4 @@ const queryValidationApi = api.injectEndpoints({ }); export const { useSqlLabInitialStateQuery: useSqlLabInitialState } = - queryValidationApi; + initialStateApi; diff --git a/superset-frontend/src/hooks/apiResources/tables.ts b/superset-frontend/src/hooks/apiResources/tables.ts index d90c528b40aa..86b080745fc1 100644 --- a/superset-frontend/src/hooks/apiResources/tables.ts +++ b/superset-frontend/src/hooks/apiResources/tables.ts @@ -117,6 +117,13 @@ const tableApi = api.injectEndpoints({ }), }), tableMetadata: builder.query<TableMetaData, FetchTableMetadataQueryParams>({ + providesTags: result => + result + ? 
[ + { type: 'TableMetadatas', id: result.name }, + { type: 'TableMetadatas', id: 'LIST' }, + ] + : [{ type: 'TableMetadatas', id: 'LIST' }], query: ({ dbId, catalog, schema, table }) => ({ endpoint: `/api/v1/database/${dbId}/table_metadata/${toQueryString({ name: table, @@ -152,7 +159,7 @@ export const { export function useTables(options: Params) { const { dbId, catalog, schema, onSuccess, onError } = options || {}; const isMountedRef = useRef(false); - const { data: schemaOptions, isFetching } = useSchemas({ + const { currentData: schemaOptions, isFetching } = useSchemas({ dbId, catalog: catalog || undefined, }); @@ -203,13 +210,13 @@ export function useTables(options: Params) { isSuccess, isError, isFetching, - data, + currentData, error, originalArgs, } = result; if (!originalArgs?.forceRefresh && requestId && !isFetching) { - if (isSuccess && data) { - handleOnSuccess(data, false); + if (isSuccess && currentData) { + handleOnSuccess(currentData, false); } if (isError) { handleOnError(error as Response); diff --git a/superset-frontend/src/hooks/useTimezoneConversion.ts b/superset-frontend/src/hooks/useTimezoneConversion.ts new file mode 100644 index 000000000000..16a2fde3eb69 --- /dev/null +++ b/superset-frontend/src/hooks/useTimezoneConversion.ts @@ -0,0 +1,174 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import moment from 'moment-timezone'; +import { useTimezone } from 'src/components/TimezoneContext'; + +/** + * Hook for timezone-aware conversions and formatting + * Provides utilities to convert dates between timezone and UTC for API calls + */ +export function useTimezoneConversion() { + const { timezone, convertToUTC, convertFromUTC, formatDate, formatDateTime } = useTimezone(); + + /** + * Convert date range filters to UTC for API calls + * This should be used when sending date filters to the backend + */ + const convertDateRangeToUTC = (dateRange: [string, string]): [string, string] => { + const [start, end] = dateRange; + return [ + convertToUTC(start).toISOString(), + convertToUTC(end).toISOString(), + ]; + }; + + /** + * Convert form data date fields to UTC before sending to API + * This recursively walks through form data and converts known date fields + */ + const convertFormDataDatesToUTC = (formData: any): any => { + if (!formData || typeof formData !== 'object') { + return formData; + } + + const converted = { ...formData }; + + // Common date field names that need conversion + const dateFields = [ + 'since', + 'until', + 'time_range_endpoints', + 'start_date_offset', + 'end_date_offset', + 'x_ticks_layout', + ]; + + dateFields.forEach(field => { + if (converted[field]) { + if (Array.isArray(converted[field])) { + // Handle date range arrays + converted[field] = converted[field].map((date: any) => + typeof date === 'string' ? 
convertToUTC(date).toISOString() : date + ); + } else if (typeof converted[field] === 'string') { + // Handle single date strings + converted[field] = convertToUTC(converted[field]).toISOString(); + } + } + }); + + // Handle time_range specifically if it's an object + if (converted.time_range && typeof converted.time_range === 'object') { + const timeRange = converted.time_range; + if (timeRange.since) { + timeRange.since = convertToUTC(timeRange.since).toISOString(); + } + if (timeRange.until) { + timeRange.until = convertToUTC(timeRange.until).toISOString(); + } + } + + return converted; + }; + + /** + * Convert UTC response data to local timezone for display + */ + const convertResponseDatesFromUTC = (data: any): any => { + if (!data || typeof data !== 'object') { + return data; + } + + if (Array.isArray(data)) { + return data.map(convertResponseDatesFromUTC); + } + + const converted = { ...data }; + + // Common response date field patterns + const datePatterns = [ + /.*_date$/i, + /.*_time$/i, + /.*timestamp$/i, + /created.*$/i, + /updated.*$/i, + /modified.*$/i, + ]; + + Object.keys(converted).forEach(key => { + const value = converted[key]; + + // Check if this field looks like a date field + const isDateField = datePatterns.some(pattern => pattern.test(key)); + + if (isDateField && typeof value === 'string') { + // Try to parse as ISO date and convert from UTC + try { + const parsedDate = moment.utc(value); + if (parsedDate.isValid()) { + converted[key] = convertFromUTC(value).toISOString(); + } + } catch (e) { + // If parsing fails, leave the value unchanged + } + } else if (typeof value === 'object') { + // Recursively convert nested objects + converted[key] = convertResponseDatesFromUTC(value); + } + }); + + return converted; + }; + + /** + * Format a date for display in the current timezone + */ + const formatDateForDisplay = (date: moment.MomentInput, format?: string): string => { + return formatDate(date, format); + }; + + /** + * Format a datetime for 
display in the current timezone + */ + const formatDateTimeForDisplay = (date: moment.MomentInput, format?: string): string => { + return formatDateTime(date, format); + }; + + /** + * Get timezone info for debugging/logging + */ + const getTimezoneInfo = () => ({ + current: timezone, + offset: moment.tz(timezone).format('Z'), + abbreviation: moment.tz(timezone).format('z'), + }); + + return { + timezone, + convertDateRangeToUTC, + convertFormDataDatesToUTC, + convertResponseDatesFromUTC, + formatDateForDisplay, + formatDateTimeForDisplay, + getTimezoneInfo, + convertToUTC, + convertFromUTC, + }; +} diff --git a/superset-frontend/src/middleware/logger.test.js b/superset-frontend/src/middleware/logger.test.js index d67c681caaf1..08aeb866cc89 100644 --- a/superset-frontend/src/middleware/logger.test.js +++ b/superset-frontend/src/middleware/logger.test.js @@ -16,18 +16,22 @@ * specific language governing permissions and limitations * under the License. */ -import sinon from 'sinon'; import { SupersetClient } from '@superset-ui/core'; -import logger from 'src/middleware/loggerMiddleware'; +import sinon from 'sinon'; import { LOG_EVENT } from 'src/logger/actions'; -import { LOG_ACTIONS_LOAD_CHART } from 'src/logger/LogUtils'; +import { + LOG_ACTIONS_LOAD_CHART, + LOG_ACTIONS_SPA_NAVIGATION, +} from 'src/logger/LogUtils'; +import logger from 'src/middleware/loggerMiddleware'; describe('logger middleware', () => { + const dashboardId = 123; const next = sinon.spy(); const mockStore = { getState: () => ({ dashboardInfo: { - id: 1, + id: dashboardId, }, impressionId: 'impression_id', }), @@ -43,6 +47,10 @@ describe('logger middleware', () => { }, }; + const timeSandbox = sinon.createSandbox({ + useFakeTimers: true, + }); + let postStub; beforeEach(() => { postStub = sinon.stub(SupersetClient, 'post'); @@ -50,6 +58,7 @@ describe('logger middleware', () => { afterEach(() => { next.resetHistory(); postStub.restore(); + timeSandbox.clock.reset(); }); it('should listen to 
LOG_EVENT action type', () => { @@ -64,11 +73,10 @@ describe('logger middleware', () => { }); it('should POST an event to /superset/log/ when called', () => { - const clock = sinon.useFakeTimers(); logger(mockStore)(next)(action); expect(next.callCount).toBe(0); - clock.tick(2000); + timeSandbox.clock.tick(2000); expect(SupersetClient.post.callCount).toBe(1); expect(SupersetClient.post.getCall(0).args[0].endpoint).toMatch( '/superset/log/', @@ -76,12 +84,19 @@ describe('logger middleware', () => { }); it('should include ts, start_offset, event_name, impression_id, source, and source_id in every event', () => { - const clock = sinon.useFakeTimers(); - logger(mockStore)(next)(action); - clock.tick(2000); - - expect(SupersetClient.post.callCount).toBe(1); - const { events } = SupersetClient.post.getCall(0).args[0].postPayload; + const fetchLog = logger(mockStore)(next); + fetchLog({ + type: LOG_EVENT, + payload: { + eventName: LOG_ACTIONS_SPA_NAVIGATION, + eventData: { path: `/dashboard/${dashboardId}/` }, + }, + }); + timeSandbox.clock.tick(2000); + fetchLog(action); + timeSandbox.clock.tick(2000); + expect(SupersetClient.post.callCount).toBe(2); + const { events } = SupersetClient.post.getCall(1).args[0].postPayload; const mockEventdata = action.payload.eventData; const mockEventname = action.payload.eventName; expect(events[0]).toMatchObject({ @@ -91,6 +106,7 @@ describe('logger middleware', () => { source: 'dashboard', source_id: mockStore.getState().dashboardInfo.id, event_type: 'timing', + dashboard_id: mockStore.getState().dashboardInfo.id, }); expect(typeof events[0].ts).toBe('number'); diff --git a/superset-frontend/src/middleware/loggerMiddleware.js b/superset-frontend/src/middleware/loggerMiddleware.js index 5408edb62c36..c008d027b3bb 100644 --- a/superset-frontend/src/middleware/loggerMiddleware.js +++ b/superset-frontend/src/middleware/loggerMiddleware.js @@ -23,7 +23,10 @@ import { SupersetClient } from '@superset-ui/core'; import { safeStringify } from 
'../utils/safeStringify'; import { LOG_EVENT } from '../logger/actions'; -import { LOG_EVENT_TYPE_TIMING } from '../logger/LogUtils'; +import { + LOG_EVENT_TYPE_TIMING, + LOG_ACTIONS_SPA_NAVIGATION, +} from '../logger/LogUtils'; import DebouncedMessageQueue from '../utils/DebouncedMessageQueue'; const LOG_ENDPOINT = '/superset/log/?explode=events'; @@ -67,73 +70,87 @@ const logMessageQueue = new DebouncedMessageQueue({ delayThreshold: 1000, }); let lastEventId = 0; -const loggerMiddleware = store => next => action => { - if (action.type !== LOG_EVENT) { - return next(action); - } +const loggerMiddleware = store => next => { + let navPath; + return action => { + if (action.type !== LOG_EVENT) { + return next(action); + } - const { dashboardInfo, explore, impressionId, dashboardLayout, sqlLab } = - store.getState(); - let logMetadata = { - impression_id: impressionId, - version: 'v2', - }; - if (dashboardInfo?.id) { - logMetadata = { - source: 'dashboard', - source_id: dashboardInfo.id, - ...logMetadata, - }; - } else if (explore?.slice) { - logMetadata = { - source: 'explore', - source_id: explore.slice ? 
explore.slice.slice_id : 0, - ...logMetadata, + const { dashboardInfo, explore, impressionId, dashboardLayout, sqlLab } = + store.getState(); + let logMetadata = { + impression_id: impressionId, + version: 'v2', }; - } else if (sqlLab) { - const editor = sqlLab.queryEditors.find( - ({ id }) => id === sqlLab.tabHistory.slice(-1)[0], - ); - logMetadata = { - source: 'sqlLab', - source_id: editor?.id, - db_id: editor?.dbId, - schema: editor?.schema, - }; - } + const { eventName } = action.payload; + let { eventData = {} } = action.payload; + + if (eventName === LOG_ACTIONS_SPA_NAVIGATION) { + navPath = eventData.path; + } + const path = navPath || window?.location?.href; + + if (dashboardInfo?.id && path?.includes('/dashboard/')) { + logMetadata = { + source: 'dashboard', + source_id: dashboardInfo.id, + dashboard_id: dashboardInfo.id, + ...logMetadata, + }; + } else if (explore?.slice) { + logMetadata = { + source: 'explore', + source_id: explore.slice ? explore.slice.slice_id : 0, + ...(explore.slice.slice_id && { slice_id: explore.slice.slice_id }), + ...logMetadata, + }; + } else if (path?.includes('/sqllab/')) { + const editor = sqlLab.queryEditors.find( + ({ id }) => id === sqlLab.tabHistory.slice(-1)[0], + ); + logMetadata = { + source: 'sqlLab', + source_id: editor?.id, + db_id: editor?.dbId, + schema: editor?.schema, + }; + } - const { eventName } = action.payload; - let { eventData = {} } = action.payload; - eventData = { - ...logMetadata, - ts: new Date().getTime(), - event_name: eventName, - ...eventData, - }; - if (LOG_EVENT_TYPE_TIMING.has(eventName)) { - eventData = { - ...eventData, - event_type: 'timing', - trigger_event: lastEventId, - }; - } else { - lastEventId = nanoid(); eventData = { + ...logMetadata, + ts: new Date().getTime(), + event_name: eventName, ...eventData, - event_type: 'user', - event_id: lastEventId, - visibility: document.visibilityState, }; - } + if (LOG_EVENT_TYPE_TIMING.has(eventName)) { + eventData = { + ...eventData, + 
event_type: 'timing', + trigger_event: lastEventId, + }; + } else { + lastEventId = nanoid(); + eventData = { + ...eventData, + event_type: 'user', + event_id: lastEventId, + visibility: document.visibilityState, + }; + } - if (eventData.target_id && dashboardLayout?.present?.[eventData.target_id]) { - const { meta } = dashboardLayout.present[eventData.target_id]; - // chart name or tab/header text - eventData.target_name = meta.chartId ? meta.sliceName : meta.text; - } + if ( + eventData.target_id && + dashboardLayout?.present?.[eventData.target_id] + ) { + const { meta } = dashboardLayout.present[eventData.target_id]; + // chart name or tab/header text + eventData.target_name = meta.chartId ? meta.sliceName : meta.text; + } - logMessageQueue.append(eventData); - return eventData; + logMessageQueue.append(eventData); + return eventData; + }; }; export default loggerMiddleware; diff --git a/superset-frontend/src/pages/AlertReportList/index.tsx b/superset-frontend/src/pages/AlertReportList/index.tsx index 9bb3705f1fd6..ac8d705cd1bd 100644 --- a/superset-frontend/src/pages/AlertReportList/index.tsx +++ b/superset-frontend/src/pages/AlertReportList/index.tsx @@ -109,7 +109,7 @@ function AlertList({ user, addSuccessToast, }: AlertListProps) { - const title = isReportEnabled ? t('report') : t('alert'); + const title = isReportEnabled ? t('Report') : t('Alert'); const titlePlural = isReportEnabled ? t('reports') : t('alerts'); const pathName = isReportEnabled ? 
'Reports' : 'Alerts'; const initialFilters = useMemo( diff --git a/superset-frontend/src/pages/Chart/index.tsx b/superset-frontend/src/pages/Chart/index.tsx index e8e9ae127fe5..c0d38144e2a6 100644 --- a/superset-frontend/src/pages/Chart/index.tsx +++ b/superset-frontend/src/pages/Chart/index.tsx @@ -84,9 +84,10 @@ const getDashboardContextFormData = () => { if (dashboardContext) { const sliceId = getUrlParam(URL_PARAMS.sliceId) || 0; const { + colorScheme, labelsColor, labelsColorMap, - colorScheme, + sharedLabelsColors, chartConfiguration, nativeFilters, filterBoxFilters, @@ -98,15 +99,18 @@ const getDashboardContextFormData = () => { filters: getAppliedFilterValues(sliceId, filterBoxFilters), nativeFilters, chartConfiguration, - colorScheme, dataMask, + colorScheme, labelsColor, labelsColorMap, + sharedLabelsColors, sliceId, allSliceIds: [sliceId], extraControls: {}, }); - Object.assign(dashboardContextWithFilters, { dashboardId }); + Object.assign(dashboardContextWithFilters, { + dashboardId, + }); return dashboardContextWithFilters; } return null; diff --git a/superset-frontend/src/pages/ChartCreation/ChartCreation.test.tsx b/superset-frontend/src/pages/ChartCreation/ChartCreation.test.tsx index b3c92dbe80d8..7d7e3ea3fa79 100644 --- a/superset-frontend/src/pages/ChartCreation/ChartCreation.test.tsx +++ b/superset-frontend/src/pages/ChartCreation/ChartCreation.test.tsx @@ -16,24 +16,38 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { ReactWrapper } from 'enzyme'; -import { styledMount as mount } from 'spec/helpers/theming'; -import Button from 'src/components/Button'; -import { AsyncSelect } from 'src/components'; -import { - ChartCreation, - ChartCreationProps, - ChartCreationState, -} from 'src/pages/ChartCreation'; -import VizTypeGallery from 'src/explore/components/controls/VizTypeControl/VizTypeGallery'; -import { act } from 'spec/helpers/testing-library'; + +import userEvent from '@testing-library/user-event'; +import { screen, waitFor, render } from 'spec/helpers/testing-library'; +import fetchMock from 'fetch-mock'; +import { createMemoryHistory } from 'history'; +import { ChartCreation } from 'src/pages/ChartCreation'; import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes'; -const datasource = { - value: '1', - label: 'table', +jest.mock('src/components/DynamicPlugins', () => ({ + usePluginContext: () => ({ + mountedPluginMetadata: { table: { name: 'Table', tags: [] } }, + }), +})); + +const mockDatasourceResponse = { + result: [ + { + id: 1, + table_name: 'table', + datasource_type: 'table', + database: { database_name: 'test_db' }, + schema: 'public', + }, + ], + count: 1, }; +fetchMock.get(/\/api\/v1\/dataset\/\?q=.*/, { + body: mockDatasourceResponse, + status: 200, +}); + const mockUser: UserWithPermissionsAndRoles = { createdOn: '2021-04-27T18:12:38.952304', email: 'admin', @@ -59,94 +73,112 @@ const mockUserWithDatasetWrite: UserWithPermissionsAndRoles = { username: 'admin', isAnonymous: false, }; +const history = createMemoryHistory(); + +history.push = jest.fn(); -// We don't need the actual implementation for the tests const routeProps = { - history: {} as any, + history, location: {} as any, match: {} as any, }; -async function getWrapper(user = mockUser) { - const wrapper = mount( +const renderOptions = { + useRouter: true, +}; + +async function renderComponent(user = mockUser) { + render( <ChartCreation user={user} addSuccessToast={() => null} 
{...routeProps} />, - ) as unknown as ReactWrapper< - ChartCreationProps, - ChartCreationState, - ChartCreation - >; - await act(() => new Promise(resolve => setTimeout(resolve, 0))); - return wrapper; + renderOptions, + ); + await waitFor(() => new Promise(resolve => setTimeout(resolve, 0))); } test('renders a select and a VizTypeGallery', async () => { - const wrapper = await getWrapper(); - expect(wrapper.find(AsyncSelect)).toExist(); - expect(wrapper.find(VizTypeGallery)).toExist(); + await renderComponent(); + expect(screen.getByRole('combobox', { name: 'Dataset' })).toBeInTheDocument(); + expect(screen.getByText(/choose chart type/i)).toBeInTheDocument(); }); test('renders dataset help text when user lacks dataset write permissions', async () => { - const wrapper = await getWrapper(); - expect(wrapper.find('[data-test="dataset-write"]')).not.toExist(); - expect(wrapper.find('[data-test="no-dataset-write"]')).toExist(); + await renderComponent(); + expect(screen.queryByText('Add a dataset')).not.toBeInTheDocument(); + expect(screen.getByText('view instructions')).toBeInTheDocument(); }); test('renders dataset help text when user has dataset write permissions', async () => { - const wrapper = await getWrapper(mockUserWithDatasetWrite); - expect(wrapper.find('[data-test="dataset-write"]')).toExist(); - expect(wrapper.find('[data-test="no-dataset-write"]')).not.toExist(); + await renderComponent(mockUserWithDatasetWrite); + expect(screen.getByText('Add a dataset')).toBeInTheDocument(); + expect(screen.queryByText('view instructions')).toBeInTheDocument(); }); -test('renders a button', async () => { - const wrapper = await getWrapper(); - expect(wrapper.find(Button)).toExist(); +test('renders create chart button', async () => { + await renderComponent(); + expect( + screen.getByRole('button', { name: 'Create new chart' }), + ).toBeInTheDocument(); }); test('renders a disabled button if no datasource is selected', async () => { - const wrapper = await getWrapper(); 
+ await renderComponent(); expect( - wrapper.find(Button).find({ disabled: true }).hostNodes(), - ).toHaveLength(1); + screen.getByRole('button', { name: 'Create new chart' }), + ).toBeDisabled(); }); test('renders an enabled button if datasource and viz type are selected', async () => { - const wrapper = await getWrapper(); - wrapper.setState({ - datasource, - vizType: 'table', - }); + await renderComponent(); + + const datasourceSelect = screen.getByRole('combobox', { name: 'Dataset' }); + userEvent.click(datasourceSelect); + userEvent.click(await screen.findByText(/test_db/i)); + + userEvent.click( + screen.getByRole('button', { + name: /ballot all charts/i, + }), + ); + userEvent.click(await screen.findByText('Table')); + expect( - wrapper.find(Button).find({ disabled: true }).hostNodes(), - ).toHaveLength(0); + screen.getByRole('button', { name: 'Create new chart' }), + ).toBeEnabled(); }); test('double-click viz type does nothing if no datasource is selected', async () => { - const wrapper = await getWrapper(); - wrapper.instance().gotoSlice = jest.fn(); - wrapper.update(); - wrapper.instance().onVizTypeDoubleClick(); - expect(wrapper.instance().gotoSlice).not.toBeCalled(); -}); + await renderComponent(); -test('double-click viz type submits if datasource is selected', async () => { - const wrapper = await getWrapper(); - wrapper.instance().gotoSlice = jest.fn(); - wrapper.update(); - wrapper.setState({ - datasource, - vizType: 'table', - }); - - wrapper.instance().onVizTypeDoubleClick(); - expect(wrapper.instance().gotoSlice).toBeCalled(); + userEvent.click( + screen.getByRole('button', { + name: /ballot all charts/i, + }), + ); + userEvent.dblClick(await screen.findByText('Table')); + + expect( + screen.getByRole('button', { name: 'Create new chart' }), + ).toBeDisabled(); + expect(history.push).not.toHaveBeenCalled(); }); -test('formats Explore url', async () => { - const wrapper = await getWrapper(); - wrapper.setState({ - datasource, - vizType: 'table', 
- }); - const formattedUrl = '/explore/?viz_type=table&datasource=1'; - expect(wrapper.instance().exploreUrl()).toBe(formattedUrl); +test('double-click viz type submits with formatted URL if datasource is selected', async () => { + await renderComponent(); + + const datasourceSelect = screen.getByRole('combobox', { name: 'Dataset' }); + userEvent.click(datasourceSelect); + userEvent.click(await screen.findByText(/test_db/i)); + + userEvent.click( + screen.getByRole('button', { + name: /ballot all charts/i, + }), + ); + userEvent.dblClick(await screen.findByText('Table')); + + expect( + screen.getByRole('button', { name: 'Create new chart' }), + ).toBeEnabled(); + const formattedUrl = '/explore/?viz_type=table&datasource=1__table'; + expect(history.push).toHaveBeenCalledWith(formattedUrl); }); diff --git a/superset-frontend/src/pages/DashboardList/DashboardList.test.jsx b/superset-frontend/src/pages/DashboardList/DashboardList.test.jsx index 90a397666e0a..6761e1e040fd 100644 --- a/superset-frontend/src/pages/DashboardList/DashboardList.test.jsx +++ b/superset-frontend/src/pages/DashboardList/DashboardList.test.jsx @@ -161,7 +161,7 @@ describe('DashboardList', () => { const callsD = fetchMock.calls(/dashboard\/\?q/); expect(callsD).toHaveLength(1); expect(callsD[0][0]).toMatchInlineSnapshot( - `"http://localhost/api/v1/dashboard/?q=(order_column:changed_on_delta_humanized,order_direction:desc,page:0,page_size:25,select_columns:!(id,dashboard_title,published,url,slug,changed_by,changed_on_delta_humanized,owners.id,owners.first_name,owners.last_name,owners,tags.id,tags.name,tags.type,status,certified_by,certification_details,changed_on))"`, + 
`"http://localhost/api/v1/dashboard/?q=(order_column:changed_on_delta_humanized,order_direction:desc,page:0,page_size:25,select_columns:!(id,dashboard_title,published,url,slug,changed_by,changed_by.id,changed_by.first_name,changed_by.last_name,changed_on_delta_humanized,owners,owners.id,owners.first_name,owners.last_name,tags.id,tags.name,tags.type,status,certified_by,certification_details,changed_on))"`, ); }); diff --git a/superset-frontend/src/pages/DashboardList/index.tsx b/superset-frontend/src/pages/DashboardList/index.tsx index 8ffc51ce2a6f..edc4e5b5eef4 100644 --- a/superset-frontend/src/pages/DashboardList/index.tsx +++ b/superset-frontend/src/pages/DashboardList/index.tsx @@ -118,11 +118,14 @@ const DASHBOARD_COLUMNS_TO_FETCH = [ 'url', 'slug', 'changed_by', + 'changed_by.id', + 'changed_by.first_name', + 'changed_by.last_name', 'changed_on_delta_humanized', + 'owners', 'owners.id', 'owners.first_name', 'owners.last_name', - 'owners', 'tags.id', 'tags.name', 'tags.type', @@ -383,9 +386,6 @@ function DashboardList(props: DashboardListProps) { Header: t('Owners'), accessor: 'owners', disableSortBy: true, - cellProps: { - style: { padding: '0px' }, - }, size: 'xl', }, { diff --git a/superset-frontend/src/pages/Home/Home.test.tsx b/superset-frontend/src/pages/Home/Home.test.tsx index f470e9ea7380..9f9ed80546e7 100644 --- a/superset-frontend/src/pages/Home/Home.test.tsx +++ b/superset-frontend/src/pages/Home/Home.test.tsx @@ -16,12 +16,12 @@ * specific language governing permissions and limitations * under the License. 
*/ -import fetchMock from 'fetch-mock'; import * as uiCore from '@superset-ui/core'; -import { render, screen, waitFor } from 'spec/helpers/testing-library'; +import { getExtensionsRegistry } from '@superset-ui/core'; import userEvent from '@testing-library/user-event'; +import fetchMock from 'fetch-mock'; +import { render, screen, waitFor } from 'spec/helpers/testing-library'; import Welcome from 'src/pages/Home'; -import { getExtensionsRegistry } from '@superset-ui/core'; import setupExtensions from 'src/setup/setupExtensions'; const chartsEndpoint = 'glob:*/api/v1/chart/?*'; @@ -62,9 +62,35 @@ fetchMock.get(savedQueryEndpoint, { result: [], }); +const mockRecentActivityResult = [ + { + action: 'dashboard', + item_title: "World Bank's Data", + item_type: 'dashboard', + item_url: '/superset/dashboard/world_health/', + time: 1741644942130.566, + time_delta_humanized: 'a day ago', + }, + { + action: 'dashboard', + item_title: '[ untitled dashboard ]', + item_type: 'dashboard', + item_url: '/superset/dashboard/19/', + time: 1741644881695.7869, + time_delta_humanized: 'a day ago', + }, + { + action: 'dashboard', + item_title: '[ untitled dashboard ]', + item_type: 'dashboard', + item_url: '/superset/dashboard/19/', + time: 1741644381695.7869, + time_delta_humanized: 'two day ago', + }, +]; + fetchMock.get(recentActivityEndpoint, { - Created: [], - Viewed: [], + result: mockRecentActivityResult, }); fetchMock.get(chartInfoEndpoint, { @@ -142,6 +168,21 @@ test('With sql role - renders all panels on the page on page load', async () => expect(panels).toHaveLength(4); }); +test('With sql role - renders distinct recent activities', async () => { + await renderWelcome(); + const recentPanel = screen.getByRole('button', { name: 'right Recents' }); + userEvent.click(recentPanel); + await waitFor( + () => + expect( + screen.queryAllByText(mockRecentActivityResult[0].item_title), + ).toHaveLength(1), // eslint-disable-line jest-dom/prefer-in-document + ); + expect( + 
screen.queryAllByText(mockRecentActivityResult[1].item_title), + ).toHaveLength(1); // eslint-disable-line jest-dom/prefer-in-document +}); + test('With sql role - calls api methods in parallel on page load', async () => { await renderWelcome(); expect(fetchMock.calls(chartsEndpoint)).toHaveLength(2); diff --git a/superset-frontend/src/pages/Home/index.tsx b/superset-frontend/src/pages/Home/index.tsx index b92e84f486dc..dab79badeb5e 100644 --- a/superset-frontend/src/pages/Home/index.tsx +++ b/superset-frontend/src/pages/Home/index.tsx @@ -159,7 +159,7 @@ function Welcome({ user, addDangerToast }: WelcomeProps) { const canReadSavedQueries = userHasPermission(user, 'SavedQuery', 'can_read'); const userid = user.userId; const id = userid!.toString(); // confident that user is not a guest user - const params = rison.encode({ page_size: 6 }); + const params = rison.encode({ page_size: 24, distinct: false }); const recent = `/api/v1/log/recent_activity/?q=${params}`; const [activeChild, setActiveChild] = useState('Loading'); const userKey = dangerouslyGetItemDoNotUse(id, null); diff --git a/superset-frontend/src/theme/index.ts b/superset-frontend/src/theme/index.ts index 308ea035d48c..857e95888f93 100644 --- a/superset-frontend/src/theme/index.ts +++ b/superset-frontend/src/theme/index.ts @@ -40,7 +40,7 @@ const baseConfig: ThemeConfig = { colorSuccess: supersetTheme.colors.success.base, colorTextBase: supersetTheme.colors.grayscale.dark2, colorWarning: supersetTheme.colors.warning.base, - controlHeight: supersetTheme.gridUnit * 32, + controlHeight: 32, fontFamily: supersetTheme.typography.families.sansSerif, fontFamilyCode: supersetTheme.typography.families.monospace, fontSize: supersetTheme.typography.sizes.m, @@ -61,9 +61,20 @@ const baseConfig: ThemeConfig = { fontWeightStrong: supersetTheme.typography.weights.medium, }, Tag: { - borderRadiusSM: supersetTheme.gridUnit / 2, + borderRadiusSM: 2, defaultBg: supersetTheme.colors.grayscale.light4, }, + Progress: { + 
fontSize: supersetTheme.typography.sizes.s, + colorText: supersetTheme.colors.text.label, + remainingColor: supersetTheme.colors.grayscale.light4, + }, + Slider: { + trackBgDisabled: supersetTheme.colors.grayscale.light1, + colorBgElevated: supersetTheme.colors.grayscale.light5, + handleSizeHover: 10, + handleLineWidthHover: 2, + }, }, }; diff --git a/superset-frontend/src/types/Chart.ts b/superset-frontend/src/types/Chart.ts index f26525cd3358..b7c439cb211f 100644 --- a/superset-frontend/src/types/Chart.ts +++ b/superset-frontend/src/types/Chart.ts @@ -74,6 +74,8 @@ export type Slice = { query_context?: object; is_managed_externally: boolean; owners?: number[]; + datasource?: string; + datasource_id?: number; }; export default Chart; diff --git a/superset-frontend/src/types/DashboardContextForExplore.ts b/superset-frontend/src/types/DashboardContextForExplore.ts index 4187dceaeb2d..117a182606ce 100644 --- a/superset-frontend/src/types/DashboardContextForExplore.ts +++ b/superset-frontend/src/types/DashboardContextForExplore.ts @@ -26,6 +26,7 @@ import { ChartConfiguration } from 'src/dashboard/types'; export interface DashboardContextForExplore { labelsColor: Record<string, string>; labelsColorMap: Record<string, string>; + sharedLabelsColors: string[]; colorScheme: string; chartConfiguration: ChartConfiguration; nativeFilters: PartialFilters; diff --git a/superset-frontend/src/types/Owner.ts b/superset-frontend/src/types/Owner.ts index 91e9d29c9bf9..b8c0f4962cba 100644 --- a/superset-frontend/src/types/Owner.ts +++ b/superset-frontend/src/types/Owner.ts @@ -22,7 +22,8 @@ */ export default interface Owner { - first_name: string; + first_name?: string; id: number; - last_name: string; + last_name?: string; + full_name?: string; } diff --git a/superset-frontend/plugins/plugin-chart-word-cloud/test/index.test.ts b/superset-frontend/src/types/dom-to-pdf.d.ts similarity index 69% rename from superset-frontend/plugins/plugin-chart-word-cloud/test/index.test.ts rename to 
superset-frontend/src/types/dom-to-pdf.d.ts index bec7a1171d2e..061e80d96ced 100644 --- a/superset-frontend/plugins/plugin-chart-word-cloud/test/index.test.ts +++ b/superset-frontend/src/types/dom-to-pdf.d.ts @@ -1,4 +1,4 @@ -/* +/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information @@ -16,14 +16,21 @@ * specific language governing permissions and limitations * under the License. */ +declare module 'dom-to-pdf' { + interface Image { + type: string; + quality: number; + } -import { WordCloudChartPlugin, LegacyWordCloudChartPlugin } from '../src'; + interface Options { + margin: number; + filename: string; + image: Image; + html2canvas: object; + excludeClassNames?: string[]; + } -describe('plugin-chart-word-cloud', () => { - it('exports WordCloudChartPlugin', () => { - expect(WordCloudChartPlugin).toBeDefined(); - }); - it('exports LegacyWordCloudChartPlugin', () => { - expect(LegacyWordCloudChartPlugin).toBeDefined(); - }); -}); + function domToPdf(elementToPrint: Element, options?: Options): Promise<any>; + + export default domToPdf; +} diff --git a/superset-frontend/src/utils/aiSummary.test.ts b/superset-frontend/src/utils/aiSummary.test.ts new file mode 100644 index 000000000000..a6103dc34501 --- /dev/null +++ b/superset-frontend/src/utils/aiSummary.test.ts @@ -0,0 +1,277 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { + generateSummary, + extractRawDataSample, + extractLightweightData, + ChartSummaryInput, +} from './aiSummary'; + +// Mock fetch globally +global.fetch = jest.fn(); +const mockFetch = fetch as jest.MockedFunction<typeof fetch>; + +// Mock window.location +Object.defineProperty(window, 'location', { + value: { + search: '', + }, + writable: true, +}); + +describe('aiSummary utilities', () => { + beforeEach(() => { + jest.clearAllMocks(); + window.location.search = ''; + }); + + describe('extractRawDataSample', () => { + it('should extract data sample from query data', () => { + const queriesData = [ + { + data: [ + { name: 'John', age: 30 }, + { name: 'Jane', age: 25 }, + ], + }, + ]; + + const result = extractRawDataSample(queriesData); + expect(result).toEqual([ + { name: 'John', age: 30 }, + { name: 'Jane', age: 25 }, + ]); + }); + + it('should limit to 200 rows', () => { + const largeDataset = Array.from({ length: 300 }, (_, i) => ({ + id: i, + value: `value_${i}`, + })); + const queriesData = [{ data: largeDataset }]; + + const result = extractRawDataSample(queriesData); + expect(result).toHaveLength(200); + expect(result?.[0]).toEqual({ id: 0, value: 'value_0' }); + expect(result?.[199]).toEqual({ id: 199, value: 'value_199' }); + }); + + it('should return null for invalid data', () => { + expect(extractRawDataSample([])).toBeNull(); + expect(extractRawDataSample([{ data: 'not an array' }])).toBeNull(); + expect(extractRawDataSample([{}])).toBeNull(); + }); + }); + + describe('extractLightweightData', () => { + it('should 
extract and summarize data structure', () => { + const queriesData = [ + { + data: [ + { name: 'John', age: 30, active: true }, + { name: 'Jane', age: 25, active: false }, + ], + }, + ]; + + const result = extractLightweightData(queriesData); + expect(result).toEqual([ + { name: 'John', age: 30, active: true }, + { name: 'Jane', age: 25, active: false }, + ]); + }); + + it('should truncate long strings', () => { + const longString = 'a'.repeat(150); + const queriesData = [ + { + data: [{ description: longString }], + }, + ]; + + const result = extractLightweightData(queriesData); + expect(Array.isArray(result) && result[0]?.description).toHaveLength(120); + }); + }); + + describe('generateSummary', () => { + const mockSuccessResponse = { + ok: true, + json: async () => ({ + data: { + result: { + insight: 'This chart shows sales data trends over time.', + }, + }, + }), + }; + + it('should send title and description to API', async () => { + mockFetch.mockResolvedValueOnce(mockSuccessResponse as any); + + const input: ChartSummaryInput = { + vizType: 'line', + title: 'Sales Over Time', + description: 'Monthly sales data for Q1 2024', + dataSample: [{ month: 'Jan', sales: 1000 }], + }; + + const result = await generateSummary(input); + + expect(mockFetch).toHaveBeenCalledWith( + 'https://api.intelligence.fynd.com/service/panel/analytics/ai/sql-helper/explain-chart', + expect.objectContaining({ + method: 'POST', + headers: { + accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + chart_data: { + vizType: 'line', + dataSample: [{ month: 'Jan', sales: 1000 }], + title: 'Sales Over Time', + description: 'Monthly sales data for Q1 2024', + }, + }), + credentials: 'include', + }), + ); + + expect(result).toBe('This chart shows sales data trends over time.'); + }); + + it('should handle undefined title and description', async () => { + mockFetch.mockResolvedValueOnce(mockSuccessResponse as any); + + const input: ChartSummaryInput = 
{ + vizType: 'bar', + dataSample: [{ category: 'A', value: 100 }], + }; + + await generateSummary(input); + + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + body: JSON.stringify({ + chart_data: { + vizType: 'bar', + dataSample: [{ category: 'A', value: 100 }], + title: undefined, + description: undefined, + }, + }), + }), + ); + }); + + it('should include URL query parameters', async () => { + mockFetch.mockResolvedValueOnce(mockSuccessResponse as any); + window.location.search = '?currency_code=USD&timezone=UTC&country_code=US&country=United States'; + + const input: ChartSummaryInput = { + vizType: 'pie', + title: 'Revenue Distribution', + description: 'Revenue by region', + dataSample: [{ region: 'North', revenue: 5000 }], + }; + + await generateSummary(input); + + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + body: JSON.stringify({ + chart_data: { + vizType: 'pie', + dataSample: [{ region: 'North', revenue: 5000 }], + title: 'Revenue Distribution', + description: 'Revenue by region', + currency_code: 'USD', + timezone: 'UTC', + country_code: 'US', + country: 'United States', + }, + }), + }), + ); + }); + + it('should handle API errors', async () => { + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 500, + } as any); + + const input: ChartSummaryInput = { + vizType: 'table', + title: 'Error Test', + description: 'This should fail', + }; + + await expect(generateSummary(input)).rejects.toThrow('AI endpoint error 500'); + }); + + it('should handle invalid response format', async () => { + mockFetch.mockResolvedValueOnce({ + ok: true, + json: async () => ({ data: { result: {} } }), + } as any); + + const input: ChartSummaryInput = { + vizType: 'table', + title: 'Invalid Response Test', + }; + + await expect(generateSummary(input)).rejects.toThrow('Invalid AI response'); + }); + + it('should respect timeout option', async () => { + const mockAbortController = { + 
signal: { aborted: false }, + abort: jest.fn(), + }; + jest.spyOn(global, 'AbortController').mockImplementation(() => mockAbortController as any); + jest.spyOn(global, 'setTimeout').mockImplementation((callback, delay) => { + if (delay === 5000) { + // Call the abort function immediately for testing + setTimeout(callback, 0); + } + return 123 as any; + }); + + mockFetch.mockImplementation(() => new Promise(() => {})); // Never resolves + + const input: ChartSummaryInput = { + vizType: 'bar', + title: 'Timeout Test', + }; + + try { + await generateSummary(input, { timeoutMs: 5000 }); + } catch (error) { + // Expected to throw due to abort + } + + expect(mockAbortController.abort).toHaveBeenCalled(); + }); + }); +}); diff --git a/superset-frontend/src/utils/aiSummary.ts b/superset-frontend/src/utils/aiSummary.ts new file mode 100644 index 000000000000..271d33165f75 --- /dev/null +++ b/superset-frontend/src/utils/aiSummary.ts @@ -0,0 +1,156 @@ +/** + * aiSummary + * + * Lightweight AI summary helper for charts. + * - Prefers summarizing structured data; can optionally include a snapshot image. + * - Uses a pluggable backend endpoint if provided via options or at runtime + * as window['__AI_SUMMARY_ENDPOINT__']. + * - If the endpoint fails or is missing, returns a small generic 3-line summary. 
+ */ + +import { ensureIsArray } from '@superset-ui/core'; + +export type SummaryMode = 'data' | 'image' | 'auto'; + +export interface ChartSummaryInput { + vizType: string; + dataSample?: unknown; + imageBase64?: string; + title?: string; + description?: string; + filters?: Record<string, unknown>; + timeRange?: string | null; +} + +export interface GenerateSummaryOptions { + mode?: SummaryMode; + model?: string; + endpoint?: string; // custom proxy endpoint that returns { summary: string } + apiKey?: string; // OpenAI key (only for direct client calls; prefer endpoint) + timeoutMs?: number; + signal?: AbortSignal; +} + +function safeSlice<T>(arr: T[], max: number): T[] { + if (!Array.isArray(arr)) return []; + return arr.slice(0, Math.max(0, max)); +} + +function summarizeShape(value: any): any { + // Produce a compact representation to keep prompts tiny + if (Array.isArray(value)) { + const head = safeSlice(value, 20); + return head.map(row => { + if (row && typeof row === 'object') { + const out: Record<string, unknown> = {}; + Object.entries(row as Record<string, unknown>).forEach(([k, v]) => { + if (typeof v === 'string') { + out[k] = (v as string).slice(0, 120); + } else if (typeof v === 'number' || typeof v === 'boolean') { + out[k] = v; + } else if (v instanceof Date) { + out[k] = v.toISOString(); + } else { + out[k] = typeof v; + } + }); + return out; + } + return row; + }); + } + if (value && typeof value === 'object') { + const out: Record<string, unknown> = {}; + Object.entries(value).forEach(([k, v]) => { + out[k] = Array.isArray(v) ? `array(${v.length})` : typeof v; + }); + return out; + } + return typeof value; +} + +export function extractLightweightData(queriesData: unknown): unknown { + const arr = ensureIsArray(queriesData); + if (!arr.length) return null; + const first = arr[0] as any; + // The query payload shapes vary a lot; try common keys + const data = first?.data ?? first?.records ?? first?.result ?? 
first; + return summarizeShape(data); +} + +// Raw sampler: returns minimally processed data suitable for external services +// Caps at 200 rows to keep payloads manageable +export function extractRawDataSample(queriesData: unknown): unknown[] | null { + const arr = ensureIsArray(queriesData); + if (!arr.length) return null; + const first = arr[0] as any; + const data = first?.data ?? first?.records ?? first?.result ?? first; + if (!Array.isArray(data)) return null; + return data.slice(0, 200); +} + +export async function generateSummary( + input: ChartSummaryInput, + options?: GenerateSummaryOptions, +): Promise<string> { + const endpoint = + 'https://api.intelligence.fynd.com/service/panel/analytics/ai/sql-helper/explain-chart'; + + // Build payload for custom API + const payload = { + chart_data: { + vizType: input.vizType, + dataSample: input.dataSample, + title: input.title, + description: input.description, + } as Record<string, unknown>, + }; + + // Add optional metadata from URL query params if present + try { + const search = typeof window !== 'undefined' ? window.location.search : ''; + const params = new URLSearchParams(search); + const cd = payload.chart_data as Record<string, unknown>; + + const currencyCode = params.get('currency_code'); + if (currencyCode) cd.currency_code = currencyCode; + + const timezone = params.get('timezone'); + if (timezone) cd.timezone = timezone; + + const countryCode = params.get('country_code'); + if (countryCode) cd.country_code = countryCode; + + const country = params.get('country'); + if (country) cd.country = country; + } catch { + // best effort only + } + + const controller = new AbortController(); + const timeout = setTimeout( + () => controller.abort(), + options?.timeoutMs ?? 15000, + ); + try { + const res = await fetch(endpoint, { + method: 'POST', + headers: { + accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify(payload), + signal: options?.signal ?? 
controller.signal, + credentials: 'include', + }); + if (!res.ok) throw new Error(`AI endpoint error ${res.status}`); + const json = await res.json(); + const summary = json?.data?.result?.insight; + if (!summary || typeof summary !== 'string') { + throw new Error('Invalid AI response'); + } + return summary; + } finally { + clearTimeout(timeout); + } +} diff --git a/superset-frontend/src/utils/colorScheme.ts b/superset-frontend/src/utils/colorScheme.ts index be4d3786e0c5..1b9f2d12ecef 100644 --- a/superset-frontend/src/utils/colorScheme.ts +++ b/superset-frontend/src/utils/colorScheme.ts @@ -24,7 +24,7 @@ import { } from '@superset-ui/core'; /** - * Forces falsy namespace values to undefined to default to GLOBAL + * Force falsy namespace values to undefined to default to GLOBAL * * @param namespace * @returns - namespace or default undefined @@ -32,16 +32,73 @@ import { export const getColorNamespace = (namespace?: string) => namespace || undefined; /** - * Get the labels color map entries * + * Field shared_label_colors used to be a dict of all colors for all labels. + * Force shared_label_colors field to be a list of actual shared labels. + * + * @param sharedLabelsColors - the shared label colors list + * @returns string[] + */ +export const enforceSharedLabelsColorsArray = ( + sharedLabelsColors: string[] | Record<string, string> | undefined, +) => (Array.isArray(sharedLabelsColors) ? sharedLabelsColors : []); + +/** + * Get labels shared across all charts in a dashboard. + * Merges a fresh instance of shared label colors with a stored one. 
+ * + * @param currentSharedLabels - existing shared labels to merge with fresh * @returns Record<string, string> */ -export const getLabelsColorMapEntries = (): Record<string, string> => { - const labelsColorMapInstance = getLabelsColorMap(); - const updatedLabelsColorMapEntries = Object.fromEntries( - labelsColorMapInstance.getColorMap(), +export const getFreshSharedLabels = ( + currentSharedLabels: string[] = [], +): string[] => { + const { chartsLabelsMap } = getLabelsColorMap(); + const allLabels = Array.from(chartsLabelsMap.values()).flatMap( + ({ labels }) => labels, ); - return updatedLabelsColorMapEntries; + + const duplicates = Array.from( + allLabels.reduce( + (counts, label) => counts.set(label, (counts.get(label) || 0) + 1), + new Map(), + ), + ) + .filter(([, count]) => count > 1) + .map(([label]) => label); + + return Array.from(new Set([...currentSharedLabels, ...duplicates])); +}; + +export const getSharedLabelsColorMapEntries = ( + currentColorMap: Record<string, string>, + sharedLabels: string[], +): Record<string, string> => + Object.fromEntries( + Object.entries(currentColorMap).filter(([label]) => + sharedLabels.includes(label), + ), + ); + +/** + * Returns all entries (labels and colors) except custom label colors. 
+ * + * @param customLabelsColor - the custom label colors in label_colors field + * @returns all color entries except custom label colors + */ +export const getLabelsColorMapEntries = ( + customLabelsColor: Record<string, string> = {}, +): Record<string, string> => { + const labelsColorMapInstance = getLabelsColorMap(); + const allEntries = Object.fromEntries(labelsColorMapInstance.getColorMap()); + + // custom label colors are applied and stored separately via label_colors + // removing all instances of custom label colors from the entries + Object.keys(customLabelsColor).forEach(label => { + delete allEntries[label]; + }); + + return allEntries; }; export const getColorSchemeDomain = (colorScheme: string) => @@ -56,15 +113,20 @@ export const getColorSchemeDomain = (colorScheme: string) => export const isLabelsColorMapSynced = ( metadata: Record<string, any>, ): boolean => { - const currentLabelsColorMap = metadata?.shared_label_colors || {}; - const customLabelColors = metadata?.label_colors || {}; - const freshLabelsColorMap = getLabelsColorMap().getColorMap(); - const isSynced = Array.from(freshLabelsColorMap.entries()).every( + const storedLabelsColorMap = metadata.map_label_colors || {}; + const customLabelColors = metadata.label_colors || {}; + const freshColorMap = getLabelsColorMap().getColorMap(); + const fullFreshColorMap = { + ...Object.fromEntries(freshColorMap), + ...customLabelColors, + }; + + const isSynced = Object.entries(fullFreshColorMap).every( ([label, color]) => - currentLabelsColorMap.hasOwnProperty(label) && - (currentLabelsColorMap[label] === color || - customLabelColors[label] !== undefined), + storedLabelsColorMap.hasOwnProperty(label) && + storedLabelsColorMap[label] === color, + ); + return isSynced; }; @@ -79,7 +141,7 @@ export const resetColors = (color_namespace?: string) => { getColorNamespace(color_namespace), ); categoricalNamespace.resetColors(); - labelsColorMapInstance.clear(); + labelsColorMapInstance.reset(); }; /** @@ -92,13 
+154,18 @@ export const resetColors = (color_namespace?: string) => { export const refreshLabelsColorMap = ( namespace?: string, colorScheme?: string, + merge = false, ) => { const colorNameSpace = getColorNamespace(namespace); const categoricalNamespace = CategoricalColorNamespace.getNamespace(colorNameSpace); const labelsColorMapInstance = getLabelsColorMap(); - labelsColorMapInstance.updateColorMap(categoricalNamespace, colorScheme); + labelsColorMapInstance.updateColorMap( + categoricalNamespace, + colorScheme, + merge, + ); }; /** @@ -107,34 +174,80 @@ export const refreshLabelsColorMap = ( * * @param metadata - the dashboard metadata object */ -export const applyColors = (metadata: Record<string, any>, fresh = false) => { +export const applyColors = ( + metadata: Record<string, any>, + // Create a fresh color map by changing color scheme + fresh: boolean | string[] = false, + // Catch new labels in the color map as they appear + merge = false, + // Apply only label colors that are shared across multiple charts. 
+ shared = false, +) => { const colorNameSpace = getColorNamespace(metadata?.color_namespace); const categoricalNamespace = CategoricalColorNamespace.getNamespace(colorNameSpace); const colorScheme = metadata?.color_scheme; - const customLabelColors = metadata?.label_colors || {}; - // when scheme unset, update only custom label colors - const labelsColorMap = metadata?.shared_label_colors || {}; + const fullLabelsColor = metadata?.map_label_colors || {}; + const sharedLabels = enforceSharedLabelsColorsArray( + metadata?.shared_label_colors, + ); + const customLabelsColor = metadata?.label_colors || {}; + const sharedLabelsColor = getSharedLabelsColorMapEntries( + fullLabelsColor, + sharedLabels, + ); - // reset forced colors (custom labels + labels color map) - categoricalNamespace.resetColors(); + if (fresh && !Array.isArray(fresh)) { + // reset custom label colors + // re-evaluate all other label colors + categoricalNamespace.resetColors(); + } - // apply custom label colors first - Object.keys(customLabelColors).forEach(label => { - categoricalNamespace.setColor(label, customLabelColors[label]); - }); + if (fresh && Array.isArray(fresh)) { + // when a color scheme is not set for the dashboard + // should only reset colors for charts that have changed scheme + // while keeping colors of existing shared label colors intact + // this is used also to reset custom label colors when added or removed + categoricalNamespace.resetColorsForLabels(fresh); + } - // re-instantiate a fresh labels color map based on current scheme - // will consider also just applied custom label colors - refreshLabelsColorMap(metadata?.color_namespace, colorScheme); + if (fresh || merge) { + // re-instantiate a fresh labels color map based on current scheme + // it considers just-applied custom label colors if present and all forced colors + // it will merge new labels into the existing color map only when merge is true + refreshLabelsColorMap(metadata?.color_namespace, colorScheme, merge); + 
} - // get the fresh map that was just updated or existing - const labelsColorMapEntries = fresh - ? getLabelsColorMapEntries() - : labelsColorMap; + let applicableColorMapEntries: Record<string, any> = fullLabelsColor; + if (fresh) { + // requires a new map all together + applicableColorMapEntries = { + ...getLabelsColorMapEntries(customLabelsColor), + }; + } + if (merge) { + // must only add up newly appearing labels + // without overriding existing ones + applicableColorMapEntries = { + ...fullLabelsColor, + ...getLabelsColorMapEntries(customLabelsColor), + }; + } + + if (shared) { + // must apply the colors to only shared labels + applicableColorMapEntries = sharedLabelsColor; + } + + applicableColorMapEntries = { + ...applicableColorMapEntries, + ...customLabelsColor, + }; // apply the final color map - Object.keys(labelsColorMapEntries).forEach(label => { - categoricalNamespace.setColor(label, labelsColorMapEntries[label]); - }); + if (applicableColorMapEntries) { + Object.keys(applicableColorMapEntries).forEach(label => { + categoricalNamespace.setColor(label, applicableColorMapEntries[label]); + }); + } }; diff --git a/superset-frontend/src/utils/dateUtils.ts b/superset-frontend/src/utils/dateUtils.ts new file mode 100644 index 000000000000..b1b6e0591198 --- /dev/null +++ b/superset-frontend/src/utils/dateUtils.ts @@ -0,0 +1,160 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import moment from 'moment-timezone'; +// Local fallback for URL param reading to avoid dependency and logs +function getUrlParam(param: { name: string } | string): string | null { + try { + const key = typeof param === 'string' ? param : param.name; + const url = new URL(window.location.href); + return url.searchParams.get(key); + } catch (_e) { + return null; + } +} +const URL_PARAMS = { timezone: { name: 'timezone' } } as const; + +const DEFAULT_TIMEZONE = 'Asia/Kolkata'; +const DEFAULT_DATE_FORMAT = 'YYYY-MM-DD'; +const DEFAULT_DATETIME_FORMAT = 'YYYY-MM-DD HH:mm:ss'; + +/** + * Get the target timezone for the application: + * 1. URL parameter timezone (highest priority) + * 2. Default to Asia/Kolkata (if no URL param) + * + * This is the timezone that ALL dates will be converted TO for display. + * Browser timezone is just the source - we convert FROM browser TO this target timezone. 
+ */ +export function getCurrentTimezone(): string { + const urlTimezone = getUrlParam(URL_PARAMS.timezone); + if (typeof urlTimezone === 'string' && moment.tz.zone(urlTimezone)) { + return urlTimezone; + } + return DEFAULT_TIMEZONE; +} + +/** + * Format a date in the current timezone + */ +export function formatDate( + date: moment.MomentInput, + format = DEFAULT_DATE_FORMAT, + timezone?: string, +): string { + const tz = timezone || getCurrentTimezone(); + return moment.tz(date, tz).format(format); +} + +/** + * Format a datetime in the current timezone + */ +export function formatDateTime( + date: moment.MomentInput, + format = DEFAULT_DATETIME_FORMAT, + timezone?: string, +): string { + const tz = timezone || getCurrentTimezone(); + return moment.tz(date, tz).format(format); +} + +/** + * Create a moment object in the current timezone + */ +export function createMomentInTimezone( + date: moment.MomentInput, + timezone?: string, +): moment.Moment { + const tz = timezone || getCurrentTimezone(); + return moment.tz(date, tz); +} + +/** + * Parse a date string and convert it to the current timezone + */ +export function parseAndConvertToTimezone( + dateString: string, + inputFormat?: string, + timezone?: string, +): moment.Moment { + const tz = timezone || getCurrentTimezone(); + if (inputFormat) { + return moment.tz(dateString, inputFormat, tz); + } + return moment.tz(dateString, tz); +} + +/** + * Get timezone display name for UI + */ +export function getTimezoneDisplayName(timezone?: string): string { + const tz = timezone || getCurrentTimezone(); + const offset = moment.tz(tz).format('Z'); + return `${tz} (UTC${offset})`; +} + +/** + * Check if a timezone is valid + */ +export function isValidTimezone(timezone: string): boolean { + return !!moment.tz.zone(timezone); +} + +/** + * Convert a date from the current timezone to UTC for API calls + */ +export function convertToUTC( + date: moment.MomentInput, + sourceTimezone?: string, +): moment.Moment { + const tz 
= sourceTimezone || getCurrentTimezone(); + return moment.tz(date, tz).utc(); +} + +/** + * Convert a UTC date to the current timezone for display + */ +export function convertFromUTC( + utcDate: moment.MomentInput, + targetTimezone?: string, +): moment.Moment { + const tz = targetTimezone || getCurrentTimezone(); + return moment.utc(utcDate).tz(tz); +} + +/** + * Format a UTC date string for API calls + */ +export function formatForAPI( + date: moment.MomentInput, + sourceTimezone?: string, +): string { + return convertToUTC(date, sourceTimezone).toISOString(); +} + +/** + * Parse a UTC date from API and format for display + */ +export function formatFromAPI( + utcDateString: string, + format = DEFAULT_DATETIME_FORMAT, + targetTimezone?: string, +): string { + return convertFromUTC(utcDateString, targetTimezone).format(format); +} diff --git a/superset-frontend/src/utils/downloadAsPdf.ts b/superset-frontend/src/utils/downloadAsPdf.ts new file mode 100644 index 000000000000..bb769d1eb117 --- /dev/null +++ b/superset-frontend/src/utils/downloadAsPdf.ts @@ -0,0 +1,74 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +import { SyntheticEvent } from 'react'; +import domToPdf from 'dom-to-pdf'; +import { kebabCase } from 'lodash'; +import { logging, t } from '@superset-ui/core'; +import { addWarningToast } from 'src/components/MessageToasts/actions'; + +/** + * generate a consistent file stem from a description and date + * + * @param description title or description of content of file + * @param date date when file was generated + */ +const generateFileStem = (description: string, date = new Date()) => + `${kebabCase(description)}-${date.toISOString().replace(/[: ]/g, '-')}`; + +/** + * Create an event handler for turning an element into an image + * + * @param selector css selector of the parent element which should be turned into image + * @param description name or a short description of what is being printed. + * Value will be normalized, and a date as well as a file extension will be added. + * @param isExactSelector if false, searches for the closest ancestor that matches selector. + * @returns event handler + */ +export default function downloadAsPdf( + selector: string, + description: string, + isExactSelector = false, +) { + return (event: SyntheticEvent) => { + const elementToPrint = isExactSelector + ? 
document.querySelector(selector) + : event.currentTarget.closest(selector); + + if (!elementToPrint) { + return addWarningToast( + t('PDF download failed, please refresh and try again.'), + ); + } + + const options = { + margin: 10, + filename: `${generateFileStem(description)}.pdf`, + image: { type: 'jpeg', quality: 1 }, + html2canvas: { scale: 2 }, + excludeClassNames: ['header-controls'], + }; + return domToPdf(elementToPrint, options) + .then(() => { + // nothing to be done + }) + .catch((e: Error) => { + logging.error('PDF generation failed', e); + }); + }; +} diff --git a/superset-frontend/src/utils/getOwnerName.test.ts b/superset-frontend/src/utils/getOwnerName.test.ts index a4a25e57b24e..27ec7e99b944 100644 --- a/superset-frontend/src/utils/getOwnerName.test.ts +++ b/superset-frontend/src/utils/getOwnerName.test.ts @@ -22,6 +22,8 @@ test('render owner name correctly', () => { expect(getOwnerName({ id: 1, first_name: 'Foo', last_name: 'Bar' })).toEqual( 'Foo Bar', ); + + expect(getOwnerName({ id: 2, full_name: 'John Doe' })).toEqual('John Doe'); }); test('return empty string for undefined owner', () => { diff --git a/superset-frontend/src/utils/getOwnerName.ts b/superset-frontend/src/utils/getOwnerName.ts index 2534c45f2cbb..42c1519c8671 100644 --- a/superset-frontend/src/utils/getOwnerName.ts +++ b/superset-frontend/src/utils/getOwnerName.ts @@ -22,5 +22,5 @@ export default function getOwnerName(owner?: Owner): string { if (!owner) { return ''; } - return `${owner.first_name} ${owner.last_name}`; + return owner.full_name || `${owner.first_name} ${owner.last_name}`; } diff --git a/superset-frontend/src/utils/timezoneApiUtils.ts b/superset-frontend/src/utils/timezoneApiUtils.ts new file mode 100644 index 000000000000..e97a6867ee17 --- /dev/null +++ b/superset-frontend/src/utils/timezoneApiUtils.ts @@ -0,0 +1,492 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import moment from 'moment-timezone'; +import { getCurrentTimezone, convertToUTC, convertFromUTC } from './dateUtils'; + +interface TimezoneAwareRequestOptions { + convertRequestDates?: boolean; + convertResponseDates?: boolean; + additionalDateFields?: string[]; +} + +// --- Helper utilities (kept minimal and without logging) --- +function isDateString(str: string): boolean { + if (!str || typeof str !== 'string') { + return false; + } + const datePatterns = [ + /^\d{4}-\d{2}-\d{2}$/, + /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/, + /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}/, + /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/, + /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{2}:\d{2}$/, + ]; + const matched = datePatterns.some(pattern => pattern.test(str)); + if (!matched) { + return false; + } + const m = moment(str); + if (!m.isValid()) { + return false; + } + // Cheap sanity bounds + const year = m.year(); + if (year < 1900 || year > 2100) { + return false; + } + return true; +} + +function isRelativeTimeRange(str: string): boolean { + if (!str || typeof str !== 'string') return false; + const relativePatterns = [ + /^today$/i, + /^now$/i, + /^last\s+(day|week|month|quarter|year)$/i, + /^last\s+\d+\s+(second|minute|hour|day|week|month|quarter|year)s?$/i, + 
/^next\s+\d+\s+(second|minute|hour|day|week|month|quarter|year)s?$/i, + /^current\s+(day|week|month|quarter|year)$/i, + /^previous\s+(calendar\s+)?(day|week|month|quarter|year)$/i, + /DATETIME\("(today|now)"\)/i, + /DATEADD\(DATETIME\("(today|now)"\)/i, + /DATETRUNC\(/i, + ]; + return relativePatterns.some(pattern => pattern.test(str)); +} + +function resolveRelativeTimeRangeInternal( + timeRange: string, + timezone: string, + now: moment.Moment, + today: moment.Moment, +): string { + const lower = timeRange.toLowerCase(); + if (lower === 'today') { + return today.format('YYYY-MM-DDTHH:mm:ss'); + } + if (lower === 'now') { + return now.format('YYYY-MM-DDTHH:mm:ss'); + } + if (timeRange.includes('DATETIME("today")')) { + const todayStr = today.format('YYYY-MM-DDTHH:mm:ss'); + return timeRange.replace(/DATETIME\("today"\)/g, `DATETIME("${todayStr}")`); + } + if (timeRange.includes('DATETIME("now")')) { + const nowStr = now.format('YYYY-MM-DDTHH:mm:ss'); + return timeRange.replace(/DATETIME\("now"\)/g, `DATETIME("${nowStr}")`); + } + if (lower === 'current day') { + const startOfDay = today.format('YYYY-MM-DDTHH:mm:ss'); + const endOfDay = today.clone().endOf('day').format('YYYY-MM-DDTHH:mm:ss'); + return `${startOfDay} : ${endOfDay}`; + } + const currentMatch = timeRange.match( + /^current\s+(week|month|quarter|year)$/i, + ); + if (currentMatch) { + const unit = currentMatch[1].toLowerCase(); + const startOf = today + .clone() + .startOf(unit as any) + .format('YYYY-MM-DDTHH:mm:ss'); + const endOf = today + .clone() + .endOf(unit as any) + .format('YYYY-MM-DDTHH:mm:ss'); + return `${startOf} : ${endOf}`; + } + const previousCalendarMatch = timeRange.match( + /^previous\s+calendar\s+(week|month|year)$/i, + ); + if (previousCalendarMatch) { + const unit = previousCalendarMatch[1].toLowerCase(); + const unitMap: Record<string, string> = { + week: 'weeks', + month: 'months', + year: 'years', + }; + const momentUnit = unitMap[unit] || unit; + const startTime = today + 
+ .clone() + .subtract(1, momentUnit as any) + .startOf(unit as any) + .format('YYYY-MM-DDTHH:mm:ss'); + const endTime = today + .clone() + .startOf(unit as any) + .format('YYYY-MM-DDTHH:mm:ss'); + return `${startTime} : ${endTime}`; + } + const previousMatch = timeRange.match( + /^previous\s+(day|week|month|quarter|year)$/i, + ); + if (previousMatch) { + const unit = previousMatch[1].toLowerCase(); + const unitMap: Record<string, string> = { + day: 'days', + week: 'weeks', + month: 'months', + quarter: 'quarters', + year: 'years', + }; + const momentUnit = unitMap[unit] || unit; + const startTime = today + .clone() + .subtract(1, momentUnit as any) + .startOf(unit as any) + .format('YYYY-MM-DDTHH:mm:ss'); + const endTime = today + .clone() + .startOf(unit as any) + .format('YYYY-MM-DDTHH:mm:ss'); + return `${startTime} : ${endTime}`; + } + const lastMatch = timeRange.match( + /^last\s+(\d+)?\s*(day|week|month|quarter|year)s?$/i, + ); + if (lastMatch) { + const count = parseInt(lastMatch[1] || '1', 10); + const unit = lastMatch[2].toLowerCase(); + const unitMap: Record<string, string> = { + day: 'days', + week: 'weeks', + month: 'months', + quarter: 'quarters', + year: 'years', + }; + const momentUnit = unitMap[unit] || unit; + const startTime = today + .clone() + .subtract(count, momentUnit as any) + .format('YYYY-MM-DDTHH:mm:ss'); + const endTime = today.format('YYYY-MM-DDTHH:mm:ss'); + return `${startTime} : ${endTime}`; + } + if (timeRange.includes('DATETRUNC(')) { + let result = timeRange; + if (result.includes('DATETIME("today")')) { + const todayStr = today.format('YYYY-MM-DDTHH:mm:ss'); + result = result.replace( + /DATETIME\("today"\)/g, + `DATETIME("${todayStr}")`, + ); + } + if (result.includes('DATETIME("now")')) { + const nowStr = now.format('YYYY-MM-DDTHH:mm:ss'); + result = result.replace(/DATETIME\("now"\)/g, `DATETIME("${nowStr}")`); + }
+ return result; + } + return timeRange; +} + +function resolveRelativeTimeRange(timeRange: string, timezone: string): string { + const originalTimeRange = timeRange; + if (!isRelativeTimeRange(timeRange)) { + logTimezoneConversion('resolveRelativeTimeRange (not relative)', originalTimeRange, timeRange); + return timeRange; + } + const now = moment.tz(timezone); + const today = moment.tz(timezone).startOf('day'); + const result = resolveRelativeTimeRangeInternal(timeRange, timezone, now, today); + + logTimezoneConversion('resolveRelativeTimeRange', originalTimeRange, result, { + timezone, + now: now.format('YYYY-MM-DDTHH:mm:ss'), + today: today.format('YYYY-MM-DDTHH:mm:ss'), + }); + + return result; +} + +function convertFilterDateValues(filter: any, timezone: string): any { + if (!filter || typeof filter !== 'object') { + return filter; + } + const converted = { ...filter }; + if (converted.comparator) { + if ( + typeof converted.comparator === 'string' && + isDateString(converted.comparator) + ) { + // Skip conversion for relative time ranges in filter comparators + const isSimpleRelativeRange = /^(last|previous|current)\s+(day|week|month|quarter|year)$/i.test(converted.comparator) || + /^previous\s+calendar\s+(week|month|year)$/i.test(converted.comparator); + if (!isSimpleRelativeRange) { + converted.comparator = convertToUTC( + converted.comparator, + timezone, + ).toISOString(); + } + } else if (Array.isArray(converted.comparator)) { + converted.comparator = converted.comparator.map((val: any) => { + if (typeof val === 'string' && isDateString(val)) { + return convertToUTC(val, timezone).toISOString(); + } + return val; + }); + } + } + if ( + converted.val && + typeof converted.val === 'string' && + isDateString(converted.val) + ) { + converted.val = convertToUTC(converted.val, timezone).toISOString(); + } else if (converted.val && Array.isArray(converted.val)) { + converted.val = converted.val.map((val: any) => { + if (typeof val === 'string' && 
isDateString(val)) { + return convertToUTC(val, timezone).toISOString(); + } + return val; + }); + } + if (converted.clause === 'WHERE' && converted.subject && converted.operator) { + if ( + converted.comparator && + typeof converted.comparator === 'string' && + isDateString(converted.comparator) + ) { + converted.comparator = convertToUTC( + converted.comparator, + timezone, + ).toISOString(); + } + } + Object.keys(converted).forEach(key => { + if (typeof converted[key] === 'object' && converted[key] !== null) { + converted[key] = convertFilterDateValues(converted[key], timezone); + } + }); + return converted; +} + +// --- Public API --- +export function convertRequestDatesToUTC( + payload: any, + options: TimezoneAwareRequestOptions = {}, +): any { + if (!payload || typeof payload !== 'object') { + return payload; + } + const { additionalDateFields = [] } = options; + const converted = { ...payload }; + const timezone = getCurrentTimezone(); + + const defaultDateFields = [ + 'since', + 'until', + 'start_date', + 'end_date', + 'time_range_endpoints', + 'start_date_offset', + 'end_date_offset', + 'time_range', + 'extra_filters', + 'adhoc_filters', + 'extra_form_data', + ]; + const allDateFields = [...defaultDateFields, ...additionalDateFields]; + + allDateFields.forEach(field => { + if (!converted[field]) { + return; + } + if ( + (field === 'adhoc_filters' || field === 'extra_filters') && + Array.isArray(converted[field]) + ) { + converted[field] = converted[field].map((filter: any) => + convertFilterDateValues(filter, timezone), + ); + } + if (field === 'extra_form_data' && typeof converted[field] === 'object') { + const extraFormData = { ...converted[field] } as any; + if (typeof extraFormData.time_range === 'string') { + const originalTimeRange = extraFormData.time_range; + let tr = extraFormData.time_range as string; + + logTimezoneConversion('extra_form_data.time_range (initial)', originalTimeRange, tr); + + logTimezoneConversion('extra_form_data.time_range 
(processing check)', tr, 'WILL PROCESS ALL RANGES'); + + if (isRelativeTimeRange(tr) && !tr.includes(' : ')) { + const resolvedTr = resolveRelativeTimeRange(tr, timezone); + logTimezoneConversion('extra_form_data.time_range (resolved)', tr, resolvedTr); + tr = resolvedTr; + } + + const parts = tr.split(' : '); + if (parts.length === 2) { + let [startTime, endTime] = parts; + const originalParts = [startTime, endTime]; + + if (isRelativeTimeRange(startTime)) { + startTime = resolveRelativeTimeRange(startTime, timezone); + } + if (isRelativeTimeRange(endTime)) { + endTime = resolveRelativeTimeRange(endTime, timezone); + } + + if (isDateString(startTime) && isDateString(endTime)) { + const convertedStart = convertToUTC( + startTime, + timezone, + ).toISOString(); + const convertedEnd = convertToUTC(endTime, timezone).toISOString(); + tr = `${convertedStart} : ${convertedEnd}`; + + logTimezoneConversion('extra_form_data.time_range (final UTC)', originalParts, [convertedStart, convertedEnd], { + timezone, + originalTimeRange, + }); + } else { + tr = `${startTime} : ${endTime}`; + logTimezoneConversion('extra_form_data.time_range (non-date range)', originalParts, [startTime, endTime]); + } + } else { + logTimezoneConversion('extra_form_data.time_range (single value)', originalTimeRange, tr); + } + extraFormData.time_range = tr; + } + converted[field] = extraFormData; + } + if (Array.isArray(converted[field])) { + converted[field] = converted[field].map((val: any) => { + if (typeof val === 'string' && isDateString(val)) { + return convertToUTC(val, timezone).toISOString(); + } + return val; + }); + } + if ( + typeof converted[field] === 'string' && + isDateString(converted[field]) + ) { + logTimezoneConversion('date string conversion', field, { + value: converted[field], + isTimeRange: field === 'time_range' + }); + + converted[field] = convertToUTC(converted[field], timezone).toISOString(); + } + }); + + if (converted.form_data) { + converted.form_data = 
convertRequestDatesToUTC( + converted.form_data, + options, + ); + } + if (Array.isArray(converted.queries)) { + converted.queries = converted.queries.map((q: any) => + convertRequestDatesToUTC(q, options), + ); + } + return converted; +} + +export function convertResponseDatesFromUTC( + data: any, + options: TimezoneAwareRequestOptions = {}, +): any { + if (!data || typeof data !== 'object') { + return data; + } + if (Array.isArray(data)) { + return data.map(item => convertResponseDatesFromUTC(item, options)); + } + const { additionalDateFields = [] } = options; + const converted = { ...data }; + const timezone = getCurrentTimezone(); + const datePatterns = [ + /.*_date$/i, + /.*_time$/i, + /.*timestamp$/i, + /created.*$/i, + /updated.*$/i, + /modified.*$/i, + /changed.*$/i, + ...additionalDateFields.map(field => new RegExp(`^${field}$`, 'i')), + ]; + Object.keys(converted).forEach(key => { + const value = converted[key]; + const isDateField = datePatterns.some(pattern => pattern.test(key)); + if (isDateField && typeof value === 'string' && isDateString(value)) { + try { + converted[key] = convertFromUTC(value, timezone).toISOString(); + } catch (_e) { + // leave as-is + } + } else if (typeof value === 'object') { + converted[key] = convertResponseDatesFromUTC(value, options); + } + }); + return converted; +} + +export function createTimezoneAwareApiCall<T = any>( + originalApiCall: (...args: any[]) => Promise<T>, + options: TimezoneAwareRequestOptions = {}, +) { + return async (...args: any[]): Promise<T> => { + const { convertRequestDates = true, convertResponseDates = true } = options; + let convertedArgs = args; + if (convertRequestDates && args.length > 0) { + convertedArgs = args.map((arg, index) => { + if (index === 0 && typeof arg === 'object') { + return convertRequestDatesToUTC(arg, options); + } + return arg; + }); + } + const response = await originalApiCall(...convertedArgs); + if (convertResponseDates) { + return convertResponseDatesFromUTC(response, 
options); + } + return response; + }; +} + +let debugLogging = false; + +// Enable/disable debug logging +export function enableTimezoneDebugLogging(enable: boolean = true): void { + debugLogging = enable; +} + +export function logTimezoneConversion( + operation: string, + input: any, + output: any, + details?: any, +): void { + if (debugLogging) { + console.group(`🕐 Timezone Conversion: ${operation}`); + console.log('Input:', input); + console.log('Output:', output); + if (details) { + console.log('Details:', details); + } + console.groupEnd(); + } +} diff --git a/superset-frontend/src/utils/urlUtils.ts b/superset-frontend/src/utils/urlUtils.ts index 2858d65a7de1..80e8948d22c1 100644 --- a/superset-frontend/src/utils/urlUtils.ts +++ b/superset-frontend/src/utils/urlUtils.ts @@ -123,8 +123,13 @@ function getChartUrlParams(excludedUrlParams?: string[]): UrlParamEntries { return getUrlParamEntries(urlParams); } -function getDashboardUrlParams(): UrlParamEntries { - const urlParams = getUrlParams(RESERVED_DASHBOARD_URL_PARAMS); +export function getDashboardUrlParams( + extraExcludedParams: string[] = [], +): UrlParamEntries { + const urlParams = getUrlParams([ + ...RESERVED_DASHBOARD_URL_PARAMS, + ...extraExcludedParams, + ]); const filterBoxFilters = getActiveFilters(); if (!isEmpty(filterBoxFilters)) urlParams.append( diff --git a/superset-frontend/src/views/CRUD/utils.tsx b/superset-frontend/src/views/CRUD/utils.tsx index 7b9274a90446..43a9e179fb94 100644 --- a/superset-frontend/src/views/CRUD/utils.tsx +++ b/superset-frontend/src/views/CRUD/utils.tsx @@ -26,6 +26,7 @@ import { SupersetTheme, getClientErrorObject, t, + lruCache, } from '@superset-ui/core'; import Chart from 'src/types/Chart'; import { intersection } from 'lodash'; @@ -34,7 +35,7 @@ import { FetchDataConfig, FilterValue } from 'src/components/ListView'; import SupersetText from 'src/utils/textUtils'; import { findPermission } from 'src/utils/findPermission'; import { User } from 
'src/types/bootstrapTypes'; -import { WelcomeTable } from 'src/features/home/types'; +import { RecentActivity, WelcomeTable } from 'src/features/home/types'; import { Dashboard, Filter, TableTab } from './types'; // Modifies the rison encoding slightly to match the backend's rison encoding/decoding. Applies globally. @@ -223,10 +224,14 @@ export const getRecentActivityObjs = ( ) => SupersetClient.get({ endpoint: recent }).then(recentsRes => { const res: any = {}; + const distinctRes = lruCache<RecentActivity>(6); + recentsRes.json.result.reverse().forEach((record: RecentActivity) => { + distinctRes.set(record.item_url, record); + }); return getFilteredChartsandDashboards(addDangerToast, filters).then( ({ other }) => { res.other = other; - res.viewed = recentsRes.json.result; + res.viewed = distinctRes.values().reverse(); return res; }, ); diff --git a/superset-frontend/src/views/RootContextProviders.tsx b/superset-frontend/src/views/RootContextProviders.tsx index c0bc5c3af140..50c107d3d823 100644 --- a/superset-frontend/src/views/RootContextProviders.tsx +++ b/superset-frontend/src/views/RootContextProviders.tsx @@ -30,6 +30,7 @@ import FlashProvider from '../components/FlashProvider'; import { theme } from '../preamble'; import { EmbeddedUiConfigProvider } from '../components/UiConfigContext'; import { DynamicPluginProvider } from '../components/DynamicPlugins'; +import { TimezoneProvider } from '../components/TimezoneContext'; const { common } = getBootstrapData(); @@ -52,13 +53,15 @@ export const RootContextProviders: React.FC = ({ children }) => { ReactRouterRoute={Route} stringifyOptions={{ encode: false }} > - {RootContextProviderExtension ? ( - <RootContextProviderExtension> - {children} - </RootContextProviderExtension> - ) : ( - children - )} + <TimezoneProvider> + {RootContextProviderExtension ? 
( + <RootContextProviderExtension> + {children} + </RootContextProviderExtension> + ) : ( + children + )} + </TimezoneProvider> </QueryParamProvider> </DynamicPluginProvider> </EmbeddedUiConfigProvider> diff --git a/superset-frontend/webpack.config.js b/superset-frontend/webpack.config.js index 3fbb413d9af4..066e69286e65 100644 --- a/superset-frontend/webpack.config.js +++ b/superset-frontend/webpack.config.js @@ -158,15 +158,7 @@ if (!isDevMode) { plugins.push( // runs type checking on a separate process to speed up the build - new ForkTsCheckerWebpackPlugin({ - eslint: { - files: './{src,packages,plugins}/**/*.{ts,tsx,js,jsx}', - memoryLimit: 4096, - options: { - ignorePath: './.eslintignore', - }, - }, - }), + ); } diff --git a/superset/cachekeys/api.py b/superset/cachekeys/api.py index 093d81b1c3f7..365b79c42323 100644 --- a/superset/cachekeys/api.py +++ b/superset/cachekeys/api.py @@ -114,8 +114,8 @@ def invalidate(self) -> Response: CacheKey.cache_key.in_(cache_keys) ) - with db.session.begin_nested(): - db.session.execute(delete_stmt) + db.session.execute(delete_stmt) + db.session.commit() # pylint: disable=consider-using-transaction stats_logger_manager.instance.gauge( "invalidated_cache", len(cache_keys) @@ -126,6 +126,7 @@ def invalidate(self) -> Response: len(datasource_uids), ) except SQLAlchemyError as ex: # pragma: no cover + db.session.rollback() # pylint: disable=consider-using-transaction logger.error(ex, exc_info=True) return self.response_500(str(ex)) return self.response(201) diff --git a/superset/cachekeys/schemas.py b/superset/cachekeys/schemas.py index e58a45ac565b..d31e40b7d407 100644 --- a/superset/cachekeys/schemas.py +++ b/superset/cachekeys/schemas.py @@ -32,6 +32,10 @@ class Datasource(Schema): datasource_name = fields.String( metadata={"description": datasource_name_description}, ) + catalog = fields.String( + allow_none=True, + metadata={"description": "Datasource catalog"}, + ) schema = fields.String( metadata={"description": 
"Datasource schema"}, ) diff --git a/superset/charts/api.py b/superset/charts/api.py index d814d0fa02a9..31bfdca45523 100644 --- a/superset/charts/api.py +++ b/superset/charts/api.py @@ -66,7 +66,9 @@ DashboardsForbiddenError, ) from superset.commands.chart.export import ExportChartsCommand +from superset.commands.chart.fave import AddFavoriteChartCommand from superset.commands.chart.importers.dispatcher import ImportChartsCommand +from superset.commands.chart.unfave import DelFavoriteChartCommand from superset.commands.chart.update import UpdateChartCommand from superset.commands.chart.warm_up_cache import ChartWarmUpCacheCommand from superset.commands.exceptions import CommandException, TagForbiddenError @@ -199,6 +201,7 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: "tags.id", "tags.name", "tags.type", + "uuid", ] list_select_columns = list_columns + ["changed_by_fk", "changed_on"] order_columns = [ @@ -269,10 +272,12 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: base_related_field_filters = { "owners": [["id", BaseFilterRelatedUsers, lambda: []]], "created_by": [["id", BaseFilterRelatedUsers, lambda: []]], + "changed_by": [["id", BaseFilterRelatedUsers, lambda: []]], } related_field_filters = { "owners": RelatedFieldFilter("first_name", FilterRelatedOwners), "created_by": RelatedFieldFilter("first_name", FilterRelatedOwners), + "changed_by": RelatedFieldFilter("first_name", FilterRelatedOwners), } allowed_rel_fields = {"owners", "created_by", "changed_by"} @@ -896,11 +901,13 @@ def add_favorite(self, pk: int) -> Response: 500: $ref: '#/components/responses/500' """ - chart = ChartDAO.find_by_id(pk) - if not chart: + try: + AddFavoriteChartCommand(pk).run() + except ChartNotFoundError: return self.response_404() + except ChartForbiddenError: + return self.response_403() - ChartDAO.add_favorite(chart) return self.response(200, result="OK") @expose("/<pk>/favorites/", methods=("DELETE",)) @@ -939,11 +946,13 @@ def 
remove_favorite(self, pk: int) -> Response: 500: $ref: '#/components/responses/500' """ - chart = ChartDAO.find_by_id(pk) - if not chart: - return self.response_404() + try: + DelFavoriteChartCommand(pk).run() + except ChartNotFoundError: + self.response_404() + except ChartForbiddenError: + self.response_403() - ChartDAO.remove_favorite(chart) return self.response(200, result="OK") @expose("/warm_up_cache", methods=("PUT",)) diff --git a/superset/charts/data/api.py b/superset/charts/data/api.py index ae88fdef5ad1..653b09896e86 100644 --- a/superset/charts/data/api.py +++ b/superset/charts/data/api.py @@ -394,8 +394,13 @@ def _process_data(query_data: Any) -> Any: ) if result_format == ChartDataResultFormat.JSON: + queries = result["queries"] + if security_manager.is_guest_user(): + for query in queries: + with contextlib.suppress(KeyError): + del query["query"] response_data = json.dumps( - {"result": result["queries"]}, + {"result": queries}, default=json.json_int_dttm_ser, ignore_nan=True, ) diff --git a/superset/charts/post_processing.py b/superset/charts/post_processing.py index ebcae32f8f48..4c5abd8db19f 100644 --- a/superset/charts/post_processing.py +++ b/superset/charts/post_processing.py @@ -29,6 +29,7 @@ from io import StringIO from typing import Any, Optional, TYPE_CHECKING, Union +import numpy as np import pandas as pd from flask_babel import gettext as __ @@ -83,10 +84,11 @@ def pivot_df( # pylint: disable=too-many-locals, too-many-arguments, too-many-s else: axis = {"columns": 1, "rows": 0} + # pivoting with null values will create an empty df + df = df.fillna("SUPERSET_PANDAS_NAN") + # pivot data; we'll compute totals and subtotals later if rows or columns: - # pivoting with null values will create an empty df - df = df.fillna("NULL") df = df.pivot_table( index=rows, columns=columns, @@ -151,6 +153,18 @@ def pivot_df( # pylint: disable=too-many-locals, too-many-arguments, too-many-s # add subtotal for each group and overall total; we start from the 
# overall group, and iterate deeper into subgroups groups = df.columns + if not apply_metrics_on_rows: + for col in df.columns: + # we need to replace the temporary placeholder with either a string + # or np.nan, depending on the column type so that they can sum correctly + if pd.api.types.is_numeric_dtype(df[col]): + df[col].replace("SUPERSET_PANDAS_NAN", np.nan, inplace=True) + else: + df[col].replace("SUPERSET_PANDAS_NAN", "nan", inplace=True) + else: + # when we applied metrics on rows, we switched the columns and rows + # so checking column type doesn't apply. Replace everything with np.nan + df.replace("SUPERSET_PANDAS_NAN", np.nan, inplace=True) for level in range(df.columns.nlevels): subgroups = {group[:level] for group in groups} for subgroup in subgroups: @@ -171,7 +185,7 @@ def pivot_df( # pylint: disable=too-many-locals, too-many-arguments, too-many-s for subgroup in subgroups: slice_ = df.index.get_loc(subgroup) subtotal = pivot_v2_aggfunc_map[aggfunc]( - df.iloc[slice_, :].apply(pd.to_numeric), axis=0 + df.iloc[slice_, :].apply(pd.to_numeric, errors="coerce"), axis=0 ) depth = df.index.nlevels - len(subgroup) - 1 total = metric_name if level == 0 else __("Subtotal") @@ -186,6 +200,14 @@ def pivot_df( # pylint: disable=too-many-locals, too-many-arguments, too-many-s if apply_metrics_on_rows: df = df.T + # replace the remaining temporary placeholder string for np.nan after pivoting + df.replace("SUPERSET_PANDAS_NAN", np.nan, inplace=True) + df.rename( + index={"SUPERSET_PANDAS_NAN": np.nan}, + columns={"SUPERSET_PANDAS_NAN": np.nan}, + inplace=True, + ) + return df diff --git a/superset/commands/chart/exceptions.py b/superset/commands/chart/exceptions.py index 00877aa80307..72ef71f466e8 100644 --- a/superset/commands/chart/exceptions.py +++ b/superset/commands/chart/exceptions.py @@ -154,3 +154,11 @@ class DashboardsForbiddenError(ForbiddenError): class WarmUpCacheChartNotFoundError(CommandException): status = 404 message = _("Chart not found") + + 
+class ChartFaveError(CommandException): + message = _("Error faving chart") + + +class ChartUnfaveError(CommandException): + message = _("Error unfaving chart") diff --git a/superset/commands/chart/fave.py b/superset/commands/chart/fave.py new file mode 100644 index 000000000000..d45d1694761c --- /dev/null +++ b/superset/commands/chart/fave.py @@ -0,0 +1,57 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import logging +from functools import partial + +from requests_cache import Optional + +from superset import security_manager +from superset.commands.base import BaseCommand +from superset.commands.chart.exceptions import ( + ChartFaveError, + ChartForbiddenError, + ChartNotFoundError, +) +from superset.daos.chart import ChartDAO +from superset.exceptions import SupersetSecurityException +from superset.models.slice import Slice +from superset.utils.decorators import on_error, transaction + +logger = logging.getLogger(__name__) + + +class AddFavoriteChartCommand(BaseCommand): + def __init__(self, chart_id: int) -> None: + self._chart_id = chart_id + self._chart: Optional[Slice] = None + + @transaction(on_error=partial(on_error, reraise=ChartFaveError)) + def run(self) -> None: + self.validate() + return ChartDAO.add_favorite(self._chart) + + def validate(self) -> None: + chart = ChartDAO.find_by_id(self._chart_id) + if not chart: + raise ChartNotFoundError() + + try: + security_manager.raise_for_ownership(chart) + except SupersetSecurityException as ex: + raise ChartForbiddenError() from ex + + self._chart = chart diff --git a/superset/commands/chart/importers/v1/__init__.py b/superset/commands/chart/importers/v1/__init__.py index 89fe5e7a700b..dc5a7079669a 100644 --- a/superset/commands/chart/importers/v1/__init__.py +++ b/superset/commands/chart/importers/v1/__init__.py @@ -26,6 +26,7 @@ from superset.commands.database.importers.v1.utils import import_database from superset.commands.dataset.importers.v1.utils import import_dataset from superset.commands.importers.v1 import ImportModelsCommand +from superset.commands.utils import update_chart_config_dataset from superset.connectors.sqla.models import SqlaTable from superset.daos.chart import ChartDAO from superset.databases.schemas import ImportV1DatabaseSchema @@ -86,16 +87,10 @@ def _import(configs: dict[str, Any], overwrite: bool = False) -> None: # update datasource id, type, and name dataset = 
datasets[config["dataset_uuid"]] - config.update( - { - "datasource_id": dataset.id, - "datasource_type": "table", - "datasource_name": dataset.table_name, - } - ) - config["params"].update({"datasource": dataset.uid}) - - if "query_context" in config: - config["query_context"] = None - + dataset_dict = { + "datasource_id": dataset.id, + "datasource_type": "table", + "datasource_name": dataset.table_name, + } + config = update_chart_config_dataset(config, dataset_dict) import_chart(config, overwrite=overwrite) diff --git a/superset/commands/chart/importers/v1/utils.py b/superset/commands/chart/importers/v1/utils.py index 35a7f6e2700f..dacef7c1e32f 100644 --- a/superset/commands/chart/importers/v1/utils.py +++ b/superset/commands/chart/importers/v1/utils.py @@ -50,9 +50,12 @@ def import_chart( ) -> Slice: can_write = ignore_permissions or security_manager.can_access("can_write", "Chart") existing = db.session.query(Slice).filter_by(uuid=config["uuid"]).first() + user = get_user() if existing: - if overwrite and can_write and get_user(): - if not security_manager.can_access_chart(existing): + if overwrite and can_write and user: + if not security_manager.can_access_chart(existing) or ( + user not in existing.owners and not security_manager.is_admin() + ): raise ImportFailedError( "A chart already exists and user doesn't " "have permissions to overwrite it" diff --git a/superset/commands/chart/unfave.py b/superset/commands/chart/unfave.py new file mode 100644 index 000000000000..d19d2b276999 --- /dev/null +++ b/superset/commands/chart/unfave.py @@ -0,0 +1,57 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import logging +from functools import partial + +from requests_cache import Optional + +from superset import security_manager +from superset.commands.base import BaseCommand +from superset.commands.chart.exceptions import ( + ChartForbiddenError, + ChartNotFoundError, + ChartUnfaveError, +) +from superset.daos.chart import ChartDAO +from superset.exceptions import SupersetSecurityException +from superset.models.slice import Slice +from superset.utils.decorators import on_error, transaction + +logger = logging.getLogger(__name__) + + +class DelFavoriteChartCommand(BaseCommand): + def __init__(self, chart_id: int) -> None: + self._chart_id = chart_id + self._chart: Optional[Slice] = None + + @transaction(on_error=partial(on_error, reraise=ChartUnfaveError)) + def run(self) -> None: + self.validate() + return ChartDAO.remove_favorite(self._chart) + + def validate(self) -> None: + chart = ChartDAO.find_by_id(self._chart_id) + if not chart: + raise ChartNotFoundError() + + try: + security_manager.raise_for_ownership(chart) + except SupersetSecurityException as ex: + raise ChartForbiddenError() from ex + + self._chart = chart diff --git a/superset/commands/dashboard/copy.py b/superset/commands/dashboard/copy.py new file mode 100644 index 000000000000..b694d3686788 --- /dev/null +++ b/superset/commands/dashboard/copy.py @@ -0,0 +1,53 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import logging +from functools import partial +from typing import Any + +from superset import is_feature_enabled, security_manager +from superset.commands.base import BaseCommand +from superset.commands.dashboard.exceptions import ( + DashboardCopyError, + DashboardForbiddenError, + DashboardInvalidError, +) +from superset.daos.dashboard import DashboardDAO +from superset.models.dashboard import Dashboard +from superset.utils.decorators import on_error, transaction + +logger = logging.getLogger(__name__) + + +class CopyDashboardCommand(BaseCommand): + def __init__(self, original_dash: Dashboard, data: dict[str, Any]) -> None: + self._original_dash = original_dash + self._properties = data.copy() + + @transaction(on_error=partial(on_error, reraise=DashboardCopyError)) + def run(self) -> Dashboard: + self.validate() + return DashboardDAO.copy_dashboard(self._original_dash, self._properties) + + def validate(self) -> None: + if not self._properties.get("dashboard_title") or not self._properties.get( + "json_metadata" + ): + raise DashboardInvalidError() + if is_feature_enabled("DASHBOARD_RBAC") and not security_manager.is_owner( + self._original_dash + ): + raise DashboardForbiddenError() diff --git a/superset/commands/dashboard/delete.py b/superset/commands/dashboard/delete.py index 0135c4303f29..fd53519a5083 100644 --- a/superset/commands/dashboard/delete.py +++ 
b/superset/commands/dashboard/delete.py @@ -23,12 +23,13 @@ from superset import security_manager from superset.commands.base import BaseCommand from superset.commands.dashboard.exceptions import ( + DashboardDeleteEmbeddedFailedError, DashboardDeleteFailedError, DashboardDeleteFailedReportsExistError, DashboardForbiddenError, DashboardNotFoundError, ) -from superset.daos.dashboard import DashboardDAO +from superset.daos.dashboard import DashboardDAO, EmbeddedDashboardDAO from superset.daos.report import ReportScheduleDAO from superset.exceptions import SupersetSecurityException from superset.models.dashboard import Dashboard @@ -37,6 +38,19 @@ logger = logging.getLogger(__name__) +class DeleteEmbeddedDashboardCommand(BaseCommand): + def __init__(self, dashboard: Dashboard): + self._dashboard = dashboard + + @transaction(on_error=partial(on_error, reraise=DashboardDeleteEmbeddedFailedError)) + def run(self) -> None: + self.validate() + return EmbeddedDashboardDAO.delete(self._dashboard.embedded) + + def validate(self) -> None: + pass + + class DeleteDashboardCommand(BaseCommand): def __init__(self, model_ids: list[int]): self._model_ids = model_ids diff --git a/superset/commands/dashboard/exceptions.py b/superset/commands/dashboard/exceptions.py index 19184b894c29..9281119b320b 100644 --- a/superset/commands/dashboard/exceptions.py +++ b/superset/commands/dashboard/exceptions.py @@ -62,6 +62,10 @@ class DashboardDeleteFailedError(DeleteFailedError): message = _("Dashboard could not be deleted.") +class DashboardDeleteEmbeddedFailedError(DeleteFailedError): + message = _("Embedded dashboard could not be deleted.") + + class DashboardDeleteFailedReportsExistError(DashboardDeleteFailedError): message = _("There are associated alerts or reports") @@ -76,3 +80,15 @@ class DashboardImportError(ImportFailedError): class DashboardAccessDeniedError(ForbiddenError): message = _("You don't have access to this dashboard.") + + +class DashboardCopyError(CommandInvalidError): + 
message = _("Dashboard cannot be copied due to invalid parameters.") + + +class DashboardFaveError(CommandInvalidError): + message = _("Dashboard cannot be favorited.") + + +class DashboardUnfaveError(CommandInvalidError): + message = _("Dashboard cannot be unfavorited.") diff --git a/superset/commands/dashboard/export.py b/superset/commands/dashboard/export.py index fe3a6b8eab87..93cc490ad73d 100644 --- a/superset/commands/dashboard/export.py +++ b/superset/commands/dashboard/export.py @@ -130,6 +130,18 @@ def _file_content(model: Dashboard) -> str: logger.info("Unable to decode `%s` field: %s", key, value) payload[new_name] = {} + # Extract all native filter datasets and replace native + # filter dataset references with uuid + for native_filter in payload.get("metadata", {}).get( + "native_filter_configuration", [] + ): + for target in native_filter.get("targets", []): + dataset_id = target.pop("datasetId", None) + if dataset_id is not None: + dataset = DatasetDAO.find_by_id(dataset_id) + if dataset: + target["datasetUuid"] = str(dataset.uuid) + # the mapping between dashboard -> charts is inferred from the position # attribute, so if it's not present we need to add a default config if not payload.get("position"): @@ -180,16 +192,14 @@ def _export( logger.info("Unable to decode `%s` field: %s", key, value) payload[new_name] = {} - # Extract all native filter datasets and replace native - # filter dataset references with uuid - for native_filter in payload.get("metadata", {}).get( - "native_filter_configuration", [] - ): - for target in native_filter.get("targets", []): - dataset_id = target.pop("datasetId", None) - if dataset_id is not None: - dataset = DatasetDAO.find_by_id(dataset_id) - if dataset: - target["datasetUuid"] = str(dataset.uuid) - if export_related: + if export_related: + # Extract all native filter datasets and export referenced datasets + for native_filter in payload.get("metadata", {}).get( + "native_filter_configuration", [] + ): + for target 
in native_filter.get("targets", []): + dataset_id = target.pop("datasetId", None) + if dataset_id is not None: + dataset = DatasetDAO.find_by_id(dataset_id) + if dataset: yield from ExportDatasetsCommand([dataset_id]).run() diff --git a/superset/commands/dashboard/fave.py b/superset/commands/dashboard/fave.py new file mode 100644 index 000000000000..e3050c729dbe --- /dev/null +++ b/superset/commands/dashboard/fave.py @@ -0,0 +1,46 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import logging +from functools import partial + +from requests_cache import Optional + +from superset.commands.base import BaseCommand +from superset.commands.dashboard.exceptions import ( + DashboardFaveError, +) +from superset.daos.dashboard import DashboardDAO +from superset.models.dashboard import Dashboard +from superset.utils.decorators import on_error, transaction + +logger = logging.getLogger(__name__) + + +class AddFavoriteDashboardCommand(BaseCommand): + def __init__(self, dashboard_id: int) -> None: + self._dashboard_id = dashboard_id + self._dashboard: Optional[Dashboard] = None + + @transaction(on_error=partial(on_error, reraise=DashboardFaveError)) + def run(self) -> None: + self.validate() + return DashboardDAO.add_favorite(self._dashboard) + + def validate(self) -> None: + # Raises DashboardNotFoundError or DashboardAccessDeniedError + dashboard = DashboardDAO.get_by_id_or_slug(self._dashboard_id) + self._dashboard = dashboard diff --git a/superset/commands/dashboard/importers/v1/__init__.py b/superset/commands/dashboard/importers/v1/__init__.py index 48b4e93e8cf3..18cbb7da8407 100644 --- a/superset/commands/dashboard/importers/v1/__init__.py +++ b/superset/commands/dashboard/importers/v1/__init__.py @@ -34,6 +34,7 @@ from superset.commands.database.importers.v1.utils import import_database from superset.commands.dataset.importers.v1.utils import import_dataset from superset.commands.importers.v1 import ImportModelsCommand +from superset.commands.utils import update_chart_config_dataset from superset.daos.dashboard import DashboardDAO from superset.dashboards.schemas import ImportV1DashboardSchema from superset.databases.schemas import ImportV1DatabaseSchema @@ -113,11 +114,7 @@ def _import(configs: dict[str, Any], overwrite: bool = False) -> None: ): # update datasource id, type, and name dataset_dict = dataset_info[config["dataset_uuid"]] - config.update(dataset_dict) - dataset_uid = 
f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}" - config["params"].update({"datasource": dataset_uid}) - if "query_context" in config: - config["query_context"] = None + config = update_chart_config_dataset(config, dataset_dict) chart = import_chart(config, overwrite=False) charts.append(chart) diff --git a/superset/commands/dashboard/importers/v1/utils.py b/superset/commands/dashboard/importers/v1/utils.py index 5e949093b8a8..262e516d04af 100644 --- a/superset/commands/dashboard/importers/v1/utils.py +++ b/superset/commands/dashboard/importers/v1/utils.py @@ -153,9 +153,12 @@ def import_dashboard( "Dashboard", ) existing = db.session.query(Dashboard).filter_by(uuid=config["uuid"]).first() + user = get_user() if existing: - if overwrite and can_write and get_user(): - if not security_manager.can_access_dashboard(existing): + if overwrite and can_write and user: + if not security_manager.can_access_dashboard(existing) or ( + user not in existing.owners and not security_manager.is_admin() + ): raise ImportFailedError( "A dashboard already exists and user doesn't " "have permissions to overwrite it" diff --git a/superset/commands/dashboard/unfave.py b/superset/commands/dashboard/unfave.py new file mode 100644 index 000000000000..811a2cdd1e68 --- /dev/null +++ b/superset/commands/dashboard/unfave.py @@ -0,0 +1,46 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import logging +from functools import partial + +from requests_cache import Optional + +from superset.commands.base import BaseCommand +from superset.commands.dashboard.exceptions import ( + DashboardUnfaveError, +) +from superset.daos.dashboard import DashboardDAO +from superset.models.dashboard import Dashboard +from superset.utils.decorators import on_error, transaction + +logger = logging.getLogger(__name__) + + +class DelFavoriteDashboardCommand(BaseCommand): + def __init__(self, dashboard_id: int) -> None: + self._dashboard_id = dashboard_id + self._dashboard: Optional[Dashboard] = None + + @transaction(on_error=partial(on_error, reraise=DashboardUnfaveError)) + def run(self) -> None: + self.validate() + return DashboardDAO.remove_favorite(self._dashboard) + + def validate(self) -> None: + # Raises DashboardNotFoundError or DashboardAccessDeniedError + dashboard = DashboardDAO.get_by_id_or_slug(self._dashboard_id) + self._dashboard = dashboard diff --git a/superset/commands/database/create.py b/superset/commands/database/create.py index 76dd6087be58..12d45224a3e7 100644 --- a/superset/commands/database/create.py +++ b/superset/commands/database/create.py @@ -39,10 +39,11 @@ SSHTunnelInvalidError, ) from superset.commands.database.test_connection import TestConnectionDatabaseCommand +from superset.commands.database.utils import add_permissions from superset.daos.database import DatabaseDAO from superset.databases.ssh_tunnel.models import SSHTunnel from superset.exceptions import SupersetErrorsException -from superset.extensions import event_logger, 
security_manager +from superset.extensions import event_logger from superset.models.core import Database from superset.utils.decorators import on_error, transaction @@ -99,28 +100,7 @@ def run(self) -> Model: ).run() # add catalog/schema permissions - if database.db_engine_spec.supports_catalog: - catalogs = database.get_all_catalog_names( - cache=False, - ssh_tunnel=ssh_tunnel, - ) - for catalog in catalogs: - security_manager.add_permission_view_menu( - "catalog_access", - security_manager.get_catalog_perm( - database.database_name, catalog - ), - ) - else: - # add a dummy catalog for DBs that don't support them - catalogs = [None] - - for catalog in catalogs: - try: - self.add_schema_permissions(database, catalog, ssh_tunnel) - except Exception: # pylint: disable=broad-except - logger.warning("Error processing catalog '%s'", catalog) - continue + add_permissions(database, ssh_tunnel) except ( SSHTunnelInvalidError, SSHTunnelCreateFailedError, @@ -148,26 +128,6 @@ def run(self) -> Model: return database - def add_schema_permissions( - self, - database: Database, - catalog: str, - ssh_tunnel: Optional[SSHTunnel], - ) -> None: - for schema in database.get_all_schema_names( - catalog=catalog, - cache=False, - ssh_tunnel=ssh_tunnel, - ): - security_manager.add_permission_view_menu( - "schema_access", - security_manager.get_schema_perm( - database.database_name, - catalog, - schema, - ), - ) - def validate(self) -> None: exceptions: list[ValidationError] = [] sqlalchemy_uri: Optional[str] = self._properties.get("sqlalchemy_uri") diff --git a/superset/commands/database/importers/v1/utils.py b/superset/commands/database/importers/v1/utils.py index 56d31b03e199..0098bfa26d45 100644 --- a/superset/commands/database/importers/v1/utils.py +++ b/superset/commands/database/importers/v1/utils.py @@ -15,17 +15,22 @@ # specific language governing permissions and limitations # under the License. 
+import logging from typing import Any from superset import app, db, security_manager +from superset.commands.database.utils import add_permissions from superset.commands.exceptions import ImportFailedError from superset.databases.ssh_tunnel.models import SSHTunnel from superset.databases.utils import make_url_safe +from superset.db_engine_specs.exceptions import SupersetDBAPIConnectionError from superset.exceptions import SupersetSecurityException from superset.models.core import Database from superset.security.analytics_db_safety import check_sqlalchemy_uri from superset.utils import json +logger = logging.getLogger(__name__) + def import_database( config: dict[str, Any], @@ -62,14 +67,23 @@ def import_database( config["extra"] = json.dumps(config["extra"]) # Before it gets removed in import_from_dict - ssh_tunnel = config.pop("ssh_tunnel", None) + ssh_tunnel_config = config.pop("ssh_tunnel", None) - database = Database.import_from_dict(config, recursive=False) + database: Database = Database.import_from_dict(config, recursive=False) if database.id is None: db.session.flush() - if ssh_tunnel: - ssh_tunnel["database_id"] = database.id - SSHTunnel.import_from_dict(ssh_tunnel, recursive=False) + if ssh_tunnel_config: + ssh_tunnel_config["database_id"] = database.id + ssh_tunnel = SSHTunnel.import_from_dict(ssh_tunnel_config, recursive=False) + else: + ssh_tunnel = None + + # TODO (betodealmeida): we should use the `CreateDatabaseCommand` for imports + + try: + add_permissions(database, ssh_tunnel) + except SupersetDBAPIConnectionError as ex: + logger.warning(ex.message) return database diff --git a/superset/commands/database/update.py b/superset/commands/database/update.py index 28f895b2f632..0fc31c096a06 100644 --- a/superset/commands/database/update.py +++ b/superset/commands/database/update.py @@ -41,6 +41,7 @@ from superset.daos.database import DatabaseDAO from superset.daos.dataset import DatasetDAO from superset.databases.ssh_tunnel.models import SSHTunnel 
+from superset.db_engine_specs.base import GenericDBException from superset.models.core import Database from superset.utils.decorators import on_error, transaction @@ -80,6 +81,7 @@ def run(self) -> Model: database.set_sqlalchemy_uri(database.sqlalchemy_uri) ssh_tunnel = self._handle_ssh_tunnel(database) self._refresh_catalogs(database, original_database_name, ssh_tunnel) + return database def _handle_ssh_tunnel(self, database: Database) -> SSHTunnel | None: @@ -115,17 +117,13 @@ def _get_catalog_names( ) -> set[str]: """ Helper method to load catalogs. - - This method captures a generic exception, since errors could potentially come - from any of the 50+ database drivers we support. """ - try: return database.get_all_catalog_names( force=True, ssh_tunnel=ssh_tunnel, ) - except Exception as ex: + except GenericDBException as ex: raise DatabaseConnectionFailedError() from ex def _get_schema_names( @@ -136,18 +134,14 @@ def _get_schema_names( ) -> set[str]: """ Helper method to load schemas. - - This method captures a generic exception, since errors could potentially come - from any of the 50+ database drivers we support. 
""" - try: return database.get_all_schema_names( force=True, catalog=catalog, ssh_tunnel=ssh_tunnel, ) - except Exception as ex: + except GenericDBException as ex: raise DatabaseConnectionFailedError() from ex def _refresh_catalogs( @@ -166,36 +160,43 @@ def _refresh_catalogs( ) for catalog in catalogs: - schemas = self._get_schema_names(database, catalog, ssh_tunnel) + try: + schemas = self._get_schema_names(database, catalog, ssh_tunnel) - if catalog: - perm = security_manager.get_catalog_perm( - original_database_name, - catalog, - ) - existing_pvm = security_manager.find_permission_view_menu( - "catalog_access", - perm, - ) - if not existing_pvm: - # new catalog - security_manager.add_permission_view_menu( + if catalog: + perm = security_manager.get_catalog_perm( + original_database_name, + catalog, + ) + existing_pvm = security_manager.find_permission_view_menu( "catalog_access", - security_manager.get_catalog_perm( - database.database_name, - catalog, - ), + perm, ) - for schema in schemas: + if not existing_pvm: + # new catalog security_manager.add_permission_view_menu( - "schema_access", - security_manager.get_schema_perm( + "catalog_access", + security_manager.get_catalog_perm( database.database_name, catalog, - schema, ), ) + for schema in schemas: + security_manager.add_permission_view_menu( + "schema_access", + security_manager.get_schema_perm( + database.database_name, + catalog, + schema, + ), + ) + continue + except DatabaseConnectionFailedError: + # more than one catalog, move to next + if catalog: + logger.warning("Error processing catalog %s", catalog) continue + raise # add possible new schemas in catalog self._refresh_schemas( @@ -248,7 +249,7 @@ def _rename_database_in_permissions( catalog: str | None, schemas: set[str], ) -> None: - new_name = security_manager.get_catalog_perm( + new_catalog_perm_name = security_manager.get_catalog_perm( database.database_name, catalog, ) @@ -264,10 +265,10 @@ def _rename_database_in_permissions( perm, ) if 
existing_pvm: - existing_pvm.view_menu.name = new_name + existing_pvm.view_menu.name = new_catalog_perm_name for schema in schemas: - new_name = security_manager.get_schema_perm( + new_schema_perm_name = security_manager.get_schema_perm( database.database_name, catalog, schema, @@ -284,7 +285,7 @@ def _rename_database_in_permissions( perm, ) if existing_pvm: - existing_pvm.view_menu.name = new_name + existing_pvm.view_menu.name = new_schema_perm_name # rename permissions on datasets and charts for dataset in DatabaseDAO.get_datasets( @@ -292,9 +293,11 @@ def _rename_database_in_permissions( catalog=catalog, schema=schema, ): - dataset.schema_perm = new_name + dataset.catalog_perm = new_catalog_perm_name + dataset.schema_perm = new_schema_perm_name for chart in DatasetDAO.get_related_objects(dataset.id)["charts"]: - chart.schema_perm = new_name + chart.catalog_perm = new_catalog_perm_name + chart.schema_perm = new_schema_perm_name def validate(self) -> None: if database_name := self._properties.get("database_name"): diff --git a/superset/commands/database/utils.py b/superset/commands/database/utils.py new file mode 100644 index 000000000000..ea0ce1a27e27 --- /dev/null +++ b/superset/commands/database/utils.py @@ -0,0 +1,67 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +from __future__ import annotations + +import logging + +from superset import security_manager +from superset.databases.ssh_tunnel.models import SSHTunnel +from superset.db_engine_specs.base import GenericDBException +from superset.models.core import Database + +logger = logging.getLogger(__name__) + + +def add_permissions(database: Database, ssh_tunnel: SSHTunnel | None) -> None: + """ + Add DAR for catalogs and schemas. + """ + if database.db_engine_spec.supports_catalog: + catalogs = database.get_all_catalog_names( + cache=False, + ssh_tunnel=ssh_tunnel, + ) + + for catalog in catalogs: + security_manager.add_permission_view_menu( + "catalog_access", + security_manager.get_catalog_perm( + database.database_name, + catalog, + ), + ) + else: + catalogs = [None] + + for catalog in catalogs: + try: + for schema in database.get_all_schema_names( + catalog=catalog, + cache=False, + ssh_tunnel=ssh_tunnel, + ): + security_manager.add_permission_view_menu( + "schema_access", + security_manager.get_schema_perm( + database.database_name, + catalog, + schema, + ), + ) + except GenericDBException: # pylint: disable=broad-except + logger.warning("Error processing catalog '%s'", catalog) + continue diff --git a/superset/commands/dataset/create.py b/superset/commands/dataset/create.py index a2d81e548bfb..ae6a0af4ed32 100644 --- a/superset/commands/dataset/create.py +++ b/superset/commands/dataset/create.py @@ -54,23 +54,28 @@ def run(self) -> Model: def validate(self) -> None: exceptions: list[ValidationError] = [] database_id = self._properties["database"] - schema = self._properties.get("schema") catalog = self._properties.get("catalog") + schema = self._properties.get("schema") + table_name = self._properties["table_name"] sql = self._properties.get("sql") owner_ids: Optional[list[int]] = self._properties.get("owners") - table = Table(self._properties["table_name"], 
schema, catalog) - - # Validate uniqueness - if not DatasetDAO.validate_uniqueness(database_id, table): - exceptions.append(DatasetExistsValidationError(table)) - # Validate/Populate database database = DatasetDAO.get_database_by_id(database_id) if not database: exceptions.append(DatabaseNotFoundValidationError()) self._properties["database"] = database + # Validate uniqueness + if database: + if not catalog: + catalog = self._properties["catalog"] = database.get_default_catalog() + + table = Table(table_name, schema, catalog) + + if not DatasetDAO.validate_uniqueness(database, table): + exceptions.append(DatasetExistsValidationError(table)) + # Validate table exists on dataset if sql is not provided # This should be validated when the dataset is physical if ( diff --git a/superset/commands/dataset/importers/v1/utils.py b/superset/commands/dataset/importers/v1/utils.py index 1c508fe2522e..e4dbdfe8f17c 100644 --- a/superset/commands/dataset/importers/v1/utils.py +++ b/superset/commands/dataset/importers/v1/utils.py @@ -102,6 +102,7 @@ def validate_data_uri(data_uri: str) -> None: raise DatasetForbiddenDataURI() +# pylint: disable=too-many-branches def import_dataset( config: dict[str, Any], overwrite: bool = False, @@ -113,10 +114,18 @@ def import_dataset( "Dataset", ) existing = db.session.query(SqlaTable).filter_by(uuid=config["uuid"]).first() + user = get_user() if existing: + if overwrite and can_write and user: + if user not in existing.owners and not security_manager.is_admin(): + raise ImportFailedError( + "A dataset already exists and user doesn't " + "have permissions to overwrite it" + ) if not overwrite or not can_write: return existing config["id"] = existing.id + elif not can_write: raise ImportFailedError( "Dataset doesn't exist and user doesn't have permission to create datasets" @@ -166,7 +175,7 @@ def import_dataset( try: table_exists = dataset.database.has_table( - Table(dataset.table_name, dataset.schema), + Table(dataset.table_name, 
dataset.schema, dataset.catalog), ) except Exception: # pylint: disable=broad-except # MySQL doesn't play nice with GSheets table names diff --git a/superset/commands/dataset/update.py b/superset/commands/dataset/update.py index 14d1c5ef4470..2772cc0ffa1f 100644 --- a/superset/commands/dataset/update.py +++ b/superset/commands/dataset/update.py @@ -79,10 +79,12 @@ def run(self) -> Model: def validate(self) -> None: exceptions: list[ValidationError] = [] owner_ids: Optional[list[int]] = self._properties.get("owners") + # Validate/populate model exists self._model = DatasetDAO.find_by_id(self._model_id) if not self._model: raise DatasetNotFoundError() + # Check ownership try: security_manager.raise_for_ownership(self._model) @@ -91,22 +93,30 @@ def validate(self) -> None: database_id = self._properties.get("database") + catalog = self._properties.get("catalog") + if not catalog: + catalog = self._properties["catalog"] = ( + self._model.database.get_default_catalog() + ) + table = Table( self._properties.get("table_name"), # type: ignore self._properties.get("schema"), - self._properties.get("catalog"), + catalog, ) # Validate uniqueness if not DatasetDAO.validate_update_uniqueness( - self._model.database_id, + self._model.database, table, self._model_id, ): exceptions.append(DatasetExistsValidationError(table)) + # Validate/Populate database not allowed to change if database_id and database_id != self._model: exceptions.append(DatabaseChangeValidationError()) + # Validate/Populate owner try: owners = self.compute_owners( @@ -116,6 +126,7 @@ def validate(self) -> None: self._properties["owners"] = owners except ValidationError as ex: exceptions.append(ex) + # Validate columns if columns := self._properties.get("columns"): self._validate_columns(columns, exceptions) diff --git a/superset/commands/importers/v1/assets.py b/superset/commands/importers/v1/assets.py index 78a2251a293a..c0be04a66fb3 100644 --- a/superset/commands/importers/v1/assets.py +++ 
b/superset/commands/importers/v1/assets.py @@ -39,6 +39,7 @@ validate_metadata_type, ) from superset.commands.query.importers.v1.utils import import_saved_query +from superset.commands.utils import update_chart_config_dataset from superset.dashboards.schemas import ImportV1DashboardSchema from superset.databases.schemas import ImportV1DatabaseSchema from superset.datasets.schemas import ImportV1DatasetSchema @@ -113,11 +114,7 @@ def _import(configs: dict[str, Any]) -> None: for file_name, config in configs.items(): if file_name.startswith("charts/"): dataset_dict = dataset_info[config["dataset_uuid"]] - config.update(dataset_dict) - dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}" - config["params"].update({"datasource": dataset_uid}) - if "query_context" in config: - config["query_context"] = None + config = update_chart_config_dataset(config, dataset_dict) chart = import_chart(config, overwrite=True) charts.append(chart) chart_ids[str(chart.uuid)] = chart.id diff --git a/superset/commands/logs/prune.py b/superset/commands/logs/prune.py new file mode 100644 index 000000000000..9ad031ea5163 --- /dev/null +++ b/superset/commands/logs/prune.py @@ -0,0 +1,111 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +import logging +import time +from datetime import datetime, timedelta + +import sqlalchemy as sa + +from superset import db +from superset.commands.base import BaseCommand +from superset.models.core import Log + +logger = logging.getLogger(__name__) + + +# pylint: disable=consider-using-transaction +class LogPruneCommand(BaseCommand): + """ + Command to prune the logs table by deleting rows older than the specified retention period. + + This command deletes records from the `Log` table that have not been changed within the + specified number of days. It helps in maintaining the database by removing outdated entries + and freeing up space. + + Attributes: + retention_period_days (int): The number of days for which records should be retained. + Records older than this period will be deleted. + """ # noqa: E501 + + def __init__(self, retention_period_days: int): + """ + :param retention_period_days: Number of days to keep in the logs table + """ + self.retention_period_days = retention_period_days + + def run(self) -> None: + """ + Executes the prune command + """ + batch_size = 999 # SQLite has a IN clause limit of 999 + total_deleted = 0 + start_time = time.time() + + # Select all IDs that need to be deleted + ids_to_delete = ( + db.session.execute( + sa.select(Log.id).where( + Log.dttm + < datetime.now() - timedelta(days=self.retention_period_days) + ) + ) + .scalars() + .all() + ) + + total_rows = len(ids_to_delete) + + logger.info("Total rows to be deleted: %s", f"{total_rows:,}") + + next_logging_threshold = 1 + + # Iterate over the IDs in batches + for i in range(0, total_rows, batch_size): + batch_ids = ids_to_delete[i : i + batch_size] + + # Delete the selected batch using IN clause + result = db.session.execute(sa.delete(Log).where(Log.id.in_(batch_ids))) + + # Update the total number of deleted records + total_deleted += result.rowcount + + # Explicitly 
commit the transaction given that if an error occurs, we want to ensure that the # noqa: E501 + # records that have been deleted so far are committed + db.session.commit() + + # Log the number of deleted records every 1% increase in progress + percentage_complete = (total_deleted / total_rows) * 100 + if percentage_complete >= next_logging_threshold: + logger.info( + "Deleted %s rows from the logs table older than %s days (%d%% complete)", # noqa: E501 + f"{total_deleted:,}", + self.retention_period_days, + percentage_complete, + ) + next_logging_threshold += 1 + + elapsed_time = time.time() - start_time + minutes, seconds = divmod(elapsed_time, 60) + formatted_time = f"{int(minutes):02}:{int(seconds):02}" + logger.info( + "Pruning complete: %s rows deleted in %s", + f"{total_deleted:,}", + formatted_time, + ) + + def validate(self) -> None: + pass diff --git a/superset/commands/report/base.py b/superset/commands/report/base.py index 199f985d0d0e..b5ff4e6cf2e8 100644 --- a/superset/commands/report/base.py +++ b/superset/commands/report/base.py @@ -118,7 +118,7 @@ def validate_report_frequency( for _ in range(iterations): next_exec = next(schedule) - diff, current_exec = next_exec - current_exec, next_exec + diff, current_exec = next_exec - current_exec, next_exec # type: ignore if int(diff) < minimum_interval: raise ReportScheduleFrequencyNotAllowed( report_type=report_type, minimum_interval=minimum_interval diff --git a/superset/commands/report/execute.py b/superset/commands/report/execute.py index 3ec5bdfa97b2..a6f4bb19d80a 100644 --- a/superset/commands/report/execute.py +++ b/superset/commands/report/execute.py @@ -15,7 +15,6 @@ # specific language governing permissions and limitations # under the License. 
import logging -from copy import deepcopy from datetime import datetime, timedelta from typing import Any, Optional, Union from uuid import UUID @@ -67,6 +66,7 @@ from superset.reports.notifications.base import NotificationContent from superset.reports.notifications.exceptions import ( NotificationError, + NotificationParamException, SlackV1NotificationError, ) from superset.tasks.utils import get_executor @@ -132,15 +132,13 @@ def update_report_schedule_slack_v2(self) -> None: V2 uses ids instead of names for channels. """ try: - updated_recipients = [] for recipient in self._report_schedule.recipients: - recipient_copy = deepcopy(recipient) - if recipient_copy.type == ReportRecipientType.SLACK: - recipient_copy.type = ReportRecipientType.SLACKV2 - slack_recipients = json.loads(recipient_copy.recipient_config_json) + if recipient.type == ReportRecipientType.SLACK: + recipient.type = ReportRecipientType.SLACKV2 + slack_recipients = json.loads(recipient.recipient_config_json) # we need to ensure that existing reports can also fetch # ids from private channels - recipient_copy.recipient_config_json = json.dumps( + recipient.recipient_config_json = json.dumps( { "target": get_channels_with_search( slack_recipients["target"], @@ -151,9 +149,6 @@ def update_report_schedule_slack_v2(self) -> None: ) } ) - - updated_recipients.append(recipient_copy) - db.session.commit() # pylint: disable=consider-using-transaction except Exception as ex: logger.warning( "Failed to update slack recipients to v2: %s", str(ex), exc_info=True @@ -367,6 +362,7 @@ def _get_log_data(self) -> HeaderDataType: chart_id = None dashboard_id = None report_source = None + slack_channels = None if self._report_schedule.chart: report_source = ReportSourceFormat.CHART chart_id = self._report_schedule.chart_id @@ -374,6 +370,14 @@ def _get_log_data(self) -> HeaderDataType: report_source = ReportSourceFormat.DASHBOARD dashboard_id = self._report_schedule.dashboard_id + if self._report_schedule.recipients: 
+ slack_channels = [ + recipient.recipient_config_json + for recipient in self._report_schedule.recipients + if recipient.type + in [ReportRecipientType.SLACK, ReportRecipientType.SLACKV2] + ] + log_data: HeaderDataType = { "notification_type": self._report_schedule.type, "notification_source": report_source, @@ -381,6 +385,7 @@ def _get_log_data(self) -> HeaderDataType: "chart_id": chart_id, "dashboard_id": dashboard_id, "owners": self._report_schedule.owners, + "slack_channels": slack_channels, } return log_data @@ -421,6 +426,7 @@ def _get_notification_content(self) -> NotificationContent: name=self._report_schedule.name, text=error_text, header_data=header_data, + url=url, ) if ( @@ -486,7 +492,7 @@ def _send( recipient.type = ReportRecipientType.SLACKV2 notification = create_notification(recipient, notification_content) notification.send() - except UpdateFailedError as err: + except (UpdateFailedError, NotificationParamException) as err: # log the error but keep processing the report with SlackV1 logger.warning( "Failed to update slack recipients to v2: %s", str(err) @@ -528,13 +534,14 @@ def send_error(self, name: str, message: str) -> None: :raises: CommandException """ header_data = self._get_log_data() + url = self._get_url(user_friendly=True) logger.info( "header_data in notifications for alerts and reports %s, taskid, %s", header_data, self._execution_id, ) notification_content = NotificationContent( - name=name, text=message, header_data=header_data + name=name, text=message, header_data=header_data, url=url ) # filter recipients to recipients who are also owners diff --git a/superset/commands/sql_lab/execute.py b/superset/commands/sql_lab/execute.py index 0c3e33b52916..001d5609db44 100644 --- a/superset/commands/sql_lab/execute.py +++ b/superset/commands/sql_lab/execute.py @@ -17,7 +17,6 @@ # pylint: disable=too-few-public-methods, too-many-arguments from __future__ import annotations -import copy import logging from typing import Any, TYPE_CHECKING @@ 
-152,8 +151,6 @@ def _run_sql_json_exec_from_scratch(self) -> SqlJsonExecutionStatus: self._validate_access(query) self._execution_context.set_query(query) rendered_query = self._sql_query_render.render(self._execution_context) - validate_rendered_query = copy.copy(query) - validate_rendered_query.sql = rendered_query self._set_query_limit_if_required(rendered_query) self._query_dao.update( query, {"limit": self._execution_context.query.limit} diff --git a/superset/commands/sql_lab/query.py b/superset/commands/sql_lab/query.py new file mode 100644 index 000000000000..4f25be5feda9 --- /dev/null +++ b/superset/commands/sql_lab/query.py @@ -0,0 +1,111 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import logging +import time +from datetime import datetime, timedelta + +import sqlalchemy as sa + +from superset import db +from superset.commands.base import BaseCommand +from superset.models.sql_lab import Query + +logger = logging.getLogger(__name__) + + +# pylint: disable=consider-using-transaction +class QueryPruneCommand(BaseCommand): + """ + Command to prune the query table by deleting rows older than the specified retention period. 
+ + This command deletes records from the `Query` table that have not been changed within the + specified number of days. It helps in maintaining the database by removing outdated entries + and freeing up space. + + Attributes: + retention_period_days (int): The number of days for which records should be retained. + Records older than this period will be deleted. + """ + + def __init__(self, retention_period_days: int): + """ + :param retention_period_days: Number of days to keep in the query table + """ + self.retention_period_days = retention_period_days + + def run(self) -> None: + """ + Executes the prune command + """ + batch_size = 999 # SQLite has a IN clause limit of 999 + total_deleted = 0 + start_time = time.time() + + # Select all IDs that need to be deleted + ids_to_delete = ( + db.session.execute( + sa.select(Query.id).where( + Query.changed_on + < datetime.now() - timedelta(days=self.retention_period_days) + ) + ) + .scalars() + .all() + ) + + total_rows = len(ids_to_delete) + + logger.info("Total rows to be deleted: %s", f"{total_rows:,}") + + next_logging_threshold = 1 + + # Iterate over the IDs in batches + for i in range(0, total_rows, batch_size): + batch_ids = ids_to_delete[i : i + batch_size] + + # Delete the selected batch using IN clause + result = db.session.execute(sa.delete(Query).where(Query.id.in_(batch_ids))) + + # Update the total number of deleted records + total_deleted += result.rowcount + + # Explicitly commit the transaction given that if an error occurs, we want to ensure that the + # records that have been deleted so far are committed + db.session.commit() + + # Log the number of deleted records every 1% increase in progress + percentage_complete = (total_deleted / total_rows) * 100 + if percentage_complete >= next_logging_threshold: + logger.info( + "Deleted %s rows from the query table older than %s days (%d%% complete)", # noqa: E501 + f"{total_deleted:,}", + self.retention_period_days, + percentage_complete, + ) + 
next_logging_threshold += 1 + + elapsed_time = time.time() - start_time + minutes, seconds = divmod(elapsed_time, 60) + formatted_time = f"{int(minutes):02}:{int(seconds):02}" + logger.info( + "Pruning complete: %s rows deleted in %s", + f"{total_deleted:,}", + formatted_time, + ) + + def validate(self) -> None: + pass diff --git a/superset/commands/utils.py b/superset/commands/utils.py index 29a31aa2aa7a..7486d657adc6 100644 --- a/superset/commands/utils.py +++ b/superset/commands/utils.py @@ -17,7 +17,7 @@ from __future__ import annotations from collections import Counter -from typing import Optional, TYPE_CHECKING +from typing import Any, Optional, TYPE_CHECKING from flask import g from flask_appbuilder.security.sqla.models import Role, User @@ -34,6 +34,7 @@ from superset.daos.exceptions import DatasourceNotFound from superset.daos.tag import TagDAO from superset.tags.models import ObjectType, Tag, TagType +from superset.utils import json from superset.utils.core import DatasourceType, get_user_id if TYPE_CHECKING: @@ -185,3 +186,43 @@ def update_tags( TagDAO.create_custom_tagged_objects( object_type, object_id, [tag.name for tag in tags_to_add] ) + + +def update_chart_config_dataset( + config: dict[str, Any], dataset_info: dict[str, Any] +) -> dict[str, Any]: + """ + Update the chart configuration and query_context with new dataset information + + :param config: The original chart configuration + :param dataset_info: Dict with datasource_id, datasource_type, and datasource_name + :return: The updated chart configuration + """ + # Update datasource id, type, and name + config.update(dataset_info) + + dataset_uid = f"{dataset_info['datasource_id']}__{dataset_info['datasource_type']}" + config["params"].update({"datasource": dataset_uid}) + + if "query_context" in config and config["query_context"] is not None: + try: + query_context = json.loads(config["query_context"]) + + query_context["datasource"] = { + "id": dataset_info["datasource_id"], + "type": 
dataset_info["datasource_type"], + } + + if "form_data" in query_context: + query_context["form_data"]["datasource"] = dataset_uid + + if "queries" in query_context: + for query in query_context["queries"]: + if "datasource" in query: + query["datasource"] = query_context["datasource"] + + config["query_context"] = json.dumps(query_context) + except json.JSONDecodeError: + config["query_context"] = None + + return config diff --git a/superset/common/query_actions.py b/superset/common/query_actions.py index bdbccc78dbe2..9e61de6e1aaa 100644 --- a/superset/common/query_actions.py +++ b/superset/common/query_actions.py @@ -107,7 +107,7 @@ def _get_full( payload["colnames"] = list(df.columns) payload["indexnames"] = list(df.index) payload["coltypes"] = extract_dataframe_dtypes(df, datasource) - payload["data"] = query_context.get_data(df) + payload["data"] = query_context.get_data(df, payload["coltypes"]) payload["result_format"] = query_context.result_format del payload["df"] diff --git a/superset/common/query_context.py b/superset/common/query_context.py index 48b5abfbecde..a04e3944603f 100644 --- a/superset/common/query_context.py +++ b/superset/common/query_context.py @@ -28,6 +28,7 @@ ) from superset.common.query_object import QueryObject from superset.models.slice import Slice +from superset.utils.core import GenericDataType if TYPE_CHECKING: from superset.connectors.sqla.models import BaseDatasource @@ -88,8 +89,9 @@ def __init__( # pylint: disable=too-many-arguments def get_data( self, df: pd.DataFrame, + coltypes: list[GenericDataType], ) -> str | list[dict[str, Any]]: - return self._processor.get_data(df) + return self._processor.get_data(df, coltypes) def get_payload( self, diff --git a/superset/common/query_context_processor.py b/superset/common/query_context_processor.py index 26935a4d9678..762ed3099719 100644 --- a/superset/common/query_context_processor.py +++ b/superset/common/query_context_processor.py @@ -56,6 +56,7 @@ DTTM_ALIAS, 
error_msg_from_exception, FilterOperator, + GenericDataType, get_base_axis_labels, get_column_names_from_columns, get_column_names_from_metrics, @@ -641,7 +642,9 @@ def generate_join_column( return str(value) - def get_data(self, df: pd.DataFrame) -> str | list[dict[str, Any]]: + def get_data( + self, df: pd.DataFrame, coltypes: list[GenericDataType] + ) -> str | list[dict[str, Any]]: if self._query_context.result_format in ChartDataResultFormat.table_like(): include_index = not isinstance(df.index, pd.RangeIndex) columns = list(df.columns) @@ -655,6 +658,7 @@ def get_data(self, df: pd.DataFrame) -> str | list[dict[str, Any]]: df, index=include_index, **config["CSV_EXPORT"] ) elif self._query_context.result_format == ChartDataResultFormat.XLSX: + excel.apply_column_types(df, coltypes) result = excel.df_to_excel(df, **config["EXCEL_EXPORT"]) return result or "" diff --git a/superset/config.py b/superset/config.py index 4435c22548f9..0ba0e7957f02 100644 --- a/superset/config.py +++ b/superset/config.py @@ -68,6 +68,7 @@ if TYPE_CHECKING: from flask_appbuilder.security.sqla import models + from sqlglot import Dialect, Dialects from superset.connectors.sqla.models import SqlaTable from superset.models.core import Database @@ -196,12 +197,14 @@ def _try_json_readsha(filepath: str, length: int) -> str | None: # SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp' # SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp' -# The default MySQL isolation level is REPEATABLE READ whereas the default PostgreSQL -# isolation level is READ COMMITTED. All backends should use READ COMMITTED (or similar) -# to help ensure consistent behavior. -SQLALCHEMY_ENGINE_OPTIONS = { - "isolation_level": "SERIALIZABLE", # SQLite does not support READ COMMITTED. -} +# This config is exposed through flask-sqlalchemy, and can be used to set your metadata +# database connection settings. 
You can use this to set arbitrary connection settings +# that may be specific to the database engine you are using. +# Note that you can use this to set the isolation level of your database, as in +# `SQLALCHEMY_ENGINE_OPTIONS = {"isolation_level": "READ COMMITTED"}` +# Also note that we recommend READ COMMITTED for regular operation. +# Find out more here https://flask-sqlalchemy.palletsprojects.com/en/3.1.x/config/ +SQLALCHEMY_ENGINE_OPTIONS = {} # In order to hook up a custom password store for all SQLALCHEMY connections # implement a function that takes a single argument of type 'sqla.engine.url', @@ -247,6 +250,10 @@ def _try_json_readsha(filepath: str, length: int) -> str | None: SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER = ( # pylint: disable=invalid-name SQLAlchemyUtilsAdapter ) + +# Extends the default SQLGlot dialects with additional dialects +SQLGLOT_DIALECTS_EXTENSIONS: dict[str, Dialects | type[Dialect]] = {} + # The limit of queries fetched for query search QUERY_SEARCH_LIMIT = 1000 @@ -258,6 +265,7 @@ def _try_json_readsha(filepath: str, length: int) -> str | None: "superset.views.core.log", "superset.views.core.explore_json", "superset.charts.data.api.data", + "superset.dashboards.api.cache_dashboard_screenshot", ] # Whether to run the web server in debug mode or not @@ -470,6 +478,14 @@ class D3TimeFormat(TypedDict, total=False): "PRESTO_EXPAND_DATA": False, # Exposes API endpoint to compute thumbnails "THUMBNAILS": False, + # Enables the endpoints to cache and retrieve dashboard screenshots via webdriver. + # Requires configuring Celery and a cache using THUMBNAIL_CACHE_CONFIG. + "ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS": False, + # Generate screenshots (PDF or JPG) of dashboards using the web driver. + # When disabled, screenshots are generated on the fly by the browser. + # This feature flag is used by the download feature in the dashboard view. + # It is dependent on ENABLE_DASHBOARD_SCREENSHOT_ENDPOINT being enabled. 
+ "ENABLE_DASHBOARD_DOWNLOAD_WEBDRIVER_SCREENSHOT": False, "SHARE_QUERIES_VIA_KV_STORE": False, "TAGGING_SYSTEM": False, "SQLLAB_BACKEND_PERSISTENCE": True, @@ -542,6 +558,10 @@ class D3TimeFormat(TypedDict, total=False): "SQLLAB_FORCE_RUN_ASYNC": False, # Set to True to to enable factory resent CLI command "ENABLE_FACTORY_RESET_COMMAND": False, + # Whether Superset should use Slack avatars for users. + # If on, you'll want to add "https://avatars.slack-edge.com" to the list of allowed + # domains in your TALISMAN_CONFIG + "SLACK_ENABLE_AVATARS": False, } # ------------------------------ @@ -783,6 +803,8 @@ class D3TimeFormat(TypedDict, total=False): STORE_CACHE_KEYS_IN_METADATA_DB = False # CORS Options +# NOTE: enabling this requires installing the cors-related python dependencies +# `pip install .[cors]` or `pip install apache_superset[cors]`, depending ENABLE_CORS = False CORS_OPTIONS: dict[Any, Any] = {} @@ -975,7 +997,12 @@ class D3TimeFormat(TypedDict, total=False): class CeleryConfig: # pylint: disable=too-few-public-methods broker_url = "sqla+sqlite:///celerydb.sqlite" - imports = ("superset.sql_lab", "superset.tasks.scheduler") + imports = ( + "superset.sql_lab", + "superset.tasks.scheduler", + "superset.tasks.thumbnails", + "superset.tasks.cache", + ) result_backend = "db+sqlite:///celery_results.sqlite" worker_prefetch_multiplier = 1 task_acks_late = False @@ -994,6 +1021,18 @@ class CeleryConfig: # pylint: disable=too-few-public-methods "task": "reports.prune_log", "schedule": crontab(minute=0, hour=0), }, + # Uncomment to enable pruning of the query table + # "prune_query": { + # "task": "prune_query", + # "schedule": crontab(minute=0, hour=0, day_of_month=1), + # "options": {"retention_period_days": 180}, + # }, + # Uncomment to enable pruning of the logs table + # "prune_logs": { + # "task": "prune_logs", + # "schedule": crontab(minute="*", hour="*"), + # "kwargs": {"retention_period_days": 180}, + # }, } @@ -1033,6 +1072,10 @@ class CeleryConfig: # 
pylint: disable=too-few-public-methods # timeout. SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT = int(timedelta(seconds=10).total_seconds()) +# Timeout duration for SQL Lab fetching query results by the resultsKey. +# 0 means no timeout. +SQLLAB_QUERY_RESULT_TIMEOUT = 0 + # The cost returned by the databases is a relative value; in order to map the cost to # a tangible value you need to define a custom formatter that takes into consideration # your specific infrastructure. For example, you could analyze queries a posteriori by @@ -1287,7 +1330,7 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC( # pylint: disable=invalid-name "table_to_xml_and_xmlschema", "version", }, - "clickhouse": {"url"}, + "clickhouse": {"url", "version", "currentDatabase", "hostName"}, "mysql": {"version"}, } @@ -1410,11 +1453,6 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument SLACK_API_TOKEN: Callable[[], str] | str | None = None SLACK_PROXY = None -# Whether Superset should use Slack avatars for users. -# If on, you'll want to add "https://avatars.slack-edge.com" to the list of allowed -# domains in your TALISMAN_CONFIG -SLACK_ENABLE_AVATARS = False - # The webdriver to use for generating reports. Use one of the following # firefox # Requires: geckodriver and firefox installations @@ -1688,6 +1726,15 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument # Guest token audience for the embedded superset, either string or callable GUEST_TOKEN_JWT_AUDIENCE: Callable[[], str] | str | None = None +# A callable that can be supplied to do extra validation of guest token configuration +# for example certain RLS parameters: +# lambda x: len(x['rls']) == 1 and "tenant_id=" in x['rls'][0]['clause'] +# +# Takes the GuestTokenUser dict as an argument +# Return False from the callable to return a HTTP 400 to the user. + +GUEST_TOKEN_VALIDATOR_HOOK = None + # A SQL dataset health check. 
Note if enabled it is strongly advised that the callable # be memoized to aid with performance, i.e., # @@ -1807,6 +1854,15 @@ class ExtraDynamicQueryFilters(TypedDict, total=False): EXTRA_DYNAMIC_QUERY_FILTERS: ExtraDynamicQueryFilters = {} +# The migrations that add catalog permissions might take a considerably long time +# to execute as it has to create permissions to all schemas and catalogs from all +# other catalogs accessible by the credentials. This flag allows to skip the +# creation of these secondary perms, and focus only on permissions for the default +# catalog. These secondary permissions can be created later by editing the DB +# connection via the UI (without downtime). +CATALOGS_SIMPLIFIED_MIGRATION: bool = False + + # ------------------------------------------------------------------- # * WARNING: STOP EDITING HERE * # ------------------------------------------------------------------- diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index c38a0085a534..fb7409adba58 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -21,10 +21,11 @@ import dataclasses import logging import re +from collections import defaultdict from collections.abc import Hashable from dataclasses import dataclass, field from datetime import datetime, timedelta -from typing import Any, Callable, cast +from typing import Any, Callable, cast, Optional, Union import dateutil.parser import numpy as np @@ -69,7 +70,7 @@ from sqlalchemy.sql.expression import Label, TextAsFrom from sqlalchemy.sql.selectable import Alias, TableClause -from superset import app, db, security_manager +from superset import app, db, is_feature_enabled, security_manager from superset.commands.dataset.exceptions import DatasetNotFoundError from superset.common.db_query_status import QueryStatus from superset.connectors.sqla.utils import ( @@ -82,7 +83,6 @@ from superset.exceptions import ( ColumnNotFoundException, 
DatasetInvalidPermissionEvaluationException, - QueryClauseValidationException, QueryObjectValidationError, SupersetErrorException, SupersetErrorsException, @@ -102,10 +102,9 @@ ExploreMixin, ImportExportMixin, QueryResult, - validate_adhoc_subquery, ) from superset.models.slice import Slice -from superset.sql_parse import ParsedQuery, sanitize_clause, Table +from superset.sql_parse import ParsedQuery, Table from superset.superset_typing import ( AdhocColumn, AdhocMetric, @@ -117,7 +116,6 @@ ) from superset.utils import core as utils, json from superset.utils.backports import StrEnum -from superset.utils.core import GenericDataType, MediumText config = app.config metadata = Model.metadata # pylint: disable=no-member @@ -461,9 +459,11 @@ def data_for_slices( # pylint: disable=too-many-locals ) else: _columns = [ - utils.get_column_name(column_) - if utils.is_adhoc_column(column_) - else column_ + ( + utils.get_column_name(column_) + if utils.is_adhoc_column(column_) + else column_ + ) for column_param in COLUMN_FORM_DATA_PARAMS for column_ in utils.as_list(form_data.get(column_param) or []) ] @@ -476,7 +476,7 @@ def data_for_slices( # pylint: disable=too-many-locals ] filtered_columns: list[Column] = [] - column_types: set[GenericDataType] = set() + column_types: set[utils.GenericDataType] = set() for column_ in data["columns"]: generic_type = column_.get("type_generic") if generic_type is not None: @@ -488,6 +488,11 @@ def data_for_slices( # pylint: disable=too-many-locals del data["description"] data.update({"metrics": filtered_metrics}) data.update({"columns": filtered_columns}) + + all_columns = { + column_["column_name"]: column_["verbose_name"] or column_["column_name"] + for column_ in filtered_columns + } verbose_map = {"__timestamp": "Time"} verbose_map.update( { @@ -495,14 +500,9 @@ def data_for_slices( # pylint: disable=too-many-locals for metric in filtered_metrics } ) - verbose_map.update( - { - column_["column_name"]: column_["verbose_name"] - or 
column_["column_name"] - for column_ in filtered_columns - } - ) + verbose_map.update(all_columns) data["verbose_map"] = verbose_map + data["column_names"] = set(all_columns.values()) | set(self.column_names) return data @@ -510,7 +510,7 @@ def data_for_slices( # pylint: disable=too-many-locals def filter_values_handler( # pylint: disable=too-many-arguments values: FilterValues | None, operator: str, - target_generic_type: GenericDataType, + target_generic_type: utils.GenericDataType, target_native_type: str | None = None, is_list_target: bool = False, db_engine_spec: builtins.type[BaseEngineSpec] | None = None, @@ -710,6 +710,56 @@ def get_datasource_by_name( ) -> BaseDatasource | None: raise NotImplementedError() + def get_template_processor(self, **kwargs: Any) -> BaseTemplateProcessor: + raise NotImplementedError() + + def text(self, clause: str) -> TextClause: + raise NotImplementedError() + + def get_sqla_row_level_filters( + self, + template_processor: Optional[BaseTemplateProcessor] = None, + ) -> list[TextClause]: + """ + Return the appropriate row level security filters for this table and the + current user. A custom username can be passed when the user is not present in the + Flask global namespace. + + :param template_processor: The template processor to apply to the filters. + :returns: A list of SQL clauses to be ANDed together. 
+ """ + template_processor = template_processor or self.get_template_processor() + + all_filters: list[TextClause] = [] + filter_groups: dict[Union[int, str], list[TextClause]] = defaultdict(list) + try: + for filter_ in security_manager.get_rls_filters(self): + clause = self.text( + f"({template_processor.process_template(filter_.clause)})" + ) + if filter_.group_key: + filter_groups[filter_.group_key].append(clause) + else: + all_filters.append(clause) + + if is_feature_enabled("EMBEDDED_SUPERSET"): + for rule in security_manager.get_guest_rls_filters(self): + clause = self.text( + f"({template_processor.process_template(rule['clause'])})" + ) + all_filters.append(clause) + + grouped_filters = [or_(*clauses) for clauses in filter_groups.values()] + all_filters.extend(grouped_filters) + return all_filters + except TemplateError as ex: + raise QueryObjectValidationError( + _( + "Error in jinja expression in RLS filters: %(msg)s", + msg=ex.message, + ) + ) from ex + class AnnotationDatasource(BaseDatasource): """Dummy object so we can query annotations using 'Viz' objects just like @@ -778,10 +828,10 @@ class TableColumn(AuditMixinNullable, ImportExportMixin, CertificationMixin, Mod advanced_data_type = Column(String(255)) groupby = Column(Boolean, default=True) filterable = Column(Boolean, default=True) - description = Column(MediumText()) + description = Column(utils.MediumText()) table_id = Column(Integer, ForeignKey("tables.id", ondelete="CASCADE")) is_dttm = Column(Boolean, default=False) - expression = Column(MediumText()) + expression = Column(utils.MediumText()) python_date_format = Column(String(255)) extra = Column(Text) @@ -841,21 +891,21 @@ def is_boolean(self) -> bool: """ Check if the column has a boolean datatype. """ - return self.type_generic == GenericDataType.BOOLEAN + return self.type_generic == utils.GenericDataType.BOOLEAN @property def is_numeric(self) -> bool: """ Check if the column has a numeric datatype. 
""" - return self.type_generic == GenericDataType.NUMERIC + return self.type_generic == utils.GenericDataType.NUMERIC @property def is_string(self) -> bool: """ Check if the column has a string datatype. """ - return self.type_generic == GenericDataType.STRING + return self.type_generic == utils.GenericDataType.STRING @property def is_temporal(self) -> bool: @@ -867,7 +917,7 @@ def is_temporal(self) -> bool: """ if self.is_dttm is not None: return self.is_dttm - return self.type_generic == GenericDataType.TEMPORAL + return self.type_generic == utils.GenericDataType.TEMPORAL @property def database(self) -> Database: @@ -884,7 +934,7 @@ def db_extra(self) -> dict[str, Any]: @property def type_generic(self) -> utils.GenericDataType | None: if self.is_dttm: - return GenericDataType.TEMPORAL + return utils.GenericDataType.TEMPORAL return ( column_spec.generic_type @@ -987,12 +1037,12 @@ class SqlMetric(AuditMixinNullable, ImportExportMixin, CertificationMixin, Model metric_name = Column(String(255), nullable=False) verbose_name = Column(String(1024)) metric_type = Column(String(32)) - description = Column(MediumText()) + description = Column(utils.MediumText()) d3format = Column(String(128)) currency = Column(String(128)) warning_text = Column(Text) table_id = Column(Integer, ForeignKey("tables.id", ondelete="CASCADE")) - expression = Column(MediumText(), nullable=False) + expression = Column(utils.MediumText(), nullable=False) extra = Column(Text) table: Mapped[SqlaTable] = relationship( @@ -1083,27 +1133,6 @@ def data(self) -> dict[str, Any]: ) -def _process_sql_expression( - expression: str | None, - database_id: int, - schema: str, - template_processor: BaseTemplateProcessor | None = None, -) -> str | None: - if template_processor and expression: - expression = template_processor.process_template(expression) - if expression: - try: - expression = validate_adhoc_subquery( - expression, - database_id, - schema, - ) - expression = sanitize_clause(expression) - except 
(QueryClauseValidationException, SupersetSecurityException) as ex: - raise QueryObjectValidationError(ex.message) from ex - return expression - - class SqlaTable( Model, BaseDatasource, @@ -1155,7 +1184,7 @@ class SqlaTable( ) schema = Column(String(255)) catalog = Column(String(256), nullable=True, default=None) - sql = Column(MediumText()) + sql = Column(utils.MediumText()) is_sqllab_view = Column(Boolean, default=False) template_params = Column(Text) extra = Column(Text) @@ -1499,12 +1528,16 @@ def adhoc_metric_to_sqla( sqla_column = column(column_name) sqla_metric = self.sqla_aggregations[metric["aggregate"]](sqla_column) elif expression_type == utils.AdhocMetricExpressionType.SQL: - expression = _process_sql_expression( - expression=metric["sqlExpression"], - database_id=self.database_id, - schema=self.schema, - template_processor=template_processor, - ) + try: + expression = self._process_sql_expression( + expression=metric["sqlExpression"], + database_id=self.database_id, + engine=self.database.backend, + schema=self.schema, + template_processor=template_processor, + ) + except SupersetSecurityException as ex: + raise QueryObjectValidationError(ex.message) from ex sqla_metric = literal_column(expression) else: raise QueryObjectValidationError("Adhoc metric expressionType is invalid") @@ -1529,12 +1562,16 @@ def adhoc_column_to_sqla( # pylint: disable=too-many-locals :rtype: sqlalchemy.sql.column """ label = utils.get_column_name(col) - expression = _process_sql_expression( - expression=col["sqlExpression"], - database_id=self.database_id, - schema=self.schema, - template_processor=template_processor, - ) + try: + expression = self._process_sql_expression( + expression=col["sqlExpression"], + database_id=self.database_id, + engine=self.database.backend, + schema=self.schema, + template_processor=template_processor, + ) + except SupersetSecurityException as ex: + raise QueryObjectValidationError(ex.message) from ex time_grain = col.get("timeGrain") 
has_timegrain = col.get("columnType") == "BASE_AXIS" and time_grain is_dttm = False @@ -1652,10 +1689,10 @@ def _normalize_prequery_result_type( if isinstance(value, np.generic): value = value.item() - column_ = columns_by_name[dimension] + column_ = columns_by_name.get(dimension) db_extra: dict[str, Any] = self.database.get_extra() - if column_.type and column_.is_temporal and isinstance(value, str): + if column_ and column_.type and column_.is_temporal and isinstance(value, str): sql = self.db_engine_spec.convert_dttm( column_.type, dateutil.parser.parse(value), db_extra=db_extra ) @@ -1942,6 +1979,26 @@ def has_extra_cache_key_calls(self, query_obj: QueryObjectDict) -> bool: templatable_statements.append(extras["where"]) if "having" in extras: templatable_statements.append(extras["having"]) + if columns := query_obj.get("columns"): + calculated_columns: dict[str, Any] = { + c.column_name: c.expression for c in self.columns if c.expression + } + for column_ in columns: + if utils.is_adhoc_column(column_): + templatable_statements.append(column_["sqlExpression"]) + elif isinstance(column_, str) and column_ in calculated_columns: + templatable_statements.append(calculated_columns[column_]) + if metrics := query_obj.get("metrics"): + metrics_by_name: dict[str, Any] = { + m.metric_name: m.expression for m in self.metrics + } + for metric in metrics: + if utils.is_adhoc_metric(metric) and ( + sql := metric.get("sqlExpression") + ): + templatable_statements.append(sql) + elif isinstance(metric, str) and metric in metrics_by_name: + templatable_statements.append(metrics_by_name[metric]) if self.is_rls_supported: templatable_statements += [ f.clause for f in security_manager.get_rls_filters(self) @@ -1963,7 +2020,7 @@ class and any keys added via `ExtraCache`. 
if self.has_extra_cache_key_calls(query_obj): sqla_query = self.get_sqla_query(**query_obj) extra_cache_keys += sqla_query.extra_cache_keys - return extra_cache_keys + return list(set(extra_cache_keys)) @property def quote_identifier(self) -> Callable[[str], str]: @@ -2083,4 +2140,4 @@ class RowLevelSecurityFilter(Model, AuditMixinNullable): secondary=RLSFilterTables, backref="row_level_security_filters", ) - clause = Column(MediumText(), nullable=False) + clause = Column(utils.MediumText(), nullable=False) diff --git a/superset/connectors/sqla/utils.py b/superset/connectors/sqla/utils.py index 87b3d5dd3a28..84a6753f2286 100644 --- a/superset/connectors/sqla/utils.py +++ b/superset/connectors/sqla/utils.py @@ -38,7 +38,8 @@ ) from superset.models.core import Database from superset.result_set import SupersetResultSet -from superset.sql_parse import ParsedQuery, Table +from superset.sql.parse import SQLScript +from superset.sql_parse import Table from superset.superset_typing import ResultSetColumnType if TYPE_CHECKING: @@ -105,8 +106,8 @@ def get_virtual_table_metadata(dataset: SqlaTable) -> list[ResultSetColumnType]: sql = dataset.get_template_processor().process_template( dataset.sql, **dataset.template_params_dict ) - parsed_query = ParsedQuery(sql, engine=db_engine_spec.engine) - if not db_engine_spec.is_readonly_query(parsed_query): + parsed_script = SQLScript(sql, engine=db_engine_spec.engine) + if parsed_script.has_mutation(): raise SupersetSecurityException( SupersetError( error_type=SupersetErrorType.DATASOURCE_SECURITY_ACCESS_ERROR, @@ -114,8 +115,7 @@ def get_virtual_table_metadata(dataset: SqlaTable) -> list[ResultSetColumnType]: level=ErrorLevel.ERROR, ) ) - statements = parsed_query.get_statements() - if len(statements) > 1: + if len(parsed_script.statements) > 1: raise SupersetSecurityException( SupersetError( error_type=SupersetErrorType.DATASOURCE_SECURITY_ACCESS_ERROR, @@ -127,7 +127,7 @@ def get_virtual_table_metadata(dataset: SqlaTable) -> 
list[ResultSetColumnType]: dataset.database, dataset.catalog, dataset.schema, - statements[0], + sql, ) @@ -144,8 +144,9 @@ def get_columns_description( with database.get_raw_connection(catalog=catalog, schema=schema) as conn: cursor = conn.cursor() query = database.apply_limit_to_sql(query, limit=1) - cursor.execute(query) - db_engine_spec.execute(cursor, query, database) + mutated_query = database.mutate_sql_based_on_config(query) + cursor.execute(mutated_query) + db_engine_spec.execute(cursor, mutated_query, database) result = db_engine_spec.fetch_data(cursor, limit=1) result_set = SupersetResultSet(result, cursor.description, db_engine_spec) return result_set.columns diff --git a/superset/css_templates/api.py b/superset/css_templates/api.py index ac222da66f81..5332553d4fd7 100644 --- a/superset/css_templates/api.py +++ b/superset/css_templates/api.py @@ -35,7 +35,12 @@ ) from superset.extensions import event_logger from superset.models.core import CssTemplate -from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics +from superset.views.base_api import ( + BaseSupersetModelRestApi, + RelatedFieldFilter, + statsd_metrics, +) +from superset.views.filters import BaseFilterRelatedUsers, FilterRelatedOwners logger = logging.getLogger(__name__) @@ -91,6 +96,13 @@ class CssTemplateRestApi(BaseSupersetModelRestApi): openapi_spec_tag = "CSS Templates" openapi_spec_methods = openapi_spec_methods_override + related_field_filters = { + "changed_by": RelatedFieldFilter("first_name", FilterRelatedOwners), + } + base_related_field_filters = { + "changed_by": [["id", BaseFilterRelatedUsers, lambda: []]], + } + @expose("/", methods=("DELETE",)) @protect() @safe diff --git a/superset/daos/dashboard.py b/superset/daos/dashboard.py index 8196c197b248..d5cc08582cd4 100644 --- a/superset/daos/dashboard.py +++ b/superset/daos/dashboard.py @@ -259,7 +259,8 @@ def set_dash_metadata( md["refresh_frequency"] = data.get("refresh_frequency", 0) md["color_scheme"] = 
data.get("color_scheme", "") md["label_colors"] = data.get("label_colors", {}) - md["shared_label_colors"] = data.get("shared_label_colors", {}) + md["shared_label_colors"] = data.get("shared_label_colors", []) + md["map_label_colors"] = data.get("map_label_colors", {}) md["color_scheme_domain"] = data.get("color_scheme_domain", []) md["cross_filters_enabled"] = data.get("cross_filters_enabled", True) dashboard.json_metadata = json.dumps(md) diff --git a/superset/daos/dataset.py b/superset/daos/dataset.py index af1b705d6610..57d498661fe8 100644 --- a/superset/daos/dataset.py +++ b/superset/daos/dataset.py @@ -84,15 +84,19 @@ def validate_table_exists( @staticmethod def validate_uniqueness( - database_id: int, + database: Database, table: Table, dataset_id: int | None = None, ) -> bool: + # The catalog might not be set even if the database supports catalogs, in case + # multi-catalog is disabled. + catalog = table.catalog or database.get_default_catalog() + dataset_query = db.session.query(SqlaTable).filter( SqlaTable.table_name == table.table, SqlaTable.schema == table.schema, - SqlaTable.catalog == table.catalog, - SqlaTable.database_id == database_id, + SqlaTable.catalog == catalog, + SqlaTable.database_id == database.id, ) if dataset_id: @@ -103,15 +107,19 @@ def validate_uniqueness( @staticmethod def validate_update_uniqueness( - database_id: int, + database: Database, table: Table, dataset_id: int, ) -> bool: + # The catalog might not be set even if the database supports catalogs, in case + # multi-catalog is disabled. 
+ catalog = table.catalog or database.get_default_catalog() + dataset_query = db.session.query(SqlaTable).filter( SqlaTable.table_name == table.table, - SqlaTable.database_id == database_id, + SqlaTable.database_id == database.id, SqlaTable.schema == table.schema, - SqlaTable.catalog == table.catalog, + SqlaTable.catalog == catalog, SqlaTable.id != dataset_id, ) return not db.session.query(dataset_query.exists()).scalar() diff --git a/superset/daos/log.py b/superset/daos/log.py index 002c3f230725..fe32ef816938 100644 --- a/superset/daos/log.py +++ b/superset/daos/log.py @@ -59,8 +59,14 @@ def get_recent_activity( .group_by(Log.dashboard_id, Log.slice_id, Log.action) .filter( and_( - Log.action.in_(actions), + Log.action == "log", Log.user_id == user_id, + or_( + *{ + Log.json.contains(f'"event_name": "{action}"') + for action in actions + }, + ), # limit to one year of data to improve performance Log.dttm > one_year_ago, or_(Log.dashboard_id.isnot(None), Log.slice_id.isnot(None)), @@ -99,7 +105,16 @@ def get_recent_activity( .outerjoin(Dashboard, Dashboard.id == Log.dashboard_id) .outerjoin(Slice, Slice.id == Log.slice_id) .filter(has_subject_title) - .filter(Log.action.in_(actions), Log.user_id == user_id) + .filter( + Log.action == "log", + Log.user_id == user_id, + or_( + *{ + Log.json.contains(f'"event_name": "{action}"') + for action in actions + }, + ), + ) .order_by(Log.dttm.desc()) .limit(page_size) .offset(page * page_size) diff --git a/superset/daos/query.py b/superset/daos/query.py index 55287ebd9fff..e5a96d16fe63 100644 --- a/superset/daos/query.py +++ b/superset/daos/query.py @@ -35,25 +35,6 @@ class QueryDAO(BaseDAO[Query]): base_filter = QueryFilter - @staticmethod - def update_saved_query_exec_info(query_id: int) -> None: - """ - Propagates query execution info back to saved query if applicable - - :param query_id: The query id - :return: - """ - query = db.session.query(Query).get(query_id) - related_saved_queries = ( - db.session.query(SavedQuery) 
- .filter(SavedQuery.database == query.database) - .filter(SavedQuery.sql == query.sql) - ).all() - if related_saved_queries: - for saved_query in related_saved_queries: - saved_query.rows = query.rows - saved_query.last_run = datetime.now() - @staticmethod def save_metadata(query: Query, payload: dict[str, Any]) -> None: # pull relevant data from payload and store in extra_json diff --git a/superset/daos/user.py b/superset/daos/user.py index 90a9b2bd2f6e..475e3252a6b2 100644 --- a/superset/daos/user.py +++ b/superset/daos/user.py @@ -21,7 +21,7 @@ from flask_appbuilder.security.sqla.models import User from superset.daos.base import BaseDAO -from superset.extensions import db +from superset.extensions import db, security_manager from superset.models.user_attributes import UserAttribute logger = logging.getLogger(__name__) @@ -30,7 +30,7 @@ class UserDAO(BaseDAO[User]): @staticmethod def get_by_id(user_id: int) -> User: - return db.session.query(User).filter_by(id=user_id).one() + return db.session.query(security_manager.user_model).filter_by(id=user_id).one() @staticmethod def set_avatar_url(user: User, url: str) -> None: diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index 716e4c416d0e..0d1cbb4ebb2e 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -22,7 +22,7 @@ from typing import Any, Callable, cast, Optional from zipfile import is_zipfile, ZipFile -from flask import redirect, request, Response, send_file, url_for +from flask import g, redirect, request, Response, send_file, url_for from flask_appbuilder import permission_name from flask_appbuilder.api import expose, protect, rison, safe from flask_appbuilder.hooks import before_request @@ -34,10 +34,15 @@ from superset import db, is_feature_enabled, thumbnail_cache from superset.charts.schemas import ChartEntityResponseSchema +from superset.commands.dashboard.copy import CopyDashboardCommand from superset.commands.dashboard.create import CreateDashboardCommand 
-from superset.commands.dashboard.delete import DeleteDashboardCommand +from superset.commands.dashboard.delete import ( + DeleteDashboardCommand, + DeleteEmbeddedDashboardCommand, +) from superset.commands.dashboard.exceptions import ( DashboardAccessDeniedError, + DashboardCopyError, DashboardCreateFailedError, DashboardDeleteFailedError, DashboardForbiddenError, @@ -46,8 +51,10 @@ DashboardUpdateFailedError, ) from superset.commands.dashboard.export import ExportDashboardsCommand +from superset.commands.dashboard.fave import AddFavoriteDashboardCommand from superset.commands.dashboard.importers.dispatcher import ImportDashboardsCommand from superset.commands.dashboard.permalink.create import CreateDashboardPermalinkCommand +from superset.commands.dashboard.unfave import DelFavoriteDashboardCommand from superset.commands.dashboard.update import UpdateDashboardCommand from superset.commands.exceptions import TagForbiddenError from superset.commands.importers.exceptions import NoValidFilesFoundError @@ -88,6 +95,7 @@ from superset.extensions import event_logger from superset.models.dashboard import Dashboard from superset.models.embedded_dashboard import EmbeddedDashboard +from superset.security.guest_token import GuestUser from superset.tasks.thumbnails import ( cache_dashboard_screenshot, cache_dashboard_thumbnail, @@ -141,12 +149,18 @@ def wraps(self: BaseSupersetModelRestApi, id_or_slug: str) -> Response: class DashboardRestApi(BaseSupersetModelRestApi): datamodel = SQLAInterface(Dashboard) - @before_request(only=["thumbnail"]) + @before_request(only=["thumbnail", "cache_dashboard_screenshot", "screenshot"]) def ensure_thumbnails_enabled(self) -> Optional[Response]: if not is_feature_enabled("THUMBNAILS"): return self.response_404() return None + @before_request(only=["cache_dashboard_screenshot", "screenshot"]) + def ensure_screenshots_enabled(self) -> Optional[Response]: + if not is_feature_enabled("ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS"): + return 
self.response_404() + return None + include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | { RouteMethod.EXPORT, RouteMethod.IMPORT, @@ -204,6 +218,7 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: "tags.id", "tags.name", "tags.type", + "uuid", ] list_select_columns = list_columns + ["changed_on", "created_on", "changed_by_fk"] @@ -271,6 +286,7 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: base_related_field_filters = { "owners": [["id", BaseFilterRelatedUsers, lambda: []]], "created_by": [["id", BaseFilterRelatedUsers, lambda: []]], + "changed_by": [["id", BaseFilterRelatedUsers, lambda: []]], "roles": [["id", BaseFilterRelatedRoles, lambda: []]], } @@ -278,6 +294,7 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: "owners": RelatedFieldFilter("first_name", FilterRelatedOwners), "roles": RelatedFieldFilter("name", FilterRelatedRoles), "created_by": RelatedFieldFilter("first_name", FilterRelatedOwners), + "changed_by": RelatedFieldFilter("first_name", FilterRelatedOwners), } allowed_rel_fields = {"owners", "roles", "created_by", "changed_by"} @@ -1026,7 +1043,7 @@ def cache_dashboard_screenshot(self, pk: int, **kwargs: Any) -> WerkzeugResponse dashboard_url = get_url_path("Superset.dashboard_permalink", key=permalink_key) screenshot_obj = DashboardScreenshot(dashboard_url, dashboard.digest) - cache_key = screenshot_obj.cache_key(window_size, thumb_size) + cache_key = screenshot_obj.cache_key(window_size, thumb_size, dashboard_state) image_url = get_url_path( "DashboardRestApi.screenshot", pk=dashboard.id, digest=cache_key ) @@ -1034,12 +1051,18 @@ def cache_dashboard_screenshot(self, pk: int, **kwargs: Any) -> WerkzeugResponse def trigger_celery() -> WerkzeugResponse: logger.info("Triggering screenshot ASYNC") cache_dashboard_screenshot.delay( - current_user=get_current_user(), + username=get_current_user(), + guest_token=( + g.user.guest_token + if get_current_user() and isinstance(g.user, GuestUser) + else None + 
), dashboard_id=dashboard.id, dashboard_url=dashboard_url, - force=True, + force=False, thumb_size=thumb_size, window_size=window_size, + cache_key=cache_key, ) return self.response( 202, @@ -1206,11 +1229,14 @@ def add_favorite(self, pk: int) -> Response: 500: $ref: '#/components/responses/500' """ - dashboard = DashboardDAO.find_by_id(pk) - if not dashboard: + try: + AddFavoriteDashboardCommand(pk).run() + + except DashboardNotFoundError: return self.response_404() + except DashboardAccessDeniedError: + return self.response_403() - DashboardDAO.add_favorite(dashboard) return self.response(200, result="OK") @expose("/<pk>/favorites/", methods=("DELETE",)) @@ -1249,11 +1275,13 @@ def remove_favorite(self, pk: int) -> Response: 500: $ref: '#/components/responses/500' """ - dashboard = DashboardDAO.find_by_id(pk) - if not dashboard: + try: + DelFavoriteDashboardCommand(pk).run() + except DashboardNotFoundError: return self.response_404() + except DashboardAccessDeniedError: + return self.response_403() - DashboardDAO.remove_favorite(dashboard) return self.response(200, result="OK") @expose("/import/", methods=("POST",)) @@ -1496,15 +1524,16 @@ def set_embedded(self, dashboard: Dashboard) -> Response: try: body = self.embedded_config_schema.load(request.json) - with db.session.begin_nested(): - embedded = EmbeddedDashboardDAO.upsert( - dashboard, - body["allowed_domains"], - ) + embedded = EmbeddedDashboardDAO.upsert( + dashboard, + body["allowed_domains"], + ) + db.session.commit() # pylint: disable=consider-using-transaction result = self.embedded_response_schema.dump(embedded) return self.response(200, result=result) except ValidationError as error: + db.session.rollback() # pylint: disable=consider-using-transaction return self.response_400(message=error.messages) @expose("/<id_or_slug>/embedded", methods=("DELETE",)) @@ -1545,7 +1574,7 @@ def delete_embedded(self, dashboard: Dashboard) -> Response: 500: $ref: '#/components/responses/500' """ - 
EmbeddedDashboardDAO.delete(dashboard.embedded) + DeleteEmbeddedDashboardCommand(dashboard).run() return self.response(200, message="OK") @expose("/<id_or_slug>/copy/", methods=("POST",)) @@ -1604,9 +1633,11 @@ def copy_dash(self, original_dash: Dashboard) -> Response: return self.response_400(message=error.messages) try: - dash = DashboardDAO.copy_dashboard(original_dash, data) + dash = CopyDashboardCommand(original_dash, data).run() except DashboardForbiddenError: return self.response_403() + except DashboardCopyError: + return self.response_400() return self.response( 200, diff --git a/superset/dashboards/schemas.py b/superset/dashboards/schemas.py index 1f78a2235849..c3c655e7e89c 100644 --- a/superset/dashboards/schemas.py +++ b/superset/dashboards/schemas.py @@ -15,7 +15,7 @@ # specific language governing permissions and limitations # under the License. import re -from typing import Any, Union +from typing import Any, Mapping, Union from marshmallow import fields, post_dump, post_load, pre_load, Schema from marshmallow.validate import Length, ValidationError @@ -116,6 +116,28 @@ def validate_json_metadata(value: Union[bytes, bytearray, str]) -> None: raise ValidationError(errors) +class SharedLabelsColorsField(fields.Field): + """ + A custom field that accepts either a list of strings or a dictionary. 
+ """ + + def _deserialize( + self, + value: Union[list[str], dict[str, str]], + attr: Union[str, None], + data: Union[Mapping[str, Any], None], + **kwargs: dict[str, Any], + ) -> list[str]: + if isinstance(value, list): + if all(isinstance(item, str) for item in value): + return value + elif isinstance(value, dict): + # Enforce list (for backward compatibility) + return [] + + raise ValidationError("Not a valid list") + + class DashboardJSONMetadataSchema(Schema): # native_filter_configuration is for dashboard-native filters native_filter_configuration = fields.List(fields.Dict(), allow_none=True) @@ -137,7 +159,8 @@ class DashboardJSONMetadataSchema(Schema): color_namespace = fields.Str(allow_none=True) positions = fields.Dict(allow_none=True) label_colors = fields.Dict() - shared_label_colors = fields.Dict() + shared_label_colors = SharedLabelsColorsField() + map_label_colors = fields.Dict() color_scheme_domain = fields.List(fields.Str()) cross_filters_enabled = fields.Boolean(dump_default=True) # used for v0 import/export @@ -262,6 +285,7 @@ class DashboardDatasetSchema(Schema): owners = fields.List(fields.Dict()) columns = fields.List(fields.Dict()) column_types = fields.List(fields.Int()) + column_names = fields.List(fields.Str()) metrics = fields.List(fields.Dict()) order_by_choices = fields.List(fields.List(fields.Str())) verbose_map = fields.Dict(fields.Str(), fields.Str()) diff --git a/superset/databases/api.py b/superset/databases/api.py index af5ce255aefe..eb611837bc9b 100644 --- a/superset/databases/api.py +++ b/superset/databases/api.py @@ -125,11 +125,13 @@ from superset.utils.ssh_tunnel import mask_password_info from superset.views.base_api import ( BaseSupersetModelRestApi, + RelatedFieldFilter, requires_form_data, requires_json, statsd_metrics, ) from superset.views.error_handling import json_error_response +from superset.views.filters import BaseFilterRelatedUsers, FilterRelatedOwners logger = logging.getLogger(__name__) @@ -304,6 +306,13 @@ 
class DatabaseRestApi(BaseSupersetModelRestApi): openapi_spec_methods = openapi_spec_methods_override """ Overrides GET methods OpenApi descriptions """ + related_field_filters = { + "changed_by": RelatedFieldFilter("first_name", FilterRelatedOwners), + } + base_related_field_filters = { + "changed_by": [["id", BaseFilterRelatedUsers, lambda: []]], + } + @expose("/<int:pk>/connection", methods=("GET",)) @protect() @safe @@ -1150,7 +1159,7 @@ def select_star( self.incr_stats("init", self.select_star.__name__) try: result = database.select_star( - Table(table_name, schema_name), + Table(table_name, schema_name, database.get_default_catalog()), latest_partition=True, ) except NoSuchTableError: @@ -2265,6 +2274,6 @@ def schemas_access_for_file_upload(self, pk: int) -> Response: # otherwise the database should have been filtered out # in CsvToDatabaseForm schemas_allowed_processed = security_manager.get_schemas_accessible_by_user( - database, schemas_allowed, True + database, database.get_default_catalog(), schemas_allowed, True ) return self.response(200, schemas=schemas_allowed_processed) diff --git a/superset/datasets/api.py b/superset/datasets/api.py index 0f14be1a6d17..667121ac9b7e 100644 --- a/superset/datasets/api.py +++ b/superset/datasets/api.py @@ -123,6 +123,7 @@ class DatasetRestApi(BaseSupersetModelRestApi): "schema", "sql", "table_name", + "uuid", ] list_select_columns = list_columns + ["changed_on", "changed_by_fk"] order_columns = [ @@ -242,10 +243,12 @@ class DatasetRestApi(BaseSupersetModelRestApi): base_related_field_filters = { "owners": [["id", BaseFilterRelatedUsers, lambda: []]], + "changed_by": [["id", BaseFilterRelatedUsers, lambda: []]], "database": [["id", DatabaseFilter, lambda: []]], } related_field_filters = { "owners": RelatedFieldFilter("first_name", FilterRelatedOwners), + "changed_by": RelatedFieldFilter("first_name", FilterRelatedOwners), "database": "database_name", } search_filters = { diff --git a/superset/db_engine_specs/base.py 
b/superset/db_engine_specs/base.py index 1329597f02b7..d0f25d2f6715 100644 --- a/superset/db_engine_specs/base.py +++ b/superset/db_engine_specs/base.py @@ -63,7 +63,8 @@ from superset.databases.utils import get_table_metadata, make_url_safe from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import DisallowedSQLFunction, OAuth2Error, OAuth2RedirectError -from superset.sql_parse import ParsedQuery, SQLScript, Table +from superset.sql.parse import SQLScript, Table +from superset.sql_parse import ParsedQuery from superset.superset_typing import ( OAuth2ClientConfig, OAuth2State, @@ -74,6 +75,7 @@ from superset.utils import core as utils, json from superset.utils.core import ColumnSpec, GenericDataType from superset.utils.hashing import md5_sha_from_str +from superset.utils.json import redact_sensitive, reveal_sensitive from superset.utils.network import is_hostname_valid, is_port_open from superset.utils.oauth2 import encode_oauth2_state @@ -92,6 +94,12 @@ logger = logging.getLogger() +# When connecting to a database it's hard to catch specific exceptions, since we support +# more than 50 different database drivers. Usually the try/except block will catch the +# generic `Exception` class, which requires a pylint disablee comment. To make it clear +# that we know this is a necessary evil we create an alias, and catch it instead. 
+GenericDBException = Exception + def convert_inspector_columns(cols: list[SQLAColumnType]) -> list[ResultSetColumnType]: result_set_columns: list[ResultSetColumnType] = [] @@ -340,6 +348,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods # Does database support join-free timeslot grouping time_groupby_inline = False limit_method = LimitMethod.FORCE_LIMIT + supports_multivalues_insert = False allows_joins = True allows_subqueries = True allows_alias_in_select = True @@ -392,6 +401,11 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods Pattern[str], tuple[str, SupersetErrorType, dict[str, Any]] ] = {} + # List of JSON path to fields in `encrypted_extra` that should be masked when the + # database is edited. By default everything is masked. + # pylint: disable=invalid-name + encrypted_extra_sensitive_fields: set[str] = {"$.*"} + # Whether the engine supports file uploads # if True, database will be listed as option in the upload file form supports_file_upload = True @@ -406,7 +420,8 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods # # When this is changed to true in a DB engine spec it MUST support the # `get_default_catalog` and `get_catalog_names` methods. In addition, you MUST write - # a database migration updating any existing schema permissions. + # a database migration updating any existing schema permissions using the helper + # `upgrade_catalog_perms`. supports_catalog = False # Can the catalog be changed on a per-query basis? 
@@ -1270,9 +1285,11 @@ def df_to_sql( catalog=table.catalog, schema=table.schema, ) as engine: - if engine.dialect.supports_multivalues_insert: + if ( + engine.dialect.supports_multivalues_insert + or cls.supports_multivalues_insert + ): to_sql_kwargs["method"] = "multi" - df.to_sql(con=engine, **to_sql_kwargs) @classmethod @@ -2156,26 +2173,54 @@ def get_impersonation_key(cls, user: User | None) -> Any: @classmethod def mask_encrypted_extra(cls, encrypted_extra: str | None) -> str | None: """ - Mask ``encrypted_extra``. + Mask `encrypted_extra`. - This is used to remove any sensitive data in ``encrypted_extra`` when presenting - it to the user. For example, a private key might be replaced with a masked value - "XXXXXXXXXX". If the masked value is changed the corresponding entry is updated, - otherwise the old value is used (see ``unmask_encrypted_extra`` below). + This is used to remove any sensitive data in `encrypted_extra` when presenting + it to the user when a database is edited. For example, a private key might be + replaced with a masked value "XXXXXXXXXX". If the masked value is changed the + corresponding entry is updated, otherwise the old value is used (see + `unmask_encrypted_extra` below). """ - return encrypted_extra + if encrypted_extra is None or not cls.encrypted_extra_sensitive_fields: + return encrypted_extra + + try: + config = json.loads(encrypted_extra) + except (TypeError, json.JSONDecodeError): + return encrypted_extra + + masked_encrypted_extra = redact_sensitive( + config, + cls.encrypted_extra_sensitive_fields, + ) + + return json.dumps(masked_encrypted_extra) - # pylint: disable=unused-argument @classmethod def unmask_encrypted_extra(cls, old: str | None, new: str | None) -> str | None: """ - Remove masks from ``encrypted_extra``. + Remove masks from `encrypted_extra`. This method allows reusing existing values from the current encrypted extra on updates. 
It's useful for reusing masked passwords, allowing keys to be updated without having to provide sensitive data to the client. """ - return new + if old is None or new is None: + return new + + try: + old_config = json.loads(old) + new_config = json.loads(new) + except (TypeError, json.JSONDecodeError): + return new + + new_config = reveal_sensitive( + old_config, + new_config, + cls.encrypted_extra_sensitive_fields, + ) + + return json.dumps(new_config) @classmethod def get_public_information(cls) -> dict[str, Any]: diff --git a/superset/db_engine_specs/bigquery.py b/superset/db_engine_specs/bigquery.py index 7693e48da22f..11175d795744 100644 --- a/superset/db_engine_specs/bigquery.py +++ b/superset/db_engine_specs/bigquery.py @@ -17,7 +17,6 @@ from __future__ import annotations -import contextlib import re import urllib from datetime import datetime @@ -38,7 +37,7 @@ from sqlalchemy.sql import sqltypes from superset import sql_parse -from superset.constants import PASSWORD_MASK, TimeGrain +from superset.constants import TimeGrain from superset.databases.schemas import encrypted_field_properties, EncryptedString from superset.databases.utils import make_url_safe from superset.db_engine_specs.base import BaseEngineSpec, BasicPropertiesType @@ -129,6 +128,10 @@ class BigQueryEngineSpec(BaseEngineSpec): # pylint: disable=too-many-public-met supports_catalog = supports_dynamic_catalog = True + # when editing the database, mask this field in `encrypted_extra` + # pylint: disable=invalid-name + encrypted_extra_sensitive_fields = {"$.credentials_info.private_key"} + """ https://www.python.org/dev/peps/pep-0249/#arraysize raw_connections bypass the sqlalchemy-bigquery query execution context and deal with @@ -594,47 +597,6 @@ def get_parameters_from_uri( raise ValidationError("Invalid service credentials") - @classmethod - def mask_encrypted_extra(cls, encrypted_extra: str | None) -> str | None: - if encrypted_extra is None: - return encrypted_extra - - try: - config = 
json.loads(encrypted_extra) - except (json.JSONDecodeError, TypeError): - return encrypted_extra - - with contextlib.suppress(KeyError): - config["credentials_info"]["private_key"] = PASSWORD_MASK - return json.dumps(config) - - @classmethod - def unmask_encrypted_extra(cls, old: str | None, new: str | None) -> str | None: - """ - Reuse ``private_key`` if available and unchanged. - """ - if old is None or new is None: - return new - - try: - old_config = json.loads(old) - new_config = json.loads(new) - except (TypeError, json.JSONDecodeError): - return new - - if "credentials_info" not in new_config: - return new - - if "private_key" not in new_config["credentials_info"]: - return new - - if new_config["credentials_info"]["private_key"] == PASSWORD_MASK: - new_config["credentials_info"]["private_key"] = old_config[ - "credentials_info" - ]["private_key"] - - return json.dumps(new_config) - @classmethod def get_dbapi_exception_mapping(cls) -> dict[type[Exception], type[Exception]]: # pylint: disable=import-outside-toplevel @@ -776,7 +738,7 @@ def _get_fields(cls, cols: list[ResultSetColumnType]) -> list[Any]: @classmethod def parse_error_exception(cls, exception: Exception) -> Exception: try: - return Exception(str(exception).splitlines()[0].strip()) + return type(exception)(str(exception).splitlines()[0].strip()) except Exception: # pylint: disable=broad-except # If for some reason we get an exception, for example, no new line # We will return the original exception diff --git a/superset/db_engine_specs/couchbasedb.py b/superset/db_engine_specs/couchbase.py similarity index 96% rename from superset/db_engine_specs/couchbasedb.py rename to superset/db_engine_specs/couchbase.py index 71dc7276791a..f42fb699bf90 100644 --- a/superset/db_engine_specs/couchbasedb.py +++ b/superset/db_engine_specs/couchbase.py @@ -74,14 +74,15 @@ class CouchbaseParametersSchema(Schema): ) -class CouchbaseDbEngineSpec(BasicParametersMixin, BaseEngineSpec): - engine = "couchbasedb" +class 
CouchbaseEngineSpec(BasicParametersMixin, BaseEngineSpec): + engine = "couchbase" + engine_aliases = {"couchbasedb"} engine_name = "Couchbase" - default_driver = "couchbasedb" + default_driver = "couchbase" allows_joins = False allows_subqueries = False sqlalchemy_uri_placeholder = ( - "couchbasedb://user:password@host[:port]?truststorepath=value?ssl=value" + "couchbase://user:password@host[:port]?truststorepath=value?ssl=value" ) parameters_schema = CouchbaseParametersSchema() @@ -128,7 +129,7 @@ def build_sqlalchemy_uri( if parameters.get("port") is None: uri = URL.create( - "couchbasedb", + "couchbase", username=parameters.get("username"), password=parameters.get("password"), host=parameters["host"], @@ -137,7 +138,7 @@ def build_sqlalchemy_uri( ) else: uri = URL.create( - "couchbasedb", + "couchbase", username=parameters.get("username"), password=parameters.get("password"), host=parameters["host"], diff --git a/superset/db_engine_specs/databricks.py b/superset/db_engine_specs/databricks.py index d487f682aed6..d80b0fe55133 100644 --- a/superset/db_engine_specs/databricks.py +++ b/superset/db_engine_specs/databricks.py @@ -434,8 +434,26 @@ def get_default_catalog( cls, database: Database, ) -> str | None: - with database.get_inspector() as inspector: - return inspector.bind.execute("SELECT current_catalog()").scalar() + """ + Return the default catalog. + + The default behavior for Databricks is confusing. When Unity Catalog is not + enabled we have (the DB engine spec hasn't been tested with it enabled): + + > SHOW CATALOGS; + spark_catalog + > SELECT current_catalog(); + hive_metastore + + To handle permissions correctly we use the result of `SHOW CATALOGS` when a + single catalog is returned. 
+ """ + with database.get_sqla_engine() as engine: + catalogs = {catalog for (catalog,) in engine.execute("SHOW CATALOGS")} + if len(catalogs) == 1: + return catalogs.pop() + + return engine.execute("SELECT current_catalog()").scalar() @classmethod def get_prequeries( @@ -445,8 +463,10 @@ def get_prequeries( ) -> list[str]: prequeries = [] if catalog: + catalog = f"`{catalog}`" if not catalog.startswith("`") else catalog prequeries.append(f"USE CATALOG {catalog}") if schema: + schema = f"`{schema}`" if not schema.startswith("`") else schema prequeries.append(f"USE SCHEMA {schema}") return prequeries diff --git a/superset/db_engine_specs/gsheets.py b/superset/db_engine_specs/gsheets.py index fd5ec6722ba0..070be5a92659 100644 --- a/superset/db_engine_specs/gsheets.py +++ b/superset/db_engine_specs/gsheets.py @@ -17,7 +17,6 @@ from __future__ import annotations -import contextlib import logging import re from re import Pattern @@ -37,7 +36,6 @@ from sqlalchemy.engine.url import URL from superset import db, security_manager -from superset.constants import PASSWORD_MASK from superset.databases.schemas import encrypted_field_properties, EncryptedString from superset.db_engine_specs.shillelagh import ShillelaghEngineSpec from superset.errors import ErrorLevel, SupersetError, SupersetErrorType @@ -93,6 +91,10 @@ class GSheetsEngineSpec(ShillelaghEngineSpec): default_driver = "apsw" sqlalchemy_uri_placeholder = "gsheets://" + # when editing the database, mask this field in `encrypted_extra` + # pylint: disable=invalid-name + encrypted_extra_sensitive_fields = {"$.service_account_info.private_key"} + custom_errors: dict[Pattern[str], tuple[str, SupersetErrorType, dict[str, Any]]] = { SYNTAX_ERROR_REGEX: ( __( @@ -157,11 +159,11 @@ def get_extra_table_metadata( return {"metadata": metadata["extra"]} @classmethod + # pylint: disable=unused-argument def build_sqlalchemy_uri( cls, _: GSheetsParametersType, - encrypted_extra: None # pylint: disable=unused-argument - | (dict[str, 
Any]) = None, + encrypted_extra: None | (dict[str, Any]) = None, ) -> str: return "gsheets://" @@ -177,47 +179,6 @@ def get_parameters_from_uri( raise ValidationError("Invalid service credentials") - @classmethod - def mask_encrypted_extra(cls, encrypted_extra: str | None) -> str | None: - if encrypted_extra is None: - return encrypted_extra - - try: - config = json.loads(encrypted_extra) - except (TypeError, json.JSONDecodeError): - return encrypted_extra - - with contextlib.suppress(KeyError): - config["service_account_info"]["private_key"] = PASSWORD_MASK - return json.dumps(config) - - @classmethod - def unmask_encrypted_extra(cls, old: str | None, new: str | None) -> str | None: - """ - Reuse ``private_key`` if available and unchanged. - """ - if old is None or new is None: - return new - - try: - old_config = json.loads(old) - new_config = json.loads(new) - except (TypeError, json.JSONDecodeError): - return new - - if "service_account_info" not in new_config: - return new - - if "private_key" not in new_config["service_account_info"]: - return new - - if new_config["service_account_info"]["private_key"] == PASSWORD_MASK: - new_config["service_account_info"]["private_key"] = old_config[ - "service_account_info" - ]["private_key"] - - return json.dumps(new_config) - @classmethod def parameters_json_schema(cls) -> Any: """ diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py index e3cf128b7a2c..c0f6888acd28 100644 --- a/superset/db_engine_specs/hive.py +++ b/superset/db_engine_specs/hive.py @@ -79,12 +79,6 @@ def upload_to_s3(filename: str, upload_prefix: str, table: Table) -> str: ) s3 = boto3.client("s3") - - # The location is merely an S3 prefix and thus we first need to ensure that there is - # one and only one key associated with the table. 
- bucket = s3.Bucket(bucket_path) - bucket.objects.filter(Prefix=os.path.join(upload_prefix, table.table)).delete() - location = os.path.join("s3a://", bucket_path, upload_prefix, table.table) s3.upload_file( filename, diff --git a/superset/db_engine_specs/mssql.py b/superset/db_engine_specs/mssql.py index d5cc86c859a7..464f6cf2b9c8 100644 --- a/superset/db_engine_specs/mssql.py +++ b/superset/db_engine_specs/mssql.py @@ -53,6 +53,7 @@ class MssqlEngineSpec(BaseEngineSpec): max_column_name_length = 128 allows_cte_in_subquery = False allow_limit_clause = False + supports_multivalues_insert = True _time_grain_expressions = { None: "{col}", diff --git a/superset/db_engine_specs/pinot.py b/superset/db_engine_specs/pinot.py index faf919f10327..2e03f745773f 100644 --- a/superset/db_engine_specs/pinot.py +++ b/superset/db_engine_specs/pinot.py @@ -26,6 +26,11 @@ class PinotEngineSpec(BaseEngineSpec): engine = "pinot" engine_name = "Apache Pinot" + allows_subqueries = False + allows_joins = False + allows_alias_in_select = False + allows_alias_in_orderby = False + # https://docs.pinot.apache.org/users/user-guide-query/supported-transformations#datetime-functions _time_grain_expressions = { None: "{col}", diff --git a/superset/db_engine_specs/postgres.py b/superset/db_engine_specs/postgres.py index 015d5c52f240..0ec4adc5a63c 100644 --- a/superset/db_engine_specs/postgres.py +++ b/superset/db_engine_specs/postgres.py @@ -35,7 +35,7 @@ from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import SupersetException, SupersetSecurityException from superset.models.sql_lab import Query -from superset.sql_parse import SQLScript +from superset.sql.parse import SQLScript from superset.utils import core as utils, json from superset.utils.core import GenericDataType diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py index 5a375896c1d4..f0664564f872 100644 --- a/superset/db_engine_specs/presto.py +++ 
b/superset/db_engine_specs/presto.py @@ -296,10 +296,13 @@ def epoch_to_dttm(cls) -> str: return "from_unixtime({col})" @classmethod - def get_default_catalog(cls, database: "Database") -> str | None: + def get_default_catalog(cls, database: Database) -> str | None: """ Return the default catalog. """ + if database.url_object.database is None: + return None + return database.url_object.database.split("/")[0] @classmethod diff --git a/superset/db_engine_specs/snowflake.py b/superset/db_engine_specs/snowflake.py index 72116cfc321b..9680628d06b0 100644 --- a/superset/db_engine_specs/snowflake.py +++ b/superset/db_engine_specs/snowflake.py @@ -87,6 +87,12 @@ class SnowflakeEngineSpec(PostgresBaseEngineSpec): supports_dynamic_schema = True supports_catalog = supports_dynamic_catalog = True + # pylint: disable=invalid-name + encrypted_extra_sensitive_fields = { + "$.auth_params.privatekey_body", + "$.auth_params.privatekey_pass", + } + _time_grain_expressions = { None: "{col}", TimeGrain.SECOND: "DATE_TRUNC('SECOND', {col})", diff --git a/superset/db_engine_specs/trino.py b/superset/db_engine_specs/trino.py index 1eb4b307870d..3cbf6ebe79a8 100644 --- a/superset/db_engine_specs/trino.py +++ b/superset/db_engine_specs/trino.py @@ -14,21 +14,15 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-# pylint: disable=consider-using-transaction from __future__ import annotations import contextlib import logging import threading import time -from tempfile import NamedTemporaryFile from typing import Any, TYPE_CHECKING -import numpy as np -import pandas as pd -import pyarrow as pa -from flask import current_app, Flask, g -from sqlalchemy import text +from flask import ctx, current_app, Flask, g from sqlalchemy.engine.reflection import Inspector from sqlalchemy.engine.url import URL from sqlalchemy.exc import NoSuchTableError @@ -43,9 +37,7 @@ SupersetDBAPIOperationalError, SupersetDBAPIProgrammingError, ) -from superset.db_engine_specs.hive import upload_to_s3 from superset.db_engine_specs.presto import PrestoBaseEngineSpec -from superset.exceptions import SupersetException from superset.models.sql_lab import Query from superset.sql_parse import Table from superset.superset_typing import ResultSetColumnType @@ -192,7 +184,7 @@ def handle_cursor(cls, cursor: Cursor, query: Query) -> None: if tracking_url := cls.get_tracking_url(cursor): query.tracking_url = tracking_url - db.session.commit() + db.session.commit() # pylint: disable=consider-using-transaction # if query cancelation was requested prior to the handle_cursor call, but # the query was still executed, trigger the actual query cancelation now @@ -222,18 +214,29 @@ def execute_with_cursor( # Fetch the query ID beforehand, since it might fail inside the thread due to # how the SQLAlchemy session is handled. query_id = query.id + query_database = query.database execute_result: dict[str, Any] = {} execute_event = threading.Event() def _execute( - results: dict[str, Any], event: threading.Event, app: Flask + results: dict[str, Any], + event: threading.Event, + app: Flask, + g_copy: ctx._AppCtxGlobals, ) -> None: logger.debug("Query %d: Running query: %s", query_id, sql) try: + # Flask contexts are local to the thread that handles the request. 
+ # When you spawn a new thread, it does not inherit the contexts + # from the parent thread, + # meaning the g object and other context-bound variables are not + # accessible with app.app_context(): - cls.execute(cursor, sql, query.database) + for key, value in g_copy.__dict__.items(): + setattr(g, key, value) + cls.execute(cursor, sql, query_database) except Exception as ex: # pylint: disable=broad-except results["error"] = ex finally: @@ -245,10 +248,13 @@ def _execute( execute_result, execute_event, current_app._get_current_object(), # pylint: disable=protected-access + g._get_current_object(), # pylint: disable=protected-access ), ) execute_thread.start() + # Wait for the thread to start before continuing + time.sleep(0.1) # Wait for a query ID to be available before handling the cursor, as # it's required by that method; it may never become available on error. while not cursor.query_id and not execute_event.is_set(): @@ -270,7 +276,7 @@ def _execute( def prepare_cancel_query(cls, query: Query) -> None: if QUERY_CANCEL_KEY not in query.extra: query.set_extra_json_key(QUERY_EARLY_CANCEL_KEY, True) - db.session.commit() + db.session.commit() # pylint: disable=consider-using-transaction @classmethod def cancel_query(cls, cursor: Cursor, query: Query, cancel_query_id: str) -> bool: @@ -474,80 +480,3 @@ def get_indexes( return super().get_indexes(database, inspector, table) except NoSuchTableError: return [] - - @classmethod - def df_to_sql( - cls, - database: Database, - table: Table, - df: pd.DataFrame, - to_sql_kwargs: dict[str, Any], - ) -> None: - """ - Upload data from a Pandas DataFrame to a database. - - The data is stored via the binary Parquet format which is both less problematic - and more performant than a text file. - - Note this method does not create metadata for the table. 
- - :param database: The database to upload the data to - :param table: The table to upload the data to - :param df: The Pandas Dataframe with data to be uploaded - :param to_sql_kwargs: The `pandas.DataFrame.to_sql` keyword arguments - :see: superset.db_engine_specs.HiveEngineSpec.df_to_sql - """ - if to_sql_kwargs["if_exists"] == "append": - raise SupersetException("Append operation not currently supported") - - if to_sql_kwargs["if_exists"] == "fail": - if database.has_table_by_name(table.table, table.schema): - raise SupersetException("Table already exists") - elif to_sql_kwargs["if_exists"] == "replace": - with cls.get_engine(database) as engine: - engine.execute(f"DROP TABLE IF EXISTS {str(table)}") - - def _get_trino_type(dtype: np.dtype[Any]) -> str: - return { - np.dtype("bool"): "BOOLEAN", - np.dtype("float64"): "DOUBLE", - np.dtype("int64"): "BIGINT", - np.dtype("object"): "VARCHAR", - }.get(dtype, "VARCHAR") - - with NamedTemporaryFile( - dir=current_app.config["UPLOAD_FOLDER"], - suffix=".parquet", - ) as file: - pa.parquet.write_table(pa.Table.from_pandas(df), where=file.name) - - with cls.get_engine(database) as engine: - engine.execute( - # pylint: disable=consider-using-f-string - text( - """ - CREATE TABLE {table} ({schema}) - WITH ( - format = 'PARQUET', - external_location = '{location}' - ) - """.format( - location=upload_to_s3( - filename=file.name, - upload_prefix=current_app.config[ - "CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC" - ]( - database, - g.user, - table.schema, - ), - table=table, - ), - schema=", ".join( - f'"{name}" {_get_trino_type(dtype)}' - for name, dtype in df.dtypes.items() - ), - table=str(table), - ), - ), - ) diff --git a/superset/examples/birth_names.py b/superset/examples/birth_names.py index 81e31e741655..b69cf0f4a4a1 100644 --- a/superset/examples/birth_names.py +++ b/superset/examples/birth_names.py @@ -21,7 +21,7 @@ from sqlalchemy import DateTime, inspect, String from sqlalchemy.sql import column -from superset import app, 
db +from superset import app, db, security_manager from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn from superset.models.core import Database from superset.models.dashboard import Dashboard @@ -152,6 +152,7 @@ def _add_table_metrics(datasource: SqlaTable) -> None: def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: + owner = security_manager.get_user_by_id(1) metrics = [ { "expressionType": "SIMPLE", @@ -193,7 +194,6 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: slice_kwargs = { "datasource_id": tbl.id, "datasource_type": DatasourceType.TABLE, - "owners": [], } print("Creating some slices") @@ -210,6 +210,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: compare_suffix="over 5Y", metric=metric, ), + owners=[], ), Slice( **slice_kwargs, @@ -218,6 +219,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: params=get_slice_json( defaults, viz_type="pie", groupby=["gender"], metric=metric ), + owners=[], ), Slice( **slice_kwargs, @@ -232,6 +234,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: show_legend=True, metrics=metrics, ), + owners=[], ), Slice( **slice_kwargs, @@ -268,6 +271,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: ], groupby=["state"], ), + owners=[], ), Slice( **slice_kwargs, @@ -281,6 +285,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: timeseries_limit_metric=metric, metrics=[metric], ), + owners=[], ), Slice( **slice_kwargs, @@ -297,6 +302,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: adhoc_filters=[gen_filter("gender", "girl")], metric=metric, ), + owners=[], ), Slice( **slice_kwargs, @@ -310,6 +316,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: timeseries_limit_metric=metric, metrics=[metric], ), + owners=[], ), Slice( **slice_kwargs, @@ -326,6 +333,7 @@ def create_slices(tbl: SqlaTable) -> 
tuple[list[Slice], list[Slice]]: adhoc_filters=[gen_filter("gender", "boy")], metric=metric, ), + owners=[], ), Slice( **slice_kwargs, @@ -343,6 +351,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: x_axis_forma="smart_date", metrics=metrics, ), + owners=[], ), Slice( **slice_kwargs, @@ -360,6 +369,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: x_axis_forma="smart_date", metrics=metrics, ), + owners=[], ), Slice( **slice_kwargs, @@ -381,6 +391,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: } ], ), + owners=[], ), ] misc_slices = [ @@ -405,12 +416,14 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: yAxisIndex=0, yAxisIndexB=1, ), + owners=[], ), Slice( **slice_kwargs, slice_name="Num Births Trend", viz_type="line", params=get_slice_json(defaults, viz_type="line", metrics=metrics), + owners=[], ), Slice( **slice_kwargs, @@ -433,6 +446,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: } ], ), + owners=[], ), Slice( **slice_kwargs, @@ -452,6 +466,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: viz_type="big_number_total", granularity_sqla="ds", ), + owners=[], ), Slice( **slice_kwargs, @@ -484,6 +499,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: }, limit="10", ), + owners=[owner] if owner else [], ), Slice( **slice_kwargs, @@ -504,6 +520,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: "label": "SUM(num_california)", }, ), + owners=[], ), Slice( **slice_kwargs, @@ -517,6 +534,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: adhoc_filters=[gen_filter("gender", "girl")], subheader="total female participants", ), + owners=[], ), Slice( **slice_kwargs, @@ -529,6 +547,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: groupbyColumns=["state"], metrics=metrics, ), + owners=[], ), ] for slc in slices: diff --git 
a/superset/examples/configs/charts/Featured Charts/World_Map.yaml b/superset/examples/configs/charts/Featured Charts/World_Map.yaml deleted file mode 100644 index e7ae235eb946..000000000000 --- a/superset/examples/configs/charts/Featured Charts/World_Map.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-slice_name: World Map -description: null -certified_by: null -certification_details: null -viz_type: world_map -params: - datasource: 1__table - viz_type: world_map - entity: country_name - country_fieldtype: name - metric: sum__SP_POP_TOTL - adhoc_filters: - - clause: WHERE - subject: year - operator: TEMPORAL_RANGE - comparator: No filter - expressionType: SIMPLE - row_limit: 10000 - show_bubbles: true - secondary_metric: sum__SP_RUR_TOTL - max_bubble_size: "25" - color_picker: - r: 0 - g: 122 - b: 135 - a: 1 - color_by: metric - linear_color_scheme: superset_seq_1 - color_scheme: supersetColors - y_axis_format: SMART_NUMBER - extra_form_data: {} - dashboards: - - 13 -cache_timeout: null -uuid: f0606234-1644-426f-8fc6-3eaf8e77fd58 -version: 1.0.0 -dataset_uuid: 3cee7b7f-0009-4d9d-8de4-1da4cad4569e diff --git a/superset/examples/configs/dashboards/Featured_Charts.yaml b/superset/examples/configs/dashboards/Featured_Charts.yaml index 598db88ad618..20155692bd48 100644 --- a/superset/examples/configs/dashboards/Featured_Charts.yaml +++ b/superset/examples/configs/dashboards/Featured_Charts.yaml @@ -261,20 +261,6 @@ position: - GRID_ID - ROW-LIWnqpnIk5 type: CHART - CHART-aAhaxRYu_t: - children: [] - id: CHART-aAhaxRYu_t - meta: - chartId: 116 - height: 50 - sliceName: World Map - uuid: f0606234-1644-426f-8fc6-3eaf8e77fd58 - width: 4 - parents: - - ROOT_ID - - GRID_ID - - ROW-we3i1eOT75 - type: CHART CHART-gfrGP3BD76: children: [] id: CHART-gfrGP3BD76 @@ -384,7 +370,6 @@ position: - ROW-Jq9auQfs6- - ROW-3XARWMYOfz - ROW-ux6j1ePT8I - - ROW-we3i1eOT75 id: GRID_ID parents: - ROOT_ID @@ -495,16 +480,6 @@ position: - ROOT_ID - GRID_ID type: ROW - ROW-we3i1eOT75: - children: - - CHART-aAhaxRYu_t - id: ROW-we3i1eOT75 - meta: - background: BACKGROUND_TRANSPARENT - parents: - - ROOT_ID - - GRID_ID - type: ROW metadata: color_scheme: supersetAndPresetColors refresh_frequency: 0 diff --git a/superset/examples/configs/datasets/examples/wb_health_population.yaml 
b/superset/examples/configs/datasets/examples/wb_health_population.yaml deleted file mode 100644 index ea89020b8ea2..000000000000 --- a/superset/examples/configs/datasets/examples/wb_health_population.yaml +++ /dev/null @@ -1,4317 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -table_name: wb_health_population -main_dttm_col: year -description: - "<!--\nLicensed to the Apache Software Foundation (ASF) under one\nor\ - \ more contributor license agreements. See the NOTICE file\ndistributed with this\ - \ work for additional information\nregarding copyright ownership. The ASF licenses\ - \ this file\nto you under the Apache License, Version 2.0 (the\n\"License\"); you\ - \ may not use this file except in compliance\nwith the License. You may obtain\ - \ a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless\ - \ required by applicable law or agreed to in writing,\nsoftware distributed under\ - \ the License is distributed on an\n\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS\ - \ OF ANY\nKIND, either express or implied. 
See the License for the\nspecific language\ - \ governing permissions and limitations\nunder the License.\n-->\nThis data was\ - \ downloaded from the\n[World's Health Organization's website](https://datacatalog.worldbank.org/dataset/health-nutrition-and-population-statistics)\n\ - \nHere's the script that was used to massage the data:\n\n DIR = \"\"\n df_country\ - \ = pd.read_csv(DIR + '/HNP_Country.csv')\n df_country.columns = ['country_code']\ - \ + list(df_country.columns[1:])\n df_country = df_country[['country_code', 'Region']]\n\ - \ df_country.columns = ['country_code', 'region']\n\n df = pd.read_csv(DIR\ - \ + '/HNP_Data.csv')\n del df['Unnamed: 60']\n df.columns = ['country_name',\ - \ 'country_code'] + list(df.columns[2:])\n ndf = df.merge(df_country, how='inner')\n\ - \n dims = ('country_name', 'country_code', 'region')\n vv = [str(i) for i\ - \ in range(1960, 2015)]\n mdf = pd.melt(ndf, id_vars=dims + ('Indicator Code',),\ - \ value_vars=vv)\n mdf['year'] = mdf.variable + '-01-01'\n dims = dims + ('year',)\n\ - \n pdf = mdf.pivot_table(values='value', columns='Indicator Code', index=dims)\n\ - \ pdf = pdf.reset_index()\n pdf.to_csv(DIR + '/countries.csv')\n pdf.to_json(DIR\ - \ + '/countries.json', orient='records')\n\nHere's the description of the metrics\ - \ available:\n\nSeries | Code Indicator Name\n--- | ---\nNY.GNP.PCAP.CD | GNI per\ - \ capita, Atlas method (current US$)\nSE.ADT.1524.LT.FM.ZS | Literacy rate, youth\ - \ (ages 15-24), gender parity index (GPI)\nSE.ADT.1524.LT.MA.ZS | Literacy rate,\ - \ youth male (% of males ages 15-24)\nSE.ADT.1524.LT.ZS | Literacy rate, youth total\ - \ (% of people ages 15-24)\nSE.ADT.LITR.FE.ZS | Literacy rate, adult female (% of\ - \ females ages 15 and above)\nSE.ADT.LITR.MA.ZS | Literacy rate, adult male (% of\ - \ males ages 15 and above)\nSE.ADT.LITR.ZS | Literacy rate, adult total (% of people\ - \ ages 15 and above)\nSE.ENR.ORPH | Ratio of school attendance of orphans to school\ - \ attendance of 
non-orphans ages 10-14\nSE.PRM.CMPT.FE.ZS | Primary completion rate,\ - \ female (% of relevant age group)\nSE.PRM.CMPT.MA.ZS | Primary completion rate,\ - \ male (% of relevant age group)\nSE.PRM.CMPT.ZS | Primary completion rate, total\ - \ (% of relevant age group)\nSE.PRM.ENRR | School enrollment, primary (% gross)\n\ - SE.PRM.ENRR.FE | School enrollment, primary, female (% gross)\nSE.PRM.ENRR.MA |\ - \ School enrollment, primary, male (% gross)\nSE.PRM.NENR | School enrollment, primary\ - \ (% net)\nSE.PRM.NENR.FE | School enrollment, primary, female (% net)\nSE.PRM.NENR.MA\ - \ | School enrollment, primary, male (% net)\nSE.SEC.ENRR | School enrollment, secondary\ - \ (% gross)\nSE.SEC.ENRR.FE | School enrollment, secondary, female (% gross)\nSE.SEC.ENRR.MA\ - \ | School enrollment, secondary, male (% gross)\nSE.SEC.NENR | School enrollment,\ - \ secondary (% net)\nSE.SEC.NENR.FE | School enrollment, secondary, female (% net)\n\ - SE.SEC.NENR.MA | School enrollment, secondary, male (% net)\nSE.TER.ENRR | School\ - \ enrollment, tertiary (% gross)\nSE.TER.ENRR.FE | School enrollment, tertiary,\ - \ female (% gross)\nSE.XPD.TOTL.GD.ZS | Government expenditure on education, total\ - \ (% of GDP)\nSH.ANM.CHLD.ZS | Prevalence of anemia among children (% of children\ - \ under 5)\nSH.ANM.NPRG.ZS | Prevalence of anemia among non-pregnant women (% of\ - \ women ages 15-49)\nSH.CON.1524.FE.ZS | Condom use, population ages 15-24, female\ - \ (% of females ages 15-24)\nSH.CON.1524.MA.ZS | Condom use, population ages 15-24,\ - \ male (% of males ages 15-24)\nSH.CON.AIDS.FE.ZS | Condom use at last high-risk\ - \ sex, adult female (% ages 15-49)\nSH.CON.AIDS.MA.ZS | Condom use at last high-risk\ - \ sex, adult male (% ages 15-49)\nSH.DTH.COMM.ZS | Cause of death, by communicable\ - \ diseases and maternal, prenatal and nutrition conditions (% of total)\nSH.DTH.IMRT\ - \ | Number of infant deaths\nSH.DTH.INJR.ZS | Cause of death, by injury (% of total)\n\ - SH.DTH.MORT | 
Number of under-five deaths\nSH.DTH.NCOM.ZS | Cause of death, by non-communicable\ - \ diseases (% of total)\nSH.DTH.NMRT | Number of neonatal deaths\nSH.DYN.AIDS |\ - \ Adults (ages 15+) living with HIV\nSH.DYN.AIDS.DH | AIDS estimated deaths (UNAIDS\ - \ estimates)\nSH.DYN.AIDS.FE.ZS | Women's share of population ages 15+ living with\ - \ HIV (%)\nSH.DYN.AIDS.ZS | Prevalence of HIV, total (% of population ages 15-49)\n\ - SH.DYN.MORT | Mortality rate, under-5 (per 1,000 live births)\nSH.DYN.MORT.FE |\ - \ Mortality rate, under-5, female (per 1,000 live births)\nSH.DYN.MORT.MA | Mortality\ - \ rate, under-5, male (per 1,000 live births)\nSH.DYN.NMRT | Mortality rate, neonatal\ - \ (per 1,000 live births)\nSH.FPL.SATI.ZS | Met need for contraception (% of married\ - \ women ages 15-49)\nSH.H2O.SAFE.RU.ZS | Improved water source, rural (% of rural\ - \ population with access)\nSH.H2O.SAFE.UR.ZS | Improved water source, urban (% of\ - \ urban population with access)\nSH.H2O.SAFE.ZS | Improved water source (% of population\ - \ with access)\nSH.HIV.0014 | Children (0-14) living with HIV\nSH.HIV.1524.FE.ZS\ - \ | Prevalence of HIV, female (% ages 15-24)\nSH.HIV.1524.KW.FE.ZS | Comprehensive\ - \ correct knowledge of HIV/AIDS, ages 15-24, female (2 prevent ways and reject 3\ - \ misconceptions)\nSH.HIV.1524.KW.MA.ZS | Comprehensive correct knowledge of HIV/AIDS,\ - \ ages 15-24, male (2 prevent ways and reject 3 misconceptions)\nSH.HIV.1524.MA.ZS\ - \ | Prevalence of HIV, male (% ages 15-24)\nSH.HIV.ARTC.ZS | Antiretroviral therapy\ - \ coverage (% of people living with HIV)\nSH.HIV.KNOW.FE.ZS | % of females ages\ - \ 15-49 having comprehensive correct knowledge about HIV (2 prevent ways and reject\ - \ 3 misconceptions)\nSH.HIV.KNOW.MA.ZS | % of males ages 15-49 having comprehensive\ - \ correct knowledge about HIV (2 prevent ways and reject 3 misconceptions)\nSH.HIV.ORPH\ - \ | Children orphaned by HIV/AIDS\nSH.HIV.TOTL | Adults (ages 15+) and children\ - \ (0-14 
years) living with HIV\nSH.IMM.HEPB | Immunization, HepB3 (% of one-year-old\ - \ children)\nSH.IMM.HIB3 | Immunization, Hib3 (% of children ages 12-23 months)\n\ - SH.IMM.IBCG | Immunization, BCG (% of one-year-old children)\nSH.IMM.IDPT | Immunization,\ - \ DPT (% of children ages 12-23 months)\nSH.IMM.MEAS | Immunization, measles (%\ - \ of children ages 12-23 months)\nSH.IMM.POL3 | Immunization, Pol3 (% of one-year-old\ - \ children)\nSH.MED.BEDS.ZS | Hospital beds (per 1,000 people)\nSH.MED.CMHW.P3 |\ - \ Community health workers (per 1,000 people)\nSH.MED.NUMW.P3 | Nurses and midwives\ - \ (per 1,000 people)\nSH.MED.PHYS.ZS | Physicians (per 1,000 people)\nSH.MLR.NETS.ZS\ - \ | Use of insecticide-treated bed nets (% of under-5 population)\nSH.MLR.PREG.ZS\ - \ | Use of any antimalarial drug (% of pregnant women)\nSH.MLR.SPF2.ZS | Use of\ - \ Intermittent Preventive Treatment of malaria, 2+ doses of SP/Fansidar (% of pregnant\ - \ women)\nSH.MLR.TRET.ZS | Children with fever receiving antimalarial drugs (% of\ - \ children under age 5 with fever)\nSH.MMR.DTHS | Number of maternal deaths\nSH.MMR.LEVE\ - \ | Number of weeks of maternity leave\nSH.MMR.RISK | Lifetime risk of maternal\ - \ death (1 in: rate varies by country)\nSH.MMR.RISK.ZS | Lifetime risk of maternal\ - \ death (%)\nSH.MMR.WAGE.ZS | Maternal leave benefits (% of wages paid in covered\ - \ period)\nSH.PRG.ANEM | Prevalence of anemia among pregnant women (%)\nSH.PRG.ARTC.ZS\ - \ | Antiretroviral therapy coverage (% of pregnant women living with HIV)\nSH.PRG.SYPH.ZS\ - \ | Prevalence of syphilis (% of women attending antenatal care)\nSH.PRV.SMOK.FE\ - \ | Smoking prevalence, females (% of adults)\nSH.PRV.SMOK.MA | Smoking prevalence,\ - \ males (% of adults)\nSH.STA.ACSN | Improved sanitation facilities (% of population\ - \ with access)\nSH.STA.ACSN.RU | Improved sanitation facilities, rural (% of rural\ - \ population with access)\nSH.STA.ACSN.UR | Improved sanitation facilities, urban\ - \ (% of 
urban population with access)\nSH.STA.ANV4.ZS | Pregnant women receiving\ - \ prenatal care of at least four visits (% of pregnant women)\nSH.STA.ANVC.ZS |\ - \ Pregnant women receiving prenatal care (%)\nSH.STA.ARIC.ZS | ARI treatment (%\ - \ of children under 5 taken to a health provider)\nSH.STA.BFED.ZS | Exclusive breastfeeding\ - \ (% of children under 6 months)\nSH.STA.BRTC.ZS | Births attended by skilled health\ - \ staff (% of total)\nSH.STA.BRTW.ZS | Low-birthweight babies (% of births)\nSH.STA.DIAB.ZS\ - \ | Diabetes prevalence (% of population ages 20 to 79)\nSH.STA.IYCF.ZS | Infant\ - \ and young child feeding practices, all 3 IYCF (% children ages 6-23 months)\n\ - SH.STA.MALN.FE.ZS | Prevalence of underweight, weight for age, female (% of children\ - \ under 5)\nSH.STA.MALN.MA.ZS | Prevalence of underweight, weight for age, male\ - \ (% of children under 5)\nSH.STA.MALN.ZS | Prevalence of underweight, weight for\ - \ age (% of children under 5)\nSH.STA.MALR | Malaria cases reported\nSH.STA.MMRT\ - \ | Maternal mortality ratio (modeled estimate, per 100,000 live births)\nSH.STA.MMRT.NE\ - \ | Maternal mortality ratio (national estimate, per 100,000 live births)\nSH.STA.ORCF.ZS\ - \ | Diarrhea treatment (% of children under 5 receiving oral rehydration and continued\ - \ feeding)\nSH.STA.ORTH | Diarrhea treatment (% of children under 5 who received\ - \ ORS packet)\nSH.STA.OW15.FE.ZS | Prevalence of overweight, female (% of female\ - \ adults)\nSH.STA.OW15.MA.ZS | Prevalence of overweight, male (% of male adults)\n\ - SH.STA.OW15.ZS | Prevalence of overweight (% of adults)\nSH.STA.OWGH.FE.ZS | Prevalence\ - \ of overweight, weight for height, female (% of children under 5)\nSH.STA.OWGH.MA.ZS\ - \ | Prevalence of overweight, weight for height, male (% of children under 5)\n\ - SH.STA.OWGH.ZS | Prevalence of overweight, weight for height (% of children under\ - \ 5)\nSH.STA.PNVC.ZS | Postnatal care coverage (% mothers)\nSH.STA.STNT.FE.ZS |\ - \ Prevalence 
of stunting, height for age, female (% of children under 5)\nSH.STA.STNT.MA.ZS\ - \ | Prevalence of stunting, height for age, male (% of children under 5)\nSH.STA.STNT.ZS\ - \ | Prevalence of stunting, height for age (% of children under 5)\nSH.STA.WAST.FE.ZS\ - \ | Prevalence of wasting, weight for height, female (% of children under 5)\nSH.STA.WAST.MA.ZS\ - \ | Prevalence of wasting, weight for height, male (% of children under 5)\nSH.STA.WAST.ZS\ - \ | Prevalence of wasting, weight for height (% of children under 5)\nSH.SVR.WAST.FE.ZS\ - \ | Prevalence of severe wasting, weight for height, female (% of children under\ - \ 5)\nSH.SVR.WAST.MA.ZS | Prevalence of severe wasting, weight for height, male\ - \ (% of children under 5)\nSH.SVR.WAST.ZS | Prevalence of severe wasting, weight\ - \ for height (% of children under 5)\nSH.TBS.CURE.ZS | Tuberculosis treatment success\ - \ rate (% of new cases)\nSH.TBS.DTEC.ZS | Tuberculosis case detection rate (%, all\ - \ forms)\nSH.TBS.INCD | Incidence of tuberculosis (per 100,000 people)\nSH.TBS.MORT\ - \ | Tuberculosis death rate (per 100,000 people)\nSH.TBS.PREV | Prevalence of tuberculosis\ - \ (per 100,000 population)\nSH.VAC.TTNS.ZS | Newborns protected against tetanus\ - \ (%)\nSH.XPD.EXTR.ZS | External resources for health (% of total expenditure on\ - \ health)\nSH.XPD.OOPC.TO.ZS | Out-of-pocket health expenditure (% of total expenditure\ - \ on health)\nSH.XPD.OOPC.ZS | Out-of-pocket health expenditure (% of private expenditure\ - \ on health)\nSH.XPD.PCAP | Health expenditure per capita (current US$)\nSH.XPD.PCAP.PP.KD\ - \ | Health expenditure per capita, PPP (constant 2011 international $)\nSH.XPD.PRIV\ - \ | Health expenditure, private (% of total health expenditure)\nSH.XPD.PRIV.ZS\ - \ | Health expenditure, private (% of GDP)\nSH.XPD.PUBL | Health expenditure, public\ - \ (% of total health expenditure)\nSH.XPD.PUBL.GX.ZS | Health expenditure, public\ - \ (% of government expenditure)\nSH.XPD.PUBL.ZS | Health 
expenditure, public (%\ - \ of GDP)\nSH.XPD.TOTL.CD | Health expenditure, total (current US$)\nSH.XPD.TOTL.ZS\ - \ | Health expenditure, total (% of GDP)\nSI.POV.NAHC | Poverty headcount ratio\ - \ at national poverty lines (% of population)\nSI.POV.RUHC | Rural poverty headcount\ - \ ratio at national poverty lines (% of rural population)\nSI.POV.URHC | Urban poverty\ - \ headcount ratio at national poverty lines (% of urban population)\nSL.EMP.INSV.FE.ZS\ - \ | Share of women in wage employment in the nonagricultural sector (% of total\ - \ nonagricultural employment)\nSL.TLF.TOTL.FE.ZS | Labor force, female (% of total\ - \ labor force)\nSL.TLF.TOTL.IN | Labor force, total\nSL.UEM.TOTL.FE.ZS | Unemployment,\ - \ female (% of female labor force) (modeled ILO estimate)\nSL.UEM.TOTL.MA.ZS | Unemployment,\ - \ male (% of male labor force) (modeled ILO estimate)\nSL.UEM.TOTL.ZS | Unemployment,\ - \ total (% of total labor force) (modeled ILO estimate)\nSM.POP.NETM | Net migration\n\ - SN.ITK.DEFC | Number of people who are undernourished\nSN.ITK.DEFC.ZS | Prevalence\ - \ of undernourishment (% of population)\nSN.ITK.SALT.ZS | Consumption of iodized\ - \ salt (% of households)\nSN.ITK.VITA.ZS | Vitamin A supplementation coverage rate\ - \ (% of children ages 6-59 months)\nSP.ADO.TFRT | Adolescent fertility rate (births\ - \ per 1,000 women ages 15-19)\nSP.DYN.AMRT.FE | Mortality rate, adult, female (per\ - \ 1,000 female adults)\nSP.DYN.AMRT.MA | Mortality rate, adult, male (per 1,000\ - \ male adults)\nSP.DYN.CBRT.IN | Birth rate, crude (per 1,000 people)\nSP.DYN.CDRT.IN\ - \ | Death rate, crude (per 1,000 people)\nSP.DYN.CONU.ZS | Contraceptive prevalence\ - \ (% of women ages 15-49)\nSP.DYN.IMRT.FE.IN | Mortality rate, infant, female (per\ - \ 1,000 live births)\nSP.DYN.IMRT.IN | Mortality rate, infant (per 1,000 live births)\n\ - SP.DYN.IMRT.MA.IN | Mortality rate, infant, male (per 1,000 live births)\nSP.DYN.LE00.FE.IN\ - \ | Life expectancy at birth, female 
(years)\nSP.DYN.LE00.IN | Life expectancy at\ - \ birth, total (years)\nSP.DYN.LE00.MA.IN | Life expectancy at birth, male (years)\n\ - SP.DYN.SMAM.FE | Mean age at first marriage, female\nSP.DYN.SMAM.MA | Mean age at\ - \ first marriage, male\nSP.DYN.TFRT.IN | Fertility rate, total (births per woman)\n\ - SP.DYN.TO65.FE.ZS | Survival to age 65, female (% of cohort)\nSP.DYN.TO65.MA.ZS\ - \ | Survival to age 65, male (% of cohort)\nSP.DYN.WFRT | Wanted fertility rate\ - \ (births per woman)\nSP.HOU.FEMA.ZS | Female headed households (% of households\ - \ with a female head)\nSP.MTR.1519.ZS | Teenage mothers (% of women ages 15-19 who\ - \ have had children or are currently pregnant)\nSP.POP.0004.FE | Population ages\ - \ 0-4, female\nSP.POP.0004.FE.5Y | Population ages 0-4, female (% of female population)\n\ - SP.POP.0004.MA | Population ages 0-4, male\nSP.POP.0004.MA.5Y | Population ages\ - \ 0-4, male (% of male population)\nSP.POP.0014.FE.ZS | Population ages 0-14, female\ - \ (% of total)\nSP.POP.0014.MA.ZS | Population ages 0-14, male (% of total)\nSP.POP.0014.TO\ - \ | Population ages 0-14, total\nSP.POP.0014.TO.ZS | Population ages 0-14 (% of\ - \ total)\nSP.POP.0509.FE | Population ages 5-9, female\nSP.POP.0509.FE.5Y | Population\ - \ ages 5-9, female (% of female population)\nSP.POP.0509.MA | Population ages 5-9,\ - \ male\nSP.POP.0509.MA.5Y | Population ages 5-9, male (% of male population)\nSP.POP.1014.FE\ - \ | Population ages 10-14, female\nSP.POP.1014.FE.5Y | Population ages 10-14, female\ - \ (% of female population)\nSP.POP.1014.MA | Population ages 10-14, male\nSP.POP.1014.MA.5Y\ - \ | Population ages 10-14, male (% of male population)\nSP.POP.1519.FE | Population\ - \ ages 15-19, female\nSP.POP.1519.FE.5Y | Population ages 15-19, female (% of female\ - \ population)\nSP.POP.1519.MA | Population ages 15-19, male\nSP.POP.1519.MA.5Y |\ - \ Population ages 15-19, male (% of male population)\nSP.POP.1564.FE.ZS | Population\ - \ ages 15-64, female (% of 
total)\nSP.POP.1564.MA.ZS | Population ages 15-64, male\ - \ (% of total)\nSP.POP.1564.TO | Population ages 15-64, total\nSP.POP.1564.TO.ZS\ - \ | Population ages 15-64 (% of total)\nSP.POP.2024.FE | Population ages 20-24,\ - \ female\nSP.POP.2024.FE.5Y | Population ages 20-24, female (% of female population)\n\ - SP.POP.2024.MA | Population ages 20-24, male\nSP.POP.2024.MA.5Y | Population ages\ - \ 20-24, male (% of male population)\nSP.POP.2529.FE | Population ages 25-29, female\n\ - SP.POP.2529.FE.5Y | Population ages 25-29, female (% of female population)\nSP.POP.2529.MA\ - \ | Population ages 25-29, male\nSP.POP.2529.MA.5Y | Population ages 25-29, male\ - \ (% of male population)\nSP.POP.3034.FE | Population ages 30-34, female\nSP.POP.3034.FE.5Y\ - \ | Population ages 30-34, female (% of female population)\nSP.POP.3034.MA | Population\ - \ ages 30-34, male\nSP.POP.3034.MA.5Y | Population ages 30-34, male (% of male population)\n\ - SP.POP.3539.FE | Population ages 35-39, female\nSP.POP.3539.FE.5Y | Population ages\ - \ 35-39, female (% of female population)\nSP.POP.3539.MA | Population ages 35-39,\ - \ male\nSP.POP.3539.MA.5Y | Population ages 35-39, male (% of male population)\n\ - SP.POP.4044.FE | Population ages 40-44, female\nSP.POP.4044.FE.5Y | Population ages\ - \ 40-44, female (% of female population)\nSP.POP.4044.MA | Population ages 40-44,\ - \ male\nSP.POP.4044.MA.5Y | Population ages 40-44, male (% of male population)\n\ - SP.POP.4549.FE | Population ages 45-49, female\nSP.POP.4549.FE.5Y | Population ages\ - \ 45-49, female (% of female population)\nSP.POP.4549.MA | Population ages 45-49,\ - \ male\nSP.POP.4549.MA.5Y | Population ages 45-49, male (% of male population)\n\ - SP.POP.5054.FE | Population ages 50-54, female\nSP.POP.5054.FE.5Y | Population ages\ - \ 50-54, female (% of female population)\nSP.POP.5054.MA | Population ages 50-54,\ - \ male\nSP.POP.5054.MA.5Y | Population ages 50-54, male (% of male population)\n\ - SP.POP.5559.FE | 
Population ages 55-59, female\nSP.POP.5559.FE.5Y | Population ages\ - \ 55-59, female (% of female population)\nSP.POP.5559.MA | Population ages 55-59,\ - \ male\nSP.POP.5559.MA.5Y | Population ages 55-59, male (% of male population)\n\ - SP.POP.6064.FE | Population ages 60-64, female\nSP.POP.6064.FE.5Y | Population ages\ - \ 60-64, female (% of female population)\nSP.POP.6064.MA | Population ages 60-64,\ - \ male\nSP.POP.6064.MA.5Y | Population ages 60-64, male (% of male population)\n\ - SP.POP.6569.FE | Population ages 65-69, female\nSP.POP.6569.FE.5Y | Population ages\ - \ 65-69, female (% of female population)\nSP.POP.6569.MA | Population ages 65-69,\ - \ male\nSP.POP.6569.MA.5Y | Population ages 65-69, male (% of male population)\n\ - SP.POP.65UP.FE.ZS | Population ages 65 and above, female (% of total)\nSP.POP.65UP.MA.ZS\ - \ | Population ages 65 and above, male (% of total)\nSP.POP.65UP.TO | Population\ - \ ages 65 and above, total\nSP.POP.65UP.TO.ZS | Population ages 65 and above (%\ - \ of total)\nSP.POP.7074.FE | Population ages 70-74, female\nSP.POP.7074.FE.5Y |\ - \ Population ages 70-74, female (% of female population)\nSP.POP.7074.MA | Population\ - \ ages 70-74, male\nSP.POP.7074.MA.5Y | Population ages 70-74, male (% of male population)\n\ - SP.POP.7579.FE | Population ages 75-79, female\nSP.POP.7579.FE.5Y | Population ages\ - \ 75-79, female (% of female population)\nSP.POP.7579.MA | Population ages 75-79,\ - \ male\nSP.POP.7579.MA.5Y | Population ages 75-79, male (% of male population)\n\ - SP.POP.80UP.FE | Population ages 80 and above, female\nSP.POP.80UP.FE.5Y | Population\ - \ ages 80 and above, female (% of female population)\nSP.POP.80UP.MA | Population\ - \ ages 80 and above, male\nSP.POP.80UP.MA.5Y | Population ages 80 and above, male\ - \ (% of male population)\nSP.POP.AG00.FE.IN | Age population, age 0, female, interpolated\n\ - SP.POP.AG00.MA.IN | Age population, age 0, male, interpolated\nSP.POP.AG01.FE.IN\ - \ | Age population, age 
01, female, interpolated\nSP.POP.AG01.MA.IN | Age population,\ - \ age 01, male, interpolated\nSP.POP.AG02.FE.IN | Age population, age 02, female,\ - \ interpolated\nSP.POP.AG02.MA.IN | Age population, age 02, male, interpolated\n\ - SP.POP.AG03.FE.IN | Age population, age 03, female, interpolated\nSP.POP.AG03.MA.IN\ - \ | Age population, age 03, male, interpolated\nSP.POP.AG04.FE.IN | Age population,\ - \ age 04, female, interpolated\nSP.POP.AG04.MA.IN | Age population, age 04, male,\ - \ interpolated\nSP.POP.AG05.FE.IN | Age population, age 05, female, interpolated\n\ - SP.POP.AG05.MA.IN | Age population, age 05, male, interpolated\nSP.POP.AG06.FE.IN\ - \ | Age population, age 06, female, interpolated\nSP.POP.AG06.MA.IN | Age population,\ - \ age 06, male, interpolated\nSP.POP.AG07.FE.IN | Age population, age 07, female,\ - \ interpolated\nSP.POP.AG07.MA.IN | Age population, age 07, male, interpolated\n\ - SP.POP.AG08.FE.IN | Age population, age 08, female, interpolated\nSP.POP.AG08.MA.IN\ - \ | Age population, age 08, male, interpolated\nSP.POP.AG09.FE.IN | Age population,\ - \ age 09, female, interpolated\nSP.POP.AG09.MA.IN | Age population, age 09, male,\ - \ interpolated\nSP.POP.AG10.FE.IN | Age population, age 10, female, interpolated\n\ - SP.POP.AG10.MA.IN | Age population, age 10, male\nSP.POP.AG11.FE.IN | Age population,\ - \ age 11, female, interpolated\nSP.POP.AG11.MA.IN | Age population, age 11, male\n\ - SP.POP.AG12.FE.IN | Age population, age 12, female, interpolated\nSP.POP.AG12.MA.IN\ - \ | Age population, age 12, male\nSP.POP.AG13.FE.IN | Age population, age 13, female,\ - \ interpolated\nSP.POP.AG13.MA.IN | Age population, age 13, male\nSP.POP.AG14.FE.IN\ - \ | Age population, age 14, female, interpolated\nSP.POP.AG14.MA.IN | Age population,\ - \ age 14, male\nSP.POP.AG15.FE.IN | Age population, age 15, female, interpolated\n\ - SP.POP.AG15.MA.IN | Age population, age 15, male, interpolated\nSP.POP.AG16.FE.IN\ - \ | Age population, age 16, 
female, interpolated\nSP.POP.AG16.MA.IN | Age population,\ - \ age 16, male, interpolated\nSP.POP.AG17.FE.IN | Age population, age 17, female,\ - \ interpolated\nSP.POP.AG17.MA.IN | Age population, age 17, male, interpolated\n\ - SP.POP.AG18.FE.IN | Age population, age 18, female, interpolated\nSP.POP.AG18.MA.IN\ - \ | Age population, age 18, male, interpolated\nSP.POP.AG19.FE.IN | Age population,\ - \ age 19, female, interpolated\nSP.POP.AG19.MA.IN | Age population, age 19, male,\ - \ interpolated\nSP.POP.AG20.FE.IN | Age population, age 20, female, interpolated\n\ - SP.POP.AG20.MA.IN | Age population, age 20, male, interpolated\nSP.POP.AG21.FE.IN\ - \ | Age population, age 21, female, interpolated\nSP.POP.AG21.MA.IN | Age population,\ - \ age 21, male, interpolated\nSP.POP.AG22.FE.IN | Age population, age 22, female,\ - \ interpolated\nSP.POP.AG22.MA.IN | Age population, age 22, male, interpolated\n\ - SP.POP.AG23.FE.IN | Age population, age 23, female, interpolated\nSP.POP.AG23.MA.IN\ - \ | Age population, age 23, male, interpolated\nSP.POP.AG24.FE.IN | Age population,\ - \ age 24, female, interpolated\nSP.POP.AG24.MA.IN | Age population, age 24, male,\ - \ interpolated\nSP.POP.AG25.FE.IN | Age population, age 25, female, interpolated\n\ - SP.POP.AG25.MA.IN | Age population, age 25, male, interpolated\nSP.POP.BRTH.MF |\ - \ Sex ratio at birth (male births per female births)\nSP.POP.DPND | Age dependency\ - \ ratio (% of working-age population)\nSP.POP.DPND.OL | Age dependency ratio, old\ - \ (% of working-age population)\nSP.POP.DPND.YG | Age dependency ratio, young (%\ - \ of working-age population)\nSP.POP.GROW | Population growth (annual %)\nSP.POP.TOTL\ - \ | Population, total\nSP.POP.TOTL.FE.IN | Population, female\nSP.POP.TOTL.FE.ZS\ - \ | Population, female (% of total)\nSP.POP.TOTL.MA.IN | Population, male\nSP.POP.TOTL.MA.ZS\ - \ | Population, male (% of total)\nSP.REG.BRTH.RU.ZS | Completeness of birth registration,\ - \ rural (%)\nSP.REG.BRTH.UR.ZS | 
Completeness of birth registration, urban (%)\n\ - SP.REG.BRTH.ZS | Completeness of birth registration (%)\nSP.REG.DTHS.ZS | Completeness\ - \ of death registration with cause-of-death information (%)\nSP.RUR.TOTL | Rural\ - \ population\nSP.RUR.TOTL.ZG | Rural population growth (annual %)\nSP.RUR.TOTL.ZS\ - \ | Rural population (% of total population)\nSP.URB.GROW | Urban population growth\ - \ (annual %)\nSP.URB.TOTL | Urban population\nSP.URB.TOTL.IN.ZS | Urban population\ - \ (% of total)\nSP.UWT.TFRT | Unmet need for contraception (% of married women ages\ - \ 15-49)\n" -default_endpoint: null -offset: 0 -cache_timeout: null -schema: public -sql: null -params: null -template_params: null -filter_select_enabled: true -fetch_values_predicate: null -extra: null -normalize_columns: false -always_filter_main_dttm: false -uuid: 3cee7b7f-0009-4d9d-8de4-1da4cad4569e -metrics: - - metric_name: sum__SP_DYN_LE00_IN - verbose_name: null - metric_type: null - expression: sum("SP_DYN_LE00_IN") - description: null - d3format: null - currency: null - extra: null - warning_text: null - - metric_name: sum__SH_DYN_AIDS - verbose_name: null - metric_type: null - expression: sum("SH_DYN_AIDS") - description: null - d3format: null - currency: null - extra: null - warning_text: null - - metric_name: sum__SP_POP_TOTL - verbose_name: null - metric_type: null - expression: sum("SP_POP_TOTL") - description: null - d3format: null - currency: null - extra: null - warning_text: null - - metric_name: sum__SP_RUR_TOTL_ZS - verbose_name: null - metric_type: null - expression: sum("SP_RUR_TOTL_ZS") - description: null - d3format: null - currency: null - extra: null - warning_text: null - - metric_name: sum__SP_RUR_TOTL - verbose_name: null - metric_type: null - expression: sum("SP_RUR_TOTL") - description: null - d3format: null - currency: null - extra: null - warning_text: null - - metric_name: count - verbose_name: COUNT(*) - metric_type: count - expression: COUNT(*) - description: null - 
d3format: null - currency: null - extra: null - warning_text: null -columns: - - column_name: year - verbose_name: null - is_dttm: true - is_active: true - type: TIMESTAMP WITHOUT TIME ZONE - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_0004_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_0004_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_0004_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_0004_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_0014_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_0014_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_HIV_0014 - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - 
advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_0014_TO_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_0014_TO - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_0509_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_0509_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_0509_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_0509_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG00_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_LE00_MA_IN - verbose_name: null - 
is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG00_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_LE00_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_LE00_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_1014_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_1014_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_1014_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_1014_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - 
- column_name: SP_POP_AG01_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG10_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG01_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG10_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_2024_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_2024_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_2024_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_2024_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - 
description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG02_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG20_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG02_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG20_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_3034_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_3034_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_3034_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_3034_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: 
null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG03_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG03_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_4044_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_4044_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_4044_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_4044_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_5054_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_5054_FE_5Y - verbose_name: null - is_dttm: false - 
is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_5054_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_5054_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_6064_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_6064_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_7074_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_7074_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_6064_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: 
SP_POP_6064_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_7074_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_7074_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG04_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG04_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_80UP_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_80UP_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG05_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - 
python_date_format: null - extra: null - - column_name: SP_POP_AG05_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG06_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG06_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG07_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG07_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG08_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG08_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_80UP_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: 
true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_80UP_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG09_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG09_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_1519_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_1519_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_1519_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_1519_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_MTR_1519_ZS - verbose_name: null - is_dttm: false - is_active: true - type: 
DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG11_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG11_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_ADT_1524_LT_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_CON_1524_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_ADT_1524_LT_FM_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_ADT_1524_LT_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_HIV_1524_KW_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: 
SH_HIV_1524_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_CON_1524_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_HIV_1524_KW_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_HIV_1524_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG12_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG21_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG12_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG21_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - 
description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG13_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG13_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_1564_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_1564_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_1564_TO_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_1564_TO - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG14_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG14_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: 
null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG15_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_OW15_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG15_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_OW15_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_OW15_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG16_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG16_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG17_MA_IN - verbose_name: null - is_dttm: false - 
is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG17_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG18_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG18_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG19_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG19_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_2529_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_2529_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - 
column_name: SP_POP_2529_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_2529_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG22_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG22_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG23_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG23_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG24_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG24_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - 
description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG25_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_AG25_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_H2O_SAFE_RU_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_H2O_SAFE_UR_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_H2O_SAFE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MLR_SPF2_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_3539_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_3539_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: 
null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_3539_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_3539_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_IMM_HIB3 - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MED_CMHW_P3 - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MED_NUMW_P3 - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_IMM_POL3 - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_4549_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_4549_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - 
type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_4549_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_4549_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_ANV4_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_5559_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_5559_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_5559_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_5559_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_6569_MA_5Y - 
verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_6569_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_7579_MA_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_7579_FE_5Y - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_6569_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_6569_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_TO65_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_65UP_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - 
python_date_format: null - extra: null - - column_name: SP_DYN_TO65_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_65UP_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_65UP_TO_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_65UP_TO - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_7579_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_7579_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_MALN_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_WAST_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - 
filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_DIAB_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_CON_AIDS_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_ARIC_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_ACSN_RU - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_ACSN_UR - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_ANVC_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_ACSN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_ADT_LITR_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - 
advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_SMAM_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_AMRT_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_MALN_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_WAST_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_OWGH_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_MALN_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_MALR - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_STNT_MA_ZS - verbose_name: null - is_dttm: 
false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_SVR_WAST_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_WAST_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_BRTC_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_BFED_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_BRTW_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: NY_GNP_PCAP_CD - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_XPD_PCAP_PP_KD - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: 
SH_CON_AIDS_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_ANM_CHLD_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_XPD_PCAP - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_SEC_NENR_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_SEC_ENRR_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_PRM_CMPT_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_IYCF_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_ORCF_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - 
python_date_format: null - extra: null - - column_name: SH_PRG_ARTC_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_HIV_ARTC_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SI_POV_NAHC - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_PNVC_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_VAC_TTNS_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DYN_AIDS_DH - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DYN_AIDS_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DYN_AIDS_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true 
- expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DYN_AIDS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_ADT_LITR_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_SMAM_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_AMRT_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_ADT_LITR_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_ADO_TFRT - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DYN_MORT_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_IMRT_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: 
null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_PRM_NENR_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_PRM_ENRR_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_OWGH_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_HOU_FEMA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_STNT_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_SVR_WAST_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MMR_WAGE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_PRG_ANEM - verbose_name: null - is_dttm: false - is_active: true 
- type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_IMM_MEAS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_MMRT_NE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SL_UEM_TOTL_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_FPL_SATI_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_OWGH_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_ANM_NPRG_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_HIV_KNOW_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_ORTH - verbose_name: 
null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_PRV_SMOK_MA - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_MMRT - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_STA_STNT_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_SVR_WAST_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SN_ITK_VITA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SN_ITK_SALT_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_TOTL_MA_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - 
column_name: SP_POP_TOTL_MA_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_TBS_DTEC_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_TBS_INCD - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_CBRT_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_TBS_CURE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_IMM_IBCG - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MED_BEDS_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_XPD_PUBL_GX_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - 
python_date_format: null - extra: null - - column_name: SH_XPD_PUBL_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_XPD_PUBL - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_REG_BRTH_RU_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_REG_BRTH_UR_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_REG_BRTH_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_IMM_HEPB - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_TBS_PREV - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_BRTH_MF - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - 
expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_URB_GROW - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_TBS_MORT - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_URB_TOTL_IN_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_URB_TOTL - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_XPD_TOTL_CD - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_CDRT_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SN_ITK_DEFC_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SN_ITK_DEFC - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - 
groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DTH_COMM_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DTH_NCOM_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_XPD_OOPC_TO_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_XPD_OOPC_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_CONU_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_SEC_NENR_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_SEC_ENRR_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_SEC_NENR - verbose_name: null - is_dttm: false - is_active: true - type: 
DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_SEC_ENRR - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_PRM_CMPT_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_PRM_CMPT_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SI_POV_RUHC - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SI_POV_URHC - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_XPD_TOTL_GD_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_DPND_YG - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_DPND_OL - verbose_name: null - 
is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_DPND - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DYN_MORT_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_IMRT_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_REG_DTHS_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MED_PHYS_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_XPD_EXTR_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_TFRT_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - 
column_name: SP_DYN_WFRT - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DTH_INJR_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DTH_IMRT - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MMR_DTHS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DTH_NMRT - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DTH_MORT - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_IMM_IDPT - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_XPD_PRIV_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: 
null - extra: null - - column_name: SH_XPD_PRIV - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_XPD_TOTL_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DYN_NMRT - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_DYN_MORT - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_DYN_IMRT_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_TER_ENRR_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_PRM_NENR_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_PRM_ENRR_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - 
description: null - python_date_format: null - extra: null - - column_name: SE_TER_ENRR - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SL_EMP_INSV_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SL_UEM_TOTL_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MMR_LEVE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_ENR_ORPH - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_PRM_NENR - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SE_PRM_ENRR - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SL_TLF_TOTL_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - 
filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_HIV_KNOW_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_PRV_SMOK_FE - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_TOTL_FE_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_TOTL_FE_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MLR_PREG_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MLR_NETS_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MLR_TRET_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SL_UEM_TOTL_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE 
PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SM_POP_NETM - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SL_TLF_TOTL_IN - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_UWT_TFRT - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_PRG_SYPH_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_RUR_TOTL_ZG - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_GROW - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_HIV_ORPH - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_HIV_TOTL - verbose_name: null - is_dttm: false - 
is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MMR_RISK_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SH_MMR_RISK - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_POP_TOTL - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_RUR_TOTL_ZS - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: SP_RUR_TOTL - verbose_name: null - is_dttm: false - is_active: true - type: DOUBLE PRECISION - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: country_name - verbose_name: null - is_dttm: false - is_active: true - type: VARCHAR(255) - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: region - verbose_name: null - is_dttm: false - is_active: true - type: VARCHAR(255) - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null - - column_name: country_code - verbose_name: null - 
is_dttm: false - is_active: true - type: VARCHAR(3) - advanced_data_type: null - groupby: true - filterable: true - expression: null - description: null - python_date_format: null - extra: null -version: 1.0.0 -database_uuid: a2dc77af-e654-49bb-b321-40f6b559a1ee diff --git a/superset/examples/helpers.py b/superset/examples/helpers.py index 4cc9a47b2700..908142ec778e 100644 --- a/superset/examples/helpers.py +++ b/superset/examples/helpers.py @@ -14,6 +14,34 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +"""Helpers for loading Superset example datasets. + +All Superset example data files (CSV, JSON, etc.) are fetched via the +jsDelivr CDN instead of raw.githubusercontent.com to avoid GitHub API +rate limits (60 anonymous requests/hour/IP). + +jsDelivr is a multi‑CDN front for public GitHub repos and supports +arbitrary paths including nested folders. It doesn’t use the GitHub REST API +and advertises unlimited bandwidth for open-source use. + +Example URL:: + + https://cdn.jsdelivr.net/gh/apache-superset/examples-data@master/datasets/examples/slack/messages.csv + +Environment knobs +----------------- +``SUPERSET_EXAMPLES_DATA_REF`` (default: ``master``) + Tag / branch / SHA to pin so builds remain reproducible. + +``SUPERSET_EXAMPLES_BASE_URL`` + Override the base completely if you want to host the files elsewhere + (internal mirror, S3 bucket, ASF downloads, …). **Include any query + string required by your hosting (e.g. 
``?raw=true`` if you point back + to a GitHub *blob* URL).** +""" + +from __future__ import annotations + import os from typing import Any @@ -22,27 +50,41 @@ from superset.models.slice import Slice from superset.utils import json -BASE_URL = "https://github.com/apache-superset/examples-data/blob/master/" +# --------------------------------------------------------------------------- +# Public sample‑data mirror configuration +# --------------------------------------------------------------------------- +BASE_COMMIT: str = os.getenv("SUPERSET_EXAMPLES_DATA_REF", "master") +BASE_URL: str = os.getenv( + "SUPERSET_EXAMPLES_BASE_URL", + f"https://cdn.jsdelivr.net/gh/apache-superset/examples-data@{BASE_COMMIT}/", +) -misc_dash_slices: set[str] = set() # slices assembled in a 'Misc Chart' dashboard +# Slices assembled into a 'Misc Chart' dashboard +misc_dash_slices: set[str] = set() + +# --------------------------------------------------------------------------- +# Utility functions +# --------------------------------------------------------------------------- def get_table_connector_registry() -> Any: + """Return the SqlaTable registry so we can mock it in unit tests.""" return SqlaTable def get_examples_folder() -> str: + """Return local path to the examples folder (when vendored).""" return os.path.join(app.config["BASE_DIR"], "examples") def update_slice_ids(pos: dict[Any, Any]) -> list[Slice]: - """Update slice ids in position_json and return the slices found.""" + """Update slice ids in ``position_json`` and return the slices found.""" slice_components = [ component for component in pos.values() if isinstance(component, dict) and component.get("type") == "CHART" ] - slices = {} + slices: dict[str, Slice] = {} for name in {component["meta"]["sliceName"] for component in slice_components}: slc = db.session.query(Slice).filter_by(slice_name=name).first() if slc: @@ -56,17 +98,24 @@ def update_slice_ids(pos: dict[Any, Any]) -> list[Slice]: def merge_slice(slc: Slice) -> 
None: - o = db.session.query(Slice).filter_by(slice_name=slc.slice_name).first() - if o: - db.session.delete(o) + """Upsert a Slice by name.""" + existing = db.session.query(Slice).filter_by(slice_name=slc.slice_name).first() + if existing: + db.session.delete(existing) db.session.add(slc) def get_slice_json(defaults: dict[Any, Any], **kwargs: Any) -> str: + """Return JSON string for a chart definition, merging extra kwargs.""" defaults_copy = defaults.copy() defaults_copy.update(kwargs) return json.dumps(defaults_copy, indent=4, sort_keys=True) def get_example_url(filepath: str) -> str: - return f"{BASE_URL}{filepath}?raw=true" + """Return an absolute URL to *filepath* under the examples‑data repo. + + All calls are routed through jsDelivr unless overridden. Supports nested + paths like ``datasets/examples/slack/messages.csv``. + """ + return f"{BASE_URL}{filepath}" diff --git a/superset/exceptions.py b/superset/exceptions.py index dd669f5b72ae..ff24a2e2adc3 100644 --- a/superset/exceptions.py +++ b/superset/exceptions.py @@ -14,6 +14,9 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+ +from __future__ import annotations + from collections import defaultdict from typing import Any, Optional @@ -304,12 +307,30 @@ class SupersetParseError(SupersetErrorException): status = 422 - def __init__(self, sql: str, engine: Optional[str] = None): + def __init__( # pylint: disable=too-many-arguments + self, + sql: str, + engine: Optional[str] = None, + message: Optional[str] = None, + highlight: Optional[str] = None, + line: Optional[int] = None, + column: Optional[int] = None, + ): + if message is None: + parts = [_("Error parsing")] + if highlight: + parts.append(_(" near '%(highlight)s'", highlight=highlight)) + if line: + parts.append(_(" at line %(line)d", line=line)) + if column: + parts.append(_(":%(column)d", column=column)) + message = "".join(parts) + error = SupersetError( - message=_("The SQL is invalid and cannot be parsed."), + message=message, error_type=SupersetErrorType.INVALID_SQL_ERROR, level=ErrorLevel.ERROR, - extra={"sql": sql, "engine": engine}, + extra={"sql": sql, "engine": engine, "line": line, "column": column}, ) super().__init__(error) diff --git a/superset/extensions/__init__.py b/superset/extensions/__init__.py index 65ba7eebc8e0..cc2106b01224 100644 --- a/superset/extensions/__init__.py +++ b/superset/extensions/__init__.py @@ -31,6 +31,7 @@ from superset.async_events.async_query_manager_factory import AsyncQueryManagerFactory from superset.extensions.ssh import SSHManagerFactory from superset.extensions.stats_logger import BaseStatsLoggerManager +from superset.security.manager import SupersetSecurityManager from superset.utils.cache_manager import CacheManager from superset.utils.encrypt import EncryptedFieldFactory from superset.utils.feature_flag_manager import FeatureFlagManager @@ -84,9 +85,9 @@ def get_files(bundle: str, asset_type: str = "js") -> list[str]: return { "js_manifest": lambda bundle: get_files(bundle, "js"), "css_manifest": lambda bundle: get_files(bundle, "css"), - "assets_prefix": 
self.app.config["STATIC_ASSETS_PREFIX"] - if self.app - else "", + "assets_prefix": ( + self.app.config["STATIC_ASSETS_PREFIX"] if self.app else "" + ), } def parse_manifest_json(self) -> None: @@ -132,7 +133,7 @@ def init_app(self, app: Flask) -> None: migrate = Migrate() profiling = ProfilingExtension() results_backend_manager = ResultsBackendManager() -security_manager = LocalProxy(lambda: appbuilder.sm) +security_manager: SupersetSecurityManager = LocalProxy(lambda: appbuilder.sm) ssh_manager_factory = SSHManagerFactory() stats_logger_manager = BaseStatsLoggerManager() talisman = Talisman() diff --git a/superset/extensions/metadb.py b/superset/extensions/metadb.py index fd697aea820f..b2d86149383c 100644 --- a/superset/extensions/metadb.py +++ b/superset/extensions/metadb.py @@ -274,7 +274,7 @@ def __init__( # to perform updates and deletes. Otherwise we can only do inserts and selects. self._rowid: str | None = None - # Does the database allow DML? + # Does the database allow DDL/DML? self._allow_dml: bool = False # Read column information from the database, and store it for later. 
diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py index 7c35fd17a70d..afc5536848c9 100644 --- a/superset/initialization/__init__.py +++ b/superset/initialization/__init__.py @@ -32,6 +32,7 @@ from werkzeug.middleware.proxy_fix import ProxyFix from superset.constants import CHANGE_ME_SECRET_KEY +from superset.databases.utils import make_url_safe from superset.extensions import ( _event_logger, APP_DIR, @@ -53,6 +54,7 @@ talisman, ) from superset.security import SupersetSecurityManager +from superset.sql.parse import SQLGLOT_DIALECTS from superset.superset_typing import FlaskResponse from superset.tags.core import register_sqla_event_listeners from superset.utils.core import is_test, pessimistic_connection_handling @@ -480,12 +482,37 @@ def init_app(self) -> None: self.configure_wtf() self.configure_middlewares() self.configure_cache() + self.set_db_default_isolation() + self.configure_sqlglot_dialects() with self.superset_app.app_context(): self.init_app_in_ctx() self.post_init() + def set_db_default_isolation(self) -> None: + # This block sets the default isolation level for mysql to READ COMMITTED if not + # specified in the config. 
You can set your isolation in the config by using + # SQLALCHEMY_ENGINE_OPTIONS + eng_options = self.config["SQLALCHEMY_ENGINE_OPTIONS"] or {} + isolation_level = eng_options.get("isolation_level") + set_isolation_level_to = None + + if not isolation_level: + backend = make_url_safe( + self.config["SQLALCHEMY_DATABASE_URI"] + ).get_backend_name() + if backend in ("mysql", "postgresql"): + set_isolation_level_to = "READ COMMITTED" + + if set_isolation_level_to: + logger.info( + "Setting database isolation level to %s", + set_isolation_level_to, + ) + with self.superset_app.app_context(): + db.engine.execution_options(isolation_level=set_isolation_level_to) + def configure_auth_provider(self) -> None: machine_auth_provider_factory.init_app(self.superset_app) @@ -517,6 +544,9 @@ def configure_cache(self) -> None: def configure_feature_flags(self) -> None: feature_flag_manager.init_app(self.superset_app) + def configure_sqlglot_dialects(self) -> None: + SQLGLOT_DIALECTS.update(self.config["SQLGLOT_DIALECTS_EXTENSIONS"]) + @transaction() def configure_fab(self) -> None: if self.config["SILENCE_FAB"]: diff --git a/superset/jinja_context.py b/superset/jinja_context.py index 8d59eade155b..ff65e42cc3ec 100644 --- a/superset/jinja_context.py +++ b/superset/jinja_context.py @@ -34,6 +34,7 @@ from superset.constants import LRU_CACHE_MAX_SIZE from superset.exceptions import SupersetTemplateException from superset.extensions import feature_flag_manager +from superset.sql_parse import Table from superset.utils import json from superset.utils.core import ( convert_legacy_filters_into_adhoc, @@ -85,13 +86,14 @@ class ExtraCache: # Regular expression for detecting the presence of templated methods which could # be added to the cache key. 
regex = re.compile( - r"\{\{.*(" - r"current_user_id\(.*\)|" - r"current_username\(.*\)|" - r"current_user_email\(.*\)|" - r"cache_key_wrapper\(.*\)|" - r"url_param\(.*\)" - r").*\}\}" + r"(\{\{|\{%)[^{}]*?(" + r"current_user_id\([^()]*\)|" + r"current_username\([^()]*\)|" + r"current_user_email\([^()]*\)|" + r"cache_key_wrapper\([^()]*\)|" + r"url_param\([^()]*\)" + r")" + r"[^{}]*?(\}\}|\%\})" ) def __init__( @@ -565,7 +567,7 @@ def process_template(self, sql: str, **kwargs: Any) -> str: """ Makes processing a template a noop """ - return sql + return str(sql) class PrestoTemplateProcessor(JinjaTemplateProcessor): @@ -619,7 +621,7 @@ def latest_partitions(self, table_name: str) -> Optional[list[str]]: table_name, schema = self._schema_table(table_name, self._schema) return cast(PrestoEngineSpec, self._database.db_engine_spec).latest_partition( - table_name, schema, self._database + database=self._database, table=Table(table_name, schema) )[1] def latest_sub_partition(self, table_name: str, **kwargs: Any) -> Any: @@ -631,7 +633,7 @@ def latest_sub_partition(self, table_name: str, **kwargs: Any) -> Any: return cast( PrestoEngineSpec, self._database.db_engine_spec ).latest_sub_partition( - table_name=table_name, schema=schema, database=self._database, **kwargs + database=self._database, table=Table(table_name, schema), **kwargs ) latest_partition = first_latest_partition diff --git a/superset/migrations/env.py b/superset/migrations/env.py index ab9dea78554a..295eb85d1f88 100755 --- a/superset/migrations/env.py +++ b/superset/migrations/env.py @@ -14,9 +14,8 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. - import logging -import urllib.parse +import time from logging.config import fileConfig from alembic import context @@ -43,8 +42,9 @@ "SQLite Database support for metadata databases will \ be removed in a future version of Superset." 
) -decoded_uri = urllib.parse.unquote(DATABASE_URI) -config.set_main_option("sqlalchemy.url", decoded_uri) +# Escape % chars in the database URI to avoid interpolation errors in ConfigParser +escaped_uri = DATABASE_URI.replace("%", "%%") +config.set_main_option("sqlalchemy.url", escaped_uri) target_metadata = Base.metadata # pylint: disable=no-member @@ -54,6 +54,13 @@ # ... etc. +def print_duration(start_time: float) -> None: + logger.info( + "Migration scripts completed. Duration: %s", + time.strftime("%H:%M:%S", time.gmtime(time.time() - start_time)), + ) + + def run_migrations_offline() -> None: """Run migrations in 'offline' mode. @@ -66,11 +73,15 @@ def run_migrations_offline() -> None: script output. """ + start_time = time.time() + logger.info("Starting the migration scripts.") + url = config.get_main_option("sqlalchemy.url") context.configure(url=url) with context.begin_transaction(): context.run_migrations() + print_duration(start_time) def run_migrations_online() -> None: @@ -81,6 +92,9 @@ def run_migrations_online() -> None: """ + start_time = time.time() + logger.info("Starting the migration scripts.") + # this callback is used to prevent an auto-migration from being generated # when there are no changes to the schema # reference: https://alembic.sqlalchemy.org/en/latest/cookbook.html @@ -117,6 +131,7 @@ def process_revision_directives( # pylint: disable=redefined-outer-name, unused try: with context.begin_transaction(): context.run_migrations() + print_duration(start_time) finally: connection.close() diff --git a/superset/migrations/shared/catalogs.py b/superset/migrations/shared/catalogs.py index 27952371bdeb..2787ca4768d5 100644 --- a/superset/migrations/shared/catalogs.py +++ b/superset/migrations/shared/catalogs.py @@ -18,20 +18,26 @@ from __future__ import annotations import logging -from typing import Any, Type +from datetime import datetime +from typing import Any, Type, Union import sqlalchemy as sa from alembic import op +from flask import 
current_app from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import Session from superset import db, security_manager -from superset.daos.database import DatabaseDAO -from superset.migrations.shared.security_converge import add_pvms, ViewMenu +from superset.db_engine_specs.base import GenericDBException +from superset.migrations.shared.security_converge import ( + add_pvms, + Permission, + PermissionView, + ViewMenu, +) from superset.models.core import Database -logger = logging.getLogger(__name__) - +logger = logging.getLogger("alembic") Base: Type[Any] = declarative_base() @@ -41,7 +47,9 @@ class SqlaTable(Base): id = sa.Column(sa.Integer, primary_key=True) database_id = sa.Column(sa.Integer, nullable=False) + perm = sa.Column(sa.String(1000)) schema_perm = sa.Column(sa.String(1000)) + catalog_perm = sa.Column(sa.String(1000), nullable=True, default=None) schema = sa.Column(sa.String(255)) catalog = sa.Column(sa.String(256), nullable=True, default=None) @@ -84,41 +92,285 @@ class Slice(Base): id = sa.Column(sa.Integer, primary_key=True) datasource_id = sa.Column(sa.Integer) datasource_type = sa.Column(sa.String(200)) + catalog_perm = sa.Column(sa.String(1000), nullable=True, default=None) schema_perm = sa.Column(sa.String(1000)) -def get_schemas(database_name: str) -> list[str]: +ModelType = Union[Type[Query], Type[SavedQuery], Type[TabState], Type[TableSchema]] + +MODELS: list[tuple[ModelType, str]] = [ + (Query, "database_id"), + (SavedQuery, "db_id"), + (TabState, "database_id"), + (TableSchema, "database_id"), +] + + +def get_known_schemas(database_name: str, session: Session) -> list[str]: + """ + Read all known schemas from the existing schema permissions. 
+ """ + names = ( + session.query(ViewMenu.name) + .join(PermissionView, ViewMenu.id == PermissionView.view_menu_id) + .join(Permission, PermissionView.permission_id == Permission.id) + .filter( + ViewMenu.name.like(f"[{database_name}]%"), + Permission.name == "schema_access", + ) + .all() + ) + return sorted({name[0][1:-1].split("].[")[-1] for name in names}) + + +def get_batch_size(session: Session) -> int: + max_sqlite_in = 999 + return max_sqlite_in if session.bind.dialect.name == "sqlite" else 1_000_000 + + +def print_processed_batch( + start_time: datetime, + offset: int, + total_rows: int, + model: ModelType, + batch_size: int, +) -> None: + """ + Print the progress of batch processing. + + This function logs the progress of processing a batch of rows from a model. + It calculates the elapsed time since the start of the batch processing and + logs the number of rows processed along with the percentage completion. + + Parameters: + start_time (datetime): The start time of the batch processing. + offset (int): The current offset in the batch processing. + total_rows (int): The total number of rows to process. + model (ModelType): The model being processed. + batch_size (int): The size of the batch being processed. + """ + elapsed_time = datetime.now() - start_time + elapsed_seconds = elapsed_time.total_seconds() + elapsed_formatted = f"{int(elapsed_seconds // 3600):02}:{int((elapsed_seconds % 3600) // 60):02}:{int(elapsed_seconds % 60):02}" + rows_processed = min(offset + batch_size, total_rows) + logger.info( + f"{elapsed_formatted} - {rows_processed:,} of {total_rows:,} {model.__tablename__} rows processed " + f"({(rows_processed / total_rows) * 100:.2f}%)" + ) + + +def update_catalog_column( + session: Session, database: Database, catalog: str, downgrade: bool = False +) -> None: + """ + Update the `catalog` column in the specified models to the given catalog. 
+ + This function iterates over a list of models defined by MODELS and updates + the `catalog` columnto the specified catalog or None depending on the downgrade + parameter. The update is performed in batches to optimize performance and reduce + memory usage. + + Parameters: + session (Session): The SQLAlchemy session to use for database operations. + database (Database): The database instance containing the models to update. + catalog (Catalog): The new catalog value to set in the `catalog` column or + the default catalog if `downgrade` is True. + downgrade (bool): If True, the `catalog` column is set to None where the + catalog matches the specified catalog. + """ + start_time = datetime.now() + + logger.info(f"Updating {database.database_name} models to catalog {catalog}") + + for model, column in MODELS: + # Get the total number of rows that match the condition + total_rows = ( + session.query(sa.func.count(model.id)) + .filter(getattr(model, column) == database.id) + .filter(model.catalog == catalog if downgrade else True) + .scalar() + ) + + logger.info( + f"Total rows to be processed for {model.__tablename__}: {total_rows:,}" + ) + + batch_size = get_batch_size(session) + limit_value = min(batch_size, total_rows) + + # Update in batches using row numbers + for i in range(0, total_rows, batch_size): + subquery = ( + session.query(model.id) + .filter(getattr(model, column) == database.id) + .filter(model.catalog == catalog if downgrade else True) + .order_by(model.id) + .offset(i) + .limit(limit_value) + .subquery() + ) + + # SQLite does not support multiple-table criteria within UPDATE + if session.bind.dialect.name == "sqlite": + ids_to_update = [row.id for row in session.query(subquery.c.id).all()] + if ids_to_update: + session.execute( + sa.update(model) + .where(model.id.in_(ids_to_update)) + .values(catalog=None if downgrade else catalog) + .execution_options(synchronize_session=False) + ) + else: + session.execute( + sa.update(model) + .where(model.id 
== subquery.c.id) + .values(catalog=None if downgrade else catalog) + .execution_options(synchronize_session=False) + ) + + print_processed_batch(start_time, i, total_rows, model, batch_size) + + +def update_schema_catalog_perms( + session: Session, + database: Database, + catalog_perm: str | None, + catalog: str, + downgrade: bool = False, +) -> None: """ - Read all known schemas from the schema permissions. + Update schema and catalog permissions for tables and charts in a given database. + + This function updates the `catalog`, `catalog_perm`, and `schema_perm` fields for + tables and charts associated with the specified database. If `downgrade` is True, + the `catalog` and `catalog_perm` fields are set to None, otherwise they are set + to the provided `catalog` and `catalog_perm` values. + + Args: + session (Session): The SQLAlchemy session to use for database operations. + database (Database): The database object whose tables and charts will be updated. + catalog_perm (str): The new catalog permission to set. + catalog (str): The new catalog to set. + downgrade (bool, optional): If True, reset the `catalog` and `catalog_perm` fields to None. + Defaults to False. 
""" - query = f""" -SELECT - avm.name -FROM ab_view_menu avm -JOIN ab_permission_view apv ON avm.id = apv.view_menu_id -JOIN ab_permission ap ON apv.permission_id = ap.id -WHERE - avm.name LIKE '[{database_name}]%' AND - ap.name = 'schema_access'; + # Mapping of table id to schema permission + mapping = {} + + for table in ( + session.query(SqlaTable) + .filter_by(database_id=database.id) + .filter_by(catalog=catalog if downgrade else None) + ): + schema_perm = security_manager.get_schema_perm( + database.database_name, + None if downgrade else catalog, + table.schema, + ) + table.catalog = None if downgrade else catalog + table.catalog_perm = catalog_perm + table.schema_perm = schema_perm + mapping[table.id] = schema_perm + + # Select all slices of type table that belong to the database + for chart in ( + session.query(Slice) + .join(SqlaTable, Slice.datasource_id == SqlaTable.id) + .join(Database, SqlaTable.database_id == Database.id) + .filter(Database.id == database.id) + .filter(Slice.datasource_type == "table") + ): + # We only care about tables that exist in the mapping + if mapping.get(chart.datasource_id) is not None: + chart.catalog_perm = catalog_perm + chart.schema_perm = mapping[chart.datasource_id] + + +def delete_models_non_default_catalog( + session: Session, database: Database, catalog: str +) -> None: + """ + Delete models that are not in the default catalog. + + This function iterates over a list of models defined by MODELS and deletes + the rows where the `catalog` column does not match the specified catalog. + + Parameters: + session (Session): The SQLAlchemy session to use for database operations. + database (Database): The database instance containing the models to delete. + catalog (Catalog): The catalog to use to filter the models to delete. 
""" - # [PostgreSQL].[postgres].[public] => public - conn = op.get_bind() - return sorted({row[0].split(".")[-1][1:-1] for row in conn.execute(query)}) + start_time = datetime.now() + + logger.info(f"Deleting models not in the default catalog: {catalog}") + + for model, column in MODELS: + # Get the total number of rows that match the condition + total_rows = ( + session.query(sa.func.count(model.id)) + .filter(getattr(model, column) == database.id) + .filter(model.catalog != catalog) + .scalar() + ) + + logger.info( + f"Total rows to be processed for {model.__tablename__}: {total_rows:,}" + ) + + batch_size = get_batch_size(session) + limit_value = min(batch_size, total_rows) + + # Update in batches using row numbers + for i in range(0, total_rows, batch_size): + subquery = ( + session.query(model.id) + .filter(getattr(model, column) == database.id) + .filter(model.catalog != catalog) + .order_by(model.id) + .offset(i) + .limit(limit_value) + .subquery() + ) + + # SQLite does not support multiple-table criteria within DELETE + if session.bind.dialect.name == "sqlite": + ids_to_delete = [row.id for row in session.query(subquery.c.id).all()] + if ids_to_delete: + session.execute( + sa.delete(model) + .where(model.id.in_(ids_to_delete)) + .execution_options(synchronize_session=False) + ) + else: + session.execute( + sa.delete(model) + .where(model.id == subquery.c.id) + .execution_options(synchronize_session=False) + ) + + print_processed_batch(start_time, i, total_rows, model, batch_size) def upgrade_catalog_perms(engines: set[str] | None = None) -> None: """ - Update models when catalogs are introduced in a DB engine spec. + Update models and permissions when catalogs are introduced in a DB engine spec. When an existing DB engine spec starts to support catalogs we need to: - - Add a `catalog_access` permission for each catalog. - - Populate the `catalog` field with the default catalog for each related model. + - Add `catalog_access` permissions for each catalog. 
+ - Rename existing `schema_access` permissions to include the default catalog. + - Create `schema_access` permissions for each schema in the new catalogs. + + Also, for all the relevant existing models we need to: + + - Populate the `catalog` field with the default catalog. - Update `schema_perm` to include the default catalog. + - Populate `catalog_perm` to include the default catalog. """ bind = op.get_bind() session = db.Session(bind=bind) + for database in session.query(Database).all(): db_engine_spec = database.db_engine_spec if ( @@ -126,83 +378,196 @@ def upgrade_catalog_perms(engines: set[str] | None = None) -> None: ) or not db_engine_spec.supports_catalog: continue - catalog = database.get_default_catalog() - if catalog is None: - continue - - perm = security_manager.get_catalog_perm( - database.database_name, - catalog, - ) - add_pvms(session, {perm: ("catalog_access",)}) - - upgrade_schema_perms(database, catalog, session) - - # update existing models - models = [ - (Query, "database_id"), - (SavedQuery, "db_id"), - (TabState, "database_id"), - (TableSchema, "database_id"), - (SqlaTable, "database_id"), - ] - for model, column in models: - for instance in session.query(model).filter( - getattr(model, column) == database.id - ): - instance.catalog = catalog - - for table in session.query(SqlaTable).filter_by(database_id=database.id): - schema_perm = security_manager.get_schema_perm( + # For some databases, fetching the default catalog requires a connection to the + # analytical DB. If we can't connect to the analytical DB during the migration + # we should stop it, since we need the default catalog in order to update + # existing models. 
+ try: + default_catalog = database.get_default_catalog() + except GenericDBException as ex: + logger.warning( + "Error fetching default catalog for database %s: %s", database.database_name, - catalog, - table.schema, + ex, ) - table.schema_perm = schema_perm - for chart in session.query(Slice).filter_by( - datasource_id=table.id, - datasource_type="table", - ): - chart.schema_perm = schema_perm + continue - session.commit() + if default_catalog: + upgrade_database_catalogs(database, default_catalog, session) + session.flush() -def upgrade_schema_perms(database: Database, catalog: str, session: Session) -> None: + +def upgrade_database_catalogs( + database: Database, + default_catalog: str, + session: Session, +) -> None: """ - Rename existing schema permissions to include the catalog. + Upgrade a given database to support the default catalog. + """ + catalog_perm: str | None = security_manager.get_catalog_perm( + database.database_name, + default_catalog, + ) + pvms: dict[str, tuple[str, ...]] = ( + {catalog_perm: ("catalog_access",)} if catalog_perm else {} + ) + + # rename existing schema permissions to include the catalog, and also find any new + # schemas + new_schema_pvms = upgrade_schema_perms(database, default_catalog, session) + pvms.update(new_schema_pvms) + + # update existing models that have a `catalog` column so it points to the default + # catalog + update_catalog_column(session, database, default_catalog, False) + + # update `schema_perm` and `catalog_perm` for tables and charts + update_schema_catalog_perms(session, database, catalog_perm, default_catalog, False) + + if ( + not current_app.config["CATALOGS_SIMPLIFIED_MIGRATION"] + and not database.is_oauth2_enabled() + ): + # add any new catalogs discovered and their schemas + new_catalog_pvms = add_non_default_catalogs(database, default_catalog, session) + pvms.update(new_catalog_pvms) + + # add default catalog permission and permissions for any new found schemas, and also + # permissions for new 
catalogs and their schemas + add_pvms(session, pvms) + + +def add_non_default_catalogs( + database: Database, + default_catalog: str, + session: Session, +) -> dict[str, tuple[str]]: + """ + Add permissions for additional catalogs and their schemas. """ - ssh_tunnel = DatabaseDAO.get_ssh_tunnel(database.id) try: - schemas = database.get_all_schema_names( - catalog=catalog, - cache=False, - ssh_tunnel=ssh_tunnel, + catalogs = { + catalog + for catalog in database.get_all_catalog_names() + if catalog != default_catalog + } + except GenericDBException: + # If we can't connect to the analytical DB to fetch the catalogs we should just + # return. The catalog and schema permissions can be created later when the DB is + # edited. + return {} + + pvms: dict[str, tuple[str]] = {} + for catalog in catalogs: + perm: str | None = security_manager.get_catalog_perm( + database.database_name, catalog ) - except Exception: # pylint: disable=broad-except - schemas = get_schemas(database.database_name) + if perm: + pvms[perm] = ("catalog_access",) + new_schema_pvms = create_schema_perms(database, catalog) + pvms.update(new_schema_pvms) + + return pvms + + +def upgrade_schema_perms( + database: Database, + default_catalog: str, + session: Session, +) -> dict[str, tuple[str]]: + """ + Rename existing schema permissions to include the catalog. 
+ + Schema permissions are stored (and processed) as strings, in the form: + + [database_name].[schema_name] + When catalogs are first introduced for a DB engine spec we need to rename any + existing permissions to the form: + + [database_name].[default_catalog_name].[schema_name] + + """ + schemas = get_known_schemas(database.database_name, session) + + perms = {} for schema in schemas: - perm = security_manager.get_schema_perm( + current_perm: str | None = security_manager.get_schema_perm( database.database_name, None, schema, ) - existing_pvm = session.query(ViewMenu).filter_by(name=perm).one_or_none() - if existing_pvm: - existing_pvm.name = security_manager.get_schema_perm( - database.database_name, - catalog, - schema, + new_perm: str | None = security_manager.get_schema_perm( + database.database_name, + default_catalog, + schema, + ) + + if ( + existing_pvm := session.query(ViewMenu) + .filter_by(name=current_perm) + .one_or_none() + ): + # check that new_perm does not exist + if not session.query(ViewMenu).filter_by(name=new_perm).one_or_none(): + existing_pvm.name = new_perm + elif new_perm: + # new schema discovered, need to create a new permission + perms[new_perm] = ("schema_access",) + + return perms + + +def create_schema_perms( + database: Database, + catalog: str, +) -> dict[str, tuple[str]]: + """ + Create schema permissions for a given catalog. + """ + try: + schemas = database.get_all_schema_names(catalog=catalog) + except GenericDBException: + # If we can't connect to the analytical DB to fetch schemas in this catalog we + # should just return. The schema permissions can be created when the DB is + # edited. + return {} + + return { + perm: ("schema_access",) + for schema in schemas + if ( + perm := security_manager.get_schema_perm( + database.database_name, catalog, schema ) + ) + is not None + } def downgrade_catalog_perms(engines: set[str] | None = None) -> None: """ Reverse the process of `upgrade_catalog_perms`. 
+ + This should: + + - Delete all `catalog_access` permissions. + - Rename `schema_access` permissions in the default catalog to omit it. + - Delete `schema_access` permissions for schemas not in the default catalog. + + Also, for models in the default catalog we should: + + - Populate the `catalog` field with `None`. + - Update `schema_perm` to omit the default catalog. + - Populate the `catalog_perm` field with `None`. + + WARNING: models (datasets and charts) not in the default catalog are deleted! """ bind = op.get_bind() session = db.Session(bind=bind) + for database in session.query(Database).all(): db_engine_spec = database.db_engine_spec if ( @@ -210,70 +575,131 @@ def downgrade_catalog_perms(engines: set[str] | None = None) -> None: ) or not db_engine_spec.supports_catalog: continue - catalog = database.get_default_catalog() - if catalog is None: + try: + default_catalog = database.get_default_catalog() + except GenericDBException as ex: + logger.warning( + "Error fetching default catalog for database %s: %s", + database.database_name, + ex, + ) continue - downgrade_schema_perms(database, catalog, session) - - # update existing models - models = [ - (Query, "database_id"), - (SavedQuery, "db_id"), - (TabState, "database_id"), - (TableSchema, "database_id"), - (SqlaTable, "database_id"), - ] - for model, column in models: - for instance in session.query(model).filter( - getattr(model, column) == database.id - ): - instance.catalog = None - - for table in session.query(SqlaTable).filter_by(database_id=database.id): - schema_perm = security_manager.get_schema_perm( - database.database_name, - None, - table.schema, + if default_catalog: + downgrade_database_catalogs(database, default_catalog, session) + + session.flush() + + +def downgrade_database_catalogs( + database: Database, + default_catalog: str, + session: Session, +) -> None: + # remove all catalog permissions associated with the DB + prefix = f"[{database.database_name}].%" + for pvm in ( + 
session.query(PermissionView) + .join(Permission, PermissionView.permission_id == Permission.id) + .join(ViewMenu, PermissionView.view_menu_id == ViewMenu.id) + .filter( + Permission.name == "catalog_access", + ViewMenu.name.like(prefix), + ) + .all() + ): + session.delete(pvm) + session.delete(pvm.view_menu) + + # rename existing schemas permissions to omit the catalog, and remove schema + # permissions associated with other catalogs + downgrade_schema_perms(database, default_catalog, session) + + update_catalog_column(session, database, default_catalog, True) + + # update `schema_perm` and `catalog_perm` for tables and charts + update_schema_catalog_perms(session, database, None, default_catalog, True) + + # delete models referencing non-default catalogs + delete_models_non_default_catalog(session, database, default_catalog) + + # delete datasets and any associated permissions + for table in session.query(SqlaTable).filter( + SqlaTable.database_id == database.id, + SqlaTable.catalog != default_catalog, + ): + for chart in session.query(Slice).filter( + Slice.datasource_id == table.id, + Slice.datasource_type == "table", + ): + session.delete(chart) + + session.delete(table) + pvm = ( + session.query(PermissionView) + .join(Permission, PermissionView.permission_id == Permission.id) + .join(ViewMenu, PermissionView.view_menu_id == ViewMenu.id) + .filter( + Permission.name == "datasource_access", + ViewMenu.name == table.perm, ) - table.schema_perm = schema_perm - for chart in session.query(Slice).filter_by( - datasource_id=table.id, - datasource_type="table", - ): - chart.schema_perm = schema_perm + .one() + ) + session.delete(pvm) + session.delete(pvm.view_menu) - session.commit() + session.flush() -def downgrade_schema_perms(database: Database, catalog: str, session: Session) -> None: +def downgrade_schema_perms( + database: Database, + default_catalog: str, + session: Session, +) -> None: """ - Rename existing schema permissions to omit the catalog. 
+ Rename default catalog schema permissions and delete other schema permissions. """ - ssh_tunnel = DatabaseDAO.get_ssh_tunnel(database.id) - try: - schemas = database.get_all_schema_names( - catalog=catalog, - cache=False, - ssh_tunnel=ssh_tunnel, + prefix = f"[{database.database_name}].%" + pvms = ( + session.query(PermissionView) + .join(Permission, PermissionView.permission_id == Permission.id) + .join(ViewMenu, PermissionView.view_menu_id == ViewMenu.id) + .filter( + Permission.name == "schema_access", + ViewMenu.name.like(prefix), ) - except Exception: # pylint: disable=broad-except - schemas = get_schemas(database.database_name) + .all() + ) + + pvms_to_delete = [] + pvms_to_rename = [] + for pvm in pvms: + parts = pvm.view_menu.name[1:-1].split("].[") + if len(parts) != 3: + logger.warning( + "Invalid schema permission: %s. Please fix manually", + pvm.view_menu.name, + ) + continue - for schema in schemas: - perm = security_manager.get_schema_perm( - database.database_name, - catalog, - schema, - ) - existing_pvm = session.query(ViewMenu).filter_by(name=perm).one_or_none() - if existing_pvm: - new_perm = security_manager.get_schema_perm( - database.database_name, + database_name, catalog, schema = parts + + if catalog == default_catalog: + new_name = security_manager.get_schema_perm( + database_name, None, schema, ) - if pvm := session.query(ViewMenu).filter_by(name=new_perm).one_or_none(): - session.delete(pvm) - session.flush() - existing_pvm.name = new_perm + # check to see if the new name already exists + if not session.query(ViewMenu).filter_by(name=new_name).one_or_none(): + pvms_to_rename.append((pvm, new_name)) + else: + # non-default catalog, delete schema perm + pvms_to_delete.append(pvm) + + for pvm in pvms_to_delete: + session.delete(pvm) + session.delete(pvm.view_menu) + + for pvm, new_name in pvms_to_rename: + pvm.view_menu.name = new_name diff --git a/superset/migrations/shared/constraints.py b/superset/migrations/shared/constraints.py index 
d3bc140fd0ba..5ae63f57ce61 100644 --- a/superset/migrations/shared/constraints.py +++ b/superset/migrations/shared/constraints.py @@ -19,8 +19,10 @@ from dataclasses import dataclass from alembic import op +from sqlalchemy.dialects.sqlite.base import SQLiteDialect # noqa: E402 from sqlalchemy.engine.reflection import Inspector +from superset.migrations.shared.utils import has_table from superset.utils.core import generic_find_fk_constraint_name @@ -71,3 +73,23 @@ def redefine( ondelete=on_delete, onupdate=on_update, ) + + +def drop_fks_for_table(table_name: str) -> None: + """ + Drop all foreign key constraints for a table if it exist and the database + is not sqlite. + + :param table_name: The table name to drop foreign key constraints for + """ + connection = op.get_bind() + inspector = Inspector.from_engine(connection) + + if isinstance(connection.dialect, SQLiteDialect): + return # sqlite doesn't like constraints + + if has_table(table_name): + foreign_keys = inspector.get_foreign_keys(table_name) + + for fk in foreign_keys: + op.drop_constraint(fk["name"], table_name, type_="foreignkey") diff --git a/superset/migrations/shared/utils.py b/superset/migrations/shared/utils.py index 17e5b4128e46..f62f8a858a2b 100644 --- a/superset/migrations/shared/utils.py +++ b/superset/migrations/shared/utils.py @@ -21,6 +21,7 @@ from typing import Any, Callable, Optional, Union from uuid import uuid4 +import sqlalchemy as sa from alembic import op from sqlalchemy import inspect from sqlalchemy.dialects.mysql.base import MySQLDialect @@ -168,3 +169,31 @@ def try_load_json(data: Optional[str]) -> dict[str, Any]: except json.JSONDecodeError: print(f"Failed to parse: {data}") return {} + + +def has_table(table_name: str) -> bool: + """ + Check if a table exists in the database. 
+ + :param table_name: The table name + :returns: True if the table exists + """ + + insp = inspect(op.get_context().bind) + table_exists = insp.has_table(table_name) + + return table_exists + + +def add_column_if_not_exists(table_name: str, column: sa.Column) -> None: + """ + Adds a column to a table if it does not already exist. + + :param table_name: Name of the table. + :param column: SQLAlchemy Column object. + """ + if not table_has_column(table_name, column.name): + print(f"Adding column '{column.name}' to table '{table_name}'.\n") + op.add_column(table_name, column) + else: + print(f"Column '{column.name}' already exists in table '{table_name}'.\n") diff --git a/superset/migrations/versions/2024-02-07_17-13_87d38ad83218_migrate_can_view_and_drill_permission.py b/superset/migrations/versions/2024-02-07_17-13_87d38ad83218_migrate_can_view_and_drill_permission.py index 175822cc25e9..1fc4158357db 100644 --- a/superset/migrations/versions/2024-02-07_17-13_87d38ad83218_migrate_can_view_and_drill_permission.py +++ b/superset/migrations/versions/2024-02-07_17-13_87d38ad83218_migrate_can_view_and_drill_permission.py @@ -38,7 +38,7 @@ Pvm, ) -NEW_PVMS = {"Dashboard": ("can_view_chart_as_table",)} +NEW_PVMS = {"Dashboard": ("can_view_chart_as_table", "can_view_query")} PVM_MAP = { Pvm("Dashboard", "can_view_and_drill"): ( diff --git a/superset/migrations/versions/2024-03-20_16-02_678eefb4ab44_add_access_token_table.py b/superset/migrations/versions/2024-03-20_16-02_678eefb4ab44_add_access_token_table.py index ff3f06f077e6..0e92884e58c9 100644 --- a/superset/migrations/versions/2024-03-20_16-02_678eefb4ab44_add_access_token_table.py +++ b/superset/migrations/versions/2024-03-20_16-02_678eefb4ab44_add_access_token_table.py @@ -30,6 +30,8 @@ from alembic import op # noqa: E402 from sqlalchemy_utils import EncryptedType # noqa: E402 +from superset.migrations.shared.constraints import drop_fks_for_table # noqa: E402 + def upgrade(): op.create_table( @@ -80,5 +82,6 @@ def 
upgrade(): def downgrade(): + drop_fks_for_table("database_user_oauth2_tokens") op.drop_index("idx_user_id_database_id", table_name="database_user_oauth2_tokens") op.drop_table("database_user_oauth2_tokens") diff --git a/superset/migrations/versions/2024-04-01_22-44_c22cb5c2e546_user_attr_avatar_url.py b/superset/migrations/versions/2024-04-01_22-44_c22cb5c2e546_user_attr_avatar_url.py index 8e07f1b6c38b..126280f33822 100644 --- a/superset/migrations/versions/2024-04-01_22-44_c22cb5c2e546_user_attr_avatar_url.py +++ b/superset/migrations/versions/2024-04-01_22-44_c22cb5c2e546_user_attr_avatar_url.py @@ -24,14 +24,17 @@ import sqlalchemy as sa from alembic import op +from superset.migrations.shared.utils import add_column_if_not_exists + # revision identifiers, used by Alembic. revision = "c22cb5c2e546" down_revision = "678eefb4ab44" def upgrade(): - op.add_column( - "user_attribute", sa.Column("avatar_url", sa.String(length=100), nullable=True) + add_column_if_not_exists( + "user_attribute", + sa.Column("avatar_url", sa.String(length=100), nullable=True), ) diff --git a/superset/migrations/versions/2024-04-11_15-41_5f57af97bc3f_add_catalog_column.py b/superset/migrations/versions/2024-04-11_15-41_5f57af97bc3f_add_catalog_column.py index ec5733e15104..b535867d64a4 100644 --- a/superset/migrations/versions/2024-04-11_15-41_5f57af97bc3f_add_catalog_column.py +++ b/superset/migrations/versions/2024-04-11_15-41_5f57af97bc3f_add_catalog_column.py @@ -25,31 +25,23 @@ import sqlalchemy as sa from alembic import op +from superset.migrations.shared.utils import add_column_if_not_exists + # revision identifiers, used by Alembic. 
revision = "5f57af97bc3f" down_revision = "d60591c5515f" +tables = ["tables", "query", "saved_query", "tab_state", "table_schema"] + def upgrade(): - op.add_column("tables", sa.Column("catalog", sa.String(length=256), nullable=True)) - op.add_column("query", sa.Column("catalog", sa.String(length=256), nullable=True)) - op.add_column( - "saved_query", - sa.Column("catalog", sa.String(length=256), nullable=True), - ) - op.add_column( - "tab_state", - sa.Column("catalog", sa.String(length=256), nullable=True), - ) - op.add_column( - "table_schema", - sa.Column("catalog", sa.String(length=256), nullable=True), - ) + for table in tables: + add_column_if_not_exists( + table, + sa.Column("catalog", sa.String(length=256), nullable=True), + ) def downgrade(): - op.drop_column("table_schema", "catalog") - op.drop_column("tab_state", "catalog") - op.drop_column("saved_query", "catalog") - op.drop_column("query", "catalog") - op.drop_column("tables", "catalog") + for table in reversed(tables): + op.drop_column(table, "catalog") diff --git a/superset/migrations/versions/2024-05-01_10-52_58d051681a3b_add_catalog_perm_to_tables.py b/superset/migrations/versions/2024-05-01_10-52_58d051681a3b_add_catalog_perm_to_tables.py index 856ad2ad0367..6dfc2845bcb7 100644 --- a/superset/migrations/versions/2024-05-01_10-52_58d051681a3b_add_catalog_perm_to_tables.py +++ b/superset/migrations/versions/2024-05-01_10-52_58d051681a3b_add_catalog_perm_to_tables.py @@ -29,6 +29,7 @@ downgrade_catalog_perms, upgrade_catalog_perms, ) +from superset.migrations.shared.utils import add_column_if_not_exists # revision identifiers, used by Alembic. 
revision = "58d051681a3b" @@ -36,11 +37,11 @@ def upgrade(): - op.add_column( + add_column_if_not_exists( "tables", sa.Column("catalog_perm", sa.String(length=1000), nullable=True), ) - op.add_column( + add_column_if_not_exists( "slices", sa.Column("catalog_perm", sa.String(length=1000), nullable=True), ) @@ -48,6 +49,6 @@ def upgrade(): def downgrade(): + downgrade_catalog_perms(engines={"postgresql"}) op.drop_column("slices", "catalog_perm") op.drop_column("tables", "catalog_perm") - downgrade_catalog_perms(engines={"postgresql"}) diff --git a/superset/migrations/versions/2024-05-10_18-02_f84fde59123a_update_charts_with_old_time_comparison.py b/superset/migrations/versions/2024-05-10_18-02_f84fde59123a_update_charts_with_old_time_comparison.py index 431d46799fb4..248bb28f7616 100644 --- a/superset/migrations/versions/2024-05-10_18-02_f84fde59123a_update_charts_with_old_time_comparison.py +++ b/superset/migrations/versions/2024-05-10_18-02_f84fde59123a_update_charts_with_old_time_comparison.py @@ -63,18 +63,21 @@ class Slice(Base): def upgrade_comparison_params(slice_params: dict[str, Any]) -> dict[str, Any]: + if not slice_params or not isinstance(slice_params, dict): + return {} params = deepcopy(slice_params) - if "enable_time_comparison" in params: - # Remove enable_time_comparison - del params["enable_time_comparison"] - # Update time_comparison to time_compare if "time_comparison" in params: time_comp = params.pop("time_comparison") - params["time_compare"] = time_map.get( - time_comp, "inherit" - ) # Default to 'inherit' if not found + params["time_compare"] = ( + [time_map.get(time_comp, "inherit")] + if "enable_time_comparison" in params and params["enable_time_comparison"] + else [] + ) + + if "enable_time_comparison" in params: + del params["enable_time_comparison"] # Add comparison_type params["comparison_type"] = "values" @@ -82,7 +85,7 @@ def upgrade_comparison_params(slice_params: dict[str, Any]) -> dict[str, Any]: # Adjust adhoc_custom if 
"adhoc_custom" in params and params["adhoc_custom"]: adhoc = params["adhoc_custom"][0] # As there's always only one element - if adhoc["comparator"] != "No filter": + if adhoc["comparator"] and adhoc["comparator"] != "No filter": # Set start_date_offset in params, not in adhoc start_date_offset, _ = get_since_until(adhoc["comparator"]) params["start_date_offset"] = start_date_offset.strftime("%Y-%m-%d") @@ -102,6 +105,8 @@ def upgrade(): ) ): try: + if not slc.params: # Noop if there's no params on the slice + continue params = json.loads(slc.params) updated_slice_params = upgrade_comparison_params(params) slc.params = json.dumps(updated_slice_params) @@ -118,21 +123,24 @@ def upgrade(): def downgrade_comparison_params(slice_params: dict[str, Any]) -> dict[str, Any]: + if not slice_params or not isinstance(slice_params, dict): + return {} params = deepcopy(slice_params) + params["enable_time_comparison"] = False reverse_time_map = { v: k for k, v in time_map.items() } # Reverse the map from the upgrade function - # Add enable_time_comparison - params["enable_time_comparison"] = True - # Revert time_compare to time_comparison if "time_compare" in params: time_comp = params.pop("time_compare") - params["time_comparison"] = reverse_time_map.get( - time_comp, "r" - ) # Default to 'r' if not found + # Max one element in the time_compare list + time_comp = time_comp[0] if time_comp else "" + params["time_comparison"] = reverse_time_map.get(time_comp, "r") + # If the chart was using any time compare, enable time comparison + if time_comp: + params["enable_time_comparison"] = True # Remove comparison_type if "comparison_type" in params: @@ -197,6 +205,8 @@ def downgrade(): ) ): try: + if not slc.params: # Noop if there's no params on the slice + continue params = json.loads(slc.params) updated_slice_params = downgrade_comparison_params(params) slc.params = json.dumps(updated_slice_params) diff --git 
a/superset/migrations/versions/2024-05-24_11-31_02f4f7811799_remove_sl__tables.py b/superset/migrations/versions/2024-05-24_11-31_02f4f7811799_remove_sl__tables.py deleted file mode 100644 index 745da474a2a5..000000000000 --- a/superset/migrations/versions/2024-05-24_11-31_02f4f7811799_remove_sl__tables.py +++ /dev/null @@ -1,197 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -"""remove sl_ tables - -Revision ID: 02f4f7811799 -Revises: f7b6750b67e8 -Create Date: 2024-05-24 11:31:57.115586 - -""" - -import sqlalchemy as sa -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "02f4f7811799" -down_revision = "f7b6750b67e8" - - -def upgrade(): - op.drop_table("sl_dataset_columns") - op.drop_table("sl_table_columns") - op.drop_table("sl_dataset_tables") - op.drop_table("sl_columns") - op.drop_table("sl_tables") - op.drop_table("sl_dataset_users") - op.drop_table("sl_datasets") - - -def downgrade(): - op.create_table( - "sl_datasets", - sa.Column("uuid", sa.Numeric(precision=16), nullable=True), - sa.Column("created_on", sa.DateTime(), nullable=True), - sa.Column("changed_on", sa.DateTime(), nullable=True), - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("database_id", sa.Integer(), nullable=False), - sa.Column("is_physical", sa.Boolean(), nullable=True), - sa.Column("is_managed_externally", sa.Boolean(), nullable=False), - sa.Column("name", sa.Text(), nullable=True), - sa.Column("expression", sa.Text(), nullable=True), - sa.Column("external_url", sa.Text(), nullable=True), - sa.Column("extra_json", sa.Text(), nullable=True), - sa.Column("created_by_fk", sa.Integer(), nullable=True), - sa.Column("changed_by_fk", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint( - ["changed_by_fk"], - ["ab_user.id"], - ), - sa.ForeignKeyConstraint( - ["created_by_fk"], - ["ab_user.id"], - ), - sa.ForeignKeyConstraint( - ["database_id"], - ["dbs.id"], - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("uuid"), - ) - op.create_table( - "sl_tables", - sa.Column("uuid", sa.Numeric(precision=16), nullable=True), - sa.Column("created_on", sa.DateTime(), nullable=True), - sa.Column("changed_on", sa.DateTime(), nullable=True), - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("database_id", sa.Integer(), nullable=False), - sa.Column("is_managed_externally", sa.Boolean(), nullable=False), - sa.Column("catalog", sa.Text(), nullable=True), - sa.Column("schema", sa.Text(), nullable=True), - sa.Column("name", sa.Text(), nullable=True), - sa.Column("external_url", sa.Text(), nullable=True), - sa.Column("extra_json", 
sa.Text(), nullable=True), - sa.Column("created_by_fk", sa.Integer(), nullable=True), - sa.Column("changed_by_fk", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint( - ["changed_by_fk"], - ["ab_user.id"], - ), - sa.ForeignKeyConstraint( - ["created_by_fk"], - ["ab_user.id"], - ), - sa.ForeignKeyConstraint( - ["database_id"], - ["dbs.id"], - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("uuid"), - ) - op.create_table( - "sl_columns", - sa.Column("uuid", sa.Numeric(precision=16), nullable=True), - sa.Column("created_on", sa.DateTime(), nullable=True), - sa.Column("changed_on", sa.DateTime(), nullable=True), - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("is_aggregation", sa.Boolean(), nullable=False), - sa.Column("is_additive", sa.Boolean(), nullable=False), - sa.Column("is_dimensional", sa.Boolean(), nullable=False), - sa.Column("is_filterable", sa.Boolean(), nullable=False), - sa.Column("is_increase_desired", sa.Boolean(), nullable=False), - sa.Column("is_managed_externally", sa.Boolean(), nullable=False), - sa.Column("is_partition", sa.Boolean(), nullable=False), - sa.Column("is_physical", sa.Boolean(), nullable=False), - sa.Column("is_temporal", sa.Boolean(), nullable=False), - sa.Column("is_spatial", sa.Boolean(), nullable=False), - sa.Column("name", sa.Text(), nullable=True), - sa.Column("type", sa.Text(), nullable=True), - sa.Column("unit", sa.Text(), nullable=True), - sa.Column("expression", sa.Text(), nullable=True), - sa.Column("description", sa.Text(), nullable=True), - sa.Column("warning_text", sa.Text(), nullable=True), - sa.Column("external_url", sa.Text(), nullable=True), - sa.Column("extra_json", sa.Text(), nullable=True), - sa.Column("created_by_fk", sa.Integer(), nullable=True), - sa.Column("changed_by_fk", sa.Integer(), nullable=True), - sa.Column("advanced_data_type", sa.Text(), nullable=True), - sa.ForeignKeyConstraint( - ["changed_by_fk"], - ["ab_user.id"], - ), - sa.ForeignKeyConstraint( - ["created_by_fk"], - 
["ab_user.id"], - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("uuid"), - ) - op.create_table( - "sl_dataset_users", - sa.Column("dataset_id", sa.Integer(), nullable=False), - sa.Column("user_id", sa.Integer(), nullable=False), - sa.ForeignKeyConstraint( - ["dataset_id"], - ["sl_datasets.id"], - ), - sa.ForeignKeyConstraint( - ["user_id"], - ["ab_user.id"], - ), - sa.PrimaryKeyConstraint("dataset_id", "user_id"), - ) - op.create_table( - "sl_dataset_tables", - sa.Column("dataset_id", sa.Integer(), nullable=False), - sa.Column("table_id", sa.Integer(), nullable=False), - sa.ForeignKeyConstraint( - ["dataset_id"], - ["sl_datasets.id"], - ), - sa.ForeignKeyConstraint( - ["table_id"], - ["sl_tables.id"], - ), - sa.PrimaryKeyConstraint("dataset_id", "table_id"), - ) - op.create_table( - "sl_table_columns", - sa.Column("table_id", sa.Integer(), nullable=False), - sa.Column("column_id", sa.Integer(), nullable=False), - sa.ForeignKeyConstraint( - ["column_id"], - ["sl_columns.id"], - ), - sa.ForeignKeyConstraint( - ["table_id"], - ["sl_tables.id"], - ), - sa.PrimaryKeyConstraint("table_id", "column_id"), - ) - op.create_table( - "sl_dataset_columns", - sa.Column("dataset_id", sa.Integer(), nullable=False), - sa.Column("column_id", sa.Integer(), nullable=False), - sa.ForeignKeyConstraint( - ["column_id"], - ["sl_columns.id"], - ), - sa.ForeignKeyConstraint( - ["dataset_id"], - ["sl_datasets.id"], - ), - sa.PrimaryKeyConstraint("dataset_id", "column_id"), - ) diff --git a/superset/migrations/versions/2024-05-24_11-31_02f4f7811799_remove_sl_dataset_columns_table.py b/superset/migrations/versions/2024-05-24_11-31_02f4f7811799_remove_sl_dataset_columns_table.py new file mode 100644 index 000000000000..03158fabd97b --- /dev/null +++ b/superset/migrations/versions/2024-05-24_11-31_02f4f7811799_remove_sl_dataset_columns_table.py @@ -0,0 +1,58 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""remove sl_dataset_columns tables + +Revision ID: 02f4f7811799 +Revises: f7b6750b67e8 +Create Date: 2024-05-24 11:31:57.115586 + +""" + +import sqlalchemy as sa +from alembic import op + +from superset.migrations.shared.constraints import drop_fks_for_table +from superset.migrations.shared.utils import has_table + +# revision identifiers, used by Alembic. 
+revision = "02f4f7811799" +down_revision = "f7b6750b67e8" + +table_name = "sl_dataset_columns" + + +def upgrade(): + if has_table(table_name): + drop_fks_for_table(table_name) + op.drop_table(table_name) + + +def downgrade(): + op.create_table( + table_name, + sa.Column("dataset_id", sa.Integer(), nullable=False), + sa.Column("column_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["column_id"], + ["sl_columns.id"], + ), + sa.ForeignKeyConstraint( + ["dataset_id"], + ["sl_datasets.id"], + ), + sa.PrimaryKeyConstraint("dataset_id", "column_id"), + ) diff --git a/superset/migrations/versions/2024-08-13_15-17_39549add7bfc_remove_sl_table_columns_table.py b/superset/migrations/versions/2024-08-13_15-17_39549add7bfc_remove_sl_table_columns_table.py new file mode 100644 index 000000000000..1ec41f5e1ca8 --- /dev/null +++ b/superset/migrations/versions/2024-08-13_15-17_39549add7bfc_remove_sl_table_columns_table.py @@ -0,0 +1,58 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+"""remove sl_table_columns_table + +Revision ID: 39549add7bfc +Revises: 02f4f7811799 +Create Date: 2024-08-13 15:17:23.273168 + +""" + +import sqlalchemy as sa +from alembic import op + +from superset.migrations.shared.constraints import drop_fks_for_table +from superset.migrations.shared.utils import has_table + +# revision identifiers, used by Alembic. +revision = "39549add7bfc" +down_revision = "02f4f7811799" + +table_name = "sl_table_columns" + + +def upgrade(): + if has_table(table_name): + drop_fks_for_table(table_name) + op.drop_table(table_name) + + +def downgrade(): + op.create_table( + table_name, + sa.Column("table_id", sa.Integer(), nullable=False), + sa.Column("column_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["column_id"], + ["sl_columns.id"], + ), + sa.ForeignKeyConstraint( + ["table_id"], + ["sl_tables.id"], + ), + sa.PrimaryKeyConstraint("table_id", "column_id"), + ) diff --git a/superset/migrations/versions/2024-08-13_15-23_38f4144e8558_remove_sl_dataset_tables.py b/superset/migrations/versions/2024-08-13_15-23_38f4144e8558_remove_sl_dataset_tables.py new file mode 100644 index 000000000000..4931456a3383 --- /dev/null +++ b/superset/migrations/versions/2024-08-13_15-23_38f4144e8558_remove_sl_dataset_tables.py @@ -0,0 +1,58 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +"""remove sl_dataset_tables + +Revision ID: 38f4144e8558 +Revises: 39549add7bfc +Create Date: 2024-08-13 15:23:28.768963 + +""" + +import sqlalchemy as sa +from alembic import op + +from superset.migrations.shared.constraints import drop_fks_for_table +from superset.migrations.shared.utils import has_table + +# revision identifiers, used by Alembic. +revision = "38f4144e8558" +down_revision = "39549add7bfc" + +table_name = "sl_dataset_tables" + + +def upgrade(): + if has_table(table_name): + drop_fks_for_table(table_name) + op.drop_table(table_name) + + +def downgrade(): + op.create_table( + table_name, + sa.Column("dataset_id", sa.Integer(), nullable=False), + sa.Column("table_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["dataset_id"], + ["sl_datasets.id"], + ), + sa.ForeignKeyConstraint( + ["table_id"], + ["sl_tables.id"], + ), + sa.PrimaryKeyConstraint("dataset_id", "table_id"), + ) diff --git a/superset/migrations/versions/2024-08-13_15-27_e53fd48cc078_remove_sl_dataset_users.py b/superset/migrations/versions/2024-08-13_15-27_e53fd48cc078_remove_sl_dataset_users.py new file mode 100644 index 000000000000..359b05dcd494 --- /dev/null +++ b/superset/migrations/versions/2024-08-13_15-27_e53fd48cc078_remove_sl_dataset_users.py @@ -0,0 +1,58 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""remove sl_dataset_users + +Revision ID: e53fd48cc078 +Revises: 38f4144e8558 +Create Date: 2024-08-13 15:27:11.589886 + +""" + +import sqlalchemy as sa +from alembic import op + +from superset.migrations.shared.constraints import drop_fks_for_table +from superset.migrations.shared.utils import has_table + +# revision identifiers, used by Alembic. +revision = "e53fd48cc078" +down_revision = "38f4144e8558" + +table_name = "sl_dataset_users" + + +def upgrade(): + if has_table(table_name): + drop_fks_for_table(table_name) + op.drop_table(table_name) + + +def downgrade(): + op.create_table( + table_name, + sa.Column("dataset_id", sa.Integer(), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["dataset_id"], + ["sl_datasets.id"], + ), + sa.ForeignKeyConstraint( + ["user_id"], + ["ab_user.id"], + ), + sa.PrimaryKeyConstraint("dataset_id", "user_id"), + ) diff --git a/superset/migrations/versions/2024-08-13_15-29_a6b32d2d07b1_remove_sl_columns.py b/superset/migrations/versions/2024-08-13_15-29_a6b32d2d07b1_remove_sl_columns.py new file mode 100644 index 000000000000..1562ed962a0c --- /dev/null +++ b/superset/migrations/versions/2024-08-13_15-29_a6b32d2d07b1_remove_sl_columns.py @@ -0,0 +1,82 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""remove sl_columns + +Revision ID: a6b32d2d07b1 +Revises: e53fd48cc078 +Create Date: 2024-08-13 15:29:33.135672 + +""" + +import sqlalchemy as sa +from alembic import op + +from superset.migrations.shared.constraints import drop_fks_for_table +from superset.migrations.shared.utils import has_table + +# revision identifiers, used by Alembic. +revision = "a6b32d2d07b1" +down_revision = "e53fd48cc078" + +table_name = "sl_columns" + + +def upgrade(): + if has_table(table_name): + drop_fks_for_table(table_name) + op.drop_table(table_name) + + +def downgrade(): + op.create_table( + table_name, + sa.Column("uuid", sa.Numeric(precision=16), nullable=True), + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("changed_on", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("is_aggregation", sa.Boolean(), nullable=False), + sa.Column("is_additive", sa.Boolean(), nullable=False), + sa.Column("is_dimensional", sa.Boolean(), nullable=False), + sa.Column("is_filterable", sa.Boolean(), nullable=False), + sa.Column("is_increase_desired", sa.Boolean(), nullable=False), + sa.Column("is_managed_externally", sa.Boolean(), nullable=False), + sa.Column("is_partition", sa.Boolean(), nullable=False), + sa.Column("is_physical", sa.Boolean(), nullable=False), + sa.Column("is_temporal", sa.Boolean(), nullable=False), + sa.Column("is_spatial", sa.Boolean(), 
nullable=False), + sa.Column("name", sa.Text(), nullable=True), + sa.Column("type", sa.Text(), nullable=True), + sa.Column("unit", sa.Text(), nullable=True), + sa.Column("expression", sa.Text(), nullable=True), + sa.Column("description", sa.Text(), nullable=True), + sa.Column("warning_text", sa.Text(), nullable=True), + sa.Column("external_url", sa.Text(), nullable=True), + sa.Column("extra_json", sa.Text(), nullable=True), + sa.Column("created_by_fk", sa.Integer(), nullable=True), + sa.Column("changed_by_fk", sa.Integer(), nullable=True), + sa.Column("advanced_data_type", sa.Text(), nullable=True), + sa.ForeignKeyConstraint( + ["changed_by_fk"], + ["ab_user.id"], + ), + sa.ForeignKeyConstraint( + ["created_by_fk"], + ["ab_user.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("uuid"), + ) diff --git a/superset/migrations/versions/2024-08-13_15-31_007a1abffe7e_remove_sl_tables.py b/superset/migrations/versions/2024-08-13_15-31_007a1abffe7e_remove_sl_tables.py new file mode 100644 index 000000000000..106cd7a1704a --- /dev/null +++ b/superset/migrations/versions/2024-08-13_15-31_007a1abffe7e_remove_sl_tables.py @@ -0,0 +1,74 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+"""remove sl_tables + +Revision ID: 007a1abffe7e +Revises: a6b32d2d07b1 +Create Date: 2024-08-13 15:31:31.478017 + +""" + +import sqlalchemy as sa +from alembic import op + +from superset.migrations.shared.constraints import drop_fks_for_table +from superset.migrations.shared.utils import has_table + +# revision identifiers, used by Alembic. +revision = "007a1abffe7e" +down_revision = "a6b32d2d07b1" + +table_name = "sl_tables" + + +def upgrade(): + if has_table(table_name): + drop_fks_for_table(table_name) + op.drop_table(table_name) + + +def downgrade(): + op.create_table( + table_name, + sa.Column("uuid", sa.Numeric(precision=16), nullable=True), + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("changed_on", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("database_id", sa.Integer(), nullable=False), + sa.Column("is_managed_externally", sa.Boolean(), nullable=False), + sa.Column("catalog", sa.Text(), nullable=True), + sa.Column("schema", sa.Text(), nullable=True), + sa.Column("name", sa.Text(), nullable=True), + sa.Column("external_url", sa.Text(), nullable=True), + sa.Column("extra_json", sa.Text(), nullable=True), + sa.Column("created_by_fk", sa.Integer(), nullable=True), + sa.Column("changed_by_fk", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["changed_by_fk"], + ["ab_user.id"], + ), + sa.ForeignKeyConstraint( + ["created_by_fk"], + ["ab_user.id"], + ), + sa.ForeignKeyConstraint( + ["database_id"], + ["dbs.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("uuid"), + ) diff --git a/superset/migrations/versions/2024-08-13_15-33_48cbb571fa3a_remove_sl_datasets.py b/superset/migrations/versions/2024-08-13_15-33_48cbb571fa3a_remove_sl_datasets.py new file mode 100644 index 000000000000..c35d3d7572e5 --- /dev/null +++ b/superset/migrations/versions/2024-08-13_15-33_48cbb571fa3a_remove_sl_datasets.py @@ -0,0 +1,74 @@ +# Licensed to the Apache Software Foundation (ASF) 
under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""remove sl_datasets + +Revision ID: 48cbb571fa3a +Revises: 007a1abffe7e +Create Date: 2024-08-13 15:33:14.551012 + +""" + +import sqlalchemy as sa +from alembic import op + +from superset.migrations.shared.constraints import drop_fks_for_table +from superset.migrations.shared.utils import has_table + +# revision identifiers, used by Alembic. 
+revision = "48cbb571fa3a" +down_revision = "007a1abffe7e" + +table_name = "sl_datasets" + + +def upgrade(): + if has_table(table_name): + drop_fks_for_table(table_name) + op.drop_table(table_name) + + +def downgrade(): + op.create_table( + table_name, + sa.Column("uuid", sa.Numeric(precision=16), nullable=True), + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("changed_on", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("database_id", sa.Integer(), nullable=False), + sa.Column("is_physical", sa.Boolean(), nullable=True), + sa.Column("is_managed_externally", sa.Boolean(), nullable=False), + sa.Column("name", sa.Text(), nullable=True), + sa.Column("expression", sa.Text(), nullable=True), + sa.Column("external_url", sa.Text(), nullable=True), + sa.Column("extra_json", sa.Text(), nullable=True), + sa.Column("created_by_fk", sa.Integer(), nullable=True), + sa.Column("changed_by_fk", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["changed_by_fk"], + ["ab_user.id"], + ), + sa.ForeignKeyConstraint( + ["created_by_fk"], + ["ab_user.id"], + ), + sa.ForeignKeyConstraint( + ["database_id"], + ["dbs.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("uuid"), + ) diff --git a/superset/models/core.py b/superset/models/core.py index 512c5a93e300..be9251b60668 100755 --- a/superset/models/core.py +++ b/superset/models/core.py @@ -490,7 +490,7 @@ def _get_sqla_engine( # pylint: disable=too-many-locals g.user.id, self.db_engine_spec, ) - if hasattr(g, "user") and hasattr(g.user, "id") and oauth2_config + if oauth2_config and hasattr(g, "user") and hasattr(g.user, "id") else None ) # If using MySQL or Presto for example, will set url.username diff --git a/superset/models/helpers.py b/superset/models/helpers.py index 1c6ad2f5d3bb..42a64eb99b66 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -22,7 +22,6 @@ import logging import re import uuid -from collections import 
defaultdict from collections.abc import Hashable from datetime import datetime, timedelta from typing import Any, cast, NamedTuple, Optional, TYPE_CHECKING, Union @@ -52,7 +51,7 @@ from sqlalchemy.sql.selectable import Alias, TableClause from sqlalchemy_utils import UUIDType -from superset import app, db, is_feature_enabled, security_manager +from superset import app, db, is_feature_enabled from superset.advanced_data_type.types import AdvancedDataTypeResponse from superset.common.db_query_status import QueryStatus from superset.common.utils.time_range_utils import get_since_until_from_time_range @@ -69,13 +68,12 @@ ) from superset.extensions import feature_flag_manager from superset.jinja_context import BaseTemplateProcessor +from superset.sql.parse import SQLScript from superset.sql_parse import ( has_table_query, insert_rls_in_predicate, ParsedQuery, sanitize_clause, - SQLScript, - SQLStatement, ) from superset.superset_typing import ( AdhocMetric, @@ -115,6 +113,7 @@ def validate_adhoc_subquery( sql: str, database_id: int, + engine: str, default_schema: str, ) -> str: """ @@ -129,7 +128,12 @@ def validate_adhoc_subquery( """ statements = [] for statement in sqlparse.parse(sql): - if has_table_query(statement): + try: + has_table = has_table_query(str(statement), engine) + except SupersetParseError: + has_table = True + + if has_table: if not is_feature_enabled("ALLOW_ADHOC_SUBQUERY"): raise SupersetSecurityException( SupersetError( @@ -138,7 +142,9 @@ def validate_adhoc_subquery( level=ErrorLevel.ERROR, ) ) + # TODO (betodealmeida): reimplement with sqlglot statement = insert_rls_in_predicate(statement, database_id, default_schema) + statements.append(statement) return ";\n".join(str(statement) for statement in statements) @@ -806,52 +812,18 @@ def get_fetch_values_predicate( def get_sqla_row_level_filters( self, - template_processor: Optional[BaseTemplateProcessor] = None, + template_processor: Optional[BaseTemplateProcessor] = None, # pylint: 
disable=unused-argument ) -> list[TextClause]: - """ - Return the appropriate row level security filters for this table and the - current user. A custom username can be passed when the user is not present in the - Flask global namespace. - - :param template_processor: The template processor to apply to the filters. - :returns: A list of SQL clauses to be ANDed together. - """ - template_processor = template_processor or self.get_template_processor() - - all_filters: list[TextClause] = [] - filter_groups: dict[Union[int, str], list[TextClause]] = defaultdict(list) - try: - for filter_ in security_manager.get_rls_filters(self): - clause = self.text( - f"({template_processor.process_template(filter_.clause)})" - ) - if filter_.group_key: - filter_groups[filter_.group_key].append(clause) - else: - all_filters.append(clause) - - if is_feature_enabled("EMBEDDED_SUPERSET"): - for rule in security_manager.get_guest_rls_filters(self): - clause = self.text( - f"({template_processor.process_template(rule['clause'])})" - ) - all_filters.append(clause) - - grouped_filters = [or_(*clauses) for clauses in filter_groups.values()] - all_filters.extend(grouped_filters) - return all_filters - except TemplateError as ex: - raise QueryObjectValidationError( - _( - "Error in jinja expression in RLS filters: %(msg)s", - msg=ex.message, - ) - ) from ex + # TODO: We should refactor this mixin and remove this method + # as it exists in the BaseDatasource and is not applicable + # for datasources of type query + return [] - def _process_sql_expression( + def _process_sql_expression( # pylint: disable=too-many-arguments self, expression: Optional[str], database_id: int, + engine: str, schema: str, template_processor: Optional[BaseTemplateProcessor], ) -> Optional[str]: @@ -861,6 +833,7 @@ def _process_sql_expression( expression = validate_adhoc_subquery( expression, database_id, + engine, schema, ) try: @@ -907,10 +880,6 @@ def get_query_str_extended( sqlaq = self.get_sqla_query(**query_obj) 
sql = self.database.compile_sqla_query(sqlaq.sqla_query) sql = self._apply_cte(sql, sqlaq.cte) - try: - sql = SQLStatement(sql, engine=self.db_engine_spec.engine).format() - except SupersetParseError: - logger.warning("Unable to parse SQL to format it, passing it as-is") if mutate: sql = self.database.mutate_sql_based_on_config(sql) @@ -1150,6 +1119,7 @@ def adhoc_metric_to_sqla( expression = self._process_sql_expression( expression=metric["sqlExpression"], database_id=self.database_id, + engine=self.database.backend, schema=self.schema, template_processor=template_processor, ) @@ -1351,7 +1321,7 @@ def get_time_filter( # pylint: disable=too-many-arguments ) return and_(*l) - def values_for_column( + def values_for_column( # pylint: disable=too-many-locals self, column_name: str, limit: int = 10000, @@ -1387,6 +1357,9 @@ def values_for_column( if self.fetch_values_predicate: qry = qry.where(self.get_fetch_values_predicate(template_processor=tp)) + rls_filters = self.get_sqla_row_level_filters(template_processor=tp) + qry = qry.where(and_(*rls_filters)) + with self.database.get_sqla_engine() as engine: sql = str(qry.compile(engine, compile_kwargs={"literal_binds": True})) sql = self._apply_cte(sql, cte) @@ -1396,7 +1369,7 @@ def values_for_column( if engine.dialect.identifier_preparer._double_percents: sql = sql.replace("%%", "%") - df = pd.read_sql_query(sql=sql, con=engine) + df = pd.read_sql_query(sql=self.text(sql), con=engine) # replace NaN with None to ensure it can be serialized to JSON df = df.replace({np.nan: None}) return df["column_values"].to_list() @@ -1590,6 +1563,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma col["sqlExpression"] = self._process_sql_expression( expression=col["sqlExpression"], database_id=self.database_id, + engine=self.database.backend, schema=self.schema, template_processor=template_processor, ) @@ -1652,6 +1626,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma 
selected = validate_adhoc_subquery( selected, self.database_id, + self.database.backend, self.schema, ) outer = literal_column(f"({selected})") @@ -1678,6 +1653,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma selected = validate_adhoc_subquery( _sql, self.database_id, + self.database.backend, self.schema, ) @@ -1954,6 +1930,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma where = self._process_sql_expression( expression=where, database_id=self.database_id, + engine=self.database.backend, schema=self.schema, template_processor=template_processor, ) @@ -1972,6 +1949,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma having = self._process_sql_expression( expression=having, database_id=self.database_id, + engine=self.database.backend, schema=self.schema, template_processor=template_processor, ) diff --git a/superset/models/slice.py b/superset/models/slice.py index c30e643b7df2..cf94a50f51eb 100644 --- a/superset/models/slice.py +++ b/superset/models/slice.py @@ -369,6 +369,7 @@ def set_related_perm(_mapper: Mapper, _connection: Connection, target: Slice) -> ds = db.session.query(src_class).filter_by(id=int(id_)).first() if ds: target.perm = ds.perm + target.catalog_perm = ds.catalog_perm target.schema_perm = ds.schema_perm diff --git a/superset/models/sql_lab.py b/superset/models/sql_lab.py index 6f25a5a66058..1702601d0f24 100644 --- a/superset/models/sql_lab.py +++ b/superset/models/sql_lab.py @@ -374,6 +374,7 @@ def adhoc_column_to_sqla( expression = self._process_sql_expression( expression=col["sqlExpression"], database_id=self.database_id, + engine=self.database.backend, schema=self.schema, template_processor=template_processor, ) diff --git a/superset/queries/api.py b/superset/queries/api.py index 67afd8a81763..5d1ed10d0650 100644 --- a/superset/queries/api.py +++ b/superset/queries/api.py @@ -144,11 +144,13 @@ class QueryRestApi(BaseSupersetModelRestApi): ] 
base_related_field_filters = { "created_by": [["id", BaseFilterRelatedUsers, lambda: []]], + "changed_by": [["id", BaseFilterRelatedUsers, lambda: []]], "user": [["id", BaseFilterRelatedUsers, lambda: []]], "database": [["id", DatabaseFilter, lambda: []]], } related_field_filters = { "created_by": RelatedFieldFilter("first_name", FilterRelatedOwners), + "changed_by": RelatedFieldFilter("first_name", FilterRelatedOwners), "user": RelatedFieldFilter("first_name", FilterRelatedOwners), } diff --git a/superset/queries/saved_queries/api.py b/superset/queries/saved_queries/api.py index 4e34a75039f1..8a4fe32fb41e 100644 --- a/superset/queries/saved_queries/api.py +++ b/superset/queries/saved_queries/api.py @@ -56,9 +56,11 @@ from superset.utils import json from superset.views.base_api import ( BaseSupersetModelRestApi, + RelatedFieldFilter, requires_form_data, statsd_metrics, ) +from superset.views.filters import BaseFilterRelatedUsers, FilterRelatedOwners logger = logging.getLogger(__name__) @@ -179,8 +181,12 @@ class SavedQueryRestApi(BaseSupersetModelRestApi): related_field_filters = { "database": "database_name", + "changed_by": RelatedFieldFilter("first_name", FilterRelatedOwners), + } + base_related_field_filters = { + "database": [["id", DatabaseFilter, lambda: []]], + "changed_by": [["id", BaseFilterRelatedUsers, lambda: []]], } - base_related_field_filters = {"database": [["id", DatabaseFilter, lambda: []]]} allowed_rel_fields = {"database", "changed_by", "created_by"} allowed_distinct_fields = {"catalog", "schema"} diff --git a/superset/reports/api.py b/superset/reports/api.py index 5ff90165b627..320eb97c0513 100644 --- a/superset/reports/api.py +++ b/superset/reports/api.py @@ -228,6 +228,7 @@ def ensure_alert_reports_enabled(self) -> Optional[Response]: "database": [["id", DatabaseFilter, lambda: []]], "owners": [["id", BaseFilterRelatedUsers, lambda: []]], "created_by": [["id", BaseFilterRelatedUsers, lambda: []]], + "changed_by": [["id", 
BaseFilterRelatedUsers, lambda: []]], } text_field_rel_fields = { "dashboard": "dashboard_title", @@ -239,6 +240,7 @@ def ensure_alert_reports_enabled(self) -> Optional[Response]: "chart": "slice_name", "database": "database_name", "created_by": RelatedFieldFilter("first_name", FilterRelatedOwners), + "changed_by": RelatedFieldFilter("first_name", FilterRelatedOwners), "owners": RelatedFieldFilter("first_name", FilterRelatedOwners), } diff --git a/superset/reports/notifications/email.py b/superset/reports/notifications/email.py index d5939f772af0..b4514d43aa81 100644 --- a/superset/reports/notifications/email.py +++ b/superset/reports/notifications/email.py @@ -84,13 +84,17 @@ class EmailNotification(BaseNotification): # pylint: disable=too-few-public-met def _get_smtp_domain() -> str: return parseaddr(app.config["SMTP_MAIL_FROM"])[1].split("@")[1] - @staticmethod - def _error_template(text: str) -> str: + def _error_template(self, text: str) -> str: + call_to_action = self._get_call_to_action() return __( """ - Error: %(text)s + <p>Your report/alert was unable to be generated because of the following error: %(text)s</p> + <p>Please check your dashboard/chart for errors.</p> + <p><b><a href="%(url)s">%(call_to_action)s</a></b></p> """, text=text, + url=self._content.url, + call_to_action=call_to_action, ) def _get_content(self) -> EmailContent: @@ -130,7 +134,6 @@ def _get_content(self) -> EmailContent: else: html_table = "" - call_to_action = __(app.config["EMAIL_REPORTS_CTA"]) img_tags = [] for msgid in images.keys(): img_tags.append( @@ -140,6 +143,7 @@ def _get_content(self) -> EmailContent: """ ) img_tag = "".join(img_tags) + call_to_action = self._get_call_to_action() body = textwrap.dedent( f""" <html> @@ -190,14 +194,28 @@ def _get_subject(self) -> str: title=self._content.name, ) + def _get_call_to_action(self) -> str: + return __(app.config["EMAIL_REPORTS_CTA"]) + def _get_to(self) -> str: return json.loads(self._recipient.recipient_config_json)["target"] 
+ def _get_cc(self) -> str: + # To accommodate backward compatibility + return json.loads(self._recipient.recipient_config_json).get("ccTarget", "") + + def _get_bcc(self) -> str: + # To accommodate backward compatibility + return json.loads(self._recipient.recipient_config_json).get("bccTarget", "") + + @statsd_gauge("reports.email.send") + def send(self) -> None: + subject = self._get_subject() + content = self._get_content() + to = self._get_to() + cc = self._get_cc() + bcc = self._get_bcc() + + try: + send_email_smtp( + to, @@ -208,9 +226,10 @@ def send(self) -> None: data=content.data, pdf=content.pdf, images=content.images, - bcc="", mime_subtype="related", dryrun=False, + cc=cc, + bcc=bcc, header_data=content.header_data, ) logger.info( diff --git a/superset/reports/schemas.py b/superset/reports/schemas.py index 3de34b5b08b8..f88a384055f9 100644 --- a/superset/reports/schemas.py +++ b/superset/reports/schemas.py @@ -123,6 +123,8 @@ class ValidatorConfigJSONSchema(Schema): class ReportRecipientConfigJSONSchema(Schema): # TODO if email check validity target = fields.String() + ccTarget = fields.String() + bccTarget = fields.String() class ReportRecipientSchema(Schema): diff --git a/superset/row_level_security/api.py b/superset/row_level_security/api.py index 077d55ff4ebb..7ea0959842d0 100644 --- a/superset/row_level_security/api.py +++ b/superset/row_level_security/api.py @@ -47,10 +47,15 @@ from superset.views.base import DatasourceFilter from superset.views.base_api import ( BaseSupersetModelRestApi, + RelatedFieldFilter, requires_json, statsd_metrics, ) -from superset.views.filters import BaseFilterRelatedRoles +from superset.views.filters import ( + BaseFilterRelatedRoles, + BaseFilterRelatedUsers, + FilterRelatedOwners, +) logger = logging.getLogger(__name__) @@ -129,9 +134,13 @@ class RLSRestApi(BaseSupersetModelRestApi): edit_model_schema = RLSPutSchema() allowed_rel_fields = {"tables", "roles", "created_by", "changed_by"} + related_field_filters = { + "changed_by": 
RelatedFieldFilter("first_name", FilterRelatedOwners), + } base_related_field_filters = { "tables": [["id", DatasourceFilter, lambda: []]], "roles": [["id", BaseFilterRelatedRoles, lambda: []]], + "changed_by": [["id", BaseFilterRelatedUsers, lambda: []]], } openapi_spec_methods = openapi_spec_methods_override diff --git a/superset/security/api.py b/superset/security/api.py index 61fd68e6f0e7..02bf6b7101ea 100644 --- a/superset/security/api.py +++ b/superset/security/api.py @@ -17,7 +17,7 @@ import logging from typing import Any -from flask import request, Response +from flask import current_app, request, Response from flask_appbuilder import expose from flask_appbuilder.api import safe from flask_appbuilder.security.decorators import permission_name, protect @@ -27,6 +27,7 @@ from superset.commands.dashboard.embedded.exceptions import ( EmbeddedDashboardNotFoundError, ) +from superset.exceptions import SupersetGenericErrorException from superset.extensions import event_logger from superset.security.guest_token import GuestTokenResourceType from superset.views.base_api import BaseSupersetApi, statsd_metrics @@ -148,8 +149,19 @@ def guest_token(self) -> Response: try: body = guest_token_create_schema.load(request.json) self.appbuilder.sm.validate_guest_token_resources(body["resources"]) - - # todo validate stuff: + guest_token_validator_hook = current_app.config.get( + "GUEST_TOKEN_VALIDATOR_HOOK" + ) + # Run validator to ensure the token parameters are OK. + if guest_token_validator_hook is not None: + if callable(guest_token_validator_hook): + if not guest_token_validator_hook(body): + raise ValidationError(message="Guest token validation failed") + else: + raise SupersetGenericErrorException( + message="Guest token validator hook not callable" + ) + # TODO: Add generic validation: # make sure username doesn't reference an existing user # check rls rules for validity? 
token = self.appbuilder.sm.create_guest_access_token( diff --git a/superset/security/guest_token.py b/superset/security/guest_token.py index a8dc2e3393bf..6727330afe92 100644 --- a/superset/security/guest_token.py +++ b/superset/security/guest_token.py @@ -14,12 +14,13 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from enum import Enum from typing import Optional, TypedDict, Union from flask_appbuilder.security.sqla.models import Role from flask_login import AnonymousUserMixin +from superset.utils.backports import StrEnum + class GuestTokenUser(TypedDict, total=False): username: str @@ -27,7 +28,7 @@ class GuestTokenUser(TypedDict, total=False): last_name: str -class GuestTokenResourceType(Enum): +class GuestTokenResourceType(StrEnum): DASHBOARD = "dashboard" diff --git a/superset/security/manager.py b/superset/security/manager.py index 30cf37f117fb..c69fffcb0c68 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -176,7 +176,12 @@ def query_context_modified(query_context: "QueryContext") -> bool: ) # compare columns and metrics in form_data with stored values - for key in ["metrics", "columns", "groupby"]: + for key, equivalent in [ + ("metrics", ["metrics"]), + ("columns", ["columns", "groupby"]), + ("groupby", ["columns", "groupby"]), + ("orderby", ["orderby"]), + ]: requested_values = {freeze_value(value) for value in form_data.get(key) or []} stored_values = { freeze_value(value) for value in stored_chart.params_dict.get(key) or [] @@ -192,9 +197,10 @@ def query_context_modified(query_context: "QueryContext") -> bool: } if stored_query_context: for query in stored_query_context.get("queries") or []: - stored_values.update( - {freeze_value(value) for value in query.get(key) or []} - ) + for key in equivalent: + stored_values.update( + {freeze_value(value) for value in query.get(key) or []} + ) if not queries_values.issubset(stored_values): 
return True @@ -238,6 +244,12 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods "SQL Lab", "User Registrations", "User's Statistics", + # Guarding all AB_ADD_SECURITY_API = True REST APIs + "Role", + "Permission", + "PermissionViewMenu", + "ViewMenu", + "User", } | USER_MODEL_VIEWS ALPHA_ONLY_VIEW_MENUS = { @@ -286,6 +298,7 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods OBJECT_SPEC_PERMISSIONS = { "database_access", + "catalog_access", "schema_access", "datasource_access", } @@ -773,6 +786,9 @@ def get_schemas_accessible_by_user( # pylint: disable=import-outside-toplevel from superset.connectors.sqla.models import SqlaTable + default_catalog = database.get_default_catalog() + catalog = catalog or default_catalog + if hierarchical and ( self.can_access_database(database) or (catalog and self.can_access_catalog(database, catalog)) @@ -782,7 +798,6 @@ def get_schemas_accessible_by_user( # schema_access accessible_schemas: set[str] = set() schema_access = self.user_view_menu_names("schema_access") - default_catalog = database.get_default_catalog() default_schema = database.get_default_schema(default_catalog) for perm in schema_access: @@ -799,7 +814,7 @@ def get_schemas_accessible_by_user( # [database].[catalog].[schema] matches when the catalog is equal to the # requested catalog or, when no catalog specified, it's equal to the default # catalog. 
- elif len(parts) == 3 and parts[1] == (catalog or default_catalog): + elif len(parts) == 3 and parts[1] == catalog: accessible_schemas.add(parts[2]) # datasource_access @@ -905,16 +920,16 @@ def get_datasources_accessible_by_user( # pylint: disable=invalid-name if self.can_access_database(database): return datasource_names + catalog = catalog or database.get_default_catalog() if catalog: catalog_perm = self.get_catalog_perm(database.database_name, catalog) if catalog_perm and self.can_access("catalog_access", catalog_perm): return datasource_names if schema: - default_catalog = database.get_default_catalog() schema_perm = self.get_schema_perm( database.database_name, - catalog or default_catalog, + catalog, schema, ) if schema_perm and self.can_access("schema_access", schema_perm): @@ -2157,7 +2172,6 @@ def raise_for_access( :param schema: Optional schema name :raises SupersetSecurityException: If the user cannot access the resource """ - # pylint: disable=import-outside-toplevel from superset import is_feature_enabled from superset.connectors.sqla.models import SqlaTable @@ -2182,6 +2196,7 @@ def raise_for_access( database = query.database database = cast("Database", database) + default_catalog = database.get_default_catalog() if self.can_access_database(database): return @@ -2195,19 +2210,19 @@ def raise_for_access( # from the SQLAlchemy URI if possible; if not, we use the SQLAlchemy # inspector to read it. default_schema = database.get_default_schema_for_query(query) - # Determining the default catalog is much easier, because DB engine - # specs need explicit support for catalogs. - default_catalog = database.get_default_catalog() tables = { Table( table_.table, table_.schema or default_schema, - table_.catalog or default_catalog, + table_.catalog or query.catalog or default_catalog, ) for table_ in extract_tables_from_jinja_sql(query.sql, database) } elif table: - tables = {table} + # Make sure table has the default catalog, if not specified. 
+ tables = { + Table(table.table, table.schema, table.catalog or default_catalog) + } denied = set() @@ -2642,9 +2657,7 @@ def has_guest_access(self, dashboard: "Dashboard") -> bool: return False dashboards = [ - r - for r in user.resources - if r["type"] == GuestTokenResourceType.DASHBOARD.value + r for r in user.resources if r["type"] == GuestTokenResourceType.DASHBOARD ] # TODO (embedded): remove this check once uuids are rolled out diff --git a/superset/sql/__init__.py b/superset/sql/__init__.py new file mode 100644 index 000000000000..13a83393a912 --- /dev/null +++ b/superset/sql/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/superset/sql/parse.py b/superset/sql/parse.py new file mode 100644 index 000000000000..38367faa7745 --- /dev/null +++ b/superset/sql/parse.py @@ -0,0 +1,717 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from __future__ import annotations + +import enum +import logging +import re +import urllib.parse +from collections.abc import Iterable +from dataclasses import dataclass +from typing import Any, Generic, TypeVar + +import sqlglot +import sqlparse +from deprecation import deprecated +from sqlglot import exp +from sqlglot.dialects.dialect import Dialect, Dialects +from sqlglot.errors import ParseError +from sqlglot.optimizer.scope import Scope, ScopeType, traverse_scope + +from superset.exceptions import SupersetParseError + +logger = logging.getLogger(__name__) + + +# mapping between DB engine specs and sqlglot dialects +SQLGLOT_DIALECTS = { + "base": Dialects.DIALECT, + "ascend": Dialects.HIVE, + "awsathena": Dialects.PRESTO, + "bigquery": Dialects.BIGQUERY, + "clickhouse": Dialects.CLICKHOUSE, + "clickhousedb": Dialects.CLICKHOUSE, + "cockroachdb": Dialects.POSTGRES, + "couchbase": Dialects.MYSQL, + # "crate": ??? + # "databend": ??? + "databricks": Dialects.DATABRICKS, + # "db2": ??? + # "dremio": ??? + "drill": Dialects.DRILL, + # "druid": ??? + "duckdb": Dialects.DUCKDB, + # "dynamodb": ??? + # "elasticsearch": ??? + # "exa": ??? + # "firebird": ??? + # "firebolt": ??? + "gsheets": Dialects.SQLITE, + "hana": Dialects.POSTGRES, + "hive": Dialects.HIVE, + # "ibmi": ??? + # "impala": ??? + # "kustokql": ??? + # "kylin": ??? + "mssql": Dialects.TSQL, + "mysql": Dialects.MYSQL, + "netezza": Dialects.POSTGRES, + # "ocient": ??? + # "odelasticsearch": ??? + "oracle": Dialects.ORACLE, + # "pinot": ??? 
+ "postgresql": Dialects.POSTGRES, + "presto": Dialects.PRESTO, + "pydoris": Dialects.DORIS, + "redshift": Dialects.REDSHIFT, + # "risingwave": ??? + # "rockset": ??? + "shillelagh": Dialects.SQLITE, + "snowflake": Dialects.SNOWFLAKE, + # "solr": ??? + "spark": Dialects.SPARK, + "sqlite": Dialects.SQLITE, + "starrocks": Dialects.STARROCKS, + "superset": Dialects.SQLITE, + "teradatasql": Dialects.TERADATA, + "trino": Dialects.TRINO, + "vertica": Dialects.POSTGRES, + "yql": Dialects.CLICKHOUSE, +} + + +@dataclass(eq=True, frozen=True) +class Table: + """ + A fully qualified SQL table conforming to [[catalog.]schema.]table. + """ + + table: str + schema: str | None = None + catalog: str | None = None + + def __str__(self) -> str: + """ + Return the fully qualified SQL table name. + + Should not be used for SQL generation, only for logging and debugging, since the + quoting is not engine-specific. + """ + return ".".join( + urllib.parse.quote(part, safe="").replace(".", "%2E") + for part in [self.catalog, self.schema, self.table] + if part + ) + + def __eq__(self, other: Any) -> bool: + return str(self) == str(other) + + +# To avoid unnecessary parsing/formatting of queries, the statement has the concept of +# an "internal representation", which is the AST of the SQL statement. For most of the +# engines supported by Superset this is `sqlglot.exp.Expression`, but there is a special +# case: KustoKQL uses a different syntax and there are no Python parsers for it, so we +# store the AST as a string (the original query), and manipulate it with regular +# expressions. +InternalRepresentation = TypeVar("InternalRepresentation") + +# The base type. This helps type checking the `split_query` method correctly, since each +# derived class has a more specific return type (the class itself). This will no longer +# be needed once Python 3.11 is the lowest version supported. 
See PEP 673 for more +# information: https://peps.python.org/pep-0673/ +TBaseSQLStatement = TypeVar("TBaseSQLStatement") # pylint: disable=invalid-name + + +class BaseSQLStatement(Generic[InternalRepresentation]): + """ + Base class for SQL statements. + + The class should be instantiated with a string representation of the script and, for + efficiency reasons, optionally with a pre-parsed AST. This is useful with + `sqlglot.parse`, which will split a script in multiple already parsed statements. + + The `engine` parameters comes from the `engine` attribute in a Superset DB engine + spec. + """ + + def __init__( + self, + statement: str, + engine: str, + ast: InternalRepresentation | None = None, + ): + self._sql = statement + self._parsed = ast or self._parse_statement(statement, engine) + self.engine = engine + self.tables = self._extract_tables_from_statement(self._parsed, self.engine) + + @classmethod + def split_script( + cls: type[TBaseSQLStatement], + script: str, + engine: str, + ) -> list[TBaseSQLStatement]: + """ + Split a script into multiple instantiated statements. + + This is a helper function to split a full SQL script into multiple + `BaseSQLStatement` instances. It's used by `SQLScript` when instantiating the + statements within a script. + """ + raise NotImplementedError() + + @classmethod + def _parse_statement( + cls, + statement: str, + engine: str, + ) -> InternalRepresentation: + """ + Parse a string containing a single SQL statement, and returns the parsed AST. + + Derived classes should not assume that `statement` contains a single statement, + and MUST explicitly validate that. Since this validation is parser dependent the + responsibility is left to the children classes. + """ + raise NotImplementedError() + + @classmethod + def _extract_tables_from_statement( + cls, + parsed: InternalRepresentation, + engine: str, + ) -> set[Table]: + """ + Extract all table references in a given statement. 
+ """ + raise NotImplementedError() + + def format(self, comments: bool = True) -> str: + """ + Format the statement, optionally omitting comments. + """ + raise NotImplementedError() + + def get_settings(self) -> dict[str, str | bool]: + """ + Return any settings set by the statement. + + For example, for this statement: + + sql> SET foo = 'bar'; + + The method should return `{"foo": "'bar'"}`. Note the single quotes. + """ + raise NotImplementedError() + + def is_mutating(self) -> bool: + """ + Check if the statement mutates data (DDL/DML). + + :return: True if the statement mutates data. + """ + raise NotImplementedError() + + def __str__(self) -> str: + return self.format() + + +class SQLStatement(BaseSQLStatement[exp.Expression]): + """ + A SQL statement. + + This class is used for all engines with dialects that can be parsed using sqlglot. + """ + + def __init__( + self, + statement: str, + engine: str, + ast: exp.Expression | None = None, + ): + self._dialect = SQLGLOT_DIALECTS.get(engine) + super().__init__(statement, engine, ast) + + @classmethod + def _parse(cls, script: str, engine: str) -> list[exp.Expression]: + """ + Parse helper. + """ + dialect = SQLGLOT_DIALECTS.get(engine) + try: + return sqlglot.parse(script, dialect=dialect) + except sqlglot.errors.ParseError as ex: + error = ex.errors[0] + raise SupersetParseError( + script, + engine, + highlight=error["highlight"], + line=error["line"], + column=error["col"], + ) from ex + except sqlglot.errors.SqlglotError as ex: + raise SupersetParseError( + script, + engine, + message="Unable to parse script", + ) from ex + + @classmethod + def split_script( + cls, + script: str, + engine: str, + ) -> list[SQLStatement]: + if dialect := SQLGLOT_DIALECTS.get(engine): + try: + return [ + cls(ast.sql(), engine, ast) + for ast in cls._parse(script, engine) + if ast + ] + except ValueError: + # `ast.sql()` might raise an error on some cases (eg, `SHOW TABLES + # FROM`). 
In this case, we rely on the tokenizer to generate the + # statements. + pass + + # When we don't have a sqlglot dialect we can't rely on `ast.sql()` to correctly + # generate the SQL of each statement, so we tokenize the script and split it + # based on the location of semi-colons. + statements = [] + start = 0 + remainder = script + + try: + tokens = sqlglot.tokenize(script, dialect) + except sqlglot.errors.TokenError as ex: + raise SupersetParseError( + script, + engine, + message="Unable to tokenize script", + ) from ex + + for token in tokens: + if token.token_type == sqlglot.TokenType.SEMICOLON: + statement, start = script[start : token.start], token.end + 1 + ast = cls._parse(statement, engine)[0] + statements.append(cls(statement.strip(), engine, ast)) + remainder = script[start:] + + if remainder.strip(): + ast = cls._parse(remainder, engine)[0] + statements.append(cls(remainder.strip(), engine, ast)) + + return statements + + @classmethod + def _parse_statement( + cls, + statement: str, + engine: str, + ) -> exp.Expression: + """ + Parse a single SQL statement. + """ + statements = cls.split_script(statement, engine) + if len(statements) != 1: + raise SupersetParseError("SQLStatement should have exactly one statement") + + return statements[0]._parsed # pylint: disable=protected-access + + @classmethod + def _extract_tables_from_statement( + cls, + parsed: exp.Expression, + engine: str, + ) -> set[Table]: + """ + Find all referenced tables. + """ + dialect = SQLGLOT_DIALECTS.get(engine) + return extract_tables_from_statement(parsed, dialect) + + def is_mutating(self) -> bool: + """ + Check if the statement mutates data (DDL/DML). + + :return: True if the statement mutates data. 
+ """ + for node in self._parsed.walk(): + if isinstance( + node, + ( + exp.Insert, + exp.Update, + exp.Delete, + exp.Merge, + exp.Create, + exp.Drop, + exp.TruncateTable, + ), + ): + return True + + if isinstance(node, exp.Command) and node.name == "ALTER": + return True + + # Postgres runs DMLs prefixed by `EXPLAIN ANALYZE`, see + # https://www.postgresql.org/docs/current/sql-explain.html + if ( + self._dialect == Dialects.POSTGRES + and isinstance(self._parsed, exp.Command) + and self._parsed.name == "EXPLAIN" + and self._parsed.expression.name.upper().startswith("ANALYZE ") + ): + analyzed_sql = self._parsed.expression.name[len("ANALYZE ") :] + return SQLStatement(analyzed_sql, self.engine).is_mutating() + + return False + + def format(self, comments: bool = True) -> str: + """ + Pretty-format the SQL statement. + """ + if self._dialect: + try: + write = Dialect.get_or_raise(self._dialect) + return write.generate( + self._parsed, + copy=False, + comments=comments, + pretty=True, + ) + except ValueError: + pass + + return self._fallback_formatting() + + @deprecated(deprecated_in="4.0", removed_in="5.0") + def _fallback_formatting(self) -> str: + """ + Format SQL without a specific dialect. + + Reformatting SQL using the generic sqlglot dialect is known to break queries. + For example, it will change `foo NOT IN (1, 2)` to `NOT foo IN (1,2)`, which + breaks the query for Firebolt. To avoid this, we use sqlparse for formatting + when the dialect is not known. + + In 5.0 we should remove `sqlparse`, and the method should return the query + unmodified. + """ + return sqlparse.format(self._sql, reindent=True, keyword_case="upper") + + def get_settings(self) -> dict[str, str | bool]: + """ + Return the settings for the SQL statement. 
+ + >>> statement = SQLStatement("SET foo = 'bar'") + >>> statement.get_settings() + {"foo": "'bar'"} + + """ + return { + eq.this.sql(comments=False): eq.expression.sql(comments=False) + for set_item in self._parsed.find_all(exp.SetItem) + for eq in set_item.find_all(exp.EQ) + } + + +class KQLSplitState(enum.Enum): + """ + State machine for splitting a KQL script. + + The state machine keeps track of whether we're inside a string or not, so we + don't split the script in a semi-colon that's part of a string. + """ + + OUTSIDE_STRING = enum.auto() + INSIDE_SINGLE_QUOTED_STRING = enum.auto() + INSIDE_DOUBLE_QUOTED_STRING = enum.auto() + INSIDE_MULTILINE_STRING = enum.auto() + + +def split_kql(kql: str) -> list[str]: + """ + Custom function for splitting KQL statements. + """ + statements = [] + state = KQLSplitState.OUTSIDE_STRING + statement_start = 0 + script = kql if kql.endswith(";") else kql + ";" + for i, character in enumerate(script): + if state == KQLSplitState.OUTSIDE_STRING: + if character == ";": + statements.append(script[statement_start:i]) + statement_start = i + 1 + elif character == "'": + state = KQLSplitState.INSIDE_SINGLE_QUOTED_STRING + elif character == '"': + state = KQLSplitState.INSIDE_DOUBLE_QUOTED_STRING + elif character == "`" and script[i - 2 : i] == "``": + state = KQLSplitState.INSIDE_MULTILINE_STRING + + elif ( + state == KQLSplitState.INSIDE_SINGLE_QUOTED_STRING + and character == "'" + and script[i - 1] != "\\" + ): + state = KQLSplitState.OUTSIDE_STRING + + elif ( + state == KQLSplitState.INSIDE_DOUBLE_QUOTED_STRING + and character == '"' + and script[i - 1] != "\\" + ): + state = KQLSplitState.OUTSIDE_STRING + + elif ( + state == KQLSplitState.INSIDE_MULTILINE_STRING + and character == "`" + and script[i - 2 : i] == "``" + ): + state = KQLSplitState.OUTSIDE_STRING + + return statements + + +class KustoKQLStatement(BaseSQLStatement[str]): + """ + Special class for Kusto KQL. 
+ + Kusto KQL is a SQL-like language, but it's not supported by sqlglot. Queries look + like this: + + StormEvents + | summarize PropertyDamage = sum(DamageProperty) by State + | join kind=innerunique PopulationData on State + | project State, PropertyDamagePerCapita = PropertyDamage / Population + | sort by PropertyDamagePerCapita + + See https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query/ for more + details about it. + """ + + @classmethod + def split_script( + cls, + script: str, + engine: str, + ) -> list[KustoKQLStatement]: + """ + Split a script at semi-colons. + + Since we don't have a parser, we use a simple state machine based function. See + https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query/scalar-data-types/string + for more information. + """ + return [ + cls(statement, engine, statement.strip()) for statement in split_kql(script) + ] + + @classmethod + def _parse_statement( + cls, + statement: str, + engine: str, + ) -> str: + if engine != "kustokql": + raise SupersetParseError(f"Invalid engine: {engine}") + + statements = split_kql(statement) + if len(statements) != 1: + raise SupersetParseError("SQLStatement should have exactly one statement") + + return statements[0].strip() + + @classmethod + def _extract_tables_from_statement( + cls, + parsed: str, + engine: str, + ) -> set[Table]: + """ + Extract all tables referenced in the statement. + + StormEvents + | where InjuriesDirect + InjuriesIndirect > 50 + | join (PopulationData) on State + | project State, Population, TotalInjuries = InjuriesDirect + InjuriesIndirect + + """ + logger.warning( + "Kusto KQL doesn't support table extraction. This means that data access " + "roles will not be enforced by Superset in the database." + ) + return set() + + def format(self, comments: bool = True) -> str: + """ + Pretty-format the SQL statement. 
+ """ + return self._sql.strip() + + def get_settings(self) -> dict[str, str | bool]: + """ + Return the settings for the SQL statement. + + >>> statement = KustoKQLStatement("set querytrace;") + >>> statement.get_settings() + {"querytrace": True} + + """ + set_regex = r"^set\s+(?P<name>\w+)(?:\s*=\s*(?P<value>\w+))?$" + if match := re.match(set_regex, self._parsed, re.IGNORECASE): + return {match.group("name"): match.group("value") or True} + + return {} + + def is_mutating(self) -> bool: + """ + Check if the statement mutates data (DDL/DML). + + :return: True if the statement mutates data. + """ + return self._parsed.startswith(".") and not self._parsed.startswith(".show") + + +class SQLScript: + """ + A SQL script, with 0+ statements. + """ + + # Special engines that can't be parsed using sqlglot. Supporting non-SQL engines + # adds a lot of complexity to Superset, so we should avoid adding new engines to + # this data structure. + special_engines = { + "kustokql": KustoKQLStatement, + } + + def __init__( + self, + script: str, + engine: str, + ): + statement_class = self.special_engines.get(engine, SQLStatement) + self.engine = engine + self.statements = statement_class.split_script(script, engine) + + def format(self, comments: bool = True) -> str: + """ + Pretty-format the SQL script. + + Note that even though KQL is very different from SQL, multiple statements are + still separated by semi-colons. + """ + return ";\n".join(statement.format(comments) for statement in self.statements) + + def get_settings(self) -> dict[str, str | bool]: + """ + Return the settings for the SQL script. + + >>> statement = SQLScript("SET foo = 'bar'; SET foo = 'baz'") + >>> statement.get_settings() + {"foo": "'baz'"} + + """ + settings: dict[str, str | bool] = {} + for statement in self.statements: + settings.update(statement.get_settings()) + + return settings + + def has_mutation(self) -> bool: + """ + Check if the script contains mutating statements. 
+ + :return: True if the script contains mutating statements + """ + return any(statement.is_mutating() for statement in self.statements) + + +def extract_tables_from_statement( + statement: exp.Expression, + dialect: Dialects | None, +) -> set[Table]: + """ + Extract all table references in a single statement. + + Please note that this is not trivial; consider the following queries: + + DESCRIBE some_table; + SHOW PARTITIONS FROM some_table; + WITH masked_name AS (SELECT * FROM some_table) SELECT * FROM masked_name; + + See the unit tests for other tricky cases. + """ + sources: Iterable[exp.Table] + + if isinstance(statement, exp.Describe): + # A `DESCRIBE` query has no sources in sqlglot, so we need to explicitly + # query for all tables. + sources = statement.find_all(exp.Table) + elif isinstance(statement, exp.Command): + # Commands, like `SHOW COLUMNS FROM foo`, have to be converted into a + # `SELECT` statement in order to extract tables. + literal = statement.find(exp.Literal) + if not literal: + return set() + + try: + pseudo_query = sqlglot.parse_one(f"SELECT {literal.this}", dialect=dialect) + except ParseError: + return set() + sources = pseudo_query.find_all(exp.Table) + else: + sources = [ + source + for scope in traverse_scope(statement) + for source in scope.sources.values() + if isinstance(source, exp.Table) and not is_cte(source, scope) + ] + + return { + Table( + source.name, + source.db if source.db != "" else None, + source.catalog if source.catalog != "" else None, + ) + for source in sources + } + + +def is_cte(source: exp.Table, scope: Scope) -> bool: + """ + Is the source a CTE? 
+ + CTEs in the parent scope look like tables (and are represented by + exp.Table objects), but should not be considered as such; + otherwise a user with access to table `foo` could access any table + with a query like this: + + WITH foo AS (SELECT * FROM target_table) SELECT * FROM foo + + """ + parent_sources = scope.parent.sources if scope.parent else {} + ctes_in_scope = { + name + for name, parent_scope in parent_sources.items() + if isinstance(parent_scope, Scope) and parent_scope.scope_type == ScopeType.CTE + } + + return source.name in ctes_in_scope diff --git a/superset/sql_lab.py b/superset/sql_lab.py index f20bff35c30c..65a093610d11 100644 --- a/superset/sql_lab.py +++ b/superset/sql_lab.py @@ -46,17 +46,18 @@ OAuth2RedirectError, SupersetErrorException, SupersetErrorsException, + SupersetParseError, ) from superset.extensions import celery_app, event_logger from superset.models.core import Database from superset.models.sql_lab import Query from superset.result_set import SupersetResultSet +from superset.sql.parse import SQLStatement, Table from superset.sql_parse import ( CtasMethod, insert_rls_as_subquery, insert_rls_in_predicate, ParsedQuery, - Table, ) from superset.sqllab.limiting_factor import LimitingFactor from superset.sqllab.utils import write_ipc_buffer @@ -194,7 +195,7 @@ def get_sql_results( # pylint: disable=too-many-arguments return handle_query_error(ex, query) -def execute_sql_statement( # pylint: disable=too-many-statements +def execute_sql_statement( # pylint: disable=too-many-statements, too-many-locals sql_statement: str, query: Query, cursor: Any, @@ -236,14 +237,27 @@ def execute_sql_statement( # pylint: disable=too-many-statements # We are testing to see if more rows exist than the limit. 
increased_limit = None if query.limit is None else query.limit + 1 - if not db_engine_spec.is_readonly_query(parsed_query) and not database.allow_dml: - raise SupersetErrorException( - SupersetError( - message=__("Only SELECT statements are allowed against this database."), - error_type=SupersetErrorType.DML_NOT_ALLOWED_ERROR, - level=ErrorLevel.ERROR, + if not database.allow_dml: + try: + parsed_statement = SQLStatement(sql_statement, engine=db_engine_spec.engine) + disallowed = parsed_statement.is_mutating() + except SupersetParseError: + # if we fail to parse the query, disallow by default + disallowed = True + + if disallowed: + raise SupersetErrorException( + SupersetError( + message=__( + "This database does not allow for DDL/DML, and the query " + "could not be parsed to confirm it is a read-only query. Please " + "contact your administrator for more assistance." + ), + error_type=SupersetErrorType.DML_NOT_ALLOWED_ERROR, + level=ErrorLevel.ERROR, + ) ) - ) + if apply_ctas: if not query.tmp_table_name: start_dttm = datetime.fromtimestamp(query.start_time) diff --git a/superset/sql_parse.py b/superset/sql_parse.py index 05bf9b19bb84..a2a6aaa1a79a 100644 --- a/superset/sql_parse.py +++ b/superset/sql_parse.py @@ -19,23 +19,15 @@ from __future__ import annotations -import enum import logging import re -import urllib.parse -from collections.abc import Iterable, Iterator -from dataclasses import dataclass -from typing import Any, cast, Generic, TYPE_CHECKING, TypeVar +from collections.abc import Iterator +from typing import Any, cast, TYPE_CHECKING -import sqlglot import sqlparse from flask_babel import gettext as __ -from jinja2 import nodes +from jinja2 import nodes, Template from sqlalchemy import and_ -from sqlglot import exp, parse, parse_one -from sqlglot.dialects.dialect import Dialect, Dialects -from sqlglot.errors import ParseError, SqlglotError -from sqlglot.optimizer.scope import Scope, ScopeType, traverse_scope from sqlparse import keywords from 
sqlparse.lexer import Lexer from sqlparse.sql import ( @@ -68,6 +60,13 @@ SupersetParseError, SupersetSecurityException, ) +from superset.sql.parse import ( + extract_tables_from_statement, + SQLGLOT_DIALECTS, + SQLScript, + SQLStatement, + Table, +) from superset.utils.backports import StrEnum try: @@ -94,61 +93,6 @@ lex.set_SQL_REGEX(sqlparser_sql_regex) -# mapping between DB engine specs and sqlglot dialects -SQLGLOT_DIALECTS = { - "ascend": Dialects.HIVE, - "awsathena": Dialects.PRESTO, - "bigquery": Dialects.BIGQUERY, - "clickhouse": Dialects.CLICKHOUSE, - "clickhousedb": Dialects.CLICKHOUSE, - "cockroachdb": Dialects.POSTGRES, - "couchbasedb": Dialects.MYSQL, - # "crate": ??? - # "databend": ??? - "databricks": Dialects.DATABRICKS, - # "db2": ??? - # "dremio": ??? - "drill": Dialects.DRILL, - # "druid": ??? - "duckdb": Dialects.DUCKDB, - # "dynamodb": ??? - # "elasticsearch": ??? - # "exa": ??? - # "firebird": ??? - # "firebolt": ??? - "gsheets": Dialects.SQLITE, - "hana": Dialects.POSTGRES, - "hive": Dialects.HIVE, - # "ibmi": ??? - # "impala": ??? - # "kustokql": ??? - # "kylin": ??? - "mssql": Dialects.TSQL, - "mysql": Dialects.MYSQL, - "netezza": Dialects.POSTGRES, - # "ocient": ??? - # "odelasticsearch": ??? - "oracle": Dialects.ORACLE, - # "pinot": ??? - "postgresql": Dialects.POSTGRES, - "presto": Dialects.PRESTO, - "pydoris": Dialects.DORIS, - "redshift": Dialects.REDSHIFT, - # "risingwave": ??? - # "rockset": ??? - "shillelagh": Dialects.SQLITE, - "snowflake": Dialects.SNOWFLAKE, - # "solr": ??? 
- "spark": Dialects.SPARK, - "sqlite": Dialects.SQLITE, - "starrocks": Dialects.STARROCKS, - "superset": Dialects.SQLITE, - "teradatasql": Dialects.TERADATA, - "trino": Dialects.TRINO, - "vertica": Dialects.POSTGRES, -} - - class CtasMethod(StrEnum): TABLE = "TABLE" VIEW = "VIEW" @@ -226,7 +170,9 @@ def get_cte_remainder_query(sql: str) -> tuple[str | None, str]: def check_sql_functions_exist( - sql: str, function_list: set[str], engine: str | None = None + sql: str, + function_list: set[str], + engine: str = "base", ) -> bool: """ Check if the SQL statement contains any of the specified functions. @@ -238,7 +184,7 @@ def check_sql_functions_exist( return ParsedQuery(sql, engine=engine).check_functions_exist(function_list) -def strip_comments_from_sql(statement: str, engine: str | None = None) -> str: +def strip_comments_from_sql(statement: str, engine: str = "base") -> str: """ Strips comments from a SQL statement, does a simple test first to avoid always instantiating the expensive ParsedQuery constructor @@ -255,493 +201,18 @@ def strip_comments_from_sql(statement: str, engine: str | None = None) -> str: ) -@dataclass(eq=True, frozen=True) -class Table: - """ - A fully qualified SQL table conforming to [[catalog.]schema.]table. - """ - - table: str - schema: str | None = None - catalog: str | None = None - - def __str__(self) -> str: - """ - Return the fully qualified SQL table name. - """ - - return ".".join( - urllib.parse.quote(part, safe="").replace(".", "%2E") - for part in [self.catalog, self.schema, self.table] - if part - ) - - def __eq__(self, __o: object) -> bool: - return str(self) == str(__o) - - -def extract_tables_from_statement( - statement: exp.Expression, - dialect: Dialects | None, -) -> set[Table]: - """ - Extract all table references in a single statement. 
- - Please not that this is not trivial; consider the following queries: - - DESCRIBE some_table; - SHOW PARTITIONS FROM some_table; - WITH masked_name AS (SELECT * FROM some_table) SELECT * FROM masked_name; - - See the unit tests for other tricky cases. - """ - sources: Iterable[exp.Table] - - if isinstance(statement, exp.Describe): - # A `DESCRIBE` query has no sources in sqlglot, so we need to explicitly - # query for all tables. - sources = statement.find_all(exp.Table) - elif isinstance(statement, exp.Command): - # Commands, like `SHOW COLUMNS FROM foo`, have to be converted into a - # `SELECT` statetement in order to extract tables. - literal = statement.find(exp.Literal) - if not literal: - return set() - - try: - pseudo_query = parse_one(f"SELECT {literal.this}", dialect=dialect) - except ParseError: - return set() - sources = pseudo_query.find_all(exp.Table) - else: - sources = [ - source - for scope in traverse_scope(statement) - for source in scope.sources.values() - if isinstance(source, exp.Table) and not is_cte(source, scope) - ] - - return { - Table( - source.name, - source.db if source.db != "" else None, - source.catalog if source.catalog != "" else None, - ) - for source in sources - } - - -def is_cte(source: exp.Table, scope: Scope) -> bool: - """ - Is the source a CTE? 
- - CTEs in the parent scope look like tables (and are represented by - exp.Table objects), but should not be considered as such; - otherwise a user with access to table `foo` could access any table - with a query like this: - - WITH foo AS (SELECT * FROM target_table) SELECT * FROM foo - - """ - parent_sources = scope.parent.sources if scope.parent else {} - ctes_in_scope = { - name - for name, parent_scope in parent_sources.items() - if isinstance(parent_scope, Scope) and parent_scope.scope_type == ScopeType.CTE - } - - return source.name in ctes_in_scope - - -# To avoid unnecessary parsing/formatting of queries, the statement has the concept of -# an "internal representation", which is the AST of the SQL statement. For most of the -# engines supported by Superset this is `sqlglot.exp.Expression`, but there is a special -# case: KustoKQL uses a different syntax and there are no Python parsers for it, so we -# store the AST as a string (the original query), and manipulate it with regular -# expressions. -InternalRepresentation = TypeVar("InternalRepresentation") - -# The base type. This helps type checking the `split_query` method correctly, since each -# derived class has a more specific return type (the class itself). This will no longer -# be needed once Python 3.11 is the lowest version supported. See PEP 673 for more -# information: https://peps.python.org/pep-0673/ -TBaseSQLStatement = TypeVar("TBaseSQLStatement") # pylint: disable=invalid-name - - -class BaseSQLStatement(Generic[InternalRepresentation]): - """ - Base class for SQL statements. - - The class can be instantiated with a string representation of the query or, for - efficiency reasons, with a pre-parsed AST. This is useful with `sqlglot.parse`, - which will split a query in multiple already parsed statements. - - The `engine` parameters comes from the `engine` attribute in a Superset DB engine - spec. 
- """ - - def __init__( - self, - statement: str | InternalRepresentation, - engine: str, - ): - self._parsed: InternalRepresentation = ( - self._parse_statement(statement, engine) - if isinstance(statement, str) - else statement - ) - self.engine = engine - self.tables = self._extract_tables_from_statement(self._parsed, self.engine) - - @classmethod - def split_query( - cls: type[TBaseSQLStatement], - query: str, - engine: str, - ) -> list[TBaseSQLStatement]: - """ - Split a query into multiple instantiated statements. - - This is a helper function to split a full SQL query into multiple - `BaseSQLStatement` instances. It's used by `SQLScript` when instantiating the - statements within a query. - """ - raise NotImplementedError() - - @classmethod - def _parse_statement( - cls, - statement: str, - engine: str, - ) -> InternalRepresentation: - """ - Parse a string containing a single SQL statement, and returns the parsed AST. - - Derived classes should not assume that `statement` contains a single statement, - and MUST explicitly validate that. Since this validation is parser dependent the - responsibility is left to the children classes. - """ - raise NotImplementedError() - - @classmethod - def _extract_tables_from_statement( - cls, - parsed: InternalRepresentation, - engine: str, - ) -> set[Table]: - """ - Extract all table references in a given statement. - """ - raise NotImplementedError() - - def format(self, comments: bool = True) -> str: - """ - Format the statement, optionally ommitting comments. - """ - raise NotImplementedError() - - def get_settings(self) -> dict[str, str | bool]: - """ - Return any settings set by the statement. - - For example, for this statement: - - sql> SET foo = 'bar'; - - The method should return `{"foo": "'bar'"}`. Note the single quotes. - """ - raise NotImplementedError() - - def __str__(self) -> str: - return self.format() - - -class SQLStatement(BaseSQLStatement[exp.Expression]): - """ - A SQL statement. 
- - This class is used for all engines with dialects that can be parsed using sqlglot. - """ - - def __init__( - self, - statement: str | exp.Expression, - engine: str, - ): - self._dialect = SQLGLOT_DIALECTS.get(engine) - super().__init__(statement, engine) - - @classmethod - def split_query( - cls, - query: str, - engine: str, - ) -> list[SQLStatement]: - dialect = SQLGLOT_DIALECTS.get(engine) - - try: - statements = sqlglot.parse(query, dialect=dialect) - except sqlglot.errors.ParseError as ex: - raise SupersetParseError("Unable to split query") from ex - - return [cls(statement, engine) for statement in statements if statement] - - @classmethod - def _parse_statement( - cls, - statement: str, - engine: str, - ) -> exp.Expression: - """ - Parse a single SQL statement. - """ - dialect = SQLGLOT_DIALECTS.get(engine) - - # We could parse with `sqlglot.parse_one` to get a single statement, but we need - # to verify that the string contains exactly one statement. - try: - statements = sqlglot.parse(statement, dialect=dialect) - except sqlglot.errors.ParseError as ex: - raise SupersetParseError("Unable to split query") from ex - - statements = [statement for statement in statements if statement] - if len(statements) != 1: - raise SupersetParseError("SQLStatement should have exactly one statement") - - return statements[0] - - @classmethod - def _extract_tables_from_statement( - cls, - parsed: exp.Expression, - engine: str, - ) -> set[Table]: - """ - Find all referenced tables. - """ - dialect = SQLGLOT_DIALECTS.get(engine) - return extract_tables_from_statement(parsed, dialect) - - def format(self, comments: bool = True) -> str: - """ - Pretty-format the SQL statement. - """ - write = Dialect.get_or_raise(self._dialect) - return write.generate(self._parsed, copy=False, comments=comments, pretty=True) - - def get_settings(self) -> dict[str, str | bool]: - """ - Return the settings for the SQL statement. 
- - >>> statement = SQLStatement("SET foo = 'bar'") - >>> statement.get_settings() - {"foo": "'bar'"} - - """ - return { - eq.this.sql(): eq.expression.sql() - for set_item in self._parsed.find_all(exp.SetItem) - for eq in set_item.find_all(exp.EQ) - } - - -class KQLSplitState(enum.Enum): - """ - State machine for splitting a KQL query. - - The state machine keeps track of whether we're inside a string or not, so we - don't split the query in a semi-colon that's part of a string. - """ - - OUTSIDE_STRING = enum.auto() - INSIDE_SINGLE_QUOTED_STRING = enum.auto() - INSIDE_DOUBLE_QUOTED_STRING = enum.auto() - INSIDE_MULTILINE_STRING = enum.auto() - - -def split_kql(kql: str) -> list[str]: - """ - Custom function for splitting KQL statements. - """ - statements = [] - state = KQLSplitState.OUTSIDE_STRING - statement_start = 0 - query = kql if kql.endswith(";") else kql + ";" - for i, character in enumerate(query): - if state == KQLSplitState.OUTSIDE_STRING: - if character == ";": - statements.append(query[statement_start:i]) - statement_start = i + 1 - elif character == "'": - state = KQLSplitState.INSIDE_SINGLE_QUOTED_STRING - elif character == '"': - state = KQLSplitState.INSIDE_DOUBLE_QUOTED_STRING - elif character == "`" and query[i - 2 : i] == "``": - state = KQLSplitState.INSIDE_MULTILINE_STRING - - elif ( - state == KQLSplitState.INSIDE_SINGLE_QUOTED_STRING - and character == "'" - and query[i - 1] != "\\" - ): - state = KQLSplitState.OUTSIDE_STRING - - elif ( - state == KQLSplitState.INSIDE_DOUBLE_QUOTED_STRING - and character == '"' - and query[i - 1] != "\\" - ): - state = KQLSplitState.OUTSIDE_STRING - - elif ( - state == KQLSplitState.INSIDE_MULTILINE_STRING - and character == "`" - and query[i - 2 : i] == "``" - ): - state = KQLSplitState.OUTSIDE_STRING - - return statements - - -class KustoKQLStatement(BaseSQLStatement[str]): - """ - Special class for Kusto KQL. - - Kusto KQL is a SQL-like language, but it's not supported by sqlglot. 
Queries look - like this: - - StormEvents - | summarize PropertyDamage = sum(DamageProperty) by State - | join kind=innerunique PopulationData on State - | project State, PropertyDamagePerCapita = PropertyDamage / Population - | sort by PropertyDamagePerCapita - - See https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query/ for more - details about it. - """ - - @classmethod - def split_query( - cls, - query: str, - engine: str, - ) -> list[KustoKQLStatement]: - """ - Split a query at semi-colons. - - Since we don't have a parser, we use a simple state machine based function. See - https://learn.microsoft.com/en-us/azure/data-explorer/kusto/query/scalar-data-types/string - for more information. - """ - return [cls(statement, engine) for statement in split_kql(query)] - - @classmethod - def _parse_statement( - cls, - statement: str, - engine: str, - ) -> str: - if engine != "kustokql": - raise SupersetParseError(f"Invalid engine: {engine}") - - statements = split_kql(statement) - if len(statements) != 1: - raise SupersetParseError("SQLStatement should have exactly one statement") - - return statements[0].strip() - - @classmethod - def _extract_tables_from_statement(cls, parsed: str, engine: str) -> set[Table]: - """ - Extract all tables referenced in the statement. - - StormEvents - | where InjuriesDirect + InjuriesIndirect > 50 - | join (PopulationData) on State - | project State, Population, TotalInjuries = InjuriesDirect + InjuriesIndirect - - """ - logger.warning( - "Kusto KQL doesn't support table extraction. This means that data access " - "roles will not be enforced by Superset in the database." - ) - return set() - - def format(self, comments: bool = True) -> str: - """ - Pretty-format the SQL statement. - """ - return self._parsed - - def get_settings(self) -> dict[str, str | bool]: - """ - Return the settings for the SQL statement. 
- - >>> statement = KustoKQLStatement("set querytrace;") - >>> statement.get_settings() - {"querytrace": True} - - """ - set_regex = r"^set\s+(?P<name>\w+)(?:\s*=\s*(?P<value>\w+))?$" - if match := re.match(set_regex, self._parsed, re.IGNORECASE): - return {match.group("name"): match.group("value") or True} - - return {} - - -class SQLScript: - """ - A SQL script, with 0+ statements. - """ - - # Special engines that can't be parsed using sqlglot. Supporting non-SQL engines - # adds a lot of complexity to Superset, so we should avoid adding new engines to - # this data structure. - special_engines = { - "kustokql": KustoKQLStatement, - } - - def __init__( - self, - query: str, - engine: str, - ): - statement_class = self.special_engines.get(engine, SQLStatement) - self.statements = statement_class.split_query(query, engine) - - def format(self, comments: bool = True) -> str: - """ - Pretty-format the SQL query. - """ - return ";\n".join(statement.format(comments) for statement in self.statements) - - def get_settings(self) -> dict[str, str | bool]: - """ - Return the settings for the SQL query. - - >>> statement = SQLScript("SET foo = 'bar'; SET foo = 'baz'") - >>> statement.get_settings() - {"foo": "'baz'"} - - """ - settings: dict[str, str | bool] = {} - for statement in self.statements: - settings.update(statement.get_settings()) - - return settings - - class ParsedQuery: def __init__( self, sql_statement: str, strip_comments: bool = False, - engine: str | None = None, + engine: str = "base", ): if strip_comments: sql_statement = sqlparse.format(sql_statement, strip_comments=True) self.sql: str = sql_statement + self._engine = engine self._dialect = SQLGLOT_DIALECTS.get(engine) if engine else None self._tables: set[Table] = set() self._alias_names: set[str] = set() @@ -793,24 +264,18 @@ def _extract_tables_from_sql(self) -> set[Table]: Note: this uses sqlglot, since it's better at catching more edge cases. 
""" try: - statements = parse(self.stripped(), dialect=self._dialect) - except SqlglotError as ex: + statements = [ + statement._parsed # pylint: disable=protected-access + for statement in SQLScript(self.stripped(), self._engine).statements + ] + except SupersetParseError as ex: logger.warning("Unable to parse SQL (%s): %s", self._dialect, self.sql) - - message = ( - "Error parsing near '{highlight}' at line {line}:{col}".format( # pylint: disable=consider-using-f-string - **ex.errors[0] - ) - if isinstance(ex, ParseError) - else str(ex) - ) - raise SupersetSecurityException( SupersetError( - error_type=SupersetErrorType.QUERY_SECURITY_ACCESS_ERROR, + error_type=SupersetErrorType.INVALID_SQL_ERROR, message=__( "You may have an error in your SQL statement. {message}" - ).format(message=message), + ).format(message=ex.error.message), level=ErrorLevel.ERROR, ) ) from ex @@ -822,77 +287,6 @@ def _extract_tables_from_sql(self) -> set[Table]: if statement } - def _extract_tables_from_statement(self, statement: exp.Expression) -> set[Table]: - """ - Extract all table references in a single statement. - - Please not that this is not trivial; consider the following queries: - - DESCRIBE some_table; - SHOW PARTITIONS FROM some_table; - WITH masked_name AS (SELECT * FROM some_table) SELECT * FROM masked_name; - - See the unit tests for other tricky cases. - """ - sources: Iterable[exp.Table] - - if isinstance(statement, exp.Describe): - # A `DESCRIBE` query has no sources in sqlglot, so we need to explicitly - # query for all tables. - sources = statement.find_all(exp.Table) - elif isinstance(statement, exp.Command): - # Commands, like `SHOW COLUMNS FROM foo`, have to be converted into a - # `SELECT` statetement in order to extract tables. 
- if not (literal := statement.find(exp.Literal)): - return set() - - try: - pseudo_query = parse_one( - f"SELECT {literal.this}", - dialect=self._dialect, - ) - sources = pseudo_query.find_all(exp.Table) - except SqlglotError: - return set() - else: - sources = [ - source - for scope in traverse_scope(statement) - for source in scope.sources.values() - if isinstance(source, exp.Table) and not self._is_cte(source, scope) - ] - - return { - Table( - source.name, - source.db if source.db != "" else None, - source.catalog if source.catalog != "" else None, - ) - for source in sources - } - - def _is_cte(self, source: exp.Table, scope: Scope) -> bool: - """ - Is the source a CTE? - - CTEs in the parent scope look like tables (and are represented by - exp.Table objects), but should not be considered as such; - otherwise a user with access to table `foo` could access any table - with a query like this: - - WITH foo AS (SELECT * FROM target_table) SELECT * FROM foo - - """ - parent_sources = scope.parent.sources if scope.parent else {} - ctes_in_scope = { - name - for name, parent_scope in parent_sources.items() - if isinstance(parent_scope, Scope) - and parent_scope.scope_type == ScopeType.CTE - } - - return source.name in ctes_in_scope - @property def limit(self) -> int | None: return self._limit @@ -1177,46 +571,31 @@ class InsertRLSState(StrEnum): FOUND_TABLE = "FOUND_TABLE" -def has_table_query(token_list: TokenList) -> bool: +def has_table_query(expression: str, engine: str) -> bool: """ Return if a statement has a query reading from a table. 
- >>> has_table_query(sqlparse.parse("COUNT(*)")[0]) + >>> has_table_query("COUNT(*)", "postgresql") False - >>> has_table_query(sqlparse.parse("SELECT * FROM table")[0]) + >>> has_table_query("SELECT * FROM table", "postgresql") True Note that queries reading from constant values return false: - >>> has_table_query(sqlparse.parse("SELECT * FROM (SELECT 1)")[0]) + >>> has_table_query("SELECT * FROM (SELECT 1)", "postgresql") False """ - state = InsertRLSState.SCANNING - for token in token_list.tokens: - # Ignore comments - if isinstance(token, sqlparse.sql.Comment): - continue - - # Recurse into child token list - if isinstance(token, TokenList) and has_table_query(token): - return True + # Remove trailing semicolon. + expression = expression.strip().rstrip(";") - # Found a source keyword (FROM/JOIN) - if imt(token, m=[(Keyword, "FROM"), (Keyword, "JOIN")]): - state = InsertRLSState.SEEN_SOURCE + # Wrap the expression in parentheses if it's not already. + if not expression.startswith("("): + expression = f"({expression})" - # Found identifier/keyword after FROM/JOIN - elif state == InsertRLSState.SEEN_SOURCE and ( - isinstance(token, sqlparse.sql.Identifier) or token.ttype == Keyword - ): - return True - - # Found nothing, leaving source - elif state == InsertRLSState.SEEN_SOURCE and token.ttype != Whitespace: - state = InsertRLSState.SCANNING - - return False + sql = f"SELECT {expression}" + statement = SQLStatement(sql, engine) + return any(statement.tables) def add_table_name(rls: TokenList, table: str) -> None: @@ -1620,10 +999,13 @@ def extract_tables_from_jinja_sql(sql: str, database: Database) -> set[Table]: node.fields = nodes.TemplateData.fields node.data = "NULL" + # re-render template back into a string + rendered_template = Template(template).render() + return ( tables | ParsedQuery( - sql_statement=processor.process_template(template), + sql_statement=processor.process_template(rendered_template), engine=database.db_engine_spec.engine, ).tables ) diff 
--git a/superset/sqllab/api.py b/superset/sqllab/api.py index cdb331c19bc3..2403a36583e7 100644 --- a/superset/sqllab/api.py +++ b/superset/sqllab/api.py @@ -35,8 +35,8 @@ from superset.extensions import event_logger from superset.jinja_context import get_template_processor from superset.models.sql_lab import Query +from superset.sql.parse import SQLScript from superset.sql_lab import get_sql_results -from superset.sql_parse import SQLScript from superset.sqllab.command_status import SqlJsonExecutionStatus from superset.sqllab.exceptions import ( QueryIsForbiddenToAccessException, @@ -284,6 +284,7 @@ def export_csv(self, client_id: str) -> CsvResponse: "client_id": client_id, "row_count": row_count, "database": query.database.name, + "catalog": query.catalog, "schema": query.schema, "sql": query.sql, "exported_format": "csv", diff --git a/superset/sqllab/sql_json_executer.py b/superset/sqllab/sql_json_executer.py index ac9968ed6b46..e0d723c7b8c1 100644 --- a/superset/sqllab/sql_json_executer.py +++ b/superset/sqllab/sql_json_executer.py @@ -90,19 +90,16 @@ def execute( rendered_query: str, log_params: dict[str, Any] | None, ) -> SqlJsonExecutionStatus: - print(">>> execute <<<") query_id = execution_context.query.id try: data = self._get_sql_results_with_timeout( execution_context, rendered_query, log_params ) - self._query_dao.update_saved_query_exec_info(query_id) execution_context.set_execution_result(data) except SupersetTimeoutException: raise except Exception as ex: logger.exception("Query %i failed unexpectedly", query_id) - print(str(ex)) raise SupersetGenericDBErrorException( utils.error_msg_from_exception(ex) ) from ex @@ -202,5 +199,4 @@ def execute( query.status = QueryStatus.FAILED query.error_message = message raise SupersetErrorException(error) from ex - self._query_dao.update_saved_query_exec_info(query_id) return SqlJsonExecutionStatus.QUERY_IS_RUNNING diff --git a/superset/sqllab/sqllab_execution_context.py 
b/superset/sqllab/sqllab_execution_context.py index 5ca180d101b5..ab0f91bbf30c 100644 --- a/superset/sqllab/sqllab_execution_context.py +++ b/superset/sqllab/sqllab_execution_context.py @@ -125,6 +125,8 @@ def select_as_cta(self) -> bool: def set_database(self, database: Database) -> None: self._validate_db(database) self.database = database + if self.catalog is None: + self.catalog = database.get_default_catalog() if self.select_as_cta: schema_name = self._get_ctas_target_schema_name(database) self.create_table_as_select.target_schema_name = schema_name # type: ignore diff --git a/superset/tags/api.py b/superset/tags/api.py index ad25ffe7c936..7351a39a0462 100644 --- a/superset/tags/api.py +++ b/superset/tags/api.py @@ -47,6 +47,7 @@ openapi_spec_methods_override, TaggedObjectEntityResponseSchema, TagGetResponseSchema, + TagPostBulkResponseSchema, TagPostBulkSchema, TagPostSchema, TagPutSchema, @@ -132,6 +133,8 @@ class TagRestApi(BaseSupersetModelRestApi): openapi_spec_component_schemas = ( TagGetResponseSchema, TaggedObjectEntityResponseSchema, + TagPostBulkResponseSchema, + TagPostBulkSchema, ) apispec_parameter_schemas = { "delete_tags_schema": delete_tags_schema, @@ -211,40 +214,21 @@ def bulk_create(self) -> Response: """Bulk create tags and tagged objects --- post: - summary: Get all objects associated with a tag - parameters: - - in: path - schema: - type: integer - name: tag_id + summary: Bulk create tags and tagged objects requestBody: description: Tag schema required: true content: application/json: schema: - type: object - properties: - tags: - description: list of tag names to add to object - type: array - items: - type: string - objects_to_tag: - description: list of object names to add to object - type: array - items: - type: array + $ref: '#/components/schemas/TagPostBulkSchema' responses: 200: - description: Tag added to favorites + description: Bulk created tags and tagged objects content: application/json: schema: - type: object - properties: - 
result: - type: object + $ref: '#/components/schemas/TagPostBulkResponseSchema' 302: description: Redirects to the current digest 400: @@ -267,6 +251,7 @@ def bulk_create(self) -> Response: tagged_item: dict[str, Any] = self.add_model_schema.load( { "name": tag.get("name"), + "description": tag.get("description"), "objects_to_tag": tag.get("objects_to_tag"), } ) diff --git a/superset/tags/schemas.py b/superset/tags/schemas.py index 38676b42949e..89a5d4f46272 100644 --- a/superset/tags/schemas.py +++ b/superset/tags/schemas.py @@ -53,21 +53,41 @@ class TaggedObjectEntityResponseSchema(Schema): changed_on = fields.DateTime() created_by = fields.Nested(UserSchema(exclude=["username"])) creator = fields.String() - tags = fields.List(fields.Nested(TagGetResponseSchema)) - owners = fields.List(fields.Nested(UserSchema)) + tags = fields.List(fields.Nested(TagGetResponseSchema())) + owners = fields.List(fields.Nested(UserSchema())) + + +objects_to_tag_field = fields.List( + fields.Tuple( + ( + fields.String(metadata={"description": "type of resource"}), + fields.Int(validate=Range(min=1), metadata={"description": "resource id"}), + ), + ), + metadata={ + "description": "Objects to tag", + }, + required=False, +) class TagObjectSchema(Schema): name = fields.String(validate=Length(min=1)) description = fields.String(required=False, allow_none=True) - objects_to_tag = fields.List( - fields.Tuple((fields.String(), fields.Int(validate=Range(min=1)))), - required=False, - ) + objects_to_tag = objects_to_tag_field class TagPostBulkSchema(Schema): - tags = fields.List(fields.Nested(TagObjectSchema)) + tags = fields.List(fields.Nested(TagObjectSchema())) + + +class TagPostBulkResponseObjectSchema(Schema): + objects_tagged = objects_to_tag_field + objects_skipped = objects_to_tag_field + + +class TagPostBulkResponseSchema(Schema): + result = fields.Nested(TagPostBulkResponseObjectSchema()) class TagPostSchema(TagObjectSchema): diff --git a/superset/tasks/cache.py 
b/superset/tasks/cache.py index d1051c8fcb89..18ca7d7165ee 100644 --- a/superset/tasks/cache.py +++ b/superset/tasks/cache.py @@ -29,10 +29,11 @@ from superset.models.dashboard import Dashboard from superset.models.slice import Slice from superset.tags.models import Tag, TaggedObject +from superset.tasks.utils import fetch_csrf_token from superset.utils import json from superset.utils.date_parser import parse_human_datetime from superset.utils.machine_auth import MachineAuthProvider -from superset.utils.urls import get_url_path +from superset.utils.urls import get_url_path, is_secure_url logger = get_task_logger(__name__) logger.setLevel(logging.INFO) @@ -219,7 +220,15 @@ def fetch_url(data: str, headers: dict[str, str]) -> dict[str, str]: """ result = {} try: - url = get_url_path("Superset.warm_up_cache") + url = get_url_path("ChartRestApi.warm_up_cache") + + if is_secure_url(url): + logger.info("URL '%s' is secure. Adding Referer header.", url) + headers.update({"Referer": url}) + + # Fetch CSRF token for API request + headers.update(fetch_csrf_token(headers)) + logger.info("Fetching %s with payload %s", url, data) req = request.Request( url, data=bytes(data, "utf-8"), headers=headers, method="PUT" diff --git a/superset/tasks/celery_app.py b/superset/tasks/celery_app.py index 5a0963ccd544..2049246f0428 100644 --- a/superset/tasks/celery_app.py +++ b/superset/tasks/celery_app.py @@ -32,6 +32,9 @@ flask_app = create_app() # Need to import late, as the celery_app will have been setup by "create_app()" +# ruff: noqa: E402, F401 +# pylint: disable=wrong-import-position, unused-import +from . import cache, scheduler # Export the celery app globally for Celery (as run on the cmd line) to find app = celery_app diff --git a/superset/tasks/scheduler.py b/superset/tasks/scheduler.py index df8c3c1c4f16..ade33f7ac518 100644 --- a/superset/tasks/scheduler.py +++ b/superset/tasks/scheduler.py @@ -14,17 +14,22 @@ # KIND, either express or implied. 
See the License for the # specific language governing permissions and limitations # under the License. +from __future__ import annotations + import logging from datetime import datetime +from typing import Any -from celery import Celery +from celery import Task from celery.exceptions import SoftTimeLimitExceeded from superset import app, is_feature_enabled from superset.commands.exceptions import CommandException +from superset.commands.logs.prune import LogPruneCommand from superset.commands.report.exceptions import ReportScheduleUnexpectedError from superset.commands.report.execute import AsyncExecuteReportScheduleCommand from superset.commands.report.log_prune import AsyncPruneReportScheduleLogCommand +from superset.commands.sql_lab.query import QueryPruneCommand from superset.daos.report import ReportScheduleDAO from superset.extensions import celery_app from superset.stats_logger import BaseStatsLogger @@ -74,7 +79,7 @@ def scheduler() -> None: @celery_app.task(name="reports.execute", bind=True) -def execute(self: Celery.task, report_schedule_id: int) -> None: +def execute(self: Task, report_schedule_id: int) -> None: stats_logger: BaseStatsLogger = app.config["STATS_LOGGER"] stats_logger.incr("reports.execute") @@ -119,3 +124,41 @@ def prune_log() -> None: logger.warning("A timeout occurred while pruning report schedule logs: %s", ex) except CommandException: logger.exception("An exception occurred while pruning report schedule logs") + + +@celery_app.task(name="prune_query", bind=True) +def prune_query( # pylint: disable=unused-argument + self: Task, retention_period_days: int | None = None, **kwargs: Any +) -> None: + stats_logger: BaseStatsLogger = app.config["STATS_LOGGER"] + stats_logger.incr("prune_query") + + try: + QueryPruneCommand( + prune_query.request.properties.get("retention_period_days") + ).run() + except CommandException as ex: + logger.exception("An error occurred while pruning queries: %s", ex) + + +@celery_app.task(name="prune_logs", 
bind=True) +def prune_logs( # pylint: disable=unused-argument + self: Task, retention_period_days: int | None = None, **kwargs: Any +) -> None: + stats_logger: BaseStatsLogger = app.config["STATS_LOGGER"] + stats_logger.incr("prune_logs") + + # TODO: Deprecated: Remove support for passing retention period via options in 6.0 + if retention_period_days is None: + retention_period_days = prune_logs.request.properties.get( + "retention_period_days" + ) + logger.warning( + "Your `prune_logs` beat schedule uses `options` to pass the retention " + "period, please use `kwargs` instead." + ) + + try: + LogPruneCommand(retention_period_days).run() + except CommandException as ex: + logger.exception("An error occurred while pruning logs: %s", ex) diff --git a/superset/tasks/thumbnails.py b/superset/tasks/thumbnails.py index 483fb8495456..34c4fc7377e5 100644 --- a/superset/tasks/thumbnails.py +++ b/superset/tasks/thumbnails.py @@ -24,6 +24,7 @@ from superset import security_manager, thumbnail_cache from superset.extensions import celery_app +from superset.security.guest_token import GuestToken from superset.tasks.utils import get_executor from superset.utils.core import override_user from superset.utils.screenshots import ChartScreenshot, DashboardScreenshot @@ -85,6 +86,7 @@ def cache_dashboard_thumbnail( if not thumbnail_cache: logging.warning("No cache set, refusing to compute") return + dashboard = Dashboard.get(dashboard_id) url = get_url_path("Superset.dashboard", dashboard_id_or_slug=dashboard.id) @@ -106,15 +108,16 @@ def cache_dashboard_thumbnail( ) -# pylint: disable=too-many-arguments @celery_app.task(name="cache_dashboard_screenshot", soft_time_limit=300) -def cache_dashboard_screenshot( - current_user: Optional[str], +def cache_dashboard_screenshot( # pylint: disable=too-many-arguments + username: str, dashboard_id: int, dashboard_url: str, force: bool = True, + guest_token: Optional[GuestToken] = None, thumb_size: Optional[WindowSize] = None, window_size: 
Optional[WindowSize] = None, + cache_key: Optional[str] = None, ) -> None: # pylint: disable=import-outside-toplevel from superset.models.dashboard import Dashboard @@ -126,18 +129,25 @@ def cache_dashboard_screenshot( dashboard = Dashboard.get(dashboard_id) logger.info("Caching dashboard: %s", dashboard_url) - _, username = get_executor( - executor_types=current_app.config["THUMBNAIL_EXECUTE_AS"], - model=dashboard, - current_user=current_user, - ) - user = security_manager.find_user(username) - with override_user(user): + + # Requests from Embedded should always use the Guest user + if guest_token: + current_user = security_manager.get_guest_user_from_token(guest_token) + else: + _, exec_username = get_executor( + executor_types=current_app.config["THUMBNAIL_EXECUTE_AS"], + model=dashboard, + current_user=username, + ) + current_user = security_manager.find_user(exec_username) + + with override_user(current_user): screenshot = DashboardScreenshot(dashboard_url, dashboard.digest) screenshot.compute_and_cache( - user=user, + user=current_user, cache=thumbnail_cache, force=force, window_size=window_size, thumb_size=thumb_size, + cache_key=cache_key, ) diff --git a/superset/tasks/utils.py b/superset/tasks/utils.py index 5012330bbd43..5e3bc148082b 100644 --- a/superset/tasks/utils.py +++ b/superset/tasks/utils.py @@ -17,12 +17,18 @@ from __future__ import annotations -from typing import TYPE_CHECKING +import logging +from http.client import HTTPResponse +from typing import Optional, TYPE_CHECKING +from urllib import request +from celery.utils.log import get_task_logger from flask import current_app, g from superset.tasks.exceptions import ExecutorNotFoundError from superset.tasks.types import ExecutorType +from superset.utils import json +from superset.utils.urls import get_url_path if TYPE_CHECKING: from superset.models.dashboard import Dashboard @@ -30,6 +36,10 @@ from superset.reports.models import ReportSchedule +logger = get_task_logger(__name__) 
+logger.setLevel(logging.INFO) + + # pylint: disable=too-many-branches def get_executor( executor_types: list[ExecutorType], @@ -92,3 +102,39 @@ def get_current_user() -> str | None: return user.username return None + + +def fetch_csrf_token( + headers: dict[str, str], session_cookie_name: str = "session" +) -> dict[str, str]: + """ + Fetches a CSRF token for API requests + + :param headers: A map of headers to use in the request, including the session cookie + :returns: A map of headers, including the session cookie and csrf token + """ + url = get_url_path("SecurityRestApi.csrf_token") + logger.info("Fetching %s", url) + req = request.Request(url, headers=headers, method="GET") + response: HTTPResponse + with request.urlopen(req, timeout=600) as response: + body = response.read().decode("utf-8") + session_cookie: Optional[str] = None + cookie_headers = response.headers.get_all("set-cookie") + if cookie_headers: + for cookie in cookie_headers: + cookie = cookie.split(";", 1)[0] + name, value = cookie.split("=", 1) + if name == session_cookie_name: + session_cookie = value + break + + if response.status == 200: + data = json.loads(body) + res = {"X-CSRF-Token": data["result"]} + if session_cookie is not None: + res["Cookie"] = f"{session_cookie_name}={session_cookie}" + return res + + logger.error("Error fetching CSRF token, status code: %s", response.status) + return {} diff --git a/superset/translations/ar/LC_MESSAGES/messages.po b/superset/translations/ar/LC_MESSAGES/messages.po index 777c390f517e..b75484f3f916 100644 --- a/superset/translations/ar/LC_MESSAGES/messages.po +++ b/superset/translations/ar/LC_MESSAGES/messages.po @@ -21,7 +21,7 @@ msgid "" msgstr "" "Project-Id-Version: Superset VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2024-04-18 20:58+0300\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Language: ar\n" @@ -347,11 +347,10 @@ 
msgstr "" msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" #, python-format @@ -360,7 +359,9 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" #, python-format @@ -771,6 +772,12 @@ msgstr "" msgid "Add Alert" msgstr "" +msgid "Add BCC Recipients" +msgstr "" + +msgid "Add CC Recipients" +msgstr "" + msgid "Add CSS template" msgstr "" @@ -1723,6 +1730,9 @@ msgstr "" msgid "Axis descending" msgstr "" +msgid "BCC recipients" +msgstr "" + msgid "BOOLEAN" msgstr "" @@ -1938,6 +1948,9 @@ msgstr "" msgid "CANCEL" msgstr "" +msgid "CC recipients" +msgstr "" + msgid "CREATE DATASET" msgstr "" @@ -2404,6 +2417,9 @@ msgstr "" msgid "Choose one of the available databases from the panel on the left." msgstr "" +msgid "Choose one of the available databases on the left panel." +msgstr "" + msgid "Choose sheet name" msgstr "" @@ -6648,7 +6664,7 @@ msgid "No %s yet" msgstr "" msgid "No Data" -msgstr "" +msgstr "0" msgid "No Results" msgstr "" @@ -6701,7 +6717,7 @@ msgstr "" msgid "No data" msgstr "" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" msgid "No data in file" @@ -6755,7 +6771,7 @@ msgstr "" msgid "No results match your filter criteria" msgstr "" -msgid "No results were returned for this query" +msgid "No data" msgstr "" msgid "" @@ -7236,9 +7252,6 @@ msgid "" " or username." msgstr "" -msgid "PDF download failed, please refresh and try again." 
-msgstr "" - msgid "Page length" msgstr "" @@ -7666,6 +7679,9 @@ msgstr "" msgid "Primary y-axis format" msgstr "" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "" @@ -8760,6 +8776,10 @@ msgstr "" msgid "Select catalog or type to search catalogs" msgstr "" +#, fuzzy +msgid "Select channels" +msgstr "قواعد البيانات المدعومة" + msgid "Select chart" msgstr "" @@ -9019,6 +9039,9 @@ msgstr "" msgid "Shift + Click to sort by multiple columns" msgstr "" +msgid "Shift start date" +msgstr "" + msgid "Short description must be unique for this layer" msgstr "" @@ -9313,6 +9336,11 @@ msgstr "" msgid "Some roles do not exist" msgstr "" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "" @@ -10032,6 +10060,9 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "" +msgid "The database that was used to generate this query could not be found" +msgstr "" + msgid "The database was deleted." msgstr "" @@ -10425,6 +10456,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." msgstr "" +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + msgid "The size of each cell in meters" msgstr "" @@ -10579,7 +10619,7 @@ msgstr "" msgid "There was an error fetching the favorite status: %s" msgstr "" -msgid "There was an error fetching your recent activity:" +msgid "There was an error fetching the filtered charts and dashboards:" msgstr "" msgid "There was an error loading the catalogs" @@ -10888,6 +10928,11 @@ msgstr "" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." 
+msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -11411,6 +11456,10 @@ msgstr "" msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "" +#, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "" + msgid "Unknown Presto Error" msgstr "" @@ -11866,7 +11915,7 @@ msgstr "" msgid "Waiting on %s" msgstr "" -msgid "Waiting on database..." +msgid "Loading..." msgstr "" msgid "Want to add a new database?" @@ -13053,6 +13102,9 @@ msgstr "" msgid "in modal" msgstr "" +msgid "invalid email" +msgstr "" + msgid "is expected to be a Mapbox URL" msgstr "" diff --git a/superset/translations/de/LC_MESSAGES/messages.po b/superset/translations/de/LC_MESSAGES/messages.po index e9b6fc216d02..576959c0f392 100644 --- a/superset/translations/de/LC_MESSAGES/messages.po +++ b/superset/translations/de/LC_MESSAGES/messages.po @@ -18,7 +18,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2023-04-07 19:45+0200\n" "Last-Translator: Holger Bruch <holger.bruch@wattbewerb.de>\n" "Language: de\n" @@ -368,22 +368,20 @@ msgstr "(keine Beschreibung, klicken Sie hier, um Fehlermeldung zu sehen)" msgid "), and they become available in your SQL (example:" msgstr "), und sie werden in Ihrem SQL verfügbar (Beispiel:" -#, python-format +#, fuzzy, python-format msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"<%(url)s|In Superset erkunden>\n" -"%(table)s\n" +"Fehler: %(text)s\n" #, python-format msgid "" @@ -391,13 +389,16 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" "*%(name)s*\n" "\n" 
"%(description)s\n" "\n" -"Fehler: %(text)s\n" +"<%(url)s|In Superset erkunden>\n" +"%(table)s\n" #, python-format msgid "+ %s more" @@ -839,6 +840,14 @@ msgstr "Hinzufügen" msgid "Add Alert" msgstr "Alarm hinzufügen" +#, fuzzy +msgid "Add BCC Recipients" +msgstr "Kürzlich" + +#, fuzzy +msgid "Add CC Recipients" +msgstr "Kürzlich" + msgid "Add CSS template" msgstr "CSS Vorlagen" @@ -1885,6 +1894,10 @@ msgstr "Achse aufsteigend" msgid "Axis descending" msgstr "Achse absteigend" +#, fuzzy +msgid "BCC recipients" +msgstr "Kürzlich" + msgid "BOOLEAN" msgstr "WAHRHEITSWERT" @@ -2140,6 +2153,10 @@ msgstr "Nach Wert: Metrikwerte als Sortierschlüssel verwenden" msgid "CANCEL" msgstr "ABBRECHEN" +#, fuzzy +msgid "CC recipients" +msgstr "Kürzlich" + msgid "CREATE DATASET" msgstr "DATASET ERSTELLEN" @@ -2635,6 +2652,12 @@ msgstr "" "Wählen Sie einen der verfügbaren Datensätze aus dem Bereich auf der " "linken Seite." +#, fuzzy +msgid "Choose one of the available databases on the left panel." +msgstr "" +"Wählen Sie einen der verfügbaren Datensätze aus dem Bereich auf der " +"linken Seite." + #, fuzzy msgid "Choose sheet name" msgstr "Blattname" @@ -7269,7 +7292,7 @@ msgid "No %s yet" msgstr "Noch keine %s" msgid "No Data" -msgstr "Keine Daten" +msgstr "0" msgid "No Results" msgstr "Keine Ergebnisse" @@ -7325,7 +7348,7 @@ msgstr "Noch keine Dashboards" msgid "No data" msgstr "Keine Daten" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" "Keine Daten nach dem Filtern oder Daten sind NULL für den letzten " "Zeitdatensatz" @@ -7381,7 +7404,7 @@ msgstr "Keine Ergebnisse gefunden" msgid "No results match your filter criteria" msgstr "Keine Ergebnisse entsprechen Ihren Filterkriterien" -msgid "No results were returned for this query" +msgid "No data" msgstr "Für diese Abfrage wurden keine Ergebnisse zurückgegeben" msgid "" @@ -7927,10 +7950,6 @@ msgstr "" "Besitzende ist eine Liste von Benutzer*innen, die das Dashboard ändern " "können. 
Durchsuchbar nach Name oder Benutzer*innenname." -#, fuzzy -msgid "PDF download failed, please refresh and try again." -msgstr "Bilddownload fehlgeschlagen, bitte aktualisieren und erneut versuchen." - msgid "Page length" msgstr "Seitenlänge" @@ -8404,6 +8423,9 @@ msgstr "Primäres y-Achsenformat" msgid "Primary y-axis format" msgstr "Primäres y-Achsenformat" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "Privater Schlüssel" @@ -9585,6 +9607,10 @@ msgstr "Auswählen beliebiger Spalten für die Metadatenüberprüfung" msgid "Select catalog or type to search catalogs" msgstr "Tabelle auswählen oder tippen, um Tabellen zu suchen" +#, fuzzy +msgid "Select channels" +msgstr "Diagramme auswählen" + #, fuzzy msgid "Select chart" msgstr "Diagramme auswählen" @@ -9865,6 +9891,10 @@ msgstr "Blattname" msgid "Shift + Click to sort by multiple columns" msgstr "UMSCHALT+Klicken um nach mehreren Spalten zu sortieren" +#, fuzzy +msgid "Shift start date" +msgstr "Startdatum" + msgid "Short description must be unique for this layer" msgstr "Kurzbeschreibung muss für diese Ebene eindeutig sein" @@ -10195,6 +10225,11 @@ msgstr "Durchgezogen" msgid "Some roles do not exist" msgstr "Einige Rollen sind nicht vorhanden" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "Etwas ist schief gelaufen." @@ -10995,6 +11030,10 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "Die Datenbank hat einen unerwarteten Fehler zurückgegeben." +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "Datenbank nicht gefunden." + msgid "The database was deleted." msgstr "Die Datenbank wurde gelöscht." @@ -11514,6 +11553,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." msgstr "Das Schema wurde in der Datenbank gelöscht oder umbenannt." +msgid "The screenshot could not be downloaded. 
Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + #, fuzzy msgid "The size of each cell in meters" msgstr "Die Größe der quadratischen Zelle in Pixel" @@ -11710,8 +11758,9 @@ msgstr "Fehler beim Abrufen der zugehörigen Objekte des Datensatzes" msgid "There was an error fetching the favorite status: %s" msgstr "Beim Abrufen des Favoritenstatus ist ein Problem aufgetreten: %s" -msgid "There was an error fetching your recent activity:" -msgstr "Beim Abrufen der letzten Aktivität ist ein Fehler aufgetreten:" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "Beim Abrufen des Favoritenstatus ist ein Problem aufgetreten: %s" #, fuzzy msgid "There was an error loading the catalogs" @@ -12103,6 +12152,11 @@ msgstr "" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -12683,6 +12737,10 @@ msgstr "Unbekannter MySQL-Server-Host \"%(hostname)s\"." msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "Unbekannter MySQL-Server-Host \"%(hostname)s\"." +#, fuzzy, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "Unbekannter MySQL-Server-Host \"%(hostname)s\"." + msgid "Unknown Presto Error" msgstr "Unbekannter Presto-Fehler" @@ -13227,7 +13285,7 @@ msgid "Waiting on %s" msgstr "Sie sehen %s von %s" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "Verwalten Sie Ihre Datenbanken" msgid "Want to add a new database?" 
@@ -14567,6 +14625,10 @@ msgstr "in" msgid "in modal" msgstr " " +#, fuzzy +msgid "invalid email" +msgstr "Ungültiger Permalink-Schlüssel" + #, fuzzy msgid "is expected to be a Mapbox URL" msgstr "wird als Zahl erwartet" diff --git a/superset/translations/en/LC_MESSAGES/messages.po b/superset/translations/en/LC_MESSAGES/messages.po index 3a6e028b134b..b0cdd77d1132 100644 --- a/superset/translations/en/LC_MESSAGES/messages.po +++ b/superset/translations/en/LC_MESSAGES/messages.po @@ -17,7 +17,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2016-05-02 08:49-0700\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Language: en\n" @@ -330,11 +330,10 @@ msgstr "" msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" #, python-format @@ -343,7 +342,9 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" #, python-format @@ -754,6 +755,12 @@ msgstr "" msgid "Add Alert" msgstr "" +msgid "Add BCC Recipients" +msgstr "" + +msgid "Add CC Recipients" +msgstr "" + msgid "Add CSS template" msgstr "" @@ -1703,6 +1710,9 @@ msgstr "" msgid "Axis descending" msgstr "" +msgid "BCC recipients" +msgstr "" + msgid "BOOLEAN" msgstr "" @@ -1918,6 +1928,9 @@ msgstr "" msgid "CANCEL" msgstr "" +msgid "CC recipients" +msgstr "" + msgid "CREATE DATASET" msgstr "" @@ -2380,6 +2393,9 @@ msgstr "" msgid "Choose one of the available databases from the panel on the left." msgstr "" +msgid "Choose one of the available databases on the left panel." 
+msgstr "" + msgid "Choose sheet name" msgstr "" @@ -6586,7 +6602,7 @@ msgid "No %s yet" msgstr "" msgid "No Data" -msgstr "" +msgstr "0" msgid "No Results" msgstr "" @@ -6638,9 +6654,9 @@ msgid "No dashboards yet" msgstr "" msgid "No data" -msgstr "" +msgstr "0" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" msgid "No data in file" @@ -6694,7 +6710,7 @@ msgstr "" msgid "No results match your filter criteria" msgstr "" -msgid "No results were returned for this query" +msgid "No data" msgstr "" msgid "" @@ -7175,9 +7191,6 @@ msgid "" " or username." msgstr "" -msgid "PDF download failed, please refresh and try again." -msgstr "" - msgid "Page length" msgstr "" @@ -7602,6 +7615,9 @@ msgstr "" msgid "Primary y-axis format" msgstr "" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "" @@ -8698,6 +8714,9 @@ msgstr "" msgid "Select catalog or type to search catalogs" msgstr "" +msgid "Select channels" +msgstr "" + #, fuzzy msgid "Select chart" msgstr "" @@ -8959,6 +8978,9 @@ msgstr "" msgid "Shift + Click to sort by multiple columns" msgstr "" +msgid "Shift start date" +msgstr "" + msgid "Short description must be unique for this layer" msgstr "" @@ -9252,6 +9274,11 @@ msgstr "" msgid "Some roles do not exist" msgstr "" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "" @@ -9970,6 +9997,9 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "" +msgid "The database that was used to generate this query could not be found" +msgstr "" + msgid "The database was deleted." msgstr "" @@ -10359,6 +10389,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." msgstr "" +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." 
+msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + msgid "The size of each cell in meters" msgstr "" @@ -10513,7 +10552,7 @@ msgstr "" msgid "There was an error fetching the favorite status: %s" msgstr "" -msgid "There was an error fetching your recent activity:" +msgid "There was an error fetching the filtered charts and dashboards:" msgstr "" msgid "There was an error loading the catalogs" @@ -10822,6 +10861,11 @@ msgstr "" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -11342,6 +11386,10 @@ msgstr "" msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "" +#, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "" + msgid "Unknown Presto Error" msgstr "" @@ -11796,7 +11844,7 @@ msgstr "" msgid "Waiting on %s" msgstr "" -msgid "Waiting on database..." +msgid "Loading..." msgstr "" msgid "Want to add a new database?" 
@@ -12975,6 +13023,9 @@ msgstr "" msgid "in modal" msgstr "" +msgid "invalid email" +msgstr "" + msgid "is expected to be a Mapbox URL" msgstr "" diff --git a/superset/translations/es/LC_MESSAGES/messages.po b/superset/translations/es/LC_MESSAGES/messages.po index bd2acbd79150..0ab34b1364e1 100644 --- a/superset/translations/es/LC_MESSAGES/messages.po +++ b/superset/translations/es/LC_MESSAGES/messages.po @@ -17,7 +17,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2018-11-03 00:11+0100\n" "Last-Translator: Ruben Sastre <ruben.sastre@decathlon.com>\n" "Language: es\n" @@ -339,11 +339,10 @@ msgstr "" msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" #, python-format @@ -352,7 +351,9 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" #, python-format @@ -815,6 +816,14 @@ msgstr "Agregar" msgid "Add Alert" msgstr "alerta" +#, fuzzy +msgid "Add BCC Recipients" +msgstr "Recientes" + +#, fuzzy +msgid "Add CC Recipients" +msgstr "Recientes" + msgid "Add CSS template" msgstr "Cargar una plantilla CSS" @@ -1873,6 +1882,10 @@ msgstr "Orden Descendente" msgid "Axis descending" msgstr "Orden descendente" +#, fuzzy +msgid "BCC recipients" +msgstr "Recientes" + msgid "BOOLEAN" msgstr "" @@ -2110,6 +2123,10 @@ msgstr "" msgid "CANCEL" msgstr "CANCELAR" +#, fuzzy +msgid "CC recipients" +msgstr "Recientes" + #, fuzzy msgid "CREATE DATASET" msgstr "Cambiar fuente" @@ -2625,6 +2642,9 @@ msgstr "Agregar método de notificación" msgid "Choose one of the available databases from the panel on the left." msgstr "" +msgid "Choose one of the available databases on the left panel." 
+msgstr "" + #, fuzzy msgid "Choose sheet name" msgstr "Nombre de Hoja" @@ -7317,7 +7337,7 @@ msgstr "Aún no hay %s" #, fuzzy msgid "No Data" -msgstr "No hay datos" +msgstr "0" #, fuzzy msgid "No Results" @@ -7383,7 +7403,7 @@ msgstr "No hay dashboards" msgid "No data" msgstr "No hay datos" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" msgid "No data in file" @@ -7445,7 +7465,7 @@ msgstr "No se han encontrado resultados" msgid "No results match your filter criteria" msgstr "" -msgid "No results were returned for this query" +msgid "No data" msgstr "" msgid "" @@ -7992,9 +8012,6 @@ msgid "" " or username." msgstr "" -msgid "PDF download failed, please refresh and try again." -msgstr "" - msgid "Page length" msgstr "" @@ -8458,6 +8475,9 @@ msgstr "" msgid "Primary y-axis format" msgstr "" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "" @@ -9702,6 +9722,10 @@ msgstr "" msgid "Select catalog or type to search catalogs" msgstr "Selecciona tabla o introduce su nombre" +#, fuzzy +msgid "Select channels" +msgstr "Todos los gráficos" + #, fuzzy msgid "Select chart" msgstr "Todos los gráficos" @@ -10001,6 +10025,10 @@ msgstr "Nombre de Hoja" msgid "Shift + Click to sort by multiple columns" msgstr "" +#, fuzzy +msgid "Shift start date" +msgstr "El Gráfico ha cambiado" + msgid "Short description must be unique for this layer" msgstr "La descripción corta debe ser única para esta capa" @@ -10322,6 +10350,11 @@ msgstr "" msgid "Some roles do not exist" msgstr "El dashboard no existe" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "" @@ -11142,6 +11175,10 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "Issue 1002 - La base de datos devolvió un error inesperado." 
+#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "La base de datos no existe" + #, fuzzy msgid "The database was deleted." msgstr "La base de datos no han podido ser eliminada." @@ -11568,6 +11605,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." msgstr "Issue 1004 - La columna fue eliminada o renombrada en la base de datos." +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + msgid "The size of each cell in meters" msgstr "" @@ -11744,8 +11790,9 @@ msgstr "" msgid "There was an error fetching the favorite status: %s" msgstr "Hubo un problema al eliminar las plantillas seleccionadas: %s" -msgid "There was an error fetching your recent activity:" -msgstr "Hubo un error al obtener tu actividad reciente:" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "Hubo un problema al eliminar las plantillas seleccionadas: %s" #, fuzzy msgid "There was an error loading the catalogs" @@ -12083,6 +12130,11 @@ msgstr "" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -12667,6 +12719,10 @@ msgstr "Host desconocido de MySQL: \"%(hostname)s\"" msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "Host desconocido de MySQL: \"%(hostname)s\"" +#, fuzzy, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." 
+msgstr "Host desconocido de MySQL: \"%(hostname)s\"" + msgid "Unknown Presto Error" msgstr "Error de Presto desconocido" @@ -13164,7 +13220,7 @@ msgid "Waiting on %s" msgstr "Mostrando %s de %s" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "Nombre de tu fuente de datos" msgid "Want to add a new database?" @@ -14464,6 +14520,10 @@ msgstr "en" msgid "in modal" msgstr "en modal" +#, fuzzy +msgid "invalid email" +msgstr "Certificado Inválido" + msgid "is expected to be a Mapbox URL" msgstr "" diff --git a/superset/translations/fr/LC_MESSAGES/messages.po b/superset/translations/fr/LC_MESSAGES/messages.po index 31bffb6f5c0c..e4bb9491ad9d 100644 --- a/superset/translations/fr/LC_MESSAGES/messages.po +++ b/superset/translations/fr/LC_MESSAGES/messages.po @@ -17,7 +17,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2021-11-16 17:33+0100\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Language: fr\n" @@ -354,16 +354,15 @@ msgstr "(aucune description, cliquez pour voir le suivi de la pile)" msgid "), and they become available in your SQL (example:" msgstr "), et ils deviennent disponibles dans votre SQL (exemple :" -#, python-format +#, fuzzy, python-format msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" -msgstr "*%(name)s*%(description)s<%(url)s|Explore in Superset>%(table)s" +" Error: %(text)s\n" +" " +msgstr "*%(name)s*%(description)sErreur : %(text)s" #, python-format msgid "" @@ -371,8 +370,10 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" -msgstr "*%(name)s*%(description)sErreur : %(text)s" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" +msgstr "*%(name)s*%(description)s<%(url)s|Explore in Superset>%(table)s" #, python-format msgid "+ %s more" @@ -848,6 +849,14 @@ msgstr 
"Ajouter" msgid "Add Alert" msgstr "Ajouter une alerte" +#, fuzzy +msgid "Add BCC Recipients" +msgstr "récents" + +#, fuzzy +msgid "Add CC Recipients" +msgstr "récents" + msgid "Add CSS template" msgstr "Ajouter un modèle CSS" @@ -1962,6 +1971,10 @@ msgstr "Axe ascendant" msgid "Axis descending" msgstr "Axe descendant" +#, fuzzy +msgid "BCC recipients" +msgstr "récents" + msgid "BOOLEAN" msgstr "BOOLÉEN" @@ -2222,6 +2235,10 @@ msgstr "Par valeur : utilisez les valeurs mesures comme clé de tri" msgid "CANCEL" msgstr "CANCEL" +#, fuzzy +msgid "CC recipients" +msgstr "récents" + #, fuzzy msgid "CREATE DATASET" msgstr "CREATE DATASET" @@ -2756,6 +2773,12 @@ msgstr "" "Choisissez l’une des bases de données disponibles dans le panneau de " "gauche." +#, fuzzy +msgid "Choose one of the available databases on the left panel." +msgstr "" +"Choisissez l’une des bases de données disponibles dans le panneau de " +"gauche." + #, fuzzy msgid "Choose sheet name" msgstr "Nom de feuille" @@ -7686,7 +7709,7 @@ msgid "No %s yet" msgstr "Pas encore de %s" msgid "No Data" -msgstr "Aucune donnée" +msgstr "0" #, fuzzy msgid "No Results" @@ -7751,7 +7774,7 @@ msgstr "Pas encore de tableau de bord" msgid "No data" msgstr "Aucune donnée" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" "Pas de données après le filtrage ou données NULLES pour le dernier " "enregistrement temporel" @@ -7816,7 +7839,7 @@ msgstr "Aucun résultat trouvé" msgid "No results match your filter criteria" msgstr "Aucun résultat ne correspond à vos critères de filtrage" -msgid "No results were returned for this query" +msgid "No data" msgstr "Aucun résultat n'a été obtenu pour cette requête" msgid "" @@ -8403,9 +8426,6 @@ msgstr "" " tableau de bord. Il est possible d'effectuer une recherche par nom ou " "par nom d'utilisateur." -msgid "PDF download failed, please refresh and try again." 
-msgstr "" - msgid "Page length" msgstr "Longueur de la page" @@ -8904,6 +8924,9 @@ msgstr "Limites de l’axe des ordonnées primaires" msgid "Primary y-axis format" msgstr "Format de l’axe primaire des ordonnées" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "Clé privée" @@ -10170,6 +10193,10 @@ msgstr "Sélectionner n’importe quelle colonne pour l’inspection des métado msgid "Select catalog or type to search catalogs" msgstr "Sélectionner un tableau ou un type de tableau pour effectuer une recherche" +#, fuzzy +msgid "Select channels" +msgstr "Sélectionner des graphiques" + #, fuzzy msgid "Select chart" msgstr "Sélectionner un graphique" @@ -10485,6 +10512,10 @@ msgstr "Nom de feuille" msgid "Shift + Click to sort by multiple columns" msgstr "Shift + clic pour classer par plusieurs colonnes" +#, fuzzy +msgid "Shift start date" +msgstr "Date de début" + msgid "Short description must be unique for this layer" msgstr "La description courte doit être unique pour cette couche" @@ -10844,6 +10875,11 @@ msgstr "Solide" msgid "Some roles do not exist" msgstr "Des profils n'existent pas" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + #, fuzzy msgid "Something went wrong." msgstr "Une erreur est survenue." @@ -11705,6 +11741,10 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "La base de données a renvoyé une erreur inattendue." +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "Base de données introuvable." + msgid "The database was deleted." msgstr "La base de données a été supprimée." @@ -12222,6 +12262,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." msgstr "Le schéma a été supprimé ou renommé dans la base de données." +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. 
Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + msgid "The size of each cell in meters" msgstr "" @@ -12426,10 +12475,9 @@ msgstr "" msgid "There was an error fetching the favorite status: %s" msgstr "Erreur à la récupération du statut favori de ce tableau de bord : %s" -msgid "There was an error fetching your recent activity:" -msgstr "" -"Une erreur s'est produite lors de lors de la récupération de votre " -"activité récente :" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "Erreur à la récupération du statut favori de ce tableau de bord : %s" #, fuzzy msgid "There was an error loading the catalogs" @@ -12842,6 +12890,11 @@ msgstr "Cette mesure pourrait être incompatible avec l’ensemble de données a msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + #, fuzzy msgid "" "This section allows you to configure how to use the slice\n" @@ -13452,6 +13505,10 @@ msgstr "Hôte MySQL \"%(hostname)s\" inconnu." msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "Hôte inconnu du serveur MySQL « %(hostname)s »" +#, fuzzy, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "Hôte MySQL \"%(hostname)s\" inconnu." + msgid "Unknown Presto Error" msgstr "Erreur Presto inconnue" @@ -14022,7 +14079,7 @@ msgid "Waiting on %s" msgstr "Affichage de %s sur %s" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "Gérer vos bases de données" msgid "Want to add a new database?" 
@@ -15440,6 +15497,10 @@ msgstr "dans" msgid "in modal" msgstr "dans modal" +#, fuzzy +msgid "invalid email" +msgstr "Clé de liaison permanente non valide" + msgid "is expected to be a Mapbox URL" msgstr "" diff --git a/superset/translations/it/LC_MESSAGES/messages.po b/superset/translations/it/LC_MESSAGES/messages.po index 06ef0b2d1062..8690be08f8c5 100644 --- a/superset/translations/it/LC_MESSAGES/messages.po +++ b/superset/translations/it/LC_MESSAGES/messages.po @@ -17,7 +17,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2018-02-11 22:26+0200\n" "Last-Translator: Raffaele Spangaro <raffa@raffaelespangaro.it>\n" "Language: it\n" @@ -331,11 +331,10 @@ msgstr "" msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" #, python-format @@ -344,7 +343,9 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" #, python-format @@ -790,6 +791,12 @@ msgstr "" msgid "Add Alert" msgstr "Aggiungi grafico" +msgid "Add BCC Recipients" +msgstr "" + +msgid "Add CC Recipients" +msgstr "" + msgid "Add CSS template" msgstr "Template CSS" @@ -1809,6 +1816,9 @@ msgstr "" msgid "Axis descending" msgstr "" +msgid "BCC recipients" +msgstr "" + msgid "BOOLEAN" msgstr "" @@ -2041,6 +2051,9 @@ msgstr "" msgid "CANCEL" msgstr "" +msgid "CC recipients" +msgstr "" + #, fuzzy msgid "CREATE DATASET" msgstr "Seleziona una destinazione" @@ -2537,6 +2550,9 @@ msgstr "" msgid "Choose one of the available databases from the panel on the left." msgstr "" +msgid "Choose one of the available databases on the left panel." 
+msgstr "" + #, fuzzy msgid "Choose sheet name" msgstr "Nome Completo" @@ -7054,7 +7070,7 @@ msgstr "" #, fuzzy msgid "No Data" -msgstr "Metadati JSON" +msgstr "0" #, fuzzy msgid "No Results" @@ -7116,7 +7132,7 @@ msgstr "Elenco Dashboard" msgid "No data" msgstr "Metadati JSON" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" msgid "No data in file" @@ -7177,7 +7193,7 @@ msgstr "Nessun record trovato" msgid "No results match your filter criteria" msgstr "" -msgid "No results were returned for this query" +msgid "No data" msgstr "" msgid "" @@ -7687,9 +7703,6 @@ msgid "" " or username." msgstr "Proprietari è una lista di utenti che può alterare la dashboard." -msgid "PDF download failed, please refresh and try again." -msgstr "" - msgid "Page length" msgstr "" @@ -8142,6 +8155,9 @@ msgstr "" msgid "Primary y-axis format" msgstr "" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "" @@ -9328,6 +9344,10 @@ msgstr "" msgid "Select catalog or type to search catalogs" msgstr "" +#, fuzzy +msgid "Select channels" +msgstr "Grafico a Proiettile" + #, fuzzy msgid "Select chart" msgstr "Grafico a Proiettile" @@ -9616,6 +9636,10 @@ msgstr "Nome Completo" msgid "Shift + Click to sort by multiple columns" msgstr "" +#, fuzzy +msgid "Shift start date" +msgstr "Ultima Modifica" + msgid "Short description must be unique for this layer" msgstr "" @@ -9934,6 +9958,11 @@ msgstr "" msgid "Some roles do not exist" msgstr "Elenco Dashboard" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "" @@ -10710,6 +10739,10 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "" +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "Template CSS" + msgid "The database was deleted." 
msgstr "" @@ -11110,6 +11143,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." msgstr "" +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + msgid "The size of each cell in meters" msgstr "" @@ -11270,8 +11312,9 @@ msgstr "Errore nel recupero dei metadati della tabella" msgid "There was an error fetching the favorite status: %s" msgstr "" -msgid "There was an error fetching your recent activity:" -msgstr "" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "Errore nel creare il datasource" #, fuzzy msgid "There was an error loading the catalogs" @@ -11595,6 +11638,11 @@ msgstr "" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -12148,6 +12196,10 @@ msgstr "" msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "" +#, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "" + msgid "Unknown Presto Error" msgstr "" @@ -12629,7 +12681,7 @@ msgid "Waiting on %s" msgstr "" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "Database" msgid "Want to add a new database?" 
@@ -13881,6 +13933,9 @@ msgstr "Min" msgid "in modal" msgstr "in modale" +msgid "invalid email" +msgstr "" + msgid "is expected to be a Mapbox URL" msgstr "" diff --git a/superset/translations/ja/LC_MESSAGES/messages.po b/superset/translations/ja/LC_MESSAGES/messages.po old mode 100755 new mode 100644 index cc30ba0bd984..8ff874201dcc --- a/superset/translations/ja/LC_MESSAGES/messages.po +++ b/superset/translations/ja/LC_MESSAGES/messages.po @@ -17,7 +17,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2024-05-14 13:30+0900\n" "Last-Translator: Yuri Umezaki <bungoume@gmail.com>\n" "Language: ja\n" @@ -205,9 +205,8 @@ msgid "" "schedule with a minimum interval of %(minimum_interval)d minutes per " "execution." msgstr "" -"%(report_type) のスケジュール頻度が制限を超えています。" -"処理ごとに %(minimum_interval) 分の間隔を持ったスケジュールを設定してください。" - +"%(report_type) のスケジュール頻度が制限を超えています。処理ごとに %(minimum_interval) " +"分の間隔を持ったスケジュールを設定してください。" #, python-format msgid "%(rows)d rows returned" @@ -348,23 +347,20 @@ msgstr "(説明なし、クリックするとスタック トレースが表示 msgid "), and they become available in your SQL (example:" msgstr ")、SQL で使用できるようになります (例:" -#, python-format +#, fuzzy, python-format msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" +" %(description)s\n" "\n" -"<%(url)s|Explore in Superset>\n" -"\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"<%(url)s|スーパーセット内の探索>\n" -"\n" -"%(table)s\n" +"エラー: %(text)s\n" #, python-format msgid "" @@ -372,13 +368,17 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"エラー: %(text)s\n" +"<%(url)s|スーパーセット内の探索>\n" +"\n" +"%(table)s\n" #, python-format msgid "+ %s more" @@ -797,6 +797,14 @@ msgstr "追加" msgid "Add Alert" msgstr "アラートを追加" +#, 
fuzzy +msgid "Add BCC Recipients" +msgstr "最近の" + +#, fuzzy +msgid "Add CC Recipients" +msgstr "最近の" + msgid "Add CSS template" msgstr "CSSテンプレートを追加" @@ -993,14 +1001,12 @@ msgstr "比較値からの正または負の変化に基づいてチャート msgid "" "Adjust column settings such as specifying the columns to read, how " "duplicates are handled, column data types, and more." -msgstr "" -"列の設定を調整する。(例:読み込む列の指定、重複の処理方法、列のデータ型)" +msgstr "列の設定を調整する。(例:読み込む列の指定、重複の処理方法、列のデータ型)" msgid "" "Adjust how spaces, blank lines, null values are handled and other file " "wide settings." -msgstr "" -"スペース、空白行、NULL値の扱い方や、その他のファイル全体の設定を調整する。" +msgstr "スペース、空白行、NULL値の扱い方や、その他のファイル全体の設定を調整する。" msgid "Adjust how this database will interact with SQL Lab." msgstr "このデータベースが SQL Lab とどのように対話するかを調整" @@ -1760,6 +1766,10 @@ msgstr "軸上昇" msgid "Axis descending" msgstr "軸下降" +#, fuzzy +msgid "BCC recipients" +msgstr "最近の" + msgid "BOOLEAN" msgstr "ブール値" @@ -1987,6 +1997,10 @@ msgstr "値による: メトリック値を並べ替えキーとして使用し msgid "CANCEL" msgstr "キャンセル" +#, fuzzy +msgid "CC recipients" +msgstr "最近の" + msgid "CREATE DATASET" msgstr "データセットを作成" @@ -2460,6 +2474,10 @@ msgstr "通知方法を追加" msgid "Choose one of the available databases from the panel on the left." msgstr "左側のパネルから利用可能なデータベースの 1 つを選択します。" +#, fuzzy +msgid "Choose one of the available databases on the left panel." 
+msgstr "左側のパネルから利用可能なデータベースの 1 つを選択します。" + #, fuzzy msgid "Choose sheet name" msgstr "シート名" @@ -6770,7 +6788,7 @@ msgid "No %s yet" msgstr "%s はまだありません" msgid "No Data" -msgstr "データなし" +msgstr "0" msgid "No Results" msgstr "結果がありません" @@ -6824,7 +6842,7 @@ msgstr "ダッシュボードはまだありません" msgid "No data" msgstr "データなし" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "フィルタリング後にデータがないか、最新の時間レコードのデータが NULL です" msgid "No data in file" @@ -6878,7 +6896,7 @@ msgstr "結果が見つかりませんでした" msgid "No results match your filter criteria" msgstr "フィルタ条件に一致する結果はありません" -msgid "No results were returned for this query" +msgid "No data" msgstr "このクエリでは結果が返されませんでした" msgid "" @@ -7377,9 +7395,6 @@ msgid "" " or username." msgstr "所有者は、ダッシュボードを変更できるユーザーのリストです。名前またはユーザー名で検索できます。" -msgid "PDF download failed, please refresh and try again." -msgstr "PDF のダウンロードに失敗しました。更新してもう一度お試しください。" - msgid "Page length" msgstr "ページの長さ" @@ -7812,6 +7827,9 @@ msgstr "主 y 軸境界" msgid "Primary y-axis format" msgstr "プライマリ y 軸フォーマット" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "個人のキー" @@ -8936,6 +8954,10 @@ msgstr "メタデータ検査用の列を選択します" msgid "Select catalog or type to search catalogs" msgstr "テーブルを選択するか、テーブルを検索するタイプを選択してください" +#, fuzzy +msgid "Select channels" +msgstr "チャートを選択" + msgid "Select chart" msgstr "チャートを選択" @@ -9217,6 +9239,10 @@ msgstr "シート名" msgid "Shift + Click to sort by multiple columns" msgstr "Shift クリックして複数の列で並べ替えます" +#, fuzzy +msgid "Shift start date" +msgstr "開始日" + msgid "Short description must be unique for this layer" msgstr "短い説明はこのレイヤーに対して一意である必要があります。" @@ -9517,6 +9543,11 @@ msgstr "ソリッド" msgid "Some roles do not exist" msgstr "一部のロールが存在しません" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "何か問題が発生しました。" @@ -10256,6 +10287,10 @@ msgstr "このクエリで参照されているデータベースが見つかり msgid "The database returned an unexpected error." 
msgstr "データベースが予期しないエラーを返しました。" +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "データベースが見つかりませんでした。" + msgid "The database was deleted." msgstr "データベースが削除されました。" @@ -10681,6 +10716,15 @@ msgstr "送信されたペイロードのスキーマが無効です。" msgid "The schema was deleted or renamed in the database." msgstr "データベース内のスキーマが削除されたか、名前が変更されました。" +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + msgid "The size of each cell in meters" msgstr "各セルのサイズ (メートル単位)" @@ -10844,8 +10888,9 @@ msgstr "データセットの関連オブジェクトの取得中にエラーが msgid "There was an error fetching the favorite status: %s" msgstr "お気に入りステータスの取得中にエラーが発生しました: %s" -msgid "There was an error fetching your recent activity:" -msgstr "最近のアクティビティの取得中にエラーが発生しました:" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "お気に入りステータスの取得中にエラーが発生しました: %s" #, fuzzy msgid "There was an error loading the catalogs" @@ -11178,6 +11223,11 @@ msgstr "このメトリクスは現在のデータセットと互換性がない msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -11708,6 +11758,10 @@ msgstr "不明な Doris サーバー ホスト \"%(hostname)s\"。" msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "不明な MySQL サーバー ホスト \"%(hostname)s\"。" +#, fuzzy, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "不明な Doris サーバー ホスト \"%(hostname)s\"。" + msgid "Unknown Presto Error" msgstr "不明なプレスト エラー" @@ -12189,7 +12243,7 @@ msgid "Waiting on %s" msgstr "Showing %s of %s" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "データベースを管理する" msgid "Want to add a new database?" 
@@ -13395,6 +13449,10 @@ msgstr "In" msgid "in modal" msgstr "モーダルで" +#, fuzzy +msgid "invalid email" +msgstr "パーマリンクキーが無効です。" + msgid "is expected to be a Mapbox URL" msgstr "Mapbox URL であることが期待されます" diff --git a/superset/translations/ko/LC_MESSAGES/messages.po b/superset/translations/ko/LC_MESSAGES/messages.po index 040cb99855ee..b070b4d9887c 100644 --- a/superset/translations/ko/LC_MESSAGES/messages.po +++ b/superset/translations/ko/LC_MESSAGES/messages.po @@ -17,7 +17,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2019-02-02 22:28+0900\n" "Last-Translator: \n" "Language: ko\n" @@ -330,11 +330,10 @@ msgstr "" msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" #, python-format @@ -343,7 +342,9 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" #, python-format @@ -785,6 +786,12 @@ msgstr "" msgid "Add Alert" msgstr "차트 추가" +msgid "Add BCC Recipients" +msgstr "" + +msgid "Add CC Recipients" +msgstr "" + msgid "Add CSS template" msgstr "CSS 템플릿" @@ -1793,6 +1800,9 @@ msgstr "" msgid "Axis descending" msgstr "" +msgid "BCC recipients" +msgstr "" + msgid "BOOLEAN" msgstr "" @@ -2020,6 +2030,9 @@ msgstr "" msgid "CANCEL" msgstr "" +msgid "CC recipients" +msgstr "" + #, fuzzy msgid "CREATE DATASET" msgstr "데이터소스 선택" @@ -2516,6 +2529,9 @@ msgstr "" msgid "Choose one of the available databases from the panel on the left." msgstr "" +msgid "Choose one of the available databases on the left panel." 
+msgstr "" + #, fuzzy msgid "Choose sheet name" msgstr "테이블 명" @@ -6980,7 +6996,7 @@ msgid "No %s yet" msgstr "" msgid "No Data" -msgstr "" +msgstr "0" #, fuzzy msgid "No Results" @@ -7042,7 +7058,7 @@ msgstr "대시보드" msgid "No data" msgstr "" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" msgid "No data in file" @@ -7099,7 +7115,7 @@ msgstr "" msgid "No results match your filter criteria" msgstr "" -msgid "No results were returned for this query" +msgid "No data" msgstr "" msgid "" @@ -7600,9 +7616,6 @@ msgid "" " or username." msgstr "" -msgid "PDF download failed, please refresh and try again." -msgstr "" - msgid "Page length" msgstr "" @@ -8045,6 +8058,9 @@ msgstr "" msgid "Primary y-axis format" msgstr "" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "" @@ -9223,6 +9239,10 @@ msgstr "" msgid "Select catalog or type to search catalogs" msgstr "" +#, fuzzy +msgid "Select channels" +msgstr "차트 추가" + #, fuzzy msgid "Select chart" msgstr "차트 추가" @@ -9507,6 +9527,10 @@ msgstr "테이블 명" msgid "Shift + Click to sort by multiple columns" msgstr "" +#, fuzzy +msgid "Shift start date" +msgstr "시작 시간" + msgid "Short description must be unique for this layer" msgstr "" @@ -9825,6 +9849,11 @@ msgstr "" msgid "Some roles do not exist" msgstr "몇몇 역할이 존재하지 않습니다" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "" @@ -10593,6 +10622,10 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "이슈 1002 - 데이터베이스에 예상치 못한 에러가 발생했습니다." +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "데이터베이스를 찾을 수 없습니다." + #, fuzzy msgid "The database was deleted." msgstr "데이터베이스를 삭제할 수 없습니다." @@ -10986,6 +11019,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." msgstr "" +msgid "The screenshot could not be downloaded. 
Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + msgid "The size of each cell in meters" msgstr "" @@ -11143,8 +11185,9 @@ msgstr "데이터 베이스 목록을 가져오는 도중 에러가 발생하였 msgid "There was an error fetching the favorite status: %s" msgstr "" -msgid "There was an error fetching your recent activity:" -msgstr "" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "데이터 베이스 목록을 가져오는 도중 에러가 발생하였습니다." #, fuzzy msgid "There was an error loading the catalogs" @@ -11455,6 +11498,11 @@ msgstr "" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -12004,6 +12052,10 @@ msgstr "" msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "" +#, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "" + msgid "Unknown Presto Error" msgstr "" @@ -12487,7 +12539,7 @@ msgid "Waiting on %s" msgstr "" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "데이터베이스 선택" msgid "Want to add a new database?" 
@@ -13708,6 +13760,9 @@ msgstr "" msgid "in modal" msgstr "" +msgid "invalid email" +msgstr "" + msgid "is expected to be a Mapbox URL" msgstr "" diff --git a/superset/translations/messages.pot b/superset/translations/messages.pot index 37d4abdc440b..c36c1ad9c38c 100644 --- a/superset/translations/messages.pot +++ b/superset/translations/messages.pot @@ -25,7 +25,7 @@ msgid "" msgstr "" "Project-Id-Version: Superset VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Language-Team: LANGUAGE <LL@li.org>\n" @@ -336,11 +336,10 @@ msgstr "" msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" #, python-format @@ -349,7 +348,9 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" #, python-format @@ -760,6 +761,12 @@ msgstr "" msgid "Add Alert" msgstr "" +msgid "Add BCC Recipients" +msgstr "" + +msgid "Add CC Recipients" +msgstr "" + msgid "Add CSS template" msgstr "" @@ -1708,6 +1715,9 @@ msgstr "" msgid "Axis descending" msgstr "" +msgid "BCC recipients" +msgstr "" + msgid "BOOLEAN" msgstr "" @@ -1923,6 +1933,9 @@ msgstr "" msgid "CANCEL" msgstr "" +msgid "CC recipients" +msgstr "" + msgid "CREATE DATASET" msgstr "" @@ -2385,6 +2398,9 @@ msgstr "" msgid "Choose one of the available databases from the panel on the left." msgstr "" +msgid "Choose one of the available databases on the left panel." +msgstr "" + msgid "Choose sheet name" msgstr "" @@ -7172,9 +7188,6 @@ msgid "" " or username." msgstr "" -msgid "PDF download failed, please refresh and try again." 
-msgstr "" - msgid "Page length" msgstr "" @@ -7598,6 +7611,9 @@ msgstr "" msgid "Primary y-axis format" msgstr "" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "" @@ -8691,6 +8707,9 @@ msgstr "" msgid "Select catalog or type to search catalogs" msgstr "" +msgid "Select channels" +msgstr "" + msgid "Select chart" msgstr "" @@ -8948,6 +8967,9 @@ msgstr "" msgid "Shift + Click to sort by multiple columns" msgstr "" +msgid "Shift start date" +msgstr "" + msgid "Short description must be unique for this layer" msgstr "" @@ -9241,6 +9263,11 @@ msgstr "" msgid "Some roles do not exist" msgstr "" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "" @@ -9959,6 +9986,9 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "" +msgid "The database that was used to generate this query could not be found" +msgstr "" + msgid "The database was deleted." msgstr "" @@ -10348,6 +10378,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." msgstr "" +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + msgid "The size of each cell in meters" msgstr "" @@ -10502,7 +10541,7 @@ msgstr "" msgid "There was an error fetching the favorite status: %s" msgstr "" -msgid "There was an error fetching your recent activity:" +msgid "There was an error fetching the filtered charts and dashboards:" msgstr "" msgid "There was an error loading the catalogs" @@ -10811,6 +10850,11 @@ msgstr "" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." 
+msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -11329,6 +11373,10 @@ msgstr "" msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "" +#, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "" + msgid "Unknown Presto Error" msgstr "" @@ -12962,6 +13010,9 @@ msgstr "" msgid "in modal" msgstr "" +msgid "invalid email" +msgstr "" + msgid "is expected to be a Mapbox URL" msgstr "" diff --git a/superset/translations/nl/LC_MESSAGES/messages.po b/superset/translations/nl/LC_MESSAGES/messages.po index d5eeafc46d20..616127e9596e 100644 --- a/superset/translations/nl/LC_MESSAGES/messages.po +++ b/superset/translations/nl/LC_MESSAGES/messages.po @@ -13,7 +13,7 @@ msgid "" msgstr "" "Project-Id-Version: superset-ds\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2024-05-08 14:41+0000\n" "Last-Translator: \n" "Language: nl\n" @@ -360,23 +360,20 @@ msgstr "(geen beschrijving, klik om de tracering te zien)" msgid "), and they become available in your SQL (example:" msgstr "), en ze zijn beschikbaar in uw SQL (voorbeeld:" -#, python-format +#, fuzzy, python-format msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"<%(url)s| Verken in Superset>\n" -"\n" -"%(table)s\n" +"Fout: %(text)s\n" #, python-format msgid "" @@ -384,13 +381,17 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"Fout: %(text)s\n" +"<%(url)s| Verken in Superset>\n" +"\n" +"%(table)s\n" #, python-format msgid "+ %s more" @@ -835,6 +836,14 @@ msgstr "Voeg toe" msgid "Add Alert" msgstr "Alarm toevoegen" +#, fuzzy +msgid "Add 
BCC Recipients" +msgstr "recente" + +#, fuzzy +msgid "Add CC Recipients" +msgstr "recente" + msgid "Add CSS template" msgstr "Voeg CSS template toe" @@ -1871,6 +1880,10 @@ msgstr "As oplopend" msgid "Axis descending" msgstr "As aflopend" +#, fuzzy +msgid "BCC recipients" +msgstr "recente" + msgid "BOOLEAN" msgstr "BOOLEAN" @@ -2121,6 +2134,10 @@ msgstr "Op waarde: gebruik metriek waarden als sorteersleutel" msgid "CANCEL" msgstr "ANNULEER" +#, fuzzy +msgid "CC recipients" +msgstr "recente" + msgid "CREATE DATASET" msgstr "DATASET AANMAKEN" @@ -2605,6 +2622,10 @@ msgstr "Meldingsmethode toevoegen" msgid "Choose one of the available databases from the panel on the left." msgstr "Kies een van de beschikbare databases uit het paneel aan de linkerkant." +#, fuzzy +msgid "Choose one of the available databases on the left panel." +msgstr "Kies een van de beschikbare databases uit het paneel aan de linkerkant." + #, fuzzy msgid "Choose sheet name" msgstr "Naam tabblad" @@ -7207,7 +7228,7 @@ msgstr "Nog geen dashboards" msgid "No data" msgstr "Geen data" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "Geen gegevens na filteren of gegevens is NULL voor het laatste tijdrecord" msgid "No data in file" @@ -7261,7 +7282,7 @@ msgstr "Geen resultaten gevonden" msgid "No results match your filter criteria" msgstr "Geen resultaten gevonden met uw filtercriteria" -msgid "No results were returned for this query" +msgid "No data" msgstr "Er zijn geen resultaten gevonden voor deze zoekopdracht" msgid "" @@ -7794,9 +7815,6 @@ msgstr "" "Eigenaren zijn een lijst met gebruikers die het dashboard kunnen " "veranderen. Doorzoekbaar op naam of gebruikersnaam." -msgid "PDF download failed, please refresh and try again." -msgstr "PDF download is mislukt, vernieuw en probeer het opnieuw." 
- msgid "Page length" msgstr "Pagina lengte" @@ -8248,6 +8266,9 @@ msgstr "Primaire y-as Grenzen" msgid "Primary y-axis format" msgstr "Primair y-as formaat" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "Privésleutel" @@ -9413,6 +9434,10 @@ msgstr "Selecteer alle kolommen voor metadata inspectie" msgid "Select catalog or type to search catalogs" msgstr "Selecteer tabel of type om tabellen te zoeken" +#, fuzzy +msgid "Select channels" +msgstr "Selecteer grafieken" + msgid "Select chart" msgstr "Selecteer grafiek" @@ -9706,6 +9731,10 @@ msgstr "Naam tabblad" msgid "Shift + Click to sort by multiple columns" msgstr "Shift + Klik om te sorteren op meerdere kolommen" +#, fuzzy +msgid "Shift start date" +msgstr "Start datum" + msgid "Short description must be unique for this layer" msgstr "Korte beschrijving moet uniek zijn voor deze laag" @@ -10030,6 +10059,11 @@ msgstr "Stevig" msgid "Some roles do not exist" msgstr "Sommige rollen bestaan niet" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "Er ging iets mis." @@ -10808,6 +10842,10 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "De database gaf een onverwachte foutmelding." +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "De database kon niet worden gevonden" + msgid "The database was deleted." msgstr "De database is verwijderd." @@ -11318,6 +11356,15 @@ msgstr "Het schema van de ingediende payload is ongeldig." msgid "The schema was deleted or renamed in the database." msgstr "Het schema werd verwijderd of hernoemd in de database." +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." 
+msgstr "" + msgid "The size of each cell in meters" msgstr "Grootte van elke cel in meters" @@ -11514,8 +11561,9 @@ msgstr "" msgid "There was an error fetching the favorite status: %s" msgstr "Er is een fout opgetreden bij het ophalen van de favoriete status: %s" -msgid "There was an error fetching your recent activity:" -msgstr "Er is een fout opgetreden bij het ophalen van uw recente activiteit:" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "Er is een fout opgetreden bij het ophalen van de favoriete status: %s" #, fuzzy msgid "There was an error loading the catalogs" @@ -11898,6 +11946,11 @@ msgstr "Deze metriek is mogelijk niet compatibel met huidig dataset" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -12462,6 +12515,10 @@ msgstr "Onbekende Doris server host \"%(hostname)s\"." msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "Onbekende MySQL server host “%(hostname)s”." +#, fuzzy, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "Onbekende Doris server host \"%(hostname)s\"." + msgid "Unknown Presto Error" msgstr "Onbekende Presto Fout" @@ -12994,7 +13051,7 @@ msgid "Waiting on %s" msgstr "Weergave %s van %s" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "Beheer je databases" msgid "Want to add a new database?" 
@@ -14313,6 +14370,10 @@ msgstr "in" msgid "in modal" msgstr "in modal" +#, fuzzy +msgid "invalid email" +msgstr "Ongeldige permalink sleutel" + msgid "is expected to be a Mapbox URL" msgstr "wordt verwacht een Mapbox URL te zijn" diff --git a/superset/translations/pt/LC_MESSAGES/messages.po b/superset/translations/pt/LC_MESSAGES/messages.po index f845272b7b92..a91bc365998b 100644 --- a/superset/translations/pt/LC_MESSAGES/messages.po +++ b/superset/translations/pt/LC_MESSAGES/messages.po @@ -17,7 +17,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2018-03-12 16:24+0000\n" "Last-Translator: Nuno Heli Beires <nuno.beires@douroeci.com>\n" "Language: pt\n" @@ -334,11 +334,10 @@ msgstr "" msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" #, python-format @@ -347,7 +346,9 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" #, python-format @@ -796,6 +797,12 @@ msgstr "" msgid "Add Alert" msgstr "Gráfico de Queijo" +msgid "Add BCC Recipients" +msgstr "" + +msgid "Add CC Recipients" +msgstr "" + #, fuzzy msgid "Add CSS template" msgstr "Modelos CSS" @@ -1837,6 +1844,9 @@ msgstr "Ordenar decrescente" msgid "Axis descending" msgstr "Ordenar decrescente" +msgid "BCC recipients" +msgstr "" + msgid "BOOLEAN" msgstr "" @@ -2067,6 +2077,9 @@ msgstr "" msgid "CANCEL" msgstr "" +msgid "CC recipients" +msgstr "" + #, fuzzy msgid "CREATE DATASET" msgstr "Criado em" @@ -2572,6 +2585,9 @@ msgstr "Metadados adicionais" msgid "Choose one of the available databases from the panel on the left." msgstr "" +msgid "Choose one of the available databases on the left panel." 
+msgstr "" + #, fuzzy msgid "Choose sheet name" msgstr "Nome Detalhado" @@ -7225,7 +7241,7 @@ msgstr "Sem dashboards" msgid "No data" msgstr "Metadados" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" msgid "No data in file" @@ -7286,7 +7302,7 @@ msgstr "Nenhum registo encontrado" msgid "No results match your filter criteria" msgstr "" -msgid "No results were returned for this query" +msgid "No data" msgstr "" msgid "" @@ -7804,9 +7820,6 @@ msgid "" " or username." msgstr "Proprietários é uma lista de utilizadores que podem alterar o dashboard." -msgid "PDF download failed, please refresh and try again." -msgstr "" - msgid "Page length" msgstr "" @@ -8259,6 +8272,9 @@ msgstr "" msgid "Primary y-axis format" msgstr "" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "" @@ -9470,6 +9486,10 @@ msgstr "" msgid "Select catalog or type to search catalogs" msgstr "" +#, fuzzy +msgid "Select channels" +msgstr "Gráfico de bala" + #, fuzzy msgid "Select chart" msgstr "Gráfico de bala" @@ -9759,6 +9779,10 @@ msgstr "Nome Detalhado" msgid "Shift + Click to sort by multiple columns" msgstr "" +#, fuzzy +msgid "Shift start date" +msgstr "Início" + msgid "Short description must be unique for this layer" msgstr "" @@ -10076,6 +10100,11 @@ msgstr "" msgid "Some roles do not exist" msgstr "Dashboards" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "" @@ -10863,6 +10892,10 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "" +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "Visualização %(id)s não encontrada" + #, fuzzy msgid "The database was deleted." msgstr "Esta origem de dados parece ter sido excluída" @@ -11274,6 +11307,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." 
msgstr "" +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + msgid "The size of each cell in meters" msgstr "" @@ -11452,8 +11494,9 @@ msgstr "Desculpe, houve um erro ao gravar este dashbard: " msgid "There was an error fetching the favorite status: %s" msgstr "Desculpe, houve um erro ao gravar este dashbard: " -msgid "There was an error fetching your recent activity:" -msgstr "" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "Desculpe, houve um erro ao gravar este dashbard: " #, fuzzy msgid "There was an error loading the catalogs" @@ -11784,6 +11827,11 @@ msgstr "" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -12347,6 +12395,10 @@ msgstr "" msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "" +#, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "" + msgid "Unknown Presto Error" msgstr "" @@ -12829,7 +12881,7 @@ msgid "Waiting on %s" msgstr "" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "Editar Base de Dados" msgid "Want to add a new database?" 
@@ -14085,6 +14137,9 @@ msgstr "Mín" msgid "in modal" msgstr "em modal" +msgid "invalid email" +msgstr "" + msgid "is expected to be a Mapbox URL" msgstr "" diff --git a/superset/translations/pt_BR/LC_MESSAGES/messages.po b/superset/translations/pt_BR/LC_MESSAGES/messages.po index c67226a4d9ea..f1686a8d36f8 100644 --- a/superset/translations/pt_BR/LC_MESSAGES/messages.po +++ b/superset/translations/pt_BR/LC_MESSAGES/messages.po @@ -17,7 +17,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2023-05-22 08:04-0400\n" "Last-Translator: \n" "Language: pt_BR\n" @@ -371,19 +371,16 @@ msgstr "), e eles tornaram-se disponíveis no seu SQL (exemplo:" msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"<%(url) s|Explore no Superset >\n" -"\n" -"%(table)s" +"Erro: %(text)s" #, fuzzy, python-format msgid "" @@ -391,13 +388,17 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"Erro: %(text)s" +"<%(url) s|Explore no Superset >\n" +"\n" +"%(table)s" #, python-format msgid "+ %s more" @@ -842,6 +843,14 @@ msgstr "Adicionar" msgid "Add Alert" msgstr "Adicionar alerta" +#, fuzzy +msgid "Add BCC Recipients" +msgstr "recentes" + +#, fuzzy +msgid "Add CC Recipients" +msgstr "recentes" + msgid "Add CSS template" msgstr "Adicionar modelo CSS" @@ -1884,6 +1893,10 @@ msgstr "Eixo ascendente" msgid "Axis descending" msgstr "Eixo descendente" +#, fuzzy +msgid "BCC recipients" +msgstr "recentes" + msgid "BOOLEAN" msgstr "BOLEANO" @@ -2134,6 +2147,10 @@ msgstr "Por valor: utilizar valores métricos como chave de ordenação" msgid "CANCEL" msgstr 
"CANCELAR" +#, fuzzy +msgid "CC recipients" +msgstr "recentes" + msgid "CREATE DATASET" msgstr "CREATE DATASET" @@ -2630,6 +2647,10 @@ msgstr "Adicionar método de notificação" msgid "Choose one of the available databases from the panel on the left." msgstr "Escolha um dos bancos de dados disponíveis no painel na esquerda." +#, fuzzy +msgid "Choose one of the available databases on the left panel." +msgstr "Escolha um dos bancos de dados disponíveis no painel na esquerda." + #, fuzzy msgid "Choose sheet name" msgstr "Nome da planilha" @@ -7283,7 +7304,7 @@ msgstr "Ainda não há painéis" msgid "No data" msgstr "Sem dados" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" "Não há dados após a filtragem ou os dados são NULL para o último registo " "de tempo" @@ -7339,7 +7360,7 @@ msgstr "Não foram encontrados resultados" msgid "No results match your filter criteria" msgstr "Nenhum resultado corresponde aos seus critérios de filtragem" -msgid "No results were returned for this query" +msgid "No data" msgstr "Não foram apresentados resultados para esta consulta" msgid "" @@ -7880,10 +7901,6 @@ msgstr "" "Os proprietários são uma lista de usuários que podem alterar o painel. " "Pesquisável por nome ou nome de usuário." -#, fuzzy -msgid "PDF download failed, please refresh and try again." -msgstr "Falha no download da imagem, por favor atualizar e tentar novamente." 
- msgid "Page length" msgstr "Comprimento da página" @@ -8348,6 +8365,9 @@ msgstr "Formato do eixo y primário" msgid "Primary y-axis format" msgstr "Formato do eixo y primário" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "Chave privada" @@ -9528,6 +9548,10 @@ msgstr "Selecionar quaisquer colunas para inspeção de metadados" msgid "Select catalog or type to search catalogs" msgstr "Selecione a tabela ou digite para pesquisar tabelas" +#, fuzzy +msgid "Select channels" +msgstr "Selecionar gráficos" + #, fuzzy msgid "Select chart" msgstr "Selecionar gráficos" @@ -9810,6 +9834,10 @@ msgstr "Nome da planilha" msgid "Shift + Click to sort by multiple columns" msgstr "Shift + clique para organizar por colunas múltiplas" +#, fuzzy +msgid "Shift start date" +msgstr "Data de início" + msgid "Short description must be unique for this layer" msgstr "Uma breve descrição deve ser única para essa camada" @@ -10143,6 +10171,11 @@ msgstr "Sólido" msgid "Some roles do not exist" msgstr "Algumas funções não existem" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "Algo não correu bem." @@ -10937,6 +10970,10 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "O banco de dados retornou um erro inesperado." +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "Não foi possível encontrar o banco de dados" + msgid "The database was deleted." msgstr "O banco de dados foi excluído." @@ -11430,6 +11467,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." msgstr "O esquema foi excluído ou renomeado no banco de dados." +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." 
+msgstr "" + #, fuzzy msgid "The size of each cell in meters" msgstr "O tamanho da célula quadrada, em pixels" @@ -11623,8 +11669,9 @@ msgstr "Ocorreu um erro ao buscar os objetos relacionados ao conjunto de dados" msgid "There was an error fetching the favorite status: %s" msgstr "Houve um erro ao buscar o status de favorito: %s" -msgid "There was an error fetching your recent activity:" -msgstr "Ocorreu um erro ao buscar sua atividade recente:" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "Houve um erro ao buscar o status de favorito: %s" #, fuzzy msgid "There was an error loading the catalogs" @@ -11995,6 +12042,11 @@ msgstr "Essa métrica pode ser incompatível com o conjunto de dados atual" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -12569,6 +12621,10 @@ msgstr "Host do servidor MySQL desconhecido \"%(hostname)s\"." msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "Host do servidor MySQL desconhecido \"%(hostname)s\"." +#, fuzzy, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "Host do servidor MySQL desconhecido \"%(hostname)s\"." + msgid "Unknown Presto Error" msgstr "Erro desconhecido do Presto" @@ -13104,7 +13160,7 @@ msgid "Waiting on %s" msgstr "Mostrando %s de %s" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "Gerenciar seus bancos de dados" msgid "Want to add a new database?" 
@@ -14427,6 +14483,10 @@ msgstr "em" msgid "in modal" msgstr "no modal" +#, fuzzy +msgid "invalid email" +msgstr "Chave de permalink inválida" + #, fuzzy msgid "is expected to be a Mapbox URL" msgstr "espera-se que seja um número" diff --git a/superset/translations/ru/LC_MESSAGES/messages.po b/superset/translations/ru/LC_MESSAGES/messages.po index 4ab0cd10cb2e..57d07ef1f86e 100644 --- a/superset/translations/ru/LC_MESSAGES/messages.po +++ b/superset/translations/ru/LC_MESSAGES/messages.po @@ -17,7 +17,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2024-07-03 22:19+0400\n" "Last-Translator: Daniel Checheta\n" "Language: ru\n" @@ -377,23 +377,20 @@ msgstr "(нет описания, нажмите для просмотра тр msgid "), and they become available in your SQL (example:" msgstr "), и они станут доступны в ваших SQL-запросах (пример:" -#, python-format +#, fuzzy, python-format msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"<%(url)s|Исследовать в Superset>\n" -"\n" -"%(table)s\n" +"Ошибка: %(text)s\n" #, python-format msgid "" @@ -401,13 +398,17 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"Ошибка: %(text)s\n" +"<%(url)s|Исследовать в Superset>\n" +"\n" +"%(table)s\n" #, python-format msgid "+ %s more" @@ -843,6 +844,14 @@ msgstr "Добавить" msgid "Add Alert" msgstr "Добавить оповещение" +#, fuzzy +msgid "Add BCC Recipients" +msgstr "получатели" + +#, fuzzy +msgid "Add CC Recipients" +msgstr "получатели" + msgid "Add CSS template" msgstr "Добавить шаблоны CSS" @@ -1855,6 +1864,10 @@ msgstr "Ось по возрастанию" msgid "Axis 
descending" msgstr "Ось по убыванию" +#, fuzzy +msgid "BCC recipients" +msgstr "получатели" + msgid "BOOLEAN" msgstr "Булевый (BOOLEAN)" @@ -2100,6 +2113,10 @@ msgstr "По значению: использовать значения мер msgid "CANCEL" msgstr "ОТМЕНИТЬ" +#, fuzzy +msgid "CC recipients" +msgstr "получатели" + msgid "CREATE DATASET" msgstr "СОЗДАТЬ ДАТАСЕТ" @@ -2586,6 +2603,10 @@ msgstr "Выбрать способ уведомления и получател msgid "Choose one of the available databases from the panel on the left." msgstr "Выберите одну из доступных баз данных из панели слева." +#, fuzzy +msgid "Choose one of the available databases on the left panel." +msgstr "Выберите одну из доступных баз данных из панели слева." + msgid "Choose sheet name" msgstr "Выберите имя листа" @@ -7152,7 +7173,7 @@ msgstr "Нет дашбордов" msgid "No data" msgstr "Нет данных" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" "Нет данных после фильтрации или данные отсутствуют за последний отрезок " "времени" @@ -7210,7 +7231,7 @@ msgstr "Записи не найдены" msgid "No results match your filter criteria" msgstr "Не найдено результатов по вашим критериям" -msgid "No results were returned for this query" +msgid "No data" msgstr "Не было получено данных по этому запросу" msgid "" @@ -7742,11 +7763,6 @@ msgstr "" "Владельцы – это пользователи, которые могут изменять дашборд. Можно " "искать по имени или никнейму." -msgid "PDF download failed, please refresh and try again." -msgstr "" -"Произошла ошибка скачивания изображения. Обновите страницу и попробуйте " -"заново." 
- msgid "Page length" msgstr "Размер страницы" @@ -8201,6 +8217,9 @@ msgstr "Границы первичной оси Y" msgid "Primary y-axis format" msgstr "Формат первичной оси Y" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "Приватный ключ" @@ -9347,6 +9366,10 @@ msgstr "" msgid "Select catalog or type to search catalogs" msgstr "Выберите таблицу или введите ее имя" +#, fuzzy +msgid "Select channels" +msgstr "Выберите диаграммы" + #, fuzzy msgid "Select chart" msgstr "Выберите диаграмму" @@ -9637,6 +9660,10 @@ msgstr "Имя листа" msgid "Shift + Click to sort by multiple columns" msgstr "Shift + Нажать для сортировки по нескольким столбцам" +#, fuzzy +msgid "Shift start date" +msgstr "Дата начала" + msgid "Short description must be unique for this layer" msgstr "Содержимое аннотации должно быть уникальным внутри слоя" @@ -9955,6 +9982,11 @@ msgstr "Сплошной" msgid "Some roles do not exist" msgstr "Некоторые роли не существуют" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "Что-то пошло не так" @@ -10731,6 +10763,10 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "База данных вернула неожиданную ошибку" +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "Не удалось найти базу данных" + msgid "The database was deleted." msgstr "База данных была удалена" @@ -11215,6 +11251,15 @@ msgstr "Некорректная схема отправленных даннх" msgid "The schema was deleted or renamed in the database." msgstr "Схема была удалена или переименована в базе данных" +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." 
+msgstr "" + #, fuzzy msgid "The size of each cell in meters" msgstr "Размер ячейки в метрах" @@ -11406,8 +11451,9 @@ msgstr "Произошла ошибка при получении объекто msgid "There was an error fetching the favorite status: %s" msgstr "Произошла ошибка при получении статуса избранного: %s" -msgid "There was an error fetching your recent activity:" -msgstr "Произошла ошибка при получении вашей недавней активности:" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "Произошла ошибка при получении статуса избранного: %s" msgid "There was an error loading the catalogs" msgstr "Произошла ошибка при загрузке каталогов" @@ -11760,6 +11806,11 @@ msgstr "Эта мера может быть несовместима с этим msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -12324,6 +12375,10 @@ msgstr "Неизвестный хост MySQL \"%(hostname)s\"" msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "Неизвестный хост MySQL \"%(hostname)s\"" +#, fuzzy, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "Неизвестный хост MySQL \"%(hostname)s\"" + msgid "Unknown Presto Error" msgstr "Неизвестная ошибка Presto" @@ -12833,7 +12888,7 @@ msgstr "СР" msgid "Waiting on %s" msgstr "Ждем %s" -msgid "Waiting on database..." +msgid "Loading..." msgstr "Ожидание ответа от базы данных…" msgid "Want to add a new database?" 
@@ -14122,6 +14177,10 @@ msgstr "в" msgid "in modal" msgstr "в модальном окне" +#, fuzzy +msgid "invalid email" +msgstr "Некорректная постоянная ссылка" + msgid "is expected to be a Mapbox URL" msgstr "ожидается URL-адрес Mapbox" diff --git a/superset/translations/sk/LC_MESSAGES/messages.po b/superset/translations/sk/LC_MESSAGES/messages.po index 7ba1e428b7a8..9abfb3ea19c4 100644 --- a/superset/translations/sk/LC_MESSAGES/messages.po +++ b/superset/translations/sk/LC_MESSAGES/messages.po @@ -17,7 +17,7 @@ msgid "" msgstr "" "Project-Id-Version: PROJECT VERSION\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2021-05-24 15:59+0200\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Language: sk\n" @@ -334,11 +334,10 @@ msgstr "" msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" #, python-format @@ -347,7 +346,9 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" #, python-format @@ -758,6 +759,12 @@ msgstr "" msgid "Add Alert" msgstr "" +msgid "Add BCC Recipients" +msgstr "" + +msgid "Add CC Recipients" +msgstr "" + msgid "Add CSS template" msgstr "" @@ -1722,6 +1729,9 @@ msgstr "" msgid "Axis descending" msgstr "" +msgid "BCC recipients" +msgstr "" + msgid "BOOLEAN" msgstr "" @@ -1938,6 +1948,9 @@ msgstr "" msgid "CANCEL" msgstr "" +msgid "CC recipients" +msgstr "" + msgid "CREATE DATASET" msgstr "" @@ -2405,6 +2418,9 @@ msgstr "" msgid "Choose one of the available databases from the panel on the left." msgstr "" +msgid "Choose one of the available databases on the left panel." 
+msgstr "" + msgid "Choose sheet name" msgstr "" @@ -6697,7 +6713,7 @@ msgstr "Importovať dashboard" msgid "No data" msgstr "" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" msgid "No data in file" @@ -6751,7 +6767,7 @@ msgstr "" msgid "No results match your filter criteria" msgstr "" -msgid "No results were returned for this query" +msgid "No data" msgstr "" msgid "" @@ -7234,9 +7250,6 @@ msgid "" " or username." msgstr "" -msgid "PDF download failed, please refresh and try again." -msgstr "" - msgid "Page length" msgstr "" @@ -7663,6 +7676,9 @@ msgstr "" msgid "Primary y-axis format" msgstr "" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "" @@ -8770,6 +8786,10 @@ msgstr "" msgid "Select catalog or type to search catalogs" msgstr "" +#, fuzzy +msgid "Select channels" +msgstr "Grafy" + #, fuzzy msgid "Select chart" msgstr "Grafy" @@ -9040,6 +9060,9 @@ msgstr "Datasety" msgid "Shift + Click to sort by multiple columns" msgstr "" +msgid "Shift start date" +msgstr "" + msgid "Short description must be unique for this layer" msgstr "" @@ -9334,6 +9357,11 @@ msgstr "" msgid "Some roles do not exist" msgstr "" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "" @@ -10057,6 +10085,9 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "" +msgid "The database that was used to generate this query could not be found" +msgstr "" + msgid "The database was deleted." msgstr "" @@ -10447,6 +10478,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." msgstr "" +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." 
+msgstr "" + msgid "The size of each cell in meters" msgstr "" @@ -10601,7 +10641,7 @@ msgstr "" msgid "There was an error fetching the favorite status: %s" msgstr "" -msgid "There was an error fetching your recent activity:" +msgid "There was an error fetching the filtered charts and dashboards:" msgstr "" msgid "There was an error loading the catalogs" @@ -10910,6 +10950,11 @@ msgstr "" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -11433,6 +11478,10 @@ msgstr "" msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "" +#, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "" + msgid "Unknown Presto Error" msgstr "" @@ -11896,7 +11945,7 @@ msgstr "" msgid "Waiting on %s" msgstr "" -msgid "Waiting on database..." +msgid "Loading..." msgstr "" msgid "Want to add a new database?" 
@@ -13085,6 +13134,9 @@ msgstr "" msgid "in modal" msgstr "" +msgid "invalid email" +msgstr "" + msgid "is expected to be a Mapbox URL" msgstr "" diff --git a/superset/translations/sl/LC_MESSAGES/messages.po b/superset/translations/sl/LC_MESSAGES/messages.po index e8038687b312..2744ecc92f80 100644 --- a/superset/translations/sl/LC_MESSAGES/messages.po +++ b/superset/translations/sl/LC_MESSAGES/messages.po @@ -17,7 +17,7 @@ msgid "" msgstr "" "Project-Id-Version: Superset\n" "Report-Msgid-Bugs-To: dkrat7 @github.com\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2024-01-01 23:56+0100\n" "Last-Translator: dkrat7 <dkrat7 @github.com>\n" "Language: sl_SI\n" @@ -368,23 +368,20 @@ msgstr "(ni opisa, kliknite za ogled zapisov)" msgid "), and they become available in your SQL (example:" msgstr "), s čimer bodo na razpolago v sklopu SQL-poizvedbe (primer:" -#, python-format +#, fuzzy, python-format msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"<%(url)s|Razišči v Supersetu>\n" -"\n" -"%(table)s\n" +"napaka: %(text)s\n" #, python-format msgid "" @@ -392,13 +389,17 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"napaka: %(text)s\n" +"<%(url)s|Razišči v Supersetu>\n" +"\n" +"%(table)s\n" #, python-format msgid "+ %s more" @@ -834,6 +835,14 @@ msgstr "Dodaj" msgid "Add Alert" msgstr "Dodaj opozorilo" +#, fuzzy +msgid "Add BCC Recipients" +msgstr "nedavne" + +#, fuzzy +msgid "Add CC Recipients" +msgstr "nedavne" + msgid "Add CSS template" msgstr "Dodaj CSS predlogo" @@ -1844,6 +1853,10 @@ msgstr "Naraščajoča os" msgid "Axis descending" msgstr "Padajoča os" +#, fuzzy +msgid "BCC recipients" +msgstr "nedavne" + msgid 
"BOOLEAN" msgstr "BOOLEAN" @@ -2083,6 +2096,10 @@ msgstr "Po vrednosti: za razvrščanje uporabite vrednosti mere" msgid "CANCEL" msgstr "PREKINI" +#, fuzzy +msgid "CC recipients" +msgstr "nedavne" + msgid "CREATE DATASET" msgstr "USTVARI PODATKOVNI SET" @@ -2566,6 +2583,10 @@ msgstr "Dodajte način obveščanja" msgid "Choose one of the available databases from the panel on the left." msgstr "Izberite eno od razpoložljivih podatkovnih baz v panelu na levi." +#, fuzzy +msgid "Choose one of the available databases on the left panel." +msgstr "Izberite eno od razpoložljivih podatkovnih baz v panelu na levi." + #, fuzzy msgid "Choose sheet name" msgstr "Ime zvezka" @@ -7139,7 +7160,7 @@ msgstr "Ni še nadzornih plošč" msgid "No data" msgstr "Ni podatkov" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" "Ni podatkov po filtriranju ali pa imajo vrednost NULL za zadnji časovni " "zapis" @@ -7195,7 +7216,7 @@ msgstr "Rezultati niso najdeni" msgid "No results match your filter criteria" msgstr "Noben rezultat ne ustreza vašim kriterijem" -msgid "No results were returned for this query" +msgid "No data" msgstr "Poizvedba ni vrnila rezultatov" msgid "" @@ -7718,9 +7739,6 @@ msgstr "" "\"Lastniki\" je seznam uporabnikov, ki lahko spreminjajo nadzorno ploščo." " Iskanje je možno po imenu ali uporabniškem imenu." -msgid "PDF download failed, please refresh and try again." -msgstr "Prenos PDF ni uspel. Osvežite in poskusite ponovno." 
- msgid "Page length" msgstr "Dolžina strani" @@ -8172,6 +8190,9 @@ msgstr "Meje primarne y-osi" msgid "Primary y-axis format" msgstr "Oblika primarne y-osi" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "Privatni ključ" @@ -9330,6 +9351,10 @@ msgstr "Izberite poljubne stolpce za pregled metapodatkov" msgid "Select catalog or type to search catalogs" msgstr "Izberite ali vnesite ime tabele" +#, fuzzy +msgid "Select channels" +msgstr "Izberi grafikone" + msgid "Select chart" msgstr "Izberi grafikon" @@ -9619,6 +9644,10 @@ msgstr "Ime zvezka" msgid "Shift + Click to sort by multiple columns" msgstr "Shift + klik za razvrščanje po več stolpcih" +#, fuzzy +msgid "Shift start date" +msgstr "Začetni datum" + msgid "Short description must be unique for this layer" msgstr "Kratek opis mora biti za ta sloj unikaten" @@ -9939,6 +9968,11 @@ msgstr "Zapolnjen" msgid "Some roles do not exist" msgstr "Nekatere vloge ne obstajajo" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "Nekaj je šlo narobe." @@ -10707,6 +10741,10 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "Podatkovna baza je vrnila nepričakovano napako." +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "Podatkovna baza ni bila najdena" + msgid "The database was deleted." msgstr "Podatkovna baza je bila izbrisana." @@ -11187,6 +11225,15 @@ msgstr "Shema podanih podatkov je neveljavna." msgid "The schema was deleted or renamed in the database." msgstr "Shema je bila izbrisana ali preimenovana v podatkovni bazi." +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." 
+msgstr "" + msgid "The size of each cell in meters" msgstr "Velikost vsake celice v metrih" @@ -11370,8 +11417,9 @@ msgstr "Pri pridobivanju elementov podatkovnega seta je prišlo do napake" msgid "There was an error fetching the favorite status: %s" msgstr "Napaka pri pridobivanju statusa \"Priljubljeno\": %s" -msgid "There was an error fetching your recent activity:" -msgstr "Pri pridobivanju nedavnih aktivnosti je prišlo do napake:" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "Napaka pri pridobivanju statusa \"Priljubljeno\": %s" #, fuzzy msgid "There was an error loading the catalogs" @@ -11736,6 +11784,11 @@ msgstr "Ta mera je lahko nekompatibilna s trenutnim podatkovnim setom" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -12293,6 +12346,10 @@ msgstr "Neznan Doris strežnik \"%(hostname)s\"." msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "Neznan MySQL strežnik \"%(hostname)s\"." +#, fuzzy, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "Neznan Doris strežnik \"%(hostname)s\"." + msgid "Unknown Presto Error" msgstr "Neznana Presto napaka" @@ -12805,7 +12862,7 @@ msgid "Waiting on %s" msgstr "Prikazanih %s od %s" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "Upravljajte podatkovne baze" msgid "Want to add a new database?" 
@@ -14097,6 +14154,10 @@ msgstr "v" msgid "in modal" msgstr "v modalnem oknu" +#, fuzzy +msgid "invalid email" +msgstr "Neveljaven ključ povezave" + msgid "is expected to be a Mapbox URL" msgstr "mora biti URL za Mapbox" diff --git a/superset/translations/tr/LC_MESSAGES/messages.po b/superset/translations/tr/LC_MESSAGES/messages.po index ae1d27909368..2216c04dfbbe 100644 --- a/superset/translations/tr/LC_MESSAGES/messages.po +++ b/superset/translations/tr/LC_MESSAGES/messages.po @@ -21,7 +21,7 @@ msgid "" msgstr "" "Project-Id-Version: Superset VERSION\n" "Report-Msgid-Bugs-To: avsarcoteli@gmail.com\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2024-02-25 14:00+0300\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Language: tr\n" @@ -331,11 +331,10 @@ msgstr "" msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" #, python-format @@ -344,7 +343,9 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" #, python-format @@ -756,6 +757,14 @@ msgstr "Ekle" msgid "Add Alert" msgstr "Alarm Ekle" +#, fuzzy +msgid "Add BCC Recipients" +msgstr "son kullanılanlar" + +#, fuzzy +msgid "Add CC Recipients" +msgstr "son kullanılanlar" + msgid "Add CSS template" msgstr "CSS şablonu ekle" @@ -1705,6 +1714,10 @@ msgstr "" msgid "Axis descending" msgstr "" +#, fuzzy +msgid "BCC recipients" +msgstr "son kullanılanlar" + msgid "BOOLEAN" msgstr "" @@ -1921,6 +1934,10 @@ msgstr "" msgid "CANCEL" msgstr "İPTAL" +#, fuzzy +msgid "CC recipients" +msgstr "son kullanılanlar" + msgid "CREATE DATASET" msgstr "VERİSETİ OLUŞTUR" @@ -2389,6 +2406,9 @@ msgstr "" msgid "Choose one of the available databases from the panel on the left." msgstr "" +msgid "Choose one of the available databases on the left panel." 
+msgstr "" + #, fuzzy msgid "Choose sheet name" msgstr "Grafik tipini seçin" @@ -6676,7 +6696,7 @@ msgstr "Henüz dashboard yok" msgid "No data" msgstr "Veri bulunamadı" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "Filtrelemeden sonra veri bulunamadı veya en son kayıt için veriler NULL" msgid "No data in file" @@ -6730,7 +6750,7 @@ msgstr "Sonuç bulunamadı" msgid "No results match your filter criteria" msgstr "Filtrenizle eşleşen sonuç yok" -msgid "No results were returned for this query" +msgid "No data" msgstr "Bu sorgu için bir değer dönmedi" msgid "" @@ -7212,9 +7232,6 @@ msgid "" " or username." msgstr "" -msgid "PDF download failed, please refresh and try again." -msgstr "" - msgid "Page length" msgstr "" @@ -7639,6 +7656,9 @@ msgstr "" msgid "Primary y-axis format" msgstr "" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "" @@ -8746,6 +8766,10 @@ msgstr "" msgid "Select catalog or type to search catalogs" msgstr "Tablo seç" +#, fuzzy +msgid "Select channels" +msgstr "Grafikleri seç" + msgid "Select chart" msgstr "Grafik seç" @@ -9012,6 +9036,10 @@ msgstr "veriseti ismi" msgid "Shift + Click to sort by multiple columns" msgstr "" +#, fuzzy +msgid "Shift start date" +msgstr "Başlangıç tarihi" + msgid "Short description must be unique for this layer" msgstr "" @@ -9306,6 +9334,11 @@ msgstr "" msgid "Some roles do not exist" msgstr "" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "" @@ -10026,6 +10059,9 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "" +msgid "The database that was used to generate this query could not be found" +msgstr "" + msgid "The database was deleted." msgstr "" @@ -10414,6 +10450,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." msgstr "" +msgid "The screenshot could not be downloaded. 
Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + msgid "The size of each cell in meters" msgstr "" @@ -10568,8 +10613,9 @@ msgstr "" msgid "There was an error fetching the favorite status: %s" msgstr "" -msgid "There was an error fetching your recent activity:" -msgstr "" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "Grafiği yüklerken hata oluştu" #, fuzzy msgid "There was an error loading the catalogs" @@ -10880,6 +10926,11 @@ msgstr "" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -11400,6 +11451,10 @@ msgstr "" msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "" +#, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "" + msgid "Unknown Presto Error" msgstr "" @@ -11863,7 +11918,7 @@ msgid "Waiting on %s" msgstr "" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "Veritabanını düzenle" msgid "Want to add a new database?" 
@@ -13047,6 +13102,9 @@ msgstr "" msgid "in modal" msgstr "" +msgid "invalid email" +msgstr "" + msgid "is expected to be a Mapbox URL" msgstr "" diff --git a/superset/translations/uk/LC_MESSAGES/messages.po b/superset/translations/uk/LC_MESSAGES/messages.po index 4d6b5ab43a29..72cdfc08fa03 100644 --- a/superset/translations/uk/LC_MESSAGES/messages.po +++ b/superset/translations/uk/LC_MESSAGES/messages.po @@ -18,7 +18,7 @@ msgid "" msgstr "" "Project-Id-Version: \n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2023-09-17 12:57+0300\n" "Last-Translator: \n" "Language: uk\n" @@ -365,23 +365,20 @@ msgstr "(Немає опису, натисніть, щоб побачити Trac msgid "), and they become available in your SQL (example:" msgstr "), і вони стають доступними у вашому SQL (приклад:" -#, python-format +#, fuzzy, python-format msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"<%(URL)s | Ознайомтеся з Superset>\n" -"\n" -"%(table)s\n" +"Помилка: %(text)s\n" #, python-format msgid "" @@ -389,13 +386,17 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" "*%(name)s*\n" "\n" "%(description)s\n" "\n" -"Помилка: %(text)s\n" +"<%(URL)s | Ознайомтеся з Superset>\n" +"\n" +"%(table)s\n" #, python-format msgid "+ %s more" @@ -835,6 +836,14 @@ msgstr "Додавання" msgid "Add Alert" msgstr "Додати сповіщення" +#, fuzzy +msgid "Add BCC Recipients" +msgstr "недавні" + +#, fuzzy +msgid "Add CC Recipients" +msgstr "недавні" + msgid "Add CSS template" msgstr "Додайте шаблон CSS" @@ -1854,6 +1863,10 @@ msgstr "Осі висхідна" msgid "Axis descending" msgstr "Осі, що спускається" +#, fuzzy +msgid "BCC recipients" +msgstr "недавні" + msgid "BOOLEAN" msgstr "Булевий" @@ 
-2099,6 +2112,10 @@ msgstr "За значенням: Використовуйте метричні msgid "CANCEL" msgstr "Скасувати" +#, fuzzy +msgid "CC recipients" +msgstr "недавні" + msgid "CREATE DATASET" msgstr "Створити набір даних" @@ -2588,6 +2605,10 @@ msgstr "Додайте метод сповіщення" msgid "Choose one of the available databases from the panel on the left." msgstr "Виберіть одну з доступних баз даних з панелі зліва." +#, fuzzy +msgid "Choose one of the available databases on the left panel." +msgstr "Виберіть одну з доступних баз даних з панелі зліва." + #, fuzzy msgid "Choose sheet name" msgstr "Назва аркуша" @@ -7197,7 +7218,7 @@ msgstr "Ще немає інформаційних панелей" msgid "No data" msgstr "Немає даних" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "" "Ніякі дані після фільтрації або даних є нульовими для останнього запису " "часу" @@ -7253,7 +7274,7 @@ msgstr "Нічого не знайдено" msgid "No results match your filter criteria" msgstr "Ніякі результати не відповідають вашим критеріям фільтра" -msgid "No results were returned for this query" +msgid "No data" msgstr "Для цього запиту не було повернуто жодних результатів" msgid "" @@ -7792,10 +7813,6 @@ msgstr "" "Власники - це список користувачів, які можуть змінити інформаційну " "панель. Шукати за іменем або іменем користувача." -#, fuzzy -msgid "PDF download failed, please refresh and try again." -msgstr "Завантажити зображення не вдалося, оновити та повторіть спробу." 
- msgid "Page length" msgstr "Довжина сторінки" @@ -8252,6 +8269,9 @@ msgstr "Первинні межі вісь Y" msgid "Primary y-axis format" msgstr "Первинний формат осі Y" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "Приватний ключ" @@ -9415,6 +9435,10 @@ msgstr "Виберіть будь -які стовпці для перевірк msgid "Select catalog or type to search catalogs" msgstr "Виберіть таблицю або введіть для пошукових таблиць" +#, fuzzy +msgid "Select channels" +msgstr "Виберіть діаграми" + msgid "Select chart" msgstr "Виберіть діаграму" @@ -9705,6 +9729,10 @@ msgstr "Назва аркуша" msgid "Shift + Click to sort by multiple columns" msgstr "Shift + Клацніть, щоб сортувати на кілька стовпців" +#, fuzzy +msgid "Shift start date" +msgstr "Дата початку" + msgid "Short description must be unique for this layer" msgstr "Короткий опис повинен бути унікальним для цього шару" @@ -10033,6 +10061,11 @@ msgstr "Суцільний" msgid "Some roles do not exist" msgstr "Деяких ролей не існує" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + msgid "Something went wrong." msgstr "Щось пішло не так." @@ -10824,6 +10857,10 @@ msgstr "" msgid "The database returned an unexpected error." msgstr "База даних повернула несподівану помилку." +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "Бази даних не вдалося знайти" + msgid "The database was deleted." msgstr "База даних була видалена." @@ -11312,6 +11349,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." msgstr "Схема була видалена або перейменована в базу даних." +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." 
+msgstr "" + #, fuzzy msgid "The size of each cell in meters" msgstr "Розмір квадратної клітини, пікселів" @@ -11506,8 +11552,9 @@ msgstr "Були помилкові об'єкти, пов’язані з наб msgid "There was an error fetching the favorite status: %s" msgstr "Була помилка, яка отримала улюблений статус: %s" -msgid "There was an error fetching your recent activity:" -msgstr "Була помилка, яка отримала вашу недавню діяльність:" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "Була помилка, яка отримала улюблений статус: %s" #, fuzzy msgid "There was an error loading the catalogs" @@ -11869,6 +11916,11 @@ msgstr "Цей показник може бути несумісним із по msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + msgid "" "This section allows you to configure how to use the slice\n" " to generate annotations." @@ -12435,6 +12487,10 @@ msgstr "Невідомий хост MySQL Server “%(hostname)s”." msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "Невідомий хост MySQL Server “%(hostname)s”." +#, fuzzy, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "Невідомий хост MySQL Server “%(hostname)s”." + msgid "Unknown Presto Error" msgstr "Невідома помилка Престо" @@ -12965,7 +13021,7 @@ msgid "Waiting on %s" msgstr "Показуючи %s %s" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "Керуйте своїми базами даних" msgid "Want to add a new database?" 
@@ -14266,6 +14322,10 @@ msgstr "у" msgid "in modal" msgstr "у модальному" +#, fuzzy +msgid "invalid email" +msgstr "Недійсний ключ постійного посилання" + #, fuzzy msgid "is expected to be a Mapbox URL" msgstr "очікується, що буде числом" diff --git a/superset/translations/zh/LC_MESSAGES/messages.po b/superset/translations/zh/LC_MESSAGES/messages.po index 9caec53c3658..deab80f6106d 100644 --- a/superset/translations/zh/LC_MESSAGES/messages.po +++ b/superset/translations/zh/LC_MESSAGES/messages.po @@ -18,7 +18,7 @@ msgid "" msgstr "" "Project-Id-Version: Apache Superset 0.22.1\n" "Report-Msgid-Bugs-To: zhouyao94@qq.com\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2019-01-04 22:19+0800\n" "Last-Translator: cdmikechen \n" "Language: zh\n" @@ -348,11 +348,10 @@ msgstr "), 他们在你的SQL中会变成有效数据 (比如:" msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" #, python-format @@ -361,7 +360,9 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" #, python-format @@ -807,6 +808,14 @@ msgstr "新增" msgid "Add Alert" msgstr "新增告警" +#, fuzzy +msgid "Add BCC Recipients" +msgstr "最近" + +#, fuzzy +msgid "Add CC Recipients" +msgstr "最近" + msgid "Add CSS template" msgstr "新增CSS模板" @@ -1808,6 +1817,10 @@ msgstr "轴线升序" msgid "Axis descending" msgstr "轴线降序" +#, fuzzy +msgid "BCC recipients" +msgstr "最近" + msgid "BOOLEAN" msgstr "布尔值" @@ -2040,6 +2053,10 @@ msgstr "使用度量值作为排序关键字" msgid "CANCEL" msgstr "取消" +#, fuzzy +msgid "CC recipients" +msgstr "最近" + #, fuzzy msgid "CREATE DATASET" msgstr "创建数据集" @@ -2542,6 +2559,10 @@ msgstr "新增通知方法" msgid "Choose one of the available databases from the panel on the left." msgstr "从左侧的面板中选择一个可用的数据库" +#, fuzzy +msgid "Choose one of the available databases on the left panel." 
+msgstr "从左侧的面板中选择一个可用的数据库" + #, fuzzy msgid "Choose sheet name" msgstr "Sheet名称" @@ -7122,7 +7143,7 @@ msgstr "还没有看板" msgid "No data" msgstr "没有数据" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "过滤后没有数据,或者最新时间记录的数据为NULL" msgid "No data in file" @@ -7181,7 +7202,7 @@ msgstr "未找到结果" msgid "No results match your filter criteria" msgstr "" -msgid "No results were returned for this query" +msgid "No data" msgstr "此查询没有数据返回" msgid "" @@ -7698,10 +7719,6 @@ msgid "" " or username." msgstr "所有者是一个用户列表,这些用户有权限修改仪表板。可按名称或用户名搜索。" -#, fuzzy -msgid "PDF download failed, please refresh and try again." -msgstr "PDF下载失败,请刷新重试。" - msgid "Page length" msgstr "页长" @@ -8149,6 +8166,9 @@ msgstr "主Y轴界限" msgid "Primary y-axis format" msgstr "主Y轴格式" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "私钥" @@ -9333,6 +9353,10 @@ msgstr "选择任意列进行元数据巡检" msgid "Select catalog or type to search catalogs" msgstr "选择表或输入表名来搜索" +#, fuzzy +msgid "Select channels" +msgstr "选择图表" + #, fuzzy msgid "Select chart" msgstr "选择图表" @@ -9620,6 +9644,10 @@ msgstr "Sheet名称" msgid "Shift + Click to sort by multiple columns" msgstr "" +#, fuzzy +msgid "Shift start date" +msgstr "开始时间" + msgid "Short description must be unique for this layer" msgstr "此层的简述必须是唯一的" @@ -9928,6 +9956,11 @@ msgstr "" msgid "Some roles do not exist" msgstr "看板" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + #, fuzzy msgid "Something went wrong." msgstr "抱歉,出了点问题。请稍后再试。" @@ -10703,6 +10736,10 @@ msgstr "找不到此查询中引用的数据库。请与管理员联系以获得 msgid "The database returned an unexpected error." msgstr "数据库返回意外错误。" +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "数据库没有找到" + msgid "The database was deleted." msgstr "数据库已删除。" @@ -11116,6 +11153,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." 
msgstr "该模式已在数据库中删除或重命名。" +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + #, fuzzy msgid "The size of each cell in meters" msgstr "每个单元的大小,以米为单位" @@ -11280,8 +11326,9 @@ msgstr "抱歉,获取数据库信息时出错:%s" msgid "There was an error fetching the favorite status: %s" msgstr "获取此看板的收藏夹状态时出现问题:%s。" -msgid "There was an error fetching your recent activity:" -msgstr "获取您最近的活动时出错:" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "获取此看板的收藏夹状态时出现问题:%s。" #, fuzzy msgid "There was an error loading the catalogs" @@ -11603,6 +11650,11 @@ msgstr "此图表可能与过滤器不兼容(数据集不匹配)" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + #, fuzzy msgid "" "This section allows you to configure how to use the slice\n" @@ -12148,6 +12200,10 @@ msgstr "未知Doris服务器主机 \"%(hostname)s\"." msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "未知MySQL服务器主机 \"%(hostname)s\"." +#, fuzzy, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "未知Doris服务器主机 \"%(hostname)s\"." + msgid "Unknown Presto Error" msgstr "未知 Presto 错误" @@ -12632,7 +12688,7 @@ msgid "Waiting on %s" msgstr "显示 %s个 总计 %s个" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "管理你的数据库" msgid "Want to add a new database?" 
@@ -13903,6 +13959,10 @@ msgstr "处于" msgid "in modal" msgstr "(在模型中)" +#, fuzzy +msgid "invalid email" +msgstr "无效状态。" + #, fuzzy msgid "is expected to be a Mapbox URL" msgstr "应该为MapBox的URL" diff --git a/superset/translations/zh_TW/LC_MESSAGES/messages.po b/superset/translations/zh_TW/LC_MESSAGES/messages.po index d344729671f0..f50e7aabe928 100644 --- a/superset/translations/zh_TW/LC_MESSAGES/messages.po +++ b/superset/translations/zh_TW/LC_MESSAGES/messages.po @@ -17,7 +17,7 @@ msgid "" msgstr "" "Project-Id-Version: Apache Superset 0.22.1\n" "Report-Msgid-Bugs-To: bestlong168@gmail.com\n" -"POT-Creation-Date: 2024-07-03 14:07-0700\n" +"POT-Creation-Date: 2024-07-30 17:32-0600\n" "PO-Revision-Date: 2019-01-04 22:19+0800\n" "Last-Translator: Shao Yu-Lung <bestlong168@gmail.com>\n" "Language: zh_TW\n" @@ -347,11 +347,10 @@ msgstr "), 他們在你的 SQL 中會變成有效數據 (比如:" msgid "" "*%(name)s*\n" "\n" -"%(description)s\n" -"\n" -"<%(url)s|Explore in Superset>\n" +" %(description)s\n" "\n" -"%(table)s\n" +" Error: %(text)s\n" +" " msgstr "" #, python-format @@ -360,7 +359,9 @@ msgid "" "\n" "%(description)s\n" "\n" -"Error: %(text)s\n" +"<%(url)s|Explore in Superset>\n" +"\n" +"%(table)s\n" msgstr "" #, python-format @@ -806,6 +807,14 @@ msgstr "新增" msgid "Add Alert" msgstr "新增告警" +#, fuzzy +msgid "Add BCC Recipients" +msgstr "最近" + +#, fuzzy +msgid "Add CC Recipients" +msgstr "最近" + msgid "Add CSS template" msgstr "新增CSS模板" @@ -1807,6 +1816,10 @@ msgstr "軸線升序" msgid "Axis descending" msgstr "軸線降序" +#, fuzzy +msgid "BCC recipients" +msgstr "最近" + msgid "BOOLEAN" msgstr "布林值" @@ -2041,6 +2054,10 @@ msgstr "使用度量值作為排序關鍵字" msgid "CANCEL" msgstr "取消" +#, fuzzy +msgid "CC recipients" +msgstr "最近" + #, fuzzy msgid "CREATE DATASET" msgstr "創建數據集" @@ -2543,6 +2560,10 @@ msgstr "通知方式" msgid "Choose one of the available databases from the panel on the left." msgstr "從左側的面板中選擇一個可用的資料庫" +#, fuzzy +msgid "Choose one of the available databases on the left panel." 
+msgstr "從左側的面板中選擇一個可用的資料庫" + #, fuzzy msgid "Choose sheet name" msgstr "Sheet名稱" @@ -7128,7 +7149,7 @@ msgstr "還没有看板" msgid "No data" msgstr "没有數據" -msgid "No data after filtering or data is NULL for the latest time record" +msgid "" msgstr "過濾後没有數據,或者最新時間紀錄的數據為NULL" msgid "No data in file" @@ -7187,7 +7208,7 @@ msgstr "未找到结果" msgid "No results match your filter criteria" msgstr "" -msgid "No results were returned for this query" +msgid "No data" msgstr "此查詢没有數據返回" msgid "" @@ -7706,10 +7727,6 @@ msgid "" " or username." msgstr "所有者是一個用戶列表,這些用戶有權限修改仪表板。可按名稱或用戶名搜索。" -#, fuzzy -msgid "PDF download failed, please refresh and try again." -msgstr "PDF 下载失敗,請刷新重試。" - msgid "Page length" msgstr "頁長" @@ -8157,6 +8174,9 @@ msgstr "主 Y 軸界限" msgid "Primary y-axis format" msgstr "主 Y 軸格式" +msgid "Private Channels (Bot in channel)" +msgstr "" + msgid "Private Key" msgstr "私鑰" @@ -9346,6 +9366,10 @@ msgstr "選擇任意列進行元數據巡檢" msgid "Select catalog or type to search catalogs" msgstr "選擇表或输入表名來搜索" +#, fuzzy +msgid "Select channels" +msgstr "選擇圖表" + #, fuzzy msgid "Select chart" msgstr "選擇圖表" @@ -9633,6 +9657,10 @@ msgstr "Sheet名稱" msgid "Shift + Click to sort by multiple columns" msgstr "" +#, fuzzy +msgid "Shift start date" +msgstr "開始時間" + msgid "Short description must be unique for this layer" msgstr "此層的简述必須是唯一的" @@ -9941,6 +9969,11 @@ msgstr "" msgid "Some roles do not exist" msgstr "看板" +msgid "" +"Something went wrong with embedded authentication. Check the dev console " +"for details." +msgstr "" + #, fuzzy msgid "Something went wrong." msgstr "抱歉,出了點問題。請稍後再試。" @@ -10717,6 +10750,10 @@ msgstr "找不到此查詢中引用的資料庫。請與管理員聯繫以獲得 msgid "The database returned an unexpected error." msgstr "資料庫返回意外錯誤。" +#, fuzzy +msgid "The database that was used to generate this query could not be found" +msgstr "資料庫没有找到" + msgid "The database was deleted." msgstr "資料庫已删除。" @@ -11130,6 +11167,15 @@ msgstr "" msgid "The schema was deleted or renamed in the database." 
msgstr "該模式已在資料庫中删除或重命名。" +msgid "The screenshot could not be downloaded. Please, try again later." +msgstr "" + +msgid "The screenshot is being generated. Please, do not leave the page." +msgstr "" + +msgid "The screenshot is now being downloaded." +msgstr "" + #, fuzzy msgid "The size of each cell in meters" msgstr "每個單元的大小,以米為單位" @@ -11294,8 +11340,9 @@ msgstr "抱歉,獲取資料庫訊息時出錯:%s" msgid "There was an error fetching the favorite status: %s" msgstr "獲取此看板的收藏夹狀態時出現問題:%s。" -msgid "There was an error fetching your recent activity:" -msgstr "獲取您最近的活動時出錯:" +#, fuzzy +msgid "There was an error fetching the filtered charts and dashboards:" +msgstr "獲取此看板的收藏夹狀態時出現問題:%s。" #, fuzzy msgid "There was an error loading the catalogs" @@ -11617,6 +11664,11 @@ msgstr "此圖表可能與過濾器不兼容(數據集不匹配)" msgid "This option has been disabled by the administrator." msgstr "" +msgid "" +"This page is intended to be embedded in an iframe, but it looks like that" +" is not the case." +msgstr "" + #, fuzzy msgid "" "This section allows you to configure how to use the slice\n" @@ -12162,6 +12214,10 @@ msgstr "未知 Doris 服務器主機 \"%(hostname)s\"." msgid "Unknown MySQL server host \"%(hostname)s\"." msgstr "未知 MySQL 服務器主機 \"%(hostname)s\"." +#, fuzzy, python-format +msgid "Unknown OceanBase server host \"%(hostname)s\"." +msgstr "未知 Doris 服務器主機 \"%(hostname)s\"." + msgid "Unknown Presto Error" msgstr "未知 Presto 錯誤" @@ -12646,7 +12702,7 @@ msgid "Waiting on %s" msgstr "顯示 %s 個 總計 %s 個" #, fuzzy -msgid "Waiting on database..." +msgid "Loading..." msgstr "管理你的資料庫" msgid "Want to add a new database?" 
@@ -13917,6 +13973,10 @@ msgstr "處於" msgid "in modal" msgstr "(在模型中)" +#, fuzzy +msgid "invalid email" +msgstr "無效狀態。" + #, fuzzy msgid "is expected to be a Mapbox URL" msgstr "應該為 MapBox 的 URL" diff --git a/superset/utils/core.py b/superset/utils/core.py index b2f09aac2d95..9480e5473330 100644 --- a/superset/utils/core.py +++ b/superset/utils/core.py @@ -167,6 +167,7 @@ class HeaderDataType(TypedDict): notification_source: str | None chart_id: int | None dashboard_id: int | None + slack_channels: list[str] | None class DatasourceDict(TypedDict): @@ -711,7 +712,7 @@ def send_email_smtp( # pylint: disable=invalid-name,too-many-arguments,too-many recipients = smtp_mail_to if cc: smtp_mail_cc = get_email_address_list(cc) - msg["CC"] = ", ".join(smtp_mail_cc) + msg["Cc"] = ", ".join(smtp_mail_cc) recipients = recipients + smtp_mail_cc smtp_mail_bcc = [] diff --git a/superset/utils/excel.py b/superset/utils/excel.py index ccbeadee5ece..8609be5b43e6 100644 --- a/superset/utils/excel.py +++ b/superset/utils/excel.py @@ -19,16 +19,29 @@ import pandas as pd +from superset.utils.core import GenericDataType + def df_to_excel(df: pd.DataFrame, **kwargs: Any) -> Any: output = io.BytesIO() - # timezones are not supported - for column in df.select_dtypes(include=["datetimetz"]).columns: - df[column] = df[column].astype(str) - # pylint: disable=abstract-class-instantiated with pd.ExcelWriter(output, engine="xlsxwriter") as writer: df.to_excel(writer, **kwargs) return output.getvalue() + + +def apply_column_types( + df: pd.DataFrame, column_types: list[GenericDataType] +) -> pd.DataFrame: + for column, column_type in zip(df.columns, column_types): + if column_type == GenericDataType.NUMERIC: + try: + df[column] = pd.to_numeric(df[column]) + except ValueError: + df[column] = df[column].astype(str) + elif pd.api.types.is_datetime64tz_dtype(df[column]): + # timezones are not supported + df[column] = df[column].astype(str) + return df diff --git a/superset/utils/json.py 
b/superset/utils/json.py index 50a76d1a7cb5..d1c184afe194 100644 --- a/superset/utils/json.py +++ b/superset/utils/json.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +import copy import decimal import logging import uuid @@ -24,8 +25,10 @@ import pandas as pd import simplejson from flask_babel.speaklater import LazyString +from jsonpath_ng import parse from simplejson import JSONDecodeError +from superset.constants import PASSWORD_MASK from superset.utils.dates import datetime_to_epoch, EPOCH logging.getLogger("MARKDOWN").setLevel(logging.INFO) @@ -171,7 +174,11 @@ def validate_json(obj: Union[bytes, bytearray, str]) -> None: :param obj: an object that should be parseable to JSON """ if obj: - loads(obj) + try: + loads(obj) + except JSONDecodeError as ex: + logger.error("JSON is not valid %s", str(ex), exc_info=True) + raise def dumps( # pylint: disable=too-many-arguments @@ -211,7 +218,7 @@ def dumps( # pylint: disable=too-many-arguments cls=cls, ) except UnicodeDecodeError: - results_string = simplejson.dumps( # type: ignore[call-overload] + results_string = simplejson.dumps( obj, default=default, allow_nan=allow_nan, @@ -240,13 +247,62 @@ def loads( :param object_hook: function that will be called to decode objects values :returns: A Python object deserialized from string """ - try: - return simplejson.loads( - obj, - encoding=encoding, - allow_nan=allow_nan, - object_hook=object_hook, - ) - except JSONDecodeError as ex: - logger.error("JSON is not valid %s", str(ex), exc_info=True) - raise + return simplejson.loads( + obj, + encoding=encoding, + allow_nan=allow_nan, + object_hook=object_hook, + ) + + +def redact_sensitive( + payload: dict[str, Any], + sensitive_fields: set[str], +) -> dict[str, Any]: + """ + Redacts sensitive fields from a payload. 
+ + :param payload: The payload to redact + :param sensitive_fields: The set of fields to redact, as JSONPath expressions + :returns: The redacted payload + """ + redacted_payload = copy.deepcopy(payload) + + for json_path in sensitive_fields: + jsonpath_expr = parse(json_path) + for match in jsonpath_expr.find(redacted_payload): + match.context.value[match.path.fields[0]] = PASSWORD_MASK + + return redacted_payload + + +def reveal_sensitive( + old_payload: dict[str, Any], + new_payload: dict[str, Any], + sensitive_fields: set[str], +) -> dict[str, Any]: + """ + Reveals sensitive fields from a payload when not modified. + + This allows users to perform deep edits on a payload without having to provide + sensitive information. The old payload is sent to the user with any sensitive fields + masked, and when the user sends back a modified payload, any fields that were masked + are replaced with the original values from the old payload. + + For now this is only used to edit `encrypted_extra` fields in the database. 
+ + :param old_payload: The old payload to reveal + :param new_payload: The new payload to reveal + :param sensitive_fields: The set of fields to reveal, as JSONPath expressions + :returns: The revealed payload + """ + revealed_payload = copy.deepcopy(new_payload) + + for json_path in sensitive_fields: + jsonpath_expr = parse(json_path) + for match in jsonpath_expr.find(revealed_payload): + if match.value == PASSWORD_MASK: + old_value = match.full_path.find(old_payload) + match.context.value[match.path.fields[0]] = old_value[0].value + + return revealed_payload diff --git a/superset/utils/log.py b/superset/utils/log.py index 71c552883307..70f6a1bda26b 100644 --- a/superset/utils/log.py +++ b/superset/utils/log.py @@ -394,8 +394,8 @@ def log( # pylint: disable=too-many-arguments,too-many-locals log = Log( action=action, json=json_string, - dashboard_id=dashboard_id, - slice_id=slice_id, + dashboard_id=dashboard_id or record.get("dashboard_id"), + slice_id=slice_id or record.get("slice_id"), duration_ms=duration_ms, referrer=referrer, user_id=user_id, diff --git a/superset/utils/pandas_postprocessing/histogram.py b/superset/utils/pandas_postprocessing/histogram.py index d91e129e8c97..dbe93ef32b15 100644 --- a/superset/utils/pandas_postprocessing/histogram.py +++ b/superset/utils/pandas_postprocessing/histogram.py @@ -17,7 +17,7 @@ from __future__ import annotations import numpy as np -from pandas import DataFrame, Series +from pandas import DataFrame, Series, to_numeric # pylint: disable=too-many-arguments @@ -48,12 +48,15 @@ def histogram( if groupby is None: groupby = [] - # check if the column is numeric - if not np.issubdtype(df[column].dtype, np.number): - raise ValueError(f"The column '{column}' must be numeric.") + # convert to numeric, coercing errors to NaN + df[column] = to_numeric(df[column], errors="coerce") + + # check if the column contains non-numeric values + if df[column].isna().any(): + raise ValueError(f"Column '{column}' contains non-numeric 
values") # calculate the histogram bin edges - bin_edges = np.histogram_bin_edges(df[column].dropna(), bins=bins) + bin_edges = np.histogram_bin_edges(df[column], bins=bins) # convert the bin edges to strings bin_edges_str = [ @@ -62,6 +65,7 @@ def histogram( ] def hist_values(series: Series) -> np.ndarray: + # we might have NaN values as the result of grouping so we need to drop them result = np.histogram(series.dropna(), bins=bin_edges)[0] return result if not cumulative else np.cumsum(result) diff --git a/superset/utils/screenshots.py b/superset/utils/screenshots.py index bf6ed0f9e849..079bb3ab883a 100644 --- a/superset/utils/screenshots.py +++ b/superset/utils/screenshots.py @@ -23,6 +23,7 @@ from flask import current_app from superset import feature_flag_manager +from superset.dashboards.permalink.types import DashboardPermalinkState from superset.utils.hashing import md5_sha_from_dict from superset.utils.urls import modify_url_query from superset.utils.webdriver import ( @@ -144,6 +145,7 @@ def compute_and_cache( # pylint: disable=too-many-arguments thumb_size: WindowSize | None = None, cache: Cache = None, force: bool = True, + cache_key: str | None = None, ) -> bytes | None: """ Fetches the screenshot, computes the thumbnail and caches the result @@ -155,7 +157,7 @@ def compute_and_cache( # pylint: disable=too-many-arguments :param force: Will force the computation even if it's already cached :return: Image payload """ - cache_key = self.cache_key(window_size, thumb_size) + cache_key = cache_key or self.cache_key(window_size, thumb_size) window_size = window_size or self.window_size thumb_size = thumb_size or self.thumb_size if not force and cache and cache.get(cache_key): @@ -252,3 +254,21 @@ def __init__( super().__init__(url, digest) self.window_size = window_size or DEFAULT_DASHBOARD_WINDOW_SIZE self.thumb_size = thumb_size or DEFAULT_DASHBOARD_THUMBNAIL_SIZE + + def cache_key( + self, + window_size: bool | WindowSize | None = None, + thumb_size: bool | 
WindowSize | None = None, + dashboard_state: DashboardPermalinkState | None = None, + ) -> str: + window_size = window_size or self.window_size + thumb_size = thumb_size or self.thumb_size + args = { + "thumbnail_type": self.thumbnail_type, + "digest": self.digest, + "type": "thumb", + "window_size": window_size, + "thumb_size": thumb_size, + "dashboard_state": dashboard_state, + } + return md5_sha_from_dict(args) diff --git a/superset/utils/urls.py b/superset/utils/urls.py index 57a1b63dd41d..9b186f54f31e 100644 --- a/superset/utils/urls.py +++ b/superset/utils/urls.py @@ -16,6 +16,7 @@ # under the License. import urllib from typing import Any +from urllib.parse import urlparse from flask import current_app, url_for @@ -50,3 +51,14 @@ def modify_url_query(url: str, **kwargs: Any) -> str: f"{k}={urllib.parse.quote(str(v[0]))}" for k, v in params.items() ) return urllib.parse.urlunsplit(parts) + + +def is_secure_url(url: str) -> bool: + """ + Validates if a URL is secure (uses HTTPS). + + :param url: The URL to validate. + :return: True if the URL uses HTTPS (secure), False if it uses HTTP (non-secure). 
+ """ + parsed_url = urlparse(url) + return parsed_url.scheme == "https" diff --git a/superset/views/base.py b/superset/views/base.py index f47eb32d1858..33ba43b2f687 100644 --- a/superset/views/base.py +++ b/superset/views/base.py @@ -114,6 +114,7 @@ "NATIVE_FILTER_DEFAULT_ROW_LIMIT", "PREVENT_UNSAFE_DEFAULT_URLS_ON_DATASET", "JWT_ACCESS_CSRF_COOKIE_NAME", + "SQLLAB_QUERY_RESULT_TIMEOUT", ) logger = logging.getLogger(__name__) diff --git a/superset/views/database/mixins.py b/superset/views/database/mixins.py index 21c664fa1f4e..a908dcba586a 100644 --- a/superset/views/database/mixins.py +++ b/superset/views/database/mixins.py @@ -187,7 +187,7 @@ class DatabaseMixin: "expose_in_sqllab": _("Expose in SQL Lab"), "allow_ctas": _("Allow CREATE TABLE AS"), "allow_cvas": _("Allow CREATE VIEW AS"), - "allow_dml": _("Allow DML"), + "allow_dml": _("Allow DDL/DML"), "force_ctas_schema": _("CTAS Schema"), "database_name": _("Database"), "creator": _("Creator"), diff --git a/superset/views/log/api.py b/superset/views/log/api.py index 33f4ad51d453..ffa3a860060e 100644 --- a/superset/views/log/api.py +++ b/superset/views/log/api.py @@ -130,7 +130,7 @@ def recent_activity(self, **kwargs: Any) -> FlaskResponse: """ args = kwargs["rison"] page, page_size = self._sanitize_page_args(*self._handle_page_args(args)) - actions = args.get("actions", ["explore", "dashboard"]) + actions = args.get("actions", ["mount_explorer", "mount_dashboard"]) distinct = args.get("distinct", True) payload = LogDAO.get_recent_activity(actions, distinct, page, page_size) diff --git a/superset/views/sql_lab/views.py b/superset/views/sql_lab/views.py index 3ec366726747..29cf640d8c03 100644 --- a/superset/views/sql_lab/views.py +++ b/superset/views/sql_lab/views.py @@ -29,11 +29,12 @@ from superset.models.sql_lab import Query, SavedQuery, TableSchema, TabState from superset.superset_typing import FlaskResponse from superset.utils import json -from superset.utils.core import get_user_id +from 
superset.utils.core import error_msg_from_exception, get_user_id from superset.views.base import ( BaseSupersetView, DeleteMixin, DeprecateModelViewMixin, + json_error_response, json_success, SupersetModelView, ) @@ -84,48 +85,56 @@ class TabStateView(BaseSupersetView): @has_access_api @expose("/", methods=("POST",)) def post(self) -> FlaskResponse: - query_editor = json.loads(request.form["queryEditor"]) - tab_state = TabState( - user_id=get_user_id(), - # This is for backward compatibility - label=query_editor.get("name") - or query_editor.get("title", __("Untitled Query")), - active=True, - database_id=query_editor["dbId"], - catalog=query_editor.get("catalog"), - schema=query_editor.get("schema"), - sql=query_editor.get("sql", "SELECT ..."), - query_limit=query_editor.get("queryLimit"), - hide_left_bar=query_editor.get("hideLeftBar"), - saved_query_id=query_editor.get("remoteId"), - template_params=query_editor.get("templateParams"), - ) - ( - db.session.query(TabState) - .filter_by(user_id=get_user_id()) - .update({"active": False}) - ) - db.session.add(tab_state) - db.session.commit() - return json_success(json.dumps({"id": tab_state.id})) + try: + query_editor = json.loads(request.form["queryEditor"]) + tab_state = TabState( + user_id=get_user_id(), + # This is for backward compatibility + label=query_editor.get("name") + or query_editor.get("title", __("Untitled Query")), + active=True, + database_id=query_editor["dbId"], + catalog=query_editor.get("catalog"), + schema=query_editor.get("schema"), + sql=query_editor.get("sql", "SELECT ..."), + query_limit=query_editor.get("queryLimit"), + hide_left_bar=query_editor.get("hideLeftBar"), + saved_query_id=query_editor.get("remoteId"), + template_params=query_editor.get("templateParams"), + ) + ( + db.session.query(TabState) + .filter_by(user_id=get_user_id()) + .update({"active": False}) + ) + db.session.add(tab_state) + db.session.commit() + return json_success(json.dumps({"id": tab_state.id})) + except 
Exception as ex: # pylint: disable=broad-except + db.session.rollback() + return json_error_response(error_msg_from_exception(ex), 400) @has_access_api @expose("/<int:tab_state_id>", methods=("DELETE",)) def delete(self, tab_state_id: int) -> FlaskResponse: - owner_id = _get_owner_id(tab_state_id) - if owner_id is None: - return Response(status=404) - if owner_id != get_user_id(): - return Response(status=403) - - db.session.query(TabState).filter(TabState.id == tab_state_id).delete( - synchronize_session=False - ) - db.session.query(TableSchema).filter( - TableSchema.tab_state_id == tab_state_id - ).delete(synchronize_session=False) - db.session.commit() - return json_success(json.dumps("OK")) + try: + owner_id = _get_owner_id(tab_state_id) + if owner_id is None: + return Response(status=404) + if owner_id != get_user_id(): + return Response(status=403) + + db.session.query(TabState).filter(TabState.id == tab_state_id).delete( + synchronize_session=False + ) + db.session.query(TableSchema).filter( + TableSchema.tab_state_id == tab_state_id + ).delete(synchronize_session=False) + db.session.commit() + return json_success(json.dumps("OK")) + except Exception as ex: # pylint: disable=broad-except + db.session.rollback() + return json_error_response(error_msg_from_exception(ex), 400) @has_access_api @expose("/<int:tab_state_id>", methods=("GET",)) @@ -146,19 +155,23 @@ def get(self, tab_state_id: int) -> FlaskResponse: @has_access_api @expose("<int:tab_state_id>/activate", methods=("POST",)) def activate(self, tab_state_id: int) -> FlaskResponse: - owner_id = _get_owner_id(tab_state_id) - if owner_id is None: - return Response(status=404) - if owner_id != get_user_id(): - return Response(status=403) - - ( - db.session.query(TabState) - .filter_by(user_id=get_user_id()) - .update({"active": TabState.id == tab_state_id}) - ) - db.session.commit() - return json_success(json.dumps(tab_state_id)) + try: + owner_id = _get_owner_id(tab_state_id) + if owner_id is None: + 
return Response(status=404) + if owner_id != get_user_id(): + return Response(status=403) + + ( + db.session.query(TabState) + .filter_by(user_id=get_user_id()) + .update({"active": TabState.id == tab_state_id}) + ) + db.session.commit() + return json_success(json.dumps(tab_state_id)) + except Exception as ex: # pylint: disable=broad-except + db.session.rollback() + return json_error_response(error_msg_from_exception(ex), 400) @has_access_api @expose("<int:tab_state_id>", methods=("PUT",)) @@ -169,100 +182,118 @@ def put(self, tab_state_id: int) -> FlaskResponse: if owner_id != get_user_id(): return Response(status=403) - fields = {k: json.loads(v) for k, v in request.form.to_dict().items()} - if client_id := fields.get("latest_query_id"): - query = db.session.query(Query).filter_by(client_id=client_id).one_or_none() - if not query: - return self.json_response({"error": "Bad request"}, status=400) - db.session.query(TabState).filter_by(id=tab_state_id).update(fields) - db.session.commit() - return json_success(json.dumps(tab_state_id)) + try: + fields = {k: json.loads(v) for k, v in request.form.to_dict().items()} + db.session.query(TabState).filter_by(id=tab_state_id).update(fields) + db.session.commit() + return json_success(json.dumps(tab_state_id)) + except Exception as ex: # pylint: disable=broad-except + db.session.rollback() + return json_error_response(error_msg_from_exception(ex), 400) @has_access_api @expose("<int:tab_state_id>/migrate_query", methods=("POST",)) def migrate_query(self, tab_state_id: int) -> FlaskResponse: - owner_id = _get_owner_id(tab_state_id) - if owner_id is None: - return Response(status=404) - if owner_id != get_user_id(): - return Response(status=403) - - client_id = json.loads(request.form["queryId"]) - db.session.query(Query).filter_by(client_id=client_id).update( - {"sql_editor_id": tab_state_id} - ) - db.session.commit() - return json_success(json.dumps(tab_state_id)) + try: + owner_id = _get_owner_id(tab_state_id) + if 
owner_id is None: + return Response(status=404) + if owner_id != get_user_id(): + return Response(status=403) + + client_id = json.loads(request.form["queryId"]) + db.session.query(Query).filter_by(client_id=client_id).update( + {"sql_editor_id": tab_state_id} + ) + db.session.commit() + return json_success(json.dumps(tab_state_id)) + except Exception as ex: # pylint: disable=broad-except + db.session.rollback() + return json_error_response(error_msg_from_exception(ex), 400) @has_access_api @expose("<int:tab_state_id>/query/<client_id>", methods=("DELETE",)) def delete_query(self, tab_state_id: int, client_id: str) -> FlaskResponse: - # Before deleting the query, ensure it's not tied to any - # active tab as the last query. If so, replace the query - # with the latest one created in that tab - tab_state_query = db.session.query(TabState).filter_by( - id=tab_state_id, latest_query_id=client_id - ) - if tab_state_query.count(): - query = ( - db.session.query(Query) - .filter( - and_( - Query.client_id != client_id, - Query.user_id == get_user_id(), - Query.sql_editor_id == str(tab_state_id), - ), - ) - .order_by(Query.id.desc()) - .first() - ) - tab_state_query.update( - {"latest_query_id": query.client_id if query else None} + try: + # Before deleting the query, ensure it's not tied to any + # active tab as the last query. 
If so, replace the query + # with the latest one created in that tab + tab_state_query = db.session.query(TabState).filter_by( + id=tab_state_id, latest_query_id=client_id ) + if tab_state_query.count(): + query = ( + db.session.query(Query) + .filter( + and_( + Query.client_id != client_id, + Query.user_id == get_user_id(), + Query.sql_editor_id == str(tab_state_id), + ), + ) + .order_by(Query.id.desc()) + .first() + ) + tab_state_query.update( + {"latest_query_id": query.client_id if query else None} + ) - db.session.query(Query).filter_by( - client_id=client_id, - user_id=get_user_id(), - sql_editor_id=str(tab_state_id), - ).delete(synchronize_session=False) - db.session.commit() - return json_success(json.dumps("OK")) + db.session.query(Query).filter_by( + client_id=client_id, + user_id=get_user_id(), + sql_editor_id=str(tab_state_id), + ).delete(synchronize_session=False) + db.session.commit() + return json_success(json.dumps("OK")) + except Exception as ex: # pylint: disable=broad-except + db.session.rollback() + return json_error_response(error_msg_from_exception(ex), 400) class TableSchemaView(BaseSupersetView): @has_access_api @expose("/", methods=("POST",)) def post(self) -> FlaskResponse: - table = json.loads(request.form["table"]) - - # delete any existing table schema - db.session.query(TableSchema).filter( - TableSchema.tab_state_id == table["queryEditorId"], - TableSchema.database_id == table["dbId"], - TableSchema.schema == table["schema"], - TableSchema.table == table["name"], - ).delete(synchronize_session=False) - - table_schema = TableSchema( - tab_state_id=table["queryEditorId"], - database_id=table["dbId"], - schema=table["schema"], - table=table["name"], - description=json.dumps(table), - expanded=True, - ) - db.session.add(table_schema) - db.session.commit() - return json_success(json.dumps({"id": table_schema.id})) + try: + table = json.loads(request.form["table"]) + + # delete any existing table schema + 
db.session.query(TableSchema).filter( + TableSchema.tab_state_id == table["queryEditorId"], + TableSchema.database_id == table["dbId"], + TableSchema.catalog == table.get("catalog"), + TableSchema.schema == table["schema"], + TableSchema.table == table["name"], + ).delete(synchronize_session=False) + + table_schema = TableSchema( + tab_state_id=table["queryEditorId"], + database_id=table["dbId"], + catalog=table.get("catalog"), + schema=table["schema"], + table=table["name"], + description=json.dumps(table), + expanded=True, + ) + db.session.add(table_schema) + db.session.commit() + return json_success(json.dumps({"id": table_schema.id})) + except Exception as ex: # pylint: disable=broad-except + db.session.rollback() + return json_error_response(error_msg_from_exception(ex), 400) @has_access_api @expose("/<int:table_schema_id>", methods=("DELETE",)) def delete(self, table_schema_id: int) -> FlaskResponse: - db.session.query(TableSchema).filter(TableSchema.id == table_schema_id).delete( - synchronize_session=False - ) - db.session.commit() - return json_success(json.dumps("OK")) + try: + db.session.query(TableSchema).filter( + TableSchema.id == table_schema_id + ).delete(synchronize_session=False) + db.session.commit() + return json_success(json.dumps("OK")) + except Exception as ex: # pylint: disable=broad-except + db.session.rollback() + return json_error_response(error_msg_from_exception(ex), 400) @has_access_api @expose("/<int:table_schema_id>/expanded", methods=("POST",)) diff --git a/superset/views/users/api.py b/superset/views/users/api.py index a7000b6b96c0..82089fe84fe2 100644 --- a/superset/views/users/api.py +++ b/superset/views/users/api.py @@ -19,7 +19,7 @@ from flask_jwt_extended.exceptions import NoAuthorizationError from sqlalchemy.orm.exc import NoResultFound -from superset import app +from superset import app, is_feature_enabled from superset.daos.user import UserDAO from superset.utils.slack import get_user_avatar, SlackClientError from 
superset.views.base_api import BaseSupersetApi @@ -143,11 +143,12 @@ def avatar(self, user_id: int) -> Response: # fetch from the one-to-one relationship if len(user.extra_attributes) > 0: avatar_url = user.extra_attributes[0].avatar_url - - should_fetch_slack_avatar = app.config.get( - "SLACK_ENABLE_AVATARS" - ) and app.config.get("SLACK_API_TOKEN") - if not avatar_url and should_fetch_slack_avatar: + slack_token = app.config.get("SLACK_API_TOKEN") + if ( + not avatar_url + and slack_token + and is_feature_enabled("SLACK_ENABLE_AVATARS") + ): try: # Fetching the avatar url from slack avatar_url = get_user_avatar(user.email) diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index 0f5948ad7b23..fb8f34216593 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -18,11 +18,13 @@ from io import BytesIO from unittest import mock +from unittest.mock import patch from zipfile import is_zipfile, ZipFile import prison import pytest import yaml +from flask import g from flask_babel import lazy_gettext as _ from parameterized import parameterized from sqlalchemy import and_ @@ -61,6 +63,7 @@ dataset_config, dataset_metadata_config, ) +from tests.integration_tests.fixtures.query_context import get_query_context from tests.integration_tests.fixtures.tags import ( create_custom_tags, # noqa: F401 get_filter_params, @@ -1768,7 +1771,8 @@ def test_export_chart_gamma(self): assert rv.status_code == 404 - def test_import_chart(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_chart(self, mock_add_permissions): """ Chart API: Test import chart """ @@ -1805,7 +1809,8 @@ def test_import_chart(self): db.session.delete(database) db.session.commit() - def test_import_chart_overwrite(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_chart_overwrite(self, mock_add_permissions): """ Chart 
API: Test import existing chart """ @@ -1876,7 +1881,8 @@ def test_import_chart_overwrite(self): db.session.delete(database) db.session.commit() - def test_import_chart_invalid(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_chart_invalid(self, mock_add_permissions): """ Chart API: Test import invalid chart """ @@ -1977,6 +1983,7 @@ def test_gets_owned_created_favorited_by_me_filter(self): self.assertEqual(rv.status_code, 200) data = json.loads(rv.data.decode("utf-8")) + data["result"].sort(key=lambda x: x["datasource_id"]) assert data["result"][0]["slice_name"] == "name0" assert data["result"][0]["datasource_id"] == 1 @@ -2353,3 +2360,57 @@ def test_update_chart_no_tag_changes(self): security_manager.add_permission_role(alpha_role, write_tags_perm) security_manager.add_permission_role(alpha_role, tag_charts_perm) + + @patch("superset.security.manager.SupersetSecurityManager.has_guest_access") + @patch("superset.security.manager.SupersetSecurityManager.is_guest_user") + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_chart_data_as_guest_user( + self, is_guest_user, has_guest_access + ): # get_guest_rls_filters + """ + Chart API: Test create simple chart + """ + self.login(ADMIN_USERNAME) + g.user.rls = [] + is_guest_user.return_value = True + has_guest_access.return_value = True + + with mock.patch.object(Slice, "get_query_context") as mock_get_query_context: + mock_get_query_context.return_value = get_query_context("birth_names") + rv = self.client.post( + "api/v1/chart/data", # noqa: F541 + json={ + "datasource": {"id": 2, "type": "table"}, + "queries": [ + { + "extras": {"where": "", "time_grain_sqla": "P1D"}, + "columns": ["name"], + "metrics": [{"label": "sum__num"}], + "orderby": [("sum__num", False)], + "row_limit": 100, + "granularity": "ds", + "time_range": "100 years ago : now", + "timeseries_limit": 0, + "timeseries_limit_metric": None, + "order_desc": True, + "filters": 
[ + {"col": "gender", "op": "==", "val": "boy"}, + {"col": "num", "op": "IS NOT NULL"}, + { + "col": "name", + "op": "NOT IN", + "val": ["<NULL>", '"abc"'], + }, + ], + "having": "", + "where": "", + } + ], + "result_format": "json", + "result_type": "full", + }, + ) + data = json.loads(rv.data.decode("utf-8")) + result = data["result"] + excluded_key = "query" + assert all([excluded_key not in query for query in result]) diff --git a/tests/integration_tests/charts/commands_tests.py b/tests/integration_tests/charts/commands_tests.py index 1ee4658b88ff..950c7cbc888d 100644 --- a/tests/integration_tests/charts/commands_tests.py +++ b/tests/integration_tests/charts/commands_tests.py @@ -23,19 +23,24 @@ from superset import db, security_manager from superset.commands.chart.create import CreateChartCommand from superset.commands.chart.exceptions import ( + ChartForbiddenError, ChartNotFoundError, WarmUpCacheChartNotFoundError, ) from superset.commands.chart.export import ExportChartsCommand +from superset.commands.chart.fave import AddFavoriteChartCommand from superset.commands.chart.importers.v1 import ImportChartsCommand +from superset.commands.chart.unfave import DelFavoriteChartCommand from superset.commands.chart.update import UpdateChartCommand from superset.commands.chart.warm_up_cache import ChartWarmUpCacheCommand from superset.commands.exceptions import CommandInvalidError from superset.commands.importers.exceptions import IncorrectVersionError from superset.connectors.sqla.models import SqlaTable +from superset.daos.chart import ChartDAO from superset.models.core import Database from superset.models.slice import Slice from superset.utils import json +from superset.utils.core import override_user from tests.integration_tests.base_tests import SupersetTestCase from tests.integration_tests.fixtures.birth_names_dashboard import ( load_birth_names_dashboard_with_slices, # noqa: F401 @@ -173,7 +178,8 @@ def test_export_chart_command_no_related(self, mock_g): class 
TestImportChartsCommand(SupersetTestCase): @patch("superset.utils.core.g") @patch("superset.security.manager.g") - def test_import_v1_chart(self, sm_g, utils_g) -> None: + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_chart(self, mock_add_permissions, sm_g, utils_g) -> None: """Test that we can import a chart""" admin = sm_g.user = utils_g.user = security_manager.find_user("admin") contents = { @@ -246,7 +252,8 @@ def test_import_v1_chart(self, sm_g, utils_g) -> None: db.session.commit() @patch("superset.security.manager.g") - def test_import_v1_chart_multiple(self, sm_g): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_chart_multiple(self, mock_add_permissions, sm_g): """Test that a chart can be imported multiple times""" sm_g.user = security_manager.find_user("admin") contents = { @@ -272,7 +279,8 @@ def test_import_v1_chart_multiple(self, sm_g): db.session.delete(database) db.session.commit() - def test_import_v1_chart_validation(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_chart_validation(self, mock_add_permissions): """Test different validations applied when importing a chart""" # metadata.yaml must be present contents = { @@ -425,3 +433,58 @@ def test_warm_up_cache(self): self.assertEqual( result, {"chart_id": slc.id, "viz_error": None, "viz_status": "success"} ) + + +class TestFavoriteChartCommand(SupersetTestCase): + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_fave_unfave_chart_command(self): + """Test that a user can fave/unfave a chart""" + with self.client.application.test_request_context(): + example_chart = db.session.query(Slice).all()[0] + + # Assert that the chart exists + assert example_chart is not None + + with override_user(security_manager.find_user("admin")): + AddFavoriteChartCommand(example_chart.id).run() + + # Assert that the dashboard was faved + ids = 
ChartDAO.favorited_ids([example_chart]) + assert example_chart.id in ids + + DelFavoriteChartCommand(example_chart.id).run() + + # Assert that the chart was unfaved + ids = ChartDAO.favorited_ids([example_chart]) + assert example_chart.id not in ids + + @pytest.mark.usefixtures("load_energy_table_with_slice") + def test_fave_unfave_chart_command_not_found(self): + """Test that faving / unfaving a non-existing chart raises an exception""" + with self.client.application.test_request_context(): + example_chart_id = 1234 + + with override_user(security_manager.find_user("admin")): + with self.assertRaises(ChartNotFoundError): + AddFavoriteChartCommand(example_chart_id).run() + + with self.assertRaises(ChartNotFoundError): + DelFavoriteChartCommand(example_chart_id).run() + + @pytest.mark.usefixtures("load_energy_table_with_slice") + @patch("superset.daos.base.BaseDAO.find_by_id") + def test_fave_unfave_chart_command_forbidden(self, mock_find_by_id): + """Test that faving / unfaving raises an exception for a chart the user doesn't own""" + with self.client.application.test_request_context(): + example_chart = db.session.query(Slice).all()[0] + mock_find_by_id.return_value = example_chart + + # Assert that the chart exists + assert example_chart is not None + + with override_user(security_manager.find_user("gamma")): + with self.assertRaises(ChartForbiddenError): + AddFavoriteChartCommand(example_chart.id).run() + + with self.assertRaises(ChartForbiddenError): + DelFavoriteChartCommand(example_chart.id).run() diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py index 56b0a9a793b0..ff7e46a6477e 100644 --- a/tests/integration_tests/charts/data/api_tests.py +++ b/tests/integration_tests/charts/data/api_tests.py @@ -716,7 +716,7 @@ def test_when_where_parameter_is_template_and_query_result_type__query_is_templa rv = self.post_assert_metric(CHART_DATA_URI, self.query_context_payload, "data") result = 
rv.json["result"][0]["query"] if get_example_database().backend != "presto": - assert "(\n 'boy' = 'boy'\n )" in result + assert "('boy' = 'boy')" in result @with_feature_flags(GLOBAL_ASYNC_QUERIES=True) @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") @@ -1346,13 +1346,13 @@ def test_time_filter_with_grain(test_client, login_as_admin, physical_query_cont backend = get_example_database().backend if backend == "sqlite": assert ( - "DATETIME(col5, 'start of day', -STRFTIME('%w', col5) || ' days') >=" + "DATETIME(col5, 'start of day', -strftime('%w', col5) || ' days') >=" in query ) elif backend == "mysql": - assert "DATE(DATE_SUB(col5, INTERVAL (DAYOFWEEK(col5) - 1) DAY)) >=" in query + assert "DATE(DATE_SUB(col5, INTERVAL DAYOFWEEK(col5) - 1 DAY)) >=" in query elif backend == "postgresql": - assert "DATE_TRUNC('WEEK', col5) >=" in query + assert "DATE_TRUNC('week', col5) >=" in query elif backend == "presto": assert "date_trunc('week', CAST(col5 AS TIMESTAMP)) >=" in query diff --git a/tests/integration_tests/commands_test.py b/tests/integration_tests/commands_test.py index 83409fd02280..c3015520b806 100644 --- a/tests/integration_tests/commands_test.py +++ b/tests/integration_tests/commands_test.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
import copy +from unittest.mock import patch import yaml from flask import g @@ -63,8 +64,10 @@ def setUp(self): self.user = user setattr(g, "user", user) - def test_import_assets(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_assets(self, mock_add_permissions): """Test that we can import multiple assets""" + contents = { "metadata.yaml": yaml.safe_dump(metadata_config), "databases/imported_database.yaml": yaml.safe_dump(database_config), @@ -136,7 +139,29 @@ def test_import_assets(self): dataset = chart.table assert str(dataset.uuid) == dataset_config["uuid"] - assert chart.query_context is None + assert json.loads(chart.query_context) == { + "datasource": {"id": dataset.id, "type": "table"}, + "force": False, + "queries": [ + { + "annotation_layers": [], + "applied_time_extras": {}, + "columns": [], + "custom_form_data": {}, + "custom_params": {}, + "extras": {"having": "", "time_grain_sqla": None, "where": ""}, + "filters": [], + "metrics": [], + "order_desc": True, + "row_limit": 5000, + "time_range": " : ", + "timeseries_limit": 0, + "url_params": {}, + } + ], + "result_format": "json", + "result_type": "full", + } assert json.loads(chart.params)["datasource"] == dataset.uid database = dataset.database @@ -144,13 +169,16 @@ def test_import_assets(self): assert dashboard.owners == [self.user] + mock_add_permissions.assert_called_with(database, None) + db.session.delete(dashboard) db.session.delete(chart) db.session.delete(dataset) db.session.delete(database) db.session.commit() - def test_import_v1_dashboard_overwrite(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_dashboard_overwrite(self, mock_add_permissions): """Test that assets can be overwritten""" contents = { "metadata.yaml": yaml.safe_dump(metadata_config), @@ -185,6 +213,9 @@ def test_import_v1_dashboard_overwrite(self): chart = dashboard.slices[0] dataset = chart.table database = dataset.database 
+ + mock_add_permissions.assert_called_with(database, None) + db.session.delete(dashboard) db.session.delete(chart) db.session.delete(dataset) diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py index 44b7ef26e64c..2cc8c8f5ca78 100644 --- a/tests/integration_tests/core_tests.py +++ b/tests/integration_tests/core_tests.py @@ -1015,7 +1015,6 @@ def test_tabstate_update(self): data = {"sql": json.dumps("select 1"), "latest_query_id": json.dumps(client_id)} response = self.client.put(f"/tabstateview/{tab_state_id}", data=data) self.assertEqual(response.status_code, 400) - self.assertEqual(response.json["error"], "Bad request") # generate query db.session.add(Query(client_id=client_id, database_id=1)) db.session.commit() diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index 259b9485fbe7..d78f2e475961 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -81,7 +81,7 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas "slug": "slug1_changed", "position_json": '{"b": "B"}', "css": "css_changed", - "json_metadata": '{"refresh_frequency": 30, "timed_refresh_immune_slices": [], "expanded_slices": {}, "color_scheme": "", "label_colors": {}, "shared_label_colors": {}, "color_scheme_domain": [], "cross_filters_enabled": false}', + "json_metadata": '{"refresh_frequency": 30, "timed_refresh_immune_slices": [], "expanded_slices": {}, "color_scheme": "", "label_colors": {}, "shared_label_colors": [], "map_label_colors": {}, "color_scheme_domain": [], "cross_filters_enabled": false}', "published": False, } @@ -2111,7 +2111,8 @@ def test_export_bundle_not_allowed(self): db.session.delete(dashboard) db.session.commit() - def test_import_dashboard(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_dashboard(self, mock_add_permissions): """ 
Dashboard API: Test import dashboard """ @@ -2215,7 +2216,8 @@ def test_import_dashboard_v0_export(self): db.session.delete(dataset) db.session.commit() - def test_import_dashboard_overwrite(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_dashboard_overwrite(self, mock_add_permissions): """ Dashboard API: Test import existing dashboard """ @@ -2831,7 +2833,9 @@ def _get_screenshot(self, dashboard_id, cache_key, download_format): return self.client.get(uri) @pytest.mark.usefixtures("create_dashboard_with_tag") - def test_cache_dashboard_screenshot_success(self): + @patch("superset.dashboards.api.is_feature_enabled") + def test_cache_dashboard_screenshot_success(self, is_feature_enabled): + is_feature_enabled.return_value = True self.login(ADMIN_USERNAME) dashboard = ( db.session.query(Dashboard) @@ -2842,7 +2846,9 @@ def test_cache_dashboard_screenshot_success(self): self.assertEqual(response.status_code, 202) @pytest.mark.usefixtures("create_dashboard_with_tag") - def test_cache_dashboard_screenshot_dashboard_validation(self): + @patch("superset.dashboards.api.is_feature_enabled") + def test_cache_dashboard_screenshot_dashboard_validation(self, is_feature_enabled): + is_feature_enabled.return_value = True self.login(ADMIN_USERNAME) dashboard = ( db.session.query(Dashboard) @@ -2858,7 +2864,9 @@ def test_cache_dashboard_screenshot_dashboard_validation(self): response = self._cache_screenshot(dashboard.id, invalid_payload) self.assertEqual(response.status_code, 400) - def test_cache_dashboard_screenshot_dashboard_not_found(self): + @patch("superset.dashboards.api.is_feature_enabled") + def test_cache_dashboard_screenshot_dashboard_not_found(self, is_feature_enabled): + is_feature_enabled.return_value = True self.login(ADMIN_USERNAME) non_existent_id = 999 response = self._cache_screenshot(non_existent_id) @@ -2867,10 +2875,14 @@ def test_cache_dashboard_screenshot_dashboard_not_found(self): 
@pytest.mark.usefixtures("create_dashboard_with_tag") @patch("superset.dashboards.api.cache_dashboard_screenshot") @patch("superset.dashboards.api.DashboardScreenshot.get_from_cache_key") - def test_screenshot_success_png(self, mock_get_cache, mock_cache_task): + @patch("superset.dashboards.api.is_feature_enabled") + def test_screenshot_success_png( + self, is_feature_enabled, mock_get_cache, mock_cache_task + ): """ Validate screenshot returns png """ + is_feature_enabled.return_value = True self.login(ADMIN_USERNAME) mock_cache_task.return_value = None mock_get_cache.return_value = BytesIO(b"fake image data") @@ -2893,12 +2905,14 @@ def test_screenshot_success_png(self, mock_get_cache, mock_cache_task): @patch("superset.dashboards.api.cache_dashboard_screenshot") @patch("superset.dashboards.api.build_pdf_from_screenshots") @patch("superset.dashboards.api.DashboardScreenshot.get_from_cache_key") + @patch("superset.dashboards.api.is_feature_enabled") def test_screenshot_success_pdf( - self, mock_get_from_cache, mock_build_pdf, mock_cache_task + self, is_feature_enabled, mock_get_from_cache, mock_build_pdf, mock_cache_task ): """ Validate screenshot can return pdf. 
""" + is_feature_enabled.return_value = True self.login(ADMIN_USERNAME) mock_cache_task.return_value = None mock_get_from_cache.return_value = BytesIO(b"fake image data") @@ -2921,7 +2935,11 @@ def test_screenshot_success_pdf( @pytest.mark.usefixtures("create_dashboard_with_tag") @patch("superset.dashboards.api.cache_dashboard_screenshot") @patch("superset.dashboards.api.DashboardScreenshot.get_from_cache_key") - def test_screenshot_not_in_cache(self, mock_get_cache, mock_cache_task): + @patch("superset.dashboards.api.is_feature_enabled") + def test_screenshot_not_in_cache( + self, is_feature_enabled, mock_get_cache, mock_cache_task + ): + is_feature_enabled.return_value = True self.login(ADMIN_USERNAME) mock_cache_task.return_value = None mock_get_cache.return_value = None @@ -2938,7 +2956,9 @@ def test_screenshot_not_in_cache(self, mock_get_cache, mock_cache_task): response = self._get_screenshot(dashboard.id, cache_key, "pdf") self.assertEqual(response.status_code, 404) - def test_screenshot_dashboard_not_found(self): + @patch("superset.dashboards.api.is_feature_enabled") + def test_screenshot_dashboard_not_found(self, is_feature_enabled): + is_feature_enabled.return_value = True self.login(ADMIN_USERNAME) non_existent_id = 999 response = self._get_screenshot(non_existent_id, "some_cache_key", "png") @@ -2947,7 +2967,11 @@ def test_screenshot_dashboard_not_found(self): @pytest.mark.usefixtures("create_dashboard_with_tag") @patch("superset.dashboards.api.cache_dashboard_screenshot") @patch("superset.dashboards.api.DashboardScreenshot.get_from_cache_key") - def test_screenshot_invalid_download_format(self, mock_get_cache, mock_cache_task): + @patch("superset.dashboards.api.is_feature_enabled") + def test_screenshot_invalid_download_format( + self, is_feature_enabled, mock_get_cache, mock_cache_task + ): + is_feature_enabled.return_value = True self.login(ADMIN_USERNAME) mock_cache_task.return_value = None mock_get_cache.return_value = BytesIO(b"fake png data") @@ 
-2963,4 +2987,21 @@ def test_screenshot_invalid_download_format(self, mock_get_cache, mock_cache_tas cache_key = json.loads(cache_resp.data.decode("utf-8"))["cache_key"] response = self._get_screenshot(dashboard.id, cache_key, "invalid") - self.assertEqual(response.status_code, 404) + assert response.status_code == 404 + + @pytest.mark.usefixtures("create_dashboard_with_tag") + @patch("superset.dashboards.api.is_feature_enabled") + def test_cache_dashboard_screenshot_feature_disabled(self, is_feature_enabled): + is_feature_enabled.return_value = False + self.login(ADMIN_USERNAME) + + dashboard = ( + db.session.query(Dashboard) + .filter(Dashboard.dashboard_title == "dash with tag") + .first() + ) + + assert dashboard is not None + + response = self._cache_screenshot(dashboard.id) + assert response.status_code == 404 diff --git a/tests/integration_tests/dashboards/commands_tests.py b/tests/integration_tests/dashboards/commands_tests.py index 334e0425cf1f..8c2fbc05f86b 100644 --- a/tests/integration_tests/dashboards/commands_tests.py +++ b/tests/integration_tests/dashboards/commands_tests.py @@ -22,20 +22,32 @@ from werkzeug.utils import secure_filename from superset import db, security_manager -from superset.commands.dashboard.exceptions import DashboardNotFoundError +from superset.commands.dashboard.copy import CopyDashboardCommand +from superset.commands.dashboard.delete import DeleteEmbeddedDashboardCommand +from superset.commands.dashboard.exceptions import ( + DashboardAccessDeniedError, + DashboardForbiddenError, + DashboardInvalidError, + DashboardNotFoundError, +) from superset.commands.dashboard.export import ( append_charts, ExportDashboardsCommand, get_default_position, ) +from superset.commands.dashboard.fave import AddFavoriteDashboardCommand from superset.commands.dashboard.importers import v0, v1 +from superset.commands.dashboard.unfave import DelFavoriteDashboardCommand from superset.commands.exceptions import CommandInvalidError from 
superset.commands.importers.exceptions import IncorrectVersionError from superset.connectors.sqla.models import SqlaTable +from superset.daos.dashboard import DashboardDAO from superset.models.core import Database from superset.models.dashboard import Dashboard +from superset.models.embedded_dashboard import EmbeddedDashboard from superset.models.slice import Slice from superset.utils import json +from superset.utils.core import override_user from tests.integration_tests.base_tests import SupersetTestCase from tests.integration_tests.fixtures.importexport import ( chart_config, @@ -226,6 +238,51 @@ def test_export_dashboard_command(self, mock_g1, mock_g2): "version": "1.0.0", } + # @pytest.mark.usefixtures("load_covid_dashboard") + @pytest.mark.skip(reason="missing covid fixture") + @patch("superset.security.manager.g") + @patch("superset.views.base.g") + def test_export_dashboard_command_dataset_references(self, mock_g1, mock_g2): + mock_g1.user = security_manager.find_user("admin") + mock_g2.user = security_manager.find_user("admin") + + example_dashboard = ( + db.session.query(Dashboard) + .filter_by(uuid="f4065089-110a-41fa-8dd7-9ce98a65e250") + .one() + ) + command = ExportDashboardsCommand([example_dashboard.id]) + contents = dict(command.run()) + + expected_paths = { + "metadata.yaml", + f"dashboards/COVID_Vaccine_Dashboard_{example_dashboard.id}.yaml", + "datasets/examples/covid_vaccines.yaml", # referenced dataset needs to be exported + "databases/examples.yaml", + } + for chart in example_dashboard.slices: + chart_slug = secure_filename(chart.slice_name) + expected_paths.add(f"charts/{chart_slug}_{chart.id}.yaml") + assert expected_paths == set(contents.keys()) + + metadata = yaml.safe_load( + contents[f"dashboards/World_Banks_Data_{example_dashboard.id}.yaml"]() + ) + + # find the dataset references in native filter and check if they are correct + assert "native_filter_configuration" in metadata["metadata"] + + for filter_config in metadata["metadata"][ 
+ "native_filter_configuration" + ].values(): + assert "targets" in filter_config + targets = filter_config["targets"] + + for column in targets: + # we need to find the correct datasetUuid (not datasetId) + assert "datasetUuid" in column + assert column["datasetUuid"] == "974b7a1c-22ea-49cb-9214-97b7dbd511e0" + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") @patch("superset.security.manager.g") @patch("superset.views.base.g") @@ -488,7 +545,8 @@ def test_import_v0_dashboard_cli_export(self): @patch("superset.utils.core.g") @patch("superset.security.manager.g") - def test_import_v1_dashboard(self, sm_g, utils_g): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_dashboard(self, mock_add_permissions, sm_g, utils_g): """Test that we can import a dashboard""" admin = sm_g.user = utils_g.user = security_manager.find_user("admin") contents = { @@ -562,7 +620,29 @@ def test_import_v1_dashboard(self, sm_g, utils_g): dataset = chart.table assert str(dataset.uuid) == dataset_config["uuid"] - assert chart.query_context is None + assert json.loads(chart.query_context) == { + "datasource": {"id": dataset.id, "type": "table"}, + "force": False, + "queries": [ + { + "annotation_layers": [], + "applied_time_extras": {}, + "columns": [], + "custom_form_data": {}, + "custom_params": {}, + "extras": {"having": "", "time_grain_sqla": None, "where": ""}, + "filters": [], + "metrics": [], + "order_desc": True, + "row_limit": 5000, + "time_range": " : ", + "timeseries_limit": 0, + "url_params": {}, + } + ], + "result_format": "json", + "result_type": "full", + } assert json.loads(chart.params)["datasource"] == dataset.uid database = dataset.database @@ -577,7 +657,8 @@ def test_import_v1_dashboard(self, sm_g, utils_g): db.session.commit() @patch("superset.security.manager.g") - def test_import_v1_dashboard_multiple(self, mock_g): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def 
test_import_v1_dashboard_multiple(self, mock_add_permissions, mock_g): """Test that a dashboard can be imported multiple times""" mock_g.user = security_manager.find_user("admin") @@ -660,3 +741,156 @@ def test_import_v1_dashboard_validation(self): "table_name": ["Missing data for required field."], } } + + +class TestCopyDashboardCommand(SupersetTestCase): + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_copy_dashboard_command(self): + """Test that an admin user can copy a dashboard""" + with self.client.application.test_request_context(): + example_dashboard = ( + db.session.query(Dashboard).filter_by(slug="world_health").one() + ) + copy_data = {"dashboard_title": "Copied Dashboard", "json_metadata": "{}"} + + with override_user(security_manager.find_user("admin")): + command = CopyDashboardCommand(example_dashboard, copy_data) + copied_dashboard = command.run() + + assert copied_dashboard.dashboard_title == "Copied Dashboard" + assert copied_dashboard.slug != example_dashboard.slug + assert copied_dashboard.slices == example_dashboard.slices + + db.session.delete(copied_dashboard) + db.session.commit() + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_copy_dashboard_command_no_access(self): + """Test that a non-owner user cannot copy a dashboard if DASHBOARD_RBAC is enabled""" + with self.client.application.test_request_context(): + example_dashboard = ( + db.session.query(Dashboard).filter_by(slug="world_health").one() + ) + copy_data = {"dashboard_title": "Copied Dashboard", "json_metadata": "{}"} + + with override_user(security_manager.find_user("gamma")): + with patch( + "superset.commands.dashboard.copy.is_feature_enabled", + return_value=True, + ): + command = CopyDashboardCommand(example_dashboard, copy_data) + with self.assertRaises(DashboardForbiddenError): + command.run() + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def 
test_copy_dashboard_command_invalid_data(self): + """Test that invalid data raises a DashboardInvalidError""" + with self.client.application.test_request_context(): + example_dashboard = ( + db.session.query(Dashboard).filter_by(slug="world_health").one() + ) + invalid_copy_data = {"dashboard_title": "", "json_metadata": "{}"} + + with override_user(security_manager.find_user("admin")): + command = CopyDashboardCommand(example_dashboard, invalid_copy_data) + with self.assertRaises(DashboardInvalidError): + command.run() + + +class TestDeleteEmbeddedDashboardCommand(SupersetTestCase): + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_delete_embedded_dashboard_command(self): + """Test that an admin user can add and then delete an embedded dashboard""" + with self.client.application.test_request_context(): + example_dashboard = ( + db.session.query(Dashboard).filter_by(slug="world_health").one() + ) + + # Step 1: Add an embedded dashboard + new_embedded_dashboard = EmbeddedDashboard( + dashboard_id=example_dashboard.id + ) + db.session.add(new_embedded_dashboard) + db.session.commit() + + # Step 2: Assert that the embedded dashboard was added + embedded_dashboards = example_dashboard.embedded + assert len(embedded_dashboards) > 0 + assert new_embedded_dashboard in embedded_dashboards + + # Step 3: Delete the embedded dashboard + with override_user(security_manager.find_user("admin")): + command = DeleteEmbeddedDashboardCommand(example_dashboard) + command.run() + + # Step 4: Assert that the embedded dashboard was deleted + deleted_embedded_dashboard = ( + db.session.query(EmbeddedDashboard) + .filter_by(uuid=new_embedded_dashboard.uuid) + .one_or_none() + ) + assert deleted_embedded_dashboard is None + + +class TestFavoriteDashboardCommand(SupersetTestCase): + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_fave_unfave_dashboard_command(self): + """Test that a user can fave/unfave a dashboard""" + with 
self.client.application.test_request_context(): + example_dashboard = ( + db.session.query(Dashboard).filter_by(slug="world_health").one() + ) + + # Assert that the dashboard exists + assert example_dashboard is not None + + with override_user(security_manager.find_user("admin")): + with patch( + "superset.daos.dashboard.DashboardDAO.get_by_id_or_slug", + return_value=example_dashboard, + ): + AddFavoriteDashboardCommand(example_dashboard.id).run() + + # Assert that the dashboard was faved + ids = DashboardDAO.favorited_ids([example_dashboard]) + assert example_dashboard.id in ids + + DelFavoriteDashboardCommand(example_dashboard.id).run() + + # Assert that the dashboard was unfaved + ids = DashboardDAO.favorited_ids([example_dashboard]) + assert example_dashboard.id not in ids + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + def test_fave_unfave_dashboard_command_not_found(self): + """Test that faving / unfaving a non-existing dashboard raises an exception""" + with self.client.application.test_request_context(): + example_dashboard_id = 1234 + + with override_user(security_manager.find_user("admin")): + with self.assertRaises(DashboardNotFoundError): + AddFavoriteDashboardCommand(example_dashboard_id).run() + + with self.assertRaises(DashboardNotFoundError): + DelFavoriteDashboardCommand(example_dashboard_id).run() + + @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") + @patch("superset.models.dashboard.Dashboard.get") + def test_fave_unfave_dashboard_command_forbidden(self, mock_get): + """Test that faving / unfaving raises an exception for a dashboard the user doesn't own""" + with self.client.application.test_request_context(): + example_dashboard = ( + db.session.query(Dashboard).filter_by(slug="world_health").one() + ) + + mock_get.return_value = example_dashboard + + # Assert that the dashboard exists + assert example_dashboard is not None + + with override_user(security_manager.find_user("gamma")): + with 
self.assertRaises(DashboardAccessDeniedError): + AddFavoriteDashboardCommand(example_dashboard.uuid).run() + + with self.assertRaises(DashboardAccessDeniedError): + DelFavoriteDashboardCommand(example_dashboard.uuid).run() diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py index 8d0cd0810f8b..677d6d36f9ae 100644 --- a/tests/integration_tests/databases/api_tests.py +++ b/tests/integration_tests/databases/api_tests.py @@ -1563,34 +1563,6 @@ def test_get_select_star_not_allowed(self): rv = self.client.get(uri) self.assertEqual(rv.status_code, 404) - def test_get_select_star_datasource_access(self): - """ - Database API: Test get select star with datasource access - """ - table = SqlaTable( - schema="main", table_name="ab_permission", database=get_main_database() - ) - db.session.add(table) - db.session.commit() - - tmp_table_perm = security_manager.find_permission_view_menu( - "datasource_access", table.get_perm() - ) - gamma_role = security_manager.find_role("Gamma") - security_manager.add_permission_role(gamma_role, tmp_table_perm) - - self.login(GAMMA_USERNAME) - main_db = get_main_database() - uri = f"api/v1/database/{main_db.id}/select_star/ab_permission/" - rv = self.client.get(uri) - self.assertEqual(rv.status_code, 200) - - # rollback changes - security_manager.del_permission_role(gamma_role, tmp_table_perm) - db.session.delete(table) - db.session.delete(main_db) - db.session.commit() - def test_get_select_star_not_found_database(self): """ Database API: Test get select star not found database @@ -2331,7 +2303,8 @@ def test_export_database_non_existing(self): rv = self.get_assert_metric(uri, "export") assert rv.status_code == 404 - def test_import_database(self): + @mock.patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_database(self, mock_add_permissions): """ Database API: Test import database """ @@ -2363,7 +2336,8 @@ def test_import_database(self): 
db.session.delete(database) db.session.commit() - def test_import_database_overwrite(self): + @mock.patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_database_overwrite(self, mock_add_permissions): """ Database API: Test import existing database """ @@ -2433,7 +2407,8 @@ def test_import_database_overwrite(self): db.session.delete(database) db.session.commit() - def test_import_database_invalid(self): + @mock.patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_database_invalid(self, mock_add_permissions): """ Database API: Test import invalid database """ @@ -2483,7 +2458,8 @@ def test_import_database_invalid(self): ] } - def test_import_database_masked_password(self): + @mock.patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_database_masked_password(self, mock_add_permissions): """ Database API: Test import database with masked password """ @@ -2540,7 +2516,8 @@ def test_import_database_masked_password(self): ] } - def test_import_database_masked_password_provided(self): + @mock.patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_database_masked_password_provided(self, mock_add_permissions): """ Database API: Test import database with masked password provided """ @@ -2586,8 +2563,11 @@ def test_import_database_masked_password_provided(self): db.session.commit() @mock.patch("superset.databases.schemas.is_feature_enabled") + @mock.patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_database_masked_ssh_tunnel_password( - self, mock_schema_is_feature_enabled + self, + mock_add_permissions, + mock_schema_is_feature_enabled, ): """ Database API: Test import database with masked password @@ -2644,8 +2624,11 @@ def test_import_database_masked_ssh_tunnel_password( } @mock.patch("superset.databases.schemas.is_feature_enabled") + 
@mock.patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_database_masked_ssh_tunnel_password_provided( - self, mock_schema_is_feature_enabled + self, + mock_add_permissions, + mock_schema_is_feature_enabled, ): """ Database API: Test import database with masked password provided @@ -2692,8 +2675,11 @@ def test_import_database_masked_ssh_tunnel_password_provided( db.session.commit() @mock.patch("superset.databases.schemas.is_feature_enabled") + @mock.patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_database_masked_ssh_tunnel_private_key_and_password( - self, mock_schema_is_feature_enabled + self, + mock_add_permissions, + mock_schema_is_feature_enabled, ): """ Database API: Test import database with masked private_key @@ -2753,8 +2739,11 @@ def test_import_database_masked_ssh_tunnel_private_key_and_password( } @mock.patch("superset.databases.schemas.is_feature_enabled") + @mock.patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_database_masked_ssh_tunnel_private_key_and_password_provided( - self, mock_schema_is_feature_enabled + self, + mock_add_permissions, + mock_schema_is_feature_enabled, ): """ Database API: Test import database with masked password provided @@ -2804,7 +2793,11 @@ def test_import_database_masked_ssh_tunnel_private_key_and_password_provided( db.session.delete(database) db.session.commit() - def test_import_database_masked_ssh_tunnel_feature_flag_disabled(self): + @mock.patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_database_masked_ssh_tunnel_feature_flag_disabled( + self, + mock_add_permissions, + ): """ Database API: Test import database with ssh_tunnel and feature flag disabled """ @@ -2856,8 +2849,11 @@ def test_import_database_masked_ssh_tunnel_feature_flag_disabled(self): } @mock.patch("superset.databases.schemas.is_feature_enabled") + 
@mock.patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_database_masked_ssh_tunnel_feature_no_credentials( - self, mock_schema_is_feature_enabled + self, + mock_add_permissions, + mock_schema_is_feature_enabled, ): """ Database API: Test import database with ssh_tunnel that has no credentials @@ -2911,8 +2907,11 @@ def test_import_database_masked_ssh_tunnel_feature_no_credentials( } @mock.patch("superset.databases.schemas.is_feature_enabled") + @mock.patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_database_masked_ssh_tunnel_feature_mix_credentials( - self, mock_schema_is_feature_enabled + self, + mock_add_permissions, + mock_schema_is_feature_enabled, ): """ Database API: Test import database with ssh_tunnel that has no credentials @@ -2966,8 +2965,11 @@ def test_import_database_masked_ssh_tunnel_feature_mix_credentials( } @mock.patch("superset.databases.schemas.is_feature_enabled") + @mock.patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_database_masked_ssh_tunnel_feature_only_pk_passwd( - self, mock_schema_is_feature_enabled + self, + mock_add_permissions, + mock_schema_is_feature_enabled, ): """ Database API: Test import database with ssh_tunnel that has no credentials @@ -3802,7 +3804,7 @@ def test_get_related_objects(self): assert "dashboards" in rv.json assert "sqllab_tab_states" in rv.json - @patch.dict( + @mock.patch.dict( "superset.config.SQL_VALIDATORS_BY_ENGINE", SQL_VALIDATORS_BY_ENGINE, clear=True, @@ -3828,7 +3830,7 @@ def test_validate_sql(self): self.assertEqual(rv.status_code, 200) self.assertEqual(response["result"], []) - @patch.dict( + @mock.patch.dict( "superset.config.SQL_VALIDATORS_BY_ENGINE", SQL_VALIDATORS_BY_ENGINE, clear=True, @@ -3864,7 +3866,7 @@ def test_validate_sql_errors(self): ], ) - @patch.dict( + @mock.patch.dict( "superset.config.SQL_VALIDATORS_BY_ENGINE", SQL_VALIDATORS_BY_ENGINE, clear=True, @@ -3885,7 +3887,7 @@ 
def test_validate_sql_not_found(self): rv = self.client.post(uri, json=request_payload) self.assertEqual(rv.status_code, 404) - @patch.dict( + @mock.patch.dict( "superset.config.SQL_VALIDATORS_BY_ENGINE", SQL_VALIDATORS_BY_ENGINE, clear=True, @@ -3908,7 +3910,7 @@ def test_validate_sql_validation_fails(self): self.assertEqual(rv.status_code, 400) self.assertEqual(response, {"message": {"sql": ["Field may not be null."]}}) - @patch.dict( + @mock.patch.dict( "superset.config.SQL_VALIDATORS_BY_ENGINE", {}, clear=True, @@ -3953,8 +3955,8 @@ def test_validate_sql_endpoint_noconfig(self): }, ) - @patch("superset.commands.database.validate_sql.get_validator_by_name") - @patch.dict( + @mock.patch("superset.commands.database.validate_sql.get_validator_by_name") + @mock.patch.dict( "superset.config.SQL_VALIDATORS_BY_ENGINE", PRESTO_SQL_VALIDATORS_BY_ENGINE, clear=True, diff --git a/tests/integration_tests/databases/commands_tests.py b/tests/integration_tests/databases/commands_tests.py index 8979b91c4724..bbf9b8259244 100644 --- a/tests/integration_tests/databases/commands_tests.py +++ b/tests/integration_tests/databases/commands_tests.py @@ -218,9 +218,9 @@ def test_export_database_command(self, mock_g): "is_active": True, "is_dttm": False, "python_date_format": None, - "type": "STRING" - if example_db.backend == "hive" - else "VARCHAR(255)", + "type": ( + "STRING" if example_db.backend == "hive" else "VARCHAR(255)" + ), "advanced_data_type": None, "verbose_name": None, }, @@ -397,7 +397,8 @@ def test_export_database_command_no_related(self, mock_g): class TestImportDatabasesCommand(SupersetTestCase): @patch("superset.security.manager.g") - def test_import_v1_database(self, mock_g): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_database(self, mock_add_permissions, mock_g): """Test that a database can be imported""" mock_g.user = security_manager.find_user("admin") @@ -420,13 +421,14 @@ def test_import_v1_database(self, 
mock_g): assert database.database_name == "imported_database" assert database.expose_in_sqllab assert database.extra == "{}" - assert database.sqlalchemy_uri == "someengine://user:pass@host1" + assert database.sqlalchemy_uri == "postgresql://user:pass@host1" db.session.delete(database) db.session.commit() @patch("superset.security.manager.g") - def test_import_v1_database_broken_csv_fields(self, mock_g): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_database_broken_csv_fields(self, mock_add_permissions, mock_g): """ Test that a database can be imported with broken schema. @@ -459,13 +461,14 @@ def test_import_v1_database_broken_csv_fields(self, mock_g): assert database.database_name == "imported_database" assert database.expose_in_sqllab assert database.extra == '{"schemas_allowed_for_file_upload": ["upload"]}' - assert database.sqlalchemy_uri == "someengine://user:pass@host1" + assert database.sqlalchemy_uri == "postgresql://user:pass@host1" db.session.delete(database) db.session.commit() @patch("superset.security.manager.g") - def test_import_v1_database_multiple(self, mock_g): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_database_multiple(self, mock_add_permissions, mock_g): """Test that a database can be imported multiple times""" mock_g.user = security_manager.find_user("admin") @@ -509,7 +512,8 @@ def test_import_v1_database_multiple(self, mock_g): db.session.commit() @patch("superset.security.manager.g") - def test_import_v1_database_with_dataset(self, mock_g): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_database_with_dataset(self, mock_add_permissions, mock_g): """Test that a database can be imported with datasets""" mock_g.user = security_manager.find_user("admin") @@ -532,7 +536,10 @@ def test_import_v1_database_with_dataset(self, mock_g): db.session.commit() @patch("superset.security.manager.g") - def 
test_import_v1_database_with_dataset_multiple(self, mock_g): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_database_with_dataset_multiple( + self, mock_add_permissions, mock_g + ): """Test that a database can be imported multiple times w/o changing datasets""" mock_g.user = security_manager.find_user("admin") @@ -570,7 +577,8 @@ def test_import_v1_database_with_dataset_multiple(self, mock_g): db.session.delete(dataset.database) db.session.commit() - def test_import_v1_database_validation(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_database_validation(self, mock_add_permissions): """Test different validations applied when importing a database""" # metadata.yaml must be present contents = { @@ -619,7 +627,8 @@ def test_import_v1_database_validation(self): } } - def test_import_v1_database_masked_password(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_database_masked_password(self, mock_add_permissions): """Test that database imports with masked passwords are rejected""" masked_database_config = database_config.copy() masked_database_config["sqlalchemy_uri"] = ( @@ -640,8 +649,11 @@ def test_import_v1_database_masked_password(self): } @patch("superset.databases.schemas.is_feature_enabled") + @patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_v1_database_masked_ssh_tunnel_password( - self, mock_schema_is_feature_enabled + self, + mock_add_permissions, + mock_schema_is_feature_enabled, ): """Test that database imports with masked ssh_tunnel passwords are rejected""" mock_schema_is_feature_enabled.return_value = True @@ -661,8 +673,11 @@ def test_import_v1_database_masked_ssh_tunnel_password( } @patch("superset.databases.schemas.is_feature_enabled") + @patch("superset.commands.database.importers.v1.utils.add_permissions") def 
test_import_v1_database_masked_ssh_tunnel_private_key_and_password( - self, mock_schema_is_feature_enabled + self, + mock_add_permissions, + mock_schema_is_feature_enabled, ): """Test that database imports with masked ssh_tunnel private_key and private_key_password are rejected""" mock_schema_is_feature_enabled.return_value = True @@ -686,8 +701,10 @@ def test_import_v1_database_masked_ssh_tunnel_private_key_and_password( @patch("superset.databases.schemas.is_feature_enabled") @patch("superset.security.manager.g") + @patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_v1_database_with_ssh_tunnel_password( self, + mock_add_permissions, mock_g, mock_schema_is_feature_enabled, ): @@ -715,7 +732,7 @@ def test_import_v1_database_with_ssh_tunnel_password( assert database.database_name == "imported_database" assert database.expose_in_sqllab assert database.extra == "{}" - assert database.sqlalchemy_uri == "someengine://user:pass@host1" + assert database.sqlalchemy_uri == "postgresql://user:pass@host1" model_ssh_tunnel = ( db.session.query(SSHTunnel) @@ -729,8 +746,10 @@ def test_import_v1_database_with_ssh_tunnel_password( @patch("superset.databases.schemas.is_feature_enabled") @patch("superset.security.manager.g") + @patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_v1_database_with_ssh_tunnel_private_key_and_password( self, + mock_add_permissions, mock_g, mock_schema_is_feature_enabled, ): @@ -760,7 +779,7 @@ def test_import_v1_database_with_ssh_tunnel_private_key_and_password( assert database.database_name == "imported_database" assert database.expose_in_sqllab assert database.extra == "{}" - assert database.sqlalchemy_uri == "someengine://user:pass@host1" + assert database.sqlalchemy_uri == "postgresql://user:pass@host1" model_ssh_tunnel = ( db.session.query(SSHTunnel) @@ -774,8 +793,11 @@ def test_import_v1_database_with_ssh_tunnel_private_key_and_password( db.session.commit() 
@patch("superset.databases.schemas.is_feature_enabled") + @patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_v1_database_masked_ssh_tunnel_no_credentials( - self, mock_schema_is_feature_enabled + self, + mock_add_permissions, + mock_schema_is_feature_enabled, ): """Test that databases with ssh_tunnels that have no credentials are rejected""" mock_schema_is_feature_enabled.return_value = True @@ -790,8 +812,11 @@ def test_import_v1_database_masked_ssh_tunnel_no_credentials( assert str(excinfo.value) == "Must provide credentials for the SSH Tunnel" @patch("superset.databases.schemas.is_feature_enabled") + @patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_v1_database_masked_ssh_tunnel_multiple_credentials( - self, mock_schema_is_feature_enabled + self, + mock_add_permissions, + mock_schema_is_feature_enabled, ): """Test that databases with ssh_tunnels that have multiple credentials are rejected""" mock_schema_is_feature_enabled.return_value = True @@ -808,8 +833,11 @@ def test_import_v1_database_masked_ssh_tunnel_multiple_credentials( ) @patch("superset.databases.schemas.is_feature_enabled") + @patch("superset.commands.database.importers.v1.utils.add_permissions") def test_import_v1_database_masked_ssh_tunnel_only_priv_key_psswd( - self, mock_schema_is_feature_enabled + self, + mock_add_permissions, + mock_schema_is_feature_enabled, ): """Test that databases with ssh_tunnels that have multiple credentials are rejected""" mock_schema_is_feature_enabled.return_value = True @@ -834,7 +862,8 @@ def test_import_v1_database_masked_ssh_tunnel_only_priv_key_psswd( } @patch("superset.commands.database.importers.v1.import_dataset") - def test_import_v1_rollback(self, mock_import_dataset): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_rollback(self, mock_add_permissions, mock_import_dataset): """Test than on an exception everything is rolled back""" 
num_databases = db.session.query(Database).count() diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py index 37de6e87c27a..e1c6589fa500 100644 --- a/tests/integration_tests/datasets/api_tests.py +++ b/tests/integration_tests/datasets/api_tests.py @@ -36,7 +36,6 @@ from superset.extensions import db, security_manager from superset.models.core import Database from superset.models.slice import Slice -from superset.sql_parse import Table from superset.utils import json from superset.utils.core import backend, get_example_default_schema from superset.utils.database import get_example_database, get_main_database @@ -254,6 +253,7 @@ def test_get_dataset_list(self): "schema", "sql", "table_name", + "uuid", ] assert sorted(list(response["result"][0].keys())) == expected_columns @@ -676,57 +676,6 @@ def test_create_dataset_item_owners_invalid(self): expected_result = {"message": {"owners": ["Owners are invalid"]}} assert data == expected_result - @pytest.mark.usefixtures("load_energy_table_with_slice") - def test_create_dataset_validate_uniqueness(self): - """ - Dataset API: Test create dataset validate table uniqueness - """ - - energy_usage_ds = self.get_energy_usage_dataset() - self.login(ADMIN_USERNAME) - table_data = { - "database": energy_usage_ds.database_id, - "table_name": energy_usage_ds.table_name, - } - if schema := get_example_default_schema(): - table_data["schema"] = schema - rv = self.post_assert_metric("/api/v1/dataset/", table_data, "post") - assert rv.status_code == 422 - data = json.loads(rv.data.decode("utf-8")) - assert data == { - "message": { - "table": [ - f"Dataset {Table(energy_usage_ds.table_name, schema)} already exists" - ] - } - } - - @pytest.mark.usefixtures("load_energy_table_with_slice") - def test_create_dataset_with_sql_validate_uniqueness(self): - """ - Dataset API: Test create dataset with sql - """ - - energy_usage_ds = self.get_energy_usage_dataset() - self.login(ADMIN_USERNAME) - 
table_data = { - "database": energy_usage_ds.database_id, - "table_name": energy_usage_ds.table_name, - "sql": "select * from energy_usage", - } - if schema := get_example_default_schema(): - table_data["schema"] = schema - rv = self.post_assert_metric("/api/v1/dataset/", table_data, "post") - assert rv.status_code == 422 - data = json.loads(rv.data.decode("utf-8")) - assert data == { - "message": { - "table": [ - f"Dataset {Table(energy_usage_ds.table_name, schema)} already exists" - ] - } - } - @pytest.mark.usefixtures("load_energy_table_with_slice") def test_create_dataset_with_sql(self): """ @@ -1455,27 +1404,6 @@ def test_update_dataset_item_owners_invalid(self): db.session.delete(dataset) db.session.commit() - def test_update_dataset_item_uniqueness(self): - """ - Dataset API: Test update dataset uniqueness - """ - - dataset = self.insert_default_dataset() - self.login(ADMIN_USERNAME) - ab_user = self.insert_dataset( - "ab_user", [self.get_user("admin").id], get_main_database() - ) - table_data = {"table_name": "ab_user"} - uri = f"api/v1/dataset/{dataset.id}" - rv = self.put_assert_metric(uri, table_data, "put") - data = json.loads(rv.data.decode("utf-8")) - assert rv.status_code == 422 - expected_response = {"message": {"table": ["Dataset ab_user already exists"]}} - assert data == expected_response - db.session.delete(dataset) - db.session.delete(ab_user) - db.session.commit() - @patch("superset.daos.dataset.DatasetDAO.update") def test_update_dataset_sqlalchemy_error(self, mock_dao_update): """ @@ -2039,7 +1967,8 @@ def test_get_datasets_custom_filter_sql(self): for table_name in self.fixture_tables_names: assert table_name in [ds["table_name"] for ds in data["result"]] - def test_import_dataset(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_dataset(self, mock_add_permissions): """ Dataset API: Test import dataset """ @@ -2102,7 +2031,8 @@ def test_import_dataset_v0_export(self): 
db.session.delete(dataset) db.session.commit() - def test_import_dataset_overwrite(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_dataset_overwrite(self, mock_add_permissions): """ Dataset API: Test import existing dataset """ diff --git a/tests/integration_tests/datasets/commands_tests.py b/tests/integration_tests/datasets/commands_tests.py index 53bd7fa051aa..66a15e2e61d5 100644 --- a/tests/integration_tests/datasets/commands_tests.py +++ b/tests/integration_tests/datasets/commands_tests.py @@ -343,8 +343,9 @@ def test_import_v0_dataset_ui_export(self): @patch("superset.utils.core.g") @patch("superset.security.manager.g") + @patch("superset.commands.database.importers.v1.utils.add_permissions") @pytest.mark.usefixtures("load_energy_table_with_slice") - def test_import_v1_dataset(self, sm_g, utils_g): + def test_import_v1_dataset(self, mock_add_permissions, sm_g, utils_g): """Test that we can import a dataset""" admin = sm_g.user = utils_g.user = security_manager.find_user("admin") contents = { @@ -411,7 +412,8 @@ def test_import_v1_dataset(self, sm_g, utils_g): db.session.commit() @patch("superset.security.manager.g") - def test_import_v1_dataset_multiple(self, mock_g): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_dataset_multiple(self, mock_add_permissions, mock_g): """Test that a dataset can be imported multiple times""" mock_g.user = security_manager.find_user("admin") @@ -452,7 +454,8 @@ def test_import_v1_dataset_multiple(self, mock_g): db.session.delete(dataset.database) db.session.commit() - def test_import_v1_dataset_validation(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_dataset_validation(self, mock_add_permissions): """Test different validations applied when importing a dataset""" # metadata.yaml must be present contents = { @@ -502,7 +505,8 @@ def test_import_v1_dataset_validation(self): } 
@patch("superset.security.manager.g") - def test_import_v1_dataset_existing_database(self, mock_g): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_dataset_existing_database(self, mock_add_permissions, mock_g): """Test that a dataset can be imported when the database already exists""" mock_g.user = security_manager.find_user("admin") diff --git a/tests/integration_tests/datasource/api_tests.py b/tests/integration_tests/datasource/api_tests.py index d9f3650793f3..e810e02ee571 100644 --- a/tests/integration_tests/datasource/api_tests.py +++ b/tests/integration_tests/datasource/api_tests.py @@ -18,6 +18,7 @@ from unittest.mock import ANY, patch import pytest +from sqlalchemy.sql.elements import TextClause from superset import db, security_manager from superset.connectors.sqla.models import SqlaTable @@ -176,3 +177,31 @@ def test_get_column_values_denormalize_column(self, denormalize_name_mock): table.normalize_columns = False self.client.get(f"api/v1/datasource/table/{table.id}/column/col2/values/") # noqa: F841 denormalize_name_mock.assert_called_with(ANY, "col2") + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_column_values_with_rls(self): + self.login(ADMIN_USERNAME) + table = self.get_virtual_dataset() + with patch.object( + table, "get_sqla_row_level_filters", return_value=[TextClause("col2 = 'b'")] + ): + rv = self.client.get( + f"api/v1/datasource/table/{table.id}/column/col2/values/" + ) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["result"], ["b"]) + + @pytest.mark.usefixtures("app_context", "virtual_dataset") + def test_get_column_values_with_rls_no_values(self): + self.login(ADMIN_USERNAME) + table = self.get_virtual_dataset() + with patch.object( + table, "get_sqla_row_level_filters", return_value=[TextClause("col2 = 'q'")] + ): + rv = self.client.get( + 
f"api/v1/datasource/table/{table.id}/column/col2/values/" + ) + self.assertEqual(rv.status_code, 200) + response = json.loads(rv.data.decode("utf-8")) + self.assertEqual(response["result"], []) diff --git a/tests/integration_tests/datasource_tests.py b/tests/integration_tests/datasource_tests.py index aaad26b85d72..242094737e2a 100644 --- a/tests/integration_tests/datasource_tests.py +++ b/tests/integration_tests/datasource_tests.py @@ -42,6 +42,7 @@ get_main_database, ) from tests.integration_tests.base_tests import db_insert_temp_object, SupersetTestCase +from tests.integration_tests.conftest import with_feature_flags from tests.integration_tests.constants import ADMIN_USERNAME from tests.integration_tests.fixtures.birth_names_dashboard import ( load_birth_names_dashboard_with_slices, # noqa: F401 @@ -68,6 +69,24 @@ def create_test_table_context(database: Database): engine.execute(f"DROP TABLE {full_table_name}") +@contextmanager +def create_and_cleanup_table(table=None): + if table is None: + table = SqlaTable( + table_name="dummy_sql_table", + database=get_example_database(), + schema=get_example_default_schema(), + sql="select 123 as intcol, 'abc' as strcol", + ) + db.session.add(table) + db.session.commit() + try: + yield table + finally: + db.session.delete(table) + db.session.commit() + + class TestDatasource(SupersetTestCase): def setUp(self): db.session.begin(subtransactions=True) @@ -123,37 +142,22 @@ def test_always_filter_main_dttm(self): sql=sql, ) - db.session.add(table) - db.session.commit() + with create_and_cleanup_table(table): + table.always_filter_main_dttm = False + result = str(table.get_sqla_query(**query_obj).sqla_query.whereclause) + assert "default_dttm" not in result and "additional_dttm" in result - table.always_filter_main_dttm = False - result = str(table.get_sqla_query(**query_obj).sqla_query.whereclause) - assert "default_dttm" not in result and "additional_dttm" in result - - table.always_filter_main_dttm = True - result = 
str(table.get_sqla_query(**query_obj).sqla_query.whereclause) - assert "default_dttm" in result and "additional_dttm" in result - - db.session.delete(table) - db.session.commit() + table.always_filter_main_dttm = True + result = str(table.get_sqla_query(**query_obj).sqla_query.whereclause) + assert "default_dttm" in result and "additional_dttm" in result def test_external_metadata_for_virtual_table(self): self.login(ADMIN_USERNAME) - table = SqlaTable( - table_name="dummy_sql_table", - database=get_example_database(), - schema=get_example_default_schema(), - sql="select 123 as intcol, 'abc' as strcol", - ) - db.session.add(table) - db.session.commit() - table = self.get_table(name="dummy_sql_table") - url = f"/datasource/external_metadata/table/{table.id}/" - resp = self.get_json_resp(url) - assert {o.get("column_name") for o in resp} == {"intcol", "strcol"} - db.session.delete(table) - db.session.commit() + with create_and_cleanup_table() as table: + url = f"/datasource/external_metadata/table/{table.id}/" + resp = self.get_json_resp(url) + assert {o.get("column_name") for o in resp} == {"intcol", "strcol"} @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def test_external_metadata_by_name_for_physical_table(self): @@ -178,31 +182,42 @@ def test_external_metadata_by_name_for_physical_table(self): def test_external_metadata_by_name_for_virtual_table(self): self.login(ADMIN_USERNAME) - table = SqlaTable( - table_name="dummy_sql_table", - database=get_example_database(), - schema=get_example_default_schema(), - sql="select 123 as intcol, 'abc' as strcol", - ) - db.session.add(table) - db.session.commit() + with create_and_cleanup_table() as tbl: + params = prison.dumps( + { + "datasource_type": "table", + "database_name": tbl.database.database_name, + "schema_name": tbl.schema, + "table_name": tbl.table_name, + "normalize_columns": tbl.normalize_columns, + "always_filter_main_dttm": tbl.always_filter_main_dttm, + } + ) + url = 
f"/datasource/external_metadata_by_name/?q={params}" + resp = self.get_json_resp(url) + assert {o.get("column_name") for o in resp} == {"intcol", "strcol"} - tbl = self.get_table(name="dummy_sql_table") - params = prison.dumps( - { - "datasource_type": "table", - "database_name": tbl.database.database_name, - "schema_name": tbl.schema, - "table_name": tbl.table_name, - "normalize_columns": tbl.normalize_columns, - "always_filter_main_dttm": tbl.always_filter_main_dttm, - } - ) - url = f"/datasource/external_metadata_by_name/?q={params}" - resp = self.get_json_resp(url) - assert {o.get("column_name") for o in resp} == {"intcol", "strcol"} - db.session.delete(tbl) - db.session.commit() + def test_external_metadata_by_name_for_virtual_table_uses_mutator(self): + self.login(ADMIN_USERNAME) + with create_and_cleanup_table() as tbl: + app.config["SQL_QUERY_MUTATOR"] = ( + lambda sql, **kwargs: "SELECT 456 as intcol, 'def' as mutated_strcol" + ) + + params = prison.dumps( + { + "datasource_type": "table", + "database_name": tbl.database.database_name, + "schema_name": tbl.schema, + "table_name": tbl.table_name, + "normalize_columns": tbl.normalize_columns, + "always_filter_main_dttm": tbl.always_filter_main_dttm, + } + ) + url = f"/datasource/external_metadata_by_name/?q={params}" + resp = self.get_json_resp(url) + assert {o.get("column_name") for o in resp} == {"intcol", "mutated_strcol"} + app.config["SQL_QUERY_MUTATOR"] = None def test_external_metadata_by_name_from_sqla_inspector(self): self.login(ADMIN_USERNAME) @@ -278,15 +293,10 @@ def test_external_metadata_for_virtual_table_template_params(self): sql="select {{ foo }} as intcol", template_params=json.dumps({"foo": "123"}), ) - db.session.add(table) - db.session.commit() - - table = self.get_table(name="dummy_sql_table_with_template_params") - url = f"/datasource/external_metadata/table/{table.id}/" - resp = self.get_json_resp(url) - assert {o.get("column_name") for o in resp} == {"intcol"} - 
db.session.delete(table) - db.session.commit() + with create_and_cleanup_table(table) as tbl: + url = f"/datasource/external_metadata/table/{tbl.id}/" + resp = self.get_json_resp(url) + assert {o.get("column_name") for o in resp} == {"intcol"} def test_external_metadata_for_malicious_virtual_table(self): self.login(ADMIN_USERNAME) @@ -567,6 +577,7 @@ def test_get_samples_with_incorrect_cc(test_client, login_as_admin, virtual_data assert "INCORRECT SQL" in rv.json.get("error") +@with_feature_flags(ALLOW_ADHOC_SUBQUERY=True) def test_get_samples_on_physical_dataset(test_client, login_as_admin, physical_dataset): uri = ( f"/datasource/samples?datasource_id={physical_dataset.id}&datasource_type=table" @@ -631,6 +642,7 @@ def test_get_samples_with_filters(test_client, login_as_admin, virtual_dataset): assert rv.json["result"]["rowcount"] == 0 +@with_feature_flags(ALLOW_ADHOC_SUBQUERY=True) def test_get_samples_with_time_filter(test_client, login_as_admin, physical_dataset): uri = ( f"/datasource/samples?datasource_id={physical_dataset.id}&datasource_type=table" @@ -651,6 +663,7 @@ def test_get_samples_with_time_filter(test_client, login_as_admin, physical_data assert rv.json["result"]["total_count"] == 2 +@with_feature_flags(ALLOW_ADHOC_SUBQUERY=True) def test_get_samples_with_multiple_filters( test_client, login_as_admin, physical_dataset ): @@ -683,7 +696,7 @@ def test_get_samples_with_multiple_filters( assert "2000-01-02" in rv.json["result"]["query"] assert "2000-01-04" in rv.json["result"]["query"] assert "col3 = 1.2" in rv.json["result"]["query"] - assert "col4 IS NULL" in rv.json["result"]["query"] + assert "col4 is null" in rv.json["result"]["query"] assert "col2 = 'c'" in rv.json["result"]["query"] diff --git a/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py b/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py index c8db1f912ad2..215a01f58538 100644 --- a/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py +++ 
b/tests/integration_tests/db_engine_specs/base_engine_spec_tests.py @@ -308,7 +308,7 @@ def test_calculated_column_in_order_by_base_engine_spec(self): } sql = table.get_query_str(query_obj) assert ( - "ORDER BY\n CASE WHEN gender = 'boy' THEN 'male' ELSE 'female' END ASC" + "ORDER BY \n case\n when gender='boy' then 'male'\n else 'female'\n end\n ASC" in sql ) diff --git a/tests/integration_tests/db_engine_specs/bigquery_tests.py b/tests/integration_tests/db_engine_specs/bigquery_tests.py index 53f9137076bb..5b2571209509 100644 --- a/tests/integration_tests/db_engine_specs/bigquery_tests.py +++ b/tests/integration_tests/db_engine_specs/bigquery_tests.py @@ -380,4 +380,4 @@ def test_calculated_column_in_order_by(self): "orderby": [["gender_cc", True]], } sql = table.get_query_str(query_obj) - assert "ORDER BY\n `gender_cc` ASC" in sql + assert "ORDER BY `gender_cc` ASC" in sql diff --git a/tests/integration_tests/db_engine_specs/postgres_tests.py b/tests/integration_tests/db_engine_specs/postgres_tests.py index f21dbf54adde..175ee65b2d0e 100644 --- a/tests/integration_tests/db_engine_specs/postgres_tests.py +++ b/tests/integration_tests/db_engine_specs/postgres_tests.py @@ -25,6 +25,7 @@ from superset.db_engine_specs.postgres import PostgresEngineSpec from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.models.sql_lab import Query +from superset.utils.core import backend from superset.utils.database import get_example_database from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec from tests.integration_tests.fixtures.certificates import ssl_certificate @@ -525,11 +526,10 @@ def test_get_catalog_names(app_context: AppContext) -> None: """ Test the ``get_catalog_names`` method. 
""" - database = get_example_database() - - if database.backend != "postgresql": + if backend() != "postgresql": return + database = get_example_database() with database.get_inspector() as inspector: assert PostgresEngineSpec.get_catalog_names(database, inspector) == { "postgres", diff --git a/tests/integration_tests/db_engine_specs/trino_tests.py b/tests/integration_tests/db_engine_specs/trino_tests.py deleted file mode 100644 index d0399971380f..000000000000 --- a/tests/integration_tests/db_engine_specs/trino_tests.py +++ /dev/null @@ -1,115 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-from unittest import mock - -import pandas as pd -import pytest - -from superset.db_engine_specs.trino import TrinoEngineSpec -from superset.exceptions import SupersetException -from superset.sql_parse import Table -from tests.integration_tests.test_app import app - - -def test_df_to_csv() -> None: - with pytest.raises(SupersetException): - TrinoEngineSpec.df_to_sql( - mock.MagicMock(), - Table("foobar"), - pd.DataFrame(), - {"if_exists": "append"}, - ) - - -@mock.patch("superset.db_engine_specs.trino.g", spec={}) -def test_df_to_sql_if_exists_fail(mock_g): - mock_g.user = True - mock_database = mock.MagicMock() - mock_database.get_df.return_value.empty = False - with pytest.raises(SupersetException, match="Table already exists"): - TrinoEngineSpec.df_to_sql( - mock_database, Table("foobar"), pd.DataFrame(), {"if_exists": "fail"} - ) - - -@mock.patch("superset.db_engine_specs.trino.g", spec={}) -def test_df_to_sql_if_exists_fail_with_schema(mock_g): - mock_g.user = True - mock_database = mock.MagicMock() - mock_database.get_df.return_value.empty = False - with pytest.raises(SupersetException, match="Table already exists"): - TrinoEngineSpec.df_to_sql( - mock_database, - Table(table="foobar", schema="schema"), - pd.DataFrame(), - {"if_exists": "fail"}, - ) - - -@mock.patch("superset.db_engine_specs.trino.g", spec={}) -@mock.patch("superset.db_engine_specs.trino.upload_to_s3") -def test_df_to_sql_if_exists_replace(mock_upload_to_s3, mock_g): - config = app.config.copy() - app.config["CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC"]: lambda *args: "" # noqa: F722 - mock_upload_to_s3.return_value = "mock-location" - mock_g.user = True - mock_database = mock.MagicMock() - mock_database.get_df.return_value.empty = False - mock_execute = mock.MagicMock(return_value=True) - mock_database.get_sqla_engine.return_value.__enter__.return_value.execute = ( - mock_execute - ) - table_name = "foobar" - - with app.app_context(): - TrinoEngineSpec.df_to_sql( - mock_database, - 
Table(table=table_name), - pd.DataFrame(), - {"if_exists": "replace", "header": 1, "na_values": "mock", "sep": "mock"}, - ) - - mock_execute.assert_any_call(f"DROP TABLE IF EXISTS {table_name}") - app.config = config - - -@mock.patch("superset.db_engine_specs.trino.g", spec={}) -@mock.patch("superset.db_engine_specs.trino.upload_to_s3") -def test_df_to_sql_if_exists_replace_with_schema(mock_upload_to_s3, mock_g): - config = app.config.copy() - app.config["CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC"]: lambda *args: "" # noqa: F722 - mock_upload_to_s3.return_value = "mock-location" - mock_g.user = True - mock_database = mock.MagicMock() - mock_database.get_df.return_value.empty = False - mock_execute = mock.MagicMock(return_value=True) - mock_database.get_sqla_engine.return_value.__enter__.return_value.execute = ( - mock_execute - ) - table_name = "foobar" - schema = "schema" - - with app.app_context(): - TrinoEngineSpec.df_to_sql( - mock_database, - Table(table=table_name, schema=schema), - pd.DataFrame(), - {"if_exists": "replace", "header": 1, "na_values": "mock", "sep": "mock"}, - ) - - mock_execute.assert_any_call(f"DROP TABLE IF EXISTS {schema}.{table_name}") - app.config = config diff --git a/tests/integration_tests/fixtures/importexport.py b/tests/integration_tests/fixtures/importexport.py index cccf4fa7701a..5a778ed078bb 100644 --- a/tests/integration_tests/fixtures/importexport.py +++ b/tests/integration_tests/fixtures/importexport.py @@ -374,10 +374,24 @@ "database_name": "imported_database", "expose_in_sqllab": True, "extra": {}, - "sqlalchemy_uri": "someengine://user:pass@host1", + "sqlalchemy_uri": "postgresql://user:pass@host1", "uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89", "version": "1.0.0", } +database_config_no_creds: dict[str, Any] = { + "allow_csv_upload": False, + "allow_ctas": False, + "allow_cvas": False, + "allow_dml": False, + "allow_run_async": False, + "cache_timeout": None, + "database_name": "imported_database_no_creds", + "expose_in_sqllab": 
True, + "extra": {}, + "sqlalchemy_uri": "bigquery://test-db/", + "uuid": "2ff17edc-f3fa-4609-a5ac-b484281225bc", + "version": "1.0.0", +} database_with_ssh_tunnel_config_private_key: dict[str, Any] = { "allow_csv_upload": True, @@ -389,7 +403,7 @@ "database_name": "imported_database", "expose_in_sqllab": True, "extra": {}, - "sqlalchemy_uri": "someengine://user:pass@host1", + "sqlalchemy_uri": "postgresql://user:pass@host1", "uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89", "ssh_tunnel": { "server_address": "localhost", @@ -411,7 +425,7 @@ "database_name": "imported_database", "expose_in_sqllab": True, "extra": {}, - "sqlalchemy_uri": "someengine://user:pass@host1", + "sqlalchemy_uri": "postgresql://user:pass@host1", "uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89", "ssh_tunnel": { "server_address": "localhost", diff --git a/tests/integration_tests/log_api_tests.py b/tests/integration_tests/log_api_tests.py index fae09754aa9b..0666f4e156dc 100644 --- a/tests/integration_tests/log_api_tests.py +++ b/tests/integration_tests/log_api_tests.py @@ -171,8 +171,18 @@ def test_get_recent_activity(self): admin_user = self.get_user("admin") self.login(ADMIN_USERNAME) dash = create_dashboard("dash_slug", "dash_title", "{}", []) - log1 = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) - log2 = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) + log1 = self.insert_log( + "log", + admin_user, + dashboard_id=dash.id, + json='{"event_name": "mount_dashboard"}', + ) + log2 = self.insert_log( + "log", + admin_user, + dashboard_id=dash.id, + json='{"event_name": "mount_dashboard"}', + ) uri = f"api/v1/log/recent_activity/" # noqa: F541 rv = self.client.get(uri) @@ -184,21 +194,18 @@ def test_get_recent_activity(self): db.session.delete(dash) db.session.commit() - self.assertEqual( - response, - { - "result": [ - { - "action": "dashboard", - "item_type": "dashboard", - "item_url": "/superset/dashboard/dash_slug/", - "item_title": "dash_title", - "time": ANY, - 
"time_delta_humanized": ANY, - } - ] - }, - ) + assert response == { + "result": [ + { + "action": "log", + "item_type": "dashboard", + "item_url": "/superset/dashboard/dash_slug/", + "item_title": "dash_title", + "time": ANY, + "time_delta_humanized": ANY, + } + ] + } def test_get_recent_activity_actions_filter(self): """ @@ -207,10 +214,20 @@ def test_get_recent_activity_actions_filter(self): admin_user = self.get_user("admin") self.login(ADMIN_USERNAME) dash = create_dashboard("dash_slug", "dash_title", "{}", []) - log = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) - log2 = self.insert_log("explore", admin_user, dashboard_id=dash.id) + log = self.insert_log( + "log", + admin_user, + dashboard_id=dash.id, + json='{"event_name": "mount_dashboard"}', + ) + log2 = self.insert_log( + "log", + admin_user, + dashboard_id=dash.id, + json='{"event_name": "mount_explorer"}', + ) - arguments = {"actions": ["dashboard"]} + arguments = {"actions": ["mount_dashboard"]} uri = f"api/v1/log/recent_activity/?q={prison.dumps(arguments)}" rv = self.client.get(uri) @@ -232,8 +249,18 @@ def test_get_recent_activity_distinct_false(self): admin_user = self.get_user("admin") self.login(ADMIN_USERNAME) dash = create_dashboard("dash_slug", "dash_title", "{}", []) - log = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) - log2 = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) + log = self.insert_log( + "log", + admin_user, + dashboard_id=dash.id, + json='{"event_name": "mount_dashboard"}', + ) + log2 = self.insert_log( + "log", + admin_user, + dashboard_id=dash.id, + json='{"event_name": "mount_dashboard"}', + ) arguments = {"distinct": False} uri = f"api/v1/log/recent_activity/?q={prison.dumps(arguments)}" @@ -256,9 +283,24 @@ def test_get_recent_activity_pagination(self): dash = create_dashboard("dash_slug", "dash_title", "{}", []) dash2 = create_dashboard("dash2_slug", "dash2_title", "{}", []) dash3 = create_dashboard("dash3_slug", 
"dash3_title", "{}", []) - log = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) - log2 = self.insert_log("dashboard", admin_user, dashboard_id=dash2.id) - log3 = self.insert_log("dashboard", admin_user, dashboard_id=dash3.id) + log = self.insert_log( + "log", + admin_user, + dashboard_id=dash.id, + json='{"event_name": "mount_dashboard"}', + ) + log2 = self.insert_log( + "log", + admin_user, + dashboard_id=dash2.id, + json='{"event_name": "mount_dashboard"}', + ) + log3 = self.insert_log( + "log", + admin_user, + dashboard_id=dash3.id, + json='{"event_name": "mount_dashboard"}', + ) now = datetime.now() log3.dttm = now @@ -271,29 +313,26 @@ def test_get_recent_activity_pagination(self): self.assertEqual(rv.status_code, 200) response = json.loads(rv.data.decode("utf-8")) - self.assertEqual( - response, - { - "result": [ - { - "action": "dashboard", - "item_type": "dashboard", - "item_url": "/superset/dashboard/dash3_slug/", - "item_title": "dash3_title", - "time": ANY, - "time_delta_humanized": ANY, - }, - { - "action": "dashboard", - "item_type": "dashboard", - "item_url": "/superset/dashboard/dash2_slug/", - "item_title": "dash2_title", - "time": ANY, - "time_delta_humanized": ANY, - }, - ] - }, - ) + assert response == { + "result": [ + { + "action": "log", + "item_type": "dashboard", + "item_url": "/superset/dashboard/dash3_slug/", + "item_title": "dash3_title", + "time": ANY, + "time_delta_humanized": ANY, + }, + { + "action": "log", + "item_type": "dashboard", + "item_url": "/superset/dashboard/dash2_slug/", + "item_title": "dash2_title", + "time": ANY, + "time_delta_humanized": ANY, + }, + ] + } arguments = {"page": 1, "page_size": 2} uri = f"api/v1/log/recent_activity/?q={prison.dumps(arguments)}" @@ -309,18 +348,15 @@ def test_get_recent_activity_pagination(self): self.assertEqual(rv.status_code, 200) response = json.loads(rv.data.decode("utf-8")) - self.assertEqual( - response, - { - "result": [ - { - "action": "dashboard", - "item_type": 
"dashboard", - "item_url": "/superset/dashboard/dash_slug/", - "item_title": "dash_title", - "time": ANY, - "time_delta_humanized": ANY, - } - ] - }, - ) + assert response == { + "result": [ + { + "action": "log", + "item_type": "dashboard", + "item_url": "/superset/dashboard/dash_slug/", + "item_title": "dash_title", + "time": ANY, + "time_delta_humanized": ANY, + } + ] + } diff --git a/tests/integration_tests/migrations/f84fde59123a_update_charts_with_old_time_comparison__test.py b/tests/integration_tests/migrations/f84fde59123a_update_charts_with_old_time_comparison__test.py index 5f7fe505c03b..1ec0acca2ece 100644 --- a/tests/integration_tests/migrations/f84fde59123a_update_charts_with_old_time_comparison__test.py +++ b/tests/integration_tests/migrations/f84fde59123a_update_charts_with_old_time_comparison__test.py @@ -29,7 +29,8 @@ migrate_time_comparison_to_new_format.upgrade_comparison_params ) -params_v1_with_custom: dict[str, Any] = { +# Base object containing common properties +base_params: dict[str, Any] = { "datasource": "2__table", "viz_type": "pop_kpi", "metric": { @@ -57,20 +58,18 @@ "datasourceWarning": False, "hasCustomLabel": False, "label": "SUM(num_boys)", - "optionName": "metric_o6rj1h6jty_3t6mrruogfv", }, "adhoc_filters": [ { "expressionType": "SIMPLE", "subject": "ds", "operator": "TEMPORAL_RANGE", - "comparator": "1984 : 1986", + "comparator": "1984 : 2000", "clause": "WHERE", "sqlExpression": None, "isExtra": False, "isNew": False, "datasourceWarning": False, - "filterOptionName": "filter_p50i4xw50d_8x8e4ypwjs8", } ], "row_limit": 10000, @@ -81,6 +80,22 @@ "comparison_color_scheme": "Green", "extra_form_data": {}, "dashboards": [], +} + +# Specific parameter objects overriding only the differing properties +params_v1_with_custom: dict[str, Any] = { + **base_params, + "metric": { + **base_params["metric"], + "optionName": "metric_o6rj1h6jty_3t6mrruogfv", + }, + "adhoc_filters": [ + { + **base_params["adhoc_filters"][0], + "comparator": "1984 : 
1986", + "filterOptionName": "filter_p50i4xw50d_8x8e4ypwjs8", + } + ], "time_comparison": "c", "enable_time_comparison": True, "adhoc_custom": [ @@ -97,58 +112,13 @@ } ], } + params_v1_other_than_custom: dict[str, Any] = { - "datasource": "2__table", - "viz_type": "pop_kpi", + **base_params, "metric": { - "expressionType": "SIMPLE", - "column": { - "advanced_data_type": None, - "certification_details": None, - "certified_by": None, - "column_name": "num_boys", - "description": None, - "expression": None, - "filterable": True, - "groupby": True, - "id": 334, - "is_certified": False, - "is_dttm": False, - "python_date_format": None, - "type": "BIGINT", - "type_generic": 0, - "verbose_name": None, - "warning_markdown": None, - }, - "aggregate": "SUM", - "sqlExpression": None, - "datasourceWarning": False, - "hasCustomLabel": False, - "label": "SUM(num_boys)", + **base_params["metric"], "optionName": "metric_96s7b8iypsr_4wrlgm0i7il", }, - "adhoc_filters": [ - { - "expressionType": "SIMPLE", - "subject": "ds", - "operator": "TEMPORAL_RANGE", - "comparator": "1984 : 2000", - "clause": "WHERE", - "sqlExpression": None, - "isExtra": False, - "isNew": False, - "datasourceWarning": False, - "filterOptionName": "filter_2sefqq1rwb7_lhqvw7ukc6", - } - ], - "row_limit": 10000, - "y_axis_format": "SMART_NUMBER", - "percentDifferenceFormat": "SMART_NUMBER", - "header_font_size": 0.2, - "subheader_font_size": 0.125, - "comparison_color_scheme": "Green", - "extra_form_data": {}, - "dashboards": [], "time_comparison": "r", "enable_time_comparison": True, "adhoc_custom": [ @@ -161,116 +131,60 @@ } ], } + +params_v1_other_than_custom_false: dict[str, Any] = { + **params_v1_other_than_custom, + "enable_time_comparison": False, +} + params_v2_with_custom: dict[str, Any] = { - "datasource": "2__table", - "viz_type": "pop_kpi", + **base_params, "metric": { - "expressionType": "SIMPLE", - "column": { - "advanced_data_type": None, - "certification_details": None, - "certified_by": None, - 
"column_name": "num_boys", - "description": None, - "expression": None, - "filterable": True, - "groupby": True, - "id": 334, - "is_certified": False, - "is_dttm": False, - "python_date_format": None, - "type": "BIGINT", - "type_generic": 0, - "verbose_name": None, - "warning_markdown": None, - }, - "aggregate": "SUM", - "sqlExpression": None, - "datasourceWarning": False, - "hasCustomLabel": False, - "label": "SUM(num_boys)", + **base_params["metric"], "optionName": "metric_o6rj1h6jty_3t6mrruogfv", }, "adhoc_filters": [ { - "expressionType": "SIMPLE", - "subject": "ds", - "operator": "TEMPORAL_RANGE", + **base_params["adhoc_filters"][0], "comparator": "1984 : 1986", - "clause": "WHERE", - "sqlExpression": None, - "isExtra": False, - "isNew": False, - "datasourceWarning": False, "filterOptionName": "filter_p50i4xw50d_8x8e4ypwjs8", } ], - "row_limit": 10000, - "y_axis_format": "SMART_NUMBER", - "percentDifferenceFormat": "SMART_NUMBER", - "header_font_size": 0.2, - "subheader_font_size": 0.125, - "comparison_color_scheme": "Green", - "extra_form_data": {}, - "dashboards": [], - "time_compare": "custom", + "time_compare": ["custom"], "comparison_type": "values", "start_date_offset": "1981-01-01", } + params_v2_other_than_custom: dict[str, Any] = { - "datasource": "2__table", - "viz_type": "pop_kpi", + **base_params, "metric": { - "expressionType": "SIMPLE", - "column": { - "advanced_data_type": None, - "certification_details": None, - "certified_by": None, - "column_name": "num_boys", - "description": None, - "expression": None, - "filterable": True, - "groupby": True, - "id": 334, - "is_certified": False, - "is_dttm": False, - "python_date_format": None, - "type": "BIGINT", - "type_generic": 0, - "verbose_name": None, - "warning_markdown": None, - }, - "aggregate": "SUM", - "sqlExpression": None, - "datasourceWarning": False, - "hasCustomLabel": False, - "label": "SUM(num_boys)", + **base_params["metric"], "optionName": "metric_96s7b8iypsr_4wrlgm0i7il", }, - 
"adhoc_filters": [ + "time_compare": ["inherit"], + "comparison_type": "values", +} + +params_v2_other_than_custom_false: dict[str, Any] = { + **params_v2_other_than_custom, + "time_compare": [], +} + +params_v1_with_custom_and_no_comparator: dict[str, Any] = { + **params_v1_with_custom, + "adhoc_custom": [ { "expressionType": "SIMPLE", "subject": "ds", "operator": "TEMPORAL_RANGE", - "comparator": "1984 : 2000", + "comparator": None, "clause": "WHERE", "sqlExpression": None, "isExtra": False, "isNew": False, "datasourceWarning": False, - "filterOptionName": "filter_2sefqq1rwb7_lhqvw7ukc6", } ], - "row_limit": 10000, - "y_axis_format": "SMART_NUMBER", - "percentDifferenceFormat": "SMART_NUMBER", - "header_font_size": 0.2, - "subheader_font_size": 0.125, - "comparison_color_scheme": "Green", - "extra_form_data": {}, - "dashboards": [], - "time_compare": "inherit", - "comparison_type": "values", } @@ -313,3 +227,45 @@ def test_downgrade_chart_params_other_than_custom(): original_params = deepcopy(params_v2_other_than_custom) downgraded_params = downgrade_comparison_params(original_params) assert downgraded_params == params_v1_other_than_custom + + +def test_upgrade_chart_params_other_than_custom_false(): + """ + ensure that the new time comparison params are added + """ + original_params = deepcopy(params_v1_other_than_custom_false) + upgraded_params = upgrade_comparison_params(original_params) + assert upgraded_params == params_v2_other_than_custom_false + + +def test_downgrade_chart_params_other_than_custom_false(): + """ + ensure that the params downgrade operation produces an almost identical dict + as the original value + """ + original_params = deepcopy(params_v2_other_than_custom_false) + downgraded_params = downgrade_comparison_params(original_params) + assert downgraded_params == params_v1_other_than_custom_false + + +def test_upgrade_chart_params_empty(): + """ + Ensure that the migration does not fail when params is None or empty. 
+ """ + assert upgrade_comparison_params(None) == {} + assert upgrade_comparison_params({}) == {} + assert upgrade_comparison_params("") == {} + assert downgrade_comparison_params(None) == {} + assert downgrade_comparison_params({}) == {} + assert downgrade_comparison_params("") == {} + + +def test_upgrade_chart_params_with_custom_no_comparator(): + """ + ensure that the new time comparison params are added but no start_date_offset + """ + original_params = deepcopy(params_v1_with_custom_and_no_comparator) + expected_after_upgrade = deepcopy(params_v2_with_custom) + expected_after_upgrade.pop("start_date_offset") + upgraded_params = upgrade_comparison_params(original_params) + assert upgraded_params == expected_after_upgrade diff --git a/tests/integration_tests/queries/saved_queries/api_tests.py b/tests/integration_tests/queries/saved_queries/api_tests.py index 9b1184b1f73f..4ce0a79dac9a 100644 --- a/tests/integration_tests/queries/saved_queries/api_tests.py +++ b/tests/integration_tests/queries/saved_queries/api_tests.py @@ -20,6 +20,7 @@ from datetime import datetime from io import BytesIO from typing import Optional +from unittest.mock import patch from zipfile import is_zipfile, ZipFile import yaml @@ -898,7 +899,8 @@ def create_saved_query_import(self): buf.seek(0) return buf - def test_import_saved_queries(self): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_saved_queries(self, mock_add_permissions): """ Saved Query API: Test import """ diff --git a/tests/integration_tests/queries/saved_queries/commands_tests.py b/tests/integration_tests/queries/saved_queries/commands_tests.py index 8babd7efb9db..4ce816622f7d 100644 --- a/tests/integration_tests/queries/saved_queries/commands_tests.py +++ b/tests/integration_tests/queries/saved_queries/commands_tests.py @@ -148,7 +148,8 @@ def test_export_query_command_key_order(self, mock_g): class TestImportSavedQueriesCommand(SupersetTestCase): 
@patch("superset.security.manager.g") - def test_import_v1_saved_queries(self, mock_g): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_saved_queries(self, mock_add_permissions, mock_g): """Test that we can import a saved query""" mock_g.user = security_manager.find_user("admin") @@ -178,7 +179,8 @@ def test_import_v1_saved_queries(self, mock_g): db.session.commit() @patch("superset.security.manager.g") - def test_import_v1_saved_queries_multiple(self, mock_g): + @patch("superset.commands.database.importers.v1.utils.add_permissions") + def test_import_v1_saved_queries_multiple(self, mock_add_permissions, mock_g): """Test that a saved query can be imported multiple times""" mock_g.user = security_manager.find_user("admin") diff --git a/tests/integration_tests/query_context_tests.py b/tests/integration_tests/query_context_tests.py index 2fcd6d204817..931e75c7c0aa 100644 --- a/tests/integration_tests/query_context_tests.py +++ b/tests/integration_tests/query_context_tests.py @@ -42,7 +42,11 @@ ) from superset.utils.pandas_postprocessing.utils import FLAT_COLUMN_SEPARATOR from tests.integration_tests.base_tests import SupersetTestCase -from tests.integration_tests.conftest import only_postgresql, only_sqlite +from tests.integration_tests.conftest import ( + only_postgresql, + only_sqlite, + with_feature_flags, +) from tests.integration_tests.fixtures.birth_names_dashboard import ( load_birth_names_dashboard_with_slices, # noqa: F401 load_birth_names_data, # noqa: F401 @@ -367,7 +371,7 @@ def test_query_response_type(self): sql_text = get_sql_text(payload) assert "SELECT" in sql_text - assert re.search(r'NOT [`"\[]?num[`"\]]? IS NULL', sql_text) + assert re.search(r'[`"\[]?num[`"\]]? IS NOT NULL', sql_text) assert re.search( r"""NOT \([\s\n]*[`"\[]?name[`"\]]? IS NULL[\s\n]* """ r"""OR [`"\[]?name[`"\]]? 
IN \('"abc"'\)[\s\n]*\)""", @@ -861,6 +865,7 @@ def test_non_time_column_with_time_grain(app_context, physical_dataset): assert df["COL2 ALIAS"][0] == "a" +@with_feature_flags(ALLOW_ADHOC_SUBQUERY=True) def test_special_chars_in_column_name(app_context, physical_dataset): qc = QueryContextFactory().create( datasource={ @@ -1161,16 +1166,19 @@ def test_time_offset_with_temporal_range_filter(app_context, physical_dataset): OFFSET 0 """ assert ( - re.search(r"WHERE\n col6 >= .*2002-01-01", sqls[0]) + re.search(r"WHERE col6 >= .*2002-01-01", sqls[0]) and re.search(r"AND col6 < .*2003-01-01", sqls[0]) ) is not None assert ( - re.search(r"WHERE\n col6 >= .*2001-10-01", sqls[1]) + re.search(r"WHERE col6 >= .*2001-10-01", sqls[1]) and re.search(r"AND col6 < .*2002-10-01", sqls[1]) ) is not None def test_virtual_dataset_with_comments(app_context, virtual_dataset_with_comments): + if backend() == "mysql": + return + qc = QueryContextFactory().create( datasource={ "type": virtual_dataset_with_comments.type, diff --git a/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py b/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py index f2722596f803..b8846bec1586 100644 --- a/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py +++ b/tests/integration_tests/reports/commands/execute_dashboard_report_tests.py @@ -108,4 +108,4 @@ def test_report_with_header_data( assert header_data.get("notification_format") == report_schedule.report_format assert header_data.get("notification_source") == ReportSourceFormat.DASHBOARD assert header_data.get("notification_type") == report_schedule.type - assert len(send_email_smtp_mock.call_args.kwargs["header_data"]) == 6 + assert len(send_email_smtp_mock.call_args.kwargs["header_data"]) == 7 diff --git a/tests/integration_tests/reports/commands_tests.py b/tests/integration_tests/reports/commands_tests.py index 9aaebf11dde9..4b43326839cf 100644 --- 
a/tests/integration_tests/reports/commands_tests.py +++ b/tests/integration_tests/reports/commands_tests.py @@ -66,6 +66,7 @@ from superset.reports.models import ( ReportDataFormat, ReportExecutionLog, + ReportRecipientType, ReportSchedule, ReportScheduleType, ReportScheduleValidatorType, @@ -108,6 +109,20 @@ def get_target_from_report_schedule(report_schedule: ReportSchedule) -> list[str ] +def get_cctarget_from_report_schedule(report_schedule: ReportSchedule) -> list[str]: + return [ + json.loads(recipient.recipient_config_json).get("ccTarget", "") + for recipient in report_schedule.recipients + ] + + +def get_bcctarget_from_report_schedule(report_schedule: ReportSchedule) -> list[str]: + return [ + json.loads(recipient.recipient_config_json).get("bccTarget", "") + for recipient in report_schedule.recipients + ] + + def get_error_logs_query(report_schedule: ReportSchedule) -> BaseQuery: return ( db.session.query(ReportExecutionLog) @@ -152,7 +167,9 @@ def assert_log(state: str, error_message: Optional[str] = None): @contextmanager def create_test_table_context(database: Database): with database.get_sqla_engine() as engine: - engine.execute("CREATE TABLE test_table AS SELECT 1 as first, 2 as second") + engine.execute( + "CREATE TABLE IF NOT EXISTS test_table AS SELECT 1 as first, 2 as second" + ) engine.execute("INSERT INTO test_table (first, second) VALUES (1, 2)") engine.execute("INSERT INTO test_table (first, second) VALUES (3, 4)") @@ -172,6 +189,20 @@ def create_report_email_chart(): cleanup_report_schedule(report_schedule) +@pytest.fixture() +def create_report_email_chart_with_cc_and_bcc(): + chart = db.session.query(Slice).first() + report_schedule = create_report_notification( + email_target="target@email.com", + ccTarget="cc@email.com", + bccTarget="bcc@email.com", + chart=chart, + ) + yield report_schedule + + cleanup_report_schedule(report_schedule) + + @pytest.fixture() def create_report_email_chart_alpha_owner(get_user): owners = [get_user("alpha")] 
@@ -617,6 +648,73 @@ def create_invalid_sql_alert_email_chart(request, app_context: AppContext): cleanup_report_schedule(report_schedule) +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", + "create_report_email_chart_with_cc_and_bcc", +) +@patch("superset.reports.notifications.email.send_email_smtp") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_email_chart_report_schedule_with_cc_bcc( + screenshot_mock, + email_mock, + create_report_email_chart_with_cc_and_bcc, +): + """ + ExecuteReport Command: Test chart email report schedule with screenshot and email cc, bcc options + """ + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + + with freeze_time("2020-01-01T00:00:00Z"): + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_email_chart_with_cc_and_bcc.id, datetime.utcnow() + ).run() + + notification_targets = get_target_from_report_schedule( + create_report_email_chart_with_cc_and_bcc + ) + + notification_cctargets = get_cctarget_from_report_schedule( + create_report_email_chart_with_cc_and_bcc + ) + + notification_bcctargets = get_bcctarget_from_report_schedule( + create_report_email_chart_with_cc_and_bcc + ) + + # assert that the link sent is correct + assert ( + '<a href="http://0.0.0.0:8080/explore/?form_data=%7B%22slice_id%22:+' + f"{create_report_email_chart_with_cc_and_bcc.chart.id}" + '%7D&force=false">Explore in Superset</a>' in email_mock.call_args[0][2] + ) + # Assert the email smtp address + if notification_targets: + assert email_mock.call_args[0][0] == notification_targets[0] + + # Assert the cc recipients if provided + if notification_cctargets: + expected_cc_targets = [target.strip() for target in notification_cctargets] + assert ( + email_mock.call_args[1].get("cc", "").split(",") == expected_cc_targets + ) + + if notification_bcctargets: + expected_bcc_targets = [ + target.strip() for target in notification_bcctargets + ] + assert ( + 
email_mock.call_args[1].get("bcc", "").split(",") + == expected_bcc_targets + ) + + # Assert the email inline screenshot + smtp_images = email_mock.call_args[1]["images"] + assert smtp_images[list(smtp_images.keys())[0]] == SCREENSHOT_FILE + # Assert logs are correct + assert_log(ReportState.SUCCESS) + + @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_email_chart" ) @@ -1110,6 +1208,63 @@ def test_email_dashboard_report_schedule_force_screenshot( assert_log(ReportState.SUCCESS) +@pytest.mark.usefixtures( + "load_birth_names_dashboard_with_slices", "create_report_slack_chart" +) +@patch("superset.commands.report.execute.get_channels_with_search") +@patch("superset.reports.notifications.slack.should_use_v2_api", return_value=True) +@patch("superset.reports.notifications.slackv2.get_slack_client") +@patch("superset.utils.screenshots.ChartScreenshot.get_screenshot") +def test_slack_chart_report_schedule_converts_to_v2( + screenshot_mock, + slack_client_mock, + slack_should_use_v2_api_mock, + get_channels_with_search_mock, + create_report_slack_chart, +): + """ + ExecuteReport Command: Test chart slack report schedule + """ + # setup screenshot mock + screenshot_mock.return_value = SCREENSHOT_FILE + + channel_id = "slack_channel_id" + + get_channels_with_search_mock.return_value = channel_id + + with freeze_time("2020-01-01T00:00:00Z"): + with patch.object(current_app.config["STATS_LOGGER"], "gauge") as statsd_mock: + AsyncExecuteReportScheduleCommand( + TEST_ID, create_report_slack_chart.id, datetime.utcnow() + ).run() + + assert ( + slack_client_mock.return_value.files_upload_v2.call_args[1]["channel"] + == channel_id + ) + assert ( + slack_client_mock.return_value.files_upload_v2.call_args[1]["file"] + == SCREENSHOT_FILE + ) + + # Assert that the report recipients were updated + assert create_report_slack_chart.recipients[ + 0 + ].recipient_config_json == json.dumps({"target": channel_id}) + assert ( + 
create_report_slack_chart.recipients[0].type + == ReportRecipientType.SLACKV2 + ) + + # Assert logs are correct + assert_log(ReportState.SUCCESS) + # this will send a warning + assert statsd_mock.call_args_list[0] == call( + "reports.slack.send.warning", 1 + ) + assert statsd_mock.call_args_list[1] == call("reports.slack.send.ok", 1) + + @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_slack_chartv2" ) @@ -1129,11 +1284,9 @@ def test_slack_chart_report_schedule_v2( """ # setup screenshot mock screenshot_mock.return_value = SCREENSHOT_FILE - notification_targets = get_target_from_report_schedule(create_report_slack_chart) - - channel_id = notification_targets[0] + channel_id = "slack_channel_id" - get_channels_with_search_mock.return_value = {} + get_channels_with_search_mock.return_value = channel_id with freeze_time("2020-01-01T00:00:00Z"): with patch.object(current_app.config["STATS_LOGGER"], "gauge") as statsd_mock: @@ -1515,6 +1668,7 @@ def test_email_dashboard_report_fails_uncaught_exception( screenshot_mock.return_value = SCREENSHOT_FILE email_mock.side_effect = Exception("Uncaught exception") + app.config["EMAIL_REPORTS_CTA"] = "Call to action" with pytest.raises(Exception): AsyncExecuteReportScheduleCommand( @@ -1522,6 +1676,11 @@ def test_email_dashboard_report_fails_uncaught_exception( ).run() assert_log(ReportState.ERROR, error_message="Uncaught exception") + assert ( + '<a href="http://0.0.0.0:8080/superset/dashboard/' + f"{create_report_email_dashboard.dashboard.uuid}/" + '?force=false">Call to action</a>' in email_mock.call_args[0][2] + ) @pytest.mark.usefixtures( diff --git a/tests/integration_tests/reports/utils.py b/tests/integration_tests/reports/utils.py index c963efc840a3..6cd90b769df8 100644 --- a/tests/integration_tests/reports/utils.py +++ b/tests/integration_tests/reports/utils.py @@ -116,6 +116,8 @@ def create_report_notification( extra: Optional[dict[str, Any]] = None, force_screenshot: bool = False, 
owners: Optional[list[User]] = None, + ccTarget: Optional[str] = None, + bccTarget: Optional[str] = None, ) -> ReportSchedule: if not owners: owners = [ @@ -138,7 +140,9 @@ def create_report_notification( else: recipient = ReportRecipients( type=ReportRecipientType.EMAIL, - recipient_config_json=json.dumps({"target": email_target}), + recipient_config_json=json.dumps( + {"target": email_target, "ccTarget": ccTarget, "bccTarget": bccTarget} + ), ) if name is None: diff --git a/tests/integration_tests/security/api_tests.py b/tests/integration_tests/security/api_tests.py index 1fff519bd430..67aecd73b091 100644 --- a/tests/integration_tests/security/api_tests.py +++ b/tests/integration_tests/security/api_tests.py @@ -26,6 +26,7 @@ from superset.models.dashboard import Dashboard from superset.utils.urls import get_url_host from superset.utils import json +from tests.integration_tests.conftest import with_config from tests.integration_tests.base_tests import SupersetTestCase from tests.integration_tests.constants import ADMIN_USERNAME, GAMMA_USERNAME from tests.integration_tests.fixtures.birth_names_dashboard import ( @@ -80,7 +81,7 @@ def test_login(self): class TestSecurityGuestTokenApi(SupersetTestCase): - uri = "api/v1/security/guest_token/" # noqa: F541 + uri = "api/v1/security/guest_token/" def test_post_guest_token_unauthenticated(self): """ @@ -135,3 +136,143 @@ def test_post_guest_token_bad_resources(self): ) self.assert400(response) + + +@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices", scope="class") +class TestSecurityGuestTokenApiTokenValidator(SupersetTestCase): + uri = "api/v1/security/guest_token/" + + def _get_guest_token_with_rls(self, rls_rule): + dash = db.session.query(Dashboard).filter_by(slug="births").first() + self.embedded = EmbeddedDashboardDAO.upsert(dash, []) + self.login(ADMIN_USERNAME) + user = {"username": "bob", "first_name": "Bob", "last_name": "Also Bob"} + resource = {"type": "dashboard", "id": str(self.embedded.uuid)} 
+ params = {"user": user, "resources": [resource], "rls": [rls_rule]} + return self.client.post( + self.uri, data=json.dumps(params), content_type="application/json" + ) + + @with_config({"GUEST_TOKEN_VALIDATOR_HOOK": lambda x: False}) + def test_guest_token_validator_hook_denied(self): + """ + Security API: Test False case from validator - should be 400 + """ + rls_rule = {"dataset": 1, "clause": "tenant_id=123"} + self.assert400(self._get_guest_token_with_rls(rls_rule)) + + @with_config({"GUEST_TOKEN_VALIDATOR_HOOK": lambda x: True}) + def test_guest_token_validator_hook_denied_allowed(self): + """ + Security API: Test True case from validator - should be 200 + """ + rls_rule = {"dataset": 1, "clause": "tenant_id=123"} + self.assert200(self._get_guest_token_with_rls(rls_rule)) + + @with_config({"GUEST_TOKEN_VALIDATOR_HOOK": 123}) + def test_guest_validator_hook_not_callable(self): + """ + Security API: Test validator throws exception when isn't callable + should be 500 + """ + rls_rule = {"dataset": 1, "clause": "tenant_id=123"} + self.assert500(self._get_guest_token_with_rls(rls_rule)) + + @with_config({"GUEST_TOKEN_VALIDATOR_HOOK": lambda x: [][0]}) + def test_guest_validator_hook_throws_exception(self): + """ + Security API: Test validator throws exception - should be 500 + """ + rls_rule = {"dataset": 1, "clause": "tenant_id=123"} + self.assert500(self._get_guest_token_with_rls(rls_rule)) + + @with_config( + { + "GUEST_TOKEN_VALIDATOR_HOOK": lambda x: len(x["rls"]) == 1 + and "tenant_id=" in x["rls"][0]["clause"] + } + ) + def test_guest_validator_hook_real_world_example_positive(self): + """ + Security API: Test validator real world example, check tenant_id is in clause + Positive case + """ + # Test validator real world example, check tenant_id is in clause + # Should be 200. 
+ rls_rule = {"dataset": 1, "clause": "tenant_id=123"} + self.assert200(self._get_guest_token_with_rls(rls_rule)) + + @with_config( + { + "GUEST_TOKEN_VALIDATOR_HOOK": lambda x: len(x["rls"]) == 1 + and "tenant_id=" in x["rls"][0]["clause"] + } + ) + def test_guest_validator_hook_real_world_example_negative(self): + """ + Security API: Test validator real world example, check tenant_id is in clause + Negative case + """ + rls_rule = {} + self.assert400(self._get_guest_token_with_rls(rls_rule)) + + +class TestSecurityRolesApi(SupersetTestCase): + uri = "api/v1/security/roles/" # noqa: F541 + + @with_config({"FAB_ADD_SECURITY_API": True}) + def test_get_security_roles_admin(self): + """ + Security API: Admin should be able to get roles + """ + self.login(ADMIN_USERNAME) + response = self.client.get(self.uri) + self.assert200(response) + + @with_config({"FAB_ADD_SECURITY_API": True}) + def test_get_security_roles_gamma(self): + """ + Security API: Gamma should not be able to get roles + """ + self.login(GAMMA_USERNAME) + response = self.client.get(self.uri) + self.assert403(response) + + @with_config({"FAB_ADD_SECURITY_API": True}) + def test_post_security_roles_gamma(self): + """ + Security API: Gamma should not be able to create roles + """ + self.login(GAMMA_USERNAME) + response = self.client.post( + self.uri, + data=json.dumps({"name": "new_role"}), + content_type="application/json", + ) + self.assert403(response) + + @with_config({"FAB_ADD_SECURITY_API": True}) + def test_put_security_roles_gamma(self): + """ + Security API: Gamma shouldnt be able to update roles + """ + self.login(GAMMA_USERNAME) + response = self.client.put( + f"{self.uri}1", + data=json.dumps({"name": "new_role"}), + content_type="application/json", + ) + self.assert403(response) + + @with_config({"FAB_ADD_SECURITY_API": True}) + def test_delete_security_roles_gamma(self): + """ + Security API: Gamma shouldnt be able to delete roles + """ + self.login(GAMMA_USERNAME) + response = 
self.client.delete( + f"{self.uri}1", + data=json.dumps({"name": "new_role"}), + content_type="application/json", + ) + self.assert403(response) diff --git a/tests/integration_tests/security/guest_token_security_tests.py b/tests/integration_tests/security/guest_token_security_tests.py index f4a515bb75c2..b8bab0390949 100644 --- a/tests/integration_tests/security/guest_token_security_tests.py +++ b/tests/integration_tests/security/guest_token_security_tests.py @@ -113,15 +113,29 @@ def setUp(self) -> None: self.authorized_guest = security_manager.get_guest_user_from_token( { "user": {}, - "resources": [{"type": "dashboard", "id": str(self.embedded.uuid)}], + "resources": [ + { + "type": GuestTokenResourceType.DASHBOARD, + "id": str(self.embedded.uuid), + } + ], + "iat": 10, + "exp": 20, + "rls_rules": [], } ) self.unauthorized_guest = security_manager.get_guest_user_from_token( { "user": {}, "resources": [ - {"type": "dashboard", "id": "06383667-3e02-4e5e-843f-44e9c5896b6c"} + { + "type": GuestTokenResourceType.DASHBOARD, + "id": "06383667-3e02-4e5e-843f-44e9c5896b6c", + } ], + "iat": 10, + "exp": 20, + "rls_rules": [], } ) @@ -247,15 +261,29 @@ def setUp(self) -> None: self.authorized_guest = security_manager.get_guest_user_from_token( { "user": {}, - "resources": [{"type": "dashboard", "id": str(self.embedded.uuid)}], + "resources": [ + { + "type": GuestTokenResourceType.DASHBOARD, + "id": str(self.embedded.uuid), + } + ], + "iat": 10, + "exp": 20, + "rls_rules": [], } ) self.unauthorized_guest = security_manager.get_guest_user_from_token( { "user": {}, "resources": [ - {"type": "dashboard", "id": "06383667-3e02-4e5e-843f-44e9c5896b6c"} + { + "type": GuestTokenResourceType.DASHBOARD, + "id": "06383667-3e02-4e5e-843f-44e9c5896b6c", + } ], + "iat": 10, + "exp": 20, + "rls_rules": [], } ) self.chart = self.get_slice("Girls") diff --git a/tests/integration_tests/security/row_level_security_tests.py b/tests/integration_tests/security/row_level_security_tests.py index 
71bb1484e033..ffd38bd53374 100644 --- a/tests/integration_tests/security/row_level_security_tests.py +++ b/tests/integration_tests/security/row_level_security_tests.py @@ -268,7 +268,7 @@ def test_rls_filter_alters_gamma_birth_names_query(self): # establish that the filters are grouped together correctly with # ANDs, ORs and parens in the correct place assert ( - "WHERE\n (\n (\n name LIKE 'A%' OR name LIKE 'B%'\n ) OR (\n name LIKE 'Q%'\n )\n )\n AND (\n gender = 'boy'\n )" + "WHERE ((name like 'A%' or name like 'B%') OR (name like 'Q%')) AND (gender = 'boy');" in sql ) @@ -646,8 +646,15 @@ def guest_user_with_rls(self, rules: Optional[list[Any]] = None) -> GuestUser: return security_manager.get_guest_user_from_token( { "user": {}, - "resources": [{"type": GuestTokenResourceType.DASHBOARD.value}], + "resources": [ + { + "type": GuestTokenResourceType.DASHBOARD, + "id": "06383667-3e02-4e5e-843f-44e9c5896b6c", + } + ], "rls_rules": rules, + "iat": 10, + "exp": 20, } ) diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py index 5b8e4f2ae00e..bd76448d4899 100644 --- a/tests/integration_tests/security_tests.py +++ b/tests/integration_tests/security_tests.py @@ -1633,7 +1633,10 @@ def test_raise_for_access_datasource( @patch("superset.security.SupersetSecurityManager.can_access") def test_raise_for_access_query(self, mock_can_access, mock_is_owner): query = Mock( - database=get_example_database(), schema="bar", sql="SELECT * FROM foo" + database=get_example_database(), + schema="bar", + sql="SELECT * FROM foo", + catalog=None, ) mock_can_access.return_value = True diff --git a/tests/integration_tests/sql_lab/api_tests.py b/tests/integration_tests/sql_lab/api_tests.py index 282a0c993e26..5280fd39d6a8 100644 --- a/tests/integration_tests/sql_lab/api_tests.py +++ b/tests/integration_tests/sql_lab/api_tests.py @@ -281,7 +281,7 @@ def test_format_sql_request(self): "/api/v1/sqllab/format_sql/", json=data, ) - success_resp = 
{"result": "SELECT\n 1\nFROM my_table"} + success_resp = {"result": "SELECT 1\nFROM my_table"} resp_data = json.loads(rv.data.decode("utf-8")) self.assertDictEqual(resp_data, success_resp) self.assertEqual(rv.status_code, 200) diff --git a/tests/integration_tests/sqla_models_tests.py b/tests/integration_tests/sqla_models_tests.py index 86fffee1ec89..86209f752962 100644 --- a/tests/integration_tests/sqla_models_tests.py +++ b/tests/integration_tests/sqla_models_tests.py @@ -15,11 +15,13 @@ # specific language governing permissions and limitations # under the License. # isort:skip_file +from __future__ import annotations + import re from datetime import datetime -from typing import Any, NamedTuple, Optional, Union +from typing import Any, Literal, NamedTuple, Optional, Union from re import Pattern -from unittest.mock import patch +from unittest.mock import Mock, patch import pytest import numpy as np @@ -34,7 +36,9 @@ from superset.constants import EMPTY_STRING, NULL_STRING from superset.db_engine_specs.bigquery import BigQueryEngineSpec from superset.db_engine_specs.druid import DruidEngineSpec -from superset.exceptions import QueryObjectValidationError, SupersetSecurityException # noqa: F401 +from superset.exceptions import ( + QueryObjectValidationError, +) # noqa: F401 from superset.models.core import Database from superset.utils.core import ( AdhocMetricExpressionType, @@ -131,74 +135,6 @@ def test_db_column_types(self): col = TableColumn(column_name="foo", type=str_type, table=tbl, is_dttm=True) self.assertTrue(col.is_temporal) - @patch("superset.jinja_context.get_user_id", return_value=1) - @patch("superset.jinja_context.get_username", return_value="abc") - @patch("superset.jinja_context.get_user_email", return_value="abc@test.com") - def test_extra_cache_keys(self, mock_user_email, mock_username, mock_user_id): - base_query_obj = { - "granularity": None, - "from_dttm": None, - "to_dttm": None, - "groupby": ["id", "username", "email"], - "metrics": [], - 
"is_timeseries": False, - "filter": [], - } - - # Table with Jinja callable. - table1 = SqlaTable( - table_name="test_has_extra_cache_keys_table", - sql=""" - SELECT - '{{ current_user_id() }}' as id, - '{{ current_username() }}' as username, - '{{ current_user_email() }}' as email - """, - database=get_example_database(), - ) - - query_obj = dict(**base_query_obj, extras={}) - extra_cache_keys = table1.get_extra_cache_keys(query_obj) - self.assertTrue(table1.has_extra_cache_key_calls(query_obj)) - assert extra_cache_keys == [1, "abc", "abc@test.com"] - - # Table with Jinja callable disabled. - table2 = SqlaTable( - table_name="test_has_extra_cache_keys_disabled_table", - sql=""" - SELECT - '{{ current_user_id(False) }}' as id, - '{{ current_username(False) }}' as username, - '{{ current_user_email(False) }}' as email, - """, - database=get_example_database(), - ) - query_obj = dict(**base_query_obj, extras={}) - extra_cache_keys = table2.get_extra_cache_keys(query_obj) - self.assertTrue(table2.has_extra_cache_key_calls(query_obj)) - self.assertListEqual(extra_cache_keys, []) - - # Table with no Jinja callable. - query = "SELECT 'abc' as user" - table3 = SqlaTable( - table_name="test_has_no_extra_cache_keys_table", - sql=query, - database=get_example_database(), - ) - - query_obj = dict(**base_query_obj, extras={"where": "(user != 'abc')"}) - extra_cache_keys = table3.get_extra_cache_keys(query_obj) - self.assertFalse(table3.has_extra_cache_key_calls(query_obj)) - self.assertListEqual(extra_cache_keys, []) - - # With Jinja callable in SQL expression. 
- query_obj = dict( - **base_query_obj, extras={"where": "(user != '{{ current_username() }}')"} - ) - extra_cache_keys = table3.get_extra_cache_keys(query_obj) - self.assertTrue(table3.has_extra_cache_key_calls(query_obj)) - assert extra_cache_keys == ["abc"] - @patch("superset.jinja_context.get_username", return_value="abc") def test_jinja_metrics_and_calc_columns(self, mock_username): base_query_obj = { @@ -624,6 +560,32 @@ def test_values_for_column_on_text_column(text_column_table): assert len(with_null) == 8 +def test_values_for_column_on_text_column_with_rls(text_column_table): + with patch.object( + text_column_table, + "get_sqla_row_level_filters", + return_value=[ + TextClause("foo = 'foo'"), + ], + ): + with_rls = text_column_table.values_for_column(column_name="foo", limit=10000) + assert with_rls == ["foo"] + assert len(with_rls) == 1 + + +def test_values_for_column_on_text_column_with_rls_no_values(text_column_table): + with patch.object( + text_column_table, + "get_sqla_row_level_filters", + return_value=[ + TextClause("foo = 'bar'"), + ], + ): + with_rls = text_column_table.values_for_column(column_name="foo", limit=10000) + assert with_rls == [] + assert len(with_rls) == 0 + + def test_filter_on_text_column(text_column_table): table = text_column_table # null value should be replaced @@ -801,7 +763,7 @@ def test_none_operand_in_filter(login_as_admin, physical_dataset): { "operator": FilterOperator.NOT_EQUALS.value, "count": 0, - "sql_should_contain": "NOT COL4 IS NULL", + "sql_should_contain": "COL4 IS NOT NULL", }, ] for expected in expected_results: @@ -833,6 +795,227 @@ def test_none_operand_in_filter(login_as_admin, physical_dataset): ) +@pytest.mark.usefixtures("app_context") +@pytest.mark.parametrize( + "table_name,sql,expected_cache_keys,has_extra_cache_keys", + [ + ( + "test_has_extra_cache_keys_table", + """ + SELECT + '{{ current_user_id() }}' as id, + '{{ current_username() }}' as username, + '{{ current_user_email() }}' as email + """, 
+ {1, "abc", "abc@test.com"}, + True, + ), + ( + "test_has_extra_cache_keys_table_with_set", + """ + {% set user_email = current_user_email() %} + SELECT + '{{ current_user_id() }}' as id, + '{{ current_username() }}' as username, + '{{ user_email }}' as email + """, + {1, "abc", "abc@test.com"}, + True, + ), + ( + "test_has_extra_cache_keys_table_with_se_multiple", + """ + {% set user_conditional_id = current_user_email() and current_user_id() %} + SELECT + '{{ user_conditional_id }}' as conditional + """, + {1, "abc@test.com"}, + True, + ), + ( + "test_has_extra_cache_keys_disabled_table", + """ + SELECT + '{{ current_user_id(False) }}' as id, + '{{ current_username(False) }}' as username, + '{{ current_user_email(False) }}' as email + """, + [], + True, + ), + ("test_has_no_extra_cache_keys_table", "SELECT 'abc' as user", [], False), + ], +) +@patch("superset.jinja_context.get_user_id", return_value=1) +@patch("superset.jinja_context.get_username", return_value="abc") +@patch("superset.jinja_context.get_user_email", return_value="abc@test.com") +def test_extra_cache_keys( + mock_user_email, + mock_username, + mock_user_id, + table_name, + sql, + expected_cache_keys, + has_extra_cache_keys, +): + table = SqlaTable( + table_name=table_name, + sql=sql, + database=get_example_database(), + ) + base_query_obj = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": ["id", "username", "email"], + "metrics": [], + "is_timeseries": False, + "filter": [], + } + + query_obj = dict(**base_query_obj, extras={}) + + extra_cache_keys = table.get_extra_cache_keys(query_obj) + assert table.has_extra_cache_key_calls(query_obj) == has_extra_cache_keys + assert set(extra_cache_keys) == set(expected_cache_keys) + + +@pytest.mark.usefixtures("app_context") +@pytest.mark.parametrize( + "sql_expression,expected_cache_keys,has_extra_cache_keys", + [ + ("(user != '{{ current_username() }}')", ["abc"], True), + ("(user != 'abc')", [], False), + ], +) 
+@patch("superset.jinja_context.get_user_id", return_value=1) +@patch("superset.jinja_context.get_username", return_value="abc") +@patch("superset.jinja_context.get_user_email", return_value="abc@test.com") +def test_extra_cache_keys_in_sql_expression( + mock_user_email, + mock_username, + mock_user_id, + sql_expression, + expected_cache_keys, + has_extra_cache_keys, +): + table = SqlaTable( + table_name="test_has_no_extra_cache_keys_table", + sql="SELECT 'abc' as user", + database=get_example_database(), + ) + base_query_obj = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": ["id", "username", "email"], + "metrics": [], + "is_timeseries": False, + "filter": [], + } + + query_obj = dict(**base_query_obj, extras={"where": sql_expression}) + + extra_cache_keys = table.get_extra_cache_keys(query_obj) + assert table.has_extra_cache_key_calls(query_obj) == has_extra_cache_keys + assert extra_cache_keys == expected_cache_keys + + +@pytest.mark.usefixtures("app_context") +@pytest.mark.parametrize( + "sql_expression,expected_cache_keys,has_extra_cache_keys,item_type", + [ + ("'{{ current_username() }}'", ["abc"], True, "columns"), + ("(user != 'abc')", [], False, "columns"), + ("{{ current_user_id() }}", [1], True, "metrics"), + ("COUNT(*)", [], False, "metrics"), + ], +) +@patch("superset.jinja_context.get_user_id", return_value=1) +@patch("superset.jinja_context.get_username", return_value="abc") +def test_extra_cache_keys_in_adhoc_metrics_and_columns( + mock_username: Mock, + mock_user_id: Mock, + sql_expression: str, + expected_cache_keys: list[str | None], + has_extra_cache_keys: bool, + item_type: Literal["columns", "metrics"], +): + table = SqlaTable( + table_name="test_has_no_extra_cache_keys_table", + sql="SELECT 'abc' as user", + database=get_example_database(), + ) + base_query_obj: dict[str, Any] = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": [], + "metrics": [], + "columns": [], + "is_timeseries": 
False, + "filter": [], + } + + items: dict[str, Any] = { + item_type: [ + { + "label": None, + "expressionType": "SQL", + "sqlExpression": sql_expression, + } + ], + } + + query_obj = {**base_query_obj, **items} + + extra_cache_keys = table.get_extra_cache_keys(query_obj) + assert table.has_extra_cache_key_calls(query_obj) == has_extra_cache_keys + assert extra_cache_keys == expected_cache_keys + + +@pytest.mark.usefixtures("app_context") +@patch("superset.jinja_context.get_user_id", return_value=1) +@patch("superset.jinja_context.get_username", return_value="abc") +def test_extra_cache_keys_in_dataset_metrics_and_columns( + mock_username: Mock, + mock_user_id: Mock, +): + table = SqlaTable( + table_name="test_has_no_extra_cache_keys_table", + sql="SELECT 'abc' as user", + database=get_example_database(), + columns=[ + TableColumn(column_name="user", type="VARCHAR(255)"), + TableColumn( + column_name="username", + type="VARCHAR(255)", + expression="{{ current_username() }}", + ), + ], + metrics=[ + SqlMetric( + metric_name="variable_profit", + expression="SUM(price) * {{ url_param('multiplier') }}", + ), + ], + ) + query_obj: dict[str, Any] = { + "granularity": None, + "from_dttm": None, + "to_dttm": None, + "groupby": [], + "columns": ["username"], + "metrics": ["variable_profit"], + "is_timeseries": False, + "filter": [], + } + + extra_cache_keys = table.get_extra_cache_keys(query_obj) + assert table.has_extra_cache_key_calls(query_obj) is True + assert set(extra_cache_keys) == {"abc", None} + + @pytest.mark.usefixtures("app_context") @pytest.mark.parametrize( "row,dimension,result", diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py index 829854d96681..0a1346e79407 100644 --- a/tests/integration_tests/sqllab_tests.py +++ b/tests/integration_tests/sqllab_tests.py @@ -17,7 +17,6 @@ # isort:skip_file """Unit tests for Sql Lab""" -from datetime import datetime from textwrap import dedent import pytest @@ -26,7 +25,6 @@ 
from unittest import mock import prison -from freezegun import freeze_time from superset import db, security_manager from superset.connectors.sqla.models import SqlaTable # noqa: F401 from superset.db_engine_specs import BaseEngineSpec @@ -34,7 +32,7 @@ from superset.db_engine_specs.presto import PrestoEngineSpec from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import SupersetErrorException -from superset.models.sql_lab import Query, SavedQuery +from superset.models.sql_lab import Query from superset.result_set import SupersetResultSet from superset.sqllab.limiting_factor import LimitingFactor from superset.sql_lab import ( @@ -137,7 +135,11 @@ def test_sql_json_dml_disallowed(self): assert data == { "errors": [ { - "message": "Only SELECT statements are allowed against this database.", + "message": ( + "This database does not allow for DDL/DML, and the query " + "could not be parsed to confirm it is a read-only query. Please " + "contact your administrator for more assistance." 
+ ), "error_type": SupersetErrorType.DML_NOT_ALLOWED_ERROR, "level": ErrorLevel.ERROR, "extra": { @@ -152,34 +154,6 @@ def test_sql_json_dml_disallowed(self): ] } - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") - def test_sql_json_to_saved_query_info(self): - """ - SQLLab: Test SQLLab query execution info propagation to saved queries - """ - self.login(ADMIN_USERNAME) - - sql_statement = "SELECT * FROM birth_names LIMIT 10" - examples_db_id = get_example_database().id - saved_query = SavedQuery(db_id=examples_db_id, sql=sql_statement) - db.session.add(saved_query) - db.session.commit() - - with freeze_time(datetime.now().isoformat(timespec="seconds")): - self.run_sql(sql_statement, "1") - saved_query_ = ( - db.session.query(SavedQuery) - .filter( - SavedQuery.db_id == examples_db_id, SavedQuery.sql == sql_statement - ) - .one_or_none() - ) - assert saved_query_.rows is not None - assert saved_query_.last_run == datetime.now() - # Rollback changes - db.session.delete(saved_query_) - db.session.commit() - @parameterized.expand([CtasMethod.TABLE, CtasMethod.VIEW]) @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def test_sql_json_cta_dynamic_db(self, ctas_method): diff --git a/tests/integration_tests/superset_test_config.py b/tests/integration_tests/superset_test_config.py index 0935714c5427..2ffdf994f7a3 100644 --- a/tests/integration_tests/superset_test_config.py +++ b/tests/integration_tests/superset_test_config.py @@ -59,9 +59,6 @@ "removed in a future version of Superset." ) -if make_url(SQLALCHEMY_DATABASE_URI).get_backend_name() in ("postgresql", "mysql"): - SQLALCHEMY_ENGINE_OPTIONS["isolation_level"] = "READ COMMITTED" # noqa: F405 - # Speeding up the tests.integration_tests. 
PRESTO_POLL_INTERVAL = 0.1 HIVE_POLL_INTERVAL = 0.1 @@ -138,6 +135,8 @@ def GET_FEATURE_FLAGS_FUNC(ff): ALERT_REPORTS_QUERY_EXECUTION_MAX_TRIES = 3 +FAB_ADD_SECURITY_API = True + class CeleryConfig: broker_url = f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}" diff --git a/tests/integration_tests/superset_test_config_thumbnails.py b/tests/integration_tests/superset_test_config_thumbnails.py index 8fda61e804a3..3b2eb9193958 100644 --- a/tests/integration_tests/superset_test_config_thumbnails.py +++ b/tests/integration_tests/superset_test_config_thumbnails.py @@ -41,9 +41,6 @@ in a future version of Superset." ) -if make_url(SQLALCHEMY_DATABASE_URI).get_backend_name() in ("postgresql", "mysql"): - SQLALCHEMY_ENGINE_OPTIONS["isolation_level"] = "READ COMMITTED" # noqa: F405 - SQL_SELECT_AS_CTA = True SQL_MAX_ROW = 666 diff --git a/tests/integration_tests/tasks/test_cache.py b/tests/integration_tests/tasks/test_cache.py index 943b444f7693..368cb1ebf0af 100644 --- a/tests/integration_tests/tasks/test_cache.py +++ b/tests/integration_tests/tasks/test_cache.py @@ -22,16 +22,32 @@ @pytest.mark.parametrize( - "base_url", + "base_url, expected_referer", [ - "http://base-url", - "http://base-url/", + ("http://base-url", None), + ("http://base-url/", None), + ("https://base-url", "https://base-url/api/v1/chart/warm_up_cache"), + ("https://base-url/", "https://base-url/api/v1/chart/warm_up_cache"), + ], + ids=[ + "Without trailing slash (HTTP)", + "With trailing slash (HTTP)", + "Without trailing slash (HTTPS)", + "With trailing slash (HTTPS)", ], - ids=["Without trailing slash", "With trailing slash"], ) +@mock.patch("superset.tasks.cache.fetch_csrf_token") @mock.patch("superset.tasks.cache.request.Request") @mock.patch("superset.tasks.cache.request.urlopen") -def test_fetch_url(mock_urlopen, mock_request_cls, base_url): +@mock.patch("superset.tasks.cache.is_secure_url") +def test_fetch_url( + mock_is_secure_url, + mock_urlopen, + mock_request_cls, + 
mock_fetch_csrf_token, + base_url, + expected_referer, +): from superset.tasks.cache import fetch_url mock_request = mock.MagicMock() @@ -40,19 +56,40 @@ def test_fetch_url(mock_urlopen, mock_request_cls, base_url): mock_urlopen.return_value = mock.MagicMock() mock_urlopen.return_value.code = 200 + # Mock the URL validation to return True for HTTPS and False for HTTP + mock_is_secure_url.return_value = base_url.startswith("https") + + initial_headers = {"Cookie": "cookie", "key": "value"} + csrf_headers = initial_headers | {"X-CSRF-Token": "csrf_token"} + + # Conditionally add the Referer header and assert its presence + if expected_referer: + csrf_headers = csrf_headers | {"Referer": expected_referer} + assert csrf_headers["Referer"] == expected_referer + + mock_fetch_csrf_token.return_value = csrf_headers + app.config["WEBDRIVER_BASEURL"] = base_url - headers = {"key": "value"} data = "data" data_encoded = b"data" - result = fetch_url(data, headers) + result = fetch_url(data, initial_headers) + + expected_url = ( + f"{base_url}/api/v1/chart/warm_up_cache" + if not base_url.endswith("/") + else f"{base_url}api/v1/chart/warm_up_cache" + ) + + mock_fetch_csrf_token.assert_called_once_with(initial_headers) - assert data == result["success"] mock_request_cls.assert_called_once_with( - "http://base-url/superset/warm_up_cache/", + expected_url, # Use the dynamic URL based on base_url data=data_encoded, - headers=headers, + headers=csrf_headers, method="PUT", ) # assert the same Request object is used mock_urlopen.assert_called_once_with(mock_request, timeout=mock.ANY) + + assert data == result["success"] diff --git a/tests/integration_tests/tasks/test_utils.py b/tests/integration_tests/tasks/test_utils.py new file mode 100644 index 000000000000..29e5f38319cb --- /dev/null +++ b/tests/integration_tests/tasks/test_utils.py @@ -0,0 +1,77 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from unittest import mock + +import pytest + +from tests.integration_tests.test_app import app + + +@pytest.mark.parametrize( + "base_url", + [ + "http://base-url", + "http://base-url/", + "https://base-url", + "https://base-url/", + ], + ids=[ + "Without trailing slash (HTTP)", + "With trailing slash (HTTP)", + "Without trailing slash (HTTPS)", + "With trailing slash (HTTPS)", + ], +) +@mock.patch("superset.tasks.cache.request.Request") +@mock.patch("superset.tasks.cache.request.urlopen") +def test_fetch_csrf_token(mock_urlopen, mock_request_cls, base_url, app_context): + from superset.tasks.utils import fetch_csrf_token + + mock_request = mock.MagicMock() + mock_request_cls.return_value = mock_request + + mock_response = mock.MagicMock() + mock_urlopen.return_value.__enter__.return_value = mock_response + + mock_response.status = 200 + mock_response.read.return_value = b'{"result": "csrf_token"}' + mock_response.headers.get_all.return_value = [ + "session=new_session_cookie", + "async-token=websocket_cookie", + ] + + app.config["WEBDRIVER_BASEURL"] = base_url + headers = {"Cookie": "original_session_cookie"} + + result_headers = fetch_csrf_token(headers) + + expected_url = ( + f"{base_url}/api/v1/security/csrf_token/" + if not base_url.endswith("/") + else 
f"{base_url}api/v1/security/csrf_token/" + ) + + mock_request_cls.assert_called_with( + expected_url, + headers=headers, + method="GET", + ) + + assert result_headers["X-CSRF-Token"] == "csrf_token" + assert result_headers["Cookie"] == "session=new_session_cookie" # Updated assertion + # assert the same Request object is used + mock_urlopen.assert_called_once_with(mock_request, timeout=mock.ANY) diff --git a/tests/integration_tests/users/api_tests.py b/tests/integration_tests/users/api_tests.py index 4153a5bd08fb..7894b1856c1e 100644 --- a/tests/integration_tests/users/api_tests.py +++ b/tests/integration_tests/users/api_tests.py @@ -22,7 +22,7 @@ from superset import security_manager from superset.utils import json, slack # noqa: F401 from tests.integration_tests.base_tests import SupersetTestCase -from tests.integration_tests.conftest import with_config +from tests.integration_tests.conftest import with_config, with_feature_flags from tests.integration_tests.constants import ADMIN_USERNAME meUri = "/api/v1/me/" @@ -81,7 +81,8 @@ def test_avatar_valid_user_no_avatar(self): response = self.client.get("/api/v1/user/1/avatar.png", follow_redirects=False) assert response.status_code == 204 - @with_config({"SLACK_API_TOKEN": "dummy", "SLACK_ENABLE_AVATARS": True}) + @with_config({"SLACK_API_TOKEN": "dummy"}) + @with_feature_flags(SLACK_ENABLE_AVATARS=True) @patch("superset.views.users.api.get_user_avatar", return_value=AVATAR_URL) def test_avatar_with_valid_user(self, mock): self.login(ADMIN_USERNAME) diff --git a/tests/unit_tests/charts/commands/importers/v1/import_test.py b/tests/unit_tests/charts/commands/importers/v1/import_test.py index ddeb2c7431db..a87b920ef227 100644 --- a/tests/unit_tests/charts/commands/importers/v1/import_test.py +++ b/tests/unit_tests/charts/commands/importers/v1/import_test.py @@ -76,7 +76,9 @@ def test_import_chart(mocker: MockerFixture, session_with_schema: Session) -> No Test importing a chart. 
""" - mocker.patch.object(security_manager, "can_access", return_value=True) + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=True + ) config = copy.deepcopy(chart_config) config["datasource_id"] = 1 @@ -89,7 +91,7 @@ def test_import_chart(mocker: MockerFixture, session_with_schema: Session) -> No assert chart.external_url is None # Assert that the can write to chart was checked - security_manager.can_access.assert_called_once_with("can_write", "Chart") + mock_can_access.assert_called_once_with("can_write", "Chart") def test_import_chart_managed_externally( @@ -98,7 +100,9 @@ def test_import_chart_managed_externally( """ Test importing a chart that is managed externally. """ - mocker.patch.object(security_manager, "can_access", return_value=True) + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=True + ) config = copy.deepcopy(chart_config) config["datasource_id"] = 1 @@ -111,7 +115,7 @@ def test_import_chart_managed_externally( assert chart.external_url == "https://example.org/my_chart" # Assert that the can write to chart was checked - security_manager.can_access.assert_called_once_with("can_write", "Chart") + mock_can_access.assert_called_once_with("can_write", "Chart") def test_import_chart_without_permission( @@ -121,7 +125,9 @@ def test_import_chart_without_permission( """ Test importing a chart when a user doesn't have permissions to create. 
""" - mocker.patch.object(security_manager, "can_access", return_value=False) + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=False + ) config = copy.deepcopy(chart_config) config["datasource_id"] = 1 @@ -134,7 +140,7 @@ def test_import_chart_without_permission( == "Chart doesn't exist and user doesn't have permission to create charts" ) # Assert that the can write to chart was checked - security_manager.can_access.assert_called_once_with("can_write", "Chart") + mock_can_access.assert_called_once_with("can_write", "Chart") def test_filter_chart_annotations(session: Session) -> None: @@ -162,8 +168,53 @@ def test_import_existing_chart_without_permission( """ Test importing a chart when a user doesn't have permissions to modify. """ - mocker.patch.object(security_manager, "can_access", return_value=True) - mocker.patch.object(security_manager, "can_access_chart", return_value=False) + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=True + ) + mock_can_access_chart = mocker.patch.object( + security_manager, "can_access_chart", return_value=False + ) + + slice = ( + session_with_data.query(Slice) + .filter(Slice.uuid == chart_config["uuid"]) + .one_or_none() + ) + + user = User( + first_name="Alice", + last_name="Doe", + email="adoe@example.org", + username="admin", + roles=[Role(name="Admin")], + ) + + with override_user(user): + with pytest.raises(ImportFailedError) as excinfo: + import_chart(chart_config, overwrite=True) + assert ( + str(excinfo.value) + == "A chart already exists and user doesn't have permissions to overwrite it" # noqa: E501 + ) + + # Assert that the can write to chart was checked + mock_can_access.assert_called_once_with("can_write", "Chart") + mock_can_access_chart.assert_called_once_with(slice) + + +def test_import_existing_chart_without_owner_permission( + mocker: MockerFixture, + session_with_data: Session, +) -> None: + """ + Test importing a chart when a user 
doesn't have permissions to modify. + """ + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=True + ) + mock_can_access_chart = mocker.patch.object( + security_manager, "can_access_chart", return_value=True + ) slice = ( session_with_data.query(Slice) @@ -171,7 +222,15 @@ def test_import_existing_chart_without_permission( .one_or_none() ) - with override_user("admin"): + user = User( + first_name="Alice", + last_name="Doe", + email="adoe@example.org", + username="admin", + roles=[Role(name="Gamma")], + ) + + with override_user(user): with pytest.raises(ImportFailedError) as excinfo: import_chart(chart_config, overwrite=True) assert ( @@ -180,8 +239,8 @@ def test_import_existing_chart_without_permission( ) # Assert that the can write to chart was checked - security_manager.can_access.assert_called_once_with("can_write", "Chart") - security_manager.can_access_chart.assert_called_once_with(slice) + mock_can_access.assert_called_once_with("can_write", "Chart") + mock_can_access_chart.assert_called_once_with(slice) def test_import_existing_chart_with_permission( @@ -191,8 +250,12 @@ def test_import_existing_chart_with_permission( """ Test importing a chart that exists when a user has access permission to that chart. 
""" - mocker.patch.object(security_manager, "can_access", return_value=True) - mocker.patch.object(security_manager, "can_access_chart", return_value=True) + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=True + ) + mock_can_access_chart = mocker.patch.object( + security_manager, "can_access_chart", return_value=True + ) admin = User( first_name="Alice", @@ -215,5 +278,5 @@ def test_import_existing_chart_with_permission( with override_user(admin): import_chart(config, overwrite=True) # Assert that the can write to chart was checked - security_manager.can_access.assert_called_once_with("can_write", "Chart") - security_manager.can_access_chart.assert_called_once_with(slice) + mock_can_access.assert_called_once_with("can_write", "Chart") + mock_can_access_chart.assert_called_once_with(slice) diff --git a/tests/unit_tests/charts/test_post_processing.py b/tests/unit_tests/charts/test_post_processing.py index 790c49451634..181b9f06352a 100644 --- a/tests/unit_tests/charts/test_post_processing.py +++ b/tests/unit_tests/charts/test_post_processing.py @@ -78,10 +78,10 @@ def test_pivot_df_no_cols_no_rows_single_metric(): ) assert ( pivoted.to_markdown() - == f""" + == """ | | ('SUM(num)',) | |:-----------------|----------------:| -| ('{_("Total")} (Sum)',) | 8.06797e+07 | +| ('Total (Sum)',) | 8.06797e+07 | """.strip() ) @@ -407,6 +407,476 @@ def test_pivot_df_single_row_two_metrics(): ) +def test_pivot_df_single_row_null_values(): + """ + Pivot table when a single column and 2 metrics are selected. 
+ """ + df = pd.DataFrame.from_dict( + { + "gender": {0: "girl", 1: "boy"}, + "SUM(num)": {0: 118065, 1: None}, + "MAX(num)": {0: 2588, 1: None}, + } + ) + assert ( + df.to_markdown() + == """ +| | gender | SUM(num) | MAX(num) | +|---:|:---------|-----------:|-----------:| +| 0 | girl | 118065 | 2588 | +| 1 | boy | nan | nan | + """.strip() + ) + + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | +|:----------|----------------:|----------------:| +| ('boy',) | nan | nan | +| ('girl',) | 118065 | 2588 | + """.strip() + ) + + # transpose_pivot + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'boy') | ('SUM(num)', 'girl') | ('MAX(num)', 'boy') | ('MAX(num)', 'girl') | +|:-----------------|----------------------:|-----------------------:|----------------------:|-----------------------:| +| ('Total (Sum)',) | nan | 118065 | nan | 2588 | + """.strip() + ) + + # combine_metrics does nothing in this case + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | +|:----------|----------------:|----------------:| +| ('boy',) | nan | nan | +| ('girl',) | 118065 | 2588 | + """.strip() + ) + + # show totals + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + 
metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | ('Total (Sum)',) | +|:-----------------|----------------:|----------------:|:-------------------| +| ('boy',) | nan | nan | nannan | +| ('girl',) | 118065 | 2588 | 120653.0 | +| ('Total (Sum)',) | 118065 | 2588 | 120653.0 | + """.strip() + ) + + # apply_metrics_on_rows + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=True, + show_columns_total=False, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('{_("Total")} (Sum)',) | +|:-------------------------|-------------------:| +| ('SUM(num)', 'boy') | nan | +| ('SUM(num)', 'girl') | 118065 | +| ('SUM(num)', 'Subtotal') | 118065 | +| ('MAX(num)', 'boy') | nan | +| ('MAX(num)', 'girl') | 2588 | +| ('MAX(num)', 'Subtotal') | 2588 | +| ('{_("Total")} (Sum)', '') | 120653 | + """.strip() + ) + + # apply_metrics_on_rows with combine_metrics + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('{_("Total")} (Sum)',) | +|:---------------------|-------------------:| +| ('boy', 'SUM(num)') | nan | +| ('boy', 'MAX(num)') | nan | +| ('boy', 'Subtotal') | 0 | +| ('girl', 'SUM(num)') | 118065 | +| ('girl', 'MAX(num)') | 2588 | +| ('girl', 'Subtotal') | 120653 | +| ('{_("Total")} (Sum)', '') | 120653 | + """.strip() + ) + + +def test_pivot_df_single_row_null_mix_values_strings(): + """ + Pivot table when a single column and 2 metrics are selected. 
+ """ + df = pd.DataFrame.from_dict( + { + "gender": {0: "girl", 1: "boy"}, + "SUM(num)": {0: 118065, 1: "NULL"}, + "MAX(num)": {0: 2588, 1: None}, + } + ) + assert ( + df.to_markdown() + == """ +| | gender | SUM(num) | MAX(num) | +|---:|:---------|:-----------|-----------:| +| 0 | girl | 118065 | 2588 | +| 1 | boy | NULL | nan | + """.strip() + ) + + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | +|:----------|:----------------|----------------:| +| ('boy',) | NULL | nan | +| ('girl',) | 118065 | 2588 | + """.strip() + ) + + # transpose_pivot + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'boy') | ('SUM(num)', 'girl') | ('MAX(num)', 'boy') | ('MAX(num)', 'girl') | +|:-----------------|:----------------------|-----------------------:|----------------------:|-----------------------:| +| ('Total (Sum)',) | NULL | 118065 | nan | 2588 | + + """.strip() + ) + + # combine_metrics does nothing in this case + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | +|:----------|:----------------|----------------:| +| ('boy',) | NULL | nan | +| ('girl',) | 118065 | 2588 | + """.strip() + ) + + # show totals + pivoted = pivot_df( + df, + rows=["gender"], + 
columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=True, + show_columns_total=True, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | ('Total (Sum)',) | +|:-----------------|:----------------|----------------:|:-------------------| +| ('boy',) | NULL | nan | NULLnan | +| ('girl',) | 118065 | 2588 | 120653.0 | +| ('Total (Sum)',) | 118065.0 | 2588 | 120653.0 | + """.strip() + ) + + # apply_metrics_on_rows with combine_metrics + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=False, + show_columns_total=True, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('Total (Sum)',) | +|:---------------------|:-------------------| +| ('boy', 'SUM(num)') | NULL | +| ('boy', 'MAX(num)') | nan | +| ('girl', 'SUM(num)') | 118065 | +| ('girl', 'MAX(num)') | 2588.0 | + """.strip() + ) + + +def test_pivot_df_single_row_null_mix_values_numbers(): + """ + Pivot table when a single column and 2 metrics are selected. 
+ """ + df = pd.DataFrame.from_dict( + { + "gender": {0: "girl", 1: "boy"}, + "SUM(num)": {0: 118065, 1: 21}, + "MAX(num)": {0: 2588, 1: None}, + } + ) + assert ( + df.to_markdown() + == """ +| | gender | SUM(num) | MAX(num) | +|---:|:---------|-----------:|-----------:| +| 0 | girl | 118065 | 2588 | +| 1 | boy | 21 | nan | + """.strip() + ) + + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | +|:----------|----------------:|----------------:| +| ('boy',) | 21 | nan | +| ('girl',) | 118065 | 2588 | + """.strip() + ) + + # transpose_pivot + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=True, + combine_metrics=False, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)', 'boy') | ('SUM(num)', 'girl') | ('MAX(num)', 'boy') | ('MAX(num)', 'girl') | +|:-----------------|----------------------:|-----------------------:|----------------------:|-----------------------:| +| ('Total (Sum)',) | 21 | 118065 | nan | 2588 | """.strip() + ) + + # combine_metrics does nothing in this case + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=False, + show_columns_total=False, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | +|:----------|----------------:|----------------:| +| ('boy',) | 21 | nan | +| ('girl',) | 118065 | 2588 | + """.strip() + ) + + # show totals + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + 
metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=True, + apply_metrics_on_rows=False, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('SUM(num)',) | ('MAX(num)',) | +|:-----------------|----------------:|----------------:| +| ('boy',) | 21 | nan | +| ('girl',) | 118065 | 2588 | +| ('Total (Sum)',) | 118086 | 2588 | + """.strip() + ) + + # apply_metrics_on_rows + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=False, + show_rows_total=False, + show_columns_total=True, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == """ +| | ('Total (Sum)',) | +|:---------------------|-------------------:| +| ('SUM(num)', 'boy') | 21 | +| ('SUM(num)', 'girl') | 118065 | +| ('MAX(num)', 'boy') | nan | +| ('MAX(num)', 'girl') | 2588 | + """.strip() + ) + + # apply_metrics_on_rows with combine_metrics + pivoted = pivot_df( + df, + rows=["gender"], + columns=[], + metrics=["SUM(num)", "MAX(num)"], + aggfunc="Sum", + transpose_pivot=False, + combine_metrics=True, + show_rows_total=False, + show_columns_total=True, + apply_metrics_on_rows=True, + ) + assert ( + pivoted.to_markdown() + == f""" +| | ('{_("Total")} (Sum)',) | +|:---------------------|-------------------:| +| ('boy', 'SUM(num)') | 21 | +| ('boy', 'MAX(num)') | nan | +| ('girl', 'SUM(num)') | 118065 | +| ('girl', 'MAX(num)') | 2588 | + """.strip() + ) + + def test_pivot_df_complex(): """ Pivot table when a column, rows and 2 metrics are selected. 
@@ -1106,14 +1576,14 @@ def test_pivot_df_complex_null_values(): assert ( pivoted.to_markdown() == """ -| | ('SUM(num)', 'NULL') | ('MAX(num)', 'NULL') | -|:-------------------|-----------------------:|-----------------------:| -| ('boy', 'Edward') | 40685 | 1669 | -| ('boy', 'Tony') | 6438 | 845 | -| ('girl', 'Amy') | 60166 | 3081 | -| ('girl', 'Cindy') | 15367 | 1059 | -| ('girl', 'Dawn') | 16492 | 1618 | -| ('girl', 'Sophia') | 26040 | 3775 | +| | ('SUM(num)', nan) | ('MAX(num)', nan) | +|:-------------------|--------------------:|--------------------:| +| ('boy', 'Edward') | 40685 | 1669 | +| ('boy', 'Tony') | 6438 | 845 | +| ('girl', 'Amy') | 60166 | 3081 | +| ('girl', 'Cindy') | 15367 | 1059 | +| ('girl', 'Dawn') | 16492 | 1618 | +| ('girl', 'Sophia') | 26040 | 3775 | """.strip() ) @@ -1134,9 +1604,9 @@ def test_pivot_df_complex_null_values(): assert ( pivoted.to_markdown() == """ -| | ('SUM(num)', 'boy', 'Edward') | ('SUM(num)', 'boy', 'Tony') | ('SUM(num)', 'girl', 'Amy') | ('SUM(num)', 'girl', 'Cindy') | ('SUM(num)', 'girl', 'Dawn') | ('SUM(num)', 'girl', 'Sophia') | ('MAX(num)', 'boy', 'Edward') | ('MAX(num)', 'boy', 'Tony') | ('MAX(num)', 'girl', 'Amy') | ('MAX(num)', 'girl', 'Cindy') | ('MAX(num)', 'girl', 'Dawn') | ('MAX(num)', 'girl', 'Sophia') | -|:----------|--------------------------------:|------------------------------:|------------------------------:|--------------------------------:|-------------------------------:|---------------------------------:|--------------------------------:|------------------------------:|------------------------------:|--------------------------------:|-------------------------------:|---------------------------------:| -| ('NULL',) | 40685 | 6438 | 60166 | 15367 | 16492 | 26040 | 1669 | 845 | 3081 | 1059 | 1618 | 3775 | +| | ('SUM(num)', 'boy', 'Edward') | ('SUM(num)', 'boy', 'Tony') | ('SUM(num)', 'girl', 'Amy') | ('SUM(num)', 'girl', 'Cindy') | ('SUM(num)', 'girl', 'Dawn') | ('SUM(num)', 'girl', 'Sophia') | 
('MAX(num)', 'boy', 'Edward') | ('MAX(num)', 'boy', 'Tony') | ('MAX(num)', 'girl', 'Amy') | ('MAX(num)', 'girl', 'Cindy') | ('MAX(num)', 'girl', 'Dawn') | ('MAX(num)', 'girl', 'Sophia') | +|:-------|--------------------------------:|------------------------------:|------------------------------:|--------------------------------:|-------------------------------:|---------------------------------:|--------------------------------:|------------------------------:|------------------------------:|--------------------------------:|-------------------------------:|---------------------------------:| +| (nan,) | 40685 | 6438 | 60166 | 15367 | 16492 | 26040 | 1669 | 845 | 3081 | 1059 | 1618 | 3775 | """.strip() ) @@ -1156,14 +1626,14 @@ def test_pivot_df_complex_null_values(): assert ( pivoted.to_markdown() == """ -| | ('NULL', 'SUM(num)') | ('NULL', 'MAX(num)') | -|:-------------------|-----------------------:|-----------------------:| -| ('boy', 'Edward') | 40685 | 1669 | -| ('boy', 'Tony') | 6438 | 845 | -| ('girl', 'Amy') | 60166 | 3081 | -| ('girl', 'Cindy') | 15367 | 1059 | -| ('girl', 'Dawn') | 16492 | 1618 | -| ('girl', 'Sophia') | 26040 | 3775 | +| | (nan, 'SUM(num)') | (nan, 'MAX(num)') | +|:-------------------|--------------------:|--------------------:| +| ('boy', 'Edward') | 40685 | 1669 | +| ('boy', 'Tony') | 6438 | 845 | +| ('girl', 'Amy') | 60166 | 3081 | +| ('girl', 'Cindy') | 15367 | 1059 | +| ('girl', 'Dawn') | 16492 | 1618 | +| ('girl', 'Sophia') | 26040 | 3775 | """.strip() ) @@ -1183,17 +1653,17 @@ def test_pivot_df_complex_null_values(): assert ( pivoted.to_markdown() == """ -| | ('SUM(num)', 'NULL') | ('SUM(num)', 'Subtotal') | ('MAX(num)', 'NULL') | ('MAX(num)', 'Subtotal') | ('Total (Sum)', '') | -|:---------------------|-----------------------:|---------------------------:|-----------------------:|---------------------------:|----------------------:| -| ('boy', 'Edward') | 40685 | 40685 | 1669 | 1669 | 42354 | -| ('boy', 'Tony') | 6438 | 6438 | 
845 | 845 | 7283 | -| ('boy', 'Subtotal') | 47123 | 47123 | 2514 | 2514 | 49637 | -| ('girl', 'Amy') | 60166 | 60166 | 3081 | 3081 | 63247 | -| ('girl', 'Cindy') | 15367 | 15367 | 1059 | 1059 | 16426 | -| ('girl', 'Dawn') | 16492 | 16492 | 1618 | 1618 | 18110 | -| ('girl', 'Sophia') | 26040 | 26040 | 3775 | 3775 | 29815 | -| ('girl', 'Subtotal') | 118065 | 118065 | 9533 | 9533 | 127598 | -| ('Total (Sum)', '') | 165188 | 165188 | 12047 | 12047 | 177235 | +| | ('SUM(num)', nan) | ('SUM(num)', 'Subtotal') | ('MAX(num)', nan) | ('MAX(num)', 'Subtotal') | ('Total (Sum)', '') | +|:---------------------|--------------------:|---------------------------:|--------------------:|---------------------------:|----------------------:| +| ('boy', 'Edward') | 40685 | 40685 | 1669 | 1669 | 42354 | +| ('boy', 'Tony') | 6438 | 6438 | 845 | 845 | 7283 | +| ('boy', 'Subtotal') | 47123 | 47123 | 2514 | 2514 | 49637 | +| ('girl', 'Amy') | 60166 | 60166 | 3081 | 3081 | 63247 | +| ('girl', 'Cindy') | 15367 | 15367 | 1059 | 1059 | 16426 | +| ('girl', 'Dawn') | 16492 | 16492 | 1618 | 1618 | 18110 | +| ('girl', 'Sophia') | 26040 | 26040 | 3775 | 3775 | 29815 | +| ('girl', 'Subtotal') | 118065 | 118065 | 9533 | 9533 | 127598 | +| ('Total (Sum)', '') | 165188 | 165188 | 12047 | 12047 | 177235 | """.strip() ) @@ -1213,20 +1683,20 @@ def test_pivot_df_complex_null_values(): assert ( pivoted.to_markdown() == """ -| | ('NULL',) | -|:-------------------------------|------------:| -| ('SUM(num)', 'boy', 'Edward') | 40685 | -| ('SUM(num)', 'boy', 'Tony') | 6438 | -| ('SUM(num)', 'girl', 'Amy') | 60166 | -| ('SUM(num)', 'girl', 'Cindy') | 15367 | -| ('SUM(num)', 'girl', 'Dawn') | 16492 | -| ('SUM(num)', 'girl', 'Sophia') | 26040 | -| ('MAX(num)', 'boy', 'Edward') | 1669 | -| ('MAX(num)', 'boy', 'Tony') | 845 | -| ('MAX(num)', 'girl', 'Amy') | 3081 | -| ('MAX(num)', 'girl', 'Cindy') | 1059 | -| ('MAX(num)', 'girl', 'Dawn') | 1618 | -| ('MAX(num)', 'girl', 'Sophia') | 3775 | +| | (nan,) | 
+|:-------------------------------|---------:| +| ('SUM(num)', 'boy', 'Edward') | 40685 | +| ('SUM(num)', 'boy', 'Tony') | 6438 | +| ('SUM(num)', 'girl', 'Amy') | 60166 | +| ('SUM(num)', 'girl', 'Cindy') | 15367 | +| ('SUM(num)', 'girl', 'Dawn') | 16492 | +| ('SUM(num)', 'girl', 'Sophia') | 26040 | +| ('MAX(num)', 'boy', 'Edward') | 1669 | +| ('MAX(num)', 'boy', 'Tony') | 845 | +| ('MAX(num)', 'girl', 'Amy') | 3081 | +| ('MAX(num)', 'girl', 'Cindy') | 1059 | +| ('MAX(num)', 'girl', 'Dawn') | 1618 | +| ('MAX(num)', 'girl', 'Sophia') | 3775 | """.strip() ) @@ -1246,20 +1716,20 @@ def test_pivot_df_complex_null_values(): assert ( pivoted.to_markdown() == """ -| | ('NULL',) | -|:-------------------------------|------------:| -| ('boy', 'Edward', 'SUM(num)') | 40685 | -| ('boy', 'Edward', 'MAX(num)') | 1669 | -| ('boy', 'Tony', 'SUM(num)') | 6438 | -| ('boy', 'Tony', 'MAX(num)') | 845 | -| ('girl', 'Amy', 'SUM(num)') | 60166 | -| ('girl', 'Amy', 'MAX(num)') | 3081 | -| ('girl', 'Cindy', 'SUM(num)') | 15367 | -| ('girl', 'Cindy', 'MAX(num)') | 1059 | -| ('girl', 'Dawn', 'SUM(num)') | 16492 | -| ('girl', 'Dawn', 'MAX(num)') | 1618 | -| ('girl', 'Sophia', 'SUM(num)') | 26040 | -| ('girl', 'Sophia', 'MAX(num)') | 3775 | +| | (nan,) | +|:-------------------------------|---------:| +| ('boy', 'Edward', 'SUM(num)') | 40685 | +| ('boy', 'Edward', 'MAX(num)') | 1669 | +| ('boy', 'Tony', 'SUM(num)') | 6438 | +| ('boy', 'Tony', 'MAX(num)') | 845 | +| ('girl', 'Amy', 'SUM(num)') | 60166 | +| ('girl', 'Amy', 'MAX(num)') | 3081 | +| ('girl', 'Cindy', 'SUM(num)') | 15367 | +| ('girl', 'Cindy', 'MAX(num)') | 1059 | +| ('girl', 'Dawn', 'SUM(num)') | 16492 | +| ('girl', 'Dawn', 'MAX(num)') | 1618 | +| ('girl', 'Sophia', 'SUM(num)') | 26040 | +| ('girl', 'Sophia', 'MAX(num)') | 3775 | """.strip() ) @@ -1279,12 +1749,12 @@ def test_pivot_df_complex_null_values(): assert ( pivoted.to_markdown() == """ -| | ('boy', 'Edward') | ('boy', 'Tony') | ('boy', 'Subtotal') | ('girl', 'Amy') | 
('girl', 'Cindy') | ('girl', 'Dawn') | ('girl', 'Sophia') | ('girl', 'Subtotal') | ('Total (Sum)', '') | -|:---------------------|--------------------:|------------------:|----------------------:|------------------:|--------------------:|-------------------:|---------------------:|-----------------------:|----------------------:| -| ('NULL', 'SUM(num)') | 40685 | 6438 | 47123 | 60166 | 15367 | 16492 | 26040 | 118065 | 165188 | -| ('NULL', 'MAX(num)') | 1669 | 845 | 2514 | 3081 | 1059 | 1618 | 3775 | 9533 | 12047 | -| ('NULL', 'Subtotal') | 42354 | 7283 | 49637 | 63247 | 16426 | 18110 | 29815 | 127598 | 177235 | -| ('Total (Sum)', '') | 42354 | 7283 | 49637 | 63247 | 16426 | 18110 | 29815 | 127598 | 177235 | +| | ('boy', 'Edward') | ('boy', 'Tony') | ('boy', 'Subtotal') | ('girl', 'Amy') | ('girl', 'Cindy') | ('girl', 'Dawn') | ('girl', 'Sophia') | ('girl', 'Subtotal') | ('Total (Sum)', '') | +|:--------------------|--------------------:|------------------:|----------------------:|------------------:|--------------------:|-------------------:|---------------------:|-----------------------:|----------------------:| +| (nan, 'SUM(num)') | 40685 | 6438 | 47123 | 60166 | 15367 | 16492 | 26040 | 118065 | 165188 | +| (nan, 'MAX(num)') | 1669 | 845 | 2514 | 3081 | 1059 | 1618 | 3775 | 9533 | 12047 | +| (nan, 'Subtotal') | 42354 | 7283 | 49637 | 63247 | 16426 | 18110 | 29815 | 127598 | 177235 | +| ('Total (Sum)', '') | 42354 | 7283 | 49637 | 63247 | 16426 | 18110 | 29815 | 127598 | 177235 | """.strip() ) @@ -1304,17 +1774,17 @@ def test_pivot_df_complex_null_values(): assert ( pivoted.to_markdown() == """ -| | ('SUM(num)', 'NULL') | ('MAX(num)', 'NULL') | -|:-------------------------------------------|-----------------------:|-----------------------:| -| ('boy', 'Edward') | 0.246295 | 0.138541 | -| ('boy', 'Tony') | 0.0389738 | 0.0701419 | -| ('boy', 'Subtotal') | 0.285269 | 0.208683 | -| ('girl', 'Amy') | 0.364227 | 0.255748 | -| ('girl', 'Cindy') | 0.0930273 | 0.0879057 | 
-| ('girl', 'Dawn') | 0.0998378 | 0.134307 | -| ('girl', 'Sophia') | 0.157639 | 0.313356 | -| ('girl', 'Subtotal') | 0.714731 | 0.791317 | -| ('Total (Sum as Fraction of Columns)', '') | 1 | 1 | +| | ('SUM(num)', nan) | ('MAX(num)', nan) | +|:-------------------------------------------|--------------------:|--------------------:| +| ('boy', 'Edward') | 0.246295 | 0.138541 | +| ('boy', 'Tony') | 0.0389738 | 0.0701419 | +| ('boy', 'Subtotal') | 0.285269 | 0.208683 | +| ('girl', 'Amy') | 0.364227 | 0.255748 | +| ('girl', 'Cindy') | 0.0930273 | 0.0879057 | +| ('girl', 'Dawn') | 0.0998378 | 0.134307 | +| ('girl', 'Sophia') | 0.157639 | 0.313356 | +| ('girl', 'Subtotal') | 0.714731 | 0.791317 | +| ('Total (Sum as Fraction of Columns)', '') | 1 | 1 | """.strip() ) diff --git a/tests/unit_tests/commands/databases/update_test.py b/tests/unit_tests/commands/databases/update_test.py index 37500d521420..b1b5e6843f0c 100644 --- a/tests/unit_tests/commands/databases/update_test.py +++ b/tests/unit_tests/commands/databases/update_test.py @@ -178,7 +178,12 @@ def test_rename_with_catalog( DatabaseDAO.find_by_id.return_value = original_database database_with_catalog.database_name = "my_other_db" DatabaseDAO.update.return_value = database_with_catalog - DatabaseDAO.get_datasets.return_value = [] + + dataset = mocker.MagicMock() + chart = mocker.MagicMock() + DatabaseDAO.get_datasets.return_value = [dataset] + DatasetDAO = mocker.patch("superset.commands.database.update.DatasetDAO") + DatasetDAO.get_related_objects.return_value = {"charts": [chart]} find_permission_view_menu = mocker.patch.object( security_manager, @@ -218,6 +223,11 @@ def test_rename_with_catalog( assert catalog2_pvm.view_menu.name == "[my_other_db].[catalog2]" assert catalog2_schema3_pvm.view_menu.name == "[my_other_db].[catalog2].[schema3]" + assert dataset.catalog_perm == "[my_other_db].[catalog2]" + assert dataset.schema_perm == "[my_other_db].[catalog2].[schema4]" + assert chart.catalog_perm == 
"[my_other_db].[catalog2]" + assert chart.schema_perm == "[my_other_db].[catalog2].[schema4]" + def test_rename_without_catalog( mocker: MockerFixture, diff --git a/tests/unit_tests/commands/report/execute_test.py b/tests/unit_tests/commands/report/execute_test.py new file mode 100644 index 000000000000..b7b545fd4a6e --- /dev/null +++ b/tests/unit_tests/commands/report/execute_test.py @@ -0,0 +1,222 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from pytest_mock import MockerFixture + +from superset.commands.report.execute import BaseReportState +from superset.reports.models import ( + ReportRecipientType, + ReportSchedule, + ReportSourceFormat, +) +from superset.utils.core import HeaderDataType + + +def test_log_data_with_chart(mocker: MockerFixture) -> None: + mock_report_schedule: ReportSchedule = mocker.Mock(spec=ReportSchedule) + mock_report_schedule.chart = True + mock_report_schedule.chart_id = 123 + mock_report_schedule.dashboard_id = None + mock_report_schedule.type = "report_type" + mock_report_schedule.report_format = "report_format" + mock_report_schedule.owners = [1, 2] + mock_report_schedule.recipients = [] + + class_instance: BaseReportState = BaseReportState( + mock_report_schedule, "January 1, 2021", "execution_id_example" + ) + class_instance._report_schedule = mock_report_schedule + + result: HeaderDataType = class_instance._get_log_data() + + expected_result: HeaderDataType = { + "notification_type": "report_type", + "notification_source": ReportSourceFormat.CHART, + "notification_format": "report_format", + "chart_id": 123, + "dashboard_id": None, + "owners": [1, 2], + "slack_channels": None, + } + + assert result == expected_result + + +def test_log_data_with_dashboard(mocker: MockerFixture) -> None: + mock_report_schedule: ReportSchedule = mocker.Mock(spec=ReportSchedule) + mock_report_schedule.chart = False + mock_report_schedule.chart_id = None + mock_report_schedule.dashboard_id = 123 + mock_report_schedule.type = "report_type" + mock_report_schedule.report_format = "report_format" + mock_report_schedule.owners = [1, 2] + mock_report_schedule.recipients = [] + + class_instance: BaseReportState = BaseReportState( + mock_report_schedule, "January 1, 2021", "execution_id_example" + ) + class_instance._report_schedule = mock_report_schedule + + result: HeaderDataType = class_instance._get_log_data() + + expected_result: HeaderDataType = { + "notification_type": "report_type", + 
"notification_source": ReportSourceFormat.DASHBOARD, + "notification_format": "report_format", + "chart_id": None, + "dashboard_id": 123, + "owners": [1, 2], + "slack_channels": None, + } + + assert result == expected_result + + +def test_log_data_with_email_recipients(mocker: MockerFixture) -> None: + mock_report_schedule: ReportSchedule = mocker.Mock(spec=ReportSchedule) + mock_report_schedule.chart = False + mock_report_schedule.chart_id = None + mock_report_schedule.dashboard_id = 123 + mock_report_schedule.type = "report_type" + mock_report_schedule.report_format = "report_format" + mock_report_schedule.owners = [1, 2] + mock_report_schedule.recipients = [] + mock_report_schedule.recipients = [ + mocker.Mock(type=ReportRecipientType.EMAIL, recipient_config_json="email_1"), + mocker.Mock(type=ReportRecipientType.EMAIL, recipient_config_json="email_2"), + ] + + class_instance: BaseReportState = BaseReportState( + mock_report_schedule, "January 1, 2021", "execution_id_example" + ) + class_instance._report_schedule = mock_report_schedule + + result: HeaderDataType = class_instance._get_log_data() + + expected_result: HeaderDataType = { + "notification_type": "report_type", + "notification_source": ReportSourceFormat.DASHBOARD, + "notification_format": "report_format", + "chart_id": None, + "dashboard_id": 123, + "owners": [1, 2], + "slack_channels": [], + } + + assert result == expected_result + + +def test_log_data_with_slack_recipients(mocker: MockerFixture) -> None: + mock_report_schedule: ReportSchedule = mocker.Mock(spec=ReportSchedule) + mock_report_schedule.chart = False + mock_report_schedule.chart_id = None + mock_report_schedule.dashboard_id = 123 + mock_report_schedule.type = "report_type" + mock_report_schedule.report_format = "report_format" + mock_report_schedule.owners = [1, 2] + mock_report_schedule.recipients = [] + mock_report_schedule.recipients = [ + mocker.Mock(type=ReportRecipientType.SLACK, recipient_config_json="channel_1"), + 
mocker.Mock(type=ReportRecipientType.SLACK, recipient_config_json="channel_2"), + ] + + class_instance: BaseReportState = BaseReportState( + mock_report_schedule, "January 1, 2021", "execution_id_example" + ) + class_instance._report_schedule = mock_report_schedule + + result: HeaderDataType = class_instance._get_log_data() + + expected_result: HeaderDataType = { + "notification_type": "report_type", + "notification_source": ReportSourceFormat.DASHBOARD, + "notification_format": "report_format", + "chart_id": None, + "dashboard_id": 123, + "owners": [1, 2], + "slack_channels": ["channel_1", "channel_2"], + } + + assert result == expected_result + + +def test_log_data_no_owners(mocker: MockerFixture) -> None: + mock_report_schedule: ReportSchedule = mocker.Mock(spec=ReportSchedule) + mock_report_schedule.chart = False + mock_report_schedule.chart_id = None + mock_report_schedule.dashboard_id = 123 + mock_report_schedule.type = "report_type" + mock_report_schedule.report_format = "report_format" + mock_report_schedule.owners = [] + mock_report_schedule.recipients = [ + mocker.Mock(type=ReportRecipientType.SLACK, recipient_config_json="channel_1"), + mocker.Mock(type=ReportRecipientType.SLACK, recipient_config_json="channel_2"), + ] + + class_instance: BaseReportState = BaseReportState( + mock_report_schedule, "January 1, 2021", "execution_id_example" + ) + class_instance._report_schedule = mock_report_schedule + + result: HeaderDataType = class_instance._get_log_data() + + expected_result: HeaderDataType = { + "notification_type": "report_type", + "notification_source": ReportSourceFormat.DASHBOARD, + "notification_format": "report_format", + "chart_id": None, + "dashboard_id": 123, + "owners": [], + "slack_channels": ["channel_1", "channel_2"], + } + + assert result == expected_result + + +def test_log_data_with_missing_values(mocker: MockerFixture) -> None: + mock_report_schedule: ReportSchedule = mocker.Mock(spec=ReportSchedule) + mock_report_schedule.chart = None 
+ mock_report_schedule.chart_id = None + mock_report_schedule.dashboard_id = None + mock_report_schedule.type = "report_type" + mock_report_schedule.report_format = "report_format" + mock_report_schedule.owners = [1, 2] + mock_report_schedule.recipients = [ + mocker.Mock(type=ReportRecipientType.SLACK, recipient_config_json="channel_1"), + mocker.Mock( + type=ReportRecipientType.SLACKV2, recipient_config_json="channel_2" + ), + ] + + class_instance: BaseReportState = BaseReportState( + mock_report_schedule, "January 1, 2021", "execution_id_example" + ) + class_instance._report_schedule = mock_report_schedule + + result: HeaderDataType = class_instance._get_log_data() + + expected_result: HeaderDataType = { + "notification_type": "report_type", + "notification_source": ReportSourceFormat.DASHBOARD, + "notification_format": "report_format", + "chart_id": None, + "dashboard_id": None, + "owners": [1, 2], + "slack_channels": ["channel_1", "channel_2"], + } + + assert result == expected_result diff --git a/tests/unit_tests/connectors/sqla/models_test.py b/tests/unit_tests/connectors/sqla/models_test.py index c1e06f3755dd..013d03e7e4cf 100644 --- a/tests/unit_tests/connectors/sqla/models_test.py +++ b/tests/unit_tests/connectors/sqla/models_test.py @@ -15,13 +15,14 @@ # specific language governing permissions and limitations # under the License. 
+import pandas as pd import pytest from pytest_mock import MockerFixture from sqlalchemy import create_engine from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.session import Session -from superset.connectors.sqla.models import SqlaTable +from superset.connectors.sqla.models import SqlaTable, TableColumn from superset.daos.dataset import DatasetDAO from superset.exceptions import OAuth2RedirectError from superset.models.core import Database @@ -255,11 +256,34 @@ def test_dataset_uniqueness(session: Session) -> None: # but the DAO enforces application logic for uniqueness assert not DatasetDAO.validate_uniqueness( - database.id, + database, Table("table", "schema", None), ) assert DatasetDAO.validate_uniqueness( - database.id, + database, Table("table", "schema", "some_catalog"), ) + + +def test_normalize_prequery_result_type_custom_sql() -> None: + """ + Test that the `_normalize_prequery_result_type` can hanndle custom SQL. + """ + sqla_table = SqlaTable( + table_name="my_sqla_table", + columns=[], + metrics=[], + database=Database(database_name="my_db", sqlalchemy_uri="sqlite://"), + ) + row: pd.Series = { + "custom_sql": "Car", + } + dimension: str = "custom_sql" + columns_by_name: dict[str, TableColumn] = { + "product_line": TableColumn(column_name="product_line"), + } + assert ( + sqla_table._normalize_prequery_result_type(row, dimension, columns_by_name) + == "Car" + ) diff --git a/tests/unit_tests/connectors/sqla/utils_test.py b/tests/unit_tests/connectors/sqla/utils_test.py new file mode 100644 index 000000000000..0da3ab7e95a9 --- /dev/null +++ b/tests/unit_tests/connectors/sqla/utils_test.py @@ -0,0 +1,139 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import pytest +from pytest_mock import MockerFixture + +from superset.connectors.sqla.utils import ( + get_columns_description, + get_virtual_table_metadata, +) +from superset.exceptions import SupersetSecurityException + + +# Returns column descriptions when given valid database, catalog, schema, and query +def test_returns_column_descriptions(mocker: MockerFixture) -> None: + database = mocker.MagicMock() + cursor = mocker.MagicMock() + + result_set = mocker.MagicMock() + db_engine_spec = mocker.MagicMock() + + CURSOR_DESCR = ( + ("foo", "string"), + ("bar", "string"), + ("baz", "string"), + ("type_generic", "string"), + ("is_dttm", "boolean"), + ) + cursor.description = CURSOR_DESCR + + database.get_raw_connection.return_value.__enter__.return_value.cursor.return_value = cursor + database.db_engine_spec = db_engine_spec + database.apply_limit_to_sql.return_value = "SELECT * FROM table LIMIT 1" + database.mutate_sql_based_on_config.return_value = "SELECT * FROM table LIMIT 1" + db_engine_spec.fetch_data.return_value = [("col1", "col1", "STRING", None, False)] + db_engine_spec.get_datatype.return_value = "STRING" + db_engine_spec.get_column_spec.return_value.is_dttm = False + db_engine_spec.get_column_spec.return_value.generic_type = "STRING" + + mocker.patch("superset.result_set.SupersetResultSet", return_value=result_set) + + columns = get_columns_description( + database, "catalog", 
"schema", "SELECT * FROM table" + ) + + assert columns == [ + { + "column_name": "foo", + "name": "foo", + "type": "STRING", + "type_generic": "STRING", + "is_dttm": False, + }, + { + "column_name": "bar", + "name": "bar", + "type": "STRING", + "type_generic": "STRING", + "is_dttm": False, + }, + { + "column_name": "baz", + "name": "baz", + "type": "STRING", + "type_generic": "STRING", + "is_dttm": False, + }, + { + "column_name": "type_generic", + "name": "type_generic", + "type": "STRING", + "type_generic": "STRING", + "is_dttm": False, + }, + { + "column_name": "is_dttm", + "name": "is_dttm", + "type": "STRING", + "type_generic": "STRING", + "is_dttm": False, + }, + ] + + +def test_get_virtual_table_metadata(mocker: MockerFixture) -> None: + """ + Test the `get_virtual_table_metadata` function. + """ + mocker.patch( + "superset.connectors.sqla.utils.get_columns_description", + return_value=[{"name": "one", "type": "INTEGER"}], + ) + dataset = mocker.MagicMock( + sql="with source as ( select 1 as one ) select * from source", + ) + dataset.database.db_engine_spec.engine = "postgresql" + dataset.get_template_processor().process_template.return_value = dataset.sql + + assert get_virtual_table_metadata(dataset) == [{"name": "one", "type": "INTEGER"}] + + +def test_get_virtual_table_metadata_mutating(mocker: MockerFixture) -> None: + """ + Test the `get_virtual_table_metadata` function with mutating SQL. + """ + dataset = mocker.MagicMock(sql="DROP TABLE sample_data") + dataset.database.db_engine_spec.engine = "postgresql" + dataset.get_template_processor().process_template.return_value = dataset.sql + + with pytest.raises(SupersetSecurityException) as excinfo: + get_virtual_table_metadata(dataset) + assert str(excinfo.value) == "Only `SELECT` statements are allowed" + + +def test_get_virtual_table_metadata_multiple(mocker: MockerFixture) -> None: + """ + Test the `get_virtual_table_metadata` function with multiple statements. 
+ """ + dataset = mocker.MagicMock(sql="SELECT 1; SELECT 2") + dataset.database.db_engine_spec.engine = "postgresql" + dataset.get_template_processor().process_template.return_value = dataset.sql + + with pytest.raises(SupersetSecurityException) as excinfo: + get_virtual_table_metadata(dataset) + assert str(excinfo.value) == "Only single queries supported" diff --git a/tests/unit_tests/dao/dataset_test.py b/tests/unit_tests/dao/dataset_test.py index 473d1e27b766..2b0b5c3d5f2e 100644 --- a/tests/unit_tests/dao/dataset_test.py +++ b/tests/unit_tests/dao/dataset_test.py @@ -53,7 +53,7 @@ def test_validate_update_uniqueness(session: Session) -> None: assert ( DatasetDAO.validate_update_uniqueness( - database_id=database.id, + database=database, table=Table(dataset1.table_name, dataset1.schema), dataset_id=dataset1.id, ) @@ -62,7 +62,7 @@ def test_validate_update_uniqueness(session: Session) -> None: assert ( DatasetDAO.validate_update_uniqueness( - database_id=database.id, + database=database, table=Table(dataset1.table_name, dataset2.schema), dataset_id=dataset1.id, ) @@ -71,7 +71,7 @@ def test_validate_update_uniqueness(session: Session) -> None: assert ( DatasetDAO.validate_update_uniqueness( - database_id=database.id, + database=database, table=Table(dataset1.table_name), dataset_id=dataset1.id, ) diff --git a/tests/unit_tests/dao/user_test.py b/tests/unit_tests/dao/user_test.py index bf65c51121fa..6066b0e7dfc5 100644 --- a/tests/unit_tests/dao/user_test.py +++ b/tests/unit_tests/dao/user_test.py @@ -14,79 +14,65 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-from unittest.mock import MagicMock +from __future__ import annotations import pytest from flask_appbuilder.security.sqla.models import User -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.exc import NoResultFound +from superset import db from superset.daos.user import UserDAO +from superset.extensions import security_manager from superset.models.user_attributes import UserAttribute +from tests.unit_tests.fixtures.common import admin_user, after_each # noqa: F401 -@pytest.fixture -def mock_db_session(mocker): - db = mocker.patch("superset.daos.user.db", autospec=True) - db.session = MagicMock() - db.session.query = MagicMock() - db.session.commit = MagicMock() - db.session.query.return_value = MagicMock() - return db.session +def test_get_by_id_found(admin_user: User, after_each: None) -> None: # noqa: F811 + user = UserDAO.get_by_id(admin_user.id) + assert user.id == admin_user.id -def test_get_by_id_found(mock_db_session): - # Setup - user_id = 1 - mock_user = User() - mock_user.id = user_id - mock_query = mock_db_session.query.return_value - mock_query.filter_by.return_value.one.return_value = mock_user - - # Execute - UserDAO.get_by_id(user_id) # noqa: F841 - - # Assert - mock_db_session.query.assert_called_with(User) - mock_query.filter_by.assert_called_with(id=user_id) - +def test_get_by_id_not_found(): + with pytest.raises(NoResultFound): + UserDAO.get_by_id(123456) -def test_get_by_id_not_found(mock_db_session): - # Setup - user_id = 1 - mock_query = mock_db_session.query.return_value - mock_query.filter_by.return_value.one.side_effect = NoResultFound - # Execute & Assert - with pytest.raises(NoResultFound): - UserDAO.get_by_id(user_id) +def test_set_avatar_url_with_existing_attributes( + admin_user: User, # noqa: F811 + after_each: None, # noqa: F811 +) -> None: + admin_user.extra_attributes = [ + UserAttribute(user_id=admin_user.id, avatar_url="old_url"), + ] + db.session.flush() + new_url = "http://newurl.com" + 
UserDAO.set_avatar_url(admin_user, new_url) + user = UserDAO.get_by_id(admin_user.id) + assert user.extra_attributes[0].avatar_url == new_url -def test_set_avatar_url_with_existing_attributes(mock_db_session): - # Setup - user = User() - user.id = 1 - user.extra_attributes = [UserAttribute(user_id=user.id, avatar_url="old_url")] - # Execute +def test_set_avatar_url_without_existing_attributes( + admin_user: User, # noqa: F811 + after_each: None, # noqa: F811 +) -> None: new_url = "http://newurl.com" - UserDAO.set_avatar_url(user, new_url) + UserDAO.set_avatar_url(admin_user, new_url) - # Assert + user = UserDAO.get_by_id(admin_user.id) + assert len(admin_user.extra_attributes) == 1 assert user.extra_attributes[0].avatar_url == new_url - mock_db_session.add.assert_not_called() # No new attributes should be added -def test_set_avatar_url_without_existing_attributes(mock_db_session): - # Setup - user = User() - user.id = 1 - user.extra_attributes = [] +def test_get_by_id_custom_user_class( + monkeypatch: pytest.MonkeyPatch, + admin_user: User, # noqa: F811 + after_each: None, # noqa: F811 +) -> None: + class CustomUserModel(User): + __tablename__ = "ab_user" - # Execute - new_url = "http://newurl.com" - UserDAO.set_avatar_url(user, new_url) + monkeypatch.setattr(security_manager, "user_model", CustomUserModel) - # Assert - assert len(user.extra_attributes) == 1 - assert user.extra_attributes[0].avatar_url == new_url - mock_db_session.add.assert_called() # New attribute should be added + user = UserDAO.get_by_id(admin_user.id) + assert isinstance(user, CustomUserModel) diff --git a/tests/unit_tests/dashboards/commands/importers/v1/import_test.py b/tests/unit_tests/dashboards/commands/importers/v1/import_test.py index 03f4e9fd3079..b2e8066b3475 100644 --- a/tests/unit_tests/dashboards/commands/importers/v1/import_test.py +++ b/tests/unit_tests/dashboards/commands/importers/v1/import_test.py @@ -65,7 +65,9 @@ def test_import_dashboard(mocker: MockerFixture, 
session_with_schema: Session) - """ Test importing a dashboard. """ - mocker.patch.object(security_manager, "can_access", return_value=True) + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=True + ) dashboard = import_dashboard(dashboard_config) assert dashboard.dashboard_title == "Test dash" @@ -73,7 +75,7 @@ def test_import_dashboard(mocker: MockerFixture, session_with_schema: Session) - assert dashboard.is_managed_externally is False assert dashboard.external_url is None # Assert that the can write to dashboard was checked - security_manager.can_access.assert_called_once_with("can_write", "Dashboard") + mock_can_access.assert_called_once_with("can_write", "Dashboard") def test_import_dashboard_managed_externally( @@ -83,7 +85,9 @@ def test_import_dashboard_managed_externally( """ Test importing a dashboard that is managed externally. """ - mocker.patch.object(security_manager, "can_access", return_value=True) + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=True + ) config = copy.deepcopy(dashboard_config) config["is_managed_externally"] = True @@ -93,7 +97,7 @@ def test_import_dashboard_managed_externally( assert dashboard.external_url == "https://example.org/my_dashboard" # Assert that the can write to dashboard was checked - security_manager.can_access.assert_called_once_with("can_write", "Dashboard") + mock_can_access.assert_called_once_with("can_write", "Dashboard") def test_import_dashboard_without_permission( @@ -103,7 +107,9 @@ def test_import_dashboard_without_permission( """ Test importing a dashboard when a user doesn't have permissions to create. 
""" - mocker.patch.object(security_manager, "can_access", return_value=False) + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=False + ) with pytest.raises(ImportFailedError) as excinfo: import_dashboard(dashboard_config) @@ -113,18 +119,22 @@ def test_import_dashboard_without_permission( ) # Assert that the can write to dashboard was checked - security_manager.can_access.assert_called_once_with("can_write", "Dashboard") + mock_can_access.assert_called_once_with("can_write", "Dashboard") -def test_import_existing_dashboard_without_permission( +def test_import_existing_dashboard_without_access_permission( mocker: MockerFixture, session_with_data: Session, ) -> None: """ Test importing a dashboard when a user doesn't have permissions to create. """ - mocker.patch.object(security_manager, "can_access", return_value=True) - mocker.patch.object(security_manager, "can_access_dashboard", return_value=False) + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=True + ) + mock_can_access_dashboard = mocker.patch.object( + security_manager, "can_access_dashboard", return_value=False + ) dashboard = ( session_with_data.query(Dashboard) @@ -132,7 +142,56 @@ def test_import_existing_dashboard_without_permission( .one_or_none() ) - with override_user("admin"): + admin = User( + first_name="Alice", + last_name="Doe", + email="adoe@example.org", + username="admin", + roles=[Role(name="Admin")], + ) + + with override_user(admin): + with pytest.raises(ImportFailedError) as excinfo: + import_dashboard(dashboard_config, overwrite=True) + assert ( + str(excinfo.value) + == "A dashboard already exists and user doesn't have permissions to overwrite it" # noqa: E501 + ) + + # Assert that the can write to dashboard was checked + mock_can_access.assert_called_once_with("can_write", "Dashboard") + mock_can_access_dashboard.assert_called_once_with(dashboard) + + +def 
test_import_existing_dashboard_without_owner_permission( + mocker: MockerFixture, + session_with_data: Session, +) -> None: + """ + Test importing a dashboard when a user doesn't have ownership and is not an Admin. + """ + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=True + ) + mock_can_access_dashboard = mocker.patch.object( + security_manager, "can_access_dashboard", return_value=True + ) + + dashboard = ( + session_with_data.query(Dashboard) + .filter(Dashboard.uuid == dashboard_config["uuid"]) + .one_or_none() + ) + + user = User( + first_name="Alice", + last_name="Doe", + email="adoe@example.org", + username="admin", + roles=[Role(name="Gamma")], + ) + + with override_user(user): with pytest.raises(ImportFailedError) as excinfo: import_dashboard(dashboard_config, overwrite=True) assert ( @@ -141,8 +200,8 @@ def test_import_existing_dashboard_without_permission( ) # Assert that the can write to dashboard was checked - security_manager.can_access.assert_called_once_with("can_write", "Dashboard") - security_manager.can_access_dashboard.assert_called_once_with(dashboard) + mock_can_access.assert_called_once_with("can_write", "Dashboard") + mock_can_access_dashboard.assert_called_once_with(dashboard) def test_import_existing_dashboard_with_permission( @@ -152,8 +211,12 @@ def test_import_existing_dashboard_with_permission( """ Test importing a dashboard that exists when a user has access permission to that dashboard. 
""" - mocker.patch.object(security_manager, "can_access", return_value=True) - mocker.patch.object(security_manager, "can_access_dashboard", return_value=True) + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=True + ) + mock_can_access_dashboard = mocker.patch.object( + security_manager, "can_access_dashboard", return_value=True + ) admin = User( first_name="Alice", @@ -173,5 +236,5 @@ def test_import_existing_dashboard_with_permission( import_dashboard(dashboard_config, overwrite=True) # Assert that the can write to dashboard was checked - security_manager.can_access.assert_called_once_with("can_write", "Dashboard") - security_manager.can_access_dashboard.assert_called_once_with(dashboard) + mock_can_access.assert_called_once_with("can_write", "Dashboard") + mock_can_access_dashboard.assert_called_once_with(dashboard) diff --git a/tests/unit_tests/databases/commands/importers/v1/import_test.py b/tests/unit_tests/databases/commands/importers/v1/import_test.py index bfb472b3e2a0..06be5bc16f1f 100644 --- a/tests/unit_tests/databases/commands/importers/v1/import_test.py +++ b/tests/unit_tests/databases/commands/importers/v1/import_test.py @@ -37,6 +37,7 @@ def test_import_database(mocker: MockerFixture, session: Session) -> None: from tests.integration_tests.fixtures.importexport import database_config mocker.patch.object(security_manager, "can_access", return_value=True) + mocker.patch("superset.commands.database.importers.v1.utils.add_permissions") engine = db.session.get_bind() Database.metadata.create_all(engine) # pylint: disable=no-member @@ -44,7 +45,7 @@ def test_import_database(mocker: MockerFixture, session: Session) -> None: config = copy.deepcopy(database_config) database = import_database(config) assert database.database_name == "imported_database" - assert database.sqlalchemy_uri == "someengine://user:pass@host1" + assert database.sqlalchemy_uri == "postgresql://user:pass@host1" assert database.cache_timeout is None 
assert database.expose_in_sqllab is True assert database.allow_run_async is False @@ -67,6 +68,28 @@ def test_import_database(mocker: MockerFixture, session: Session) -> None: assert database.allow_dml is False +def test_import_database_no_creds(mocker: MockerFixture, session: Session) -> None: + """ + Test importing a database. + """ + from superset import security_manager + from superset.commands.database.importers.v1.utils import import_database + from superset.models.core import Database + from tests.integration_tests.fixtures.importexport import database_config_no_creds + + mocker.patch.object(security_manager, "can_access", return_value=True) + + engine = db.session.get_bind() + Database.metadata.create_all(engine) # pylint: disable=no-member + + config = copy.deepcopy(database_config_no_creds) + database = import_database(config) + assert database.database_name == "imported_database_no_creds" + assert database.sqlalchemy_uri == "bigquery://test-db/" + assert database.extra == "{}" + assert database.uuid == "2ff17edc-f3fa-4609-a5ac-b484281225bc" + + def test_import_database_sqlite_invalid( mocker: MockerFixture, session: Session ) -> None: @@ -108,6 +131,7 @@ def test_import_database_managed_externally( from tests.integration_tests.fixtures.importexport import database_config mocker.patch.object(security_manager, "can_access", return_value=True) + mocker.patch("superset.commands.database.importers.v1.utils.add_permissions") engine = db.session.get_bind() Database.metadata.create_all(engine) # pylint: disable=no-member @@ -158,6 +182,7 @@ def test_import_database_with_version(mocker: MockerFixture, session: Session) - from tests.integration_tests.fixtures.importexport import database_config mocker.patch.object(security_manager, "can_access", return_value=True) + mocker.patch("superset.commands.database.importers.v1.utils.add_permissions") engine = db.session.get_bind() Database.metadata.create_all(engine) # pylint: disable=no-member @@ -166,3 +191,27 @@ def 
test_import_database_with_version(mocker: MockerFixture, session: Session) - config["extra"]["version"] = "1.1.1" database = import_database(config) assert json.loads(database.extra)["version"] == "1.1.1" + + +def test_import_database_with_user_impersonation( + mocker: MockerFixture, + session: Session, +) -> None: + """ + Test importing a database that is managed externally. + """ + from superset import security_manager + from superset.commands.database.importers.v1.utils import import_database + from superset.models.core import Database + from tests.integration_tests.fixtures.importexport import database_config + + mocker.patch.object(security_manager, "can_access", return_value=True) + mocker.patch("superset.commands.database.importers.v1.utils.add_permissions") + engine = db.session.get_bind() + Database.metadata.create_all(engine) # pylint: disable=no-member + + config = copy.deepcopy(database_config) + config["impersonate_user"] = True + + database = import_database(config) + assert database.impersonate_user is True diff --git a/tests/unit_tests/databases/commands/utils_test.py b/tests/unit_tests/databases/commands/utils_test.py new file mode 100644 index 000000000000..e8f27d041602 --- /dev/null +++ b/tests/unit_tests/databases/commands/utils_test.py @@ -0,0 +1,76 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +from pytest_mock import MockerFixture + +from superset.commands.database.utils import add_permissions + + +def test_add_permissions(mocker: MockerFixture) -> None: + """ + Test adding permissions to a database when it's created. + """ + database = mocker.MagicMock() + database.database_name = "my_db" + database.db_engine_spec.supports_catalog = True + database.get_all_catalog_names.return_value = ["catalog1", "catalog2"] + database.get_all_schema_names.side_effect = [["schema1"], ["schema2"]] + ssh_tunnel = mocker.MagicMock() + add_permission_view_menu = mocker.patch( + "superset.commands.database.importers.v1.utils.security_manager." + "add_permission_view_menu" + ) + + add_permissions(database, ssh_tunnel) + + add_permission_view_menu.assert_has_calls( + [ + mocker.call("catalog_access", "[my_db].[catalog1]"), + mocker.call("catalog_access", "[my_db].[catalog2]"), + mocker.call("schema_access", "[my_db].[catalog1].[schema1]"), + mocker.call("schema_access", "[my_db].[catalog2].[schema2]"), + ] + ) + + +def test_add_permissions_handle_failures(mocker: MockerFixture) -> None: + """ + Test adding permissions to a database when it's created in case + the request to get all schemas for one fo the catalogs fail. + """ + database = mocker.MagicMock() + database.database_name = "my_db" + database.db_engine_spec.supports_catalog = True + database.get_all_catalog_names.return_value = ["catalog1", "catalog2", "catalog3"] + database.get_all_schema_names.side_effect = [["schema1"], Exception, ["schema3"]] + ssh_tunnel = mocker.MagicMock() + add_permission_view_menu = mocker.patch( + "superset.commands.database.importers.v1.utils.security_manager." 
+ "add_permission_view_menu" + ) + + add_permissions(database, ssh_tunnel) + + add_permission_view_menu.assert_has_calls( + [ + mocker.call("catalog_access", "[my_db].[catalog1]"), + mocker.call("catalog_access", "[my_db].[catalog2]"), + mocker.call("catalog_access", "[my_db].[catalog3]"), + mocker.call("schema_access", "[my_db].[catalog1].[schema1]"), + mocker.call("schema_access", "[my_db].[catalog3].[schema3]"), + ] + ) diff --git a/tests/unit_tests/datasets/commands/importers/v1/import_test.py b/tests/unit_tests/datasets/commands/importers/v1/import_test.py index 2a2038046d3d..6bba6f039d52 100644 --- a/tests/unit_tests/datasets/commands/importers/v1/import_test.py +++ b/tests/unit_tests/datasets/commands/importers/v1/import_test.py @@ -24,6 +24,7 @@ import pytest from flask import current_app +from flask_appbuilder.security.sqla.models import Role, User from pytest_mock import MockerFixture from sqlalchemy.orm.session import Session @@ -32,7 +33,9 @@ DatasetForbiddenDataURI, ) from superset.commands.dataset.importers.v1.utils import validate_data_uri +from superset.commands.exceptions import ImportFailedError from superset.utils import json +from superset.utils.core import override_user def test_import_dataset(mocker: MockerFixture, session: Session) -> None: @@ -536,6 +539,55 @@ def test_import_dataset_managed_externally( assert sqla_table.external_url == "https://example.org/my_table" +def test_import_dataset_without_owner_permission( + mocker: MockerFixture, + session: Session, +) -> None: + """ + Test importing a dataset that is managed externally. 
+ """ + from superset import security_manager + from superset.commands.dataset.importers.v1.utils import import_dataset + from superset.connectors.sqla.models import SqlaTable + from superset.models.core import Database + from tests.integration_tests.fixtures.importexport import dataset_config + + mock_can_access = mocker.patch.object( + security_manager, "can_access", return_value=True + ) + + engine = db.session.get_bind() + SqlaTable.metadata.create_all(engine) # pylint: disable=no-member + + database = Database(database_name="my_database", sqlalchemy_uri="sqlite://") + db.session.add(database) + db.session.flush() + + config = copy.deepcopy(dataset_config) + config["database_id"] = database.id + + import_dataset(config) + user = User( + first_name="Alice", + last_name="Doe", + email="adoe@example.org", + username="admin", + roles=[Role(name="Gamma")], + ) + + with override_user(user): + with pytest.raises(ImportFailedError) as excinfo: + import_dataset(config, overwrite=True) + + assert ( + str(excinfo.value) + == "A dataset already exists and user doesn't have permissions to overwrite it" # noqa: E501 + ) + + # Assert that the can write to chart was checked + mock_can_access.assert_called_with("can_write", "Dataset") + + @pytest.mark.parametrize( "allowed_urls, data_uri, expected, exception_class", [ diff --git a/tests/unit_tests/db_engine_specs/test_base.py b/tests/unit_tests/db_engine_specs/test_base.py index 9ec1ebaf00ef..d8e632ce0933 100644 --- a/tests/unit_tests/db_engine_specs/test_base.py +++ b/tests/unit_tests/db_engine_specs/test_base.py @@ -19,6 +19,7 @@ from __future__ import annotations +import json from textwrap import dedent from typing import Any @@ -240,14 +241,7 @@ class NoLimitDBEngineSpec(BaseEngineSpec): latest_partition=False, cols=cols, ) - assert ( - sql - == """SELECT - a -FROM my_table -LIMIT ? -OFFSET ?""" - ) + assert sql == "SELECT a\nFROM my_table\nLIMIT ?\nOFFSET ?" 
sql = NoLimitDBEngineSpec.select_star( database=database, @@ -259,12 +253,7 @@ class NoLimitDBEngineSpec(BaseEngineSpec): latest_partition=False, cols=cols, ) - assert ( - sql - == """SELECT - a -FROM my_table""" - ) + assert sql == "SELECT a\nFROM my_table" def test_extra_table_metadata(mocker: MockerFixture) -> None: @@ -334,3 +323,60 @@ def test_quote_table() -> None: BaseEngineSpec.quote_table(Table("ta ble", "sche.ma", 'cata"log'), dialect) == '"cata""log"."sche.ma"."ta ble"' ) + + +def test_mask_encrypted_extra() -> None: + """ + Test that the private key is masked when the database is edited. + """ + from superset.db_engine_specs.base import BaseEngineSpec + + config = json.dumps( + { + "foo": "bar", + "service_account_info": { + "project_id": "black-sanctum-314419", + "private_key": "SECRET", + }, + } + ) + + assert BaseEngineSpec.mask_encrypted_extra(config) == json.dumps( + { + "foo": "XXXXXXXXXX", + "service_account_info": "XXXXXXXXXX", + } + ) + + +def test_unmask_encrypted_extra() -> None: + """ + Test that the private key can be reused from the previous `encrypted_extra`. 
+ """ + from superset.db_engine_specs.base import BaseEngineSpec + + old = json.dumps( + { + "foo": "bar", + "service_account_info": { + "project_id": "black-sanctum-314419", + "private_key": "SECRET", + }, + } + ) + new = json.dumps( + { + "foo": "XXXXXXXXXX", + "service_account_info": "XXXXXXXXXX", + } + ) + + assert BaseEngineSpec.unmask_encrypted_extra(old, new) == json.dumps( + { + "foo": "bar", + "service_account_info": { + "project_id": "black-sanctum-314419", + "private_key": "SECRET", + }, + } + ) diff --git a/tests/unit_tests/db_engine_specs/test_bigquery.py b/tests/unit_tests/db_engine_specs/test_bigquery.py index 1a6173057652..9e3d98ff8c60 100644 --- a/tests/unit_tests/db_engine_specs/test_bigquery.py +++ b/tests/unit_tests/db_engine_specs/test_bigquery.py @@ -191,7 +191,7 @@ def test_get_parameters_from_uri_serializable() -> None: def test_unmask_encrypted_extra() -> None: """ - Test that the private key can be reused from the previous ``encrypted_extra``. + Test that the private key can be reused from the previous `encrypted_extra`. """ from superset.db_engine_specs.bigquery import BigQueryEngineSpec @@ -212,17 +212,52 @@ def test_unmask_encrypted_extra() -> None: } ) - assert json.loads(str(BigQueryEngineSpec.unmask_encrypted_extra(old, new))) == { - "credentials_info": { - "project_id": "yellow-unicorn-314419", - "private_key": "SECRET", - }, - } + assert BigQueryEngineSpec.unmask_encrypted_extra(old, new) == json.dumps( + { + "credentials_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "SECRET", + }, + } + ) -def test_unmask_encrypted_extra_when_empty() -> None: +def test_unmask_encrypted_extra_field_changeed() -> None: """ - Test that a None value works for ``encrypted_extra``. + Test that the private key is not reused when the field has changed. 
+ """ + from superset.db_engine_specs.bigquery import BigQueryEngineSpec + + old = json.dumps( + { + "credentials_info": { + "project_id": "black-sanctum-314419", + "private_key": "SECRET", + }, + } + ) + new = json.dumps( + { + "credentials_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "NEW-SECRET", + }, + } + ) + + assert BigQueryEngineSpec.unmask_encrypted_extra(old, new) == json.dumps( + { + "credentials_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "NEW-SECRET", + }, + } + ) + + +def test_unmask_encrypted_extra_when_old_is_none() -> None: + """ + Test that a `None` value for the old field works for `encrypted_extra`. """ from superset.db_engine_specs.bigquery import BigQueryEngineSpec @@ -236,17 +271,19 @@ def test_unmask_encrypted_extra_when_empty() -> None: } ) - assert json.loads(str(BigQueryEngineSpec.unmask_encrypted_extra(old, new))) == { - "credentials_info": { - "project_id": "yellow-unicorn-314419", - "private_key": "XXXXXXXXXX", - }, - } + assert BigQueryEngineSpec.unmask_encrypted_extra(old, new) == json.dumps( + { + "credentials_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "XXXXXXXXXX", + }, + } + ) -def test_unmask_encrypted_extra_when_new_is_empty() -> None: +def test_unmask_encrypted_extra_when_new_is_none() -> None: """ - Test that a None value works for ``encrypted_extra``. + Test that a `None` value for the new field works for `encrypted_extra`. """ from superset.db_engine_specs.bigquery import BigQueryEngineSpec @@ -263,6 +300,31 @@ def test_unmask_encrypted_extra_when_new_is_empty() -> None: assert BigQueryEngineSpec.unmask_encrypted_extra(old, new) is None +def test_mask_encrypted_extra() -> None: + """ + Test that the private key is masked when the database is edited. 
+ """ + from superset.db_engine_specs.bigquery import BigQueryEngineSpec + + config = json.dumps( + { + "credentials_info": { + "project_id": "black-sanctum-314419", + "private_key": "SECRET", + }, + } + ) + + assert BigQueryEngineSpec.mask_encrypted_extra(config) == json.dumps( + { + "credentials_info": { + "project_id": "black-sanctum-314419", + "private_key": "XXXXXXXXXX", + }, + } + ) + + def test_mask_encrypted_extra_when_empty() -> None: """ Test that the encrypted extra will return a none value if the field is empty. diff --git a/tests/unit_tests/db_engine_specs/test_couchbase.py b/tests/unit_tests/db_engine_specs/test_couchbase.py index 140df287324f..bae7e6c8dc36 100644 --- a/tests/unit_tests/db_engine_specs/test_couchbase.py +++ b/tests/unit_tests/db_engine_specs/test_couchbase.py @@ -33,18 +33,18 @@ def test_epoch_to_dttm() -> None: """ DB Eng Specs (couchbase): Test epoch to dttm """ - from superset.db_engine_specs.couchbasedb import CouchbaseDbEngineSpec + from superset.db_engine_specs.couchbase import CouchbaseEngineSpec - assert CouchbaseDbEngineSpec.epoch_to_dttm() == "MILLIS_TO_STR({col} * 1000)" + assert CouchbaseEngineSpec.epoch_to_dttm() == "MILLIS_TO_STR({col} * 1000)" def test_epoch_ms_to_dttm() -> None: """ DB Eng Specs (couchbase): Test epoch ms to dttm """ - from superset.db_engine_specs.couchbasedb import CouchbaseDbEngineSpec + from superset.db_engine_specs.couchbase import CouchbaseEngineSpec - assert CouchbaseDbEngineSpec.epoch_ms_to_dttm() == "MILLIS_TO_STR({col})" + assert CouchbaseEngineSpec.epoch_ms_to_dttm() == "MILLIS_TO_STR({col})" @pytest.mark.parametrize( @@ -62,7 +62,7 @@ def test_convert_dttm( expected_result: Optional[str], dttm: datetime, # noqa: F811 ) -> None: - from superset.db_engine_specs.couchbasedb import CouchbaseDbEngineSpec as spec + from superset.db_engine_specs.couchbase import CouchbaseEngineSpec as spec assert_convert_dttm(spec, target_type, expected_result, dttm) @@ -88,6 +88,6 @@ def test_get_column_spec( 
generic_type: GenericDataType, is_dttm: bool, ) -> None: - from superset.db_engine_specs.couchbasedb import CouchbaseDbEngineSpec as spec + from superset.db_engine_specs.couchbase import CouchbaseEngineSpec as spec assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm) diff --git a/tests/unit_tests/db_engine_specs/test_databricks.py b/tests/unit_tests/db_engine_specs/test_databricks.py index 77d7313dc753..314b0a06e4c2 100644 --- a/tests/unit_tests/db_engine_specs/test_databricks.py +++ b/tests/unit_tests/db_engine_specs/test_databricks.py @@ -255,12 +255,26 @@ def test_get_prequeries() -> None: assert DatabricksNativeEngineSpec.get_prequeries() == [] assert DatabricksNativeEngineSpec.get_prequeries(schema="test") == [ - "USE SCHEMA test", + "USE SCHEMA `test`", ] assert DatabricksNativeEngineSpec.get_prequeries(catalog="test") == [ - "USE CATALOG test", + "USE CATALOG `test`", ] assert DatabricksNativeEngineSpec.get_prequeries(catalog="foo", schema="bar") == [ - "USE CATALOG foo", - "USE SCHEMA bar", + "USE CATALOG `foo`", + "USE SCHEMA `bar`", + ] + + assert DatabricksNativeEngineSpec.get_prequeries( + catalog="with-hyphen", schema="hyphen-again" + ) == [ + "USE CATALOG `with-hyphen`", + "USE SCHEMA `hyphen-again`", + ] + + assert DatabricksNativeEngineSpec.get_prequeries( + catalog="`escaped-hyphen`", schema="`hyphen-escaped`" + ) == [ + "USE CATALOG `escaped-hyphen`", + "USE SCHEMA `hyphen-escaped`", ] diff --git a/tests/unit_tests/db_engine_specs/test_gsheets.py b/tests/unit_tests/db_engine_specs/test_gsheets.py index fe8230ac95cc..5d2ddb807bbc 100644 --- a/tests/unit_tests/db_engine_specs/test_gsheets.py +++ b/tests/unit_tests/db_engine_specs/test_gsheets.py @@ -247,9 +247,34 @@ def test_validate_parameters_catalog_and_credentials( ) +def test_mask_encrypted_extra() -> None: + """ + Test that the private key is masked when the database is edited. 
+ """ + from superset.db_engine_specs.gsheets import GSheetsEngineSpec + + config = json.dumps( + { + "service_account_info": { + "project_id": "black-sanctum-314419", + "private_key": "SECRET", + }, + } + ) + + assert GSheetsEngineSpec.mask_encrypted_extra(config) == json.dumps( + { + "service_account_info": { + "project_id": "black-sanctum-314419", + "private_key": "XXXXXXXXXX", + }, + } + ) + + def test_unmask_encrypted_extra() -> None: """ - Test that the private key can be reused from the previous ``encrypted_extra``. + Test that the private key can be reused from the previous `encrypted_extra`. """ from superset.db_engine_specs.gsheets import GSheetsEngineSpec @@ -270,17 +295,52 @@ def test_unmask_encrypted_extra() -> None: } ) - assert json.loads(str(GSheetsEngineSpec.unmask_encrypted_extra(old, new))) == { - "service_account_info": { - "project_id": "yellow-unicorn-314419", - "private_key": "SECRET", - }, - } + assert GSheetsEngineSpec.unmask_encrypted_extra(old, new) == json.dumps( + { + "service_account_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "SECRET", + }, + } + ) + + +def test_unmask_encrypted_extra_field_changeed() -> None: + """ + Test that the private key is not reused when the field has changed. + """ + from superset.db_engine_specs.gsheets import GSheetsEngineSpec + + old = json.dumps( + { + "service_account_info": { + "project_id": "black-sanctum-314419", + "private_key": "SECRET", + }, + } + ) + new = json.dumps( + { + "service_account_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "NEW-SECRET", + }, + } + ) + + assert GSheetsEngineSpec.unmask_encrypted_extra(old, new) == json.dumps( + { + "service_account_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "NEW-SECRET", + }, + } + ) def test_unmask_encrypted_extra_when_old_is_none() -> None: """ - Test that a None value works for ``encrypted_extra``. + Test that a `None` value for the old field works for `encrypted_extra`. 
""" from superset.db_engine_specs.gsheets import GSheetsEngineSpec @@ -294,17 +354,19 @@ def test_unmask_encrypted_extra_when_old_is_none() -> None: } ) - assert json.loads(str(GSheetsEngineSpec.unmask_encrypted_extra(old, new))) == { - "service_account_info": { - "project_id": "yellow-unicorn-314419", - "private_key": "XXXXXXXXXX", - }, - } + assert GSheetsEngineSpec.unmask_encrypted_extra(old, new) == json.dumps( + { + "service_account_info": { + "project_id": "yellow-unicorn-314419", + "private_key": "XXXXXXXXXX", + }, + } + ) def test_unmask_encrypted_extra_when_new_is_none() -> None: """ - Test that a None value works for ``encrypted_extra``. + Test that a `None` value for the new field works for `encrypted_extra`. """ from superset.db_engine_specs.gsheets import GSheetsEngineSpec diff --git a/tests/unit_tests/db_engine_specs/test_snowflake.py b/tests/unit_tests/db_engine_specs/test_snowflake.py index 67ac88f69578..73b93b27ef5b 100644 --- a/tests/unit_tests/db_engine_specs/test_snowflake.py +++ b/tests/unit_tests/db_engine_specs/test_snowflake.py @@ -291,3 +291,106 @@ def test_get_default_catalog() -> None: sqlalchemy_uri="snowflake://user:pass@account/database_name/default", ) assert SnowflakeEngineSpec.get_default_catalog(database) == "database_name" + + +def test_mask_encrypted_extra() -> None: + """ + Test that the private keys are masked when the database is edited. + """ + from superset.db_engine_specs.snowflake import SnowflakeEngineSpec + + config = json.dumps( + { + "auth_method": "keypair", + "auth_params": { + "privatekey_body": ( + "-----BEGIN ENCRYPTED PRIVATE KEY-----" + "..." 
+ "-----END ENCRYPTED PRIVATE KEY-----" + ), + "privatekey_pass": "my_password", + }, + } + ) + + assert SnowflakeEngineSpec.mask_encrypted_extra(config) == json.dumps( + { + "auth_method": "keypair", + "auth_params": { + "privatekey_body": "XXXXXXXXXX", + "privatekey_pass": "XXXXXXXXXX", + }, + } + ) + + +def test_mask_encrypted_extra_no_fields() -> None: + """ + Test that the private key is masked when the database is edited. + """ + from superset.db_engine_specs.snowflake import SnowflakeEngineSpec + + config = json.dumps( + { + # this is a fake example and the fields are made up + "auth_method": "token", + "auth_params": { + "jwt": "SECRET", + }, + } + ) + + assert SnowflakeEngineSpec.mask_encrypted_extra(config) == json.dumps( + { + "auth_method": "token", + "auth_params": { + "jwt": "SECRET", + }, + } + ) + + +def test_unmask_encrypted_extra() -> None: + """ + Test that the private keys can be reused from the previous `encrypted_extra`. + """ + from superset.db_engine_specs.snowflake import SnowflakeEngineSpec + + old = json.dumps( + { + "auth_method": "keypair", + "auth_params": { + "privatekey_body": ( + "-----BEGIN ENCRYPTED PRIVATE KEY-----" + "..." + "-----END ENCRYPTED PRIVATE KEY-----" + ), + "privatekey_pass": "my_password", + }, + } + ) + new = json.dumps( + { + "foo": "bar", + "auth_method": "keypair", + "auth_params": { + "privatekey_body": "XXXXXXXXXX", + "privatekey_pass": "XXXXXXXXXX", + }, + } + ) + + assert SnowflakeEngineSpec.unmask_encrypted_extra(old, new) == json.dumps( + { + "foo": "bar", + "auth_method": "keypair", + "auth_params": { + "privatekey_body": ( + "-----BEGIN ENCRYPTED PRIVATE KEY-----" + "..." 
+ "-----END ENCRYPTED PRIVATE KEY-----" + ), + "privatekey_pass": "my_password", + }, + } + ) diff --git a/tests/unit_tests/db_engine_specs/test_trino.py b/tests/unit_tests/db_engine_specs/test_trino.py index a0923e811186..5a32cd05044c 100644 --- a/tests/unit_tests/db_engine_specs/test_trino.py +++ b/tests/unit_tests/db_engine_specs/test_trino.py @@ -15,6 +15,8 @@ # specific language governing permissions and limitations # under the License. # pylint: disable=unused-argument, import-outside-toplevel, protected-access +from __future__ import annotations + import copy from collections import namedtuple from datetime import datetime @@ -23,6 +25,7 @@ import pandas as pd import pytest +from flask import g, has_app_context from pytest_mock import MockerFixture from requests.exceptions import ConnectionError as RequestsConnectionError from sqlalchemy import sql, text, types @@ -435,6 +438,33 @@ def _mock_execute(*args, **kwargs): ) +def test_execute_with_cursor_app_context(app, mocker: MockerFixture): + """Test that `execute_with_cursor` still contains the current app context""" + from superset.db_engine_specs.trino import TrinoEngineSpec + + mock_cursor = mocker.MagicMock() + mock_cursor.query_id = None + + mock_query = mocker.MagicMock() + g.some_value = "some_value" + + def _mock_execute(*args, **kwargs): + assert has_app_context() + assert g.some_value == "some_value" + + with patch.object(TrinoEngineSpec, "execute", side_effect=_mock_execute): + with patch.dict( + "superset.config.DISALLOWED_SQL_FUNCTIONS", + {}, + clear=True, + ): + TrinoEngineSpec.execute_with_cursor( + cursor=mock_cursor, + sql="SELECT 1 FROM foo", + query=mock_query, + ) + + def test_get_columns(mocker: MockerFixture): """Test that ROW columns are not expanded without expand_rows""" from superset.db_engine_specs.trino import TrinoEngineSpec @@ -691,7 +721,15 @@ def test_adjust_engine_params_catalog_only() -> None: assert str(uri) == "trino://user:pass@localhost:8080/new_catalog/new_schema" -def 
test_get_default_catalog() -> None: +@pytest.mark.parametrize( + "sqlalchemy_uri,result", + [ + ("trino://user:pass@localhost:8080/system", "system"), + ("trino://user:pass@localhost:8080/system/default", "system"), + ("trino://trino@localhost:8081", None), + ], +) +def test_get_default_catalog(sqlalchemy_uri: str, result: str | None) -> None: """ Test the ``get_default_catalog`` method. """ @@ -700,15 +738,9 @@ def test_get_default_catalog() -> None: database = Database( database_name="my_db", - sqlalchemy_uri="trino://user:pass@localhost:8080/system", - ) - assert TrinoEngineSpec.get_default_catalog(database) == "system" - - database = Database( - database_name="my_db", - sqlalchemy_uri="trino://user:pass@localhost:8080/system/default", + sqlalchemy_uri=sqlalchemy_uri, ) - assert TrinoEngineSpec.get_default_catalog(database) == "system" + assert TrinoEngineSpec.get_default_catalog(database) == result @patch("superset.db_engine_specs.trino.TrinoEngineSpec.latest_partition") diff --git a/tests/unit_tests/fixtures/assets_configs.py b/tests/unit_tests/fixtures/assets_configs.py index 3fa0c586e23b..0bb7e2711ee7 100644 --- a/tests/unit_tests/fixtures/assets_configs.py +++ b/tests/unit_tests/fixtures/assets_configs.py @@ -175,7 +175,8 @@ "color_scheme": "supersetColors", "label_colors": {}, "color_scheme_domain": [], - "shared_label_colors": {}, + "shared_label_colors": [], + "map_label_colors": {}, "cross_filters_enabled": False, }, "version": "1.0.0", @@ -251,7 +252,8 @@ "color_scheme": "supersetColors", "label_colors": {}, "color_scheme_domain": [], - "shared_label_colors": {}, + "shared_label_colors": [], + "map_label_colors": {}, }, "version": "1.0.0", }, diff --git a/tests/unit_tests/jinja_context_test.py b/tests/unit_tests/jinja_context_test.py index b2404fba7683..8bab200923dc 100644 --- a/tests/unit_tests/jinja_context_test.py +++ b/tests/unit_tests/jinja_context_test.py @@ -418,10 +418,6 @@ def test_dataset_macro(mocker: MockerFixture) -> None: 
"superset.connectors.sqla.models.security_manager.get_guest_rls_filters", return_value=[], ) - mocker.patch( - "superset.models.helpers.security_manager.get_rls_filters", - return_value=[], - ) columns = [ TableColumn(column_name="ds", is_dttm=1, type="TIMESTAMP"), @@ -470,53 +466,30 @@ def test_dataset_macro(mocker: MockerFixture) -> None: "superset.connectors.sqla.models.security_manager.get_guest_rls_filters", return_value=[], ) - mocker.patch( - "superset.models.helpers.security_manager.get_guest_rls_filters", - return_value=[], - ) + + space = " " assert ( dataset_macro(1) - == """( -SELECT - ds AS ds, - num_boys AS num_boys, - revenue AS revenue, - expenses AS expenses, - revenue - expenses AS profit + == f"""( +SELECT ds AS ds, num_boys AS num_boys, revenue AS revenue, expenses AS expenses, revenue-expenses AS profit{space} FROM my_schema.old_dataset ) AS dataset_1""" ) assert ( dataset_macro(1, include_metrics=True) - == """( -SELECT - ds AS ds, - num_boys AS num_boys, - revenue AS revenue, - expenses AS expenses, - revenue - expenses AS profit, - COUNT(*) AS cnt -FROM my_schema.old_dataset -GROUP BY - ds, - num_boys, - revenue, - expenses, - revenue - expenses + == f"""( +SELECT ds AS ds, num_boys AS num_boys, revenue AS revenue, expenses AS expenses, revenue-expenses AS profit, COUNT(*) AS cnt{space} +FROM my_schema.old_dataset GROUP BY ds, num_boys, revenue, expenses, revenue-expenses ) AS dataset_1""" ) assert ( dataset_macro(1, include_metrics=True, columns=["ds"]) - == """( -SELECT - ds AS ds, - COUNT(*) AS cnt -FROM my_schema.old_dataset -GROUP BY - ds + == f"""( +SELECT ds AS ds, COUNT(*) AS cnt{space} +FROM my_schema.old_dataset GROUP BY ds ) AS dataset_1""" ) diff --git a/tests/unit_tests/migrations/shared/catalogs_test.py b/tests/unit_tests/migrations/shared/catalogs_test.py index 78ef5222171d..db06b75a8de9 100644 --- a/tests/unit_tests/migrations/shared/catalogs_test.py +++ b/tests/unit_tests/migrations/shared/catalogs_test.py @@ -18,21 +18,23 
@@ from pytest_mock import MockerFixture from sqlalchemy.orm.session import Session +from superset import app from superset.migrations.shared.catalogs import ( downgrade_catalog_perms, upgrade_catalog_perms, ) -from superset.migrations.shared.security_converge import ViewMenu +from superset.migrations.shared.security_converge import ( + Permission, + PermissionView, + ViewMenu, +) def test_upgrade_catalog_perms(mocker: MockerFixture, session: Session) -> None: """ Test the `upgrade_catalog_perms` function. - The function is called when catalogs are introduced into a new DB engine spec. When - that happens, we need to update the `catalog` attribute so it points to the default - catalog, instead of being `NULL`. We also need to update `schema_perms` to include - the default catalog. + The function is called when catalogs are introduced into a new DB engine spec. """ from superset.connectors.sqla.models import SqlaTable from superset.models.core import Database @@ -51,6 +53,11 @@ def test_upgrade_catalog_perms(mocker: MockerFixture, session: Session) -> None: "get_all_schema_names", return_value=["public", "information_schema"], ) + mocker.patch.object( + Database, + "get_all_catalog_names", + return_value=["db", "other_catalog"], + ) database = Database( database_name="my_db", @@ -61,6 +68,7 @@ def test_upgrade_catalog_perms(mocker: MockerFixture, session: Session) -> None: database=database, catalog=None, schema="public", + catalog_perm=None, schema_perm="[my_db].[public]", ) session.add(dataset) @@ -70,6 +78,8 @@ def test_upgrade_catalog_perms(mocker: MockerFixture, session: Session) -> None: slice_name="my_chart", datasource_type="table", datasource_id=dataset.id, + catalog_perm=None, + schema_perm="[my_db].[public]", ) query = Query( client_id="foo", @@ -102,15 +112,43 @@ def test_upgrade_catalog_perms(mocker: MockerFixture, session: Session) -> None: assert saved_query.catalog is None assert tab_state.catalog is None assert table_schema.catalog is None + assert 
dataset.catalog_perm is None assert dataset.schema_perm == "[my_db].[public]" + assert chart.catalog_perm is None assert chart.schema_perm == "[my_db].[public]" - assert session.query(ViewMenu.name).all() == [ - ("[my_db].(id:1)",), - ("[my_db].[my_table](id:1)",), - ("[my_db].[public]",), + assert ( + session.query(ViewMenu.name, Permission.name) + .join(PermissionView, ViewMenu.id == PermissionView.view_menu_id) + .join(Permission, PermissionView.permission_id == Permission.id) + .all() + ) == [ + ("[my_db].(id:1)", "database_access"), + ("[my_db].[my_table](id:1)", "datasource_access"), + ("[my_db].[public]", "schema_access"), ] upgrade_catalog_perms() + session.commit() + + # add dataset/chart in new catalog + new_dataset = SqlaTable( + table_name="my_table", + database=database, + catalog="other_catalog", + schema="public", + schema_perm="[my_db].[other_catalog].[public]", + catalog_perm="[my_db].[other_catalog]", + ) + session.add(new_dataset) + session.commit() + + new_chart = Slice( + slice_name="my_chart", + datasource_type="table", + datasource_id=new_dataset.id, + ) + session.add(new_chart) + session.commit() # after migration assert dataset.catalog == "db" @@ -118,16 +156,29 @@ def test_upgrade_catalog_perms(mocker: MockerFixture, session: Session) -> None: assert saved_query.catalog == "db" assert tab_state.catalog == "db" assert table_schema.catalog == "db" + assert dataset.catalog_perm == "[my_db].[db]" assert dataset.schema_perm == "[my_db].[db].[public]" + assert chart.catalog_perm == "[my_db].[db]" assert chart.schema_perm == "[my_db].[db].[public]" - assert session.query(ViewMenu.name).all() == [ - ("[my_db].(id:1)",), - ("[my_db].[my_table](id:1)",), - ("[my_db].[db].[public]",), - ("[my_db].[db]",), + assert ( + session.query(ViewMenu.name, Permission.name) + .join(PermissionView, ViewMenu.id == PermissionView.view_menu_id) + .join(Permission, PermissionView.permission_id == Permission.id) + .all() + ) == [ + ("[my_db].(id:1)", 
"database_access"), + ("[my_db].[my_table](id:1)", "datasource_access"), + ("[my_db].[db].[public]", "schema_access"), + ("[my_db].[db]", "catalog_access"), + ("[my_db].[other_catalog]", "catalog_access"), + ("[my_db].[other_catalog].[public]", "schema_access"), + ("[my_db].[other_catalog].[information_schema]", "schema_access"), + ("[my_db].[my_table](id:2)", "datasource_access"), ] + # do a downgrade downgrade_catalog_perms() + session.commit() # revert assert dataset.catalog is None @@ -135,15 +186,25 @@ def test_upgrade_catalog_perms(mocker: MockerFixture, session: Session) -> None: assert saved_query.catalog is None assert tab_state.catalog is None assert table_schema.catalog is None + assert dataset.catalog_perm is None assert dataset.schema_perm == "[my_db].[public]" + assert chart.catalog_perm is None assert chart.schema_perm == "[my_db].[public]" - assert session.query(ViewMenu.name).all() == [ - ("[my_db].(id:1)",), - ("[my_db].[my_table](id:1)",), - ("[my_db].[public]",), - ("[my_db].[db]",), + assert ( + session.query(ViewMenu.name, Permission.name) + .join(PermissionView, ViewMenu.id == PermissionView.view_menu_id) + .join(Permission, PermissionView.permission_id == Permission.id) + .all() + ) == [ + ("[my_db].(id:1)", "database_access"), + ("[my_db].[my_table](id:1)", "datasource_access"), + ("[my_db].[public]", "schema_access"), ] + # make sure new dataset/chart were deleted + assert session.query(SqlaTable).all() == [dataset] + assert session.query(Slice).all() == [chart] + def test_upgrade_catalog_perms_graceful( mocker: MockerFixture, @@ -236,6 +297,7 @@ def test_upgrade_catalog_perms_graceful( ] upgrade_catalog_perms() + session.commit() # after migration assert dataset.catalog == "db" @@ -253,6 +315,256 @@ def test_upgrade_catalog_perms_graceful( ] downgrade_catalog_perms() + session.commit() + + # revert + assert dataset.catalog is None + assert query.catalog is None + assert saved_query.catalog is None + assert tab_state.catalog is None + 
assert table_schema.catalog is None + assert dataset.schema_perm == "[my_db].[public]" + assert chart.schema_perm == "[my_db].[public]" + assert session.query(ViewMenu.name).all() == [ + ("[my_db].(id:1)",), + ("[my_db].[my_table](id:1)",), + ("[my_db].[public]",), + ] + + +def test_upgrade_catalog_perms_oauth_connection( + mocker: MockerFixture, + session: Session, +) -> None: + """ + Test the `upgrade_catalog_perms` function when the DB is set up using OAuth. + + During the migration we try to connect to the analytical database to get the list of + schemas. This step should be skipped if the database is set up using OAuth and not + raise an exception. + """ + from superset.connectors.sqla.models import SqlaTable + from superset.models.core import Database + from superset.models.slice import Slice + from superset.models.sql_lab import Query, SavedQuery, TableSchema, TabState + + engine = session.get_bind() + Database.metadata.create_all(engine) + + mocker.patch("superset.migrations.shared.catalogs.op") + db = mocker.patch("superset.migrations.shared.catalogs.db") + db.Session.return_value = session + add_non_default_catalogs = mocker.patch( + "superset.migrations.shared.catalogs.add_non_default_catalogs" + ) + mocker.patch("superset.migrations.shared.catalogs.op", session) + + database = Database( + database_name="my_db", + sqlalchemy_uri="bigquery://my-test-project", + encrypted_extra='{"oauth2_client_info": "fake_mock_oauth_conn"}', + ) + dataset = SqlaTable( + table_name="my_table", + database=database, + catalog=None, + schema="public", + schema_perm="[my_db].[public]", + ) + session.add(dataset) + session.commit() + + chart = Slice( + slice_name="my_chart", + datasource_type="table", + datasource_id=dataset.id, + ) + query = Query( + client_id="foo", + database=database, + catalog=None, + schema="public", + ) + saved_query = SavedQuery( + database=database, + sql="SELECT * FROM public.t", + catalog=None, + schema="public", + ) + tab_state = TabState( + 
database=database, + catalog=None, + schema="public", + ) + table_schema = TableSchema( + database=database, + catalog=None, + schema="public", + ) + session.add_all([chart, query, saved_query, tab_state, table_schema]) + session.commit() + + # before migration + assert dataset.catalog is None + assert query.catalog is None + assert saved_query.catalog is None + assert tab_state.catalog is None + assert table_schema.catalog is None + assert dataset.schema_perm == "[my_db].[public]" + assert chart.schema_perm == "[my_db].[public]" + assert session.query(ViewMenu.name).all() == [ + ("[my_db].(id:1)",), + ("[my_db].[my_table](id:1)",), + ("[my_db].[public]",), + ] + + upgrade_catalog_perms() + session.commit() + + # after migration + assert dataset.catalog == "my-test-project" + assert query.catalog == "my-test-project" + assert saved_query.catalog == "my-test-project" + assert tab_state.catalog == "my-test-project" + assert table_schema.catalog == "my-test-project" + assert dataset.schema_perm == "[my_db].[my-test-project].[public]" + assert chart.schema_perm == "[my_db].[my-test-project].[public]" + assert session.query(ViewMenu.name).all() == [ + ("[my_db].(id:1)",), + ("[my_db].[my_table](id:1)",), + ("[my_db].[my-test-project].[public]",), + ("[my_db].[my-test-project]",), + ] + + add_non_default_catalogs.assert_not_called() + + downgrade_catalog_perms() + session.commit() + + # revert + assert dataset.catalog is None + assert query.catalog is None + assert saved_query.catalog is None + assert tab_state.catalog is None + assert table_schema.catalog is None + assert dataset.schema_perm == "[my_db].[public]" + assert chart.schema_perm == "[my_db].[public]" + assert session.query(ViewMenu.name).all() == [ + ("[my_db].(id:1)",), + ("[my_db].[my_table](id:1)",), + ("[my_db].[public]",), + ] + + +def test_upgrade_catalog_perms_simplified_migration( + mocker: MockerFixture, + session: Session, +) -> None: + """ + Test the `upgrade_catalog_perms` function when the 
``CATALOGS_SIMPLIFIED_MIGRATION`` + config is set to ``True``. + + This should only update existing permissions + create a new permission + for the default catalog. + """ + from superset.connectors.sqla.models import SqlaTable + from superset.models.core import Database + from superset.models.slice import Slice + from superset.models.sql_lab import Query, SavedQuery, TableSchema, TabState + + engine = session.get_bind() + Database.metadata.create_all(engine) + + mocker.patch("superset.migrations.shared.catalogs.op") + db = mocker.patch("superset.migrations.shared.catalogs.db") + db.Session.return_value = session + add_non_default_catalogs = mocker.patch( + "superset.migrations.shared.catalogs.add_non_default_catalogs" + ) + mocker.patch("superset.migrations.shared.catalogs.op", session) + + database = Database( + database_name="my_db", + sqlalchemy_uri="bigquery://my-test-project", + ) + dataset = SqlaTable( + table_name="my_table", + database=database, + catalog=None, + schema="public", + schema_perm="[my_db].[public]", + ) + session.add(dataset) + session.commit() + + chart = Slice( + slice_name="my_chart", + datasource_type="table", + datasource_id=dataset.id, + ) + query = Query( + client_id="foo", + database=database, + catalog=None, + schema="public", + ) + saved_query = SavedQuery( + database=database, + sql="SELECT * FROM public.t", + catalog=None, + schema="public", + ) + tab_state = TabState( + database=database, + catalog=None, + schema="public", + ) + table_schema = TableSchema( + database=database, + catalog=None, + schema="public", + ) + session.add_all([chart, query, saved_query, tab_state, table_schema]) + session.commit() + + # before migration + assert dataset.catalog is None + assert query.catalog is None + assert saved_query.catalog is None + assert tab_state.catalog is None + assert table_schema.catalog is None + assert dataset.schema_perm == "[my_db].[public]" + assert chart.schema_perm == "[my_db].[public]" + assert 
session.query(ViewMenu.name).all() == [ + ("[my_db].(id:1)",), + ("[my_db].[my_table](id:1)",), + ("[my_db].[public]",), + ] + + with app.test_request_context(): + app.config["CATALOGS_SIMPLIFIED_MIGRATION"] = True + upgrade_catalog_perms() + session.commit() + + # after migration + assert dataset.catalog == "my-test-project" + assert query.catalog == "my-test-project" + assert saved_query.catalog == "my-test-project" + assert tab_state.catalog == "my-test-project" + assert table_schema.catalog == "my-test-project" + assert dataset.schema_perm == "[my_db].[my-test-project].[public]" + assert chart.schema_perm == "[my_db].[my-test-project].[public]" + assert session.query(ViewMenu.name).all() == [ + ("[my_db].(id:1)",), + ("[my_db].[my_table](id:1)",), + ("[my_db].[my-test-project].[public]",), + ("[my_db].[my-test-project]",), + ] + + add_non_default_catalogs.assert_not_called() + + downgrade_catalog_perms() + session.commit() # revert assert dataset.catalog is None @@ -266,5 +578,4 @@ def test_upgrade_catalog_perms_graceful( ("[my_db].(id:1)",), ("[my_db].[my_table](id:1)",), ("[my_db].[public]",), - ("[my_db].[db]",), ] diff --git a/tests/unit_tests/models/helpers_test.py b/tests/unit_tests/models/helpers_test.py index 009cff0adf4c..a29f4b13b837 100644 --- a/tests/unit_tests/models/helpers_test.py +++ b/tests/unit_tests/models/helpers_test.py @@ -21,10 +21,11 @@ from contextlib import contextmanager from typing import TYPE_CHECKING +from unittest.mock import patch import pytest from pytest_mock import MockerFixture -from sqlalchemy import create_engine +from sqlalchemy import create_engine, text from sqlalchemy.orm.session import Session from sqlalchemy.pool import StaticPool @@ -85,6 +86,58 @@ def test_values_for_column(database: Database) -> None: assert table.values_for_column("a") == [1, None] +def test_values_for_column_with_rls(database: Database) -> None: + """ + Test the `values_for_column` method with RLS enabled. 
+ """ + from sqlalchemy.sql.elements import TextClause + + from superset.connectors.sqla.models import SqlaTable, TableColumn + + table = SqlaTable( + database=database, + schema=None, + table_name="t", + columns=[ + TableColumn(column_name="a"), + ], + ) + with patch.object( + table, + "get_sqla_row_level_filters", + return_value=[ + TextClause("a = 1"), + ], + ): + assert table.values_for_column("a") == [1] + + +def test_values_for_column_with_rls_no_values(database: Database) -> None: + """ + Test the `values_for_column` method with RLS enabled and no values. + """ + from sqlalchemy.sql.elements import TextClause + + from superset.connectors.sqla.models import SqlaTable, TableColumn + + table = SqlaTable( + database=database, + schema=None, + table_name="t", + columns=[ + TableColumn(column_name="a"), + ], + ) + with patch.object( + table, + "get_sqla_row_level_filters", + return_value=[ + TextClause("a = 2"), + ], + ): + assert table.values_for_column("a") == [] + + def test_values_for_column_calculated( mocker: MockerFixture, database: Database, @@ -148,10 +201,12 @@ def test_values_for_column_double_percents( ) # make sure final query has single percents with database.get_sqla_engine() as engine: - pd.read_sql_query.assert_called_with( - sql=( - "SELECT DISTINCT CASE WHEN b LIKE 'A%' THEN 'yes' ELSE 'nope' END " - "AS column_values \nFROM t\n LIMIT 10000 OFFSET 0" - ), - con=engine, + expected_sql = text( + "SELECT DISTINCT CASE WHEN b LIKE 'A%' THEN 'yes' ELSE 'nope' END " + "AS column_values \nFROM t\n LIMIT 10000 OFFSET 0" ) + called_sql = pd.read_sql_query.call_args.kwargs["sql"] + called_conn = pd.read_sql_query.call_args.kwargs["con"] + + assert called_sql.compare(expected_sql) is True + assert called_conn == engine diff --git a/tests/unit_tests/pandas_postprocessing/test_histogram.py b/tests/unit_tests/pandas_postprocessing/test_histogram.py index 6ea4c34f57f6..73370c8e62fd 100644 --- a/tests/unit_tests/pandas_postprocessing/test_histogram.py +++ 
b/tests/unit_tests/pandas_postprocessing/test_histogram.py @@ -117,28 +117,20 @@ def test_histogram_with_groupby_and_cumulative_and_normalize(): def test_histogram_with_non_numeric_column(): try: - histogram(data, "b", ["group"], bins) + histogram(data, "group", None, bins) except ValueError as e: - assert str(e) == "The column 'b' must be numeric." + assert str(e) == "Column 'group' contains non-numeric values" -# test histogram ignore null values -def test_histogram_ignore_null_values(): - data_with_null = DataFrame( +def test_histogram_with_some_non_numeric_values(): + data_with_non_numeric = DataFrame( { "group": ["A", "A", "B", "B", "A", "A", "B", "B", "A", "A"], - "a": [1, 2, 3, 4, 5, 6, 7, 8, 9, None], - "b": [1, 2, 3, 4, 5, 6, 7, 8, 9, None], + "a": [1, 2, 3, 4, 5, 6, 7, 8, 9, "10"], + "b": [1, 2, 3, 4, 5, 6, 7, 8, 9, "10"], } ) - result = histogram(data_with_null, "a", ["group"], bins) - assert result.shape == (2, bins + 1) - assert result.columns.tolist() == [ - "group", - "1 - 2", - "2 - 4", - "4 - 5", - "5 - 7", - "7 - 9", - ] - assert result.values.tolist() == [["A", 2, 0, 1, 1, 1], ["B", 0, 2, 0, 1, 1]] + try: + histogram(data_with_non_numeric, "a", ["group"], bins) + except ValueError as e: + assert str(e) == "Column 'group' contains non-numeric values" diff --git a/tests/unit_tests/reports/notifications/email_tests.py b/tests/unit_tests/reports/notifications/email_tests.py index 697a9bac40c8..ab3fa8c5afc6 100644 --- a/tests/unit_tests/reports/notifications/email_tests.py +++ b/tests/unit_tests/reports/notifications/email_tests.py @@ -41,6 +41,7 @@ def test_render_description_with_html() -> None: "notification_source": None, "chart_id": None, "dashboard_id": None, + "slack_channels": None, }, ) email_body = ( diff --git a/tests/unit_tests/reports/notifications/slack_tests.py b/tests/unit_tests/reports/notifications/slack_tests.py index 83aa0d2b4d62..b7f996631d1a 100644 --- a/tests/unit_tests/reports/notifications/slack_tests.py +++ 
b/tests/unit_tests/reports/notifications/slack_tests.py @@ -19,12 +19,27 @@ from unittest.mock import MagicMock, patch import pandas as pd +import pytest from slack_sdk.errors import SlackApiError from superset.reports.notifications.slackv2 import SlackV2Notification +from superset.utils.core import HeaderDataType + + +@pytest.fixture +def mock_header_data() -> HeaderDataType: + return { + "notification_format": "PNG", + "notification_type": "Alert", + "owners": [1], + "notification_source": None, + "chart_id": None, + "dashboard_id": None, + "slack_channels": ["some_channel"], + } -def test_get_channel_with_multi_recipients() -> None: +def test_get_channel_with_multi_recipients(mock_header_data) -> None: """ Test the _get_channel function to ensure it will return a string with recipients separated by commas without interstitial spacing @@ -35,14 +50,7 @@ def test_get_channel_with_multi_recipients() -> None: content = NotificationContent( name="test alert", - header_data={ - "notification_format": "PNG", - "notification_type": "Alert", - "owners": [1], - "notification_source": None, - "chart_id": None, - "dashboard_id": None, - }, + header_data=mock_header_data, embedded_data=pd.DataFrame( { "A": [1, 2, 3], @@ -67,7 +75,7 @@ def test_get_channel_with_multi_recipients() -> None: # Test if the recipient configuration JSON is valid when using a SlackV2 recipient type -def test_valid_recipient_config_json_slackv2() -> None: +def test_valid_recipient_config_json_slackv2(mock_header_data) -> None: """ Test if the recipient configuration JSON is valid when using a SlackV2 recipient type """ @@ -77,14 +85,7 @@ def test_valid_recipient_config_json_slackv2() -> None: content = NotificationContent( name="test alert", - header_data={ - "notification_format": "PNG", - "notification_type": "Alert", - "owners": [1], - "notification_source": None, - "chart_id": None, - "dashboard_id": None, - }, + header_data=mock_header_data, embedded_data=pd.DataFrame( { "A": [1, 2, 3], @@ 
-109,7 +110,7 @@ def test_valid_recipient_config_json_slackv2() -> None: # Ensure _get_inline_files function returns the correct tuple when content has screenshots -def test_get_inline_files_with_screenshots() -> None: +def test_get_inline_files_with_screenshots(mock_header_data) -> None: """ Test the _get_inline_files function to ensure it will return the correct tuple when content has screenshots @@ -120,14 +121,7 @@ def test_get_inline_files_with_screenshots() -> None: content = NotificationContent( name="test alert", - header_data={ - "notification_format": "PNG", - "notification_type": "Alert", - "owners": [1], - "notification_source": None, - "chart_id": None, - "dashboard_id": None, - }, + header_data=mock_header_data, embedded_data=pd.DataFrame( { "A": [1, 2, 3], @@ -153,7 +147,7 @@ def test_get_inline_files_with_screenshots() -> None: # Ensure _get_inline_files function returns None when content has no screenshots or csv -def test_get_inline_files_with_no_screenshots_or_csv() -> None: +def test_get_inline_files_with_no_screenshots_or_csv(mock_header_data) -> None: """ Test the _get_inline_files function to ensure it will return None when content has no screenshots or csv @@ -164,14 +158,7 @@ def test_get_inline_files_with_no_screenshots_or_csv() -> None: content = NotificationContent( name="test alert", - header_data={ - "notification_format": "PNG", - "notification_type": "Alert", - "owners": [1], - "notification_source": None, - "chart_id": None, - "dashboard_id": None, - }, + header_data=mock_header_data, embedded_data=pd.DataFrame( { "A": [1, 2, 3], @@ -201,6 +188,7 @@ def test_send_slackv2( slack_client_mock: MagicMock, logger_mock: MagicMock, flask_global_mock: MagicMock, + mock_header_data, ) -> None: # `superset.models.helpers`, a dependency of following imports, # requires app context @@ -212,14 +200,7 @@ def test_send_slackv2( slack_client_mock.return_value.chat_postMessage.return_value = {"ok": True} content = NotificationContent( name="test 
alert", - header_data={ - "notification_format": "PNG", - "notification_type": "Alert", - "owners": [1], - "notification_source": None, - "chart_id": None, - "dashboard_id": None, - }, + header_data=mock_header_data, embedded_data=pd.DataFrame( { "A": [1, 2, 3], @@ -269,6 +250,7 @@ def test_send_slack( slack_client_mock_util: MagicMock, logger_mock: MagicMock, flask_global_mock: MagicMock, + mock_header_data, ) -> None: # `superset.models.helpers`, a dependency of following imports, # requires app context @@ -285,14 +267,7 @@ def test_send_slack( content = NotificationContent( name="test alert", - header_data={ - "notification_format": "PNG", - "notification_type": "Alert", - "owners": [1], - "notification_source": None, - "chart_id": None, - "dashboard_id": None, - }, + header_data=mock_header_data, embedded_data=pd.DataFrame( { "A": [1, 2, 3], @@ -343,6 +318,7 @@ def test_send_slack_no_feature_flag( slack_client_mock_util: MagicMock, logger_mock: MagicMock, flask_global_mock: MagicMock, + mock_header_data, ) -> None: # `superset.models.helpers`, a dependency of following imports, # requires app context @@ -360,14 +336,7 @@ def test_send_slack_no_feature_flag( content = NotificationContent( name="test alert", - header_data={ - "notification_format": "PNG", - "notification_type": "Alert", - "owners": [1], - "notification_source": None, - "chart_id": None, - "dashboard_id": None, - }, + header_data=mock_header_data, embedded_data=pd.DataFrame( { "A": [1, 2, 3], diff --git a/tests/unit_tests/security/manager_test.py b/tests/unit_tests/security/manager_test.py index 924e2cbf28ca..40d89ba356f8 100644 --- a/tests/unit_tests/security/manager_test.py +++ b/tests/unit_tests/security/manager_test.py @@ -17,6 +17,8 @@ # pylint: disable=invalid-name, unused-argument, redefined-outer-name +import json + import pytest from flask_appbuilder.security.sqla.models import Role, User from pytest_mock import MockerFixture @@ -366,6 +368,7 @@ def 
test_raise_for_access_query_default_schema( database.get_default_catalog.return_value = None database.get_default_schema_for_query.return_value = "public" query = mocker.MagicMock() + query.catalog = None query.database = database query.sql = "SELECT * FROM ab_user" @@ -421,6 +424,7 @@ def test_raise_for_access_jinja_sql(mocker: MockerFixture, app_context: None) -> database.get_default_catalog.return_value = None database.get_default_schema_for_query.return_value = "public" query = mocker.MagicMock() + query.catalog = None query.database = database query.sql = "SELECT * FROM {% if True %}ab_user{% endif %} WHERE 1=1" @@ -434,7 +438,7 @@ def test_raise_for_access_jinja_sql(mocker: MockerFixture, app_context: None) -> viz=None, ) - get_table_access_error_object.assert_called_with({Table("ab_user", "public")}) + get_table_access_error_object.assert_called_with({Table("ab_user", "public", None)}) def test_raise_for_access_chart_for_datasource_permission( @@ -689,6 +693,340 @@ def test_query_context_modified_mixed_chart(mocker: MockerFixture) -> None: assert not query_context_modified(query_context) +def test_query_context_modified_sankey_tampered(mocker: MockerFixture) -> None: + """ + Test the `query_context_modified` function for a sankey chart request. 
+ """ + query_context = mocker.MagicMock() + query_context.queries = [ + QueryObject( + apply_fetch_values_predicate=False, + columns=["bot_id", "channel_id"], + extras={"having": "", "where": ""}, + filter=[ + { + "col": "bot_profile__updated", + "op": "TEMPORAL_RANGE", + "val": "No filter", + } + ], + from_dttm=None, + granularity=None, + inner_from_dttm=None, + inner_to_dttm=None, + is_rowcount=False, + is_timeseries=False, + metrics=["count"], + order_desc=True, + orderby=[], + row_limit=10000, + row_offset=0, + series_columns=[], + series_limit=0, + series_limit_metric=None, + time_shift=None, + to_dttm=None, + ), + ] + query_context.form_data = { + "datasource": "12__table", + "viz_type": "sankey_v2", + "slice_id": 97, + "url_params": {}, + "source": "bot_id", + "target": "channel_id", + "metric": "count", + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "No filter", + "expressionType": "SIMPLE", + "operator": "TEMPORAL_RANGE", + "subject": "bot_profile__updated", + } + ], + "row_limit": 10000, + "color_scheme": "supersetColors", + "dashboards": [11], + "extra_form_data": {}, + "label_colors": {}, + "shared_label_colors": [], + "map_label_colors": {}, + "extra_filters": [], + "dashboardId": 11, + "force": False, + "result_format": "json", + "result_type": "full", + } + query_context.slice_.id = 97 + query_context.slice_.params_dict = { + "datasource": "12__table", + "viz_type": "sankey_v2", + "slice_id": 97, + "source": "bot_id", + "target": "channel_id", + "metric": "count", + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "No filter", + "expressionType": "SIMPLE", + "operator": "TEMPORAL_RANGE", + "subject": "bot_profile__updated", + } + ], + "row_limit": 10000, + "color_scheme": "supersetColors", + "extra_form_data": {}, + "dashboards": [11], + } + query_context.slice_.query_context = json.dumps( + { + "datasource": {"id": 12, "type": "table"}, + "force": False, + "queries": [ + { + "filters": [ + { + "col": 
"bot_profile__updated", + "op": "TEMPORAL_RANGE", + "val": "No filter", + } + ], + "extras": {"having": "", "where": ""}, + "applied_time_extras": {}, + "columns": [], + "metrics": ["count"], + "annotation_layers": [], + "row_limit": 10000, + "series_limit": 0, + "order_desc": True, + "url_params": {}, + "custom_params": {}, + "custom_form_data": {}, + "groupby": ["bot_id", "channel_id"], + } + ], + "form_data": { + "datasource": "12__table", + "viz_type": "sankey_v2", + "slice_id": 97, + "source": "bot_id", + "target": "channel_id", + "metric": "count", + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "No filter", + "expressionType": "SIMPLE", + "operator": "TEMPORAL_RANGE", + "subject": "bot_profile__updated", + } + ], + "row_limit": 10000, + "color_scheme": "supersetColors", + "extra_form_data": {}, + "dashboards": [11], + "force": False, + "result_format": "json", + "result_type": "full", + }, + "result_format": "json", + "result_type": "full", + } + ) + assert not query_context_modified(query_context) + + +def test_query_context_modified_orderby(mocker: MockerFixture) -> None: + """ + Test the `query_context_modified` function when the ORDER BY is modified. 
+ """ + tampered_groupby: AdhocMetric = { + "aggregate": "", + "column": None, + "expressionType": "SQL", + "hasCustomLabel": False, + "label": "random()", + "sqlExpression": "random()", + } + + query_context = mocker.MagicMock() + query_context.queries = [ + QueryObject( + apply_fetch_values_predicate=False, + columns=["gender"], + extras={"having": "", "where": ""}, + filter=[{"col": "ds", "op": "TEMPORAL_RANGE", "val": "No filter"}], + from_dttm=None, + granularity=None, + inner_from_dttm=None, + inner_to_dttm=None, + is_rowcount=False, + is_timeseries=False, + metrics=["count"], + order_desc=True, + orderby=[(tampered_groupby, False)], + row_limit=1000, + row_offset=0, + series_columns=[], + series_limit=0, + series_limit_metric=tampered_groupby, + time_shift=None, + to_dttm=None, + ), + ] + query_context.form_data = { + "datasource": "2__table", + "viz_type": "table", + "slice_id": 101, + "url_params": { + "datasource_id": "2", + "datasource_type": "table", + "save_action": "saveas", + "slice_id": "101", + }, + "query_mode": "aggregate", + "groupby": ["gender"], + "time_grain_sqla": "P1D", + "temporal_columns_lookup": {"ds": True}, + "metrics": ["count"], + "all_columns": [], + "percent_metrics": [], + "adhoc_filters": [ + { + "clause": "WHERE", + "comparator": "No filter", + "expressionType": "SIMPLE", + "operator": "TEMPORAL_RANGE", + "subject": "ds", + } + ], + "timeseries_limit_metric": { + "aggregate": None, + "column": None, + "datasourceWarning": False, + "expressionType": "SQL", + "hasCustomLabel": False, + "label": "random()", + "optionName": "metric_3kwbghgzkv9_wz84h9j1p5d", + "sqlExpression": "random()", + }, + "order_by_cols": [], + "row_limit": 1000, + "server_page_length": 10, + "order_desc": True, + "table_timestamp_format": "smart_date", + "allow_render_html": True, + "show_cell_bars": True, + "color_pn": True, + "comparison_color_scheme": "Green", + "comparison_type": "values", + "extra_form_data": {}, + "force": False, + "result_format": 
"json", + "result_type": "full", + } + query_context.slice_.id = 101 + query_context.slice_.params_dict = { + "datasource": "2__table", + "viz_type": "table", + "query_mode": "aggregate", + "groupby": ["gender"], + "time_grain_sqla": "P1D", + "temporal_columns_lookup": {"ds": True}, + "metrics": ["count"], + "all_columns": [], + "percent_metrics": [], + "adhoc_filters": [ + { + "clause": "WHERE", + "subject": "ds", + "operator": "TEMPORAL_RANGE", + "comparator": "No filter", + "expressionType": "SIMPLE", + } + ], + "order_by_cols": [], + "row_limit": 1000, + "server_page_length": 10, + "order_desc": True, + "table_timestamp_format": "smart_date", + "allow_render_html": True, + "show_cell_bars": True, + "color_pn": True, + "comparison_color_scheme": "Green", + "comparison_type": "values", + "extra_form_data": {}, + "dashboards": [], + } + query_context.slice_.query_context = json.dumps( + { + "datasource": {"id": 2, "type": "table"}, + "force": False, + "queries": [ + { + "filters": [ + {"col": "ds", "op": "TEMPORAL_RANGE", "val": "No filter"} + ], + "extras": {"having": "", "where": ""}, + "applied_time_extras": {}, + "columns": ["gender"], + "metrics": ["count"], + "orderby": [], + "annotation_layers": [], + "row_limit": 1000, + "series_limit": 0, + "order_desc": True, + "url_params": {}, + "custom_params": {}, + "custom_form_data": {}, + "post_processing": [], + "time_offsets": [], + } + ], + "form_data": { + "datasource": "2__table", + "viz_type": "table", + "query_mode": "aggregate", + "groupby": ["gender"], + "time_grain_sqla": "P1D", + "temporal_columns_lookup": {"ds": True}, + "metrics": ["count"], + "all_columns": [], + "percent_metrics": [], + "adhoc_filters": [ + { + "clause": "WHERE", + "subject": "ds", + "operator": "TEMPORAL_RANGE", + "comparator": "No filter", + "expressionType": "SIMPLE", + } + ], + "order_by_cols": [], + "row_limit": 1000, + "server_page_length": 10, + "order_desc": True, + "table_timestamp_format": "smart_date", + 
"allow_render_html": True, + "show_cell_bars": True, + "color_pn": True, + "comparison_color_scheme": "Green", + "comparison_type": "values", + "extra_form_data": {}, + "dashboards": [], + "force": False, + "result_format": "json", + "result_type": "full", + }, + "result_format": "json", + "result_type": "full", + } + ) + assert query_context_modified(query_context) + + def test_get_catalog_perm() -> None: """ Test the `get_catalog_perm` method. @@ -736,6 +1074,7 @@ def test_raise_for_access_catalog( database.get_default_catalog.return_value = "db1" database.get_default_schema_for_query.return_value = "public" query = mocker.MagicMock() + query.catalog = "db1" query.database = database query.sql = "SELECT * FROM ab_user" @@ -776,7 +1115,8 @@ def test_get_datasources_accessible_by_user_schema_access( database.database_name = "db1" database.get_default_catalog.return_value = "catalog2" - can_access = mocker.patch.object(sm, "can_access", return_value=True) + # False for catalog_access, True for schema_access + can_access = mocker.patch.object(sm, "can_access", side_effect=[False, True]) datasource_names = [ DatasourceName("table1", "schema1", "catalog2"), @@ -795,7 +1135,12 @@ def test_get_datasources_accessible_by_user_schema_access( # Even though we passed `catalog=None,` the schema check uses the default catalog # when building the schema permission, since the DB supports catalog. 
- can_access.assert_called_with("schema_access", "[db1].[catalog2].[schema1]") + can_access.assert_has_calls( + [ + mocker.call("catalog_access", "[db1].[catalog2]"), + mocker.call("schema_access", "[db1].[catalog2].[schema1]"), + ] + ) def test_get_catalogs_accessible_by_user_schema_access( diff --git a/tests/unit_tests/sql/__init__.py b/tests/unit_tests/sql/__init__.py new file mode 100644 index 000000000000..13a83393a912 --- /dev/null +++ b/tests/unit_tests/sql/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/unit_tests/sql/parse_tests.py b/tests/unit_tests/sql/parse_tests.py new file mode 100644 index 000000000000..ada6314457fb --- /dev/null +++ b/tests/unit_tests/sql/parse_tests.py @@ -0,0 +1,981 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# pylint: disable=invalid-name, redefined-outer-name, too-many-lines + + +import pytest +from sqlglot import Dialects + +from superset.exceptions import SupersetParseError +from superset.sql.parse import ( + extract_tables_from_statement, + KustoKQLStatement, + split_kql, + SQLGLOT_DIALECTS, + SQLScript, + SQLStatement, + Table, +) + + +def test_table() -> None: + """ + Test the `Table` class and its string conversion. + + Special characters in the table, schema, or catalog name should be escaped correctly. + """ + assert str(Table("tbname")) == "tbname" + assert str(Table("tbname", "schemaname")) == "schemaname.tbname" + assert ( + str(Table("tbname", "schemaname", "catalogname")) + == "catalogname.schemaname.tbname" + ) + assert ( + str(Table("table.name", "schema/name", "catalog\nname")) + == "catalog%0Aname.schema%2Fname.table%2Ename" + ) + + +def extract_tables_from_sql(sql: str, engine: str = "postgresql") -> set[Table]: + """ + Helper function to extract tables from SQL. + """ + dialect = SQLGLOT_DIALECTS.get(engine) + return { + table + for statement in SQLScript(sql, engine).statements + for table in extract_tables_from_statement(statement._parsed, dialect) + } + + +def test_extract_tables_from_sql() -> None: + """ + Test that referenced tables are parsed correctly from the SQL. 
+ """ + assert extract_tables_from_sql("SELECT * FROM tbname") == {Table("tbname")} + assert extract_tables_from_sql("SELECT * FROM tbname foo") == {Table("tbname")} + assert extract_tables_from_sql("SELECT * FROM tbname AS foo") == {Table("tbname")} + + # underscore + assert extract_tables_from_sql("SELECT * FROM tb_name") == {Table("tb_name")} + + # quotes + assert extract_tables_from_sql('SELECT * FROM "tbname"') == {Table("tbname")} + + # unicode + assert extract_tables_from_sql('SELECT * FROM "tb_name" WHERE city = "Lübeck"') == { + Table("tb_name") + } + + # columns + assert extract_tables_from_sql("SELECT field1, field2 FROM tb_name") == { + Table("tb_name") + } + assert extract_tables_from_sql("SELECT t1.f1, t2.f2 FROM t1, t2") == { + Table("t1"), + Table("t2"), + } + + # named table + assert extract_tables_from_sql( + "SELECT a.date, a.field FROM left_table a LIMIT 10" + ) == {Table("left_table")} + + assert extract_tables_from_sql( + "SELECT FROM (SELECT FROM forbidden_table) AS forbidden_table;" + ) == {Table("forbidden_table")} + + assert extract_tables_from_sql( + "select * from (select * from forbidden_table) forbidden_table" + ) == {Table("forbidden_table")} + + +def test_extract_tables_subselect() -> None: + """ + Test that tables inside subselects are parsed correctly. 
+ """ + assert extract_tables_from_sql( + """ +SELECT sub.* +FROM ( + SELECT * + FROM s1.t1 + WHERE day_of_week = 'Friday' + ) sub, s2.t2 +WHERE sub.resolution = 'NONE' +""" + ) == {Table("t1", "s1"), Table("t2", "s2")} + + assert extract_tables_from_sql( + """ +SELECT sub.* +FROM ( + SELECT * + FROM s1.t1 + WHERE day_of_week = 'Friday' +) sub +WHERE sub.resolution = 'NONE' +""" + ) == {Table("t1", "s1")} + + assert extract_tables_from_sql( + """ +SELECT * FROM t1 +WHERE s11 > ANY ( + SELECT COUNT(*) /* no hint */ FROM t2 + WHERE NOT EXISTS ( + SELECT * FROM t3 + WHERE ROW(5*t2.s1,77)=( + SELECT 50,11*s1 FROM t4 + ) + ) +) +""" + ) == {Table("t1"), Table("t2"), Table("t3"), Table("t4")} + + +def test_extract_tables_select_in_expression() -> None: + """ + Test that parser works with `SELECT`s used as expressions. + """ + assert extract_tables_from_sql("SELECT f1, (SELECT count(1) FROM t2) FROM t1") == { + Table("t1"), + Table("t2"), + } + assert extract_tables_from_sql( + "SELECT f1, (SELECT count(1) FROM t2) as f2 FROM t1" + ) == { + Table("t1"), + Table("t2"), + } + + +def test_extract_tables_parenthesis() -> None: + """ + Test that parenthesis are parsed correctly. + """ + assert extract_tables_from_sql("SELECT f1, (x + y) AS f2 FROM t1") == {Table("t1")} + + +def test_extract_tables_with_schema() -> None: + """ + Test that schemas are parsed correctly. + """ + assert extract_tables_from_sql("SELECT * FROM schemaname.tbname") == { + Table("tbname", "schemaname") + } + assert extract_tables_from_sql('SELECT * FROM "schemaname"."tbname"') == { + Table("tbname", "schemaname") + } + assert extract_tables_from_sql('SELECT * FROM "schemaname"."tbname" foo') == { + Table("tbname", "schemaname") + } + assert extract_tables_from_sql('SELECT * FROM "schemaname"."tbname" AS foo') == { + Table("tbname", "schemaname") + } + + +def test_extract_tables_union() -> None: + """ + Test that `UNION` queries work as expected. 
+ """ + assert extract_tables_from_sql("SELECT * FROM t1 UNION SELECT * FROM t2") == { + Table("t1"), + Table("t2"), + } + assert extract_tables_from_sql("SELECT * FROM t1 UNION ALL SELECT * FROM t2") == { + Table("t1"), + Table("t2"), + } + assert extract_tables_from_sql( + "SELECT * FROM t1 INTERSECT ALL SELECT * FROM t2" + ) == { + Table("t1"), + Table("t2"), + } + + +def test_extract_tables_select_from_values() -> None: + """ + Test that selecting from values returns no tables. + """ + assert extract_tables_from_sql("SELECT * FROM VALUES (13, 42)") == set() + + +def test_extract_tables_select_array() -> None: + """ + Test that queries selecting arrays work as expected. + """ + assert extract_tables_from_sql( + """ +SELECT ARRAY[1, 2, 3] AS my_array +FROM t1 LIMIT 10 +""" + ) == {Table("t1")} + + +def test_extract_tables_select_if() -> None: + """ + Test that queries with an `IF` work as expected. + """ + assert extract_tables_from_sql( + """ +SELECT IF(CARDINALITY(my_array) >= 3, my_array[3], NULL) +FROM t1 LIMIT 10 +""" + ) == {Table("t1")} + + +def test_extract_tables_with_catalog() -> None: + """ + Test that catalogs are parsed correctly. + """ + assert extract_tables_from_sql("SELECT * FROM catalogname.schemaname.tbname") == { + Table("tbname", "schemaname", "catalogname") + } + + +def test_extract_tables_illdefined() -> None: + """ + Test that ill-defined tables return an empty set. + """ + with pytest.raises(SupersetParseError) as excinfo: + extract_tables_from_sql("SELECT * FROM schemaname.") + assert str(excinfo.value) == "Error parsing near '.' at line 1:25" + + with pytest.raises(SupersetParseError) as excinfo: + extract_tables_from_sql("SELECT * FROM catalogname.schemaname.") + assert str(excinfo.value) == "Error parsing near '.' at line 1:37" + + with pytest.raises(SupersetParseError) as excinfo: + extract_tables_from_sql("SELECT * FROM catalogname..") + assert str(excinfo.value) == "Error parsing near '.' 
at line 1:27" + + with pytest.raises(SupersetParseError) as excinfo: + extract_tables_from_sql('SELECT * FROM "tbname') + assert str(excinfo.value) == "Unable to parse script" + + # odd edge case that works + assert extract_tables_from_sql("SELECT * FROM catalogname..tbname") == { + Table(table="tbname", schema=None, catalog="catalogname") + } + + +def test_extract_tables_show_tables_from() -> None: + """ + Test `SHOW TABLES FROM`. + """ + assert ( + extract_tables_from_sql("SHOW TABLES FROM s1 like '%order%'", "mysql") == set() + ) + + +def test_format_show_tables() -> None: + """ + Test format when `ast.sql()` raises an exception. + + In that case sqlparse should be used instead. + """ + assert ( + SQLScript("SHOW TABLES FROM s1 like '%order%'", "mysql").format() + == "SHOW TABLES FROM s1 LIKE '%order%'" + ) + + +def test_format_no_dialect() -> None: + """ + Test format with an engine that has no corresponding dialect. + """ + assert ( + SQLScript("SELECT col FROM t WHERE col NOT IN (1, 2)", "firebolt").format() + == "SELECT col\nFROM t\nWHERE col NOT IN (1,\n 2)" + ) + + +def test_split_no_dialect() -> None: + """ + Test the statement split when the engine has no corresponding dialect. + """ + sql = "SELECT col FROM t WHERE col NOT IN (1, 2); SELECT * FROM t; SELECT foo" + statements = SQLScript(sql, "firebolt").statements + assert len(statements) == 3 + assert statements[0]._sql == "SELECT col FROM t WHERE col NOT IN (1, 2)" + assert statements[1]._sql == "SELECT * FROM t" + assert statements[2]._sql == "SELECT foo" + + +def test_extract_tables_show_columns_from() -> None: + """ + Test `SHOW COLUMNS FROM`. + """ + assert extract_tables_from_sql("SHOW COLUMNS FROM t1") == {Table("t1")} + + +def test_extract_tables_where_subquery() -> None: + """ + Test that tables in a `WHERE` subquery are parsed correctly. 
+ """ + assert extract_tables_from_sql( + """ +SELECT name +FROM t1 +WHERE regionkey = (SELECT max(regionkey) FROM t2) +""" + ) == {Table("t1"), Table("t2")} + + assert extract_tables_from_sql( + """ +SELECT name +FROM t1 +WHERE regionkey IN (SELECT regionkey FROM t2) +""" + ) == {Table("t1"), Table("t2")} + + assert extract_tables_from_sql( + """ +SELECT name +FROM t1 +WHERE EXISTS (SELECT 1 FROM t2 WHERE t1.regionkey = t2.regionkey); +""" + ) == {Table("t1"), Table("t2")} + + +def test_extract_tables_describe() -> None: + """ + Test `DESCRIBE`. + """ + assert extract_tables_from_sql("DESCRIBE t1") == {Table("t1")} + + +def test_extract_tables_show_partitions() -> None: + """ + Test `SHOW PARTITIONS`. + """ + assert extract_tables_from_sql( + """ +SHOW PARTITIONS FROM orders +WHERE ds >= '2013-01-01' ORDER BY ds DESC +""" + ) == {Table("orders")} + + +def test_extract_tables_join() -> None: + """ + Test joins. + """ + assert extract_tables_from_sql( + "SELECT t1.*, t2.* FROM t1 JOIN t2 ON t1.a = t2.a;" + ) == { + Table("t1"), + Table("t2"), + } + + assert extract_tables_from_sql( + """ +SELECT a.date, b.name +FROM left_table a +JOIN ( + SELECT + CAST((b.year) as VARCHAR) date, + name + FROM right_table +) b +ON a.date = b.date +""" + ) == {Table("left_table"), Table("right_table")} + + assert extract_tables_from_sql( + """ +SELECT a.date, b.name +FROM left_table a +LEFT INNER JOIN ( + SELECT + CAST((b.year) as VARCHAR) date, + name + FROM right_table +) b +ON a.date = b.date +""" + ) == {Table("left_table"), Table("right_table")} + + assert extract_tables_from_sql( + """ +SELECT a.date, b.name +FROM left_table a +RIGHT OUTER JOIN ( + SELECT + CAST((b.year) as VARCHAR) date, + name + FROM right_table +) b +ON a.date = b.date +""" + ) == {Table("left_table"), Table("right_table")} + + assert extract_tables_from_sql( + """ +SELECT a.date, b.name +FROM left_table a +FULL OUTER JOIN ( + SELECT + CAST((b.year) as VARCHAR) date, + name + FROM right_table +) b +ON a.date 
= b.date +""" + ) == {Table("left_table"), Table("right_table")} + + +def test_extract_tables_semi_join() -> None: + """ + Test `LEFT SEMI JOIN`. + """ + assert extract_tables_from_sql( + """ +SELECT a.date, b.name +FROM left_table a +LEFT SEMI JOIN ( + SELECT + CAST((b.year) as VARCHAR) date, + name + FROM right_table +) b +ON a.data = b.date +""" + ) == {Table("left_table"), Table("right_table")} + + +def test_extract_tables_combinations() -> None: + """ + Test a complex case with nested queries. + """ + assert extract_tables_from_sql( + """ +SELECT * FROM t1 +WHERE s11 > ANY ( + SELECT * FROM t1 UNION ALL SELECT * FROM ( + SELECT t6.*, t3.* FROM t6 JOIN t3 ON t6.a = t3.a + ) tmp_join + WHERE NOT EXISTS ( + SELECT * FROM t3 + WHERE ROW(5*t3.s1,77)=( + SELECT 50,11*s1 FROM t4 + ) + ) +) +""" + ) == {Table("t1"), Table("t3"), Table("t4"), Table("t6")} + + assert extract_tables_from_sql( + """ +SELECT * FROM ( + SELECT * FROM ( + SELECT * FROM ( + SELECT * FROM EmployeeS + ) AS S1 + ) AS S2 +) AS S3 +""" + ) == {Table("EmployeeS")} + + +def test_extract_tables_with() -> None: + """ + Test `WITH`. + """ + assert extract_tables_from_sql( + """ +WITH + x AS (SELECT a FROM t1), + y AS (SELECT a AS b FROM t2), + z AS (SELECT b AS c FROM t3) +SELECT c FROM z +""" + ) == {Table("t1"), Table("t2"), Table("t3")} + + assert extract_tables_from_sql( + """ +WITH + x AS (SELECT a FROM t1), + y AS (SELECT a AS b FROM x), + z AS (SELECT b AS c FROM y) +SELECT c FROM z +""" + ) == {Table("t1")} + + +def test_extract_tables_reusing_aliases() -> None: + """ + Test that the parser follows aliases. 
+ """ + assert extract_tables_from_sql( + """ +with q1 as ( select key from q2 where key = '5'), +q2 as ( select key from src where key = '5') +select * from (select key from q1) a +""" + ) == {Table("src")} + + # weird query with circular dependency + assert ( + extract_tables_from_sql( + """ +with src as ( select key from q2 where key = '5'), +q2 as ( select key from src where key = '5') +select * from (select key from src) a +""" + ) + == set() + ) + + +def test_extract_tables_multistatement() -> None: + """ + Test that the parser works with multiple statements. + """ + assert extract_tables_from_sql("SELECT * FROM t1; SELECT * FROM t2") == { + Table("t1"), + Table("t2"), + } + assert extract_tables_from_sql("SELECT * FROM t1; SELECT * FROM t2;") == { + Table("t1"), + Table("t2"), + } + assert extract_tables_from_sql( + "ADD JAR file:///hive.jar; SELECT * FROM t1;", + engine="hive", + ) == {Table("t1")} + + +def test_extract_tables_complex() -> None: + """ + Test a few complex queries. 
+ """ + assert extract_tables_from_sql( + """ +SELECT sum(m_examples) AS "sum__m_example" +FROM ( + SELECT + COUNT(DISTINCT id_userid) AS m_examples, + some_more_info + FROM my_b_table b + JOIN my_t_table t ON b.ds=t.ds + JOIN my_l_table l ON b.uid=l.uid + WHERE + b.rid IN ( + SELECT other_col + FROM inner_table + ) + AND l.bla IN ('x', 'y') + GROUP BY 2 + ORDER BY 2 ASC +) AS "meh" +ORDER BY "sum__m_example" DESC +LIMIT 10; +""" + ) == { + Table("my_l_table"), + Table("my_b_table"), + Table("my_t_table"), + Table("inner_table"), + } + + assert extract_tables_from_sql( + """ +SELECT * +FROM table_a AS a, table_b AS b, table_c as c +WHERE a.id = b.id and b.id = c.id +""" + ) == {Table("table_a"), Table("table_b"), Table("table_c")} + + assert extract_tables_from_sql( + """ +SELECT somecol AS somecol +FROM ( + WITH bla AS ( + SELECT col_a + FROM a + WHERE + 1=1 + AND column_of_choice NOT IN ( + SELECT interesting_col + FROM b + ) + ), + rb AS ( + SELECT yet_another_column + FROM ( + SELECT a + FROM c + GROUP BY the_other_col + ) not_table + LEFT JOIN bla foo + ON foo.prop = not_table.bad_col0 + WHERE 1=1 + GROUP BY + not_table.bad_col1 , + not_table.bad_col2 , + ORDER BY not_table.bad_col_3 DESC , + not_table.bad_col4 , + not_table.bad_col5 + ) + SELECT random_col + FROM d + WHERE 1=1 + UNION ALL SELECT even_more_cols + FROM e + WHERE 1=1 + UNION ALL SELECT lets_go_deeper + FROM f + WHERE 1=1 + WHERE 2=2 + GROUP BY last_col + LIMIT 50000 +) +""" + ) == {Table("a"), Table("b"), Table("c"), Table("d"), Table("e"), Table("f")} + + +def test_extract_tables_mixed_from_clause() -> None: + """ + Test that the parser handles a `FROM` clause with table and subselect. 
+ """ + assert extract_tables_from_sql( + """ +SELECT * +FROM table_a AS a, (select * from table_b) AS b, table_c as c +WHERE a.id = b.id and b.id = c.id +""" + ) == {Table("table_a"), Table("table_b"), Table("table_c")} + + +def test_extract_tables_nested_select() -> None: + """ + Test that the parser handles selects inside functions. + """ + assert extract_tables_from_sql( + """ +select (extractvalue(1,concat(0x7e,(select GROUP_CONCAT(TABLE_NAME) +from INFORMATION_SCHEMA.COLUMNS +WHERE TABLE_SCHEMA like "%bi%"),0x7e))); +""", + "mysql", + ) == {Table("COLUMNS", "INFORMATION_SCHEMA")} + + assert extract_tables_from_sql( + """ +select (extractvalue(1,concat(0x7e,(select GROUP_CONCAT(COLUMN_NAME) +from INFORMATION_SCHEMA.COLUMNS +WHERE TABLE_NAME="bi_achievement_daily"),0x7e))); +""", + "mysql", + ) == {Table("COLUMNS", "INFORMATION_SCHEMA")} + + +def test_extract_tables_complex_cte_with_prefix() -> None: + """ + Test that the parser handles CTEs with prefixes. + """ + assert extract_tables_from_sql( + """ +WITH CTE__test (SalesPersonID, SalesOrderID, SalesYear) +AS ( + SELECT SalesPersonID, SalesOrderID, YEAR(OrderDate) AS SalesYear + FROM SalesOrderHeader + WHERE SalesPersonID IS NOT NULL +) +SELECT SalesPersonID, COUNT(SalesOrderID) AS TotalSales, SalesYear +FROM CTE__test +GROUP BY SalesYear, SalesPersonID +ORDER BY SalesPersonID, SalesYear; +""" + ) == {Table("SalesOrderHeader")} + + +def test_extract_tables_identifier_list_with_keyword_as_alias() -> None: + """ + Test that aliases that are keywords are parsed correctly. + """ + assert extract_tables_from_sql( + """ +WITH + f AS (SELECT * FROM foo), + match AS (SELECT * FROM f) +SELECT * FROM match +""" + ) == {Table("foo")} + + +def test_sqlscript() -> None: + """ + Test the `SQLScript` class. 
+ """ + script = SQLScript("SELECT 1; SELECT 2;", "sqlite") + + assert len(script.statements) == 2 + assert script.format() == "SELECT\n 1;\nSELECT\n 2" + assert script.statements[0].format() == "SELECT\n 1" + + script = SQLScript("SET a=1; SET a=2; SELECT 3;", "sqlite") + assert script.get_settings() == {"a": "2"} + + query = SQLScript( + """set querytrace; +Events | take 100""", + "kustokql", + ) + assert query.get_settings() == {"querytrace": True} + + +def test_sqlstatement() -> None: + """ + Test the `SQLStatement` class. + """ + statement = SQLStatement( + "SELECT * FROM table1 UNION ALL SELECT * FROM table2", + "sqlite", + ) + + assert statement.tables == { + Table(table="table1", schema=None, catalog=None), + Table(table="table2", schema=None, catalog=None), + } + assert ( + statement.format() + == "SELECT\n *\nFROM table1\nUNION ALL\nSELECT\n *\nFROM table2" + ) + + statement = SQLStatement("SET a=1", "sqlite") + assert statement.get_settings() == {"a": "1"} + + +def test_kustokqlstatement_split_script() -> None: + """ + Test the `KustoKQLStatement` split method. + """ + statements = KustoKQLStatement.split_script( + """ +let totalPagesPerDay = PageViews +| summarize by Page, Day = startofday(Timestamp) +| summarize count() by Day; +let materializedScope = PageViews +| summarize by Page, Day = startofday(Timestamp); +let cachedResult = materialize(materializedScope); +cachedResult +| project Page, Day1 = Day +| join kind = inner +( + cachedResult + | project Page, Day2 = Day +) +on Page +| where Day2 > Day1 +| summarize count() by Day1, Day2 +| join kind = inner + totalPagesPerDay +on $left.Day1 == $right.Day +| project Day1, Day2, Percentage = count_*100.0/count_1 + """, + "kustokql", + ) + assert len(statements) == 4 + + +def test_kustokqlstatement_with_program() -> None: + """ + Test the `KustoKQLStatement` split method when the KQL has a program. 
+ """ + statements = KustoKQLStatement.split_script( + """ +print program = ``` + public class Program { + public static void Main() { + System.Console.WriteLine("Hello!"); + } + }``` + """, + "kustokql", + ) + assert len(statements) == 1 + + +def test_kustokqlstatement_with_set() -> None: + """ + Test the `KustoKQLStatement` split method when the KQL has a set command. + """ + statements = KustoKQLStatement.split_script( + """ +set querytrace; +Events | take 100 + """, + "kustokql", + ) + assert len(statements) == 2 + assert statements[0].format() == "set querytrace" + assert statements[1].format() == "Events | take 100" + + +@pytest.mark.parametrize( + "kql,statements", + [ + ('print banner=strcat("Hello", ", ", "World!")', 1), + (r"print 'O\'Malley\'s'", 1), + (r"print 'O\'Mal;ley\'s'", 1), + ("print ```foo;\nbar;\nbaz;```\n", 1), + ], +) +def test_kustokql_statement_split_special(kql: str, statements: int) -> None: + assert len(KustoKQLStatement.split_script(kql, "kustokql")) == statements + + +def test_split_kql() -> None: + """ + Test the `split_kql` function. 
+ """ + kql = """ +let totalPagesPerDay = PageViews +| summarize by Page, Day = startofday(Timestamp) +| summarize count() by Day; +let materializedScope = PageViews +| summarize by Page, Day = startofday(Timestamp); +let cachedResult = materialize(materializedScope); +cachedResult +| project Page, Day1 = Day +| join kind = inner +( + cachedResult + | project Page, Day2 = Day +) +on Page +| where Day2 > Day1 +| summarize count() by Day1, Day2 +| join kind = inner + totalPagesPerDay +on $left.Day1 == $right.Day +| project Day1, Day2, Percentage = count_*100.0/count_1 + """ + assert split_kql(kql) == [ + """ +let totalPagesPerDay = PageViews +| summarize by Page, Day = startofday(Timestamp) +| summarize count() by Day""", + """ +let materializedScope = PageViews +| summarize by Page, Day = startofday(Timestamp)""", + """ +let cachedResult = materialize(materializedScope)""", + """ +cachedResult +| project Page, Day1 = Day +| join kind = inner +( + cachedResult + | project Page, Day2 = Day +) +on Page +| where Day2 > Day1 +| summarize count() by Day1, Day2 +| join kind = inner + totalPagesPerDay +on $left.Day1 == $right.Day +| project Day1, Day2, Percentage = count_*100.0/count_1 + """, + ] + + +@pytest.mark.parametrize( + ("engine", "sql", "expected"), + [ + # SQLite tests + ("sqlite", "SELECT 1", False), + ("sqlite", "INSERT INTO foo VALUES (1)", True), + ("sqlite", "UPDATE foo SET bar = 2 WHERE id = 1", True), + ("sqlite", "DELETE FROM foo WHERE id = 1", True), + ("sqlite", "CREATE TABLE foo (id INT, bar TEXT)", True), + ("sqlite", "DROP TABLE foo", True), + ("sqlite", "EXPLAIN SELECT * FROM foo", False), + ("sqlite", "PRAGMA table_info(foo)", False), + ("postgresql", "SELECT 1", False), + ("postgresql", "INSERT INTO foo (id, bar) VALUES (1, 'test')", True), + ("postgresql", "UPDATE foo SET bar = 'new' WHERE id = 1", True), + ("postgresql", "DELETE FROM foo WHERE id = 1", True), + ("postgresql", "CREATE TABLE foo (id SERIAL PRIMARY KEY, bar TEXT)", True), + 
("postgresql", "DROP TABLE foo", True), + ("postgresql", "EXPLAIN ANALYZE SELECT * FROM foo", False), + ("postgresql", "EXPLAIN ANALYZE DELETE FROM foo", True), + ("postgresql", "SHOW search_path", False), + ("postgresql", "SET search_path TO public", False), + ( + "postgres", + """ + with source as ( + select 1 as one + ) + select * from source + """, + False, + ), + ("trino", "SELECT 1", False), + ("trino", "INSERT INTO foo VALUES (1, 'bar')", True), + ("trino", "UPDATE foo SET bar = 'baz' WHERE id = 1", True), + ("trino", "DELETE FROM foo WHERE id = 1", True), + ("trino", "CREATE TABLE foo (id INT, bar VARCHAR)", True), + ("trino", "DROP TABLE foo", True), + ("trino", "EXPLAIN SELECT * FROM foo", False), + ("trino", "SHOW SCHEMAS", False), + ("trino", "SET SESSION optimization_level = '3'", False), + ("kustokql", "tbl | limit 100", False), + ("kustokql", "let foo = 1; tbl | where bar == foo", False), + ("kustokql", ".show tables", False), + ("kustokql", "print 1", False), + ("kustokql", "set querytrace; Events | take 100", False), + ("kustokql", ".drop table foo", True), + ("kustokql", ".set-or-append table foo <| bar", True), + ], +) +def test_has_mutation(engine: str, sql: str, expected: bool) -> None: + """ + Test the `has_mutation` method. + """ + assert SQLScript(sql, engine).has_mutation() == expected + + +def test_get_settings() -> None: + """ + Test `get_settings` in some edge cases. + """ + sql = """ +set +-- this is a tricky comment +search_path -- another one += bar; +SELECT * FROM some_table; + """ + assert SQLScript(sql, "postgresql").get_settings() == {"search_path": "bar"} + + +@pytest.mark.parametrize( + "app", + [{"SQLGLOT_DIALECTS_EXTENSIONS": {"custom": Dialects.MYSQL}}], + indirect=True, +) +def test_custom_dialect(app: None) -> None: + """ + Test that custom dialects are loaded correctly. 
+ """ + assert SQLGLOT_DIALECTS.get("custom") == Dialects.MYSQL diff --git a/tests/unit_tests/sql_parse_tests.py b/tests/unit_tests/sql_parse_tests.py index 6259d6272db6..cf7d5b87d343 100644 --- a/tests/unit_tests/sql_parse_tests.py +++ b/tests/unit_tests/sql_parse_tests.py @@ -17,7 +17,7 @@ # pylint: disable=invalid-name, redefined-outer-name, too-many-lines from typing import Optional -from unittest.mock import Mock +from unittest import mock import pytest import sqlparse @@ -30,6 +30,7 @@ QueryClauseValidationException, SupersetSecurityException, ) +from superset.sql.parse import Table from superset.sql_parse import ( add_table_name, check_sql_functions_exist, @@ -39,18 +40,13 @@ has_table_query, insert_rls_as_subquery, insert_rls_in_predicate, - KustoKQLStatement, ParsedQuery, sanitize_clause, - split_kql, - SQLScript, - SQLStatement, strip_comments_from_sql, - Table, ) -def extract_tables(query: str, engine: Optional[str] = None) -> set[Table]: +def extract_tables(query: str, engine: str = "base") -> set[Table]: """ Helper function to extract tables referenced in a query. """ @@ -285,7 +281,7 @@ def test_extract_tables_illdefined() -> None: extract_tables('SELECT * FROM "tbname') assert ( str(excinfo.value) - == "You may have an error in your SQL statement. Error tokenizing 'SELECT * FROM \"tbnam'" + == "You may have an error in your SQL statement. 
Unable to tokenize script" ) # odd edge case that works @@ -1290,46 +1286,66 @@ def test_sqlparse_issue_652(): @pytest.mark.parametrize( - "sql,expected", + ("engine", "sql", "expected"), [ - ("SELECT * FROM table", True), - ("SELECT a FROM (SELECT 1 AS a) JOIN (SELECT * FROM table)", True), - ("(SELECT COUNT(DISTINCT name) AS foo FROM birth_names)", True), - ("COUNT(*)", False), - ("SELECT a FROM (SELECT 1 AS a)", False), - ("SELECT a FROM (SELECT 1 AS a) JOIN table", True), - ("SELECT * FROM (SELECT 1 AS foo, 2 AS bar) ORDER BY foo ASC, bar", False), - ("SELECT * FROM other_table", True), - ("extract(HOUR from from_unixtime(hour_ts)", False), - ("(SELECT * FROM table)", True), - ("(SELECT COUNT(DISTINCT name) from birth_names)", True), + ("postgresql", "extract(HOUR from from_unixtime(hour_ts))", False), + ("postgresql", "SELECT * FROM table", True), + ("postgresql", "(SELECT * FROM table)", True), ( + "postgresql", + "SELECT a FROM (SELECT 1 AS a) JOIN (SELECT * FROM table)", + True, + ), + ( + "postgresql", + "(SELECT COUNT(DISTINCT name) AS foo FROM birth_names)", + True, + ), + ("postgresql", "COUNT(*)", False), + ("postgresql", "SELECT a FROM (SELECT 1 AS a)", False), + ("postgresql", "SELECT a FROM (SELECT 1 AS a) JOIN table", True), + ( + "postgresql", + "SELECT * FROM (SELECT 1 AS foo, 2 AS bar) ORDER BY foo ASC, bar", + False, + ), + ("postgresql", "SELECT * FROM other_table", True), + ("postgresql", "(SELECT COUNT(DISTINCT name) from birth_names)", True), + ( + "postgresql", "(SELECT table_name FROM information_schema.tables WHERE table_name LIKE '%user%' LIMIT 1)", True, ), ( + "postgresql", "(SELECT table_name FROM /**/ information_schema.tables WHERE table_name LIKE '%user%' LIMIT 1)", True, ), ( + "postgresql", "SELECT FROM (SELECT FROM forbidden_table) AS forbidden_table;", True, ), ( + "postgresql", "SELECT * FROM (SELECT * FROM forbidden_table) forbidden_table", True, ), + ( + "postgresql", + "((select users.id from (select 'majorie' as a) b, 
users where b.a = users.name and users.name in ('majorie') limit 1) like 'U%')", + True, + ), ], ) -def test_has_table_query(sql: str, expected: bool) -> None: +def test_has_table_query(engine: str, sql: str, expected: bool) -> None: """ Test if a given statement queries a table. This is used to prevent ad-hoc metrics from querying unauthorized tables, bypassing row-level security. """ - statement = sqlparse.parse(sql)[0] - assert has_table_query(statement) == expected + assert has_table_query(sql, engine) == expected @pytest.mark.parametrize( @@ -1834,49 +1850,6 @@ def test_is_select() -> None: assert ParsedQuery("USE foo; SELECT * FROM bar").is_select() -def test_sqlquery() -> None: - """ - Test the `SQLScript` class. - """ - script = SQLScript("SELECT 1; SELECT 2;", "sqlite") - - assert len(script.statements) == 2 - assert script.format() == "SELECT\n 1;\nSELECT\n 2" - assert script.statements[0].format() == "SELECT\n 1" - - script = SQLScript("SET a=1; SET a=2; SELECT 3;", "sqlite") - assert script.get_settings() == {"a": "2"} - - query = SQLScript( - """set querytrace; -Events | take 100""", - "kustokql", - ) - assert query.get_settings() == {"querytrace": True} - - -def test_sqlstatement() -> None: - """ - Test the `SQLStatement` class. 
- """ - statement = SQLStatement( - "SELECT * FROM table1 UNION ALL SELECT * FROM table2", - "sqlite", - ) - - assert statement.tables == { - Table(table="table1", schema=None, catalog=None), - Table(table="table2", schema=None, catalog=None), - } - assert ( - statement.format() - == "SELECT\n *\nFROM table1\nUNION ALL\nSELECT\n *\nFROM table2" - ) - - statement = SQLStatement("SET a=1", "sqlite") - assert statement.get_settings() == {"a": "1"} - - @pytest.mark.parametrize( "engine", [ @@ -1915,146 +1888,33 @@ def test_sqlstatement() -> None: ], ) def test_extract_tables_from_jinja_sql( - engine: str, macro: str, expected: set[Table] + mocker: MockerFixture, + engine: str, + macro: str, + expected: set[Table], ) -> None: assert ( extract_tables_from_jinja_sql( sql=f"'{{{{ {engine}.{macro} }}}}'", - database=Mock(), + database=mocker.Mock(), ) == expected ) -def test_kustokqlstatement_split_query() -> None: - """ - Test the `KustoKQLStatement` split method. - """ - statements = KustoKQLStatement.split_query( - """ -let totalPagesPerDay = PageViews -| summarize by Page, Day = startofday(Timestamp) -| summarize count() by Day; -let materializedScope = PageViews -| summarize by Page, Day = startofday(Timestamp); -let cachedResult = materialize(materializedScope); -cachedResult -| project Page, Day1 = Day -| join kind = inner -( - cachedResult - | project Page, Day2 = Day +@mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"ENABLE_TEMPLATE_PROCESSING": False}, + clear=True, ) -on Page -| where Day2 > Day1 -| summarize count() by Day1, Day2 -| join kind = inner - totalPagesPerDay -on $left.Day1 == $right.Day -| project Day1, Day2, Percentage = count_*100.0/count_1 - """, - "kustokql", - ) - assert len(statements) == 4 - - -def test_kustokqlstatement_with_program() -> None: - """ - Test the `KustoKQLStatement` split method when the KQL has a program. 
- """ - statements = KustoKQLStatement.split_query( - """ -print program = ``` - public class Program { - public static void Main() { - System.Console.WriteLine("Hello!"); - } - }``` - """, - "kustokql", - ) - assert len(statements) == 1 - - -def test_kustokqlstatement_with_set() -> None: +def test_extract_tables_from_jinja_sql_disabled(mocker: MockerFixture) -> None: """ - Test the `KustoKQLStatement` split method when the KQL has a set command. + Test the function when the feature flag is disabled. """ - statements = KustoKQLStatement.split_query( - """ -set querytrace; -Events | take 100 - """, - "kustokql", - ) - assert len(statements) == 2 - assert statements[0].format() == "set querytrace" - assert statements[1].format() == "Events | take 100" + database = mocker.Mock() + database.db_engine_spec.engine = "mssql" - -@pytest.mark.parametrize( - "kql,statements", - [ - ('print banner=strcat("Hello", ", ", "World!")', 1), - (r"print 'O\'Malley\'s'", 1), - (r"print 'O\'Mal;ley\'s'", 1), - ("print ```foo;\nbar;\nbaz;```\n", 1), - ], -) -def test_kustokql_statement_split_special(kql: str, statements: int) -> None: - assert len(KustoKQLStatement.split_query(kql, "kustokql")) == statements - - -def test_split_kql() -> None: - """ - Test the `split_kql` function. 
- """ - kql = """ -let totalPagesPerDay = PageViews -| summarize by Page, Day = startofday(Timestamp) -| summarize count() by Day; -let materializedScope = PageViews -| summarize by Page, Day = startofday(Timestamp); -let cachedResult = materialize(materializedScope); -cachedResult -| project Page, Day1 = Day -| join kind = inner -( - cachedResult - | project Page, Day2 = Day -) -on Page -| where Day2 > Day1 -| summarize count() by Day1, Day2 -| join kind = inner - totalPagesPerDay -on $left.Day1 == $right.Day -| project Day1, Day2, Percentage = count_*100.0/count_1 - """ - assert split_kql(kql) == [ - """ -let totalPagesPerDay = PageViews -| summarize by Page, Day = startofday(Timestamp) -| summarize count() by Day""", - """ -let materializedScope = PageViews -| summarize by Page, Day = startofday(Timestamp)""", - """ -let cachedResult = materialize(materializedScope)""", - """ -cachedResult -| project Page, Day1 = Day -| join kind = inner -( - cachedResult - | project Page, Day2 = Day -) -on Page -| where Day2 > Day1 -| summarize count() by Day1, Day2 -| join kind = inner - totalPagesPerDay -on $left.Day1 == $right.Day -| project Day1, Day2, Percentage = count_*100.0/count_1 - """, - ] + assert extract_tables_from_jinja_sql( + sql="SELECT 1 FROM t", + database=database, + ) == {Table("t")} diff --git a/tests/unit_tests/utils/excel_tests.py b/tests/unit_tests/utils/excel_tests.py index c15f69a0c62a..745beff5052a 100644 --- a/tests/unit_tests/utils/excel_tests.py +++ b/tests/unit_tests/utils/excel_tests.py @@ -18,8 +18,10 @@ from datetime import datetime, timezone import pandas as pd +from pandas.api.types import is_numeric_dtype -from superset.utils.excel import df_to_excel +from superset.utils.core import GenericDataType +from superset.utils.excel import apply_column_types, df_to_excel def test_timezone_conversion() -> None: @@ -27,5 +29,66 @@ def test_timezone_conversion() -> None: Test that columns with timezones are converted to a string. 
""" df = pd.DataFrame({"dt": [datetime(2023, 1, 1, 0, 0, tzinfo=timezone.utc)]}) + apply_column_types(df, [GenericDataType.TEMPORAL]) contents = df_to_excel(df) assert pd.read_excel(contents)["dt"][0] == "2023-01-01 00:00:00+00:00" + + +def test_column_data_types_with_one_numeric_column(): + df = pd.DataFrame( + { + "col0": ["123", "1", "2", "3"], + "col1": ["456", "5.67", "0", ".45"], + "col2": [ + datetime(2023, 1, 1, 0, 0, tzinfo=timezone.utc), + datetime(2023, 1, 2, 0, 0, tzinfo=timezone.utc), + datetime(2023, 1, 3, 0, 0, tzinfo=timezone.utc), + datetime(2023, 1, 4, 0, 0, tzinfo=timezone.utc), + ], + "col3": ["True", "False", "True", "False"], + } + ) + coltypes: list[GenericDataType] = [ + GenericDataType.STRING, + GenericDataType.NUMERIC, + GenericDataType.TEMPORAL, + GenericDataType.BOOLEAN, + ] + + # only col1 should be converted to numeric, according to coltypes definition + assert not is_numeric_dtype(df["col1"]) + apply_column_types(df, coltypes) + assert not is_numeric_dtype(df["col0"]) + assert is_numeric_dtype(df["col1"]) + assert not is_numeric_dtype(df["col2"]) + assert not is_numeric_dtype(df["col3"]) + + +def test_column_data_types_with_failing_conversion(): + df = pd.DataFrame( + { + "col0": ["123", "1", "2", "3"], + "col1": ["456", "non_numeric_value", "0", ".45"], + "col2": [ + datetime(2023, 1, 1, 0, 0, tzinfo=timezone.utc), + datetime(2023, 1, 2, 0, 0, tzinfo=timezone.utc), + datetime(2023, 1, 3, 0, 0, tzinfo=timezone.utc), + datetime(2023, 1, 4, 0, 0, tzinfo=timezone.utc), + ], + "col3": ["True", "False", "True", "False"], + } + ) + coltypes: list[GenericDataType] = [ + GenericDataType.STRING, + GenericDataType.NUMERIC, + GenericDataType.TEMPORAL, + GenericDataType.BOOLEAN, + ] + + # should not fail neither convert + assert not is_numeric_dtype(df["col1"]) + apply_column_types(df, coltypes) + assert not is_numeric_dtype(df["col0"]) + assert not is_numeric_dtype(df["col1"]) + assert not is_numeric_dtype(df["col2"]) + assert not 
is_numeric_dtype(df["col3"]) diff --git a/tests/unit_tests/utils/json_tests.py b/tests/unit_tests/utils/json_tests.py index 2eb7f7c2a8d1..0a302dfb4497 100644 --- a/tests/unit_tests/utils/json_tests.py +++ b/tests/unit_tests/utils/json_tests.py @@ -14,6 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +import copy import datetime import math from unittest.mock import MagicMock @@ -146,3 +147,48 @@ def test_validate_json(): str(excinfo.value) == "Unterminated string starting at: line 1 column 28 (char 27)" ) + + +def test_sensitive_fields() -> None: + """ + Test masking/unmasking of sensitive fields. + """ + payload = { + "password": "SECRET", + "credentials": { + "user_id": "alice", + "user_token": "TOKEN", + }, + } + sensitive_fields = {"$.password", "$.credentials.user_token"} + + redacted_payload = json.redact_sensitive(payload, sensitive_fields) + assert redacted_payload == { + "password": "XXXXXXXXXX", + "credentials": { + "user_id": "alice", + "user_token": "XXXXXXXXXX", + }, + } + + new_payload = copy.deepcopy(redacted_payload) + new_payload["credentials"]["user_id"] = "bob" + + assert json.reveal_sensitive(payload, new_payload, sensitive_fields) == { + "password": "SECRET", + "credentials": { + "user_id": "bob", + "user_token": "TOKEN", + }, + } + + new_payload = copy.deepcopy(redacted_payload) + new_payload["credentials"]["user_token"] = "NEW_TOKEN" + + assert json.reveal_sensitive(payload, new_payload, sensitive_fields) == { + "password": "SECRET", + "credentials": { + "user_id": "alice", + "user_token": "NEW_TOKEN", + }, + }