diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e225f98b5712..5bed4f7e13fa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -188,220 +188,6 @@ jobs: - name: Web UI Checks run: core/trino-main/bin/check_webui.sh - test-jdbc-compatibility: - runs-on: ubuntu-latest - timeout-minutes: 30 - env: - SECRETS_PRESENT: ${{ secrets.SECRETS_PRESENT }} - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # checkout tags so version in Manifest is set properly - ref: | - ${{ github.event_name == 'repository_dispatch' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha && - format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }} - - uses: ./.github/actions/setup - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -pl '!:trino-docs,!:trino-server,!:trino-server-rpm' - - name: Test old JDBC vs current server - run: | - if [ ! -f gib-impacted.log ] || grep -q testing/trino-test-jdbc-compatibility-old-driver gib-impacted.log; then - testing/trino-test-jdbc-compatibility-old-driver/bin/run_tests.sh - fi - - name: Test current JDBC vs old server - if: always() - run: | - if [ ! 
-f gib-impacted.log ] || grep -q testing/trino-test-jdbc-compatibility-old-server gib-impacted.log; then - $MAVEN test ${MAVEN_TEST} -pl :trino-test-jdbc-compatibility-old-server - fi - - name: Upload test results - uses: actions/upload-artifact@v3 - # Upload all test reports only on failure, because the artifacts are large - if: failure() - with: - name: result ${{ github.job }} - path: | - **/target/surefire-reports - **/target/checkstyle-* - - name: Upload test report - uses: actions/upload-artifact@v3 - # Always upload the test report for the annotate.yml workflow, - # but only the single XML file to keep the artifact small - if: always() - with: - # Name prefix is checked in the `Annotate checks` workflow - name: test report ${{ github.job }} - path: | - **/surefire-reports/TEST-*.xml - retention-days: ${{ env.TEST_REPORT_RETENTION_DAYS }} - - name: Upload heap dump - uses: actions/upload-artifact@v3 - if: failure() && env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository - with: - name: heap dump ${{ github.job }} - if-no-files-found: 'ignore' - path: | - **/*.hprof - retention-days: ${{ env.HEAP_DUMP_RETENTION_DAYS }} - - name: Clean local Maven repo - # Avoid creating a cache entry because this job doesn't download all dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: rm -rf ~/.m2/repository - - hive-tests: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - config: - - config-hdp3 - # TODO: config-apache-hive3 - timeout-minutes: 60 - env: - SECRETS_PRESENT: ${{ secrets.SECRETS_PRESENT }} - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB - ref: | - ${{ github.event_name == 'repository_dispatch' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha && - format('refs/pull/{0}/head', 
github.event.client_payload.pull_request.number) || '' }} - - uses: ./.github/actions/setup - - name: Install Hive Module - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -am -pl :trino-hive-hadoop2 - - name: Run Hive Tests - run: | - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_tests.sh - - name: Run Hive S3 Tests - env: - AWS_ACCESS_KEY_ID: ${{ secrets.TRINO_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }} - S3_BUCKET: "trino-ci-test" - S3_BUCKET_ENDPOINT: "https://s3.us-east-2.amazonaws.com" - run: | - if [ "${AWS_ACCESS_KEY_ID}" != "" ]; then - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_s3_tests.sh - if [ matrix.config == 'config-hdp3' ]; then - # JsonSerde class needed for the S3 Select JSON tests is only available on hdp3. 
- plugin/trino-hive-hadoop2/bin/run_hive_s3_select_json_tests.sh - fi - fi - - name: Run Hive AWS Tests - env: - AWS_ACCESS_KEY_ID: ${{ secrets.TRINO_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }} - AWS_REGION: us-east-2 - S3_BUCKET: "trino-ci-test" - S3_BUCKET_ENDPOINT: "s3.us-east-2.amazonaws.com" - run: | - if [ "${AWS_ACCESS_KEY_ID}" != "" ]; then - $MAVEN test ${MAVEN_TEST} -pl :trino-hive -P aws-tests - fi - - name: Run Hive Azure ABFS Access Key Tests - if: matrix.config != 'config-empty' # Hive 1.x does not support Azure storage - env: - ABFS_CONTAINER: ${{ secrets.AZURE_ABFS_CONTAINER }} - ABFS_ACCOUNT: ${{ secrets.AZURE_ABFS_ACCOUNT }} - ABFS_ACCESS_KEY: ${{ secrets.AZURE_ABFS_ACCESSKEY }} - run: | - if [ "${ABFS_CONTAINER}" != "" ]; then - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_abfs_access_key_tests.sh - fi - - name: Run Hive Azure ABFS OAuth Tests - if: matrix.config != 'config-empty' # Hive 1.x does not support Azure storage - env: - ABFS_CONTAINER: ${{ secrets.AZURE_ABFS_CONTAINER }} - ABFS_ACCOUNT: ${{ secrets.AZURE_ABFS_ACCOUNT }} - ABFS_OAUTH_ENDPOINT: ${{ secrets.AZURE_ABFS_OAUTH_ENDPOINT }} - ABFS_OAUTH_CLIENTID: ${{ secrets.AZURE_ABFS_OAUTH_CLIENTID }} - ABFS_OAUTH_SECRET: ${{ secrets.AZURE_ABFS_OAUTH_SECRET }} - run: | - if [ -n "$ABFS_CONTAINER" ]; then - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_abfs_oauth_tests.sh - fi - - name: Run Hive Azure WASB Tests - if: matrix.config != 'config-empty' # Hive 1.x does not support Azure storage - env: - WASB_CONTAINER: ${{ secrets.AZURE_WASB_CONTAINER }} - WASB_ACCOUNT: ${{ secrets.AZURE_WASB_ACCOUNT }} - WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESSKEY }} - run: | - if [ "${WASB_CONTAINER}" != "" ]; then - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - 
plugin/trino-hive-hadoop2/bin/run_hive_wasb_tests.sh - fi - - name: Run Hive Azure ADL Tests - if: matrix.config != 'config-empty' # Hive 1.x does not support Azure storage - env: - ADL_NAME: ${{ secrets.AZURE_ADL_NAME }} - ADL_CLIENT_ID: ${{ secrets.AZURE_ADL_CLIENTID }} - ADL_CREDENTIAL: ${{ secrets.AZURE_ADL_CREDENTIAL }} - ADL_REFRESH_URL: ${{ secrets.AZURE_ADL_REFRESHURL }} - run: | - if [ "${ADL_NAME}" != "" ]; then - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_adl_tests.sh - fi - - name: Run Hive Alluxio Tests - run: | - source plugin/trino-hive-hadoop2/conf/hive-tests-${{ matrix.config }}.sh && - plugin/trino-hive-hadoop2/bin/run_hive_alluxio_tests.sh - - name: Upload test results - uses: actions/upload-artifact@v3 - # Upload all test reports only on failure, because the artifacts are large - if: failure() - with: - name: result ${{ github.job }} - path: | - **/target/surefire-reports - **/target/checkstyle-* - - name: Upload test report - uses: actions/upload-artifact@v3 - # Always upload the test report for the annotate.yml workflow, - # but only the single XML file to keep the artifact small - if: always() - with: - # Name prefix is checked in the `Annotate checks` workflow - name: test report ${{ github.job }} (${{ matrix.config }}) - path: | - **/surefire-reports/TEST-*.xml - retention-days: ${{ env.TEST_REPORT_RETENTION_DAYS }} - - name: Upload heap dump - uses: actions/upload-artifact@v3 - if: failure() && env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository - with: - name: heap dump ${{ github.job }} (${{ matrix.config }}) - if-no-files-found: 'ignore' - path: | - **/*.hprof - retention-days: ${{ env.HEAP_DUMP_RETENTION_DAYS }} - - name: Update PR check - uses: ./.github/actions/update-check - if: >- - failure() && - github.event_name == 'repository_dispatch' && - 
github.event.client_payload.slash_command.args.named.sha != '' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha - with: - pull_request_number: ${{ github.event.client_payload.pull_request.number }} - check_name: ${{ github.job }} (${{ matrix.config }}) with secrets - conclusion: ${{ job.status }} - github_token: ${{ secrets.GITHUB_TOKEN }} - - name: Clean local Maven repo - # Avoid creating a cache entry because this job doesn't download all dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: rm -rf ~/.m2/repository - test-other-modules: runs-on: ubuntu-latest timeout-minutes: 60 @@ -486,250 +272,6 @@ jobs: if: steps.cache.outputs.cache-hit != 'true' run: rm -rf ~/.m2/repository - build-test-matrix: - runs-on: ubuntu-latest - outputs: - matrix: ${{ steps.set-matrix.outputs.matrix }} - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB - ref: | - ${{ github.event_name == 'repository_dispatch' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha && - format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }} - - uses: ./.github/actions/setup - with: - cache: false - - name: Maven validate - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN validate ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -Dgib.logImpactedTo=gib-impacted.log -P disable-check-spi-dependencies -pl '!:trino-docs' - - name: Set matrix - id: set-matrix - run: | - # GIB doesn't run on master, so make sure the file always exist - touch gib-impacted.log - cat < .github/test-matrix.yaml - include: - - { modules: [ client/trino-jdbc, plugin/trino-base-jdbc, plugin/trino-thrift, plugin/trino-memory ] } - - { modules: core/trino-main } - - { modules: core/trino-main, jdk: 19 } - - { modules: plugin/trino-accumulo } - - { modules: plugin/trino-bigquery } - - { 
modules: plugin/trino-bigquery, profile: cloud-tests-arrow } - - { modules: plugin/trino-cassandra } - - { modules: plugin/trino-clickhouse } - - { modules: plugin/trino-delta-lake } - - { modules: plugin/trino-delta-lake, profile: cloud-tests } - - { modules: plugin/trino-delta-lake, profile: gcs-tests } - - { modules: plugin/trino-druid } - - { modules: plugin/trino-elasticsearch } - - { modules: plugin/trino-hive } - - { modules: plugin/trino-hive, profile: test-parquet } - - { modules: plugin/trino-hudi } - - { modules: plugin/trino-iceberg } - - { modules: plugin/trino-iceberg, profile: additional-catalog-tests } - - { modules: plugin/trino-iceberg, profile: cloud-tests } - - { modules: plugin/trino-kafka } - - { modules: plugin/trino-kudu } - - { modules: plugin/trino-mariadb } - - { modules: plugin/trino-mongodb } - - { modules: plugin/trino-mysql } - - { modules: plugin/trino-oracle } - - { modules: plugin/trino-phoenix5 } - - { modules: plugin/trino-pinot } - - { modules: plugin/trino-postgresql } - - { modules: plugin/trino-raptor-legacy } - - { modules: plugin/trino-redis } - - { modules: plugin/trino-singlestore } - - { modules: plugin/trino-sqlserver } - - { modules: testing/trino-faulttolerant-tests, profile: default } - - { modules: plugin/trino-delta-lake, profile: fte-tests } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-delta } - - { modules: plugin/trino-hive, profile: fte-tests } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-hive } - - { modules: plugin/trino-iceberg, profile: fte-tests } - - { modules: testing/trino-faulttolerant-tests, profile: test-fault-tolerant-iceberg } - - { modules: plugin/trino-postgresql, profile: fte-tests } - - { modules: plugin/trino-mongodb, profile: fte-tests } - - { modules: plugin/trino-mysql, profile: fte-tests } - - { modules: plugin/trino-sqlserver, profile: fte-tests } - - { modules: testing/trino-tests } - EOF - 
./.github/bin/build-matrix-from-impacted.py -v -i gib-impacted.log -m .github/test-matrix.yaml -o matrix.json - echo "Matrix: $(jq '.' matrix.json)" - echo "matrix=$(jq -c '.' matrix.json)" >> $GITHUB_OUTPUT - - name: Clean local Maven repo - # Avoid creating a cache entry because this job doesn't download all dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: rm -rf ~/.m2/repository - - test: - runs-on: ubuntu-latest - needs: build-test-matrix - if: needs.build-test-matrix.outputs.matrix != '{}' - strategy: - fail-fast: false - matrix: ${{ fromJson(needs.build-test-matrix.outputs.matrix) }} - timeout-minutes: 60 - env: - SECRETS_PRESENT: ${{ secrets.SECRETS_PRESENT }} - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB - ref: | - ${{ github.event_name == 'repository_dispatch' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha && - format('refs/pull/{0}/head', github.event.client_payload.pull_request.number) || '' }} - - uses: ./.github/actions/setup - with: - java-version: ${{ matrix.jdk != '' && matrix.jdk || '17' }} - - name: Cleanup node - # This is required as a virtual environment update 20210219.1 left too little space for MemSQL to work - if: matrix.modules == 'plugin/trino-singlestore' - run: .github/bin/cleanup-node.sh - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN clean install ${MAVEN_FAST_INSTALL} ${MAVEN_GIB} -am -pl "${{ matrix.modules }}" - - name: Maven Tests - if: >- - matrix.modules != 'plugin/trino-singlestore' - && ! (contains(matrix.modules, 'trino-delta-lake') && contains(matrix.profile, 'cloud-tests')) - && ! (contains(matrix.modules, 'trino-delta-lake') && contains(matrix.profile, 'gcs-tests')) - && ! (contains(matrix.modules, 'trino-iceberg') && contains(matrix.profile, 'cloud-tests')) - && ! 
(contains(matrix.modules, 'trino-bigquery') && contains(matrix.profile, 'cloud-tests-arrow')) - run: $MAVEN test ${MAVEN_TEST} -pl ${{ matrix.modules }} ${{ matrix.profile != '' && format('-P {0}', matrix.profile) || '' }} - # Additional tests for selected modules - - name: Cloud Delta Lake Tests - # Cloud tests are separate because they are time intensive, requiring cross-cloud network communication - env: - ABFS_CONTAINER: ${{ secrets.AZURE_ABFS_CONTAINER }} - ABFS_ACCOUNT: ${{ secrets.AZURE_ABFS_ACCOUNT }} - ABFS_ACCESSKEY: ${{ secrets.AZURE_ABFS_ACCESSKEY }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESSKEY }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRETKEY }} - AWS_REGION: us-east-2 - # Run tests if any of the secrets is present. Do not skip tests when one secret renamed, or secret name has a typo. - if: >- - contains(matrix.modules, 'trino-delta-lake') && contains(matrix.profile, 'cloud-tests') && - (env.ABFS_ACCOUNT != '' || env.ABFS_CONTAINER != '' || env.ABFS_ACCESSKEY != '' || env.AWS_ACCESS_KEY_ID != '' || env.AWS_SECRET_ACCESS_KEY != '') - run: | - $MAVEN test ${MAVEN_TEST} ${{ format('-P {0}', matrix.profile) }} -pl :trino-delta-lake \ - -Dhive.hadoop2.azure-abfs-container="${ABFS_CONTAINER}" \ - -Dhive.hadoop2.azure-abfs-account="${ABFS_ACCOUNT}" \ - -Dhive.hadoop2.azure-abfs-access-key="${ABFS_ACCESSKEY}" - - name: GCS Delta Lake Tests - # Cloud tests are separate because they are time intensive, requiring cross-cloud network communication - env: - GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }} - # Run tests if any of the secrets is present. Do not skip tests when one secret renamed, or secret name has a typo. 
- if: >- - contains(matrix.modules, 'trino-delta-lake') && contains(matrix.profile, 'gcs-tests') && env.GCP_CREDENTIALS_KEY != '' - run: | - $MAVEN test ${MAVEN_TEST} -P gcs-tests -pl :trino-delta-lake \ - -Dtesting.gcp-storage-bucket="trino-ci-test" \ - -Dtesting.gcp-credentials-key="${GCP_CREDENTIALS_KEY}" - - name: Memsql Tests - env: - MEMSQL_LICENSE: ${{ secrets.MEMSQL_LICENSE }} - if: matrix.modules == 'plugin/trino-singlestore' && env.MEMSQL_LICENSE != '' - run: | - $MAVEN test ${MAVEN_TEST} -pl :trino-singlestore -Dmemsql.license=${MEMSQL_LICENSE} - - name: Cloud BigQuery Tests - env: - BIGQUERY_CREDENTIALS_KEY: ${{ secrets.BIGQUERY_CREDENTIALS_KEY }} - if: matrix.modules == 'plugin/trino-bigquery' && !contains(matrix.profile, 'cloud-tests-arrow') && env.BIGQUERY_CREDENTIALS_KEY != '' - run: | - $MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests \ - -Dbigquery.credentials-key="${BIGQUERY_CREDENTIALS_KEY}" \ - -Dtesting.gcp-storage-bucket="trino-ci-test" \ - -Dtesting.alternate-bq-project-id=bigquery-cicd-alternate - - name: Cloud BigQuery Arrow Serialization Tests - env: - BIGQUERY_CREDENTIALS_KEY: ${{ secrets.BIGQUERY_CREDENTIALS_KEY }} - if: matrix.modules == 'plugin/trino-bigquery' && contains(matrix.profile, 'cloud-tests-arrow') && env.BIGQUERY_CREDENTIALS_KEY != '' - run: | - $MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-arrow \ - -Dbigquery.credentials-key="${BIGQUERY_CREDENTIALS_KEY}" \ - -Dtesting.gcp-storage-bucket="trino-ci-test" - - name: Cloud BigQuery Case Insensitive Mapping Tests - env: - BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY: ${{ secrets.BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY }} - if: matrix.modules == 'plugin/trino-bigquery' && !contains(matrix.profile, 'cloud-tests-arrow') && env.BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY != '' - run: | - $MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-case-insensitive-mapping -Dbigquery.credentials-key="${BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY}" - - name: 
Iceberg Cloud Tests - env: - AWS_ACCESS_KEY_ID: ${{ secrets.TRINO_AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.TRINO_AWS_SECRET_ACCESS_KEY }} - AWS_REGION: us-east-2 - S3_BUCKET: trino-ci-test - GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }} - if: >- - contains(matrix.modules, 'trino-iceberg') && contains(matrix.profile, 'cloud-tests') && - (env.AWS_ACCESS_KEY_ID != '' || env.AWS_SECRET_ACCESS_KEY != '' || env.GCP_CREDENTIALS_KEY != '') - run: | - $MAVEN test ${MAVEN_TEST} -pl :trino-iceberg ${{ format('-P {0}', matrix.profile) }} \ - -Ds3.bucket=${S3_BUCKET} \ - -Dtesting.gcp-storage-bucket="trino-ci-test-us-east" \ - -Dtesting.gcp-credentials-key="${GCP_CREDENTIALS_KEY}" - - name: Sanitize artifact name - if: always() - run: | - # Generate a valid artifact name and make it available to next steps as - # an environment variable ARTIFACT_NAME - # ", :, <, >, |, *, ?, \, / are not allowed in artifact names, replace it with an underscore - name=$(echo -n "${{ matrix.modules }}" | sed -e 's/[":<>|\*\?\\\/]/_/g') - echo "ARTIFACT_NAME=$name" >> $GITHUB_ENV - - name: Upload test results - uses: actions/upload-artifact@v3 - # Upload all test reports only on failure, because the artifacts are large - if: failure() - with: - name: result ${{ env.ARTIFACT_NAME }} - path: | - **/target/surefire-reports - **/target/checkstyle-* - - name: Upload test report - uses: actions/upload-artifact@v3 - # Always upload the test report for the annotate.yml workflow, - # but only the single XML file to keep the artifact small - if: always() - with: - # Name prefix is checked in the `Annotate checks` workflow - name: test report ${{ github.job }} (${{ env.ARTIFACT_NAME }}) - path: | - **/surefire-reports/TEST-*.xml - retention-days: ${{ env.TEST_REPORT_RETENTION_DAYS }} - - name: Upload heap dump - uses: actions/upload-artifact@v3 - if: failure() && env.SECRETS_PRESENT == '' && github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 
github.repository - with: - name: heap dump ${{ github.job }} (${{ env.ARTIFACT_NAME }}) - if-no-files-found: 'ignore' - path: | - **/*.hprof - retention-days: ${{ env.HEAP_DUMP_RETENTION_DAYS }} - - name: Update PR check - uses: ./.github/actions/update-check - if: >- - failure() && - github.event_name == 'repository_dispatch' && - github.event.client_payload.slash_command.args.named.sha != '' && - github.event.client_payload.pull_request.head.sha == github.event.client_payload.slash_command.args.named.sha - with: - pull_request_number: ${{ github.event.client_payload.pull_request.number }} - check_name: ${{ github.job }} with secrets - conclusion: ${{ job.status }} - github_token: ${{ secrets.GITHUB_TOKEN }} - - name: Clean local Maven repo - # Avoid creating a cache entry because this job doesn't download all dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: rm -rf ~/.m2/repository - build-pt: runs-on: ubuntu-latest outputs: @@ -785,131 +327,26 @@ jobs: cat < .github/test-pt-matrix.yaml config: - default - - hdp3 - # TODO: config-apache-hive3 suite: - - suite-1 - - suite-2 - - suite-3 - # suite-4 does not exist - - suite-5 - - suite-azure - - suite-delta-lake-databricks73 - - suite-delta-lake-databricks91 - - suite-delta-lake-databricks104 - - suite-delta-lake-databricks113 - - suite-gcs - - suite-clients - - suite-functions - - suite-tpch - - suite-storage-formats-detailed - exclude: - - config: default - ignore exclusion if: >- - ${{ github.event_name != 'pull_request' - || github.event.pull_request.head.repo.full_name == github.repository - || contains(github.event.pull_request.labels.*.name, 'tests:all') - || contains(github.event.pull_request.labels.*.name, 'tests:hive') - }} - - - suite: suite-azure - config: default - - suite: suite-azure - ignore exclusion if: >- - ${{ secrets.AZURE_ABFS_CONTAINER != '' && - secrets.AZURE_ABFS_ACCOUNT != '' && - secrets.AZURE_ABFS_ACCESSKEY != '' }} - - - suite: suite-gcs - config: default - - suite: 
suite-gcs - ignore exclusion if: >- - ${{ secrets.GCP_CREDENTIALS_KEY != '' }} - - - suite: suite-delta-lake-databricks73 - config: hdp3 - - suite: suite-delta-lake-databricks73 - ignore exclusion if: >- - ${{ secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-delta-lake-databricks91 - config: hdp3 - - suite: suite-delta-lake-databricks91 - ignore exclusion if: >- - ${{ secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-delta-lake-databricks104 - config: hdp3 - - suite: suite-delta-lake-databricks104 - ignore exclusion if: >- - ${{ secrets.DATABRICKS_TOKEN != '' }} - - suite: suite-delta-lake-databricks113 - config: hdp3 - - suite: suite-delta-lake-databricks113 - ignore exclusion if: >- - ${{ secrets.DATABRICKS_TOKEN != '' }} - - ignore exclusion if: - # Do not use this property outside of the matrix configuration. - # - # This is added to all matrix entries so they may be conditionally - # excluded by adding them to the excludes list with a GHA expression - # for this property. - # - If the expression evaluates to true, it will never match the a - # actual value of the property, and will therefore not be excluded. - # - If the expression evaluates to false, it will match the actual - # value of the property, and the exclusion will apply normally. 
- - "false" - include: - # this suite is not meant to be run with different configs - - config: default - suite: suite-6-non-generic - # this suite is not meant to be run with different configs - - config: default - suite: suite-7-non-generic - # this suite is not meant to be run with different configs - - config: default - suite: suite-8-non-generic - # this suite is not meant to be run with different configs - - config: default - suite: suite-tpcds - # this suite is not meant to be run with different configs - - config: default - suite: suite-parquet - # this suite is not meant to be run with different configs - - config: default - suite: suite-oauth2 - # this suite is not meant to be run with different configs - - config: default - suite: suite-ldap - # this suite is not meant to be run with different configs - - config: default - suite: suite-compatibility - # this suite is designed specifically for apache-hive3. TODO remove the suite once we can run all regular tests on apache-hive3. 
- - config: apache-hive3 - suite: suite-hms-only - # this suite is not meant to be run with different configs - - config: default - suite: suite-all - # this suite is not meant to be run with different configs - - config: default - suite: suite-delta-lake-oss - # this suite is not meant to be run with different configs - - config: default - suite: suite-kafka - # this suite is not meant to be run with different configs - - config: default - suite: suite-cassandra - # this suite is not meant to be run with different configs - - config: default - suite: suite-clickhouse - # this suite is not meant to be run with different configs - - config: default - suite: suite-mysql - # this suite is not meant to be run with different configs - - config: default - suite: suite-iceberg - # this suite is not meant to be run with different configs - - config: default - suite: suite-hudi + - suite-1 + - suite-2 + - suite-3 + # suite-4 does not exist + - suite-5 + - suite-6-non-generic + - suite-7-non-generic + - suite-8-non-generic + - suite-azure + - suite-delta-lake-databricks73 + - suite-delta-lake-databricks91 + - suite-delta-lake-databricks104 + - suite-delta-lake-databricks113 + - suite-gcs + - suite-clients + - suite-functions + - suite-tpch + - suite-tpcds + - suite-storage-formats-detailed EOF - name: Build PT matrix (all) if: | diff --git a/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/authentication/KerberosAuthentication.java b/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/authentication/KerberosAuthentication.java index dce7ba4efba4..99b227132584 100644 --- a/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/authentication/KerberosAuthentication.java +++ b/lib/trino-plugin-toolkit/src/main/java/io/trino/plugin/base/authentication/KerberosAuthentication.java @@ -58,8 +58,11 @@ public Subject getSubject() public void attemptLogin(Subject subject) { try { - LoginContext loginContext = new 
LoginContext("", subject, null, configuration); - loginContext.login(); + synchronized (subject.getPrivateCredentials()) { + subject.getPrivateCredentials().clear(); + LoginContext loginContext = new LoginContext("", subject, null, configuration); + loginContext.login(); + } } catch (LoginException e) { throw new RuntimeException(e);