diff --git a/.github/bin/build-matrix-from-impacted.py b/.github/bin/build-matrix-from-impacted.py new file mode 100755 index 000000000000..f915e4a06eee --- /dev/null +++ b/.github/bin/build-matrix-from-impacted.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python3 + +import argparse +import yaml +import json +import logging +import sys + + +def main(): + parser = argparse.ArgumentParser( + description="Filter test matrix modules using list of impacted modules." + ) + parser.add_argument( + "-m", + "--matrix", + type=argparse.FileType("r"), + default=".github/test-matrix.yaml", + help="A YAML file with the test matrix", + ) + parser.add_argument( + "-i", + "--impacted", + type=argparse.FileType("r"), + default="gib-impacted.log", + help="File containing list of impacted modules, one per line, " + "as paths, not artifact ids", + ) + parser.add_argument( + "-o", + "--output", + type=argparse.FileType("w"), + default=sys.stdout, + help="Filename to write impacted modules matrix JSON to", + ) + parser.add_argument( + "-v", + "--verbose", + action="store_const", + dest="loglevel", + const=logging.INFO, + default=logging.WARNING, + help="Print info level logs", + ) + + args = parser.parse_args() + logging.basicConfig(level=args.loglevel) + build(args.matrix, args.impacted, args.output) + + +def build(matrix_file, impacted_file, output_file): + matrix = yaml.load(matrix_file, Loader=yaml.Loader) + impacted = list(filter(None, [line.strip() for line in impacted_file.readlines()])) + logging.info("Read matrix: %s", matrix) + logging.info("Read impacted: %s", impacted) + include = [] + for item in matrix.get("include", []): + modules = item.get("modules", []) + if isinstance(modules, str): + modules = [modules] + if not any(module in impacted for module in modules): + logging.info("Excluding matrix section: %s", item) + continue + include.append( + { + # concatenate because matrix values should be primitives + "modules": ",".join(modules), + "profile": item.get("profile", ""), + } + ) 
+ if include: + matrix["include"] = include + json.dump(matrix, output_file) + output_file.write("\n") + + +if __name__ == "__main__": + main() diff --git a/.github/bin/git-fetch-base-ref.sh b/.github/bin/git-fetch-base-ref.sh new file mode 100755 index 000000000000..ecd71db13843 --- /dev/null +++ b/.github/bin/git-fetch-base-ref.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +set -euo pipefail + +if [ -z "${GITHUB_BASE_REF:-}" ] || [ "$GITHUB_BASE_REF" == master ]; then + echo >&2 "GITHUB_BASE_REF is not set or is master, not fetching it" + exit 0 +fi + +git fetch --no-tags --prune origin "$GITHUB_BASE_REF" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c208772e28c6..4d020dcddd8a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,8 +16,8 @@ env: # maven.wagon.rto is in millis, defaults to 30m MAVEN_OPTS: "-Xmx512M -XX:+ExitOnOutOfMemoryError -Dmaven.wagon.rto=60000" MAVEN_INSTALL_OPTS: "-Xmx2G -XX:+ExitOnOutOfMemoryError -Dmaven.wagon.rto=60000" - MAVEN_FAST_INSTALL: "-B --strict-checksums -V --quiet -T C1 -DskipTests -Dair.check.skip-all" - MAVEN_TEST: "-B --strict-checksums -Dair.check.skip-all --fail-at-end" + MAVEN_FAST_INSTALL: "-B --strict-checksums -V --quiet -T C1 -DskipTests -Dmaven.source.skip=true -Dair.check.skip-all -P gib -Dgib.referenceBranch=refs/remotes/origin/${{ github.event.pull_request.base.ref }}" + MAVEN_TEST: "-B --strict-checksums -Dmaven.source.skip=true -Dair.check.skip-all --fail-at-end -P gib -Dgib.referenceBranch=refs/remotes/origin/${{ github.event.pull_request.base.ref }}" RETRY: .github/bin/retry # Testcontainers kills image pulls if they don't make progress for > 30s and retries for 2m before failing. 
This means # that if an image doesn't download all it's layers within ~2m then any other concurrent pull will be killed because @@ -51,8 +51,7 @@ jobs: java-version: ${{ matrix.java-version }} cache: 'maven' - name: Configure Problem Matchers - run: | - echo "::add-matcher::.github/problem-matcher.json" + run: echo "::add-matcher::.github/problem-matcher.json" - name: Maven Checks run: | export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" @@ -74,14 +73,17 @@ jobs: timeout-minutes: 45 steps: - uses: actions/checkout@v2 + with: + fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB + - name: Fetch base ref to find merge-base for GIB + run: .github/bin/git-fetch-base-ref.sh - uses: actions/setup-java@v2 with: distribution: 'zulu' java-version: 11 cache: 'maven' - name: Configure Problem Matchers - run: | - echo "::add-matcher::.github/problem-matcher.json" + run: echo "::add-matcher::.github/problem-matcher.json" - name: Maven Install run: | export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" @@ -90,9 +92,9 @@ jobs: run: | export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" # Run Error Prone on one module with a retry to ensure all runtime dependencies are fetched - $RETRY $MAVEN ${MAVEN_TEST} -T C1 clean test-compile -P errorprone-compiler -pl ':trino-spi' + $RETRY $MAVEN ${MAVEN_TEST} -T C1 clean test-compile -P gib,errorprone-compiler -pl ':trino-spi' # The main Error Prone run - $MAVEN ${MAVEN_TEST} -T C1 clean test-compile -P errorprone-compiler \ + $MAVEN ${MAVEN_TEST} -T C1 clean test-compile -P gib,errorprone-compiler \ -pl '!:trino-docs,!:trino-server,!:trino-server-rpm' web-ui-checks: @@ -110,23 +112,30 @@ jobs: - uses: actions/checkout@v2 with: fetch-depth: 0 # checkout tags so version in Manifest is set properly + - name: Fetch base ref to find merge-base for GIB + run: .github/bin/git-fetch-base-ref.sh - uses: actions/setup-java@v2 with: distribution: 'zulu' java-version: 11 cache: 'maven' - name: Configure Problem Matchers - run: | - echo 
"::add-matcher::.github/problem-matcher.json" + run: echo "::add-matcher::.github/problem-matcher.json" - name: Maven Install run: | export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN install ${MAVEN_FAST_INSTALL} -pl '!:trino-test-jdbc-compatibility-old-driver,!:trino-docs,!:trino-server,!:trino-server-rpm' + $RETRY $MAVEN install ${MAVEN_FAST_INSTALL} -pl '!:trino-docs,!:trino-server,!:trino-server-rpm' - name: Test old JDBC vs current server - run: testing/trino-test-jdbc-compatibility-old-driver/bin/run_tests.sh + run: | + if [ ! -f gib-impacted.log ] || grep -q testing/trino-test-jdbc-compatibility-old-driver gib-impacted.log; then + testing/trino-test-jdbc-compatibility-old-driver/bin/run_tests.sh + fi - name: Test current JDBC vs old server if: always() - run: $MAVEN test ${MAVEN_TEST} -pl :trino-test-jdbc-compatibility-old-server + run: | + if [ ! -f gib-impacted.log ] || grep -q testing/trino-test-jdbc-compatibility-old-server gib-impacted.log; then + $MAVEN test ${MAVEN_TEST} -pl :trino-test-jdbc-compatibility-old-server + fi - name: Upload test results uses: actions/upload-artifact@v2 # Upload all test reports only on failure, because the artifacts are large @@ -161,14 +170,17 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v2 + with: + fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB + - name: Fetch base ref to find merge-base for GIB + run: .github/bin/git-fetch-base-ref.sh - uses: actions/setup-java@v2 with: distribution: 'zulu' java-version: 11 cache: 'maven' - name: Configure Problem Matchers - run: | - echo "::add-matcher::.github/problem-matcher.json" + run: echo "::add-matcher::.github/problem-matcher.json" - name: Install Hive Module run: | export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" @@ -274,14 +286,17 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v2 + with: + fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB + - name: Fetch base ref to find 
merge-base for GIB + run: .github/bin/git-fetch-base-ref.sh - uses: actions/setup-java@v2 with: distribution: 'zulu' java-version: 11 cache: 'maven' - name: Configure Problem Matchers - run: | - echo "::add-matcher::.github/problem-matcher.json" + run: echo "::add-matcher::.github/problem-matcher.json" - name: Maven Install run: | export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" @@ -335,170 +350,126 @@ jobs: **/surefire-reports/TEST-*.xml retention-days: ${{ env.TEST_REPORT_RETENTION_DAYS }} - test: + build-test-matrix: runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - modules: - - ":trino-main" - - ":trino-tests" - - ":trino-raptor-legacy" - - ":trino-accumulo" - - ":trino-cassandra" - - ":trino-clickhouse" - - ":trino-hive" - - ":trino-hive -P test-parquet" - - ":trino-hive -P test-failure-recovery" - - ":trino-hive -P test-fault-tolerant-execution" - - ":trino-elasticsearch,:trino-elasticsearch -P test-stats" - - ":trino-mongodb" - - ":trino-kafka" - - ":trino-pinot" - - ":trino-redis" - - ":trino-mysql" - - ":trino-postgresql" - - ":trino-sqlserver" - - ":trino-oracle" - - ":trino-kudu" - - ":trino-druid" - - ":trino-iceberg" - - ":trino-iceberg -P test-failure-recovery" - - ":trino-phoenix,:trino-phoenix5" - - ":trino-jdbc,:trino-base-jdbc,:trino-thrift,:trino-memory" - timeout-minutes: 60 + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} steps: - uses: actions/checkout@v2 + with: + fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB + - name: Fetch base ref to find merge-base for GIB + run: .github/bin/git-fetch-base-ref.sh - uses: actions/setup-java@v2 with: distribution: 'zulu' java-version: 11 - cache: 'maven' - - name: Configure Problem Matchers - run: | - echo "::add-matcher::.github/problem-matcher.json" - - name: Maven Install + - name: Maven validate run: | export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN install ${MAVEN_FAST_INSTALL} -am -pl $(echo '${{ matrix.modules }}' | cut -d' ' -f1) - - 
name: Maven Tests - run: $MAVEN test ${MAVEN_TEST} -pl ${{ matrix.modules }} - - name: Sanitize artifact name - if: always() - run: | - # Generate a valid artifact name and make it available to next steps as - # an environment variable ARTIFACT_NAME - # ", :, <, >, |, *, ?, \, / are not allowed in artifact names but we only use : so we remove it - name=$(echo -n "${{ matrix.modules }}" | sed -e 's/[:]//g') - echo "ARTIFACT_NAME=$name" >> $GITHUB_ENV - - name: Upload test results - uses: actions/upload-artifact@v2 - # Upload all test reports only on failure, because the artifacts are large - if: failure() - with: - name: result ${{ env.ARTIFACT_NAME }} - path: | - **/target/surefire-reports - **/target/checkstyle-* - - name: Upload test report - uses: actions/upload-artifact@v2 - # Always upload the test report for the annotate.yml workflow, - # but only the single XML file to keep the artifact small - if: always() - with: - # Name prefix is checked in the `Annotate checks` workflow - name: test report ${{ github.job }} (${{ env.ARTIFACT_NAME }}) - path: | - **/surefire-reports/TEST-*.xml - retention-days: ${{ env.TEST_REPORT_RETENTION_DAYS }} + $RETRY $MAVEN validate ${MAVEN_FAST_INSTALL} -P disable-check-spi-dependencies -pl '!:trino-docs,!:trino-server,!:trino-server-rpm' + - id: set-matrix + run: | + cat <<EOF > .github/test-matrix.yaml + include: + - { modules: core/trino-main } + - { modules: testing/trino-tests } + - { modules: plugin/trino-raptor-legacy } + - { modules: plugin/trino-accumulo } + - { modules: plugin/trino-cassandra } + - { modules: plugin/trino-clickhouse } + - { modules: plugin/trino-hive } + - { modules: plugin/trino-hive, profile: test-parquet } + - { modules: plugin/trino-hive, profile: test-failure-recovery } + - { modules: plugin/trino-hive, profile: test-fault-tolerant-execution } + - { modules: plugin/trino-elasticsearch } + - { modules: plugin/trino-elasticsearch, profile: test-stats } + - { modules: plugin/trino-mongodb } + - { modules: 
plugin/trino-kafka } + - { modules: plugin/trino-pinot } + - { modules: plugin/trino-redis } + - { modules: plugin/trino-mysql } + - { modules: plugin/trino-postgresql } + - { modules: plugin/trino-sqlserver } + - { modules: plugin/trino-memsql } + - { modules: plugin/trino-oracle } + - { modules: plugin/trino-kudu } + - { modules: plugin/trino-druid } + - { modules: plugin/trino-iceberg } + - { modules: plugin/trino-iceberg, profile: test-failure-recovery } + - { modules: [ plugin/trino-phoenix, plugin/trino-phoenix5 ] } + - { modules: [ client/trino-jdbc, plugin/trino-base-jdbc, plugin/trino-thrift, plugin/trino-memory ] } + - { modules: plugin/trino-bigquery } + EOF + ./.github/bin/build-matrix-from-impacted.py -v -m .github/test-matrix.yaml -o matrix.json + echo "Matrix: $(jq '.' matrix.json)" + echo "::set-output name=matrix::$(jq -c '.' matrix.json)" - test-memsql: + test: runs-on: ubuntu-latest - timeout-minutes: 30 + needs: build-test-matrix + if: needs.build-test-matrix.outputs.matrix != '{}' + strategy: + fail-fast: false + matrix: ${{ fromJson(needs.build-test-matrix.outputs.matrix) }} + timeout-minutes: 60 steps: - uses: actions/checkout@v2 + with: + fetch-depth: 0 # checkout all commits to be able to determine merge base for GIB + - name: Fetch base ref to find merge-base for GIB + run: .github/bin/git-fetch-base-ref.sh - uses: actions/setup-java@v2 with: distribution: 'zulu' java-version: 11 cache: 'maven' - name: Configure Problem Matchers - run: | - echo "::add-matcher::.github/problem-matcher.json" + run: echo "::add-matcher::.github/problem-matcher.json" - name: Cleanup node # This is required as a virtual environment update 20210219.1 left too little space for MemSQL to work + if: matrix.modules == 'plugin/trino-memsql' + run: .github/bin/cleanup-node.sh - name: Maven Install run: | export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN install ${MAVEN_FAST_INSTALL} -am -pl :trino-memsql + $RETRY $MAVEN install ${MAVEN_FAST_INSTALL} -am -pl "${{ 
matrix.modules }}" + - name: Maven Tests + if: matrix.modules != 'plugin/trino-memsql' + run: $MAVEN test ${MAVEN_TEST} -pl ${{ matrix.modules }} ${{ matrix.profile != '' && format('-P {0}', matrix.profile) || '' }} + # Additional tests for selected modules - name: Memsql Tests env: MEMSQL_LICENSE: ${{ secrets.MEMSQL_LICENSE }} + if: matrix.modules == 'plugin/trino-memsql' && env.MEMSQL_LICENSE != '' run: | - if [ "${MEMSQL_LICENSE}" != "" ]; then - $MAVEN test ${MAVEN_TEST} -pl :trino-memsql -Dmemsql.license=${MEMSQL_LICENSE} - fi - - name: Upload test results - uses: actions/upload-artifact@v2 - # Upload all test reports only on failure, because the artifacts are large - if: failure() - with: - name: result ${{ github.job }} - path: | - **/target/surefire-reports - **/target/checkstyle-* - - name: Upload test report - uses: actions/upload-artifact@v2 - # Always upload the test report for the annotate.yml workflow, - # but only the single XML file to keep the artifact small - if: always() - with: - # Name prefix is checked in the `Annotate checks` workflow - name: test report ${{ github.job }} - path: | - **/surefire-reports/TEST-*.xml - retention-days: ${{ env.TEST_REPORT_RETENTION_DAYS }} - - test-bigquery: - runs-on: ubuntu-latest - timeout-minutes: 30 - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-java@v2 - with: - distribution: 'zulu' - java-version: 11 - cache: 'maven' - - name: Configure Problem Matchers - run: | - echo "::add-matcher::.github/problem-matcher.json" - - name: Maven Install - run: | - export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN install ${MAVEN_FAST_INSTALL} -am -pl :trino-bigquery - - name: Basic BigQuery Tests - run: $MAVEN test ${MAVEN_TEST} -pl :trino-bigquery + $MAVEN test ${MAVEN_TEST} -pl :trino-memsql -Dmemsql.license=${MEMSQL_LICENSE} - name: Cloud BigQuery Tests env: BIGQUERY_CREDENTIALS_KEY: ${{ secrets.BIGQUERY_CREDENTIALS_KEY }} + if: matrix.modules == 'plugin/trino-bigquery' && 
env.BIGQUERY_CREDENTIALS_KEY != '' run: | - if [ "${BIGQUERY_CREDENTIALS_KEY}" != "" ]; then - $MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests -Dbigquery.credentials-key="${BIGQUERY_CREDENTIALS_KEY}" - fi + $MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests -Dbigquery.credentials-key="${BIGQUERY_CREDENTIALS_KEY}" - name: Cloud BigQuery Case Insensitive Mapping Tests env: - BIGQUERY_CREDENTIALS_KEY: ${{ secrets.BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY }} + BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY: ${{ secrets.BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY }} + if: matrix.modules == 'plugin/trino-bigquery' && env.BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY != '' run: | - if [ "${BIGQUERY_CREDENTIALS_KEY}" != "" ]; then - $MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-case-insensitive-mapping -Dbigquery.credentials-key="${BIGQUERY_CREDENTIALS_KEY}" - fi + $MAVEN test ${MAVEN_TEST} -pl :trino-bigquery -Pcloud-tests-case-insensitive-mapping -Dbigquery.credentials-key="${BIGQUERY_CASE_INSENSITIVE_CREDENTIALS_KEY}" + - name: Sanitize artifact name + if: always() + run: | + # Generate a valid artifact name and make it available to next steps as + # an environment variable ARTIFACT_NAME + # ", :, <, >, |, *, ?, \, / are not allowed in artifact names, replace it with an underscore + name=$(echo -n "${{ matrix.modules }}" | sed -e 's/[":<>|\*\?\\\/]/_/g') + echo "ARTIFACT_NAME=$name" >> $GITHUB_ENV - name: Upload test results uses: actions/upload-artifact@v2 # Upload all test reports only on failure, because the artifacts are large if: failure() with: - name: result ${{ github.job }} + name: result ${{ env.ARTIFACT_NAME }} path: | **/target/surefire-reports **/target/checkstyle-* @@ -509,7 +480,7 @@ jobs: if: always() with: # Name prefix is checked in the `Annotate checks` workflow - name: test report ${{ github.job }} + name: test report ${{ github.job }} (${{ env.ARTIFACT_NAME }}) path: | **/surefire-reports/TEST-*.xml retention-days: ${{ 
env.TEST_REPORT_RETENTION_DAYS }} @@ -613,7 +584,8 @@ jobs: - name: Maven Install run: | export MAVEN_OPTS="${MAVEN_INSTALL_OPTS}" - $RETRY $MAVEN install ${MAVEN_FAST_INSTALL} -pl '!:trino-docs,!:trino-server-rpm' + # GIB needs to be explicitly disabled, because the gib profile enables it, but the trino-server module requires all of its dependencies to be built + $RETRY $MAVEN install ${MAVEN_FAST_INSTALL} -Dgib.disable -pl '!:trino-docs,!:trino-server-rpm' - name: Free Disk Space run: | docker image prune -af diff --git a/.gitignore b/.gitignore index 79edafec0f87..167b85020069 100644 --- a/.gitignore +++ b/.gitignore @@ -26,3 +26,4 @@ benchmark_outputs node_modules product-test-reports .vscode/ +/gib-impacted.log diff --git a/plugin/trino-hive-hadoop2/bin/common.sh b/plugin/trino-hive-hadoop2/bin/common.sh index 59ae828a2e1d..e5282c5a6b90 100644 --- a/plugin/trino-hive-hadoop2/bin/common.sh +++ b/plugin/trino-hive-hadoop2/bin/common.sh @@ -211,3 +211,14 @@ function deploy_core_site_xml() { 'sed "${@:2}" "/docker/files/$1" > /etc/hadoop/conf/core-site.xml' \ bash "$template" "${args[@]}" } + +# Checks if Gitflow Incremental Builder (GIB) is enabled and the trino-hive-hadoop2 module should be built and/or tested +function abort_if_not_gib_impacted() { + local module=plugin/trino-hive-hadoop2 + local impacted_log=gib-impacted.log + if [ -f "$impacted_log" ] && ! grep -q "^${module}$" "$impacted_log"; then + echo >&2 "Module $module not present in $impacted_log, exiting" + exit 0 + fi + return 0 +} diff --git a/plugin/trino-hive-hadoop2/bin/run_hive_abfs_access_key_tests.sh b/plugin/trino-hive-hadoop2/bin/run_hive_abfs_access_key_tests.sh index 915aa4a16732..60c0fd1ec5dc 100755 --- a/plugin/trino-hive-hadoop2/bin/run_hive_abfs_access_key_tests.sh +++ b/plugin/trino-hive-hadoop2/bin/run_hive_abfs_access_key_tests.sh @@ -4,6 +4,8 @@ set -euo pipefail -x . 
"${BASH_SOURCE%/*}/common.sh" +abort_if_not_gib_impacted + check_vars ABFS_CONTAINER ABFS_ACCOUNT ABFS_ACCESS_KEY cleanup_hadoop_docker_containers @@ -26,7 +28,7 @@ stop_unnecessary_hadoop_services # run product tests pushd $PROJECT_ROOT set +e -./mvnw -B -pl :trino-hive-hadoop2 test -P test-hive-hadoop2-abfs-access-key \ +./mvnw ${MAVEN_TEST:--B} -pl :trino-hive-hadoop2 test -P test-hive-hadoop2-abfs-access-key \ -DHADOOP_USER_NAME=hive \ -Dhive.hadoop2.metastoreHost=localhost \ -Dhive.hadoop2.metastorePort=9083 \ diff --git a/plugin/trino-hive-hadoop2/bin/run_hive_abfs_oauth_tests.sh b/plugin/trino-hive-hadoop2/bin/run_hive_abfs_oauth_tests.sh index cac02c3b56d9..87fe5e34291c 100755 --- a/plugin/trino-hive-hadoop2/bin/run_hive_abfs_oauth_tests.sh +++ b/plugin/trino-hive-hadoop2/bin/run_hive_abfs_oauth_tests.sh @@ -3,6 +3,8 @@ set -euxo pipefail . "${BASH_SOURCE%/*}/common.sh" +abort_if_not_gib_impacted + check_vars ABFS_ACCOUNT ABFS_CONTAINER \ ABFS_OAUTH_ENDPOINT ABFS_OAUTH_CLIENTID ABFS_OAUTH_SECRET @@ -27,7 +29,7 @@ stop_unnecessary_hadoop_services pushd $PROJECT_ROOT set +e -./mvnw -B -pl :trino-hive-hadoop2 test -P test-hive-hadoop2-abfs-oauth \ +./mvnw ${MAVEN_TEST:--B} -pl :trino-hive-hadoop2 test -P test-hive-hadoop2-abfs-oauth \ -DHADOOP_USER_NAME=hive \ -Dhive.hadoop2.metastoreHost=localhost \ -Dhive.hadoop2.metastorePort=9083 \ diff --git a/plugin/trino-hive-hadoop2/bin/run_hive_adl_tests.sh b/plugin/trino-hive-hadoop2/bin/run_hive_adl_tests.sh index 7fe0b2bfaef4..c064eb923fa7 100755 --- a/plugin/trino-hive-hadoop2/bin/run_hive_adl_tests.sh +++ b/plugin/trino-hive-hadoop2/bin/run_hive_adl_tests.sh @@ -4,6 +4,8 @@ set -euo pipefail -x . 
"${BASH_SOURCE%/*}/common.sh" +abort_if_not_gib_impacted + check_vars ADL_NAME ADL_CLIENT_ID ADL_CREDENTIAL ADL_REFRESH_URL cleanup_hadoop_docker_containers @@ -26,7 +28,7 @@ stop_unnecessary_hadoop_services # run product tests pushd $PROJECT_ROOT set +e -./mvnw -B -pl :trino-hive-hadoop2 test -P test-hive-hadoop2-adl \ +./mvnw ${MAVEN_TEST:--B} -pl :trino-hive-hadoop2 test -P test-hive-hadoop2-adl \ -DHADOOP_USER_NAME=hive \ -Dhive.hadoop2.metastoreHost=localhost \ -Dhive.hadoop2.metastorePort=9083 \ diff --git a/plugin/trino-hive-hadoop2/bin/run_hive_alluxio_tests.sh b/plugin/trino-hive-hadoop2/bin/run_hive_alluxio_tests.sh index 0097729ace84..822b7aec9f75 100755 --- a/plugin/trino-hive-hadoop2/bin/run_hive_alluxio_tests.sh +++ b/plugin/trino-hive-hadoop2/bin/run_hive_alluxio_tests.sh @@ -4,6 +4,8 @@ set -euo pipefail -x . "${BASH_SOURCE%/*}/common.sh" +abort_if_not_gib_impacted + export ALLUXIO_BASE_IMAGE="alluxio/alluxio" export ALLUXIO_IMAGE_TAG="2.1.2" @@ -56,7 +58,7 @@ function main () { # run product tests pushd ${PROJECT_ROOT} set +e - ./mvnw -B -pl :trino-hive-hadoop2 test -P test-hive-hadoop2-alluxio \ + ./mvnw ${MAVEN_TEST:--B} -pl :trino-hive-hadoop2 test -P test-hive-hadoop2-alluxio \ -Dhive.hadoop2.alluxio.host=localhost \ -Dhive.hadoop2.alluxio.port=19998 \ -Dhive.hadoop2.hiveVersionMajor="${TESTS_HIVE_VERSION_MAJOR}" \ diff --git a/plugin/trino-hive-hadoop2/bin/run_hive_s3_tests.sh b/plugin/trino-hive-hadoop2/bin/run_hive_s3_tests.sh index b27b4aee5d6f..97b0cb530e3e 100755 --- a/plugin/trino-hive-hadoop2/bin/run_hive_s3_tests.sh +++ b/plugin/trino-hive-hadoop2/bin/run_hive_s3_tests.sh @@ -4,6 +4,8 @@ set -euo pipefail -x . 
"${BASH_SOURCE%/*}/common.sh" +abort_if_not_gib_impacted + check_vars S3_BUCKET S3_BUCKET_ENDPOINT \ AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY @@ -53,7 +55,7 @@ retry check_hadoop # run product tests pushd "${PROJECT_ROOT}" set +e -./mvnw -B -pl :trino-hive-hadoop2 test -P test-hive-hadoop2-s3 \ +./mvnw ${MAVEN_TEST:--B} -pl :trino-hive-hadoop2 test -P test-hive-hadoop2-s3 \ -DHADOOP_USER_NAME=hive \ -Dhive.hadoop2.metastoreHost=localhost \ -Dhive.hadoop2.metastorePort=9083 \ diff --git a/plugin/trino-hive-hadoop2/bin/run_hive_tests.sh b/plugin/trino-hive-hadoop2/bin/run_hive_tests.sh index 9ec8b801df32..34c02e83d590 100755 --- a/plugin/trino-hive-hadoop2/bin/run_hive_tests.sh +++ b/plugin/trino-hive-hadoop2/bin/run_hive_tests.sh @@ -4,6 +4,8 @@ set -euo pipefail -x . "${BASH_SOURCE%/*}/common.sh" +abort_if_not_gib_impacted + cleanup_hadoop_docker_containers start_hadoop_docker_containers @@ -21,7 +23,7 @@ HADOOP_MASTER_IP=$(hadoop_master_ip) # run product tests pushd "${PROJECT_ROOT}" set +e -./mvnw -B -pl :trino-hive-hadoop2 test -P test-hive-hadoop2 \ +./mvnw ${MAVEN_TEST:--B} -pl :trino-hive-hadoop2 test -P test-hive-hadoop2 \ -DHADOOP_USER_NAME=hive \ -Dhive.hadoop2.metastoreHost=localhost \ -Dhive.hadoop2.metastorePort=9083 \ diff --git a/plugin/trino-hive-hadoop2/bin/run_hive_wasb_tests.sh b/plugin/trino-hive-hadoop2/bin/run_hive_wasb_tests.sh index 53b659268219..40c19ddf8c47 100755 --- a/plugin/trino-hive-hadoop2/bin/run_hive_wasb_tests.sh +++ b/plugin/trino-hive-hadoop2/bin/run_hive_wasb_tests.sh @@ -4,6 +4,8 @@ set -euo pipefail -x . 
"${BASH_SOURCE%/*}/common.sh" +abort_if_not_gib_impacted + check_vars WASB_CONTAINER WASB_ACCOUNT WASB_ACCESS_KEY cleanup_hadoop_docker_containers @@ -26,7 +28,7 @@ stop_unnecessary_hadoop_services # run product tests pushd $PROJECT_ROOT set +e -./mvnw -B -pl :trino-hive-hadoop2 test -P test-hive-hadoop2-wasb \ +./mvnw ${MAVEN_TEST:--B} -pl :trino-hive-hadoop2 test -P test-hive-hadoop2-wasb \ -DHADOOP_USER_NAME=hive \ -Dhive.hadoop2.metastoreHost=localhost \ -Dhive.hadoop2.metastorePort=9083 \ diff --git a/pom.xml b/pom.xml index bff43d621961..6a420a7cd823 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ io.airlift airbase - 119 + 121 io.trino @@ -1863,5 +1863,58 @@ + + + disable-check-spi-dependencies + + + + io.trino + trino-maven-plugin + + + default-check-spi-dependencies + none + + + + + + + + gib + + + master + + + + + com.vackosar.gitflowincrementalbuilder + gitflow-incremental-builder + 3.15.0 + true + + master + true + true + true + true + true + impacted + + true + -Dmaven.source.skip=true -Dair.check.skip-all + + true + true + true + gib-impacted.log + + + + + diff --git a/testing/trino-test-jdbc-compatibility-old-driver/bin/run_tests.sh b/testing/trino-test-jdbc-compatibility-old-driver/bin/run_tests.sh index 0cbadf61d71c..d594b7213acf 100755 --- a/testing/trino-test-jdbc-compatibility-old-driver/bin/run_tests.sh +++ b/testing/trino-test-jdbc-compatibility-old-driver/bin/run_tests.sh @@ -4,7 +4,7 @@ set -xeuo pipefail trap "exit" INT # allows to terminate script on ctrl+c instead of terminating single mvnw execution maven="${BASH_SOURCE%/*}/../../../mvnw" -maven_run_tests="${maven} clean test -Dair.check.skip-all=true -Dmaven.javadoc.skip=true -Dmaven.source.skip=true -B -pl :trino-test-jdbc-compatibility-old-driver" +maven_run_tests="${maven} clean test ${MAVEN_TEST:--B} -pl :trino-test-jdbc-compatibility-old-driver" "${maven}" -version