diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b22840bb..d592c13f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,26 +8,34 @@ jobs: strategy: fail-fast: false matrix: - config: + include: - image: accumulo test: accumulo - image: dns + platforms: linux/amd64,linux/arm64 test: dns - image: centos7-oj17 + platforms: linux/amd64,linux/arm64 - image: centos7-oj17-openldap-referrals + platforms: linux/amd64,linux/arm64 test: openldap - image: spark3-iceberg + platforms: linux/amd64,linux/arm64 test: spark3-iceberg - image: spark3-delta + platforms: linux/amd64,linux/arm64 test: spark3-delta - image: spark3-hudi + platforms: linux/amd64,linux/arm64 test: spark3-hudi - image: kerberos + platforms: linux/amd64,linux/arm64 test: kerberos - image: gpdb-6 test: gpdb-6 - image: hdp2.6-hive-kerberized-2 - image: hive3.1-hive + platforms: linux/amd64,linux/arm64 test: hive3.1-hive - image: hdp2.6-hive-kerberized test: hdp2.6-hive @@ -40,13 +48,20 @@ jobs: - image: cdh5.15-hive-kerberized-kms # TODO add test https://github.com/trinodb/trino/issues/14543 - image: phoenix5 + platforms: linux/amd64,linux/arm64 steps: - uses: actions/checkout@v3 with: fetch-depth: 0 # checkout tags so version in Manifest is set properly - - name: Build ${{ matrix.config.image }} - run: make "testing/${{ matrix.config.image }}" - - name: Test ${{ matrix.config.test }} - if: ${{ matrix.config.test != '' }} + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + - name: Build ${{ matrix.image }} + env: + PLATFORMS: ${{ matrix.platforms }} + run: make "testing/${{ matrix.image }}" + - name: Test ${{ matrix.test }} + env: + PLATFORMS: ${{ matrix.platforms }} + if: ${{ matrix.test != '' }} shell: 'script -q -e -c "bash {0}"' - run: make test IMAGE_TO_TEST="${{ matrix.config.test }}" + run: make test IMAGE_TO_TEST="${{ matrix.test }}" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 42a5dd84..92f25c97 100644 --- 
a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -49,9 +49,55 @@ jobs: run: | if [ "$DO_RELEASE" != "true" ]; then echo "Skipping the release step because not starting from a snapshot version" - else - make release + exit 0 fi + single_arch=( + testing/accumulo + testing/cdh5.12-hive + testing/cdh5.12-hive-kerberized + testing/cdh5.15-hive + testing/cdh5.15-hive-kerberized + testing/cdh5.15-hive-kerberized-kms + testing/gpdb-6 + testing/hdp2.6-hive + testing/hdp2.6-hive-kerberized + testing/hdp2.6-hive-kerberized-2 + testing/hdp3.1-hive + testing/hdp3.1-hive-kerberized + ) + multi_arch=( + testing/centos7-oj11 + testing/centos7-oj17 + testing/centos7-oj17-openldap + testing/centos7-oj17-openldap-referrals + testing/dns + testing/hive3.1-hive + testing/kerberos + testing/phoenix5 + testing/spark3-delta + testing/spark3-iceberg + testing/spark3-hudi + ) + to_release=("${single_arch[@]}" "${multi_arch[@]}") + make meta + read -a images < <(make list) + export LC_ALL=C + mapfile -t ignored < <( \ + comm -23 \ + <(printf '%s\n' "${images[@]}" | sort) \ + <(printf '%s\n' "${to_release[@]}" | sort) \ + ) + if [ "${#ignored[@]}" -ne 0 ]; then + echo "Images that would not get released: ${ignored[*]}" + echo "Must be explicitly added to either single_arch or multi_arch list" + exit 2 + fi + make prepare-release + make "${single_arch[@]/%/@$VERSION}" + make "${multi_arch[@]/%/@$VERSION}" PLATFORMS="linux/amd64,linux/arm64" + remote=("${to_release[@]/#/ghcr.io/trinodb/}") + remote=("${remote[@]/%/:$VERSION}") + ./bin/push.sh "${remote[@]}" - name: Set next development version run: | diff --git a/Makefile b/Makefile index 549c2b82..f8bd0926 100644 --- a/Makefile +++ b/Makefile @@ -18,6 +18,8 @@ LABEL := io.trino.git.hash=$(shell git rev-parse HEAD) DEPEND_SH=bin/depend.sh FLAG_SH=bin/flag.sh +BUILD_SH=$(realpath bin/build.sh) +TAG_SH=$(realpath bin/tag.sh) PUSH_SH=bin/push.sh TEST_SH=bin/test.sh BUILDDIR=build @@ -95,9 +97,10 @@ images: $(LATEST_TAGS) # # 
Release images to Dockerhub # -.PHONY: release push-release snapshot build-snapshot push-snapshot +.PHONY: prepare-release release push-release snapshot build-snapshot push-snapshot -release: require-clean-repo require-on-master require-release-version push-release +prepare-release: require-clean-repo require-on-master require-release-version +release: prepare-release push-release push-release: $(RELEASE_TAGS) $(PUSH_SH) $(call docker-tag,$(call resolved-image-name,$^)) @@ -179,7 +182,7 @@ $(FLAGDIR)/%.flags: %/Dockerfile $(FLAG_SH) # supposed to mean. # - TODO replace :unlabelled with something more appropriate. $(UNLABELLED_TAGS): %@unlabelled: %/Dockerfile %@latest - docker tag $*:latest $(call docker-tag,$@) + $(TAG_SH) $*:latest $(call docker-tag,$@) # # We don't need to specify any (real) dependencies other than the Dockerfile @@ -192,18 +195,17 @@ $(LATEST_TAGS): %@latest: %/Dockerfile %-parent-check @echo @echo "Building [$@] image using buildkit" @echo - cd $* && time $(SHELL) -c "( docker buildx build --compress --progress=plain --add-host hadoop-master:127.0.0.2 ${BUILD_ARGS} $(DBFLAGS_$*) -t $(call docker-tag,$@) --label $(LABEL) . 
)" - docker history $(call docker-tag,$@) + cd $* && time $(BUILD_SH) $(call docker-tag,$@) ${BUILD_ARGS} $(DBFLAGS_$*) --label $(LABEL) $(VERSION_TAGS): %@$(VERSION): %@latest - docker tag $(call docker-tag,$^) $(call docker-tag,$@) - docker tag $(call docker-tag,$^) $(call docker-tag,$(call resolved-image-name,$^)) - docker tag $(call docker-tag,$@) $(call docker-tag,$(call resolved-image-name,$@)) + $(TAG_SH) $(call docker-tag,$^) $(call docker-tag,$@) + $(TAG_SH) $(call docker-tag,$^) $(call docker-tag,$(call resolved-image-name,$^)) + $(TAG_SH) $(call docker-tag,$@) $(call docker-tag,$(call resolved-image-name,$@)) $(GIT_HASH_TAGS): %@$(GIT_HASH): %@latest - docker tag $(call docker-tag,$^) $(call docker-tag,$@) - docker tag $(call docker-tag,$^) $(call docker-tag,$(call resolved-image-name,$^)) - docker tag $(call docker-tag,$@) $(call docker-tag,$(call resolved-image-name,$@)) + $(TAG_SH) $(call docker-tag,$^) $(call docker-tag,$@) + $(TAG_SH) $(call docker-tag,$^) $(call docker-tag,$(call resolved-image-name,$^)) + $(TAG_SH) $(call docker-tag,$@) $(call docker-tag,$(call resolved-image-name,$@)) # # Verify that the parent image specified in the Dockerfile is either @@ -258,3 +260,7 @@ test: .PHONY: clean clean: -rm -r $(BUILDDIR) + +.PHONY: list +list: + @echo $(IMAGE_DIRS) diff --git a/bin/build.sh b/bin/build.sh new file mode 100755 index 00000000..dc1b456d --- /dev/null +++ b/bin/build.sh @@ -0,0 +1,43 @@ +#!/usr/bin/env bash + +set -xeuo pipefail + +usage() { + echo "$0 {image} [args]" >&2 +} + +if [ $# -lt 1 ]; then + usage + exit 1 +fi + +image=$1 +shift + +if [ -z "${PLATFORMS:-}" ]; then + docker buildx build \ + --compress \ + --progress=plain \ + --add-host hadoop-master:127.0.0.2 \ + -t "$image" \ + --load \ + "$@" \ + . 
+ exit 0 +fi + +IFS=, read -ra platforms <<<"$PLATFORMS" +export ARCH +for platform in "${platforms[@]}"; do + IFS=: read -r name tag <<<"$image" + ARCH="-${platform//\//-}" + docker buildx build \ + --platform "$platform" \ + --compress \ + --progress=plain \ + --add-host hadoop-master:127.0.0.2 \ + -t "${name}:${tag}${ARCH}" \ + --load \ + "$@" \ + . +done diff --git a/bin/depend.sh b/bin/depend.sh index 8cfd26c1..4bc37692 100755 --- a/bin/depend.sh +++ b/bin/depend.sh @@ -145,6 +145,8 @@ shift known_images="$*" parent_image_tag=$(find_parent "$target_dockerfile") ec=$? +# remove the $ARCH arg since it should be resolved to an empty string anyway +parent_image_tag=${parent_image_tag//\$ARCH/} case $ec in 0) ;; diff --git a/bin/lib.sh b/bin/lib.sh new file mode 100644 index 00000000..0069f046 --- /dev/null +++ b/bin/lib.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +set -xeuo pipefail + +function expand_multiarch_tags() { + local platforms + local name + local tag=$1 + shift + + if [ -z "${PLATFORMS:-}" ]; then + echo "$tag" + return + fi + + IFS=, read -ra platforms <<<"$PLATFORMS" + IFS=: read -r name tag <<<"$tag" + + for platform in "${platforms[@]}"; do + echo "${name}:${tag}-${platform//\//-}" + done +} diff --git a/bin/push.sh b/bin/push.sh index eca34dca..eda021d9 100755 --- a/bin/push.sh +++ b/bin/push.sh @@ -2,10 +2,31 @@ set -xeuo pipefail -while [ "$#" -gt 0 ]; do - while ! docker push "$1"; do - echo "Failed to push $1, retrying in 30s..." +function push_retry() { + local image=$1 + + while ! docker push "$image"; do + echo "Failed to push $image, retrying in 30s..." 
sleep 30 done - shift +} + +if [ -z "${PLATFORMS:-}" ]; then + for image in "$@"; do + push_retry "$image" + done + exit 0 +fi + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" +# shellcheck source=bin/lib.sh +source "$SCRIPT_DIR/lib.sh" + +for image in "$@"; do + mapfile -t expanded_names < <(expand_multiarch_tags "$image") + for name in "${expanded_names[@]}"; do + push_retry "$name" + done + docker manifest create "$image" "${expanded_names[@]}" + docker manifest push "$image" done diff --git a/bin/tag.sh b/bin/tag.sh new file mode 100755 index 00000000..d7a084e2 --- /dev/null +++ b/bin/tag.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +set -xeuo pipefail + +usage() { + echo "$0 {src} {dst}" >&2 +} + +if [ $# -lt 2 ]; then + usage + exit 1 +fi + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" +# shellcheck source=bin/lib.sh +source "$SCRIPT_DIR/lib.sh" + +mapfile -t src_tags < <(expand_multiarch_tags "$1") +mapfile -t dst_tags < <(expand_multiarch_tags "$2") + +for i in "${!src_tags[@]}"; do + src=${src_tags[$i]} + dst=${dst_tags[$i]} + docker tag "$src" "$dst" +done diff --git a/bin/test.sh b/bin/test.sh index 6a9f806e..9560104d 100755 --- a/bin/test.sh +++ b/bin/test.sh @@ -130,7 +130,9 @@ function getAvailableEnvironments() { grep -v files | grep -v common | xargs -n1 basename } -SCRIPT_DIR=${BASH_SOURCE%/*} +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" +# shellcheck source=bin/lib.sh +source "$SCRIPT_DIR/lib.sh" PROJECT_ROOT="${SCRIPT_DIR}/.." 
DOCKER_CONF_LOCATION="${PROJECT_ROOT}/etc/compose" @@ -153,55 +155,68 @@ stop_all_containers # catch terminate signals trap terminate INT TERM EXIT -environment_compose up -d +if [ -n "${PLATFORMS:-}" ]; then + IFS=, read -ra platforms <<<"$PLATFORMS" + platforms=("${platforms[@]//\//-}") + platforms=("${platforms[@]/#/-}") +else + platforms=("") +fi +export ARCH +for ARCH in "${platforms[@]}"; do -# start docker logs for the external services -environment_compose logs --no-color -f & + environment_compose up -d -LOGS_PID=$! + # start docker logs for the external services + environment_compose logs --no-color -f & -if [[ ${ENVIRONMENT} == *"accumulo"* ]]; then - retry check_health accumulo -elif [[ ${ENVIRONMENT} == *"dns"* ]]; then - retry check_health dns -elif [[ ${ENVIRONMENT} == "kerberos" ]]; then - run_kerberos_tests -elif [[ ${ENVIRONMENT} == *"gpdb"* ]]; then - # wait until gpdb process is started - retry check_gpdb + LOGS_PID=$! - # run tests - set -x - set +e - sleep 10 - run_gpdb_tests -elif [[ ${ENVIRONMENT} == *"hive"* ]]; then - # wait until hadoop processes is started - retry check_hadoop - - # run tests - set -x - set +e - sleep 10 - run_hadoop_tests - if [[ ${ENVIRONMENT} == *"3.1-hive" ]]; then - run_hive_transactional_tests + if [[ ${ENVIRONMENT} == *"accumulo"* ]]; then + retry check_health accumulo + elif [[ ${ENVIRONMENT} == *"dns"* ]]; then + retry check_health dns + elif [[ ${ENVIRONMENT} == "kerberos" ]]; then + run_kerberos_tests + elif [[ ${ENVIRONMENT} == *"gpdb"* ]]; then + # wait until gpdb process is started + retry check_gpdb + + # run tests + set -x + set +e + sleep 10 + run_gpdb_tests + elif [[ ${ENVIRONMENT} == *"hive"* ]]; then + # wait until hadoop processes is started + retry check_hadoop + + # run tests + set -x + set +e + sleep 10 + run_hadoop_tests + if [[ ${ENVIRONMENT} == *"3.1-hive" ]]; then + run_hive_transactional_tests + fi + elif [[ ${ENVIRONMENT} == *"openldap"* ]]; then + retry check_openldap + elif [[ 
${ENVIRONMENT} == *"spark"* ]]; then + retry check_health spark + else + echo >&2 "ERROR: no test defined for ${ENVIRONMENT}" + cleanup + exit 2 fi -elif [[ ${ENVIRONMENT} == *"openldap"* ]]; then - retry check_openldap -elif [[ ${ENVIRONMENT} == *"spark"* ]]; then - retry check_health spark -else - echo >&2 "ERROR: no test defined for ${ENVIRONMENT}" - exit 2 -fi -EXIT_CODE=$? -set -e + EXIT_CODE=$? + set -e + + cleanup +done # execution finished successfully -# disable trap, run cleanup manually +# disable trap trap - INT TERM EXIT -cleanup exit ${EXIT_CODE} diff --git a/etc/compose/accumulo/docker-compose.yml b/etc/compose/accumulo/docker-compose.yml index 37913895..85addef8 100644 --- a/etc/compose/accumulo/docker-compose.yml +++ b/etc/compose/accumulo/docker-compose.yml @@ -1,4 +1,4 @@ version: '2.0' services: accumulo: - image: testing/accumulo:latest + image: testing/accumulo:latest$ARCH diff --git a/etc/compose/dns/docker-compose.yml b/etc/compose/dns/docker-compose.yml index 1ee12f69..9a4a05c2 100644 --- a/etc/compose/dns/docker-compose.yml +++ b/etc/compose/dns/docker-compose.yml @@ -1,4 +1,4 @@ version: '2.0' services: dns: - image: testing/dns:latest + image: testing/dns:latest$ARCH diff --git a/etc/compose/hive3.1-hive/docker-compose.yml b/etc/compose/hive3.1-hive/docker-compose.yml index 3e6809bb..5696b4c7 100644 --- a/etc/compose/hive3.1-hive/docker-compose.yml +++ b/etc/compose/hive3.1-hive/docker-compose.yml @@ -2,4 +2,4 @@ version: '2.0' services: hadoop-master: hostname: hadoop-master - image: testing/hive3.1-hive:latest + image: testing/hive3.1-hive:latest$ARCH diff --git a/etc/compose/kerberos/docker-compose.yml b/etc/compose/kerberos/docker-compose.yml index 8f1e9257..895cb6fc 100644 --- a/etc/compose/kerberos/docker-compose.yml +++ b/etc/compose/kerberos/docker-compose.yml @@ -1,6 +1,6 @@ version: '2.0' services: kerberos: - image: testing/kerberos:latest + image: testing/kerberos:latest$ARCH environment: - 
TRINODEV_POST_BOOTSTRAP_COMMAND=create_principal -p ala -k ala.keytab diff --git a/etc/compose/openldap/docker-compose.yml b/etc/compose/openldap/docker-compose.yml index cf509aa2..9283ca45 100644 --- a/etc/compose/openldap/docker-compose.yml +++ b/etc/compose/openldap/docker-compose.yml @@ -1,4 +1,4 @@ version: '2.0' services: openldap: - image: testing/centos7-oj17-openldap:latest + image: testing/centos7-oj17-openldap:latest$ARCH diff --git a/etc/compose/spark3-delta/docker-compose.yml b/etc/compose/spark3-delta/docker-compose.yml index ec7e4923..f04dcf9c 100644 --- a/etc/compose/spark3-delta/docker-compose.yml +++ b/etc/compose/spark3-delta/docker-compose.yml @@ -1,4 +1,4 @@ version: '2.0' services: spark: - image: testing/spark3-delta:latest + image: testing/spark3-delta:latest$ARCH diff --git a/etc/compose/spark3-hudi/docker-compose.yml b/etc/compose/spark3-hudi/docker-compose.yml index 55a183df..b27800af 100644 --- a/etc/compose/spark3-hudi/docker-compose.yml +++ b/etc/compose/spark3-hudi/docker-compose.yml @@ -1,4 +1,4 @@ version: '2.0' services: spark: - image: testing/spark3-hudi:latest + image: testing/spark3-hudi:latest$ARCH diff --git a/etc/compose/spark3-iceberg/docker-compose.yml b/etc/compose/spark3-iceberg/docker-compose.yml index 7b01b0a8..f638f6eb 100644 --- a/etc/compose/spark3-iceberg/docker-compose.yml +++ b/etc/compose/spark3-iceberg/docker-compose.yml @@ -1,4 +1,4 @@ version: '2.0' services: spark: - image: testing/spark3-iceberg:latest + image: testing/spark3-iceberg:latest$ARCH diff --git a/testing/accumulo/Dockerfile b/testing/accumulo/Dockerfile index 9202ea0d..e8f4b602 100644 --- a/testing/accumulo/Dockerfile +++ b/testing/accumulo/Dockerfile @@ -10,7 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-FROM testing/centos7-oj17:unlabelled +ARG ARCH +FROM testing/centos7-oj17:unlabelled$ARCH ARG ACCUMULO_VERSION=1.7.4 ARG HADOOP_VERSION=2.6.5 diff --git a/testing/centos7-oj11/Dockerfile b/testing/centos7-oj11/Dockerfile index 67625cab..2c5a203b 100644 --- a/testing/centos7-oj11/Dockerfile +++ b/testing/centos7-oj11/Dockerfile @@ -22,8 +22,8 @@ RUN \ wget \ && \ \ - # Install Zulu JDK 11.0.16.1 \ - rpm -i https://cdn.azul.com/zulu$([ "$(arch)" != "aarch64" ] || echo "-embedded")/bin/zulu11.58.23-ca-jdk11.0.16.1-linux."$(arch)".rpm && \ + # Install Zulu JDK 11.0.17 \ + rpm -i https://cdn.azul.com/zulu$([ "$(arch)" != "aarch64" ] || echo "-embedded")/bin/zulu11.60.19-ca-jdk11.0.17-linux."$(arch)".rpm && \ # Set JDK 11 as a default one alternatives --set java /usr/lib/jvm/zulu-11/bin/java && \ alternatives --set javac /usr/lib/jvm/zulu-11/bin/javac && \ diff --git a/testing/centos7-oj17-openldap-referrals/Dockerfile b/testing/centos7-oj17-openldap-referrals/Dockerfile index 7edfc2c0..7f18c763 100644 --- a/testing/centos7-oj17-openldap-referrals/Dockerfile +++ b/testing/centos7-oj17-openldap-referrals/Dockerfile @@ -10,7 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM testing/centos7-oj17-openldap:unlabelled +ARG ARCH +FROM testing/centos7-oj17-openldap:unlabelled$ARCH # COPY CONFIGURATION COPY ./files / diff --git a/testing/centos7-oj17-openldap/Dockerfile b/testing/centos7-oj17-openldap/Dockerfile index 22fdf241..0c4c0b79 100644 --- a/testing/centos7-oj17-openldap/Dockerfile +++ b/testing/centos7-oj17-openldap/Dockerfile @@ -10,7 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-FROM testing/centos7-oj17:unlabelled +ARG ARCH +FROM testing/centos7-oj17:unlabelled$ARCH # INSTALL OPENLDAP RUN yum -y install openssl openldap openldap-clients openldap-servers \ diff --git a/testing/centos7-oj17/Dockerfile b/testing/centos7-oj17/Dockerfile index 662dc48c..d9fbec6a 100644 --- a/testing/centos7-oj17/Dockerfile +++ b/testing/centos7-oj17/Dockerfile @@ -22,8 +22,8 @@ RUN \ wget \ && \ \ - # Install Zulu JDK 17.0.4 \ - rpm -i https://cdn.azul.com/zulu/bin/zulu17.36.13-ca-jdk17.0.4-linux.x86_64.rpm && \ + # Install Zulu JDK 17.0.5 \ + rpm -i "https://cdn.azul.com/zulu/bin/zulu17.38.21-ca-jdk17.0.5-linux.$(arch).rpm" && \ # Set JDK 17 as a default one alternatives --set java /usr/lib/jvm/zulu-17/bin/java && \ alternatives --set javac /usr/lib/jvm/zulu-17/bin/javac && \ diff --git a/testing/hive3.1-hive/Dockerfile b/testing/hive3.1-hive/Dockerfile index eab3552c..dbd9c495 100644 --- a/testing/hive3.1-hive/Dockerfile +++ b/testing/hive3.1-hive/Dockerfile @@ -10,9 +10,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM testing/centos7-oj17:unlabelled +ARG ARCH +FROM testing/centos7-oj17:unlabelled$ARCH -ARG JAVA8_ZULU_VERSION=8.56.0.21-ca-jdk8.0.302 +ARG JAVA8_ZULU_VERSION=8.64.0.19-ca-jdk8.0.345 RUN yum install -y \ mariadb-server \ @@ -22,8 +23,8 @@ RUN yum install -y \ psmisc \ which && \ # Install Zulu JDK - echo "Downloading zulu${JAVA8_ZULU_VERSION}-linux.x86_64.rpm..." && \ - curl -o /tmp/jdk8.rpm --url https://cdn.azul.com/zulu/bin/zulu${JAVA8_ZULU_VERSION}-linux.x86_64.rpm && \ + echo "Downloading zulu${JAVA8_ZULU_VERSION}-linux.$(uname -m).rpm..." 
&& \ + curl -fLsS -o /tmp/jdk8.rpm --url https://cdn.azul.com/zulu$(test "$(uname -m)" != "aarch64" || echo "-embedded")/bin/zulu${JAVA8_ZULU_VERSION}-linux.$(uname -m).rpm && \ yum -y localinstall /tmp/jdk8.rpm && \ rm /tmp/jdk8.rpm && \ # Set JDK 8 as a default one @@ -41,18 +42,18 @@ ARG HIVE_VERSION=3.1.2 ARG HADOOP_BINARY_PATH=https://archive.apache.org/dist/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz ARG HIVE_BINARY_PATH=https://apache.mivzakim.net/hive/hive-$HIVE_VERSION/apache-hive-$HIVE_VERSION-bin.tar.gz -RUN curl -o /tmp/hadoop.tar.gz --url $HADOOP_BINARY_PATH && \ +RUN curl -fLsS -o /tmp/hadoop.tar.gz --url $HADOOP_BINARY_PATH && \ tar xzf /tmp/hadoop.tar.gz --directory /opt && mv /opt/hadoop-$HADOOP_VERSION /opt/hadoop -RUN curl -o /tmp/hive.tar.gz --url $HIVE_BINARY_PATH && \ +RUN curl -fLsS -o /tmp/hive.tar.gz --url $HIVE_BINARY_PATH && \ tar xzf /tmp/hive.tar.gz --directory /opt && mv /opt/apache-hive-${HIVE_VERSION}-bin /opt/hive ARG MYSQL_CONNECTOR_VERSION=8.0.13 ARG AWS_SDK_VERSION=1.11.906 RUN mkdir /opt/hive/auxlib && \ - curl -o /opt/hive/auxlib/mysql-connector-java-$MYSQL_CONNECTOR_VERSION.jar https://repo1.maven.org/maven2/mysql/mysql-connector-java/$MYSQL_CONNECTOR_VERSION/mysql-connector-java-$MYSQL_CONNECTOR_VERSION.jar && \ - curl -o /opt/hive/auxlib/aws-java-sdk-core-$AWS_SDK_VERSION.jar https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-core/$AWS_SDK_VERSION/aws-java-sdk-core-$AWS_SDK_VERSION.jar && \ - curl -o /opt/hive/auxlib/aws-java-sdk-s3-$AWS_SDK_VERSION.jar https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-s3/$AWS_SDK_VERSION/aws-java-sdk-s3-$AWS_SDK_VERSION.jar + curl -fLsS -o /opt/hive/auxlib/mysql-connector-java-$MYSQL_CONNECTOR_VERSION.jar https://repo1.maven.org/maven2/mysql/mysql-connector-java/$MYSQL_CONNECTOR_VERSION/mysql-connector-java-$MYSQL_CONNECTOR_VERSION.jar && \ + curl -fLsS -o /opt/hive/auxlib/aws-java-sdk-core-$AWS_SDK_VERSION.jar 
https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-core/$AWS_SDK_VERSION/aws-java-sdk-core-$AWS_SDK_VERSION.jar && \ + curl -fLsS -o /opt/hive/auxlib/aws-java-sdk-s3-$AWS_SDK_VERSION.jar https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-s3/$AWS_SDK_VERSION/aws-java-sdk-s3-$AWS_SDK_VERSION.jar ENV HADOOP_HOME=/opt/hadoop ENV HIVE_HOME=/opt/hive diff --git a/testing/kerberos/Dockerfile b/testing/kerberos/Dockerfile index 5f740ce9..4e5120db 100644 --- a/testing/kerberos/Dockerfile +++ b/testing/kerberos/Dockerfile @@ -10,7 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM testing/centos7-oj17:unlabelled +ARG ARCH +FROM testing/centos7-oj17:unlabelled$ARCH # INSTALL KERBEROS RUN yum install -y krb5-libs krb5-server krb5-workstation \ diff --git a/testing/phoenix5/Dockerfile b/testing/phoenix5/Dockerfile index 5911836b..e5047abf 100644 --- a/testing/phoenix5/Dockerfile +++ b/testing/phoenix5/Dockerfile @@ -10,7 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM testing/centos7-oj11:unlabelled +ARG ARCH +FROM testing/centos7-oj11:unlabelled$ARCH ARG HBASE_VERSION=2.4.7 ARG ZOOKEEPER_VERSION=3.5.7 diff --git a/testing/spark3-delta/Dockerfile b/testing/spark3-delta/Dockerfile index 46112a1f..fb795631 100644 --- a/testing/spark3-delta/Dockerfile +++ b/testing/spark3-delta/Dockerfile @@ -10,7 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-FROM testing/centos7-oj11:unlabelled +ARG ARCH +FROM testing/centos7-oj11:unlabelled$ARCH ARG SPARK_VERSION=3.3.0 ARG HADOOP_VERSION=3 diff --git a/testing/spark3-hudi/Dockerfile b/testing/spark3-hudi/Dockerfile index 29742be3..436ef98a 100644 --- a/testing/spark3-hudi/Dockerfile +++ b/testing/spark3-hudi/Dockerfile @@ -10,7 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM testing/centos7-oj11:unlabelled +ARG ARCH +FROM testing/centos7-oj11:unlabelled$ARCH ARG SPARK_VERSION=3.2.1 ARG HADOOP_VERSION=3.2 diff --git a/testing/spark3-iceberg/Dockerfile b/testing/spark3-iceberg/Dockerfile index c04b5095..3f9f7c4a 100644 --- a/testing/spark3-iceberg/Dockerfile +++ b/testing/spark3-iceberg/Dockerfile @@ -10,7 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM testing/centos7-oj11:unlabelled +ARG ARCH +FROM testing/centos7-oj11:unlabelled$ARCH ARG SPARK_VERSION=3.3.0 ARG HADOOP_VERSION=3