diff --git a/bin/docker-image-tool.sh b/bin/docker-image-tool.sh
index 037e0c70b07a..18c6c5099717 100755
--- a/bin/docker-image-tool.sh
+++ b/bin/docker-image-tool.sh
@@ -233,7 +233,6 @@ Commands:
Options:
-f file (Optional) Dockerfile to build for JVM based Jobs. By default builds the Dockerfile shipped with Spark.
- For Java 17, use `-f kubernetes/dockerfiles/spark/Dockerfile.java17`
-p file (Optional) Dockerfile to build for PySpark Jobs. Builds Python dependencies and ships with Spark.
Skips building PySpark docker image if not specified.
-R file (Optional) Dockerfile to build for SparkR Jobs. Builds R dependencies and ships with Spark.
@@ -277,10 +276,6 @@ Examples:
# Note: buildx, which does cross building, needs to do the push during build
# So there is no separate push step with -X
- - Build and push Java17-based image with tag "v3.3.0" to docker.io/myrepo
- $0 -r docker.io/myrepo -t v3.3.0 -f kubernetes/dockerfiles/spark/Dockerfile.java17 build
- $0 -r docker.io/myrepo -t v3.3.0 push
-
EOF
}
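
With `Dockerfile.java17` gone, the default JVM image already targets Java 17, so the `-f` override removed above is no longer needed. A minimal sketch of the remaining flow, assuming a hypothetical repo `docker.io/myrepo` and tag `v3.4.0`:

    # Build the JVM image from the default (now Java 17 based) Dockerfile, then push it
    $ ./bin/docker-image-tool.sh -r docker.io/myrepo -t v3.4.0 build
    $ ./bin/docker-image-tool.sh -r docker.io/myrepo -t v3.4.0 push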
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 20c537e0e672..03baa77090e2 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -813,7 +813,7 @@ object KubernetesIntegrationTests {
val bindingsDir = s"$sparkHome/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/bindings"
val javaImageTag = sys.props.get("spark.kubernetes.test.javaImageTag")
val dockerFile = sys.props.getOrElse("spark.kubernetes.test.dockerFile",
- s"$sparkHome/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile.java17")
+ s"$sparkHome/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile")
val pyDockerFile = sys.props.getOrElse("spark.kubernetes.test.pyDockerFile",
s"$bindingsDir/python/Dockerfile")
val rDockerFile = sys.props.getOrElse("spark.kubernetes.test.rDockerFile",
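
Only the default changes here; the `spark.kubernetes.test.dockerFile` system property still lets the integration tests target any Dockerfile. A hedged sketch, assuming a hypothetical custom file (the sbt invocation follows the k8s integration-tests docs):

    # point the suite at a custom Dockerfile instead of the new Java 17 default
    $ build/sbt -Pkubernetes -Pkubernetes-integration-tests \
        -Dspark.kubernetes.test.dockerFile=/path/to/Dockerfile.custom \
        "kubernetes-integration-tests/test"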
diff --git a/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile b/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile
index 30338b6f91c7..53026016ee26 100644
--- a/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile
+++ b/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile
@@ -14,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-ARG java_image_tag=11-jre-focal
+ARG java_image_tag=17-jre
FROM eclipse-temurin:${java_image_tag}
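
The `java_image_tag` ARG remains overridable, so a different `eclipse-temurin` base can still be chosen at build time without a separate Dockerfile. A sketch using the tool's `-b` build-arg option; the `17-jre-focal` tag is an assumption about what is published upstream:

    # swap the base image tag via a Docker build arg
    $ ./bin/docker-image-tool.sh -r docker.io/myrepo -t v3.4.0 \
        -b java_image_tag=17-jre-focal build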
diff --git a/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile.java17 b/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile.java17
deleted file mode 100644
index 194242996ca7..000000000000
--- a/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/Dockerfile.java17
+++ /dev/null
@@ -1,62 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# We need to build from debian:bullseye-slim because openjdk switches its underlying OS
-# from debian to oraclelinux from openjdk:12
-FROM debian:bullseye-slim
-
-ARG spark_uid=185
-
-# Before building the docker image, first build and make a Spark distribution following
-# the instructions in https://spark.apache.org/docs/latest/building-spark.html.
-# If this docker file is being used in the context of building your images from a Spark
-# distribution, the docker build command should be invoked from the top level directory
-# of the Spark distribution. E.g.:
-# docker build -t spark:latest -f kubernetes/dockerfiles/spark/Dockerfile .
-
-RUN set -ex && \
- apt-get update && \
- ln -s /lib /lib64 && \
- apt install -y bash tini libc6 libpam-modules krb5-user libnss3 procps openjdk-17-jre net-tools && \
- mkdir -p /opt/spark && \
- mkdir -p /opt/spark/examples && \
- mkdir -p /opt/spark/work-dir && \
- touch /opt/spark/RELEASE && \
- rm /bin/sh && \
- ln -sv /bin/bash /bin/sh && \
- echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
- chgrp root /etc/passwd && chmod ug+rw /etc/passwd && \
- rm -rf /var/cache/apt/* && rm -rf /var/lib/apt/lists/*
-
-COPY jars /opt/spark/jars
-COPY bin /opt/spark/bin
-COPY sbin /opt/spark/sbin
-COPY kubernetes/dockerfiles/spark/entrypoint.sh /opt/
-COPY kubernetes/dockerfiles/spark/decom.sh /opt/
-COPY examples /opt/spark/examples
-COPY kubernetes/tests /opt/spark/tests
-COPY data /opt/spark/data
-
-ENV SPARK_HOME /opt/spark
-
-WORKDIR /opt/spark/work-dir
-RUN chmod g+w /opt/spark/work-dir
-RUN chmod a+x /opt/decom.sh
-
-ENTRYPOINT [ "/opt/entrypoint.sh" ]
-
-# Specify the User that the actual main process will run as
-USER ${spark_uid}
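
Everything this deleted file did by hand (apt-installing `openjdk-17-jre` on `debian:bullseye-slim`) now comes from the `eclipse-temurin` base image in the default Dockerfile. A quick hedged check that a built image really runs Java 17, assuming an image named `spark:v3.4.0`:

    # the image's entrypoint is /opt/entrypoint.sh, so override it to query the JRE
    $ docker run --rm --entrypoint java spark:v3.4.0 -version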
diff --git a/resource-managers/kubernetes/integration-tests/README.md b/resource-managers/kubernetes/integration-tests/README.md
index af0b1ec3dc76..5e784980ab55 100644
--- a/resource-managers/kubernetes/integration-tests/README.md
+++ b/resource-managers/kubernetes/integration-tests/README.md
@@ -20,9 +20,8 @@ To run tests with Java 11 instead of Java 8, use `--java-image-tag` to specify t
To run tests with a custom docker image, use `--docker-file` to specify the Dockerfile.
Note that if both `--docker-file` and `--java-image-tag` are used, `--docker-file` is preferred,
and the custom Dockerfile needs to include a Java installation by itself.
-Dockerfile.java17 is an example of custom Dockerfile, and you can specify it to run tests with Java 17.
- ./dev/dev-run-integration-tests.sh --docker-file ../docker/src/main/dockerfiles/spark/Dockerfile.java17
+ ./dev/dev-run-integration-tests.sh --docker-file ../docker/src/main/dockerfiles/spark/Dockerfile
To run tests with Hadoop 2.x instead of Hadoop 3.x, use `--hadoop-profile`.
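
For reference, the surviving README flags compose as before; a sketch, where the tag value passed to `--java-image-tag` is an assumption about available `eclipse-temurin` tags:

    # run against the default Dockerfile but on a different JRE base tag
    $ ./dev/dev-run-integration-tests.sh --java-image-tag 11-jre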
diff --git a/resource-managers/kubernetes/integration-tests/pom.xml b/resource-managers/kubernetes/integration-tests/pom.xml
index 7580982c2330..1857ed911794 100644
--- a/resource-managers/kubernetes/integration-tests/pom.xml
+++ b/resource-managers/kubernetes/integration-tests/pom.xml
@@ -43,7 +43,7 @@
-    <spark.kubernetes.test.dockerFile>Dockerfile.java17</spark.kubernetes.test.dockerFile>
+    <spark.kubernetes.test.dockerFile>Dockerfile</spark.kubernetes.test.dockerFile>
    <test.default.exclude.tags>org.apache.spark.deploy.k8s.integrationtest.YuniKornTag</test.default.exclude.tags>
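
Since the pom.xml default is a plain Maven property, it can also be overridden per invocation instead of editing the file. A hedged sketch, with the module path and custom file both assumptions:

    # override the default Dockerfile property for a single Maven run
    $ build/mvn -Pkubernetes -Pkubernetes-integration-tests \
        -pl resource-managers/kubernetes/integration-tests \
        -Dspark.kubernetes.test.dockerFile=/path/to/Dockerfile.custom \
        integration-test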