diff --git a/external/docker/spark-test/base/Dockerfile b/external/docker/spark-test/base/Dockerfile index 5bec5d3f16548..d4a30c4681cba 100644 --- a/external/docker/spark-test/base/Dockerfile +++ b/external/docker/spark-test/base/Dockerfile @@ -15,23 +15,14 @@ # limitations under the License. # -FROM ubuntu:xenial +FROM ubuntu:20.04 # Upgrade package index -# install a few other useful packages plus Open Jdk 8 +# install a few other useful packages plus OpenJDK 11 # Remove unneeded /var/lib/apt/lists/* after install to reduce the # docker image size (by ~30MB) RUN apt-get update && \ - apt-get install -y less openjdk-8-jre-headless iproute2 vim-tiny sudo openssh-server && \ + apt-get install -y less openjdk-11-jre-headless iproute2 vim-tiny sudo openssh-server && \ rm -rf /var/lib/apt/lists/* -ENV SCALA_VERSION 2.12.10 -ENV CDH_VERSION cdh4 -ENV SCALA_HOME /opt/scala-$SCALA_VERSION ENV SPARK_HOME /opt/spark -ENV PATH $SPARK_HOME:$SCALA_HOME/bin:$PATH - -# Install Scala -ADD https://www.scala-lang.org/files/archive/scala-$SCALA_VERSION.tgz / -RUN (cd / && gunzip < scala-$SCALA_VERSION.tgz)|(cd /opt && tar -xvf -) -RUN rm /scala-$SCALA_VERSION.tgz diff --git a/external/docker/spark-test/master/default_cmd b/external/docker/spark-test/master/default_cmd index 5a7da3446f6d2..96a36cd0bb682 100755 --- a/external/docker/spark-test/master/default_cmd +++ b/external/docker/spark-test/master/default_cmd @@ -22,7 +22,4 @@ echo "CONTAINER_IP=$IP" export SPARK_LOCAL_IP=$IP export SPARK_PUBLIC_DNS=$IP -# Avoid the default Docker behavior of mapping our IP address to an unreachable host name -umount /etc/hosts - /opt/spark/bin/spark-class org.apache.spark.deploy.master.Master -i $IP diff --git a/external/docker/spark-test/worker/default_cmd b/external/docker/spark-test/worker/default_cmd index 31b06cb0eb047..2401f5565aa0b 100755 --- a/external/docker/spark-test/worker/default_cmd +++ b/external/docker/spark-test/worker/default_cmd @@ -22,7 +22,4 @@ echo "CONTAINER_IP=$IP" 
export SPARK_LOCAL_IP=$IP export SPARK_PUBLIC_DNS=$IP -# Avoid the default Docker behavior of mapping our IP address to an unreachable host name -umount /etc/hosts - /opt/spark/bin/spark-class org.apache.spark.deploy.worker.Worker $1