Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 3 additions & 12 deletions external/docker/spark-test/base/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -15,23 +15,14 @@
# limitations under the License.
#

FROM ubuntu:xenial
FROM ubuntu:20.04

# Upgrade package index
# install a few other useful packages plus Open Jdk 8
# install a few other useful packages plus OpenJDK 11
# Remove unneeded /var/lib/apt/lists/* after install to reduce the
# docker image size (by ~30MB)
RUN apt-get update && \
apt-get install -y less openjdk-8-jre-headless iproute2 vim-tiny sudo openssh-server && \
apt-get install -y less openjdk-11-jre-headless iproute2 vim-tiny sudo openssh-server && \
rm -rf /var/lib/apt/lists/*

ENV SCALA_VERSION 2.12.10
ENV CDH_VERSION cdh4
ENV SCALA_HOME /opt/scala-$SCALA_VERSION
ENV SPARK_HOME /opt/spark
ENV PATH $SPARK_HOME:$SCALA_HOME/bin:$PATH

# Install Scala
ADD https://www.scala-lang.org/files/archive/scala-$SCALA_VERSION.tgz /
RUN (cd / && gunzip < scala-$SCALA_VERSION.tgz)|(cd /opt && tar -xvf -)
RUN rm /scala-$SCALA_VERSION.tgz
3 changes: 0 additions & 3 deletions external/docker/spark-test/master/default_cmd
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,4 @@ echo "CONTAINER_IP=$IP"
export SPARK_LOCAL_IP=$IP
export SPARK_PUBLIC_DNS=$IP

# Avoid the default Docker behavior of mapping our IP address to an unreachable host name
umount /etc/hosts

/opt/spark/bin/spark-class org.apache.spark.deploy.master.Master -i $IP
3 changes: 0 additions & 3 deletions external/docker/spark-test/worker/default_cmd
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,4 @@ echo "CONTAINER_IP=$IP"
export SPARK_LOCAL_IP=$IP
export SPARK_PUBLIC_DNS=$IP

# Avoid the default Docker behavior of mapping our IP address to an unreachable host name
umount /etc/hosts

/opt/spark/bin/spark-class org.apache.spark.deploy.worker.Worker $1