diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml
index 75189f49e8619..79afb46f3011c 100644
--- a/.github/workflows/benchmark.yml
+++ b/.github/workflows/benchmark.yml
@@ -27,9 +27,9 @@ on:
required: true
default: '*'
jdk:
- description: 'JDK version: 8, 11, 17 or 21-ea'
+ description: 'JDK version: 17 or 21'
required: true
- default: '8'
+ default: '17'
scala:
description: 'Scala version: 2.13'
required: true
diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 531d4f86ee79f..72934b4fafa85 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -25,7 +25,7 @@ on:
java:
required: false
type: string
- default: 8
+ default: 17
branch:
description: Branch to run the build against
required: false
@@ -649,11 +649,11 @@ jobs:
if [ -f ./dev/free_disk_space_container ]; then
./dev/free_disk_space_container
fi
- - name: Install Java 8
+ - name: Install Java ${{ inputs.java }}
uses: actions/setup-java@v3
with:
distribution: zulu
- java-version: 8
+ java-version: ${{ inputs.java }}
- name: License test
run: ./dev/check-license
- name: Dependencies test
@@ -777,7 +777,6 @@ jobs:
fail-fast: false
matrix:
java:
- - 11
- 17
- 21
runs-on: ubuntu-22.04
@@ -868,11 +867,11 @@ jobs:
key: tpcds-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
restore-keys: |
tpcds-coursier-
- - name: Install Java 8
+ - name: Install Java ${{ inputs.java }}
uses: actions/setup-java@v3
with:
distribution: zulu
- java-version: 8
+ java-version: ${{ inputs.java }}
- name: Cache TPC-DS generated data
id: cache-tpcds-sf-1
uses: actions/cache@v3
@@ -974,11 +973,11 @@ jobs:
key: docker-integration-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
restore-keys: |
docker-integration-coursier-
- - name: Install Java 8
+ - name: Install Java ${{ inputs.java }}
uses: actions/setup-java@v3
with:
distribution: zulu
- java-version: 8
+ java-version: ${{ inputs.java }}
- name: Run tests
run: |
./dev/run-tests --parallelism 1 --modules docker-integration-tests --included-tags org.apache.spark.tags.DockerTest
diff --git a/.github/workflows/build_ansi.yml b/.github/workflows/build_ansi.yml
index be91432884553..b39c1ec20e226 100644
--- a/.github/workflows/build_ansi.yml
+++ b/.github/workflows/build_ansi.yml
@@ -17,7 +17,7 @@
# under the License.
#
-name: "Build / ANSI (master, Hadoop 3, JDK 8, Scala 2.13)"
+name: "Build / ANSI (master, Hadoop 3, JDK 17, Scala 2.13)"
on:
schedule:
@@ -31,7 +31,7 @@ jobs:
uses: ./.github/workflows/build_and_test.yml
if: github.repository == 'apache/spark'
with:
- java: 8
+ java: 17
branch: master
hadoop: hadoop3
envs: >-
diff --git a/.github/workflows/build_coverage.yml b/.github/workflows/build_coverage.yml
index 9c448bb2b1f2e..8780ad9124a75 100644
--- a/.github/workflows/build_coverage.yml
+++ b/.github/workflows/build_coverage.yml
@@ -17,7 +17,7 @@
# under the License.
#
-name: "Build / Coverage (master, Scala 2.13, Hadoop 3, JDK 8)"
+name: "Build / Coverage (master, Scala 2.13, Hadoop 3, JDK 17)"
on:
schedule:
@@ -31,7 +31,7 @@ jobs:
uses: ./.github/workflows/build_and_test.yml
if: github.repository == 'apache/spark'
with:
- java: 8
+ java: 17
branch: master
hadoop: hadoop3
envs: >-
diff --git a/.github/workflows/build_java11.yml b/.github/workflows/build_java11.yml
deleted file mode 100644
index f601e8622902a..0000000000000
--- a/.github/workflows/build_java11.yml
+++ /dev/null
@@ -1,49 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-name: "Build (master, Scala 2.13, Hadoop 3, JDK 11)"
-
-on:
- schedule:
- - cron: '0 16 * * *'
-
-jobs:
- run-build:
- permissions:
- packages: write
- name: Run
- uses: ./.github/workflows/build_and_test.yml
- if: github.repository == 'apache/spark'
- with:
- java: 11
- branch: master
- hadoop: hadoop3
- envs: >-
- {
- "SKIP_MIMA": "true",
- "SKIP_UNIDOC": "true"
- }
- jobs: >-
- {
- "build": "true",
- "pyspark": "true",
- "sparkr": "true",
- "tpcds-1g": "true",
- "docker-integration-tests": "true"
- }
diff --git a/.github/workflows/build_java17.yml b/.github/workflows/build_java17.yml
deleted file mode 100644
index 1e5a49101de67..0000000000000
--- a/.github/workflows/build_java17.yml
+++ /dev/null
@@ -1,49 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-name: "Build (master, Scala 2.13, Hadoop 3, JDK 17)"
-
-on:
- schedule:
- - cron: '0 22 * * *'
-
-jobs:
- run-build:
- permissions:
- packages: write
- name: Run
- uses: ./.github/workflows/build_and_test.yml
- if: github.repository == 'apache/spark'
- with:
- java: 17
- branch: master
- hadoop: hadoop3
- envs: >-
- {
- "SKIP_MIMA": "true",
- "SKIP_UNIDOC": "true"
- }
- jobs: >-
- {
- "build": "true",
- "pyspark": "true",
- "sparkr": "true",
- "tpcds-1g": "true",
- "docker-integration-tests": "true"
- }
diff --git a/.github/workflows/build_rockdb_as_ui_backend.yml b/.github/workflows/build_rockdb_as_ui_backend.yml
index 56b664270f110..e11ec85b8b176 100644
--- a/.github/workflows/build_rockdb_as_ui_backend.yml
+++ b/.github/workflows/build_rockdb_as_ui_backend.yml
@@ -17,7 +17,7 @@
# under the License.
#
-name: "Build / RocksDB as UI Backend (master, Hadoop 3, JDK 8, Scala 2.13)"
+name: "Build / RocksDB as UI Backend (master, Hadoop 3, JDK 17, Scala 2.13)"
on:
schedule:
@@ -31,7 +31,7 @@ jobs:
uses: ./.github/workflows/build_and_test.yml
if: github.repository == 'apache/spark'
with:
- java: 8
+ java: 17
branch: master
hadoop: hadoop3
envs: >-
diff --git a/.github/workflows/publish_snapshot.yml b/.github/workflows/publish_snapshot.yml
index 9bdb52e4e92da..7ed836f016b9b 100644
--- a/.github/workflows/publish_snapshot.yml
+++ b/.github/workflows/publish_snapshot.yml
@@ -47,11 +47,18 @@ jobs:
key: snapshot-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
snapshot-maven-
- - name: Install Java 8
+ - name: Install Java 8 for branch-3.x
+ if: matrix.branch == 'branch-3.5' || matrix.branch == 'branch-3.4' || matrix.branch == 'branch-3.3'
uses: actions/setup-java@v3
with:
distribution: temurin
java-version: 8
+ - name: Install Java 17
+ if: matrix.branch != 'branch-3.5' && matrix.branch != 'branch-3.4' && matrix.branch != 'branch-3.3'
+ uses: actions/setup-java@v3
+ with:
+ distribution: temurin
+ java-version: 17
- name: Publish snapshot
env:
ASF_USERNAME: ${{ secrets.NEXUS_USER }}
diff --git a/dev/infra/Dockerfile b/dev/infra/Dockerfile
index d196d0e97c579..767606d299a3e 100644
--- a/dev/infra/Dockerfile
+++ b/dev/infra/Dockerfile
@@ -30,7 +30,7 @@ RUN apt-get update && apt-get install -y \
pkg-config \
curl \
wget \
- openjdk-8-jdk \
+ openjdk-17-jdk-headless \
gfortran \
libopenblas-dev \
liblapack-dev \
diff --git a/docs/building-spark.md b/docs/building-spark.md
index b7d1bacea3cc7..908d9e910a663 100644
--- a/docs/building-spark.md
+++ b/docs/building-spark.md
@@ -27,7 +27,7 @@ license: |
## Apache Maven
The Maven-based build is the build of reference for Apache Spark.
-Building Spark using Maven requires Maven 3.9.4 and Java 8/11/17.
+Building Spark using Maven requires Maven 3.9.4 and Java 17.
Spark requires Scala 2.13; support for Scala 2.12 was removed in Spark 4.0.0.
### Setting up Maven's Memory Usage
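A typical build under these requirements, with the memory settings the section above introduces, might look like the following hedged sketch (the exact `MAVEN_OPTS` values follow the Spark docs' recommendation at the time of writing and may change between releases):

```sh
# Give scalac/zinc enough stack and heap before building; adjust for your machine.
export MAVEN_OPTS="-Xss64m -Xmx2g -XX:ReservedCodeCacheSize=1g"
# Build with the bundled Maven wrapper, skipping tests.
./build/mvn -DskipTests clean package
```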
diff --git a/docs/index.md b/docs/index.md
index 4620c4f072b42..1523918d2decb 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -34,12 +34,11 @@ source, visit [Building Spark](building-spark.html).
Spark runs on both Windows and UNIX-like systems (e.g. Linux, Mac OS), and it should run on any platform that runs a supported version of Java. This should include JVMs on x86_64 and ARM64. It's easy to run locally on one machine --- all you need is to have `java` installed on your system `PATH`, or the `JAVA_HOME` environment variable pointing to a Java installation.
-Spark runs on Java 8/11/17, Scala 2.13, Python 3.8+, and R 3.5+.
-Java 8 prior to version 8u371 support is deprecated as of Spark 3.5.0.
+Spark runs on Java 17, Scala 2.13, Python 3.8+, and R 3.5+.
When using the Scala API, it is necessary for applications to use the same version of Scala that Spark was compiled for.
For example, when using Scala 2.13, use Spark compiled for 2.13, and compile code/applications for Scala 2.13 as well.
-For Java 11, setting `-Dio.netty.tryReflectionSetAccessible=true` is required for the Apache Arrow library. This prevents the `java.lang.UnsupportedOperationException: sun.misc.Unsafe or java.nio.DirectByteBuffer.(long, int) not available` error when Apache Arrow uses Netty internally.
+For Java 17, setting `-Dio.netty.tryReflectionSetAccessible=true` is required for the Apache Arrow library. This prevents the `java.lang.UnsupportedOperationException: sun.misc.Unsafe or java.nio.DirectByteBuffer.(long, int) not available` error when Apache Arrow uses Netty internally.
# Running the Examples and Shell
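As a hedged illustration of the Netty flag described above, a JDK 17 submission could pass it to both the driver and the executors like this (the application class and jar name are placeholders):

```sh
# Supply the Arrow/Netty reflection flag on JDK 17.
# --driver-java-options sets driver JVM flags at launch time;
# spark.executor.extraJavaOptions covers the executor JVMs.
./bin/spark-submit \
  --driver-java-options "-Dio.netty.tryReflectionSetAccessible=true" \
  --conf "spark.executor.extraJavaOptions=-Dio.netty.tryReflectionSetAccessible=true" \
  --class org.example.ArrowApp \
  app.jar
```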
diff --git a/docs/security.md b/docs/security.md
index 3c6fd507fec6d..30cfd660c41bf 100644
--- a/docs/security.md
+++ b/docs/security.md
@@ -498,8 +498,8 @@ replaced with one of the above namespaces.
A comma-separated list of ciphers. The specified ciphers must be supported by JVM.
The reference list of protocols can be found in the "JSSE Cipher Suite Names" section
-        of the Java security guide. The list for Java 8 can be found at
-        <a href="https://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html#ciphersuites">this</a>
+        of the Java security guide. The list for Java 17 can be found at
+        <a href="https://docs.oracle.com/en/java/javase/17/docs/specs/security/standard-names.html#jsse-cipher-suite-names">this</a>
page.
Note: If not set, the default cipher suite for the JRE will be used.
@@ -537,8 +537,8 @@ replaced with one of the above namespaces.
TLS protocol to use. The protocol must be supported by JVM.
The reference list of protocols can be found in the "Additional JSSE Standard Names"
-        section of the Java security guide. For Java 8, the list can be found at
-        <a href="https://docs.oracle.com/javase/8/docs/technotes/guides/security/StandardNames.html#jssenames">this</a>
+        section of the Java security guide. For Java 17, the list can be found at
+        <a href="https://docs.oracle.com/en/java/javase/17/docs/specs/security/standard-names.html#additional-jsse-standard-names">this</a>
page.
@@ -591,7 +591,7 @@ Or via SparkConf "spark.hadoop.hadoop.security.credential.provider.path=jceks://
## Preparing the key stores
Key stores can be generated by `keytool` program. The reference documentation for this tool for
-Java 8 is [here](https://docs.oracle.com/javase/8/docs/technotes/tools/unix/keytool.html).
+Java 17 is [here](https://docs.oracle.com/en/java/javase/17/docs/specs/man/keytool.html).
The most basic steps to configure the key stores and the trust store for a Spark Standalone
deployment mode are as follows:
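The concrete steps themselves are elided from this hunk; as a hedged sketch against the JDK 17 `keytool` referenced above (alias, file names, passwords, and validity are placeholders):

```sh
# Generate a key pair in a new key store (non-interactive via -dname).
keytool -genkeypair -alias spark -keyalg RSA -keysize 4096 \
  -dname "CN=spark" -validity 365 \
  -keystore spark-keystore.jks -storepass changeit
# Export the public certificate...
keytool -exportcert -alias spark -file spark.cer \
  -keystore spark-keystore.jks -storepass changeit
# ...and import it into a trust store for the other side of the connection.
keytool -importcert -alias spark -file spark.cer -noprompt \
  -keystore spark-truststore.jks -storepass changeit
```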
diff --git a/pom.xml b/pom.xml
index 65aa656aca2be..7b6b56439711d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -112,7 +112,7 @@
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
-    <java.version>1.8</java.version>
+    <java.version>17</java.version>
     <maven.compiler.source>${java.version}</maven.compiler.source>
     <maven.compiler.target>${java.version}</maven.compiler.target>
     <maven.version>3.9.4</maven.version>
@@ -2934,7 +2934,7 @@
             <arg>-deprecation</arg>
             <arg>-feature</arg>
             <arg>-explaintypes</arg>
-            <arg>-target:jvm-1.8</arg>
+            <arg>-target:17</arg>
             <arg>-Wconf:cat=deprecation:wv,any:e</arg>
             <arg>-Wunused:imports</arg>
-Wunused:imports
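One way to sanity-check the `-target:17` change above, assuming the `core` module has already been compiled, is to inspect the class-file version of the output (the class name and path are illustrative; Java 17 corresponds to class-file major version 61):

```sh
javap -verbose -cp core/target/classes org.apache.spark.SparkContext | grep "major version"
# expected on a JDK 17 build: major version: 61
```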