From d48ecf971bff093406cfed20ff7d42682cb8e295 Mon Sep 17 00:00:00 2001
From: HyukjinKwon
Date: Tue, 23 Jun 2020 21:53:54 +0900
Subject: [PATCH] Drop R < 3.5 support

---
 R/WINDOWS.md                 | 4 ++--
 R/pkg/DESCRIPTION            | 2 +-
 R/pkg/inst/profile/general.R | 4 ----
 R/pkg/inst/profile/shell.R   | 4 ----
 docs/index.md                | 3 +--
 5 files changed, 4 insertions(+), 13 deletions(-)

diff --git a/R/WINDOWS.md b/R/WINDOWS.md
index dbc27178bdb8c..9fe4a22bf22b2 100644
--- a/R/WINDOWS.md
+++ b/R/WINDOWS.md
@@ -22,8 +22,8 @@ To build SparkR on Windows, the following steps are required
 
 1. Make sure `bash` is available and in `PATH` if you already have a built-in `bash` on Windows. If you do not have, install [Cygwin](https://www.cygwin.com/).
 
-2. Install R (>= 3.1) and [Rtools](https://cloud.r-project.org/bin/windows/Rtools/). Make sure to
-include Rtools and R in `PATH`. Note that support for R prior to version 3.4 is deprecated as of Spark 3.0.0.
+2. Install R (>= 3.5) and [Rtools](https://cloud.r-project.org/bin/windows/Rtools/). Make sure to
+include Rtools and R in `PATH`.
 
 3. Install JDK that SparkR supports (see `R/pkg/DESCRIPTION`), and set `JAVA_HOME` in the system environment variables.
 
diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index 52d7e1f4daa53..c5c08bd7a0636 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -15,7 +15,7 @@ URL: https://www.apache.org/ https://spark.apache.org/
 BugReports: https://spark.apache.org/contributing.html
 SystemRequirements: Java (>= 8, < 12)
 Depends:
-    R (>= 3.1),
+    R (>= 3.5),
     methods
 Suggests:
     knitr,
diff --git a/R/pkg/inst/profile/general.R b/R/pkg/inst/profile/general.R
index 3efb460846fc2..8c75c19ca7ac3 100644
--- a/R/pkg/inst/profile/general.R
+++ b/R/pkg/inst/profile/general.R
@@ -16,10 +16,6 @@
 #
 
 .First <- function() {
-  if (utils::compareVersion(paste0(R.version$major, ".", R.version$minor), "3.4.0") == -1) {
-    warning("Support for R prior to version 3.4 is deprecated since Spark 3.0.0")
-  }
-
   packageDir <- Sys.getenv("SPARKR_PACKAGE_DIR")
   dirs <- strsplit(packageDir, ",")[[1]]
   .libPaths(c(dirs, .libPaths()))
diff --git a/R/pkg/inst/profile/shell.R b/R/pkg/inst/profile/shell.R
index e4e0d032997de..f6c20e1a5ebc3 100644
--- a/R/pkg/inst/profile/shell.R
+++ b/R/pkg/inst/profile/shell.R
@@ -16,10 +16,6 @@
 #
 
 .First <- function() {
-  if (utils::compareVersion(paste0(R.version$major, ".", R.version$minor), "3.4.0") == -1) {
-    warning("Support for R prior to version 3.4 is deprecated since Spark 3.0.0")
-  }
-
   home <- Sys.getenv("SPARK_HOME")
   .libPaths(c(file.path(home, "R", "lib"), .libPaths()))
   Sys.setenv(NOAWT = 1)
diff --git a/docs/index.md b/docs/index.md
index 38f12dd4db77b..c0771ca170af5 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -44,10 +44,9 @@ source, visit [Building Spark](building-spark.html).
 Spark runs on both Windows and UNIX-like systems (e.g. Linux, Mac OS), and it should run on any
 platform that runs a supported version of Java. This should include JVMs on x86_64 and ARM64. It's easy to run
 locally on one machine --- all you need is to have `java` installed on your system `PATH`, or the `JAVA_HOME` environment variable pointing to a Java installation.
 
-Spark runs on Java 8/11, Scala 2.12, Python 2.7+/3.4+ and R 3.1+.
+Spark runs on Java 8/11, Scala 2.12, Python 2.7+/3.4+ and R 3.5+.
 Java 8 prior to version 8u92 support is deprecated as of Spark 3.0.0.
 Python 2 and Python 3 prior to version 3.6 support is deprecated as of Spark 3.0.0.
-R prior to version 3.4 support is deprecated as of Spark 3.0.0.
 
 For the Scala API, Spark {{site.SPARK_VERSION}} uses Scala {{site.SCALA_BINARY_VERSION}}. You will need to use a compatible Scala version ({{site.SCALA_BINARY_VERSION}}.x).
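
Note: the guard deleted from both profile scripts (R/pkg/inst/profile/general.R and R/pkg/inst/profile/shell.R) used base R's version-comparison idiom. A minimal standalone sketch of that pattern follows for reference; the helper name `checkMinRVersion` and its default minimum are illustrative and not part of SparkR.

    # Warn if the running R interpreter is older than a required minimum version.
    checkMinRVersion <- function(minimum = "3.5.0") {
      current <- paste0(R.version$major, ".", R.version$minor)
      if (utils::compareVersion(current, minimum) == -1) {
        warning("R ", current, " is older than the required minimum ", minimum)
      }
      invisible(current)
    }

    checkMinRVersion()  # e.g. invoked from a .First() startup hook, as the removed code was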