From dc2dbd923a1396ca5a7a950df35da57cc70c2ab8 Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Mon, 12 Nov 2018 13:39:14 +0800
Subject: [PATCH 1/3] Deprecate R prior to version 3.4 in SparkR

---
 R/WINDOWS.md      | 2 +-
 R/pkg/DESCRIPTION | 2 +-
 R/pkg/R/sparkR.R  | 4 ++++
 docs/index.md     | 3 ++-
 4 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/R/WINDOWS.md b/R/WINDOWS.md
index da668a69b867..34218b7bce22 100644
--- a/R/WINDOWS.md
+++ b/R/WINDOWS.md
@@ -3,7 +3,7 @@
 To build SparkR on Windows, the following steps are required

 1. Install R (>= 3.1) and [Rtools](http://cran.r-project.org/bin/windows/Rtools/). Make sure to
-include Rtools and R in `PATH`.
+include Rtools and R in `PATH`. Note that R prior to version 3.4 is deprecated as of Spark 3.0.

 2. Install
 [JDK8](http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html) and set
diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index cdaaa6104e6a..736da46eaa8d 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -15,7 +15,7 @@ URL: http://www.apache.org/ http://spark.apache.org/
 BugReports: http://spark.apache.org/contributing.html
 SystemRequirements: Java (== 8)
 Depends:
-    R (>= 3.0),
+    R (>= 3.1),
     methods
 Suggests:
     knitr,
diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index ac289d38d01b..cb06bc73e15e 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -283,6 +283,10 @@ sparkR.session <- function(
   enableHiveSupport = TRUE,
   ...) {

+  if (utils::compareVersion(R.version$minor, "4.0") == -1) {
+    warning("R prior to version 3.4 is deprecated as of Spark 3.0.")
+  }
+
   sparkConfigMap <- convertNamedListToEnv(sparkConfig)
   namedParams <- list(...)
   if (length(namedParams) > 0) {
diff --git a/docs/index.md b/docs/index.md
index ac38f1d4c53c..46851cb994ee 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -31,7 +31,8 @@ Spark runs on both Windows and UNIX-like systems (e.g. Linux, Mac OS). It's easy
 locally on one machine --- all you need is to have `java` installed on your system `PATH`,
 or the `JAVA_HOME` environment variable pointing to a Java installation.

-Spark runs on Java 8+, Python 2.7+/3.4+ and R 3.1+. For the Scala API, Spark {{site.SPARK_VERSION}}
+Spark runs on Java 8+, Python 2.7+/3.4+ and R 3.1+. R prior to version 3.4 is deprecated as of Spark 3.0.
+For the Scala API, Spark {{site.SPARK_VERSION}}
 uses Scala {{site.SCALA_BINARY_VERSION}}. You will need to use a compatible Scala version
 ({{site.SCALA_BINARY_VERSION}}.x).


From 3ec34f12eea0a3463234d8acae3526b7ed11a4ec Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Tue, 13 Nov 2018 01:11:24 +0800
Subject: [PATCH 2/3] Check major version as well

---
 R/pkg/R/sparkR.R | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index cb06bc73e15e..07de080a3e7c 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -283,7 +283,7 @@ sparkR.session <- function(
   enableHiveSupport = TRUE,
   ...) {

-  if (utils::compareVersion(R.version$minor, "4.0") == -1) {
+  if (utils::compareVersion(paste0(R.version$major, ".", R.version$minor), "3.4.0") == -1) {
     warning("R prior to version 3.4 is deprecated as of Spark 3.0.")
   }


From f153413357a45b361a83c5eb93a9fcd5762c1499 Mon Sep 17 00:00:00 2001
From: hyukjinkwon
Date: Thu, 15 Nov 2018 08:38:51 +0800
Subject: [PATCH 3/3] Address comments

---
 R/WINDOWS.md                 | 2 +-
 R/pkg/R/sparkR.R             | 4 ----
 R/pkg/inst/profile/general.R | 4 ++++
 R/pkg/inst/profile/shell.R   | 4 ++++
 docs/index.md                | 2 +-
 5 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/R/WINDOWS.md b/R/WINDOWS.md
index 34218b7bce22..33a4c850cfda 100644
--- a/R/WINDOWS.md
+++ b/R/WINDOWS.md
@@ -3,7 +3,7 @@
 To build SparkR on Windows, the following steps are required

 1. Install R (>= 3.1) and [Rtools](http://cran.r-project.org/bin/windows/Rtools/). Make sure to
-include Rtools and R in `PATH`. Note that R prior to version 3.4 is deprecated as of Spark 3.0.
+include Rtools and R in `PATH`. Note that support for R prior to version 3.4 is deprecated as of Spark 3.0.0.

 2. Install
 [JDK8](http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html) and set
diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index 07de080a3e7c..ac289d38d01b 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -283,10 +283,6 @@ sparkR.session <- function(
   enableHiveSupport = TRUE,
   ...) {

-  if (utils::compareVersion(paste0(R.version$major, ".", R.version$minor), "3.4.0") == -1) {
-    warning("R prior to version 3.4 is deprecated as of Spark 3.0.")
-  }
-
   sparkConfigMap <- convertNamedListToEnv(sparkConfig)
   namedParams <- list(...)
   if (length(namedParams) > 0) {
diff --git a/R/pkg/inst/profile/general.R b/R/pkg/inst/profile/general.R
index 8c75c19ca7ac..3efb460846fc 100644
--- a/R/pkg/inst/profile/general.R
+++ b/R/pkg/inst/profile/general.R
@@ -16,6 +16,10 @@
 #

 .First <- function() {
+  if (utils::compareVersion(paste0(R.version$major, ".", R.version$minor), "3.4.0") == -1) {
+    warning("Support for R prior to version 3.4 is deprecated since Spark 3.0.0")
+  }
+
   packageDir <- Sys.getenv("SPARKR_PACKAGE_DIR")
   dirs <- strsplit(packageDir, ",")[[1]]
   .libPaths(c(dirs, .libPaths()))
diff --git a/R/pkg/inst/profile/shell.R b/R/pkg/inst/profile/shell.R
index 8a8111a8c541..32eb3671b594 100644
--- a/R/pkg/inst/profile/shell.R
+++ b/R/pkg/inst/profile/shell.R
@@ -16,6 +16,10 @@
 #

 .First <- function() {
+  if (utils::compareVersion(paste0(R.version$major, ".", R.version$minor), "3.4.0") == -1) {
+    warning("Support for R prior to version 3.4 is deprecated since Spark 3.0.0")
+  }
+
   home <- Sys.getenv("SPARK_HOME")
   .libPaths(c(file.path(home, "R", "lib"), .libPaths()))
   Sys.setenv(NOAWT = 1)
diff --git a/docs/index.md b/docs/index.md
index 46851cb994ee..bd287e3f8d83 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -31,7 +31,7 @@ Spark runs on both Windows and UNIX-like systems (e.g. Linux, Mac OS). It's easy
 locally on one machine --- all you need is to have `java` installed on your system `PATH`,
 or the `JAVA_HOME` environment variable pointing to a Java installation.

-Spark runs on Java 8+, Python 2.7+/3.4+ and R 3.1+. R prior to version 3.4 is deprecated as of Spark 3.0.
+Spark runs on Java 8+, Python 2.7+/3.4+ and R 3.1+. Support for R prior to version 3.4 is deprecated as of Spark 3.0.0.
 For the Scala API, Spark {{site.SPARK_VERSION}}
 uses Scala {{site.SCALA_BINARY_VERSION}}. You will need to use a compatible Scala version
 ({{site.SCALA_BINARY_VERSION}}.x).
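A minimal, standalone sketch of the deprecation check that the final patch places in the `.First()` hooks of `R/pkg/inst/profile/general.R` and `R/pkg/inst/profile/shell.R`; it assumes only base R plus `utils::compareVersion`, and the `current` variable exists purely for illustration. The comments spell out why patch 2 compares the reassembled `major.minor` string instead of `R.version$minor` alone.

```r
# Sketch only: the same check as in the profile scripts above, with the
# version string reassembled from its components. R.version$major is e.g. "3"
# and R.version$minor is e.g. "4.4", so neither part alone identifies the release.
current <- paste0(R.version$major, ".", R.version$minor)  # e.g. "3.4.4"

# utils::compareVersion() returns -1, 0, or 1 when the first version is older
# than, equal to, or newer than the second.
if (utils::compareVersion(current, "3.4.0") == -1) {
  warning("Support for R prior to version 3.4 is deprecated since Spark 3.0.0")
}

# Why patch 2 ("Check major version as well") was needed: on R 4.0.2 the minor
# component is "0.2", and utils::compareVersion("0.2", "4.0") == -1, so the
# original check would warn on a release newer than 3.4. Comparing the full
# "4.0.2" against "3.4.0" avoids that false positive.
```

Moving the check into the `.First()` hooks, as the final patch does, means it runs once at R startup for sessions that load these profiles rather than on every `sparkR.session()` call.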