You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sh...@apache.org on 2018/05/12 00:00:55 UTC
spark git commit: [SPARKR] Require Java 8 for SparkR
Repository: spark
Updated Branches:
refs/heads/master 92f6f52ff -> f27a035da
[SPARKR] Require Java 8 for SparkR
This change updates the SystemRequirements and also includes a runtime check if the JVM is being launched by R. The runtime check is done by querying `java -version`.
## How was this patch tested?
Tested on a Mac and Windows machine
Author: Shivaram Venkataraman <sh...@cs.berkeley.edu>
Closes #21278 from shivaram/sparkr-skip-solaris.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f27a035d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f27a035d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f27a035d
Branch: refs/heads/master
Commit: f27a035daf705766d3445e5c6a99867c11c552b0
Parents: 92f6f52
Author: Shivaram Venkataraman <sh...@cs.berkeley.edu>
Authored: Fri May 11 17:00:51 2018 -0700
Committer: Shivaram Venkataraman <sh...@cs.berkeley.edu>
Committed: Fri May 11 17:00:51 2018 -0700
----------------------------------------------------------------------
R/pkg/DESCRIPTION | 1 +
R/pkg/R/client.R | 35 +++++++++++++++++++++++++++++++++++
R/pkg/R/sparkR.R | 1 +
R/pkg/R/utils.R | 4 ++--
4 files changed, 39 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/f27a035d/R/pkg/DESCRIPTION
----------------------------------------------------------------------
diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index 855eb5b..f52d785 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -13,6 +13,7 @@ Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),
License: Apache License (== 2.0)
URL: http://www.apache.org/ http://spark.apache.org/
BugReports: http://spark.apache.org/contributing.html
+SystemRequirements: Java (== 8)
Depends:
R (>= 3.0),
methods
http://git-wip-us.apache.org/repos/asf/spark/blob/f27a035d/R/pkg/R/client.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/client.R b/R/pkg/R/client.R
index 7244cc9..e9295e0 100644
--- a/R/pkg/R/client.R
+++ b/R/pkg/R/client.R
@@ -60,6 +60,40 @@ generateSparkSubmitArgs <- function(args, sparkHome, jars, sparkSubmitOpts, pack
combinedArgs
}
+# Check that the Java found on PATH (or under JAVA_HOME, if set) matches the
+# major version declared in this package's SystemRequirements field.
+# Stops with an error if Java cannot be launched or the version differs.
+checkJavaVersion <- function() {
+ javaBin <- "java"
+ javaHome <- Sys.getenv("JAVA_HOME")
+ # Read "Java (== 8)" from DESCRIPTION and extract the trailing "8".
+ javaReqs <- utils::packageDescription(utils::packageName(), fields=c("SystemRequirements"))
+ sparkJavaVersion <- as.numeric(tail(strsplit(javaReqs, "[(=)]")[[1]], n = 1L))
+ # JAVA_HOME, when set, takes precedence over whatever "java" is on PATH.
+ if (javaHome != "") {
+ javaBin <- file.path(javaHome, "bin", javaBin)
+ }
+
+ # If java is missing from PATH, we get an error in Unix and a warning in Windows
+ # Both captured streams are requested because the version banner is parsed below.
+ javaVersionOut <- tryCatch(
+ launchScript(javaBin, "-version", wait = TRUE, stdout = TRUE, stderr = TRUE),
+ error = function(e) {
+ stop("Java version check failed. Please make sure Java is installed",
+ " and set JAVA_HOME to point to the installation directory.", e)
+ },
+ warning = function(w) {
+ stop("Java version check failed. Please make sure Java is installed",
+ " and set JAVA_HOME to point to the installation directory.", w)
+ })
+ # Keep only the banner line containing "java version".
+ # NOTE(review): OpenJDK builds print "openjdk version" instead, which this
+ # filter would miss, leaving javaVersionFilter empty — confirm against the
+ # JVMs SparkR supports.
+ javaVersionFilter <- Filter(
+ function(x) {
+ grepl("java version", x)
+ }, javaVersionOut)
+
+ # The version is the text between the first pair of double quotes.
+ javaVersionStr <- strsplit(javaVersionFilter[[1]], "[\"]")[[1L]][2]
+ # javaVersionStr is of the form 1.8.0_92.
+ # Extract 8 from it to compare to sparkJavaVersion
+ # NOTE(review): this assumes the legacy "1.x.y" scheme; Java 9+ strings such
+ # as "10.0.1" would yield 0 here, and a bare "11" would yield NA — TODO
+ # confirm how newer JVMs are meant to be handled.
+ javaVersionNum <- as.integer(strsplit(javaVersionStr, "[.]")[[1L]][2])
+ # An exact major-version match is required (SystemRequirements says "== 8"),
+ # not a minimum version.
+ if (javaVersionNum != sparkJavaVersion) {
+ stop(paste("Java version", sparkJavaVersion, "is required for this package; found version:", javaVersionStr))
+ }
+}
+
launchBackend <- function(args, sparkHome, jars, sparkSubmitOpts, packages) {
sparkSubmitBinName <- determineSparkSubmitBin()
if (sparkHome != "") {
@@ -67,6 +101,7 @@ launchBackend <- function(args, sparkHome, jars, sparkSubmitOpts, packages) {
} else {
sparkSubmitBin <- sparkSubmitBinName
}
+
combinedArgs <- generateSparkSubmitArgs(args, sparkHome, jars, sparkSubmitOpts, packages)
cat("Launching java with spark-submit command", sparkSubmitBin, combinedArgs, "\n")
invisible(launchScript(sparkSubmitBin, combinedArgs))
http://git-wip-us.apache.org/repos/asf/spark/blob/f27a035d/R/pkg/R/sparkR.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index 38ee794..d6a2d08 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -167,6 +167,7 @@ sparkR.sparkContext <- function(
submitOps <- getClientModeSparkSubmitOpts(
Sys.getenv("SPARKR_SUBMIT_ARGS", "sparkr-shell"),
sparkEnvirMap)
+ checkJavaVersion()
launchBackend(
args = path,
sparkHome = sparkHome,
http://git-wip-us.apache.org/repos/asf/spark/blob/f27a035d/R/pkg/R/utils.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/utils.R b/R/pkg/R/utils.R
index f1b5eca..c350197 100644
--- a/R/pkg/R/utils.R
+++ b/R/pkg/R/utils.R
@@ -746,7 +746,7 @@ varargsToJProperties <- function(...) {
props
}
-launchScript <- function(script, combinedArgs, wait = FALSE) {
+launchScript <- function(script, combinedArgs, wait = FALSE, stdout = "", stderr = "") {
if (.Platform$OS.type == "windows") {
scriptWithArgs <- paste(script, combinedArgs, sep = " ")
# on Windows, intern = F seems to mean output to the console. (documentation on this is missing)
@@ -756,7 +756,7 @@ launchScript <- function(script, combinedArgs, wait = FALSE) {
# stdout = F means discard output
# stdout = "" means to its console (default)
# Note that the console of this child process might not be the same as the running R process.
- system2(script, combinedArgs, stdout = "", wait = wait)
+ system2(script, combinedArgs, stdout = stdout, wait = wait, stderr = stderr)
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org