Posted to commits@spark.apache.org by da...@apache.org on 2015/05/30 00:08:26 UTC

spark git commit: [SPARK-7954] [SPARKR] Create SparkContext in sparkRSQL init

Repository: spark
Updated Branches:
  refs/heads/master 82a396c2f -> 5fb97dca9


[SPARK-7954] [SPARKR] Create SparkContext in sparkRSQL init

cc davies

Author: Shivaram Venkataraman <sh...@cs.berkeley.edu>

Closes #6507 from shivaram/sparkr-init and squashes the following commits:

6fdd169 [Shivaram Venkataraman] Create SparkContext in sparkRSQL init


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5fb97dca
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5fb97dca
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5fb97dca

Branch: refs/heads/master
Commit: 5fb97dca9bcfc29ac33823554c8783997e811b99
Parents: 82a396c
Author: Shivaram Venkataraman <sh...@cs.berkeley.edu>
Authored: Fri May 29 15:08:30 2015 -0700
Committer: Davies Liu <da...@databricks.com>
Committed: Fri May 29 15:08:30 2015 -0700

----------------------------------------------------------------------
 R/pkg/R/sparkR.R | 24 +++++++++++++++++++-----
 1 file changed, 19 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/5fb97dca/R/pkg/R/sparkR.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index 68387f0..5ced7c6 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -225,14 +225,21 @@ sparkR.init <- function(
 #' sqlContext <- sparkRSQL.init(sc)
 #'}
 
-sparkRSQL.init <- function(jsc) {
+sparkRSQL.init <- function(jsc = NULL) {
   if (exists(".sparkRSQLsc", envir = .sparkREnv)) {
     return(get(".sparkRSQLsc", envir = .sparkREnv))
   }
 
+  # If jsc is NULL, create a Spark Context
+  sc <- if (is.null(jsc)) {
+    sparkR.init()
+  } else {
+    jsc
+  }
+
   sqlContext <- callJStatic("org.apache.spark.sql.api.r.SQLUtils",
-                        "createSQLContext",
-                        jsc)
+                            "createSQLContext",
+                            sc)
   assign(".sparkRSQLsc", sqlContext, envir = .sparkREnv)
   sqlContext
 }
@@ -249,12 +256,19 @@ sparkRSQL.init <- function(jsc) {
 #' sqlContext <- sparkRHive.init(sc)
 #'}
 
-sparkRHive.init <- function(jsc) {
+sparkRHive.init <- function(jsc = NULL) {
   if (exists(".sparkRHivesc", envir = .sparkREnv)) {
     return(get(".sparkRHivesc", envir = .sparkREnv))
   }
 
-  ssc <- callJMethod(jsc, "sc")
+  # If jsc is NULL, create a Spark Context
+  sc <- if (is.null(jsc)) {
+    sparkR.init()
+  } else {
+    jsc
+  }
+
+  ssc <- callJMethod(sc, "sc")
   hiveCtx <- tryCatch({
     newJObject("org.apache.spark.sql.hive.HiveContext", ssc)
   }, error = function(err) {

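For reference, a minimal SparkR usage sketch of the call pattern this patch enables, assuming Spark 1.4-era SparkR APIs (createDataFrame, head); the data frame lines are illustrative only and not part of the commit. With jsc now defaulting to NULL, sparkRSQL.init() and sparkRHive.init() create the SparkContext on demand via sparkR.init().

library(SparkR)

# Before this change an explicit context was required:
#   sc <- sparkR.init()
#   sqlContext <- sparkRSQL.init(sc)

# After this change the SparkContext is created on demand:
sqlContext <- sparkRSQL.init()

# Repeated calls return the SQLContext cached in .sparkREnv.
df <- createDataFrame(sqlContext, faithful)
head(df)

# sparkRHive.init() follows the same pattern (requires a Hive-enabled Spark build):
# hiveContext <- sparkRHive.init()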

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org