You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by yh...@apache.org on 2015/12/29 20:44:24 UTC

spark git commit: [SPARK-11199][SPARKR] Improve R context management story and add getOrCreate

Repository: spark
Updated Branches:
  refs/heads/master 8e629b10c -> f6ecf1433


[SPARK-11199][SPARKR] Improve R context management story and add getOrCreate

* Changes `api.r.SQLUtils` to use `SQLContext.getOrCreate` instead of creating a new context.
* Adds a simple test verifying that `sparkRSQL.init(sc)` returns the existing SQL context.

[SPARK-11199] #comment link with JIRA

Author: Hossein <ho...@databricks.com>

Closes #9185 from falaki/SPARK-11199.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f6ecf143
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f6ecf143
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f6ecf143

Branch: refs/heads/master
Commit: f6ecf143335d734b8f22c59649c6bbd4d5401745
Parents: 8e629b1
Author: Hossein <ho...@databricks.com>
Authored: Tue Dec 29 11:44:20 2015 -0800
Committer: Yin Huai <yh...@databricks.com>
Committed: Tue Dec 29 11:44:20 2015 -0800

----------------------------------------------------------------------
 R/pkg/inst/tests/testthat/test_sparkSQL.R                        | 4 ++++
 .../src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala     | 2 +-
 2 files changed, 5 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f6ecf143/R/pkg/inst/tests/testthat/test_sparkSQL.R
----------------------------------------------------------------------
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index c2b6adb..7b508b8 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -62,6 +62,10 @@ mockLinesComplexType <-
 complexTypeJsonPath <- tempfile(pattern="sparkr-test", fileext=".tmp")
 writeLines(mockLinesComplexType, complexTypeJsonPath)
 
+test_that("calling sparkRSQL.init returns existing SQL context", {
+  expect_equal(sparkRSQL.init(sc), sqlContext)
+})
+
 test_that("infer types and check types", {
   expect_equal(infer_type(1L), "integer")
   expect_equal(infer_type(1.0), "double")

http://git-wip-us.apache.org/repos/asf/spark/blob/f6ecf143/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
index b3f1346..67da7b8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala
@@ -32,7 +32,7 @@ private[r] object SQLUtils {
   SerDe.registerSqlSerDe((readSqlObject, writeSqlObject))
 
   def createSQLContext(jsc: JavaSparkContext): SQLContext = {
-    new SQLContext(jsc)
+    SQLContext.getOrCreate(jsc.sc)
   }
 
   def getJavaSparkContext(sqlCtx: SQLContext): JavaSparkContext = {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org