Posted to commits@spark.apache.org by ma...@apache.org on 2014/10/06 02:49:53 UTC

git commit: [SPARK-3792][SQL] Enable JavaHiveQLSuite

Repository: spark
Updated Branches:
  refs/heads/master 79b2108de -> 58f5361ca


[SPARK-3792][SQL] Enable JavaHiveQLSuite

Do not use TestSQLContext in JavaHiveQLSuite, since that may lead to two SparkContexts in the same JVM, and re-enable JavaHiveQLSuite.
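
For context, a minimal sketch of the pattern the patch adopts, mirroring the diff below (the imports and the FunSuite wrapper are filled in here for illustration; the rest of the suite is elided):

    import org.apache.spark.api.java.JavaSparkContext
    import org.apache.spark.sql.hive.api.java.JavaHiveContext
    import org.apache.spark.sql.hive.test.TestHive
    import org.scalatest.FunSuite

    class JavaHiveQLSuite extends FunSuite {
      // Reuse TestHive's SparkContext instead of TestSQLContext's, so only
      // one SparkContext ever exists in the test JVM.
      lazy val javaCtx = new JavaSparkContext(TestHive.sparkContext)

      // Back the JavaHiveContext by the shared TestHive singleton rather than
      // letting it instantiate a second HiveContext of its own.
      lazy val javaHiveCtx = new JavaHiveContext(javaCtx) {
        override val sqlContext = TestHive
      }
    }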

Author: scwf <wa...@huawei.com>

Closes #2652 from scwf/fix-JavaHiveQLSuite and squashes the following commits:

be35c91 [scwf] enable JavaHiveQLSuite


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/58f5361c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/58f5361c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/58f5361c

Branch: refs/heads/master
Commit: 58f5361caaa2f898e38ae4b3794167881e20a818
Parents: 79b2108
Author: scwf <wa...@huawei.com>
Authored: Sun Oct 5 17:47:20 2014 -0700
Committer: Michael Armbrust <mi...@databricks.com>
Committed: Sun Oct 5 17:49:41 2014 -0700

----------------------------------------------------------------------
 .../sql/hive/api/java/JavaHiveQLSuite.scala     | 27 +++++++-------------
 1 file changed, 9 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/58f5361c/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
index 9644b70..46b11b5 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/api/java/JavaHiveQLSuite.scala
@@ -25,34 +25,30 @@ import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.sql.api.java.JavaSchemaRDD
 import org.apache.spark.sql.execution.ExplainCommand
 import org.apache.spark.sql.hive.test.TestHive
-import org.apache.spark.sql.test.TestSQLContext
 
 // Implicits
 import scala.collection.JavaConversions._
 
 class JavaHiveQLSuite extends FunSuite {
-  lazy val javaCtx = new JavaSparkContext(TestSQLContext.sparkContext)
+  lazy val javaCtx = new JavaSparkContext(TestHive.sparkContext)
 
   // There is a little trickery here to avoid instantiating two HiveContexts in the same JVM
   lazy val javaHiveCtx = new JavaHiveContext(javaCtx) {
     override val sqlContext = TestHive
   }
 
-  ignore("SELECT * FROM src") {
+  test("SELECT * FROM src") {
     assert(
       javaHiveCtx.sql("SELECT * FROM src").collect().map(_.getInt(0)) ===
         TestHive.sql("SELECT * FROM src").collect().map(_.getInt(0)).toSeq)
   }
 
-  private val explainCommandClassName =
-    classOf[ExplainCommand].getSimpleName.stripSuffix("$")
-
   def isExplanation(result: JavaSchemaRDD) = {
     val explanation = result.collect().map(_.getString(0))
-    explanation.size > 1 && explanation.head.startsWith(explainCommandClassName)
+    explanation.size > 1 && explanation.head.startsWith("== Physical Plan ==")
   }
 
-  ignore("Query Hive native command execution result") {
+  test("Query Hive native command execution result") {
     val tableName = "test_native_commands"
 
     assertResult(0) {
@@ -63,23 +59,18 @@ class JavaHiveQLSuite extends FunSuite {
       javaHiveCtx.sql(s"CREATE TABLE $tableName(key INT, value STRING)").count()
     }
 
-    javaHiveCtx.sql("SHOW TABLES").registerTempTable("show_tables")
-
     assert(
       javaHiveCtx
-        .sql("SELECT result FROM show_tables")
+        .sql("SHOW TABLES")
         .collect()
         .map(_.getString(0))
         .contains(tableName))
 
-    assertResult(Array(Array("key", "int", "None"), Array("value", "string", "None"))) {
-      javaHiveCtx.sql(s"DESCRIBE $tableName").registerTempTable("describe_table")
-
-
+    assertResult(Array(Array("key", "int"), Array("value", "string"))) {
       javaHiveCtx
-        .sql("SELECT result FROM describe_table")
+        .sql(s"describe $tableName")
         .collect()
-        .map(_.getString(0).split("\t").map(_.trim))
+        .map(row => Array(row.get(0).asInstanceOf[String], row.get(1).asInstanceOf[String]))
         .toArray
     }
 
@@ -89,7 +80,7 @@ class JavaHiveQLSuite extends FunSuite {
     TestHive.reset()
   }
 
-  ignore("Exactly once semantics for DDL and command statements") {
+  test("Exactly once semantics for DDL and command statements") {
     val tableName = "test_exactly_once"
     val q0 = javaHiveCtx.sql(s"CREATE TABLE $tableName(key INT, value STRING)")
 

