You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ja...@apache.org on 2018/10/22 07:14:31 UTC

carbondata git commit: [HOTFIX] Fix SDV test case failure after PR #2645

Repository: carbondata
Updated Branches:
  refs/heads/master a9e405f47 -> 8c49e5b42


[HOTFIX] Fix SDV test case failure after PR #2645

**Changes:

1. #2645 blocked specifying a schema for external tables, but the SDV test cases were not updated accordingly. Hence, the test cases have been updated.
2. Table properties were not blocked for external tables, so even when a valid table property is passed, validation of table properties will fail for an external table, because the fields (schema) will be null.
Hence, this is now blocked with a proper error message.**

This closes #2809


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/8c49e5b4
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/8c49e5b4
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/8c49e5b4

Branch: refs/heads/master
Commit: 8c49e5b42fa786ad1df11d2b55c5a9e802bd579f
Parents: a9e405f
Author: ajantha-bhat <aj...@gmail.com>
Authored: Wed Oct 10 16:51:12 2018 +0530
Committer: Jacky Li <ja...@qq.com>
Committed: Mon Oct 22 15:14:03 2018 +0800

----------------------------------------------------------------------
 .../sdv/generated/SDKwriterTestCase.scala       | 32 ++++++++++----------
 .../createTable/TestCreateExternalTable.scala   |  2 +-
 .../sql/parser/CarbonSparkSqlParserUtil.scala   |  9 +++++-
 3 files changed, 25 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/8c49e5b4/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
index 668d9d1..619bfb3 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
@@ -21,7 +21,7 @@ package org.apache.carbondata.cluster.sdv.generated
 import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, InputStream}
 import java.util
 
-import org.apache.spark.sql.Row
+import org.apache.spark.sql.{AnalysisException, Row}
 import org.apache.spark.sql.common.util.QueryTest
 import org.scalatest.BeforeAndAfterEach
 import scala.collection.JavaConverters._
@@ -194,7 +194,7 @@ class SDKwriterTestCase extends QueryTest with BeforeAndAfterEach {
     sql("DROP TABLE IF EXISTS sdkTable")
 
     sql(
-      s"""CREATE EXTERNAL TABLE sdkTable(name string,age int) STORED BY
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY
          |'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
     checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
@@ -228,7 +228,7 @@ class SDKwriterTestCase extends QueryTest with BeforeAndAfterEach {
     sql("DROP TABLE IF EXISTS sdkTable")
 
     sql(
-      s"""CREATE EXTERNAL TABLE sdkTable(name string,age int) STORED BY
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY
          |'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
@@ -266,11 +266,11 @@ class SDKwriterTestCase extends QueryTest with BeforeAndAfterEach {
     sql("DROP TABLE IF EXISTS sdkTable1")
     sql("DROP TABLE IF EXISTS sdkTable2")
     sql(
-      s"""CREATE EXTERNAL TABLE sdkTable1(name string,age int) STORED BY
+      s"""CREATE EXTERNAL TABLE sdkTable1 STORED BY
          |'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
     sql(
-      s"""CREATE EXTERNAL TABLE sdkTable2(name string,age int) STORED BY
+      s"""CREATE EXTERNAL TABLE sdkTable2 STORED BY
          |'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
@@ -278,14 +278,13 @@ class SDKwriterTestCase extends QueryTest with BeforeAndAfterEach {
     checkAnswer(sql("select count(*) from sdkTable1"), Seq(Row(6)))
   }
 
-  test("test create External Table with Schema with partition, external table should " +
-       "ignore schema and partition") {
+  test("test create External Table without Schema") {
     buildTestDataSingleFile()
     assert(FileFactory.getCarbonFile(writerPath).exists())
     sql("DROP TABLE IF EXISTS sdkTable")
 
     sql(
-      s"""CREATE EXTERNAL TABLE sdkTable(name string) PARTITIONED BY (age int) STORED BY
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY
          |'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
@@ -301,7 +300,7 @@ class SDKwriterTestCase extends QueryTest with BeforeAndAfterEach {
     sql("DROP TABLE IF EXISTS table1")
 
     sql(
-      s"""CREATE EXTERNAL TABLE sdkTable(name string) PARTITIONED BY (age int) STORED BY
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY
          |'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
@@ -323,17 +322,18 @@ class SDKwriterTestCase extends QueryTest with BeforeAndAfterEach {
       Seq(Row(0)))
   }
 
-  test("test create External Table with Table properties should ignore tblproperties") {
+  test("test create External Table with Table properties should fail") {
     buildTestDataSingleFile()
     assert(FileFactory.getCarbonFile(writerPath).exists())
     sql("DROP TABLE IF EXISTS sdkTable")
 
-    sql(
-      s"""CREATE EXTERNAL TABLE sdkTable(name string,age int) STORED BY
-         |'carbondata' LOCATION
-         |'$writerPath' TBLPROPERTIES('sort_scope'='batch_sort') """.stripMargin)
-
-    checkExistence(sql("Describe formatted sdkTable "), false, "batch_sort")
+    val ex = intercept[AnalysisException] {
+      sql(
+        s"""CREATE EXTERNAL TABLE sdkTable STORED BY
+           |'carbondata' LOCATION
+           |'$writerPath' TBLPROPERTIES('sort_scope'='batch_sort') """.stripMargin)
+    }
+    assert(ex.message.contains("table properties are not supported for external table"))
   }
 
   test("Read sdk writer output file and test without carbondata and carbonindex files should fail")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8c49e5b4/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateExternalTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateExternalTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateExternalTable.scala
index 56f038e..f0e7a1e 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateExternalTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateExternalTable.scala
@@ -101,7 +101,7 @@ class TestCreateExternalTable extends QueryTest with BeforeAndAfterAll {
            |LOCATION '$storeLocation/origin'
      """.stripMargin)
     }
-    assert(ex.message.contains("Schema may not be specified for external table"))
+    assert(ex.message.contains("Schema must not be specified for external table"))
 
     sql("DROP TABLE IF EXISTS source")
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8c49e5b4/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
index 1594c18..0378bf9 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
@@ -123,6 +123,13 @@ object CarbonSparkSqlParserUtil {
     if (partitionFields.nonEmpty && options.isStreaming) {
       operationNotAllowed("Streaming is not allowed on partitioned table", partitionColumns)
     }
+
+    if (external && fields.isEmpty && tableProperties.nonEmpty) {
+      // as fields are always zero for external table, cannot validate table properties.
+      operationNotAllowed(
+        "table properties are not supported for external table", tablePropertyList)
+    }
+
     // validate tblProperties
     val bucketFields = parser.getBucketFields(tableProperties, fields, options)
     var isTransactionalTable: Boolean = true
@@ -132,7 +139,7 @@ object CarbonSparkSqlParserUtil {
         // user provided schema for this external table, this is not allow currently
         // see CARBONDATA-2866
         operationNotAllowed(
-          "Schema may not be specified for external table", columns)
+          "Schema must not be specified for external table", columns)
       }
       if (partitionByStructFields.nonEmpty) {
         operationNotAllowed(