You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ku...@apache.org on 2018/06/29 08:42:46 UTC

carbondata git commit: [CARBONDATA-2647] [CARBONDATA-2648] Fix cache level display in describe formatted command

Repository: carbondata
Updated Branches:
  refs/heads/master 64ae5ae0b -> dac8a4b00


[CARBONDATA-2647] [CARBONDATA-2648] Fix cache level display in describe formatted command

1. Correct CACHE_LEVEL display in describe formatted command. It always displayed BLOCK
   even though the value was configured as BLOCKLET.
2. Correct the method arguments to pass dbName first and then tableName.
3. Added test case for blocking column_meta_cache and cache_level on child dataMaps.

This closes #2426


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/dac8a4b0
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/dac8a4b0
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/dac8a4b0

Branch: refs/heads/master
Commit: dac8a4b006955c9015ecd1f440bcb79bd58b2906
Parents: 64ae5ae
Author: manishgupta88 <to...@gmail.com>
Authored: Thu Jun 28 19:18:07 2018 +0530
Committer: kunal642 <ku...@gmail.com>
Committed: Fri Jun 29 14:11:39 2018 +0530

----------------------------------------------------------------------
 ...WithColumnMetCacheAndCacheLevelProperty.scala | 19 +++++++++++++++++++
 .../carbondata/spark/util/CommonUtil.scala       |  4 +---
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala  |  3 ++-
 .../table/CarbonDescribeFormattedCommand.scala   |  3 ++-
 4 files changed, 24 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/dac8a4b0/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableWithColumnMetCacheAndCacheLevelProperty.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableWithColumnMetCacheAndCacheLevelProperty.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableWithColumnMetCacheAndCacheLevelProperty.scala
index dbe9c75..8d4be3c 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableWithColumnMetCacheAndCacheLevelProperty.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/alterTable/TestAlterTableWithColumnMetCacheAndCacheLevelProperty.scala
@@ -160,6 +160,25 @@ class TestAlterTableWithColumnMetCacheAndCacheLevelProperty extends QueryTest wi
     checkExistence(descResult, true, "CACHE_LEVEL")
   }
 
+  test("validate column_meta_cache and cache_level on child dataMap- ALTER_CACHE_LEVEL_07") {
+    intercept [Exception] {
+      sql("CREATE DATAMAP agg1 ON TABLE alter_column_meta_cache USING 'preaggregate' DMPROPERTIES('column_meta_cache'='c2') AS SELECT c2,sum(c3) FROM alter_column_meta_cache GROUP BY c2")
+    }
+
+    intercept [Exception] {
+      sql("CREATE DATAMAP agg1 ON TABLE alter_column_meta_cache USING 'preaggregate' DMPROPERTIES('cache_level'='blocklet') AS SELECT c2,sum(c3) FROM alter_column_meta_cache GROUP BY c2")
+    }
+
+    // create datamap
+    sql("CREATE DATAMAP agg1 ON TABLE alter_column_meta_cache USING 'preaggregate' AS SELECT c2,sum(c3) FROM alter_column_meta_cache GROUP BY c2")
+    intercept [Exception] {
+      sql("Alter table alter_column_meta_cache_agg1 SET TBLPROPERTIES('column_meta_cache'='c2')")
+    }
+    intercept [Exception] {
+      sql("Alter table alter_column_meta_cache_agg1 SET TBLPROPERTIES('cache_level'='BLOCKLET')")
+    }
+  }
+
   override def afterAll: Unit = {
     // drop table
     dropTable

http://git-wip-us.apache.org/repos/asf/carbondata/blob/dac8a4b0/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala
index 4723e6b..de1ac49 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/CommonUtil.scala
@@ -976,9 +976,7 @@ object CommonUtil {
         }
         // check if the column exists in the table
         if (!tableColumns.contains(col.toLowerCase)) {
-          val errorMessage = s"Column $col does not exists in the table ${
-            databaseName
-          }.${ tableIdentifier.table }"
+          val errorMessage = s"Column $col does not exists in the table ${ tableIdentifier.table }"
           throw new MalformedCarbonCommandException(errorMessage)
         }
       })

http://git-wip-us.apache.org/repos/asf/carbondata/blob/dac8a4b0/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
index 13d1ff7..f60a413 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
@@ -384,8 +384,9 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
     if (tableProperties.get(CarbonCommonConstants.COLUMN_META_CACHE).isDefined) {
       // validate the column_meta_cache option
       val tableColumns = dims.map(x => x.name.get) ++ msrs.map(x => x.name.get)
-      CommonUtil.validateColumnMetaCacheFields(tableName,
+      CommonUtil.validateColumnMetaCacheFields(
         dbName.getOrElse(CarbonCommonConstants.DATABASE_DEFAULT_NAME),
+        tableName,
         tableColumns,
         tableProperties.get(CarbonCommonConstants.COLUMN_META_CACHE).get,
         tableProperties)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/dac8a4b0/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
index 23b5cba..7b19325 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
@@ -109,7 +109,8 @@ private[sql] case class CarbonDescribeFormattedCommand(
       .LOAD_SORT_SCOPE_DEFAULT), tblProps.asScala.getOrElse("sort_scope", CarbonCommonConstants
       .LOAD_SORT_SCOPE_DEFAULT)))
     // add Cache Level property
-    results ++= Seq(("CACHE_LEVEL", tblProps.getOrDefault("CACHE_LEVEL", "BLOCK"), ""))
+    results ++= Seq(("CACHE_LEVEL", tblProps.asScala.getOrElse(CarbonCommonConstants.CACHE_LEVEL,
+      CarbonCommonConstants.CACHE_LEVEL_DEFAULT_VALUE), ""))
     val isStreaming = tblProps.asScala.getOrElse("streaming", "false")
     results ++= Seq(("Streaming", isStreaming, ""))
     val isLocalDictEnabled = tblProps.asScala