Posted to commits@carbondata.apache.org by ja...@apache.org on 2017/07/12 15:04:57 UTC

[21/50] [abbrv] carbondata git commit: Fixed describe formatted for sort_columns after alter

Fixed describe formatted for sort_columns after alter

Previously DESCRIBE FORMATTED derived the SORT_COLUMNS row by taking the first
getNumberOfSortColumns() entries of getAllDimensions(), a positional shortcut
that no longer lines up with the actual sort columns once ALTER TABLE drops or
adds columns. This commit adds CarbonTable.getSortColumns(tableName), which
filters the table's dimensions by isSortColumn(), and makes
DescribeCommandFormatted (spark and spark2) use it.
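
For orientation only (not part of the commit): a self-contained toy sketch of the
difference, using a hypothetical Dim stand-in for CarbonDimension and an assumed
stale sort-column count; the real lookup lives in carbondata-core and may differ
in detail.

  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.List;
  import java.util.stream.Collectors;

  // Toy stand-in for CarbonDimension; only the two accessors this sketch needs.
  class Dim {
    private final String name;
    private final boolean sortColumn;
    Dim(String name, boolean sortColumn) { this.name = name; this.sortColumn = sortColumn; }
    String getColName() { return name; }
    boolean isSortColumn() { return sortColumn; }
  }

  public class SortColumnsSketch {
    public static void main(String[] args) {
      // Dimensions of specifiedSortColumnsWithAlter after the drop/add in the new test:
      // designation is gone, designation12 was appended and is not a sort column.
      List<Dim> dims = Arrays.asList(
          new Dim("empno", true),
          new Dim("empname", true),
          new Dim("role", true),
          new Dim("doj", true),
          new Dim("designation12", false));

      // Old lookup: first getNumberOfSortColumns() dimensions. If that count were still
      // the pre-alter value (5 here, an assumed staleness), the wrong column leaks in.
      int staleNumberOfSortColumns = 5;
      String oldWay = dims.subList(0, staleNumberOfSortColumns).stream()
          .map(Dim::getColName).collect(Collectors.joining(","));

      // New lookup, mirroring what CarbonTable.getSortColumns() does: keep only the
      // dimensions flagged as sort columns, regardless of position or count.
      List<String> sortColumns = new ArrayList<>();
      for (Dim dim : dims) {
        if (dim.isSortColumn()) {
          sortColumns.add(dim.getColName());
        }
      }

      System.out.println("old: " + oldWay);                        // empno,empname,role,doj,designation12
      System.out.println("new: " + String.join(",", sortColumns)); // empno,empname,role,doj
    }
  }

The second output is what the new tests below assert via checkExistence on
"empno,empname,role,doj".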


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/e9329ee7
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/e9329ee7
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/e9329ee7

Branch: refs/heads/datamap
Commit: e9329ee7c1adc913d6e65c970e6312a5b18c6ec2
Parents: 5f9741e
Author: Ayush Mantri <aa...@gmail.com>
Authored: Thu Jun 29 15:18:20 2017 +0530
Committer: kumarvishal <ku...@gmail.com>
Committed: Mon Jul 3 20:30:18 2017 +0530

----------------------------------------------------------------------
 .../core/metadata/schema/table/CarbonTable.java  | 19 +++++++++++++++++++
 .../execution/command/carbonTableSchema.scala    |  6 +++---
 .../execution/command/carbonTableSchema.scala    |  6 +++---
 .../AlterTableValidationTestCase.scala           | 19 ++++++++++++++++++-
 4 files changed, 43 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/e9329ee7/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index ae97262..16ded57 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -691,6 +691,25 @@ public class CarbonTable implements Serializable {
     tableMeasuresMap.put(tableName, visibleMeasures);
   }
 
+  /**
+   * Method to get the list of sort columns
+   *
+   * @param tableName
+   * @return List of Sort column
+   */
+  public List<String> getSortColumns(String tableName) {
+    List<String> sort_columsList = new ArrayList<String>(allDimensions.size());
+    List<CarbonDimension> carbonDimensions = tableDimensionsMap.get(tableName);
+    for (CarbonDimension dim : carbonDimensions) {
+      if (dim.isSortColumn()) {
+        sort_columsList.add(dim.getColName());
+      }
+    }
+    return sort_columsList;
+  }
+
+
+
   public int getNumberOfSortColumns() {
     return numberOfSortColumns;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e9329ee7/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index 3477abb..70c8407 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -851,9 +851,9 @@ private[sql] case class DescribeCommandFormatted(
     } else {
       results ++= Seq(("ADAPTIVE", "", ""))
     }
-    results ++= Seq(("SORT_COLUMNS", relation.metaData.carbonTable.getAllDimensions
-      .subList(0, relation.metaData.carbonTable.getNumberOfSortColumns).asScala
-      .map(column => column.getColName).mkString(","), ""))
+    results ++= Seq(("SORT_COLUMNS", relation.metaData.carbonTable.getSortColumns(
+      relation.tableMeta.carbonTableIdentifier.getTableName).asScala
+      .map(column => column).mkString(","), ""))
     val dimension = carbonTable
       .getDimensionByTableName(relation.tableMeta.carbonTableIdentifier.getTableName)
     results ++= getColumnGroups(dimension.asScala.toList)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e9329ee7/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index 8fe4bd7..bb5bdd1 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -902,9 +902,9 @@ private[sql] case class DescribeCommandFormatted(
     } else {
       results ++= Seq(("ADAPTIVE", "", ""))
     }
-    results ++= Seq(("SORT_COLUMNS", relation.metaData.carbonTable.getAllDimensions
-      .subList(0, relation.metaData.carbonTable.getNumberOfSortColumns).asScala
-      .map(column => column.getColName).mkString(","), ""))
+    results ++= Seq(("SORT_COLUMNS", relation.metaData.carbonTable.getSortColumns(
+      relation.tableMeta.carbonTableIdentifier.getTableName).asScala
+      .map(column => column).mkString(","), ""))
     val dimension = carbonTable
       .getDimensionByTableName(relation.tableMeta.carbonTableIdentifier.getTableName)
     results ++= getColumnGroups(dimension.asScala.toList)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e9329ee7/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
index 972b0c2..ae64498 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
@@ -423,7 +423,22 @@ class AlterTableValidationTestCase extends QueryTest with BeforeAndAfterAll {
     sql("alter table Default.uniqdata rename to uniqdata1")
     checkAnswer(sql("select * from Default.uniqdata1"), Row(1,"hello"))
   }
-
+  test("describe formatted for default sort_columns pre and post alter") {
+    sql("CREATE TABLE defaultSortColumnsWithAlter (empno int, empname String, designation String,role String, doj Timestamp) STORED BY 'org.apache.carbondata.format' " +
+        "tblproperties('DICTIONARY_INCLUDE'='empno','DICTIONARY_EXCLUDE'='role')")
+    sql("alter table defaultSortColumnsWithAlter drop columns (designation)")
+    sql("alter table defaultSortColumnsWithAlter add columns (designation12 String)")
+    checkExistence(sql("describe formatted defaultSortColumnsWithAlter"),true,"SORT_COLUMNS")
+    checkExistence(sql("describe formatted defaultSortColumnsWithAlter"),true,"empno,empname,role,doj")
+  }
+  test("describe formatted for specified sort_columns pre and post alter") {
+    sql("CREATE TABLE specifiedSortColumnsWithAlter (empno int, empname String, designation String,role String, doj Timestamp) STORED BY 'org.apache.carbondata.format' " +
+        "tblproperties('sort_columns'='empno,empname,designation,role,doj','DICTIONARY_INCLUDE'='empno','DICTIONARY_EXCLUDE'='role')")
+    sql("alter table specifiedSortColumnsWithAlter drop columns (designation)")
+    sql("alter table specifiedSortColumnsWithAlter add columns (designation12 String)")
+    checkExistence(sql("describe formatted specifiedSortColumnsWithAlter"),true,"SORT_COLUMNS")
+    checkExistence(sql("describe formatted specifiedSortColumnsWithAlter"),true,"empno,empname,role,doj")
+  }
   override def afterAll {
     sql("DROP TABLE IF EXISTS restructure")
     sql("DROP TABLE IF EXISTS restructure_new")
@@ -432,5 +447,7 @@ class AlterTableValidationTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS restructure_badnew")
     sql("DROP TABLE IF EXISTS lock_rename")
     sql("drop table if exists uniqdata")
+    sql("drop table if exists defaultSortColumnsWithAlter")
+    sql("drop table if exists specifiedSortColumnsWithAlter")
   }
 }