You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ma...@apache.org on 2019/01/07 05:58:05 UTC

carbondata git commit: [CARBONDATA-3223] Fixed Wrong Datasize and Indexsize calculation for old store using Show Segments

Repository: carbondata
Updated Branches:
  refs/heads/master 923dab1b5 -> 72da33495


[CARBONDATA-3223] Fixed Wrong Datasize and Indexsize calculation for old store using Show Segments

Problem: A table created and loaded on an older version (1.1) showed its data-size and index-size as 0B after being refreshed on a newer version. This was
because when the data-size came back as "null" we did not compute it; we directly assigned it a value of 0.

Solution: Show the data-size and index-size of such old segments as NA instead of 0B.

Also refactored SetQuerySegment code for better understandability.

This closes #3047


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/72da3349
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/72da3349
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/72da3349

Branch: refs/heads/master
Commit: 72da33495362fdbf4cd0e24331ca77a1fab470f6
Parents: 923dab1
Author: manishnalla1994 <ma...@gmail.com>
Authored: Wed Jan 2 18:00:36 2019 +0530
Committer: manishgupta88 <to...@gmail.com>
Committed: Mon Jan 7 11:33:06 2019 +0530

----------------------------------------------------------------------
 .../hadoop/api/CarbonInputFormat.java           | 25 +++++++++++++++-----
 .../org/apache/carbondata/api/CarbonStore.scala |  4 ++--
 .../org/apache/spark/sql/CarbonCountStar.scala  |  2 +-
 3 files changed, 22 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/72da3349/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
index 24691f2..26144e2 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
@@ -277,12 +277,7 @@ m filterExpression
   public static void setQuerySegment(Configuration conf, AbsoluteTableIdentifier identifier) {
     String dbName = identifier.getCarbonTableIdentifier().getDatabaseName().toLowerCase();
     String tbName = identifier.getCarbonTableIdentifier().getTableName().toLowerCase();
-    String segmentNumbersFromProperty = CarbonProperties.getInstance()
-        .getProperty(CarbonCommonConstants.CARBON_INPUT_SEGMENTS + dbName + "." + tbName, "*");
-    if (!segmentNumbersFromProperty.trim().equals("*")) {
-      CarbonInputFormat.setSegmentsToAccess(conf,
-          Segment.toSegmentList(segmentNumbersFromProperty.split(","), null));
-    }
+    getQuerySegmentToAccess(conf, dbName, tbName);
   }
 
   /**
@@ -827,4 +822,22 @@ m filterExpression
     }
     return projectColumns.toArray(new String[projectColumns.size()]);
   }
+
+  private static void getQuerySegmentToAccess(Configuration conf, String dbName, String tableName) {
+    String segmentNumbersFromProperty = CarbonProperties.getInstance()
+        .getProperty(CarbonCommonConstants.CARBON_INPUT_SEGMENTS + dbName + "." + tableName, "*");
+    if (!segmentNumbersFromProperty.trim().equals("*")) {
+      CarbonInputFormat.setSegmentsToAccess(conf,
+          Segment.toSegmentList(segmentNumbersFromProperty.split(","), null));
+    }
+  }
+
+  /**
+   * Set `CARBON_INPUT_SEGMENTS` from property to configuration
+   */
+  public static void setQuerySegment(Configuration conf, CarbonTable carbonTable) {
+    String tableName = carbonTable.getTableName();
+    getQuerySegmentToAccess(conf, carbonTable.getDatabaseName(), tableName);
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/72da3349/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
index da9d4c2..11db430 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/api/CarbonStore.scala
@@ -107,8 +107,8 @@ object CarbonStore {
             (indices.asScala.map(_.getFile_size).sum, FileFactory.getCarbonFile(indexPath).getSize)
           } else {
             // for batch segment, we can get the data size from table status file directly
-            (if (load.getDataSize == null) 0L else load.getDataSize.toLong,
-              if (load.getIndexSize == null) 0L else load.getIndexSize.toLong)
+            (if (load.getDataSize == null) -1L else load.getDataSize.toLong,
+              if (load.getIndexSize == null) -1L else load.getIndexSize.toLong)
           }
 
           if (showHistory) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/72da3349/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonCountStar.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonCountStar.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonCountStar.scala
index ac8eb64..297cb54 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonCountStar.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonCountStar.scala
@@ -52,7 +52,7 @@ case class CarbonCountStar(
       .setConfigurationToCurrentThread(sparkSession.sessionState.newHadoopConf())
     val absoluteTableIdentifier = carbonTable.getAbsoluteTableIdentifier
     val (job, tableInputFormat) = createCarbonInputFormat(absoluteTableIdentifier)
-    CarbonInputFormat.setQuerySegment(job.getConfiguration, absoluteTableIdentifier)
+    CarbonInputFormat.setQuerySegment(job.getConfiguration, carbonTable)
 
     // get row count
     val rowCount = CarbonUpdateUtil.getRowCount(