Posted to commits@carbondata.apache.org by ra...@apache.org on 2016/08/06 10:00:19 UTC

[02/20] incubator-carbondata git commit: [CARBONDATA-127] Issue while type casting data read from sort temp file to big decimal type (#893)

[CARBONDATA-127] Issue while type casting data read from sort temp file to big decimal type (#893)

Analysis: Whenever we perform a data load operation involving huge data with decimal datatypes, during intermediate merging of the sort temp files we try to typecast a byte array to a big decimal value after reading it from the object array. During this operation a typecast exception is thrown.
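
For illustration, a minimal sketch of the failing pattern (the method names are taken from the diff below; the surrounding loop and variables are assumed):

    // The measure read back from the sort temp file is already the
    // serialized form of the BigDecimal, i.e. a byte[], so the direct
    // cast below fails with a ClassCastException at runtime.
    Object measure = RemoveDictionaryUtil.getMeasure(fieldIndex, row);
    BigDecimal val = (BigDecimal) measure; // throws ClassCastException
    byte[] bigDecimalInBytes = DataTypeUtil.bigDecimalToByte(val);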

Impact area: data load flow with huge data and measures of the big decimal datatype

Fix: Typecast the object array value to a byte array instead of BigDecimal
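
A minimal sketch of the corrected write path (this mirrors the change to IntermediateFileMerger shown in the diff below; stream is assumed to be the merger's output stream):

    // The object array already holds the BigDecimal in its serialized
    // byte form, so cast to byte[] and write it length-prefixed instead
    // of converting it to BigDecimal and back again.
    byte[] bigDecimalInBytes = (byte[]) RemoveDictionaryUtil.getMeasure(fieldIndex, row);
    stream.writeInt(bigDecimalInBytes.length);
    stream.write(bigDecimalInBytes);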

Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/1695d606
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/1695d606
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/1695d606

Branch: refs/heads/master
Commit: 1695d606eb3f770a7fd6003280b15ed0e520d6f0
Parents: f2e6033
Author: manishgupta88 <to...@gmail.com>
Authored: Mon Aug 1 16:15:37 2016 +0530
Committer: Kumar Vishal <ku...@gmail.com>
Committed: Mon Aug 1 16:15:37 2016 +0530

----------------------------------------------------------------------
 .../carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala | 6 ++++++
 .../sortandgroupby/sortdata/IntermediateFileMerger.java        | 5 +----
 2 files changed, 7 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1695d606/integration/spark/src/test/scala/org/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
index fb28568..2f6b9f8 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
@@ -37,6 +37,8 @@ class TestBigDecimal extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists hiveBigDecimal")
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.SORT_SIZE, "1")
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT, "2")
     sql("CREATE TABLE IF NOT EXISTS carbonTable (ID Int, date Timestamp, country String, name String, phonetype String, serialname String, salary Decimal(17,2))STORED BY 'org.apache.carbondata.format'")
     sql("create table if not exists hiveTable(ID Int, date Timestamp, country String, name String, phonetype String, serialname String, salary Decimal(17,2))row format delimited fields terminated by ','")
     sql("LOAD DATA LOCAL INPATH './src/test/resources/decimalDataWithHeader.csv' into table carbonTable")
@@ -165,6 +167,10 @@ class TestBigDecimal extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists hiveBigDecimal")
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.SORT_SIZE,
+      CarbonCommonConstants.SORT_SIZE_DEFAULT_VAL)
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT,
+      CarbonCommonConstants.SORT_INTERMEDIATE_FILES_LIMIT_DEFAULT_VALUE)
   }
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1695d606/processing/src/main/java/org/carbondata/processing/sortandgroupby/sortdata/IntermediateFileMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/carbondata/processing/sortandgroupby/sortdata/IntermediateFileMerger.java b/processing/src/main/java/org/carbondata/processing/sortandgroupby/sortdata/IntermediateFileMerger.java
index 779b4e8..56f6414 100644
--- a/processing/src/main/java/org/carbondata/processing/sortandgroupby/sortdata/IntermediateFileMerger.java
+++ b/processing/src/main/java/org/carbondata/processing/sortandgroupby/sortdata/IntermediateFileMerger.java
@@ -25,7 +25,6 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.math.BigDecimal;
 import java.util.AbstractQueue;
 import java.util.PriorityQueue;
 import java.util.concurrent.Callable;
@@ -35,7 +34,6 @@ import org.carbondata.common.logging.LogServiceFactory;
 import org.carbondata.core.constants.CarbonCommonConstants;
 import org.carbondata.core.util.CarbonUtil;
 import org.carbondata.core.util.CarbonUtilException;
-import org.carbondata.core.util.DataTypeUtil;
 import org.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException;
 import org.carbondata.processing.util.RemoveDictionaryUtil;
 
@@ -341,8 +339,7 @@ public class IntermediateFileMerger implements Callable<Void> {
             Long val = (Long) RemoveDictionaryUtil.getMeasure(fieldIndex, row);
             stream.writeLong(val);
           } else if (aggType[counter] == CarbonCommonConstants.BIG_DECIMAL_MEASURE) {
-            BigDecimal val = (BigDecimal) RemoveDictionaryUtil.getMeasure(fieldIndex, row);
-            byte[] bigDecimalInBytes = DataTypeUtil.bigDecimalToByte(val);
+            byte[] bigDecimalInBytes = (byte[]) RemoveDictionaryUtil.getMeasure(fieldIndex, row);
             stream.writeInt(bigDecimalInBytes.length);
             stream.write(bigDecimalInBytes);
           }