Posted to commits@carbondata.apache.org by ja...@apache.org on 2017/07/28 06:10:01 UTC

[1/7] carbondata git commit: [CARBONDATA-1098] Change page statistics use exact type and use column page in writer

Repository: carbondata
Updated Branches:
  refs/heads/master c504dd2d0 -> 742269079


http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/store/writer/v1/CarbonFactDataWriterImplV1.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v1/CarbonFactDataWriterImplV1.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v1/CarbonFactDataWriterImplV1.java
index fab1a39..0f1b52b 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v1/CarbonFactDataWriterImplV1.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v1/CarbonFactDataWriterImplV1.java
@@ -24,17 +24,17 @@ import java.nio.channels.FileChannel;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastore.columnar.ColGroupBlockStorage;
 import org.apache.carbondata.core.datastore.columnar.IndexStorage;
 import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
-import org.apache.carbondata.core.datastore.page.encoding.EncodedData;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedDimensionPage;
+import org.apache.carbondata.core.datastore.page.key.TablePageKey;
+import org.apache.carbondata.core.datastore.page.statistics.TablePageStatistics;
 import org.apache.carbondata.core.metadata.BlockletInfoColumnar;
 import org.apache.carbondata.core.util.CarbonMetadataUtil;
 import org.apache.carbondata.core.util.NodeHolder;
 import org.apache.carbondata.core.writer.CarbonFooterWriter;
 import org.apache.carbondata.format.FileFooter;
-import org.apache.carbondata.processing.store.TablePageKey;
-import org.apache.carbondata.processing.store.TablePageStatistics;
 import org.apache.carbondata.processing.store.writer.AbstractFactDataWriter;
 import org.apache.carbondata.processing.store.writer.CarbonDataWriterVo;
 
@@ -47,12 +47,11 @@ public class CarbonFactDataWriterImplV1 extends AbstractFactDataWriter<int[]> {
     super(dataWriterVo);
   }
 
-  @Override
-  public NodeHolder buildDataNodeHolder(EncodedData encoded,
-      TablePageStatistics stats, TablePageKey key)
+  protected NodeHolder buildNodeHolder(EncodedTablePage encodedTablePage)
       throws CarbonDataWriterException {
     // if there are no NO-Dictionary columns present in the table then
     // set the empty byte array
+    TablePageKey key = encodedTablePage.getPageKey();
     byte[] startKey = key.getStartKey();
     byte[] endKey = key.getEndKey();
     byte[] noDictionaryStartKey = key.getNoDictStartKey();
@@ -70,46 +69,48 @@ public class CarbonFactDataWriterImplV1 extends AbstractFactDataWriter<int[]> {
     int totalKeySize = 0;
     int keyBlockSize = 0;
 
-    IndexStorage[] keyStorageArray = encoded.indexStorages;
-    boolean[] isSortedData = new boolean[keyStorageArray.length];
-    int[] keyLengths = new int[keyStorageArray.length];
-    byte[][] allMinValue = new byte[keyStorageArray.length][];
-    byte[][] allMaxValue = new byte[keyStorageArray.length][];
-    boolean[] colGrpBlock = new boolean[keyStorageArray.length];
-    byte[][] keyBlockData = encoded.dimensions;
-    byte[][] measureArray = encoded.measures;
+    int numDimensions = encodedTablePage.getNumDimensions();
+    boolean[] isSortedData = new boolean[numDimensions];
+    int[] keyLengths = new int[numDimensions];
+    int[] keyBlockIdxLengths = new int[numDimensions];
+    byte[][] allMinValue = new byte[numDimensions][];
+    byte[][] allMaxValue = new byte[numDimensions][];
+    byte[][] keyBlockData = NodeHolder.getKeyArray(encodedTablePage);
+    byte[][] measureArray = NodeHolder.getDataArray(encodedTablePage);
+    TablePageStatistics stats = new TablePageStatistics(encodedTablePage.getDimensions(),
+        encodedTablePage.getMeasures());
 
-    for (int i = 0; i < keyLengths.length; i++) {
-      keyLengths[i] = keyBlockData[i].length;
-      isSortedData[i] = keyStorageArray[i].isAlreadySorted();
+    EncodedDimensionPage[] dimensions = encodedTablePage.getDimensions();
+    for (int i = 0; i < dimensions.length; i++) {
+      IndexStorage indexStorage = dimensions[i].getIndexStorage();
+      keyLengths[i] = dimensions[i].getEncodedData().length;
+      isSortedData[i] = indexStorage.isAlreadySorted();
       if (!isSortedData[i]) {
         keyBlockSize++;
 
       }
       totalKeySize += keyLengths[i];
+      byte[] min = stats.getDimensionMinValue()[i];
+      byte[] max = stats.getDimensionMaxValue()[i];
       if (dataWriterVo.getIsComplexType()[i] || dataWriterVo.getIsDictionaryColumn()[i]) {
-        allMinValue[i] = keyStorageArray[i].getMin();
-        allMaxValue[i] = keyStorageArray[i].getMax();
+        allMinValue[i] = min;
+        allMaxValue[i] = max;
       } else {
-        allMinValue[i] = updateMinMaxForNoDictionary(keyStorageArray[i].getMin());
-        allMaxValue[i] = updateMinMaxForNoDictionary(keyStorageArray[i].getMax());
-      }
-      //if keyStorageArray is instance of ColGroupBlockStorage than it's colGroup chunk
-      if (keyStorageArray[i] instanceof ColGroupBlockStorage) {
-        colGrpBlock[i] = true;
+        allMinValue[i] = updateMinMaxForNoDictionary(min);
+        allMaxValue[i] = updateMinMaxForNoDictionary(max);
       }
     }
-    int[] keyBlockIdxLengths = new int[keyBlockSize];
     byte[][] dataAfterCompression = new byte[keyBlockSize][];
     byte[][] indexMap = new byte[keyBlockSize][];
     int idx = 0;
-    for (int i = 0; i < isSortedData.length; i++) {
+    for (int i = 0; i < dimensions.length; i++) {
+      IndexStorage indexStorage = dimensions[i].getIndexStorage();
       if (!isSortedData[i]) {
         dataAfterCompression[idx] =
-            numberCompressor.compress((int[])keyStorageArray[i].getRowIdPage());
-        if (null != keyStorageArray[i].getRowIdRlePage()
-            && ((int[])keyStorageArray[i].getRowIdRlePage()).length > 0) {
-          indexMap[idx] = numberCompressor.compress((int[])keyStorageArray[i].getRowIdRlePage());
+            numberCompressor.compress((int[])indexStorage.getRowIdPage());
+        if (null != indexStorage.getRowIdRlePage()
+            && ((int[])indexStorage.getRowIdRlePage()).length > 0) {
+          indexMap[idx] = numberCompressor.compress((int[])indexStorage.getRowIdRlePage());
         } else {
           indexMap[idx] = new byte[0];
         }
@@ -128,10 +129,11 @@ public class CarbonFactDataWriterImplV1 extends AbstractFactDataWriter<int[]> {
     int[] dataIndexMapLength = new int[compressDataBlockSize];
     idx = 0;
     for (int i = 0; i < dataWriterVo.getRleEncodingForDictDim().length; i++) {
+      IndexStorage indexStorage = dimensions[i].getIndexStorage();
       if (dataWriterVo.getRleEncodingForDictDim()[i]) {
         try {
           compressedDataIndex[idx] =
-              numberCompressor.compress((int[])keyStorageArray[i].getDataRlePage());
+              numberCompressor.compress((int[])indexStorage.getDataRlePage());
           dataIndexMapLength[idx] = compressedDataIndex[idx].length;
           idx++;
         } catch (Exception e) {
@@ -154,7 +156,8 @@ public class CarbonFactDataWriterImplV1 extends AbstractFactDataWriter<int[]> {
     holder.setMeasureNullValueIndex(stats.getNullBitSet());
     // end key format will be <length of dictionary key><length of no
     // dictionary key><DictionaryKey><No Dictionary key>
-    byte[] updatedNoDictionaryEndKey = updateNoDictionaryStartAndEndKey(noDictionaryEndKey);
+    byte[] updatedNoDictionaryEndKey =
+        encodedTablePage.getPageKey().updateNoDictionaryStartAndEndKey(noDictionaryEndKey);
     ByteBuffer buffer = ByteBuffer.allocate(
         CarbonCommonConstants.INT_SIZE_IN_BYTE + CarbonCommonConstants.INT_SIZE_IN_BYTE
             + endKey.length + updatedNoDictionaryEndKey.length);
@@ -165,7 +168,8 @@ public class CarbonFactDataWriterImplV1 extends AbstractFactDataWriter<int[]> {
     buffer.rewind();
     holder.setEndKey(buffer.array());
     holder.setMeasureLenght(msrLength);
-    byte[] updatedNoDictionaryStartKey = updateNoDictionaryStartAndEndKey(noDictionaryStartKey);
+    byte[] updatedNoDictionaryStartKey =
+        encodedTablePage.getPageKey().updateNoDictionaryStartAndEndKey(noDictionaryStartKey);
     // start key format will be <length of dictionary key><length of no
     // dictionary key><DictionaryKey><No Dictionary key>
     buffer = ByteBuffer.allocate(
@@ -185,38 +189,28 @@ public class CarbonFactDataWriterImplV1 extends AbstractFactDataWriter<int[]> {
     holder.setCompressedIndexMap(indexMap);
     holder.setDataIndexMapLength(dataIndexMapLength);
     holder.setCompressedDataIndex(compressedDataIndex);
-    holder.setMeasureStats(stats.getMeasurePageStatistics());
     holder.setTotalDimensionArrayLength(totalKeySize);
     holder.setTotalMeasureArrayLength(totalMsrArrySize);
     //setting column min max value
     holder.setDimensionColumnMaxData(allMaxValue);
     holder.setDimensionColumnMinData(allMinValue);
     holder.setRleEncodingForDictDim(dataWriterVo.getRleEncodingForDictDim());
-    holder.setColGrpBlocks(colGrpBlock);
+    holder.setEncodedData(encodedTablePage);
     return holder;
   }
 
-  @Override public void writeBlockletData(NodeHolder holder) throws CarbonDataWriterException {
-    if (holder.getEntryCount() == 0) {
+  @Override public void writeTablePage(EncodedTablePage encodedTablePage)
+      throws CarbonDataWriterException {
+    if (encodedTablePage.getPageSize() == 0) {
       return;
     }
-    int indexBlockSize = 0;
-    for (int i = 0; i < holder.getKeyBlockIndexLength().length; i++) {
-      indexBlockSize += holder.getKeyBlockIndexLength()[i] + CarbonCommonConstants.INT_SIZE_IN_BYTE;
-    }
-
-    for (int i = 0; i < holder.getDataIndexMapLength().length; i++) {
-      indexBlockSize += holder.getDataIndexMapLength()[i];
-    }
-
-    long blockletDataSize =
-        holder.getTotalDimensionArrayLength() + holder.getTotalMeasureArrayLength()
-            + indexBlockSize;
+    long blockletDataSize = encodedTablePage.getEncodedSize();
     updateBlockletFileChannel(blockletDataSize);
+    NodeHolder nodeHolder = buildNodeHolder(encodedTablePage);
     // write data to file and get its offset
-    long offset = writeDataToFile(holder, fileChannel);
+    long offset = writeDataToFile(nodeHolder, fileChannel);
     // get the blocklet info for currently added blocklet
-    BlockletInfoColumnar blockletInfo = getBlockletInfo(holder, offset);
+    BlockletInfoColumnar blockletInfo = getBlockletInfo(nodeHolder, offset);
     // add blocklet info to list
     blockletInfoList.add(blockletInfo);
     LOGGER.info("A new blocklet is added, its data size is: " + blockletDataSize + " Byte");
@@ -231,6 +225,7 @@ public class CarbonFactDataWriterImplV1 extends AbstractFactDataWriter<int[]> {
    */
   private long writeDataToFile(NodeHolder nodeHolder, FileChannel channel)
       throws CarbonDataWriterException {
+    int numDimensions = nodeHolder.getKeyArray().length;
     // create byte buffer
     byte[][] compressedIndex = nodeHolder.getCompressedIndex();
     byte[][] compressedIndexMap = nodeHolder.getCompressedIndexMap();
@@ -262,16 +257,17 @@ public class CarbonFactDataWriterImplV1 extends AbstractFactDataWriter<int[]> {
       // add measure data array to byte buffer
 
       ByteBuffer buffer1 = null;
-      for (int i = 0; i < compressedIndex.length; i++) {
-        buffer1 = ByteBuffer.allocate(nodeHolder.getKeyBlockIndexLength()[i]);
-        buffer1.putInt(compressedIndex[i].length);
-        buffer1.put(compressedIndex[i]);
-        if (compressedIndexMap[i].length > 0) {
-          buffer1.put(compressedIndexMap[i]);
+      for (int i = 0; i < numDimensions; i++) {
+        if (nodeHolder.getKeyBlockIndexLength()[i] > 0) {
+          buffer1 = ByteBuffer.allocate(nodeHolder.getKeyBlockIndexLength()[i]);
+          buffer1.putInt(compressedIndex[i].length);
+          buffer1.put(compressedIndex[i]);
+          if (compressedIndexMap[i].length > 0) {
+            buffer1.put(compressedIndexMap[i]);
+          }
+          buffer1.rewind();
+          byteBuffer.put(buffer1.array());
         }
-        buffer1.rewind();
-        byteBuffer.put(buffer1.array());
-
       }
       for (int i = 0; i < compressedDataIndex.length; i++) {
         byteBuffer.put(compressedDataIndex[i]);
@@ -356,12 +352,7 @@ public class CarbonFactDataWriterImplV1 extends AbstractFactDataWriter<int[]> {
     info.setStartKey(nodeHolder.getStartKey());
     // set end key
     info.setEndKey(nodeHolder.getEndKey());
-    info.setStats(nodeHolder.getStats());
-    // return leaf metadata
-
-    //colGroup Blocks
-    info.setColGrpBlocks(nodeHolder.getColGrpBlocks());
-
+    info.setEncodedTablePage(nodeHolder.getEncodedData());
     return info;
   }
 
@@ -374,7 +365,7 @@ public class CarbonFactDataWriterImplV1 extends AbstractFactDataWriter<int[]> {
       long currentPosition = channel.size();
       CarbonFooterWriter writer = new CarbonFooterWriter(filePath);
       FileFooter convertFileMeta = CarbonMetadataUtil
-          .convertFileFooter(blockletInfoList, localCardinality.length, localCardinality,
+          .convertFileFooter(blockletInfoList, localCardinality,
               thriftColumnSchemaList, dataWriterVo.getSegmentProperties());
       fillBlockIndexInfoDetails(convertFileMeta.getNum_rows(), carbonDataFileName, currentPosition);
       writer.writeFooter(convertFileMeta, currentPosition);
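
The start/end key buffers built in buildNodeHolder above follow the layout
<length of dictionary key(int)><length of no dictionary key(int)><DictionaryKey><No Dictionary key>.
A minimal standalone sketch of that packing (names are illustrative; the patch
uses CarbonCommonConstants.INT_SIZE_IN_BYTE for the two 4-byte length fields):

import java.nio.ByteBuffer;

public class KeyLayoutSketch {
  // Packs a dictionary key and a no-dictionary key into the same buffer
  // layout that buildNodeHolder uses for the start and end keys.
  static byte[] packKey(byte[] dictKey, byte[] noDictKey) {
    ByteBuffer buffer = ByteBuffer.allocate(4 + 4 + dictKey.length + noDictKey.length);
    buffer.putInt(dictKey.length);      // length of dictionary key
    buffer.putInt(noDictKey.length);    // length of no-dictionary key
    buffer.put(dictKey);
    buffer.put(noDictKey);
    buffer.rewind();
    return buffer.array();
  }

  public static void main(String[] args) {
    byte[] packed = packKey(new byte[] {1, 2, 3}, new byte[] {9});
    System.out.println(packed.length);  // 12 = 4 + 4 + 3 + 1
  }
}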

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/store/writer/v2/CarbonFactDataWriterImplV2.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v2/CarbonFactDataWriterImplV2.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v2/CarbonFactDataWriterImplV2.java
index c835332..e19a5ce 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v2/CarbonFactDataWriterImplV2.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v2/CarbonFactDataWriterImplV2.java
@@ -27,6 +27,7 @@ import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
 import org.apache.carbondata.core.metadata.BlockletInfoColumnar;
 import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
 import org.apache.carbondata.core.util.CarbonMetadataUtil;
@@ -62,17 +63,19 @@ public class CarbonFactDataWriterImplV2 extends CarbonFactDataWriterImplV1 {
   /**
    * Below method will be used to write the data to carbon data file
    *
-   * @param holder
+   * @param encodedTablePage
    * @throws CarbonDataWriterException any problem in writing operation
    */
-  @Override public void writeBlockletData(NodeHolder holder) throws CarbonDataWriterException {
-    if (holder.getEntryCount() == 0) {
+  @Override public void writeTablePage(EncodedTablePage encodedTablePage)
+      throws CarbonDataWriterException {
+    NodeHolder nodeHolder = buildNodeHolder(encodedTablePage);
+    if (encodedTablePage.getPageSize() == 0) {
       return;
     }
     // running total used to calculate the size of the blocklet
     int size = 0;
     // get the blocklet info object
-    BlockletInfoColumnar blockletInfo = getBlockletInfo(holder, 0);
+    BlockletInfoColumnar blockletInfo = getBlockletInfo(encodedTablePage, 0);
 
     List<DataChunk2> datachunks = null;
     try {
@@ -90,16 +93,16 @@ public class CarbonFactDataWriterImplV2 extends CarbonFactDataWriterImplV1 {
       size += dataChunkByteArray[i].length;
     }
     // add row id index length
-    for (int i = 0; i < holder.getKeyBlockIndexLength().length; i++) {
-      size += holder.getKeyBlockIndexLength()[i];
+    for (int i = 0; i < nodeHolder.getKeyBlockIndexLength().length; i++) {
+      size += nodeHolder.getKeyBlockIndexLength()[i];
     }
     // add rle index length
-    for (int i = 0; i < holder.getDataIndexMapLength().length; i++) {
-      size += holder.getDataIndexMapLength()[i];
+    for (int i = 0; i < nodeHolder.getDataIndexMapLength().length; i++) {
+      size += nodeHolder.getDataIndexMapLength()[i];
     }
     // add dimension column data page and measure column data page size
     long blockletDataSize =
-        holder.getTotalDimensionArrayLength() + holder.getTotalMeasureArrayLength() + size;
+        nodeHolder.getTotalDimensionArrayLength() + nodeHolder.getTotalMeasureArrayLength() + size;
     // if size of the file already reached threshold size then create a new file and get the file
     // channel object
     updateBlockletFileChannel(blockletDataSize);
@@ -119,7 +122,7 @@ public class CarbonFactDataWriterImplV2 extends CarbonFactDataWriterImplV1 {
       throw new CarbonDataWriterException("Problem while getting the file channel size", e);
     }
     // write data to file and get its offset
-    writeDataToFile(holder, dataChunkByteArray, fileChannel);
+    writeDataToFile(nodeHolder, dataChunkByteArray, fileChannel);
     // add blocklet info to list
     blockletInfoList.add(blockletInfo);
     LOGGER.info("A new blocklet is added, its data size is: " + blockletDataSize + " Byte");
@@ -134,10 +137,6 @@ public class CarbonFactDataWriterImplV2 extends CarbonFactDataWriterImplV1 {
    * <MColumn1DataChunk><MColumn1DataPage>
    * <MColumn2DataChunk><MColumn2DataPage>
    * <MColumn2DataChunk><MColumn2DataPage>
-   *
-   * @param nodeHolder
-   * @param dataChunksBytes
-   * @param channel
    * @throws CarbonDataWriterException
    */
   private void writeDataToFile(NodeHolder nodeHolder, byte[][] dataChunksBytes, FileChannel channel)
@@ -158,11 +157,15 @@ public class CarbonFactDataWriterImplV2 extends CarbonFactDataWriterImplV1 {
     for (int i = 0; i < nodeHolder.getIsSortedKeyBlock().length; i++) {
       currentDataChunksOffset.add(offset);
       currentDataChunksLength.add((short) dataChunksBytes[i].length);
-      bufferSize += dataChunksBytes[i].length + nodeHolder.getKeyLengths()[i] + (!nodeHolder
-          .getIsSortedKeyBlock()[i] ? nodeHolder.getKeyBlockIndexLength()[rowIdIndex] : 0) + (
-          dataWriterVo.getRleEncodingForDictDim()[i] ?
-              nodeHolder.getCompressedDataIndex()[rleIndex].length :
-              0);
+      int size1 = (!nodeHolder.getIsSortedKeyBlock()[i] ?
+          nodeHolder.getKeyBlockIndexLength()[rowIdIndex] :
+          0);
+      int size2 = (dataWriterVo.getRleEncodingForDictDim()[i] ?
+          nodeHolder.getCompressedDataIndex()[rleIndex].length :
+          0);
+      bufferSize += dataChunksBytes[i].length +
+          nodeHolder.getKeyLengths()[i] +
+          size1 + size2;
       offset += dataChunksBytes[i].length;
       offset += nodeHolder.getKeyLengths()[i];
       if (!nodeHolder.getIsSortedKeyBlock()[i]) {
@@ -182,14 +185,16 @@ public class CarbonFactDataWriterImplV2 extends CarbonFactDataWriterImplV1 {
       buffer.put(nodeHolder.getKeyArray()[i]);
       if (!nodeHolder.getIsSortedKeyBlock()[i]) {
         buffer.putInt(nodeHolder.getCompressedIndex()[rowIdIndex].length);
-        buffer.put(nodeHolder.getCompressedIndex()[rowIdIndex]);
+        byte[] b1 = nodeHolder.getCompressedIndex()[rowIdIndex];
+        buffer.put(b1);
         if (nodeHolder.getCompressedIndexMap()[rowIdIndex].length > 0) {
           buffer.put(nodeHolder.getCompressedIndexMap()[rowIdIndex]);
         }
         rowIdIndex++;
       }
       if (dataWriterVo.getRleEncodingForDictDim()[i]) {
-        buffer.put(nodeHolder.getCompressedDataIndex()[rleIndex]);
+        byte[] b2 = nodeHolder.getCompressedDataIndex()[rleIndex];
+        buffer.put(b2);
         rleIndex++;
       }
     }
@@ -232,7 +237,9 @@ public class CarbonFactDataWriterImplV2 extends CarbonFactDataWriterImplV1 {
    *
    * @return BlockletInfo - blocklet metadata
    */
-  protected BlockletInfoColumnar getBlockletInfo(NodeHolder nodeHolder, long offset) {
+  protected BlockletInfoColumnar getBlockletInfo(EncodedTablePage encodedTablePage, long offset) {
+    NodeHolder nodeHolder = buildNodeHolder(encodedTablePage);
+
     // create the info object for leaf entry
     BlockletInfoColumnar info = new BlockletInfoColumnar();
     //add rleEncodingForDictDim array
@@ -258,12 +265,7 @@ public class CarbonFactDataWriterImplV2 extends CarbonFactDataWriterImplV1 {
     info.setStartKey(nodeHolder.getStartKey());
     // set end key
     info.setEndKey(nodeHolder.getEndKey());
-    info.setStats(nodeHolder.getStats());
-    // return leaf metadata
-
-    //colGroup Blocks
-    info.setColGrpBlocks(nodeHolder.getColGrpBlocks());
-
+    info.setEncodedTablePage(encodedTablePage);
     return info;
   }
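
In the V2 layout above, each dimension column is stored as its serialized
DataChunk2 followed by the key page, plus an inverted (row id) index only when
the key block is unsorted, and an RLE index only for RLE-encoded dictionary
columns. A sketch of the per-column size computation that the buffer
allocation above performs (parameter names are illustrative, not from the
patch):

// Returns the on-disk size of one dimension column in a V2 blocklet.
static int dimensionSizeOnDisk(int dataChunkLen, int keyPageLen, boolean sorted,
    int rowIdIndexLen, boolean rleEncoded, int rleIndexLen) {
  int size = dataChunkLen + keyPageLen;
  if (!sorted) {
    size += rowIdIndexLen;  // inverted index is written for unsorted key blocks
  }
  if (rleEncoded) {
    size += rleIndexLen;    // RLE index follows for dictionary columns
  }
  return size;
}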
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
index 9afbb55..adb97ae 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
@@ -26,9 +26,10 @@ import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.constants.CarbonV3DataFormatConstants;
-import org.apache.carbondata.core.datastore.columnar.ColGroupBlockStorage;
 import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
-import org.apache.carbondata.core.datastore.page.encoding.EncodedData;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedDimensionPage;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedMeasurePage;
 import org.apache.carbondata.core.metadata.blocklet.index.BlockletBTreeIndex;
 import org.apache.carbondata.core.metadata.blocklet.index.BlockletMinMaxIndex;
 import org.apache.carbondata.core.metadata.index.BlockIndexInfo;
@@ -36,11 +37,8 @@ import org.apache.carbondata.core.util.ByteUtil;
 import org.apache.carbondata.core.util.CarbonMetadataUtil;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.NodeHolder;
 import org.apache.carbondata.format.BlockletInfo3;
 import org.apache.carbondata.format.FileFooter3;
-import org.apache.carbondata.processing.store.TablePageKey;
-import org.apache.carbondata.processing.store.TablePageStatistics;
 import org.apache.carbondata.processing.store.writer.AbstractFactDataWriter;
 import org.apache.carbondata.processing.store.writer.CarbonDataWriterVo;
 
@@ -77,181 +75,6 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter<short[]>
     dataWriterHolder = new DataWriterHolder();
   }
 
-  /**
-   * Below method will be used to build the node holder object
-   * This node holder object will be used to persist data which will
-   * be written in carbon data file
-   */
-  @Override public NodeHolder buildDataNodeHolder(EncodedData encoded,
-      TablePageStatistics stats, TablePageKey key) throws CarbonDataWriterException {
-    // if there are no NO-Dictionary column present in the table then
-    // set the empty byte array
-    byte[] startKey = key.getStartKey();
-    byte[] endKey = key.getEndKey();
-    byte[] noDictionaryStartKey = key.getNoDictStartKey();
-    byte[] noDictionaryEndKey = key.getNoDictEndKey();
-    if (null == noDictionaryEndKey) {
-      noDictionaryEndKey = new byte[0];
-    }
-    if (null == noDictionaryStartKey) {
-      noDictionaryStartKey = new byte[0];
-    }
-    // total measure length;
-    int totalMsrArrySize = 0;
-    // current measure length;
-    int currentMsrLenght = 0;
-    int numDimensions = encoded.dimensions.length;
-    int totalKeySize = 0;
-    boolean[] isSortedData = new boolean[numDimensions];
-    int[] keyLengths = new int[numDimensions];
-    boolean[] colGrpBlock = new boolean[numDimensions];
-    int[] keyBlockIdxLengths = new int[numDimensions];
-    byte[][] dataAfterCompression = new byte[numDimensions][];
-    byte[][] indexMap = new byte[numDimensions][];
-    for (int i = 0; i < numDimensions; i++) {
-      isSortedData[i] = encoded.indexStorages[i].isAlreadySorted();
-      keyLengths[i] = encoded.dimensions[i].length;
-      totalKeySize += keyLengths[i];
-      if (!isSortedData[i]) {
-        dataAfterCompression[i] =
-            getByteArray((short[])encoded.indexStorages[i].getRowIdPage());
-        if (null != encoded.indexStorages[i].getRowIdRlePage() &&
-            ((short[])encoded.indexStorages[i].getRowIdRlePage()).length > 0) {
-          indexMap[i] = getByteArray((short[])encoded.indexStorages[i].getRowIdRlePage());
-        } else {
-          indexMap[i] = new byte[0];
-        }
-        keyBlockIdxLengths[i] = (dataAfterCompression[i].length + indexMap[i].length)
-            + CarbonCommonConstants.INT_SIZE_IN_BYTE;
-      }
-      // if keyStorageArray is instance of ColGroupBlockStorage than it's colGroup chunk
-      if (encoded.indexStorages[i] instanceof ColGroupBlockStorage) {
-        colGrpBlock[i] = true;
-      }
-    }
-    byte[][] compressedDataIndex = new byte[numDimensions][];
-    int[] dataIndexMapLength = new int[numDimensions];
-    for (int i = 0; i < dataWriterVo.getRleEncodingForDictDim().length; i++) {
-      if (dataWriterVo.getRleEncodingForDictDim()[i]) {
-        try {
-          compressedDataIndex[i] =
-              getByteArray((short[])encoded.indexStorages[i].getDataRlePage());
-          dataIndexMapLength[i] = compressedDataIndex[i].length;
-        } catch (Exception e) {
-          throw new CarbonDataWriterException(e.getMessage(), e);
-        }
-      }
-    }
-    int[] msrLength = new int[dataWriterVo.getMeasureCount()];
-    // calculate the total size required for all the measure and get the
-    // each measure size
-    for (int i = 0; i < encoded.measures.length; i++) {
-      currentMsrLenght = encoded.measures[i].length;
-      totalMsrArrySize += currentMsrLenght;
-      msrLength[i] = currentMsrLenght;
-    }
-    NodeHolder holder = new NodeHolder();
-    holder.setDataArray(encoded.measures);
-    holder.setKeyArray(encoded.dimensions);
-    holder.setMeasureNullValueIndex(stats.getNullBitSet());
-    // end key format will be <length of dictionary key><length of no
-    // dictionary key><DictionaryKey><No Dictionary key>
-    byte[] updatedNoDictionaryEndKey = updateNoDictionaryStartAndEndKey(noDictionaryEndKey);
-    ByteBuffer buffer = ByteBuffer.allocate(
-        CarbonCommonConstants.INT_SIZE_IN_BYTE + CarbonCommonConstants.INT_SIZE_IN_BYTE
-            + endKey.length + updatedNoDictionaryEndKey.length);
-    buffer.putInt(endKey.length);
-    buffer.putInt(updatedNoDictionaryEndKey.length);
-    buffer.put(endKey);
-    buffer.put(updatedNoDictionaryEndKey);
-    buffer.rewind();
-    holder.setEndKey(buffer.array());
-    holder.setMeasureLenght(msrLength);
-    byte[] updatedNoDictionaryStartKey = updateNoDictionaryStartAndEndKey(noDictionaryStartKey);
-    // start key format will be <length of dictionary key><length of no
-    // dictionary key><DictionaryKey><No Dictionary key>
-    buffer = ByteBuffer.allocate(
-        CarbonCommonConstants.INT_SIZE_IN_BYTE + CarbonCommonConstants.INT_SIZE_IN_BYTE
-            + startKey.length + updatedNoDictionaryStartKey.length);
-    buffer.putInt(startKey.length);
-    buffer.putInt(updatedNoDictionaryStartKey.length);
-    buffer.put(startKey);
-    buffer.put(updatedNoDictionaryStartKey);
-    buffer.rewind();
-    holder.setStartKey(buffer.array());
-    holder.setEntryCount(key.getPageSize());
-    holder.setKeyLengths(keyLengths);
-    holder.setKeyBlockIndexLength(keyBlockIdxLengths);
-    holder.setIsSortedKeyBlock(isSortedData);
-    holder.setCompressedIndex(dataAfterCompression);
-    holder.setCompressedIndexMap(indexMap);
-    holder.setDataIndexMapLength(dataIndexMapLength);
-    holder.setCompressedDataIndex(compressedDataIndex);
-    holder.setMeasureStats(stats.getMeasurePageStatistics());
-    holder.setTotalDimensionArrayLength(totalKeySize);
-    holder.setTotalMeasureArrayLength(totalMsrArrySize);
-    holder.setMeasureColumnMaxData(stats.getMeasureMaxValue());
-    holder.setMeasureColumnMinData(stats.getMeasureMinValue());
-    holder.setDimensionColumnMaxData(stats.getDimensionMaxValue());
-    holder.setDimensionColumnMinData(stats.getDimensionMinValue());
-    holder.setRleEncodingForDictDim(dataWriterVo.getRleEncodingForDictDim());
-    holder.setColGrpBlocks(colGrpBlock);
-    List<byte[]> dimensionDataChunk2 = null;
-    List<byte[]> measureDataChunk2 = null;
-    try {
-      dimensionDataChunk2 = CarbonMetadataUtil
-          .getDataChunk2(holder, thriftColumnSchemaList, dataWriterVo.getSegmentProperties(), true);
-      measureDataChunk2 = CarbonMetadataUtil
-          .getDataChunk2(holder, thriftColumnSchemaList, dataWriterVo.getSegmentProperties(),
-              false);
-
-    } catch (IOException e) {
-      throw new CarbonDataWriterException(e.getMessage());
-    }
-    holder.setHolderSize(calculateSize(holder, dimensionDataChunk2, measureDataChunk2));
-    return holder;
-  }
-
-  private int calculateSize(NodeHolder holder, List<byte[]> dimensionDataChunk2,
-      List<byte[]> measureDataChunk2) {
-    int size = 0;
-    // add row id index length
-    for (int i = 0; i < holder.getKeyBlockIndexLength().length; i++) {
-      if (!holder.getIsSortedKeyBlock()[i]) {
-        size += holder.getKeyBlockIndexLength()[i];
-      }
-    }
-    // add rle index length
-    for (int i = 0; i < holder.getDataIndexMapLength().length; i++) {
-      if (holder.getRleEncodingForDictDim()[i]) {
-        size += holder.getDataIndexMapLength()[i];
-      }
-    }
-    for (int i = 0; i < dimensionDataChunk2.size(); i++) {
-      size += dimensionDataChunk2.get(i).length;
-    }
-    for (int i = 0; i < measureDataChunk2.size(); i++) {
-      size += measureDataChunk2.get(i).length;
-    }
-    size += holder.getTotalDimensionArrayLength() + holder.getTotalMeasureArrayLength();
-    return size;
-  }
-
-  /**
-   * Below method will be used to convert short array to byte array
-   *
-   * @param data in short data
-   * @return byte array
-   */
-  private byte[] getByteArray(short[] data) {
-    ByteBuffer buffer = ByteBuffer.allocate(data.length * 2);
-    for (int i = 0; i < data.length; i++) {
-      buffer.putShort(data[i]);
-    }
-    buffer.flip();
-    return buffer.array();
-  }
-
   @Override protected void writeBlockletInfoToFile(FileChannel channel, String filePath)
       throws CarbonDataWriterException {
     try {
@@ -277,65 +100,65 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter<short[]>
   }
 
   /**
-   * Below method will be used to write blocklet data to file
+   * Below method will be used to write the data of one table page
    */
-  @Override public void writeBlockletData(NodeHolder holder) throws CarbonDataWriterException {
-    // check the number of pages present in data holder, if pages is exceeding threshold
-    // it will write the pages to file
+  @Override public void writeTablePage(EncodedTablePage encodedTablePage)
+      throws CarbonDataWriterException {
     // condition for writing all the pages
-    if (!holder.isWriteAll()) {
+    if (!encodedTablePage.isLastPage()) {
       boolean isAdded = false;
-      // check if size more than blocklet size then write the page
-      if (dataWriterHolder.getSize() + holder.getHolderSize() >= blockletSize) {
+      // check if size is more than blocklet size, then write the page to file
+      if (dataWriterHolder.getSize() + encodedTablePage.getEncodedSize() >= blockletSize) {
         // if one page size is more than blocklet size
-        if (dataWriterHolder.getNodeHolder().size() == 0) {
+        if (dataWriterHolder.getEncodedTablePages().size() == 0) {
           isAdded = true;
-          dataWriterHolder.addNodeHolder(holder);
+          dataWriterHolder.addPage(encodedTablePage);
         }
 
         LOGGER.info("Number of Pages for blocklet is: " + dataWriterHolder.getNumberOfPagesAdded()
             + " :Rows Added: " + dataWriterHolder.getTotalRows());
         // write the data
-        writeDataToFile(fileChannel);
+        writeBlockletToFile();
       }
       if (!isAdded) {
-        dataWriterHolder.addNodeHolder(holder);
+        dataWriterHolder.addPage(encodedTablePage);
       }
     } else {
       //for last blocklet check if the last page will exceed the blocklet size then write
       // existing pages and then last page
-      if (holder.getEntryCount() > 0) {
-        dataWriterHolder.addNodeHolder(holder);
+      if (encodedTablePage.getPageSize() > 0) {
+        dataWriterHolder.addPage(encodedTablePage);
       }
       if (dataWriterHolder.getNumberOfPagesAdded() > 0) {
         LOGGER.info("Number of Pages for blocklet is: " + dataWriterHolder.getNumberOfPagesAdded()
             + " :Rows Added: " + dataWriterHolder.getTotalRows());
-        writeDataToFile(fileChannel);
+        writeBlockletToFile();
       }
     }
   }
 
-  private void writeDataToFile(FileChannel channel) {
-    // get the list of node holder list
-    List<NodeHolder> nodeHolderList = dataWriterHolder.getNodeHolder();
+  /**
+   * Write the data of one blocklet to file
+   */
+  private void writeBlockletToFile() {
+    // get the list of all encoded table page
+    List<EncodedTablePage> encodedTablePageList = dataWriterHolder.getEncodedTablePages();
+    int numDimensions = encodedTablePageList.get(0).getNumDimensions();
+    int numMeasures = encodedTablePageList.get(0).getNumMeasures();
     long blockletDataSize = 0;
     // get data chunks for all the column
-    byte[][] dataChunkBytes =
-        new byte[nodeHolderList.get(0).getKeyArray().length + nodeHolderList.get(0)
-            .getDataArray().length][];
-    int measureStartIndex = nodeHolderList.get(0).getKeyArray().length;
+    byte[][] dataChunkBytes = new byte[numDimensions + numMeasures][];
+    int measureStartIndex = numDimensions;
     // calculate the size of data chunks
     try {
-      for (int i = 0; i < nodeHolderList.get(0).getKeyArray().length; i++) {
+      for (int i = 0; i < numDimensions; i++) {
         dataChunkBytes[i] = CarbonUtil.getByteArray(
-            CarbonMetadataUtil.getDataChunk3(nodeHolderList, thriftColumnSchemaList,
-                dataWriterVo.getSegmentProperties(), i, true));
+            CarbonMetadataUtil.getDimensionDataChunk3(encodedTablePageList, i));
         blockletDataSize += dataChunkBytes[i].length;
       }
-      for (int i = 0; i < nodeHolderList.get(0).getDataArray().length; i++) {
-        dataChunkBytes[measureStartIndex] = CarbonUtil.getByteArray(CarbonMetadataUtil
-            .getDataChunk3(nodeHolderList, thriftColumnSchemaList,
-                dataWriterVo.getSegmentProperties(), i, false));
+      for (int i = 0; i < numMeasures; i++) {
+        dataChunkBytes[measureStartIndex] = CarbonUtil.getByteArray(
+            CarbonMetadataUtil.getMeasureDataChunk3(encodedTablePageList, i));
         blockletDataSize += dataChunkBytes[measureStartIndex].length;
         measureStartIndex++;
       }
@@ -346,117 +169,96 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter<short[]>
     blockletDataSize += dataWriterHolder.getSize();
     // to check if data size will exceed the block size then create a new file
     updateBlockletFileChannel(blockletDataSize);
+
     // write data to file
-    writeDataToFile(fileChannel, dataChunkBytes);
+    try {
+      if (fileChannel.size() == 0) {
+        // write the header if file is empty
+        writeHeaderToFile(fileChannel);
+      }
+      writeBlockletToFile(fileChannel, dataChunkBytes);
+    } catch (IOException e) {
+      throw new CarbonDataWriterException("Problem when writing file", e);
+    }
     // clear the data holder
     dataWriterHolder.clear();
   }
 
   /**
-   * Below method will be used to write data in carbon data file
-   * Data Format
+   * write file header
+   */
+  private void writeHeaderToFile(FileChannel channel) throws IOException {
+    byte[] fileHeader = CarbonUtil.getByteArray(
+        CarbonMetadataUtil.getFileHeader(
+            true, thriftColumnSchemaList, dataWriterVo.getSchemaUpdatedTimeStamp()));
+    ByteBuffer buffer = ByteBuffer.wrap(fileHeader);
+    channel.write(buffer);
+  }
+
+  /**
+   * Write the data of one blocklet into the file
+   * File format:
    * <Column1 Data ChunkV3><Column1<Page1><Page2><Page3><Page4>>
    * <Column2 Data ChunkV3><Column2<Page1><Page2><Page3><Page4>>
    * <Column3 Data ChunkV3><Column3<Page1><Page2><Page3><Page4>>
    * <Column4 Data ChunkV3><Column4<Page1><Page2><Page3><Page4>>
-   * Each page will contain column data, Inverted index and rle index
-   *
-   * @param channel
-   * @param dataChunkBytes
    */
-  private void writeDataToFile(FileChannel channel, byte[][] dataChunkBytes) {
-    long offset = 0;
-    // write the header
-    try {
-      if (fileChannel.size() == 0) {
-        // below code is to write the file header
-        byte[] fileHeader = CarbonUtil.getByteArray(CarbonMetadataUtil
-            .getFileHeader(true, thriftColumnSchemaList, dataWriterVo.getSchemaUpdatedTimeStamp()));
-        ByteBuffer buffer = ByteBuffer.wrap(fileHeader);
-        fileChannel.write(buffer);
-      }
-      offset = channel.size();
-    } catch (IOException e) {
-      throw new CarbonDataWriterException("Problem while getting the file channel size");
-    }
+  private void writeBlockletToFile(FileChannel channel, byte[][] dataChunkBytes)
+      throws IOException {
+    long offset = channel.size();
     // to maintain the offset of each data chunk in blocklet
     List<Long> currentDataChunksOffset = new ArrayList<>();
     // to maintain the length of each data chunk in blocklet
     List<Integer> currentDataChunksLength = new ArrayList<>();
-    // get the node holder list
-    List<NodeHolder> nodeHolderList = dataWriterHolder.getNodeHolder();
-    int numberOfDimension = nodeHolderList.get(0).getKeyArray().length;
-    int numberOfMeasures = nodeHolderList.get(0).getDataArray().length;
-    NodeHolder nodeHolder = null;
+    List<EncodedTablePage> encodedTablePages = dataWriterHolder.getEncodedTablePages();
+    int numberOfDimension = encodedTablePages.get(0).getNumDimensions();
+    int numberOfMeasures = encodedTablePages.get(0).getNumMeasures();
     ByteBuffer buffer = null;
-    int bufferSize = 0;
     long dimensionOffset = 0;
     long measureOffset = 0;
     int numberOfRows = 0;
-    long totalSize = 0;
     // calculate the number of rows in each blocklet
-    for (int j = 0; j < nodeHolderList.size(); j++) {
-      numberOfRows += nodeHolderList.get(j).getEntryCount();
-      totalSize += nodeHolderList.get(j).getHolderSize();
+    for (EncodedTablePage encodedTablePage : encodedTablePages) {
+      numberOfRows += encodedTablePage.getPageSize();
     }
-    try {
-      for (int i = 0; i < numberOfDimension; i++) {
-        currentDataChunksOffset.add(offset);
-        currentDataChunksLength.add(dataChunkBytes[i].length);
-        buffer = ByteBuffer.wrap(dataChunkBytes[i]);
-        fileChannel.write(buffer);
-        offset += dataChunkBytes[i].length;
-        for (int j = 0; j < nodeHolderList.size(); j++) {
-          nodeHolder = nodeHolderList.get(j);
-          bufferSize = nodeHolder.getKeyLengths()[i] + (!nodeHolder.getIsSortedKeyBlock()[i] ?
-              nodeHolder.getKeyBlockIndexLength()[i] :
-              0) + (dataWriterVo.getRleEncodingForDictDim()[i] ?
-              nodeHolder.getCompressedDataIndex()[i].length :
-              0);
-          buffer = ByteBuffer.allocate(bufferSize);
-          buffer.put(nodeHolder.getKeyArray()[i]);
-          if (!nodeHolder.getIsSortedKeyBlock()[i]) {
-            buffer.putInt(nodeHolder.getCompressedIndex()[i].length);
-            buffer.put(nodeHolder.getCompressedIndex()[i]);
-            if (nodeHolder.getCompressedIndexMap()[i].length > 0) {
-              buffer.put(nodeHolder.getCompressedIndexMap()[i]);
-            }
-          }
-          if (nodeHolder.getRleEncodingForDictDim()[i]) {
-            buffer.put(nodeHolder.getCompressedDataIndex()[i]);
-          }
-          buffer.flip();
-          fileChannel.write(buffer);
-          offset += bufferSize;
-        }
+    for (int i = 0; i < numberOfDimension; i++) {
+      currentDataChunksOffset.add(offset);
+      currentDataChunksLength.add(dataChunkBytes[i].length);
+      buffer = ByteBuffer.wrap(dataChunkBytes[i]);
+      channel.write(buffer);
+      offset += dataChunkBytes[i].length;
+      for (EncodedTablePage encodedTablePage : encodedTablePages) {
+        EncodedDimensionPage dimension = encodedTablePage.getDimension(i);
+        int bufferSize = dimension.getSerializedSize();
+        buffer = dimension.serialize();
+        channel.write(buffer);
+        offset += bufferSize;
       }
-      dimensionOffset = offset;
-      int dataChunkStartIndex = nodeHolderList.get(0).getKeyArray().length;
-      for (int i = 0; i < numberOfMeasures; i++) {
-        nodeHolderList = dataWriterHolder.getNodeHolder();
-        currentDataChunksOffset.add(offset);
-        currentDataChunksLength.add(dataChunkBytes[dataChunkStartIndex].length);
-        buffer = ByteBuffer.wrap(dataChunkBytes[dataChunkStartIndex]);
-        fileChannel.write(buffer);
-        offset += dataChunkBytes[dataChunkStartIndex].length;
-        dataChunkStartIndex++;
-        for (int j = 0; j < nodeHolderList.size(); j++) {
-          nodeHolder = nodeHolderList.get(j);
-          bufferSize = nodeHolder.getDataArray()[i].length;
-          buffer = ByteBuffer.wrap(nodeHolder.getDataArray()[i]);
-          fileChannel.write(buffer);
-          offset += bufferSize;
-        }
+    }
+    dimensionOffset = offset;
+    int dataChunkStartIndex = encodedTablePages.get(0).getNumDimensions();
+    for (int i = 0; i < numberOfMeasures; i++) {
+      currentDataChunksOffset.add(offset);
+      currentDataChunksLength.add(dataChunkBytes[dataChunkStartIndex].length);
+      buffer = ByteBuffer.wrap(dataChunkBytes[dataChunkStartIndex]);
+      channel.write(buffer);
+      offset += dataChunkBytes[dataChunkStartIndex].length;
+      dataChunkStartIndex++;
+      for (EncodedTablePage encodedTablePage : encodedTablePages) {
+        EncodedMeasurePage measure = encodedTablePage.getMeasure(i);
+        int bufferSize = measure.getSerializedSize();
+        buffer = measure.serialize();
+        channel.write(buffer);
+        offset += bufferSize;
       }
-      measureOffset = offset;
-    } catch (IOException e) {
-      throw new CarbonDataWriterException("Problem while writing the data", e);
     }
-    blockletIndex.add(CarbonMetadataUtil
-        .getBlockletIndex(nodeHolderList, dataWriterVo.getSegmentProperties().getMeasures()));
+    measureOffset = offset;
+    blockletIndex.add(
+        CarbonMetadataUtil.getBlockletIndex(
+            encodedTablePages, dataWriterVo.getSegmentProperties().getMeasures()));
     BlockletInfo3 blockletInfo3 =
         new BlockletInfo3(numberOfRows, currentDataChunksOffset, currentDataChunksLength,
-            dimensionOffset, measureOffset, dataWriterHolder.getNodeHolder().size());
+            dimensionOffset, measureOffset, dataWriterHolder.getEncodedTablePages().size());
     blockletMetadata.add(blockletInfo3);
   }
 
@@ -538,7 +340,7 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter<short[]>
     closeExecutorService();
   }
 
-  @Override public void writeBlockletInfoToFile() throws CarbonDataWriterException {
+  @Override public void writeFooterToFile() throws CarbonDataWriterException {
     if (this.blockletMetadata.size() > 0) {
       writeBlockletInfoToFile(fileChannel, carbonDataFileTempPath);
     }
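
The V3 writeBlockletToFile above serializes the blocklet column-major: for
each column it writes one DataChunk3 header, then that column's page from
every EncodedTablePage accumulated in the blocklet. A condensed sketch of the
dimension half of that loop, using only types and methods that appear in the
patch:

// For each dimension: DataChunk3 header, then that column's pages in order.
for (int i = 0; i < numberOfDimension; i++) {
  channel.write(ByteBuffer.wrap(dataChunkBytes[i]));   // DataChunk3 for column i
  offset += dataChunkBytes[i].length;
  for (EncodedTablePage page : encodedTablePages) {
    EncodedDimensionPage dimension = page.getDimension(i);
    channel.write(dimension.serialize());              // page data plus its indexes
    offset += dimension.getSerializedSize();
  }
}
// Measures follow the same pattern via page.getMeasure(i) (EncodedMeasurePage).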

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/DataWriterHolder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/DataWriterHolder.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/DataWriterHolder.java
index a98f388..246fa86 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/DataWriterHolder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/DataWriterHolder.java
@@ -19,24 +19,24 @@ package org.apache.carbondata.processing.store.writer.v3;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.carbondata.core.util.NodeHolder;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
 
 public class DataWriterHolder {
-  private List<NodeHolder> nodeHolder;
+  private List<EncodedTablePage> encodedTablePage;
   private long currentSize;
 
   public DataWriterHolder() {
-    this.nodeHolder = new ArrayList<NodeHolder>();
+    this.encodedTablePage = new ArrayList<EncodedTablePage>();
   }
 
   public void clear() {
-    nodeHolder.clear();
+    encodedTablePage.clear();
     currentSize = 0;
   }
 
-  public void addNodeHolder(NodeHolder holder) {
-    this.nodeHolder.add(holder);
-    currentSize += holder.getHolderSize();
+  public void addPage(EncodedTablePage encodedTablePage) {
+    this.encodedTablePage.add(encodedTablePage);
+    currentSize += encodedTablePage.getEncodedSize();
   }
 
   public long getSize() {
@@ -45,18 +45,18 @@ public class DataWriterHolder {
   }
 
   public int getNumberOfPagesAdded() {
-    return nodeHolder.size();
+    return encodedTablePage.size();
   }
 
   public int getTotalRows() {
     int rows = 0;
-    for (NodeHolder nh : nodeHolder) {
-      rows += nh.getEntryCount();
+    for (EncodedTablePage nh : encodedTablePage) {
+      rows += nh.getPageSize();
     }
     return rows;
   }
 
-  public List<NodeHolder> getNodeHolder() {
-    return nodeHolder;
+  public List<EncodedTablePage> getEncodedTablePages() {
+    return encodedTablePage;
   }
 }
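
A short usage sketch of the renamed holder API (the encodedTablePage instance
is assumed to come from the page encoder; everything else is the class's own
API as shown above):

DataWriterHolder holder = new DataWriterHolder();
holder.addPage(encodedTablePage);          // accumulates getEncodedSize() into currentSize
long buffered = holder.getSize();          // bytes buffered so far
int pages = holder.getNumberOfPagesAdded();
int rows = holder.getTotalRows();          // sums getPageSize() over all pages
holder.clear();                            // reset once the blocklet is flushed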

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/util/NonDictionaryUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/NonDictionaryUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/NonDictionaryUtil.java
deleted file mode 100644
index c634e7c..0000000
--- a/processing/src/main/java/org/apache/carbondata/processing/util/NonDictionaryUtil.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.processing.util;
-
-import java.nio.ByteBuffer;
-
-import org.apache.carbondata.core.datastore.row.WriteStepRowUtil;
-
-/**
- * This is the utility class for No Dictionary changes.
- */
-public class NonDictionaryUtil {
-
-  /**
-   * This method will form one single byte [] for all the high card dims.
-   * For example if you need to pack 2 columns c1 and c2 , it stores in following way
-   *  <total_len(short)><offsetLen(short)><offsetLen+c1_len(short)><c1(byte[])><c2(byte[])>
-   * @param byteBufferArr
-   * @return
-   */
-  public static byte[] packByteBufferIntoSingleByteArray(byte[][] byteBufferArr) {
-    // for empty array means there is no data to remove dictionary.
-    if (null == byteBufferArr || byteBufferArr.length == 0) {
-      return null;
-    }
-    int noOfCol = byteBufferArr.length;
-    short toDetermineLengthOfByteArr = 2;
-    short offsetLen = (short) (noOfCol * 2 + toDetermineLengthOfByteArr);
-    int totalBytes = calculateTotalBytes(byteBufferArr) + offsetLen;
-
-    ByteBuffer buffer = ByteBuffer.allocate(totalBytes);
-
-    // write the length of the byte [] as first short
-    buffer.putShort((short) (totalBytes - toDetermineLengthOfByteArr));
-    // writing the offset of the first element.
-    buffer.putShort(offsetLen);
-
-    // prepare index for byte []
-    for (int index = 0; index < byteBufferArr.length - 1; index++) {
-      int noOfBytes = byteBufferArr[index].length;
-
-      buffer.putShort((short) (offsetLen + noOfBytes));
-      offsetLen += noOfBytes;
-    }
-
-    // put actual data.
-    for (int index = 0; index < byteBufferArr.length; index++) {
-      buffer.put(byteBufferArr[index]);
-    }
-    buffer.rewind();
-    return buffer.array();
-
-  }
-
-  /**
-   * To calculate the total bytes in byte Buffer[].
-   *
-   * @param byteBufferArr
-   * @return
-   */
-  private static int calculateTotalBytes(byte[][] byteBufferArr) {
-    int total = 0;
-    for (int index = 0; index < byteBufferArr.length; index++) {
-      total += byteBufferArr[index].length;
-    }
-    return total;
-  }
-
-  /**
-   * Method to get the required Dimension from obj []
-   *
-   * @param index
-   * @param row
-   * @return
-   */
-  public static Integer getDimension(int index, Object[] row) {
-
-    Integer[] dimensions = (Integer[]) row[WriteStepRowUtil.DICTIONARY_DIMENSION];
-
-    return dimensions[index];
-
-  }
-
-  /**
-   * Method to get the required measure from obj []
-   *
-   * @param index
-   * @param row
-   * @return
-   */
-  public static Object getMeasure(int index, Object[] row) {
-    Object[] measures = (Object[]) row[WriteStepRowUtil.MEASURE];
-    return measures[index];
-  }
-
-  public static byte[] getByteArrayForNoDictionaryCols(Object[] row) {
-
-    return (byte[]) row[WriteStepRowUtil.NO_DICTIONARY_AND_COMPLEX];
-  }
-
-  public static void prepareOutObj(Object[] out, int[] dimArray, byte[][] byteBufferArr,
-      Object[] measureArray) {
-
-    out[WriteStepRowUtil.DICTIONARY_DIMENSION] = dimArray;
-    out[WriteStepRowUtil.NO_DICTIONARY_AND_COMPLEX] = byteBufferArr;
-    out[WriteStepRowUtil.MEASURE] = measureArray;
-
-  }
-
-  /**
-   * This method will extract the single dimension from the complete high card dims byte[].
-   * The format of the byte[] will be: TotalLength, CompleteStartOffsets, Data
-   *
-   * @param highCardArr
-   * @param index
-   * @param highCardinalityCount
-   * @param outBuffer
-   */
-  public static void extractSingleHighCardDims(byte[] highCardArr, int index,
-      int highCardinalityCount, ByteBuffer outBuffer) {
-    ByteBuffer buff = null;
-    short secIndex = 0;
-    short firstIndex = 0;
-    int length;
-    // if the requested index is a last one then we need to calculate length
-    // based on byte[] length.
-    if (index == highCardinalityCount - 1) {
-      // need to read 2 bytes(1 short) to determine starting offset and
-      // length can be calculated by array length.
-      buff = ByteBuffer.wrap(highCardArr, (index * 2) + 2, 2);
-    } else {
-      // need to read 4 bytes(2 short) to determine starting offset and
-      // length.
-      buff = ByteBuffer.wrap(highCardArr, (index * 2) + 2, 4);
-    }
-
-    firstIndex = buff.getShort();
-    // if it is a last dimension in high card then this will be last
-    // offset.so calculate length from total length
-    if (index == highCardinalityCount - 1) {
-      secIndex = (short) highCardArr.length;
-    } else {
-      secIndex = buff.getShort();
-    }
-
-    length = secIndex - firstIndex;
-
-    outBuffer.position(firstIndex);
-    outBuffer.limit(outBuffer.position() + length);
-
-  }
-}
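
For reference, the packing produced by the deleted
packByteBufferIntoSingleByteArray, worked through for two columns where c1 is
3 bytes and c2 is 4 bytes: the offsets area is 2 (total-length short) plus
2*2 (one offset short per column) = 6 bytes, so totalBytes = 6 + 7 = 13 and
the stream is <11(short)><6(short)><9(short)><c1><c2>: 11 is totalBytes minus
the leading length short, 6 is the offset of c1's first byte, and 9 (that is,
6 + 3) is the offset of c2.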

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java
----------------------------------------------------------------------
diff --git a/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java b/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java
index 4dec81d..aad0d3f 100644
--- a/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java
+++ b/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java
@@ -165,7 +165,7 @@ public class StoreCreator {
       loadModel.setFactTimeStamp(System.currentTimeMillis());
       loadModel.setMaxColumns("10");
 
-      executeGraph(loadModel, absoluteTableIdentifier.getStorePath());
+      loadData(loadModel, absoluteTableIdentifier.getStorePath());
 
     } catch (Exception e) {
       e.printStackTrace();
@@ -354,7 +354,7 @@ public class StoreCreator {
    * @param storeLocation
    * @throws Exception
    */
-  public static void executeGraph(CarbonLoadModel loadModel, String storeLocation)
+  public static void loadData(CarbonLoadModel loadModel, String storeLocation)
       throws Exception {
     new File(storeLocation).mkdirs();
     String outPutLoc = storeLocation + "/etl";


[7/7] carbondata git commit: Resolve rebase conflicts when rebasing branch encoding_override onto master

Posted by ja...@apache.org.
Resolve rebase conflicts when rebasing branch encoding_override onto master


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/74226907
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/74226907
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/74226907

Branch: refs/heads/master
Commit: 74226907990cdee41a6ccbd69e2a813077792f89
Parents: a5af0ff
Author: Raghunandan S <ca...@gmail.com>
Authored: Wed Jul 26 21:59:05 2017 +0800
Committer: Raghunandan S <ca...@gmail.com>
Committed: Fri Jul 28 01:24:38 2017 +0800

----------------------------------------------------------------------
 .../carbondata/core/datastore/TableSpec.java    | 38 ++++++++++-------
 .../AbstractMeasureChunkReaderV2V3Format.java   |  1 -
 ...CompressedMeasureChunkFileBasedReaderV1.java |  2 +-
 ...CompressedMeasureChunkFileBasedReaderV2.java | 18 ++++++--
 ...CompressedMeasureChunkFileBasedReaderV3.java | 16 +++++--
 .../core/datastore/page/ColumnPage.java         |  5 +--
 .../page/encoding/AdaptiveIntegralCodec.java    |  3 +-
 .../page/encoding/DeltaIntegralCodec.java       |  3 +-
 .../page/encoding/EncodingStrategy.java         |  2 +-
 .../core/datastore/page/key/TablePageKey.java   |  4 +-
 .../statistics/PrimitivePageStatsCollector.java | 44 +++++++++++++++++---
 .../page/statistics/SimpleStatsResult.java      |  4 ++
 .../statistics/VarLengthPageStatsCollector.java |  8 ++++
 .../core/metadata/ColumnPageCodecMeta.java      | 20 +++++++--
 .../core/metadata/ValueEncoderMeta.java         | 20 +++++++++
 .../apache/carbondata/core/util/CarbonUtil.java |  1 +
 .../newflow/sort/SortStepRowUtil.java           |  4 --
 .../sortdata/SortTempFileChunkHolder.java       |  1 +
 .../carbondata/processing/store/TablePage.java  | 12 +++---
 .../store/writer/AbstractFactDataWriter.java    | 12 +++---
 20 files changed, 162 insertions(+), 56 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java b/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
index f1d3546..818f46e 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
@@ -75,7 +75,8 @@ public class TableSpec {
   private void addMeasures(List<CarbonMeasure> measures) {
     for (int i = 0; i < measures.size(); i++) {
       CarbonMeasure measure = measures.get(i);
-      measureSpec[i] = new MeasureSpec(measure.getColName(), measure.getDataType());
+      measureSpec[i] = new MeasureSpec(measure.getColName(), measure.getDataType(), measure
+          .getScale(), measure.getPrecision());
     }
   }
 
@@ -95,20 +96,14 @@ public class TableSpec {
     return dimensionSpec.length;
   }
 
-    public int getScale(int index) {
-      assert (index >= 0 && index < precision.length);
-      return scale[index];
-    }
+  /**
+   * return number of measures
+   */
+  public int getNumMeasures() {
+    return measureSpec.length;
+  }
 
-    public int getPrecision(int index) {
-      assert (index >= 0 && index < precision.length);
-      return precision[index];
-    }/**
-     * return number of measures
-     */
-    public int getNumMeasures() {
-      return measureSpec.length;
-    }public class ColumnSpec {
+  public class ColumnSpec {
     // field name of this column
     private String fieldName;
 
@@ -162,8 +157,21 @@ public class TableSpec {
 
   public class MeasureSpec extends ColumnSpec {
 
-    MeasureSpec(String fieldName, DataType dataType) {
+    private int scale;
+    private int precision;
+
+    MeasureSpec(String fieldName, DataType dataType, int scale, int precision) {
       super(fieldName, dataType);
+      this.scale = scale;
+      this.precision = precision;
+    }
+
+    public int getScale() {
+      return scale;
+    }
+
+    public int getPrecision() {
+      return precision;
     }
   }
 }
\ No newline at end of file
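
The conflict resolution above moves scale and precision out of TableSpec's parallel arrays and into MeasureSpec itself, so a decimal measure's metadata travels with its spec. A minimal standalone sketch of that shape (the real MeasureSpec is an inner class with a package-private constructor; MeasureSpecSketch below is a hypothetical stand-in):

    // Hypothetical stand-in for TableSpec.MeasureSpec: scale and precision
    // live on the per-column spec rather than in arrays on the parent.
    class MeasureSpecSketch {
      private final String fieldName;
      private final int scale;
      private final int precision;

      MeasureSpecSketch(String fieldName, int scale, int precision) {
        this.fieldName = fieldName;
        this.scale = scale;
        this.precision = precision;
      }

      String getFieldName() { return fieldName; }
      int getScale() { return scale; }
      int getPrecision() { return precision; }
    }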

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
index dd61826..049aba9 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
@@ -17,7 +17,6 @@
 package org.apache.carbondata.core.datastore.chunk.reader.measure;
 
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.BitSet;
 import java.util.List;
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
index 6bf65da..7df18db 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
@@ -98,7 +98,7 @@ public class CompressedMeasureChunkFileBasedReaderV1 extends AbstractMeasureChun
     DataChunk dataChunk = measureColumnChunks.get(blockIndex);
     ValueEncoderMeta meta = dataChunk.getValueEncoderMeta().get(0);
 
-    ColumnPageCodec codec = strategy.newCodec(meta);
+    ColumnPageCodec codec = strategy.newCodec(meta, -1, -1);
     ColumnPage page = codec.decode(measureRawColumnChunk.getRawData().array(),
         measureRawColumnChunk.getOffSet(), dataChunk.getDataPageLength());
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
index 7511b6e..f2679ae 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
@@ -18,6 +18,7 @@ package org.apache.carbondata.core.datastore.chunk.reader.measure.v2;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.List;
 
 import org.apache.carbondata.core.datastore.FileHolder;
 import org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk;
@@ -130,12 +131,21 @@ public class CompressedMeasureChunkFileBasedReaderV2 extends AbstractMeasureChun
 
   protected ColumnPage decodeMeasure(MeasureRawColumnChunk measureRawColumnChunk,
       DataChunk2 measureColumnChunk, int copyPoint) throws MemoryException, IOException {
-    // for measure, it should have only one ValueEncoderMeta
-    assert (measureColumnChunk.getEncoder_meta().size() == 1);
-    byte[] encodedMeta = measureColumnChunk.getEncoder_meta().get(0).array();
+    assert (measureColumnChunk.getEncoder_meta().size() > 0);
+    List<ByteBuffer> encoder_meta = measureColumnChunk.getEncoder_meta();
+    byte[] encodedMeta = encoder_meta.get(0).array();
 
     ValueEncoderMeta meta = CarbonUtil.deserializeEncoderMetaV3(encodedMeta);
-    ColumnPageCodec codec = strategy.newCodec(meta);
+    int scale = -1;
+    int precision = -1;
+    if (encoder_meta.size() > 1) {
+      ByteBuffer decimalInfo = encoder_meta.get(1);
+      scale = decimalInfo.getInt();
+      precision = decimalInfo.getInt();
+    }
+
+
+    ColumnPageCodec codec = strategy.newCodec(meta, scale, precision);
     byte[] rawData = measureRawColumnChunk.getRawData().array();
     return codec.decode(rawData, copyPoint, measureColumnChunk.data_page_length);
   }
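
The V2 reader now treats the second entry of encoder_meta, when present, as a small buffer holding decimal scale and precision (two ints, scale first); absent that entry, both default to -1. A self-contained sketch of the convention as the reader consumes it (writeDecimalInfo is hypothetical, shown only to produce a matching buffer):

    import java.nio.ByteBuffer;

    public class DecimalInfoSketch {
      // Hypothetical writer side: pack scale then precision as two ints,
      // matching what decodeMeasure above reads back.
      static ByteBuffer writeDecimalInfo(int scale, int precision) {
        ByteBuffer buffer = ByteBuffer.allocate(8);
        buffer.putInt(scale);
        buffer.putInt(precision);
        buffer.flip();
        return buffer;
      }

      public static void main(String[] args) {
        ByteBuffer decimalInfo = writeDecimalInfo(2, 10);
        int scale = decimalInfo.getInt();      // 2
        int precision = decimalInfo.getInt();  // 10
        System.out.println("scale=" + scale + ", precision=" + precision);
      }
    }

The V3 reader below consumes the same optional entry before building its codec.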

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
index 1881791..354aa38 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
@@ -18,6 +18,7 @@ package org.apache.carbondata.core.datastore.chunk.reader.measure.v3;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.List;
 
 import org.apache.carbondata.core.datastore.FileHolder;
 import org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk;
@@ -226,13 +227,22 @@ public class CompressedMeasureChunkFileBasedReaderV3 extends AbstractMeasureChun
 
   protected ColumnPage decodeMeasure(MeasureRawColumnChunk measureRawColumnChunk,
       DataChunk2 measureColumnChunk, int copyPoint) throws MemoryException {
+    List<ByteBuffer> encoder_meta = measureColumnChunk.getEncoder_meta();
     // for measure, it should have only one ValueEncoderMeta
-    assert (measureColumnChunk.getEncoder_meta().size() == 1);
-    byte[] encodedMeta = measureColumnChunk.getEncoder_meta().get(0).array();
+    assert (encoder_meta.size() > 0);
+    byte[] encodedMeta = encoder_meta.get(0).array();
+
+    int scale = -1;
+    int precision = -1;
+    if (encoder_meta.size() > 1) {
+      ByteBuffer decimalInfo = encoder_meta.get(1);
+      scale = decimalInfo.getInt();
+      precision = decimalInfo.getInt();
+    }
 
     ColumnPageCodecMeta meta = new ColumnPageCodecMeta();
     meta.deserialize(encodedMeta);
-    ColumnPageCodec codec = strategy.newCodec(meta);
+    ColumnPageCodec codec = strategy.newCodec(meta, scale, precision);
     byte[] rawData = measureRawColumnChunk.getRawData().array();
     return codec.decode(rawData, copyPoint, measureColumnChunk.data_page_length);
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java
index 90300d9..245e95b 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java
@@ -59,8 +59,6 @@ public abstract class ColumnPage {
     this.pageSize = pageSize;
     this.scale = scale;
     this.precision = precision;
-    this.stats = new ColumnPageStatsVO(dataType);
-    this.nullBitSet = new BitSet(pageSize);
     if (dataType == DECIMAL) {
       decimalConverter = DecimalConverterFactory.INSTANCE.getDecimalConverter(precision, scale);
     }
@@ -184,7 +182,7 @@ public abstract class ColumnPage {
           instance = newDecimalPage(new byte[pageSize][], scale, precision);
           break;
         case BYTE_ARRAY:
-          instance = new SafeVarLengthColumnPage(dataType, pageSize);
+          instance = new SafeVarLengthColumnPage(dataType, pageSize, scale, precision);
           break;
         default:
           throw new RuntimeException("Unsupported data dataType: " + dataType);
@@ -328,6 +326,7 @@ public abstract class ColumnPage {
         break;
       case DECIMAL:
         putDecimal(rowId, (BigDecimal) value);
+        statsCollector.update(((BigDecimal) value).unscaledValue().longValue());
         break;
       case BYTE_ARRAY:
         putBytes(rowId, (byte[]) value);
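
With this change a decimal value updates the page statistics through its unscaled long: the BigDecimal's digits are taken as a plain long and fed to the same min/max machinery the integral types use. A minimal sketch of that idea; note that unscaledValue().longValue() silently truncates once precision exceeds the roughly 18 digits a long can hold, which callers have to guarantee elsewhere:

    import java.math.BigDecimal;

    public class DecimalStatsSketch {
      private long min = Long.MAX_VALUE;
      private long max = Long.MIN_VALUE;

      // Mirrors the DECIMAL branch above: track min/max of the unscaled value.
      void update(BigDecimal value) {
        long unscaled = value.unscaledValue().longValue();
        min = Math.min(min, unscaled);
        max = Math.max(max, unscaled);
      }

      public static void main(String[] args) {
        DecimalStatsSketch stats = new DecimalStatsSketch();
        stats.update(new BigDecimal("12.34"));   // unscaled 1234, scale 2
        stats.update(new BigDecimal("-0.05"));   // unscaled -5, scale 2
        System.out.println(stats.min + " .. " + stats.max);  // -5 .. 1234
      }
    }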

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegralCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegralCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegralCodec.java
index ed8d734..6480bcd 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegralCodec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegralCodec.java
@@ -65,7 +65,8 @@ class AdaptiveIntegralCodec extends AdaptiveCompressionCodec {
 
   @Override
   public ColumnPage decode(byte[] input, int offset, int length) throws MemoryException {
-    ColumnPage page = ColumnPage.decompress(compressor, targetDataType, input, offset, length);
+    ColumnPage page = ColumnPage.decompress(compressor, targetDataType, input, offset, length,
+        stats.getScale(), stats.getPrecision());
     return LazyColumnPage.newPage(page, codec);
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegralCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegralCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegralCodec.java
index 53a8295..ba61cd3 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegralCodec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegralCodec.java
@@ -87,7 +87,8 @@ public class DeltaIntegralCodec extends AdaptiveCompressionCodec {
 
   @Override
   public ColumnPage decode(byte[] input, int offset, int length) throws MemoryException {
-    ColumnPage page = ColumnPage.decompress(compressor, targetDataType, input, offset, length);
+    ColumnPage page = ColumnPage.decompress(compressor, targetDataType, input, offset, length,
+        stats.getScale(), stats.getPrecision());
     return LazyColumnPage.newPage(page, codec);
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java
index 29219ea..53c565d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java
@@ -76,7 +76,7 @@ public abstract class EncodingStrategy {
           throw new RuntimeException("unsupported data type: " + stats.getDataType());
       }
     } else {
-      SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(meta, scale, precision);
+      SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(meta);
       switch (meta.getType()) {
         case BYTE:
         case SHORT:

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/page/key/TablePageKey.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/key/TablePageKey.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/key/TablePageKey.java
index ef8307e..f8a10ca 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/key/TablePageKey.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/key/TablePageKey.java
@@ -68,12 +68,12 @@ public class TablePageKey {
       currentNoDictionaryKey = WriteStepRowUtil.getNoDictAndComplexDimension(row);
     }
     if (rowId == 0) {
-      startKey = WriteStepRowUtil.getMdk(row, model.getMDKeyGenerator());
+      startKey = WriteStepRowUtil.getMdk(row, mdkGenerator);
       noDictStartKey = currentNoDictionaryKey;
     }
     noDictEndKey = currentNoDictionaryKey;
     if (rowId == pageSize - 1) {
-      endKey = WriteStepRowUtil.getMdk(row, model.getMDKeyGenerator());
+      endKey = WriteStepRowUtil.getMdk(row, mdkGenerator);
       finalizeKeys();
     }
   }
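
The fix swaps model.getMDKeyGenerator() for the mdkGenerator field the key object already holds; the capture logic itself is the usual first-row/last-row pattern. Sketched in isolation (PageKeySketch is hypothetical; byte arrays stand in for generated MDKs):

    // Hypothetical reduction of TablePageKey.update: remember the key of the
    // first row as startKey and of the last row in the page as endKey.
    class PageKeySketch {
      private final int pageSize;
      private byte[] startKey;
      private byte[] endKey;

      PageKeySketch(int pageSize) { this.pageSize = pageSize; }

      void update(int rowId, byte[] mdk) {
        if (rowId == 0) {
          startKey = mdk;              // first row of the page
        }
        if (rowId == pageSize - 1) {
          endKey = mdk;                // last row: the page is complete
        }
      }
    }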

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java
index 73ada4b..af40f03 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java
@@ -32,6 +32,7 @@ public class PrimitivePageStatsCollector implements ColumnPageStatsCollector, Si
   private int minInt, maxInt;
   private long minLong, maxLong;
   private double minDouble, maxDouble;
+  private int scale, precision;
 
   // scale of the double value
   private int decimal;
@@ -40,17 +41,19 @@ public class PrimitivePageStatsCollector implements ColumnPageStatsCollector, Si
   private BitSet nullBitSet;
 
   // this is for encode flow
-  public static PrimitivePageStatsCollector newInstance(DataType dataType, int pageSize) {
+  public static PrimitivePageStatsCollector newInstance(DataType dataType, int pageSize, int
+      scale, int precision) {
     switch (dataType) {
       default:
-        return new PrimitivePageStatsCollector(dataType, pageSize);
+        return new PrimitivePageStatsCollector(dataType, pageSize, scale, precision);
     }
   }
 
   // this is for decode flow, we do not need to create nullBits, so passing 0 as pageSize
   public static PrimitivePageStatsCollector newInstance(ColumnPageCodecMeta meta) {
     PrimitivePageStatsCollector instance =
-        new PrimitivePageStatsCollector(meta.getSrcDataType(), 0);
+        new PrimitivePageStatsCollector(meta.getSrcDataType(), 0, meta.getScale(),
+            meta.getPrecision());
     // set min max from meta
     switch (meta.getSrcDataType()) {
       case BYTE:
@@ -74,13 +77,20 @@ public class PrimitivePageStatsCollector implements ColumnPageStatsCollector, Si
         instance.maxDouble = (double) meta.getMaxValue();
         instance.decimal = meta.getDecimal();
         break;
+      case DECIMAL:
+        instance.minLong = (long) meta.getMinValue();
+        instance.maxLong = (long) meta.getMaxValue();
+        instance.decimal = meta.getDecimal();
+        instance.scale = meta.getScale();
+        instance.precision = meta.getPrecision();
+        break;
     }
     return instance;
   }
 
   public static PrimitivePageStatsCollector newInstance(ValueEncoderMeta meta) {
     PrimitivePageStatsCollector instance =
-        new PrimitivePageStatsCollector(meta.getType(), 0);
+        new PrimitivePageStatsCollector(meta.getType(), 0, meta.getScale(), meta.getPrecision());
     // set min max from meta
     switch (meta.getType()) {
       case BYTE:
@@ -104,11 +114,18 @@ public class PrimitivePageStatsCollector implements ColumnPageStatsCollector, Si
         instance.maxDouble = (double) meta.getMaxValue();
         instance.decimal = meta.getDecimal();
         break;
+      case DECIMAL:
+        instance.minDouble = (double) meta.getMinValue();
+        instance.maxDouble = (double) meta.getMaxValue();
+        instance.decimal = meta.getDecimal();
+        instance.scale = meta.getScale();
+        instance.precision = meta.getPrecision();
+        break;
     }
     return instance;
   }
 
-  private PrimitivePageStatsCollector(DataType dataType, int pageSize) {
+  private PrimitivePageStatsCollector(DataType dataType, int pageSize, int scale, int precision) {
     this.dataType = dataType;
     this.nullBitSet = new BitSet(pageSize);
     switch (dataType) {
@@ -134,6 +151,11 @@ public class PrimitivePageStatsCollector implements ColumnPageStatsCollector, Si
         decimal = 0;
         break;
       case DECIMAL:
+        minLong = Long.MAX_VALUE;
+        maxLong = Long.MIN_VALUE;
+        decimal = scale;
+        this.scale = scale;
+        this.precision = precision;
     }
   }
 
@@ -255,6 +277,8 @@ public class PrimitivePageStatsCollector implements ColumnPageStatsCollector, Si
         return minLong;
       case DOUBLE:
         return minDouble;
+      case DECIMAL:
+        return minLong;
     }
     return null;
   }
@@ -272,6 +296,8 @@ public class PrimitivePageStatsCollector implements ColumnPageStatsCollector, Si
         return maxLong;
       case DOUBLE:
         return maxDouble;
+      case DECIMAL:
+        return maxLong;
     }
     return null;
   }
@@ -291,4 +317,12 @@ public class PrimitivePageStatsCollector implements ColumnPageStatsCollector, Si
     return dataType;
   }
 
+  @Override public int getScale() {
+    return scale;
+  }
+
+  @Override public int getPrecision() {
+    return precision;
+  }
+
 }
\ No newline at end of file
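
PrimitivePageStatsCollector now exposes one factory per flow: the encode-side newInstance takes pageSize plus scale/precision and collects stats row by row, while the decode-side overloads rehydrate min/max from persisted meta and pass pageSize 0 since no null bits are tracked. A reduced sketch of that split (StatsSketch is hypothetical):

    import java.util.BitSet;

    public class StatsSketch {
      private final BitSet nullBits;
      private long min = Long.MAX_VALUE;
      private long max = Long.MIN_VALUE;

      private StatsSketch(int pageSize) {
        this.nullBits = new BitSet(pageSize);
      }

      // Encode flow: stats are collected row by row while writing a page.
      static StatsSketch forEncode(int pageSize) {
        return new StatsSketch(pageSize);
      }

      // Decode flow: min/max come from persisted metadata; pageSize 0 means
      // no null-bit tracking is needed.
      static StatsSketch forDecode(long persistedMin, long persistedMax) {
        StatsSketch s = new StatsSketch(0);
        s.min = persistedMin;
        s.max = persistedMax;
        return s;
      }
    }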

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java
index 1db86ff..b40d023 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java
@@ -32,4 +32,8 @@ public interface SimpleStatsResult {
   int getDecimalPoint();
 
   DataType getDataType();
+
+  int getScale();
+
+  int getPrecision();
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/VarLengthPageStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/VarLengthPageStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/VarLengthPageStatsCollector.java
index e985f90..0fe5960 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/VarLengthPageStatsCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/VarLengthPageStatsCollector.java
@@ -102,6 +102,14 @@ public class VarLengthPageStatsCollector implements ColumnPageStatsCollector {
       @Override public DataType getDataType() {
         return null;
       }
+
+      @Override public int getScale() {
+        return 0;
+      }
+
+      @Override public int getPrecision() {
+        return 0;
+      }
     };
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/metadata/ColumnPageCodecMeta.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/ColumnPageCodecMeta.java b/core/src/main/java/org/apache/carbondata/core/metadata/ColumnPageCodecMeta.java
index 20a7568..6322670 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/ColumnPageCodecMeta.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/ColumnPageCodecMeta.java
@@ -57,6 +57,8 @@ public class ColumnPageCodecMeta extends ValueEncoderMeta implements Serializabl
     meta.setMaxValue(stats.getMax());
     meta.setMinValue(stats.getMin());
     meta.setDecimal(stats.getDecimalPoint());
+    meta.setScale(stats.getScale());
+    meta.setPrecision(stats.getPrecision());
     return meta;
   }
 
@@ -169,8 +171,17 @@ public class ColumnPageCodecMeta extends ValueEncoderMeta implements Serializabl
         buffer.putDouble((Double) 0d); // unique value is obsoleted, maintain for compatibility
         break;
       case DECIMAL:
-        buffer = ByteBuffer.allocate(CarbonCommonConstants.INT_SIZE_IN_BYTE + 3);
+        buffer = ByteBuffer.allocate(
+            (CarbonCommonConstants.LONG_SIZE_IN_BYTE * 3) + (CarbonCommonConstants
+                .INT_SIZE_IN_BYTE * 3)
+                + 3);
         buffer.putChar(getSrcDataTypeInChar());
+        buffer.putLong((Long) getMaxValue());
+        buffer.putLong((Long) getMinValue());
+        buffer.putLong((Long) 0L); // unique value is obsoleted, maintain for compatibility
+        buffer.putInt(getScale());
+        buffer.putInt(getPrecision());
+
         break;
     }
     buffer.putInt(getDecimal());
@@ -190,8 +201,11 @@ public class ColumnPageCodecMeta extends ValueEncoderMeta implements Serializabl
         buffer.getDouble(); // for non exist value which is obsoleted, it is backward compatibility;
         break;
       case BIG_DECIMAL_MEASURE:
-        this.setMaxValue(0.0);
-        this.setMinValue(0.0);
+        this.setMaxValue(buffer.getLong());
+        this.setMinValue(buffer.getLong());
+        buffer.getLong();
+        this.setScale(buffer.getInt());
+        this.setPrecision(buffer.getInt());
         break;
       case BYTE_VALUE_MEASURE:
         this.setMaxValue(buffer.get());
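
serialize() and deserialize() above have to agree on the DECIMAL layout byte for byte: a char type tag, then max, min, and an obsolete placeholder as longs, then scale and precision as ints. A standalone round-trip sketch of that layout (the 'b' tag and the sample values are illustrative, not taken from the patch):

    import java.nio.ByteBuffer;

    public class CodecMetaRoundTrip {
      public static void main(String[] args) {
        // Write side, mirroring the DECIMAL branch of serialize().
        ByteBuffer buffer = ByteBuffer.allocate(2 + 8 * 3 + 4 * 2);
        buffer.putChar('b');     // source data type tag (illustrative)
        buffer.putLong(1234L);   // max
        buffer.putLong(-5L);     // min
        buffer.putLong(0L);      // obsolete "unique value", kept for compatibility
        buffer.putInt(2);        // scale
        buffer.putInt(10);       // precision
        buffer.flip();

        // Read side, mirroring the BIG_DECIMAL_MEASURE branch of deserialize().
        char tag = buffer.getChar();
        long max = buffer.getLong();
        long min = buffer.getLong();
        buffer.getLong();        // skip the obsolete field
        int scale = buffer.getInt();
        int precision = buffer.getInt();
        System.out.println(tag + ": max=" + max + " min=" + min
            + " scale=" + scale + " precision=" + precision);
      }
    }

Keeping the obsolete long in both directions is what preserves compatibility with values written before this change.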

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/metadata/ValueEncoderMeta.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/ValueEncoderMeta.java b/core/src/main/java/org/apache/carbondata/core/metadata/ValueEncoderMeta.java
index 971359d..f1ebbd3 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/ValueEncoderMeta.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/ValueEncoderMeta.java
@@ -49,6 +49,10 @@ public class ValueEncoderMeta implements Serializable {
 
   private byte dataTypeSelected;
 
+  private int scale;
+
+  private int precision;
+
   public Object getMaxValue() {
     return maxValue;
   }
@@ -109,4 +113,20 @@ public class ValueEncoderMeta implements Serializable {
   public void setDataTypeSelected(byte dataTypeSelected) {
     this.dataTypeSelected = dataTypeSelected;
   }
+
+  public int getScale() {
+    return scale;
+  }
+
+  public void setScale(int scale) {
+    this.scale = scale;
+  }
+
+  public int getPrecision() {
+    return precision;
+  }
+
+  public void setPrecision(int precision) {
+    this.precision = precision;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index eff8f0d..7628415 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -53,6 +53,7 @@ import org.apache.carbondata.core.datastore.columnar.ColumnGroupModel;
 import org.apache.carbondata.core.datastore.columnar.UnBlockIndexer;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.indexstore.BlockletDetailInfo;
 import org.apache.carbondata.core.keygenerator.mdkey.NumberCompressor;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
 import org.apache.carbondata.core.metadata.ColumnarFormatVersion;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/SortStepRowUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/SortStepRowUtil.java b/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/SortStepRowUtil.java
index 1bcbe54..7857f4e 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/SortStepRowUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/SortStepRowUtil.java
@@ -17,10 +17,6 @@
 
 package org.apache.carbondata.processing.newflow.sort;
 
-import java.math.BigDecimal;
-
-import org.apache.carbondata.core.metadata.datatype.DataType;
-import org.apache.carbondata.core.util.DataTypeUtil;
 import org.apache.carbondata.core.util.NonDictionaryUtil;
 import org.apache.carbondata.processing.sortandgroupby.sortdata.SortParameters;
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortTempFileChunkHolder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortTempFileChunkHolder.java b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortTempFileChunkHolder.java
index b17c69a..9732e66 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortTempFileChunkHolder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortTempFileChunkHolder.java
@@ -35,6 +35,7 @@ import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.util.ByteUtil.UnsafeComparer;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.DataTypeUtil;
 import org.apache.carbondata.core.util.NonDictionaryUtil;
 import org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException;
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java b/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
index 9881e8e..03f3e5e 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
@@ -47,7 +47,6 @@ import org.apache.carbondata.core.memory.MemoryException;
 import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.core.util.DataTypeUtil;
 
 import org.apache.spark.sql.types.Decimal;
 
@@ -82,13 +81,13 @@ public class TablePage {
     int numDictDimension = model.getMDKeyGenerator().getDimCount();
     dictDimensionPages = new ColumnPage[numDictDimension];
     for (int i = 0; i < dictDimensionPages.length; i++) {
-      ColumnPage page = ColumnPage.newPage(DataType.BYTE_ARRAY, pageSize);
+      ColumnPage page = ColumnPage.newPage(DataType.BYTE_ARRAY, pageSize, -1, -1);
       page.setStatsCollector(VarLengthPageStatsCollector.newInstance());
       dictDimensionPages[i] = page;
     }
     noDictDimensionPages = new ColumnPage[model.getNoDictionaryCount()];
     for (int i = 0; i < noDictDimensionPages.length; i++) {
-      ColumnPage page = ColumnPage.newPage(DataType.BYTE_ARRAY, pageSize);
+      ColumnPage page = ColumnPage.newPage(DataType.BYTE_ARRAY, pageSize, -1, -1);
       page.setStatsCollector(VarLengthPageStatsCollector.newInstance());
       noDictDimensionPages[i] = page;
     }
@@ -100,11 +99,12 @@ public class TablePage {
     }
     measurePage = new ColumnPage[model.getMeasureCount()];
     DataType[] dataTypes = model.getMeasureDataType();
-    TableSpec.MeasureSpec measureSpec = model.getTableSpec().getMeasureSpec();
     for (int i = 0; i < measurePage.length; i++) {
+      TableSpec.MeasureSpec measureSpec = model.getTableSpec().getMeasureSpec(i);
       ColumnPage page = ColumnPage
-          .newPage(dataTypes[i], pageSize, measureSpec.getScale(i), measureSpec.getPrecision(i));
-      page.setStatsCollector(PrimitivePageStatsCollector.newInstance(dataTypes[i], pageSize));
+          .newPage(dataTypes[i], pageSize, measureSpec.getScale(), measureSpec.getPrecision());
+      page.setStatsCollector(PrimitivePageStatsCollector.newInstance(dataTypes[i], pageSize,
+          measureSpec.getScale(), measureSpec.getPrecision()));
       measurePage[i] = page;
     }
     boolean hasNoDictionary = noDictDimensionPages.length > 0;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74226907/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
index b83a82a..a34ed01 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
@@ -24,7 +24,7 @@ import java.io.FileFilter;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.nio.ByteBuffer;
+//import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -61,7 +61,6 @@ import org.apache.carbondata.core.util.CarbonMergerUtil;
 import org.apache.carbondata.core.util.CarbonMetadataUtil;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.NodeHolder;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 import org.apache.carbondata.core.writer.CarbonIndexFileWriter;
 import org.apache.carbondata.format.BlockIndex;
@@ -608,10 +607,11 @@ public abstract class AbstractFactDataWriter<T> implements CarbonFactDataWriter<
    * @return min max value without length
    */
   protected byte[] updateMinMaxForNoDictionary(byte[] valueWithLength) {
-    ByteBuffer buffer = ByteBuffer.wrap(valueWithLength);
-    byte[] actualValue = new byte[buffer.getShort()];
-    buffer.get(actualValue);
-    return actualValue;
+    return valueWithLength;
+//    ByteBuffer buffer = ByteBuffer.wrap(valueWithLength);
+//    byte[] actualValue = new byte[buffer.getShort()];
+//    buffer.get(actualValue);
+//    return actualValue;
   }
 
   /**
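
The rewritten updateMinMaxForNoDictionary returns the value with its 2-byte length prefix intact; the commented-out lines preserve the old behavior, which stripped the prefix before the bytes went into min/max. Both representations, side by side in a standalone sketch (stripLengthPrefix reproduces the commented logic):

    import java.nio.ByteBuffer;
    import java.util.Arrays;

    public class NoDictMinMaxSketch {
      // Old behavior (now commented out above): drop the 2-byte length prefix.
      static byte[] stripLengthPrefix(byte[] valueWithLength) {
        ByteBuffer buffer = ByteBuffer.wrap(valueWithLength);
        byte[] actualValue = new byte[buffer.getShort()];
        buffer.get(actualValue);
        return actualValue;
      }

      public static void main(String[] args) {
        // Big-endian short length (3), then the payload "abc".
        byte[] withLength = {0, 3, 'a', 'b', 'c'};
        System.out.println(Arrays.toString(stripLengthPrefix(withLength)));  // [97, 98, 99]
        // The new behavior simply returns withLength unchanged.
      }
    }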


[2/7] carbondata git commit: [CARBONDATA-1098] Change page statistics use exact type and use column page in writer

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/test/java/org/apache/carbondata/core/util/DataTypeUtilTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/util/DataTypeUtilTest.java b/core/src/test/java/org/apache/carbondata/core/util/DataTypeUtilTest.java
index 755308e..f5d7dc7 100644
--- a/core/src/test/java/org/apache/carbondata/core/util/DataTypeUtilTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/util/DataTypeUtilTest.java
@@ -50,14 +50,6 @@ public class DataTypeUtilTest {
 
   }
 
-  @Test public void testGetAggType() {
-    assertTrue(getAggType(DataType.DECIMAL) == 'b');
-    assertTrue(getAggType(DataType.INT) == 'd');
-    assertTrue(getAggType(DataType.LONG) == 'd');
-    assertTrue(getAggType(DataType.NULL) == 'n');
-
-  }
-
   @Test public void testBigDecimalToByte() {
     byte[] result = bigDecimalToByte(BigDecimal.ONE);
     assertTrue(result == result);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/test/java/org/apache/carbondata/core/writer/CarbonFooterWriterTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/writer/CarbonFooterWriterTest.java b/core/src/test/java/org/apache/carbondata/core/writer/CarbonFooterWriterTest.java
index 64651e5..5fc6df9 100644
--- a/core/src/test/java/org/apache/carbondata/core/writer/CarbonFooterWriterTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/writer/CarbonFooterWriterTest.java
@@ -24,21 +24,26 @@ import java.util.BitSet;
 import java.util.List;
 import java.util.UUID;
 
-import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.datastore.page.statistics.MeasurePageStatsVO;
-import org.apache.carbondata.core.metadata.ValueEncoderMeta;
-import org.apache.carbondata.core.metadata.datatype.DataType;
-import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.datastore.block.SegmentProperties;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedMeasurePage;
 import org.apache.carbondata.core.metadata.BlockletInfoColumnar;
+import org.apache.carbondata.core.metadata.CodecMetaFactory;
+import org.apache.carbondata.core.metadata.ColumnPageCodecMeta;
+import org.apache.carbondata.core.metadata.ValueEncoderMeta;
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.reader.CarbonFooterReader;
 import org.apache.carbondata.core.util.CarbonMetadataUtil;
 import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.format.ColumnSchema;
 
 import junit.framework.TestCase;
-import org.apache.carbondata.format.ColumnSchema;
+import mockit.Mock;
+import mockit.MockUp;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -87,9 +92,7 @@ public class CarbonFooterWriterTest extends TestCase{
     int[] colCardinality = CarbonUtil.getFormattedCardinality(cardinalities, wrapperColumnSchema);
     SegmentProperties segmentProperties = new SegmentProperties(wrapperColumnSchema, colCardinality);
 		writer.writeFooter(CarbonMetadataUtil.convertFileFooter(
-				infoColumnars,
-				6,
-				cardinalities,columnSchema, segmentProperties
+				infoColumnars, cardinalities,columnSchema, segmentProperties
 				), 0);
 
     CarbonFooterReader metaDataReader = new CarbonFooterReader(filePath, 0);
@@ -125,41 +128,7 @@ public class CarbonFooterWriterTest extends TestCase{
     return dimColumn;
  }
 
-  /**
-   * test writing fact metadata.
-   */
-  @Test public void testReadFactMetadata() throws IOException {
-    deleteFile();
-    createFile();
-    CarbonFooterWriter writer = new CarbonFooterWriter(filePath);
-    List<BlockletInfoColumnar> infoColumnars = getBlockletInfoColumnars();
-    int[] cardinalities = new int[] { 2, 4, 5, 7, 9, 10};
-    List<ColumnSchema> columnSchema = Arrays.asList(new ColumnSchema[]{getDimensionColumn("IMEI1"),
-						getDimensionColumn("IMEI2"),
-						getDimensionColumn("IMEI3"),
-						getDimensionColumn("IMEI4"),
-						getDimensionColumn("IMEI5"),
-						getDimensionColumn("IMEI6")});
-    List<org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema> wrapperColumnSchema = Arrays.asList(new org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema[]{getWrapperDimensionColumn("IMEI1"),
-    	getWrapperDimensionColumn("IMEI2"),
-    	getWrapperDimensionColumn("IMEI3"),
-    	getWrapperDimensionColumn("IMEI4"),
-    	getWrapperDimensionColumn("IMEI5"),
-    	getWrapperDimensionColumn("IMEI6")});
-    int[] colCardinality = CarbonUtil.getFormattedCardinality(cardinalities, wrapperColumnSchema);
-    SegmentProperties segmentProperties = new SegmentProperties(wrapperColumnSchema, cardinalities);
-    writer.writeFooter(CarbonMetadataUtil
-        .convertFileFooter(infoColumnars, 6, colCardinality,
-        		columnSchema,segmentProperties), 0);
-
-    CarbonFooterReader metaDataReader = new CarbonFooterReader(filePath, 0);
-    List<BlockletInfoColumnar> nodeInfoColumnars =
-        CarbonMetadataUtil.convertBlockletInfo(metaDataReader.readFooter());
-
-    assertTrue(nodeInfoColumnars.size() == infoColumnars.size());
-  }
-
-  private List<BlockletInfoColumnar> getBlockletInfoColumnars() {
+  private List<BlockletInfoColumnar> getBlockletInfoColumnars() throws IOException {
     BlockletInfoColumnar infoColumnar = new BlockletInfoColumnar();
     infoColumnar.setStartKey(new byte[] { 1, 2, 3 });
     infoColumnar.setEndKey(new byte[] { 8, 9, 10 });
@@ -179,27 +148,47 @@ public class CarbonFooterWriterTest extends TestCase{
     infoColumnar.setMeasureLength(new int[] { 6, 7 });
     infoColumnar.setMeasureOffset(new long[] { 33, 99 });
     infoColumnar.setAggKeyBlock(new boolean[] { true, true, true, true });
-    infoColumnar.setColGrpBlocks(new boolean[] { false, false, false, false });
     infoColumnar.setMeasureNullValueIndex(new BitSet[] {new BitSet(),new BitSet()});
+    infoColumnar.setEncodedTablePage(EncodedTablePage.newEmptyInstance());
+
+    final ValueEncoderMeta meta = CodecMetaFactory.createMeta();
+
+    new MockUp<ColumnPageCodecMeta>() {
+      @SuppressWarnings("unused") @Mock
+      public byte[] serialize() {
+        return new byte[]{1,2};
+      }
+      @SuppressWarnings("unused") @Mock
+      public byte[] getMaxAsBytes() {
+        return new byte[]{1,2};
+      }
+      @SuppressWarnings("unused") @Mock
+      public byte[] getMinAsBytes() {
+        return new byte[]{1,2};
+      }
+      @SuppressWarnings("unused") @Mock
+      public DataType getSrcDataType() {
+        return DataType.DOUBLE;
+      }
+
+    };
+
+    new MockUp<EncodedMeasurePage>() {
+      @SuppressWarnings("unused") @Mock
+      public ValueEncoderMeta getMetaData() {
+        return meta;
+      }
+    };
+
+    final EncodedMeasurePage measure = new EncodedMeasurePage(2, new byte[]{0,1}, meta,
+        new BitSet());
+    new MockUp<EncodedTablePage>() {
+      @SuppressWarnings("unused") @Mock
+      public EncodedMeasurePage getMeasure(int measureIndex) {
+        return measure;
+      }
+    };
 
-    ValueEncoderMeta[] metas = new ValueEncoderMeta[2];
-    metas[0] = new ValueEncoderMeta();
-    metas[0].setMinValue(0);
-    metas[0].setMaxValue(44d);
-    metas[0].setUniqueValue(0d);
-    metas[0].setDecimal(0);
-    metas[0].setType(CarbonCommonConstants.DOUBLE_MEASURE);
-    metas[0].setDataTypeSelected((byte)0);
-    metas[1] = new ValueEncoderMeta();
-    metas[1].setMinValue(0);
-    metas[1].setMaxValue(55d);
-    metas[1].setUniqueValue(0d);
-    metas[1].setDecimal(0);
-    metas[1].setType(CarbonCommonConstants.DOUBLE_MEASURE);
-    metas[1].setDataTypeSelected((byte)0);
-
-    MeasurePageStatsVO stats = MeasurePageStatsVO.build(metas);
-    infoColumnar.setStats(stats);
     List<BlockletInfoColumnar> infoColumnars = new ArrayList<BlockletInfoColumnar>();
     infoColumnars.add(infoColumnar);
     return infoColumnars;
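
Instead of hand-assembling ValueEncoderMeta values, the test now fakes the relevant classes with JMockit's MockUp, redefining just the methods the footer writer touches. The pattern in isolation, assuming JMockit is on the classpath with its agent active (Meta here is a hypothetical class, not the real ColumnPageCodecMeta):

    import mockit.Mock;
    import mockit.MockUp;

    public class MockUpSketch {
      static class Meta {
        byte[] serialize() { return new byte[0]; }
      }

      public static void main(String[] args) {
        // After this fake is applied, every Meta instance returns {1, 2}.
        new MockUp<Meta>() {
          @Mock
          byte[] serialize() { return new byte[] {1, 2}; }
        };
        System.out.println(new Meta().serialize().length);  // 2
      }
    }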

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/format/src/main/thrift/carbondata.thrift
----------------------------------------------------------------------
diff --git a/format/src/main/thrift/carbondata.thrift b/format/src/main/thrift/carbondata.thrift
index b4cbc4e..8acd0b1 100644
--- a/format/src/main/thrift/carbondata.thrift
+++ b/format/src/main/thrift/carbondata.thrift
@@ -119,7 +119,7 @@ struct DataChunk{
  * in Row Major format.
  *
  * For V3, one data chunk is one page data of 32K rows.
- * For V2 & V1, one data chunk is one blocklet data.
+ * For V2, one data chunk is one blocklet data.
  */
 struct DataChunk2{
     1: required ChunkCompressionMeta chunk_meta; // The metadata of a chunk

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java
index 2980ad3..3ca8cf1 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java
@@ -116,8 +116,8 @@ class CarbonHiveSerDe extends AbstractSerDe {
   @Override public Writable serialize(Object obj, ObjectInspector objectInspector)
       throws SerDeException {
     if (!objInspector.getCategory().equals(ObjectInspector.Category.STRUCT)) {
-      throw new SerDeException("Cannot serialize " + objInspector.getCategory()
-          + ". Can only serialize a struct");
+      throw new SerDeException("Cannot serializeStartKey " + objInspector.getCategory()
+          + ". Can only serializeStartKey a struct");
     }
     serializedSize += ((StructObjectInspector) objInspector).getAllStructFieldRefs().size();
     status = LAST_OPERATION.SERIALIZE;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerde.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerde.java b/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerde.java
index be17823..757a342 100644
--- a/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerde.java
+++ b/integration/hive/src/test/java/org/apache/carbondata/hive/TestCarbonSerde.java
@@ -87,7 +87,7 @@
 //    assertEquals("deserialization gives the wrong object", t, row);
 //
 //    // Serialize
-//    final ArrayWritable serializedArr = (ArrayWritable) serDe.serialize(row, oi);
+//    final ArrayWritable serializedArr = (ArrayWritable) serDe.serializeStartKey(row, oi);
 //    assertEquals("size correct after serialization", serDe.getSerDeStats().getRawDataSize(),
 //        serializedArr.get().length);
 //    assertTrue("serialized object should be equal to starting object",

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/integration/spark-common-test/src/test/resources/complexTypeDecimal.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/complexTypeDecimal.csv b/integration/spark-common-test/src/test/resources/complexTypeDecimal.csv
index 8c90d27..db4bca9 100644
--- a/integration/spark-common-test/src/test/resources/complexTypeDecimal.csv
+++ b/integration/spark-common-test/src/test/resources/complexTypeDecimal.csv
@@ -1,9 +1,9 @@
 ID,date,country,name,phonetype,serialname,salary,complex
-1.2,2015/7/23,china,aaa1,phone197,ASD69643,15000,3.113$3.33
-2,2015/7/24,china,aaa2,phone756,ASD42892,15001,3.123$7.33
-4.3,2015/7/26,china,aaa4,phone2435,ASD66902,15003,3.123$56.33
-5,2015/7/27,china,aaa5,phone2441,ASD90633,15004,3.133$5.33
-6.5,2015/7/28,china,aaa6,phone294,ASD59961,15005,3.133$54.33
-8,2015/7/30,china,aaa8,phone1848,ASD57308,15007,32.13$56.33
-9.1,2015/7/18,china,aaa9,phone706,ASD86717,15008,3.213$44.33
-10,2015/7/19,usa,aaa10,phone685,ASD30505,15009,32.13$33.33
\ No newline at end of file
+1.2,2015/07/23,china,aaa1,phone197,ASD69643,15000,3.113$3.33
+2,2015/07/24,china,aaa2,phone756,ASD42892,15001,3.123$7.33
+4.3,2015/07/26,china,aaa4,phone2435,ASD66902,15003,3.123$56.33
+5,2015/07/27,china,aaa5,phone2441,ASD90633,15004,3.133$5.33
+6.5,2015/07/28,china,aaa6,phone294,ASD59961,15005,3.133$54.33
+8,2015/07/30,china,aaa8,phone1848,ASD57308,15007,32.13$56.33
+9.1,2015/07/18,china,aaa9,phone706,ASD86717,15008,3.213$44.33
+10,2015/07/19,usa,aaa10,phone685,ASD30505,15009,32.13$33.33
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/integration/spark-common-test/src/test/resources/complexTypeDecimalNestedHive.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/complexTypeDecimalNestedHive.csv b/integration/spark-common-test/src/test/resources/complexTypeDecimalNestedHive.csv
index ae67723..e8c023b 100644
--- a/integration/spark-common-test/src/test/resources/complexTypeDecimalNestedHive.csv
+++ b/integration/spark-common-test/src/test/resources/complexTypeDecimalNestedHive.csv
@@ -1,4 +1,4 @@
-1.2,2015/7/23,china,aaa1,phone197,ASD69643,15000,3.113:imei$3.33:imsi
+1.2,2015-7-23 00:00:00,china,aaa1,phone197,ASD69643,15000,3.113:imei$3.33:imsi
 2,2015/7/24,china,aaa2,phone756,ASD42892,15001,3.123:imei$7.33:imsi
 4.3,2015/7/26,china,aaa4,phone2435,ASD66902,15003,3.123:imei$56.33:imsi
 5,2015/7/27,china,aaa5,phone2441,ASD90633,15004,3.133:imei$5.33:imsi

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/core/datastore/columnar/ColGroupBlockStorage.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/core/datastore/columnar/ColGroupBlockStorage.java b/processing/src/main/java/org/apache/carbondata/core/datastore/columnar/ColGroupBlockStorage.java
deleted file mode 100644
index 70ac3d9..0000000
--- a/processing/src/main/java/org/apache/carbondata/core/datastore/columnar/ColGroupBlockStorage.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.columnar;
-
-import java.util.concurrent.Callable;
-
-import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.datastore.columnar.IndexStorage;
-import org.apache.carbondata.processing.store.colgroup.ColGroupDataHolder;
-import org.apache.carbondata.processing.store.colgroup.ColGroupMinMax;
-
-/**
- * it is holder of column group dataPage and also min max for colgroup block dataPage
- */
-public class ColGroupBlockStorage implements IndexStorage, Callable<IndexStorage> {
-
-  private byte[][] dataPage;
-
-  private ColGroupMinMax colGrpMinMax;
-
-  public ColGroupBlockStorage(SegmentProperties segmentProperties, int colGrpIndex,
-      byte[][] dataPage) {
-    colGrpMinMax = new ColGroupMinMax(segmentProperties, colGrpIndex);
-    this.dataPage = dataPage;
-    for (int i = 0; i < dataPage.length; i++) {
-      colGrpMinMax.add(dataPage[i]);
-    }
-  }
-
-  /**
-   * sorting is not required for colgroup storage and hence return true
-   */
-  @Override public boolean isAlreadySorted() {
-    return true;
-  }
-
-  /**
-   * for column group storage its not required
-   */
-  public ColGroupDataHolder getRowIdPage() {
-    //not required for column group storage
-    return null;
-  }
-
-  /**
-   * for column group storage its not required
-   */
-  public ColGroupDataHolder getRowIdRlePage() {
-    // not required for column group storage
-    return null;
-  }
-
-  /**
-   * for column group storage its not required
-   */
-  public byte[][] getDataPage() {
-    return dataPage;
-  }
-
-  /**
-   * for column group storage its not required
-   */
-  public ColGroupDataHolder getDataRlePage() {
-    //not required for column group
-    return null;
-  }
-
-  /**
-   * for column group storage its not required
-   */
-  @Override public int getTotalSize() {
-    return dataPage.length;
-  }
-
-  @Override public byte[] getMin() {
-    return colGrpMinMax.getMin();
-  }
-
-  @Override public byte[] getMax() {
-    return colGrpMinMax.getMax();
-  }
-
-  /**
-   * return self
-   */
-  @Override public IndexStorage call() throws Exception {
-    return this;
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/SortStepRowUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/SortStepRowUtil.java b/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/SortStepRowUtil.java
index 50fb4c5..1bcbe54 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/SortStepRowUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/newflow/sort/SortStepRowUtil.java
@@ -17,8 +17,12 @@
 
 package org.apache.carbondata.processing.newflow.sort;
 
+import java.math.BigDecimal;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.util.DataTypeUtil;
+import org.apache.carbondata.core.util.NonDictionaryUtil;
 import org.apache.carbondata.processing.sortandgroupby.sortdata.SortParameters;
-import org.apache.carbondata.processing.util.NonDictionaryUtil;
 
 public class SortStepRowUtil {
   public static Object[] convertRow(Object[] data, SortParameters parameters,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/newflow/steps/DataWriterProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/newflow/steps/DataWriterProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/newflow/steps/DataWriterProcessorStepImpl.java
index 912cd3a..aad874b 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/newflow/steps/DataWriterProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/newflow/steps/DataWriterProcessorStepImpl.java
@@ -177,7 +177,7 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
         readCounter++;
       }
     } catch (Exception e) {
-      throw new CarbonDataLoadingException("unable to generate the mdkey", e);
+      throw new CarbonDataLoadingException(e);
     }
     rowCounter.getAndAdd(batch.getSize());
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/IntermediateFileMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/IntermediateFileMerger.java b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/IntermediateFileMerger.java
index 6528d44..653da7b 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/IntermediateFileMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/IntermediateFileMerger.java
@@ -31,8 +31,8 @@ import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.NonDictionaryUtil;
 import org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException;
-import org.apache.carbondata.processing.util.NonDictionaryUtil;
 
 public class IntermediateFileMerger implements Callable<Void> {
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/RowComparator.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/RowComparator.java b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/RowComparator.java
index bc6640d..11c42a9 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/RowComparator.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/RowComparator.java
@@ -22,7 +22,7 @@ import java.util.Comparator;
 
 import org.apache.carbondata.core.datastore.row.WriteStepRowUtil;
 import org.apache.carbondata.core.util.ByteUtil.UnsafeComparer;
-import org.apache.carbondata.processing.util.NonDictionaryUtil;
+import org.apache.carbondata.core.util.NonDictionaryUtil;
 
 public class RowComparator implements Comparator<Object[]> {
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/RowComparatorForNormalDims.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/RowComparatorForNormalDims.java b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/RowComparatorForNormalDims.java
index 8d914ea..be29bf8 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/RowComparatorForNormalDims.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/RowComparatorForNormalDims.java
@@ -18,7 +18,7 @@ package org.apache.carbondata.processing.sortandgroupby.sortdata;
 
 import java.util.Comparator;
 
-import org.apache.carbondata.processing.util.NonDictionaryUtil;
+import org.apache.carbondata.core.util.NonDictionaryUtil;
 
 /**
  * This class is used as comparator for comparing dims which are non high cardinality dims.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortTempFileChunkHolder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortTempFileChunkHolder.java b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortTempFileChunkHolder.java
index 1bcbaa8..b17c69a 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortTempFileChunkHolder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/SortTempFileChunkHolder.java
@@ -35,9 +35,8 @@ import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.util.ByteUtil.UnsafeComparer;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.DataTypeUtil;
+import org.apache.carbondata.core.util.NonDictionaryUtil;
 import org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException;
-import org.apache.carbondata.processing.util.NonDictionaryUtil;
 
 public class SortTempFileChunkHolder implements Comparable<SortTempFileChunkHolder> {
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/UnCompressedTempSortFileWriter.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/UnCompressedTempSortFileWriter.java b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/UnCompressedTempSortFileWriter.java
index d629cdc..51b3964 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/UnCompressedTempSortFileWriter.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sortandgroupby/sortdata/UnCompressedTempSortFileWriter.java
@@ -23,8 +23,8 @@ import java.io.IOException;
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.NonDictionaryUtil;
 import org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException;
-import org.apache.carbondata.processing.util.NonDictionaryUtil;
 
 public class UnCompressedTempSortFileWriter extends AbstractTempSortFileWriter {
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
index 3ed5888..6ed5d31 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
@@ -38,7 +38,7 @@ import org.apache.carbondata.core.constants.CarbonV3DataFormatConstants;
 import org.apache.carbondata.core.datastore.GenericDataType;
 import org.apache.carbondata.core.datastore.columnar.ColumnGroupModel;
 import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
-import org.apache.carbondata.core.datastore.page.encoding.EncodedData;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
 import org.apache.carbondata.core.datastore.row.CarbonRow;
 import org.apache.carbondata.core.keygenerator.KeyGenException;
 import org.apache.carbondata.core.keygenerator.columnar.ColumnarSplitter;
@@ -51,7 +51,6 @@ import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.NodeHolder;
 import org.apache.carbondata.processing.newflow.sort.SortScopeOptions;
 import org.apache.carbondata.processing.store.file.FileManager;
 import org.apache.carbondata.processing.store.file.IFileManagerComposite;
@@ -148,8 +147,6 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
    */
   private ColumnarFormatVersion version;
 
-  private TablePageEncoder encoder;
-
   private SortScopeOptions.SortScope sortScope;
 
   /**
@@ -201,7 +198,6 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
       }
     }
     this.version = CarbonProperties.getInstance().getFormatVersion();
-    this.encoder = new TablePageEncoder(model);
     String noInvertedIdxCol = "";
     for (CarbonDimension cd : model.getSegmentProperties().getDimensions()) {
       if (!cd.isUseInvertedIndex()) {
@@ -343,35 +339,26 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
   }
 
   /**
-   * generate the NodeHolder from the input rows (one page in case of V3 format)
+   * generate the EncodedTablePage from the input rows (one page in case of V3 format)
    */
-  private NodeHolder processDataRows(List<CarbonRow> dataRows)
+  private EncodedTablePage processDataRows(List<CarbonRow> dataRows)
       throws CarbonDataWriterException, KeyGenException, MemoryException, IOException {
     if (dataRows.size() == 0) {
-      return new NodeHolder();
+      return EncodedTablePage.newEmptyInstance();
     }
     TablePage tablePage = new TablePage(model, dataRows.size());
-    TablePageKey keys = new TablePageKey(model, dataRows.size());
     int rowId = 0;
 
     // convert row to columnar data
     for (CarbonRow row : dataRows) {
-      tablePage.addRow(rowId, row);
-      keys.update(rowId, row);
-      rowId++;
+      tablePage.addRow(rowId++, row);
     }
 
-    // apply and compress dimensions and measure
-    EncodedData encodedData = encoder.encode(tablePage);
-
-    TablePageStatistics tablePageStatistics = new TablePageStatistics(
-        model.getTableSpec(), tablePage, encodedData, tablePage.getMeasureStats());
-
-    NodeHolder nodeHolder = dataWriter.buildDataNodeHolder(encodedData, tablePageStatistics, keys);
+    EncodedTablePage encoded = tablePage.encode();
     tablePage.freeMemory();
 
     LOGGER.info("Number Of records processed: " + dataRows.size());
-    return nodeHolder;
+    return encoded;
   }
 
   /**
@@ -470,7 +457,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
       }
       consumerExecutorService.shutdownNow();
       processWriteTaskSubmitList(consumerExecutorServiceTaskList);
-      this.dataWriter.writeBlockletInfoToFile();
+      this.dataWriter.writeFooterToFile();
       LOGGER.info("All blocklets have been finished writing");
       // close all the open stream for both the files
       this.dataWriter.closeWriter();
@@ -666,7 +653,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
     /**
      * array of blocklet data holder objects
      */
-    private NodeHolder[] nodeHolders;
+    private EncodedTablePage[] encodedTablePages;
     /**
      * flag to check whether the producer has completed processing for holder
      * object which is required to be picked form an index
@@ -678,7 +665,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
     private int currentIndex;
 
     private BlockletDataHolder() {
-      nodeHolders = new NodeHolder[numberOfCores];
+      encodedTablePages = new EncodedTablePage[numberOfCores];
       available = new AtomicBoolean(false);
     }
 
@@ -686,32 +673,32 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
      * @return a node holder object
      * @throws InterruptedException if consumer thread is interrupted
      */
-    public synchronized NodeHolder get() throws InterruptedException {
-      NodeHolder nodeHolder = nodeHolders[currentIndex];
+    public synchronized EncodedTablePage get() throws InterruptedException {
+      EncodedTablePage encodedTablePage = encodedTablePages[currentIndex];
       // if node holder is null means producer thread processing the data which has to
       // be inserted at this current index has not completed yet
-      if (null == nodeHolder && !processingComplete) {
+      if (null == encodedTablePage && !processingComplete) {
         available.set(false);
       }
       while (!available.get()) {
         wait();
       }
-      nodeHolder = nodeHolders[currentIndex];
-      nodeHolders[currentIndex] = null;
+      encodedTablePage = encodedTablePages[currentIndex];
+      encodedTablePages[currentIndex] = null;
       currentIndex++;
       // reset current index when it reaches length of node holder array
-      if (currentIndex >= nodeHolders.length) {
+      if (currentIndex >= encodedTablePages.length) {
         currentIndex = 0;
       }
-      return nodeHolder;
+      return encodedTablePage;
     }
 
     /**
-     * @param nodeHolder
+     * @param encodedTablePage encoded table page to insert into the holder
      * @param index
      */
-    public synchronized void put(NodeHolder nodeHolder, int index) {
-      nodeHolders[index] = nodeHolder;
+    public synchronized void put(EncodedTablePage encodedTablePage, int index) {
+      encodedTablePages[index] = encodedTablePage;
       // notify the consumer thread when index at which object is to be inserted
       // becomes equal to current index from where data has to be picked for writing
       if (index == currentIndex) {
@@ -729,14 +716,14 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
     private BlockletDataHolder blockletDataHolder;
     private List<CarbonRow> dataRows;
     private int sequenceNumber;
-    private boolean isWriteAll;
+    private boolean isLastPage;
 
     private Producer(BlockletDataHolder blockletDataHolder, List<CarbonRow> dataRows,
-        int sequenceNumber, boolean isWriteAll) {
+        int sequenceNumber, boolean isLastPage) {
       this.blockletDataHolder = blockletDataHolder;
       this.dataRows = dataRows;
       this.sequenceNumber = sequenceNumber;
-      this.isWriteAll = isWriteAll;
+      this.isLastPage = isLastPage;
     }
 
     /**
@@ -747,11 +734,11 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
      */
     @Override public Void call() throws Exception {
       try {
-        NodeHolder nodeHolder = processDataRows(dataRows);
-        nodeHolder.setWriteAll(isWriteAll);
+        EncodedTablePage encodedTablePage = processDataRows(dataRows);
+        encodedTablePage.setIsLastPage(isLastPage);
         // insert the object in array according to sequence number
         int indexInNodeHolderArray = (sequenceNumber - 1) % numberOfCores;
-        blockletDataHolder.put(nodeHolder, indexInNodeHolderArray);
+        blockletDataHolder.put(encodedTablePage, indexInNodeHolderArray);
         return null;
       } catch (Throwable throwable) {
         LOGGER.error(throwable, "Error in producer");
@@ -781,11 +768,11 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
      */
     @Override public Void call() throws Exception {
       while (!processingComplete || blockletProcessingCount.get() > 0) {
-        NodeHolder nodeHolder = null;
+        EncodedTablePage encodedTablePage = null;
         try {
-          nodeHolder = blockletDataHolder.get();
-          if (null != nodeHolder) {
-            dataWriter.writeBlockletData(nodeHolder);
+          encodedTablePage = blockletDataHolder.get();
+          if (null != encodedTablePage) {
+            dataWriter.writeTablePage(encodedTablePage);
           }
           blockletProcessingCount.decrementAndGet();
         } catch (Throwable throwable) {

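The BlockletDataHolder rename above keeps an ordering mechanism that is easy to miss: producers encode pages in parallel and may finish out of order, but the consumer always drains the slot at currentIndex, so pages reach the writer in submission order. A minimal standalone sketch of that handoff, with String standing in for EncodedTablePage (a hypothetical simplification that omits the processingComplete flag):

    import java.util.concurrent.atomic.AtomicBoolean;

    class OrderedPageHolder {
      private final String[] slots;
      private final AtomicBoolean available = new AtomicBoolean(false);
      private int currentIndex;

      OrderedPageHolder(int numProducers) {
        slots = new String[numProducers];
      }

      // blocks until the page destined for the current slot is produced
      synchronized String get() throws InterruptedException {
        if (slots[currentIndex] == null) {
          available.set(false);
        }
        while (!available.get()) {
          wait();
        }
        String page = slots[currentIndex];
        slots[currentIndex] = null;
        currentIndex = (currentIndex + 1) % slots.length;  // wrap around
        return page;
      }

      // producers store by sequence slot; only the slot the consumer is
      // currently waiting on triggers a wake-up
      synchronized void put(String page, int index) {
        slots[index] = page;
        if (index == currentIndex) {
          available.set(true);
          notifyAll();
        }
      }
    }
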
http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java b/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
index 7930763..c5a9bec 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
@@ -22,23 +22,45 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.Iterator;
 import java.util.List;
 
+import org.apache.carbondata.core.datastore.DimensionType;
 import org.apache.carbondata.core.datastore.GenericDataType;
 import org.apache.carbondata.core.datastore.TableSpec;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForInt;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForInt;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForShort;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForShort;
+import org.apache.carbondata.core.datastore.columnar.IndexStorage;
+import org.apache.carbondata.core.datastore.compression.Compressor;
+import org.apache.carbondata.core.datastore.compression.CompressorFactory;
 import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
 import org.apache.carbondata.core.datastore.page.ColumnPage;
 import org.apache.carbondata.core.datastore.page.ComplexColumnPage;
-import org.apache.carbondata.core.datastore.page.statistics.MeasurePageStatsVO;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
+import org.apache.carbondata.core.datastore.page.encoding.ColumnPageCodec;
+import org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingStrategy;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedDimensionPage;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedMeasurePage;
+import org.apache.carbondata.core.datastore.page.encoding.EncodingStrategy;
+import org.apache.carbondata.core.datastore.page.key.TablePageKey;
+import org.apache.carbondata.core.datastore.page.statistics.PrimitivePageStatsCollector;
+import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
+import org.apache.carbondata.core.datastore.page.statistics.VarLengthPageStatsCollector;
 import org.apache.carbondata.core.datastore.row.CarbonRow;
 import org.apache.carbondata.core.datastore.row.WriteStepRowUtil;
 import org.apache.carbondata.core.keygenerator.KeyGenException;
 import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
 import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.util.ByteUtil;
+import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.carbondata.core.util.DataTypeUtil;
+import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
 
 import org.apache.spark.sql.types.Decimal;
 
-
 /**
  * Represent a page data for all columns, we store its data in columnar layout, so that
  * all processing apply to TablePage can be done in vectorized fashion.
@@ -55,24 +77,30 @@ public class TablePage {
   private ComplexColumnPage[] complexDimensionPage;
   private ColumnPage[] measurePage;
 
-  private MeasurePageStatsVO measurePageStatistics;
-
   // the num of rows in this page, it must be less than short value (65536)
   private int pageSize;
 
   private CarbonFactDataHandlerModel model;
 
+  private TablePageKey key;
+
+  private ColumnarFormatVersion version = CarbonProperties.getInstance().getFormatVersion();
+
   TablePage(CarbonFactDataHandlerModel model, int pageSize) throws MemoryException {
     this.model = model;
     this.pageSize = pageSize;
     int numDictDimension = model.getMDKeyGenerator().getDimCount();
     dictDimensionPage = new ColumnPage[numDictDimension];
     for (int i = 0; i < dictDimensionPage.length; i++) {
-      dictDimensionPage[i] = ColumnPage.newVarLengthPage(DataType.BYTE_ARRAY, pageSize);
+      ColumnPage page = ColumnPage.newVarLengthPage(DataType.BYTE_ARRAY, pageSize);
+      page.setStatsCollector(VarLengthPageStatsCollector.newInstance());
+      dictDimensionPage[i] = page;
     }
     noDictDimensionPage = new ColumnPage[model.getNoDictionaryCount()];
     for (int i = 0; i < noDictDimensionPage.length; i++) {
-      noDictDimensionPage[i] = ColumnPage.newVarLengthPage(DataType.BYTE_ARRAY, pageSize);
+      ColumnPage page = ColumnPage.newVarLengthPage(DataType.BYTE_ARRAY, pageSize);
+      page.setStatsCollector(VarLengthPageStatsCollector.newInstance());
+      noDictDimensionPage[i] = page;
     }
     complexDimensionPage = new ComplexColumnPage[model.getComplexColumnCount()];
     for (int i = 0; i < complexDimensionPage.length; i++) {
@@ -84,22 +112,33 @@ public class TablePage {
     DataType[] dataTypes = model.getMeasureDataType();
     TableSpec.MeasureSpec measureSpec = model.getTableSpec().getMeasureSpec();
     for (int i = 0; i < measurePage.length; i++) {
-      measurePage[i] = ColumnPage
+      ColumnPage page = ColumnPage
           .newPage(dataTypes[i], pageSize, measureSpec.getScale(i), measureSpec.getPrecision(i));
+      page.setStatsCollector(PrimitivePageStatsCollector.newInstance(dataTypes[i], pageSize));
+      measurePage[i] = page;
     }
+    boolean hasNoDictionary = noDictDimensionPage.length > 0;
+    this.key = new TablePageKey(pageSize, model.getMDKeyGenerator(), model.getSegmentProperties(),
+        hasNoDictionary);
   }
 
   /**
-   * Add one row to the internal store, it will be converted into columnar layout
+   * Add one row to the internal store
    *
    * @param rowId Id of the input row
    * @param row   row object
    */
   public void addRow(int rowId, CarbonRow row) throws KeyGenException {
-    // convert each column category
+    // convert each column category, update key and stats
+    byte[] mdk = WriteStepRowUtil.getMdk(row, model.getMDKeyGenerator());
+    convertToColumnarAndAddToPages(rowId, row, mdk);
+    key.update(rowId, row, mdk);
+  }
 
+  // convert the input row object to columnar data and add to column pages
+  private void convertToColumnarAndAddToPages(int rowId, CarbonRow row, byte[] mdk)
+      throws KeyGenException {
     // 1. convert dictionary columns
-    byte[] mdk = WriteStepRowUtil.getMdk(row, model.getMDKeyGenerator());
     byte[][] keys = model.getSegmentProperties().getFixedLengthKeySplitter().splitKey(mdk);
     for (int i = 0; i < dictDimensionPage.length; i++) {
       dictDimensionPage[i].putData(rowId, keys[i]);
@@ -137,11 +176,6 @@ public class TablePage {
       }
       measurePage[i].putData(rowId, value);
     }
-
-    // update statistics if it is last row
-    if (rowId + 1 == pageSize) {
-      this.measurePageStatistics = new MeasurePageStatsVO(measurePage);
-    }
   }
 
   /**
@@ -160,10 +194,10 @@ public class TablePage {
     // initialize the page if first row
     if (rowId == 0) {
       int depthInComplexColumn = complexDataType.getColsCount();
-      getComplexDimensionPage()[index] = new ComplexColumnPage(pageSize, depthInComplexColumn);
+      complexDimensionPage[index] = new ComplexColumnPage(pageSize, depthInComplexColumn);
     }
 
-    int depthInComplexColumn = getComplexDimensionPage()[index].getDepth();
+    int depthInComplexColumn = complexDimensionPage[index].getDepth();
     // this is the result columnar data which will be added to page,
     // size of this list is the depth of complex column, we will fill it by input data
     List<ArrayList<byte[]>> encodedComplexColumnar = new ArrayList<>();
@@ -187,8 +221,7 @@ public class TablePage {
     }
 
     for (int depth = 0; depth < depthInComplexColumn; depth++) {
-      getComplexDimensionPage()[index]
-          .putComplexData(rowId, depth, encodedComplexColumnar.get(depth));
+      complexDimensionPage[index].putComplexData(rowId, depth, encodedComplexColumnar.get(depth));
     }
   }
 
@@ -217,26 +250,161 @@ public class TablePage {
     return output;
   }
 
-  ColumnPage[] getDictDimensionPage() {
-    return dictDimensionPage;
+  EncodedTablePage encode() throws KeyGenException, MemoryException, IOException {
+    // encode dimensions and measure
+    EncodedDimensionPage[] dimensions = encodeAndCompressDimensions();
+    EncodedMeasurePage[] measures = encodeAndCompressMeasures();
+    return EncodedTablePage.newInstance(pageSize, dimensions, measures, key);
   }
 
-  ColumnPage[] getNoDictDimensionPage() {
-    return noDictDimensionPage;
+  private EncodingStrategy encodingStrategy = new DefaultEncodingStrategy();
+
+  // encode and compress each measure column, returning the encoded measure pages
+  private EncodedMeasurePage[] encodeAndCompressMeasures()
+      throws MemoryException, IOException {
+    EncodedMeasurePage[] encodedMeasures = new EncodedMeasurePage[measurePage.length];
+    for (int i = 0; i < measurePage.length; i++) {
+      SimpleStatsResult stats = (SimpleStatsResult)(measurePage[i].getStatistics());
+      ColumnPageCodec encoder = encodingStrategy.createCodec(stats);
+      encodedMeasures[i] = (EncodedMeasurePage) encoder.encode(measurePage[i]);
+    }
+    return encodedMeasures;
+  }
+
+  private IndexStorage encodeAndCompressDictDimension(byte[][] data, boolean isSort,
+      boolean isUseInvertedIndex, boolean isRleApplicable) throws KeyGenException {
+    if (isUseInvertedIndex) {
+      if (version == ColumnarFormatVersion.V3) {
+        return new BlockIndexerStorageForShort(data, isRleApplicable, false, isSort);
+      } else {
+        return new BlockIndexerStorageForInt(data, isRleApplicable, false, isSort);
+      }
+    } else {
+      if (version == ColumnarFormatVersion.V3) {
+        return new BlockIndexerStorageForNoInvertedIndexForShort(data, false);
+      } else {
+        return new BlockIndexerStorageForNoInvertedIndexForInt(data);
+      }
+    }
   }
 
-  ComplexColumnPage[] getComplexDimensionPage() {
-    return complexDimensionPage;
+  private IndexStorage encodeAndCompressDirectDictDimension(byte[][] data, boolean isSort,
+      boolean isUseInvertedIndex, boolean isRleApplicable) throws KeyGenException {
+    if (isUseInvertedIndex) {
+      if (version == ColumnarFormatVersion.V3) {
+        return new BlockIndexerStorageForShort(data, isRleApplicable, false, isSort);
+      } else {
+        return new BlockIndexerStorageForInt(data, isRleApplicable, false, isSort);
+      }
+    } else {
+      if (version == ColumnarFormatVersion.V3) {
+        return new BlockIndexerStorageForNoInvertedIndexForShort(data, false);
+      } else {
+        return new BlockIndexerStorageForNoInvertedIndexForInt(data);
+      }
+    }
   }
 
-  ColumnPage[] getMeasurePage() {
-    return measurePage;
+  private IndexStorage encodeAndCompressComplexDimension(byte[][] data) {
+    if (version == ColumnarFormatVersion.V3) {
+      return new BlockIndexerStorageForShort(data, false, false, false);
+    } else {
+      return new BlockIndexerStorageForInt(data, false, false, false);
+    }
   }
 
-  MeasurePageStatsVO getMeasureStats() {
-    return measurePageStatistics;
+  private IndexStorage encodeAndCompressNoDictDimension(byte[][] data, boolean isSort,
+      boolean isUseInvertedIndex, boolean isRleApplicable) {
+    if (isUseInvertedIndex) {
+      if (version == ColumnarFormatVersion.V3) {
+        return new BlockIndexerStorageForShort(data, isRleApplicable, true, isSort);
+      } else {
+        return new BlockIndexerStorageForInt(data, isRleApplicable, true, isSort);
+      }
+    } else {
+      if (version == ColumnarFormatVersion.V3) {
+        return new BlockIndexerStorageForNoInvertedIndexForShort(data, true);
+      } else {
+        return new BlockIndexerStorageForNoInvertedIndexForInt(data);
+      }
+    }
   }
 
+  // encode and compress each dimension column, returning the encoded dimension pages
+  private EncodedDimensionPage[] encodeAndCompressDimensions()
+      throws KeyGenException {
+    TableSpec.DimensionSpec dimensionSpec = model.getTableSpec().getDimensionSpec();
+    int dictionaryColumnCount = -1;
+    int noDictionaryColumnCount = -1;
+    int indexStorageOffset = 0;
+    IndexStorage[] indexStorages = new IndexStorage[dimensionSpec.getNumExpandedDimensions()];
+    Compressor compressor = CompressorFactory.getInstance().getCompressor();
+    EncodedDimensionPage[] compressedColumns = new EncodedDimensionPage[indexStorages.length];
+    boolean[] isUseInvertedIndex = model.getIsUseInvertedIndex();
+    for (int i = 0; i < dimensionSpec.getNumSimpleDimensions(); i++) {
+      ColumnPage page;
+      byte[] flattened;
+      boolean isSortColumn = model.isSortColumn(i);
+      switch (dimensionSpec.getType(i)) {
+        case GLOBAL_DICTIONARY:
+          // dictionary dimension
+          page = dictDimensionPage[++dictionaryColumnCount];
+          indexStorages[indexStorageOffset] = encodeAndCompressDictDimension(
+              page.getByteArrayPage(),
+              isSortColumn,
+              isUseInvertedIndex[i] & isSortColumn,
+              CarbonDataProcessorUtil.isRleApplicableForColumn(dimensionSpec.getType(i)));
+          flattened = ByteUtil.flatten(indexStorages[indexStorageOffset].getDataPage());
+          break;
+        case DIRECT_DICTIONARY:
+          // timestamp and date column
+          page = dictDimensionPage[++dictionaryColumnCount];
+          indexStorages[indexStorageOffset] = encodeAndCompressDirectDictDimension(
+              page.getByteArrayPage(),
+              isSortColumn,
+              isUseInvertedIndex[i] & isSortColumn,
+              CarbonDataProcessorUtil.isRleApplicableForColumn(dimensionSpec.getType(i)));
+          flattened = ByteUtil.flatten(indexStorages[indexStorageOffset].getDataPage());
+          break;
+        case PLAIN_VALUE:
+          // high cardinality dimension, encoded as plain string
+          page = noDictDimensionPage[++noDictionaryColumnCount];
+          indexStorages[indexStorageOffset] = encodeAndCompressNoDictDimension(
+              page.getByteArrayPage(),
+              isSortColumn,
+              isUseInvertedIndex[i] & isSortColumn,
+              CarbonDataProcessorUtil.isRleApplicableForColumn(dimensionSpec.getType(i)));
+          flattened = ByteUtil.flatten(indexStorages[indexStorageOffset].getDataPage());
+          break;
+        case COMPLEX:
+          // complex columns are added at the end, so skip them here
+          continue;
+        default:
+          throw new RuntimeException("unsupported dimension type: " + dimensionSpec.getType(i));
+      }
+      byte[] compressedData = compressor.compressByte(flattened);
+      compressedColumns[indexStorageOffset] = new EncodedDimensionPage(
+          pageSize, compressedData, indexStorages[indexStorageOffset], dimensionSpec.getType(i));
+      SimpleStatsResult stats = (SimpleStatsResult) page.getStatistics();
+      compressedColumns[indexStorageOffset].setNullBitSet(stats.getNullBits());
+      indexStorageOffset++;
+    }
+
+    // handle complex type column
+    for (int i = 0; i < dimensionSpec.getNumComplexDimensions(); i++) {
+      Iterator<byte[][]> iterator = complexDimensionPage[i].iterator();
+      while (iterator.hasNext()) {
+        byte[][] data = iterator.next();
+        indexStorages[indexStorageOffset] = encodeAndCompressComplexDimension(data);
+        byte[] flattened = ByteUtil.flatten(data);
+        byte[] compressedData = compressor.compressByte(flattened);
+        compressedColumns[indexStorageOffset] = new EncodedDimensionPage(
+            pageSize, compressedData, indexStorages[indexStorageOffset], DimensionType.COMPLEX);
+        indexStorageOffset++;
+      }
+    }
+    return compressedColumns;
+  }
 }
 
 
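The net effect of the TablePage rewrite above: statistics are now collected incrementally as each row is added, through the stats collectors attached to every page, instead of being rebuilt in a final pass when the last row arrives, and encode() turns the collected pages straight into an EncodedTablePage. A minimal standalone sketch of the collector-on-page idea, using a boxed Integer page as a hypothetical simplification of the typed CarbonData pages:

    import java.util.BitSet;

    class IntStatsCollector {
      int min = Integer.MAX_VALUE;
      int max = Integer.MIN_VALUE;
      final BitSet nullBits = new BitSet();

      void update(int rowId, Integer value) {
        if (value == null) {
          nullBits.set(rowId);       // exact null tracking per rowId
          return;
        }
        min = Math.min(min, value);
        max = Math.max(max, value);
      }
    }

    class IntColumnPage {
      private final Integer[] data;
      private final IntStatsCollector stats = new IntStatsCollector();

      IntColumnPage(int pageSize) {
        data = new Integer[pageSize];
      }

      void putData(int rowId, Integer value) {
        data[rowId] = value;
        stats.update(rowId, value);  // stats stay exact with no second pass
      }

      IntStatsCollector getStatistics() {
        return stats;
      }
    }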

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/store/TablePageEncoder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/TablePageEncoder.java b/processing/src/main/java/org/apache/carbondata/processing/store/TablePageEncoder.java
deleted file mode 100644
index 8547845..0000000
--- a/processing/src/main/java/org/apache/carbondata/processing/store/TablePageEncoder.java
+++ /dev/null
@@ -1,203 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.processing.store;
-
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.carbondata.core.datastore.TableSpec;
-import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForInt;
-import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForInt;
-import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForShort;
-import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForShort;
-import org.apache.carbondata.core.datastore.columnar.IndexStorage;
-import org.apache.carbondata.core.datastore.compression.Compressor;
-import org.apache.carbondata.core.datastore.compression.CompressorFactory;
-import org.apache.carbondata.core.datastore.page.ColumnPage;
-import org.apache.carbondata.core.datastore.page.encoding.ColumnPageCodec;
-import org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingStrategy;
-import org.apache.carbondata.core.datastore.page.encoding.EncodedData;
-import org.apache.carbondata.core.datastore.page.encoding.EncodingStrategy;
-import org.apache.carbondata.core.keygenerator.KeyGenException;
-import org.apache.carbondata.core.memory.MemoryException;
-import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
-import org.apache.carbondata.core.util.ByteUtil;
-import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
-
-class TablePageEncoder {
-
-  private ColumnarFormatVersion version;
-
-  private boolean[] isUseInvertedIndex;
-
-  private CarbonFactDataHandlerModel model;
-
-  private static final EncodingStrategy encodingStrategy = new DefaultEncodingStrategy();
-
-  TablePageEncoder(CarbonFactDataHandlerModel model) {
-    this.version = CarbonProperties.getInstance().getFormatVersion();
-    this.model = model;
-    this.isUseInvertedIndex = model.getIsUseInvertedIndex();
-  }
-
-  // function to apply all columns in one table page
-  EncodedData encode(TablePage tablePage)
-      throws KeyGenException, MemoryException, IOException {
-    EncodedData encodedData = new EncodedData();
-    encodeAndCompressDimensions(tablePage, encodedData);
-    encodeAndCompressMeasures(tablePage, encodedData);
-    return encodedData;
-  }
-
-  // apply measure and set encodedData in `encodedData`
-  private void encodeAndCompressMeasures(TablePage tablePage, EncodedData encodedData)
-      throws MemoryException, IOException {
-    ColumnPage[] measurePage = tablePage.getMeasurePage();
-    byte[][] encodedMeasures = new byte[measurePage.length][];
-    for (int i = 0; i < measurePage.length; i++) {
-      ColumnPageCodec encoder = encodingStrategy.createCodec(measurePage[i].getStatistics());
-      encodedMeasures[i] = encoder.encode(measurePage[i]);
-    }
-    encodedData.measures = encodedMeasures;
-  }
-
-  private IndexStorage encodeAndCompressDictDimension(byte[][] data, boolean isSort,
-      boolean isUseInvertedIndex, boolean isRleApplicable) throws KeyGenException {
-    if (isUseInvertedIndex) {
-      if (version == ColumnarFormatVersion.V3) {
-        return new BlockIndexerStorageForShort(data, isRleApplicable, false, isSort);
-      } else {
-        return new BlockIndexerStorageForInt(data, isRleApplicable, false, isSort);
-      }
-    } else {
-      if (version == ColumnarFormatVersion.V3) {
-        return new BlockIndexerStorageForNoInvertedIndexForShort(data, false);
-      } else {
-        return new BlockIndexerStorageForNoInvertedIndexForInt(data);
-      }
-    }
-  }
-
-  private IndexStorage encodeAndCompressDirectDictDimension(byte[][] data, boolean isSort,
-      boolean isUseInvertedIndex, boolean isRleApplicable) throws KeyGenException {
-    if (isUseInvertedIndex) {
-      if (version == ColumnarFormatVersion.V3) {
-        return new BlockIndexerStorageForShort(data, isRleApplicable, false, isSort);
-      } else {
-        return new BlockIndexerStorageForInt(data, isRleApplicable, false, isSort);
-      }
-    } else {
-      if (version == ColumnarFormatVersion.V3) {
-        return new BlockIndexerStorageForNoInvertedIndexForShort(data, false);
-      } else {
-        return new BlockIndexerStorageForNoInvertedIndexForInt(data);
-      }
-    }
-  }
-
-  private IndexStorage encodeAndCompressComplexDimension(byte[][] data) {
-    if (version == ColumnarFormatVersion.V3) {
-      return new BlockIndexerStorageForShort(data, false, false, false);
-    } else {
-      return new BlockIndexerStorageForInt(data, false, false, false);
-    }
-  }
-
-  private IndexStorage encodeAndCompressNoDictDimension(byte[][] data, boolean isSort,
-      boolean isUseInvertedIndex, boolean isRleApplicable) {
-    if (isUseInvertedIndex) {
-      if (version == ColumnarFormatVersion.V3) {
-        return new BlockIndexerStorageForShort(data, isRleApplicable, true, isSort);
-      } else {
-        return new BlockIndexerStorageForInt(data, isRleApplicable, true, isSort);
-      }
-    } else {
-      if (version == ColumnarFormatVersion.V3) {
-        return new BlockIndexerStorageForNoInvertedIndexForShort(data, true);
-      } else {
-        return new BlockIndexerStorageForNoInvertedIndexForInt(data);
-      }
-    }
-  }
-
-  // apply and compress each dimension, set encoded data in `encodedData`
-  private void encodeAndCompressDimensions(TablePage tablePage, EncodedData encodedData)
-      throws KeyGenException {
-    TableSpec.DimensionSpec dimensionSpec = model.getTableSpec().getDimensionSpec();
-    int dictionaryColumnCount = -1;
-    int noDictionaryColumnCount = -1;
-    int indexStorageOffset = 0;
-    IndexStorage[] indexStorages = new IndexStorage[dimensionSpec.getNumExpandedDimensions()];
-    Compressor compressor = CompressorFactory.getInstance().getCompressor();
-    byte[][] compressedColumns = new byte[indexStorages.length][];
-    for (int i = 0; i < dimensionSpec.getNumSimpleDimensions(); i++) {
-      byte[] flattened;
-      boolean isSortColumn = model.isSortColumn(i);
-      switch (dimensionSpec.getType(i)) {
-        case GLOBAL_DICTIONARY:
-          // dictionary dimension
-          indexStorages[indexStorageOffset] = encodeAndCompressDictDimension(
-              tablePage.getDictDimensionPage()[++dictionaryColumnCount].getByteArrayPage(),
-              isSortColumn, isUseInvertedIndex[i] & isSortColumn,
-              CarbonDataProcessorUtil.isRleApplicableForColumn(dimensionSpec.getType(i)));
-          flattened = ByteUtil.flatten(indexStorages[indexStorageOffset].getDataPage());
-          break;
-        case DIRECT_DICTIONARY:
-          // timestamp and date column
-          indexStorages[indexStorageOffset] = encodeAndCompressDirectDictDimension(
-              tablePage.getDictDimensionPage()[++dictionaryColumnCount].getByteArrayPage(),
-              isSortColumn, isUseInvertedIndex[i] & isSortColumn,
-              CarbonDataProcessorUtil.isRleApplicableForColumn(dimensionSpec.getType(i)));
-          flattened = ByteUtil.flatten(indexStorages[indexStorageOffset].getDataPage());
-          break;
-        case PLAIN_VALUE:
-          // high cardinality dimension, encoded as plain string
-          indexStorages[indexStorageOffset] = encodeAndCompressNoDictDimension(
-              tablePage.getNoDictDimensionPage()[++noDictionaryColumnCount].getByteArrayPage(),
-              isSortColumn, isUseInvertedIndex[i] & isSortColumn,
-              CarbonDataProcessorUtil.isRleApplicableForColumn(dimensionSpec.getType(i)));
-          flattened = ByteUtil.flatten(indexStorages[indexStorageOffset].getDataPage());
-          break;
-        case COMPLEX:
-          // we need to add complex column at last, so skipping it here
-          continue;
-        default:
-          throw new RuntimeException("unsupported dimension type: " + dimensionSpec.getType(i));
-      }
-      compressedColumns[indexStorageOffset] = compressor.compressByte(flattened);
-      indexStorageOffset++;
-    }
-
-    // handle complex type column
-    for (int i = 0; i < dimensionSpec.getNumComplexDimensions(); i++) {
-      Iterator<byte[][]> iterator = tablePage.getComplexDimensionPage()[i].iterator();
-      while (iterator.hasNext()) {
-        byte[][] data = iterator.next();
-        indexStorages[indexStorageOffset] = encodeAndCompressComplexDimension(data);
-        byte[] flattened = ByteUtil.flatten(indexStorages[indexStorageOffset].getDataPage());
-        compressedColumns[indexStorageOffset] = compressor.compressByte(flattened);
-        indexStorageOffset++;
-      }
-    }
-
-    encodedData.indexStorages = indexStorages;
-    encodedData.dimensions = compressedColumns;
-  }
-
-}

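The four encodeAndCompress* helpers deleted here (and re-homed in TablePage above) all reduce to the same two-axis decision: V3 pages are small enough to index rows with shorts while older formats use ints, and columns without an inverted index keep their original row order. A minimal standalone sketch of that decision matrix (the enum and string tags are hypothetical; the real methods return IndexStorage implementations):

    enum FormatVersion { V3, LEGACY }

    class IndexStorageChooser {
      static String choose(FormatVersion version, boolean useInvertedIndex) {
        if (useInvertedIndex) {
          // sorted storage plus a row-id inverted index
          return version == FormatVersion.V3
              ? "BlockIndexerStorageForShort"
              : "BlockIndexerStorageForInt";
        }
        // no inverted index: data stays in original row order
        return version == FormatVersion.V3
            ? "BlockIndexerStorageForNoInvertedIndexForShort"
            : "BlockIndexerStorageForNoInvertedIndexForInt";
      }
    }
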
http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/store/TablePageKey.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/TablePageKey.java b/processing/src/main/java/org/apache/carbondata/processing/store/TablePageKey.java
deleted file mode 100644
index a66575e..0000000
--- a/processing/src/main/java/org/apache/carbondata/processing/store/TablePageKey.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.processing.store;
-
-import org.apache.carbondata.core.datastore.row.CarbonRow;
-import org.apache.carbondata.core.datastore.row.WriteStepRowUtil;
-import org.apache.carbondata.core.keygenerator.KeyGenException;
-import org.apache.carbondata.processing.util.NonDictionaryUtil;
-
-public class TablePageKey {
-  private int pageSize;
-
-  private byte[][] currentNoDictionaryKey;
-
-  // MDK start key
-  private byte[] startKey;
-
-  // MDK end key
-  private byte[] endKey;
-
-  // startkey for no dictionary columns
-  private byte[][] noDictStartKey;
-
-  // endkey for no diciotn
-  private byte[][] noDictEndKey;
-
-  // startkey for no dictionary columns after packing into one column
-  private byte[] packedNoDictStartKey;
-
-  // endkey for no dictionary columns after packing into one column
-  private byte[] packedNoDictEndKey;
-
-  private CarbonFactDataHandlerModel model;
-
-  TablePageKey(CarbonFactDataHandlerModel model, int pageSize) {
-    this.model = model;
-    this.pageSize = pageSize;
-  }
-
-  /** update all keys based on the input row */
-  void update(int rowId, CarbonRow row) throws KeyGenException {
-    if (model.getNoDictionaryCount() > 0 || model.getComplexIndexMap().size() > 0) {
-      currentNoDictionaryKey = WriteStepRowUtil.getNoDictAndComplexDimension(row);
-    }
-    if (rowId == 0) {
-      startKey = WriteStepRowUtil.getMdk(row, model.getMDKeyGenerator());
-      noDictStartKey = currentNoDictionaryKey;
-    }
-    noDictEndKey = currentNoDictionaryKey;
-    if (rowId == pageSize - 1) {
-      endKey = WriteStepRowUtil.getMdk(row, model.getMDKeyGenerator());
-      finalizeKeys();
-    }
-  }
-
-  /** update all keys if SORT_COLUMNS option is used when creating table */
-  private void finalizeKeys() {
-    // If SORT_COLUMNS is used, may need to update start/end keys since the they may
-    // contains dictionary columns that are not in SORT_COLUMNS, which need to be removed from
-    // start/end key
-    int numberOfDictSortColumns = model.getSegmentProperties().getNumberOfDictSortColumns();
-    if (numberOfDictSortColumns > 0) {
-      // if SORT_COLUMNS contain dictionary columns
-      int[] keySize = model.getSegmentProperties().getFixedLengthKeySplitter().getBlockKeySize();
-      if (keySize.length > numberOfDictSortColumns) {
-        // if there are some dictionary columns that are not in SORT_COLUMNS, it will come to here
-        int newMdkLength = 0;
-        for (int i = 0; i < numberOfDictSortColumns; i++) {
-          newMdkLength += keySize[i];
-        }
-        byte[] newStartKeyOfSortKey = new byte[newMdkLength];
-        byte[] newEndKeyOfSortKey = new byte[newMdkLength];
-        System.arraycopy(startKey, 0, newStartKeyOfSortKey, 0, newMdkLength);
-        System.arraycopy(endKey, 0, newEndKeyOfSortKey, 0, newMdkLength);
-        startKey = newStartKeyOfSortKey;
-        endKey = newEndKeyOfSortKey;
-      }
-    } else {
-      startKey = new byte[0];
-      endKey = new byte[0];
-    }
-
-    // Do the same update for noDictionary start/end Key
-    int numberOfNoDictSortColumns = model.getSegmentProperties().getNumberOfNoDictSortColumns();
-    if (numberOfNoDictSortColumns > 0) {
-      // if sort_columns contain no-dictionary columns
-      if (noDictStartKey.length > numberOfNoDictSortColumns) {
-        byte[][] newNoDictionaryStartKey = new byte[numberOfNoDictSortColumns][];
-        byte[][] newNoDictionaryEndKey = new byte[numberOfNoDictSortColumns][];
-        System.arraycopy(
-            noDictStartKey, 0, newNoDictionaryStartKey, 0, numberOfNoDictSortColumns);
-        System.arraycopy(
-            noDictEndKey, 0, newNoDictionaryEndKey, 0, numberOfNoDictSortColumns);
-        noDictStartKey = newNoDictionaryStartKey;
-        noDictEndKey = newNoDictionaryEndKey;
-      }
-      packedNoDictStartKey =
-          NonDictionaryUtil.packByteBufferIntoSingleByteArray(noDictStartKey);
-      packedNoDictEndKey =
-          NonDictionaryUtil.packByteBufferIntoSingleByteArray(noDictEndKey);
-    }
-  }
-
-  public byte[] getStartKey() {
-    return startKey;
-  }
-
-  public byte[] getEndKey() {
-    return endKey;
-  }
-
-  public byte[] getNoDictStartKey() {
-    return packedNoDictStartKey;
-  }
-
-  public byte[] getNoDictEndKey() {
-    return packedNoDictEndKey;
-  }
-
-  public int getPageSize() {
-    return pageSize;
-  }
-}
\ No newline at end of file

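The key trimming in finalizeKeys() above (moved with the class into core) deserves a concrete illustration: when the table has dictionary columns outside SORT_COLUMNS, the start/end MDKs are cut down to the byte prefix that covers only the dictionary sort columns. A minimal standalone sketch under that reading (class and method names are hypothetical):

    class KeyTrimmer {
      // blockKeySizes[i] is the byte length of the i-th dictionary column's
      // part of the MDK; keep only the prefix spanning the sort columns
      static byte[] trimToSortColumns(byte[] mdk, int[] blockKeySizes,
          int numberOfDictSortColumns) {
        int prefixLength = 0;
        for (int i = 0; i < numberOfDictSortColumns; i++) {
          prefixLength += blockKeySizes[i];
        }
        byte[] trimmed = new byte[prefixLength];
        System.arraycopy(mdk, 0, trimmed, 0, prefixLength);
        return trimmed;
      }
    }
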
http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/store/TablePageStatistics.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/TablePageStatistics.java b/processing/src/main/java/org/apache/carbondata/processing/store/TablePageStatistics.java
deleted file mode 100644
index 13eaac9..0000000
--- a/processing/src/main/java/org/apache/carbondata/processing/store/TablePageStatistics.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.processing.store;
-
-import java.nio.ByteBuffer;
-import java.util.BitSet;
-
-import org.apache.carbondata.core.datastore.TableSpec;
-import org.apache.carbondata.core.datastore.columnar.IndexStorage;
-import org.apache.carbondata.core.datastore.page.ColumnPage;
-import org.apache.carbondata.core.datastore.page.encoding.EncodedData;
-import org.apache.carbondata.core.datastore.page.statistics.ColumnPageStatsVO;
-import org.apache.carbondata.core.datastore.page.statistics.MeasurePageStatsVO;
-
-// Statistics of dimension and measure column in a TablePage
-public class TablePageStatistics {
-
-  // number of dimension after complex column expanded
-  private int numDimensionsExpanded;
-
-  // min of each dimension column
-  private byte[][] dimensionMinValue;
-
-  // max of each dimension column
-  private byte[][] dimensionMaxValue;
-
-  // min of each measure column
-  private byte[][] measureMinValue;
-
-  // max os each measure column
-  private byte[][] measureMaxValue;
-
-  // null bit set for each measure column
-  private BitSet[] nullBitSet;
-
-  // measure stats
-  // TODO: there are redundant stats
-  private MeasurePageStatsVO measurePageStatistics;
-
-  private TableSpec tableSpec;
-
-  TablePageStatistics(TableSpec tableSpec, TablePage tablePage,
-      EncodedData encodedData, MeasurePageStatsVO measurePageStatistics) {
-    this.numDimensionsExpanded = tableSpec.getDimensionSpec().getNumExpandedDimensions();
-    int numMeasures = tableSpec.getMeasureSpec().getNumMeasures();
-    this.dimensionMinValue = new byte[numDimensionsExpanded][];
-    this.dimensionMaxValue = new byte[numDimensionsExpanded][];
-    this.measureMinValue = new byte[numMeasures][];
-    this.measureMaxValue = new byte[numMeasures][];
-    this.nullBitSet = new BitSet[numMeasures];
-    this.tableSpec = tableSpec;
-    this.measurePageStatistics = measurePageStatistics;
-    updateMinMax(tablePage, encodedData);
-    updateNullBitSet(tablePage);
-  }
-
-  private void updateMinMax(TablePage tablePage, EncodedData encodedData) {
-    IndexStorage[] keyStorageArray = encodedData.indexStorages;
-    byte[][] measureArray = encodedData.measures;
-
-    for (int i = 0; i < numDimensionsExpanded; i++) {
-      switch (tableSpec.getDimensionSpec().getType(i)) {
-        case GLOBAL_DICTIONARY:
-        case DIRECT_DICTIONARY:
-        case COLUMN_GROUP:
-        case COMPLEX:
-          dimensionMinValue[i] = keyStorageArray[i].getMin();
-          dimensionMaxValue[i] = keyStorageArray[i].getMax();
-          break;
-        case PLAIN_VALUE:
-          dimensionMinValue[i] = updateMinMaxForNoDictionary(keyStorageArray[i].getMin());
-          dimensionMaxValue[i] = updateMinMaxForNoDictionary(keyStorageArray[i].getMax());
-          break;
-      }
-    }
-    for (int i = 0; i < measureArray.length; i++) {
-      ColumnPageStatsVO stats = tablePage.getMeasurePage()[i].getStatistics();
-      measureMaxValue[i] = stats.minBytes();
-      measureMinValue[i] = stats.maxBytes();
-    }
-  }
-
-  private void updateNullBitSet(TablePage tablePage) {
-    nullBitSet = new BitSet[tablePage.getMeasurePage().length];
-    ColumnPage[] measurePages = tablePage.getMeasurePage();
-    for (int i = 0; i < nullBitSet.length; i++) {
-      nullBitSet[i] = measurePages[i].getNullBitSet();
-    }
-  }
-
-  /**
-   * Below method will be used to update the min or max value
-   * by removing the length from it
-   *
-   * @return min max value without length
-   */
-  private byte[] updateMinMaxForNoDictionary(byte[] valueWithLength) {
-    ByteBuffer buffer = ByteBuffer.wrap(valueWithLength);
-    byte[] actualValue = new byte[buffer.getShort()];
-    buffer.get(actualValue);
-    return actualValue;
-  }
-
-  public byte[][] getDimensionMinValue() {
-    return dimensionMinValue;
-  }
-
-  public byte[][] getDimensionMaxValue() {
-    return dimensionMaxValue;
-  }
-
-  public byte[][] getMeasureMinValue() {
-    return measureMinValue;
-  }
-
-  public byte[][] getMeasureMaxValue() {
-    return measureMaxValue;
-  }
-
-  public BitSet[] getNullBitSet() {
-    return nullBitSet;
-  }
-
-  public MeasurePageStatsVO getMeasurePageStatistics() {
-    return measurePageStatistics;
-  }
-}

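One detail of the deleted class survives in spirit: no-dictionary values are stored as a 2-byte length followed by the payload, so min/max stats must strip the prefix before comparisons make sense. A minimal standalone sketch of that stripping, mirroring updateMinMaxForNoDictionary above:

    import java.nio.ByteBuffer;

    class NoDictMinMax {
      static byte[] stripLengthPrefix(byte[] valueWithLength) {
        ByteBuffer buffer = ByteBuffer.wrap(valueWithLength);
        byte[] actualValue = new byte[buffer.getShort()];  // first 2 bytes = length
        buffer.get(actualValue);                           // rest = payload
        return actualValue;
      }
    }
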
http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
index 3756273..b83a82a 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
@@ -44,6 +44,7 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
 import org.apache.carbondata.core.keygenerator.mdkey.NumberCompressor;
 import org.apache.carbondata.core.metadata.BlockletInfoColumnar;
 import org.apache.carbondata.core.metadata.CarbonMetadata;
@@ -583,7 +584,7 @@ public abstract class AbstractFactDataWriter<T> implements CarbonFactDataWriter<
    *
    * @throws CarbonDataWriterException
    */
-  @Override public void writeBlockletInfoToFile() throws CarbonDataWriterException {
+  @Override public void writeFooterToFile() throws CarbonDataWriterException {
     if (this.blockletInfoList.size() > 0) {
       writeBlockletInfoToFile(fileChannel, carbonDataFileTempPath);
     }
@@ -597,7 +598,8 @@ public abstract class AbstractFactDataWriter<T> implements CarbonFactDataWriter<
    * @throws CarbonDataWriterException
    * @throws CarbonDataWriterException throws new CarbonDataWriterException if any problem
    */
-  public abstract void writeBlockletData(NodeHolder nodeHolder) throws CarbonDataWriterException;
+  public abstract void writeTablePage(EncodedTablePage encodedTablePage)
+      throws CarbonDataWriterException;
 
   /**
    * Below method will be used to update the min or max value
@@ -613,36 +615,6 @@ public abstract class AbstractFactDataWriter<T> implements CarbonFactDataWriter<
   }
 
   /**
-   * Below method will be used to update the no dictionary start and end key
-   *
-   * @param key key to be updated
-   * @return return no dictionary key
-   */
-  protected byte[] updateNoDictionaryStartAndEndKey(byte[] key) {
-    if (key.length == 0) {
-      return key;
-    }
-    // add key to byte buffer remove the length part of the data
-    ByteBuffer buffer = ByteBuffer.wrap(key, 2, key.length - 2);
-    // create a output buffer without length
-    ByteBuffer output = ByteBuffer.allocate(key.length - 2);
-    short numberOfByteToStorLength = 2;
-    // as length part is removed, so each no dictionary value index
-    // needs to be reshuffled by 2 bytes
-    int numberOfNoDictSortColumns =
-        dataWriterVo.getSegmentProperties().getNumberOfNoDictSortColumns();
-    for (int i = 0; i < numberOfNoDictSortColumns; i++) {
-      output.putShort((short) (buffer.getShort() - numberOfByteToStorLength));
-    }
-    // copy the data part
-    while (buffer.hasRemaining()) {
-      output.put(buffer.get());
-    }
-    output.rewind();
-    return output.array();
-  }
-
-  /**
    * This method will copy the carbon data file from local store location to
    * carbon store location
    */

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonFactDataWriter.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonFactDataWriter.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonFactDataWriter.java
index 56ee762..f194f74 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonFactDataWriter.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonFactDataWriter.java
@@ -18,35 +18,21 @@
 package org.apache.carbondata.processing.store.writer;
 
 import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
-import org.apache.carbondata.core.datastore.page.encoding.EncodedData;
-import org.apache.carbondata.core.util.NodeHolder;
-import org.apache.carbondata.processing.store.TablePageKey;
-import org.apache.carbondata.processing.store.TablePageStatistics;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
 
 public interface CarbonFactDataWriter<T> {
 
   /**
-   * This method will be used to create NodeHolder for a table page
+   * write an encoded table page
    */
-
-  NodeHolder buildDataNodeHolder(EncodedData encoded, TablePageStatistics stats,
-      TablePageKey key) throws CarbonDataWriterException;
-
-  /**
-   * If node holder flag is enabled the object will be added to list
-   * and all the blocklets will be return together. If disabled then this
-   * method will itself will call for writing the fact data
-   *
-   * @param holder
-   */
-  void writeBlockletData(NodeHolder holder) throws CarbonDataWriterException;
+  void writeTablePage(EncodedTablePage encodedTablePage) throws CarbonDataWriterException;
 
   /**
    * Below method will be used to write the leaf meta data to file
    *
    * @throws CarbonDataWriterException
    */
-  void writeBlockletInfoToFile() throws CarbonDataWriterException;
+  void writeFooterToFile() throws CarbonDataWriterException;
 
   /**
    * Below method will be used to initialise the writer

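For reference, the reworked interface narrows the write path to one call per encoded page plus a single footer call at the end of the file. Below is a minimal caller sketch; flushBlocklet, writer and pages are illustrative names only, not part of this patch:

import java.util.List;

import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
import org.apache.carbondata.core.datastore.page.EncodedTablePage;
import org.apache.carbondata.processing.store.writer.CarbonFactDataWriter;

class WriterDriverSketch {
  // write every encoded page, then the footer, mirroring the new two-method contract
  static void flushBlocklet(CarbonFactDataWriter<int[]> writer, List<EncodedTablePage> pages)
      throws CarbonDataWriterException {
    for (EncodedTablePage page : pages) {
      writer.writeTablePage(page);
    }
    writer.writeFooterToFile();
  }
}
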

[3/7] carbondata git commit: [CARBONDATA-1098] Change page statistics use exact type and use column page in writer

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java
new file mode 100644
index 0000000..73ada4b
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/PrimitivePageStatsCollector.java
@@ -0,0 +1,294 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.statistics;
+
+import java.math.BigDecimal;
+import java.util.BitSet;
+
+import org.apache.carbondata.core.metadata.ColumnPageCodecMeta;
+import org.apache.carbondata.core.metadata.ValueEncoderMeta;
+import org.apache.carbondata.core.metadata.datatype.DataType;
+
+/** statistics for a primitive column page */
+public class PrimitivePageStatsCollector implements ColumnPageStatsCollector, SimpleStatsResult {
+  private DataType dataType;
+  private byte minByte, maxByte;
+  private short minShort, maxShort;
+  private int minInt, maxInt;
+  private long minLong, maxLong;
+  private double minDouble, maxDouble;
+
+  // scale of the double value
+  private int decimal;
+
+  // bit at a rowId's index is set to 1 if that row's value is null
+  private BitSet nullBitSet;
+
+  // this is for encode flow
+  public static PrimitivePageStatsCollector newInstance(DataType dataType, int pageSize) {
+    switch (dataType) {
+      default:
+        return new PrimitivePageStatsCollector(dataType, pageSize);
+    }
+  }
+
+  // this is for the decode flow; nullBits are not needed, so pass 0 as pageSize
+  public static PrimitivePageStatsCollector newInstance(ColumnPageCodecMeta meta) {
+    PrimitivePageStatsCollector instance =
+        new PrimitivePageStatsCollector(meta.getSrcDataType(), 0);
+    // set min max from meta
+    switch (meta.getSrcDataType()) {
+      case BYTE:
+        instance.minByte = (byte) meta.getMinValue();
+        instance.maxByte = (byte) meta.getMaxValue();
+        break;
+      case SHORT:
+        instance.minShort = (short) meta.getMinValue();
+        instance.maxShort = (short) meta.getMaxValue();
+        break;
+      case INT:
+        instance.minInt = (int) meta.getMinValue();
+        instance.maxInt = (int) meta.getMaxValue();
+        break;
+      case LONG:
+        instance.minLong = (long) meta.getMinValue();
+        instance.maxLong = (long) meta.getMaxValue();
+        break;
+      case DOUBLE:
+        instance.minDouble = (double) meta.getMinValue();
+        instance.maxDouble = (double) meta.getMaxValue();
+        instance.decimal = meta.getDecimal();
+        break;
+    }
+    return instance;
+  }
+
+  public static PrimitivePageStatsCollector newInstance(ValueEncoderMeta meta) {
+    PrimitivePageStatsCollector instance =
+        new PrimitivePageStatsCollector(meta.getType(), 0);
+    // set min max from meta
+    switch (meta.getType()) {
+      case BYTE:
+        instance.minByte = (byte) meta.getMinValue();
+        instance.maxByte = (byte) meta.getMaxValue();
+        break;
+      case SHORT:
+        instance.minShort = (short) meta.getMinValue();
+        instance.maxShort = (short) meta.getMaxValue();
+        break;
+      case INT:
+        instance.minInt = (int) meta.getMinValue();
+        instance.maxInt = (int) meta.getMaxValue();
+        break;
+      case LONG:
+        instance.minLong = (long) meta.getMinValue();
+        instance.maxLong = (long) meta.getMaxValue();
+        break;
+      case DOUBLE:
+        instance.minDouble = (double) meta.getMinValue();
+        instance.maxDouble = (double) meta.getMaxValue();
+        instance.decimal = meta.getDecimal();
+        break;
+    }
+    return instance;
+  }
+
+  private PrimitivePageStatsCollector(DataType dataType, int pageSize) {
+    this.dataType = dataType;
+    this.nullBitSet = new BitSet(pageSize);
+    switch (dataType) {
+      case BYTE:
+        minByte = Byte.MAX_VALUE;
+        maxByte = Byte.MIN_VALUE;
+        break;
+      case SHORT:
+        minShort = Short.MAX_VALUE;
+        maxShort = Short.MIN_VALUE;
+        break;
+      case INT:
+        minInt = Integer.MAX_VALUE;
+        maxInt = Integer.MIN_VALUE;
+        break;
+      case LONG:
+        minLong = Long.MAX_VALUE;
+        maxLong = Long.MIN_VALUE;
+        break;
+      case DOUBLE:
+        minDouble = Double.MAX_VALUE;
+        maxDouble = -Double.MAX_VALUE; // Double.MIN_VALUE is the smallest positive double, not the most negative
+        decimal = 0;
+        break;
+      case DECIMAL:
+    }
+  }
+
+  @Override
+  public void updateNull(int rowId) {
+    nullBitSet.set(rowId);
+    long value = 0;
+    switch (dataType) {
+      case BYTE:
+        update((byte) value);
+        break;
+      case SHORT:
+        update((short) value);
+        break;
+      case INT:
+        update((int) value);
+        break;
+      case LONG:
+        update(value);
+        break;
+      case DOUBLE:
+        update(0d);
+        break;
+    }
+  }
+
+  @Override
+  public void update(byte value) {
+    if (minByte > value) {
+      minByte = value;
+    }
+    if (maxByte < value) {
+      maxByte = value;
+    }
+  }
+
+  @Override
+  public void update(short value) {
+    if (minShort > value) {
+      minShort = value;
+    }
+    if (maxShort < value) {
+      maxShort = value;
+    }
+  }
+
+  @Override
+  public void update(int value) {
+    if (minInt > value) {
+      minInt = value;
+    }
+    if (maxInt < value) {
+      maxInt = value;
+    }
+  }
+
+  @Override
+  public void update(long value) {
+    if (minLong > value) {
+      minLong = value;
+    }
+    if (maxLong < value) {
+      maxLong = value;
+    }
+  }
+
+  @Override
+  public void update(double value) {
+    if (minDouble > value) {
+      minDouble = value;
+    }
+    if (maxDouble < value) {
+      maxDouble = value;
+    }
+    int scale = BigDecimal.valueOf(value).scale();
+    if (scale < 0) {
+      decimal = scale;
+    } else {
+      decimal = Math.max(decimal, scale);
+    }
+  }
+
+  @Override
+  public void update(byte[] value) {
+  }
+
+  @Override
+  public Object getPageStats() {
+    return this;
+  }
+
+  @Override
+  public String toString() {
+    switch (dataType) {
+      case BYTE:
+        return String.format("min: %s, max: %s, decimal: %s ", minByte, maxByte, decimal);
+      case SHORT:
+        return String.format("min: %s, max: %s, decimal: %s ", minShort, maxShort, decimal);
+      case INT:
+        return String.format("min: %s, max: %s, decimal: %s ", minInt, maxInt, decimal);
+      case LONG:
+        return String.format("min: %s, max: %s, decimal: %s ", minLong, maxLong, decimal);
+      case DOUBLE:
+        return String.format("min: %s, max: %s, decimal: %s ", minDouble, maxDouble, decimal);
+    }
+    return super.toString();
+  }
+
+  @Override
+  public Object getMin() {
+    switch (dataType) {
+      case BYTE:
+        return minByte;
+      case SHORT:
+        return minShort;
+      case INT:
+        return minInt;
+      case LONG:
+        return minLong;
+      case DOUBLE:
+        return minDouble;
+    }
+    return null;
+  }
+
+  @Override
+  public Object getMax() {
+    switch (dataType) {
+      case BYTE:
+        return maxByte;
+      case SHORT:
+        return maxShort;
+      case INT:
+        return maxInt;
+      case LONG:
+        return maxLong;
+      case DOUBLE:
+        return maxDouble;
+    }
+    return null;
+  }
+
+  @Override
+  public BitSet getNullBits() {
+    return nullBitSet;
+  }
+
+  @Override
+  public int getDecimalPoint() {
+    return decimal;
+  }
+
+  @Override
+  public DataType getDataType() {
+    return dataType;
+  }
+
+}
\ No newline at end of file

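To make the collector concrete, here is a small usage sketch for a DOUBLE page; the class name and values are made up, and the expected results follow from the update logic above:

import org.apache.carbondata.core.datastore.page.statistics.PrimitivePageStatsCollector;
import org.apache.carbondata.core.metadata.datatype.DataType;

class DoubleStatsSketch {
  public static void main(String[] args) {
    PrimitivePageStatsCollector stats = PrimitivePageStatsCollector.newInstance(DataType.DOUBLE, 3);
    stats.update(1.25);   // BigDecimal scale 2, so decimal becomes 2
    stats.update(-3.5);   // scale 1; decimal keeps the maximum scale seen (2)
    stats.updateNull(2);  // row 2 is flagged in the null bit set and 0d is folded into min/max
    // stats.getMin() is -3.5, stats.getMax() is 1.25,
    // stats.getDecimalPoint() is 2, stats.getNullBits().get(2) is true
  }
}
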
http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java
new file mode 100644
index 0000000..1db86ff
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/SimpleStatsResult.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.statistics;
+
+import java.util.BitSet;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+
+public interface SimpleStatsResult {
+
+  Object getMin();
+
+  Object getMax();
+
+  BitSet getNullBits();
+
+  int getDecimalPoint();
+
+  DataType getDataType();
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/TablePageStatistics.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/TablePageStatistics.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/TablePageStatistics.java
new file mode 100644
index 0000000..07de9c0
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/TablePageStatistics.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.statistics;
+
+import java.nio.ByteBuffer;
+import java.util.BitSet;
+
+import org.apache.carbondata.core.datastore.columnar.IndexStorage;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedDimensionPage;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedMeasurePage;
+import org.apache.carbondata.core.metadata.ColumnPageCodecMeta;
+import org.apache.carbondata.core.metadata.ValueEncoderMeta;
+import org.apache.carbondata.core.util.CarbonUtil;
+
+// Statistics of the dimension and measure columns in a TablePage
+public class TablePageStatistics {
+
+  // number of dimension columns after complex columns are expanded
+  private int numDimensionsExpanded;
+
+  // min of each dimension column
+  private byte[][] dimensionMinValue;
+
+  // max of each dimension column
+  private byte[][] dimensionMaxValue;
+
+  // min of each measure column
+  private byte[][] measureMinValue;
+
+  // max of each measure column
+  private byte[][] measureMaxValue;
+
+  // null bit set for each measure column
+  private BitSet[] nullBitSet;
+
+  public TablePageStatistics(EncodedDimensionPage[] dimensions,
+      EncodedMeasurePage[] measures) {
+    this.numDimensionsExpanded = dimensions.length;
+    int numMeasures = measures.length;
+    this.dimensionMinValue = new byte[numDimensionsExpanded][];
+    this.dimensionMaxValue = new byte[numDimensionsExpanded][];
+    this.measureMinValue = new byte[numMeasures][];
+    this.measureMaxValue = new byte[numMeasures][];
+    this.nullBitSet = new BitSet[numMeasures];
+    updateDimensionMinMax(dimensions);
+    updateMeasureMinMax(measures);
+  }
+
+  private void updateDimensionMinMax(EncodedDimensionPage[] dimensions) {
+    for (int i = 0; i < dimensions.length; i++) {
+      IndexStorage keyStorageArray = dimensions[i].getIndexStorage();
+      switch (dimensions[i].getDimensionType()) {
+        case GLOBAL_DICTIONARY:
+        case DIRECT_DICTIONARY:
+        case COLUMN_GROUP:
+        case COMPLEX:
+          dimensionMinValue[i] = keyStorageArray.getMin();
+          dimensionMaxValue[i] = keyStorageArray.getMax();
+          break;
+        case PLAIN_VALUE:
+          dimensionMinValue[i] = updateMinMaxForNoDictionary(keyStorageArray.getMin());
+          dimensionMaxValue[i] = updateMinMaxForNoDictionary(keyStorageArray.getMax());
+          break;
+      }
+    }
+  }
+
+  private void updateMeasureMinMax(EncodedMeasurePage[] measures) {
+    for (int i = 0; i < measures.length; i++) {
+      ValueEncoderMeta meta = measures[i].getMetaData();
+      if (meta instanceof ColumnPageCodecMeta) {
+        ColumnPageCodecMeta metadata = (ColumnPageCodecMeta) meta;
+        measureMaxValue[i] = metadata.getMaxAsBytes();
+        measureMinValue[i] = metadata.getMinAsBytes();
+      } else {
+        measureMaxValue[i] = CarbonUtil.getMaxValueAsBytes(meta);
+        measureMinValue[i] = CarbonUtil.getMinValueAsBytes(meta);
+      }
+      nullBitSet[i] = measures[i].getNullBitSet();
+    }
+  }
+
+  /**
+   * Below method will be used to update the min or max value
+   * by removing the length from it
+   *
+   * @return min max value without length
+   */
+  public static byte[] updateMinMaxForNoDictionary(byte[] valueWithLength) {
+    ByteBuffer buffer = ByteBuffer.wrap(valueWithLength);
+    byte[] actualValue = new byte[buffer.getShort()];
+    buffer.get(actualValue);
+    return actualValue;
+  }
+
+  public byte[][] getDimensionMinValue() {
+    return dimensionMinValue;
+  }
+
+  public byte[][] getDimensionMaxValue() {
+    return dimensionMaxValue;
+  }
+
+  public byte[][] getMeasureMinValue() {
+    return measureMinValue;
+  }
+
+  public byte[][] getMeasureMaxValue() {
+    return measureMaxValue;
+  }
+
+  public BitSet[] getNullBitSet() {
+    return nullBitSet;
+  }
+}

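The PLAIN_VALUE branch above depends on no-dictionary values carrying a 2-byte length prefix on the write path; updateMinMaxForNoDictionary strips that prefix so only the payload takes part in min/max comparison. A self-contained illustration (the demo class and its bytes are hypothetical):

import java.nio.ByteBuffer;

import org.apache.carbondata.core.datastore.page.statistics.TablePageStatistics;

class NoDictMinMaxSketch {
  public static void main(String[] args) {
    // layout as stored: [2-byte length][payload]
    byte[] withLength = ByteBuffer.allocate(2 + 3)
        .putShort((short) 3)
        .put(new byte[] {'a', 'b', 'c'})
        .array();
    byte[] payload = TablePageStatistics.updateMinMaxForNoDictionary(withLength);
    // payload is {'a', 'b', 'c'}; the length prefix no longer skews the comparison
  }
}
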
http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/VarLengthPageStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/VarLengthPageStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/VarLengthPageStatsCollector.java
new file mode 100644
index 0000000..e985f90
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/VarLengthPageStatsCollector.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.statistics;
+
+import java.util.BitSet;
+
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.util.ByteUtil;
+
+public class VarLengthPageStatsCollector implements ColumnPageStatsCollector {
+
+  private byte[] min, max;
+
+  public static VarLengthPageStatsCollector newInstance() {
+    return new VarLengthPageStatsCollector();
+  }
+
+  private VarLengthPageStatsCollector() {
+  }
+
+  @Override
+  public void updateNull(int rowId) {
+
+  }
+
+  @Override
+  public void update(byte value) {
+
+  }
+
+  @Override
+  public void update(short value) {
+
+  }
+
+  @Override
+  public void update(int value) {
+
+  }
+
+  @Override
+  public void update(long value) {
+
+  }
+
+  @Override
+  public void update(double value) {
+
+  }
+
+  @Override
+  public void update(byte[] value) {
+    if (min == null && max == null) {
+      min = value;
+      max = value;
+    } else {
+      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, value) > 0) {
+        min = value;
+      }
+      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, value) < 0) {
+        max = value;
+      }
+    }
+  }
+
+  @Override
+  public Object getPageStats() {
+    // for binary type, only raw min/max bytes are tracked; null bits and decimal are not collected
+    return new SimpleStatsResult() {
+
+      @Override public Object getMin() {
+        return min;
+      }
+
+      @Override public Object getMax() {
+        return max;
+      }
+
+      @Override public BitSet getNullBits() {
+        return null;
+      }
+
+      @Override public int getDecimalPoint() {
+        return 0;
+      }
+
+      @Override public DataType getDataType() {
+        return null;
+      }
+    };
+  }
+}

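Since this collector only reacts to update(byte[]), a usage sketch is short; ordering comes from ByteUtil.UnsafeComparer, i.e. unsigned lexicographic byte order (the strings and class name are illustrative):

import java.nio.charset.StandardCharsets;

import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
import org.apache.carbondata.core.datastore.page.statistics.VarLengthPageStatsCollector;

class VarLengthStatsSketch {
  public static void main(String[] args) {
    VarLengthPageStatsCollector collector = VarLengthPageStatsCollector.newInstance();
    collector.update("banana".getBytes(StandardCharsets.UTF_8));
    collector.update("apple".getBytes(StandardCharsets.UTF_8));
    collector.update("cherry".getBytes(StandardCharsets.UTF_8));
    SimpleStatsResult stats = (SimpleStatsResult) collector.getPageStats();
    // stats.getMin() holds the bytes of "apple" and stats.getMax() those of "cherry";
    // getNullBits() and getDataType() return null by design here
  }
}
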
http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/metadata/BlockletInfoColumnar.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/BlockletInfoColumnar.java b/core/src/main/java/org/apache/carbondata/core/metadata/BlockletInfoColumnar.java
index 47df6a5..da8a33d 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/BlockletInfoColumnar.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/BlockletInfoColumnar.java
@@ -19,10 +19,13 @@ package org.apache.carbondata.core.metadata;
 
 import java.util.BitSet;
 
-import org.apache.carbondata.core.datastore.page.statistics.MeasurePageStatsVO;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
 
+// It is used for the V1 and V2 formats only
 public class BlockletInfoColumnar {
 
+  private EncodedTablePage encodedTablePage;
+
   /**
    * measureOffset.
    */
@@ -85,8 +88,6 @@ public class BlockletInfoColumnar {
 
   private boolean[] aggKeyBlock;
 
-  private MeasurePageStatsVO stats;
-
   /**
    * column min array
    */
@@ -98,11 +99,6 @@ public class BlockletInfoColumnar {
   private byte[][] columnMinData;
 
   /**
-   * true if given index is colgroup block
-   */
-  private boolean[] colGrpBlock;
-
-  /**
    * bit set which will holds the measure
    * indexes which are null
    */
@@ -317,20 +313,6 @@ public class BlockletInfoColumnar {
   }
 
   /**
-   * @return
-   */
-  public boolean[] getColGrpBlocks() {
-    return this.colGrpBlock;
-  }
-
-  /**
-   * @param colGrpBlock
-   */
-  public void setColGrpBlocks(boolean[] colGrpBlock) {
-    this.colGrpBlock = colGrpBlock;
-  }
-
-  /**
    * @return the measureNullValueIndex
    */
   public BitSet[] getMeasureNullValueIndex() {
@@ -344,11 +326,11 @@ public class BlockletInfoColumnar {
     this.measureNullValueIndex = measureNullValueIndex;
   }
 
-  public MeasurePageStatsVO getStats() {
-    return stats;
+  public void setEncodedTablePage(EncodedTablePage encodedData) {
+    this.encodedTablePage = encodedData;
   }
 
-  public void setStats(MeasurePageStatsVO stats) {
-    this.stats = stats;
+  public EncodedTablePage getEncodedTablePage() {
+    return encodedTablePage;
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/metadata/CodecMetaFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/CodecMetaFactory.java b/core/src/main/java/org/apache/carbondata/core/metadata/CodecMetaFactory.java
new file mode 100644
index 0000000..ac83333
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/CodecMetaFactory.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.metadata;
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.util.CarbonProperties;
+
+import static org.apache.carbondata.core.metadata.datatype.DataType.*;
+import static org.apache.carbondata.core.metadata.datatype.DataType.LONG;
+
+public class CodecMetaFactory {
+
+  private static final ColumnarFormatVersion version =
+      CarbonProperties.getInstance().getFormatVersion();
+
+  public static ValueEncoderMeta createMeta() {
+    switch (version) {
+      case V1:
+      case V2:
+        return new ValueEncoderMeta();
+      case V3:
+        return ColumnPageCodecMeta.newInstance();
+      default:
+        throw new UnsupportedOperationException("unsupported version: " + version);
+    }
+  }
+
+  public static ValueEncoderMeta createMeta(SimpleStatsResult stats, DataType targetDataType) {
+    switch (version) {
+      case V1:
+      case V2:
+        ValueEncoderMeta meta = new ValueEncoderMeta();
+        switch (stats.getDataType()) {
+          case SHORT:
+            meta.setMaxValue((long)(short) stats.getMax());
+            meta.setMinValue((long)(short) stats.getMin());
+            break;
+          case INT:
+            meta.setMaxValue((long)(int) stats.getMax());
+            meta.setMinValue((long)(int) stats.getMin());
+            break;
+          default:
+            meta.setMaxValue(stats.getMax());
+            meta.setMinValue(stats.getMin());
+            break;
+        }
+        meta.setDecimal(stats.getDecimalPoint());
+        meta.setType(converType(stats.getDataType()));
+        return meta;
+      case V3:
+        return ColumnPageCodecMeta.newInstance(stats, targetDataType);
+      default:
+        throw new UnsupportedOperationException("unsupported version: " + version);
+    }
+  }
+
+  public static char converType(DataType type) {
+    switch (type) {
+      case BYTE:
+      case SHORT:
+      case INT:
+      case LONG:
+        return CarbonCommonConstants.BIG_INT_MEASURE;
+      case DOUBLE:
+        return CarbonCommonConstants.DOUBLE_MEASURE;
+      case DECIMAL:
+        return CarbonCommonConstants.BIG_DECIMAL_MEASURE;
+      default:
+        throw new RuntimeException("Unexpected type: " + type);
+    }
+  }
+
+}

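The factory branches on the format version it reads once from CarbonProperties, so the result of createMeta is configuration-dependent. A sketch of the V1/V2 path, where SHORT and INT min/max are widened to long before being stored (class name and values are illustrative; on V3 the same call returns a ColumnPageCodecMeta instead):

import org.apache.carbondata.core.datastore.page.statistics.PrimitivePageStatsCollector;
import org.apache.carbondata.core.metadata.CodecMetaFactory;
import org.apache.carbondata.core.metadata.ValueEncoderMeta;
import org.apache.carbondata.core.metadata.datatype.DataType;

class CodecMetaSketch {
  public static void main(String[] args) {
    // PrimitivePageStatsCollector implements SimpleStatsResult, so it can be passed directly
    PrimitivePageStatsCollector stats = PrimitivePageStatsCollector.newInstance(DataType.INT, 2);
    stats.update(42);
    stats.update(-7);
    ValueEncoderMeta meta = CodecMetaFactory.createMeta(stats, DataType.INT);
    // with carbon.data.file.version = V1 or V2:
    //   meta.getMinValue() is -7L and meta.getMaxValue() is 42L (widened from int),
    //   meta.getType() is CarbonCommonConstants.BIG_INT_MEASURE
  }
}
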
http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/metadata/ColumnPageCodecMeta.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/ColumnPageCodecMeta.java b/core/src/main/java/org/apache/carbondata/core/metadata/ColumnPageCodecMeta.java
new file mode 100644
index 0000000..20a7568
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/ColumnPageCodecMeta.java
@@ -0,0 +1,270 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.metadata;
+
+import java.io.Serializable;
+import java.nio.ByteBuffer;
+import java.util.BitSet;
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
+import org.apache.carbondata.core.metadata.datatype.DataType;
+
+/**
+ * It holds metadata for one column page
+ */
+public class ColumnPageCodecMeta extends ValueEncoderMeta implements Serializable {
+
+  private BitSet nullBitSet;
+
+  private DataType srcDataType;
+
+  private DataType targetDataType;
+
+  public static final char BYTE_VALUE_MEASURE = 'c';
+  public static final char SHORT_VALUE_MEASURE = 'j';
+  public static final char INT_VALUE_MEASURE = 'k';
+  public static final char BIG_INT_MEASURE = 'd';
+  public static final char DOUBLE_MEASURE = 'n';
+  public static final char BIG_DECIMAL_MEASURE = 'b';
+
+  static ColumnPageCodecMeta newInstance() {
+    return new ColumnPageCodecMeta();
+  }
+
+  static ColumnPageCodecMeta newInstance(
+      SimpleStatsResult stats, DataType targetDataType) {
+    ColumnPageCodecMeta meta = new ColumnPageCodecMeta();
+    meta.srcDataType = stats.getDataType();
+    meta.targetDataType = targetDataType;
+    meta.nullBitSet = stats.getNullBits();
+    meta.setType(CodecMetaFactory.converType(stats.getDataType()));
+    meta.setMaxValue(stats.getMax());
+    meta.setMinValue(stats.getMin());
+    meta.setDecimal(stats.getDecimalPoint());
+    return meta;
+  }
+
+  public DataType getTargetDataType() {
+    return targetDataType;
+  }
+
+  public void setSrcDataType(char type) {
+    switch (type) {
+      case BYTE_VALUE_MEASURE:
+        srcDataType = DataType.BYTE;
+        break;
+      case SHORT_VALUE_MEASURE:
+        srcDataType = DataType.SHORT;
+        break;
+      case INT_VALUE_MEASURE:
+        srcDataType = DataType.INT;
+        break;
+      case BIG_INT_MEASURE:
+        srcDataType = DataType.LONG;
+        break;
+      case DOUBLE_MEASURE:
+        srcDataType = DataType.DOUBLE;
+        break;
+      case BIG_DECIMAL_MEASURE:
+        srcDataType = DataType.DECIMAL;
+        break;
+      default:
+        throw new RuntimeException("Unexpected type: " + type);
+    }
+  }
+
+  private char getSrcDataTypeInChar() {
+    switch (srcDataType) {
+      case BYTE:
+        return BYTE_VALUE_MEASURE;
+      case SHORT:
+        return SHORT_VALUE_MEASURE;
+      case INT:
+        return INT_VALUE_MEASURE;
+      case LONG:
+        return BIG_INT_MEASURE;
+      case DOUBLE:
+        return DOUBLE_MEASURE;
+      case DECIMAL:
+        return BIG_DECIMAL_MEASURE;
+      default:
+        throw new RuntimeException("Unexpected type: " + srcDataType);
+    }
+  }
+
+  public BitSet getNullBitSet() {
+    return nullBitSet;
+  }
+
+  public void setNullBitSet(BitSet nullBitSet) {
+    this.nullBitSet = nullBitSet;
+  }
+
+  public DataType getSrcDataType() {
+    return srcDataType;
+  }
+
+  public byte[] serialize() {
+    ByteBuffer buffer = null;
+    switch (srcDataType) {
+      case BYTE:
+        buffer = ByteBuffer.allocate(
+            (CarbonCommonConstants.LONG_SIZE_IN_BYTE * 3) + CarbonCommonConstants.INT_SIZE_IN_BYTE
+                + 3);
+        buffer.putChar(getSrcDataTypeInChar());
+        buffer.put((byte) getMaxValue());
+        buffer.put((byte) getMinValue());
+        buffer.putLong((Long) 0L); // unique value is obsolete, written only for compatibility
+        break;
+      case SHORT:
+        buffer = ByteBuffer.allocate(
+            (CarbonCommonConstants.LONG_SIZE_IN_BYTE * 3) + CarbonCommonConstants.INT_SIZE_IN_BYTE
+                + 3);
+        buffer.putChar(getSrcDataTypeInChar());
+        buffer.putShort((short) getMaxValue());
+        buffer.putShort((short) getMinValue());
+        buffer.putLong((Long) 0L); // unique value is obsolete, written only for compatibility
+        break;
+      case INT:
+        buffer = ByteBuffer.allocate(
+            (CarbonCommonConstants.LONG_SIZE_IN_BYTE * 3) + CarbonCommonConstants.INT_SIZE_IN_BYTE
+                + 3);
+        buffer.putChar(getSrcDataTypeInChar());
+        buffer.putInt((int) getMaxValue());
+        buffer.putInt((int) getMinValue());
+        buffer.putLong((Long) 0L); // unique value is obsolete, written only for compatibility
+        break;
+      case LONG:
+        buffer = ByteBuffer.allocate(
+            (CarbonCommonConstants.LONG_SIZE_IN_BYTE * 3) + CarbonCommonConstants.INT_SIZE_IN_BYTE
+                + 3);
+        buffer.putChar(getSrcDataTypeInChar());
+        buffer.putLong((Long) getMaxValue());
+        buffer.putLong((Long) getMinValue());
+        buffer.putLong((Long) 0L); // unique value is obsolete, written only for compatibility
+        break;
+      case DOUBLE:
+        buffer = ByteBuffer.allocate(
+            (CarbonCommonConstants.DOUBLE_SIZE_IN_BYTE * 3) + CarbonCommonConstants.INT_SIZE_IN_BYTE
+                + 3);
+        buffer.putChar(getSrcDataTypeInChar());
+        buffer.putDouble((Double) getMaxValue());
+        buffer.putDouble((Double) getMinValue());
+        buffer.putDouble((Double) 0d); // unique value is obsolete, written only for compatibility
+        break;
+      case DECIMAL:
+        buffer = ByteBuffer.allocate(CarbonCommonConstants.INT_SIZE_IN_BYTE + 3);
+        buffer.putChar(getSrcDataTypeInChar());
+        break;
+    }
+    buffer.putInt(getDecimal());
+    buffer.put(getDataTypeSelected());
+    buffer.flip();
+    return buffer.array();
+  }
+
+  public void deserialize(byte[] encodeMeta) {
+    ByteBuffer buffer = ByteBuffer.wrap(encodeMeta);
+    char srcDataType = buffer.getChar();
+    this.setSrcDataType(srcDataType);
+    switch (srcDataType) {
+      case DOUBLE_MEASURE:
+        this.setMaxValue(buffer.getDouble());
+        this.setMinValue(buffer.getDouble());
+        buffer.getDouble(); // skip the obsolete unique-value field, kept for backward compatibility
+        break;
+      case BIG_DECIMAL_MEASURE:
+        this.setMaxValue(0.0);
+        this.setMinValue(0.0);
+        break;
+      case BYTE_VALUE_MEASURE:
+        this.setMaxValue(buffer.get());
+        this.setMinValue(buffer.get());
+        buffer.getLong();  // skip the obsolete unique-value field, kept for backward compatibility
+        break;
+      case SHORT_VALUE_MEASURE:
+        this.setMaxValue(buffer.getShort());
+        this.setMinValue(buffer.getShort());
+        buffer.getLong();  // skip the obsolete unique-value field, kept for backward compatibility
+        break;
+      case INT_VALUE_MEASURE:
+        this.setMaxValue(buffer.getInt());
+        this.setMinValue(buffer.getInt());
+        buffer.getLong();  // skip the obsolete unique-value field, kept for backward compatibility
+        break;
+      case BIG_INT_MEASURE:
+        this.setMaxValue(buffer.getLong());
+        this.setMinValue(buffer.getLong());
+        buffer.getLong();  // skip the obsolete unique-value field, kept for backward compatibility
+        break;
+      default:
+        throw new IllegalArgumentException("invalid measure type");
+    }
+    this.setDecimal(buffer.getInt());
+    buffer.get(); // skip selectedDataType, obsolete
+  }
+
+  public byte[] getMaxAsBytes() {
+    return getValueAsBytes(getMaxValue());
+  }
+
+  public byte[] getMinAsBytes() {
+    return getValueAsBytes(getMinValue());
+  }
+
+  /**
+   * convert value to byte array
+   */
+  private byte[] getValueAsBytes(Object value) {
+    ByteBuffer b;
+    switch (srcDataType) {
+      case BYTE:
+        b = ByteBuffer.allocate(8);
+        b.putLong((byte) value);
+        b.flip();
+        return b.array();
+      case SHORT:
+        b = ByteBuffer.allocate(8);
+        b.putLong((short) value);
+        b.flip();
+        return b.array();
+      case INT:
+        b = ByteBuffer.allocate(8);
+        b.putLong((int) value);
+        b.flip();
+        return b.array();
+      case LONG:
+        b = ByteBuffer.allocate(8);
+        b.putLong((long) value);
+        b.flip();
+        return b.array();
+      case DOUBLE:
+        b = ByteBuffer.allocate(8);
+        b.putDouble((double) value);
+        b.flip();
+        return b.array();
+      case DECIMAL:
+      case BYTE_ARRAY:
+        return new byte[8];
+      default:
+        throw new IllegalArgumentException("Invalid data type: " + srcDataType);
+    }
+  }
+
+}

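A serialize/deserialize round trip for a LONG page clarifies the byte layout written above. Note newInstance is package-private, so this sketch assumes it lives in org.apache.carbondata.core.metadata; client code would obtain the instance from CodecMetaFactory on V3:

package org.apache.carbondata.core.metadata;

import org.apache.carbondata.core.datastore.page.statistics.PrimitivePageStatsCollector;
import org.apache.carbondata.core.metadata.datatype.DataType;

class CodecMetaRoundTripSketch {
  public static void main(String[] args) {
    PrimitivePageStatsCollector stats = PrimitivePageStatsCollector.newInstance(DataType.LONG, 2);
    stats.update(5L);
    stats.update(-5L);
    ColumnPageCodecMeta meta = ColumnPageCodecMeta.newInstance(stats, DataType.LONG);
    // written layout: [char type][max][min][obsolete unique value][int decimal][byte selected]
    byte[] bytes = meta.serialize();
    ColumnPageCodecMeta copy = ColumnPageCodecMeta.newInstance();
    copy.deserialize(bytes);
    // copy.getSrcDataType() is DataType.LONG, copy.getMinValue() is -5L, copy.getMaxValue() is 5L
  }
}
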
http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/metadata/ValueEncoderMeta.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/ValueEncoderMeta.java b/core/src/main/java/org/apache/carbondata/core/metadata/ValueEncoderMeta.java
index 741b999..971359d 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/ValueEncoderMeta.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/ValueEncoderMeta.java
@@ -109,4 +109,4 @@ public class ValueEncoderMeta implements Serializable {
   public void setDataTypeSelected(byte dataTypeSelected) {
     this.dataTypeSelected = dataTypeSelected;
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java
index ad17240..4dadcc2 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java
@@ -27,7 +27,6 @@ import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
 import org.apache.carbondata.core.scan.collector.ScannedResultCollector;
 import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
 import org.apache.carbondata.core.scan.executor.infos.DimensionInfo;
-import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
 import org.apache.carbondata.core.scan.executor.infos.MeasureInfo;
 import org.apache.carbondata.core.scan.model.QueryMeasure;
 import org.apache.carbondata.core.scan.result.AbstractScannedResult;
@@ -42,11 +41,6 @@ public abstract class AbstractScannedResultCollector implements ScannedResultCol
       LogServiceFactory.getLogService(AbstractScannedResultCollector.class.getName());
 
   /**
-   * restructuring info
-   */
-  private KeyStructureInfo restructureInfos;
-
-  /**
    * table block execution infos
    */
   protected BlockExecutionInfo tableBlockExecutionInfos;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
index 555580a..60546ed 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
@@ -16,10 +16,8 @@
  */
 package org.apache.carbondata.core.util;
 
-import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -29,10 +27,10 @@ import java.util.Set;
 
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datastore.block.SegmentProperties;
 import org.apache.carbondata.core.datastore.compression.CompressorFactory;
-import org.apache.carbondata.core.datastore.page.statistics.MeasurePageStatsVO;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
+import org.apache.carbondata.core.datastore.page.statistics.TablePageStatistics;
 import org.apache.carbondata.core.metadata.BlockletInfoColumnar;
 import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
 import org.apache.carbondata.core.metadata.ValueEncoderMeta;
@@ -76,11 +74,10 @@ public class CarbonMetadataUtil {
    * It converts list of BlockletInfoColumnar to FileFooter thrift objects
    *
    * @param infoList
-   * @param numCols
    * @param cardinalities
    * @return FileFooter
    */
-  public static FileFooter convertFileFooter(List<BlockletInfoColumnar> infoList, int numCols,
+  public static FileFooter convertFileFooter(List<BlockletInfoColumnar> infoList,
       int[] cardinalities, List<ColumnSchema> columnSchemaList, SegmentProperties segmentProperties)
       throws IOException {
     FileFooter footer = getFileFooter(infoList, cardinalities, columnSchemaList);
@@ -244,15 +241,19 @@ public class CarbonMetadataUtil {
     return blockletIndex;
   }
 
-  public static BlockletIndex getBlockletIndex(List<NodeHolder> nodeHolderList,
+  public static BlockletIndex getBlockletIndex(List<EncodedTablePage> encodedTablePageList,
       List<CarbonMeasure> carbonMeasureList) {
     BlockletMinMaxIndex blockletMinMaxIndex = new BlockletMinMaxIndex();
     // Calculating min/max for every each column.
-    byte[][] minCol = nodeHolderList.get(0).getDimensionColumnMinData().clone();
-    byte[][] maxCol = nodeHolderList.get(0).getDimensionColumnMaxData().clone();
-    for (NodeHolder nodeHolder : nodeHolderList) {
-      byte[][] columnMaxData = nodeHolder.getDimensionColumnMaxData();
-      byte[][] columnMinData = nodeHolder.getDimensionColumnMinData();
+    TablePageStatistics stats = new TablePageStatistics(encodedTablePageList.get(0).getDimensions(),
+        encodedTablePageList.get(0).getMeasures());
+    byte[][] minCol = stats.getDimensionMinValue().clone();
+    byte[][] maxCol = stats.getDimensionMaxValue().clone();
+    for (EncodedTablePage encodedTablePage : encodedTablePageList) {
+      stats = new TablePageStatistics(encodedTablePage.getDimensions(),
+          encodedTablePage.getMeasures());
+      byte[][] columnMaxData = stats.getDimensionMaxValue();
+      byte[][] columnMinData = stats.getDimensionMinValue();
       for (int i = 0; i < maxCol.length; i++) {
         if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(columnMaxData[i], maxCol[i]) > 0) {
           maxCol[i] = columnMaxData[i];
@@ -270,14 +271,18 @@ public class CarbonMetadataUtil {
       blockletMinMaxIndex.addToMin_values(ByteBuffer.wrap(min));
     }
 
-    byte[][] measureMaxValue = nodeHolderList.get(0).getMeasureColumnMaxData().clone();
-    byte[][] measureMinValue = nodeHolderList.get(0).getMeasureColumnMinData().clone();
+    stats = new TablePageStatistics(encodedTablePageList.get(0).getDimensions(),
+        encodedTablePageList.get(0).getMeasures());
+    byte[][] measureMaxValue = stats.getMeasureMaxValue().clone();
+    byte[][] measureMinValue = stats.getMeasureMinValue().clone();
     byte[] minVal = null;
     byte[] maxVal = null;
-    for (int i = 1; i < nodeHolderList.size(); i++) {
+    for (int i = 1; i < encodedTablePageList.size(); i++) {
       for (int j = 0; j < measureMinValue.length; j++) {
-        minVal = nodeHolderList.get(i).getMeasureColumnMinData()[j];
-        maxVal = nodeHolderList.get(i).getMeasureColumnMaxData()[j];
+        stats = new TablePageStatistics(
+            encodedTablePageList.get(i).getDimensions(), encodedTablePageList.get(i).getMeasures());
+        minVal = stats.getMeasureMinValue()[j];
+        maxVal = stats.getMeasureMaxValue()[j];
         if (compareMeasureData(measureMaxValue[j], maxVal, carbonMeasureList.get(j).getDataType())
             < 0) {
           measureMaxValue[j] = maxVal.clone();
@@ -296,8 +301,11 @@ public class CarbonMetadataUtil {
       blockletMinMaxIndex.addToMin_values(ByteBuffer.wrap(min));
     }
     BlockletBTreeIndex blockletBTreeIndex = new BlockletBTreeIndex();
-    blockletBTreeIndex.setStart_key(nodeHolderList.get(0).getStartKey());
-    blockletBTreeIndex.setEnd_key(nodeHolderList.get(nodeHolderList.size() - 1).getEndKey());
+    byte[] startKey = encodedTablePageList.get(0).getPageKey().serializeStartKey();
+    blockletBTreeIndex.setStart_key(startKey);
+    byte[] endKey = encodedTablePageList.get(
+        encodedTablePageList.size() - 1).getPageKey().serializeEndKey();
+    blockletBTreeIndex.setEnd_key(endKey);
     BlockletIndex blockletIndex = new BlockletIndex();
     blockletIndex.setMin_max_index(blockletMinMaxIndex);
     blockletIndex.setB_tree_index(blockletBTreeIndex);
@@ -333,10 +341,9 @@ public class CarbonMetadataUtil {
     int aggregateIndex = 0;
     boolean[] isSortedKeyColumn = blockletInfoColumnar.getIsSortedKeyColumn();
     boolean[] aggKeyBlock = blockletInfoColumnar.getAggKeyBlock();
-    boolean[] colGrpblock = blockletInfoColumnar.getColGrpBlocks();
     for (int i = 0; i < blockletInfoColumnar.getKeyLengths().length; i++) {
       DataChunk dataChunk = new DataChunk();
-      dataChunk.setChunk_meta(getChunkCompressionMeta());
+      dataChunk.setChunk_meta(getSnappyChunkCompressionMeta());
       List<Encoding> encodings = new ArrayList<Encoding>();
       if (containsEncoding(i, Encoding.DICTIONARY, columnSchema, segmentProperties)) {
         encodings.add(Encoding.DICTIONARY);
@@ -344,7 +351,6 @@ public class CarbonMetadataUtil {
       if (containsEncoding(i, Encoding.DIRECT_DICTIONARY, columnSchema, segmentProperties)) {
         encodings.add(Encoding.DIRECT_DICTIONARY);
       }
-      dataChunk.setRowMajor(colGrpblock[i]);
       // TODO : Once schema PR is merged and information needs to be passed
       // here.
       dataChunk.setColumn_ids(new ArrayList<Integer>());
@@ -377,7 +383,7 @@ public class CarbonMetadataUtil {
 
     for (int i = 0; i < blockletInfoColumnar.getMeasureLength().length; i++) {
       DataChunk dataChunk = new DataChunk();
-      dataChunk.setChunk_meta(getChunkCompressionMeta());
+      dataChunk.setChunk_meta(getSnappyChunkCompressionMeta());
       dataChunk.setRowMajor(false);
       // TODO : Once schema PR is merged and information needs to be passed
       // here.
@@ -400,8 +406,10 @@ public class CarbonMetadataUtil {
       // dataChunk.setPresence(new PresenceMeta());
       // TODO : Need to write ValueCompression meta here.
       List<ByteBuffer> encoderMetaList = new ArrayList<ByteBuffer>();
-      encoderMetaList.add(ByteBuffer.wrap(serializeEncoderMeta(
-          createValueEncoderMeta(blockletInfoColumnar.getStats(), i))));
+      encoderMetaList.add(
+          ByteBuffer.wrap(
+              serializeEncoderMeta(
+                      blockletInfoColumnar.getEncodedTablePage().getMeasure(i).getMetaData())));
       dataChunk.setEncoder_meta(encoderMetaList);
       colDataChunks.add(dataChunk);
     }
@@ -464,35 +472,10 @@ public class CarbonMetadataUtil {
     return aos.toByteArray();
   }
 
-  private static ValueEncoderMeta createValueEncoderMeta(MeasurePageStatsVO stats,
-      int index) {
-    ValueEncoderMeta encoderMeta = new ValueEncoderMeta();
-    encoderMeta.setMaxValue(stats.getMax(index));
-    encoderMeta.setMinValue(stats.getMin(index));
-    encoderMeta.setDataTypeSelected(stats.getDataTypeSelected(index));
-    encoderMeta.setType(getTypeInChar(stats.getDataType(index)));
-    return encoderMeta;
-  }
-
-  private static char getTypeInChar(DataType type) {
-    switch (type) {
-      case SHORT:
-      case INT:
-      case LONG:
-        return CarbonCommonConstants.BIG_INT_MEASURE;
-      case DOUBLE:
-        return CarbonCommonConstants.DOUBLE_MEASURE;
-      case DECIMAL:
-        return CarbonCommonConstants.BIG_DECIMAL_MEASURE;
-      default:
-        throw new RuntimeException("unsupported type: " + type);
-    }
-  }
-
   /**
    * Right now it is set to default values. We may use this in future
    */
-  private static ChunkCompressionMeta getChunkCompressionMeta() {
+  public static ChunkCompressionMeta getSnappyChunkCompressionMeta() {
     ChunkCompressionMeta chunkCompressionMeta = new ChunkCompressionMeta();
     chunkCompressionMeta.setCompression_codec(CompressionCodec.SNAPPY);
     chunkCompressionMeta.setTotal_compressed_size(0);
@@ -500,111 +483,16 @@ public class CarbonMetadataUtil {
     return chunkCompressionMeta;
   }
 
+
   /**
-   * It converts FileFooter thrift object to list of BlockletInfoColumnar
-   * objects
-   *
-   * @param footer
-   * @return
+   * Right now it is set to default values. We may use this in future
    */
-  public static List<BlockletInfoColumnar> convertBlockletInfo(FileFooter footer)
-      throws IOException {
-    List<BlockletInfoColumnar> listOfNodeInfo =
-        new ArrayList<BlockletInfoColumnar>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
-    for (BlockletInfo blockletInfo : footer.getBlocklet_info_list()) {
-      BlockletInfoColumnar blockletInfoColumnar = new BlockletInfoColumnar();
-      blockletInfoColumnar.setNumberOfKeys(blockletInfo.getNum_rows());
-      List<DataChunk> columnChunks = blockletInfo.getColumn_data_chunks();
-      List<DataChunk> dictChunks = new ArrayList<DataChunk>();
-      List<DataChunk> nonDictColChunks = new ArrayList<DataChunk>();
-      for (DataChunk dataChunk : columnChunks) {
-        if (dataChunk.getEncoders().get(0).equals(Encoding.DICTIONARY)) {
-          dictChunks.add(dataChunk);
-        } else {
-          nonDictColChunks.add(dataChunk);
-        }
-      }
-      int[] keyLengths = new int[dictChunks.size()];
-      long[] keyOffSets = new long[dictChunks.size()];
-      long[] keyBlockIndexOffsets = new long[dictChunks.size()];
-      int[] keyBlockIndexLens = new int[dictChunks.size()];
-      long[] indexMapOffsets = new long[dictChunks.size()];
-      int[] indexMapLens = new int[dictChunks.size()];
-      boolean[] sortState = new boolean[dictChunks.size()];
-      int i = 0;
-      for (DataChunk dataChunk : dictChunks) {
-        keyLengths[i] = dataChunk.getData_page_length();
-        keyOffSets[i] = dataChunk.getData_page_offset();
-        keyBlockIndexOffsets[i] = dataChunk.getRowid_page_offset();
-        keyBlockIndexLens[i] = dataChunk.getRowid_page_length();
-        indexMapOffsets[i] = dataChunk.getRle_page_offset();
-        indexMapLens[i] = dataChunk.getRle_page_length();
-        sortState[i] = dataChunk.getSort_state().equals(SortState.SORT_EXPLICIT);
-        i++;
-      }
-      blockletInfoColumnar.setKeyLengths(keyLengths);
-      blockletInfoColumnar.setKeyOffSets(keyOffSets);
-      blockletInfoColumnar.setKeyBlockIndexOffSets(keyBlockIndexOffsets);
-      blockletInfoColumnar.setKeyBlockIndexLength(keyBlockIndexLens);
-      blockletInfoColumnar.setDataIndexMapOffsets(indexMapOffsets);
-      blockletInfoColumnar.setDataIndexMapLength(indexMapLens);
-      blockletInfoColumnar.setIsSortedKeyColumn(sortState);
-
-      int[] msrLens = new int[nonDictColChunks.size()];
-      long[] msrOffsets = new long[nonDictColChunks.size()];
-      ValueEncoderMeta[] encoderMetas = new ValueEncoderMeta[nonDictColChunks.size()];
-      i = 0;
-      for (DataChunk msrChunk : nonDictColChunks) {
-        msrLens[i] = msrChunk.getData_page_length();
-        msrOffsets[i] = msrChunk.getData_page_offset();
-        encoderMetas[i] = deserializeValueEncoderMeta(msrChunk.getEncoder_meta().get(0));
-        i++;
-      }
-      blockletInfoColumnar.setMeasureLength(msrLens);
-      blockletInfoColumnar.setMeasureOffset(msrOffsets);
-      blockletInfoColumnar.setStats(getMeasurePageStats(encoderMetas));
-      listOfNodeInfo.add(blockletInfoColumnar);
-    }
-
-    setBlockletIndex(footer, listOfNodeInfo);
-    return listOfNodeInfo;
-  }
-
-  private static ValueEncoderMeta deserializeValueEncoderMeta(ByteBuffer byteBuffer)
-      throws IOException {
-    ByteArrayInputStream bis = new ByteArrayInputStream(byteBuffer.array());
-    ObjectInputStream objStream = new ObjectInputStream(bis);
-    ValueEncoderMeta encoderMeta = null;
-    try {
-      encoderMeta = (ValueEncoderMeta) objStream.readObject();
-    } catch (ClassNotFoundException e) {
-      LOGGER.error("Error while reading ValueEncoderMeta");
-    }
-    return encoderMeta;
-
-  }
-
-  private static MeasurePageStatsVO getMeasurePageStats(ValueEncoderMeta[] encoderMetas) {
-    return MeasurePageStatsVO.build(encoderMetas);
-  }
-
-  private static void setBlockletIndex(FileFooter footer,
-      List<BlockletInfoColumnar> listOfNodeInfo) {
-    List<BlockletIndex> blockletIndexList = footer.getBlocklet_index_list();
-    for (int i = 0; i < blockletIndexList.size(); i++) {
-      BlockletBTreeIndex bTreeIndexList = blockletIndexList.get(i).getB_tree_index();
-      BlockletMinMaxIndex minMaxIndexList = blockletIndexList.get(i).getMin_max_index();
-
-      listOfNodeInfo.get(i).setStartKey(bTreeIndexList.getStart_key());
-      listOfNodeInfo.get(i).setEndKey(bTreeIndexList.getEnd_key());
-      byte[][] min = new byte[minMaxIndexList.getMin_values().size()][];
-      byte[][] max = new byte[minMaxIndexList.getMax_values().size()][];
-      for (int j = 0; j < minMaxIndexList.getMax_valuesSize(); j++) {
-        min[j] = minMaxIndexList.getMin_values().get(j).array();
-        max[j] = minMaxIndexList.getMax_values().get(j).array();
-      }
-      listOfNodeInfo.get(i).setColumnMaxData(max);
-    }
+  private static ChunkCompressionMeta getChunkCompressionMeta() {
+    ChunkCompressionMeta chunkCompressionMeta = new ChunkCompressionMeta();
+    chunkCompressionMeta.setCompression_codec(CompressionCodec.SNAPPY);
+    chunkCompressionMeta.setTotal_compressed_size(0);
+    chunkCompressionMeta.setTotal_uncompressed_size(0);
+    return chunkCompressionMeta;
   }
 
   /**
@@ -673,7 +561,6 @@ public class CarbonMetadataUtil {
     int aggregateIndex = 0;
     boolean[] isSortedKeyColumn = blockletInfoColumnar.getIsSortedKeyColumn();
     boolean[] aggKeyBlock = blockletInfoColumnar.getAggKeyBlock();
-    boolean[] colGrpblock = blockletInfoColumnar.getColGrpBlocks();
     for (int i = 0; i < blockletInfoColumnar.getKeyLengths().length; i++) {
       DataChunk2 dataChunk = new DataChunk2();
       dataChunk.setChunk_meta(getChunkCompressionMeta());
@@ -684,7 +571,6 @@ public class CarbonMetadataUtil {
       if (containsEncoding(i, Encoding.DIRECT_DICTIONARY, columnSchema, segmentProperties)) {
         encodings.add(Encoding.DIRECT_DICTIONARY);
       }
-      dataChunk.setRowMajor(colGrpblock[i]);
       // TODO : Once schema PR is merged and information needs to be passed
       // here.
       dataChunk.setData_page_length(blockletInfoColumnar.getKeyLengths()[i]);
@@ -732,8 +618,10 @@ public class CarbonMetadataUtil {
       // dataChunk.setPresence(new PresenceMeta());
       // TODO : Need to write ValueCompression meta here.
       List<ByteBuffer> encoderMetaList = new ArrayList<ByteBuffer>();
-      encoderMetaList.add(ByteBuffer.wrap(serializeEncoderMeta(
-          createValueEncoderMeta(blockletInfoColumnar.getStats(), i))));
+      encoderMetaList.add(
+          ByteBuffer.wrap(
+              serializeEncoderMeta(
+                      blockletInfoColumnar.getEncodedTablePage().getMeasure(i).getMetaData())));
       dataChunk.setEncoder_meta(encoderMetaList);
       colDataChunks.add(dataChunk);
     }
@@ -741,106 +629,18 @@ public class CarbonMetadataUtil {
   }
 
   /**
-   * Below method will be used to get the data chunk object for all the columns
-   *
-   * @param nodeHolderList       blocklet info
-   * @param columnSchema        list of columns
-   * @param segmentProperties    segment properties
-   * @return list of data chunks
-   * @throws IOException
+   * return a DataChunk3 that contains the input DataChunk2 list
    */
-  private static List<DataChunk2> getDatachunk2(List<NodeHolder> nodeHolderList,
-      List<ColumnSchema> columnSchema, SegmentProperties segmentProperties, int index,
-      boolean isDimensionColumn) throws IOException {
-    List<DataChunk2> colDataChunks = new ArrayList<DataChunk2>();
-    DataChunk2 dataChunk = null;
-    NodeHolder nodeHolder = null;
-    for (int i = 0; i < nodeHolderList.size(); i++) {
-      nodeHolder = nodeHolderList.get(i);
-      dataChunk = new DataChunk2();
-      dataChunk.min_max = new BlockletMinMaxIndex();
-      dataChunk.setChunk_meta(getChunkCompressionMeta());
-      dataChunk.setNumberOfRowsInpage(nodeHolder.getEntryCount());
-      List<Encoding> encodings = new ArrayList<Encoding>();
-      if (isDimensionColumn) {
-        dataChunk.setData_page_length(nodeHolder.getKeyLengths()[index]);
-        if (containsEncoding(index, Encoding.DICTIONARY, columnSchema, segmentProperties)) {
-          encodings.add(Encoding.DICTIONARY);
-        }
-        if (containsEncoding(index, Encoding.DIRECT_DICTIONARY, columnSchema, segmentProperties)) {
-          encodings.add(Encoding.DIRECT_DICTIONARY);
-        }
-        dataChunk.setRowMajor(nodeHolder.getColGrpBlocks()[index]);
-        // TODO : Once schema PR is merged and information needs to be passed
-        // here.
-        if (nodeHolder.getRleEncodingForDictDim()[index]) {
-          dataChunk.setRle_page_length(nodeHolder.getDataIndexMapLength()[index]);
-          encodings.add(Encoding.RLE);
-        }
-        dataChunk.setSort_state(nodeHolder.getIsSortedKeyBlock()[index] ?
-            SortState.SORT_EXPLICIT :
-            SortState.SORT_NATIVE);
-
-        if (!nodeHolder.getIsSortedKeyBlock()[index]) {
-          dataChunk.setRowid_page_length(nodeHolder.getKeyBlockIndexLength()[index]);
-          encodings.add(Encoding.INVERTED_INDEX);
-        }
-        dataChunk.min_max.addToMax_values(
-            ByteBuffer.wrap(nodeHolder.getDimensionColumnMaxData()[index]));
-        dataChunk.min_max.addToMin_values(
-            ByteBuffer.wrap(nodeHolder.getDimensionColumnMinData()[index]));
-      } else {
-        dataChunk.setData_page_length(nodeHolder.getDataArray()[index].length);
-        // TODO : Right now the encodings are happening at runtime. change as
-        // per this encoders.
-        dataChunk.setEncoders(encodings);
-
-        dataChunk.setRowMajor(false);
-        // TODO : Right now the encodings are happening at runtime. change as
-        // per this encoders.
-        encodings.add(Encoding.DELTA);
-        dataChunk.setEncoders(encodings);
-        // TODO writing dummy presence meta need to set actual presence
-        // meta
-        PresenceMeta presenceMeta = new PresenceMeta();
-        presenceMeta.setPresent_bit_streamIsSet(true);
-        presenceMeta.setPresent_bit_stream(CompressorFactory.getInstance().getCompressor()
-            .compressByte(nodeHolder.getMeasureNullValueIndex()[index].toByteArray()));
-        dataChunk.setPresence(presenceMeta);
-        List<ByteBuffer> encoderMetaList = new ArrayList<ByteBuffer>();
-        encoderMetaList.add(ByteBuffer.wrap(serializeEncodeMetaUsingByteBuffer(
-            createValueEncoderMeta(nodeHolder.getStats(), index))));
-        ByteBuffer decimalMeta = writeInfoIfDecimal(index, segmentProperties);
-        if (decimalMeta != null) {
-          encoderMetaList.add(decimalMeta);
-        }
-        dataChunk.setEncoder_meta(encoderMetaList);
-        dataChunk.min_max
-            .addToMax_values(ByteBuffer.wrap(nodeHolder.getMeasureColumnMaxData()[index]));
-        dataChunk.min_max
-            .addToMin_values(ByteBuffer.wrap(nodeHolder.getMeasureColumnMinData()[index]));
-      }
-      dataChunk.setEncoders(encodings);
-      colDataChunks.add(dataChunk);
-    }
-    return colDataChunks;
-  }
-
-  public static DataChunk3 getDataChunk3(List<NodeHolder> nodeHolderList,
-      List<ColumnSchema> columnSchema, SegmentProperties segmentProperties, int index,
-      boolean isDimensionColumn) throws IOException {
-    List<DataChunk2> dataChunksList =
-        getDatachunk2(nodeHolderList, columnSchema, segmentProperties, index, isDimensionColumn);
+  public static DataChunk3 getDataChunk3(List<DataChunk2> dataChunksList) {
     int offset = 0;
     DataChunk3 dataChunk = new DataChunk3();
     List<Integer> pageOffsets = new ArrayList<>();
     List<Integer> pageLengths = new ArrayList<>();
     int length = 0;
-    for (int i = 0; i < dataChunksList.size(); i++) {
+    for (DataChunk2 dataChunk2 : dataChunksList) {
       pageOffsets.add(offset);
-      length =
-          dataChunksList.get(i).getData_page_length() + dataChunksList.get(i).getRle_page_length()
-              + dataChunksList.get(i).getRowid_page_length();
+      length = dataChunk2.getData_page_length() + dataChunk2.getRle_page_length() +
+          dataChunk2.getRowid_page_length();
       pageLengths.add(length);
       offset += length;
     }
@@ -850,36 +650,30 @@ public class CarbonMetadataUtil {
     return dataChunk;
   }
 
-  public static byte[] serializeEncodeMetaUsingByteBuffer(ValueEncoderMeta valueEncoderMeta) {
-    ByteBuffer buffer = null;
-    switch (valueEncoderMeta.getType()) {
-      case LONG:
-        buffer = ByteBuffer.allocate(
-            (CarbonCommonConstants.LONG_SIZE_IN_BYTE * 3) + CarbonCommonConstants.INT_SIZE_IN_BYTE
-                + 3);
-        buffer.putChar(valueEncoderMeta.getTypeInChar());
-        buffer.putLong((Long) valueEncoderMeta.getMaxValue());
-        buffer.putLong((Long) valueEncoderMeta.getMinValue());
-        buffer.putLong(0L);  // unique value, not used
-        break;
-      case DOUBLE:
-        buffer = ByteBuffer.allocate(
-            (CarbonCommonConstants.DOUBLE_SIZE_IN_BYTE * 3) + CarbonCommonConstants.INT_SIZE_IN_BYTE
-                + 3);
-        buffer.putChar(valueEncoderMeta.getTypeInChar());
-        buffer.putDouble((Double) valueEncoderMeta.getMaxValue());
-        buffer.putDouble((Double) valueEncoderMeta.getMinValue());
-        buffer.putDouble(0d); // unique value, not used
-        break;
-      case DECIMAL:
-        buffer = ByteBuffer.allocate(CarbonCommonConstants.INT_SIZE_IN_BYTE + 3);
-        buffer.putChar(valueEncoderMeta.getTypeInChar());
-        break;
+  /**
+   * return DataChunk3 for the dimension column (specified by `columnIndex`)
+   * in `encodedTablePageList`
+   */
+  public static DataChunk3 getDimensionDataChunk3(List<EncodedTablePage> encodedTablePageList,
+      int columnIndex) throws IOException {
+    List<DataChunk2> dataChunksList = new ArrayList<>(encodedTablePageList.size());
+    for (EncodedTablePage encodedTablePage : encodedTablePageList) {
+      dataChunksList.add(encodedTablePage.getDimension(columnIndex).getDataChunk2());
     }
-    buffer.putInt(0); // decimal point, not used
-    buffer.put(valueEncoderMeta.getDataTypeSelected());
-    buffer.flip();
-    return buffer.array();
+    return CarbonMetadataUtil.getDataChunk3(dataChunksList);
+  }
+
+  /**
+   * return DataChunk3 for the measure column (specified by `columnIndex`)
+   * in `encodedTablePageList`
+   */
+  public static DataChunk3 getMeasureDataChunk3(List<EncodedTablePage> encodedTablePageList,
+      int columnIndex) throws IOException {
+    List<DataChunk2> dataChunksList = new ArrayList<>(encodedTablePageList.size());
+    for (EncodedTablePage encodedTablePage : encodedTablePageList) {
+      dataChunksList.add(encodedTablePage.getMeasure(columnIndex).getDataChunk2());
+    }
+    return CarbonMetadataUtil.getDataChunk3(dataChunksList);
   }
 
   public static int compareMeasureData(byte[] first, byte[] second, DataType dataType) {
@@ -931,83 +725,4 @@ public class CarbonMetadataUtil {
     return fileHeader;
   }
 
-  /**
-   * Below method will be used to get the data chunk2 serialize object list
-   *
-   * @param nodeHolder        node holder
-   * @param columnSchema     table columns
-   * @param segmentProperties segment properties
-   * @param isDimensionColumn to get the list of dimension column or measure column
-   * @return list of data chunk2
-   * @throws IOException
-   */
-  public static List<byte[]> getDataChunk2(NodeHolder nodeHolder, List<ColumnSchema> columnSchema,
-      SegmentProperties segmentProperties, boolean isDimensionColumn) throws IOException {
-    List<byte[]> dataChunkBuffer = new ArrayList<>();
-    if (isDimensionColumn) {
-      for (int i = 0; i < nodeHolder.getKeyArray().length; i++) {
-        DataChunk2 dataChunk = new DataChunk2();
-        dataChunk.min_max = new BlockletMinMaxIndex();
-        dataChunk.setChunk_meta(getChunkCompressionMeta());
-        dataChunk.setNumberOfRowsInpage(nodeHolder.getEntryCount());
-        List<Encoding> encodings = new ArrayList<Encoding>();
-        dataChunk.setData_page_length(nodeHolder.getKeyLengths()[i]);
-        if (containsEncoding(i, Encoding.DICTIONARY, columnSchema, segmentProperties)) {
-          encodings.add(Encoding.DICTIONARY);
-        }
-        if (containsEncoding(i, Encoding.DIRECT_DICTIONARY, columnSchema, segmentProperties)) {
-          encodings.add(Encoding.DIRECT_DICTIONARY);
-        }
-        dataChunk.setRowMajor(nodeHolder.getColGrpBlocks()[i]);
-        if (nodeHolder.getRleEncodingForDictDim()[i]) {
-          dataChunk.setRle_page_length(nodeHolder.getDataIndexMapLength()[i]);
-          encodings.add(Encoding.RLE);
-        }
-        dataChunk.setSort_state(
-            nodeHolder.getIsSortedKeyBlock()[i] ? SortState.SORT_EXPLICIT : SortState.SORT_NATIVE);
-        if (!nodeHolder.getIsSortedKeyBlock()[i]) {
-          dataChunk.setRowid_page_length(nodeHolder.getKeyBlockIndexLength()[i]);
-          encodings.add(Encoding.INVERTED_INDEX);
-        }
-        dataChunk.min_max.addToMax_values(
-            ByteBuffer.wrap(nodeHolder.getDimensionColumnMaxData()[i]));
-        dataChunk.min_max.addToMin_values(
-            ByteBuffer.wrap(nodeHolder.getDimensionColumnMinData()[i]));
-        dataChunk.setEncoders(encodings);
-        dataChunkBuffer.add(CarbonUtil.getByteArray(dataChunk));
-      }
-    } else {
-      for (int i = 0; i < nodeHolder.getDataArray().length; i++) {
-        DataChunk2 dataChunk = new DataChunk2();
-        dataChunk.min_max = new BlockletMinMaxIndex();
-        dataChunk.setChunk_meta(getChunkCompressionMeta());
-        dataChunk.setNumberOfRowsInpage(nodeHolder.getEntryCount());
-        dataChunk.setData_page_length(nodeHolder.getDataArray()[i].length);
-        List<Encoding> encodings = new ArrayList<Encoding>();
-        // TODO : Right now the encodings are happening at runtime. change as
-        // per this encoders.
-        dataChunk.setEncoders(encodings);
-        dataChunk.setRowMajor(false);
-        // TODO : Right now the encodings are happening at runtime. change as
-        // per this encoders.
-        encodings.add(Encoding.DELTA);
-        dataChunk.setEncoders(encodings);
-        // TODO writing dummy presence meta need to set actual presence
-        // meta
-        PresenceMeta presenceMeta = new PresenceMeta();
-        presenceMeta.setPresent_bit_streamIsSet(true);
-        presenceMeta.setPresent_bit_stream(CompressorFactory.getInstance().getCompressor()
-            .compressByte(nodeHolder.getMeasureNullValueIndex()[i].toByteArray()));
-        dataChunk.setPresence(presenceMeta);
-        List<ByteBuffer> encoderMetaList = new ArrayList<ByteBuffer>();
-        encoderMetaList.add(ByteBuffer.wrap(serializeEncodeMetaUsingByteBuffer(
-            createValueEncoderMeta(nodeHolder.getStats(), i))));
-        dataChunk.setEncoder_meta(encoderMetaList);
-        dataChunk.min_max.addToMax_values(ByteBuffer.wrap(nodeHolder.getMeasureColumnMaxData()[i]));
-        dataChunk.min_max.addToMin_values(ByteBuffer.wrap(nodeHolder.getMeasureColumnMinData()[i]));
-        dataChunkBuffer.add(CarbonUtil.getByteArray(dataChunk));
-      }
-    }
-    return dataChunkBuffer;
-  }
 }
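
For context on the offset bookkeeping in getDataChunk3 above: the pages of one column are laid back to back, so each page's offset is the running sum of the preceding page lengths (data page + RLE page + row-id page). A minimal standalone sketch of that accumulation, using int triples in place of the Thrift DataChunk2 objects (the class name PageOffsetSketch and the sample lengths are illustrative, not part of the patch):

    import java.util.ArrayList;
    import java.util.List;

    public class PageOffsetSketch {
      public static void main(String[] args) {
        // each row: {dataPageLength, rlePageLength, rowIdPageLength}
        int[][] pages = { {100, 10, 0}, {80, 0, 12}, {95, 5, 0} };
        List<Integer> pageOffsets = new ArrayList<>();
        List<Integer> pageLengths = new ArrayList<>();
        int offset = 0;
        for (int[] page : pages) {
          pageOffsets.add(offset);                  // where this page starts
          int length = page[0] + page[1] + page[2]; // total bytes of this page
          pageLengths.add(length);
          offset += length;                         // next page starts right after
        }
        // prints [0, 110, 202] [110, 92, 100]
        System.out.println(pageOffsets + " " + pageLengths);
      }
    }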

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 6dd211a..eff8f0d 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -53,8 +53,6 @@ import org.apache.carbondata.core.datastore.columnar.ColumnGroupModel;
 import org.apache.carbondata.core.datastore.columnar.UnBlockIndexer;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
-import org.apache.carbondata.core.datastore.page.statistics.MeasurePageStatsVO;
-import org.apache.carbondata.core.indexstore.BlockletDetailInfo;
 import org.apache.carbondata.core.keygenerator.mdkey.NumberCompressor;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
 import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
@@ -826,15 +824,6 @@ public final class CarbonUtil {
   }
 
   /**
-   * Below method will be used to get the stats of the measure data page
-   */
-  public static MeasurePageStatsVO getMeasurePageStats(
-      List<ValueEncoderMeta> encodeMetaList) {
-    return MeasurePageStatsVO.build(
-        encodeMetaList.toArray(new ValueEncoderMeta[encodeMetaList.size()]));
-  }
-
-  /**
    * Below method will be used to check whether particular encoding is present
    * in the dimension or not
    *
@@ -1350,20 +1339,6 @@ public final class CarbonUtil {
     return thriftByteArray;
   }
 
-  /**
-   * Below method will be used to convert the bytearray to data chunk object
-   *
-   * @param dataChunkBytes datachunk thrift object in bytes
-   * @return data chunk thrift object
-   */
-  public static DataChunk2 readDataChunk(byte[] dataChunkBytes, int offset, int length)
-      throws IOException {
-    return (DataChunk2) read(dataChunkBytes, new ThriftReader.TBaseCreator() {
-      @Override public TBase create() {
-        return new DataChunk2();
-      }
-    }, offset, length);
-  }
 
   public static DataChunk3 readDataChunk3(ByteBuffer dataChunkBuffer, int offset, int length)
       throws IOException {
@@ -1462,6 +1437,38 @@ public final class CarbonUtil {
     return valueEncoderMeta;
   }
 
+  public static byte[] serializeEncodeMetaUsingByteBuffer(ValueEncoderMeta valueEncoderMeta) {
+    ByteBuffer buffer = null;
+    switch (valueEncoderMeta.getType()) {
+      case LONG:
+        buffer = ByteBuffer.allocate(
+            (CarbonCommonConstants.LONG_SIZE_IN_BYTE * 3) + CarbonCommonConstants.INT_SIZE_IN_BYTE
+                + 3);
+        buffer.putChar(valueEncoderMeta.getTypeInChar());
+        buffer.putLong((Long) valueEncoderMeta.getMaxValue());
+        buffer.putLong((Long) valueEncoderMeta.getMinValue());
+        buffer.putLong(0L); // unique value, not used
+        break;
+      case DOUBLE:
+        buffer = ByteBuffer.allocate(
+            (CarbonCommonConstants.DOUBLE_SIZE_IN_BYTE * 3) + CarbonCommonConstants.INT_SIZE_IN_BYTE
+                + 3);
+        buffer.putChar(valueEncoderMeta.getTypeInChar());
+        buffer.putDouble((Double) valueEncoderMeta.getMaxValue());
+        buffer.putDouble((Double) valueEncoderMeta.getMinValue());
+        buffer.putDouble(0d); // unique value, not used
+        break;
+      case DECIMAL:
+        buffer = ByteBuffer.allocate(CarbonCommonConstants.INT_SIZE_IN_BYTE + 3);
+        buffer.putChar(valueEncoderMeta.getTypeInChar());
+        break;
+    }
+    buffer.putInt(0); // decimal point, not used
+    buffer.put(valueEncoderMeta.getDataTypeSelected());
+    buffer.flip();
+    return buffer.array();
+  }
+
   /**
    * Below method will be used to convert indexes in range
    * Indexes=[0,1,2,3,4,5,6,7,8,9]
@@ -1872,5 +1879,47 @@ public final class CarbonUtil {
       CarbonUtil.deleteFoldersAndFiles(dbPath);
     }
   }
+
+  public static byte[] getMaxValueAsBytes(ValueEncoderMeta meta) {
+    ByteBuffer b;
+    switch (meta.getType()) {
+      case LONG:
+        b = ByteBuffer.allocate(8);
+        b.putLong((long) meta.getMaxValue());
+        b.flip();
+        return b.array();
+      case DOUBLE:
+        b = ByteBuffer.allocate(8);
+        b.putDouble((double) meta.getMaxValue());
+        b.flip();
+        return b.array();
+      case DECIMAL:
+      case BYTE_ARRAY:
+        return new byte[8];
+      default:
+        throw new IllegalArgumentException("Invalid data type: " + meta.getType());
+    }
+  }
+
+  public static byte[] getMinValueAsBytes(ValueEncoderMeta meta) {
+    ByteBuffer b;
+    switch (meta.getType()) {
+      case LONG:
+        b = ByteBuffer.allocate(8);
+        b.putLong((long) meta.getMinValue());
+        b.flip();
+        return b.array();
+      case DOUBLE:
+        b = ByteBuffer.allocate(8);
+        b.putDouble((double) meta.getMinValue());
+        b.flip();
+        return b.array();
+      case DECIMAL:
+      case BYTE_ARRAY:
+        return new byte[8];
+      default:
+        throw new IllegalArgumentException("Invalid data type: " + meta.getType());
+    }
+  }
 }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
index 37ae5bb..39b8b3c 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
@@ -136,25 +136,6 @@ public final class DataTypeUtil {
   }
 
   /**
-   * This method will return the type of measure based on its data type
-   *
-   * @param dataType
-   * @return
-   */
-  public static char getAggType(DataType dataType) {
-    switch (dataType) {
-      case DECIMAL:
-        return CarbonCommonConstants.BIG_DECIMAL_MEASURE;
-      case SHORT:
-      case INT:
-      case LONG:
-        return CarbonCommonConstants.BIG_INT_MEASURE;
-      default:
-        return CarbonCommonConstants.DOUBLE_MEASURE;
-    }
-  }
-
-  /**
    * This method will convert a big decimal value to bytes
    *
    * @param num

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/util/NodeHolder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/NodeHolder.java b/core/src/main/java/org/apache/carbondata/core/util/NodeHolder.java
index 95037b2..4afa9b6 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/NodeHolder.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/NodeHolder.java
@@ -19,9 +19,12 @@ package org.apache.carbondata.core.util;
 
 import java.util.BitSet;
 
-import org.apache.carbondata.core.datastore.page.statistics.MeasurePageStatsVO;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
+import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
 
 public class NodeHolder {
+  private EncodedTablePage encodedData;
+
   /**
    * keyArray
    */
@@ -94,7 +97,7 @@ public class NodeHolder {
 
   private byte[][] measureColumnMinData;
 
-  private MeasurePageStatsVO stats;
+  private SimpleStatsResult stats;
 
   /**
    * array of rleEncodingForDictDim flag to identify the rleEncodingForDictDim
@@ -418,11 +421,37 @@ public class NodeHolder {
     return this.writeAll;
   }
 
-  public MeasurePageStatsVO getStats() {
+  public SimpleStatsResult getStats() {
     return stats;
   }
 
-  public void setMeasureStats(MeasurePageStatsVO stats) {
+  public void setMeasureStats(SimpleStatsResult stats) {
     this.stats = stats;
   }
+
+  public static byte[][] getKeyArray(EncodedTablePage encodedTablePage) {
+    int numDimensions = encodedTablePage.getNumDimensions();
+    byte[][] keyArray = new byte[numDimensions][];
+    for (int i = 0; i < numDimensions; i++) {
+      keyArray[i] = encodedTablePage.getDimension(i).getEncodedData();
+    }
+    return keyArray;
+  }
+
+  public static byte[][] getDataArray(EncodedTablePage encodedTablePage) {
+    int numMeasures = encodedTablePage.getNumMeasures();
+    byte[][] dataArray = new byte[numMeasures][];
+    for (int i = 0; i < numMeasures; i++) {
+      dataArray[i] = encodedTablePage.getMeasure(i).getEncodedData();
+    }
+    return dataArray;
+  }
+
+  public void setEncodedData(EncodedTablePage encodedData) {
+    this.encodedData = encodedData;
+  }
+
+  public EncodedTablePage getEncodedData() {
+    return encodedData;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/util/NonDictionaryUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/NonDictionaryUtil.java b/core/src/main/java/org/apache/carbondata/core/util/NonDictionaryUtil.java
new file mode 100644
index 0000000..d6ecfbc
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/util/NonDictionaryUtil.java
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.util;
+
+import java.nio.ByteBuffer;
+
+import org.apache.carbondata.core.datastore.row.WriteStepRowUtil;
+
+/**
+ * This is the utility class for No Dictionary changes.
+ */
+public class NonDictionaryUtil {
+
+  /**
+   * This method will form one single byte [] for all the high card dims.
+   * For example, to pack 2 columns c1 and c2, it stores them in the following way:
+   *  <total_len(short)><offsetLen(short)><offsetLen+c1_len(short)><c1(byte[])><c2(byte[])>
+   * @param byteBufferArr
+   * @return
+   */
+  public static byte[] packByteBufferIntoSingleByteArray(byte[][] byteBufferArr) {
+    // an empty array means there is no no-dictionary data to pack.
+    if (null == byteBufferArr || byteBufferArr.length == 0) {
+      return null;
+    }
+    int noOfCol = byteBufferArr.length;
+    short toDetermineLengthOfByteArr = 2;
+    short offsetLen = (short) (noOfCol * 2 + toDetermineLengthOfByteArr);
+    int totalBytes = calculateTotalBytes(byteBufferArr) + offsetLen;
+
+    ByteBuffer buffer = ByteBuffer.allocate(totalBytes);
+
+    // write the length of the byte [] as first short
+    buffer.putShort((short) (totalBytes - toDetermineLengthOfByteArr));
+    // writing the offset of the first element.
+    buffer.putShort(offsetLen);
+
+    // prepare index for byte []
+    for (int index = 0; index < byteBufferArr.length - 1; index++) {
+      int noOfBytes = byteBufferArr[index].length;
+
+      buffer.putShort((short) (offsetLen + noOfBytes));
+      offsetLen += noOfBytes;
+    }
+
+    // put actual data.
+    for (int index = 0; index < byteBufferArr.length; index++) {
+      buffer.put(byteBufferArr[index]);
+    }
+    buffer.rewind();
+    return buffer.array();
+
+  }
+
+  /**
+   * To calculate the total number of bytes in the byte[][].
+   *
+   * @param byteBufferArr
+   * @return
+   */
+  private static int calculateTotalBytes(byte[][] byteBufferArr) {
+    int total = 0;
+    for (int index = 0; index < byteBufferArr.length; index++) {
+      total += byteBufferArr[index].length;
+    }
+    return total;
+  }
+
+  /**
+   * Method to get the required Dimension from obj []
+   *
+   * @param index
+   * @param row
+   * @return
+   */
+  public static Integer getDimension(int index, Object[] row) {
+
+    Integer[] dimensions = (Integer[]) row[WriteStepRowUtil.DICTIONARY_DIMENSION];
+
+    return dimensions[index];
+
+  }
+
+  /**
+   * Method to get the required measure from obj []
+   *
+   * @param index
+   * @param row
+   * @return
+   */
+  public static Object getMeasure(int index, Object[] row) {
+    Object[] measures = (Object[]) row[WriteStepRowUtil.MEASURE];
+    return measures[index];
+  }
+
+  public static byte[] getByteArrayForNoDictionaryCols(Object[] row) {
+
+    return (byte[]) row[WriteStepRowUtil.NO_DICTIONARY_AND_COMPLEX];
+  }
+
+  public static void prepareOutObj(Object[] out, int[] dimArray, byte[][] byteBufferArr,
+      Object[] measureArray) {
+
+    out[WriteStepRowUtil.DICTIONARY_DIMENSION] = dimArray;
+    out[WriteStepRowUtil.NO_DICTIONARY_AND_COMPLEX] = byteBufferArr;
+    out[WriteStepRowUtil.MEASURE] = measureArray;
+
+  }
+
+  /**
+   * This method will extract the single dimension from the complete high card dims byte[].
+   * The format of the byte[] will be: TotalLength, CompleteStartOffsets, Data
+   *
+   * @param highCardArr
+   * @param index
+   * @param highCardinalityCount
+   * @param outBuffer
+   */
+  public static void extractSingleHighCardDims(byte[] highCardArr, int index,
+      int highCardinalityCount, ByteBuffer outBuffer) {
+    ByteBuffer buff = null;
+    short secIndex = 0;
+    short firstIndex = 0;
+    int length;
+    // if the requested index is the last one then we need to calculate the
+    // length based on the byte[] length.
+    if (index == highCardinalityCount - 1) {
+      // need to read 2 bytes (1 short) to determine the starting offset; the
+      // length can be calculated from the array length.
+      buff = ByteBuffer.wrap(highCardArr, (index * 2) + 2, 2);
+    } else {
+      // need to read 4 bytes (2 shorts) to determine the starting offset and
+      // length.
+      buff = ByteBuffer.wrap(highCardArr, (index * 2) + 2, 4);
+    }
+
+    firstIndex = buff.getShort();
+    // if it is the last dimension in high card then this will be the last
+    // offset, so calculate the length from the total length
+    if (index == highCardinalityCount - 1) {
+      secIndex = (short) highCardArr.length;
+    } else {
+      secIndex = buff.getShort();
+    }
+
+    length = secIndex - firstIndex;
+
+    outBuffer.position(firstIndex);
+    outBuffer.limit(outBuffer.position() + length);
+
+  }
+}
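
A worked instance of the packing format documented on packByteBufferIntoSingleByteArray: packing c1 = 3 bytes and c2 = 2 bytes gives offsetLen = 2 * 2 + 2 = 6 and totalBytes = 5 + 6 = 11, so the buffer holds the shorts 9 (total length minus the leading length short), 6 (start of c1) and 9 (start of c2), followed by the raw bytes. A self-contained sketch of the same layout (PackFormatSketch is illustrative and does not call the class above):

    import java.nio.ByteBuffer;

    public class PackFormatSketch {
      // layout: <total_len(short)><offset_c1(short)><offset_c1 + c1_len(short)><c1><c2>
      public static void main(String[] args) {
        byte[] c1 = { 1, 2, 3 };
        byte[] c2 = { 4, 5 };
        short offsetLen = (short) (2 * 2 + 2);               // 2 cols * 2 bytes + length short = 6
        int totalBytes = c1.length + c2.length + offsetLen;  // 11
        ByteBuffer buffer = ByteBuffer.allocate(totalBytes);
        buffer.putShort((short) (totalBytes - 2));           // total_len = 9
        buffer.putShort(offsetLen);                          // c1 starts at offset 6
        buffer.putShort((short) (offsetLen + c1.length));    // c2 starts at offset 9
        buffer.put(c1);
        buffer.put(c2);
        buffer.rewind();

        // read side: starting offset of column index 1 (c2) sits at (index * 2) + 2 = 4
        short start = buffer.getShort(4);                    // 9
        int length = totalBytes - start;                     // last column: 11 - 9 = 2
        System.out.println("c2 starts at " + start + ", length " + length);
      }
    }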

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java b/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java
index 56e83db..b953d45 100644
--- a/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/util/CarbonMetadataUtilTest.java
@@ -17,34 +17,41 @@
 
 package org.apache.carbondata.core.util;
 
-import mockit.Mock;
-import mockit.MockUp;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.BitSet;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.datastore.page.statistics.MeasurePageStatsVO;
-import org.apache.carbondata.core.metadata.index.BlockIndexInfo;
+import org.apache.carbondata.core.datastore.page.EncodedTablePage;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedMeasurePage;
 import org.apache.carbondata.core.metadata.BlockletInfoColumnar;
+import org.apache.carbondata.core.metadata.CodecMetaFactory;
+import org.apache.carbondata.core.metadata.ColumnPageCodecMeta;
 import org.apache.carbondata.core.metadata.ValueEncoderMeta;
-import org.apache.carbondata.format.*;
+import org.apache.carbondata.core.metadata.index.BlockIndexInfo;
+import org.apache.carbondata.format.BlockIndex;
+import org.apache.carbondata.format.BlockletInfo;
 import org.apache.carbondata.format.BlockletMinMaxIndex;
 import org.apache.carbondata.format.ColumnSchema;
+import org.apache.carbondata.format.DataChunk;
 import org.apache.carbondata.format.DataType;
+import org.apache.carbondata.format.Encoding;
+import org.apache.carbondata.format.FileFooter;
+import org.apache.carbondata.format.IndexHeader;
+import org.apache.carbondata.format.SegmentInfo;
 
+import mockit.Mock;
+import mockit.MockUp;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.BitSet;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import static junit.framework.TestCase.*;
-import static org.apache.carbondata.core.util.CarbonMetadataUtil.getIndexHeader;
+import static junit.framework.TestCase.assertEquals;
 import static org.apache.carbondata.core.util.CarbonMetadataUtil.convertFileFooter;
 import static org.apache.carbondata.core.util.CarbonMetadataUtil.getBlockIndexInfo;
+import static org.apache.carbondata.core.util.CarbonMetadataUtil.getIndexHeader;
 
 public class CarbonMetadataUtilTest {
   static List<ByteBuffer> byteBufferList;
@@ -57,8 +64,6 @@ public class CarbonMetadataUtilTest {
   static int[] objDecimal;
 
   @BeforeClass public static void setUp() {
-    Long lngObj = new Long("11221");
-    byte byt = 1;
     objMaxArr = new Long[6];
     objMaxArr[0] = new Long("111111");
     objMaxArr[1] = new Long("121111");
@@ -113,13 +118,11 @@ public class CarbonMetadataUtilTest {
     blockletInfoList.add(blockletInfo);
     blockletInfoList.add(blockletInfo);
 
-    ValueEncoderMeta valueEncoderMeta = new ValueEncoderMeta();
-    valueEncoderMeta.setDecimal(5);
-    valueEncoderMeta.setMinValue(objMinArr);
-    valueEncoderMeta.setMaxValue(objMaxArr);
-    valueEncoderMeta.setUniqueValue(lngObj);
-    valueEncoderMeta.setType('a');
-    valueEncoderMeta.setDataTypeSelected(byt);
+    ValueEncoderMeta meta = CodecMetaFactory.createMeta();
+    meta.setDecimal(5);
+    meta.setMinValue(objMinArr);
+    meta.setMaxValue(objMaxArr);
+    meta.setType(ColumnPageCodecMeta.DOUBLE_MEASURE);
 
     List<Encoding> encoders = new ArrayList<>();
     encoders.add(Encoding.INVERTED_INDEX);
@@ -199,19 +202,52 @@ public class CarbonMetadataUtilTest {
 
     ValueEncoderMeta[] metas = new ValueEncoderMeta[6];
     for (int i = 0; i < metas.length; i++) {
-      metas[i] = new ValueEncoderMeta();
+      metas[i] = CodecMetaFactory.createMeta();
       metas[i].setMinValue(objMinArr[i]);
       metas[i].setMaxValue(objMaxArr[i]);
-      metas[i].setUniqueValue(objMinArr[i]);
       metas[i].setDecimal(objDecimal[i]);
-      metas[i].setType(CarbonCommonConstants.BIG_INT_MEASURE);
-      metas[i].setDataTypeSelected(byteArr[i]);
+      metas[i].setType(ColumnPageCodecMeta.BIG_INT_MEASURE);
     }
 
-    MeasurePageStatsVO stats = MeasurePageStatsVO.build(metas);
-
     BlockletInfoColumnar blockletInfoColumnar = new BlockletInfoColumnar();
 
+    final ValueEncoderMeta meta = CodecMetaFactory.createMeta();
+
+    new MockUp<ColumnPageCodecMeta>() {
+      @SuppressWarnings("unused") @Mock
+      public byte[] serialize() {
+        return new byte[]{1,2};
+      }
+      @SuppressWarnings("unused") @Mock
+      public byte[] getMaxAsBytes() {
+        return new byte[]{1,2};
+      }
+      @SuppressWarnings("unused") @Mock
+      public byte[] getMinAsBytes() {
+        return new byte[]{1,2};
+      }
+      @SuppressWarnings("unused") @Mock
+      public org.apache.carbondata.core.metadata.datatype.DataType getSrcDataType() {
+        return org.apache.carbondata.core.metadata.datatype.DataType.DOUBLE;
+      }
+    };
+
+    new MockUp<EncodedMeasurePage>() {
+      @SuppressWarnings("unused") @Mock
+      public ValueEncoderMeta getMetaData() {
+        return meta;
+      }
+    };
+
+    final EncodedMeasurePage measure = new EncodedMeasurePage(6, new byte[]{0,1}, meta,
+        new BitSet());
+    new MockUp<EncodedTablePage>() {
+      @SuppressWarnings("unused") @Mock
+      public EncodedMeasurePage getMeasure(int measureIndex) {
+        return measure;
+      }
+    };
+
     BitSet[] bitSetArr = new BitSet[6];
     bitSetArr[0] = new BitSet();
     bitSetArr[1] = new BitSet();
@@ -222,7 +258,6 @@ public class CarbonMetadataUtilTest {
     blockletInfoColumnar.setColumnMaxData(maxByteArr);
     blockletInfoColumnar.setColumnMinData(maxByteArr);
     blockletInfoColumnar.setKeyLengths(intArr);
-    blockletInfoColumnar.setColGrpBlocks(boolArr);
     blockletInfoColumnar.setKeyOffSets(longArr);
     blockletInfoColumnar.setDataIndexMapOffsets(longArr);
     blockletInfoColumnar.setAggKeyBlock(boolArr);
@@ -232,7 +267,8 @@ public class CarbonMetadataUtilTest {
     blockletInfoColumnar.setMeasureLength(intArr);
     blockletInfoColumnar.setMeasureOffset(longArr);
     blockletInfoColumnar.setMeasureNullValueIndex(bitSetArr);
-    blockletInfoColumnar.setStats(stats);
+    EncodedTablePage encodedTablePage = EncodedTablePage.newEmptyInstance();
+    blockletInfoColumnar.setEncodedTablePage(encodedTablePage);
 
     BlockletInfoColumnar blockletInfoColumnar1 = new BlockletInfoColumnar();
     blockletInfoColumnar1.setColumnMaxData(maxByteArr);
@@ -243,13 +279,11 @@ public class CarbonMetadataUtilTest {
     blockletInfoColumnar1.setAggKeyBlock(boolArr);
     blockletInfoColumnar1.setDataIndexMapLength(intArr);
     blockletInfoColumnar1.setIsSortedKeyColumn(boolArr);
-    blockletInfoColumnar1.setColGrpBlocks(boolArr);
     blockletInfoColumnar1.setKeyOffSets(longArr);
     blockletInfoColumnar1.setMeasureLength(intArr);
     blockletInfoColumnar1.setMeasureOffset(longArr);
     blockletInfoColumnar1.setMeasureNullValueIndex(bitSetArr);
-    blockletInfoColumnar1.setStats(stats);
-    blockletInfoColumnar1.setColGrpBlocks(boolArr);
+    blockletInfoColumnar1.setEncodedTablePage(encodedTablePage);
 
     List<BlockletInfoColumnar> blockletInfoColumnarList = new ArrayList<>();
     blockletInfoColumnarList.add(blockletInfoColumnar);
@@ -285,7 +319,7 @@ public class CarbonMetadataUtilTest {
     BlockletMinMaxIndex blockletMinMaxIndex = new BlockletMinMaxIndex();
     blockletMinMaxIndex.addToMax_values(ByteBuffer.wrap(byteMaxArr));
     blockletMinMaxIndex.addToMin_values(ByteBuffer.wrap(byteMinArr));
-    FileFooter result = convertFileFooter(blockletInfoColumnarList, 4, cardinality, columnSchemas,
+    FileFooter result = convertFileFooter(blockletInfoColumnarList, cardinality, columnSchemas,
         segmentProperties);
     assertEquals(result.getTable_columns(), columnSchemas);
 


[4/7] carbondata git commit: [CARBONDATA-1098] Change page statistics use exact type and use column page in writer

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/compression/Compression.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/compression/Compression.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/compression/Compression.java
deleted file mode 100644
index c954a33..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/compression/Compression.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.page.compression;
-
-public interface Compression {
-  byte[] compress(byte[] input);
-  byte[] decompress(byte[] input);
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveCompressionCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveCompressionCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveCompressionCodec.java
index 2e8eff2..6b3a365 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveCompressionCodec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveCompressionCodec.java
@@ -21,7 +21,7 @@ import java.io.IOException;
 
 import org.apache.carbondata.core.datastore.compression.Compressor;
 import org.apache.carbondata.core.datastore.page.ColumnPage;
-import org.apache.carbondata.core.datastore.page.statistics.ColumnPageStatsVO;
+import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
 import org.apache.carbondata.core.memory.MemoryException;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 
@@ -37,7 +37,7 @@ public abstract class AdaptiveCompressionCodec implements ColumnPageCodec {
   protected final Compressor compressor;
 
   // statistics of this page, can be used by subclass
-  protected final ColumnPageStatsVO stats;
+  protected final SimpleStatsResult stats;
 
   // the data type used for storage
   protected final DataType targetDataType;
@@ -46,7 +46,7 @@ public abstract class AdaptiveCompressionCodec implements ColumnPageCodec {
   protected final DataType srcDataType;
 
   protected AdaptiveCompressionCodec(DataType srcDataType, DataType targetDataType,
-      ColumnPageStatsVO stats, Compressor compressor) {
+      SimpleStatsResult stats, Compressor compressor) {
     this.stats = stats;
     this.srcDataType = srcDataType;
     this.targetDataType = targetDataType;
@@ -55,7 +55,7 @@ public abstract class AdaptiveCompressionCodec implements ColumnPageCodec {
 
   public abstract String getName();
 
-  public abstract byte[] encode(ColumnPage input) throws MemoryException, IOException;
+  public abstract EncodedColumnPage encode(ColumnPage input) throws MemoryException, IOException;
 
   public abstract ColumnPage decode(byte[] input, int offset, int length) throws MemoryException;
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegerCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegerCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegerCodec.java
deleted file mode 100644
index fe15ba7..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegerCodec.java
+++ /dev/null
@@ -1,224 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.page.encoding;
-
-import java.io.IOException;
-
-import org.apache.carbondata.core.datastore.compression.Compressor;
-import org.apache.carbondata.core.datastore.page.ColumnPage;
-import org.apache.carbondata.core.datastore.page.LazyColumnPage;
-import org.apache.carbondata.core.datastore.page.PrimitiveCodec;
-import org.apache.carbondata.core.datastore.page.statistics.ColumnPageStatsVO;
-import org.apache.carbondata.core.memory.MemoryException;
-import org.apache.carbondata.core.metadata.datatype.DataType;
-
-/**
- * Codec for integer (byte, short, int, long) data type page.
- * This codec will do type casting on page data to make storage minimum.
- */
-class AdaptiveIntegerCodec extends AdaptiveCompressionCodec {
-
-  private ColumnPage encodedPage;
-
-  public static ColumnPageCodec newInstance(DataType srcDataType, DataType targetDataType,
-      ColumnPageStatsVO stats, Compressor compressor) {
-    return new AdaptiveIntegerCodec(srcDataType, targetDataType, stats, compressor);
-  }
-
-  private AdaptiveIntegerCodec(DataType srcDataType, DataType targetDataType,
-      ColumnPageStatsVO stats, Compressor compressor) {
-    super(srcDataType, targetDataType, stats, compressor);
-  }
-
-  @Override
-  public String getName() {
-    return "AdaptiveIntegerCodec";
-  }
-
-  @Override
-  public byte[] encode(ColumnPage input) throws MemoryException, IOException {
-    encodedPage = ColumnPage
-        .newPage(targetDataType, input.getPageSize(), stats.getScale(), stats.getPrecision());
-    input.encode(codec);
-    byte[] result = encodedPage.compress(compressor);
-    encodedPage.freeMemory();
-    return result;
-  }
-
-  @Override
-  public ColumnPage decode(byte[] input, int offset, int length) throws MemoryException {
-    if (srcDataType.equals(targetDataType)) {
-      return ColumnPage
-          .decompress(compressor, targetDataType, input, offset, length, stats.getScale(),
-              stats.getPrecision());
-    } else {
-      ColumnPage page = ColumnPage
-          .decompress(compressor, targetDataType, input, offset, length, stats.getScale(),
-              stats.getPrecision());
-      return LazyColumnPage.newPage(page, codec);
-    }
-  }
-
-  // encoded value = (type cast page value to target data type)
-  private PrimitiveCodec codec = new PrimitiveCodec() {
-    @Override
-    public void encode(int rowId, byte value) {
-      switch (targetDataType) {
-        default:
-          throw new RuntimeException("internal error: " + debugInfo());
-      }
-    }
-
-    @Override
-    public void encode(int rowId, short value) {
-      switch (targetDataType) {
-        case BYTE:
-          encodedPage.putByte(rowId, (byte) value);
-          break;
-        default:
-          throw new RuntimeException("internal error: " + debugInfo());
-      }
-    }
-
-    @Override
-    public void encode(int rowId, int value) {
-      switch (targetDataType) {
-        case BYTE:
-          encodedPage.putByte(rowId, (byte) value);
-          break;
-        case SHORT:
-          encodedPage.putShort(rowId, (short) value);
-          break;
-        case SHORT_INT:
-          encodedPage.putShortInt(rowId, value);
-          break;
-        default:
-          throw new RuntimeException("internal error: " + debugInfo());
-      }
-    }
-
-    @Override
-    public void encode(int rowId, long value) {
-      switch (targetDataType) {
-        case BYTE:
-          encodedPage.putByte(rowId, (byte) value);
-          break;
-        case SHORT:
-          encodedPage.putShort(rowId, (short) value);
-          break;
-        case SHORT_INT:
-          encodedPage.putShortInt(rowId, (int) value);
-          break;
-        case INT:
-          encodedPage.putInt(rowId, (int) value);
-          break;
-        default:
-          throw new RuntimeException("internal error: " + debugInfo());
-      }
-    }
-
-    @Override
-    public void encode(int rowId, float value) {
-      switch (targetDataType) {
-        case BYTE:
-          encodedPage.putByte(rowId, (byte) value);
-          break;
-        case SHORT:
-          encodedPage.putShort(rowId, (short) value);
-          break;
-        case SHORT_INT:
-          encodedPage.putShortInt(rowId, (int) value);
-          break;
-        case INT:
-          encodedPage.putInt(rowId, (int) value);
-          break;
-        default:
-          throw new RuntimeException("internal error: " + debugInfo());
-      }
-    }
-
-    @Override
-    public void encode(int rowId, double value) {
-      switch (targetDataType) {
-        case BYTE:
-          encodedPage.putByte(rowId, (byte) value);
-          break;
-        case SHORT:
-          encodedPage.putShort(rowId, (short) value);
-          break;
-        case SHORT_INT:
-          encodedPage.putShortInt(rowId, (int) value);
-          break;
-        case INT:
-          encodedPage.putInt(rowId, (int) value);
-          break;
-        case LONG:
-          encodedPage.putLong(rowId, (long) value);
-          break;
-        default:
-          throw new RuntimeException("internal error: " + debugInfo());
-      }
-    }
-
-    @Override
-    public long decodeLong(byte value) {
-      return value;
-    }
-
-    @Override
-    public long decodeLong(short value) {
-      return value;
-    }
-
-    @Override
-    public long decodeLong(int value) {
-      return value;
-    }
-
-    @Override
-    public double decodeDouble(byte value) {
-      return value;
-    }
-
-    @Override
-    public double decodeDouble(short value) {
-      return value;
-    }
-
-    @Override
-    public double decodeDouble(int value) {
-      return value;
-    }
-
-    @Override
-    public double decodeDouble(long value) {
-      return value;
-    }
-
-    @Override
-    public double decodeDouble(float value) {
-      throw new RuntimeException("internal error: " + debugInfo());
-    }
-
-    @Override
-    public double decodeDouble(double value) {
-      throw new RuntimeException("internal error: " + debugInfo());
-    }
-  };
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegralCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegralCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegralCodec.java
new file mode 100644
index 0000000..ed8d734
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveIntegralCodec.java
@@ -0,0 +1,219 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import java.io.IOException;
+
+import org.apache.carbondata.core.datastore.compression.Compressor;
+import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.datastore.page.LazyColumnPage;
+import org.apache.carbondata.core.datastore.page.PrimitiveCodec;
+import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
+import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.CodecMetaFactory;
+import org.apache.carbondata.core.metadata.datatype.DataType;
+
+/**
+ * Codec for integer (byte, short, int, long) data type page.
+ * This codec will do type casting on page data to minimize storage.
+ */
+class AdaptiveIntegralCodec extends AdaptiveCompressionCodec {
+
+  private ColumnPage encodedPage;
+
+  public static ColumnPageCodec newInstance(DataType srcDataType, DataType targetDataType,
+      SimpleStatsResult stats, Compressor compressor) {
+    return new AdaptiveIntegralCodec(srcDataType, targetDataType, stats, compressor);
+  }
+
+  private AdaptiveIntegralCodec(DataType srcDataType, DataType targetDataType,
+      SimpleStatsResult stats, Compressor compressor) {
+    super(srcDataType, targetDataType, stats, compressor);
+  }
+
+  @Override
+  public String getName() {
+    return "AdaptiveIntegralCodec";
+  }
+
+  @Override
+  public EncodedColumnPage encode(ColumnPage input) throws MemoryException, IOException {
+    encodedPage = ColumnPage
+        .newPage(targetDataType, input.getPageSize(), stats.getScale(), stats.getPrecision());
+    input.encode(codec);
+    byte[] result = encodedPage.compress(compressor);
+    encodedPage.freeMemory();
+    return new EncodedMeasurePage(input.getPageSize(), result,
+        CodecMetaFactory.createMeta(stats, targetDataType),
+        ((SimpleStatsResult)input.getStatistics()).getNullBits());
+  }
+
+  @Override
+  public ColumnPage decode(byte[] input, int offset, int length) throws MemoryException {
+    ColumnPage page = ColumnPage.decompress(compressor, targetDataType, input, offset, length);
+    return LazyColumnPage.newPage(page, codec);
+  }
+
+  // encoded value = (type cast page value to target data type)
+  private PrimitiveCodec codec = new PrimitiveCodec() {
+    @Override
+    public void encode(int rowId, byte value) {
+      switch (targetDataType) {
+        default:
+          throw new RuntimeException("internal error: " + debugInfo());
+      }
+    }
+
+    @Override
+    public void encode(int rowId, short value) {
+      switch (targetDataType) {
+        case BYTE:
+          encodedPage.putByte(rowId, (byte) value);
+          break;
+        default:
+          throw new RuntimeException("internal error: " + debugInfo());
+      }
+    }
+
+    @Override
+    public void encode(int rowId, int value) {
+      switch (targetDataType) {
+        case BYTE:
+          encodedPage.putByte(rowId, (byte) value);
+          break;
+        case SHORT:
+          encodedPage.putShort(rowId, (short) value);
+          break;
+        case SHORT_INT:
+          encodedPage.putShortInt(rowId, value);
+          break;
+        default:
+          throw new RuntimeException("internal error: " + debugInfo());
+      }
+    }
+
+    @Override
+    public void encode(int rowId, long value) {
+      switch (targetDataType) {
+        case BYTE:
+          encodedPage.putByte(rowId, (byte) value);
+          break;
+        case SHORT:
+          encodedPage.putShort(rowId, (short) value);
+          break;
+        case SHORT_INT:
+          encodedPage.putShortInt(rowId, (int) value);
+          break;
+        case INT:
+          encodedPage.putInt(rowId, (int) value);
+          break;
+        default:
+          throw new RuntimeException("internal error: " + debugInfo());
+      }
+    }
+
+    @Override
+    public void encode(int rowId, float value) {
+      switch (targetDataType) {
+        case BYTE:
+          encodedPage.putByte(rowId, (byte) value);
+          break;
+        case SHORT:
+          encodedPage.putShort(rowId, (short) value);
+          break;
+        case SHORT_INT:
+          encodedPage.putShortInt(rowId, (int) value);
+          break;
+        case INT:
+          encodedPage.putInt(rowId, (int) value);
+          break;
+        default:
+          throw new RuntimeException("internal error: " + debugInfo());
+      }
+    }
+
+    @Override
+    public void encode(int rowId, double value) {
+      switch (targetDataType) {
+        case BYTE:
+          encodedPage.putByte(rowId, (byte) value);
+          break;
+        case SHORT:
+          encodedPage.putShort(rowId, (short) value);
+          break;
+        case SHORT_INT:
+          encodedPage.putShortInt(rowId, (int) value);
+          break;
+        case INT:
+          encodedPage.putInt(rowId, (int) value);
+          break;
+        case LONG:
+          encodedPage.putLong(rowId, (long) value);
+          break;
+        default:
+          throw new RuntimeException("internal error: " + debugInfo());
+      }
+    }
+
+    @Override
+    public long decodeLong(byte value) {
+      return value;
+    }
+
+    @Override
+    public long decodeLong(short value) {
+      return value;
+    }
+
+    @Override
+    public long decodeLong(int value) {
+      return value;
+    }
+
+    @Override
+    public double decodeDouble(byte value) {
+      return value;
+    }
+
+    @Override
+    public double decodeDouble(short value) {
+      return value;
+    }
+
+    @Override
+    public double decodeDouble(int value) {
+      return value;
+    }
+
+    @Override
+    public double decodeDouble(long value) {
+      return value;
+    }
+
+    @Override
+    public double decodeDouble(float value) {
+      throw new RuntimeException("internal error: " + debugInfo());
+    }
+
+    @Override
+    public double decodeDouble(double value) {
+      throw new RuntimeException("internal error: " + debugInfo());
+    }
+  };
+
+}
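
The essence of the adaptive codec above is a lossless narrowing cast on write and a widening cast on read: when the page statistics show every value fits in the target type, casting down loses nothing, and the original value is recovered by Java's implicit widening on decode. A minimal sketch of that round trip (plain arrays stand in for ColumnPage and LazyColumnPage):

    public class DowncastSketch {
      public static void main(String[] args) {
        long[] page = { 100, -3000, 32000 };   // min/max fit in a short
        short[] encoded = new short[page.length];
        for (int i = 0; i < page.length; i++) {
          encoded[i] = (short) page[i];        // narrowing cast, lossless here
        }
        for (short v : encoded) {
          long decoded = v;                    // widening cast restores the value
          System.out.println(decoded);         // 100, -3000, 32000
        }
      }
    }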

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageCodec.java
index 36d5989..a77bf69 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageCodec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageCodec.java
@@ -34,11 +34,9 @@ public interface ColumnPageCodec {
   String getName();
 
   /**
-   * apply a column page and output encoded byte array
-   * @param input column page to apply
-   * @return encoded data
+   * encode a column page and return the encoded data
    */
-  byte[] encode(ColumnPage input) throws MemoryException, IOException;
+  EncodedColumnPage encode(ColumnPage input) throws MemoryException, IOException;
 
   /**
    * decode byte array from offset to a column page
@@ -48,4 +46,5 @@ public interface ColumnPageCodec {
    * @return decoded data
    */
   ColumnPage decode(byte[] input, int offset, int length) throws MemoryException;
+
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingStrategy.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingStrategy.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingStrategy.java
index 659dc2a..d2d3a44 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingStrategy.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingStrategy.java
@@ -19,7 +19,7 @@ package org.apache.carbondata.core.datastore.page.encoding;
 
 import org.apache.carbondata.core.datastore.compression.Compressor;
 import org.apache.carbondata.core.datastore.compression.CompressorFactory;
-import org.apache.carbondata.core.datastore.page.statistics.ColumnPageStatsVO;
+import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 
 /**
@@ -32,29 +32,64 @@ public class DefaultEncodingStrategy extends EncodingStrategy {
   private static final int THREE_BYTES_MAX = (int) Math.pow(2, 23) - 1;
   private static final int THREE_BYTES_MIN = - THREE_BYTES_MAX - 1;
 
-  // fit the long input value into minimum data type
-  private static DataType fitDataType(long value) {
-    if (value <= Byte.MAX_VALUE && value >= Byte.MIN_VALUE) {
+  private DataType fitLongMinMax(long max, long min) {
+    if (max <= Byte.MAX_VALUE && min >= Byte.MIN_VALUE) {
       return DataType.BYTE;
-    } else if (value <= Short.MAX_VALUE && value >= Short.MIN_VALUE) {
+    } else if (max <= Short.MAX_VALUE && min >= Short.MIN_VALUE) {
       return DataType.SHORT;
-    } else if (value <= THREE_BYTES_MAX && value >= THREE_BYTES_MIN) {
+    } else if (max <= THREE_BYTES_MAX && min >= THREE_BYTES_MIN) {
       return DataType.SHORT_INT;
-    } else if (value <= Integer.MAX_VALUE && value >= Integer.MIN_VALUE) {
+    } else if (max <= Integer.MAX_VALUE && min >= Integer.MIN_VALUE) {
       return DataType.INT;
     } else {
       return DataType.LONG;
     }
   }
 
-  private DataType fitDataType(long max, long min) {
-    if (max <= Byte.MAX_VALUE && min >= Byte.MIN_VALUE) {
+  private DataType fitMinMax(DataType dataType, Object max, Object min) {
+    switch (dataType) {
+      case BYTE:
+        return fitLongMinMax((byte) max, (byte) min);
+      case SHORT:
+        return fitLongMinMax((short) max, (short) min);
+      case INT:
+        return fitLongMinMax((int) max, (int) min);
+      case LONG:
+        return fitLongMinMax((long) max, (long) min);
+      case DOUBLE:
+        return DataType.DOUBLE;
+      default:
+        throw new RuntimeException("internal error: " + dataType);
+    }
+  }
+
+  // fit the long input value into minimum data type
+  private DataType fitDelta(DataType dataType, Object max, Object min) {
+    // use long data type to calculate delta to avoid overflow
+    long value;
+    switch (dataType) {
+      case BYTE:
+        value = (long)(byte) max - (long)(byte) min;
+        break;
+      case SHORT:
+        value = (long)(short) max - (long)(short) min;
+        break;
+      case INT:
+        value = (long)(int) max - (long)(int) min;
+        break;
+      case LONG:
+        // TODO: add overflow detection and return delta type
+        return DataType.LONG;
+      default:
+        throw new RuntimeException("internal error: " + dataType);
+    }
+    if (value <= Byte.MAX_VALUE && value >= Byte.MIN_VALUE) {
       return DataType.BYTE;
-    } else if (max <= Short.MAX_VALUE && min >= Short.MIN_VALUE) {
+    } else if (value <= Short.MAX_VALUE && value >= Short.MIN_VALUE) {
       return DataType.SHORT;
-    } else if (max <= THREE_BYTES_MAX && min >= THREE_BYTES_MIN) {
+    } else if (value <= THREE_BYTES_MAX && value >= THREE_BYTES_MIN) {
       return DataType.SHORT_INT;
-    } else if (max <= Integer.MAX_VALUE && min >= Integer.MIN_VALUE) {
+    } else if (value <= Integer.MAX_VALUE && value >= Integer.MIN_VALUE) {
       return DataType.INT;
     } else {
       return DataType.LONG;
@@ -63,10 +98,9 @@ public class DefaultEncodingStrategy extends EncodingStrategy {
 
   // choose between adaptive encoder or delta adaptive encoder, based on whose target data type
   // size is smaller
-  @Override
-  ColumnPageCodec newCodecForIntegerType(ColumnPageStatsVO stats) {
+  @Override ColumnPageCodec newCodecForIntegralType(SimpleStatsResult stats) {
     DataType srcDataType = stats.getDataType();
-    DataType adaptiveDataType = fitDataType((long)stats.getMax(), (long)stats.getMin());
+    DataType adaptiveDataType = fitMinMax(stats.getDataType(), stats.getMax(), stats.getMin());
     DataType deltaDataType;
 
     // TODO: this handling is for data compatibility, change to Override check when implementing
@@ -74,7 +108,7 @@ public class DefaultEncodingStrategy extends EncodingStrategy {
     if (adaptiveDataType == DataType.LONG) {
       deltaDataType = DataType.LONG;
     } else {
-      deltaDataType = fitDataType((long) stats.getMax() - (long) stats.getMin());
+      deltaDataType = fitDelta(stats.getDataType(), stats.getMax(), stats.getMin());
     }
     if (Math.min(adaptiveDataType.getSizeInBytes(), deltaDataType.getSizeInBytes()) ==
         srcDataType.getSizeInBytes()) {
@@ -83,27 +117,24 @@ public class DefaultEncodingStrategy extends EncodingStrategy {
     }
     if (adaptiveDataType.getSizeInBytes() <= deltaDataType.getSizeInBytes()) {
       // choose adaptive encoding
-      return AdaptiveIntegerCodec.newInstance(
+      return AdaptiveIntegralCodec.newInstance(
           stats.getDataType(), adaptiveDataType, stats, compressor);
     } else {
       // choose delta adaptive encoding
-      return DeltaIntegerCodec.newInstance(stats.getDataType(), deltaDataType, stats, compressor);
+      return DeltaIntegralCodec.newInstance(stats.getDataType(), deltaDataType, stats, compressor);
     }
   }
 
-  @Override
-  ColumnPageCodec newCodecForFloatingType(ColumnPageStatsVO stats) {
+  @Override ColumnPageCodec newCodecForFloatingType(SimpleStatsResult stats) {
     return DirectCompressCodec.newInstance(stats, compressor);
   }
 
   // for decimal, currently it is a very basic implementation
-  @Override
-  ColumnPageCodec newCodecForDecimalType(ColumnPageStatsVO stats) {
+  @Override ColumnPageCodec newCodecForDecimalType(SimpleStatsResult stats) {
     return DirectCompressCodec.newInstance(stats, compressor);
   }
 
-  @Override
-  ColumnPageCodec newCodecForByteArrayType(ColumnPageStatsVO stats) {
+  @Override ColumnPageCodec newCodecForByteArrayType(SimpleStatsResult stats) {
     return DirectCompressCodec.newInstance(stats, compressor);
   }
 }
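
A minimal self-contained sketch (plain Java, hypothetical names, not the CarbonData API) of the size comparison this strategy performs; note the real strategy also knows the 3-byte SHORT_INT type. With values in [1000, 1100], the adaptive type must hold the raw max (2 bytes) while the delta of max and min fits in 1 byte, so delta encoding wins:

    public class FitTypeDemo {
      // map a long value to the smallest JDK type that can hold it
      static String fit(long v) {
        if (v >= Byte.MIN_VALUE && v <= Byte.MAX_VALUE) return "BYTE (1 byte)";
        if (v >= Short.MIN_VALUE && v <= Short.MAX_VALUE) return "SHORT (2 bytes)";
        if (v >= Integer.MIN_VALUE && v <= Integer.MAX_VALUE) return "INT (4 bytes)";
        return "LONG (8 bytes)";
      }

      public static void main(String[] args) {
        long min = 1000, max = 1100;
        // adaptive encoding must hold the raw max; delta only the spread
        System.out.println("adaptive: " + fit(max));       // SHORT (2 bytes)
        System.out.println("delta:    " + fit(max - min)); // BYTE (1 byte)
      }
    }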

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegerCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegerCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegerCodec.java
deleted file mode 100644
index a45552a..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegerCodec.java
+++ /dev/null
@@ -1,255 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.page.encoding;
-
-import java.io.IOException;
-
-import org.apache.carbondata.core.datastore.compression.Compressor;
-import org.apache.carbondata.core.datastore.page.ColumnPage;
-import org.apache.carbondata.core.datastore.page.LazyColumnPage;
-import org.apache.carbondata.core.datastore.page.PrimitiveCodec;
-import org.apache.carbondata.core.datastore.page.statistics.ColumnPageStatsVO;
-import org.apache.carbondata.core.memory.MemoryException;
-import org.apache.carbondata.core.metadata.datatype.DataType;
-
-/**
- * Codec for integer (byte, short, int, long) data type page.
- * This codec will calculate delta of page max value and page value,
- * and do type casting of the diff to make storage minimum.
- */
-public class DeltaIntegerCodec extends AdaptiveCompressionCodec {
-
-  private ColumnPage encodedPage;
-
-  private long max;
-
-  public static DeltaIntegerCodec newInstance(DataType srcDataType, DataType targetDataType,
-      ColumnPageStatsVO stats, Compressor compressor) {
-    return new DeltaIntegerCodec(srcDataType, targetDataType, stats, compressor);
-  }
-
-  private DeltaIntegerCodec(DataType srcDataType, DataType targetDataType,
-      ColumnPageStatsVO stats, Compressor compressor) {
-    super(srcDataType, targetDataType, stats, compressor);
-    switch (srcDataType) {
-      case BYTE:
-      case SHORT:
-      case INT:
-      case LONG:
-        max = (long) stats.getMax();
-        break;
-      case FLOAT:
-      case DOUBLE:
-        max = (long)((double) stats.getMax());
-        break;
-    }
-  }
-
-  @Override
-  public String getName() {
-    return "DeltaIntegerCodec";
-  }
-
-  @Override
-  public byte[] encode(ColumnPage input) throws MemoryException, IOException {
-    encodedPage = ColumnPage
-        .newPage(targetDataType, input.getPageSize(), stats.getScale(), stats.getPrecision());
-    input.encode(codec);
-    byte[] result = encodedPage.compress(compressor);
-    encodedPage.freeMemory();
-    return result;
-  }
-
-  @Override
-  public ColumnPage decode(byte[] input, int offset, int length) throws MemoryException {
-    if (srcDataType.equals(targetDataType)) {
-      return ColumnPage
-          .decompress(compressor, targetDataType, input, offset, length, stats.getScale(),
-              stats.getPrecision());
-    } else {
-      ColumnPage page = ColumnPage
-          .decompress(compressor, targetDataType, input, offset, length, stats.getScale(),
-              stats.getPrecision());
-      return LazyColumnPage.newPage(page, codec);
-    }
-  }
-
-  // encoded value = (max value of page) - (page value)
-  private PrimitiveCodec codec = new PrimitiveCodec() {
-    @Override
-    public void encode(int rowId, byte value) {
-      switch (targetDataType) {
-        case BYTE:
-          encodedPage.putByte(rowId, (byte)(max - value));
-          break;
-        default:
-          throw new RuntimeException("internal error: " + debugInfo());
-      }
-    }
-
-    @Override
-    public void encode(int rowId, short value) {
-      switch (targetDataType) {
-        case BYTE:
-          encodedPage.putByte(rowId, (byte)(max - value));
-          break;
-        case SHORT:
-          encodedPage.putShort(rowId, (short)(max - value));
-          break;
-        default:
-          throw new RuntimeException("internal error: " + debugInfo());
-      }
-    }
-
-    @Override
-    public void encode(int rowId, int value) {
-      switch (targetDataType) {
-        case BYTE:
-          encodedPage.putByte(rowId, (byte)(max - value));
-          break;
-        case SHORT:
-          encodedPage.putShort(rowId, (short)(max - value));
-          break;
-        case SHORT_INT:
-          encodedPage.putShortInt(rowId, (int)(max - value));
-          break;
-        case INT:
-          encodedPage.putInt(rowId, (int)(max - value));
-          break;
-        default:
-          throw new RuntimeException("internal error: " + debugInfo());
-      }
-    }
-
-    @Override
-    public void encode(int rowId, long value) {
-      switch (targetDataType) {
-        case BYTE:
-          encodedPage.putByte(rowId, (byte)(max - value));
-          break;
-        case SHORT:
-          encodedPage.putShort(rowId, (short)(max - value));
-          break;
-        case SHORT_INT:
-          encodedPage.putShortInt(rowId, (int)(max - value));
-          break;
-        case INT:
-          encodedPage.putInt(rowId, (int)(max - value));
-          break;
-        case LONG:
-          encodedPage.putLong(rowId, max - value);
-          break;
-        default:
-          throw new RuntimeException("internal error: " + debugInfo());
-      }
-    }
-
-    @Override
-    public void encode(int rowId, float value) {
-      switch (targetDataType) {
-        case BYTE:
-          encodedPage.putByte(rowId, (byte)(max - value));
-          break;
-        case SHORT:
-          encodedPage.putShort(rowId, (short)(max - value));
-          break;
-        case SHORT_INT:
-          encodedPage.putShortInt(rowId, (int)(max - value));
-          break;
-        case INT:
-          encodedPage.putInt(rowId, (int)(max - value));
-          break;
-        case LONG:
-          encodedPage.putLong(rowId, (long)(max - value));
-          break;
-        default:
-          throw new RuntimeException("internal error: " + debugInfo());
-      }
-    }
-
-    @Override
-    public void encode(int rowId, double value) {
-      switch (targetDataType) {
-        case BYTE:
-          encodedPage.putByte(rowId, (byte)(max - value));
-          break;
-        case SHORT:
-          encodedPage.putShort(rowId, (short)(max - value));
-          break;
-        case SHORT_INT:
-          encodedPage.putShortInt(rowId, (int)(max - value));
-          break;
-        case INT:
-          encodedPage.putInt(rowId, (int)(max - value));
-          break;
-        case LONG:
-          encodedPage.putLong(rowId, (long)(max - value));
-          break;
-        default:
-          throw new RuntimeException("internal error: " + debugInfo());
-      }
-    }
-
-    @Override
-    public long decodeLong(byte value) {
-      return max - value;
-    }
-
-    @Override
-    public long decodeLong(short value) {
-      return max - value;
-    }
-
-    @Override
-    public long decodeLong(int value) {
-      return max - value;
-    }
-
-    @Override
-    public double decodeDouble(byte value) {
-      return max - value;
-    }
-
-    @Override
-    public double decodeDouble(short value) {
-      return max - value;
-    }
-
-    @Override
-    public double decodeDouble(int value) {
-      return max - value;
-    }
-
-    @Override
-    public double decodeDouble(long value) {
-      return max - value;
-    }
-
-    @Override
-    public double decodeDouble(float value) {
-      // this codec is for integer type only
-      throw new RuntimeException("internal error: " + debugInfo());
-    }
-
-    @Override
-    public double decodeDouble(double value) {
-      // this codec is for integer type only
-      throw new RuntimeException("internal error: " + debugInfo());
-    }
-  };
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegralCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegralCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegralCodec.java
new file mode 100644
index 0000000..53a8295
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DeltaIntegralCodec.java
@@ -0,0 +1,257 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import java.io.IOException;
+
+import org.apache.carbondata.core.datastore.compression.Compressor;
+import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.datastore.page.LazyColumnPage;
+import org.apache.carbondata.core.datastore.page.PrimitiveCodec;
+import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
+import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.CodecMetaFactory;
+import org.apache.carbondata.core.metadata.datatype.DataType;
+
+/**
+ * Codec for integral (byte, short, int, long) data type pages.
+ * This codec computes the delta between the page max value and each page value,
+ * and casts the delta to a smaller type to minimize storage.
+ */
+public class DeltaIntegralCodec extends AdaptiveCompressionCodec {
+
+  private ColumnPage encodedPage;
+
+  private long max;
+
+  public static DeltaIntegralCodec newInstance(DataType srcDataType, DataType targetDataType,
+      SimpleStatsResult stats, Compressor compressor) {
+    return new DeltaIntegralCodec(srcDataType, targetDataType, stats, compressor);
+  }
+
+  private DeltaIntegralCodec(DataType srcDataType, DataType targetDataType,
+      SimpleStatsResult stats, Compressor compressor) {
+    super(srcDataType, targetDataType, stats, compressor);
+    switch (srcDataType) {
+      case BYTE:
+        max = (byte) stats.getMax();
+        break;
+      case SHORT:
+        max = (short) stats.getMax();
+        break;
+      case INT:
+        max = (int) stats.getMax();
+        break;
+      case LONG:
+        max = (long) stats.getMax();
+        break;
+      case FLOAT:
+      case DOUBLE:
+        max = (long)((double) stats.getMax());
+        break;
+    }
+  }
+
+  @Override
+  public String getName() {
+    return "DeltaIntegralCodec";
+  }
+
+  @Override
+  public EncodedColumnPage encode(ColumnPage input) throws MemoryException, IOException {
+    encodedPage = ColumnPage
+        .newPage(targetDataType, input.getPageSize(), stats.getScale(), stats.getPrecision());
+    input.encode(codec);
+    byte[] result = encodedPage.compress(compressor);
+    encodedPage.freeMemory();
+    return new EncodedMeasurePage(input.getPageSize(),
+        result,
+        CodecMetaFactory.createMeta(stats, targetDataType),
+        ((SimpleStatsResult)input.getStatistics()).getNullBits());
+  }
+
+  @Override
+  public ColumnPage decode(byte[] input, int offset, int length) throws MemoryException {
+    ColumnPage page = ColumnPage.decompress(compressor, targetDataType, input, offset, length);
+    return LazyColumnPage.newPage(page, codec);
+  }
+
+  // encoded value = (max value of page) - (page value)
+  private PrimitiveCodec codec = new PrimitiveCodec() {
+    @Override
+    public void encode(int rowId, byte value) {
+      switch (targetDataType) {
+        case BYTE:
+          encodedPage.putByte(rowId, (byte)(max - value));
+          break;
+        default:
+          throw new RuntimeException("internal error: " + debugInfo());
+      }
+    }
+
+    @Override
+    public void encode(int rowId, short value) {
+      switch (targetDataType) {
+        case BYTE:
+          encodedPage.putByte(rowId, (byte)(max - value));
+          break;
+        case SHORT:
+          encodedPage.putShort(rowId, (short)(max - value));
+          break;
+        default:
+          throw new RuntimeException("internal error: " + debugInfo());
+      }
+    }
+
+    @Override
+    public void encode(int rowId, int value) {
+      switch (targetDataType) {
+        case BYTE:
+          encodedPage.putByte(rowId, (byte)(max - value));
+          break;
+        case SHORT:
+          encodedPage.putShort(rowId, (short)(max - value));
+          break;
+        case SHORT_INT:
+          encodedPage.putShortInt(rowId, (int)(max - value));
+          break;
+        case INT:
+          encodedPage.putInt(rowId, (int)(max - value));
+          break;
+        default:
+          throw new RuntimeException("internal error: " + debugInfo());
+      }
+    }
+
+    @Override
+    public void encode(int rowId, long value) {
+      switch (targetDataType) {
+        case BYTE:
+          encodedPage.putByte(rowId, (byte)(max - value));
+          break;
+        case SHORT:
+          encodedPage.putShort(rowId, (short)(max - value));
+          break;
+        case SHORT_INT:
+          encodedPage.putShortInt(rowId, (int)(max - value));
+          break;
+        case INT:
+          encodedPage.putInt(rowId, (int)(max - value));
+          break;
+        case LONG:
+          encodedPage.putLong(rowId, max - value);
+          break;
+        default:
+          throw new RuntimeException("internal error: " + debugInfo());
+      }
+    }
+
+    @Override
+    public void encode(int rowId, float value) {
+      switch (targetDataType) {
+        case BYTE:
+          encodedPage.putByte(rowId, (byte)(max - value));
+          break;
+        case SHORT:
+          encodedPage.putShort(rowId, (short)(max - value));
+          break;
+        case SHORT_INT:
+          encodedPage.putShortInt(rowId, (int)(max - value));
+          break;
+        case INT:
+          encodedPage.putInt(rowId, (int)(max - value));
+          break;
+        case LONG:
+          encodedPage.putLong(rowId, (long)(max - value));
+          break;
+        default:
+          throw new RuntimeException("internal error: " + debugInfo());
+      }
+    }
+
+    @Override
+    public void encode(int rowId, double value) {
+      switch (targetDataType) {
+        case BYTE:
+          encodedPage.putByte(rowId, (byte)(max - value));
+          break;
+        case SHORT:
+          encodedPage.putShort(rowId, (short)(max - value));
+          break;
+        case SHORT_INT:
+          encodedPage.putShortInt(rowId, (int)(max - value));
+          break;
+        case INT:
+          encodedPage.putInt(rowId, (int)(max - value));
+          break;
+        case LONG:
+          encodedPage.putLong(rowId, (long)(max - value));
+          break;
+        default:
+          throw new RuntimeException("internal error: " + debugInfo());
+      }
+    }
+
+    @Override
+    public long decodeLong(byte value) {
+      return max - value;
+    }
+
+    @Override
+    public long decodeLong(short value) {
+      return max - value;
+    }
+
+    @Override
+    public long decodeLong(int value) {
+      return max - value;
+    }
+
+    @Override
+    public double decodeDouble(byte value) {
+      return max - value;
+    }
+
+    @Override
+    public double decodeDouble(short value) {
+      return max - value;
+    }
+
+    @Override
+    public double decodeDouble(int value) {
+      return max - value;
+    }
+
+    @Override
+    public double decodeDouble(long value) {
+      return max - value;
+    }
+
+    @Override
+    public double decodeDouble(float value) {
+      // this codec is for integer type only
+      throw new RuntimeException("internal error: " + debugInfo());
+    }
+
+    @Override
+    public double decodeDouble(double value) {
+      // this codec is for integer type only
+      throw new RuntimeException("internal error: " + debugInfo());
+    }
+  };
+}
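
A tiny worked example of the scheme above, where the stored value is (page max - value); self-contained sketch with hypothetical data, not the CarbonData API:

    public class DeltaDemo {
      public static void main(String[] args) {
        long[] page = {1000000007L, 1000000003L, 1000000000L};
        long max = 1000000007L;                  // taken from the page statistics
        byte[] encoded = new byte[page.length];
        for (int i = 0; i < page.length; i++) {
          encoded[i] = (byte) (max - page[i]);   // deltas 0, 4, 7 fit in one byte
        }
        for (int i = 0; i < page.length; i++) {
          long decoded = max - encoded[i];       // mirrors decodeLong above
          System.out.println(page[i] + " -> " + encoded[i] + " -> " + decoded);
        }
      }
    }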

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectCompressCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectCompressCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectCompressCodec.java
index d608fea..a1d4b61 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectCompressCodec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectCompressCodec.java
@@ -21,8 +21,11 @@ import java.io.IOException;
 
 import org.apache.carbondata.core.datastore.compression.Compressor;
 import org.apache.carbondata.core.datastore.page.ColumnPage;
-import org.apache.carbondata.core.datastore.page.statistics.ColumnPageStatsVO;
+import org.apache.carbondata.core.datastore.page.LazyColumnPage;
+import org.apache.carbondata.core.datastore.page.PrimitiveCodec;
+import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
 import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.CodecMetaFactory;
 
 /**
  * This codec directly apply compression on the input data
@@ -30,14 +33,14 @@ import org.apache.carbondata.core.memory.MemoryException;
 public class DirectCompressCodec implements ColumnPageCodec {
 
   private Compressor compressor;
-  private ColumnPageStatsVO stats;
+  private SimpleStatsResult stats;
 
-  private DirectCompressCodec(ColumnPageStatsVO stats, Compressor compressor) {
+  private DirectCompressCodec(SimpleStatsResult stats, Compressor compressor) {
     this.compressor = compressor;
     this.stats = stats;
   }
 
-  public static DirectCompressCodec newInstance(ColumnPageStatsVO stats, Compressor compressor) {
+  public static DirectCompressCodec newInstance(SimpleStatsResult stats, Compressor compressor) {
     return new DirectCompressCodec(stats, compressor);
   }
 
@@ -47,14 +50,89 @@ public class DirectCompressCodec implements ColumnPageCodec {
   }
 
   @Override
-  public byte[] encode(ColumnPage input) throws IOException, MemoryException {
-    return input.compress(compressor);
+  public EncodedColumnPage encode(ColumnPage input) throws IOException, MemoryException {
+    byte[] result = input.compress(compressor);
+    return new EncodedMeasurePage(input.getPageSize(), result,
+        CodecMetaFactory.createMeta(stats, stats.getDataType()),
+        ((SimpleStatsResult)input.getStatistics()).getNullBits());
   }
 
   @Override
   public ColumnPage decode(byte[] input, int offset, int length) throws MemoryException {
-    return ColumnPage
+    ColumnPage page = ColumnPage
         .decompress(compressor, stats.getDataType(), input, offset, length, stats.getScale(),
             stats.getPrecision());
+    return LazyColumnPage.newPage(page, codec);
   }
+
+  private PrimitiveCodec codec = new PrimitiveCodec() {
+    @Override
+    public void encode(int rowId, byte value) {
+    }
+
+    @Override
+    public void encode(int rowId, short value) {
+    }
+
+    @Override
+    public void encode(int rowId, int value) {
+    }
+
+    @Override
+    public void encode(int rowId, long value) {
+    }
+
+    @Override
+    public void encode(int rowId, float value) {
+    }
+
+    @Override
+    public void encode(int rowId, double value) {
+    }
+
+    @Override
+    public long decodeLong(byte value) {
+      return value;
+    }
+
+    @Override
+    public long decodeLong(short value) {
+      return value;
+    }
+
+    @Override
+    public long decodeLong(int value) {
+      return value;
+    }
+
+    @Override
+    public double decodeDouble(byte value) {
+      return value;
+    }
+
+    @Override
+    public double decodeDouble(short value) {
+      return value;
+    }
+
+    @Override
+    public double decodeDouble(int value) {
+      return value;
+    }
+
+    @Override
+    public double decodeDouble(long value) {
+      return value;
+    }
+
+    @Override
+    public double decodeDouble(float value) {
+      return value;
+    }
+
+    @Override
+    public double decodeDouble(double value) {
+      return value;
+    }
+  };
 }
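
The pass-through PrimitiveCodec added above exists so that directly compressed pages flow through the same LazyColumnPage decode path as adaptive and delta pages. Conceptually (self-contained sketch, hypothetical names):

    public class TransformDemo {
      interface LongTransform { long decode(long stored); }

      public static void main(String[] args) {
        LongTransform identity = stored -> stored;       // DirectCompressCodec: keep value as-is
        LongTransform delta = stored -> 107L - stored;   // DeltaIntegralCodec-style, max = 107
        long stored = 7L;
        System.out.println(identity.decode(stored));     // 7
        System.out.println(delta.decode(stored));        // 100
      }
    }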

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedColumnPage.java
new file mode 100644
index 0000000..1630e06
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedColumnPage.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.BitSet;
+
+import org.apache.carbondata.format.DataChunk2;
+
+/**
+ * A column page after encoding and compression.
+ */
+public abstract class EncodedColumnPage {
+
+  // number of row of this page
+  protected int pageSize;
+
+  // encoded and compressed column page data
+  protected byte[] encodedData;
+
+  protected BitSet nullBitSet;
+
+  // metadata of this page
+  protected DataChunk2 dataChunk2;
+
+  EncodedColumnPage(int pageSize, byte[] encodedData) {
+    this.pageSize = pageSize;
+    this.encodedData = encodedData;
+  }
+
+  public abstract DataChunk2 buildDataChunk2() throws IOException;
+
+  /**
+   * return the encoded and compressed data page
+   */
+  public byte[] getEncodedData() {
+    return encodedData;
+  }
+
+  /**
+   * return the serialized size of this page
+   */
+  public int getSerializedSize() {
+    return encodedData.length;
+  }
+
+  public ByteBuffer serialize() {
+    return ByteBuffer.wrap(encodedData);
+  }
+
+  public DataChunk2 getDataChunk2() {
+    return dataChunk2;
+  }
+
+  public void setNullBitSet(BitSet nullBitSet) {
+    this.nullBitSet = nullBitSet;
+  }
+
+  public BitSet getNullBitSet() {
+    return nullBitSet;
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedData.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedData.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedData.java
deleted file mode 100644
index 0d1b2e4..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedData.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.page.encoding;
-
-import org.apache.carbondata.core.datastore.columnar.IndexStorage;
-
-// result result of all columns
-public class EncodedData {
-  // dimension data that include rowid (index)
-  public IndexStorage[] indexStorages;
-
-  // encoded and compressed dimension data
-  public byte[][] dimensions;
-
-  // encoded and compressed measure data
-  public byte[][] measures;
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedDimensionPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedDimensionPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedDimensionPage.java
new file mode 100644
index 0000000..30d58cf
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedDimensionPage.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.datastore.DimensionType;
+import org.apache.carbondata.core.datastore.columnar.IndexStorage;
+import org.apache.carbondata.core.datastore.page.statistics.TablePageStatistics;
+import org.apache.carbondata.core.util.CarbonMetadataUtil;
+import org.apache.carbondata.format.BlockletMinMaxIndex;
+import org.apache.carbondata.format.DataChunk2;
+import org.apache.carbondata.format.Encoding;
+import org.apache.carbondata.format.SortState;
+
+/**
+ * Encoded dimension page that includes data and inverted index
+ */
+public class EncodedDimensionPage extends EncodedColumnPage {
+  private IndexStorage indexStorage;
+  private DimensionType dimensionType;
+
+  public EncodedDimensionPage(int pageSize, byte[] encodedData, IndexStorage indexStorage,
+      DimensionType dimensionType) {
+    super(pageSize, encodedData);
+    this.indexStorage = indexStorage;
+    this.dimensionType = dimensionType;
+    this.dataChunk2 = buildDataChunk2();
+  }
+
+  private int getTotalRowIdPageLengthInBytes() {
+    return CarbonCommonConstants.INT_SIZE_IN_BYTE +
+        indexStorage.getRowIdPageLengthInBytes() + indexStorage.getRowIdRlePageLengthInBytes();
+  }
+
+  @Override
+  public int getSerializedSize() {
+    int size = encodedData.length;
+    if (indexStorage.getRowIdPageLengthInBytes() > 0) {
+      size += getTotalRowIdPageLengthInBytes();
+    }
+    if (indexStorage.getDataRlePageLengthInBytes() > 0) {
+      size += indexStorage.getDataRlePageLengthInBytes();
+    }
+    return size;
+  }
+
+  @Override
+  public ByteBuffer serialize() {
+    ByteBuffer buffer = ByteBuffer.allocate(getSerializedSize());
+    buffer.put(encodedData);
+    if (indexStorage.getRowIdPageLengthInBytes() > 0) {
+      buffer.putInt(indexStorage.getRowIdPageLengthInBytes());
+      short[] rowIdPage = (short[])indexStorage.getRowIdPage();
+      for (short rowId : rowIdPage) {
+        buffer.putShort(rowId);
+      }
+      if (indexStorage.getRowIdRlePageLengthInBytes() > 0) {
+        short[] rowIdRlePage = (short[])indexStorage.getRowIdRlePage();
+        for (short rowIdRle : rowIdRlePage) {
+          buffer.putShort(rowIdRle);
+        }
+      }
+    }
+    if (indexStorage.getDataRlePageLengthInBytes() > 0) {
+      short[] dataRlePage = (short[])indexStorage.getDataRlePage();
+      for (short dataRle : dataRlePage) {
+        buffer.putShort(dataRle);
+      }
+    }
+    buffer.flip();
+    return buffer;
+  }
+
+  @Override
+  public DataChunk2 buildDataChunk2() {
+    DataChunk2 dataChunk = new DataChunk2();
+    dataChunk.min_max = new BlockletMinMaxIndex();
+    dataChunk.setChunk_meta(CarbonMetadataUtil.getSnappyChunkCompressionMeta());
+    dataChunk.setNumberOfRowsInpage(pageSize);
+    List<Encoding> encodings = new ArrayList<Encoding>();
+    dataChunk.setData_page_length(encodedData.length);
+    if (dimensionType == DimensionType.GLOBAL_DICTIONARY ||
+        dimensionType == DimensionType.DIRECT_DICTIONARY ||
+        dimensionType == DimensionType.COMPLEX) {
+      encodings.add(Encoding.DICTIONARY);
+    }
+    if (dimensionType == DimensionType.DIRECT_DICTIONARY) {
+      encodings.add(Encoding.DIRECT_DICTIONARY);
+    }
+    if (indexStorage.getDataRlePageLengthInBytes() > 0 ||
+        dimensionType == DimensionType.GLOBAL_DICTIONARY) {
+      dataChunk.setRle_page_length(indexStorage.getDataRlePageLengthInBytes());
+      encodings.add(Encoding.RLE);
+    }
+    SortState sort = (indexStorage.getRowIdPageLengthInBytes() > 0) ?
+        SortState.SORT_EXPLICIT : SortState.SORT_NATIVE;
+    dataChunk.setSort_state(sort);
+    if (indexStorage.getRowIdPageLengthInBytes() > 0) {
+      dataChunk.setRowid_page_length(getTotalRowIdPageLengthInBytes());
+      encodings.add(Encoding.INVERTED_INDEX);
+    }
+    if (dimensionType == DimensionType.PLAIN_VALUE) {
+      dataChunk.min_max.addToMax_values(ByteBuffer.wrap(
+          TablePageStatistics.updateMinMaxForNoDictionary(indexStorage.getMax())));
+      dataChunk.min_max.addToMin_values(ByteBuffer.wrap(
+          TablePageStatistics.updateMinMaxForNoDictionary(indexStorage.getMin())));
+    } else {
+      dataChunk.min_max.addToMax_values(ByteBuffer.wrap(indexStorage.getMax()));
+      dataChunk.min_max.addToMin_values(ByteBuffer.wrap(indexStorage.getMin()));
+    }
+    dataChunk.setEncoders(encodings);
+    return dataChunk;
+  }
+
+  public IndexStorage getIndexStorage() {
+    return indexStorage;
+  }
+
+  public DimensionType getDimensionType() {
+    return dimensionType;
+  }
+}
\ No newline at end of file
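
serialize() above writes the page as [data][rowId page length (int)][rowId page][rowId RLE page][data RLE page], skipping the optional parts when they are empty. A self-contained sketch of the same layout arithmetic (hypothetical sizes, not the CarbonData API):

    import java.nio.ByteBuffer;

    public class DimensionPageLayoutDemo {
      public static void main(String[] args) {
        byte[] data = {1, 2, 3, 4};       // encoded + compressed dimension data
        short[] rowIds = {2, 0, 1, 3};    // inverted index page
        int rowIdPageLen = rowIds.length * 2;
        ByteBuffer buffer = ByteBuffer.allocate(data.length + 4 + rowIdPageLen);
        buffer.put(data);
        buffer.putInt(rowIdPageLen);      // length field precedes the rowId shorts
        for (short id : rowIds) {
          buffer.putShort(id);
        }
        buffer.flip();
        System.out.println("serialized size = " + buffer.remaining());  // 16
      }
    }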

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedMeasurePage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedMeasurePage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedMeasurePage.java
new file mode 100644
index 0000000..0ef48c6
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodedMeasurePage.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.BitSet;
+import java.util.List;
+
+import org.apache.carbondata.core.datastore.compression.Compressor;
+import org.apache.carbondata.core.datastore.compression.CompressorFactory;
+import org.apache.carbondata.core.metadata.ColumnPageCodecMeta;
+import org.apache.carbondata.core.metadata.ValueEncoderMeta;
+import org.apache.carbondata.core.util.CarbonMetadataUtil;
+import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.format.BlockletMinMaxIndex;
+import org.apache.carbondata.format.DataChunk2;
+import org.apache.carbondata.format.Encoding;
+import org.apache.carbondata.format.PresenceMeta;
+
+/**
+ * Encoded measure page that includes data and statistics
+ */
+public class EncodedMeasurePage extends EncodedColumnPage {
+
+  private ValueEncoderMeta metaData;
+
+  public EncodedMeasurePage(int pageSize, byte[] encodedData, ValueEncoderMeta metaData,
+      BitSet nullBitSet) throws IOException {
+    super(pageSize, encodedData);
+    this.metaData = metaData;
+    this.nullBitSet = nullBitSet;
+    this.dataChunk2 = buildDataChunk2();
+  }
+
+  @Override
+  public DataChunk2 buildDataChunk2() throws IOException {
+    DataChunk2 dataChunk = new DataChunk2();
+    dataChunk.min_max = new BlockletMinMaxIndex();
+    dataChunk.setChunk_meta(CarbonMetadataUtil.getSnappyChunkCompressionMeta());
+    dataChunk.setNumberOfRowsInpage(pageSize);
+    dataChunk.setData_page_length(encodedData.length);
+    dataChunk.setRowMajor(false);
+    // TODO: change this encoding list as per the actual encoders used.
+    List<Encoding> encodings = new ArrayList<Encoding>();
+    encodings.add(Encoding.DELTA);
+    dataChunk.setEncoders(encodings);
+    PresenceMeta presenceMeta = new PresenceMeta();
+    presenceMeta.setPresent_bit_streamIsSet(true);
+    Compressor compressor = CompressorFactory.getInstance().getCompressor();
+    presenceMeta.setPresent_bit_stream(compressor.compressByte(nullBitSet.toByteArray()));
+    dataChunk.setPresence(presenceMeta);
+    List<ByteBuffer> encoderMetaList = new ArrayList<ByteBuffer>();
+    if (metaData instanceof ColumnPageCodecMeta) {
+      ColumnPageCodecMeta meta = (ColumnPageCodecMeta) metaData;
+      encoderMetaList.add(ByteBuffer.wrap(meta.serialize()));
+      dataChunk.min_max.addToMax_values(ByteBuffer.wrap(meta.getMaxAsBytes()));
+      dataChunk.min_max.addToMin_values(ByteBuffer.wrap(meta.getMinAsBytes()));
+    } else {
+      encoderMetaList.add(ByteBuffer.wrap(CarbonUtil.serializeEncodeMetaUsingByteBuffer(metaData)));
+      dataChunk.min_max.addToMax_values(ByteBuffer.wrap(CarbonUtil.getMaxValueAsBytes(metaData)));
+      dataChunk.min_max.addToMin_values(ByteBuffer.wrap(CarbonUtil.getMinValueAsBytes(metaData)));
+    }
+    dataChunk.setEncoder_meta(encoderMetaList);
+    return dataChunk;
+  }
+
+  public ValueEncoderMeta getMetaData() {
+    return metaData;
+  }
+}
\ No newline at end of file
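
buildDataChunk2() above persists the null positions of the page as a (compressed) bit stream in PresenceMeta. The BitSet round trip, minus the compressor, in plain JDK code:

    import java.util.BitSet;

    public class NullBitSetDemo {
      public static void main(String[] args) {
        BitSet nullBits = new BitSet(8);
        nullBits.set(1);                           // rows 1 and 5 hold nulls
        nullBits.set(5);
        byte[] stream = nullBits.toByteArray();    // this is what gets compressed
        BitSet restored = BitSet.valueOf(stream);  // reader side
        System.out.println(restored.get(1));       // true
        System.out.println(restored.get(4));       // false
      }
    }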

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java
index 77d3b74..ee13277 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java
@@ -17,7 +17,9 @@
 
 package org.apache.carbondata.core.datastore.page.encoding;
 
-import org.apache.carbondata.core.datastore.page.statistics.ColumnPageStatsVO;
+import org.apache.carbondata.core.datastore.page.statistics.PrimitivePageStatsCollector;
+import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
+import org.apache.carbondata.core.metadata.ColumnPageCodecMeta;
 import org.apache.carbondata.core.metadata.ValueEncoderMeta;
 
 /**
@@ -28,13 +30,13 @@ public abstract class EncodingStrategy {
   /**
    * create codec based on the page data type and statistics
    */
-  public ColumnPageCodec createCodec(ColumnPageStatsVO stats) {
+  public ColumnPageCodec createCodec(SimpleStatsResult stats) {
     switch (stats.getDataType()) {
       case BYTE:
       case SHORT:
       case INT:
       case LONG:
-        return newCodecForIntegerType(stats);
+        return newCodecForIntegralType(stats);
       case FLOAT:
       case DOUBLE:
         return newCodecForFloatingType(stats);
@@ -52,20 +54,58 @@ public abstract class EncodingStrategy {
    * create codec based on the page data type and statistics contained by ValueEncoderMeta
    */
   public ColumnPageCodec createCodec(ValueEncoderMeta meta, int scale, int precision) {
-    ColumnPageStatsVO stats = ColumnPageStatsVO.copyFrom(meta, scale, precision);
-    return createCodec(stats);
+    if (meta instanceof ColumnPageCodecMeta) {
+      ColumnPageCodecMeta codecMeta = (ColumnPageCodecMeta) meta;
+      SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(codecMeta);
+      switch (codecMeta.getSrcDataType()) {
+        case BYTE:
+        case SHORT:
+        case INT:
+        case LONG:
+          return newCodecForIntegralType(stats);
+        case FLOAT:
+        case DOUBLE:
+          return newCodecForFloatingType(stats);
+        case DECIMAL:
+          return newCodecForDecimalType(stats);
+        case BYTE_ARRAY:
+          // no dictionary dimension
+          return newCodecForByteArrayType(stats);
+        default:
+          throw new RuntimeException("unsupported data type: " + stats.getDataType());
+      }
+    } else {
+      SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(meta, scale, precision);
+      switch (meta.getType()) {
+        case BYTE:
+        case SHORT:
+        case INT:
+        case LONG:
+          return newCodecForIntegralType(stats);
+        case FLOAT:
+        case DOUBLE:
+          return newCodecForFloatingType(stats);
+        case DECIMAL:
+          return newCodecForDecimalType(stats);
+        case BYTE_ARRAY:
+          // no dictionary dimension
+          return newCodecForByteArrayType(stats);
+        default:
+          throw new RuntimeException("unsupported data type: " + stats.getDataType());
+      }
+    }
   }
 
   // for byte, short, int, long
-  abstract ColumnPageCodec newCodecForIntegerType(ColumnPageStatsVO stats);
+  abstract ColumnPageCodec newCodecForIntegralType(SimpleStatsResult stats);
 
   // for float, double
-  abstract ColumnPageCodec newCodecForFloatingType(ColumnPageStatsVO stats);
+  abstract ColumnPageCodec newCodecForFloatingType(SimpleStatsResult stats);
 
   // for decimal
-  abstract ColumnPageCodec newCodecForDecimalType(ColumnPageStatsVO stats);
+  abstract ColumnPageCodec newCodecForDecimalType(SimpleStatsResult stats);
 
   // for byte array
-  abstract ColumnPageCodec newCodecForByteArrayType(ColumnPageStatsVO stats);
+  abstract ColumnPageCodec newCodecForByteArrayType(SimpleStatsResult stats);
 
 }
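
Both branches above dispatch on the source data type in the same way; condensed, the selection looks like this (hypothetical sketch, not the CarbonData API):

    public class CodecDispatchDemo {
      enum DataType { BYTE, SHORT, INT, LONG, FLOAT, DOUBLE, DECIMAL, BYTE_ARRAY }

      static String codecFor(DataType type) {
        switch (type) {
          case BYTE: case SHORT: case INT: case LONG:
            return "integral codec (adaptive or delta, whichever is smaller)";
          case FLOAT: case DOUBLE:
          case DECIMAL:
          case BYTE_ARRAY:
            return "direct compress codec";
          default:
            throw new IllegalArgumentException("unsupported data type: " + type);
        }
      }

      public static void main(String[] args) {
        System.out.println(codecFor(DataType.INT));
        System.out.println(codecFor(DataType.DOUBLE));
      }
    }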

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/key/TablePageKey.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/key/TablePageKey.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/key/TablePageKey.java
new file mode 100644
index 0000000..ef8307e
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/key/TablePageKey.java
@@ -0,0 +1,212 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.key;
+
+import java.nio.ByteBuffer;
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.datastore.block.SegmentProperties;
+import org.apache.carbondata.core.datastore.row.CarbonRow;
+import org.apache.carbondata.core.datastore.row.WriteStepRowUtil;
+import org.apache.carbondata.core.keygenerator.KeyGenException;
+import org.apache.carbondata.core.keygenerator.KeyGenerator;
+import org.apache.carbondata.core.util.NonDictionaryUtil;
+
+public class TablePageKey {
+  private int pageSize;
+
+  private byte[][] currentNoDictionaryKey;
+
+  // MDK start key
+  private byte[] startKey;
+
+  // MDK end key
+  private byte[] endKey;
+
+  // startkey for no dictionary columns
+  private byte[][] noDictStartKey;
+
+  // endkey for no dictionary columns
+  private byte[][] noDictEndKey;
+
+  // startkey for no dictionary columns after packing into one column
+  private byte[] packedNoDictStartKey;
+
+  // endkey for no dictionary columns after packing into one column
+  private byte[] packedNoDictEndKey;
+
+  private KeyGenerator mdkGenerator;
+  private SegmentProperties segmentProperties;
+  private boolean hasNoDictionary;
+
+  public TablePageKey(int pageSize, KeyGenerator mdkGenerator, SegmentProperties segmentProperties,
+      boolean hasNoDictionary) {
+    this.pageSize = pageSize;
+    this.mdkGenerator = mdkGenerator;
+    this.segmentProperties = segmentProperties;
+    this.hasNoDictionary = hasNoDictionary;
+  }
+
+  /** update all keys based on the input row */
+  public void update(int rowId, CarbonRow row, byte[] mdk) throws KeyGenException {
+    if (hasNoDictionary) {
+      currentNoDictionaryKey = WriteStepRowUtil.getNoDictAndComplexDimension(row);
+    }
+    if (rowId == 0) {
+      startKey = WriteStepRowUtil.getMdk(row, mdkGenerator);
+      noDictStartKey = currentNoDictionaryKey;
+    }
+    noDictEndKey = currentNoDictionaryKey;
+    if (rowId == pageSize - 1) {
+      endKey = WriteStepRowUtil.getMdk(row, mdkGenerator);
+      finalizeKeys();
+    }
+  }
+
+  public Object getKey() {
+    return this;
+  }
+
+  /** update all keys if the SORT_COLUMNS option is used when creating the table */
+  private void finalizeKeys() {
+    // If SORT_COLUMNS is used, the start/end keys may need to be updated, since they may
+    // contain dictionary columns that are not in SORT_COLUMNS, which need to be removed
+    // from the start/end key
+    int numberOfDictSortColumns = segmentProperties.getNumberOfDictSortColumns();
+    if (numberOfDictSortColumns > 0) {
+      // if SORT_COLUMNS contain dictionary columns
+      int[] keySize = segmentProperties.getFixedLengthKeySplitter().getBlockKeySize();
+      if (keySize.length > numberOfDictSortColumns) {
+        // if there are some dictionary columns that are not in SORT_COLUMNS, it will come to here
+        int newMdkLength = 0;
+        for (int i = 0; i < numberOfDictSortColumns; i++) {
+          newMdkLength += keySize[i];
+        }
+        byte[] newStartKeyOfSortKey = new byte[newMdkLength];
+        byte[] newEndKeyOfSortKey = new byte[newMdkLength];
+        System.arraycopy(startKey, 0, newStartKeyOfSortKey, 0, newMdkLength);
+        System.arraycopy(endKey, 0, newEndKeyOfSortKey, 0, newMdkLength);
+        startKey = newStartKeyOfSortKey;
+        endKey = newEndKeyOfSortKey;
+      }
+    } else {
+      startKey = new byte[0];
+      endKey = new byte[0];
+    }
+
+    // Do the same update for noDictionary start/end Key
+    int numberOfNoDictSortColumns = segmentProperties.getNumberOfNoDictSortColumns();
+    if (numberOfNoDictSortColumns > 0) {
+      // if sort_columns contain no-dictionary columns
+      if (noDictStartKey.length > numberOfNoDictSortColumns) {
+        byte[][] newNoDictionaryStartKey = new byte[numberOfNoDictSortColumns][];
+        byte[][] newNoDictionaryEndKey = new byte[numberOfNoDictSortColumns][];
+        System.arraycopy(
+            noDictStartKey, 0, newNoDictionaryStartKey, 0, numberOfNoDictSortColumns);
+        System.arraycopy(
+            noDictEndKey, 0, newNoDictionaryEndKey, 0, numberOfNoDictSortColumns);
+        noDictStartKey = newNoDictionaryStartKey;
+        noDictEndKey = newNoDictionaryEndKey;
+      }
+      packedNoDictStartKey =
+          NonDictionaryUtil.packByteBufferIntoSingleByteArray(noDictStartKey);
+      packedNoDictEndKey =
+          NonDictionaryUtil.packByteBufferIntoSingleByteArray(noDictEndKey);
+    } else {
+      noDictStartKey = new byte[0][];
+      noDictEndKey = new byte[0][];
+      packedNoDictStartKey = new byte[0];
+      packedNoDictEndKey = new byte[0];
+    }
+  }
+
+  public byte[] getStartKey() {
+    return startKey;
+  }
+
+  public byte[] getEndKey() {
+    return endKey;
+  }
+
+  public byte[] getNoDictStartKey() {
+    return packedNoDictStartKey;
+  }
+
+  public byte[] getNoDictEndKey() {
+    return packedNoDictEndKey;
+  }
+
+  public int getPageSize() {
+    return pageSize;
+  }
+
+  public byte[] serializeStartKey() {
+    byte[] updatedNoDictionaryStartKey = updateNoDictionaryStartAndEndKey(getNoDictStartKey());
+    ByteBuffer buffer = ByteBuffer.allocate(
+        CarbonCommonConstants.INT_SIZE_IN_BYTE + CarbonCommonConstants.INT_SIZE_IN_BYTE
+            + startKey.length + updatedNoDictionaryStartKey.length);
+    buffer.putInt(startKey.length);
+    buffer.putInt(updatedNoDictionaryStartKey.length);
+    buffer.put(startKey);
+    buffer.put(updatedNoDictionaryStartKey);
+    buffer.rewind();
+    return buffer.array();
+  }
+
+  public byte[] serializeEndKey() {
+    byte[] updatedNoDictionaryEndKey = updateNoDictionaryStartAndEndKey(getNoDictEndKey());
+    ByteBuffer buffer = ByteBuffer.allocate(
+        CarbonCommonConstants.INT_SIZE_IN_BYTE + CarbonCommonConstants.INT_SIZE_IN_BYTE
+            + endKey.length + updatedNoDictionaryEndKey.length);
+    buffer.putInt(endKey.length);
+    buffer.putInt(updatedNoDictionaryEndKey.length);
+    buffer.put(endKey);
+    buffer.put(updatedNoDictionaryEndKey);
+    buffer.rewind();
+    return buffer.array();
+  }
+
+  /**
+   * Below method will be used to update the no dictionary start and end key
+   *
+   * @param key key to be updated
+   * @return return no dictionary key
+   */
+  public byte[] updateNoDictionaryStartAndEndKey(byte[] key) {
+    if (key.length == 0) {
+      return key;
+    }
+    // wrap the key in a byte buffer, skipping the length part of the data
+    ByteBuffer buffer = ByteBuffer.wrap(key, 2, key.length - 2);
+    // create an output buffer without the length part
+    ByteBuffer output = ByteBuffer.allocate(key.length - 2);
+    short numberOfBytesToStoreLength = 2;
+    // as the length part is removed, each no dictionary value index
+    // needs to be adjusted by 2 bytes
+    int numberOfNoDictSortColumns = segmentProperties.getNumberOfNoDictSortColumns();
+    for (int i = 0; i < numberOfNoDictSortColumns; i++) {
+      output.putShort((short) (buffer.getShort() - numberOfBytesToStoreLength));
+    }
+    // copy the data part
+    while (buffer.hasRemaining()) {
+      output.put(buffer.get());
+    }
+    output.rewind();
+    return output.array();
+  }
+}
\ No newline at end of file
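
A worked example of updateNoDictionaryStartAndEndKey above: the packed key starts with a 2-byte length, followed by 2-byte offsets (one per no-dictionary sort column) and the data; the method drops the length and shifts every offset down by 2. Self-contained sketch, assuming one no-dictionary sort column and a hypothetical packed layout:

    import java.nio.ByteBuffer;

    public class NoDictKeyDemo {
      // same transformation as updateNoDictionaryStartAndEndKey
      static byte[] stripLength(byte[] key, int noDictSortColumns) {
        ByteBuffer buffer = ByteBuffer.wrap(key, 2, key.length - 2);
        ByteBuffer output = ByteBuffer.allocate(key.length - 2);
        for (int i = 0; i < noDictSortColumns; i++) {
          output.putShort((short) (buffer.getShort() - 2));  // offsets move left by 2
        }
        while (buffer.hasRemaining()) {
          output.put(buffer.get());
        }
        return output.array();
      }

      public static void main(String[] args) {
        // [total length = 6][offset of column 0 data = 4]['a']['b']
        byte[] key = ByteBuffer.allocate(6)
            .putShort((short) 6).putShort((short) 4)
            .put((byte) 'a').put((byte) 'b').array();
        byte[] stripped = stripLength(key, 1);
        System.out.println(stripped.length);                       // 4
        System.out.println(ByteBuffer.wrap(stripped).getShort());  // offset now 2
      }
    }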

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/ColumnPageStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/ColumnPageStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/ColumnPageStatsCollector.java
new file mode 100644
index 0000000..5439a29
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/ColumnPageStatsCollector.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.statistics;
+
+public interface ColumnPageStatsCollector {
+  void updateNull(int rowId);
+  void update(byte value);
+  void update(short value);
+  void update(int value);
+  void update(long value);
+  void update(double value);
+  void update(byte[] value);
+
+  /**
+   * return the collected statistics
+   */
+  Object getPageStats();
+}
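
A minimal sketch of the collector contract above, tracking min/max for long values only (illustrative only; this commit's real collector is PrimitivePageStatsCollector):

    public class LongStatsDemo {
      private long min = Long.MAX_VALUE;
      private long max = Long.MIN_VALUE;

      void update(long value) {
        if (value < min) min = value;
        if (value > max) max = value;
      }

      void updateNull(int rowId) {
        // the removed ColumnPageStatsVO clamped min/max against 0 for nulls
        update(0L);
      }

      public static void main(String[] args) {
        LongStatsDemo stats = new LongStatsDemo();
        for (long v : new long[] {5, -3, 12}) {
          stats.update(v);
        }
        System.out.println("min=" + stats.min + " max=" + stats.max);  // min=-3 max=12
      }
    }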

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/ColumnPageStatsVO.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/ColumnPageStatsVO.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/ColumnPageStatsVO.java
deleted file mode 100644
index 7ce0be2..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/ColumnPageStatsVO.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.page.statistics;
-
-import java.math.BigDecimal;
-import java.nio.ByteBuffer;
-
-import org.apache.carbondata.core.metadata.ValueEncoderMeta;
-import org.apache.carbondata.core.metadata.datatype.DataType;
-import org.apache.carbondata.core.util.DataTypeUtil;
-
-/** statistics for one column page */
-public class ColumnPageStatsVO {
-  private DataType dataType;
-
-  /** min and max value of the measures */
-  private Object min, max;
-
-  private int scale;
-
-  private int precision;
-
-  public ColumnPageStatsVO(DataType dataType) {
-    this.dataType = dataType;
-    switch (dataType) {
-      case SHORT:
-      case INT:
-      case LONG:
-        max = Long.MIN_VALUE;
-        min = Long.MAX_VALUE;
-        break;
-      case DOUBLE:
-        max = Double.MIN_VALUE;
-        min = Double.MAX_VALUE;
-        break;
-      case DECIMAL:
-        max = new BigDecimal(Double.MIN_VALUE);
-        min = new BigDecimal(Double.MAX_VALUE);
-        break;
-    }
-  }
-
-  public static ColumnPageStatsVO copyFrom(ValueEncoderMeta meta, int scale, int precision) {
-    ColumnPageStatsVO instance = new ColumnPageStatsVO(meta.getType());
-    instance.min = meta.getMinValue();
-    instance.max = meta.getMaxValue();
-    instance.scale = scale;
-    instance.precision = precision;
-    return instance;
-  }
-
-  /**
-   * update the statistics for the input row
-   */
-  public void update(Object value) {
-    switch (dataType) {
-      case SHORT:
-        max = ((long) max > ((Short) value).longValue()) ? max : ((Short) value).longValue();
-        min = ((long) min < ((Short) value).longValue()) ? min : ((Short) value).longValue();
-        break;
-      case INT:
-        max = ((long) max > ((Integer) value).longValue()) ? max : ((Integer) value).longValue();
-        min = ((long) min  < ((Integer) value).longValue()) ? min : ((Integer) value).longValue();
-        break;
-      case LONG:
-        max = ((long) max > (long) value) ? max : value;
-        min = ((long) min < (long) value) ? min : value;
-        break;
-      case DOUBLE:
-        max = ((double) max > (double) value) ? max : value;
-        min = ((double) min < (double) value) ? min : value;
-        break;
-      case DECIMAL:
-        break;
-      case ARRAY:
-      case STRUCT:
-        // for complex type column, writer is not going to use stats, so, do nothing
-    }
-  }
-
-  public void updateNull() {
-    switch (dataType) {
-      case SHORT:
-        max = ((long) max > 0) ? max : 0L;
-        min = ((long) min < 0) ? min : 0L;
-        break;
-      case INT:
-        max = ((long) max > 0) ? max : 0L;
-        min = ((long) min  < 0) ? min : 0L;
-        break;
-      case LONG:
-        max = ((long) max > 0) ? max : 0L;
-        min = ((long) min < 0) ? min : 0L;
-        break;
-      case DOUBLE:
-        max = ((double) max > 0d) ? max : 0d;
-        min = ((double) min < 0d) ? min : 0d;
-        break;
-      case DECIMAL:
-        break;
-      case ARRAY:
-      case STRUCT:
-        // for complex type column, writer is not going to use stats, so, do nothing
-    }
-  }
-
-  /**
-   * return min value as byte array
-   */
-  public byte[] minBytes() {
-    return getValueAsBytes(getMin());
-  }
-
-  /**
-   * return max value as byte array
-   */
-  public byte[] maxBytes() {
-    return getValueAsBytes(getMax());
-  }
-
-  /**
-   * convert value to byte array
-   */
-  private byte[] getValueAsBytes(Object value) {
-    ByteBuffer b;
-    switch (dataType) {
-      case BYTE:
-      case SHORT:
-      case INT:
-      case LONG:
-        b = ByteBuffer.allocate(8);
-        b.putLong((Long) value);
-        b.flip();
-        return b.array();
-      case DOUBLE:
-        b = ByteBuffer.allocate(8);
-        b.putDouble((Double) value);
-        b.flip();
-        return b.array();
-      case DECIMAL:
-        return DataTypeUtil.bigDecimalToByte((BigDecimal) value);
-      default:
-        throw new IllegalArgumentException("Invalid data type: " + dataType);
-    }
-  }
-
-  public Object getMin() {
-    return min;
-  }
-
-  public Object getMax() {
-    return max;
-  }
-
-  public DataType getDataType() {
-    return dataType;
-  }
-
-  public int getScale() {
-    return scale;
-  }
-
-  public int getPrecision() {
-    return precision;
-  }
-
-  @Override
-  public String toString() {
-    return String.format("min: %s, max: %s", min, max);
-  }
-}
\ No newline at end of file
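
For reference, the 8-byte min/max serialization that the removed getValueAsBytes performed for integral types is plain JDK code; a small sketch (class name is illustrative):

  import java.nio.ByteBuffer;

  class StatsBytesSketch {
    // BYTE/SHORT/INT/LONG min and max were widened to long and written as 8 bytes
    static byte[] longAsBytes(long value) {
      ByteBuffer b = ByteBuffer.allocate(8);
      b.putLong(value);
      b.flip();
      return b.array();
    }
  }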

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/MeasurePageStatsVO.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/MeasurePageStatsVO.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/MeasurePageStatsVO.java
deleted file mode 100644
index 600867a..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/MeasurePageStatsVO.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.page.statistics;
-
-import org.apache.carbondata.core.datastore.page.ColumnPage;
-import org.apache.carbondata.core.metadata.ValueEncoderMeta;
-import org.apache.carbondata.core.metadata.datatype.DataType;
-
-public class MeasurePageStatsVO {
-  // statistics of each measure column
-  private Object[] min, max;
-
-  private DataType[] dataType;
-  private byte[] selectedDataType;
-
-  private MeasurePageStatsVO() {
-  }
-
-  public MeasurePageStatsVO(ColumnPage[] measurePages) {
-    min = new Object[measurePages.length];
-    max = new Object[measurePages.length];
-    dataType = new DataType[measurePages.length];
-    selectedDataType = new byte[measurePages.length];
-    for (int i = 0; i < measurePages.length; i++) {
-      ColumnPageStatsVO stats = measurePages[i].getStatistics();
-      min[i] = stats.getMin();
-      max[i] = stats.getMax();
-      dataType[i] = measurePages[i].getDataType();
-    }
-  }
-
-  public static MeasurePageStatsVO build(ValueEncoderMeta[] encoderMetas) {
-    Object[] max = new Object[encoderMetas.length];
-    Object[] min = new Object[encoderMetas.length];
-    DataType[] dataType = new DataType[encoderMetas.length];
-    byte[] selectedDataType = new byte[encoderMetas.length];
-    for (int i = 0; i < encoderMetas.length; i++) {
-      max[i] = encoderMetas[i].getMaxValue();
-      min[i] = encoderMetas[i].getMinValue();
-      dataType[i] = encoderMetas[i].getType();
-      selectedDataType[i] = encoderMetas[i].getDataTypeSelected();
-    }
-
-    MeasurePageStatsVO stats = new MeasurePageStatsVO();
-    stats.dataType = dataType;
-    stats.selectedDataType = selectedDataType;
-    stats.min = min;
-    stats.max = max;
-    return stats;
-  }
-
-  public DataType getDataType(int measureIndex) {
-    return dataType[measureIndex];
-  }
-
-  public Object getMin(int measureIndex) {
-    return min[measureIndex];
-  }
-
-  public Object getMax(int measureIndex) {
-    return max[measureIndex];
-  }
-
-  public byte getDataTypeSelected(int measureIndex) {
-    return selectedDataType[measureIndex];
-  }
-}


[5/7] carbondata git commit: [CARBONDATA-1098] Change page statistics use exact type and use column page in writer

Posted by ja...@apache.org.
[CARBONDATA-1098] Change page statistics use exact type and use column page in writer

This PR changes the writer path in data load:

make statistics collection use the exact data type from the schema instead of a generic type
change the consumer and writer to use EncodedTablePage instead of NodeHolder; EncodedTablePage is the output of TablePage.encode

This closes #1102
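
A rough sketch of the resulting write path, for illustration. TablePage, EncodedTablePage and TablePageKey are from this patch; the no-arg encode() signature and the consume() wrapper are assumptions:

  import org.apache.carbondata.core.datastore.page.EncodedTablePage;
  import org.apache.carbondata.core.datastore.page.key.TablePageKey;
  import org.apache.carbondata.processing.store.TablePage;

  class WritePathSketch {
    static void consume(TablePage tablePage) {
      EncodedTablePage encodedPage = tablePage.encode();  // assumed no-arg signature
      TablePageKey key = encodedPage.getPageKey();        // the page now carries its own key
      byte[] startKey = key.getStartKey();                // used for the blocklet index
      byte[] endKey = key.getEndKey();
      // hand encodedPage to the fact data writer; this replaces NodeHolder construction
    }
  }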


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/bc3e6843
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/bc3e6843
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/bc3e6843

Branch: refs/heads/master
Commit: bc3e6843ee83370b6b20e5c9eef92f10667edbae
Parents: c504dd2
Author: jackylk <ja...@huawei.com>
Authored: Tue Jul 4 08:12:13 2017 +0800
Committer: Raghunandan S <ca...@gmail.com>
Committed: Fri Jul 28 01:05:59 2017 +0800

----------------------------------------------------------------------
 .../core/constants/CarbonCommonConstants.java   |   2 -
 .../carbondata/core/datastore/TableSpec.java    |   1 +
 .../AbstractMeasureChunkReaderV2V3Format.java   |  25 --
 ...CompressedMeasureChunkFileBasedReaderV2.java |  14 +
 ...CompressedMeasureChunkFileBasedReaderV3.java |  15 +
 .../chunk/store/MeasureChunkStoreFactory.java   | 102 -----
 .../chunk/store/MeasureDataChunkStore.java      |  87 ----
 .../safe/SafeAbstractMeasureDataChunkStore.java | 113 -----
 .../safe/SafeBigDecimalMeasureChunkStore.java   |  98 -----
 .../impl/safe/SafeByteMeasureChunkStore.java    |  56 ---
 .../impl/safe/SafeDoubleMeasureChunkStore.java  |  55 ---
 .../impl/safe/SafeIntMeasureChunkStore.java     |  55 ---
 .../impl/safe/SafeLongMeasureChunkStore.java    |  56 ---
 .../impl/safe/SafeShortMeasureChunkStore.java   |  57 ---
 .../UnsafeAbstractMeasureDataChunkStore.java    | 129 ------
 .../UnsafeBigDecimalMeasureChunkStore.java      | 139 ------
 .../unsafe/UnsafeByteMeasureChunkStore.java     |  59 ---
 .../unsafe/UnsafeDoubleMeasureChunkStore.java   |  61 ---
 .../impl/unsafe/UnsafeIntMeasureChunkStore.java |  61 ---
 .../unsafe/UnsafeLongMeasureChunkStore.java     |  60 ---
 .../unsafe/UnsafeShortMeasureChunkStore.java    |  60 ---
 .../columnar/BlockIndexerStorageForInt.java     |  27 ++
 ...kIndexerStorageForNoInvertedIndexForInt.java |  15 +
 ...ndexerStorageForNoInvertedIndexForShort.java |  15 +
 .../columnar/BlockIndexerStorageForShort.java   |  27 ++
 .../core/datastore/columnar/IndexStorage.java   |   7 +
 .../core/datastore/page/ColumnPage.java         |  36 +-
 .../core/datastore/page/EncodedTablePage.java   | 154 +++++++
 .../core/datastore/page/LazyColumnPage.java     |   8 +-
 .../datastore/page/compression/Compression.java |  23 -
 .../page/encoding/AdaptiveCompressionCodec.java |   8 +-
 .../page/encoding/AdaptiveIntegerCodec.java     | 224 ----------
 .../page/encoding/AdaptiveIntegralCodec.java    | 219 ++++++++++
 .../page/encoding/ColumnPageCodec.java          |   7 +-
 .../page/encoding/DefaultEncodingStrategy.java  |  79 +++-
 .../page/encoding/DeltaIntegerCodec.java        | 255 -----------
 .../page/encoding/DeltaIntegralCodec.java       | 257 +++++++++++
 .../page/encoding/DirectCompressCodec.java      |  92 +++-
 .../page/encoding/EncodedColumnPage.java        |  78 ++++
 .../datastore/page/encoding/EncodedData.java    |  32 --
 .../page/encoding/EncodedDimensionPage.java     | 141 ++++++
 .../page/encoding/EncodedMeasurePage.java       |  87 ++++
 .../page/encoding/EncodingStrategy.java         |  58 ++-
 .../core/datastore/page/key/TablePageKey.java   | 212 +++++++++
 .../statistics/ColumnPageStatsCollector.java    |  33 ++
 .../page/statistics/ColumnPageStatsVO.java      | 186 --------
 .../page/statistics/MeasurePageStatsVO.java     |  82 ----
 .../statistics/PrimitivePageStatsCollector.java | 294 +++++++++++++
 .../page/statistics/SimpleStatsResult.java      |  35 ++
 .../page/statistics/TablePageStatistics.java    | 130 ++++++
 .../statistics/VarLengthPageStatsCollector.java | 107 +++++
 .../core/metadata/BlockletInfoColumnar.java     |  34 +-
 .../core/metadata/CodecMetaFactory.java         |  90 ++++
 .../core/metadata/ColumnPageCodecMeta.java      | 270 ++++++++++++
 .../core/metadata/ValueEncoderMeta.java         |   2 +-
 .../impl/AbstractScannedResultCollector.java    |   6 -
 .../core/util/CarbonMetadataUtil.java           | 433 ++++---------------
 .../apache/carbondata/core/util/CarbonUtil.java |  99 +++--
 .../carbondata/core/util/DataTypeUtil.java      |  19 -
 .../apache/carbondata/core/util/NodeHolder.java |  37 +-
 .../carbondata/core/util/NonDictionaryUtil.java | 167 +++++++
 .../core/util/CarbonMetadataUtilTest.java       | 106 +++--
 .../carbondata/core/util/DataTypeUtilTest.java  |   8 -
 .../core/writer/CarbonFooterWriterTest.java     | 115 +++--
 format/src/main/thrift/carbondata.thrift        |   2 +-
 .../apache/carbondata/hive/CarbonHiveSerDe.java |   4 +-
 .../apache/carbondata/hive/TestCarbonSerde.java |   2 +-
 .../src/test/resources/complexTypeDecimal.csv   |  16 +-
 .../resources/complexTypeDecimalNestedHive.csv  |   2 +-
 .../columnar/ColGroupBlockStorage.java          | 104 -----
 .../newflow/sort/SortStepRowUtil.java           |   6 +-
 .../steps/DataWriterProcessorStepImpl.java      |   2 +-
 .../sortdata/IntermediateFileMerger.java        |   2 +-
 .../sortandgroupby/sortdata/RowComparator.java  |   2 +-
 .../sortdata/RowComparatorForNormalDims.java    |   2 +-
 .../sortdata/SortTempFileChunkHolder.java       |   3 +-
 .../UnCompressedTempSortFileWriter.java         |   2 +-
 .../store/CarbonFactDataHandlerColumnar.java    |  73 ++--
 .../carbondata/processing/store/TablePage.java  | 226 ++++++++--
 .../processing/store/TablePageEncoder.java      | 203 ---------
 .../processing/store/TablePageKey.java          | 138 ------
 .../processing/store/TablePageStatistics.java   | 142 ------
 .../store/writer/AbstractFactDataWriter.java    |  36 +-
 .../store/writer/CarbonFactDataWriter.java      |  22 +-
 .../writer/v1/CarbonFactDataWriterImplV1.java   | 129 +++---
 .../writer/v2/CarbonFactDataWriterImplV2.java   |  58 +--
 .../writer/v3/CarbonFactDataWriterImplV3.java   | 390 ++++-------------
 .../store/writer/v3/DataWriterHolder.java       |  24 +-
 .../processing/util/NonDictionaryUtil.java      | 167 -------
 .../carbondata/processing/StoreCreator.java     |   4 +-
 90 files changed, 3382 insertions(+), 3998 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index 3acdba9..f2e59ab 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -693,12 +693,10 @@ public final class CarbonCommonConstants {
    * BIG_DECIMAL_MEASURE
    */
   public static final char BIG_DECIMAL_MEASURE = 'b';
-
   /**
    * BIG_INT_MEASURE
    */
   public static final char BIG_INT_MEASURE = 'd';
-
   /**
    * CARBON_PREFETCH_BUFFERSIZE
    */

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java b/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
index 650c2a6..87c4934 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
@@ -178,6 +178,7 @@ public class TableSpec {
     public int getNumExpandedDimensions() {
       return numDimensionExpanded;
     }
+
   }
 
   public class MeasureSpec {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
index c35cefb..dd61826 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
@@ -24,14 +24,8 @@ import java.util.List;
 import org.apache.carbondata.core.datastore.FileHolder;
 import org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk;
 import org.apache.carbondata.core.datastore.compression.CompressorFactory;
-import org.apache.carbondata.core.datastore.page.ColumnPage;
-import org.apache.carbondata.core.datastore.page.encoding.ColumnPageCodec;
-import org.apache.carbondata.core.memory.MemoryException;
-import org.apache.carbondata.core.metadata.ValueEncoderMeta;
 import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
 import org.apache.carbondata.core.metadata.blocklet.datachunk.PresenceMeta;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.format.DataChunk2;
 
 /**
  * Abstract class for V2, V3 format measure column reader
@@ -129,23 +123,4 @@ public abstract class AbstractMeasureChunkReaderV2V3Format extends AbstractMeasu
   protected abstract MeasureRawColumnChunk[] readRawMeasureChunksInGroup(FileHolder fileReader,
       int startColumnBlockletIndex, int endColumnBlockletIndex) throws IOException;
 
-
-  protected ColumnPage decodeMeasure(MeasureRawColumnChunk measureRawColumnChunk,
-      DataChunk2 measureColumnChunk, int copyPoint) throws MemoryException {
-    // for measure, it should have only one ValueEncoderMeta
-    List<ByteBuffer> encoder_meta = measureColumnChunk.getEncoder_meta();
-    assert (encoder_meta.size() > 0);
-    byte[] encodedMeta = encoder_meta.get(0).array();
-    ValueEncoderMeta meta = CarbonUtil.deserializeEncoderMetaV3(encodedMeta);
-    int scale = -1;
-    int precision = -1;
-    if (encoder_meta.size() > 1) {
-      ByteBuffer decimalInfo = encoder_meta.get(1);
-      scale = decimalInfo.getInt();
-      precision = decimalInfo.getInt();
-    }
-    ColumnPageCodec codec = strategy.createCodec(meta, scale, precision);
-    byte[] rawData = measureRawColumnChunk.getRawData().array();
-    return codec.decode(rawData, copyPoint, measureColumnChunk.data_page_length);
-  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
index d90c7fe..09f367a 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
@@ -24,7 +24,9 @@ import org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk;
 import org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk;
 import org.apache.carbondata.core.datastore.chunk.reader.measure.AbstractMeasureChunkReaderV2V3Format;
 import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.datastore.page.encoding.ColumnPageCodec;
 import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.ValueEncoderMeta;
 import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
 import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.format.DataChunk2;
@@ -125,4 +127,16 @@ public class CompressedMeasureChunkFileBasedReaderV2 extends AbstractMeasureChun
     datChunk.setNullValueIndexHolder(getPresenceMeta(measureColumnChunk.presence));
     return datChunk;
   }
+
+  protected ColumnPage decodeMeasure(MeasureRawColumnChunk measureRawColumnChunk,
+      DataChunk2 measureColumnChunk, int copyPoint) throws MemoryException, IOException {
+    // for measure, it should have only one ValueEncoderMeta
+    assert (measureColumnChunk.getEncoder_meta().size() == 1);
+    byte[] encodedMeta = measureColumnChunk.getEncoder_meta().get(0).array();
+
+    ValueEncoderMeta meta = CarbonUtil.deserializeEncoderMetaV3(encodedMeta);
+    ColumnPageCodec codec = strategy.createCodec(meta);
+    byte[] rawData = measureRawColumnChunk.getRawData().array();
+    return codec.decode(rawData, copyPoint, measureColumnChunk.data_page_length);
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
index 2ca7193..492d46a 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
@@ -24,7 +24,9 @@ import org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk;
 import org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk;
 import org.apache.carbondata.core.datastore.chunk.reader.measure.AbstractMeasureChunkReaderV2V3Format;
 import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.datastore.page.encoding.ColumnPageCodec;
 import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.ColumnPageCodecMeta;
 import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
 import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.format.DataChunk2;
@@ -222,4 +224,17 @@ public class CompressedMeasureChunkFileBasedReaderV3 extends AbstractMeasureChun
     return datChunk;
   }
 
+  protected ColumnPage decodeMeasure(MeasureRawColumnChunk measureRawColumnChunk,
+      DataChunk2 measureColumnChunk, int copyPoint) throws MemoryException {
+    // for measure, it should have only one ValueEncoderMeta
+    assert (measureColumnChunk.getEncoder_meta().size() == 1);
+    byte[] encodedMeta = measureColumnChunk.getEncoder_meta().get(0).array();
+
+    ColumnPageCodecMeta meta = new ColumnPageCodecMeta();
+    meta.deserialize(encodedMeta);
+    ColumnPageCodec codec = strategy.createCodec(meta);
+    byte[] rawData = measureRawColumnChunk.getRawData().array();
+    return codec.decode(rawData, copyPoint, measureColumnChunk.data_page_length);
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/MeasureChunkStoreFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/MeasureChunkStoreFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/MeasureChunkStoreFactory.java
deleted file mode 100644
index 12bfea9..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/MeasureChunkStoreFactory.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastore.chunk.store.impl.safe.SafeBigDecimalMeasureChunkStore;
-import org.apache.carbondata.core.datastore.chunk.store.impl.safe.SafeByteMeasureChunkStore;
-import org.apache.carbondata.core.datastore.chunk.store.impl.safe.SafeDoubleMeasureChunkStore;
-import org.apache.carbondata.core.datastore.chunk.store.impl.safe.SafeIntMeasureChunkStore;
-import org.apache.carbondata.core.datastore.chunk.store.impl.safe.SafeLongMeasureChunkStore;
-import org.apache.carbondata.core.datastore.chunk.store.impl.safe.SafeShortMeasureChunkStore;
-import org.apache.carbondata.core.datastore.chunk.store.impl.unsafe.UnsafeBigDecimalMeasureChunkStore;
-import org.apache.carbondata.core.datastore.chunk.store.impl.unsafe.UnsafeByteMeasureChunkStore;
-import org.apache.carbondata.core.datastore.chunk.store.impl.unsafe.UnsafeDoubleMeasureChunkStore;
-import org.apache.carbondata.core.datastore.chunk.store.impl.unsafe.UnsafeIntMeasureChunkStore;
-import org.apache.carbondata.core.datastore.chunk.store.impl.unsafe.UnsafeLongMeasureChunkStore;
-import org.apache.carbondata.core.datastore.chunk.store.impl.unsafe.UnsafeShortMeasureChunkStore;
-import org.apache.carbondata.core.metadata.datatype.DataType;
-import org.apache.carbondata.core.util.CarbonProperties;
-
-/**
- * Factory class for getting the measure store type
- */
-public class MeasureChunkStoreFactory {
-
-  /**
-   * instance type
-   */
-  public static final MeasureChunkStoreFactory INSTANCE = new MeasureChunkStoreFactory();
-
-  /**
-   * is unsafe
-   */
-  private static final boolean isUnsafe;
-
-  static {
-    isUnsafe = Boolean.parseBoolean(CarbonProperties.getInstance()
-        .getProperty(CarbonCommonConstants.ENABLE_UNSAFE_IN_QUERY_EXECUTION,
-            CarbonCommonConstants.ENABLE_UNSAFE_IN_QUERY_EXECUTION_DEFAULTVALUE));
-  }
-
-  private MeasureChunkStoreFactory() {
-  }
-
-  /**
-   * Below method will be used to get the measure data chunk store based on data type
-   *
-   * @param dataType     data type
-   * @param numberOfRows number of rows
-   * @return measure chunk store
-   */
-  public MeasureDataChunkStore getMeasureDataChunkStore(DataType dataType, int numberOfRows) {
-    if (!isUnsafe) {
-      switch (dataType) {
-        case BYTE:
-          return new SafeByteMeasureChunkStore(numberOfRows);
-        case SHORT:
-          return new SafeShortMeasureChunkStore(numberOfRows);
-        case INT:
-          return new SafeIntMeasureChunkStore(numberOfRows);
-        case LONG:
-          return new SafeLongMeasureChunkStore(numberOfRows);
-        case DECIMAL:
-          return new SafeBigDecimalMeasureChunkStore(numberOfRows);
-        case DOUBLE:
-        default:
-          return new SafeDoubleMeasureChunkStore(numberOfRows);
-      }
-    } else {
-      switch (dataType) {
-        case BYTE:
-          return new UnsafeByteMeasureChunkStore(numberOfRows);
-        case SHORT:
-          return new UnsafeShortMeasureChunkStore(numberOfRows);
-        case INT:
-          return new UnsafeIntMeasureChunkStore(numberOfRows);
-        case LONG:
-          return new UnsafeLongMeasureChunkStore(numberOfRows);
-        case DECIMAL:
-          return new UnsafeBigDecimalMeasureChunkStore(numberOfRows);
-        case DOUBLE:
-        default:
-          return new UnsafeDoubleMeasureChunkStore(numberOfRows);
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/MeasureDataChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/MeasureDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/MeasureDataChunkStore.java
deleted file mode 100644
index 05a050d..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/MeasureDataChunkStore.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store;
-
-import java.math.BigDecimal;
-
-/**
- * Responsible for storing the measure data in memory;
- * memory can be on heap or offheap based on the user configuration
- */
-public interface MeasureDataChunkStore<T> {
-
-  /**
-   * Below method will be used to put the data to memory
-   *
-   * @param data
-   */
-  void putData(T data);
-
-  /**
-   * to get byte value
-   *
-   * @param index
-   * @return byte value based on index
-   */
-  byte getByte(int index);
-
-  /**
-   * to get the short value
-   *
-   * @param index
-   * @return short value based on index
-   */
-  short getShort(int index);
-
-  /**
-   * to get the int value
-   *
-   * @param index
-   * @return int value based on index
-   */
-  int getInt(int index);
-
-  /**
-   * to get the long value
-   *
-   * @param index
-   * @return long value based on index
-   */
-  long getLong(int index);
-
-  /**
-   * to get the double value
-   *
-   * @param index
-   * @return double value based on index
-   */
-  double getDouble(int index);
-
-  /**
-   * To get the bigdecimal value
-   *
-   * @param index
-   * @return bigdecimal value based on index
-   */
-  BigDecimal getBigDecimal(int index);
-
-  /**
-   * To free the occupied memory
-   */
-  void freeMemory();
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeAbstractMeasureDataChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeAbstractMeasureDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeAbstractMeasureDataChunkStore.java
deleted file mode 100644
index dc9fb89..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeAbstractMeasureDataChunkStore.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store.impl.safe;
-
-import java.math.BigDecimal;
-
-import org.apache.carbondata.core.datastore.chunk.store.MeasureDataChunkStore;
-
-/**
- * Responsible for storing the measure data in memory
- */
-public abstract class SafeAbstractMeasureDataChunkStore<T> implements
-    MeasureDataChunkStore<T> {
-
-  /**
-   * number of rows
-   */
-  protected int numberOfRows;
-
-  public SafeAbstractMeasureDataChunkStore(int numberOfRows) {
-    this.numberOfRows = numberOfRows;
-  }
-
-  /**
-   * to get the byte value
-   *
-   * @param index
-   * @return byte value based on index
-   */
-  @Override
-  public byte getByte(int index) {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * to get the short value
-   *
-   * @param index
-   * @return short value based on index
-   */
-  @Override
-  public short getShort(int index) {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * to get the int value
-   *
-   * @param index
-   * @return int value based on index
-   */
-  @Override
-  public int getInt(int index) {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * to get the long value
-   *
-   * @param index
-   * @return long value based on index
-   */
-  @Override
-  public long getLong(int index) {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * to get the double value
-   *
-   * @param index
-   * @return double value based on index
-   */
-  @Override
-  public double getDouble(int index) {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * To get the bigdecimal value
-   *
-   * @param index
-   * @return bigdecimal value based on index
-   */
-  @Override
-  public BigDecimal getBigDecimal(int index) {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * To free the occupied memory
-   */
-  @Override
-  public void freeMemory() {
-    // do nothing as GC will take care of freeing the memory
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeBigDecimalMeasureChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeBigDecimalMeasureChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeBigDecimalMeasureChunkStore.java
deleted file mode 100644
index 03f59fc..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeBigDecimalMeasureChunkStore.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.datastore.chunk.store.impl.safe;
-
-import java.math.BigDecimal;
-import java.nio.ByteBuffer;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.util.DataTypeUtil;
-
-/**
- * Responsible for storing the big decimal measure data in memory
- */
-public class SafeBigDecimalMeasureChunkStore extends SafeAbstractMeasureDataChunkStore<byte[]> {
-
-  /**
-   * data chunk
-   */
-  private byte[] dataChunk;
-
-  /**
-   * offset of actual data
-   */
-  private int[] dataOffsets;
-
-  public SafeBigDecimalMeasureChunkStore(int numberOfRows) {
-    super(numberOfRows);
-    this.dataOffsets = new int[numberOfRows];
-  }
-
-  @Override public void putData(byte[] data) {
-    this.dataChunk = data;
-    // As data is of variable length and data format is
-    // <length in int><data><length in int><data>
-    // we need to store the offset of each value so the data can be accessed directly,
-    // for example:
-    // data = {0,0,0,5,1,2,3,4,5,0,0,0,6,0,1,2,3,4,5,0,0,0,2,8,9}
-    // the values stored in the offsets array are then the positions of the actual data:
-    // [4,13,23]
-    // each offset advances by the previous value's length plus the 4 bytes used to
-    // store that length
-
-    // start position will be used to store the current data position
-    int startOffset = 0;
-    // the first value starts at byte 4 because the first four bytes hold the
-    // length of the first value, which we skip
-    dataOffsets[0] = CarbonCommonConstants.INT_SIZE_IN_BYTE;
-    // creating a byte buffer which will wrap the length of the row
-    ByteBuffer buffer = ByteBuffer.allocate(CarbonCommonConstants.INT_SIZE_IN_BYTE);
-    for (int i = 1; i < numberOfRows; i++) {
-      buffer.put(data, startOffset, CarbonCommonConstants.INT_SIZE_IN_BYTE);
-      buffer.flip();
-      // so the current row position will be
-      // previous row length + 4 bytes used for storing the previous row's length
-      startOffset += buffer.getInt() + CarbonCommonConstants.INT_SIZE_IN_BYTE;
-      // the same byte buffer is reused to avoid creating one per row,
-      // so clear it before the next read
-      buffer.clear();
-      dataOffsets[i] = startOffset + CarbonCommonConstants.INT_SIZE_IN_BYTE;
-    }
-  }
-
-  /**
-   * to get the byte value
-   *
-   * @param index
-   * @return byte value based on index
-   */
-  @Override public BigDecimal getBigDecimal(int index) {
-    int currentDataOffset = dataOffsets[index];
-    int length = 0;
-    // calculating the length of data
-    if (index < numberOfRows - 1) {
-      length = (int) (dataOffsets[index + 1] - (currentDataOffset
-          + CarbonCommonConstants.INT_SIZE_IN_BYTE));
-    } else {
-      // for last record
-      length = (int) (this.dataChunk.length - currentDataOffset);
-    }
-    return DataTypeUtil.byteToBigDecimal(dataChunk, currentDataOffset, length);
-  }
-
-}
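
The offset bookkeeping above can be reproduced with plain JDK code. A standalone sketch of the <4-byte length><data>... layout, matching the corrected example in the comments (class and method names are illustrative):

  import java.nio.ByteBuffer;
  import java.util.Arrays;

  class OffsetDemo {
    static int[] offsets(byte[] data, int rows) {
      int[] offsets = new int[rows];
      ByteBuffer buffer = ByteBuffer.wrap(data);
      int pos = 0;
      for (int i = 0; i < rows; i++) {
        int length = buffer.getInt(pos); // 4-byte big-endian length prefix
        offsets[i] = pos + 4;            // data starts right after its length
        pos += 4 + length;
      }
      return offsets;
    }

    public static void main(String[] args) {
      byte[] data = {0,0,0,5, 1,2,3,4,5, 0,0,0,6, 0,1,2,3,4,5, 0,0,0,2, 8,9};
      System.out.println(Arrays.toString(offsets(data, 3))); // prints [4, 13, 23]
    }
  }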

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeByteMeasureChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeByteMeasureChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeByteMeasureChunkStore.java
deleted file mode 100644
index 74565a8..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeByteMeasureChunkStore.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store.impl.safe;
-
-/**
- * Responsible for storing Byte array data to memory.
- */
-public class SafeByteMeasureChunkStore extends
-    SafeAbstractMeasureDataChunkStore<byte[]> {
-
-  /**
-   * data
-   */
-  private byte[] data;
-
-  public SafeByteMeasureChunkStore(int numberOfRows) {
-    super(numberOfRows);
-  }
-
-  /**
-   * Below method will be used to put byte array data to memory
-   *
-   * @param data
-   */
-  @Override
-  public void putData(byte[] data) {
-    this.data = data;
-  }
-
-  /**
-   * to get the byte value
-   *
-   * @param index
-   * @return byte value based on index
-   */
-  @Override
-  public byte getByte(int index) {
-    return this.data[index];
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeDoubleMeasureChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeDoubleMeasureChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeDoubleMeasureChunkStore.java
deleted file mode 100644
index 39d7ee7..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeDoubleMeasureChunkStore.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store.impl.safe;
-
-/**
- * Below class will be used to store the measure values of double data type
- */
-public class SafeDoubleMeasureChunkStore extends
-    SafeAbstractMeasureDataChunkStore<double[]> {
-
-  /**
-   * data
-   */
-  private double[] data;
-
-  public SafeDoubleMeasureChunkStore(int numberOfRows) {
-    super(numberOfRows);
-  }
-
-  /**
-   * Below method will be used to store double array data
-   *
-   * @param data
-   */
-  @Override
-  public void putData(double[] data) {
-    this.data = data;
-  }
-
-  /**
-   * to get the double value
-   *
-   * @param index
-   * @return double value based on index
-   */
-  @Override
-  public double getDouble(int index) {
-    return this.data[index];
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeIntMeasureChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeIntMeasureChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeIntMeasureChunkStore.java
deleted file mode 100644
index be7c8fd..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeIntMeasureChunkStore.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store.impl.safe;
-
-/**
- * Responsible for storing int array data to memory.
- */
-public class SafeIntMeasureChunkStore extends
-    SafeAbstractMeasureDataChunkStore<int[]> {
-
-  /**
-   * data
-   */
-  private int[] data;
-
-  public SafeIntMeasureChunkStore(int numberOfRows) {
-    super(numberOfRows);
-  }
-
-  /**
-   * Below method will be used to put int array data to memory
-   *
-   * @param data
-   */
-  @Override
-  public void putData(int[] data) {
-    this.data = data;
-  }
-
-  /**
-   * to get the int value
-   *
-   * @param index
-   * @return int value based on index
-   */
-  @Override
-  public int getInt(int index) {
-    return this.data[index];
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeLongMeasureChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeLongMeasureChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeLongMeasureChunkStore.java
deleted file mode 100644
index 31c12a0..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeLongMeasureChunkStore.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store.impl.safe;
-
-/**
- * Below class will be used to store the measure values of long data type
- *
- */
-public class SafeLongMeasureChunkStore extends
-    SafeAbstractMeasureDataChunkStore<long[]> {
-
-  /**
-   * data
-   */
-  private long[] data;
-
-  public SafeLongMeasureChunkStore(int numberOfRows) {
-    super(numberOfRows);
-  }
-
-  /**
-   * Below method will be used to store long array data
-   *
-   * @param data
-   */
-  @Override
-  public void putData(long[] data) {
-    this.data = data;
-  }
-
-  /**
-   * to get the long value
-   *
-   * @param index
-   * @return long value based on index
-   */
-  @Override
-  public long getLong(int index) {
-    return this.data[index];
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeShortMeasureChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeShortMeasureChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeShortMeasureChunkStore.java
deleted file mode 100644
index a9a6a25..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/safe/SafeShortMeasureChunkStore.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store.impl.safe;
-
-/**
- * Below class will be used to store the measure values of short data type
- *
- */
-public class SafeShortMeasureChunkStore extends
-    SafeAbstractMeasureDataChunkStore<short[]> {
-
-  /**
-   * data
-   */
-  private short[] data;
-
-  public SafeShortMeasureChunkStore(int numberOfRows) {
-    super(numberOfRows);
-  }
-
-  /**
-   * Below method will be used to put short array data
-   *
-   * @param data
-   */
-  @Override
-  public void putData(short[] data) {
-    this.data = data;
-  }
-
-  /**
-   * to get the short value
-   *
-   * @param index
-   * @return short value based on index
-   */
-  @Override
-  public short getShort(int index) {
-    return data[index];
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeAbstractMeasureDataChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeAbstractMeasureDataChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeAbstractMeasureDataChunkStore.java
deleted file mode 100644
index 545b864..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeAbstractMeasureDataChunkStore.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store.impl.unsafe;
-
-import java.math.BigDecimal;
-
-import org.apache.carbondata.core.datastore.chunk.store.MeasureDataChunkStore;
-import org.apache.carbondata.core.memory.MemoryAllocatorFactory;
-import org.apache.carbondata.core.memory.MemoryBlock;
-
-/**
- * Responsible for storing the measure data in memory; memory can be on heap or
- * offheap based on the user configuration, using the unsafe interface
- */
-public abstract class UnsafeAbstractMeasureDataChunkStore<T> implements MeasureDataChunkStore<T> {
-
-  /**
-   * memory block
-   */
-  protected MemoryBlock dataPageMemoryBlock;
-
-  /**
-   * number of rows
-   */
-  protected int numberOfRows;
-
-  /**
-   * to check memory is released or not
-   */
-  protected boolean isMemoryReleased;
-
-  /**
-   * to check memory is occupied or not
-   */
-  protected boolean isMemoryOccupied;
-
-  public UnsafeAbstractMeasureDataChunkStore(int numberOfRows) {
-    this.numberOfRows = numberOfRows;
-  }
-
-  /**
-   * to get the byte value
-   *
-   * @param index
-   * @return byte value based on index
-   */
-  @Override public byte getByte(int index) {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * to get the short value
-   *
-   * @param index
-   * @return short value based on index
-   */
-  @Override public short getShort(int index) {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * to get the int value
-   *
-   * @param index
-   * @return int value based on index
-   */
-  @Override public int getInt(int index) {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * to get the long value
-   *
-   * @param index
-   * @return long value based on index
-   */
-  @Override public long getLong(int index) {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * to get the double value
-   *
-   * @param index
-   * @return double value based on index
-   */
-  @Override public double getDouble(int index) {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * To get the bigdecimal value
-   *
-   * @param index
-   * @return bigdecimal value based on index
-   */
-  @Override public BigDecimal getBigDecimal(int index) {
-    throw new UnsupportedOperationException("Operation not supported");
-  }
-
-  /**
-   * To free the occupied memory
-   */
-  @Override public void freeMemory() {
-    if (isMemoryReleased) {
-      return;
-    }
-    MemoryAllocatorFactory.INSATANCE.getMemoryAllocator().free(dataPageMemoryBlock);
-    isMemoryReleased = true;
-    this.dataPageMemoryBlock = null;
-    this.isMemoryOccupied = false;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeBigDecimalMeasureChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeBigDecimalMeasureChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeBigDecimalMeasureChunkStore.java
deleted file mode 100644
index 4082689..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeBigDecimalMeasureChunkStore.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.datastore.chunk.store.impl.unsafe;
-
-import java.math.BigDecimal;
-import java.nio.ByteBuffer;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.memory.CarbonUnsafe;
-import org.apache.carbondata.core.memory.MemoryAllocatorFactory;
-import org.apache.carbondata.core.util.DataTypeUtil;
-
-/**
- * Responsible for storing big decimal array data in memory. Memory can be on-heap or
- * off-heap, based on the user configuration, using the unsafe interface
- */
-public class UnsafeBigDecimalMeasureChunkStore extends UnsafeAbstractMeasureDataChunkStore<byte[]> {
-
-  /**
-   * start position of data offsets
-   */
-  private long offsetStartPosition;
-
-  public UnsafeBigDecimalMeasureChunkStore(int numberOfRows) {
-    super(numberOfRows);
-  }
-
-  @Override public void putData(byte[] data) {
-    assert (!this.isMemoryOccupied);
-    this.dataPageMemoryBlock = MemoryAllocatorFactory.INSATANCE.getMemoryAllocator()
-        .allocate(data.length + (numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE));
-    this.offsetStartPosition = data.length;
-    // copy the data to memory
-    CarbonUnsafe.unsafe
-        .copyMemory(data, CarbonUnsafe.BYTE_ARRAY_OFFSET, dataPageMemoryBlock.getBaseObject(),
-            dataPageMemoryBlock.getBaseOffset(), dataPageMemoryBlock.size());
-    // As data is of variable length and the data format is
-    // <length in int><data><length in int><data>
-    // we need to store the offset of each value so data can be accessed directly,
-    // for example:
-    // data = {0,0,0,5,1,2,3,4,5,0,0,0,6,0,1,2,3,4,5,0,0,0,2,8,9}
-    // the values stored in the offset table are the positions of the actual data:
-    // [4,13,23]
-    // to compute the next offset we add the actual data length + 4 bytes used for
-    // storing the length
-    // start position will be used to store the current data position
-    int startOffset = 0;
-    // position from where offsets will start
-    long pointerOffsets = this.offsetStartPosition;
-    // the first value starts at byte 4, because data is stored first in the memory
-    // block and the first four bytes hold the length of the first row,
-    // which we have to skip
-    CarbonUnsafe.unsafe.putInt(dataPageMemoryBlock.getBaseObject(),
-        dataPageMemoryBlock.getBaseOffset() + pointerOffsets,
-        CarbonCommonConstants.INT_SIZE_IN_BYTE);
-    // incrementing the pointers as first value is already filled and as we are storing as int
-    // we need to increment the 4 bytes to set the position of the next value to set
-    pointerOffsets += CarbonCommonConstants.INT_SIZE_IN_BYTE;
-    // creating a byte buffer which will wrap the length of the row
-    // using byte buffer as unsafe will return bytes in little-endian encoding
-    ByteBuffer buffer = ByteBuffer.allocate(CarbonCommonConstants.INT_SIZE_IN_BYTE);
-    // store length of data
-    byte[] length = new byte[CarbonCommonConstants.INT_SIZE_IN_BYTE];
-    // as first offset is already stored, we need to start from the 2nd row in data array
-    for (int i = 1; i < numberOfRows; i++) {
-      // first copy the length of previous row
-      CarbonUnsafe.unsafe.copyMemory(dataPageMemoryBlock.getBaseObject(),
-          dataPageMemoryBlock.getBaseOffset() + startOffset, length, CarbonUnsafe.BYTE_ARRAY_OFFSET,
-          CarbonCommonConstants.INT_SIZE_IN_BYTE);
-      buffer.put(length);
-      buffer.flip();
-      // so current row position will be
-      // previous row length + 4 bytes used for storing previous row data
-      startOffset += CarbonCommonConstants.INT_SIZE_IN_BYTE + buffer.getInt();
-      // the same byte buffer is reused to avoid creating one per row,
-      // so we need to clear it before the next iteration
-      buffer.clear();
-      // now put the offset of the current row; we add 4 more bytes because the
-      // current row also has a length part that readers must skip
-      CarbonUnsafe.unsafe.putInt(dataPageMemoryBlock.getBaseObject(),
-          dataPageMemoryBlock.getBaseOffset() + pointerOffsets,
-          startOffset + CarbonCommonConstants.INT_SIZE_IN_BYTE);
-      // incrementing the pointers as first value is already filled and as we are storing as int
-      // we need to increment the 4 bytes to set the position of the next value to set
-      pointerOffsets += CarbonCommonConstants.INT_SIZE_IN_BYTE;
-    }
-    // mark the block occupied once, after all offsets are written
-    this.isMemoryOccupied = true;
-  }
-
-  /**
-   * to get the big decimal value
-   *
-   * @param index
-   * @return big decimal value based on index
-   */
-  @Override public BigDecimal getBigDecimal(int index) {
-    // to get the row from the memory block we do the following:
-    // 1. get the current row offset
-    // 2. if it is not the last row, get the next row offset and subtract
-    //    the current row offset + 4 bytes (to skip the data length) from it
-    //    to get the row length; otherwise subtract the current row offset
-    //    from the complete data length
-    int currentDataOffset = CarbonUnsafe.unsafe.getInt(dataPageMemoryBlock.getBaseObject(),
-        dataPageMemoryBlock.getBaseOffset() + this.offsetStartPosition + (index
-            * CarbonCommonConstants.INT_SIZE_IN_BYTE));
-    int length = 0;
-    // calculating the length of data
-    if (index < numberOfRows - 1) {
-      int offsetOfNextData = CarbonUnsafe.unsafe.getInt(dataPageMemoryBlock.getBaseObject(),
-          dataPageMemoryBlock.getBaseOffset() + this.offsetStartPosition + ((index + 1)
-              * CarbonCommonConstants.INT_SIZE_IN_BYTE));
-      length = offsetOfNextData - (currentDataOffset + CarbonCommonConstants.INT_SIZE_IN_BYTE);
-    } else {
-      // for last record we need to subtract with data length
-      length = (int) this.offsetStartPosition - currentDataOffset;
-    }
-    byte[] row = new byte[length];
-    CarbonUnsafe.unsafe.copyMemory(dataPageMemoryBlock.getBaseObject(),
-        dataPageMemoryBlock.getBaseOffset() + currentDataOffset, row,
-        CarbonUnsafe.BYTE_ARRAY_OFFSET, length);
-    return DataTypeUtil.byteToBigDecimal(row);
-  }
-
-}
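
For readers following the layout: the deleted store keeps variable-length rows in one
block as <4-byte length><data> pairs followed by an offset table, so getBigDecimal(index)
derives each row's length from two neighbouring offsets. The same bookkeeping as a
minimal on-heap sketch (plain ByteBuffer instead of the unsafe block; class and variable
names here are illustrative, not CarbonData API):

    import java.nio.ByteBuffer;
    import java.util.Arrays;

    // Sketch of the <4-byte length><data>... layout plus trailing offset table.
    public class VarLengthLayoutSketch {
      public static void main(String[] args) {
        byte[][] rows = { {1, 2, 3, 4, 5}, {0, 1, 2, 3, 4, 5}, {8, 9} };
        int dataLength = 0;
        for (byte[] row : rows) {
          dataLength += 4 + row.length;            // 4-byte length prefix per row
        }
        ByteBuffer data = ByteBuffer.allocate(dataLength);
        int[] offsets = new int[rows.length];      // offset of each row's payload
        for (int i = 0; i < rows.length; i++) {
          offsets[i] = data.position() + 4;        // skip the length prefix
          data.putInt(rows[i].length);
          data.put(rows[i]);
        }
        // random access: row length = next offset - (current offset + 4),
        // or total data length - current offset for the last row
        for (int i = 0; i < rows.length; i++) {
          int length = (i < rows.length - 1)
              ? offsets[i + 1] - (offsets[i] + 4)
              : dataLength - offsets[i];
          byte[] row = new byte[length];
          System.arraycopy(data.array(), offsets[i], row, 0, length);
          System.out.println(i + " -> " + Arrays.toString(row));  // offsets: 4, 13, 23
        }
      }
    }

The last row has no successor, so its length is derived from the total data length
instead, exactly as the deleted getBigDecimal does.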

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeByteMeasureChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeByteMeasureChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeByteMeasureChunkStore.java
deleted file mode 100644
index f44ddb5..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeByteMeasureChunkStore.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store.impl.unsafe;
-
-import org.apache.carbondata.core.memory.CarbonUnsafe;
-import org.apache.carbondata.core.memory.MemoryAllocatorFactory;
-
-/**
- * Responsible for storing byte array data in memory. Memory can be on-heap or
- * off-heap, based on the user configuration, using the unsafe interface
- */
-public class UnsafeByteMeasureChunkStore extends UnsafeAbstractMeasureDataChunkStore<byte[]> {
-
-  public UnsafeByteMeasureChunkStore(int numberOfRows) {
-    super(numberOfRows);
-  }
-
-  /**
-   * Below method will be used to put byte array data to memory
-   *
-   * @param data
-   */
-  @Override public void putData(byte[] data) {
-    assert (!this.isMemoryOccupied);
-    this.dataPageMemoryBlock =
-        MemoryAllocatorFactory.INSATANCE.getMemoryAllocator().allocate(data.length);
-    // copy the data to memory
-    CarbonUnsafe.unsafe
-        .copyMemory(data, CarbonUnsafe.BYTE_ARRAY_OFFSET, dataPageMemoryBlock.getBaseObject(),
-            dataPageMemoryBlock.getBaseOffset(), dataPageMemoryBlock.size());
-    this.isMemoryOccupied = true;
-  }
-
-  /**
-   * to get the byte value
-   *
-   * @param index
-   * @return byte value based on index
-   */
-  @Override public byte getByte(int index) {
-    return CarbonUnsafe.unsafe
-        .getByte(dataPageMemoryBlock.getBaseObject(), dataPageMemoryBlock.getBaseOffset() + index);
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeDoubleMeasureChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeDoubleMeasureChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeDoubleMeasureChunkStore.java
deleted file mode 100644
index 40c7753..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeDoubleMeasureChunkStore.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store.impl.unsafe;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.memory.CarbonUnsafe;
-import org.apache.carbondata.core.memory.MemoryAllocatorFactory;
-
-/**
- * Responsible for storing double array data in memory. Memory can be on-heap or
- * off-heap, based on the user configuration, using the unsafe interface
- */
-public class UnsafeDoubleMeasureChunkStore extends UnsafeAbstractMeasureDataChunkStore<double[]> {
-
-  public UnsafeDoubleMeasureChunkStore(int numberOfRows) {
-    super(numberOfRows);
-  }
-
-  /**
-   * Below method will be used to put double array data to memory
-   *
-   * @param data
-   */
-  @Override public void putData(double[] data) {
-    assert (!this.isMemoryOccupied);
-    this.dataPageMemoryBlock = MemoryAllocatorFactory.INSATANCE.getMemoryAllocator()
-        .allocate(data.length * CarbonCommonConstants.DOUBLE_SIZE_IN_BYTE);
-    // copy the data to memory
-    CarbonUnsafe.unsafe
-        .copyMemory(data, CarbonUnsafe.DOUBLE_ARRAY_OFFSET, dataPageMemoryBlock.getBaseObject(),
-            dataPageMemoryBlock.getBaseOffset(), dataPageMemoryBlock.size());
-    this.isMemoryOccupied = true;
-  }
-
-  /**
-   * to get the double value
-   *
-   * @param index
-   * @return double value based on index
-   */
-  @Override public double getDouble(int index) {
-    return CarbonUnsafe.unsafe.getDouble(dataPageMemoryBlock.getBaseObject(),
-        dataPageMemoryBlock.getBaseOffset() + (index * CarbonCommonConstants.DOUBLE_SIZE_IN_BYTE));
-  }
-
-}
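
All five fixed-width stores (byte, short, int, long, double) reduce random access to one
address computation: base offset plus index times the element width. A minimal sketch of
that arithmetic with a plain ByteBuffer (illustrative only; the real classes go through
CarbonUnsafe and a memory block):

    import java.nio.ByteBuffer;

    // Sketch: fixed-width element access as base + index * elementWidth.
    public class FixedWidthSketch {
      public static void main(String[] args) {
        double[] values = {1.5, 2.5, 3.5};
        ByteBuffer page = ByteBuffer.allocate(values.length * Double.BYTES);
        for (double v : values) {
          page.putDouble(v);                              // putData: copy into the page
        }
        int index = 2;
        double v = page.getDouble(index * Double.BYTES);  // getDouble(index)
        System.out.println(v);                            // prints 3.5
      }
    }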

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeIntMeasureChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeIntMeasureChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeIntMeasureChunkStore.java
deleted file mode 100644
index 1e1a5d2..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeIntMeasureChunkStore.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store.impl.unsafe;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.memory.CarbonUnsafe;
-import org.apache.carbondata.core.memory.MemoryAllocatorFactory;
-
-/**
- * Responsible for storing int array data in memory. Memory can be on-heap or
- * off-heap, based on the user configuration, using the unsafe interface
- */
-public class UnsafeIntMeasureChunkStore extends UnsafeAbstractMeasureDataChunkStore<int[]> {
-
-  public UnsafeIntMeasureChunkStore(int numberOfRows) {
-    super(numberOfRows);
-  }
-
-  /**
-   * Below method will be used to put int array data to memory
-   *
-   * @param data
-   */
-  @Override public void putData(int[] data) {
-    assert (!this.isMemoryOccupied);
-    this.dataPageMemoryBlock = MemoryAllocatorFactory.INSATANCE.getMemoryAllocator()
-        .allocate(data.length * CarbonCommonConstants.INT_SIZE_IN_BYTE);
-    // copy the data to memory
-    CarbonUnsafe.unsafe
-        .copyMemory(data, CarbonUnsafe.INT_ARRAY_OFFSET, dataPageMemoryBlock.getBaseObject(),
-            dataPageMemoryBlock.getBaseOffset(), dataPageMemoryBlock.size());
-    this.isMemoryOccupied = true;
-  }
-
-  /**
-   * to get the int value
-   *
-   * @param index
-   * @return int value based on index
-   */
-  @Override public int getInt(int index) {
-    return CarbonUnsafe.unsafe.getInt(dataPageMemoryBlock.getBaseObject(),
-        dataPageMemoryBlock.getBaseOffset() + (index * CarbonCommonConstants.INT_SIZE_IN_BYTE));
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeLongMeasureChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeLongMeasureChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeLongMeasureChunkStore.java
deleted file mode 100644
index 82ebd13..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeLongMeasureChunkStore.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store.impl.unsafe;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.memory.CarbonUnsafe;
-import org.apache.carbondata.core.memory.MemoryAllocatorFactory;
-
-/**
- * Responsible for storing long array data in memory. Memory can be on-heap or
- * off-heap, based on the user configuration, using the unsafe interface
- */
-public class UnsafeLongMeasureChunkStore extends UnsafeAbstractMeasureDataChunkStore<long[]> {
-
-  public UnsafeLongMeasureChunkStore(int numberOfRows) {
-    super(numberOfRows);
-  }
-
-  /**
-   * Below method will be used to put long array data to memory
-   *
-   * @param data
-   */
-  @Override public void putData(long[] data) {
-    assert (!this.isMemoryOccupied);
-    this.dataPageMemoryBlock = MemoryAllocatorFactory.INSATANCE.getMemoryAllocator()
-        .allocate(data.length * CarbonCommonConstants.LONG_SIZE_IN_BYTE);
-    // copy the data to memory
-    CarbonUnsafe.unsafe
-        .copyMemory(data, CarbonUnsafe.LONG_ARRAY_OFFSET, dataPageMemoryBlock.getBaseObject(),
-            dataPageMemoryBlock.getBaseOffset(), dataPageMemoryBlock.size());
-    this.isMemoryOccupied = true;
-  }
-
-  /**
-   * to get the long value
-   *
-   * @param index
-   * @return long value based on index
-   */
-  @Override public long getLong(int index) {
-    return CarbonUnsafe.unsafe.getLong(dataPageMemoryBlock.getBaseObject(),
-        dataPageMemoryBlock.getBaseOffset() + (index * CarbonCommonConstants.LONG_SIZE_IN_BYTE));
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeShortMeasureChunkStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeShortMeasureChunkStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeShortMeasureChunkStore.java
deleted file mode 100644
index afe6ce7..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/store/impl/unsafe/UnsafeShortMeasureChunkStore.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.store.impl.unsafe;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.memory.CarbonUnsafe;
-import org.apache.carbondata.core.memory.MemoryAllocatorFactory;
-
-/**
- * Responsible for storing short array data in memory. Memory can be on-heap or
- * off-heap, based on the user configuration, using the unsafe interface
- */
-public class UnsafeShortMeasureChunkStore extends UnsafeAbstractMeasureDataChunkStore<short[]> {
-
-  public UnsafeShortMeasureChunkStore(int numberOfRows) {
-    super(numberOfRows);
-  }
-
-  /**
-   * Below method will be used to put short array data to memory
-   *
-   * @param data
-   */
-  @Override public void putData(short[] data) {
-    assert (!this.isMemoryOccupied);
-    this.dataPageMemoryBlock = MemoryAllocatorFactory.INSATANCE.getMemoryAllocator()
-        .allocate(data.length * CarbonCommonConstants.SHORT_SIZE_IN_BYTE);
-    // copy the data to memory
-    CarbonUnsafe.unsafe
-        .copyMemory(data, CarbonUnsafe.SHORT_ARRAY_OFFSET, dataPageMemoryBlock.getBaseObject(),
-            dataPageMemoryBlock.getBaseOffset(), dataPageMemoryBlock.size());
-    this.isMemoryOccupied = true;
-  }
-
-  /**
-   * to get the short value
-   *
-   * @param index
-   * @return short value based on index
-   */
-  @Override public short getShort(int index) {
-    return CarbonUnsafe.unsafe.getShort(dataPageMemoryBlock.getBaseObject(),
-        dataPageMemoryBlock.getBaseOffset() + (index * CarbonCommonConstants.SHORT_SIZE_IN_BYTE));
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForInt.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForInt.java b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForInt.java
index 78e6d35..27194bb 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForInt.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForInt.java
@@ -148,6 +148,15 @@ public class BlockIndexerStorageForInt implements IndexStorage<int[]> {
     return rowIdPage;
   }
 
+  @Override
+  public int getRowIdPageLengthInBytes() {
+    if (rowIdPage != null) {
+      return rowIdPage.length * 4;
+    } else {
+      return 0;
+    }
+  }
+
   /**
    * @return the rowIdRlePage
    */
@@ -155,6 +164,15 @@ public class BlockIndexerStorageForInt implements IndexStorage<int[]> {
     return rowIdRlePage;
   }
 
+  @Override
+  public int getRowIdRlePageLengthInBytes() {
+    if (rowIdRlePage != null) {
+      return rowIdRlePage.length * 4;
+    } else {
+      return 0;
+    }
+  }
+
   /**
    * @return the dataPage
    */
@@ -205,6 +223,15 @@ public class BlockIndexerStorageForInt implements IndexStorage<int[]> {
     return dataRlePage;
   }
 
+  @Override
+  public int getDataRlePageLengthInBytes() {
+    if (dataRlePage != null) {
+      return dataRlePage.length * 4;
+    } else {
+      return 0;
+    }
+  }
+
   @Override public int getTotalSize() {
     return totalSize;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForNoInvertedIndexForInt.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForNoInvertedIndexForInt.java b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForNoInvertedIndexForInt.java
index c168614..218694f 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForNoInvertedIndexForInt.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForNoInvertedIndexForInt.java
@@ -60,6 +60,11 @@ public class BlockIndexerStorageForNoInvertedIndexForInt implements IndexStorage
     return new int[0];
   }
 
+  @Override
+  public int getDataRlePageLengthInBytes() {
+    return 0;
+  }
+
   @Override public int getTotalSize() {
     return totalSize;
   }
@@ -77,6 +82,11 @@ public class BlockIndexerStorageForNoInvertedIndexForInt implements IndexStorage
     return new int[0];
   }
 
+  @Override
+  public int getRowIdPageLengthInBytes() {
+    return 0;
+  }
+
   /**
    * no use
    *
@@ -86,6 +96,11 @@ public class BlockIndexerStorageForNoInvertedIndexForInt implements IndexStorage
     return new int[0];
   }
 
+  @Override
+  public int getRowIdRlePageLengthInBytes() {
+    return 0;
+  }
+
   /**
    * @return the dataPage
    */

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForNoInvertedIndexForShort.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForNoInvertedIndexForShort.java b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForNoInvertedIndexForShort.java
index 5712655..911a260 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForNoInvertedIndexForShort.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForNoInvertedIndexForShort.java
@@ -77,6 +77,11 @@ public class BlockIndexerStorageForNoInvertedIndexForShort implements IndexStora
     return new short[0];
   }
 
+  @Override
+  public int getDataRlePageLengthInBytes() {
+    return 0;
+  }
+
   @Override public int getTotalSize() {
     return totalSize;
   }
@@ -94,6 +99,11 @@ public class BlockIndexerStorageForNoInvertedIndexForShort implements IndexStora
     return new short[0];
   }
 
+  @Override
+  public int getRowIdPageLengthInBytes() {
+    return 0;
+  }
+
   /**
    * no use
    *
@@ -103,6 +113,11 @@ public class BlockIndexerStorageForNoInvertedIndexForShort implements IndexStora
     return new short[0];
   }
 
+  @Override
+  public int getRowIdRlePageLengthInBytes() {
+    return 0;
+  }
+
   /**
    * @return the dataPage
    */

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForShort.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForShort.java b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForShort.java
index 2ffb610..7303b66 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForShort.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/BlockIndexerStorageForShort.java
@@ -155,6 +155,15 @@ public class BlockIndexerStorageForShort implements IndexStorage<short[]> {
     return rowIdPage;
   }
 
+  @Override
+  public int getRowIdPageLengthInBytes() {
+    if (rowIdPage != null) {
+      return rowIdPage.length * 2;
+    } else {
+      return 0;
+    }
+  }
+
   /**
    * @return the rowIdRlePage
    */
@@ -162,6 +171,15 @@ public class BlockIndexerStorageForShort implements IndexStorage<short[]> {
     return rowIdRlePage;
   }
 
+  @Override
+  public int getRowIdRlePageLengthInBytes() {
+    if (rowIdRlePage != null) {
+      return rowIdRlePage.length * 2;
+    } else {
+      return 0;
+    }
+  }
+
   /**
    * @return the dataPage
    */
@@ -224,6 +242,15 @@ public class BlockIndexerStorageForShort implements IndexStorage<short[]> {
     return dataRlePage;
   }
 
+  @Override
+  public int getDataRlePageLengthInBytes() {
+    if (dataRlePage != null) {
+      return dataRlePage.length * 2;
+    } else {
+      return 0;
+    }
+  }
+
   @Override public int getTotalSize() {
     return totalSize;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/columnar/IndexStorage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/IndexStorage.java b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/IndexStorage.java
index 0570317..0ef8cad 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/columnar/IndexStorage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/columnar/IndexStorage.java
@@ -18,16 +18,23 @@
 package org.apache.carbondata.core.datastore.columnar;
 
 public interface IndexStorage<T> {
+
   boolean isAlreadySorted();
 
   T getRowIdPage();
 
+  int getRowIdPageLengthInBytes();
+
   T getRowIdRlePage();
 
+  int getRowIdRlePageLengthInBytes();
+
   byte[][] getDataPage();
 
   T getDataRlePage();
 
+  int getDataRlePageLengthInBytes();
+
   int getTotalSize();
 
   /**
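
The new *PageLengthInBytes methods let the writer size output buffers without touching
the page contents again: each implementation multiplies the page length by the element
width (4 bytes for the int-backed storage, 2 for the short-backed one) and returns 0 when
the page is absent. The contract restated as a small sketch (hypothetical helper, not
part of the interface):

    // Sketch of the length-in-bytes contract added above.
    public class PageLengthSketch {
      static int lengthInBytes(int[] page) {
        return page == null ? 0 : page.length * 4;    // int pages: 4 bytes each
      }

      static int lengthInBytes(short[] page) {
        return page == null ? 0 : page.length * 2;    // short pages: 2 bytes each
      }

      public static void main(String[] args) {
        System.out.println(lengthInBytes(new int[]{7, 8, 9}));  // 12
        System.out.println(lengthInBytes((short[]) null));      // 0
      }
    }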

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java
index ef2c9ec..180c092 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java
@@ -19,11 +19,10 @@ package org.apache.carbondata.core.datastore.page;
 
 import java.io.IOException;
 import java.math.BigDecimal;
-import java.util.BitSet;
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datastore.compression.Compressor;
-import org.apache.carbondata.core.datastore.page.statistics.ColumnPageStatsVO;
+import org.apache.carbondata.core.datastore.page.statistics.ColumnPageStatsCollector;
 import org.apache.carbondata.core.memory.MemoryException;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.datatype.DecimalConverterFactory;
@@ -46,11 +45,8 @@ public abstract class ColumnPage {
   protected int scale;
   protected int precision;
 
-  // statistics of this column page
-  private ColumnPageStatsVO stats;
-
-  // The index of the rowId whose value is null, will be set to 1
-  private BitSet nullBitSet;
+  // statistics collector for this column page
+  private ColumnPageStatsCollector statsCollector;
 
   protected DecimalConverterFactory.DecimalConverter decimalConverter;
 
@@ -74,14 +70,18 @@ public abstract class ColumnPage {
     return dataType;
   }
 
-  public ColumnPageStatsVO getStatistics() {
-    return stats;
+  public Object getStatistics() {
+    return statsCollector.getPageStats();
   }
 
   public int getPageSize() {
     return pageSize;
   }
 
+  public void setStatsCollector(ColumnPageStatsCollector statsCollector) {
+    this.statsCollector = statsCollector;
+  }
+
   private static ColumnPage createVarLengthPage(DataType dataType, int pageSize, int scale,
       int precision) {
     if (unsafe) {
@@ -299,36 +299,40 @@ public abstract class ColumnPage {
   public void putData(int rowId, Object value) {
     if (value == null) {
       putNull(rowId);
-      stats.updateNull();
+      statsCollector.updateNull(rowId);
       return;
     }
     switch (dataType) {
       case BYTE:
-        // TODO: change sort step to store as exact data type
         putByte(rowId, (byte) value);
+        statsCollector.update((byte) value);
         break;
       case SHORT:
         putShort(rowId, (short) value);
+        statsCollector.update((short) value);
         break;
       case INT:
         putInt(rowId, (int) value);
+        statsCollector.update((int) value);
         break;
       case LONG:
         putLong(rowId, (long) value);
+        statsCollector.update((long) value);
         break;
       case DOUBLE:
         putDouble(rowId, (double) value);
+        statsCollector.update((double) value);
         break;
       case DECIMAL:
         putDecimal(rowId, (BigDecimal) value);
         break;
       case BYTE_ARRAY:
         putBytes(rowId, (byte[]) value);
+        statsCollector.update((byte[]) value);
         break;
       default:
         throw new RuntimeException("unsupported data type: " + dataType);
     }
-    stats.update(value);
   }
 
   /**
@@ -381,7 +385,6 @@ public abstract class ColumnPage {
    * Set null at rowId
    */
   private void putNull(int rowId) {
-    nullBitSet.set(rowId);
     switch (dataType) {
       case BYTE:
         putByte(rowId, (byte) 0);
@@ -405,13 +408,6 @@ public abstract class ColumnPage {
   }
 
   /**
-   * Get null bitset page
-   */
-  public BitSet getNullBitSet() {
-    return nullBitSet;
-  }
-
-  /**
    * Get byte value at rowId
    */
   public abstract byte getByte(int rowId);
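
After this change the page no longer owns a stats object or a null bitset; a pluggable
ColumnPageStatsCollector observes every putData call instead. A minimal collector with
the update/updateNull/getPageStats shape used above (the min/max layout and stats type
are assumptions for illustration, not the actual CarbonData collector):

    import java.util.Arrays;
    import java.util.BitSet;

    // Sketch of a per-page min/max collector driven from putData.
    public class MinMaxCollectorSketch {
      private long min = Long.MAX_VALUE;
      private long max = Long.MIN_VALUE;
      private final BitSet nullBitSet = new BitSet();  // null tracking moves here

      void updateNull(int rowId) {
        nullBitSet.set(rowId);
      }

      void update(long value) {      // byte/short/int overloads can funnel here
        min = Math.min(min, value);
        max = Math.max(max, value);
      }

      long[] getPageStats() {
        return new long[] {min, max};
      }

      public static void main(String[] args) {
        MinMaxCollectorSketch collector = new MinMaxCollectorSketch();
        collector.update(5);
        collector.update(-3);
        collector.updateNull(2);
        System.out.println(Arrays.toString(collector.getPageStats()));  // [-3, 5]
      }
    }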

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/EncodedTablePage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/EncodedTablePage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/EncodedTablePage.java
new file mode 100644
index 0000000..ea9c373
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/EncodedTablePage.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page;
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.datastore.columnar.IndexStorage;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedColumnPage;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedDimensionPage;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedMeasurePage;
+import org.apache.carbondata.core.datastore.page.key.TablePageKey;
+import org.apache.carbondata.core.util.CarbonUtil;
+
+/**
+ * Table page after encoding and compression.
+ */
+public class EncodedTablePage {
+
+  // encoded data and metadata for each dimension column
+  private EncodedDimensionPage[] dimensions;
+
+  // encoded data and metadata for each measure column
+  private EncodedMeasurePage[] measures;
+
+  // key of this page
+  private TablePageKey pageKey;
+
+  // number of rows in this page
+  private int pageSize;
+
+  // true if it is last page of all input rows
+  private boolean isLastPage;
+
+  // size in bytes of all encoded columns (including data and metadata)
+  private int encodedSize;
+
+  public static EncodedTablePage newEmptyInstance() {
+    EncodedTablePage page = new EncodedTablePage();
+    page.pageSize = 0;
+    page.encodedSize = 0;
+    page.measures = new EncodedMeasurePage[0];
+    page.dimensions = new EncodedDimensionPage[0];
+    return page;
+  }
+
+  public static EncodedTablePage newInstance(int pageSize,
+      EncodedDimensionPage[] dimensions, EncodedMeasurePage[] measures,
+      TablePageKey tablePageKey) {
+    return new EncodedTablePage(pageSize, dimensions, measures, tablePageKey);
+  }
+
+  private EncodedTablePage() {
+  }
+
+  private EncodedTablePage(int pageSize, EncodedDimensionPage[] encodedDimensions,
+      EncodedMeasurePage[] encodedMeasures, TablePageKey tablePageKey) {
+    this.dimensions = encodedDimensions;
+    this.measures = encodedMeasures;
+    this.pageSize = pageSize;
+    this.pageKey = tablePageKey;
+    this.encodedSize = calculatePageSize(encodedDimensions, encodedMeasures);
+  }
+
+  // return size in bytes of this encoded page
+  private int calculatePageSize(EncodedDimensionPage[] encodedDimensions,
+      EncodedMeasurePage[] encodedMeasures) {
+    int size = 0;
+    int totalEncodedDimensionDataLength = 0;
+    int totalEncodedMeasuredDataLength = 0;
+    // add row id index length
+    for (EncodedDimensionPage dimension : encodedDimensions) {
+      IndexStorage indexStorage = dimension.getIndexStorage();
+      if (!indexStorage.isAlreadySorted()) {
+        size += indexStorage.getRowIdPageLengthInBytes() +
+            indexStorage.getRowIdRlePageLengthInBytes() +
+            CarbonCommonConstants.INT_SIZE_IN_BYTE;
+      }
+      if (indexStorage.getDataRlePageLengthInBytes() > 0) {
+        size += indexStorage.getDataRlePageLengthInBytes();
+      }
+      totalEncodedDimensionDataLength += dimension.getEncodedData().length;
+    }
+    for (EncodedColumnPage measure : encodedMeasures) {
+      totalEncodedMeasuredDataLength += measure.getEncodedData().length;
+    }
+
+    for (EncodedDimensionPage encodedDimension : encodedDimensions) {
+      size += CarbonUtil.getByteArray(encodedDimension.getDataChunk2()).length;
+    }
+    for (EncodedMeasurePage encodedMeasure : encodedMeasures) {
+      size += CarbonUtil.getByteArray(encodedMeasure.getDataChunk2()).length;
+    }
+    size += totalEncodedDimensionDataLength + totalEncodedMeasuredDataLength;
+    return size;
+  }
+
+  public int getEncodedSize() {
+    return encodedSize;
+  }
+
+  public int getPageSize() {
+    return pageSize;
+  }
+
+  public int getNumDimensions() {
+    return dimensions.length;
+  }
+
+  public int getNumMeasures() {
+    return measures.length;
+  }
+
+  public TablePageKey getPageKey() {
+    return pageKey;
+  }
+
+  public boolean isLastPage() {
+    return isLastPage;
+  }
+
+  public void setIsLastPage(boolean isLastPage) {
+    this.isLastPage = isLastPage;
+  }
+
+  public EncodedMeasurePage getMeasure(int measureIndex) {
+    return measures[measureIndex];
+  }
+
+  public EncodedMeasurePage[] getMeasures() {
+    return measures;
+  }
+
+  public EncodedDimensionPage getDimension(int dimensionIndex) {
+    return dimensions[dimensionIndex];
+  }
+
+  public EncodedDimensionPage[] getDimensions() {
+    return dimensions;
+  }
+}
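
calculatePageSize above sums, per dimension: the row id page, its RLE page and a 4-byte
length field (only when the column is not already sorted), the data RLE page when
present, the encoded data, and the serialized DataChunk2 metadata; measures contribute
encoded data plus metadata. The per-dimension accounting in isolation (a sketch with
illustrative parameters, not the actual method):

    // Sketch of the per-dimension size accounting in calculatePageSize.
    public class PageSizeSketch {
      static int dimensionSize(boolean alreadySorted, int rowIdPageBytes,
          int rowIdRlePageBytes, int dataRlePageBytes, int encodedDataBytes,
          int metadataBytes) {
        int size = 0;
        if (!alreadySorted) {
          // inverted index: row id page + row id RLE page + 4-byte length field
          size += rowIdPageBytes + rowIdRlePageBytes + 4;
        }
        size += dataRlePageBytes + encodedDataBytes + metadataBytes;
        return size;
      }

      public static void main(String[] args) {
        // unsorted column: 40B row ids, 8B row id RLE, no data RLE,
        // 100B encoded data, 20B metadata
        System.out.println(dimensionSize(false, 40, 8, 0, 100, 20));  // 172
      }
    }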

http://git-wip-us.apache.org/repos/asf/carbondata/blob/bc3e6843/core/src/main/java/org/apache/carbondata/core/datastore/page/LazyColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/LazyColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/LazyColumnPage.java
index b0978d3..2864e80 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/LazyColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/LazyColumnPage.java
@@ -20,7 +20,7 @@ package org.apache.carbondata.core.datastore.page;
 import java.math.BigDecimal;
 
 /**
- * This is a decorator of column page, it performs transformation lazily (when caller calls getXXX
+ * This is a decorator of a column page; it performs decoding lazily (when the caller calls a getXXX
  * method to get the value from the page)
  */
 public class LazyColumnPage extends ColumnPage {
@@ -38,8 +38,8 @@ public class LazyColumnPage extends ColumnPage {
     this.codec = codec;
   }
 
-  public static ColumnPage newPage(ColumnPage columnPage, PrimitiveCodec transform) {
-    return new LazyColumnPage(columnPage, transform);
+  public static ColumnPage newPage(ColumnPage columnPage, PrimitiveCodec codec) {
+    return new LazyColumnPage(columnPage, codec);
   }
 
   @Override
@@ -94,7 +94,7 @@ public class LazyColumnPage extends ColumnPage {
 
   @Override
   public BigDecimal getDecimal(int rowId) {
-    throw new UnsupportedOperationException("internal error");
+    return columnPage.getDecimal(rowId);
   }
 
   @Override
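
The decorator defers the codec transformation to read time, so decoding cost is paid per
accessed value rather than per page. The pattern in miniature (the interfaces below are
illustrative stand-ins for ColumnPage and PrimitiveCodec):

    import java.util.function.LongUnaryOperator;

    // Sketch of lazy decoding: wrap a page, apply the codec on each read.
    public class LazyPageSketch {
      interface LongPage {
        long getLong(int rowId);
      }

      static LongPage lazyDecode(LongPage encoded, LongUnaryOperator codec) {
        return rowId -> codec.applyAsLong(encoded.getLong(rowId));  // decode on read
      }

      public static void main(String[] args) {
        long[] deltas = {0, 3, 1};                 // stored as deltas from a base
        LongPage stored = rowId -> deltas[rowId];
        LongPage decoded = lazyDecode(stored, v -> v + 100);  // base value = 100
        System.out.println(decoded.getLong(1));    // prints 103
      }
    }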


[6/7] carbondata git commit: [CARBONDATA-1268] Support encoding strategy for dimension columns

Posted by ja...@apache.org.
[CARBONDATA-1268] Support encoding strategy for dimension columns

In this PR, dimension encoding is changed to use EncodingStrategy instead of being hard-coded.
In the future, dimension encoding can be adjusted by extending EncodingStrategy.
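
The extension point is codec selection per dimension type: the strategy maps each
dimension type to one of the index codecs added in this commit (see
DictDimensionIndexCodec, DirectDictDimensionIndexCodec, ComplexDimensionIndexCodec and
HighCardDictDimensionIndexCodec in the file list below), and a custom strategy only needs
to override that mapping. Roughly (stand-in types for illustration, not the actual
CarbonData classes; the mapping shown is a plausible reading of the file list, not
confirmed):

    // Sketch of per-dimension-type codec selection behind a strategy.
    public class EncodingStrategySketch {
      enum DimensionType { GLOBAL_DICTIONARY, DIRECT_DICTIONARY, COMPLEX, PLAIN_VALUE }

      interface ColumnPageCodec {
        String name();
      }

      static class DefaultEncodingStrategy {
        ColumnPageCodec newCodec(DimensionType type) {
          switch (type) {
            case GLOBAL_DICTIONARY: return () -> "DictDimensionIndexCodec";
            case DIRECT_DICTIONARY: return () -> "DirectDictDimensionIndexCodec";
            case COMPLEX:           return () -> "ComplexDimensionIndexCodec";
            default:                return () -> "HighCardDictDimensionIndexCodec";
          }
        }
      }

      // adjusting encoding means overriding codec selection, nothing else
      static class MyEncodingStrategy extends DefaultEncodingStrategy {
        @Override
        ColumnPageCodec newCodec(DimensionType type) {
          return super.newCodec(type);  // swap in custom codecs per type here
        }
      }

      public static void main(String[] args) {
        System.out.println(new DefaultEncodingStrategy()
            .newCodec(DimensionType.COMPLEX).name());
      }
    }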

This closes #1136


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/a5af0ff2
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/a5af0ff2
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/a5af0ff2

Branch: refs/heads/master
Commit: a5af0ff238230bf64c8ac987bec9977d3f081ff2
Parents: bc3e684
Author: jackylk <ja...@huawei.com>
Authored: Thu Jul 13 09:21:30 2017 +0800
Committer: Raghunandan S <ca...@gmail.com>
Committed: Fri Jul 28 01:06:03 2017 +0800

----------------------------------------------------------------------
 .../carbondata/core/datastore/TableSpec.java    | 275 +++++++------------
 ...CompressedMeasureChunkFileBasedReaderV1.java |   2 +-
 ...CompressedMeasureChunkFileBasedReaderV2.java |   2 +-
 ...CompressedMeasureChunkFileBasedReaderV3.java |   2 +-
 .../core/datastore/page/ColumnPage.java         |   3 +
 .../core/datastore/page/ComplexColumnPage.java  |   4 +-
 .../page/encoding/AdaptiveCompressionCodec.java |   6 +
 .../page/encoding/ColumnPageCodec.java          |   7 +
 .../encoding/ComplexDimensionIndexCodec.java    |  74 +++++
 .../page/encoding/DefaultEncodingStrategy.java  |  37 ++-
 .../page/encoding/DictDimensionIndexCodec.java  |  65 +++++
 .../page/encoding/DirectCompressCodec.java      |   6 +
 .../encoding/DirectDictDimensionIndexCodec.java |  66 +++++
 .../page/encoding/EncodingStrategy.java         |   8 +-
 .../HighCardDictDimensionIndexCodec.java        |  66 +++++
 .../page/encoding/IndexStorageCodec.java        |  48 ++++
 .../schema/table/column/CarbonDimension.java    |   4 +
 .../spark/rdd/CarbonDataRDDFactory.scala        |   9 +-
 .../store/CarbonFactDataHandlerColumnar.java    |   6 +-
 .../carbondata/processing/store/TablePage.java  | 218 ++++-----------
 .../util/CarbonDataProcessorUtil.java           |  20 ++
 21 files changed, 576 insertions(+), 352 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java b/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
index 87c4934..f1d3546 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/TableSpec.java
@@ -25,216 +25,145 @@ import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
 
 public class TableSpec {
 
-  // contains name and type for each dimension
-  private DimensionSpec dimensionSpec;
-  // contains name and type for each measure
-  private MeasureSpec measureSpec;
+  // column spec for each dimension and measure
+  private DimensionSpec[] dimensionSpec;
+  private MeasureSpec[] measureSpec;
 
-  public TableSpec(List<CarbonDimension> dimensions, List<CarbonMeasure> measures) {
-    dimensionSpec = new DimensionSpec(dimensions);
-    measureSpec = new MeasureSpec(measures);
-  }
-
-  public DimensionSpec getDimensionSpec() {
-    return dimensionSpec;
-  }
-
-  public MeasureSpec getMeasureSpec() {
-    return measureSpec;
-  }
+  // number of simple dimensions
+  private int numSimpleDimensions;
 
-  public class DimensionSpec {
-
-    // field name of each dimension, in schema order
-    private String[] fieldName;
-
-    // encoding type of each dimension, in schema order
-    private DimensionType[] types;
-
-    // number of simple dimensions
-    private int numSimpleDimensions;
-
-    // number of complex dimensions
-    private int numComplexDimensions;
-
-    // number of dimensions after complex column expansion
-    private int numDimensionExpanded;
-
-    DimensionSpec(List<CarbonDimension> dimensions) {
-      // first calculate total number of columnar field considering column group and complex column
-      numDimensionExpanded = 0;
-      numSimpleDimensions = 0;
-      numComplexDimensions = 0;
-      boolean inColumnGroup = false;
-      for (CarbonDimension dimension : dimensions) {
-        if (dimension.isColumnar()) {
-          if (inColumnGroup) {
-            inColumnGroup = false;
-          }
-          if (dimension.isComplex()) {
-            numDimensionExpanded += dimension.getNumDimensionsExpanded();
-            numComplexDimensions++;
-          } else {
-            numDimensionExpanded++;
-            numSimpleDimensions++;
-          }
-        } else {
-          // column group
-          if (!inColumnGroup) {
-            inColumnGroup = true;
-            numDimensionExpanded++;
-            numSimpleDimensions++;
-          }
+  public TableSpec(List<CarbonDimension> dimensions, List<CarbonMeasure> measures) {
+    // first calculate total number of columnar field considering column group and complex column
+    numSimpleDimensions = 0;
+    for (CarbonDimension dimension : dimensions) {
+      if (dimension.isColumnar()) {
+        if (!dimension.isComplex()) {
+          numSimpleDimensions++;
         }
+      } else {
+        throw new UnsupportedOperationException("column group is not supported");
       }
+    }
+    dimensionSpec = new DimensionSpec[dimensions.size()];
+    measureSpec = new MeasureSpec[measures.size()];
+    addDimensions(dimensions);
+    addMeasures(measures);
+  }
 
-      // then extract dimension name and type for each column
-      fieldName = new String[numDimensionExpanded];
-      types = new DimensionType[numDimensionExpanded];
-      inColumnGroup = false;
-      int index = 0;
-      for (CarbonDimension dimension : dimensions) {
-        if (dimension.isColumnar()) {
-          if (inColumnGroup) {
-            inColumnGroup = false;
-          }
-          if (dimension.isComplex()) {
-            int count = addDimension(index, dimension);
-            index += count;
-          } else if (dimension.getDataType() == DataType.TIMESTAMP ||
-                     dimension.getDataType() == DataType.DATE) {
-            addSimpleDimension(index++, dimension.getColName(), DimensionType.DIRECT_DICTIONARY);
-          } else if (dimension.isGlobalDictionaryEncoding()) {
-            addSimpleDimension(index++, dimension.getColName(), DimensionType.GLOBAL_DICTIONARY);
-          } else {
-            addSimpleDimension(index++, dimension.getColName(), DimensionType.PLAIN_VALUE);
-          }
+  private void addDimensions(List<CarbonDimension> dimensions) {
+    int dimIndex = 0;
+    for (int i = 0; i < dimensions.size(); i++) {
+      CarbonDimension dimension = dimensions.get(i);
+      if (dimension.isColumnar()) {
+        if (dimension.isComplex()) {
+          DimensionSpec spec = new DimensionSpec(DimensionType.COMPLEX, dimension);
+          dimensionSpec[dimIndex++] = spec;
+        } else if (dimension.isDirectDictionaryEncoding()) {
+          DimensionSpec spec = new DimensionSpec(DimensionType.DIRECT_DICTIONARY, dimension);
+          dimensionSpec[dimIndex++] = spec;
+        } else if (dimension.isGlobalDictionaryEncoding()) {
+          DimensionSpec spec = new DimensionSpec(DimensionType.GLOBAL_DICTIONARY, dimension);
+          dimensionSpec[dimIndex++] = spec;
         } else {
-          // column group
-          if (!inColumnGroup) {
-            addSimpleDimension(index++, dimension.getColName(), DimensionType.COLUMN_GROUP);
-            inColumnGroup = true;
-          }
+          DimensionSpec spec = new DimensionSpec(DimensionType.PLAIN_VALUE, dimension);
+          dimensionSpec[dimIndex++] = spec;
         }
       }
     }
+  }
 
-    private void addSimpleDimension(int index, String name, DimensionType type) {
-      fieldName[index] = name;
-      types[index] = type;
+  private void addMeasures(List<CarbonMeasure> measures) {
+    for (int i = 0; i < measures.size(); i++) {
+      CarbonMeasure measure = measures.get(i);
+      measureSpec[i] = new MeasureSpec(measure.getColName(), measure.getDataType());
     }
+  }
 
-    // add dimension and return number of columns added
-    private int addDimension(int index, CarbonDimension dimension) {
-      switch (dimension.getDataType()) {
-        case ARRAY:
-          addSimpleDimension(index, dimension.getColName() + ".offset", DimensionType.COMPLEX);
-          List<CarbonDimension> arrayChildren = dimension.getListOfChildDimensions();
-          int count = 1;
-          for (CarbonDimension child : arrayChildren) {
-            count += addDimension(index + count, child);
-          }
-          return count;
-        case STRUCT:
-          addSimpleDimension(index, dimension.getColName() + ".empty", DimensionType.COMPLEX);
-          List<CarbonDimension> structChildren = dimension.getListOfChildDimensions();
-          count = 1;
-          for (CarbonDimension child : structChildren) {
-            count += addDimension(index + count, child);
-          }
-          return count;
-        case TIMESTAMP:
-        case DATE:
-          addSimpleDimension(index, dimension.getColName(), DimensionType.DIRECT_DICTIONARY);
-          return 1;
-        default:
-          addSimpleDimension(index, dimension.getColName(),
-              dimension.isGlobalDictionaryEncoding() ?
-                  DimensionType.GLOBAL_DICTIONARY : DimensionType.PLAIN_VALUE);
-          return 1;
-      }
-    }
+  public DimensionSpec getDimensionSpec(int dimensionIndex) {
+    return dimensionSpec[dimensionIndex];
+  }
 
+  public MeasureSpec getMeasureSpec(int measureIndex) {
+    return measureSpec[measureIndex];
+  }
 
-    /**
-     * return the dimension type of index'th dimension. index is from 0 to numDimensions
-     */
-    public DimensionType getType(int index) {
-      assert (index >= 0 && index < types.length);
-      return types[index];
+  public int getNumSimpleDimensions() {
+    return numSimpleDimensions;
+  }
+
+  public int getNumDimensions() {
+    return dimensionSpec.length;
+  }
+
+    public int getScale(int index) {
+      assert (index >= 0 && index < precision.length);
+      return scale[index];
     }
 
-    /**
-     * return number of dimensions
+    public int getPrecision(int index) {
+      assert (index >= 0 && index < precision.length);
+      return precision[index];
+    }
+
+    /**
+     * return number of measures
      */
-    public int getNumSimpleDimensions() {
-      return numSimpleDimensions;
-    }
+    public int getNumMeasures() {
+      return measureSpec.length;
+    }
+
+  public class ColumnSpec {
+    // field name of this column
+    private String fieldName;
+
+    // data type of this column
+    private DataType dataType;
 
-    public int getNumComplexDimensions() {
-      return numComplexDimensions;
+    ColumnSpec(String fieldName, DataType dataType) {
+      this.fieldName = fieldName;
+      this.dataType = dataType;
     }
 
-    public int getNumExpandedDimensions() {
-      return numDimensionExpanded;
+    public DataType getDataType() {
+      return dataType;
     }
 
+    public String getFieldName() {
+      return fieldName;
+    }
   }
 
-  public class MeasureSpec {
+  public class DimensionSpec extends ColumnSpec {
 
-    // field name of each measure, in schema order
-    private String[] fieldName;
+    // dimension type of this dimension
+    private DimensionType type;
 
-    // data type of each measure, in schema order
-    private DataType[] types;
+    // indicate whether this dimension is in sort column
+    private boolean inSortColumns;
 
-    private int[] scale;
+    // indicate whether this dimension need to do inverted index
+    private boolean doInvertedIndex;
 
-    private int[] precision;
-
-    MeasureSpec(List<CarbonMeasure> measures) {
-      fieldName = new String[measures.size()];
-      types = new DataType[measures.size()];
-      scale = new int[measures.size()];
-      precision = new int[measures.size()];
-      int i = 0;
-      for (CarbonMeasure measure: measures) {
-        add(i++, measure.getColName(), measure.getDataType(), measure.getScale(),
-            measure.getPrecision());
-      }
+    DimensionSpec(DimensionType dimensionType, CarbonDimension dimension) {
+      super(dimension.getColName(), dimension.getDataType());
+      this.type = dimensionType;
+      this.inSortColumns = dimension.isSortColumn();
+      this.doInvertedIndex = dimension.isUseInvertedIndex();
     }
 
-    private void add(int index, String name, DataType type, int scale, int precision) {
-      fieldName[index] = name;
-      types[index] = type;
-      this.scale[index] = scale;
-      this.precision[index] = precision;
+    public DimensionType getDimensionType() {
+      return type;
     }
 
-    /**
-     * return the data type of index'th measure. index is from 0 to numMeasures
-     */
-    public DataType getType(int index) {
-      assert (index >= 0 && index < types.length);
-      return types[index];
+    public boolean isInSortColumns() {
+      return inSortColumns;
     }
 
-    public int getScale(int index) {
-      assert (index >= 0 && index < precision.length);
-      return scale[index];
+    public boolean isDoInvertedIndex() {
+      return doInvertedIndex;
     }
+  }
 
-    public int getPrecision(int index) {
-      assert (index >= 0 && index < precision.length);
-      return precision[index];
-    }
+  public class MeasureSpec extends ColumnSpec {
 
-    /**
-     * return number of measures
-     */
-    public int getNumMeasures() {
-      return types.length;
+    MeasureSpec(String fieldName, DataType dataType) {
+      super(fieldName, dataType);
     }
   }
 }
\ No newline at end of file
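
The reshaped TableSpec classifies every dimension exactly once in addDimensions, and
downstream code only reads the resulting DimensionSpec. The dispatch in isolation (a
sketch; the boolean flags stand in for the CarbonDimension predicates used above):

    // Sketch of the per-dimension classification in addDimensions.
    public class DimensionDispatchSketch {
      enum DimensionType { COMPLEX, DIRECT_DICTIONARY, GLOBAL_DICTIONARY, PLAIN_VALUE }

      static DimensionType classify(boolean isComplex, boolean isDirectDictionary,
          boolean isGlobalDictionary) {
        if (isComplex) {
          return DimensionType.COMPLEX;
        } else if (isDirectDictionary) {
          return DimensionType.DIRECT_DICTIONARY;   // DATE / TIMESTAMP columns
        } else if (isGlobalDictionary) {
          return DimensionType.GLOBAL_DICTIONARY;
        } else {
          return DimensionType.PLAIN_VALUE;
        }
      }

      public static void main(String[] args) {
        System.out.println(classify(false, true, false));   // DIRECT_DICTIONARY
        System.out.println(classify(false, false, false));  // PLAIN_VALUE
      }
    }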

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
index 8f69a7c..6bf65da 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
@@ -98,7 +98,7 @@ public class CompressedMeasureChunkFileBasedReaderV1 extends AbstractMeasureChun
     DataChunk dataChunk = measureColumnChunks.get(blockIndex);
     ValueEncoderMeta meta = dataChunk.getValueEncoderMeta().get(0);
 
-    ColumnPageCodec codec = strategy.createCodec(meta, -1, -1);
+    ColumnPageCodec codec = strategy.newCodec(meta);
     ColumnPage page = codec.decode(measureRawColumnChunk.getRawData().array(),
         measureRawColumnChunk.getOffSet(), dataChunk.getDataPageLength());
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
index 09f367a..7511b6e 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
@@ -135,7 +135,7 @@ public class CompressedMeasureChunkFileBasedReaderV2 extends AbstractMeasureChun
     byte[] encodedMeta = measureColumnChunk.getEncoder_meta().get(0).array();
 
     ValueEncoderMeta meta = CarbonUtil.deserializeEncoderMetaV3(encodedMeta);
-    ColumnPageCodec codec = strategy.createCodec(meta);
+    ColumnPageCodec codec = strategy.newCodec(meta);
     byte[] rawData = measureRawColumnChunk.getRawData().array();
     return codec.decode(rawData, copyPoint, measureColumnChunk.data_page_length);
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
index 492d46a..1881791 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
@@ -232,7 +232,7 @@ public class CompressedMeasureChunkFileBasedReaderV3 extends AbstractMeasureChun
 
     ColumnPageCodecMeta meta = new ColumnPageCodecMeta();
     meta.deserialize(encodedMeta);
-    ColumnPageCodec codec = strategy.createCodec(meta);
+    ColumnPageCodec codec = strategy.newCodec(meta);
     byte[] rawData = measureRawColumnChunk.getRawData().array();
     return codec.decode(rawData, copyPoint, measureColumnChunk.data_page_length);
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java
index 180c092..90300d9 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/ColumnPage.java
@@ -183,6 +183,9 @@ public abstract class ColumnPage {
         case DECIMAL:
           instance = newDecimalPage(new byte[pageSize][], scale, precision);
           break;
+        case BYTE_ARRAY:
+          instance = new SafeVarLengthColumnPage(dataType, pageSize);
+          break;
         default:
           throw new RuntimeException("Unsupported data dataType: " + dataType);
       }

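With the BYTE_ARRAY branch above, variable-length pages are created through
the same newPage factory as fixed-length ones. A minimal sketch of the call
pattern (mirroring the TablePage changes later in this commit; the wrapper
class and import paths are assumptions, not part of the commit):

    import org.apache.carbondata.core.datastore.page.ColumnPage;
    import org.apache.carbondata.core.datastore.page.statistics.VarLengthPageStatsCollector;
    import org.apache.carbondata.core.memory.MemoryException;
    import org.apache.carbondata.core.metadata.datatype.DataType;

    public class VarLengthPageExample {
      // allocate a page backed by SafeVarLengthColumnPage and attach the
      // variable-length stats collector, as TablePage does for dimensions
      public static ColumnPage createVarLengthPage(int pageSize) throws MemoryException {
        ColumnPage page = ColumnPage.newPage(DataType.BYTE_ARRAY, pageSize);
        page.setStatsCollector(VarLengthPageStatsCollector.newInstance());
        return page;
      }
    }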
http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/page/ComplexColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/ComplexColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/ComplexColumnPage.java
index d9b8e54..5698e39 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/ComplexColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/ComplexColumnPage.java
@@ -23,7 +23,6 @@ import java.util.List;
 
 import org.apache.carbondata.common.CarbonIterator;
 
-
 // Represent a complex column page, e.g. Array, Struct type column
 public class ComplexColumnPage {
 
@@ -77,4 +76,7 @@ public class ComplexColumnPage {
     return depth;
   }
 
+  public int getPageSize() {
+    return pageSize;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveCompressionCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveCompressionCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveCompressionCodec.java
index 6b3a365..7ae606f 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveCompressionCodec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/AdaptiveCompressionCodec.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 
 import org.apache.carbondata.core.datastore.compression.Compressor;
 import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.datastore.page.ComplexColumnPage;
 import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
 import org.apache.carbondata.core.memory.MemoryException;
 import org.apache.carbondata.core.metadata.datatype.DataType;
@@ -60,6 +61,11 @@ public abstract class AdaptiveCompressionCodec implements ColumnPageCodec {
   public abstract ColumnPage decode(byte[] input, int offset, int length) throws MemoryException;
 
   @Override
+  public EncodedColumnPage[] encodeComplexColumn(ComplexColumnPage input) {
+    throw new UnsupportedOperationException("internal error");
+  }
+
+  @Override
   public String toString() {
     return String.format("%s[src type: %s, target type: %s, stats(%s)]",
         getClass().getName(), srcDataType, targetDataType, stats);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageCodec.java
index a77bf69..ac7a79e 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageCodec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ColumnPageCodec.java
@@ -20,6 +20,7 @@ package org.apache.carbondata.core.datastore.page.encoding;
 import java.io.IOException;
 
 import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.datastore.page.ComplexColumnPage;
 import org.apache.carbondata.core.memory.MemoryException;
 
 /**
@@ -39,6 +40,12 @@ public interface ColumnPageCodec {
   EncodedColumnPage encode(ColumnPage input) throws MemoryException, IOException;
 
   /**
+   * encode a complex column page and return the encoded data
+   * TODO: remove this method after ComplexColumnPage is unified with ColumnPage
+   */
+  EncodedColumnPage[] encodeComplexColumn(ComplexColumnPage input);
+
+  /**
    * decode byte array from offset to a column page
    * @param input encoded byte array
    * @param offset start offset of the input to decode

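The decode half of this interface is what the three reader changes at the top
of this commit exercise. A condensed sketch of that read path (names taken
from the V2 reader hunk above; the wrapper class is an assumption, and the
strategy is constructed the way TablePage constructs its own):

    import org.apache.carbondata.core.datastore.page.ColumnPage;
    import org.apache.carbondata.core.datastore.page.encoding.ColumnPageCodec;
    import org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingStrategy;
    import org.apache.carbondata.core.datastore.page.encoding.EncodingStrategy;
    import org.apache.carbondata.core.memory.MemoryException;
    import org.apache.carbondata.core.metadata.ValueEncoderMeta;
    import org.apache.carbondata.core.util.CarbonUtil;

    public class DecodeExample {
      private static final EncodingStrategy strategy = new DefaultEncodingStrategy();

      // deserialize the stored encoder meta, pick a codec, decode the raw bytes
      static ColumnPage decodeMeasure(byte[] encodedMeta, byte[] rawData,
          int offset, int length) throws MemoryException {
        ValueEncoderMeta meta = CarbonUtil.deserializeEncoderMetaV3(encodedMeta);
        ColumnPageCodec codec = strategy.newCodec(meta);
        return codec.decode(rawData, offset, length);
      }
    }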
http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ComplexDimensionIndexCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ComplexDimensionIndexCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ComplexDimensionIndexCodec.java
new file mode 100644
index 0000000..12efba4
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/ComplexDimensionIndexCodec.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import java.util.Iterator;
+
+import org.apache.carbondata.core.datastore.DimensionType;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForInt;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForShort;
+import org.apache.carbondata.core.datastore.columnar.IndexStorage;
+import org.apache.carbondata.core.datastore.compression.Compressor;
+import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.datastore.page.ComplexColumnPage;
+import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
+import org.apache.carbondata.core.util.ByteUtil;
+
+public class ComplexDimensionIndexCodec extends IndexStorageCodec {
+
+  ComplexDimensionIndexCodec(boolean isSort, boolean isInvertedIndex, Compressor compressor) {
+    super(isSort, isInvertedIndex, compressor);
+  }
+
+  @Override
+  public String getName() {
+    return "ComplexDimensionIndexCodec";
+  }
+
+  @Override
+  public EncodedColumnPage encode(ColumnPage input) throws MemoryException {
+    throw new UnsupportedOperationException("internal error");
+  }
+
+  @Override
+  public EncodedColumnPage[] encodeComplexColumn(ComplexColumnPage input) {
+    EncodedColumnPage[] encodedPages = new EncodedColumnPage[input.getDepth()];
+    int index = 0;
+    Iterator<byte[][]> iterator = input.iterator();
+    while (iterator.hasNext()) {
+      byte[][] data = iterator.next();
+      encodedPages[index++] = encodeChildColumn(input.getPageSize(), data);
+    }
+    return encodedPages;
+  }
+
+  private EncodedColumnPage encodeChildColumn(int pageSize, byte[][] data) {
+    IndexStorage indexStorage;
+    if (version == ColumnarFormatVersion.V3) {
+      indexStorage = new BlockIndexerStorageForShort(data, false, false, false);
+    } else {
+      indexStorage = new BlockIndexerStorageForInt(data, false, false, false);
+    }
+    byte[] flattened = ByteUtil.flatten(indexStorage.getDataPage());
+    byte[] compressed = compressor.compressByte(flattened);
+    return new EncodedDimensionPage(pageSize, compressed, indexStorage,
+        DimensionType.COMPLEX);
+  }
+
+}

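Each depth level of the complex column becomes its own encoded child page, so
a column of depth N yields N EncodedColumnPage entries, in iterator order. The
consuming side (mirroring the COMPLEX branch added to TablePage later in this
commit) simply flattens them into the dimension page list:

    // sketch of the caller; surrounding method omitted
    EncodedColumnPage[] children = codec.encodeComplexColumn(complexColumnPage);
    for (EncodedColumnPage child : children) {
      // every child page of a complex column is a dimension page
      encodedComplexDimensions.add((EncodedDimensionPage) child);
    }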
http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingStrategy.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingStrategy.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingStrategy.java
index d2d3a44..b9aac73 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingStrategy.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DefaultEncodingStrategy.java
@@ -17,6 +17,7 @@
 
 package org.apache.carbondata.core.datastore.page.encoding;
 
+import org.apache.carbondata.core.datastore.TableSpec;
 import org.apache.carbondata.core.datastore.compression.Compressor;
 import org.apache.carbondata.core.datastore.compression.CompressorFactory;
 import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
@@ -125,16 +126,46 @@ public class DefaultEncodingStrategy extends EncodingStrategy {
     }
   }
 
-  @Override ColumnPageCodec newCodecForFloatingType(SimpleStatsResult stats) {
+  @Override
+  ColumnPageCodec newCodecForFloatingType(SimpleStatsResult stats) {
     return DirectCompressCodec.newInstance(stats, compressor);
   }
 
   // for decimal, currently it is a very basic implementation
-  @Override ColumnPageCodec newCodecForDecimalType(SimpleStatsResult stats) {
+  @Override
+  ColumnPageCodec newCodecForDecimalType(SimpleStatsResult stats) {
     return DirectCompressCodec.newInstance(stats, compressor);
   }
 
-  @Override ColumnPageCodec newCodecForByteArrayType(SimpleStatsResult stats) {
+  @Override
+  ColumnPageCodec newCodecForByteArrayType(SimpleStatsResult stats) {
     return DirectCompressCodec.newInstance(stats, compressor);
   }
+
+  @Override
+  public ColumnPageCodec newCodec(TableSpec.DimensionSpec dimensionSpec) {
+    Compressor compressor = CompressorFactory.getInstance().getCompressor();
+    switch (dimensionSpec.getDimensionType()) {
+      case GLOBAL_DICTIONARY:
+        return new DictDimensionIndexCodec(
+            dimensionSpec.isInSortColumns(),
+            dimensionSpec.isInSortColumns() && dimensionSpec.isDoInvertedIndex(),
+            compressor);
+      case DIRECT_DICTIONARY:
+        return new DirectDictDimensionIndexCodec(
+            dimensionSpec.isInSortColumns(),
+            dimensionSpec.isInSortColumns() && dimensionSpec.isDoInvertedIndex(),
+            compressor);
+      case PLAIN_VALUE:
+        return new HighCardDictDimensionIndexCodec(
+            dimensionSpec.isInSortColumns(),
+            dimensionSpec.isInSortColumns() && dimensionSpec.isDoInvertedIndex(),
+            compressor);
+      case COMPLEX:
+        return new ComplexDimensionIndexCodec(false, false, compressor);
+      default:
+        throw new RuntimeException("unsupported dimension type: " +
+            dimensionSpec.getDimensionType());
+    }
+  }
 }

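This factory is now the single point where a dimension's TableSpec entry picks
its codec, and it encodes one policy worth noting: an inverted index is built
only for columns that are also sort columns (the isInSortColumns() &&
isDoInvertedIndex() conjunction above). Write-path usage, as wired up in
TablePage later in this commit (fragment only, surrounding loop omitted):

    TableSpec.DimensionSpec spec = tableSpec.getDimensionSpec(i);
    ColumnPageCodec codec = encodingStrategy.newCodec(spec);
    EncodedDimensionPage encoded =
        (EncodedDimensionPage) codec.encode(dictDimensionPages[dictIndex++]);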
http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DictDimensionIndexCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DictDimensionIndexCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DictDimensionIndexCodec.java
new file mode 100644
index 0000000..20b63ba
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DictDimensionIndexCodec.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import org.apache.carbondata.core.datastore.DimensionType;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForInt;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForInt;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForShort;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForShort;
+import org.apache.carbondata.core.datastore.columnar.IndexStorage;
+import org.apache.carbondata.core.datastore.compression.Compressor;
+import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
+import org.apache.carbondata.core.util.ByteUtil;
+
+public class DictDimensionIndexCodec extends IndexStorageCodec {
+
+  DictDimensionIndexCodec(boolean isSort, boolean isInvertedIndex, Compressor compressor) {
+    super(isSort, isInvertedIndex, compressor);
+  }
+
+  @Override
+  public String getName() {
+    return "DictDimensionIndexCodec";
+  }
+
+  @Override
+  public EncodedColumnPage encode(ColumnPage input) {
+    IndexStorage indexStorage;
+    byte[][] data = input.getByteArrayPage();
+    if (isInvertedIndex) {
+      if (version == ColumnarFormatVersion.V3) {
+        indexStorage = new BlockIndexerStorageForShort(data, true, false, isSort);
+      } else {
+        indexStorage = new BlockIndexerStorageForInt(data, true, false, isSort);
+      }
+    } else {
+      if (version == ColumnarFormatVersion.V3) {
+        indexStorage = new BlockIndexerStorageForNoInvertedIndexForShort(data, false);
+      } else {
+        indexStorage = new BlockIndexerStorageForNoInvertedIndexForInt(data);
+      }
+    }
+    byte[] flattened = ByteUtil.flatten(indexStorage.getDataPage());
+    byte[] compressed = compressor.compressByte(flattened);
+    return new EncodedDimensionPage(input.getPageSize(), compressed, indexStorage,
+        DimensionType.GLOBAL_DICTIONARY);
+  }
+
+}

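The same four-way selection (inverted index or not, V3 or an earlier format)
recurs in the direct-dictionary and high-cardinality codecs below; only the
RLE and no-dictionary flags differ between them. Factored out as a sketch,
with parameter meanings inferred from the constructor calls above (this
helper does not exist in the commit):

    static IndexStorage selectIndexStorage(byte[][] data, boolean useInvertedIndex,
        boolean applyRle, boolean isNoDictionary, boolean isSort,
        ColumnarFormatVersion version) {
      if (useInvertedIndex) {
        return version == ColumnarFormatVersion.V3
            ? new BlockIndexerStorageForShort(data, applyRle, isNoDictionary, isSort)
            : new BlockIndexerStorageForInt(data, applyRle, isNoDictionary, isSort);
      }
      return version == ColumnarFormatVersion.V3
          ? new BlockIndexerStorageForNoInvertedIndexForShort(data, isNoDictionary)
          : new BlockIndexerStorageForNoInvertedIndexForInt(data);
    }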
http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectCompressCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectCompressCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectCompressCodec.java
index a1d4b61..664926c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectCompressCodec.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectCompressCodec.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 
 import org.apache.carbondata.core.datastore.compression.Compressor;
 import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.datastore.page.ComplexColumnPage;
 import org.apache.carbondata.core.datastore.page.LazyColumnPage;
 import org.apache.carbondata.core.datastore.page.PrimitiveCodec;
 import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
@@ -58,6 +59,11 @@ public class DirectCompressCodec implements ColumnPageCodec {
   }
 
   @Override
+  public EncodedColumnPage[] encodeComplexColumn(ComplexColumnPage input) {
+    throw new UnsupportedOperationException("internal error");
+  }
+
+  @Override
   public ColumnPage decode(byte[] input, int offset, int length) throws MemoryException {
     ColumnPage page = ColumnPage
         .decompress(compressor, stats.getDataType(), input, offset, length, stats.getScale(),

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectDictDimensionIndexCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectDictDimensionIndexCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectDictDimensionIndexCodec.java
new file mode 100644
index 0000000..d3e5e66
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/DirectDictDimensionIndexCodec.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import org.apache.carbondata.core.datastore.DimensionType;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForInt;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForInt;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForShort;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForShort;
+import org.apache.carbondata.core.datastore.columnar.IndexStorage;
+import org.apache.carbondata.core.datastore.compression.Compressor;
+import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
+import org.apache.carbondata.core.util.ByteUtil;
+
+public class DirectDictDimensionIndexCodec extends IndexStorageCodec {
+
+  DirectDictDimensionIndexCodec(boolean isSort, boolean isInvertedIndex, Compressor compressor) {
+    super(isSort, isInvertedIndex, compressor);
+  }
+
+  @Override
+  public String getName() {
+    return "DirectDictDimensionIndexCodec";
+  }
+
+  @Override
+  public EncodedColumnPage encode(ColumnPage input) throws MemoryException {
+    IndexStorage indexStorage;
+    byte[][] data = input.getByteArrayPage();
+    if (isInvertedIndex) {
+      if (version == ColumnarFormatVersion.V3) {
+        indexStorage = new BlockIndexerStorageForShort(data, false, false, isSort);
+      } else {
+        indexStorage = new BlockIndexerStorageForInt(data, false, false, isSort);
+      }
+    } else {
+      if (version == ColumnarFormatVersion.V3) {
+        indexStorage = new BlockIndexerStorageForNoInvertedIndexForShort(data, false);
+      } else {
+        indexStorage = new BlockIndexerStorageForNoInvertedIndexForInt(data);
+      }
+    }
+    byte[] flattened = ByteUtil.flatten(indexStorage.getDataPage());
+    byte[] compressed = compressor.compressByte(flattened);
+    return new EncodedDimensionPage(input.getPageSize(), compressed, indexStorage,
+        DimensionType.DIRECT_DICTIONARY);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java
index ee13277..29219ea 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingStrategy.java
@@ -17,6 +17,7 @@
 
 package org.apache.carbondata.core.datastore.page.encoding;
 
+import org.apache.carbondata.core.datastore.TableSpec;
 import org.apache.carbondata.core.datastore.page.statistics.PrimitivePageStatsCollector;
 import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult;
 import org.apache.carbondata.core.metadata.ColumnPageCodecMeta;
@@ -30,7 +31,7 @@ public abstract class EncodingStrategy {
   /**
    * create codec based on the page data type and statistics
    */
-  public ColumnPageCodec createCodec(SimpleStatsResult stats) {
+  public ColumnPageCodec newCodec(SimpleStatsResult stats) {
     switch (stats.getDataType()) {
       case BYTE:
       case SHORT:
@@ -53,7 +54,7 @@ public abstract class EncodingStrategy {
   /**
    * create codec based on the page data type and statistics contained by ValueEncoderMeta
    */
-  public ColumnPageCodec createCodec(ValueEncoderMeta meta, int scale, int precision) {
+  public ColumnPageCodec newCodec(ValueEncoderMeta meta, int scale, int precision) {
     if (meta instanceof ColumnPageCodecMeta) {
       ColumnPageCodecMeta codecMeta = (ColumnPageCodecMeta) meta;
       SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(codecMeta);
@@ -108,4 +109,7 @@ public abstract class EncodingStrategy {
   // for byte array
   abstract ColumnPageCodec newCodecForByteArrayType(SimpleStatsResult stats);
 
+  // for dimension column
+  public abstract ColumnPageCodec newCodec(TableSpec.DimensionSpec dimensionSpec);
+
 }

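On the write path the stats-based overload drives measure encoding. A
condensed sketch of the call site as it appears in the TablePage changes
later in this commit (fragment only):

    SimpleStatsResult stats = (SimpleStatsResult) measurePage[i].getStatistics();
    ColumnPageCodec codec = encodingStrategy.newCodec(stats);
    EncodedMeasurePage encoded = (EncodedMeasurePage) codec.encode(measurePage[i]);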
http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/HighCardDictDimensionIndexCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/HighCardDictDimensionIndexCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/HighCardDictDimensionIndexCodec.java
new file mode 100644
index 0000000..c1620c6
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/HighCardDictDimensionIndexCodec.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import org.apache.carbondata.core.datastore.DimensionType;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForInt;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForInt;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForShort;
+import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForShort;
+import org.apache.carbondata.core.datastore.columnar.IndexStorage;
+import org.apache.carbondata.core.datastore.compression.Compressor;
+import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
+import org.apache.carbondata.core.util.ByteUtil;
+
+public class HighCardDictDimensionIndexCodec extends IndexStorageCodec {
+
+  HighCardDictDimensionIndexCodec(boolean isSort, boolean isInvertedIndex, Compressor compressor) {
+    super(isSort, isInvertedIndex, compressor);
+  }
+
+  @Override
+  public String getName() {
+    return "HighCardDictDimensionIndexCodec";
+  }
+
+  @Override
+  public EncodedColumnPage encode(ColumnPage input) throws MemoryException {
+    IndexStorage indexStorage;
+    byte[][] data = input.getByteArrayPage();
+    if (isInvertedIndex) {
+      if (version == ColumnarFormatVersion.V3) {
+        indexStorage = new BlockIndexerStorageForShort(data, false, true, isSort);
+      } else {
+        indexStorage = new BlockIndexerStorageForInt(data, false, true, isSort);
+      }
+    } else {
+      if (version == ColumnarFormatVersion.V3) {
+        indexStorage = new BlockIndexerStorageForNoInvertedIndexForShort(data, true);
+      } else {
+        indexStorage = new BlockIndexerStorageForNoInvertedIndexForInt(data);
+      }
+    }
+    byte[] flattened = ByteUtil.flatten(indexStorage.getDataPage());
+    byte[] compressed = compressor.compressByte(flattened);
+    return new EncodedDimensionPage(input.getPageSize(), compressed, indexStorage,
+        DimensionType.PLAIN_VALUE);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/IndexStorageCodec.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/IndexStorageCodec.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/IndexStorageCodec.java
new file mode 100644
index 0000000..3122b15
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/IndexStorageCodec.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.page.encoding;
+
+import org.apache.carbondata.core.datastore.compression.Compressor;
+import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.datastore.page.ComplexColumnPage;
+import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
+import org.apache.carbondata.core.util.CarbonProperties;
+
+public abstract class IndexStorageCodec implements ColumnPageCodec {
+  protected ColumnarFormatVersion version = CarbonProperties.getInstance().getFormatVersion();
+  protected Compressor compressor;
+  protected boolean isSort;
+  protected boolean isInvertedIndex;
+
+  IndexStorageCodec(boolean isSort, boolean isInvertedIndex, Compressor compressor) {
+    this.isSort = isSort;
+    this.isInvertedIndex = isInvertedIndex;
+    this.compressor = compressor;
+  }
+
+  @Override
+  public EncodedColumnPage[] encodeComplexColumn(ComplexColumnPage input) {
+    throw new UnsupportedOperationException("internal error");
+  }
+
+  @Override
+  public ColumnPage decode(byte[] input, int offset, int length) throws MemoryException {
+    throw new UnsupportedOperationException("internal error");
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java
index dd01c56..b4c052f 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java
@@ -114,6 +114,10 @@ public class CarbonDimension extends CarbonColumn {
     this.complexTypeOrdinal = complexTypeOrdinal;
   }
 
+  public boolean isDirectDictionaryEncoding() {
+    return getEncoder().contains(Encoding.DIRECT_DICTIONARY);
+  }
+
   public boolean isGlobalDictionaryEncoding() {
     return getEncoder().contains(Encoding.DICTIONARY);
   }

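The new predicate complements isGlobalDictionaryEncoding when classifying a
dimension. Since a direct-dictionary column typically carries the DICTIONARY
encoding as well, a classifier must test DIRECT_DICTIONARY first. A
hypothetical sketch (this method is not part of the commit):

    static DimensionType classify(CarbonDimension dim) {
      if (dim.isDirectDictionaryEncoding()) {
        return DimensionType.DIRECT_DICTIONARY;  // must precede the broader check
      } else if (dim.isGlobalDictionaryEncoding()) {
        return DimensionType.GLOBAL_DICTIONARY;
      }
      return DimensionType.PLAIN_VALUE;          // complex columns not covered here
    }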
http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
index db7717c..4c78fa9 100644
--- a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
+++ b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
@@ -706,22 +706,21 @@ object CarbonDataRDDFactory {
         }
       }
       // create new segment folder  in carbon store
-      if (!updateModel.isDefined) {
+      if (updateModel.isEmpty) {
         CarbonLoaderUtil.checkAndCreateCarbonDataLocation(storePath,
           carbonLoadModel.getSegmentId, carbonTable)
       }
       var loadStatus = CarbonCommonConstants.STORE_LOADSTATUS_SUCCESS
       var errorMessage: String = "DataLoad failure"
       var executorMessage: String = ""
-      val configuration = DataLoadProcessBuilder.createConfiguration(carbonLoadModel)
-      val sortScope = CarbonDataProcessorUtil.getSortScope(configuration)
+      val isSortTable = carbonTable.getNumberOfSortColumns > 0
+      val sortScope = CarbonDataProcessorUtil.getSortScope(carbonLoadModel.getSortScope)
       try {
         if (updateModel.isDefined) {
           loadDataFrameForUpdate()
         } else if (carbonTable.getPartitionInfo(carbonTable.getFactTableName) != null) {
           loadDataForPartitionTable()
-        } else if (configuration.isSortTable &&
-            sortScope.equals(SortScopeOptions.SortScope.GLOBAL_SORT)) {
+        } else if (isSortTable && sortScope.equals(SortScopeOptions.SortScope.GLOBAL_SORT)) {
           LOGGER.audit("Using global sort for loading.")
           status = DataLoadProcessBuilderOnSpark.loadDataUsingGlobalSort(sqlContext.sparkContext,
             dataFrame, carbonLoadModel)

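The getSortScope(String) overload used above (added to CarbonDataProcessorUtil
at the end of this commit) resolves the scope in three steps: the DDL-supplied
value if present, else the LOAD_SORT_SCOPE carbon property, else
LOAD_SORT_SCOPE_DEFAULT if resolution fails. The call site reduces to:

    // a null sort scope in the load model means "not specified in the DDL"
    SortScopeOptions.SortScope sortScope =
        CarbonDataProcessorUtil.getSortScope(carbonLoadModel.getSortScope());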
http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
index 6ed5d31..9c48af7 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
@@ -56,7 +56,6 @@ import org.apache.carbondata.processing.store.file.FileManager;
 import org.apache.carbondata.processing.store.file.IFileManagerComposite;
 import org.apache.carbondata.processing.store.writer.CarbonDataWriterVo;
 import org.apache.carbondata.processing.store.writer.CarbonFactDataWriter;
-import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
 
 /**
  * Fact data handler class to handle the fact data
@@ -173,9 +172,8 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
             CarbonCommonConstants.AGGREAGATE_COLUMNAR_KEY_BLOCK_DEFAULTVALUE));
     if (isAggKeyBlock) {
       int[] dimLens = model.getSegmentProperties().getDimColumnsCardinality();
-      for (int i = 0; i < model.getTableSpec().getDimensionSpec().getNumSimpleDimensions(); i++) {
-        if (CarbonDataProcessorUtil
-            .isRleApplicableForColumn(model.getTableSpec().getDimensionSpec().getType(i))) {
+      for (int i = 0; i < model.getTableSpec().getNumSimpleDimensions(); i++) {
+        if (model.getSegmentProperties().getDimensions().get(i).isGlobalDictionaryEncoding()) {
           this.rleEncodingForDictDimension[i] = true;
         }
       }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java b/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
index c5a9bec..9881e8e 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
@@ -22,25 +22,17 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
-import java.util.Iterator;
 import java.util.List;
 
-import org.apache.carbondata.core.datastore.DimensionType;
 import org.apache.carbondata.core.datastore.GenericDataType;
 import org.apache.carbondata.core.datastore.TableSpec;
-import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForInt;
-import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForInt;
-import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForNoInvertedIndexForShort;
-import org.apache.carbondata.core.datastore.columnar.BlockIndexerStorageForShort;
-import org.apache.carbondata.core.datastore.columnar.IndexStorage;
-import org.apache.carbondata.core.datastore.compression.Compressor;
-import org.apache.carbondata.core.datastore.compression.CompressorFactory;
 import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException;
 import org.apache.carbondata.core.datastore.page.ColumnPage;
 import org.apache.carbondata.core.datastore.page.ComplexColumnPage;
 import org.apache.carbondata.core.datastore.page.EncodedTablePage;
 import org.apache.carbondata.core.datastore.page.encoding.ColumnPageCodec;
 import org.apache.carbondata.core.datastore.page.encoding.DefaultEncodingStrategy;
+import org.apache.carbondata.core.datastore.page.encoding.EncodedColumnPage;
 import org.apache.carbondata.core.datastore.page.encoding.EncodedDimensionPage;
 import org.apache.carbondata.core.datastore.page.encoding.EncodedMeasurePage;
 import org.apache.carbondata.core.datastore.page.encoding.EncodingStrategy;
@@ -54,10 +46,8 @@ import org.apache.carbondata.core.keygenerator.KeyGenException;
 import org.apache.carbondata.core.memory.MemoryException;
 import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
 import org.apache.carbondata.core.metadata.datatype.DataType;
-import org.apache.carbondata.core.util.ByteUtil;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.DataTypeUtil;
-import org.apache.carbondata.processing.util.CarbonDataProcessorUtil;
 
 import org.apache.spark.sql.types.Decimal;
 
@@ -72,9 +62,9 @@ public class TablePage {
 
   // TODO: we should have separate class for key columns so that keys are stored together in
   // one vector to make it efficient for sorting
-  private ColumnPage[] dictDimensionPage;
-  private ColumnPage[] noDictDimensionPage;
-  private ComplexColumnPage[] complexDimensionPage;
+  private ColumnPage[] dictDimensionPages;
+  private ColumnPage[] noDictDimensionPages;
+  private ComplexColumnPage[] complexDimensionPages;
   private ColumnPage[] measurePage;
 
   // the number of rows in this page; it must be less than 65536 so that a row id fits in 16 bits
@@ -90,23 +80,23 @@ public class TablePage {
     this.model = model;
     this.pageSize = pageSize;
     int numDictDimension = model.getMDKeyGenerator().getDimCount();
-    dictDimensionPage = new ColumnPage[numDictDimension];
-    for (int i = 0; i < dictDimensionPage.length; i++) {
-      ColumnPage page = ColumnPage.newVarLengthPage(DataType.BYTE_ARRAY, pageSize);
+    dictDimensionPages = new ColumnPage[numDictDimension];
+    for (int i = 0; i < dictDimensionPages.length; i++) {
+      ColumnPage page = ColumnPage.newPage(DataType.BYTE_ARRAY, pageSize);
       page.setStatsCollector(VarLengthPageStatsCollector.newInstance());
-      dictDimensionPage[i] = page;
+      dictDimensionPages[i] = page;
     }
-    noDictDimensionPage = new ColumnPage[model.getNoDictionaryCount()];
-    for (int i = 0; i < noDictDimensionPage.length; i++) {
-      ColumnPage page = ColumnPage.newVarLengthPage(DataType.BYTE_ARRAY, pageSize);
+    noDictDimensionPages = new ColumnPage[model.getNoDictionaryCount()];
+    for (int i = 0; i < noDictDimensionPages.length; i++) {
+      ColumnPage page = ColumnPage.newPage(DataType.BYTE_ARRAY, pageSize);
       page.setStatsCollector(VarLengthPageStatsCollector.newInstance());
-      noDictDimensionPage[i] = page;
+      noDictDimensionPages[i] = page;
     }
-    complexDimensionPage = new ComplexColumnPage[model.getComplexColumnCount()];
-    for (int i = 0; i < complexDimensionPage.length; i++) {
+    complexDimensionPages = new ComplexColumnPage[model.getComplexColumnCount()];
+    for (int i = 0; i < complexDimensionPages.length; i++) {
       // here we still do not know the depth of the complex column; it will be initialized when
       // we get the first row.
-      complexDimensionPage[i] = null;
+      complexDimensionPages[i] = null;
     }
     measurePage = new ColumnPage[model.getMeasureCount()];
     DataType[] dataTypes = model.getMeasureDataType();
@@ -117,7 +107,7 @@ public class TablePage {
       page.setStatsCollector(PrimitivePageStatsCollector.newInstance(dataTypes[i], pageSize));
       measurePage[i] = page;
     }
-    boolean hasNoDictionary = noDictDimensionPage.length > 0;
+    boolean hasNoDictionary = noDictDimensionPages.length > 0;
     this.key = new TablePageKey(pageSize, model.getMDKeyGenerator(), model.getSegmentProperties(),
         hasNoDictionary);
   }
@@ -140,13 +130,13 @@ public class TablePage {
       throws KeyGenException {
     // 1. convert dictionary columns
     byte[][] keys = model.getSegmentProperties().getFixedLengthKeySplitter().splitKey(mdk);
-    for (int i = 0; i < dictDimensionPage.length; i++) {
-      dictDimensionPage[i].putData(rowId, keys[i]);
+    for (int i = 0; i < dictDimensionPages.length; i++) {
+      dictDimensionPages[i].putData(rowId, keys[i]);
     }
 
     // 2. convert noDictionary columns and complex columns.
-    int noDictionaryCount = noDictDimensionPage.length;
-    int complexColumnCount = complexDimensionPage.length;
+    int noDictionaryCount = noDictDimensionPages.length;
+    int complexColumnCount = complexDimensionPages.length;
     if (noDictionaryCount > 0 || complexColumnCount > 0) {
       byte[][] noDictAndComplex = WriteStepRowUtil.getNoDictAndComplexDimension(row);
       for (int i = 0; i < noDictAndComplex.length; i++) {
@@ -154,7 +144,7 @@ public class TablePage {
           // noDictionary columns, since it is variable length, we need to prepare each
           // element as LV result byte array (first two bytes are the length of the array)
           byte[] valueWithLength = addLengthToByteArray(noDictAndComplex[i]);
-          noDictDimensionPage[i].putData(rowId, valueWithLength);
+          noDictDimensionPages[i].putData(rowId, valueWithLength);
         } else {
           // complex columns
           addComplexColumn(i - noDictionaryCount, rowId, noDictAndComplex[i]);
@@ -194,13 +184,13 @@ public class TablePage {
     // initialize the page if first row
     if (rowId == 0) {
       int depthInComplexColumn = complexDataType.getColsCount();
-      complexDimensionPage[index] = new ComplexColumnPage(pageSize, depthInComplexColumn);
+      complexDimensionPages[index] = new ComplexColumnPage(pageSize, depthInComplexColumn);
     }
 
-    int depthInComplexColumn = complexDimensionPage[index].getDepth();
+    int depthInComplexColumn = complexDimensionPages[index].getDepth();
     // this is the result columnar data which will be added to page,
     // size of this list is the depth of complex column, we will fill it by input data
-    List<ArrayList<byte[]>> encodedComplexColumnar = new ArrayList<>();
+    List<ArrayList<byte[]>> encodedComplexColumnar = new ArrayList<>(depthInComplexColumn);
     for (int k = 0; k < depthInComplexColumn; k++) {
       encodedComplexColumnar.add(new ArrayList<byte[]>());
     }
@@ -221,15 +211,15 @@ public class TablePage {
     }
 
     for (int depth = 0; depth < depthInComplexColumn; depth++) {
-      complexDimensionPage[index].putComplexData(rowId, depth, encodedComplexColumnar.get(depth));
+      complexDimensionPages[index].putComplexData(rowId, depth, encodedComplexColumnar.get(depth));
     }
   }
 
   void freeMemory() {
-    for (ColumnPage page : dictDimensionPage) {
+    for (ColumnPage page : dictDimensionPages) {
       page.freeMemory();
     }
-    for (ColumnPage page : noDictDimensionPage) {
+    for (ColumnPage page : noDictDimensionPages) {
       page.freeMemory();
     }
     for (ColumnPage page : measurePage) {
@@ -264,146 +254,52 @@ public class TablePage {
       throws MemoryException, IOException {
     EncodedMeasurePage[] encodedMeasures = new EncodedMeasurePage[measurePage.length];
     for (int i = 0; i < measurePage.length; i++) {
-      SimpleStatsResult stats = (SimpleStatsResult)(measurePage[i].getStatistics());
-      ColumnPageCodec encoder = encodingStrategy.createCodec(stats);
+      ColumnPageCodec encoder =
+          encodingStrategy.newCodec((SimpleStatsResult)(measurePage[i].getStatistics()));
       encodedMeasures[i] = (EncodedMeasurePage) encoder.encode(measurePage[i]);
     }
     return encodedMeasures;
   }
 
-  private IndexStorage encodeAndCompressDictDimension(byte[][] data, boolean isSort,
-      boolean isUseInvertedIndex, boolean isRleApplicable) throws KeyGenException {
-    if (isUseInvertedIndex) {
-      if (version == ColumnarFormatVersion.V3) {
-        return new BlockIndexerStorageForShort(data, isRleApplicable, false, isSort);
-      } else {
-        return new BlockIndexerStorageForInt(data, isRleApplicable, false, isSort);
-      }
-    } else {
-      if (version == ColumnarFormatVersion.V3) {
-        return new BlockIndexerStorageForNoInvertedIndexForShort(data, false);
-      } else {
-        return new BlockIndexerStorageForNoInvertedIndexForInt(data);
-      }
-    }
-  }
-
-  private IndexStorage encodeAndCompressDirectDictDimension(byte[][] data, boolean isSort,
-      boolean isUseInvertedIndex, boolean isRleApplicable) throws KeyGenException {
-    if (isUseInvertedIndex) {
-      if (version == ColumnarFormatVersion.V3) {
-        return new BlockIndexerStorageForShort(data, isRleApplicable, false, isSort);
-      } else {
-        return new BlockIndexerStorageForInt(data, isRleApplicable, false, isSort);
-      }
-    } else {
-      if (version == ColumnarFormatVersion.V3) {
-        return new BlockIndexerStorageForNoInvertedIndexForShort(data, false);
-      } else {
-        return new BlockIndexerStorageForNoInvertedIndexForInt(data);
-      }
-    }
-  }
-
-  private IndexStorage encodeAndCompressComplexDimension(byte[][] data) {
-    if (version == ColumnarFormatVersion.V3) {
-      return new BlockIndexerStorageForShort(data, false, false, false);
-    } else {
-      return new BlockIndexerStorageForInt(data, false, false, false);
-    }
-  }
-
-  private IndexStorage encodeAndCompressNoDictDimension(byte[][] data, boolean isSort,
-      boolean isUseInvertedIndex, boolean isRleApplicable) {
-    if (isUseInvertedIndex) {
-      if (version == ColumnarFormatVersion.V3) {
-        return new BlockIndexerStorageForShort(data, isRleApplicable, true, isSort);
-      } else {
-        return new BlockIndexerStorageForInt(data, isRleApplicable, true, isSort);
-      }
-    } else {
-      if (version == ColumnarFormatVersion.V3) {
-        return new BlockIndexerStorageForNoInvertedIndexForShort(data, true);
-      } else {
-        return new BlockIndexerStorageForNoInvertedIndexForInt(data);
-      }
-    }
-  }
-
   // encode and compress each dimension page and return the encoded dimension pages
   private EncodedDimensionPage[] encodeAndCompressDimensions()
-      throws KeyGenException {
-    TableSpec.DimensionSpec dimensionSpec = model.getTableSpec().getDimensionSpec();
-    int dictionaryColumnCount = -1;
-    int noDictionaryColumnCount = -1;
-    int indexStorageOffset = 0;
-    IndexStorage[] indexStorages = new IndexStorage[dimensionSpec.getNumExpandedDimensions()];
-    Compressor compressor = CompressorFactory.getInstance().getCompressor();
-    EncodedDimensionPage[] compressedColumns = new EncodedDimensionPage[indexStorages.length];
-    boolean[] isUseInvertedIndex = model.getIsUseInvertedIndex();
-    for (int i = 0; i < dimensionSpec.getNumSimpleDimensions(); i++) {
-      ColumnPage page;
-      byte[] flattened;
-      boolean isSortColumn = model.isSortColumn(i);
-      switch (dimensionSpec.getType(i)) {
+      throws KeyGenException, IOException, MemoryException {
+    List<EncodedDimensionPage> encodedDimensions = new ArrayList<>();
+    List<EncodedDimensionPage> encodedComplexDimensions = new ArrayList<>();
+    TableSpec tableSpec = model.getTableSpec();
+    int dictIndex = 0;
+    int noDictIndex = 0;
+    int complexDimIndex = 0;
+    int numDimensions = tableSpec.getNumDimensions();
+    for (int i = 0; i < numDimensions; i++) {
+      ColumnPageCodec codec;
+      EncodedDimensionPage encodedPage;
+      TableSpec.DimensionSpec spec = tableSpec.getDimensionSpec(i);
+      switch (spec.getDimensionType()) {
         case GLOBAL_DICTIONARY:
-          // dictionary dimension
-          page = dictDimensionPage[++dictionaryColumnCount];
-          indexStorages[indexStorageOffset] = encodeAndCompressDictDimension(
-              page.getByteArrayPage(),
-              isSortColumn,
-              isUseInvertedIndex[i] & isSortColumn,
-              CarbonDataProcessorUtil.isRleApplicableForColumn(dimensionSpec.getType(i)));
-          flattened = ByteUtil.flatten(indexStorages[indexStorageOffset].getDataPage());
-          break;
         case DIRECT_DICTIONARY:
-          // timestamp and date column
-          page = dictDimensionPage[++dictionaryColumnCount];
-          indexStorages[indexStorageOffset] = encodeAndCompressDirectDictDimension(
-              page.getByteArrayPage(),
-              isSortColumn,
-              isUseInvertedIndex[i] & isSortColumn,
-              CarbonDataProcessorUtil.isRleApplicableForColumn(dimensionSpec.getType(i)));
-          flattened = ByteUtil.flatten(indexStorages[indexStorageOffset].getDataPage());
+          codec = encodingStrategy.newCodec(spec);
+          encodedPage = (EncodedDimensionPage) codec.encode(dictDimensionPages[dictIndex++]);
+          encodedDimensions.add(encodedPage);
           break;
         case PLAIN_VALUE:
-          // high cardinality dimension, encoded as plain string
-          page = noDictDimensionPage[++noDictionaryColumnCount];
-          indexStorages[indexStorageOffset] = encodeAndCompressNoDictDimension(
-              page.getByteArrayPage(),
-              isSortColumn,
-              isUseInvertedIndex[i] & isSortColumn,
-              CarbonDataProcessorUtil.isRleApplicableForColumn(dimensionSpec.getType(i)));
-          flattened = ByteUtil.flatten(indexStorages[indexStorageOffset].getDataPage());
+          codec = encodingStrategy.newCodec(spec);
+          encodedPage = (EncodedDimensionPage) codec.encode(noDictDimensionPages[noDictIndex++]);
+          encodedDimensions.add(encodedPage);
           break;
         case COMPLEX:
-          // we need to add complex column at last, so skipping it here
-          continue;
-        default:
-          throw new RuntimeException("unsupported dimension type: " + dimensionSpec.getType(i));
+          codec = encodingStrategy.newCodec(spec);
+          EncodedColumnPage[] encodedPages = codec.encodeComplexColumn(
+              complexDimensionPages[complexDimIndex++]);
+          for (EncodedColumnPage page : encodedPages) {
+            encodedComplexDimensions.add((EncodedDimensionPage) page);
+          }
+          break;
+        default:
+          throw new RuntimeException("unsupported dimension type: " +
+              spec.getDimensionType());
       }
-      byte[] compressedData = compressor.compressByte(flattened);
-      compressedColumns[indexStorageOffset] = new EncodedDimensionPage(
-          pageSize, compressedData, indexStorages[indexStorageOffset], dimensionSpec.getType(i));
-      SimpleStatsResult stats = (SimpleStatsResult) page.getStatistics();
-      compressedColumns[indexStorageOffset].setNullBitSet(stats.getNullBits());
-      indexStorageOffset++;
     }
 
-    // handle complex type column
-    for (int i = 0; i < dimensionSpec.getNumComplexDimensions(); i++) {
-      Iterator<byte[][]> iterator = complexDimensionPage[i].iterator();
-      while (iterator.hasNext()) {
-        byte[][] data = iterator.next();
-        indexStorages[indexStorageOffset] = encodeAndCompressComplexDimension(data);
-        byte[] flattened = ByteUtil.flatten(data);
-        byte[] compressedData = compressor.compressByte(flattened);
-        compressedColumns[indexStorageOffset] = new EncodedDimensionPage(
-            pageSize, compressedData, indexStorages[indexStorageOffset], DimensionType.COMPLEX);
-        indexStorageOffset++;
-      }
-    }
-    return compressedColumns;
+    encodedDimensions.addAll(encodedComplexDimensions);
+    return encodedDimensions.toArray(new EncodedDimensionPage[encodedDimensions.size()]);
   }
 }
 

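One invariant worth keeping in mind when reading the rewritten loop: each
simple dimension contributes exactly one encoded page, while a complex
dimension contributes one page per depth level, and all complex pages are
appended after the simple ones. As an illustrative check (not in the commit;
getDepth() and getNumSimpleDimensions() are shown elsewhere in this diff):

    int expected = tableSpec.getNumSimpleDimensions();
    for (ComplexColumnPage complexPage : complexDimensionPages) {
      expected += complexPage.getDepth();
    }
    // 'expected' equals the length of the array returned by
    // encodeAndCompressDimensions()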
http://git-wip-us.apache.org/repos/asf/carbondata/blob/a5af0ff2/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
index 815c752..b46a42c 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
@@ -488,6 +488,26 @@ public final class CarbonDataProcessorUtil {
     return sortScope;
   }
 
+  public static SortScopeOptions.SortScope getSortScope(String sortScopeString) {
+    SortScopeOptions.SortScope sortScope;
+    try {
+      // first check whether the user specified it in the DDL, otherwise get it from carbon properties
+      if (sortScopeString == null) {
+        sortScope = SortScopeOptions.getSortScope(CarbonProperties.getInstance()
+            .getProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
+                CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT));
+      } else {
+        sortScope = SortScopeOptions.getSortScope(sortScopeString);
+      }
+      LOGGER.warn("sort scope is set to " + sortScope);
+    } catch (Exception e) {
+      sortScope = SortScopeOptions.getSortScope(CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT);
+      LOGGER.warn("Exception occured while resolving sort scope. " +
+          "sort scope is set to " + sortScope);
+    }
+    return sortScope;
+  }
+
   /**
    * Get the batch sort size
    * @param configuration