You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by gv...@apache.org on 2018/06/08 11:40:31 UTC

[06/50] [abbrv] carbondata git commit: [CARBONDATA-2489] Coverity scan fixes

[CARBONDATA-2489] Coverity scan fixes

  https://scan4.coverity.com/reports.htm#v29367/p11911

  This closes #2313


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/7ef91645
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/7ef91645
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/7ef91645

Branch: refs/heads/spark-2.3
Commit: 7ef916455d8b490f3e32efd3a0bfeb80ab9127f1
Parents: f184de8
Author: Raghunandan S <ca...@gmail.com>
Authored: Sun Aug 27 23:37:05 2017 +0530
Committer: Venkata Ramana G <ra...@huawei.com>
Committed: Mon May 21 14:02:24 2018 +0530

----------------------------------------------------------------------
 README.md                                       |  21 +-
 .../impl/ExtendedRollingFileAppender.java       |   9 +-
 .../cache/dictionary/ColumnDictionaryInfo.java  |   3 +
 .../dictionary/DoubleArrayTrieDictionary.java   | 387 ---------------
 .../dictionary/ForwardDictionaryCache.java      |   4 +-
 .../core/constants/CarbonCommonConstants.java   |   2 +-
 .../core/datamap/AbstractDataMapJob.java        |   9 -
 .../carbondata/core/datamap/DataMapChooser.java |   4 +-
 .../core/datamap/DataMapStoreManager.java       |   7 +-
 .../core/datamap/dev/BlockletSerializer.java    |  14 +-
 .../chunk/impl/AbstractDimensionColumnPage.java |   8 +
 .../impl/ColumnGroupDimensionColumnPage.java    | 194 --------
 .../impl/FixedLengthDimensionColumnPage.java    |   5 +-
 .../impl/VariableLengthDimensionColumnPage.java |   5 +-
 ...mpressedDimensionChunkFileBasedReaderV1.java |  12 +-
 ...mpressedDimensionChunkFileBasedReaderV2.java |  12 +-
 ...mpressedDimensionChunkFileBasedReaderV3.java |   4 +-
 .../AbstractMeasureChunkReaderV2V3Format.java   |  12 +-
 .../core/datastore/compression/Compressor.java  |  10 -
 .../datastore/compression/SnappyCompressor.java |  74 +--
 .../filesystem/AbstractDFSCarbonFile.java       |   4 +-
 .../impl/btree/AbstractBTreeLeafNode.java       |   2 +-
 .../datastore/page/UnsafeDecimalColumnPage.java |  20 +-
 .../page/UnsafeFixLengthColumnPage.java         |  70 +--
 .../page/encoding/EncodingFactory.java          |   3 +
 .../page/statistics/KeyPageStatsCollector.java  |  17 +-
 .../page/statistics/LVStringStatsCollector.java |  21 +-
 .../IncrementalColumnDictionaryGenerator.java   |   4 +-
 .../generator/TableDictionaryGenerator.java     |   8 +-
 .../blockletindex/BlockletDataMapFactory.java   |  12 +-
 .../blockletindex/SegmentIndexFileStore.java    |  15 +-
 .../carbondata/core/locks/ZookeeperInit.java    |  10 +-
 .../core/memory/UnsafeMemoryManager.java        |  11 +-
 .../core/metadata/datatype/ArrayType.java       |  34 +-
 .../core/metadata/datatype/DecimalType.java     |  31 ++
 .../core/metadata/datatype/StructType.java      |  25 +
 .../schema/table/column/ColumnSchema.java       |  38 +-
 .../carbondata/core/preagg/TimeSeriesUDF.java   |   2 +-
 .../CarbonDictionaryMetadataReaderImpl.java     |   6 +
 .../RestructureBasedRawResultCollector.java     |   2 +-
 .../impl/SearchModeDetailQueryExecutor.java     |   4 +-
 .../SearchModeVectorDetailQueryExecutor.java    |   4 +-
 .../scan/filter/FilterExpressionProcessor.java  |   6 +-
 .../carbondata/core/scan/filter/FilterUtil.java |  89 ++--
 .../ExcludeColGroupFilterExecuterImpl.java      |  48 --
 .../IncludeColGroupFilterExecuterImpl.java      | 232 ---------
 .../executer/RangeValueFilterExecuterImpl.java  |   2 +-
 .../executer/RestructureEvaluatorImpl.java      |   1 -
 .../executer/RowLevelFilterExecuterImpl.java    |  20 +-
 .../RowLevelRangeGrtThanFiterExecuterImpl.java  |   7 +-
 ...elRangeGrtrThanEquaToFilterExecuterImpl.java |   4 +-
 ...velRangeLessThanEqualFilterExecuterImpl.java |   4 +-
 ...RowLevelRangeLessThanFilterExecuterImpl.java |   4 +-
 .../resolver/ConditionalFilterResolverImpl.java |   1 -
 .../AbstractDetailQueryResultIterator.java      |   9 +-
 .../scanner/impl/BlockletFilterScanner.java     |   4 +
 .../core/statusmanager/LoadMetadataDetails.java |   8 +-
 .../SegmentUpdateStatusManager.java             |  60 +--
 .../util/AbstractDataFileFooterConverter.java   |   7 +-
 .../core/util/CarbonMetadataUtil.java           |   2 +-
 .../carbondata/core/util/DataTypeUtil.java      |  12 +-
 .../core/util/path/CarbonTablePath.java         |  14 +-
 .../impl/ColumnGroupDimensionDataChunkTest.java | 118 -----
 .../filesystem/AlluxioCarbonFileTest.java       |   3 +
 .../filesystem/ViewFsCarbonFileTest.java        |   9 +-
 .../apache/carbondata/hadoop/CacheClient.java   |  49 --
 .../hadoop/api/CarbonOutputCommitter.java       |   3 +
 .../hadoop/internal/segment/Segment.java        |  23 -
 .../hive/CarbonDictionaryDecodeReadSupport.java |  11 +-
 .../hive/MapredCarbonInputFormat.java           |  18 +-
 .../presto/CarbondataSplitManager.java          |  27 +-
 .../presto/impl/CarbonLocalInputSplit.java      |  14 +-
 .../presto/readers/BooleanStreamReader.java     |   6 +-
 .../presto/readers/DoubleStreamReader.java      |   8 +-
 .../presto/readers/IntegerStreamReader.java     |   8 +-
 .../presto/readers/LongStreamReader.java        |   8 +-
 .../presto/readers/ObjectStreamReader.java      |  16 +-
 .../presto/readers/ShortStreamReader.java       |  10 +-
 .../presto/readers/SliceStreamReader.java       |   8 +-
 .../presto/readers/TimestampStreamReader.java   |  11 +-
 .../spark/sql/common/util/QueryTest.scala       |   4 +-
 .../server/SecureDictionaryServer.java          |   4 +-
 .../spark/rdd/CarbonCleanFilesRDD.scala         |  83 ----
 .../spark/rdd/CarbonDeleteLoadByDateRDD.scala   |  89 ----
 .../spark/rdd/CarbonDeleteLoadRDD.scala         |  84 ----
 .../spark/rdd/CarbonDropTableRDD.scala          |  71 ---
 .../apache/spark/sql/test/util/QueryTest.scala  |   9 +
 .../VectorizedCarbonRecordReader.java           |  24 +-
 .../processing/loading/BadRecordsLogger.java    |  19 +-
 .../loading/sort/impl/ThreadStatusObserver.java |  15 +-
 .../UnsafeBatchParallelReadMergeSorterImpl.java |   4 +-
 .../loading/sort/unsafe/UnsafeSortDataRows.java |   3 +-
 .../UnsafeInMemoryIntermediateDataMerger.java   |   7 +-
 .../merger/UnsafeIntermediateFileMerger.java    |   8 +-
 .../unsafe/merger/UnsafeIntermediateMerger.java |  20 +-
 .../UnsafeSingleThreadFinalSortFilesMerger.java |   9 +-
 .../loading/steps/InputProcessorStepImpl.java   |   4 +
 .../processing/merger/CarbonDataMergerUtil.java |  82 ++--
 .../merger/RowResultMergerProcessor.java        |  32 +-
 .../partition/impl/QueryPartitionHelper.java    |  74 ---
 .../sort/sortdata/IntermediateFileMerger.java   |   7 +-
 .../SingleThreadFinalSortFilesMerger.java       |   9 +-
 .../store/writer/AbstractFactDataWriter.java    |  32 +-
 .../processing/util/CarbonQueryUtil.java        |  80 ----
 .../carbondata/processing/StoreCreator.java     | 469 -------------------
 .../sdk/file/CarbonReaderBuilder.java           |   1 +
 .../carbondata/store/LocalCarbonStore.java      |  34 +-
 .../store/worker/SearchRequestHandler.java      |   3 +-
 .../streaming/CarbonStreamRecordReader.java     |   3 -
 109 files changed, 686 insertions(+), 2620 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index 4b4577e..be3186c 100644
--- a/README.md
+++ b/README.md
@@ -1,17 +1,17 @@
 <!--
-    Licensed to the Apache Software Foundation (ASF) under one or more 
+    Licensed to the Apache Software Foundation (ASF) under one or more
     contributor license agreements.  See the NOTICE file distributed with
-    this work for additional information regarding copyright ownership. 
+    this work for additional information regarding copyright ownership.
     The ASF licenses this file to you under the Apache License, Version 2.0
-    (the "License"); you may not use this file except in compliance with 
+    (the "License"); you may not use this file except in compliance with
     the License.  You may obtain a copy of the License at
 
       http://www.apache.org/licenses/LICENSE-2.0
 
-    Unless required by applicable law or agreed to in writing, software 
-    distributed under the License is distributed on an "AS IS" BASIS, 
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    See the License for the specific language governing permissions and 
+    See the License for the specific language governing permissions and
     limitations under the License.
 -->
 
@@ -31,6 +31,15 @@ Visit count: [![HitCount](http://hits.dwyl.io/jackylk/apache/carbondata.svg)](ht
 Spark2.2:
 [![Build Status](https://builds.apache.org/buildStatus/icon?job=carbondata-master-spark-2.2)](https://builds.apache.org/view/A-D/view/CarbonData/job/carbondata-master-spark-2.2/lastBuild/testReport)
 [![Coverage Status](https://coveralls.io/repos/github/apache/carbondata/badge.svg?branch=master)](https://coveralls.io/github/apache/carbondata?branch=master)
+<a href="https://scan.coverity.com/projects/carbondata">
+  <img alt="Coverity Scan Build Status"
+       src="https://scan.coverity.com/projects/13444/badge.svg"/>
+</a>
+## Features
+CarbonData file format is a columnar store in HDFS. It has many features that a modern columnar format has, such as splittability, compression schemes, complex data types, etc., and CarbonData has the following unique features:
+* Stores data along with index: it can significantly accelerate query performance and reduce the I/O scans and CPU resources when there are filters in the query. CarbonData index consists of multiple levels of indices; a processing framework can leverage this index to reduce the tasks it needs to schedule and process, and it can also do skip scans at a finer-grained unit (called a blocklet) in task-side scanning instead of scanning the whole file.
+* Operable encoded data: through efficient compression and global encoding schemes, queries can run directly on compressed/encoded data; the data is converted only just before returning the results to the users, i.e. it is "late materialized".
+* Support for various use cases with one single data format: e.g. interactive OLAP-style queries, sequential access (big scans), and random access (narrow scans).
 
 ## Building CarbonData
 CarbonData is built using Apache Maven, to [build CarbonData](https://github.com/apache/carbondata/blob/master/build)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java b/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java
index 828dd14..089865b 100644
--- a/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java
+++ b/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java
@@ -50,7 +50,7 @@ public class ExtendedRollingFileAppender extends RollingFileAppender {
    */
 
   private long nextRollover = 0;
-  private boolean cleanupInProgress = false;
+  private volatile boolean cleanupInProgress = false;
 
   /**
    * Total number of files at any point of time should be Backup number of
@@ -195,7 +195,9 @@ public class ExtendedRollingFileAppender extends RollingFileAppender {
     }
 
     // Do clean up finally
-    cleanUpLogs(startName, folderPath);
+    if (!cleanupInProgress) {
+      cleanUpLogs(startName, folderPath);
+    }
   }
 
   private void cleanUpLogs(final String startName, final String folderPath) {
@@ -204,9 +206,6 @@ public class ExtendedRollingFileAppender extends RollingFileAppender {
       Runnable r = new Runnable() {
 
         public void run() {
-          if (cleanupInProgress) {
-            return;
-          }
           synchronized (ExtendedRollingFileAppender.class) {
             cleanupInProgress = true;
             try {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
index 3b915e0..ad1d201 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
@@ -193,6 +193,9 @@ public class ColumnDictionaryInfo extends AbstractColumnDictionaryInfo {
       int mid = (low + high) >>> 1;
       int surrogateKey = sortedSurrogates.get(mid);
       byte[] dictionaryValue = getDictionaryBytesFromSurrogate(surrogateKey);
+      if (null == dictionaryValue) {
+        return CarbonCommonConstants.INVALID_SURROGATE_KEY;
+      }
       int cmp = -1;
       if (this.getDataType() != DataTypes.STRING) {
         cmp = compareFilterKeyWithDictionaryKey(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
deleted file mode 100644
index ef36d7a..0000000
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
+++ /dev/null
@@ -1,387 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.cache.dictionary;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.nio.charset.Charset;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-
-/**
- * A dictionary based on DoubleArrayTrie data structure that maps enumerations
- * of byte[] to int IDs. With DoubleArrayTrie the memory footprint of the mapping
- * is minimize,d if compared to HashMap.
- * This DAT implementation is inspired by https://linux.thai.net/~thep/datrie/datrie.html
- */
-
-public class DoubleArrayTrieDictionary {
-  private static final byte[] HEAD_MAGIC = new byte[]{
-      0x44, 0x41, 0x54, 0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74
-  }; // "DATTrieDict"
-  private static final int HEAD_LEN = HEAD_MAGIC.length;
-
-  private static final int INIT_CAPA_VALUE = 256;  // init len of double array
-  private static final int BASE_ROOT_VALUE = 1;    // root base value of trie root
-  private static final int CHCK_ROOT_VALUE = -1;   // root check value of trie root
-  private static final int UUSD_ROOM_VALUE = -2;   // unused position, only for zero
-  private static final int EPTY_BACK_VALUE = 0;    // value of empty position
-
-  private static final int ENCODE_BASE_VALUE = 10; // encode start number
-
-  private int[] base;
-  private int[] check;
-  private int size;
-  private int capacity;
-
-  private int id = ENCODE_BASE_VALUE;
-
-  public DoubleArrayTrieDictionary() {
-    base = new int[INIT_CAPA_VALUE];
-    check = new int[INIT_CAPA_VALUE];
-    capacity = INIT_CAPA_VALUE;
-    base[0] = UUSD_ROOM_VALUE;
-    check[0] = UUSD_ROOM_VALUE;
-    base[1] = BASE_ROOT_VALUE;
-    check[1] = CHCK_ROOT_VALUE;
-    size = 2;
-  }
-
-  private void init(int capacity, int size, int[] base, int[] check) {
-    int blen = base.length;
-    int clen = check.length;
-    if (capacity < size || size < 0 || blen != clen) {
-      throw new IllegalArgumentException("Illegal init parameters");
-    }
-    this.base = new int[capacity];
-    this.check = new int[capacity];
-    this.capacity = capacity;
-    System.arraycopy(base, 0, this.base, 0, blen);
-    System.arraycopy(check, 0, this.check, 0, clen);
-    this.size = size;
-  }
-
-  public void clear() {
-    base = null;
-    check = null;
-    size = 0;
-    capacity = 0;
-  }
-
-  private int reSize(int newCapacity) {
-    if (newCapacity < capacity) {
-      return capacity;
-    }
-    int[] newBase = new int[newCapacity];
-    int[] newCheck = new int[newCapacity];
-    if (capacity > 0) {
-      System.arraycopy(base, 0, newBase, 0, capacity);
-      System.arraycopy(check, 0, newCheck, 0, capacity);
-    }
-    base = newBase;
-    check = newCheck;
-    capacity = newCapacity;
-    return capacity;
-  }
-
-  public int getSize() {
-    return size;
-  }
-
-  public int getCapacity() {
-    return capacity;
-  }
-
-  /**
-   * Get apply value of key
-   *
-   * @param key
-   * @return
-   */
-  public int getValue(String key) {
-    String k = key + '\0';
-    byte[] bKeys = k.getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
-    return getValue(bKeys);
-  }
-
-  /**
-   * Get apply value of bKeys
-   *
-   * @param bKeys
-   * @return
-   */
-  private int getValue(byte[] bKeys) {
-    int from = 1;
-    int to;
-    int current;
-    int len = bKeys.length;
-    if (size == 0) return -1;
-    for (int i = 0; i < len; i++) {
-      current = bKeys[i] & 0xFF;
-      to = base[from] + current;
-      if (check[to] != from) return -1;
-      int baseValue = base[to];
-      if (baseValue <= -ENCODE_BASE_VALUE) {
-        if (i == len - 1) {
-          return -1 * baseValue;
-        } else {
-          return -1;
-        }
-      }
-      from = to;
-
-    }
-    return -1;
-  }
-
-  /**
-   * Get all children of one node
-   *
-   * @param pos
-   * @return
-   */
-  private TreeSet<Integer> getChildren(int pos) {
-    TreeSet<Integer> children = new TreeSet<Integer>();
-    for (int i = 0; i < 0xFF; i++) {
-      int cpos = base[pos] + i;
-      if (cpos >= size) break;
-      if (cpos < 0) {
-        return null;
-      }
-      if (check[cpos] == pos) {
-        children.add(i);
-      }
-    }
-    return children;
-  }
-
-  /**
-   * @TODO: need to optimize performance
-   *
-   * Find multiple free position for {values}
-   * the distance between free position should be as same as {values}
-   *
-   * @param values
-   * @return
-   */
-  private int findFreeRoom(SortedSet<Integer> values) {
-    int min = values.first();
-    int max = values.last();
-    for (int i = min + 1; i < capacity; i++) {
-      if (i + max >= capacity) {
-        reSize(capacity + values.size());
-      }
-      int res = 0;
-      for (Integer v : values) {
-        res = res | base[v - min + i];
-      }
-      if (res == EPTY_BACK_VALUE) return i - min;
-    }
-    return -1;
-  }
-
-  /**
-   * Find one empty position for value
-   *
-   * @param value
-   * @return
-   */
-  private int findAvailableHop(int value) {
-    reSize(size + 1);
-    int result = size - 1;
-    for (int i = value + 1; i < capacity; i++) {
-      if (base[i] == EPTY_BACK_VALUE) {
-        result = i - value;
-        break;
-      }
-    }
-    return result;
-  }
-
-  /**
-   * Resolve when conflict and reset current node and its children.
-   *
-   * @param start current conflict position
-   * @param bKey current byte value which for processing
-   * @return
-   */
-  private int conflict(int start, int bKey) {
-    int from = start;
-    TreeSet<Integer> children = getChildren(from);
-    children.add(bKey);
-    int newBasePos = findFreeRoom(children);
-    children.remove(bKey);
-
-    int oldBasePos = base[start];
-    base[start] = newBasePos;
-
-    int oldPos, newPos;
-    for (Integer child : children) {
-      oldPos = oldBasePos + child;
-      newPos = newBasePos + child;
-      if (oldPos == from) from = newPos;
-      base[newPos] = base[oldPos];
-      check[newPos] = check[oldPos];
-      if (newPos >= size) size = newPos + 1;
-      if (base[oldPos] > 0) {
-        TreeSet<Integer> cs = getChildren(oldPos);
-        for (Integer c : cs) {
-          check[base[oldPos] + c] = newPos;
-        }
-      }
-      base[oldPos] = EPTY_BACK_VALUE;
-      check[oldPos] = EPTY_BACK_VALUE;
-    }
-    return from;
-  }
-
-  /**
-   * Insert element (byte[]) into DAT.
-   * 1. if the element has been DAT then return.
-   * 2. if position which is empty then insert directly.
-   * 3. if conflict then resolve it.
-   *
-   * @param bKeys
-   * @return
-   */
-  private boolean insert(byte[] bKeys) {
-    int from = 1;
-    int klen = bKeys.length;
-    for (int i = 0; i < klen; i++) {
-      int c = bKeys[i] & 0xFF;
-      int to = base[from] + c;
-      reSize((int) (to * 1.2) + 1);
-      if (check[to] == from) {
-        if (i == klen - 1) return true;
-        from = to;
-      } else if (check[to] == EPTY_BACK_VALUE) {
-        check[to] = from;
-        if (i == klen - 1) {
-          base[to] = -id;
-          id = id + 1;
-          return true;
-        } else {
-          int next = bKeys[i + 1] & 0xFF;
-          base[to] = findAvailableHop(next);
-          from = to;
-        }
-        if (to >= size) size = to + 1;
-      } else {
-        int rConflict = conflict(from, c);
-        int locate = base[rConflict] + c;
-        if (check[locate] != EPTY_BACK_VALUE) {
-          System.err.println("conflict");
-        }
-        check[locate] = rConflict;
-        if (i == klen - 1) {
-          base[locate] = -id;
-          id = id + 1;
-        } else {
-          int nah = bKeys[i + 1] & 0xFF;
-          base[locate] = findAvailableHop(nah);
-        }
-        if (locate >= size) size = locate + 1;
-        from = locate;
-        if (i == klen - 1) {
-          return true;
-        }
-      }
-    }
-    return false;
-  }
-
-  /**
-   * Insert element (String) into DAT, the element will be transformed to
-   * byte[] firstly then insert into DAT.
-   *
-   * @param key
-   * @return
-   */
-  public boolean insert(String key) {
-    String k = key + '\0';
-    byte[] bKeys = k.getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
-    if (!insert(bKeys)) {
-      return false;
-    }
-    return true;
-  }
-
-  /**
-   * Serialize the DAT to data output stream
-   *
-   * @param out
-   * @throws IOException
-   */
-  public void write(DataOutputStream out) throws IOException {
-    out.write(HEAD_MAGIC);
-    out.writeInt(capacity);
-    out.writeInt(size);
-    for (int i = 0; i < size; i++) {
-      out.writeInt(base[i]);
-    }
-    for (int i = 0; i < size; i++) {
-      out.writeInt(check[i]);
-    }
-  }
-
-  /**
-   * Deserialize the DAT from data input stream
-   *
-   * @param in
-   * @throws IOException
-   */
-  public void read(DataInputStream in) throws IOException {
-    byte[] header = new byte[HEAD_LEN];
-    in.readFully(header);
-    int comp = 0;
-    for (int i = 0; i < HEAD_LEN; i++) {
-      comp = HEAD_MAGIC[i] - header[i];
-      if (comp != 0) break;
-    }
-    if (comp != 0) throw new IllegalArgumentException("Illegal file type");
-    int capacity = in.readInt();
-    int size = in.readInt();
-    if (capacity < size || size < 0) throw new IllegalArgumentException("Illegal parameters");
-    int[] base = new int[size];
-    int[] check = new int[size];
-    for (int i = 0; i < size; i++) {
-      base[i] = in.readInt();
-    }
-    for (int i = 0; i < size; i++) {
-      check[i] = in.readInt();
-    }
-    init(capacity, size, base, check);
-  }
-
-  /**
-   * Dump double array value about Trie
-   */
-  public void dump(PrintStream out) {
-    out.println("Capacity = " + capacity + ", Size = " + size);
-    for (int i = 0; i < size; i++) {
-      if (base[i] != EPTY_BACK_VALUE) {
-        out.print(i + ":[" + base[i] + "," + check[i] + "], ");
-      }
-    }
-    out.println();
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
index f43e21b..55a1c05 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
@@ -19,10 +19,10 @@ package org.apache.carbondata.core.cache.dictionary;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
@@ -51,7 +51,7 @@ public class ForwardDictionaryCache<K extends
       LogServiceFactory.getLogService(ForwardDictionaryCache.class.getName());
 
   private static final Map<DictionaryColumnUniqueIdentifier, Object> DICTIONARY_LOCK_OBJECT =
-      new HashMap<>();
+      new ConcurrentHashMap<>();
 
   private static final long sizeOfEmptyDictChunks =
       ObjectSizeCalculator.estimate(new ArrayList<byte[]>(CarbonUtil.getDictionaryChunkSize()), 16);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index f3a821b..5ba1fec 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -1003,7 +1003,7 @@ public final class CarbonCommonConstants {
   /**
    * Indicates alter partition
    */
-  public static String ALTER_PARTITION_KEY_WORD = "ALTER_PARTITION";
+  public static final String ALTER_PARTITION_KEY_WORD = "ALTER_PARTITION";
 
   /**
    * hdfs temporary directory key

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java b/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
index bdbf9fc..7d1cb48 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
@@ -17,12 +17,8 @@
 
 package org.apache.carbondata.core.datamap;
 
-import java.util.List;
-
 import org.apache.carbondata.core.indexstore.BlockletDataMapIndexWrapper;
-import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
 
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 
@@ -34,9 +30,4 @@ public abstract class AbstractDataMapJob implements DataMapJob {
   @Override public void execute(CarbonTable carbonTable,
       FileInputFormat<Void, BlockletDataMapIndexWrapper> format) {
   }
-
-  @Override public List<ExtendedBlocklet> execute(DistributableDataMapFormat dataMapFormat,
-      FilterResolverIntf resolverIntf) {
-    return null;
-  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java
index 4d1c718..cf5dffd 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java
@@ -283,8 +283,8 @@ public class DataMapChooser {
       List<ColumnExpression> columnExpressions, Set<ExpressionType> expressionTypes) {
     List<DataMapTuple> tuples = new ArrayList<>();
     for (TableDataMap dataMap : allDataMap) {
-      if (contains(dataMap.getDataMapFactory().getMeta(), columnExpressions, expressionTypes))
-      {
+      if (null != dataMap.getDataMapFactory().getMeta() && contains(
+          dataMap.getDataMapFactory().getMeta(), columnExpressions, expressionTypes)) {
         tuples.add(
             new DataMapTuple(dataMap.getDataMapFactory().getMeta().getIndexedColumns().size(),
                 dataMap));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
index c739dc3..1359e85 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
@@ -207,8 +207,8 @@ public final class DataMapStoreManager {
    * @param providerName
    * @return
    */
-  public DataMapCatalog getDataMapCatalog(DataMapProvider dataMapProvider, String providerName)
-      throws IOException {
+  public synchronized DataMapCatalog getDataMapCatalog(DataMapProvider dataMapProvider,
+      String providerName) throws IOException {
     intializeDataMapCatalogs(dataMapProvider);
     return dataMapCatalogs.get(providerName);
   }
@@ -225,6 +225,9 @@ public final class DataMapStoreManager {
         DataMapCatalog dataMapCatalog = dataMapCatalogs.get(schema.getProviderName());
         if (dataMapCatalog == null) {
           dataMapCatalog = dataMapProvider.createDataMapCatalog();
+          if (null == dataMapCatalog) {
+            throw new RuntimeException("Internal Error.");
+          }
           dataMapCatalogs.put(schema.getProviderName(), dataMapCatalog);
         }
         try {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java
index bd5f994..ebcf972 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java
@@ -41,8 +41,11 @@ public class BlockletSerializer {
       throws IOException {
     DataOutputStream dataOutputStream =
         FileFactory.getDataOutputStream(writePath, FileFactory.getFileType(writePath));
-    grainBlocklet.write(dataOutputStream);
-    dataOutputStream.close();
+    try {
+      grainBlocklet.write(dataOutputStream);
+    } finally {
+      dataOutputStream.close();
+    }
   }
 
   /**
@@ -55,8 +58,11 @@ public class BlockletSerializer {
     DataInputStream inputStream =
         FileFactory.getDataInputStream(writePath, FileFactory.getFileType(writePath));
     FineGrainBlocklet blocklet = new FineGrainBlocklet();
-    blocklet.readFields(inputStream);
-    inputStream.close();
+    try {
+      blocklet.readFields(inputStream);
+    } finally {
+      inputStream.close();
+    }
     return blocklet;
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
index 91e55dc..d400952 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
@@ -29,6 +29,14 @@ public abstract class AbstractDimensionColumnPage implements DimensionColumnPage
    */
   DimensionDataChunkStore dataChunkStore;
 
+
+  /**
+   * @return whether data is explicitly sorted or not
+   */
+  protected boolean isExplicitSorted(int[] invertedIndex) {
+    return (null == invertedIndex || 0 == invertedIndex.length) ? false : true;
+  }
+
   /**
    * @return whether columns where explicitly sorted or not
    */

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionColumnPage.java
deleted file mode 100644
index 741c13d..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionColumnPage.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.datastore.chunk.impl;
-
-import org.apache.carbondata.core.datastore.chunk.store.DimensionChunkStoreFactory;
-import org.apache.carbondata.core.datastore.chunk.store.DimensionChunkStoreFactory.DimensionStoreType;
-import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
-
-/**
- * This class is gives access to column group dimension data chunk store
- */
-public class ColumnGroupDimensionColumnPage extends AbstractDimensionColumnPage {
-
-  /**
-   * Constructor for this class
-   *
-   * @param dataChunk       data chunk
-   * @param columnValueSize chunk attributes
-   * @param numberOfRows
-   */
-  public ColumnGroupDimensionColumnPage(byte[] dataChunk, int columnValueSize, int numberOfRows) {
-    this.dataChunkStore = DimensionChunkStoreFactory.INSTANCE
-        .getDimensionChunkStore(columnValueSize, false, numberOfRows, dataChunk.length,
-        DimensionStoreType.FIXEDLENGTH);
-    this.dataChunkStore.putArray(null, null, dataChunk);
-  }
-
-  /**
-   * Below method will be used to fill the data based on offset and row id
-   *
-   * @param rowId             row id of the chunk
-   * @param offset            offset from which data need to be filed
-   * @param data              data to filed
-   * @param restructuringInfo define the structure of the key
-   * @return how many bytes was copied
-   */
-  @Override public int fillRawData(int rowId, int offset, byte[] data,
-      KeyStructureInfo restructuringInfo) {
-    byte[] row = dataChunkStore.getRow(rowId);
-    byte[] maskedKey = getMaskedKey(row, restructuringInfo);
-    System.arraycopy(maskedKey, 0, data, offset, maskedKey.length);
-    return maskedKey.length;
-  }
-
-  /**
-   * Converts to column dictionary integer value
-   *
-   * @param rowId
-   * @param chunkIndex
-   * @param outputSurrogateKey
-   * @param info          KeyStructureInfo
-   * @return
-   */
-  @Override public int fillSurrogateKey(int rowId, int chunkIndex, int[] outputSurrogateKey,
-      KeyStructureInfo info) {
-    byte[] data = dataChunkStore.getRow(rowId);
-    long[] keyArray = info.getKeyGenerator().getKeyArray(data);
-    int[] ordinal = info.getMdkeyQueryDimensionOrdinal();
-    for (int i = 0; i < ordinal.length; i++) {
-      outputSurrogateKey[chunkIndex++] = (int) keyArray[ordinal[i]];
-    }
-    return chunkIndex;
-  }
-
-  /**
-   * Below method will be used to get the masked key
-   *
-   * @param data   data
-   * @param info
-   * @return
-   */
-  private byte[] getMaskedKey(byte[] data, KeyStructureInfo info) {
-    byte[] maskedKey = new byte[info.getMaskByteRanges().length];
-    int counter = 0;
-    int byteRange = 0;
-    for (int i = 0; i < info.getMaskByteRanges().length; i++) {
-      byteRange = info.getMaskByteRanges()[i];
-      maskedKey[counter++] = (byte) (data[byteRange] & info.getMaxKey()[byteRange]);
-    }
-    return maskedKey;
-  }
-
-  /**
-   * @return inverted index
-   */
-  @Override public int getInvertedIndex(int rowId) {
-    throw new UnsupportedOperationException("Operation not supported in case of cloumn group");
-  }
-
-  /**
-   * @param rowId
-   * @return inverted index reverse
-   */
-  @Override public int getInvertedReverseIndex(int rowId) {
-    throw new UnsupportedOperationException("Operation not supported in case of cloumn group");
-  }
-
-  /**
-   * @return whether columns where explictly sorted or not
-   */
-  @Override public boolean isExplicitSorted() {
-    return false;
-  }
-
-  /**
-   * to compare the data
-   *
-   * @param rowId        row index to be compared
-   * @param compareValue value to compare
-   * @return compare result
-   */
-  @Override public int compareTo(int rowId, byte[] compareValue) {
-    throw new UnsupportedOperationException("Operation not supported in case of cloumn group");
-  }
-
-  /**
-   * Fill the data to vector
-   *
-   * @param vectorInfo
-   * @param chunkIndex
-   * @param restructuringInfo
-   * @return next column index
-   */
-  @Override public int fillVector(ColumnVectorInfo[] vectorInfo, int chunkIndex,
-      KeyStructureInfo restructuringInfo) {
-    ColumnVectorInfo columnVectorInfo = vectorInfo[chunkIndex];
-    int offset = columnVectorInfo.offset;
-    int vectorOffset = columnVectorInfo.vectorOffset;
-    int len = offset + columnVectorInfo.size;
-    int[] ordinal = restructuringInfo.getMdkeyQueryDimensionOrdinal();
-    for (int k = offset; k < len; k++) {
-      long[] keyArray = restructuringInfo.getKeyGenerator().getKeyArray(dataChunkStore.getRow(k));
-      int index = 0;
-      for (int i = chunkIndex; i < chunkIndex + ordinal.length; i++) {
-        if (vectorInfo[i].directDictionaryGenerator == null) {
-          vectorInfo[i].vector.putInt(vectorOffset, (int) keyArray[ordinal[index++]]);
-        } else {
-          vectorInfo[i].vector.putLong(vectorOffset, (long) vectorInfo[i].directDictionaryGenerator
-              .getValueFromSurrogate((int) keyArray[ordinal[index++]]));
-        }
-      }
-      vectorOffset++;
-    }
-    return chunkIndex + ordinal.length;
-  }
-
-  /**
-   * Fill the data to vector
-   *
-   * @param filteredRowId
-   * @param vectorInfo
-   * @param chunkIndex
-   * @param restructuringInfo
-   * @return next column index
-   */
-  @Override public int fillVector(int[] filteredRowId, ColumnVectorInfo[] vectorInfo,
-      int chunkIndex, KeyStructureInfo restructuringInfo) {
-    ColumnVectorInfo columnVectorInfo = vectorInfo[chunkIndex];
-    int offset = columnVectorInfo.offset;
-    int vectorOffset = columnVectorInfo.vectorOffset;
-    int len = offset + columnVectorInfo.size;
-    int[] ordinal = restructuringInfo.getMdkeyQueryDimensionOrdinal();
-    for (int k = offset; k < len; k++) {
-      long[] keyArray =
-          restructuringInfo.getKeyGenerator().getKeyArray(dataChunkStore.getRow(filteredRowId[k]));
-      int index = 0;
-      for (int i = chunkIndex; i < chunkIndex + ordinal.length; i++) {
-        if (vectorInfo[i].directDictionaryGenerator == null) {
-          vectorInfo[i].vector.putInt(vectorOffset, (int) keyArray[ordinal[index++]]);
-        } else {
-          vectorInfo[i].vector.putLong(vectorOffset, (long) vectorInfo[i].directDictionaryGenerator
-              .getValueFromSurrogate((int) keyArray[ordinal[index++]]));
-        }
-      }
-      vectorOffset++;
-    }
-    return chunkIndex + ordinal.length;
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java
index ff54b12..76bcf30 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java
@@ -41,11 +41,12 @@ public class FixedLengthDimensionColumnPage extends AbstractDimensionColumnPage
    */
   public FixedLengthDimensionColumnPage(byte[] dataChunk, int[] invertedIndex,
       int[] invertedIndexReverse, int numberOfRows, int columnValueSize) {
-    long totalSize = null != invertedIndex ?
+    boolean isExplicitSorted = isExplicitSorted(invertedIndex);
+    long totalSize = isExplicitSorted ?
         dataChunk.length + (2 * numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE) :
         dataChunk.length;
     dataChunkStore = DimensionChunkStoreFactory.INSTANCE
-        .getDimensionChunkStore(columnValueSize, null != invertedIndex, numberOfRows, totalSize,
+        .getDimensionChunkStore(columnValueSize, isExplicitSorted, numberOfRows, totalSize,
             DimensionStoreType.FIXEDLENGTH);
     dataChunkStore.putArray(invertedIndex, invertedIndexReverse, dataChunk);
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
index d03b2de..1c6b7f4 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
@@ -37,12 +37,13 @@ public class VariableLengthDimensionColumnPage extends AbstractDimensionColumnPa
    */
   public VariableLengthDimensionColumnPage(byte[] dataChunks, int[] invertedIndex,
       int[] invertedIndexReverse, int numberOfRows) {
-    long totalSize = null != invertedIndex ?
+    boolean isExplicitSorted = isExplicitSorted(invertedIndex);
+    long totalSize = isExplicitSorted ?
         (dataChunks.length + (2 * numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE) + (
             numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE)) :
         (dataChunks.length + (numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE));
     dataChunkStore = DimensionChunkStoreFactory.INSTANCE
-        .getDimensionChunkStore(0, null != invertedIndex, numberOfRows, totalSize,
+        .getDimensionChunkStore(0, isExplicitSorted, numberOfRows, totalSize,
             DimensionStoreType.VARIABLELENGTH);
     dataChunkStore.putArray(invertedIndex, invertedIndexReverse, dataChunks);
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
index 0dc1c1b..6679402 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
@@ -22,7 +22,6 @@ import java.util.List;
 
 import org.apache.carbondata.core.datastore.FileReader;
 import org.apache.carbondata.core.datastore.chunk.DimensionColumnPage;
-import org.apache.carbondata.core.datastore.chunk.impl.ColumnGroupDimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
 import org.apache.carbondata.core.datastore.chunk.impl.FixedLengthDimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.VariableLengthDimensionColumnPage;
@@ -102,8 +101,8 @@ public class CompressedDimensionChunkFileBasedReaderV1 extends AbstractChunkRead
       DimensionRawColumnChunk dimensionRawColumnChunk, int pageNumber) throws IOException {
     int blockIndex = dimensionRawColumnChunk.getColumnIndex();
     byte[] dataPage = null;
-    int[] invertedIndexes = null;
-    int[] invertedIndexesReverse = null;
+    int[] invertedIndexes = new int[0];
+    int[] invertedIndexesReverse = new int[0];
     int[] rlePage = null;
     FileReader fileReader = dimensionRawColumnChunk.getFileReader();
 
@@ -146,14 +145,9 @@ public class CompressedDimensionChunkFileBasedReaderV1 extends AbstractChunkRead
     }
     // fill chunk attributes
     DimensionColumnPage columnDataChunk = null;
-    if (dataChunk.isRowMajor()) {
-      // to store fixed length column chunk values
-      columnDataChunk = new ColumnGroupDimensionColumnPage(
-          dataPage, eachColumnValueSize[blockIndex], numberOfRows);
-    }
     // if no dictionary column then first create a no dictionary column chunk
     // and set to data chunk instance
-    else if (!CarbonUtil
+    if (!CarbonUtil
         .hasEncoding(dataChunk.getEncodingList(), Encoding.DICTIONARY)) {
       columnDataChunk =
           new VariableLengthDimensionColumnPage(dataPage, invertedIndexes, invertedIndexesReverse,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
index 31fa819..8938260 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
@@ -21,7 +21,6 @@ import java.nio.ByteBuffer;
 
 import org.apache.carbondata.core.datastore.FileReader;
 import org.apache.carbondata.core.datastore.chunk.DimensionColumnPage;
-import org.apache.carbondata.core.datastore.chunk.impl.ColumnGroupDimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
 import org.apache.carbondata.core.datastore.chunk.impl.FixedLengthDimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.VariableLengthDimensionColumnPage;
@@ -118,8 +117,8 @@ public class CompressedDimensionChunkFileBasedReaderV2 extends AbstractChunkRead
   public DimensionColumnPage decodeColumnPage(
       DimensionRawColumnChunk dimensionRawColumnChunk, int pageNumber) throws IOException {
     byte[] dataPage = null;
-    int[] invertedIndexes = null;
-    int[] invertedIndexesReverse = null;
+    int[] invertedIndexes = new int[0];
+    int[] invertedIndexesReverse = new int[0];
     int[] rlePage = null;
     DataChunk2 dimensionColumnChunk = null;
     int copySourcePoint = (int) dimensionRawColumnChunk.getOffSet();
@@ -171,14 +170,9 @@ public class CompressedDimensionChunkFileBasedReaderV2 extends AbstractChunkRead
     // fill chunk attributes
     DimensionColumnPage columnDataChunk = null;
 
-    if (dimensionColumnChunk.isRowMajor()) {
-      // to store fixed length column chunk values
-      columnDataChunk = new ColumnGroupDimensionColumnPage(
-          dataPage, eachColumnValueSize[blockIndex], numberOfRows);
-    }
     // if no dictionary column then first create a no dictionary column chunk
     // and set to data chunk instance
-    else if (!hasEncoding(dimensionColumnChunk.encoders, Encoding.DICTIONARY)) {
+    if (!hasEncoding(dimensionColumnChunk.encoders, Encoding.DICTIONARY)) {
       columnDataChunk =
           new VariableLengthDimensionColumnPage(dataPage, invertedIndexes, invertedIndexesReverse,
               numberOfRows);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
index 0fdc515..58a9b18 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
@@ -244,8 +244,8 @@ public class CompressedDimensionChunkFileBasedReaderV3 extends AbstractChunkRead
       ByteBuffer pageData, DataChunk2 pageMetadata, int offset) {
     byte[] dataPage;
     int[] rlePage;
-    int[] invertedIndexes = null;
-    int[] invertedIndexesReverse = null;
+    int[] invertedIndexes = new int[0];
+    int[] invertedIndexesReverse = new int[0];
     dataPage = COMPRESSOR.unCompressByte(pageData.array(), offset, pageMetadata.data_page_length);
     offset += pageMetadata.data_page_length;
     // if row id block is present then read the row id chunk and uncompress it

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
index a3ed339..7d59d47 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
@@ -103,11 +103,15 @@ public abstract class AbstractMeasureChunkReaderV2V3Format extends AbstractMeasu
    * @param presentMetadataThrift
    * @return wrapper presence meta
    */
-  protected BitSet getNullBitSet(
-      org.apache.carbondata.format.PresenceMeta presentMetadataThrift) {
+  protected BitSet getNullBitSet(org.apache.carbondata.format.PresenceMeta presentMetadataThrift) {
     Compressor compressor = CompressorFactory.getInstance().getCompressor();
-    return BitSet.valueOf(
-        compressor.unCompressByte(presentMetadataThrift.getPresent_bit_stream()));
+    final byte[] present_bit_stream = presentMetadataThrift.getPresent_bit_stream();
+    if (null != present_bit_stream) {
+      return BitSet
+          .valueOf(compressor.unCompressByte(present_bit_stream));
+    } else {
+      return new BitSet(1);
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java b/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java
index a32651a..5c2a5fb 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java
@@ -33,32 +33,22 @@ public interface Compressor {
 
   byte[] compressShort(short[] unCompInput);
 
-  short[] unCompressShort(byte[] compInput);
-
   short[] unCompressShort(byte[] compInput, int offset, int lenght);
 
   byte[] compressInt(int[] unCompInput);
 
-  int[] unCompressInt(byte[] compInput);
-
   int[] unCompressInt(byte[] compInput, int offset, int length);
 
   byte[] compressLong(long[] unCompInput);
 
-  long[] unCompressLong(byte[] compInput);
-
   long[] unCompressLong(byte[] compInput, int offset, int length);
 
   byte[] compressFloat(float[] unCompInput);
 
-  float[] unCompressFloat(byte[] compInput);
-
   float[] unCompressFloat(byte[] compInput, int offset, int length);
 
   byte[] compressDouble(double[] unCompInput);
 
-  double[] unCompressDouble(byte[] compInput);
-
   double[] unCompressDouble(byte[] compInput, int offset, int length);
 
   long rawCompress(long inputAddress, int inputSize, long outputAddress) throws IOException;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java b/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
index f234f80..65244d2 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
@@ -59,7 +59,7 @@ public class SnappyCompressor implements Compressor {
       return Snappy.rawCompress(unCompInput, unCompInput.length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
+      throw new RuntimeException(e);
     }
   }
 
@@ -68,7 +68,7 @@ public class SnappyCompressor implements Compressor {
       return Snappy.rawCompress(unCompInput, byteSize);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
+      throw new RuntimeException(e);
     }
   }
 
@@ -77,19 +77,20 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompress(compInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return compInput;
   }
 
   @Override public byte[] unCompressByte(byte[] compInput, int offset, int length) {
     int uncompressedLength = 0;
-    byte[] data = null;
+    byte[] data;
     try {
       uncompressedLength = Snappy.uncompressedLength(compInput, offset, length);
       data = new byte[uncompressedLength];
       Snappy.uncompress(compInput, offset, length, data, 0);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
     return data;
   }
@@ -99,17 +100,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public short[] unCompressShort(byte[] compInput) {
-    try {
-      return Snappy.uncompressShortArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public short[] unCompressShort(byte[] compInput, int offset, int lenght) {
@@ -117,8 +109,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompressShortArray(compInput, offset, lenght);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public byte[] compressInt(int[] unCompInput) {
@@ -126,17 +118,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public int[] unCompressInt(byte[] compInput) {
-    try {
-      return Snappy.uncompressIntArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public int[] unCompressInt(byte[] compInput, int offset, int length) {
@@ -144,8 +127,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompressIntArray(compInput, offset, length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public byte[] compressLong(long[] unCompInput) {
@@ -153,17 +136,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public long[] unCompressLong(byte[] compInput) {
-    try {
-      return Snappy.uncompressLongArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public long[] unCompressLong(byte[] compInput, int offset, int length) {
@@ -171,8 +145,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompressLongArray(compInput, offset, length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public byte[] compressFloat(float[] unCompInput) {
@@ -180,17 +154,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public float[] unCompressFloat(byte[] compInput) {
-    try {
-      return Snappy.uncompressFloatArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public float[] unCompressFloat(byte[] compInput, int offset, int length) {
@@ -198,8 +163,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompressFloatArray(compInput, offset, length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public byte[] compressDouble(double[] unCompInput) {
@@ -207,17 +172,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public double[] unCompressDouble(byte[] compInput) {
-    try {
-      return Snappy.uncompressDoubleArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public double[] unCompressDouble(byte[] compInput, int offset, int length) {
@@ -228,8 +184,8 @@ public class SnappyCompressor implements Compressor {
       return result;
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
index 7255237..05f96c5 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
@@ -274,9 +274,9 @@ public abstract class AbstractDFSCarbonFile implements CarbonFile {
           int count = dataInputStream.available();
           // create buffer
           byte[] byteStreamBuffer = new byte[count];
-          dataInputStream.read(byteStreamBuffer);
+          int bytesRead = dataInputStream.read(byteStreamBuffer);
           stream = fileSystem.create(pt, true, bufferSize);
-          stream.write(byteStreamBuffer);
+          stream.write(byteStreamBuffer, 0, bytesRead);
         } else {
           stream = fileSystem.append(pt, bufferSize);
         }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
index f5a751b..9477dff 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
@@ -202,7 +202,7 @@ public abstract class AbstractBTreeLeafNode implements BTreeNode {
       int[][] columnIndexRange) throws IOException {
     // No required here as leaf which will will be use this class will implement its own get
     // measure chunks
-    return null;
+    throw new UnsupportedOperationException("Unsupported operation");
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java
index 378b51f..1cdefc8 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java
@@ -124,13 +124,13 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public void putByte(int rowId, byte value) {
-    long offset = rowId << byteBits;
+    long offset = (long)rowId << byteBits;
     CarbonUnsafe.getUnsafe().putByte(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putShort(int rowId, short value) {
-    long offset = rowId << shortBits;
+    long offset = (long)rowId << shortBits;
     CarbonUnsafe.getUnsafe().putShort(baseAddress, baseOffset + offset, value);
   }
 
@@ -145,13 +145,13 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public void putInt(int rowId, int value) {
-    long offset = rowId << intBits;
+    long offset = (long)rowId << intBits;
     CarbonUnsafe.getUnsafe().putInt(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putLong(int rowId, long value) {
-    long offset = rowId << longBits;
+    long offset = (long)rowId << longBits;
     CarbonUnsafe.getUnsafe().putLong(baseAddress, baseOffset + offset, value);
   }
 
@@ -187,7 +187,7 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public byte getByte(int rowId) {
-    long offset = rowId << byteBits;
+    long offset = (long)rowId << byteBits;
     return CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset);
   }
 
@@ -202,7 +202,7 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public short getShort(int rowId) {
-    long offset = rowId << shortBits;
+    long offset = (long) rowId << shortBits;
     return CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset);
   }
 
@@ -218,13 +218,13 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public int getInt(int rowId) {
-    long offset = rowId << intBits;
+    long offset = (long)rowId << intBits;
     return CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset);
   }
 
   @Override
   public long getLong(int rowId) {
-    long offset = rowId << longBits;
+    long offset = (long) rowId << longBits;
     return CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset);
   }
 
@@ -266,13 +266,13 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
     switch (decimalConverter.getDecimalConverterType()) {
       case DECIMAL_INT:
         for (int i = 0; i < pageSize; i++) {
-          long offset = i << intBits;
+          long offset = (long)i << intBits;
           codec.encode(i, CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset));
         }
         break;
       case DECIMAL_LONG:
         for (int i = 0; i < pageSize; i++) {
-          long offset = i << longBits;
+          long offset = (long)i << longBits;
           codec.encode(i, CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset));
         }
         break;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java
index 6847ab9..7965e93 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java
@@ -94,13 +94,13 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
 
   @Override
   public void putByte(int rowId, byte value) {
-    long offset = rowId << byteBits;
+    long offset = ((long)rowId) << byteBits;
     CarbonUnsafe.getUnsafe().putByte(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putShort(int rowId, short value) {
-    long offset = rowId << shortBits;
+    long offset = ((long)rowId) << shortBits;
     CarbonUnsafe.getUnsafe().putShort(baseAddress, baseOffset + offset, value);
   }
 
@@ -115,19 +115,19 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
 
   @Override
   public void putInt(int rowId, int value) {
-    long offset = rowId << intBits;
+    long offset = ((long)rowId) << intBits;
     CarbonUnsafe.getUnsafe().putInt(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putLong(int rowId, long value) {
-    long offset = rowId << longBits;
+    long offset = ((long)rowId) << longBits;
     CarbonUnsafe.getUnsafe().putLong(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putDouble(int rowId, double value) {
-    long offset = rowId << doubleBits;
+    long offset = ((long)rowId) << doubleBits;
     CarbonUnsafe.getUnsafe().putDouble(baseAddress, baseOffset + offset, value);
   }
 
@@ -151,13 +151,13 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
 
   @Override
   public byte getByte(int rowId) {
-    long offset = rowId << byteBits;
+    long offset = ((long)rowId) << byteBits;
     return CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset);
   }
 
   @Override
   public short getShort(int rowId) {
-    long offset = rowId << shortBits;
+    long offset = ((long)rowId) << shortBits;
     return CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset);
   }
 
@@ -173,25 +173,25 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
 
   @Override
   public int getInt(int rowId) {
-    long offset = rowId << intBits;
+    long offset = ((long)rowId) << intBits;
     return CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset);
   }
 
   @Override
   public long getLong(int rowId) {
-    long offset = rowId << longBits;
+    long offset = ((long)rowId) << longBits;
     return CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset);
   }
 
   @Override
   public float getFloat(int rowId) {
-    long offset = rowId << floatBits;
+    long offset = ((long)rowId) << floatBits;
     return CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset);
   }
 
   @Override
   public double getDouble(int rowId) {
-    long offset = rowId << doubleBits;
+    long offset = ((long)rowId) << doubleBits;
     return CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset);
   }
 
@@ -219,9 +219,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public byte[] getBytePage() {
     byte[] data = new byte[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << byteBits;
-      data[i] = CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -229,9 +229,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public short[] getShortPage() {
     short[] data = new short[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << shortBits;
-      data[i] = CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -247,9 +247,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public int[] getIntPage() {
     int[] data = new int[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << intBits;
-      data[i] = CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -257,9 +257,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public long[] getLongPage() {
     long[] data = new long[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << longBits;
-      data[i] = CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -267,9 +267,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public float[] getFloatPage() {
     float[] data = new float[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << floatBits;
-      data[i] = CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -277,9 +277,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public double[] getDoublePage() {
     double[] data = new double[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << doubleBits;
-      data[i] = CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -363,34 +363,34 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   public void convertValue(ColumnPageValueConverter codec) {
     int pageSize = getPageSize();
     if (dataType == DataTypes.BYTE) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << byteBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.SHORT) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << shortBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.INT) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << intBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.LONG) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << longBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.FLOAT) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << floatBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.DOUBLE) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << doubleBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset));
       }
     } else {
       throw new UnsupportedOperationException("invalid data type: " + dataType);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java
index 597def0..318d55d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java
@@ -118,6 +118,9 @@ public abstract class EncodingFactory {
    * Old way of creating decoder, based on algorithm
    */
   public ColumnPageDecoder createDecoderLegacy(ValueEncoderMeta metadata) {
+    if (null == metadata) {
+      throw new RuntimeException("internal error");
+    }
     SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(metadata);
     TableSpec.ColumnSpec spec =
         TableSpec.ColumnSpec.newInstanceLegacy("legacy", stats.getDataType(), ColumnType.MEASURE);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
index e6cf29e..22537db 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
@@ -73,16 +73,17 @@ public class KeyPageStatsCollector implements ColumnPageStatsCollector {
 
   @Override
   public void update(byte[] value) {
-    if (min == null && max == null) {
+    if (null == min) {
       min = value;
+    }
+    if (null == max) {
+      max = value;
+    }
+    if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, value) > 0) {
+      min = value;
+    }
+    if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, value) < 0) {
       max = value;
-    } else {
-      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, value) > 0) {
-        min = value;
-      }
-      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, value) < 0) {
-        max = value;
-      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
index 23795c5..7958a8d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
@@ -84,16 +84,21 @@ public class LVStringStatsCollector implements ColumnPageStatsCollector {
       newValue = new byte[value.length - 2];
       System.arraycopy(value, 2, newValue, 0, newValue.length);
     }
-    if (min == null && max == null) {
+
+    if (null == min) {
       min = newValue;
+    }
+
+    if (null == max) {
+      max = newValue;
+    }
+
+    if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, newValue) > 0) {
+      min = newValue;
+    }
+
+    if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, newValue) < 0) {
       max = newValue;
-    } else {
-      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, newValue) > 0) {
-        min = newValue;
-      }
-      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, newValue) < 0) {
-        max = newValue;
-      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
index e0feb04..8a69b80 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
@@ -97,7 +97,9 @@ public class IncrementalColumnDictionaryGenerator implements BiDictionary<Intege
   }
 
   @Override public int size() {
-    return currentDictionarySize;
+    synchronized (lock) {
+      return currentDictionarySize;
+    }
   }
 
   @Override public Integer generateKey(String value) throws DictionaryGenerationException {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
index 5db13b6..7bb8259 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
@@ -57,6 +57,9 @@ public class TableDictionaryGenerator
       throws DictionaryGenerationException {
     CarbonDimension dimension = carbonTable.getPrimitiveDimensionByName(value.getColumnName());
 
+    if (null == dimension) {
+      throw new DictionaryGenerationException("Dictionary Generation Failed");
+    }
     DictionaryGenerator<Integer, String> generator =
             columnMap.get(dimension.getColumnId());
     return generator.generateKey(value.getData());
@@ -65,6 +68,9 @@ public class TableDictionaryGenerator
   public Integer size(DictionaryMessage key) {
     CarbonDimension dimension = carbonTable.getPrimitiveDimensionByName(key.getColumnName());
 
+    if (null == dimension) {
+      return 0;
+    }
     DictionaryGenerator<Integer, String> generator =
             columnMap.get(dimension.getColumnId());
     return ((BiDictionary) generator).size();
@@ -91,7 +97,7 @@ public class TableDictionaryGenerator
   public void updateGenerator(DictionaryMessage key) {
     CarbonDimension dimension = carbonTable
         .getPrimitiveDimensionByName(key.getColumnName());
-    if (null == columnMap.get(dimension.getColumnId())) {
+    if (null != dimension && null == columnMap.get(dimension.getColumnId())) {
       synchronized (columnMap) {
         if (null == columnMap.get(dimension.getColumnId())) {
           columnMap.put(dimension.getColumnId(),

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
index 021fb82..0188281 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
@@ -17,11 +17,7 @@
 package org.apache.carbondata.core.indexstore.blockletindex;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 
 import org.apache.carbondata.core.cache.Cache;
 import org.apache.carbondata.core.cache.CacheProvider;
@@ -169,9 +165,11 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
       return false;
     }
     for (int i = 0; i < tableColumnList.size(); i++) {
-      return indexFileColumnList.get(i).equalsWithStrictCheck(tableColumnList.get(i));
+      if (!indexFileColumnList.get(i).equalsWithStrictCheck(tableColumnList.get(i))) {
+        return false;
+      }
     }
-    return false;
+    return true;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
index c6efd77..c2686d0 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
@@ -282,12 +282,15 @@ public class SegmentIndexFileStore {
     DataInputStream dataInputStream =
         FileFactory.getDataInputStream(indexFilePath, FileFactory.getFileType(indexFilePath));
     byte[] bytes = new byte[(int) indexFile.getSize()];
-    dataInputStream.readFully(bytes);
-    carbonIndexMap.put(indexFile.getName(), bytes);
-    carbonIndexMapWithFullPath.put(
-        indexFile.getParentFile().getAbsolutePath() + CarbonCommonConstants.FILE_SEPARATOR
-            + indexFile.getName(), bytes);
-    dataInputStream.close();
+    try {
+      dataInputStream.readFully(bytes);
+      carbonIndexMap.put(indexFile.getName(), bytes);
+      carbonIndexMapWithFullPath.put(
+          indexFile.getParentFile().getAbsolutePath() + CarbonCommonConstants.FILE_SEPARATOR
+              + indexFile.getName(), bytes);
+    } finally {
+      dataInputStream.close();
+    }
   }
 
   private MergedBlockIndexHeader readMergeBlockIndexHeader(ThriftReader thriftReader)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java b/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
index 9a8d3f6..ecdb672 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
@@ -54,12 +54,10 @@ public class ZookeeperInit {
 
   public static ZookeeperInit getInstance(String zooKeeperUrl) {
 
-    if (null == zooKeeperInit) {
-      synchronized (ZookeeperInit.class) {
-        if (null == zooKeeperInit) {
-          LOGGER.info("Initiating Zookeeper client.");
-          zooKeeperInit = new ZookeeperInit(zooKeeperUrl);
-        }
+    synchronized (ZookeeperInit.class) {
+      if (null == zooKeeperInit) {
+        LOGGER.info("Initiating Zookeeper client.");
+        zooKeeperInit = new ZookeeperInit(zooKeeperUrl);
       }
     }
     return zooKeeperInit;