Posted to commits@carbondata.apache.org by gv...@apache.org on 2018/05/21 08:33:17 UTC

[1/4] carbondata git commit: [CARBONDATA-2489] Coverity scan fixes

Repository: carbondata
Updated Branches:
  refs/heads/master f184de885 -> 7ef916455


http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java
----------------------------------------------------------------------
diff --git a/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java b/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java
deleted file mode 100644
index 16d4d53..0000000
--- a/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java
+++ /dev/null
@@ -1,469 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.processing;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.nio.charset.Charset;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.UUID;
-
-import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.core.cache.Cache;
-import org.apache.carbondata.core.cache.CacheProvider;
-import org.apache.carbondata.core.cache.CacheType;
-import org.apache.carbondata.core.cache.dictionary.Dictionary;
-import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastore.impl.FileFactory;
-import org.apache.carbondata.core.fileoperations.AtomicFileOperations;
-import org.apache.carbondata.core.fileoperations.AtomicFileOperationsImpl;
-import org.apache.carbondata.core.fileoperations.FileWriteOperation;
-import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
-import org.apache.carbondata.core.metadata.CarbonMetadata;
-import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
-import org.apache.carbondata.core.metadata.ColumnIdentifier;
-import org.apache.carbondata.core.metadata.converter.SchemaConverter;
-import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
-import org.apache.carbondata.core.metadata.datatype.DataTypes;
-import org.apache.carbondata.core.metadata.encoder.Encoding;
-import org.apache.carbondata.core.metadata.schema.SchemaEvolution;
-import org.apache.carbondata.core.metadata.schema.SchemaEvolutionEntry;
-import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import org.apache.carbondata.core.metadata.schema.table.TableInfo;
-import org.apache.carbondata.core.metadata.schema.table.TableSchema;
-import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
-import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
-import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
-import org.apache.carbondata.core.statusmanager.SegmentStatus;
-import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.path.CarbonTablePath;
-import org.apache.carbondata.core.writer.CarbonDictionaryWriter;
-import org.apache.carbondata.core.writer.CarbonDictionaryWriterImpl;
-import org.apache.carbondata.core.writer.ThriftWriter;
-import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortIndexWriter;
-import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortIndexWriterImpl;
-import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortInfo;
-import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortInfoPreparator;
-import org.apache.carbondata.processing.loading.DataLoadExecutor;
-import org.apache.carbondata.processing.loading.constants.DataLoadProcessorConstants;
-import org.apache.carbondata.processing.loading.csvinput.BlockDetails;
-import org.apache.carbondata.processing.loading.csvinput.CSVInputFormat;
-import org.apache.carbondata.processing.loading.csvinput.CSVRecordReaderIterator;
-import org.apache.carbondata.processing.loading.csvinput.StringArrayWritable;
-import org.apache.carbondata.processing.loading.model.CarbonDataLoadSchema;
-import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
-import org.apache.carbondata.processing.util.TableOptionConstant;
-
-import com.google.gson.Gson;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapred.TaskAttemptID;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskType;
-import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
-
-/**
- * This class will create a store file based on the provided schema
- *
- */
-public class StoreCreator {
-
-  private static AbsoluteTableIdentifier identifier;
-  private static String storePath = "";
-  static {
-    try {
-      storePath = new File("target/store").getCanonicalPath();
-      String dbName = "testdb";
-      String tableName = "testtable";
-      identifier =
-          AbsoluteTableIdentifier.from(
-              storePath + "/testdb/testtable",
-              new CarbonTableIdentifier(dbName, tableName, UUID.randomUUID().toString()));
-    } catch (IOException ex) {
-
-    }
-  }
-
-  public static AbsoluteTableIdentifier getIdentifier() {
-    return identifier;
-  }
-
-  /**
-   * Create store without any restructure
-   */
-  public static void createCarbonStore() {
-    try {
-      String factFilePath = new File("../hadoop/src/test/resources/data.csv").getCanonicalPath();
-      File storeDir = new File(storePath);
-      CarbonUtil.deleteFoldersAndFiles(storeDir);
-      CarbonProperties.getInstance().addProperty(CarbonCommonConstants.STORE_LOCATION_HDFS,
-          storePath);
-
-      CarbonTable table = createTable();
-      writeDictionary(factFilePath, table);
-      CarbonDataLoadSchema schema = new CarbonDataLoadSchema(table);
-      CarbonLoadModel loadModel = new CarbonLoadModel();
-      loadModel.setCarbonDataLoadSchema(schema);
-      loadModel.setDatabaseName(identifier.getCarbonTableIdentifier().getDatabaseName());
-      loadModel.setTableName(identifier.getCarbonTableIdentifier().getTableName());
-      loadModel.setTableName(identifier.getCarbonTableIdentifier().getTableName());
-      loadModel.setCarbonTransactionalTable(true);
-      loadModel.setFactFilePath(factFilePath);
-      loadModel.setLoadMetadataDetails(new ArrayList<LoadMetadataDetails>());
-      loadModel.setTablePath(identifier.getTablePath());
-      loadModel.setDateFormat(null);
-      loadModel.setDefaultTimestampFormat(CarbonProperties.getInstance().getProperty(
-          CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
-          CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
-      loadModel.setDefaultDateFormat(CarbonProperties.getInstance().getProperty(
-          CarbonCommonConstants.CARBON_DATE_FORMAT,
-          CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT));
-      loadModel
-          .setSerializationNullFormat(
-              TableOptionConstant.SERIALIZATION_NULL_FORMAT.getName() + "," + "\\N");
-      loadModel
-          .setBadRecordsLoggerEnable(
-              TableOptionConstant.BAD_RECORDS_LOGGER_ENABLE.getName() + "," + "false");
-      loadModel
-          .setBadRecordsAction(
-              TableOptionConstant.BAD_RECORDS_ACTION.getName() + "," + "FORCE");
-      loadModel
-          .setIsEmptyDataBadRecord(
-              DataLoadProcessorConstants.IS_EMPTY_DATA_BAD_RECORD + "," + "false");
-      loadModel.setCsvHeader("ID,date,country,name,phonetype,serialname,salary");
-      loadModel.setCsvHeaderColumns(loadModel.getCsvHeader().split(","));
-      loadModel.setTaskNo("0");
-      loadModel.setSegmentId("0");
-      loadModel.setFactTimeStamp(System.currentTimeMillis());
-      loadModel.setMaxColumns("10");
-
-      loadData(loadModel, storePath);
-
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-
-  private static CarbonTable createTable() throws IOException {
-    TableInfo tableInfo = new TableInfo();
-    tableInfo.setDatabaseName(identifier.getCarbonTableIdentifier().getDatabaseName());
-    TableSchema tableSchema = new TableSchema();
-    tableSchema.setTableName(identifier.getCarbonTableIdentifier().getTableName());
-    List<ColumnSchema> columnSchemas = new ArrayList<ColumnSchema>();
-    ArrayList<Encoding> encodings = new ArrayList<>();
-    encodings.add(Encoding.DICTIONARY);
-    ColumnSchema id = new ColumnSchema();
-    id.setColumnName("ID");
-    id.setColumnar(true);
-    id.setDataType(DataTypes.INT);
-    id.setEncodingList(encodings);
-    id.setColumnUniqueId(UUID.randomUUID().toString());
-    id.setDimensionColumn(true);
-    id.setColumnGroup(1);
-    columnSchemas.add(id);
-
-    ColumnSchema date = new ColumnSchema();
-    date.setColumnName("date");
-    date.setColumnar(true);
-    date.setDataType(DataTypes.STRING);
-    date.setEncodingList(encodings);
-    date.setColumnUniqueId(UUID.randomUUID().toString());
-    date.setDimensionColumn(true);
-    date.setColumnGroup(2);
-    columnSchemas.add(date);
-
-    ColumnSchema country = new ColumnSchema();
-    country.setColumnName("country");
-    country.setColumnar(true);
-    country.setDataType(DataTypes.STRING);
-    country.setEncodingList(encodings);
-    country.setColumnUniqueId(UUID.randomUUID().toString());
-    country.setDimensionColumn(true);
-    country.setColumnGroup(3);
-    columnSchemas.add(country);
-
-    ColumnSchema name = new ColumnSchema();
-    name.setColumnName("name");
-    name.setColumnar(true);
-    name.setDataType(DataTypes.STRING);
-    name.setEncodingList(encodings);
-    name.setColumnUniqueId(UUID.randomUUID().toString());
-    name.setDimensionColumn(true);
-    name.setColumnGroup(4);
-    columnSchemas.add(name);
-
-    ColumnSchema phonetype = new ColumnSchema();
-    phonetype.setColumnName("phonetype");
-    phonetype.setColumnar(true);
-    phonetype.setDataType(DataTypes.STRING);
-    phonetype.setEncodingList(encodings);
-    phonetype.setColumnUniqueId(UUID.randomUUID().toString());
-    phonetype.setDimensionColumn(true);
-    phonetype.setColumnGroup(5);
-    columnSchemas.add(phonetype);
-
-    ColumnSchema serialname = new ColumnSchema();
-    serialname.setColumnName("serialname");
-    serialname.setColumnar(true);
-    serialname.setDataType(DataTypes.STRING);
-    serialname.setEncodingList(encodings);
-    serialname.setColumnUniqueId(UUID.randomUUID().toString());
-    serialname.setDimensionColumn(true);
-    serialname.setColumnGroup(6);
-    columnSchemas.add(serialname);
-
-    ColumnSchema salary = new ColumnSchema();
-    salary.setColumnName("salary");
-    salary.setColumnar(true);
-    salary.setDataType(DataTypes.INT);
-    salary.setEncodingList(new ArrayList<Encoding>());
-    salary.setColumnUniqueId(UUID.randomUUID().toString());
-    salary.setDimensionColumn(false);
-    salary.setColumnGroup(7);
-    columnSchemas.add(salary);
-
-    tableSchema.setListOfColumns(columnSchemas);
-    SchemaEvolution schemaEvol = new SchemaEvolution();
-    schemaEvol.setSchemaEvolutionEntryList(new ArrayList<SchemaEvolutionEntry>());
-    tableSchema.setSchemaEvalution(schemaEvol);
-    tableSchema.setTableId(UUID.randomUUID().toString());
-    tableInfo.setTableUniqueName(
-        identifier.getCarbonTableIdentifier().getTableUniqueName()
-    );
-    tableInfo.setLastUpdatedTime(System.currentTimeMillis());
-    tableInfo.setFactTable(tableSchema);
-    tableInfo.setTablePath(identifier.getTablePath());
-
-    String schemaFilePath = CarbonTablePath.getSchemaFilePath(identifier.getTablePath());
-    String schemaMetadataPath = CarbonTablePath.getFolderContainingFile(schemaFilePath);
-    CarbonMetadata.getInstance().loadTableMetadata(tableInfo);
-
-    SchemaConverter schemaConverter = new ThriftWrapperSchemaConverterImpl();
-    org.apache.carbondata.format.TableInfo thriftTableInfo = schemaConverter
-        .fromWrapperToExternalTableInfo(tableInfo, tableInfo.getDatabaseName(),
-            tableInfo.getFactTable().getTableName());
-    org.apache.carbondata.format.SchemaEvolutionEntry schemaEvolutionEntry =
-        new org.apache.carbondata.format.SchemaEvolutionEntry(tableInfo.getLastUpdatedTime());
-    thriftTableInfo.getFact_table().getSchema_evolution().getSchema_evolution_history()
-        .add(schemaEvolutionEntry);
-
-    FileFactory.FileType fileType = FileFactory.getFileType(schemaMetadataPath);
-    if (!FileFactory.isFileExist(schemaMetadataPath, fileType)) {
-      FileFactory.mkdirs(schemaMetadataPath, fileType);
-    }
-
-    ThriftWriter thriftWriter = new ThriftWriter(schemaFilePath, false);
-    thriftWriter.open();
-    thriftWriter.write(thriftTableInfo);
-    thriftWriter.close();
-    return CarbonMetadata.getInstance().getCarbonTable(tableInfo.getTableUniqueName());
-  }
-
-  private static void writeDictionary(String factFilePath, CarbonTable table) throws Exception {
-    BufferedReader reader = new BufferedReader(new FileReader(factFilePath));
-    String header = reader.readLine();
-    String[] split = header.split(",");
-    List<CarbonColumn> allCols = new ArrayList<CarbonColumn>();
-    List<CarbonDimension> dims = table.getDimensionByTableName(table.getTableName());
-    allCols.addAll(dims);
-    List<CarbonMeasure> msrs = table.getMeasureByTableName(table.getTableName());
-    allCols.addAll(msrs);
-    Set<String>[] set = new HashSet[dims.size()];
-    for (int i = 0; i < set.length; i++) {
-      set[i] = new HashSet<String>();
-    }
-    String line = reader.readLine();
-    while (line != null) {
-      String[] data = line.split(",");
-      for (int i = 0; i < set.length; i++) {
-        set[i].add(data[i]);
-      }
-      line = reader.readLine();
-    }
-
-    Cache dictCache = CacheProvider.getInstance()
-        .createCache(CacheType.REVERSE_DICTIONARY);
-    for (int i = 0; i < set.length; i++) {
-      ColumnIdentifier columnIdentifier =
-          new ColumnIdentifier(dims.get(i).getColumnId(), null, null);
-      DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier =
-          new DictionaryColumnUniqueIdentifier(table.getAbsoluteTableIdentifier(), columnIdentifier,
-              columnIdentifier.getDataType());
-      CarbonDictionaryWriter writer =
-          new CarbonDictionaryWriterImpl(dictionaryColumnUniqueIdentifier);
-      for (String value : set[i]) {
-        writer.write(value);
-      }
-      writer.close();
-      writer.commit();
-      Dictionary dict = (Dictionary) dictCache.get(
-          new DictionaryColumnUniqueIdentifier(identifier,
-        		  columnIdentifier, dims.get(i).getDataType()));
-      CarbonDictionarySortInfoPreparator preparator =
-          new CarbonDictionarySortInfoPreparator();
-      List<String> newDistinctValues = new ArrayList<String>();
-      CarbonDictionarySortInfo dictionarySortInfo =
-          preparator.getDictionarySortInfo(newDistinctValues, dict, dims.get(i).getDataType());
-      CarbonDictionarySortIndexWriter carbonDictionaryWriter =
-          new CarbonDictionarySortIndexWriterImpl(dictionaryColumnUniqueIdentifier);
-      try {
-        carbonDictionaryWriter.writeSortIndex(dictionarySortInfo.getSortIndex());
-        carbonDictionaryWriter.writeInvertedSortIndex(dictionarySortInfo.getSortIndexInverted());
-      } finally {
-        carbonDictionaryWriter.close();
-      }
-    }
-    reader.close();
-  }
-
-  /**
-   * Execute graph which will further load data
-   *
-   * @param loadModel
-   * @param storeLocation
-   * @throws Exception
-   */
-  public static void loadData(CarbonLoadModel loadModel, String storeLocation)
-      throws Exception {
-    new File(storeLocation).mkdirs();
-    String outPutLoc = storeLocation + "/etl";
-    String databaseName = loadModel.getDatabaseName();
-    String tableName = loadModel.getTableName();
-    String tempLocationKey = databaseName + '_' + tableName + "_1";
-    CarbonProperties.getInstance().addProperty(tempLocationKey, storeLocation);
-    CarbonProperties.getInstance().addProperty("store_output_location", outPutLoc);
-    CarbonProperties.getInstance().addProperty("send.signal.load", "false");
-    CarbonProperties.getInstance().addProperty("carbon.is.columnar.storage", "true");
-    CarbonProperties.getInstance().addProperty("carbon.dimension.split.value.in.columnar", "1");
-    CarbonProperties.getInstance().addProperty("carbon.is.fullyfilled.bits", "true");
-    CarbonProperties.getInstance().addProperty("is.int.based.indexer", "true");
-    CarbonProperties.getInstance().addProperty("aggregate.columnar.keyblock", "true");
-    CarbonProperties.getInstance().addProperty("is.compressed.keyblock", "false");
-    CarbonProperties.getInstance().addProperty("carbon.leaf.node.size", "120000");
-
-    String graphPath =
-        outPutLoc + File.separator + loadModel.getDatabaseName() + File.separator + tableName
-            + File.separator + 0 + File.separator + 1 + File.separator + tableName + ".ktr";
-    File path = new File(graphPath);
-    if (path.exists()) {
-      path.delete();
-    }
-
-    BlockDetails blockDetails = new BlockDetails(new Path(loadModel.getFactFilePath()),
-        0, new File(loadModel.getFactFilePath()).length(), new String[] {"localhost"});
-    Configuration configuration = new Configuration();
-    CSVInputFormat.setCommentCharacter(configuration, loadModel.getCommentChar());
-    CSVInputFormat.setCSVDelimiter(configuration, loadModel.getCsvDelimiter());
-    CSVInputFormat.setEscapeCharacter(configuration, loadModel.getEscapeChar());
-    CSVInputFormat.setHeaderExtractionEnabled(configuration, true);
-    CSVInputFormat.setQuoteCharacter(configuration, loadModel.getQuoteChar());
-    CSVInputFormat.setReadBufferSize(configuration, CarbonProperties.getInstance()
-        .getProperty(CarbonCommonConstants.CSV_READ_BUFFER_SIZE,
-            CarbonCommonConstants.CSV_READ_BUFFER_SIZE_DEFAULT));
-    CSVInputFormat.setMaxColumns(configuration, "10");
-    CSVInputFormat.setNumberOfColumns(configuration, "7");
-
-    TaskAttemptContextImpl hadoopAttemptContext = new TaskAttemptContextImpl(configuration, new TaskAttemptID("", 1, TaskType.MAP, 0, 0));
-    CSVInputFormat format = new CSVInputFormat();
-
-    RecordReader<NullWritable, StringArrayWritable> recordReader =
-        format.createRecordReader(blockDetails, hadoopAttemptContext);
-
-    CSVRecordReaderIterator readerIterator = new CSVRecordReaderIterator(recordReader, blockDetails, hadoopAttemptContext);
-    String[] storeLocationArray = new String[] {storeLocation + "/" + databaseName + "/" + tableName};
-    new DataLoadExecutor().execute(loadModel,
-        storeLocationArray,
-        new CarbonIterator[]{readerIterator});
-
-    writeLoadMetadata(loadModel.getCarbonDataLoadSchema(), loadModel.getTableName(), loadModel.getTableName(),
-        new ArrayList<LoadMetadataDetails>());
-  }
-
-  public static void writeLoadMetadata(CarbonDataLoadSchema schema, String databaseName,
-      String tableName, List<LoadMetadataDetails> listOfLoadFolderDetails) throws IOException {
-    LoadMetadataDetails loadMetadataDetails = new LoadMetadataDetails();
-    loadMetadataDetails.setLoadEndTime(System.currentTimeMillis());
-    loadMetadataDetails.setSegmentStatus(SegmentStatus.SUCCESS);
-    loadMetadataDetails.setLoadName(String.valueOf(0));
-    loadMetadataDetails.setLoadStartTime(loadMetadataDetails.getTimeStamp(readCurrentTime()));
-    listOfLoadFolderDetails.add(loadMetadataDetails);
-
-    String dataLoadLocation = schema.getCarbonTable().getMetadataPath() + File.separator
-        + CarbonTablePath.TABLE_STATUS_FILE;
-
-    DataOutputStream dataOutputStream;
-    Gson gsonObjectToWrite = new Gson();
-    BufferedWriter brWriter = null;
-
-    AtomicFileOperations writeOperation =
-        new AtomicFileOperationsImpl(dataLoadLocation, FileFactory.getFileType(dataLoadLocation));
-
-    try {
-
-      dataOutputStream = writeOperation.openForWrite(FileWriteOperation.OVERWRITE);
-      brWriter = new BufferedWriter(new OutputStreamWriter(dataOutputStream,
-              Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET)));
-
-      String metadataInstance = gsonObjectToWrite.toJson(listOfLoadFolderDetails.toArray());
-      brWriter.write(metadataInstance);
-    } catch (Exception ex) {
-      throw ex;
-    } finally {
-      try {
-        if (null != brWriter) {
-          brWriter.flush();
-        }
-      } catch (Exception e) {
-        throw e;
-
-      }
-      CarbonUtil.closeStreams(brWriter);
-
-    }
-    writeOperation.close();
-
-  }
-
-  public static String readCurrentTime() {
-    SimpleDateFormat sdf = new SimpleDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP_MILLIS);
-    String date = null;
-
-    date = sdf.format(new Date());
-
-    return date;
-  }
-
-  public static void main(String[] args) {
-    StoreCreator.createCarbonStore();
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index d15e548..946ea0f 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -165,6 +165,7 @@ public class CarbonReaderBuilder {
           new TaskAttemptContextImpl(job.getConfiguration(), new TaskAttemptID());
       RecordReader reader = format.createRecordReader(split, attempt);
       reader.initialize(split, attempt);
+      reader.close();
       readers.add(reader);
     }
 
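For reference, the resource-leak pattern Coverity flags around record readers is an initialize() that can throw after the reader has acquired streams. A minimal, self-contained sketch of the guard (names here are illustrative, not the SDK's API):

import java.io.IOException;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public final class ReaderInitGuard {
  // Initialize a reader, closing it if initialization fails so the
  // underlying streams are not leaked on the error path.
  public static <K, V> RecordReader<K, V> initOrClose(RecordReader<K, V> reader,
      InputSplit split, TaskAttemptContext context) throws IOException {
    try {
      reader.initialize(split, context);
      return reader;
    } catch (IOException | InterruptedException e) {
      reader.close(); // release resources before propagating
      throw e instanceof IOException ? (IOException) e : new IOException(e);
    }
  }
}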

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java b/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
index 394ffea..9e338e7 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
@@ -24,6 +24,8 @@ import java.util.List;
 import java.util.Objects;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datastore.row.CarbonRow;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.scan.expression.Expression;
@@ -49,13 +51,15 @@ import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 @InterfaceAudience.Internal
 class LocalCarbonStore extends MetaCachedCarbonStore {
 
+  private static final LogService LOGGER =
+      LogServiceFactory.getLogService(LocalCarbonStore.class.getName());
+
   @Override
   public Iterator<CarbonRow> scan(String path, String[] projectColumns) throws IOException {
     return scan(path, projectColumns, null);
   }
 
-  @Override
-  public Iterator<CarbonRow> scan(String path, String[] projectColumns, Expression filter)
+  @Override public Iterator<CarbonRow> scan(String path, String[] projectColumns, Expression filter)
       throws IOException {
     Objects.requireNonNull(path);
     Objects.requireNonNull(projectColumns);
@@ -73,8 +77,8 @@ class LocalCarbonStore extends MetaCachedCarbonStore {
     CarbonInputFormat.setTableName(job.getConfiguration(), table.getTableName());
     CarbonInputFormat.setDatabaseName(job.getConfiguration(), table.getDatabaseName());
     CarbonInputFormat.setCarbonReadSupport(job.getConfiguration(), CarbonRowReadSupport.class);
-    CarbonInputFormat.setColumnProjection(
-        job.getConfiguration(), new CarbonProjection(projectColumns));
+    CarbonInputFormat
+        .setColumnProjection(job.getConfiguration(), new CarbonProjection(projectColumns));
     if (filter != null) {
       CarbonInputFormat.setFilterPredicates(job.getConfiguration(), filter);
     }
@@ -84,6 +88,8 @@ class LocalCarbonStore extends MetaCachedCarbonStore {
 
     List<RecordReader<Void, Object>> readers = new ArrayList<>(splits.size());
 
+    List<CarbonRow> rows = new ArrayList<>();
+
     try {
       for (InputSplit split : splits) {
         TaskAttemptContextImpl attempt =
@@ -92,19 +98,27 @@ class LocalCarbonStore extends MetaCachedCarbonStore {
         reader.initialize(split, attempt);
         readers.add(reader);
       }
-    } catch (InterruptedException e) {
-      throw new IOException(e);
-    }
 
-    List<CarbonRow> rows = new ArrayList<>();
-    try {
       for (RecordReader<Void, Object> reader : readers) {
         while (reader.nextKeyValue()) {
-          rows.add((CarbonRow)reader.getCurrentValue());
+          rows.add((CarbonRow) reader.getCurrentValue());
+        }
+        try {
+          reader.close();
+        } catch (IOException e) {
+          LOGGER.error(e);
         }
       }
     } catch (InterruptedException e) {
       throw new IOException(e);
+    } finally {
+      for (RecordReader<Void, Object> reader : readers) {
+        try {
+          reader.close();
+        } catch (IOException e) {
+          LOGGER.error(e);
+        }
+      }
     }
     return rows.iterator();
   }
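Distilled from the hunk above, the cleanup loop that belongs in finally looks like this as a standalone sketch (hypothetical names); per-reader close failures are logged rather than rethrown, so one bad reader cannot prevent the rest from being released:

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.mapreduce.RecordReader;

public final class ReaderCleanup {
  // Close every reader, logging (not rethrowing) individual failures so
  // cleanup of the remaining readers still runs.
  public static void closeAll(List<RecordReader<Void, Object>> readers) {
    for (RecordReader<Void, Object> reader : readers) {
      try {
        reader.close();
      } catch (IOException e) {
        System.err.println("Failed to close reader: " + e); // stand-in for LOGGER.error(e)
      }
    }
  }
}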

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
----------------------------------------------------------------------
diff --git a/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java b/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
index 445b292..9727352 100644
--- a/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
+++ b/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
@@ -128,11 +128,12 @@ public class SearchRequestHandler {
     // In search mode, reader will read multiple blocks by using a thread pool
     CarbonRecordReader<CarbonRow> reader =
         new CarbonRecordReader<>(queryModel, new CarbonRowReadSupport());
-    reader.initialize(mbSplit, null);
 
     // read all rows by the reader
     List<CarbonRow> rows = new LinkedList<>();
     try {
+      reader.initialize(mbSplit, null);
+
       // loop to read required number of rows.
       // By default, if user does not specify the limit value, limit is Long.MaxValue
       while (reader.nextKeyValue() && rowCount < limit) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordReader.java
----------------------------------------------------------------------
diff --git a/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordReader.java b/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordReader.java
index cbf93b8..c4b501d 100644
--- a/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordReader.java
+++ b/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordReader.java
@@ -414,9 +414,6 @@ public class CarbonStreamRecordReader extends RecordReader<Void, Object> {
 
   private boolean isScanRequired(BlockletHeader header) {
     // TODO require to implement min-max index
-    if (null == filter) {
-      return true;
-    }
     return true;
   }
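The deleted null-check was dead code: with or without a filter, the method returns true until the min-max index mentioned in the TODO exists. Once it does, a pruning check could plausibly take the following shape (entirely hypothetical, not the project's API):

// Hypothetical min-max pruning: scan a blocklet only if the filter value can
// fall inside the [min, max] range recorded for the filtered column.
private boolean isScanRequired(byte[] filterValue, byte[] min, byte[] max,
    java.util.Comparator<byte[]> comparator) {
  return comparator.compare(min, filterValue) <= 0
      && comparator.compare(filterValue, max) <= 0;
}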
 


[4/4] carbondata git commit: [CARBONDATA-2489] Coverity scan fixes

Posted by gv...@apache.org.
[CARBONDATA-2489] Coverity scan fixes

  https://scan4.coverity.com/reports.htm#v29367/p11911

  This closes #2313


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/7ef91645
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/7ef91645
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/7ef91645

Branch: refs/heads/master
Commit: 7ef916455d8b490f3e32efd3a0bfeb80ab9127f1
Parents: f184de8
Author: Raghunandan S <ca...@gmail.com>
Authored: Sun Aug 27 23:37:05 2017 +0530
Committer: Venkata Ramana G <ra...@huawei.com>
Committed: Mon May 21 14:02:24 2018 +0530

----------------------------------------------------------------------
 README.md                                       |  21 +-
 .../impl/ExtendedRollingFileAppender.java       |   9 +-
 .../cache/dictionary/ColumnDictionaryInfo.java  |   3 +
 .../dictionary/DoubleArrayTrieDictionary.java   | 387 ---------------
 .../dictionary/ForwardDictionaryCache.java      |   4 +-
 .../core/constants/CarbonCommonConstants.java   |   2 +-
 .../core/datamap/AbstractDataMapJob.java        |   9 -
 .../carbondata/core/datamap/DataMapChooser.java |   4 +-
 .../core/datamap/DataMapStoreManager.java       |   7 +-
 .../core/datamap/dev/BlockletSerializer.java    |  14 +-
 .../chunk/impl/AbstractDimensionColumnPage.java |   8 +
 .../impl/ColumnGroupDimensionColumnPage.java    | 194 --------
 .../impl/FixedLengthDimensionColumnPage.java    |   5 +-
 .../impl/VariableLengthDimensionColumnPage.java |   5 +-
 ...mpressedDimensionChunkFileBasedReaderV1.java |  12 +-
 ...mpressedDimensionChunkFileBasedReaderV2.java |  12 +-
 ...mpressedDimensionChunkFileBasedReaderV3.java |   4 +-
 .../AbstractMeasureChunkReaderV2V3Format.java   |  12 +-
 .../core/datastore/compression/Compressor.java  |  10 -
 .../datastore/compression/SnappyCompressor.java |  74 +--
 .../filesystem/AbstractDFSCarbonFile.java       |   4 +-
 .../impl/btree/AbstractBTreeLeafNode.java       |   2 +-
 .../datastore/page/UnsafeDecimalColumnPage.java |  20 +-
 .../page/UnsafeFixLengthColumnPage.java         |  70 +--
 .../page/encoding/EncodingFactory.java          |   3 +
 .../page/statistics/KeyPageStatsCollector.java  |  17 +-
 .../page/statistics/LVStringStatsCollector.java |  21 +-
 .../IncrementalColumnDictionaryGenerator.java   |   4 +-
 .../generator/TableDictionaryGenerator.java     |   8 +-
 .../blockletindex/BlockletDataMapFactory.java   |  12 +-
 .../blockletindex/SegmentIndexFileStore.java    |  15 +-
 .../carbondata/core/locks/ZookeeperInit.java    |  10 +-
 .../core/memory/UnsafeMemoryManager.java        |  11 +-
 .../core/metadata/datatype/ArrayType.java       |  34 +-
 .../core/metadata/datatype/DecimalType.java     |  31 ++
 .../core/metadata/datatype/StructType.java      |  25 +
 .../schema/table/column/ColumnSchema.java       |  38 +-
 .../carbondata/core/preagg/TimeSeriesUDF.java   |   2 +-
 .../CarbonDictionaryMetadataReaderImpl.java     |   6 +
 .../RestructureBasedRawResultCollector.java     |   2 +-
 .../impl/SearchModeDetailQueryExecutor.java     |   4 +-
 .../SearchModeVectorDetailQueryExecutor.java    |   4 +-
 .../scan/filter/FilterExpressionProcessor.java  |   6 +-
 .../carbondata/core/scan/filter/FilterUtil.java |  89 ++--
 .../ExcludeColGroupFilterExecuterImpl.java      |  48 --
 .../IncludeColGroupFilterExecuterImpl.java      | 232 ---------
 .../executer/RangeValueFilterExecuterImpl.java  |   2 +-
 .../executer/RestructureEvaluatorImpl.java      |   1 -
 .../executer/RowLevelFilterExecuterImpl.java    |  20 +-
 .../RowLevelRangeGrtThanFiterExecuterImpl.java  |   7 +-
 ...elRangeGrtrThanEquaToFilterExecuterImpl.java |   4 +-
 ...velRangeLessThanEqualFilterExecuterImpl.java |   4 +-
 ...RowLevelRangeLessThanFilterExecuterImpl.java |   4 +-
 .../resolver/ConditionalFilterResolverImpl.java |   1 -
 .../AbstractDetailQueryResultIterator.java      |   9 +-
 .../scanner/impl/BlockletFilterScanner.java     |   4 +
 .../core/statusmanager/LoadMetadataDetails.java |   8 +-
 .../SegmentUpdateStatusManager.java             |  60 +--
 .../util/AbstractDataFileFooterConverter.java   |   7 +-
 .../core/util/CarbonMetadataUtil.java           |   2 +-
 .../carbondata/core/util/DataTypeUtil.java      |  12 +-
 .../core/util/path/CarbonTablePath.java         |  14 +-
 .../impl/ColumnGroupDimensionDataChunkTest.java | 118 -----
 .../filesystem/AlluxioCarbonFileTest.java       |   3 +
 .../filesystem/ViewFsCarbonFileTest.java        |   9 +-
 .../apache/carbondata/hadoop/CacheClient.java   |  49 --
 .../hadoop/api/CarbonOutputCommitter.java       |   3 +
 .../hadoop/internal/segment/Segment.java        |  23 -
 .../hive/CarbonDictionaryDecodeReadSupport.java |  11 +-
 .../hive/MapredCarbonInputFormat.java           |  18 +-
 .../presto/CarbondataSplitManager.java          |  27 +-
 .../presto/impl/CarbonLocalInputSplit.java      |  14 +-
 .../presto/readers/BooleanStreamReader.java     |   6 +-
 .../presto/readers/DoubleStreamReader.java      |   8 +-
 .../presto/readers/IntegerStreamReader.java     |   8 +-
 .../presto/readers/LongStreamReader.java        |   8 +-
 .../presto/readers/ObjectStreamReader.java      |  16 +-
 .../presto/readers/ShortStreamReader.java       |  10 +-
 .../presto/readers/SliceStreamReader.java       |   8 +-
 .../presto/readers/TimestampStreamReader.java   |  11 +-
 .../spark/sql/common/util/QueryTest.scala       |   4 +-
 .../server/SecureDictionaryServer.java          |   4 +-
 .../spark/rdd/CarbonCleanFilesRDD.scala         |  83 ----
 .../spark/rdd/CarbonDeleteLoadByDateRDD.scala   |  89 ----
 .../spark/rdd/CarbonDeleteLoadRDD.scala         |  84 ----
 .../spark/rdd/CarbonDropTableRDD.scala          |  71 ---
 .../apache/spark/sql/test/util/QueryTest.scala  |   9 +
 .../VectorizedCarbonRecordReader.java           |  24 +-
 .../processing/loading/BadRecordsLogger.java    |  19 +-
 .../loading/sort/impl/ThreadStatusObserver.java |  15 +-
 .../UnsafeBatchParallelReadMergeSorterImpl.java |   4 +-
 .../loading/sort/unsafe/UnsafeSortDataRows.java |   3 +-
 .../UnsafeInMemoryIntermediateDataMerger.java   |   7 +-
 .../merger/UnsafeIntermediateFileMerger.java    |   8 +-
 .../unsafe/merger/UnsafeIntermediateMerger.java |  20 +-
 .../UnsafeSingleThreadFinalSortFilesMerger.java |   9 +-
 .../loading/steps/InputProcessorStepImpl.java   |   4 +
 .../processing/merger/CarbonDataMergerUtil.java |  82 ++--
 .../merger/RowResultMergerProcessor.java        |  32 +-
 .../partition/impl/QueryPartitionHelper.java    |  74 ---
 .../sort/sortdata/IntermediateFileMerger.java   |   7 +-
 .../SingleThreadFinalSortFilesMerger.java       |   9 +-
 .../store/writer/AbstractFactDataWriter.java    |  32 +-
 .../processing/util/CarbonQueryUtil.java        |  80 ----
 .../carbondata/processing/StoreCreator.java     | 469 -------------------
 .../sdk/file/CarbonReaderBuilder.java           |   1 +
 .../carbondata/store/LocalCarbonStore.java      |  34 +-
 .../store/worker/SearchRequestHandler.java      |   3 +-
 .../streaming/CarbonStreamRecordReader.java     |   3 -
 109 files changed, 686 insertions(+), 2620 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index 4b4577e..be3186c 100644
--- a/README.md
+++ b/README.md
@@ -1,17 +1,17 @@
 <!--
-    Licensed to the Apache Software Foundation (ASF) under one or more 
+    Licensed to the Apache Software Foundation (ASF) under one or more
     contributor license agreements.  See the NOTICE file distributed with
-    this work for additional information regarding copyright ownership. 
+    this work for additional information regarding copyright ownership.
     The ASF licenses this file to you under the Apache License, Version 2.0
-    (the "License"); you may not use this file except in compliance with 
+    (the "License"); you may not use this file except in compliance with
     the License.  You may obtain a copy of the License at
 
       http://www.apache.org/licenses/LICENSE-2.0
 
-    Unless required by applicable law or agreed to in writing, software 
-    distributed under the License is distributed on an "AS IS" BASIS, 
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    See the License for the specific language governing permissions and 
+    See the License for the specific language governing permissions and
     limitations under the License.
 -->
 
@@ -31,6 +31,15 @@ Visit count: [![HitCount](http://hits.dwyl.io/jackylk/apache/carbondata.svg)](ht
 Spark2.2:
 [![Build Status](https://builds.apache.org/buildStatus/icon?job=carbondata-master-spark-2.2)](https://builds.apache.org/view/A-D/view/CarbonData/job/carbondata-master-spark-2.2/lastBuild/testReport)
 [![Coverage Status](https://coveralls.io/repos/github/apache/carbondata/badge.svg?branch=master)](https://coveralls.io/github/apache/carbondata?branch=master)
+<a href="https://scan.coverity.com/projects/carbondata">
+  <img alt="Coverity Scan Build Status"
+       src="https://scan.coverity.com/projects/13444/badge.svg"/>
+</a>
+## Features
+CarbonData is a columnar file format for HDFS. It offers the features expected of a modern columnar format, such as splittability, compression, and complex data types, and adds the following unique capabilities:
+* Stores data along with index: the index significantly accelerates query performance and reduces I/O scans and CPU usage when the query contains filters. The CarbonData index consists of multiple levels of indices; a processing framework can leverage it to reduce the number of tasks it schedules, and to skip-scan at a finer-grained unit (called a blocklet) during task-side scanning instead of reading the whole file.
+* Operable encoded data: by supporting efficient compression and global encoding schemes, queries can run directly on compressed/encoded data; the data is converted only just before the results are returned to the user ("late materialization").
+* Supports various use cases with one single data format: e.g., interactive OLAP-style queries, sequential access (big scans), and random access (narrow scans).
 
 ## Building CarbonData
 CarbonData is built using Apache Maven, to [build CarbonData](https://github.com/apache/carbondata/blob/master/build)
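To make the feature list above concrete, a minimal read through the SDK (whose CarbonReaderBuilder this commit also touches) looks roughly like the sketch below; the builder method names are recalled from the SDK of this era and should be treated as assumptions:

import java.io.IOException;
import org.apache.carbondata.sdk.file.CarbonReader;

public class ReadExample {
  public static void main(String[] args) throws IOException, InterruptedException {
    // Project only the needed columns; the multi-level index lets readers
    // skip whole blocklets whose min-max ranges cannot match a filter.
    CarbonReader reader = CarbonReader.builder("/tmp/carbon_table", "_temp")
        .projection(new String[]{"name", "salary"})
        .build();
    while (reader.hasNext()) {
      Object[] row = (Object[]) reader.readNextRow();
      System.out.println(row[0] + ", " + row[1]);
    }
    reader.close();
  }
}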

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java b/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java
index 828dd14..089865b 100644
--- a/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java
+++ b/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java
@@ -50,7 +50,7 @@ public class ExtendedRollingFileAppender extends RollingFileAppender {
    */
 
   private long nextRollover = 0;
-  private boolean cleanupInProgress = false;
+  private volatile boolean cleanupInProgress = false;
 
   /**
    * Total number of files at any point of time should be Backup number of
@@ -195,7 +195,9 @@ public class ExtendedRollingFileAppender extends RollingFileAppender {
     }
 
     // Do clean up finally
-    cleanUpLogs(startName, folderPath);
+    if (!cleanupInProgress) {
+      cleanUpLogs(startName, folderPath);
+    }
   }
 
   private void cleanUpLogs(final String startName, final String folderPath) {
@@ -204,9 +206,6 @@ public class ExtendedRollingFileAppender extends RollingFileAppender {
       Runnable r = new Runnable() {
 
         public void run() {
-          if (cleanupInProgress) {
-            return;
-          }
           synchronized (ExtendedRollingFileAppender.class) {
             cleanupInProgress = true;
             try {

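The appender fix pairs a volatile flag with a synchronized region: the volatile read gives the new early-exit check cross-thread visibility, while the lock still makes the check-then-act atomic. The idiom, distilled into a standalone sketch (names illustrative):

public final class CleanupGuard {
  // volatile so the lock-free fast-path read sees writes from other threads
  private volatile boolean cleanupInProgress = false;

  public void maybeCleanUp(Runnable cleanup) {
    if (cleanupInProgress) {
      return; // cheap early exit without taking the lock
    }
    synchronized (this) {
      if (cleanupInProgress) { // re-check under the lock: check-then-act must be atomic
        return;
      }
      cleanupInProgress = true;
    }
    try {
      cleanup.run();
    } finally {
      cleanupInProgress = false;
    }
  }
}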
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
index 3b915e0..ad1d201 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
@@ -193,6 +193,9 @@ public class ColumnDictionaryInfo extends AbstractColumnDictionaryInfo {
       int mid = (low + high) >>> 1;
       int surrogateKey = sortedSurrogates.get(mid);
       byte[] dictionaryValue = getDictionaryBytesFromSurrogate(surrogateKey);
+      if (null == dictionaryValue) {
+        return CarbonCommonConstants.INVALID_SURROGATE_KEY;
+      }
       int cmp = -1;
       if (this.getDataType() != DataTypes.STRING) {
         cmp = compareFilterKeyWithDictionaryKey(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
deleted file mode 100644
index ef36d7a..0000000
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
+++ /dev/null
@@ -1,387 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.cache.dictionary;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.nio.charset.Charset;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-
-/**
- * A dictionary based on DoubleArrayTrie data structure that maps enumerations
- * of byte[] to int IDs. With DoubleArrayTrie the memory footprint of the mapping
- * is minimize,d if compared to HashMap.
- * This DAT implementation is inspired by https://linux.thai.net/~thep/datrie/datrie.html
- */
-
-public class DoubleArrayTrieDictionary {
-  private static final byte[] HEAD_MAGIC = new byte[]{
-      0x44, 0x41, 0x54, 0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74
-  }; // "DATTrieDict"
-  private static final int HEAD_LEN = HEAD_MAGIC.length;
-
-  private static final int INIT_CAPA_VALUE = 256;  // init len of double array
-  private static final int BASE_ROOT_VALUE = 1;    // root base value of trie root
-  private static final int CHCK_ROOT_VALUE = -1;   // root check value of trie root
-  private static final int UUSD_ROOM_VALUE = -2;   // unused position, only for zero
-  private static final int EPTY_BACK_VALUE = 0;    // value of empty position
-
-  private static final int ENCODE_BASE_VALUE = 10; // encode start number
-
-  private int[] base;
-  private int[] check;
-  private int size;
-  private int capacity;
-
-  private int id = ENCODE_BASE_VALUE;
-
-  public DoubleArrayTrieDictionary() {
-    base = new int[INIT_CAPA_VALUE];
-    check = new int[INIT_CAPA_VALUE];
-    capacity = INIT_CAPA_VALUE;
-    base[0] = UUSD_ROOM_VALUE;
-    check[0] = UUSD_ROOM_VALUE;
-    base[1] = BASE_ROOT_VALUE;
-    check[1] = CHCK_ROOT_VALUE;
-    size = 2;
-  }
-
-  private void init(int capacity, int size, int[] base, int[] check) {
-    int blen = base.length;
-    int clen = check.length;
-    if (capacity < size || size < 0 || blen != clen) {
-      throw new IllegalArgumentException("Illegal init parameters");
-    }
-    this.base = new int[capacity];
-    this.check = new int[capacity];
-    this.capacity = capacity;
-    System.arraycopy(base, 0, this.base, 0, blen);
-    System.arraycopy(check, 0, this.check, 0, clen);
-    this.size = size;
-  }
-
-  public void clear() {
-    base = null;
-    check = null;
-    size = 0;
-    capacity = 0;
-  }
-
-  private int reSize(int newCapacity) {
-    if (newCapacity < capacity) {
-      return capacity;
-    }
-    int[] newBase = new int[newCapacity];
-    int[] newCheck = new int[newCapacity];
-    if (capacity > 0) {
-      System.arraycopy(base, 0, newBase, 0, capacity);
-      System.arraycopy(check, 0, newCheck, 0, capacity);
-    }
-    base = newBase;
-    check = newCheck;
-    capacity = newCapacity;
-    return capacity;
-  }
-
-  public int getSize() {
-    return size;
-  }
-
-  public int getCapacity() {
-    return capacity;
-  }
-
-  /**
-   * Get apply value of key
-   *
-   * @param key
-   * @return
-   */
-  public int getValue(String key) {
-    String k = key + '\0';
-    byte[] bKeys = k.getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
-    return getValue(bKeys);
-  }
-
-  /**
-   * Get apply value of bKeys
-   *
-   * @param bKeys
-   * @return
-   */
-  private int getValue(byte[] bKeys) {
-    int from = 1;
-    int to;
-    int current;
-    int len = bKeys.length;
-    if (size == 0) return -1;
-    for (int i = 0; i < len; i++) {
-      current = bKeys[i] & 0xFF;
-      to = base[from] + current;
-      if (check[to] != from) return -1;
-      int baseValue = base[to];
-      if (baseValue <= -ENCODE_BASE_VALUE) {
-        if (i == len - 1) {
-          return -1 * baseValue;
-        } else {
-          return -1;
-        }
-      }
-      from = to;
-
-    }
-    return -1;
-  }
-
-  /**
-   * Get all children of one node
-   *
-   * @param pos
-   * @return
-   */
-  private TreeSet<Integer> getChildren(int pos) {
-    TreeSet<Integer> children = new TreeSet<Integer>();
-    for (int i = 0; i < 0xFF; i++) {
-      int cpos = base[pos] + i;
-      if (cpos >= size) break;
-      if (cpos < 0) {
-        return null;
-      }
-      if (check[cpos] == pos) {
-        children.add(i);
-      }
-    }
-    return children;
-  }
-
-  /**
-   * @TODO: need to optimize performance
-   *
-   * Find multiple free positions for {values};
-   * the distance between free positions should be the same as in {values}
-   *
-   * @param values
-   * @return
-   */
-  private int findFreeRoom(SortedSet<Integer> values) {
-    int min = values.first();
-    int max = values.last();
-    for (int i = min + 1; i < capacity; i++) {
-      if (i + max >= capacity) {
-        reSize(capacity + values.size());
-      }
-      int res = 0;
-      for (Integer v : values) {
-        res = res | base[v - min + i];
-      }
-      if (res == EPTY_BACK_VALUE) return i - min;
-    }
-    return -1;
-  }
-
-  /**
-   * Find one empty position for value
-   *
-   * @param value
-   * @return
-   */
-  private int findAvailableHop(int value) {
-    reSize(size + 1);
-    int result = size - 1;
-    for (int i = value + 1; i < capacity; i++) {
-      if (base[i] == EPTY_BACK_VALUE) {
-        result = i - value;
-        break;
-      }
-    }
-    return result;
-  }
-
-  /**
-   * Resolve when conflict and reset current node and its children.
-   *
-   * @param start current conflict position
-   * @param bKey current byte value which for processing
-   * @return
-   */
-  private int conflict(int start, int bKey) {
-    int from = start;
-    TreeSet<Integer> children = getChildren(from);
-    children.add(bKey);
-    int newBasePos = findFreeRoom(children);
-    children.remove(bKey);
-
-    int oldBasePos = base[start];
-    base[start] = newBasePos;
-
-    int oldPos, newPos;
-    for (Integer child : children) {
-      oldPos = oldBasePos + child;
-      newPos = newBasePos + child;
-      if (oldPos == from) from = newPos;
-      base[newPos] = base[oldPos];
-      check[newPos] = check[oldPos];
-      if (newPos >= size) size = newPos + 1;
-      if (base[oldPos] > 0) {
-        TreeSet<Integer> cs = getChildren(oldPos);
-        for (Integer c : cs) {
-          check[base[oldPos] + c] = newPos;
-        }
-      }
-      base[oldPos] = EPTY_BACK_VALUE;
-      check[oldPos] = EPTY_BACK_VALUE;
-    }
-    return from;
-  }
-
-  /**
-   * Insert element (byte[]) into DAT.
-   * 1. if the element is already in the DAT, return.
-   * 2. if the target position is empty, insert directly.
-   * 3. if there is a conflict, resolve it.
-   *
-   * @param bKeys
-   * @return
-   */
-  private boolean insert(byte[] bKeys) {
-    int from = 1;
-    int klen = bKeys.length;
-    for (int i = 0; i < klen; i++) {
-      int c = bKeys[i] & 0xFF;
-      int to = base[from] + c;
-      reSize((int) (to * 1.2) + 1);
-      if (check[to] == from) {
-        if (i == klen - 1) return true;
-        from = to;
-      } else if (check[to] == EPTY_BACK_VALUE) {
-        check[to] = from;
-        if (i == klen - 1) {
-          base[to] = -id;
-          id = id + 1;
-          return true;
-        } else {
-          int next = bKeys[i + 1] & 0xFF;
-          base[to] = findAvailableHop(next);
-          from = to;
-        }
-        if (to >= size) size = to + 1;
-      } else {
-        int rConflict = conflict(from, c);
-        int locate = base[rConflict] + c;
-        if (check[locate] != EPTY_BACK_VALUE) {
-          System.err.println("conflict");
-        }
-        check[locate] = rConflict;
-        if (i == klen - 1) {
-          base[locate] = -id;
-          id = id + 1;
-        } else {
-          int nah = bKeys[i + 1] & 0xFF;
-          base[locate] = findAvailableHop(nah);
-        }
-        if (locate >= size) size = locate + 1;
-        from = locate;
-        if (i == klen - 1) {
-          return true;
-        }
-      }
-    }
-    return false;
-  }
-
-  /**
-   * Insert element (String) into DAT, the element will be transformed to
-   * byte[] firstly then insert into DAT.
-   *
-   * @param key
-   * @return
-   */
-  public boolean insert(String key) {
-    String k = key + '\0';
-    byte[] bKeys = k.getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
-    if (!insert(bKeys)) {
-      return false;
-    }
-    return true;
-  }
-
-  /**
-   * Serialize the DAT to data output stream
-   *
-   * @param out
-   * @throws IOException
-   */
-  public void write(DataOutputStream out) throws IOException {
-    out.write(HEAD_MAGIC);
-    out.writeInt(capacity);
-    out.writeInt(size);
-    for (int i = 0; i < size; i++) {
-      out.writeInt(base[i]);
-    }
-    for (int i = 0; i < size; i++) {
-      out.writeInt(check[i]);
-    }
-  }
-
-  /**
-   * Deserialize the DAT from data input stream
-   *
-   * @param in
-   * @throws IOException
-   */
-  public void read(DataInputStream in) throws IOException {
-    byte[] header = new byte[HEAD_LEN];
-    in.readFully(header);
-    int comp = 0;
-    for (int i = 0; i < HEAD_LEN; i++) {
-      comp = HEAD_MAGIC[i] - header[i];
-      if (comp != 0) break;
-    }
-    if (comp != 0) throw new IllegalArgumentException("Illegal file type");
-    int capacity = in.readInt();
-    int size = in.readInt();
-    if (capacity < size || size < 0) throw new IllegalArgumentException("Illegal parameters");
-    int[] base = new int[size];
-    int[] check = new int[size];
-    for (int i = 0; i < size; i++) {
-      base[i] = in.readInt();
-    }
-    for (int i = 0; i < size; i++) {
-      check[i] = in.readInt();
-    }
-    init(capacity, size, base, check);
-  }
-
-  /**
-   * Dump double array value about Trie
-   */
-  public void dump(PrintStream out) {
-    out.println("Capacity = " + capacity + ", Size = " + size);
-    for (int i = 0; i < size; i++) {
-      if (base[i] != EPTY_BACK_VALUE) {
-        out.print(i + ":[" + base[i] + "," + check[i] + "], ");
-      }
-    }
-    out.println();
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
index f43e21b..55a1c05 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
@@ -19,10 +19,10 @@ package org.apache.carbondata.core.cache.dictionary;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
@@ -51,7 +51,7 @@ public class ForwardDictionaryCache<K extends
       LogServiceFactory.getLogService(ForwardDictionaryCache.class.getName());
 
   private static final Map<DictionaryColumnUniqueIdentifier, Object> DICTIONARY_LOCK_OBJECT =
-      new HashMap<>();
+      new ConcurrentHashMap<>();
 
   private static final long sizeOfEmptyDictChunks =
       ObjectSizeCalculator.estimate(new ArrayList<byte[]>(CarbonUtil.getDictionaryChunkSize()), 16);
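Replacing the HashMap of lock objects with a ConcurrentHashMap removes a race in which two concurrent loads could register different locks for the same dictionary column. The usual per-key lock idiom on top of it is computeIfAbsent, sketched here with illustrative names:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public final class PerKeyLocks<K> {
  private final ConcurrentMap<K, Object> locks = new ConcurrentHashMap<>();

  // Atomically get-or-create the lock object for a key; unlike a plain
  // HashMap, concurrent callers are guaranteed the same instance.
  public Object lockFor(K key) {
    return locks.computeIfAbsent(key, k -> new Object());
  }
}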

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index f3a821b..5ba1fec 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -1003,7 +1003,7 @@ public final class CarbonCommonConstants {
   /**
    * Indicates alter partition
    */
-  public static String ALTER_PARTITION_KEY_WORD = "ALTER_PARTITION";
+  public static final String ALTER_PARTITION_KEY_WORD = "ALTER_PARTITION";
 
   /**
    * hdfs temporary directory key

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java b/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
index bdbf9fc..7d1cb48 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
@@ -17,12 +17,8 @@
 
 package org.apache.carbondata.core.datamap;
 
-import java.util.List;
-
 import org.apache.carbondata.core.indexstore.BlockletDataMapIndexWrapper;
-import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
 
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 
@@ -34,9 +30,4 @@ public abstract class AbstractDataMapJob implements DataMapJob {
   @Override public void execute(CarbonTable carbonTable,
       FileInputFormat<Void, BlockletDataMapIndexWrapper> format) {
   }
-
-  @Override public List<ExtendedBlocklet> execute(DistributableDataMapFormat dataMapFormat,
-      FilterResolverIntf resolverIntf) {
-    return null;
-  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java
index 4d1c718..cf5dffd 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java
@@ -283,8 +283,8 @@ public class DataMapChooser {
       List<ColumnExpression> columnExpressions, Set<ExpressionType> expressionTypes) {
     List<DataMapTuple> tuples = new ArrayList<>();
     for (TableDataMap dataMap : allDataMap) {
-      if (contains(dataMap.getDataMapFactory().getMeta(), columnExpressions, expressionTypes))
-      {
+      if (null != dataMap.getDataMapFactory().getMeta() && contains(
+          dataMap.getDataMapFactory().getMeta(), columnExpressions, expressionTypes)) {
         tuples.add(
             new DataMapTuple(dataMap.getDataMapFactory().getMeta().getIndexedColumns().size(),
                 dataMap));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
index c739dc3..1359e85 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
@@ -207,8 +207,8 @@ public final class DataMapStoreManager {
    * @param providerName
    * @return
    */
-  public DataMapCatalog getDataMapCatalog(DataMapProvider dataMapProvider, String providerName)
-      throws IOException {
+  public synchronized DataMapCatalog getDataMapCatalog(DataMapProvider dataMapProvider,
+      String providerName) throws IOException {
     intializeDataMapCatalogs(dataMapProvider);
     return dataMapCatalogs.get(providerName);
   }
@@ -225,6 +225,9 @@ public final class DataMapStoreManager {
         DataMapCatalog dataMapCatalog = dataMapCatalogs.get(schema.getProviderName());
         if (dataMapCatalog == null) {
           dataMapCatalog = dataMapProvider.createDataMapCatalog();
+          if (null == dataMapCatalog) {
+            throw new RuntimeException("Internal Error.");
+          }
           dataMapCatalogs.put(schema.getProviderName(), dataMapCatalog);
         }
         try {
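
Making getDataMapCatalog synchronized stops two threads from interleaving
inside intializeDataMapCatalogs and double-creating catalogs, and the added
null check fails fast instead of caching a null entry. A rough sketch of the
same guard under assumed names (CatalogRegistry and createCatalog are
stand-ins, not CarbonData API):

import java.util.HashMap;
import java.util.Map;

public class CatalogRegistry {
  private final Map<String, Object> catalogs = new HashMap<>();

  // synchronized: one thread at a time runs the create-and-register path,
  // so the plain HashMap below is never mutated concurrently.
  public synchronized Object getCatalog(String provider) {
    Object catalog = catalogs.get(provider);
    if (catalog == null) {
      catalog = createCatalog(provider);
      if (catalog == null) {
        // fail fast here rather than return null to every future caller
        throw new IllegalStateException("catalog creation failed: " + provider);
      }
      catalogs.put(provider, catalog);
    }
    return catalog;
  }

  private Object createCatalog(String provider) {
    return new Object();  // stand-in for dataMapProvider.createDataMapCatalog()
  }
}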

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java
index bd5f994..ebcf972 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java
@@ -41,8 +41,11 @@ public class BlockletSerializer {
       throws IOException {
     DataOutputStream dataOutputStream =
         FileFactory.getDataOutputStream(writePath, FileFactory.getFileType(writePath));
-    grainBlocklet.write(dataOutputStream);
-    dataOutputStream.close();
+    try {
+      grainBlocklet.write(dataOutputStream);
+    } finally {
+      dataOutputStream.close();
+    }
   }
 
   /**
@@ -55,8 +58,11 @@ public class BlockletSerializer {
     DataInputStream inputStream =
         FileFactory.getDataInputStream(writePath, FileFactory.getFileType(writePath));
     FineGrainBlocklet blocklet = new FineGrainBlocklet();
-    blocklet.readFields(inputStream);
-    inputStream.close();
+    try {
+      blocklet.readFields(inputStream);
+    } finally {
+      inputStream.close();
+    }
     return blocklet;
   }
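
Both methods now close their stream in a finally block, so an exception in
write or readFields no longer leaks the file handle. On Java 7 and later the
same guarantee reads more compactly with try-with-resources; a minimal sketch
using plain java.io rather than the FileFactory API:

import java.io.DataOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;

public class SerializeDemo {
  static void write(byte[] payload, String path) throws IOException {
    // The stream is closed automatically on normal exit and on exception.
    try (DataOutputStream out = new DataOutputStream(new FileOutputStream(path))) {
      out.write(payload);
    }
  }
}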
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
index 91e55dc..d400952 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
@@ -29,6 +29,14 @@ public abstract class AbstractDimensionColumnPage implements DimensionColumnPage
    */
   DimensionDataChunkStore dataChunkStore;
 
+
+  /**
+   * @return whether data is explicitly sorted or not
+   */
+  protected boolean isExplicitSorted(int[] invertedIndex) {
+    return null != invertedIndex && invertedIndex.length > 0;
+  }
+
   /**
   * @return whether columns were explicitly sorted or not
    */

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionColumnPage.java
deleted file mode 100644
index 741c13d..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionColumnPage.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.datastore.chunk.impl;
-
-import org.apache.carbondata.core.datastore.chunk.store.DimensionChunkStoreFactory;
-import org.apache.carbondata.core.datastore.chunk.store.DimensionChunkStoreFactory.DimensionStoreType;
-import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
-
-/**
- * This class gives access to the column group dimension data chunk store
- */
-public class ColumnGroupDimensionColumnPage extends AbstractDimensionColumnPage {
-
-  /**
-   * Constructor for this class
-   *
-   * @param dataChunk       data chunk
-   * @param columnValueSize chunk attributes
-   * @param numberOfRows
-   */
-  public ColumnGroupDimensionColumnPage(byte[] dataChunk, int columnValueSize, int numberOfRows) {
-    this.dataChunkStore = DimensionChunkStoreFactory.INSTANCE
-        .getDimensionChunkStore(columnValueSize, false, numberOfRows, dataChunk.length,
-        DimensionStoreType.FIXEDLENGTH);
-    this.dataChunkStore.putArray(null, null, dataChunk);
-  }
-
-  /**
-   * Below method will be used to fill the data based on offset and row id
-   *
-   * @param rowId             row id of the chunk
-   * @param offset            offset from which data needs to be filled
-   * @param data              data to be filled
-   * @param restructuringInfo define the structure of the key
-   * @return how many bytes were copied
-   */
-  @Override public int fillRawData(int rowId, int offset, byte[] data,
-      KeyStructureInfo restructuringInfo) {
-    byte[] row = dataChunkStore.getRow(rowId);
-    byte[] maskedKey = getMaskedKey(row, restructuringInfo);
-    System.arraycopy(maskedKey, 0, data, offset, maskedKey.length);
-    return maskedKey.length;
-  }
-
-  /**
-   * Converts to column dictionary integer value
-   *
-   * @param rowId
-   * @param chunkIndex
-   * @param outputSurrogateKey
-   * @param info          KeyStructureInfo
-   * @return
-   */
-  @Override public int fillSurrogateKey(int rowId, int chunkIndex, int[] outputSurrogateKey,
-      KeyStructureInfo info) {
-    byte[] data = dataChunkStore.getRow(rowId);
-    long[] keyArray = info.getKeyGenerator().getKeyArray(data);
-    int[] ordinal = info.getMdkeyQueryDimensionOrdinal();
-    for (int i = 0; i < ordinal.length; i++) {
-      outputSurrogateKey[chunkIndex++] = (int) keyArray[ordinal[i]];
-    }
-    return chunkIndex;
-  }
-
-  /**
-   * Below method will be used to get the masked key
-   *
-   * @param data   data
-   * @param info
-   * @return
-   */
-  private byte[] getMaskedKey(byte[] data, KeyStructureInfo info) {
-    byte[] maskedKey = new byte[info.getMaskByteRanges().length];
-    int counter = 0;
-    int byteRange = 0;
-    for (int i = 0; i < info.getMaskByteRanges().length; i++) {
-      byteRange = info.getMaskByteRanges()[i];
-      maskedKey[counter++] = (byte) (data[byteRange] & info.getMaxKey()[byteRange]);
-    }
-    return maskedKey;
-  }
-
-  /**
-   * @return inverted index
-   */
-  @Override public int getInvertedIndex(int rowId) {
-    throw new UnsupportedOperationException("Operation not supported in case of column group");
-  }
-
-  /**
-   * @param rowId
-   * @return inverted index reverse
-   */
-  @Override public int getInvertedReverseIndex(int rowId) {
-    throw new UnsupportedOperationException("Operation not supported in case of column group");
-  }
-
-  /**
-   * @return whether columns were explicitly sorted or not
-   */
-  @Override public boolean isExplicitSorted() {
-    return false;
-  }
-
-  /**
-   * to compare the data
-   *
-   * @param rowId        row index to be compared
-   * @param compareValue value to compare
-   * @return compare result
-   */
-  @Override public int compareTo(int rowId, byte[] compareValue) {
-    throw new UnsupportedOperationException("Operation not supported in case of column group");
-  }
-
-  /**
-   * Fill the data to vector
-   *
-   * @param vectorInfo
-   * @param chunkIndex
-   * @param restructuringInfo
-   * @return next column index
-   */
-  @Override public int fillVector(ColumnVectorInfo[] vectorInfo, int chunkIndex,
-      KeyStructureInfo restructuringInfo) {
-    ColumnVectorInfo columnVectorInfo = vectorInfo[chunkIndex];
-    int offset = columnVectorInfo.offset;
-    int vectorOffset = columnVectorInfo.vectorOffset;
-    int len = offset + columnVectorInfo.size;
-    int[] ordinal = restructuringInfo.getMdkeyQueryDimensionOrdinal();
-    for (int k = offset; k < len; k++) {
-      long[] keyArray = restructuringInfo.getKeyGenerator().getKeyArray(dataChunkStore.getRow(k));
-      int index = 0;
-      for (int i = chunkIndex; i < chunkIndex + ordinal.length; i++) {
-        if (vectorInfo[i].directDictionaryGenerator == null) {
-          vectorInfo[i].vector.putInt(vectorOffset, (int) keyArray[ordinal[index++]]);
-        } else {
-          vectorInfo[i].vector.putLong(vectorOffset, (long) vectorInfo[i].directDictionaryGenerator
-              .getValueFromSurrogate((int) keyArray[ordinal[index++]]));
-        }
-      }
-      vectorOffset++;
-    }
-    return chunkIndex + ordinal.length;
-  }
-
-  /**
-   * Fill the data to vector
-   *
-   * @param filteredRowId
-   * @param vectorInfo
-   * @param chunkIndex
-   * @param restructuringInfo
-   * @return next column index
-   */
-  @Override public int fillVector(int[] filteredRowId, ColumnVectorInfo[] vectorInfo,
-      int chunkIndex, KeyStructureInfo restructuringInfo) {
-    ColumnVectorInfo columnVectorInfo = vectorInfo[chunkIndex];
-    int offset = columnVectorInfo.offset;
-    int vectorOffset = columnVectorInfo.vectorOffset;
-    int len = offset + columnVectorInfo.size;
-    int[] ordinal = restructuringInfo.getMdkeyQueryDimensionOrdinal();
-    for (int k = offset; k < len; k++) {
-      long[] keyArray =
-          restructuringInfo.getKeyGenerator().getKeyArray(dataChunkStore.getRow(filteredRowId[k]));
-      int index = 0;
-      for (int i = chunkIndex; i < chunkIndex + ordinal.length; i++) {
-        if (vectorInfo[i].directDictionaryGenerator == null) {
-          vectorInfo[i].vector.putInt(vectorOffset, (int) keyArray[ordinal[index++]]);
-        } else {
-          vectorInfo[i].vector.putLong(vectorOffset, (long) vectorInfo[i].directDictionaryGenerator
-              .getValueFromSurrogate((int) keyArray[ordinal[index++]]));
-        }
-      }
-      vectorOffset++;
-    }
-    return chunkIndex + ordinal.length;
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java
index ff54b12..76bcf30 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java
@@ -41,11 +41,12 @@ public class FixedLengthDimensionColumnPage extends AbstractDimensionColumnPage
    */
   public FixedLengthDimensionColumnPage(byte[] dataChunk, int[] invertedIndex,
       int[] invertedIndexReverse, int numberOfRows, int columnValueSize) {
-    long totalSize = null != invertedIndex ?
+    boolean isExplicitSorted = isExplicitSorted(invertedIndex);
+    long totalSize = isExplicitSorted ?
         dataChunk.length + (2 * numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE) :
         dataChunk.length;
     dataChunkStore = DimensionChunkStoreFactory.INSTANCE
-        .getDimensionChunkStore(columnValueSize, null != invertedIndex, numberOfRows, totalSize,
+        .getDimensionChunkStore(columnValueSize, isExplicitSorted, numberOfRows, totalSize,
             DimensionStoreType.FIXEDLENGTH);
     dataChunkStore.putArray(invertedIndex, invertedIndexReverse, dataChunk);
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
index d03b2de..1c6b7f4 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
@@ -37,12 +37,13 @@ public class VariableLengthDimensionColumnPage extends AbstractDimensionColumnPa
    */
   public VariableLengthDimensionColumnPage(byte[] dataChunks, int[] invertedIndex,
       int[] invertedIndexReverse, int numberOfRows) {
-    long totalSize = null != invertedIndex ?
+    boolean isExplicitSorted = isExplicitSorted(invertedIndex);
+    long totalSize = isExplicitSorted ?
         (dataChunks.length + (2 * numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE) + (
             numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE)) :
         (dataChunks.length + (numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE));
     dataChunkStore = DimensionChunkStoreFactory.INSTANCE
-        .getDimensionChunkStore(0, null != invertedIndex, numberOfRows, totalSize,
+        .getDimensionChunkStore(0, isExplicitSorted, numberOfRows, totalSize,
             DimensionStoreType.VARIABLELENGTH);
     dataChunkStore.putArray(invertedIndex, invertedIndexReverse, dataChunks);
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
index 0dc1c1b..6679402 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
@@ -22,7 +22,6 @@ import java.util.List;
 
 import org.apache.carbondata.core.datastore.FileReader;
 import org.apache.carbondata.core.datastore.chunk.DimensionColumnPage;
-import org.apache.carbondata.core.datastore.chunk.impl.ColumnGroupDimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
 import org.apache.carbondata.core.datastore.chunk.impl.FixedLengthDimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.VariableLengthDimensionColumnPage;
@@ -102,8 +101,8 @@ public class CompressedDimensionChunkFileBasedReaderV1 extends AbstractChunkRead
       DimensionRawColumnChunk dimensionRawColumnChunk, int pageNumber) throws IOException {
     int blockIndex = dimensionRawColumnChunk.getColumnIndex();
     byte[] dataPage = null;
-    int[] invertedIndexes = null;
-    int[] invertedIndexesReverse = null;
+    int[] invertedIndexes = new int[0];
+    int[] invertedIndexesReverse = new int[0];
     int[] rlePage = null;
     FileReader fileReader = dimensionRawColumnChunk.getFileReader();
 
@@ -146,14 +145,9 @@ public class CompressedDimensionChunkFileBasedReaderV1 extends AbstractChunkRead
     }
     // fill chunk attributes
     DimensionColumnPage columnDataChunk = null;
-    if (dataChunk.isRowMajor()) {
-      // to store fixed length column chunk values
-      columnDataChunk = new ColumnGroupDimensionColumnPage(
-          dataPage, eachColumnValueSize[blockIndex], numberOfRows);
-    }
     // if no dictionary column then first create a no dictionary column chunk
     // and set to data chunk instance
-    else if (!CarbonUtil
+    if (!CarbonUtil
         .hasEncoding(dataChunk.getEncodingList(), Encoding.DICTIONARY)) {
       columnDataChunk =
           new VariableLengthDimensionColumnPage(dataPage, invertedIndexes, invertedIndexesReverse,
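
Initialising invertedIndexes to new int[0] instead of null gives downstream
code one uniform encoding for "no inverted index": a length check covers it
without a separate null branch, which is exactly what the new
isExplicitSorted(int[]) helper relies on. The idea in isolation:

public class EmptyArraySentinelDemo {
  static boolean isExplicitSorted(int[] invertedIndex) {
    // One condition covers both the null and the empty-array encoding.
    return invertedIndex != null && invertedIndex.length > 0;
  }

  public static void main(String[] args) {
    System.out.println(isExplicitSorted(new int[0]));       // false
    System.out.println(isExplicitSorted(new int[] {2, 0})); // true
  }
}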

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
index 31fa819..8938260 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
@@ -21,7 +21,6 @@ import java.nio.ByteBuffer;
 
 import org.apache.carbondata.core.datastore.FileReader;
 import org.apache.carbondata.core.datastore.chunk.DimensionColumnPage;
-import org.apache.carbondata.core.datastore.chunk.impl.ColumnGroupDimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
 import org.apache.carbondata.core.datastore.chunk.impl.FixedLengthDimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.VariableLengthDimensionColumnPage;
@@ -118,8 +117,8 @@ public class CompressedDimensionChunkFileBasedReaderV2 extends AbstractChunkRead
   public DimensionColumnPage decodeColumnPage(
       DimensionRawColumnChunk dimensionRawColumnChunk, int pageNumber) throws IOException {
     byte[] dataPage = null;
-    int[] invertedIndexes = null;
-    int[] invertedIndexesReverse = null;
+    int[] invertedIndexes = new int[0];
+    int[] invertedIndexesReverse = new int[0];
     int[] rlePage = null;
     DataChunk2 dimensionColumnChunk = null;
     int copySourcePoint = (int) dimensionRawColumnChunk.getOffSet();
@@ -171,14 +170,9 @@ public class CompressedDimensionChunkFileBasedReaderV2 extends AbstractChunkRead
     // fill chunk attributes
     DimensionColumnPage columnDataChunk = null;
 
-    if (dimensionColumnChunk.isRowMajor()) {
-      // to store fixed length column chunk values
-      columnDataChunk = new ColumnGroupDimensionColumnPage(
-          dataPage, eachColumnValueSize[blockIndex], numberOfRows);
-    }
     // if no dictionary column then first create a no dictionary column chunk
     // and set to data chunk instance
-    else if (!hasEncoding(dimensionColumnChunk.encoders, Encoding.DICTIONARY)) {
+    if (!hasEncoding(dimensionColumnChunk.encoders, Encoding.DICTIONARY)) {
       columnDataChunk =
           new VariableLengthDimensionColumnPage(dataPage, invertedIndexes, invertedIndexesReverse,
               numberOfRows);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
index 0fdc515..58a9b18 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
@@ -244,8 +244,8 @@ public class CompressedDimensionChunkFileBasedReaderV3 extends AbstractChunkRead
       ByteBuffer pageData, DataChunk2 pageMetadata, int offset) {
     byte[] dataPage;
     int[] rlePage;
-    int[] invertedIndexes = null;
-    int[] invertedIndexesReverse = null;
+    int[] invertedIndexes = new int[0];
+    int[] invertedIndexesReverse = new int[0];
     dataPage = COMPRESSOR.unCompressByte(pageData.array(), offset, pageMetadata.data_page_length);
     offset += pageMetadata.data_page_length;
     // if row id block is present then read the row id chunk and uncompress it

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
index a3ed339..7d59d47 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
@@ -103,11 +103,15 @@ public abstract class AbstractMeasureChunkReaderV2V3Format extends AbstractMeasu
    * @param presentMetadataThrift
    * @return wrapper presence meta
    */
-  protected BitSet getNullBitSet(
-      org.apache.carbondata.format.PresenceMeta presentMetadataThrift) {
+  protected BitSet getNullBitSet(org.apache.carbondata.format.PresenceMeta presentMetadataThrift) {
     Compressor compressor = CompressorFactory.getInstance().getCompressor();
-    return BitSet.valueOf(
-        compressor.unCompressByte(presentMetadataThrift.getPresent_bit_stream()));
+    final byte[] present_bit_stream = presentMetadataThrift.getPresent_bit_stream();
+    if (null != present_bit_stream) {
+      return BitSet
+          .valueOf(compressor.unCompressByte(present_bit_stream));
+    } else {
+      return new BitSet(1);
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java b/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java
index a32651a..5c2a5fb 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java
@@ -33,32 +33,22 @@ public interface Compressor {
 
   byte[] compressShort(short[] unCompInput);
 
-  short[] unCompressShort(byte[] compInput);
-
   short[] unCompressShort(byte[] compInput, int offset, int length);
 
   byte[] compressInt(int[] unCompInput);
 
-  int[] unCompressInt(byte[] compInput);
-
   int[] unCompressInt(byte[] compInput, int offset, int length);
 
   byte[] compressLong(long[] unCompInput);
 
-  long[] unCompressLong(byte[] compInput);
-
   long[] unCompressLong(byte[] compInput, int offset, int length);
 
   byte[] compressFloat(float[] unCompInput);
 
-  float[] unCompressFloat(byte[] compInput);
-
   float[] unCompressFloat(byte[] compInput, int offset, int length);
 
   byte[] compressDouble(double[] unCompInput);
 
-  double[] unCompressDouble(byte[] compInput);
-
   double[] unCompressDouble(byte[] compInput, int offset, int length);
 
   long rawCompress(long inputAddress, int inputSize, long outputAddress) throws IOException;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java b/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
index f234f80..65244d2 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
@@ -59,7 +59,7 @@ public class SnappyCompressor implements Compressor {
       return Snappy.rawCompress(unCompInput, unCompInput.length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
+      throw new RuntimeException(e);
     }
   }
 
@@ -68,7 +68,7 @@ public class SnappyCompressor implements Compressor {
       return Snappy.rawCompress(unCompInput, byteSize);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
+      throw new RuntimeException(e);
     }
   }
 
@@ -77,19 +77,20 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompress(compInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return compInput;
   }
 
   @Override public byte[] unCompressByte(byte[] compInput, int offset, int length) {
     int uncompressedLength = 0;
-    byte[] data = null;
+    byte[] data;
     try {
       uncompressedLength = Snappy.uncompressedLength(compInput, offset, length);
       data = new byte[uncompressedLength];
       Snappy.uncompress(compInput, offset, length, data, 0);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
     return data;
   }
@@ -99,17 +100,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public short[] unCompressShort(byte[] compInput) {
-    try {
-      return Snappy.uncompressShortArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public short[] unCompressShort(byte[] compInput, int offset, int length) {
@@ -117,8 +109,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompressShortArray(compInput, offset, length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public byte[] compressInt(int[] unCompInput) {
@@ -126,17 +118,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public int[] unCompressInt(byte[] compInput) {
-    try {
-      return Snappy.uncompressIntArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public int[] unCompressInt(byte[] compInput, int offset, int length) {
@@ -144,8 +127,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompressIntArray(compInput, offset, length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public byte[] compressLong(long[] unCompInput) {
@@ -153,17 +136,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public long[] unCompressLong(byte[] compInput) {
-    try {
-      return Snappy.uncompressLongArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public long[] unCompressLong(byte[] compInput, int offset, int length) {
@@ -171,8 +145,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompressLongArray(compInput, offset, length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public byte[] compressFloat(float[] unCompInput) {
@@ -180,17 +154,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public float[] unCompressFloat(byte[] compInput) {
-    try {
-      return Snappy.uncompressFloatArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public float[] unCompressFloat(byte[] compInput, int offset, int length) {
@@ -198,8 +163,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompressFloatArray(compInput, offset, length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public byte[] compressDouble(double[] unCompInput) {
@@ -207,17 +172,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public double[] unCompressDouble(byte[] compInput) {
-    try {
-      return Snappy.uncompressDoubleArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public double[] unCompressDouble(byte[] compInput, int offset, int length) {
@@ -228,8 +184,8 @@ public class SnappyCompressor implements Compressor {
       return result;
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override
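
Every catch block here used to log and return null (or, in unCompressByte,
return the still-compressed input), deferring the failure to a
NullPointerException far from its cause. Rethrowing keeps the original
IOException as the cause and fails at the point of the problem. The
wrap-and-rethrow shape, with doDecompress as a stand-in for the Snappy call:

import java.io.IOException;

public class RethrowDemo {
  static byte[] decompress(byte[] input) {
    try {
      return doDecompress(input);
    } catch (IOException e) {
      // Preserve the cause instead of handing the caller a null.
      throw new RuntimeException("decompression failed", e);
    }
  }

  private static byte[] doDecompress(byte[] input) throws IOException {
    return input;  // stand-in for Snappy.uncompress(input)
  }
}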

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
index 7255237..05f96c5 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
@@ -274,9 +274,9 @@ public abstract class AbstractDFSCarbonFile implements CarbonFile {
           int count = dataInputStream.available();
           // create buffer
           byte[] byteStreamBuffer = new byte[count];
-          dataInputStream.read(byteStreamBuffer);
+          int bytesRead = dataInputStream.read(byteStreamBuffer);
           stream = fileSystem.create(pt, true, bufferSize);
-          stream.write(byteStreamBuffer);
+          stream.write(byteStreamBuffer, 0, bytesRead);
         } else {
           stream = fileSystem.append(pt, bufferSize);
         }
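
The fix consumes the return value of read(), which reports how many bytes
were actually placed in the buffer; writing the whole buffer regardless could
append stale zero bytes after a short read. A single read() may still return
less than available() suggests, so the fully robust form is a loop (what
DataInputStream.readFully does), sketched here with plain java.io:

import java.io.IOException;
import java.io.InputStream;

public class ReadFullyDemo {
  // Reads exactly buffer.length bytes or throws; never ignores a short read.
  static void readFully(InputStream in, byte[] buffer) throws IOException {
    int filled = 0;
    while (filled < buffer.length) {
      int n = in.read(buffer, filled, buffer.length - filled);
      if (n < 0) {
        throw new IOException("stream ended after " + filled + " bytes");
      }
      filled += n;
    }
  }
}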

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
index f5a751b..9477dff 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
@@ -202,7 +202,7 @@ public abstract class AbstractBTreeLeafNode implements BTreeNode {
       int[][] columnIndexRange) throws IOException {
     // Not required here as the leaf nodes that use this class will implement their own get
     // measure chunks
-    return null;
+    throw new UnsupportedOperationException("Unsupported operation");
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java
index 378b51f..1cdefc8 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java
@@ -124,13 +124,13 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public void putByte(int rowId, byte value) {
-    long offset = rowId << byteBits;
+    long offset = (long)rowId << byteBits;
     CarbonUnsafe.getUnsafe().putByte(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putShort(int rowId, short value) {
-    long offset = rowId << shortBits;
+    long offset = (long)rowId << shortBits;
     CarbonUnsafe.getUnsafe().putShort(baseAddress, baseOffset + offset, value);
   }
 
@@ -145,13 +145,13 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public void putInt(int rowId, int value) {
-    long offset = rowId << intBits;
+    long offset = (long)rowId << intBits;
     CarbonUnsafe.getUnsafe().putInt(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putLong(int rowId, long value) {
-    long offset = rowId << longBits;
+    long offset = (long)rowId << longBits;
     CarbonUnsafe.getUnsafe().putLong(baseAddress, baseOffset + offset, value);
   }
 
@@ -187,7 +187,7 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public byte getByte(int rowId) {
-    long offset = rowId << byteBits;
+    long offset = (long)rowId << byteBits;
     return CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset);
   }
 
@@ -202,7 +202,7 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public short getShort(int rowId) {
-    long offset = rowId << shortBits;
+    long offset = (long) rowId << shortBits;
     return CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset);
   }
 
@@ -218,13 +218,13 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public int getInt(int rowId) {
-    long offset = rowId << intBits;
+    long offset = (long)rowId << intBits;
     return CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset);
   }
 
   @Override
   public long getLong(int rowId) {
-    long offset = rowId << longBits;
+    long offset = (long) rowId << longBits;
     return CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset);
   }
 
@@ -266,13 +266,13 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
     switch (decimalConverter.getDecimalConverterType()) {
       case DECIMAL_INT:
         for (int i = 0; i < pageSize; i++) {
-          long offset = i << intBits;
+          long offset = (long)i << intBits;
           codec.encode(i, CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset));
         }
         break;
       case DECIMAL_LONG:
         for (int i = 0; i < pageSize; i++) {
-          long offset = i << longBits;
+          long offset = (long)i << longBits;
           codec.encode(i, CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset));
         }
         break;
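
The (long) casts matter because rowId << intBits is evaluated in 32-bit int
arithmetic first and only then widened, so a large enough rowId wraps
negative before the widening. Casting the operand keeps the shift in 64-bit
arithmetic. A self-contained demonstration:

public class ShiftOverflowDemo {
  public static void main(String[] args) {
    int rowId = 1 << 28;             // 268,435,456 rows
    int longBits = 3;                // a long is 8 = 2^3 bytes
    long wrong = rowId << longBits;  // int shift overflows, then widens
    long right = (long) rowId << longBits;
    System.out.println(wrong);       // -2147483648
    System.out.println(right);       // 2147483648
  }
}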

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java
index 6847ab9..7965e93 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java
@@ -94,13 +94,13 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
 
   @Override
   public void putByte(int rowId, byte value) {
-    long offset = rowId << byteBits;
+    long offset = ((long)rowId) << byteBits;
     CarbonUnsafe.getUnsafe().putByte(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putShort(int rowId, short value) {
-    long offset = rowId << shortBits;
+    long offset = ((long)rowId) << shortBits;
     CarbonUnsafe.getUnsafe().putShort(baseAddress, baseOffset + offset, value);
   }
 
@@ -115,19 +115,19 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
 
   @Override
   public void putInt(int rowId, int value) {
-    long offset = rowId << intBits;
+    long offset = ((long)rowId) << intBits;
     CarbonUnsafe.getUnsafe().putInt(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putLong(int rowId, long value) {
-    long offset = rowId << longBits;
+    long offset = ((long)rowId) << longBits;
     CarbonUnsafe.getUnsafe().putLong(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putDouble(int rowId, double value) {
-    long offset = rowId << doubleBits;
+    long offset = ((long)rowId) << doubleBits;
     CarbonUnsafe.getUnsafe().putDouble(baseAddress, baseOffset + offset, value);
   }
 
@@ -151,13 +151,13 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
 
   @Override
   public byte getByte(int rowId) {
-    long offset = rowId << byteBits;
+    long offset = ((long)rowId) << byteBits;
     return CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset);
   }
 
   @Override
   public short getShort(int rowId) {
-    long offset = rowId << shortBits;
+    long offset = ((long)rowId) << shortBits;
     return CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset);
   }
 
@@ -173,25 +173,25 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
 
   @Override
   public int getInt(int rowId) {
-    long offset = rowId << intBits;
+    long offset = ((long)rowId) << intBits;
     return CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset);
   }
 
   @Override
   public long getLong(int rowId) {
-    long offset = rowId << longBits;
+    long offset = ((long)rowId) << longBits;
     return CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset);
   }
 
   @Override
   public float getFloat(int rowId) {
-    long offset = rowId << floatBits;
+    long offset = ((long)rowId) << floatBits;
     return CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset);
   }
 
   @Override
   public double getDouble(int rowId) {
-    long offset = rowId << doubleBits;
+    long offset = ((long)rowId) << doubleBits;
     return CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset);
   }
 
@@ -219,9 +219,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public byte[] getBytePage() {
     byte[] data = new byte[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << byteBits;
-      data[i] = CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -229,9 +229,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public short[] getShortPage() {
     short[] data = new short[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << shortBits;
-      data[i] = CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -247,9 +247,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public int[] getIntPage() {
     int[] data = new int[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << intBits;
-      data[i] = CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -257,9 +257,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public long[] getLongPage() {
     long[] data = new long[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << longBits;
-      data[i] = CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -267,9 +267,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public float[] getFloatPage() {
     float[] data = new float[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << floatBits;
-      data[i] = CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -277,9 +277,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public double[] getDoublePage() {
     double[] data = new double[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << doubleBits;
-      data[i] = CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -363,34 +363,34 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   public void convertValue(ColumnPageValueConverter codec) {
     int pageSize = getPageSize();
     if (dataType == DataTypes.BYTE) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << byteBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.SHORT) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << shortBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.INT) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << intBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.LONG) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << longBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.FLOAT) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << floatBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.DOUBLE) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << doubleBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset));
       }
     } else {
       throw new UnsupportedOperationException("invalid data type: " + dataType);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java
index 597def0..318d55d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java
@@ -118,6 +118,9 @@ public abstract class EncodingFactory {
    * Old way of creating decoder, based on algorithm
    */
   public ColumnPageDecoder createDecoderLegacy(ValueEncoderMeta metadata) {
+    if (null == metadata) {
+      throw new RuntimeException("internal error");
+    }
     SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(metadata);
     TableSpec.ColumnSpec spec =
         TableSpec.ColumnSpec.newInstanceLegacy("legacy", stats.getDataType(), ColumnType.MEASURE);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
index e6cf29e..22537db 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
@@ -73,16 +73,17 @@ public class KeyPageStatsCollector implements ColumnPageStatsCollector {
 
   @Override
   public void update(byte[] value) {
-    if (min == null && max == null) {
+    if (null == min) {
       min = value;
+    }
+    if (null == max) {
+      max = value;
+    }
+    if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, value) > 0) {
+      min = value;
+    }
+    if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, value) < 0) {
       max = value;
-    } else {
-      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, value) > 0) {
-        min = value;
-      }
-      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, value) < 0) {
-        max = value;
-      }
     }
   }
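
The old guard only initialised min and max when both were null, so any state
in which exactly one of them was null would reach the comparator with a null
argument. The rewrite initialises each side independently, then applies the
usual comparisons (the extra compare immediately after initialisation is a
no-op). The same shape in a self-contained sketch, with compare standing in
for ByteUtil.UnsafeComparer:

public class MinMaxDemo {
  private byte[] min;
  private byte[] max;

  void update(byte[] value) {
    if (min == null) {
      min = value;   // initialise independently of max
    }
    if (max == null) {
      max = value;
    }
    if (compare(min, value) > 0) {
      min = value;
    }
    if (compare(max, value) < 0) {
      max = value;
    }
  }

  // unsigned lexicographic byte comparison
  private static int compare(byte[] a, byte[] b) {
    int n = Math.min(a.length, b.length);
    for (int i = 0; i < n; i++) {
      int d = (a[i] & 0xFF) - (b[i] & 0xFF);
      if (d != 0) {
        return d;
      }
    }
    return a.length - b.length;
  }
}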
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
index 23795c5..7958a8d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
@@ -84,16 +84,21 @@ public class LVStringStatsCollector implements ColumnPageStatsCollector {
       newValue = new byte[value.length - 2];
       System.arraycopy(value, 2, newValue, 0, newValue.length);
     }
-    if (min == null && max == null) {
+
+    if (null == min) {
       min = newValue;
+    }
+
+    if (null == max) {
+      max = newValue;
+    }
+
+    if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, newValue) > 0) {
+      min = newValue;
+    }
+
+    if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, newValue) < 0) {
       max = newValue;
-    } else {
-      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, newValue) > 0) {
-        min = newValue;
-      }
-      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, newValue) < 0) {
-        max = newValue;
-      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
index e0feb04..8a69b80 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
@@ -97,7 +97,9 @@ public class IncrementalColumnDictionaryGenerator implements BiDictionary<Intege
   }
 
   @Override public int size() {
-    return currentDictionarySize;
+    synchronized (lock) {
+      return currentDictionarySize;
+    }
   }
 
   @Override public Integer generateKey(String value) throws DictionaryGenerationException {
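
Synchronizing size() pairs the read of currentDictionarySize with the lock
its writers hold, so callers see a non-stale value under the Java memory
model. When a counter carries no other invariants, an AtomicInteger is the
lighter alternative; a sketch (not the CarbonData implementation):

import java.util.concurrent.atomic.AtomicInteger;

public class CounterDemo {
  private final AtomicInteger size = new AtomicInteger();

  int generateKey() {
    return size.incrementAndGet();  // atomic update, visible to all threads
  }

  int size() {
    return size.get();              // always observes the latest update
  }
}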

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
index 5db13b6..7bb8259 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
@@ -57,6 +57,9 @@ public class TableDictionaryGenerator
       throws DictionaryGenerationException {
     CarbonDimension dimension = carbonTable.getPrimitiveDimensionByName(value.getColumnName());
 
+    if (null == dimension) {
+      throw new DictionaryGenerationException("Dictionary Generation Failed");
+    }
     DictionaryGenerator<Integer, String> generator =
             columnMap.get(dimension.getColumnId());
     return generator.generateKey(value.getData());
@@ -65,6 +68,9 @@ public class TableDictionaryGenerator
   public Integer size(DictionaryMessage key) {
     CarbonDimension dimension = carbonTable.getPrimitiveDimensionByName(key.getColumnName());
 
+    if (null == dimension) {
+      return 0;
+    }
     DictionaryGenerator<Integer, String> generator =
             columnMap.get(dimension.getColumnId());
     return ((BiDictionary) generator).size();
@@ -91,7 +97,7 @@ public class TableDictionaryGenerator
   public void updateGenerator(DictionaryMessage key) {
     CarbonDimension dimension = carbonTable
         .getPrimitiveDimensionByName(key.getColumnName());
-    if (null == columnMap.get(dimension.getColumnId())) {
+    if (null != dimension && null == columnMap.get(dimension.getColumnId())) {
       synchronized (columnMap) {
         if (null == columnMap.get(dimension.getColumnId())) {
           columnMap.put(dimension.getColumnId(),

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
index 021fb82..0188281 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
@@ -17,11 +17,7 @@
 package org.apache.carbondata.core.indexstore.blockletindex;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 
 import org.apache.carbondata.core.cache.Cache;
 import org.apache.carbondata.core.cache.CacheProvider;
@@ -169,9 +165,11 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
       return false;
     }
     for (int i = 0; i < tableColumnList.size(); i++) {
-      return indexFileColumnList.get(i).equalsWithStrictCheck(tableColumnList.get(i));
+      if (!indexFileColumnList.get(i).equalsWithStrictCheck(tableColumnList.get(i))) {
+        return false;
+      }
     }
-    return false;
+    return true;
   }
 
   /**

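The original loop returned on its first iteration, so only column 0 was ever compared and the method could never reach true. The fix inverts the test: return false on the first mismatch, and return true only once the whole list passes. The corrected shape in isolation:

import java.util.List;

final class SchemaCompare {
  // True only if every element pair matches; the early return inside the
  // loop must be the failure case, never the success case.
  static <T> boolean allMatch(List<T> left, List<T> right) {
    if (left.size() != right.size()) {
      return false;
    }
    for (int i = 0; i < left.size(); i++) {
      if (!left.get(i).equals(right.get(i))) {
        return false;
      }
    }
    return true;
  }
}
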
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
index c6efd77..c2686d0 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
@@ -282,12 +282,15 @@ public class SegmentIndexFileStore {
     DataInputStream dataInputStream =
         FileFactory.getDataInputStream(indexFilePath, FileFactory.getFileType(indexFilePath));
     byte[] bytes = new byte[(int) indexFile.getSize()];
-    dataInputStream.readFully(bytes);
-    carbonIndexMap.put(indexFile.getName(), bytes);
-    carbonIndexMapWithFullPath.put(
-        indexFile.getParentFile().getAbsolutePath() + CarbonCommonConstants.FILE_SEPARATOR
-            + indexFile.getName(), bytes);
-    dataInputStream.close();
+    try {
+      dataInputStream.readFully(bytes);
+      carbonIndexMap.put(indexFile.getName(), bytes);
+      carbonIndexMapWithFullPath.put(
+          indexFile.getParentFile().getAbsolutePath() + CarbonCommonConstants.FILE_SEPARATOR
+              + indexFile.getName(), bytes);
+    } finally {
+      dataInputStream.close();
+    }
   }
 
   private MergedBlockIndexHeader readMergeBlockIndexHeader(ThriftReader thriftReader)

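The try/finally wrapper guarantees the stream is closed even when readFully() throws. On Java 7 and later, try-with-resources expresses the same guarantee more compactly; a sketch under illustrative names:

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

final class IndexFileReader {
  private final Map<String, byte[]> indexMap = new HashMap<>();

  void readIndexFile(String path, int size) throws IOException {
    // The stream is closed automatically whether readFully succeeds or throws.
    try (DataInputStream in = new DataInputStream(new FileInputStream(path))) {
      byte[] bytes = new byte[size];
      in.readFully(bytes);
      indexMap.put(path, bytes);
    }
  }
}
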
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java b/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
index 9a8d3f6..ecdb672 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
@@ -54,12 +54,10 @@ public class ZookeeperInit {
 
   public static ZookeeperInit getInstance(String zooKeeperUrl) {
 
-    if (null == zooKeeperInit) {
-      synchronized (ZookeeperInit.class) {
-        if (null == zooKeeperInit) {
-          LOGGER.info("Initiating Zookeeper client.");
-          zooKeeperInit = new ZookeeperInit(zooKeeperUrl);
-        }
+    synchronized (ZookeeperInit.class) {
+      if (null == zooKeeperInit) {
+        LOGGER.info("Initiating Zookeeper client.");
+        zooKeeperInit = new ZookeeperInit(zooKeeperUrl);
       }
     }
     return zooKeeperInit;

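The old code was classic double-checked locking on a non-volatile static field, which the Java memory model does not make safe: a second thread can observe a non-null reference to a partially constructed object. The commit fixes it by always entering the synchronized block; the other standard fix keeps the unlocked fast path by making the field volatile. A sketch with illustrative names:

final class ZkClientHolder {
  // volatile is what makes the unsynchronized first read safe.
  private static volatile ZkClientHolder instance;

  private final String url;

  private ZkClientHolder(String url) {
    this.url = url;
  }

  static ZkClientHolder getInstance(String url) {
    ZkClientHolder local = instance;
    if (local == null) {
      synchronized (ZkClientHolder.class) {
        local = instance;
        if (local == null) {
          local = new ZkClientHolder(url);
          instance = local;
        }
      }
    }
    return local;
  }
}
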

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java b/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
index d3b9b48..2115f82 100644
--- a/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
@@ -159,7 +159,8 @@ public class UnsafeMemoryManager {
   /**
    * It tries to allocate memory of `size` bytes, keep retry until it allocates successfully.
    */
-  public static MemoryBlock allocateMemoryWithRetry(long taskId, long size) throws MemoryException {
+  public static MemoryBlock allocateMemoryWithRetry(long taskId, long size)
+      throws MemoryException {
     MemoryBlock baseBlock = null;
     int tries = 0;
     while (tries < 300) {
@@ -177,8 +178,7 @@ public class UnsafeMemoryManager {
       tries++;
     }
     if (baseBlock == null) {
-      LOGGER.error(" Memory Used : " + INSTANCE.memoryUsed + " Tasks running : "
-          + taskIdToMemoryBlockMap.keySet());
+      INSTANCE.printCurrentMemoryUsage();
       throw new MemoryException("Not enough memory");
     }
     return baseBlock;
@@ -187,4 +187,9 @@ public class UnsafeMemoryManager {
   public static boolean isOffHeap() {
     return offHeap;
   }
+
+  private synchronized void printCurrentMemoryUsage() {
+    LOGGER.error(
+        " Memory Used : " + memoryUsed + " Tasks running : " + taskIdToMemoryBlockMap.keySet());
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java
index 0f0f120..94a4e89 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java
@@ -31,20 +31,32 @@ public class ArrayType extends DataType {
     return true;
   }
 
-  public DataType getElementType() {
-    return elementType;
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (!(obj instanceof ArrayType)) {
+      return false;
+    }
+    if (!this.getName().equalsIgnoreCase(((ArrayType) obj).getName())) {
+      return false;
+    }
+    return true;
   }
 
-  @Override public boolean equals(Object o) {
-    if (this == o) return true;
-    if (!(o instanceof ArrayType)) return false;
-
-    ArrayType arrayType = (ArrayType) o;
-
-    return elementType.equals(arrayType.elementType);
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + getName().hashCode();
+    return result;
   }
 
-  @Override public int hashCode() {
-    return elementType.hashCode();
+  public DataType getElementType() {
+    return elementType;
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java
index b2acd21..8536222 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java
@@ -29,6 +29,37 @@ public class DecimalType extends DataType {
     this.scale = scale;
   }
 
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (!(obj instanceof DecimalType)) {
+      return false;
+    }
+    if (!this.getName().equalsIgnoreCase(((DecimalType) obj).getName())) {
+      return false;
+    }
+    if (this.precision != ((DecimalType) obj).precision) {
+      return false;
+    }
+    if (this.scale != ((DecimalType) obj).scale) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + getName().hashCode();
+    return result;
+  }
+
   public int getPrecision() {
     return precision;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java
index 97cc4f0..90b7374 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java
@@ -33,6 +33,31 @@ public class StructType extends DataType {
     return true;
   }
 
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (!(obj instanceof StructType)) {
+      return false;
+    }
+    if (!this.getName().equalsIgnoreCase(((StructType) obj).getName())) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + getName().hashCode();
+    return result;
+  }
+
   public List<StructField> getFields() {
     return fields;
   }

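The three DataType subclasses above (ArrayType, DecimalType, StructType) now override equals() and hashCode() together, as the Object contract requires: objects that compare equal must report the same hash. Hashing only the name, as these classes do, is legal even where equals() also checks precision and scale, because the contract constrains only the equal case; covering every equality field simply improves hash distribution. A compact template with illustrative names (the CarbonData versions compare names case-insensitively, in which case the hash should use a normalized name):

import java.util.Objects;

final class DecimalLike {
  private final String name;
  private final int precision;
  private final int scale;

  DecimalLike(String name, int precision, int scale) {
    this.name = name;
    this.precision = precision;
    this.scale = scale;
  }

  @Override public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof DecimalLike)) {
      return false;
    }
    DecimalLike other = (DecimalLike) obj;
    return name.equals(other.name)
        && precision == other.precision
        && scale == other.scale;
  }

  @Override public int hashCode() {
    // Hash the same fields equals() checks: equal objects then hash equal
    // by construction, and unequal ones rarely collide.
    return Objects.hash(name, precision, scale);
  }
}
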
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java
index 80c6a3a..fb4d8e3 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java
@@ -33,15 +33,11 @@ import org.apache.carbondata.core.metadata.schema.table.Writable;
 import org.apache.carbondata.core.metadata.schema.table.WritableUtil;
 import org.apache.carbondata.core.preagg.TimeSeriesUDF;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 /**
  * Store the information about the column metadata present in the table
  */
 public class ColumnSchema implements Serializable, Writable {
 
-  private static final Log LOG = LogFactory.getLog(ColumnSchema.class);
   /**
    * serialization version
    */
@@ -334,8 +330,6 @@ public class ColumnSchema implements Serializable, Writable {
         return false;
       }
     } else if (!columnName.equals(other.columnName)) {
-      LOG.error("column name is " + columnName
-          + " but other column name is " + other.columnName);
       return false;
     }
     if (dataType == null) {
@@ -343,8 +337,6 @@ public class ColumnSchema implements Serializable, Writable {
         return false;
       }
     } else if (!dataType.equals(other.dataType)) {
-      LOG.error("column name is" + columnName + " data type is " + dataType
-          + " but other column data type is " + other.dataType);
       return false;
     }
     return true;
@@ -361,40 +353,16 @@ public class ColumnSchema implements Serializable, Writable {
       return false;
     }
     ColumnSchema other = (ColumnSchema) obj;
-    if (!columnUniqueId.equals(other.columnUniqueId)) {
-      LOG.error("Index file's column " + columnName + " columnUniqueId is " + columnUniqueId
-          + " but table's column columnUniqueId is " + other.columnUniqueId);
-      return false;
-    }
-    if (isDimensionColumn != other.isDimensionColumn) {
-      LOG.error("Index file's column " + columnName + " isDimensionColumn is " + isDimensionColumn
-          + " but table's column isDimensionColumn is " + other.isDimensionColumn);
-      return false;
-    }
-    if (scale != other.scale) {
-      LOG.error("Index file's column " + columnName + " scale is " + scale
-          + " but table's column scale is " + other.scale);
-      return false;
-    }
-    if (precision != other.precision) {
-      LOG.error("Index file's column " + columnName + " precision is " + precision
-          + " but table's column precision is " + other.precision);
-      return false;
-    }
-    if (isSortColumn != other.isSortColumn) {
-      LOG.error("Index file's column " + columnName + " isSortColumn is " + isSortColumn
-          + " but table's column isSortColumn is " + other.isSortColumn);
+    if (!columnUniqueId.equals(other.columnUniqueId) ||
+        (isDimensionColumn != other.isDimensionColumn) ||
+        (isSortColumn != other.isSortColumn)) {
       return false;
     }
     if (encodingList.size() != other.encodingList.size()) {
-      LOG.error("Index file's column " + columnName + " encoding size is " + encodingList.size()
-          + " but table's column encoding size is " + other.encodingList.size());
       return false;
     }
     for (int i = 0; i < encodingList.size(); i++) {
       if (encodingList.get(i).compareTo(other.encodingList.get(i)) != 0) {
-        LOG.error("Index file's column " + columnName + " encoding is " + encodingList.get(i)
-            + " but table's column encoding is " + other.encodingList.get(i));
         return false;
       }
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java b/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java
index df712de..ce0f15d 100644
--- a/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java
+++ b/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java
@@ -112,7 +112,7 @@ public class TimeSeriesUDF {
   /**
    * Below method will be used to initialize the thread local
    */
-  private synchronized void initialize() {
+  private void initialize() {
     if (calanderThreadLocal.get() == null) {
       calanderThreadLocal.set(new GregorianCalendar());
     }

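Dropping synchronized here is sound because a ThreadLocal slot is only ever touched by its own thread; the lock was protecting nothing shared. ThreadLocal.withInitial makes the lazy per-thread setup explicit and removes the null check entirely. A sketch of that idiom:

import java.util.Calendar;
import java.util.GregorianCalendar;

final class CalendarPerThread {
  // Each thread lazily creates its own calendar on first get(); no locking.
  private static final ThreadLocal<Calendar> CALENDAR =
      ThreadLocal.withInitial(GregorianCalendar::new);

  static Calendar current() {
    return CALENDAR.get();
  }
}
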
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
index 6439b36..4f41b92 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
@@ -111,6 +111,9 @@ public class CarbonDictionaryMetadataReaderImpl implements CarbonDictionaryMetad
       // get the thrift object for dictionary chunk
       dictionaryChunkMeta = (ColumnDictionaryChunkMeta) dictionaryMetadataFileReader.read();
     }
+    if (null == dictionaryChunkMeta) {
+      throw new IOException("Last dictionary chunk does not exist");
+    }
     // create a new instance of chunk meta wrapper using thrift object
     return getNewInstanceOfCarbonDictionaryColumnMetaChunk(dictionaryChunkMeta);
   }
@@ -128,6 +131,9 @@ public class CarbonDictionaryMetadataReaderImpl implements CarbonDictionaryMetad
         break;
       }
     }
+    if (null == dictionaryChunkMeta) {
+      throw new IOException("Matching dictionary chunk does not exist");
+    }
     // create a new instance of chunk meta wrapper using thrift object
     return getNewInstanceOfCarbonDictionaryColumnMetaChunk(dictionaryChunkMeta);
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
index 1c440cf..33f0db7 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
@@ -197,7 +197,7 @@ public class RestructureBasedRawResultCollector extends RawBasedResultCollector
         if (CarbonUtil.hasEncoding(actualQueryDimensions[i].getDimension().getEncoder(),
             Encoding.DICTIONARY)) {
           // if dimension exists then add the key array value else add the default value
-          if (dimensionInfo.getDimensionExists()[i]) {
+          if (dimensionInfo.getDimensionExists()[i] && null != keyArray && 0 != keyArray.length) {
             keyArrayWithNewAddedColumns[newKeyArrayIndex++] =
                 keyArray[existingColumnKeyArrayIndex++];
           } else {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
index 04669ab..aed472c 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
@@ -70,9 +70,7 @@ public class SearchModeDetailQueryExecutor extends AbstractQueryExecutor<Object>
   public CarbonIterator<Object> execute(QueryModel queryModel)
       throws QueryExecutionException, IOException {
     List<BlockExecutionInfo> blockExecutionInfoList = getBlockExecutionInfos(queryModel);
-    if (executorService == null) {
-      initThreadPool();
-    }
+
     this.queryIterator = new SearchModeResultIterator(
         blockExecutionInfoList,
         queryModel,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
index 6c9396b..00fd511 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
@@ -74,9 +74,7 @@ public class SearchModeVectorDetailQueryExecutor extends AbstractQueryExecutor<O
   public CarbonIterator<Object> execute(QueryModel queryModel)
       throws QueryExecutionException, IOException {
     List<BlockExecutionInfo> blockExecutionInfoList = getBlockExecutionInfos(queryModel);
-    if (executorService == null) {
-      initThreadPool();
-    }
+
     this.queryIterator = new SearchModeVectorResultIterator(
         blockExecutionInfoList,
         queryModel,

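Both search-mode executors drop the unsynchronized lazy check, which was a race: two concurrent execute() calls could each see a null executorService and create a pool. The removal implies the pool is now guaranteed to exist before execute() runs; one thread-safe way to provide that guarantee is eager initialization, sketched here under that assumption (names illustrative):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

final class QueryExecutorPool {
  // Created once during class initialization, which the JVM performs
  // thread-safely, so callers never race on a null check.
  private static final ExecutorService POOL =
      Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());

  static ExecutorService get() {
    return POOL;
  }
}
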
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
index dc9415e..74c9ae2 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
@@ -207,12 +207,14 @@ public class FilterExpressionProcessor implements FilterProcessor {
    */
   private void addBlockBasedOnMinMaxValue(FilterExecuter filterExecuter,
       List<DataRefNode> listOfDataBlocksToScan, DataRefNode dataRefNode) {
-
+    if (null == dataRefNode.getColumnsMinValue() || null == dataRefNode.getColumnsMaxValue()) {
+      listOfDataBlocksToScan.add(dataRefNode);
+      return;
+    }
     BitSet bitSet = filterExecuter
         .isScanRequired(dataRefNode.getColumnsMaxValue(), dataRefNode.getColumnsMinValue());
     if (!bitSet.isEmpty()) {
       listOfDataBlocksToScan.add(dataRefNode);
-
     }
   }
 

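This hunk, like the matching one in BlockletFilterScanner further below, applies a fail-open rule: a block with no min/max statistics cannot be pruned safely, so it is always scanned; pruning it instead would silently drop matching rows. The shape in isolation (the predicate stands in for the filter's isScanRequired check):

import java.util.List;
import java.util.function.BiPredicate;

final class MinMaxPruning {
  static <B> void collect(List<B> toScan, B block, byte[][] min, byte[][] max,
      BiPredicate<byte[][], byte[][]> scanRequired) {
    if (min == null || max == null) {
      // No statistics: keep the block, since pruning would risk wrong results.
      toScan.add(block);
      return;
    }
    if (scanRequired.test(max, min)) {
      toScan.add(block);
    }
  }
}
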
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
index 5196f8f..9741915 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
@@ -77,12 +77,10 @@ import org.apache.carbondata.core.scan.expression.logical.AndExpression;
 import org.apache.carbondata.core.scan.expression.logical.TrueExpression;
 import org.apache.carbondata.core.scan.filter.executer.AndFilterExecuterImpl;
 import org.apache.carbondata.core.scan.filter.executer.DimColumnExecuterFilterInfo;
-import org.apache.carbondata.core.scan.filter.executer.ExcludeColGroupFilterExecuterImpl;
 import org.apache.carbondata.core.scan.filter.executer.ExcludeFilterExecuterImpl;
 import org.apache.carbondata.core.scan.filter.executer.FalseFilterExecutor;
 import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
 import org.apache.carbondata.core.scan.filter.executer.ImplicitIncludeFilterExecutorImpl;
-import org.apache.carbondata.core.scan.filter.executer.IncludeColGroupFilterExecuterImpl;
 import org.apache.carbondata.core.scan.filter.executer.IncludeFilterExecuterImpl;
 import org.apache.carbondata.core.scan.filter.executer.MeasureColumnExecuterFilterInfo;
 import org.apache.carbondata.core.scan.filter.executer.OrFilterExecuterImpl;
@@ -232,30 +230,24 @@ public final class FilterUtil {
             msrColResolvedFilterInfo, true);
       }
     }
-    if (null != dimColResolvedFilterInfo) {
-      CarbonDimension dimension = dimColResolvedFilterInfo.getDimension();
-      if (dimension.hasEncoding(Encoding.IMPLICIT)) {
-        return new ImplicitIncludeFilterExecutorImpl(dimColResolvedFilterInfo);
-      } else if (dimension.isColumnar()) {
-        CarbonDimension dimensionFromCurrentBlock =
-            segmentProperties.getDimensionFromCurrentBlock(dimColResolvedFilterInfo.getDimension());
-        if (null != dimensionFromCurrentBlock) {
-          // update dimension and column index according to the dimension position in current block
-          DimColumnResolvedFilterInfo dimColResolvedFilterInfoCopyObject =
-              dimColResolvedFilterInfo.getCopyObject();
-          dimColResolvedFilterInfoCopyObject.setDimension(dimensionFromCurrentBlock);
-          dimColResolvedFilterInfoCopyObject.setColumnIndex(dimensionFromCurrentBlock.getOrdinal());
-          return new IncludeFilterExecuterImpl(dimColResolvedFilterInfoCopyObject, null,
-              segmentProperties, false);
-        } else {
-          return new RestructureIncludeFilterExecutorImpl(dimColResolvedFilterInfo,
-              msrColResolvedFilterInfo, false);
-        }
+    CarbonDimension dimension = dimColResolvedFilterInfo.getDimension();
+    if (dimension.hasEncoding(Encoding.IMPLICIT)) {
+      return new ImplicitIncludeFilterExecutorImpl(dimColResolvedFilterInfo);
+    } else {
+      CarbonDimension dimensionFromCurrentBlock =
+          segmentProperties.getDimensionFromCurrentBlock(dimColResolvedFilterInfo.getDimension());
+      if (null != dimensionFromCurrentBlock) {
+        // update dimension and column index according to the dimension position in current block
+        DimColumnResolvedFilterInfo dimColResolvedFilterInfoCopyObject =
+            dimColResolvedFilterInfo.getCopyObject();
+        dimColResolvedFilterInfoCopyObject.setDimension(dimensionFromCurrentBlock);
+        dimColResolvedFilterInfoCopyObject.setColumnIndex(dimensionFromCurrentBlock.getOrdinal());
+        return new IncludeFilterExecuterImpl(dimColResolvedFilterInfoCopyObject, null,
+            segmentProperties, false);
       } else {
-        return new IncludeColGroupFilterExecuterImpl(dimColResolvedFilterInfo, segmentProperties);
+        return new RestructureIncludeFilterExecutorImpl(dimColResolvedFilterInfo,
+            msrColResolvedFilterInfo, false);
       }
-    } else {
-      return new IncludeColGroupFilterExecuterImpl(null, segmentProperties);
     }
   }
 
@@ -288,24 +280,19 @@ public final class FilterUtil {
             msrColResolvedFilterInfo, true);
       }
     }
-    if ((null != dimColResolvedFilterInfo) && (dimColResolvedFilterInfo.getDimension()
-        .isColumnar())) {
-      CarbonDimension dimensionFromCurrentBlock =
-          segmentProperties.getDimensionFromCurrentBlock(dimColResolvedFilterInfo.getDimension());
-      if (null != dimensionFromCurrentBlock) {
-        // update dimension and column index according to the dimension position in current block
-        DimColumnResolvedFilterInfo dimColResolvedFilterInfoCopyObject =
-            dimColResolvedFilterInfo.getCopyObject();
-        dimColResolvedFilterInfoCopyObject.setDimension(dimensionFromCurrentBlock);
-        dimColResolvedFilterInfoCopyObject.setColumnIndex(dimensionFromCurrentBlock.getOrdinal());
-        return new ExcludeFilterExecuterImpl(dimColResolvedFilterInfoCopyObject, null,
-            segmentProperties, false);
-      } else {
-        return new RestructureExcludeFilterExecutorImpl(dimColResolvedFilterInfo,
-            msrColResolvedFilterInfo, false);
-      }
+    CarbonDimension dimensionFromCurrentBlock =
+        segmentProperties.getDimensionFromCurrentBlock(dimColResolvedFilterInfo.getDimension());
+    if (null != dimensionFromCurrentBlock) {
+      // update dimension and column index according to the dimension position in current block
+      DimColumnResolvedFilterInfo dimColResolvedFilterInfoCopyObject =
+          dimColResolvedFilterInfo.getCopyObject();
+      dimColResolvedFilterInfoCopyObject.setDimension(dimensionFromCurrentBlock);
+      dimColResolvedFilterInfoCopyObject.setColumnIndex(dimensionFromCurrentBlock.getOrdinal());
+      return new ExcludeFilterExecuterImpl(dimColResolvedFilterInfoCopyObject, null,
+          segmentProperties, false);
     } else {
-      return new ExcludeColGroupFilterExecuterImpl(dimColResolvedFilterInfo, segmentProperties);
+      return new RestructureExcludeFilterExecutorImpl(dimColResolvedFilterInfo,
+          msrColResolvedFilterInfo, false);
     }
   }
 
@@ -581,17 +568,15 @@ public final class FilterUtil {
     }
     Collections.sort(surrogates);
     ColumnFilterInfo columnFilterInfo = null;
-    if (surrogates.size() > 0) {
-      columnFilterInfo = new ColumnFilterInfo();
-      if (isExcludeFilterNeedsToApply) {
-        columnFilterInfo.setOptimized(true);
-      }
-      columnFilterInfo.setIncludeFilter(isIncludeFilter);
-      if (!isIncludeFilter) {
-        columnFilterInfo.setExcludeFilterList(surrogates);
-      } else {
-        columnFilterInfo.setFilterList(surrogates);
-      }
+    columnFilterInfo = new ColumnFilterInfo();
+    if (isExcludeFilterNeedsToApply) {
+      columnFilterInfo.setOptimized(true);
+    }
+    columnFilterInfo.setIncludeFilter(isIncludeFilter);
+    if (!isIncludeFilter) {
+      columnFilterInfo.setExcludeFilterList(surrogates);
+    } else {
+      columnFilterInfo.setFilterList(surrogates);
     }
     return columnFilterInfo;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeColGroupFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeColGroupFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeColGroupFilterExecuterImpl.java
deleted file mode 100644
index 44f7c07..0000000
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeColGroupFilterExecuterImpl.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.scan.filter.executer;
-
-import java.util.BitSet;
-
-import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
-
-/**
- * It checks if filter is required on given block and if required, it does
- * linear search on block data and set the bitset.
- */
-public class ExcludeColGroupFilterExecuterImpl extends ExcludeFilterExecuterImpl {
-
-  /**
-   * @param dimColResolvedFilterInfo
-   * @param segmentProperties
-   */
-  public ExcludeColGroupFilterExecuterImpl(DimColumnResolvedFilterInfo dimColResolvedFilterInfo,
-      SegmentProperties segmentProperties) {
-    super(dimColResolvedFilterInfo, null, segmentProperties, false);
-  }
-
-  /**
-   * Check if scan is required on given block based on min and max value
-   */
-  public BitSet isScanRequired(byte[][] blkMaxVal, byte[][] blkMinVal) {
-    BitSet bitSet = new BitSet(1);
-    bitSet.flip(0, 1);
-    return bitSet;
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeColGroupFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeColGroupFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeColGroupFilterExecuterImpl.java
deleted file mode 100644
index e4da26f..0000000
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeColGroupFilterExecuterImpl.java
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.scan.filter.executer;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.BitSet;
-import java.util.List;
-
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.datastore.chunk.DimensionColumnPage;
-import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
-import org.apache.carbondata.core.keygenerator.KeyGenException;
-import org.apache.carbondata.core.keygenerator.KeyGenerator;
-import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.core.scan.executor.util.QueryUtil;
-import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
-import org.apache.carbondata.core.scan.processor.RawBlockletColumnChunks;
-import org.apache.carbondata.core.util.BitSetGroup;
-import org.apache.carbondata.core.util.ByteUtil;
-
-/**
- * It checks if filter is required on given block and if required, it does
- * linear search on block data and set the bitset.
- */
-public class IncludeColGroupFilterExecuterImpl extends IncludeFilterExecuterImpl {
-
-  /**
-   * LOGGER
-   */
-  private static final LogService LOGGER =
-      LogServiceFactory.getLogService(IncludeColGroupFilterExecuterImpl.class.getName());
-
-  /**
-   * @param dimColResolvedFilterInfo
-   * @param segmentProperties
-   */
-  public IncludeColGroupFilterExecuterImpl(DimColumnResolvedFilterInfo dimColResolvedFilterInfo,
-      SegmentProperties segmentProperties) {
-    super(dimColResolvedFilterInfo, null, segmentProperties, false);
-  }
-
-  /**
-   * It fills BitSet with row index which matches filter key
-   */
-  protected BitSet getFilteredIndexes(DimensionColumnPage dimensionColumnPage,
-      int numerOfRows) {
-    BitSet bitSet = new BitSet(numerOfRows);
-
-    try {
-      KeyStructureInfo keyStructureInfo = getKeyStructureInfo();
-      byte[][] filterValues = dimColumnExecuterInfo.getFilterKeys();
-      for (int i = 0; i < filterValues.length; i++) {
-        byte[] filterVal = filterValues[i];
-        for (int rowId = 0; rowId < numerOfRows; rowId++) {
-          byte[] colData = new byte[keyStructureInfo.getMaskByteRanges().length];
-          dimensionColumnPage.fillRawData(rowId, 0, colData, keyStructureInfo);
-          if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterVal, colData) == 0) {
-            bitSet.set(rowId);
-          }
-        }
-      }
-
-    } catch (Exception e) {
-      LOGGER.error(e);
-    }
-
-    return bitSet;
-  }
-
-  @Override
-  public BitSetGroup applyFilter(RawBlockletColumnChunks rawBlockletColumnChunks,
-      boolean useBitsetPipeLine) throws IOException {
-    int chunkIndex = segmentProperties.getDimensionOrdinalToChunkMapping()
-        .get(dimColumnEvaluatorInfo.getColumnIndex());
-    if (null == rawBlockletColumnChunks.getDimensionRawColumnChunks()[chunkIndex]) {
-      rawBlockletColumnChunks.getDimensionRawColumnChunks()[chunkIndex] =
-          rawBlockletColumnChunks.getDataBlock().readDimensionChunk(
-              rawBlockletColumnChunks.getFileReader(), chunkIndex);
-    }
-    DimensionRawColumnChunk dimensionRawColumnChunk =
-        rawBlockletColumnChunks.getDimensionRawColumnChunks()[chunkIndex];
-    BitSetGroup bitSetGroup = new BitSetGroup(dimensionRawColumnChunk.getPagesCount());
-    for (int i = 0; i < dimensionRawColumnChunk.getPagesCount(); i++) {
-      if (dimensionRawColumnChunk.getMaxValues() != null) {
-        BitSet bitSet = getFilteredIndexes(dimensionRawColumnChunk.decodeColumnPage(i),
-            dimensionRawColumnChunk.getRowCount()[i]);
-        bitSetGroup.setBitSet(bitSet, i);
-      }
-    }
-    return bitSetGroup;
-  }
-
-  /**
-   * It is required for extracting column data from columngroup chunk
-   *
-   * @return
-   * @throws KeyGenException
-   */
-  private KeyStructureInfo getKeyStructureInfo() throws KeyGenException {
-    int colGrpId = getColumnGroupId(dimColumnEvaluatorInfo.getColumnIndex());
-    KeyGenerator keyGenerator = segmentProperties.getColumnGroupAndItsKeygenartor().get(colGrpId);
-    List<Integer> mdKeyOrdinal = new ArrayList<Integer>();
-    mdKeyOrdinal.add(getMdkeyOrdinal(dimColumnEvaluatorInfo.getColumnIndex(), colGrpId));
-    int[] maskByteRanges = QueryUtil.getMaskedByteRangeBasedOrdinal(mdKeyOrdinal, keyGenerator);
-    byte[] maxKey = QueryUtil.getMaxKeyBasedOnOrinal(mdKeyOrdinal, keyGenerator);
-    KeyStructureInfo restructureInfos = new KeyStructureInfo();
-    restructureInfos.setKeyGenerator(keyGenerator);
-    restructureInfos.setMaskByteRanges(maskByteRanges);
-    restructureInfos.setMaxKey(maxKey);
-    return restructureInfos;
-  }
-
-  /**
-   * Check if scan is required on given block based on min and max value
-   */
-  public BitSet isScanRequired(byte[][] blkMaxVal, byte[][] blkMinVal) {
-    BitSet bitSet = new BitSet(1);
-    byte[][] filterValues = dimColumnExecuterInfo.getFilterKeys();
-    int columnIndex = dimColumnEvaluatorInfo.getColumnIndex();
-    int chunkIndex = segmentProperties.getDimensionOrdinalToChunkMapping().get(columnIndex);
-    int[] cols = getAllColumns(columnIndex);
-    byte[] maxValue = getMinMaxData(cols, blkMaxVal[chunkIndex], columnIndex);
-    byte[] minValue = getMinMaxData(cols, blkMinVal[chunkIndex], columnIndex);
-    boolean isScanRequired = false;
-    for (int k = 0; k < filterValues.length; k++) {
-      // filter value should be in range of max and min value i.e
-      // max>filtervalue>min
-      // so filter-max should be negative
-      int maxCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], maxValue);
-      // and filter-min should be positive
-      int minCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], minValue);
-
-      // if any filter value is in range than this block needs to be
-      // scanned
-      if (maxCompare <= 0 && minCompare >= 0) {
-        isScanRequired = true;
-        break;
-      }
-    }
-    if (isScanRequired) {
-      bitSet.set(0);
-    }
-    return bitSet;
-  }
-
-  /**
-   * It extract min and max data for given column from stored min max value
-   *
-   * @param colGrpColumns
-   * @param minMaxData
-   * @param columnIndex
-   * @return
-   */
-  private byte[] getMinMaxData(int[] colGrpColumns, byte[] minMaxData, int columnIndex) {
-    int startIndex = 0;
-    int endIndex = 0;
-    if (null != colGrpColumns) {
-      for (int i = 0; i < colGrpColumns.length; i++) {
-        int colGrpId = getColumnGroupId(colGrpColumns[i]);
-        int mdKeyOrdinal = getMdkeyOrdinal(colGrpColumns[i], colGrpId);
-        int[] byteRange = getKeyGenerator(colGrpId).getKeyByteOffsets(mdKeyOrdinal);
-        int colSize = 0;
-        for (int j = byteRange[0]; j <= byteRange[1]; j++) {
-          colSize++;
-        }
-        if (colGrpColumns[i] == columnIndex) {
-          endIndex = startIndex + colSize;
-          break;
-        }
-        startIndex += colSize;
-      }
-    }
-    byte[] data = new byte[endIndex - startIndex];
-    System.arraycopy(minMaxData, startIndex, data, 0, data.length);
-    return data;
-  }
-
-  /**
-   * It returns column groups which have provided column ordinal
-   *
-   * @param columnIndex
-   * @return column group array
-   */
-  private int[] getAllColumns(int columnIndex) {
-    int[][] colGroups = segmentProperties.getColumnGroups();
-    for (int i = 0; i < colGroups.length; i++) {
-      if (QueryUtil.searchInArray(colGroups[i], columnIndex)) {
-        return colGroups[i];
-      }
-    }
-    return null;
-  }
-
-  private int getMdkeyOrdinal(int ordinal, int colGrpId) {
-    return segmentProperties.getColumnGroupMdKeyOrdinal(colGrpId, ordinal);
-  }
-
-  private int getColumnGroupId(int ordinal) {
-    int[][] columnGroups = segmentProperties.getColumnGroups();
-    int colGrpId = -1;
-    for (int i = 0; i < columnGroups.length; i++) {
-      if (columnGroups[i].length > 1) {
-        colGrpId++;
-        if (QueryUtil.searchInArray(columnGroups[i], ordinal)) {
-          break;
-        }
-      }
-    }
-    return colGrpId;
-  }
-
-  public KeyGenerator getKeyGenerator(int colGrpId) {
-    return segmentProperties.getColumnGroupAndItsKeygenartor().get(colGrpId);
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
index 34555e1..b218813 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
@@ -113,7 +113,7 @@ public class RangeValueFilterExecuterImpl extends ValueBasedFilterExecuterImpl {
    */
   private void ifDefaultValueMatchesFilter() {
     isDefaultValuePresentInFilter = false;
-    if (!this.isDimensionPresentInCurrentBlock) {
+    if (!this.isDimensionPresentInCurrentBlock && null != filterRangesValues) {
       CarbonDimension dimension = this.dimColEvaluatorInfo.getDimension();
       byte[] defaultValue = dimension.getDefaultValue();
       if (null != defaultValue) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
index e1432b0..f901238 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
@@ -134,5 +134,4 @@ public abstract class RestructureEvaluatorImpl implements FilterExecuter {
     }
     return isDefaultValuePresentInFilterValues;
   }
-
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
index 4bee89b..057a244 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
@@ -124,8 +124,8 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
       this.dimColEvaluatorInfoList = dimColEvaluatorInfoList;
     }
     if (this.dimColEvaluatorInfoList.size() > 0) {
-      this.isDimensionPresentInCurrentBlock = new boolean[dimColEvaluatorInfoList.size()];
-      this.dimensionChunkIndex = new int[dimColEvaluatorInfoList.size()];
+      this.isDimensionPresentInCurrentBlock = new boolean[this.dimColEvaluatorInfoList.size()];
+      this.dimensionChunkIndex = new int[this.dimColEvaluatorInfoList.size()];
     } else {
       this.isDimensionPresentInCurrentBlock = new boolean[]{false};
       this.dimensionChunkIndex = new int[]{0};
@@ -136,8 +136,8 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
       this.msrColEvalutorInfoList = msrColEvalutorInfoList;
     }
     if (this.msrColEvalutorInfoList.size() > 0) {
-      this.isMeasurePresentInCurrentBlock = new boolean[msrColEvalutorInfoList.size()];
-      this.measureChunkIndex = new int[msrColEvalutorInfoList.size()];
+      this.isMeasurePresentInCurrentBlock = new boolean[this.msrColEvalutorInfoList.size()];
+      this.measureChunkIndex = new int[this.msrColEvalutorInfoList.size()];
     } else {
       this.isMeasurePresentInCurrentBlock = new boolean[]{false};
       this.measureChunkIndex = new int[] {0};
@@ -647,13 +647,11 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
       }
     }
 
-    if (null != msrColEvalutorInfoList) {
-      for (MeasureColumnResolvedFilterInfo msrColumnEvalutorInfo : msrColEvalutorInfoList) {
-        if (null == rawBlockletColumnChunks.getMeasureRawColumnChunks()[measureChunkIndex[0]]) {
-          rawBlockletColumnChunks.getMeasureRawColumnChunks()[measureChunkIndex[0]] =
-              rawBlockletColumnChunks.getDataBlock()
-                  .readMeasureChunk(rawBlockletColumnChunks.getFileReader(), measureChunkIndex[0]);
-        }
+    for (MeasureColumnResolvedFilterInfo msrColumnEvalutorInfo : msrColEvalutorInfoList) {
+      if (null == rawBlockletColumnChunks.getMeasureRawColumnChunks()[measureChunkIndex[0]]) {
+        rawBlockletColumnChunks.getMeasureRawColumnChunks()[measureChunkIndex[0]] =
+            rawBlockletColumnChunks.getDataBlock()
+              .readMeasureChunk(rawBlockletColumnChunks.getFileReader(), measureChunkIndex[0]);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
index be871d4..e339a58 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
@@ -67,7 +67,7 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
     this.filterRangeValues = filterRangeValues;
     this.msrFilterRangeValues = msrFilterRangeValues;
     lastDimensionColOrdinal = segmentProperties.getLastDimensionColOrdinal();
-    if (!msrColEvalutorInfoList.isEmpty()) {
+    if (!this.msrColEvalutorInfoList.isEmpty()) {
       CarbonMeasure measure = this.msrColEvalutorInfoList.get(0).getMeasure();
       comparator = Comparator.getComparatorByDataTypeForMeasure(measure.getDataType());
     }
@@ -98,9 +98,12 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
     } else if (!msrColEvalutorInfoList.isEmpty() && !isMeasurePresentInCurrentBlock[0]) {
       CarbonMeasure measure = this.msrColEvalutorInfoList.get(0).getMeasure();
       byte[] defaultValue = measure.getDefaultValue();
+      SerializableComparator comparatorTmp = (null != comparator ?
+          comparator :
+          Comparator.getComparatorByDataTypeForMeasure(measure.getDataType()));
       if (null != defaultValue) {
         for (int k = 0; k < msrFilterRangeValues.length; k++) {
-          int maxCompare = comparator.compare(msrFilterRangeValues[k],
+          int maxCompare = comparatorTmp.compare(msrFilterRangeValues[k],
               RestructureUtil.getMeasureDefaultValue(measure.getColumnSchema(),
                   measure.getDefaultValue()));
 

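This hunk and the three like it below stop relying on a shared comparator field that may never have been initialized for the current block, deriving a comparator locally from the measure's data type instead. A sketch of the local-fallback pattern, using plain java.util.Comparator in place of Carbon's SerializableComparator:

import java.util.Comparator;

final class MeasureCompare {
  // May be null when the measure was absent from the block this instance
  // was built for.
  private final Comparator<Long> shared;

  MeasureCompare(Comparator<Long> shared) {
    this.shared = shared;
  }

  int compare(Long a, Long b) {
    // Fall back to a locally derived comparator rather than dereferencing
    // a field that may never have been initialized.
    Comparator<Long> cmp = (shared != null) ? shared : Comparator.naturalOrder();
    return cmp.compare(a, b);
  }
}
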
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
index a3359be..2ea3c73 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
@@ -98,9 +98,11 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
     } else if (!msrColEvalutorInfoList.isEmpty() && !isMeasurePresentInCurrentBlock[0]) {
       CarbonMeasure measure = this.msrColEvalutorInfoList.get(0).getMeasure();
       byte[] defaultValue = measure.getDefaultValue();
+      SerializableComparator comparatorTmp =
+          Comparator.getComparatorByDataTypeForMeasure(measure.getDataType());
       if (null != defaultValue) {
         for (int k = 0; k < msrFilterRangeValues.length; k++) {
-          int maxCompare = comparator.compare(msrFilterRangeValues[k],
+          int maxCompare = comparatorTmp.compare(msrFilterRangeValues[k],
               RestructureUtil.getMeasureDefaultValue(measure.getColumnSchema(),
                   measure.getDefaultValue()));
           if (maxCompare <= 0) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
index 0c268c9..f52d087 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
@@ -100,9 +100,11 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
     } else if (!msrColEvalutorInfoList.isEmpty() && !isMeasurePresentInCurrentBlock[0]) {
       CarbonMeasure measure = this.msrColEvalutorInfoList.get(0).getMeasure();
       byte[] defaultValue = measure.getDefaultValue();
+      SerializableComparator comparatorTmp =
+          Comparator.getComparatorByDataTypeForMeasure(measure.getDataType());
       if (null != defaultValue) {
         for (int k = 0; k < msrFilterRangeValues.length; k++) {
-          int maxCompare = comparator.compare(msrFilterRangeValues[k],
+          int maxCompare = comparatorTmp.compare(msrFilterRangeValues[k],
               RestructureUtil.getMeasureDefaultValue(measure.getColumnSchema(),
                   measure.getDefaultValue()));
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
index c5ed77d..97e750a 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
@@ -100,12 +100,14 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
     } else if (!msrColEvalutorInfoList.isEmpty() && !isMeasurePresentInCurrentBlock[0]) {
       CarbonMeasure measure = this.msrColEvalutorInfoList.get(0).getMeasure();
       byte[] defaultValue = measure.getDefaultValue();
+      SerializableComparator comparatorTmp =
+          Comparator.getComparatorByDataTypeForMeasure(measure.getDataType());
       if (null != defaultValue) {
         for (int k = 0; k < msrFilterRangeValues.length; k++) {
           Object convertedValue = RestructureUtil
               .getMeasureDefaultValue(measure.getColumnSchema(), measure.getDefaultValue());
           int maxCompare =
-              comparator.compare(msrFilterRangeValues[k], convertedValue);
+              comparatorTmp.compare(msrFilterRangeValues[k], convertedValue);
           if (maxCompare > 0) {
             isDefaultValuePresentInFilter = true;
             break;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
index ccbbc32..8ad0c48 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
@@ -303,7 +303,6 @@ public class ConditionalFilterResolverImpl implements FilterResolverIntf {
           this.dimColResolvedFilterInfo.getDimension(), segmentProperties, false);
     }
     return null;
-
   }
 
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
index d975c20..01aa939 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
@@ -135,14 +135,19 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato
       } else {
         DataRefNode startDataBlock =
             finder.findFirstDataBlock(dataRefNode, blockInfo.getStartKey());
-        while (startDataBlock.nodeIndex() < blockInfo.getStartBlockletIndex()) {
+        while ((null != startDataBlock) && (startDataBlock.nodeIndex() < blockInfo
+            .getStartBlockletIndex())) {
           startDataBlock = startDataBlock.getNextDataRefNode();
         }
         long numberOfBlockToScan = blockInfo.getNumberOfBlockletToScan();
        //if the number of blocklets to scan is not positive, derive it from the end block.
         if (numberOfBlockToScan <= 0) {
           DataRefNode endDataBlock = finder.findLastDataBlock(dataRefNode, blockInfo.getEndKey());
-          numberOfBlockToScan = endDataBlock.nodeIndex() - startDataBlock.nodeIndex() + 1;
+          if (null != startDataBlock) {
+            numberOfBlockToScan = endDataBlock.nodeIndex() - startDataBlock.nodeIndex() + 1;
+          } else {
+            numberOfBlockToScan = endDataBlock.nodeIndex() + 1;
+          }
         }
         blockInfo.setFirstDataBlock(startDataBlock);
         blockInfo.setNumberOfBlockToScan(numberOfBlockToScan);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java b/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
index 57d8177..0100c8b 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
@@ -110,6 +110,10 @@ public class BlockletFilterScanner extends BlockletFullScanner {
         totalPagesScanned.getCount() + dataBlock.numberOfPages());
     // apply min max
     if (isMinMaxEnabled) {
+      if (null == dataBlock.getColumnsMaxValue()
+              || null == dataBlock.getColumnsMinValue()) {
+        return true;
+      }
       BitSet bitSet = null;
       // check for implicit include filter instance
       if (filterExecuter instanceof ImplicitColumnFilterExecutor) {
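
The added check makes min/max pruning conservative: a blocklet without min/max statistics is scanned (return true) rather than pruned, since pruning without statistics could silently drop matching rows. A sketch of that decision under illustrative assumptions (byte[]-encoded values, an unsigned lexicographic comparator standing in for the real filter executors):

    // Conservative min/max pruning sketch: missing stats always mean "scan".
    final class MinMaxPruner {
      // Returns true if the block may contain the value and must be scanned.
      static boolean mustScan(byte[] min, byte[] max, byte[] value) {
        if (min == null || max == null) {
          return true; // no statistics: pruning would be unsafe, so scan
        }
        return compare(min, value) <= 0 && compare(value, max) <= 0;
      }

      // Unsigned lexicographic byte comparison.
      private static int compare(byte[] a, byte[] b) {
        int n = Math.min(a.length, b.length);
        for (int i = 0; i < n; i++) {
          int d = (a[i] & 0xFF) - (b[i] & 0xFF);
          if (d != 0) {
            return d;
          }
        }
        return a.length - b.length;
      }

      public static void main(String[] args) {
        byte[] min = {1};
        byte[] max = {9};
        System.out.println(mustScan(min, max, new byte[]{5}));   // true: in range
        System.out.println(mustScan(min, max, new byte[]{12}));  // false: prunable
        System.out.println(mustScan(null, max, new byte[]{12})); // true: no stats
      }
    }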

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
index d6671b4..9dc8fe6 100644
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
@@ -230,7 +230,13 @@ public class LoadMetadataDetails implements Serializable {
    * @return
    */
   public long getLoadStartTimeAsLong() {
-    return (!loadStartTime.isEmpty()) ? getTimeStamp(loadStartTime) : 0;
+    if (!loadStartTime.isEmpty()) {
+      Long time = getTimeStamp(loadStartTime);
+      if (null != time) {
+        return time;
+      }
+    }
+    return 0;
   }
 
   /**
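
The rewrite avoids an unboxing null dereference: in the old ternary, mixing a Long expression with the int literal 0 promotes the result to long, so a null returned by getTimeStamp(...) would be auto-unboxed and throw NullPointerException. A self-contained demonstration, with a hypothetical parseOrNull standing in for getTimeStamp:

    // Demonstrates the Long-unboxing hazard removed by the change above.
    final class UnboxingDemo {
      static Long parseOrNull(String s) {
        try {
          return Long.valueOf(s);
        } catch (NumberFormatException e) {
          return null;
        }
      }

      static long unsafe(String s) {
        // Long and int branches promote to long: a null Long is unboxed here.
        return (!s.isEmpty()) ? parseOrNull(s) : 0;
      }

      static long safe(String s) {
        if (!s.isEmpty()) {
          Long time = parseOrNull(s);
          if (time != null) {
            return time;
          }
        }
        return 0;
      }

      public static void main(String[] args) {
        System.out.println(safe("42"));        // 42
        System.out.println(safe("not-a-ts"));  // 0 instead of an NPE
        try {
          unsafe("not-a-ts");
        } catch (NullPointerException e) {
          System.out.println("unsafe() threw NullPointerException");
        }
      }
    }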

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
index 363b5bc..1c53fbb 100644
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
@@ -322,13 +322,12 @@ public class SegmentUpdateStatusManager {
    * @return the list of delete file
    */
   private List<String> getDeltaFiles(CarbonFile blockDir, final String blockNameFromTuple,
-      final String extension,
-      String segment) {
+      final String extension, String segment) throws IOException {
     List<String> deleteFileList = new ArrayList<>();
     for (SegmentUpdateDetails block : updateDetails) {
-      if (block.getBlockName().equalsIgnoreCase(blockNameFromTuple) &&
-          block.getSegmentName().equalsIgnoreCase(segment) &&
-          !CarbonUpdateUtil.isBlockInvalid(block.getSegmentStatus())) {
+      if (block.getBlockName().equalsIgnoreCase(blockNameFromTuple) && block.getSegmentName()
+          .equalsIgnoreCase(segment) && !CarbonUpdateUtil
+          .isBlockInvalid(block.getSegmentStatus())) {
         final long deltaStartTimestamp = getStartTimeOfDeltaFile(extension, block);
        // If there is no delete delta file, then return null
         if (deltaStartTimestamp == 0) {
@@ -347,33 +346,38 @@ public class SegmentUpdateStatusManager {
 
   private List<String> getFilePaths(CarbonFile blockDir, final String blockNameFromTuple,
       final String extension, List<String> deleteFileList, final long deltaStartTimestamp,
-      final long deltaEndTimeStamp) {
-    CarbonFile[] files = blockDir.getParentFile().listFiles(new CarbonFileFilter() {
-
-      @Override public boolean accept(CarbonFile pathName) {
-        String fileName = pathName.getName();
-        if (fileName.endsWith(extension) && pathName.getSize() > 0) {
-          String firstPart = fileName.substring(0, fileName.indexOf('.'));
-          String blockName =
-              firstPart.substring(0, firstPart.lastIndexOf(CarbonCommonConstants.HYPHEN));
-          long timestamp = Long.parseLong(firstPart
-              .substring(firstPart.lastIndexOf(CarbonCommonConstants.HYPHEN) + 1,
-                  firstPart.length()));
-          if (blockNameFromTuple.equals(blockName) && (
-              (Long.compare(timestamp, deltaEndTimeStamp) <= 0) && (
-                  Long.compare(timestamp, deltaStartTimestamp) >= 0))) {
-            return true;
+      final long deltaEndTimeStamp) throws IOException {
+    if (null != blockDir.getParentFile()) {
+      CarbonFile[] files = blockDir.getParentFile().listFiles(new CarbonFileFilter() {
+
+        @Override
+        public boolean accept(CarbonFile pathName) {
+          String fileName = pathName.getName();
+          if (fileName.endsWith(extension) && pathName.getSize() > 0) {
+            String firstPart = fileName.substring(0, fileName.indexOf('.'));
+            String blockName =
+                    firstPart.substring(0, firstPart.lastIndexOf(CarbonCommonConstants.HYPHEN));
+            long timestamp = Long.parseLong(firstPart
+                    .substring(firstPart.lastIndexOf(CarbonCommonConstants.HYPHEN) + 1,
+                            firstPart.length()));
+            if (blockNameFromTuple.equals(blockName) && (
+                    (Long.compare(timestamp, deltaEndTimeStamp) <= 0) && (
+                            Long.compare(timestamp, deltaStartTimestamp) >= 0))) {
+              return true;
+            }
           }
+          return false;
         }
-        return false;
-      }
-    });
+      });
 
-    for (CarbonFile cfile : files) {
-      if (null == deleteFileList) {
-        deleteFileList = new ArrayList<String>(files.length);
+      for (CarbonFile cfile : files) {
+        if (null == deleteFileList) {
+          deleteFileList = new ArrayList<String>(files.length);
+        }
+        deleteFileList.add(cfile.getCanonicalPath());
       }
-      deleteFileList.add(cfile.getCanonicalPath());
+    } else {
+      throw new IOException("Parent file could not be found");
     }
     return deleteFileList;
   }
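
Both methods now declare IOException and check blockDir.getParentFile() before use. A standalone sketch of the same guards using java.io.File in place of CarbonFile; the extra null check on the listFiles() result is an added precaution here, since java.io.File.listFiles can also return null on an I/O error:

    import java.io.File;
    import java.io.FileFilter;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    // Guarded directory listing: both getParentFile() and listFiles()
    // can return null and are checked before dereferencing.
    final class DeltaFileLister {
      static List<String> listWithExtension(File dir, final String extension)
          throws IOException {
        File parent = dir.getParentFile();
        if (parent == null) {
          throw new IOException("Parent file could not be found for " + dir);
        }
        File[] files = parent.listFiles(new FileFilter() {
          @Override public boolean accept(File f) {
            return f.getName().endsWith(extension) && f.length() > 0;
          }
        });
        List<String> result = new ArrayList<>();
        if (files != null) { // listFiles() returns null if the listing fails
          for (File f : files) {
            result.add(f.getCanonicalPath());
          }
        }
        return result;
      }
    }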

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
index b74c279..e30ad03 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
@@ -60,7 +60,12 @@ public abstract class AbstractDataFileFooterConverter {
    */
   private static BitSet getPresenceMeta(
       org.apache.carbondata.format.PresenceMeta presentMetadataThrift) {
-    return BitSet.valueOf(presentMetadataThrift.getPresent_bit_stream());
+    final byte[] present_bit_stream = presentMetadataThrift.getPresent_bit_stream();
+    if (null != present_bit_stream) {
+      return BitSet.valueOf(present_bit_stream);
+    } else {
+      return new BitSet(1);
+    }
   }
 
   /**
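
BitSet.valueOf(byte[]) throws NullPointerException on null input, so a footer without a presence bit stream is now mapped to an empty BitSet instead. A small standalone illustration:

    import java.util.BitSet;

    // Null-tolerant presence-bit decoding, mirroring the fallback above.
    final class PresenceBits {
      static BitSet fromStream(byte[] presentBitStream) {
        if (presentBitStream != null) {
          return BitSet.valueOf(presentBitStream); // little-endian bit order
        }
        return new BitSet(1); // empty set: no rows flagged
      }

      public static void main(String[] args) {
        System.out.println(fromStream(new byte[]{5})); // {0, 2}
        System.out.println(fromStream(null));          // {}
      }
    }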

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
index 9880b4d..af5121c 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
@@ -370,7 +370,7 @@ public class CarbonMetadataUtil {
     } else if (DataTypes.isDecimal(dataType)) {
       return DataTypeUtil.byteToBigDecimal(first).compareTo(DataTypeUtil.byteToBigDecimal(second));
     } else {
-      throw new IllegalArgumentException("Invalid data type");
+      throw new IllegalArgumentException("Invalid data type:" + dataType);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
index 9822167..f7f71b3 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
@@ -587,20 +587,16 @@ public final class DataTypeUtil {
       return null;
     }
     try {
-      Object parsedValue = null;
       if (actualDataType == DataTypes.SHORT) {
-        parsedValue = Short.parseShort(data);
+        Short.parseShort(data);
       } else if (actualDataType == DataTypes.INT) {
-        parsedValue = Integer.parseInt(data);
+        Integer.parseInt(data);
       } else if (actualDataType == DataTypes.LONG) {
-        parsedValue = Long.parseLong(data);
+        Long.parseLong(data);
       } else {
         return data;
       }
-      if (null != parsedValue) {
-        return data;
-      }
-      return null;
+      return data;
     } catch (NumberFormatException ex) {
       return null;
     }
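
Here the parse result was only ever used for its NumberFormatException, so storing it in a local and null-checking it was dead code (parseShort/parseInt/parseLong return primitives and can never yield null); the simplified form parses purely for validation. A reduced sketch of the pattern:

    // Parse-for-validation: the numeric result is intentionally discarded.
    final class NumericValidator {
      // Returns the original string if it is a valid long, else null.
      static String validateLong(String data) {
        if (data == null) {
          return null;
        }
        try {
          Long.parseLong(data); // only the potential exception matters
          return data;
        } catch (NumberFormatException ex) {
          return null;
        }
      }

      public static void main(String[] args) {
        System.out.println(validateLong("123"));  // 123
        System.out.println(validateLong("12x3")); // null
      }
    }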

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java b/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
index 62192ff..e8a121c 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
@@ -576,11 +576,15 @@ public class CarbonTablePath {
    */
   public static CarbonFile[] getSortIndexFiles(CarbonFile sortIndexDir,
       final String columnUniqueId) {
-    return sortIndexDir.listFiles(new CarbonFileFilter() {
-      @Override public boolean accept(CarbonFile file) {
-        return file.getName().startsWith(columnUniqueId) && file.getName().endsWith(SORT_INDEX_EXT);
-      }
-    });
+    if (null != sortIndexDir) {
+      return sortIndexDir.listFiles(new CarbonFileFilter() {
+        @Override public boolean accept(CarbonFile file) {
+          return file.getName().startsWith(columnUniqueId) && file.getName()
+              .endsWith(SORT_INDEX_EXT);
+        }
+      });
+    }
+    return null;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/test/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunkTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunkTest.java b/core/src/test/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunkTest.java
deleted file mode 100644
index 54b66a6..0000000
--- a/core/src/test/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunkTest.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.impl;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.carbondata.core.keygenerator.KeyGenException;
-import org.apache.carbondata.core.keygenerator.KeyGenerator;
-import org.apache.carbondata.core.keygenerator.mdkey.MultiDimKeyVarLengthGenerator;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.core.scan.executor.util.QueryUtil;
-
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-public class ColumnGroupDimensionDataChunkTest {
-
-  static ColumnGroupDimensionColumnPage columnGroupDimensionDataChunk;
-  static KeyGenerator keyGenerator;
-
-  @BeforeClass public static void setup() {
-    int[] bitLength = CarbonUtil.getDimensionBitLength(new int[] { 10, 10, 10 }, new int[] { 3 });
-    // create a key generator
-    keyGenerator = new MultiDimKeyVarLengthGenerator(bitLength);
-    byte[] data = new byte[keyGenerator.getKeySizeInBytes() * 3];
-    int position = 0;
-    for (int i = 1; i <= 3; i++) {
-      try {
-        System.arraycopy(keyGenerator.generateKey(new int[] { i, i, i }), 0, data, position,
-            keyGenerator.getKeySizeInBytes());
-      } catch (KeyGenException e) {
-        assertTrue(false);
-      }
-      position += keyGenerator.getKeySizeInBytes();
-    }
-    columnGroupDimensionDataChunk =
-        new ColumnGroupDimensionColumnPage(data, keyGenerator.getKeySizeInBytes(), 3);
-  }
-
-  @Test public void fillChunkDataTest() {
-    List<Integer> ordinals = new ArrayList<Integer>();
-    ordinals.add(1);
-    KeyStructureInfo keyStructureInfo = getKeyStructureInfo(ordinals, keyGenerator);
-    byte[] buffer = new byte[1];
-    columnGroupDimensionDataChunk.fillRawData(1, 0, buffer, keyStructureInfo);
-    assertEquals(buffer[0], 2);
-  }
-
-  @Test public void getChunkDataTest() {
-    byte[] b = { 34, 2 };
-    byte res[] = columnGroupDimensionDataChunk.getChunkData(1);
-    Assert.assertTrue(Arrays.equals(res, b));
-  }
-
-  @Test public void fillConvertedChunkDataTest() {
-    int[] row = new int[3];
-    int[] expected = { 0, 0, 3 };
-    List<Integer> ordinals = new ArrayList<Integer>();
-    ordinals.add(2);
-    KeyStructureInfo keyStructureInfo = getKeyStructureInfo(ordinals, keyGenerator);
-    keyStructureInfo.setMdkeyQueryDimensionOrdinal(new int[] { 2 });
-    int res = columnGroupDimensionDataChunk.fillSurrogateKey(2, 2, row, keyStructureInfo);
-    Assert.assertTrue(Arrays.equals(row, expected));
-  }
-
-  /**
-   * Below method will be used to get the key structure info for the query
-   *
-   * @param ordinals   query model
-   * @param keyGenerator
-   * @return key structure info
-   */
-  private KeyStructureInfo getKeyStructureInfo(List<Integer> ordinals, KeyGenerator keyGenerator) {
-    // getting the masked byte range for dictionary column
-    int[] maskByteRanges = QueryUtil.getMaskedByteRangeBasedOrdinal(ordinals, keyGenerator);
-
-    // getting the masked bytes for query dimension dictionary column
-    int[] maskedBytes = QueryUtil.getMaskedByte(keyGenerator.getKeySizeInBytes(), maskByteRanges);
-
-    // max key for the dictionary dimension present in the query
-    byte[] maxKey = null;
-    try {
-      // getting the max key which will be used to masked and get the
-      // masked key
-      maxKey = QueryUtil.getMaxKeyBasedOnOrinal(ordinals, keyGenerator);
-    } catch (KeyGenException e) {
-    }
-
-    KeyStructureInfo restructureInfos = new KeyStructureInfo();
-    restructureInfos.setKeyGenerator(keyGenerator);
-    restructureInfos.setMaskByteRanges(maskByteRanges);
-    restructureInfos.setMaxKey(maxKey);
-    return restructureInfos;
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java
index f55cd67..890d36d 100644
--- a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java
@@ -59,8 +59,11 @@ public class AlluxioCarbonFileTest {
             }
         try {
             FileOutputStream oFile = new FileOutputStream(file, true);
+            oFile.close();
         } catch (FileNotFoundException e) {
             e.printStackTrace();
+        } catch (IOException e) {
+            e.printStackTrace();
         }
 
         fileStatus = new FileStatus(12L, true, 60, 120l, 180L, new Path(file.getAbsolutePath()));
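
This test (and ViewFsCarbonFileTest below) opened a FileOutputStream without ever closing it, which Coverity reports as a resource leak; the fix closes the stream and catches the wider IOException. A hedged alternative sketch using try-with-resources, which closes the stream even when an exception escapes the block:

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;

    // try-with-resources variant: the stream is closed automatically.
    final class StreamCloseDemo {
      public static void main(String[] args) {
        File file = new File("Test.carbondata");
        try (FileOutputStream oFile = new FileOutputStream(file, true)) {
          oFile.write(0); // touch the file so the stream is actually used
        } catch (IOException e) {
          // FileNotFoundException is a subclass of IOException
          e.printStackTrace();
        }
      }
    }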

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/ViewFsCarbonFileTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/ViewFsCarbonFileTest.java b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/ViewFsCarbonFileTest.java
index ba661b1..82d1501 100644
--- a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/ViewFsCarbonFileTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/ViewFsCarbonFileTest.java
@@ -54,14 +54,17 @@ public class ViewFsCarbonFileTest {
         file = new File("Test.carbondata");
         if (!file.exists())
             try {
-                file.createNewFile();
+              file.createNewFile();
             } catch (IOException e) {
-                e.printStackTrace();
+              e.printStackTrace();
             }
         try {
-            FileOutputStream oFile = new FileOutputStream(file, true);
+          FileOutputStream oFile = new FileOutputStream(file, true);
+          oFile.close();
         } catch (FileNotFoundException e) {
             e.printStackTrace();
+        } catch (IOException e) {
+            e.printStackTrace();
         }
 
         fileStatus = new FileStatus(12L, true, 60, 120l, 180L, new Path(file.getAbsolutePath()));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java
index 32af8d3..8be1e2e 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java
@@ -16,39 +16,21 @@
  */
 package org.apache.carbondata.hadoop;
 
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.Cache;
 import org.apache.carbondata.core.cache.CacheProvider;
 import org.apache.carbondata.core.cache.CacheType;
-import org.apache.carbondata.core.datastore.SegmentTaskIndexStore;
 import org.apache.carbondata.core.datastore.TableSegmentUniqueIdentifier;
-import org.apache.carbondata.core.datastore.block.SegmentProperties;
 import org.apache.carbondata.core.datastore.block.SegmentTaskIndexWrapper;
-import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 
 /**
  * CacheClient : Holds all the Cache access clients for Btree, Dictionary
  */
 public class CacheClient {
 
-  private static final LogService LOGGER =
-      LogServiceFactory.getLogService(CacheClient.class.getName());
-
-  private final Object lock = new Object();
-
   // segment access client for driver LRU cache
   private CacheAccessClient<TableSegmentUniqueIdentifier, SegmentTaskIndexWrapper>
       segmentAccessClient;
 
-  private static Map<SegmentTaskIndexStore.SegmentPropertiesWrapper, SegmentProperties>
-      segmentProperties = new ConcurrentHashMap<>();
-
   public CacheClient() {
     Cache<TableSegmentUniqueIdentifier, SegmentTaskIndexWrapper> segmentCache =
         CacheProvider.getInstance().createCache(CacheType.DRIVER_BTREE);
@@ -63,35 +45,4 @@ public class CacheClient {
   public void close() {
     segmentAccessClient.close();
   }
-
-  /**
-   * Method to get the segment properties and avoid construction of new segment properties until
-   * the schema is not modified
-   *
-   * @param tableIdentifier
-   * @param columnsInTable
-   * @param columnCardinality
-   */
-  public SegmentProperties getSegmentProperties(AbsoluteTableIdentifier tableIdentifier,
-      List<ColumnSchema> columnsInTable, int[] columnCardinality) {
-    SegmentTaskIndexStore.SegmentPropertiesWrapper segmentPropertiesWrapper =
-        new SegmentTaskIndexStore.SegmentPropertiesWrapper(tableIdentifier, columnsInTable,
-            columnCardinality);
-    SegmentProperties segmentProperties = this.segmentProperties.get(segmentPropertiesWrapper);
-    if (null == segmentProperties) {
-      synchronized (lock) {
-        segmentProperties = this.segmentProperties.get(segmentPropertiesWrapper);
-        if (null == segmentProperties) {
-          // create a metadata details
-          // this will be useful in query handling
-          // all the data file metadata will have common segment properties we
-          // can use first one to get create the segment properties
-          LOGGER.info("Constructing new SegmentProperties");
-          segmentProperties = new SegmentProperties(columnsInTable, columnCardinality);
-          this.segmentProperties.put(segmentPropertiesWrapper, segmentProperties);
-        }
-      }
-    }
-    return segmentProperties;
-  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
index 0bcb188..e5e3165 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
@@ -124,6 +124,9 @@ public class CarbonOutputCommitter extends FileOutputCommitter {
         .mergeSegmentFiles(readPath, segmentFileName,
             CarbonTablePath.getSegmentFilesLocation(loadModel.getTablePath()));
     if (segmentFile != null) {
+      if (null == newMetaEntry) {
+        throw new RuntimeException("Internal Error");
+      }
       // Move all files from temp directory of each segment to partition directory
       SegmentFileStore.moveFromTempFolder(segmentFile,
           loadModel.getSegmentId() + "_" + loadModel.getFactTimeStamp() + ".tmp",

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java
index 7d9c712..b619158 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java
@@ -18,14 +18,10 @@
 package org.apache.carbondata.hadoop.internal.segment;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
 
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.JobContext;
 
@@ -56,25 +52,6 @@ public abstract class Segment {
   }
 
   /**
-   * return all InputSplit of this segment, each file is a InputSplit
-   * @param job job context
-   * @return all InputSplit
-   * @throws IOException
-   */
-  public List<InputSplit> getAllSplits(JobContext job) throws IOException {
-    List<InputSplit> result = new ArrayList<>();
-    Path p = new Path(path);
-    FileSystem fs = p.getFileSystem(job.getConfiguration());
-
-    //TODO: filter out the hidden files
-    FileStatus[] files = fs.globStatus(p);
-    for (FileStatus file: files) {
-      // make split and add to result
-    }
-    return result;
-  }
-
-  /**
    * get all files, implementation may use the input filter and index to prune files
    * @param job job context
    * @param filterResolver filter

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
index febca50..e95382c 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
@@ -196,12 +196,15 @@ public class CarbonDictionaryDecodeReadSupport<T> implements CarbonReadSupport<T
       if (carbonColumn.isDimension() && carbonColumn.getColumnSchema().getNumberOfChild() > 0) {
         childCarbonDimensions = ((CarbonDimension) carbonColumn).getListOfChildDimensions();
       }
-      Writable[] arr = new Writable[objArray.length];
-      for (int i = 0; i < objArray.length; i++) {
 
-        arr[i] = createWritableObject(objArray[i], childCarbonDimensions.get(i));
+      if (null != childCarbonDimensions) {
+        Writable[] arr = new Writable[objArray.length];
+        for (int i = 0; i < objArray.length; i++) {
+
+          arr[i] = createWritableObject(objArray[i], childCarbonDimensions.get(i));
+        }
+        return new ArrayWritable(Writable.class, arr);
       }
-      return new ArrayWritable(Writable.class, arr);
     }
     throw new IOException("DataType not supported in Carbondata");
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
index 89a5ed6..d4cf480 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
@@ -77,13 +77,17 @@ public class MapredCarbonInputFormat extends CarbonTableInputFormat<ArrayWritabl
         }
       }
     }
-    AbsoluteTableIdentifier absoluteTableIdentifier = AbsoluteTableIdentifier
-        .from(validInputPath, getDatabaseName(configuration), getTableName(configuration));
-    // read the schema file to get the absoluteTableIdentifier having the correct table id
-    // persisted in the schema
-    CarbonTable carbonTable = SchemaReader.readCarbonTableFromStore(absoluteTableIdentifier);
-    configuration.set(CARBON_TABLE, ObjectSerializationUtil.convertObjectToString(carbonTable));
-    setTableInfo(configuration, carbonTable.getTableInfo());
+    if (null != validInputPath) {
+      AbsoluteTableIdentifier absoluteTableIdentifier = AbsoluteTableIdentifier
+          .from(validInputPath, getDatabaseName(configuration), getTableName(configuration));
+      // read the schema file to get the absoluteTableIdentifier having the correct table id
+      // persisted in the schema
+      CarbonTable carbonTable = SchemaReader.readCarbonTableFromStore(absoluteTableIdentifier);
+      configuration.set(CARBON_TABLE, ObjectSerializationUtil.convertObjectToString(carbonTable));
+      setTableInfo(configuration, carbonTable.getTableInfo());
+    } else {
+      throw new InvalidPathException("No input paths specified in job");
+    }
   }
 
   private static CarbonTable getCarbonTable(Configuration configuration, String path)
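
The method now fails fast with a clear message when no valid input path was configured, instead of passing null into AbsoluteTableIdentifier.from. A generic sketch of the fail-fast pattern, with IllegalArgumentException standing in for the InvalidPathException used above:

    // Fail-fast validation: reject a missing value at the boundary
    // rather than letting a null propagate into later calls.
    final class FailFast {
      static String requireValidPath(String candidate) {
        if (candidate == null || candidate.isEmpty()) {
          throw new IllegalArgumentException("No input paths specified in job");
        }
        return candidate;
      }

      public static void main(String[] args) {
        System.out.println(requireValidPath("/store/db/table"));
        try {
          requireValidPath(null);
        } catch (IllegalArgumentException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }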

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataSplitManager.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataSplitManager.java b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataSplitManager.java
index 811393f..3a54b22 100755
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataSplitManager.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataSplitManager.java
@@ -67,20 +67,23 @@ public class CarbondataSplitManager implements ConnectorSplitManager {
         getColumnConstraints(layoutHandle.getConstraint());
 
     CarbonTableCacheModel cache = carbonTableReader.getCarbonCache(key);
-    Expression filters = PrestoFilterUtil.parseFilterExpression(layoutHandle.getConstraint());
-    try {
-      List<CarbonLocalInputSplit> splits = carbonTableReader.getInputSplits2(cache, filters, layoutHandle.getConstraint());
-
-      ImmutableList.Builder<ConnectorSplit> cSplits = ImmutableList.builder();
-      for (CarbonLocalInputSplit split : splits) {
-        cSplits.add(new CarbondataSplit(connectorId, tableHandle.getSchemaTableName(),
-            layoutHandle.getConstraint(), split, rebuildConstraints));
+    if (null != cache) {
+      Expression filters = PrestoFilterUtil.parseFilterExpression(layoutHandle.getConstraint());
+      try {
+        List<CarbonLocalInputSplit> splits = carbonTableReader.getInputSplits2(cache, filters,
+                layoutHandle.getConstraint());
+
+        ImmutableList.Builder<ConnectorSplit> cSplits = ImmutableList.builder();
+        for (CarbonLocalInputSplit split : splits) {
+          cSplits.add(new CarbondataSplit(connectorId, tableHandle.getSchemaTableName(),
+              layoutHandle.getConstraint(), split, rebuildConstraints));
+        }
+        return new FixedSplitSource(cSplits.build());
+      } catch (Exception ex) {
+        throw new RuntimeException(ex.getMessage(), ex);
       }
-      return new FixedSplitSource(cSplits.build());
-    } catch (Exception ex) {
-      throw new RuntimeException(ex.getMessage(), ex);
     }
-
+    return null;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonLocalInputSplit.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonLocalInputSplit.java b/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonLocalInputSplit.java
index 3c42d0a..2c6a810 100755
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonLocalInputSplit.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonLocalInputSplit.java
@@ -115,15 +115,21 @@ public class CarbonLocalInputSplit {
 
   }
 
-  public static  CarbonInputSplit convertSplit(CarbonLocalInputSplit carbonLocalInputSplit) {
+  public static CarbonInputSplit convertSplit(CarbonLocalInputSplit carbonLocalInputSplit) {
     CarbonInputSplit inputSplit = new CarbonInputSplit(carbonLocalInputSplit.getSegmentId(), "0",
         new Path(carbonLocalInputSplit.getPath()), carbonLocalInputSplit.getStart(),
         carbonLocalInputSplit.getLength(), carbonLocalInputSplit.getLocations()
         .toArray(new String[carbonLocalInputSplit.getLocations().size()]),
-        carbonLocalInputSplit.getNumberOfBlocklets(), ColumnarFormatVersion.valueOf(carbonLocalInputSplit.getVersion()),
+        carbonLocalInputSplit.getNumberOfBlocklets(),
+        ColumnarFormatVersion.valueOf(carbonLocalInputSplit.getVersion()),
         carbonLocalInputSplit.getDeleteDeltaFiles());
     Gson gson = new Gson();
-    BlockletDetailInfo blockletDetailInfo = gson.fromJson(carbonLocalInputSplit.detailInfo, BlockletDetailInfo.class);
+    BlockletDetailInfo blockletDetailInfo =
+        gson.fromJson(carbonLocalInputSplit.detailInfo, BlockletDetailInfo.class);
+
+    if (null == blockletDetailInfo) {
+      throw new RuntimeException("Could not read blocklet details");
+    }
     try {
       blockletDetailInfo.readColumnSchema(blockletDetailInfo.getColumnSchemaBinary());
     } catch (IOException e) {
@@ -132,6 +138,4 @@ public class CarbonLocalInputSplit {
     inputSplit.setDetailInfo(blockletDetailInfo);
     return inputSplit;
   }
-
-
 }
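
convertSplit now rejects a null deserialization result: Gson.fromJson returns null rather than throwing when the input is empty or the JSON literal null, so the following readColumnSchema call would otherwise dereference null. A small demonstration with a hypothetical Detail payload type (assumes Gson on the classpath):

    import com.google.gson.Gson;

    // Gson.fromJson can return null; check before dereferencing.
    final class GsonNullDemo {
      static final class Detail {
        int rows;
      }

      public static void main(String[] args) {
        Gson gson = new Gson();
        Detail ok = gson.fromJson("{\"rows\": 3}", Detail.class);
        Detail missing = gson.fromJson("null", Detail.class); // returns null
        if (missing == null) {
          System.out.println("Could not read blocklet details");
        }
        System.out.println("rows = " + ok.rows); // rows = 3
      }
    }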

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/BooleanStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/BooleanStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/BooleanStreamReader.java
index 4507425..0b7206b 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/BooleanStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/BooleanStreamReader.java
@@ -62,10 +62,8 @@ public class BooleanStreamReader extends AbstractStreamReader {
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeBoolean(builder, byteToBoolean(streamData[i]));
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeBoolean(builder, byteToBoolean(streamData[i]));
       }
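
streamData.length is evaluated before the if (streamData != null) test, so on every path reaching the check the reference is already known to be non-null and the check is dead (the deref-before-null-check pattern Coverity flags); removing it is behavior-preserving. The same cleanup repeats in the Double, Integer, Long, Object, Short, Slice, and Timestamp stream readers below. A minimal illustration:

    // A null check after the reference was already dereferenced is dead code.
    final class DeadNullCheck {
      static int before(int[] data) {
        int n = data.length;   // throws NullPointerException here if data is null
        int sum = 0;
        if (data != null) {    // always true at this point: dead check
          for (int i = 0; i < n; i++) {
            sum += data[i];
          }
        }
        return sum;
      }

      static int after(int[] data) {
        int n = data.length;
        int sum = 0;
        for (int i = 0; i < n; i++) {
          sum += data[i];
        }
        return sum;
      }

      public static void main(String[] args) {
        System.out.println(before(new int[]{1, 2, 3})); // 6
        System.out.println(after(new int[]{1, 2, 3}));  // 6
      }
    }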
     }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/DoubleStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/DoubleStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/DoubleStreamReader.java
index 23db769..3e7fc59 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/DoubleStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/DoubleStreamReader.java
@@ -59,7 +59,7 @@ public class DoubleStreamReader extends AbstractStreamReader {
       numberOfRows = batchSize;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
       if (columnVector != null) {
-        if(isDictionary) {
+        if (isDictionary) {
           populateDictionaryVector(type, numberOfRows, builder);
         } else {
           if (columnVector.anyNullsSet()) {
@@ -72,10 +72,8 @@ public class DoubleStreamReader extends AbstractStreamReader {
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeDouble(builder, (Double) streamData[i]);
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeDouble(builder, (Double) streamData[i]);
       }
     }
 


http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/IntegerStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/IntegerStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/IntegerStreamReader.java
index 7ddd181..ffe1aef 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/IntegerStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/IntegerStreamReader.java
@@ -49,7 +49,7 @@ public class IntegerStreamReader extends AbstractStreamReader {
       numberOfRows = batchSize;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
       if (columnVector != null) {
-        if(isDictionary) {
+        if (isDictionary) {
           populateDictionaryVector(type, numberOfRows, builder);
         } else {
           if (columnVector.anyNullsSet()) {
@@ -62,10 +62,8 @@ public class IntegerStreamReader extends AbstractStreamReader {
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeLong(builder, ((Integer) streamData[i]).longValue());
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeLong(builder, ((Integer) streamData[i]).longValue());
       }
     }
     return builder.build();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/LongStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/LongStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/LongStreamReader.java
index 015ac80..e1000c5 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/LongStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/LongStreamReader.java
@@ -49,7 +49,7 @@ public class LongStreamReader extends AbstractStreamReader {
       numberOfRows = batchSize;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
       if (columnVector != null) {
-        if(isDictionary) {
+        if (isDictionary) {
           populateDictionaryVector(type, numberOfRows, builder);
         }
         if (columnVector.anyNullsSet()) {
@@ -61,10 +61,8 @@ public class LongStreamReader extends AbstractStreamReader {
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeLong(builder, (Long) streamData[i]);
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeLong(builder, (Long) streamData[i]);
       }
     }
     return builder.build();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ObjectStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ObjectStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ObjectStreamReader.java
index 82d62ad..8952712 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ObjectStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ObjectStreamReader.java
@@ -41,32 +41,26 @@ public class ObjectStreamReader  extends AbstractStreamReader {
    * @return
    * @throws IOException
    */
-  public Block readBlock(Type type)
-      throws IOException
-  {
+  public Block readBlock(Type type) throws IOException {
     int numberOfRows = 0;
     BlockBuilder builder = null;
-    if(isVectorReader) {
+    if (isVectorReader) {
       numberOfRows = batchSize;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
       if (columnVector != null) {
-        for(int i = 0; i < numberOfRows ; i++ ){
+        for (int i = 0; i < numberOfRows; i++) {
           type.writeObject(builder, columnVector.getData(i));
         }
       }
-
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for(int i = 0; i < numberOfRows ; i++ ){
-          type.writeObject(builder, streamData[i]);
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeObject(builder, streamData[i]);
       }
     }
 
     return builder.build();
-
   }
 
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ShortStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ShortStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ShortStreamReader.java
index 00e5485..51f1cd5 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ShortStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ShortStreamReader.java
@@ -49,7 +49,7 @@ public class ShortStreamReader extends AbstractStreamReader {
       numberOfRows = batchSize;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
       if (columnVector != null) {
-        if(isDictionary) {
+        if (isDictionary) {
           populateDictionaryVector(type, numberOfRows, builder);
         } else {
           if (columnVector.anyNullsSet()) {
@@ -59,13 +59,11 @@ public class ShortStreamReader extends AbstractStreamReader {
           }
         }
       }
-   } else {
+    } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeLong(builder, (Short) streamData[i]);
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeLong(builder, (Short) streamData[i]);
       }
     }
     return builder.build();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/SliceStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/SliceStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/SliceStreamReader.java
index d98afa3..cce35e0 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/SliceStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/SliceStreamReader.java
@@ -72,7 +72,7 @@ public class SliceStreamReader extends AbstractStreamReader {
           }
           return new DictionaryBlock(batchSize, dictionarySliceArrayBlock, values);
         } else {
-          if(columnVector.anyNullsSet()) {
+          if (columnVector.anyNullsSet()) {
             handleNullInVector(type, numberOfRows, builder);
           } else {
             populateVector(type, numberOfRows, builder);
@@ -82,10 +82,8 @@ public class SliceStreamReader extends AbstractStreamReader {
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeSlice(builder, utf8Slice(streamData[i].toString()));
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeSlice(builder, utf8Slice(streamData[i].toString()));
       }
     }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/TimestampStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/TimestampStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/TimestampStreamReader.java
index 01b7939..a22ef29 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/TimestampStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/TimestampStreamReader.java
@@ -39,10 +39,9 @@ public class TimestampStreamReader extends AbstractStreamReader {
       numberOfRows = batchSize;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
       if (columnVector != null) {
-        if(columnVector.anyNullsSet()) {
+        if (columnVector.anyNullsSet()) {
           handleNullInVector(type, numberOfRows, builder);
-        }
-        else {
+        } else {
           populateVector(type, numberOfRows, builder);
         }
       }
@@ -50,10 +49,8 @@ public class TimestampStreamReader extends AbstractStreamReader {
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeLong(builder, (Long) streamData[i]);
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeLong(builder, (Long) streamData[i]);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
index d45e759..49aa7ff 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.common.util
 
-import java.io.{FileInputStream, ObjectInputStream, ObjectOutputStream}
-import java.math
-import java.math.RoundingMode
+import java.io.{ObjectInputStream, ObjectOutputStream}
 import java.util.{Locale, TimeZone}
 
 import org.apache.carbondata.common.logging.LogServiceFactory

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
index 1e98ec2..f4948c4 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
@@ -38,7 +38,6 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.network.TransportContext;
 import org.apache.spark.network.netty.SparkTransportConf;
 import org.apache.spark.network.sasl.SaslServerBootstrap;
-import org.apache.spark.network.server.TransportServer;
 import org.apache.spark.network.server.TransportServerBootstrap;
 import org.apache.spark.network.util.TransportConf;
 import scala.Some;
@@ -144,8 +143,7 @@ public class SecureDictionaryServer extends AbstractDictionaryServer implements
         TransportServerBootstrap bootstrap =
             new SaslServerBootstrap(transportConf, securityManager);
         String host = findLocalIpAddress(LOGGER);
-        TransportServer transportServer = context
-            .createServer(host, port, Lists.<TransportServerBootstrap>newArrayList(bootstrap));
+        context.createServer(host, port, Lists.<TransportServerBootstrap>newArrayList(bootstrap));
         LOGGER.audit("Dictionary Server started, Time spent " + (System.currentTimeMillis() - start)
             + " Listening on port " + newPort);
         this.port = newPort;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonCleanFilesRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonCleanFilesRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonCleanFilesRDD.scala
deleted file mode 100644
index 9936a2a..0000000
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonCleanFilesRDD.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.spark.rdd
-
-import scala.collection.JavaConverters._
-import scala.reflect.ClassTag
-
-import org.apache.spark.{Partition, SparkContext, TaskContext}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.execution.command.Partitioner
-
-import org.apache.carbondata.processing.util.CarbonQueryUtil
-import org.apache.carbondata.spark.Value
-
-class CarbonCleanFilesRDD[V: ClassTag](
-    sc: SparkContext,
-    valueClass: Value[V],
-    databaseName: String,
-    tableName: String,
-    partitioner: Partitioner)
-  extends CarbonRDD[V](sc, Nil, sc.hadoopConfiguration) {
-
-  sc.setLocalProperty("spark.scheduler.pool", "DDL")
-
-
-  override def getPartitions: Array[Partition] = {
-    val splits = CarbonQueryUtil.getTableSplits(databaseName, tableName, null)
-    splits.zipWithIndex.map(s => new CarbonLoadPartition(id, s._2, s._1))
-  }
-
-  override def internalCompute(theSplit: Partition, context: TaskContext): Iterator[V] = {
-    val iter = new Iterator[(V)] {
-      val split = theSplit.asInstanceOf[CarbonLoadPartition]
-      logInfo("Input split: " + split.serializableHadoopSplit.value)
-      // TODO call CARBON delete API
-
-
-      var havePair = false
-      var finished = false
-
-      override def hasNext: Boolean = {
-        if (!finished && !havePair) {
-          finished = true
-          havePair = !finished
-        }
-        !finished
-      }
-
-      override def next(): V = {
-        if (!hasNext) {
-          throw new java.util.NoSuchElementException("End of stream")
-        }
-        havePair = false
-        valueClass.getValue(null)
-      }
-
-    }
-    iter
-  }
-
-  override def getPreferredLocations(split: Partition): Seq[String] = {
-    val theSplit = split.asInstanceOf[CarbonLoadPartition]
-    val s = theSplit.serializableHadoopSplit.value.getLocations.asScala
-    logInfo("Host Name: " + s.head + s.length)
-    s
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala
deleted file mode 100644
index b11dfad..0000000
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.spark.rdd
-
-import scala.collection.JavaConverters._
-
-import org.apache.spark.{Partition, SparkContext, TaskContext}
-import org.apache.spark.rdd.RDD
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.statusmanager.LoadMetadataDetails
-import org.apache.carbondata.processing.util.CarbonQueryUtil
-import org.apache.carbondata.spark.DeletedLoadResult
-
-class CarbonDeleteLoadByDateRDD[K, V](
-    sc: SparkContext,
-    result: DeletedLoadResult[K, V],
-    databaseName: String,
-    tableName: String,
-    dateField: String,
-    dateFieldActualName: String,
-    dateValue: String,
-    factTableName: String,
-    dimTableName: String,
-    storePath: String,
-    loadMetadataDetails: List[LoadMetadataDetails])
-  extends CarbonRDD[(K, V)](sc, Nil, sc.hadoopConfiguration) {
-
-  sc.setLocalProperty("spark.scheduler.pool", "DDL")
-
-  override def getPartitions: Array[Partition] = {
-    val splits = CarbonQueryUtil.getTableSplits(databaseName, tableName, null)
-    splits.zipWithIndex.map {s =>
-      new CarbonLoadPartition(id, s._2, s._1)
-    }
-  }
-
-  override def internalCompute(theSplit: Partition, context: TaskContext): Iterator[(K, V)] = {
-    new Iterator[(K, V)] {
-      val split = theSplit.asInstanceOf[CarbonLoadPartition]
-      logInfo("Input split: " + split.serializableHadoopSplit.value)
-
-      logInfo("Input split: " + split.serializableHadoopSplit.value)
-      val partitionID = split.serializableHadoopSplit.value.getPartition.getUniqueID
-
-      // TODO call CARBON delete API
-      logInfo("Applying data retention as per date value " + dateValue)
-      var dateFormat = ""
-      try {
-        dateFormat = CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT
-      } catch {
-        case e: Exception => logInfo("Unable to parse with default time format " + dateValue)
-      }
-      // TODO: Implement it
-      val finished = false
-
-      override def hasNext: Boolean = {
-        finished
-      }
-
-      override def next(): (K, V) = {
-        result.getKey(null, null)
-      }
-    }
-  }
-
-  override def getPreferredLocations(split: Partition): Seq[String] = {
-    val theSplit = split.asInstanceOf[CarbonLoadPartition]
-    val s = theSplit.serializableHadoopSplit.value.getLocations.asScala
-    logInfo("Host Name: " + s.head + s.length)
-    s
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadRDD.scala
deleted file mode 100644
index 759ed42..0000000
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadRDD.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.spark.rdd
-
-import scala.collection.JavaConverters._
-import scala.reflect.ClassTag
-
-import org.apache.spark.{Partition, SparkContext, TaskContext}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.execution.command.Partitioner
-
-import org.apache.carbondata.processing.util.CarbonQueryUtil
-import org.apache.carbondata.spark.Value
-
-class CarbonDeleteLoadRDD[V: ClassTag](
-    sc: SparkContext,
-    valueClass: Value[V],
-    loadId: Int,
-    databaseName: String,
-    tableName: String,
-    partitioner: Partitioner)
-  extends CarbonRDD[V](sc, Nil, sc.hadoopConfiguration) {
-  sc.setLocalProperty("spark.scheduler.pool", "DDL")
-
-  override def getPartitions: Array[Partition] = {
-    val splits = CarbonQueryUtil.getTableSplits(databaseName, tableName, null)
-    splits.zipWithIndex.map {f =>
-      new CarbonLoadPartition(id, f._2, f._1)
-    }
-  }
-
-  override def internalCompute(theSplit: Partition, context: TaskContext): Iterator[V] = {
-    val iter = new Iterator[V] {
-      val split = theSplit.asInstanceOf[CarbonLoadPartition]
-      logInfo("Input split: " + split.serializableHadoopSplit.value)
-      // TODO call CARBON delete API
-
-      var havePair = false
-      var finished = false
-
-      override def hasNext: Boolean = {
-        if (!finished && !havePair) {
-          finished = true
-          havePair = !finished
-        }
-        !finished
-      }
-
-      override def next(): V = {
-        if (!hasNext) {
-          throw new java.util.NoSuchElementException("End of stream")
-        }
-        havePair = false
-        valueClass.getValue(null)
-      }
-
-    }
-    logInfo("********Deleting***************")
-    iter
-  }
-
-  override def getPreferredLocations(split: Partition): Seq[String] = {
-    val theSplit = split.asInstanceOf[CarbonLoadPartition]
-    val s = theSplit.serializableHadoopSplit.value.getLocations.asScala
-    logInfo("Host Name: " + s.head + s.length)
-    s
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropTableRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropTableRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropTableRDD.scala
deleted file mode 100644
index f327d88..0000000
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropTableRDD.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.spark.rdd
-
-import scala.reflect.ClassTag
-
-import org.apache.spark.{Partition, SparkContext, TaskContext}
-import org.apache.spark.rdd.RDD
-
-import org.apache.carbondata.processing.util.CarbonQueryUtil
-import org.apache.carbondata.spark.Value
-
-class CarbonDropTableRDD[V: ClassTag](
-    sc: SparkContext,
-    valueClass: Value[V],
-    databaseName: String,
-    tableName: String)
-  extends CarbonRDD[V](sc, Nil, sc.hadoopConfiguration) {
-
-  sc.setLocalProperty("spark.scheduler.pool", "DDL")
-
-  override def getPartitions: Array[Partition] = {
-    val splits = CarbonQueryUtil.getTableSplits(databaseName, tableName, null)
-    splits.zipWithIndex.map { s =>
-      new CarbonLoadPartition(id, s._2, s._1)
-    }
-  }
-
-  override def internalCompute(theSplit: Partition, context: TaskContext): Iterator[V] = {
-
-      val iter = new Iterator[V] {
-      // TODO: Clear Btree from memory
-
-      var havePair = false
-      var finished = false
-
-      override def hasNext: Boolean = {
-        if (!finished && !havePair) {
-          finished = true
-          havePair = !finished
-        }
-        !finished
-      }
-
-      override def next(): V = {
-        if (!hasNext) {
-          throw new java.util.NoSuchElementException("End of stream")
-        }
-        havePair = false
-        valueClass.getValue(null)
-      }
-    }
-    iter
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
index c2c4ab3..5c3ace3 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
@@ -153,6 +153,15 @@ object QueryTest {
         Row.fromSeq(s.toSeq.map {
           case d: java.math.BigDecimal => BigDecimal(d)
           case b: Array[Byte] => b.toSeq
+          case d: Double =>
+            if (!d.isInfinite && !d.isNaN) {
+              val bd = BigDecimal(d).setScale(5, BigDecimal.RoundingMode.UP)
+              bd.doubleValue()
+            } else {
+              d
+            }
           case o => o
         })
       }

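The QueryTest change above rounds finite doubles to five decimal places before
comparing rows, so results that differ only by floating-point noise still
compare equal. A minimal standalone sketch of the same rounding rule (plain
Java, hypothetical helper name roundForComparison):

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    public class DoubleRounding {
      // Round a finite double to 5 decimal places (rounding away from zero),
      // mirroring the comparison rule above; NaN and infinities pass through.
      static double roundForComparison(double d) {
        if (Double.isInfinite(d) || Double.isNaN(d)) {
          return d;
        }
        return BigDecimal.valueOf(d).setScale(5, RoundingMode.UP).doubleValue();
      }

      public static void main(String[] args) {
        System.out.println(roundForComparison(1.0000001));   // 1.00001
        System.out.println(roundForComparison(Double.NaN));  // NaN
      }
    }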
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark2/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java b/integration/spark2/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
index 903bf44..082ef8b 100644
--- a/integration/spark2/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
+++ b/integration/spark2/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
@@ -48,6 +48,7 @@ import org.apache.carbondata.hadoop.CarbonMultiBlockSplit;
 import org.apache.carbondata.hadoop.InputMetricsStats;
 import org.apache.carbondata.spark.util.CarbonScalaUtil;
 
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.spark.memory.MemoryMode;
@@ -134,24 +135,17 @@ class VectorizedCarbonRecordReader extends AbstractRecordReader<Object> {
       queryExecutor = QueryExecutorFactory.getQueryExecutor(queryModel);
       iterator = (AbstractDetailQueryResultIterator) queryExecutor.execute(queryModel);
     } catch (QueryExecutionException e) {
-      Throwable ext = e;
-      while (ext != null) {
-        if (ext instanceof FileNotFoundException) {
-          throw new InterruptedException(
-              "Insert overwrite may be in progress.Please check " + e.getMessage());
-        }
-        ext = ext.getCause();
+      if (ExceptionUtils.indexOfThrowable(e, FileNotFoundException.class) >= 0) {
+        LOGGER.error(e);
+        throw new InterruptedException(
+            "Insert overwrite may be in progress. Please check " + e.getMessage());
       }
       throw new InterruptedException(e.getMessage());
     } catch (Exception e) {
-      Throwable ext = e;
-      while (ext != null) {
-        if (ext instanceof FileNotFoundException) {
-          LOGGER.error(e);
-          throw new InterruptedException(
-              "Insert overwrite may be in progress.Please check " + e.getMessage());
-        }
-        ext = ext.getCause();
+      if (ExceptionUtils.indexOfThrowable(e, FileNotFoundException.class) >= 0) {
+        LOGGER.error(e);
+        throw new InterruptedException(
+            "Insert overwrite may be in progress. Please check " + e.getMessage());
       }
       throw e;
     }

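The rewrite above replaces a hand-rolled walk of the cause chain with
ExceptionUtils.indexOfThrowable, which returns the index of the first match in
the chain (the top-level throwable is index 0) or -1 when absent; that return
convention is why the guard tests for a non-negative index. A small sketch of
the semantics:

    import java.io.FileNotFoundException;

    import org.apache.commons.lang3.exception.ExceptionUtils;

    public class ChainCheck {
      public static void main(String[] args) {
        Exception wrapped = new RuntimeException(new FileNotFoundException("part-0"));
        Exception direct = new FileNotFoundException("part-1");
        // found in the cause chain: index 1 (the wrapper itself is index 0)
        System.out.println(
            ExceptionUtils.indexOfThrowable(wrapped, FileNotFoundException.class)); // 1
        // the throwable itself matches: index 0, which a "> 0" test would miss
        System.out.println(
            ExceptionUtils.indexOfThrowable(direct, FileNotFoundException.class));  // 0
        // not present anywhere: -1
        System.out.println(
            ExceptionUtils.indexOfThrowable(new RuntimeException(),
                FileNotFoundException.class));                                      // -1
      }
    }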
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java
index d668329..58ec0d5 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java
@@ -24,8 +24,8 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.nio.charset.Charset;
-import java.util.HashMap;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
@@ -47,7 +47,7 @@ public class BadRecordsLogger {
    * the status
    */
   private static Map<String, String> badRecordEntry =
-      new HashMap<String, String>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
+      new ConcurrentHashMap<String, String>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
   /**
    * File Name
    */
@@ -121,6 +121,9 @@ public class BadRecordsLogger {
 
   public void addBadRecordsToBuilder(Object[] row, String reason)
       throws CarbonDataLoadingException {
+    // set the partial success entry up front: if any bad record is found, the load
+    // status must be partial success, regardless of whether the record is logged
+    badRecordEntry.put(taskKey, "Partially");
     if (badRecordsLogRedirect || badRecordLoggerEnable) {
       StringBuilder logStrings = new StringBuilder();
       int size = row.length;
@@ -158,10 +161,6 @@ public class BadRecordsLogger {
         }
         writeBadRecordsToFile(logStrings);
       }
-    } else {
-      // setting partial success entry since even if bad records are there then load
-      // status should be partial success regardless of bad record logged
-      badRecordEntry.put(taskKey, "Partially");
     }
   }
 
@@ -200,11 +199,6 @@ public class BadRecordsLogger {
     } catch (IOException e) {
       LOGGER.error("Error While writing bad record log File");
       throw new CarbonDataLoadingException("Error While writing bad record log File", e);
-    } finally {
-      // if the Bad record file is created means it partially success
-      // if any entry present with key that means its have bad record for
-      // that key
-      badRecordEntry.put(taskKey, "Partially");
     }
   }
 
@@ -246,9 +240,6 @@ public class BadRecordsLogger {
       LOGGER.error("Error While writing bad record csv File");
       throw new CarbonDataLoadingException("Error While writing bad record csv File", e);
     }
-    finally {
-      badRecordEntry.put(taskKey, "Partially");
-    }
   }
 
   public boolean isBadRecordConvertNullDisable() {

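Two things happen in the BadRecordsLogger change: the shared status map becomes
a ConcurrentHashMap because multiple load tasks write to it concurrently (a
plain HashMap can lose updates or corrupt its buckets under concurrent put),
and the "Partially" entry is recorded once at the top of the method instead of
in three separate finally blocks. A minimal sketch of the concurrent-map point,
with hypothetical task keys:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class StatusMapDemo {
      // shared across loader threads; ConcurrentHashMap makes put() safe
      // without external locking
      private static final Map<String, String> badRecordEntry = new ConcurrentHashMap<>();

      public static void main(String[] args) throws InterruptedException {
        Thread[] tasks = new Thread[8];
        for (int i = 0; i < tasks.length; i++) {
          final String taskKey = "db/table/segment-" + i;
          tasks[i] = new Thread(() -> badRecordEntry.put(taskKey, "Partially"));
          tasks[i].start();
        }
        for (Thread t : tasks) {
          t.join();
        }
        System.out.println(badRecordEntry.size()); // always 8
      }
    }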
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ThreadStatusObserver.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ThreadStatusObserver.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ThreadStatusObserver.java
index ed35a96..12f6927 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ThreadStatusObserver.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ThreadStatusObserver.java
@@ -39,17 +39,18 @@ public class ThreadStatusObserver {
     // should assign the throwable object else the actual cause for failure can be overridden as
     // all the running threads will throw interrupted exception on calling shutdownNow and
     // will override the throwable object
-    if (null == this.throwable) {
-      synchronized (lock) {
-        if (null == this.throwable) {
-          executorService.shutdownNow();
-          this.throwable = throwable;
-        }
+    synchronized (lock) {
+      if (null == this.throwable) {
+        executorService.shutdownNow();
+        this.throwable = throwable;
       }
     }
   }
 
   public Throwable getThrowable() {
-    return throwable;
+    synchronized (lock) {
+      return throwable;
+    }
   }
 }

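The observer fix drops double-checked locking on a non-volatile field: the
unsynchronized first null check has no visibility guarantee, so both the write
and the read now happen under the lock. A minimal sketch of the corrected
first-failure holder:

    public class FirstFailureHolder {
      private final Object lock = new Object();
      private Throwable throwable;

      // record only the first failure; threads interrupted later (e.g. by
      // shutdownNow) must not overwrite the original cause
      public void notifyFailed(Throwable t) {
        synchronized (lock) {
          if (throwable == null) {
            throwable = t;
          }
        }
      }

      // reading under the same lock guarantees the latest write is seen;
      // an unlocked read of a non-volatile field does not
      public Throwable getThrowable() {
        synchronized (lock) {
          return throwable;
        }
      }
    }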
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
index 80887c1..cb72f54 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
@@ -164,7 +164,9 @@ public class UnsafeBatchParallelReadMergeSorterImpl extends AbstractMergeSorter
         LOGGER.error(e);
         this.threadStatusObserver.notifyFailed(e);
       } finally {
-        sortDataRows.finishThread();
+        synchronized (sortDataRows) {
+          sortDataRows.finishThread();
+        }
       }
     }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
index 1c6ce8d..fb0bcc3 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
@@ -109,7 +109,6 @@ public class UnsafeSortDataRows {
     // observer of writing file in thread
     this.threadStatusObserver = new ThreadStatusObserver();
     this.taskId = ThreadLocalTaskInfo.getCarbonTaskInfo().getTaskId();
-    this.inMemoryChunkSize = inMemoryChunkSize;
     this.inMemoryChunkSize = inMemoryChunkSize * 1024L * 1024L;
     enableInMemoryIntermediateMerge = Boolean.parseBoolean(CarbonProperties.getInstance()
         .getProperty(CarbonCommonConstants.ENABLE_INMEMORY_MERGE_SORT,
@@ -121,7 +120,7 @@ public class UnsafeSortDataRows {
       // in sort memory size.
       this.maxSizeAllowed = UnsafeMemoryManager.INSTANCE.getUsableMemory() / 2;
     } else {
-      this.maxSizeAllowed = this.maxSizeAllowed * 1024 * 1024;
+      this.maxSizeAllowed = this.maxSizeAllowed * 1024L * 1024L;
     }
   }
 

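Besides removing a duplicate assignment, the UnsafeSortDataRows change converts
megabytes to bytes in long arithmetic. If the left operand is (or originates
from) an int, "x * 1024 * 1024" is evaluated in 32-bit arithmetic and overflows
before the widening assignment; a single long literal promotes the whole
expression. A sketch of the hazard:

    public class OverflowDemo {
      public static void main(String[] args) {
        int sizeInMb = 4096;
        // int multiplication overflows first, then the result is widened
        long wrong = sizeInMb * 1024 * 1024;   // 0 (and negative for e.g. 3072)
        // one long operand promotes the whole multiplication to long
        long right = sizeInMb * 1024L * 1024L; // 4294967296
        System.out.println(wrong + " vs " + right);
      }
    }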
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java
index 01e7649..a65de16 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java
@@ -21,6 +21,7 @@ import java.io.DataOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.util.AbstractQueue;
+import java.util.NoSuchElementException;
 import java.util.PriorityQueue;
 import java.util.Random;
 import java.util.concurrent.Callable;
@@ -215,7 +216,11 @@ public class UnsafeInMemoryIntermediateDataMerger implements Callable<Void> {
    * @throws CarbonSortKeyAndGroupByException
    */
   private UnsafeCarbonRowForMerge next() throws CarbonSortKeyAndGroupByException {
-    return getSortedRecordFromMemory();
+    if (hasNext()) {
+      return getSortedRecordFromMemory();
+    } else {
+      throw new NoSuchElementException("No more elements to return");
+    }
   }
 
   /**

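This guard, repeated below in UnsafeIntermediateFileMerger,
IntermediateFileMerger and both single-thread final sort mergers, brings next()
in line with the java.util.Iterator contract: calling next() past the end must
throw NoSuchElementException rather than hand back a null or stale record. A
minimal sketch of the pattern, with a hypothetical record type:

    import java.util.NoSuchElementException;
    import java.util.PriorityQueue;

    public class GuardedMerger {
      private final PriorityQueue<int[]> recordHolderHeap =
          new PriorityQueue<>((a, b) -> Integer.compare(a[0], b[0]));

      boolean hasNext() {
        return !recordHolderHeap.isEmpty();
      }

      // Iterator contract: next() past the end throws instead of returning
      // null or repeating the last record
      int[] next() {
        if (!hasNext()) {
          throw new NoSuchElementException("No more elements to return");
        }
        return recordHolderHeap.poll();
      }
    }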
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java
index 22673ff..c5b215e 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java
@@ -22,6 +22,7 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.AbstractQueue;
+import java.util.NoSuchElementException;
 import java.util.PriorityQueue;
 import java.util.concurrent.Callable;
 
@@ -238,7 +239,12 @@ public class UnsafeIntermediateFileMerger implements Callable<Void> {
    * @throws CarbonSortKeyAndGroupByException
    */
   private IntermediateSortTempRow next() throws CarbonSortKeyAndGroupByException {
-    return getSortedRecordFromFile();
+    if (hasNext()) {
+      return getSortedRecordFromFile();
+    } else {
+      throw new NoSuchElementException("No more elements to return");
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java
index 0c692c7..8d2c52a 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java
@@ -38,6 +38,8 @@ import org.apache.carbondata.processing.loading.sort.unsafe.UnsafeCarbonRowPage;
 import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
 import org.apache.carbondata.processing.sort.sortdata.SortParameters;
 
+import org.apache.commons.collections.list.SynchronizedList;
+
 /**
  * It does mergesort intermediate files to big file.
  */
@@ -76,7 +78,8 @@ public class UnsafeIntermediateMerger {
     this.mergedPages = new ArrayList<>();
     this.executorService = Executors.newFixedThreadPool(parameters.getNumberOfCores(),
         new CarbonThreadFactory("UnsafeIntermediatePool:" + parameters.getTableName()));
-    this.procFiles = new ArrayList<File>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
+    this.procFiles =
+        SynchronizedList.decorate(new ArrayList<File>(CarbonCommonConstants.CONSTANT_SIZE_TEN));
     this.mergerTask = new ArrayList<>();
 
     Integer spillPercentage;
@@ -111,15 +114,18 @@ public class UnsafeIntermediateMerger {
   }
 
   public void startFileMergingIfPossible() {
-    File[] fileList;
-    if (procFiles.size() >= parameters.getNumberOfIntermediateFileToBeMerged()) {
-      synchronized (lockObject) {
+    File[] fileList = null;
+    synchronized (lockObject) {
+      if (procFiles.size() >= parameters.getNumberOfIntermediateFileToBeMerged()) {
         fileList = procFiles.toArray(new File[procFiles.size()]);
         this.procFiles = new ArrayList<File>();
+        if (LOGGER.isDebugEnabled()) {
+          LOGGER
+              .debug("Submitting request for intermediate merging no of files: " + fileList.length);
+        }
       }
-      if (LOGGER.isDebugEnabled()) {
-        LOGGER.debug("Sumitting request for intermediate merging no of files: " + fileList.length);
-      }
+    }
+    if (null != fileList) {
       startIntermediateMerging(fileList);
     }
   }

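The merger change is a classic check-then-act fix: the size test, the snapshot
and the reset of procFiles now form one atomic step inside the synchronized
block (and the list itself is decorated as synchronized for the plain add
path). Checking the size outside the lock lets two threads both pass the
threshold test, after which one of them finds the list already emptied. A
sketch of the atomic version, with hypothetical names:

    import java.io.File;
    import java.util.ArrayList;
    import java.util.List;

    public class MergeScheduler {
      private final Object lockObject = new Object();
      private List<File> procFiles = new ArrayList<>();
      private final int threshold = 20;

      void addFile(File f) {
        synchronized (lockObject) {
          procFiles.add(f);
        }
      }

      void startFileMergingIfPossible() {
        File[] batch = null;
        // test the size, snapshot and reset under one lock, so no other
        // thread can race between the check and the act
        synchronized (lockObject) {
          if (procFiles.size() >= threshold) {
            batch = procFiles.toArray(new File[0]);
            procFiles = new ArrayList<>();
          }
        }
        if (batch != null) {
          submitMerge(batch); // slow work stays outside the lock
        }
      }

      private void submitMerge(File[] files) {
        System.out.println("merging " + files.length + " files");
      }
    }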
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java
index 073d13b..6defeb7 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
+import java.util.NoSuchElementException;
 import java.util.PriorityQueue;
 
 import org.apache.carbondata.common.CarbonIterator;
@@ -195,8 +196,12 @@ public class UnsafeSingleThreadFinalSortFilesMerger extends CarbonIterator<Objec
    * @return sorted row
    */
   public Object[] next() {
-    IntermediateSortTempRow sortTempRow =  getSortedRecordFromFile();
-    return sortStepRowHandler.convertIntermediateSortTempRowTo3Parted(sortTempRow);
+    if (hasNext()) {
+      IntermediateSortTempRow sortTempRow = getSortedRecordFromFile();
+      return sortStepRowHandler.convertIntermediateSortTempRowTo3Parted(sortTempRow);
+    } else {
+      throw new NoSuchElementException("No more elements to return");
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepImpl.java
index 4078a13..d0e78fc 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepImpl.java
@@ -96,6 +96,10 @@ public class InputProcessorStepImpl extends AbstractDataLoadProcessorStep {
     // to be launched.
     int parallelThreadNumber = Math.min(inputIterators.length, numberOfCores);
 
+    if (parallelThreadNumber <= 0) {
+      parallelThreadNumber = 1;
+    }
+
     List<CarbonIterator<Object[]>>[] iterators = new List[parallelThreadNumber];
     for (int i = 0; i < parallelThreadNumber; i++) {
       iterators[i] = new ArrayList<>();

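The clamp above keeps at least one reader thread alive when there are no input
iterators or the configured core count is zero; otherwise the step would
allocate a zero-length list array and launch nothing. The same guard can be
written as a single expression, sketched with hypothetical inputs:

    public class ThreadCount {
      public static void main(String[] args) {
        int inputIterators = 0; // e.g. an empty load
        int numberOfCores = 4;
        // never allow fewer than one reader thread
        int parallel = Math.max(1, Math.min(inputIterators, numberOfCores));
        System.out.println(parallel); // 1
      }
    }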
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java b/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java
index 1744675..81031de 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java
@@ -1108,23 +1108,24 @@ public final class CarbonDataMergerUtil {
 
       CarbonFile[] deleteDeltaFiles =
           segmentUpdateStatusManager.getDeleteDeltaFilesList(seg, blockName);
+      if (null != deleteDeltaFiles) {
+        // The Delete Delta files may have Spill over blocks. Will consider multiple spill over
+        // blocks as one. Currently DeleteDeltaFiles array contains Delete Delta Block name which
+        // lies within Delete Delta Start TimeStamp and End TimeStamp. In order to eliminate
+        // Spill Over Blocks will choose files with unique taskID.
+        for (CarbonFile blocks : deleteDeltaFiles) {
+          // Get Task ID and the Timestamp from the Block name for e.g.
+          // part-0-3-1481084721319.carbondata => "3-1481084721319"
+          String task = CarbonTablePath.DataFileUtil.getTaskNo(blocks.getName());
+          String timestamp =
+              CarbonTablePath.DataFileUtil.getTimeStampFromDeleteDeltaFile(blocks.getName());
+          String taskAndTimeStamp = task + "-" + timestamp;
+          uniqueBlocks.add(taskAndTimeStamp);
+        }
 
-      // The Delete Delta files may have Spill over blocks. Will consider multiple spill over
-      // blocks as one. Currently DeleteDeltaFiles array contains Delete Delta Block name which
-      // lies within Delete Delta Start TimeStamp and End TimeStamp. In order to eliminate
-      // Spill Over Blocks will choose files with unique taskID.
-      for (CarbonFile blocks : deleteDeltaFiles) {
-        // Get Task ID and the Timestamp from the Block name for e.g.
-        // part-0-3-1481084721319.carbondata => "3-1481084721319"
-        String task = CarbonTablePath.DataFileUtil.getTaskNo(blocks.getName());
-        String timestamp =
-            CarbonTablePath.DataFileUtil.getTimeStampFromDeleteDeltaFile(blocks.getName());
-        String taskAndTimeStamp = task + "-" + timestamp;
-        uniqueBlocks.add(taskAndTimeStamp);
-      }
-
-      if (uniqueBlocks.size() > numberDeltaFilesThreshold) {
-        return true;
+        if (uniqueBlocks.size() > numberDeltaFilesThreshold) {
+          return true;
+        }
       }
     }
     return false;
@@ -1152,7 +1153,7 @@ public final class CarbonDataMergerUtil {
       CarbonFile[] deleteDeltaFiles =
           segmentUpdateStatusManager.getDeleteDeltaFilesList(seg, blockName);
 
-      if (deleteDeltaFiles.length > numberDeltaFilesThreshold) {
+      if (null != deleteDeltaFiles && (deleteDeltaFiles.length > numberDeltaFilesThreshold)) {
         blockLists.add(seg.getSegmentNo() + "/" + blockName);
       }
     }
@@ -1200,31 +1201,34 @@ public final class CarbonDataMergerUtil {
 
     String destFileName =
         blockName + "-" + timestamp.toString() + CarbonCommonConstants.DELETE_DELTA_FILE_EXT;
-    String fullBlockFilePath = deleteDeltaFiles[0].getParentFile().getCanonicalPath()
-        + CarbonCommonConstants.FILE_SEPARATOR + destFileName;
-
-    List<String> deleteFilePathList = new ArrayList<String>();
-    for (CarbonFile cFile : deleteDeltaFiles) {
-      deleteFilePathList.add(cFile.getCanonicalPath());
-    }
+    List<String> deleteFilePathList = new ArrayList<>();
+    if (null != deleteDeltaFiles && deleteDeltaFiles.length > 0 && null != deleteDeltaFiles[0]
+        .getParentFile()) {
+      String fullBlockFilePath = deleteDeltaFiles[0].getParentFile().getCanonicalPath()
+          + CarbonCommonConstants.FILE_SEPARATOR + destFileName;
+
+      for (CarbonFile cFile : deleteDeltaFiles) {
+        deleteFilePathList.add(cFile.getCanonicalPath());
+      }
 
-    CarbonDataMergerUtilResult blockDetails = new CarbonDataMergerUtilResult();
-    blockDetails.setBlockName(blockName);
-    blockDetails.setSegmentName(seg);
-    blockDetails.setDeleteDeltaStartTimestamp(timestamp.toString());
-    blockDetails.setDeleteDeltaEndTimestamp(timestamp.toString());
+      CarbonDataMergerUtilResult blockDetails = new CarbonDataMergerUtilResult();
+      blockDetails.setBlockName(blockName);
+      blockDetails.setSegmentName(seg);
+      blockDetails.setDeleteDeltaStartTimestamp(timestamp.toString());
+      blockDetails.setDeleteDeltaEndTimestamp(timestamp.toString());
 
-    try {
-      if (startCompactionDeleteDeltaFiles(deleteFilePathList, blockName, fullBlockFilePath)) {
-        blockDetails.setCompactionStatus(true);
-      } else {
-        blockDetails.setCompactionStatus(false);
+      try {
+        if (startCompactionDeleteDeltaFiles(deleteFilePathList, blockName, fullBlockFilePath)) {
+          blockDetails.setCompactionStatus(true);
+        } else {
+          blockDetails.setCompactionStatus(false);
+        }
+        resultList.add(blockDetails);
+      } catch (IOException e) {
+        LOGGER.error("Compaction of Delete Delta Files failed. The complete file path is "
+            + fullBlockFilePath);
+        throw new IOException(e);
       }
-      resultList.add(blockDetails);
-    } catch (IOException e) {
-      LOGGER.error("Compaction of Delete Delta Files failed. The complete file path is "
-          + fullBlockFilePath);
-      throw new IOException();
     }
     return resultList;
   }

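All three CarbonDataMergerUtil hunks guard the same Coverity finding:
getDeleteDeltaFilesList can return null, and the old code iterated it, read its
length, and indexed element zero without a check. A null-safe sketch of the
access pattern, with a stand-in for the status-manager call:

    import java.util.ArrayList;
    import java.util.List;

    public class DeltaFiles {
      // stand-in for segmentUpdateStatusManager.getDeleteDeltaFilesList(...),
      // which may return null when a block has no delete delta files
      static String[] getDeleteDeltaFilesList(String block) {
        return null;
      }

      public static void main(String[] args) {
        List<String> names = new ArrayList<>();
        String[] deleteDeltaFiles = getDeleteDeltaFilesList("part-0");
        // guard every dereference: iteration, .length and [0] access
        if (deleteDeltaFiles != null && deleteDeltaFiles.length > 0) {
          for (String f : deleteDeltaFiles) {
            names.add(f);
          }
        }
        System.out.println(names.size()); // 0, and no NullPointerException
      }
    }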
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java b/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java
index 442f1c5..9a3258e 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java
@@ -138,21 +138,23 @@ public class RowResultMergerProcessor extends AbstractResultProcessor {
       // if record holder is not empty then iterator the slice holder from
       // heap
       iterator = this.recordHolderHeap.poll();
-      while (true) {
-        Object[] convertedRow = iterator.next();
-        if (null == convertedRow) {
-          iterator.close();
-          break;
-        }
-        // do it only once
-        if (!isDataPresent) {
-          dataHandler.initialise();
-          isDataPresent = true;
-        }
-        addRow(convertedRow);
-        // check if leaf contains no record
-        if (!iterator.hasNext()) {
-          break;
+      if (null != iterator) {
+        while (true) {
+          Object[] convertedRow = iterator.next();
+          if (null == convertedRow) {
+            iterator.close();
+            break;
+          }
+          // do it only once
+          if (!isDataPresent) {
+            dataHandler.initialise();
+            isDataPresent = true;
+          }
+          addRow(convertedRow);
+          // check if leaf contains no record
+          if (!iterator.hasNext()) {
+            break;
+          }
         }
       }
       if (isDataPresent)

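The merge loop is wrapped in a null check because PriorityQueue.poll() returns
null on an empty heap instead of throwing; dereferencing the result directly
was the defect. A short sketch of the poll contract:

    import java.util.PriorityQueue;

    public class PollGuard {
      public static void main(String[] args) {
        PriorityQueue<String> recordHolderHeap = new PriorityQueue<>();
        // poll() on an empty queue returns null rather than throwing
        String head = recordHolderHeap.poll();
        if (head != null) {
          System.out.println("process " + head);
        } else {
          System.out.println("heap empty, nothing to merge");
        }
      }
    }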
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/partition/impl/QueryPartitionHelper.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/partition/impl/QueryPartitionHelper.java b/processing/src/main/java/org/apache/carbondata/processing/partition/impl/QueryPartitionHelper.java
deleted file mode 100644
index b6f12a5..0000000
--- a/processing/src/main/java/org/apache/carbondata/processing/partition/impl/QueryPartitionHelper.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.processing.partition.impl;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import org.apache.carbondata.processing.partition.DataPartitioner;
-import org.apache.carbondata.processing.partition.Partition;
-
-
-public final class QueryPartitionHelper {
-  private static QueryPartitionHelper instance = new QueryPartitionHelper();
-  private Map<String, DataPartitioner> partitionerMap =
-      new HashMap<String, DataPartitioner>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
-  private Map<String, DefaultLoadBalancer> loadBalancerMap =
-      new HashMap<String, DefaultLoadBalancer>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
-
-  private QueryPartitionHelper() {
-
-  }
-
-  public static QueryPartitionHelper getInstance() {
-    return instance;
-  }
-
-  /**
-   * Get partitions applicable for query based on filters applied in query
-   */
-  public List<Partition> getPartitionsForQuery(String databaseName, String tableName) {
-    String tableUniqueName = CarbonTable.buildUniqueName(databaseName, tableName);
-
-    DataPartitioner dataPartitioner = partitionerMap.get(tableUniqueName);
-
-    return dataPartitioner.getPartitions();
-  }
-
-  public List<Partition> getAllPartitions(String databaseName, String tableName) {
-    String tableUniqueName = CarbonTable.buildUniqueName(databaseName, tableName);
-
-    DataPartitioner dataPartitioner = partitionerMap.get(tableUniqueName);
-
-    return dataPartitioner.getAllPartitions();
-  }
-
-  /**
-   * Get the node name where the partition is assigned to.
-   */
-  public String getLocation(Partition partition, String databaseName, String tableName) {
-    String tableUniqueName = CarbonTable.buildUniqueName(databaseName, tableName);
-
-    DefaultLoadBalancer loadBalancer = loadBalancerMap.get(tableUniqueName);
-    return loadBalancer.getNodeForPartitions(partition);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java
index c06819c..364515c 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java
@@ -22,6 +22,7 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.AbstractQueue;
+import java.util.NoSuchElementException;
 import java.util.PriorityQueue;
 import java.util.concurrent.Callable;
 
@@ -237,7 +238,11 @@ public class IntermediateFileMerger implements Callable<Void> {
    * @throws CarbonSortKeyAndGroupByException
    */
   private IntermediateSortTempRow next() throws CarbonSortKeyAndGroupByException {
-    return getSortedRecordFromFile();
+    if (hasNext()) {
+      return getSortedRecordFromFile();
+    } else {
+      throw new NoSuchElementException("No more elements to return");
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
index 1a839a2..09c1920 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
@@ -23,6 +23,7 @@ import java.util.AbstractQueue;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.NoSuchElementException;
 import java.util.PriorityQueue;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
@@ -233,8 +234,12 @@ public class SingleThreadFinalSortFilesMerger extends CarbonIterator<Object[]> {
    * @throws CarbonSortKeyAndGroupByException
    */
   public Object[] next() {
-    IntermediateSortTempRow sortTempRow = getSortedRecordFromFile();
-    return sortStepRowHandler.convertIntermediateSortTempRowTo3Parted(sortTempRow);
+    if (hasNext()) {
+      IntermediateSortTempRow sortTempRow = getSortedRecordFromFile();
+      return sortStepRowHandler.convertIntermediateSortTempRowTo3Parted(sortTempRow);
+    } else {
+      throw new NoSuchElementException("No more elements to return");
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
index 8115f97..9d0c933 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
@@ -43,7 +43,6 @@ import org.apache.carbondata.core.metadata.converter.SchemaConverter;
 import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
 import org.apache.carbondata.core.metadata.index.BlockIndexInfo;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.core.util.CarbonMergerUtil;
 import org.apache.carbondata.core.util.CarbonMetadataUtil;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonThreadFactory;
@@ -56,8 +55,6 @@ import org.apache.carbondata.format.IndexHeader;
 import org.apache.carbondata.processing.datamap.DataMapWriterListener;
 import org.apache.carbondata.processing.store.CarbonFactDataHandlerModel;
 
-import org.apache.commons.lang3.ArrayUtils;
-
 public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
 
   private static final LogService LOGGER =
@@ -168,13 +165,13 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
             CarbonCommonConstants.CARBON_BLOCK_META_RESERVED_SPACE_DEFAULT));
     this.blockSizeThreshold =
         fileSizeInBytes - (fileSizeInBytes * spaceReservedForBlockMetaSize) / 100;
-    LOGGER.info("Total file size: " + fileSizeInBytes + " and dataBlock Size: " +
-        blockSizeThreshold);
+    LOGGER
+        .info("Total file size: " + fileSizeInBytes + " and dataBlock Size: " + blockSizeThreshold);
 
     // whether to directly write fact data to HDFS
-    String directlyWriteData2Hdfs = propInstance.getProperty(
-        CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_HDFS,
-        CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_HDFS_DEFAULT);
+    String directlyWriteData2Hdfs = propInstance
+        .getProperty(CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_HDFS,
+            CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_HDFS_DEFAULT);
     this.enableDirectlyWriteData2Hdfs = "TRUE".equalsIgnoreCase(directlyWriteData2Hdfs);
 
     if (enableDirectlyWriteData2Hdfs) {
@@ -189,22 +186,9 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
     // in case of compaction we will pass the cardinality.
     this.localCardinality = this.model.getColCardinality();
 
-    //TODO: We should delete the levelmetadata file after reading here.
-    // so only data loading flow will need to read from cardinality file.
-    if (null == this.localCardinality) {
-      this.localCardinality = CarbonMergerUtil
-          .getCardinalityFromLevelMetadata(this.model.getStoreLocation(),
-              this.model.getTableName());
-      List<Integer> cardinalityList = new ArrayList<Integer>();
-      thriftColumnSchemaList = getColumnSchemaListAndCardinality(cardinalityList, localCardinality,
-          this.model.getWrapperColumnSchema());
-      localCardinality =
-          ArrayUtils.toPrimitive(cardinalityList.toArray(new Integer[cardinalityList.size()]));
-    } else { // for compaction case
-      List<Integer> cardinalityList = new ArrayList<Integer>();
-      thriftColumnSchemaList = getColumnSchemaListAndCardinality(cardinalityList, localCardinality,
-          this.model.getWrapperColumnSchema());
-    }
+    List<Integer> cardinalityList = new ArrayList<Integer>();
+    thriftColumnSchemaList = getColumnSchemaListAndCardinality(cardinalityList, localCardinality,
+        this.model.getWrapperColumnSchema());
     this.numberCompressor = new NumberCompressor(Integer.parseInt(CarbonProperties.getInstance()
         .getProperty(CarbonCommonConstants.BLOCKLET_SIZE,
             CarbonCommonConstants.BLOCKLET_SIZE_DEFAULT_VAL)));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/util/CarbonQueryUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonQueryUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonQueryUtil.java
index 4abdf3c..8e23489 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonQueryUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonQueryUtil.java
@@ -17,19 +17,8 @@
 
 package org.apache.carbondata.processing.util;
 
-import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.scan.model.QueryProjection;
-import org.apache.carbondata.processing.partition.Partition;
-import org.apache.carbondata.processing.partition.impl.DefaultLoadBalancer;
-import org.apache.carbondata.processing.partition.impl.PartitionMultiFileImpl;
-import org.apache.carbondata.processing.partition.impl.QueryPartitionHelper;
-import org.apache.carbondata.processing.splits.TableSplit;
 
 import org.apache.commons.lang3.StringUtils;
 
@@ -43,58 +32,6 @@ public class CarbonQueryUtil {
   }
 
   /**
-   * It creates the one split for each region server.
-   */
-  public static synchronized TableSplit[] getTableSplits(String databaseName, String tableName,
-      QueryProjection queryPlan) {
-
-    //Just create splits depends on locations of region servers
-    List<Partition> allPartitions = null;
-    if (queryPlan == null) {
-      allPartitions =
-          QueryPartitionHelper.getInstance().getAllPartitions(databaseName, tableName);
-    } else {
-      allPartitions =
-          QueryPartitionHelper.getInstance().getPartitionsForQuery(databaseName, tableName);
-    }
-    TableSplit[] splits = new TableSplit[allPartitions.size()];
-    for (int i = 0; i < splits.length; i++) {
-      splits[i] = new TableSplit();
-      List<String> locations = new ArrayList<String>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
-      Partition partition = allPartitions.get(i);
-      String location = QueryPartitionHelper.getInstance()
-          .getLocation(partition, databaseName, tableName);
-      locations.add(location);
-      splits[i].setPartition(partition);
-      splits[i].setLocations(locations);
-    }
-
-    return splits;
-  }
-
-  /**
-   * It creates the one split for each region server.
-   */
-  public static TableSplit[] getTableSplitsForDirectLoad(String sourcePath) {
-
-    //Just create splits depends on locations of region servers
-    DefaultLoadBalancer loadBalancer = null;
-    List<Partition> allPartitions = getAllFilesForDataLoad(sourcePath);
-    loadBalancer = new DefaultLoadBalancer(new ArrayList<String>(), allPartitions);
-    TableSplit[] tblSplits = new TableSplit[allPartitions.size()];
-    for (int i = 0; i < tblSplits.length; i++) {
-      tblSplits[i] = new TableSplit();
-      List<String> locations = new ArrayList<String>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
-      Partition partition = allPartitions.get(i);
-      String location = loadBalancer.getNodeForPartitions(partition);
-      locations.add(location);
-      tblSplits[i].setPartition(partition);
-      tblSplits[i].setLocations(locations);
-    }
-    return tblSplits;
-  }
-
-  /**
    * split sourcePath by comma
    */
   public static void splitFilePath(String sourcePath, List<String> partitionsFiles,
@@ -104,21 +41,4 @@ public class CarbonQueryUtil {
       Collections.addAll(partitionsFiles, files);
     }
   }
-
-  private static List<Partition> getAllFilesForDataLoad(String sourcePath) {
-    List<String> files = new ArrayList<String>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
-    splitFilePath(sourcePath, files, CarbonCommonConstants.COMMA);
-    List<Partition> partitionList =
-        new ArrayList<Partition>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
-    Map<Integer, List<String>> partitionFiles = new HashMap<Integer, List<String>>();
-
-    partitionFiles.put(0, new ArrayList<String>(CarbonCommonConstants.CONSTANT_SIZE_TEN));
-    partitionList.add(new PartitionMultiFileImpl(0 + "", partitionFiles.get(0)));
-
-    for (int i = 0; i < files.size(); i++) {
-      partitionFiles.get(0).add(files.get(i));
-    }
-    return partitionList;
-  }
-
 }