Posted to commits@carbondata.apache.org by gv...@apache.org on 2018/06/08 11:40:26 UTC

[01/50] [abbrv] carbondata git commit: [CARBONDATA-2413] After running CarbonWriter, there is null/_system directory about datamap

Repository: carbondata
Updated Branches:
  refs/heads/spark-2.3 8fe165668 -> 041603dcc


[CARBONDATA-2413] After running CarbonWriter, there is null/_system directory about datamap

After running CarbonWriter, a stray null/_system directory is left behind:

  null/_system# ls
  datamap.mdtfile
  # git status

Fix:
Do not create the datamap file/directory when writing through the SDK. The SDK path is identified by:
1. the segment id being null
2. the carbon properties added for the SDK

This closes #2246
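
For illustration, a minimal SDK run that exercises the fixed path could look like the sketch below (hedged: the class name, output path, Avro schema, and field values are hypothetical; the builder and TestUtil calls mirror the test code added in this patch). After the fix, closing the writer leaves no null/_system/datamap.mdtfile behind.

  import org.apache.avro.Schema;
  import org.apache.avro.generic.GenericData;
  import org.apache.carbondata.sdk.file.CarbonWriter;
  import org.apache.carbondata.sdk.file.TestUtil;

  public class SdkSmokeCheck {
    public static void main(String[] args) throws Exception {
      // hypothetical two-column Avro schema
      String avroSchema = "{\"type\":\"record\",\"name\":\"rec\",\"fields\":["
          + "{\"name\":\"name\",\"type\":\"string\"},"
          + "{\"name\":\"age\",\"type\":\"int\"}]}";
      Schema schema = new Schema.Parser().parse(avroSchema);
      GenericData.Record record = new GenericData.Record(schema);
      record.put("name", "bob");
      record.put("age", 10);

      // non-transactional SDK write, as in the Avro writer tests below
      CarbonWriter writer = CarbonWriter.builder()
          .outputPath("./sdk_output")
          .isTransactionalTable(false)
          .buildWriterForAvroInput(schema);
      writer.write(record);
      writer.close();

      // with this fix the SDK load path no longer touches the datamap MDT file
      assert !TestUtil.verifyMdtFile();
    }
  }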


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/cf55028f
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/cf55028f
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/cf55028f

Branch: refs/heads/spark-2.3
Commit: cf55028f7b5471e8bb6242c10b06129013eb1951
Parents: b8d5abf
Author: xubo245 <xu...@huawei.com>
Authored: Thu May 17 12:13:01 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Sat May 19 08:14:17 2018 +0530

----------------------------------------------------------------------
 .../chunk/reader/MeasureColumnChunkReader.java  |  2 +-
 .../table/DiskBasedDMSchemaStorageProvider.java | 14 ++++---
 .../carbondata/core/scan/filter/FilterUtil.java |  2 +-
 .../scan/result/iterator/RawResultIterator.java |  4 +-
 .../management/CarbonLoadDataCommand.scala      |  2 +-
 .../datasources/SparkCarbonTableFormat.scala    |  2 +-
 .../loading/DataLoadProcessBuilder.java         |  2 +-
 .../loading/model/CarbonLoadModel.java          | 14 +++----
 .../sdk/file/CarbonWriterBuilder.java           |  2 +-
 .../sdk/file/AvroCarbonWriterTest.java          | 13 +++++-
 .../sdk/file/CSVCarbonWriterTest.java           | 12 ++++++
 .../CSVNonTransactionalCarbonWriterTest.java    | 11 +++++
 .../carbondata/sdk/file/CarbonReaderTest.java   | 12 ++++++
 .../apache/carbondata/sdk/file/TestUtil.java    | 44 ++++++++++++++++++++
 .../carbondata/store/LocalCarbonStoreTest.java  | 11 +++++
 15 files changed, 125 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/MeasureColumnChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/MeasureColumnChunkReader.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/MeasureColumnChunkReader.java
index bf76025..f1392d0 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/MeasureColumnChunkReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/MeasureColumnChunkReader.java
@@ -49,7 +49,7 @@ public interface MeasureColumnChunkReader {
       throws IOException;
 
   /**
-   * Covert raw data to measure chunk
+   * Convert raw data to measure chunk
    * @param measureRawColumnChunk
    * @param pageNumber
    * @return

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DiskBasedDMSchemaStorageProvider.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DiskBasedDMSchemaStorageProvider.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DiskBasedDMSchemaStorageProvider.java
index 4ebbcd0..cf4f6b9 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DiskBasedDMSchemaStorageProvider.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DiskBasedDMSchemaStorageProvider.java
@@ -84,7 +84,7 @@ public class DiskBasedDMSchemaStorageProvider implements DataMapSchemaStoragePro
       if (null != brWriter) {
         brWriter.flush();
       }
-      checkAndReloadDataMapSchemas();
+      checkAndReloadDataMapSchemas(true);
       dataMapSchemas.add(dataMapSchema);
       touchMDTFile();
       CarbonUtil.closeStreams(dataOutputStream, brWriter);
@@ -93,7 +93,7 @@ public class DiskBasedDMSchemaStorageProvider implements DataMapSchemaStoragePro
 
   @Override public DataMapSchema retrieveSchema(String dataMapName)
       throws IOException, NoSuchDataMapException {
-    checkAndReloadDataMapSchemas();
+    checkAndReloadDataMapSchemas(true);
     for (DataMapSchema dataMapSchema : dataMapSchemas) {
       if (dataMapSchema.getDataMapName().equalsIgnoreCase(dataMapName)) {
         return dataMapSchema;
@@ -103,7 +103,7 @@ public class DiskBasedDMSchemaStorageProvider implements DataMapSchemaStoragePro
   }
 
   @Override public List<DataMapSchema> retrieveSchemas(CarbonTable carbonTable) throws IOException {
-    checkAndReloadDataMapSchemas();
+    checkAndReloadDataMapSchemas(false);
     List<DataMapSchema> dataMapSchemas = new ArrayList<>();
     for (DataMapSchema dataMapSchema : this.dataMapSchemas) {
       List<RelationIdentifier> parentTables = dataMapSchema.getParentTables();
@@ -119,7 +119,7 @@ public class DiskBasedDMSchemaStorageProvider implements DataMapSchemaStoragePro
   }
 
   @Override public List<DataMapSchema> retrieveAllSchemas() throws IOException {
-    checkAndReloadDataMapSchemas();
+    checkAndReloadDataMapSchemas(true);
     return new ArrayList<>(dataMapSchemas);
   }
 
@@ -175,7 +175,7 @@ public class DiskBasedDMSchemaStorageProvider implements DataMapSchemaStoragePro
     }
   }
 
-  private void checkAndReloadDataMapSchemas() throws IOException {
+  private void checkAndReloadDataMapSchemas(boolean touchFile) throws IOException {
     if (FileFactory.isFileExist(mdtFilePath)) {
       long lastModifiedTime = FileFactory.getCarbonFile(mdtFilePath).getLastModifiedTime();
       if (this.lastModifiedTime != lastModifiedTime) {
@@ -183,7 +183,9 @@ public class DiskBasedDMSchemaStorageProvider implements DataMapSchemaStoragePro
         this.lastModifiedTime = lastModifiedTime;
       }
     } else {
-      touchMDTFile();
+      if (touchFile) {
+        touchMDTFile();
+      }
       dataMapSchemas = retrieveAllSchemasInternal();
     }
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
index 2032ddb..5196f8f 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
@@ -885,7 +885,7 @@ public final class FilterUtil {
   }
 
   /**
-   * Below method will be used to covert the filter surrogate keys
+   * Below method will be used to convert the filter surrogate keys
    * to mdkey
    *
    * @param columnFilterInfo

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java
index 1fe50a2..94cea91 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/RawResultIterator.java
@@ -191,9 +191,9 @@ public class RawResultIterator extends CarbonIterator<Object[]> {
   private Object[] convertRow(Object[] rawRow) throws KeyGenException {
     byte[] dims = ((ByteArrayWrapper) rawRow[0]).getDictionaryKey();
     long[] keyArray = sourceSegProperties.getDimensionKeyGenerator().getKeyArray(dims);
-    byte[] covertedBytes =
+    byte[] convertedBytes =
         destinationSegProperties.getDimensionKeyGenerator().generateKey(keyArray);
-    ((ByteArrayWrapper) rawRow[0]).setDictionaryKey(covertedBytes);
+    ((ByteArrayWrapper) rawRow[0]).setDictionaryKey(convertedBytes);
     return rawRow;
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
index 5ce510b..ba062c0 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/management/CarbonLoadDataCommand.scala
@@ -858,7 +858,7 @@ case class CarbonLoadDataCommand(
   }
 
   /**
-   * Convert the rdd as per steps of data loading inputprocessor step and coverter step
+   * Convert the rdd as per steps of data loading inputprocessor step and converter step
    * @param originRDD
    * @param sparkSession
    * @param model

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonTableFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonTableFormat.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonTableFormat.scala
index ac41d2e..42f1f77 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonTableFormat.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonTableFormat.scala
@@ -110,7 +110,7 @@ with Serializable {
     model.setDictionaryServerHost(options.getOrElse("dicthost", null))
     model.setDictionaryServerPort(options.getOrElse("dictport", "-1").toInt)
     CarbonTableOutputFormat.setOverwrite(conf, options("overwrite").toBoolean)
-    model.setLoadWithoutCoverterStep(true)
+    model.setLoadWithoutConverterStep(true)
 
     val staticPartition = options.getOrElse("staticpartition", null)
     if (staticPartition != null) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java b/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java
index 17d0c76..b7b725c 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/DataLoadProcessBuilder.java
@@ -62,7 +62,7 @@ public final class DataLoadProcessBuilder {
       CarbonIterator[] inputIterators) throws Exception {
     CarbonDataLoadConfiguration configuration = createConfiguration(loadModel, storeLocation);
     SortScopeOptions.SortScope sortScope = CarbonDataProcessorUtil.getSortScope(configuration);
-    if (loadModel.isLoadWithoutCoverterStep()) {
+    if (loadModel.isLoadWithoutConverterStep()) {
       return buildInternalWithNoConverter(inputIterators, configuration, sortScope);
     } else if (!configuration.isSortTable() ||
         sortScope.equals(SortScopeOptions.SortScope.NO_SORT)) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java
index 0cc0da3..f82de83 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModel.java
@@ -111,7 +111,7 @@ public class CarbonLoadModel implements Serializable {
   private String defaultDateFormat;
 
   /**
-   * defines the string that should be treated as null while loadind data
+   * defines the string that should be treated as null while loading data
    */
   private String serializationNullFormat;
 
@@ -204,7 +204,7 @@ public class CarbonLoadModel implements Serializable {
    * For this method there will be no data conversion step. It writes data which is directly
    * pushed into.
    */
-  private boolean isLoadWithoutCoverterStep;
+  private boolean isLoadWithoutConverterStep;
 
   /**
    * Flder path to where data should be written for this load.
@@ -437,7 +437,7 @@ public class CarbonLoadModel implements Serializable {
     copy.batchSortSizeInMb = batchSortSizeInMb;
     copy.isAggLoadRequest = isAggLoadRequest;
     copy.badRecordsLocation = badRecordsLocation;
-    copy.isLoadWithoutCoverterStep = isLoadWithoutCoverterStep;
+    copy.isLoadWithoutConverterStep = isLoadWithoutConverterStep;
     copy.sortColumnsBoundsStr = sortColumnsBoundsStr;
     return copy;
   }
@@ -816,12 +816,12 @@ public class CarbonLoadModel implements Serializable {
   }
 
 
-  public boolean isLoadWithoutCoverterStep() {
-    return isLoadWithoutCoverterStep;
+  public boolean isLoadWithoutConverterStep() {
+    return isLoadWithoutConverterStep;
   }
 
-  public void setLoadWithoutCoverterStep(boolean loadWithoutCoverterStep) {
-    isLoadWithoutCoverterStep = loadWithoutCoverterStep;
+  public void setLoadWithoutConverterStep(boolean loadWithoutConverterStep) {
+    isLoadWithoutConverterStep = loadWithoutConverterStep;
   }
 
   public String getDataWritePath() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index 0f9c9d7..585975f 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -336,7 +336,7 @@ public class CarbonWriterBuilder {
     // handle multi level complex type support. As there are no conversion converter step is
     // removed from the load. LoadWithoutConverter flag is going to point to the Loader Builder
     // which will skip Conversion Step.
-    loadModel.setLoadWithoutCoverterStep(true);
+    loadModel.setLoadWithoutConverterStep(true);
     return new AvroCarbonWriter(loadModel);
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
index 163512a..104c6e4 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
@@ -29,16 +29,27 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
 import org.apache.avro.generic.GenericData;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.CharEncoding;
+import org.junit.After;
 import org.junit.Assert;
+import org.junit.Before;
 import org.junit.Test;
 
 import tech.allegro.schema.json2avro.converter.JsonAvroConverter;
 import org.apache.avro.Schema;
 
-
 public class AvroCarbonWriterTest {
   private String path = "./AvroCarbonWriterSuiteWriteFiles";
 
+  @Before
+  public void cleanFile() {
+    assert (TestUtil.cleanMdtFile());
+  }
+
+  @After
+  public void verifyDMFile() {
+    assert (!TestUtil.verifyMdtFile());
+  }
+
   @Test
   public void testWriteBasic() throws IOException {
     FileUtils.deleteDirectory(new File(path));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
index d68d85b..fc283b6 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
@@ -27,7 +27,9 @@ import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 
 import org.apache.commons.io.FileUtils;
+import org.junit.After;
 import org.junit.Assert;
+import org.junit.Before;
 import org.junit.Test;
 
 /**
@@ -35,6 +37,16 @@ import org.junit.Test;
  */
 public class CSVCarbonWriterTest {
 
+  @Before
+  public void cleanFile() {
+    assert (TestUtil.cleanMdtFile());
+  }
+
+  @After
+  public void verifyDMFile() {
+    assert (!TestUtil.verifyMdtFile());
+  }
+
   @Test
   public void testWriteFiles() throws IOException {
     String path = "./testWriteFiles";

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVNonTransactionalCarbonWriterTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVNonTransactionalCarbonWriterTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVNonTransactionalCarbonWriterTest.java
index 19b0a42..881b5a5 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVNonTransactionalCarbonWriterTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVNonTransactionalCarbonWriterTest.java
@@ -27,13 +27,24 @@ import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 
 import org.apache.commons.io.FileUtils;
+import org.junit.After;
 import org.junit.Assert;
+import org.junit.Before;
 import org.junit.Test;
 
 /**
  * Test suite for {@link CSVCarbonWriter}
  */
 public class CSVNonTransactionalCarbonWriterTest {
+  @Before
+  public void cleanFile() {
+    assert (TestUtil.cleanMdtFile());
+  }
+
+  @After
+  public void verifyDMFile() {
+    assert (!TestUtil.verifyMdtFile());
+  }
 
   @Test
   public void testWriteFiles() throws IOException {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index f2c6d45..937dde8 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -27,11 +27,23 @@ import org.apache.carbondata.core.metadata.schema.table.TableInfo;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 
 import org.apache.commons.io.FileUtils;
+import org.junit.After;
 import org.junit.Assert;
+import org.junit.Before;
 import org.junit.Test;
 
 public class CarbonReaderTest {
 
+  @Before
+  public void cleanFile() {
+    assert (TestUtil.cleanMdtFile());
+  }
+
+  @After
+  public void verifyDMFile() {
+    assert (!TestUtil.verifyMdtFile());
+  }
+
   @Test
   public void testWriteAndReadFiles() throws IOException, InterruptedException {
     String path = "./testWriteFiles";

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
index 6870f36..97de1a0 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
@@ -23,6 +23,8 @@ import java.io.IOException;
 
 import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 
 import org.junit.Assert;
@@ -108,4 +110,46 @@ public class TestUtil {
     Assert.assertNotNull(dataFiles);
     Assert.assertTrue(dataFiles.length > 0);
   }
+
+  /**
+   * verify whether the file exists
+   * if delete the file success or file not exists, then return true; otherwise return false
+   *
+   * @return boolean
+   */
+  public static boolean cleanMdtFile() {
+    String fileName = CarbonProperties.getInstance().getSystemFolderLocation()
+            + CarbonCommonConstants.FILE_SEPARATOR + "datamap.mdtfile";
+    try {
+      if (FileFactory.isFileExist(fileName)) {
+        File file = new File(fileName);
+        file.delete();
+        return true;
+      } else {
+        return true;
+      }
+    } catch (IOException e) {
+      e.printStackTrace();
+      return false;
+    }
+  }
+
+  /**
+   * verify whether the mdt file exists
+   * if the file exists, then return true; otherwise return false
+   *
+   * @return boolean
+   */
+  public static boolean verifyMdtFile() {
+    String fileName = CarbonProperties.getInstance().getSystemFolderLocation()
+            + CarbonCommonConstants.FILE_SEPARATOR + "datamap.mdtfile";
+    try {
+      if (FileFactory.isFileExist(fileName)) {
+        return true;
+      }
+      return false;
+    } catch (IOException e) {
+      throw new RuntimeException("IO exception:", e);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf55028f/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java b/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
index a5b5edc..51d0b27 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
@@ -28,9 +28,20 @@ import org.apache.carbondata.sdk.file.Schema;
 import org.apache.carbondata.sdk.file.TestUtil;
 
 import org.apache.commons.io.FileUtils;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 
 public class LocalCarbonStoreTest {
+  @Before
+  public void cleanFile() {
+    assert (TestUtil.cleanMdtFile());
+  }
+
+  @After
+  public void verifyDMFile() {
+    assert (!TestUtil.verifyMdtFile());
+  }
 
   // TODO: complete this testcase
   // Currently result rows are empty, because SDK is not writing table status file


[41/50] [abbrv] carbondata git commit: [CARBONDATA-2529] Fixed S3 Issue for Hadoop 2.8.3

Posted by gv...@apache.org.
[CARBONDATA-2529] Fixed S3 Issue for Hadoop 2.8.3

This fixes a failure while loading data with S3 as the backend store.

This closes #2340
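
In the rename-force helper of HDFSCarbonFile, only paths starting with "s3n" were treated as an object store, so loads over s3a:// fell into the HDFS-specific overwrite-rename and failed. The sketch below restates the patched behaviour as a standalone helper (hedged: the method name is hypothetical; fs, fileStatus, and changetoName come from the surrounding renameForce logic in the patch):

  import java.io.IOException;
  import org.apache.hadoop.fs.FileStatus;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  class S3RenameSketch {
    // hypothetical helper illustrating the patched branch
    static boolean renameOnObjectStore(FileSystem fs, FileStatus fileStatus,
        String changetoName) throws IOException {
      String path = fileStatus.getPath().toString();
      if (path.startsWith("s3n") || path.startsWith("s3a")) {
        // object stores cannot do an atomic overwrite-rename, so remove any
        // stale target first and fall back to a plain rename
        fs.delete(new Path(changetoName), true);
        return fs.rename(fileStatus.getPath(), new Path(changetoName));
      }
      // non-S3 paths keep using the DistributedFileSystem overwrite-rename
      return false;
    }
  }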


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/4d22ddc9
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/4d22ddc9
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/4d22ddc9

Branch: refs/heads/spark-2.3
Commit: 4d22ddc9d932891af7d3f6557a423d65969f1fd3
Parents: 5ad7009
Author: Bhavya <bh...@knoldus.com>
Authored: Thu May 24 21:17:58 2018 +0530
Committer: chenliang613 <ch...@huawei.com>
Committed: Fri Jun 1 14:37:36 2018 +0800

----------------------------------------------------------------------
 .../carbondata/core/datastore/filesystem/HDFSCarbonFile.java      | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/4d22ddc9/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
index 4663ac5..fc5420d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/HDFSCarbonFile.java
@@ -120,7 +120,8 @@ public class HDFSCarbonFile extends AbstractDFSCarbonFile {
         ((DistributedFileSystem) fs).rename(fileStatus.getPath(), new Path(changetoName),
             org.apache.hadoop.fs.Options.Rename.OVERWRITE);
         return true;
-      } else if (fileStatus.getPath().toString().startsWith("s3n")) {
+      } else if (fileStatus.getPath().toString().startsWith("s3n")
+          || fileStatus.getPath().toString().startsWith("s3a")) {
         fs.delete(new Path(changetoName), true);
         return fs.rename(fileStatus.getPath(), new Path(changetoName));
       } else {


[47/50] [abbrv] carbondata git commit: [CARBONDATA-2554] Added support for logical type

Posted by gv...@apache.org.
[CARBONDATA-2554] Added support for logical type

Added support for date and timestamp logical types in AvroCarbonWriter.

This closes #2347
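
As an illustration, a hedged Java sketch of the new behaviour (class name, output path, and field names are hypothetical; the logical-type declarations and epoch-based values mirror the Scala tests added in this patch): an Avro int field with logicalType "date" is now mapped to a Carbon DATE column, and a long field with "timestamp-millis" or "timestamp-micros" to a TIMESTAMP column.

  import org.apache.avro.Schema;
  import org.apache.avro.generic.GenericData;
  import org.apache.carbondata.sdk.file.CarbonWriter;

  public class LogicalTypeWriteExample {
    public static void main(String[] args) throws Exception {
      String avroSchema = "{\"type\":\"record\",\"name\":\"StudentActivity\",\"fields\":["
          + "{\"name\":\"enrol_date\",\"type\":{\"type\":\"int\",\"logicalType\":\"date\"}},"
          + "{\"name\":\"update_time\",\"type\":{\"type\":\"long\",\"logicalType\":\"timestamp-millis\"}}]}";
      Schema schema = new Schema.Parser().parse(avroSchema);

      GenericData.Record record = new GenericData.Record(schema);
      record.put("enrol_date", 101);          // days since epoch -> DATE 1970-04-12
      record.put("update_time", 172800000L);  // millis since epoch -> TIMESTAMP two days after epoch

      CarbonWriter writer = CarbonWriter.builder()
          .outputPath("./sdk_logical_types")
          .isTransactionalTable(false)
          .buildWriterForAvroInput(schema);
      writer.write(record);
      writer.close();
    }
  }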


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/2f234869
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/2f234869
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/2f234869

Branch: refs/heads/spark-2.3
Commit: 2f2348690964ac87c2f38939280958f2469d212d
Parents: 27d7059
Author: kunal642 <ku...@gmail.com>
Authored: Mon May 28 11:41:59 2018 +0530
Committer: kumarvishal09 <ku...@gmail.com>
Committed: Tue Jun 5 11:52:09 2018 +0530

----------------------------------------------------------------------
 .../DirectDictionaryGenerator.java              |   2 +
 .../DateDirectDictionaryGenerator.java          |   2 +-
 .../TimeStampDirectDictionaryGenerator.java     |   2 +-
 .../TestNonTransactionalCarbonTable.scala       | 145 ++++++++++++++++++-
 .../processing/datatypes/PrimitiveDataType.java |  44 +++++-
 .../loading/dictionary/DirectDictionary.java    |   4 +
 .../InputProcessorStepWithNoConverterImpl.java  |  24 ++-
 .../carbondata/sdk/file/AvroCarbonWriter.java   |  71 ++++++++-
 8 files changed, 279 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/2f234869/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/DirectDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/DirectDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/DirectDictionaryGenerator.java
index 469fe1e..2139f31 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/DirectDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/DirectDictionaryGenerator.java
@@ -40,6 +40,8 @@ public interface DirectDictionaryGenerator {
    */
   Object getValueFromSurrogate(int key);
 
+  int generateKey(long value);
+
   /**
    * The method generate and returns the dictionary / surrogate key for direct dictionary column
    * This Method is called while executing filter queries for getting direct surrogate members.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2f234869/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
index c49af9c..329e260 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/DateDirectDictionaryGenerator.java
@@ -163,7 +163,7 @@ public class DateDirectDictionaryGenerator implements DirectDictionaryGenerator
     }
   }
 
-  private int generateKey(long timeValue) {
+  public int generateKey(long timeValue) {
     if (timeValue < MIN_VALUE || timeValue > MAX_VALUE) {
       if (LOGGER.isDebugEnabled()) {
         LOGGER.debug("Value for date type column is not in valid range. Value considered as null.");

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2f234869/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
index d218e99..c7a4194 100644
--- a/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
@@ -206,7 +206,7 @@ public class TimeStampDirectDictionaryGenerator implements DirectDictionaryGener
     }
   }
 
-  private int generateKey(long timeValue) {
+  public int generateKey(long timeValue) {
     long time = (timeValue - cutOffTimeStamp) / granularityFactor;
     int keyValue = -1;
     if (time >= (long) Integer.MIN_VALUE && time <= (long) Integer.MAX_VALUE) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2f234869/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index 5beb9c4..095d12d 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -17,7 +17,7 @@
 
 package org.apache.carbondata.spark.testsuite.createTable
 
-import java.sql.Timestamp
+import java.sql.{Date, Timestamp}
 import java.io.{File, FileFilter, IOException}
 import java.util
 import java.util.concurrent.TimeUnit
@@ -42,6 +42,7 @@ import scala.concurrent.duration.Duration
 
 import org.apache.avro
 import org.apache.commons.lang.CharEncoding
+import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
 import tech.allegro.schema.json2avro.converter.JsonAvroConverter
 
 import org.apache.carbondata.core.metadata.datatype.{DataTypes, StructField}
@@ -2151,4 +2152,146 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
     writer.close()
   }
 
+  test("test logical type date") {
+    sql("drop table if exists sdkOutputTable")
+    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
+    val schema1 =
+      """{
+        |	"namespace": "com.apache.schema",
+        |	"type": "record",
+        |	"name": "StudentActivity",
+        |	"fields": [
+        |		{
+        |			"name": "id",
+        |						"type": {"type" : "int", "logicalType": "date"}
+        |		},
+        |		{
+        |			"name": "course_details",
+        |			"type": {
+        |				"name": "course_details",
+        |				"type": "record",
+        |				"fields": [
+        |					{
+        |						"name": "course_struct_course_time",
+        |						"type": {"type" : "int", "logicalType": "date"}
+        |					}
+        |				]
+        |			}
+        |		}
+        |	]
+        |}""".stripMargin
+
+    val json1 =
+      """{"id": 101, "course_details": { "course_struct_course_time":10}}""".stripMargin
+    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
+    val converter = new JsonAvroConverter
+    val record = converter
+      .convertToGenericDataRecord(json1.getBytes(CharEncoding.UTF_8), nn)
+
+    val writer = CarbonWriter.builder
+      .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
+    writer.write(record)
+    writer.close()
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkOutputTable(dateType date, course_details struct<course_struct_course_time: date>) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkOutputTable"), Seq(Row(java.sql.Date.valueOf("1970-04-12"), Row(java.sql.Date.valueOf("1970-01-11")))))
+  }
+
+  test("test logical type timestamp-millis") {
+    sql("drop table if exists sdkOutputTable")
+    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
+    val schema1 =
+      """{
+        |	"namespace": "com.apache.schema",
+        |	"type": "record",
+        |	"name": "StudentActivity",
+        |	"fields": [
+        |		{
+        |			"name": "id",
+        |						"type": {"type" : "long", "logicalType": "timestamp-millis"}
+        |		},
+        |		{
+        |			"name": "course_details",
+        |			"type": {
+        |				"name": "course_details",
+        |				"type": "record",
+        |				"fields": [
+        |					{
+        |						"name": "course_struct_course_time",
+        |						"type": {"type" : "long", "logicalType": "timestamp-millis"}
+        |					}
+        |				]
+        |			}
+        |		}
+        |	]
+        |}""".stripMargin
+
+    val json1 =
+      """{"id": 172800000,"course_details": { "course_struct_course_time":172800000}}""".stripMargin
+
+    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
+    val converter = new JsonAvroConverter
+    val record = converter
+      .convertToGenericDataRecord(json1.getBytes(CharEncoding.UTF_8), nn)
+
+    val writer = CarbonWriter.builder
+      .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
+    writer.write(record)
+    writer.close()
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkOutputTable(dateType timestamp, course_details struct<course_struct_course_time: timestamp>) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkOutputTable"), Seq(Row(Timestamp.valueOf("1970-01-02 16:00:00"), Row(Timestamp.valueOf("1970-01-02 16:00:00")))))
+  }
+
+  test("test logical type-micros timestamp") {
+    sql("drop table if exists sdkOutputTable")
+    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
+    val schema1 =
+      """{
+        |	"namespace": "com.apache.schema",
+        |	"type": "record",
+        |	"name": "StudentActivity",
+        |	"fields": [
+        |		{
+        |			"name": "id",
+        |						"type": {"type" : "long", "logicalType": "timestamp-micros"}
+        |		},
+        |		{
+        |			"name": "course_details",
+        |			"type": {
+        |				"name": "course_details",
+        |				"type": "record",
+        |				"fields": [
+        |					{
+        |						"name": "course_struct_course_time",
+        |						"type": {"type" : "long", "logicalType": "timestamp-micros"}
+        |					}
+        |				]
+        |			}
+        |		}
+        |	]
+        |}""".stripMargin
+
+    val json1 =
+      """{"id": 172800000000,"course_details": { "course_struct_course_time":172800000000}}""".stripMargin
+
+    val nn = new org.apache.avro.Schema.Parser().parse(schema1)
+    val converter = new JsonAvroConverter
+    val record = converter
+      .convertToGenericDataRecord(json1.getBytes(CharEncoding.UTF_8), nn)
+
+    val writer = CarbonWriter.builder
+      .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
+    writer.write(record)
+    writer.close()
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkOutputTable(dateType timestamp, course_details struct<course_struct_course_time: timestamp>) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkOutputTable"), Seq(Row(Timestamp.valueOf("1970-01-02 16:00:00"), Row(Timestamp.valueOf("1970-01-02 16:00:00")))))
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2f234869/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java b/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
index 7450b82..3a477ce 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
@@ -288,7 +288,11 @@ public class PrimitiveDataType implements GenericDataType<Object> {
           logHolder.setReason(message);
         }
       } else {
-        surrogateKey = dictionaryGenerator.getOrGenerateKey(parsedValue);
+        if (dictionaryGenerator instanceof DirectDictionary && input instanceof Long) {
+          surrogateKey = ((DirectDictionary) dictionaryGenerator).generateKey((long) input);
+        } else {
+          surrogateKey = dictionaryGenerator.getOrGenerateKey(parsedValue);
+        }
         if (surrogateKey == CarbonCommonConstants.INVALID_SURROGATE_KEY) {
           surrogateKey = CarbonCommonConstants.MEMBER_DEFAULT_VAL_SURROGATE_KEY;
           message = CarbonDataProcessorUtil
@@ -316,15 +320,36 @@ public class PrimitiveDataType implements GenericDataType<Object> {
           if (!this.carbonDimension.getUseActualData()) {
             byte[] value = null;
             if (isDirectDictionary) {
-              int surrogateKey = dictionaryGenerator.getOrGenerateKey(parsedValue);
+              int surrogateKey;
+              // If the input is a long value then this means that logical type was provided by
+              // the user using AvroCarbonWriter. In this case directly generate surrogate key
+              // using dictionaryGenerator.
+              if (dictionaryGenerator instanceof DirectDictionary && input instanceof Long) {
+                surrogateKey = ((DirectDictionary) dictionaryGenerator).generateKey((long) input);
+              } else {
+                surrogateKey = dictionaryGenerator.getOrGenerateKey(parsedValue);
+              }
               if (surrogateKey == CarbonCommonConstants.INVALID_SURROGATE_KEY) {
                 value = new byte[0];
               } else {
                 value = ByteUtil.toBytes(surrogateKey);
               }
             } else {
-              value = DataTypeUtil.getBytesBasedOnDataTypeForNoDictionaryColumn(parsedValue,
-                  this.carbonDimension.getDataType(), dateFormat);
+              // If the input is a long value then this means that logical type was provided by
+              // the user using AvroCarbonWriter. In this case directly generate Bytes from value.
+              if (this.carbonDimension.getDataType().equals(DataTypes.DATE)
+                  || this.carbonDimension.getDataType().equals(DataTypes.TIMESTAMP)
+                  && input instanceof Long) {
+                if (dictionaryGenerator != null) {
+                  value = ByteUtil.toBytes(((DirectDictionary) dictionaryGenerator)
+                      .generateKey((long) input));
+                } else {
+                  value = ByteUtil.toBytes(Long.parseLong(parsedValue));
+                }
+              } else {
+                value = DataTypeUtil.getBytesBasedOnDataTypeForNoDictionaryColumn(parsedValue,
+                    this.carbonDimension.getDataType(), dateFormat);
+              }
               if (this.carbonDimension.getDataType() == DataTypes.STRING
                   && value.length > CarbonCommonConstants.MAX_CHARS_PER_COLUMN_DEFAULT) {
                 throw new CarbonDataLoadingException("Dataload failed, String size cannot exceed "
@@ -333,8 +358,15 @@ public class PrimitiveDataType implements GenericDataType<Object> {
             }
             updateValueToByteStream(dataOutputStream, value);
           } else {
-            Object value = DataTypeUtil.getDataDataTypeForNoDictionaryColumn(parsedValue,
-                this.carbonDimension.getDataType(), dateFormat);
+            Object value;
+            if (dictionaryGenerator instanceof DirectDictionary
+                && input instanceof Long) {
+              value = ByteUtil.toBytes(
+                  ((DirectDictionary) dictionaryGenerator).generateKey((long) input));
+            } else {
+              value = DataTypeUtil.getDataDataTypeForNoDictionaryColumn(parsedValue,
+                  this.carbonDimension.getDataType(), dateFormat);
+            }
             if (this.carbonDimension.getDataType() == DataTypes.STRING
                 && value.toString().length() > CarbonCommonConstants.MAX_CHARS_PER_COLUMN_DEFAULT) {
               throw new CarbonDataLoadingException("Dataload failed, String size cannot exceed "

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2f234869/processing/src/main/java/org/apache/carbondata/processing/loading/dictionary/DirectDictionary.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/dictionary/DirectDictionary.java b/processing/src/main/java/org/apache/carbondata/processing/loading/dictionary/DirectDictionary.java
index 165e5a4..33dc8e3 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/dictionary/DirectDictionary.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/dictionary/DirectDictionary.java
@@ -46,6 +46,10 @@ public class DirectDictionary implements BiDictionary<Integer, Object> {
     return dictionaryGenerator.generateDirectSurrogateKey(value.toString());
   }
 
+  public Integer generateKey(long value) {
+    return dictionaryGenerator.generateKey(value);
+  }
+
   @Override
   public Object getValue(Integer key) {
     return dictionaryGenerator.getValueFromSurrogate(key);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2f234869/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepWithNoConverterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepWithNoConverterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepWithNoConverterImpl.java
index c99a413..5f7a94c 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepWithNoConverterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepWithNoConverterImpl.java
@@ -28,6 +28,8 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.carbondata.common.CarbonIterator;
 import org.apache.carbondata.core.datastore.row.CarbonRow;
+import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
+import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
@@ -215,6 +217,10 @@ public class InputProcessorStepWithNoConverterImpl extends AbstractDataLoadProce
 
     private Map<Integer, GenericDataType> dataFieldsWithComplexDataType;
 
+    private DirectDictionaryGenerator dateDictionaryGenerator;
+
+    private DirectDictionaryGenerator timestampDictionaryGenerator;
+
     public InputProcessorIterator(List<CarbonIterator<Object[]>> inputIterators, int batchSize,
         boolean preFetch, AtomicLong rowCounter, int[] orderOfData, boolean[] noDictionaryMapping,
         DataType[] dataTypes, CarbonDataLoadConfiguration configuration,
@@ -313,7 +319,23 @@ public class InputProcessorStepWithNoConverterImpl extends AbstractDataLoadProce
               throw new CarbonDataLoadingException("Loading Exception", e);
             }
           } else {
-            newData[i] = data[orderOfData[i]];
+            DataType dataType = dataFields[i].getColumn().getDataType();
+            if (dataType == DataTypes.DATE && data[orderOfData[i]] instanceof Long) {
+              if (dateDictionaryGenerator == null) {
+                dateDictionaryGenerator = DirectDictionaryKeyGeneratorFactory
+                    .getDirectDictionaryGenerator(dataType, dataFields[i].getDateFormat());
+              }
+              newData[i] = dateDictionaryGenerator.generateKey((long) data[orderOfData[i]]);
+            } else if (dataType == DataTypes.TIMESTAMP && data[orderOfData[i]] instanceof Long) {
+              if (timestampDictionaryGenerator == null) {
+                timestampDictionaryGenerator =
+                    DirectDictionaryKeyGeneratorFactory
+                        .getDirectDictionaryGenerator(dataType, dataFields[i].getTimestampFormat());
+              }
+              newData[i] = timestampDictionaryGenerator.generateKey((long) data[orderOfData[i]]);
+            } else {
+              newData[i] = data[orderOfData[i]];
+            }
           }
         }
       }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2f234869/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
index 8bbf364..edecd6b 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
@@ -24,15 +24,21 @@ import java.util.Random;
 import java.util.UUID;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.core.keygenerator.directdictionary.timestamp.DateDirectDictionaryGenerator;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.datatype.StructField;
+import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.hadoop.api.CarbonTableOutputFormat;
 import org.apache.carbondata.hadoop.internal.ObjectArrayWritable;
 import org.apache.carbondata.processing.loading.complexobjects.ArrayObject;
 import org.apache.carbondata.processing.loading.complexobjects.StructObject;
 import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
 
+import org.apache.avro.LogicalType;
+import org.apache.avro.LogicalTypes;
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericData;
 import org.apache.hadoop.conf.Configuration;
@@ -55,6 +61,8 @@ public class AvroCarbonWriter extends CarbonWriter {
   private TaskAttemptContext context;
   private ObjectArrayWritable writable;
   private Schema avroSchema;
+  private static final LogService LOGGER =
+      LogServiceFactory.getLogService(CarbonTable.class.getName());
 
   AvroCarbonWriter(CarbonLoadModel loadModel) throws IOException {
     Configuration hadoopConf = new Configuration();
@@ -88,10 +96,35 @@ public class AvroCarbonWriter extends CarbonWriter {
   private Object avroFieldToObject(Schema.Field avroField, Object fieldValue) {
     Object out;
     Schema.Type type = avroField.schema().getType();
+    LogicalType logicalType = avroField.schema().getLogicalType();
     switch (type) {
-      case BOOLEAN:
       case INT:
+        if (logicalType != null) {
+          if (logicalType instanceof LogicalTypes.Date) {
+            int dateIntValue = (int) fieldValue;
+            out = dateIntValue * DateDirectDictionaryGenerator.MILLIS_PER_DAY;
+          } else {
+            LOGGER.warn("Actual type: INT, Logical Type: " + logicalType.getName());
+            out = fieldValue;
+          }
+        } else {
+          out = fieldValue;
+        }
+        break;
+      case BOOLEAN:
       case LONG:
+        if (logicalType != null && !(logicalType instanceof LogicalTypes.TimestampMillis)) {
+          if (logicalType instanceof LogicalTypes.TimestampMicros) {
+            long dateIntValue = (long) fieldValue;
+            out = dateIntValue / 1000L;
+          } else {
+            LOGGER.warn("Actual type: INT, Logical Type: " + logicalType.getName());
+            out = fieldValue;
+          }
+        } else {
+          out = fieldValue;
+        }
+        break;
       case DOUBLE:
       case STRING:
         out = fieldValue;
@@ -177,13 +210,27 @@ public class AvroCarbonWriter extends CarbonWriter {
     String FieldName = avroField.name();
     Schema childSchema = avroField.schema();
     Schema.Type type = childSchema.getType();
+    LogicalType logicalType = childSchema.getLogicalType();
     switch (type) {
       case BOOLEAN:
         return new Field(FieldName, DataTypes.BOOLEAN);
       case INT:
-        return new Field(FieldName, DataTypes.INT);
+        if (logicalType instanceof LogicalTypes.Date) {
+          return new Field(FieldName, DataTypes.DATE);
+        } else {
+          LOGGER.warn("Unsupported logical type. Considering Data Type as INT for " + childSchema
+              .getName());
+          return new Field(FieldName, DataTypes.INT);
+        }
       case LONG:
-        return new Field(FieldName, DataTypes.LONG);
+        if (logicalType instanceof LogicalTypes.TimestampMillis
+            || logicalType instanceof LogicalTypes.TimestampMicros) {
+          return new Field(FieldName, DataTypes.TIMESTAMP);
+        } else {
+          LOGGER.warn("Unsupported logical type. Considering Data Type as LONG for " + childSchema
+              .getName());
+          return new Field(FieldName, DataTypes.LONG);
+        }
       case DOUBLE:
         return new Field(FieldName, DataTypes.DOUBLE);
       case STRING:
@@ -221,13 +268,27 @@ public class AvroCarbonWriter extends CarbonWriter {
 
   private static StructField prepareSubFields(String FieldName, Schema childSchema) {
     Schema.Type type = childSchema.getType();
+    LogicalType logicalType = childSchema.getLogicalType();
     switch (type) {
       case BOOLEAN:
         return new StructField(FieldName, DataTypes.BOOLEAN);
       case INT:
-        return new StructField(FieldName, DataTypes.INT);
+        if (logicalType instanceof LogicalTypes.Date) {
+          return new StructField(FieldName, DataTypes.DATE);
+        } else {
+          LOGGER.warn("Unsupported logical type. Considering Data Type as INT for " + childSchema
+              .getName());
+          return new StructField(FieldName, DataTypes.INT);
+        }
       case LONG:
-        return new StructField(FieldName, DataTypes.LONG);
+        if (logicalType instanceof LogicalTypes.TimestampMillis
+            || logicalType instanceof LogicalTypes.TimestampMicros) {
+          return new StructField(FieldName, DataTypes.TIMESTAMP);
+        } else {
+          LOGGER.warn("Unsupported logical type. Considering Data Type as LONG for " + childSchema
+              .getName());
+          return new StructField(FieldName, DataTypes.LONG);
+        }
       case DOUBLE:
         return new StructField(FieldName, DataTypes.DOUBLE);
       case STRING:


[17/50] [abbrv] carbondata git commit: [CARBONDATA-2433][LUCENE]close the lucene index reader after every task and clean the resource and other functional issues

Posted by gv...@apache.org.
[CARBONDATA-2433][LUCENE]close the lucene index reader after every task and clean the resource and other functional issues

Problem:

The Lucene IndexReader opened during a query is never closed, which impacts performance and can lead to memory issues.
The Lucene index does not index stop words such as "is" and "the", which can return wrong results for TEXT_MATCH filter queries on those words.

Solution:

Close the index reader once the task is completed.
Make indexing of stop words configurable in Lucene indexing.

This closes #2269
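
For reference, a hedged sketch of opting in to stop-word indexing through the property added in this patch (the property is read on the indexing side, so it affects data indexed after it is set; the default is "false"):

  import org.apache.carbondata.core.constants.CarbonCommonConstants;
  import org.apache.carbondata.core.util.CarbonProperties;

  public class EnableLuceneStopWords {
    public static void main(String[] args) {
      // carbon.lucene.index.stop.words = "true" makes the Lucene datamap index
      // stop words such as "is" and "the" as well
      CarbonProperties.getInstance()
          .addProperty(CarbonCommonConstants.CARBON_LUCENE_INDEX_STOP_WORDS, "true");
      // a TEXT_MATCH filter on a stop word, e.g. TEXT_MATCH('description:the'),
      // can then return matching rows instead of missing them
    }
  }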


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/07a77fab
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/07a77fab
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/07a77fab

Branch: refs/heads/spark-2.3
Commit: 07a77fab7f24cffe72f42a38327479ea18a08c67
Parents: 77a1110
Author: akashrn5 <ak...@gmail.com>
Authored: Fri May 4 10:29:12 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue May 22 16:28:49 2018 +0530

----------------------------------------------------------------------
 .../core/constants/CarbonCommonConstants.java   |  7 ++++++
 .../core/datamap/AbstractDataMapJob.java        |  2 +-
 .../datamap/DistributableDataMapFormat.java     | 18 ++++++++++++---
 .../carbondata/core/datamap/TableDataMap.java   | 18 ++++++++++++---
 .../carbondata/core/datamap/dev/DataMap.java    |  5 +++++
 .../blockletindex/BlockletDataMap.java          |  5 +++++
 .../datamap/bloom/BloomCoarseGrainDataMap.java  |  5 +++++
 .../datamap/examples/MinMaxIndexDataMap.java    |  5 +++++
 .../lucene/LuceneCoarseGrainDataMap.java        |  5 +++++
 .../datamap/lucene/LuceneDataMapWriter.java     | 20 ++++++++++++++---
 .../datamap/lucene/LuceneFineGrainDataMap.java  | 23 +++++++++++++++++---
 .../lucene/LuceneFineGrainDataMapSuite.scala    | 23 ++++++++++++++++++--
 .../testsuite/datamap/CGDataMapTestCase.scala   |  7 ++++++
 .../testsuite/datamap/FGDataMapTestCase.scala   |  7 ++++++
 .../carbondata/spark/rdd/SparkDataMapJob.scala  |  3 +++
 15 files changed, 138 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index 5ba1fec..8ebce9e 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -1742,6 +1742,13 @@ public final class CarbonCommonConstants {
    */
   public static final String CARBON_QUERY_DATAMAP_BLOOM_CACHE_SIZE_DEFAULT_VAL = "512";
 
+  // By default lucene will not store or create an index for stop words like "is" and "the".
+  // If this property is set to true, lucene will also index stop words and return results for
+  // filters containing stop words (example: TEXT_MATCH('description':'the'))
+  public static final String CARBON_LUCENE_INDEX_STOP_WORDS = "carbon.lucene.index.stop.words";
+
+  public static final String CARBON_LUCENE_INDEX_STOP_WORDS_DEFAULT = "false";
+
   private CarbonCommonConstants() {
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java b/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
index 7d1cb48..ed3ecc9 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
@@ -30,4 +30,4 @@ public abstract class AbstractDataMapJob implements DataMapJob {
   @Override public void execute(CarbonTable carbonTable,
       FileInputFormat<Void, BlockletDataMapIndexWrapper> format) {
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/core/src/main/java/org/apache/carbondata/core/datamap/DistributableDataMapFormat.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DistributableDataMapFormat.java b/core/src/main/java/org/apache/carbondata/core/datamap/DistributableDataMapFormat.java
index 4200414..010c6c2 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DistributableDataMapFormat.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DistributableDataMapFormat.java
@@ -23,6 +23,7 @@ import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 
+import org.apache.carbondata.core.datamap.dev.DataMap;
 import org.apache.carbondata.core.datamap.dev.expr.DataMapDistributableWrapper;
 import org.apache.carbondata.core.datamap.dev.expr.DataMapExprWrapper;
 import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
@@ -107,6 +108,7 @@ public class DistributableDataMapFormat extends FileInputFormat<Void, ExtendedBl
     return new RecordReader<Void, ExtendedBlocklet>() {
       private Iterator<ExtendedBlocklet> blockletIterator;
       private ExtendedBlocklet currBlocklet;
+      private List<DataMap> dataMaps;
 
       @Override public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
           throws IOException, InterruptedException {
@@ -124,8 +126,11 @@ public class DistributableDataMapFormat extends FileInputFormat<Void, ExtendedBl
           blockletIterator = Collections.emptyIterator();
           return;
         }
-        List<ExtendedBlocklet> blocklets = tableDataMap.prune(distributable.getDistributable(),
-            dataMapExprWrapper.getFilterResolverIntf(distributable.getUniqueId()), partitions);
+        dataMaps = tableDataMap.getTableDataMaps(distributable.getDistributable());
+        List<ExtendedBlocklet> blocklets = tableDataMap
+            .prune(dataMaps,
+                distributable.getDistributable(),
+                dataMapExprWrapper.getFilterResolverIntf(distributable.getUniqueId()), partitions);
         for (ExtendedBlocklet blocklet : blocklets) {
           blocklet.setDataMapUniqueId(distributable.getUniqueId());
         }
@@ -137,6 +142,9 @@ public class DistributableDataMapFormat extends FileInputFormat<Void, ExtendedBl
         boolean hasNext = blockletIterator.hasNext();
         if (hasNext) {
           currBlocklet = blockletIterator.next();
+        } else {
+          // close all resources when all the results are returned
+          close();
         }
         return hasNext;
       }
@@ -158,7 +166,11 @@ public class DistributableDataMapFormat extends FileInputFormat<Void, ExtendedBl
 
       @Override
       public void close() throws IOException {
-
+        if (null != dataMaps) {
+          for (DataMap dataMap : dataMaps) {
+            dataMap.finish();
+          }
+        }
       }
     };
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/core/src/main/java/org/apache/carbondata/core/datamap/TableDataMap.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/TableDataMap.java b/core/src/main/java/org/apache/carbondata/core/datamap/TableDataMap.java
index b8254d4..4ce0f6c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/TableDataMap.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/TableDataMap.java
@@ -139,6 +139,17 @@ public final class TableDataMap extends OperationEventListener {
   }
 
   /**
+   * This method returns all the datamaps corresponding to the distributable object
+   *
+   * @param distributable
+   * @return
+   * @throws IOException
+   */
+  public List<DataMap> getTableDataMaps(DataMapDistributable distributable) throws IOException {
+    return dataMapFactory.getDataMaps(distributable);
+  }
+
+  /**
    * This method is used from any machine after it is distributed. It takes the distributable object
    * to prune the filters.
    *
@@ -146,11 +157,10 @@ public final class TableDataMap extends OperationEventListener {
    * @param filterExp
    * @return
    */
-  public List<ExtendedBlocklet> prune(DataMapDistributable distributable,
+  public List<ExtendedBlocklet> prune(List<DataMap> dataMaps, DataMapDistributable distributable,
       FilterResolverIntf filterExp, List<PartitionSpec> partitions) throws IOException {
     List<ExtendedBlocklet> detailedBlocklets = new ArrayList<>();
     List<Blocklet> blocklets = new ArrayList<>();
-    List<DataMap> dataMaps = dataMapFactory.getDataMaps(distributable);
     for (DataMap dataMap : dataMaps) {
       blocklets.addAll(dataMap.prune(filterExp,
           segmentPropertiesFetcher.getSegmentProperties(distributable.getSegment()),
@@ -192,7 +202,9 @@ public final class TableDataMap extends OperationEventListener {
    * Clears all datamap
    */
   public void clear() {
-    dataMapFactory.clear();
+    if (null != dataMapFactory) {
+      dataMapFactory.clear();
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMap.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMap.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMap.java
index 9fbdd90..d846281 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMap.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMap.java
@@ -55,4 +55,9 @@ public interface DataMap<T extends Blocklet> {
    */
   void clear();
 
+  /**
+   * clears all the resources for datamaps
+   */
+  void finish();
+
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java
index 6730ad5..6e43fbc 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMap.java
@@ -705,6 +705,11 @@ public class BlockletDataMap extends CoarseGrainDataMap implements Serializable
     return prune(filterExp, this.segmentProperties);
   }
 
+  @Override
+  public void finish() {
+
+  }
+
   private boolean isCorrectUUID(String[] fileDetails, PartitionSpec spec) {
     boolean needToScan = false;
     if (spec.getUuid() != null) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
index a5a141c..e9af0ff 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
@@ -205,4 +205,9 @@ public class BloomCoarseGrainDataMap extends CoarseGrainDataMap {
       return sb.toString();
     }
   }
+
+  @Override
+  public void finish() {
+
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
index ac6358e..78868a9 100644
--- a/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
+++ b/datamap/examples/src/minmaxdatamap/main/java/org/apache/carbondata/datamap/examples/MinMaxIndexDataMap.java
@@ -170,4 +170,9 @@ public class MinMaxIndexDataMap extends CoarseGrainDataMap {
     readMinMaxDataMap = null;
   }
 
+  @Override
+  public void finish() {
+
+  }
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMap.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMap.java
index 580f18b..77b5347 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMap.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMap.java
@@ -230,4 +230,9 @@ public class LuceneCoarseGrainDataMap extends CoarseGrainDataMap {
   public void clear() {
 
   }
+
+  @Override
+  public void finish() {
+
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
index 759b607..c7eb3d8 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
@@ -43,7 +43,9 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.CharArraySet;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
 import org.apache.lucene.codecs.lucene62.Lucene62Codec;
 import org.apache.lucene.document.Document;
@@ -85,6 +87,11 @@ public class LuceneDataMapWriter extends DataMapWriter {
 
   public static final String ROWID_NAME = "rowId";
 
+  private Codec speedCodec = new Lucene62Codec(Lucene50StoredFieldsFormat.Mode.BEST_SPEED);
+
+  private Codec compressionCodec =
+      new Lucene62Codec(Lucene50StoredFieldsFormat.Mode.BEST_COMPRESSION);
+
   private Map<LuceneColumnKeys, Map<Integer, RoaringBitmap>> cache = new HashMap<>();
 
   private int cacheSize;
@@ -123,7 +130,14 @@ public class LuceneDataMapWriter extends DataMapWriter {
    */
   public void onBlockletStart(int blockletId) throws IOException {
     if (null == analyzer) {
-      analyzer = new StandardAnalyzer();
+      if (CarbonProperties.getInstance()
+          .getProperty(CarbonCommonConstants.CARBON_LUCENE_INDEX_STOP_WORDS,
+              CarbonCommonConstants.CARBON_LUCENE_INDEX_STOP_WORDS_DEFAULT)
+          .equalsIgnoreCase("true")) {
+        analyzer = new StandardAnalyzer(CharArraySet.EMPTY_SET);
+      } else {
+        analyzer = new StandardAnalyzer();
+      }
     }
     // save index data into ram, write into disk after one page finished
     ramDir = new RAMDirectory();
@@ -162,10 +176,10 @@ public class LuceneDataMapWriter extends DataMapWriter {
         .getProperty(CarbonCommonConstants.CARBON_LUCENE_COMPRESSION_MODE,
             CarbonCommonConstants.CARBON_LUCENE_COMPRESSION_MODE_DEFAULT)
         .equalsIgnoreCase(CarbonCommonConstants.CARBON_LUCENE_COMPRESSION_MODE_DEFAULT)) {
-      indexWriterConfig.setCodec(new Lucene62Codec(Lucene50StoredFieldsFormat.Mode.BEST_SPEED));
+      indexWriterConfig.setCodec(speedCodec);
     } else {
       indexWriterConfig
-          .setCodec(new Lucene62Codec(Lucene50StoredFieldsFormat.Mode.BEST_COMPRESSION));
+          .setCodec(compressionCodec);
     }
 
     indexWriter = new IndexWriter(indexDir, indexWriterConfig);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
index 742f8d0..b26ab53 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
@@ -92,6 +92,8 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
 
   private boolean storeBlockletWise;
 
+  private IndexReader indexReader;
+
   LuceneFineGrainDataMap(Analyzer analyzer, DataMapSchema schema) {
     this.analyzer = analyzer;
     writeCacheSize = LuceneDataMapFactoryBase.validateAndGetWriteCacheSize(schema);
@@ -148,7 +150,7 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
     // open this index path , use HDFS default configuration
     Directory indexDir = new HdfsDirectory(indexPath, FileFactory.getConfiguration());
 
-    IndexReader indexReader = DirectoryReader.open(indexDir);
+    this.indexReader = DirectoryReader.open(indexDir);
     if (indexReader == null) {
       throw new RuntimeException("failed to create index reader object");
     }
@@ -247,7 +249,6 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
       // take the min of total documents available in the reader and limit if set by the user
       maxDocs = Math.min(maxDocs, indexSearcher.getIndexReader().maxDoc());
       // execute index search
-      // initialize to null, else ScoreDoc objects will get accumulated in memory
       TopDocs result = null;
       // the number of documents to be queried in one search. It will always be minimum of
       // search result and maxDocs
@@ -423,4 +424,20 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
 
   }
 
-}
\ No newline at end of file
+  @Override
+  public void finish() {
+    if (null != indexReader) {
+      try {
+        int referenceCount = indexReader.getRefCount();
+        if (referenceCount > 0) {
+          indexReader.decRef();
+          if (null != indexSearcherMap) {
+            indexSearcherMap.clear();
+          }
+        }
+      } catch (IOException e) {
+        LOGGER.error(e, "Ignoring the exception, Error while closing the lucene index reader");
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
index 0e885de..638d24d 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
@@ -117,7 +117,7 @@ class LuceneFineGrainDataMapSuite extends QueryTest with BeforeAndAfterAll {
   }
 
   test("test lucene fine grain data map") {
-    sql("drop datamap if exists dm on table datamap_test")
+//    sql("drop datamap if exists dm on table datamap_test")
     sql(
       s"""
          | CREATE DATAMAP dm ON TABLE datamap_test
@@ -799,6 +799,21 @@ class LuceneFineGrainDataMapSuite extends QueryTest with BeforeAndAfterAll {
     sql("drop datamap if exists dm_text on table datamap_test_table")
   }
 
+  test("test lucene indexing english stop words") {
+    sql("drop table if exists table_stop")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_LUCENE_INDEX_STOP_WORDS, "false")
+    sql("create table table_stop(suggestion string,goal string) stored by 'carbondata'")
+    sql(
+      "create datamap stop_dm on table table_stop using 'lucene' DMPROPERTIES('index_columns'='suggestion')")
+    sql("insert into table_stop select 'The is the stop word','abcde'")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_LUCENE_INDEX_STOP_WORDS, "true")
+    sql("insert into table_stop select 'The is one more stop word','defg'")
+    assert(
+      sql("select * from table_stop where text_match('suggestion:*is*')").collect().length == 1)
+  }
+
   override protected def afterAll(): Unit = {
     LuceneFineGrainDataMapSuite.deleteFile(file2)
     sql("DROP TABLE IF EXISTS normal_test")
@@ -813,11 +828,15 @@ class LuceneFineGrainDataMapSuite extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS datamap_test5")
     sql("DROP TABLE IF EXISTS datamap_test7")
     sql("DROP TABLE IF EXISTS datamap_main")
+    sql("DROP TABLE IF EXISTS table_stop")
     sql("use default")
     sql("drop database if exists lucene cascade")
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_SYSTEM_FOLDER_LOCATION,
-          CarbonProperties.getStorePath)
+        CarbonProperties.getStorePath)
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_LUCENE_INDEX_STOP_WORDS,
+        CarbonCommonConstants.CARBON_LUCENE_INDEX_STOP_WORDS_DEFAULT)
   }
 }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
index d8fc46f..848acde 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
@@ -234,6 +234,13 @@ class CGDataMap extends CoarseGrainDataMap {
   }
 
   override def isScanRequired(filterExp: FilterResolverIntf): Boolean = ???
+
+  /**
+   * clears all the resources for datamaps
+   */
+  override def finish() = {
+    ???
+  }
 }
 
 class CGDataMapWriter(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
index 535a112..e2642ff 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
@@ -259,6 +259,13 @@ class FGDataMap extends FineGrainDataMap {
   }
 
   override def isScanRequired(filterExp: FilterResolverIntf): Boolean = ???
+
+  /**
+   * clears all the resources for datamaps
+   */
+  override def finish() = {
+
+  }
 }
 
 class FGDataMapWriter(carbonTable: CarbonTable,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/07a77fab/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/SparkDataMapJob.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/SparkDataMapJob.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/SparkDataMapJob.scala
index 6ee566c..43ee31b 100644
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/SparkDataMapJob.scala
+++ b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/SparkDataMapJob.scala
@@ -71,6 +71,9 @@ class DataMapPruneRDD(sc: SparkContext,
     val inputSplit = split.asInstanceOf[DataMapRDDPartition].inputSplit
     val reader = dataMapFormat.createRecordReader(inputSplit, attemptContext)
     reader.initialize(inputSplit, attemptContext)
+    context.addTaskCompletionListener(_ => {
+      reader.close()
+    })
     val iter = new Iterator[ExtendedBlocklet] {
 
       private var havePair = false


[30/50] [abbrv] carbondata git commit: [CARBONDATA-2538] added filter while listing files from writer path

Posted by gv...@apache.org.
[CARBONDATA-2538] Added a filter while listing files from the writer path

1. Added a filter to list only index and carbondata files, so that a proper exception can be thrown even when lock files are present.
2. Updated the complex type docs.

This closes #2344
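
For illustration only (not part of the commit), below is a hedged sketch of the listing pattern the fix uses: filter the files under the writer path down to index and carbondata files before deciding whether the location is empty. CarbonFile, CarbonFileFilter, FileFactory, and the extension constants are taken from the diff below; the surrounding method and CarbonTablePath's import path are assumptions.

import java.io.IOException;

import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
import org.apache.carbondata.core.datastore.filesystem.CarbonFileFilter;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.util.path.CarbonTablePath;

public final class WriterPathListingSketch {
  static CarbonFile[] listDataAndIndexFiles(String writerPath) throws IOException {
    CarbonFile location = FileFactory.getCarbonFile(writerPath);
    // Keep only .carbonindex and .carbondata files so that stray lock files
    // do not hide a genuinely empty (or cleaned-up) non-transactional table.
    CarbonFile[] files = location.listFiles(new CarbonFileFilter() {
      @Override public boolean accept(CarbonFile file) {
        return file.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT)
            || file.getName().endsWith(CarbonTablePath.CARBON_DATA_EXT);
      }
    });
    if (files.length == 0) {
      throw new IOException("No files are present in the table location :" + writerPath);
    }
    return files;
  }
}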


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/d7773187
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/d7773187
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/d7773187

Branch: refs/heads/spark-2.3
Commit: d7773187f72c73b7f9514f13bce17de3c552247c
Parents: 8b80b12
Author: kunal642 <ku...@gmail.com>
Authored: Fri May 25 16:21:45 2018 +0530
Committer: manishgupta88 <to...@gmail.com>
Committed: Tue May 29 10:49:59 2018 +0530

----------------------------------------------------------------------
 .../core/metadata/schema/table/CarbonTable.java          |  2 +-
 .../readcommitter/LatestFilesReadCommittedScope.java     |  9 ++++++++-
 .../java/org/apache/carbondata/core/util/CarbonUtil.java | 11 ++++++++---
 docs/supported-data-types-in-carbondata.md               |  2 ++
 .../command/table/CarbonDescribeFormattedCommand.scala   |  2 +-
 5 files changed, 20 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/d7773187/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index 8528d6f..b1ed981 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -892,7 +892,7 @@ public class CarbonTable implements Serializable {
 
 
   public long size() throws IOException {
-    Map<String, Long> dataIndexSize = CarbonUtil.calculateDataIndexSize(this);
+    Map<String, Long> dataIndexSize = CarbonUtil.calculateDataIndexSize(this, true);
     Long dataSize = dataIndexSize.get(CarbonCommonConstants.CARBON_TOTAL_DATA_SIZE);
     if (dataSize == null) {
       dataSize = 0L;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d7773187/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java b/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
index 6106174..14bba65 100644
--- a/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
+++ b/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
@@ -26,6 +26,7 @@ import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastore.filesystem.CarbonFileFilter;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.indexstore.blockletindex.SegmentIndexFileStore;
 import org.apache.carbondata.core.mutate.UpdateVO;
@@ -138,7 +139,13 @@ public class LatestFilesReadCommittedScope implements ReadCommittedScope {
   @Override public void takeCarbonIndexFileSnapShot() throws IOException {
     // Read the current file Path get the list of indexes from the path.
     CarbonFile file = FileFactory.getCarbonFile(carbonFilePath);
-    if (file.listFiles().length == 0) {
+    CarbonFile[] files = file.listFiles(new CarbonFileFilter() {
+      @Override public boolean accept(CarbonFile file) {
+        return file.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT) || file.getName()
+            .endsWith(CarbonTablePath.CARBON_DATA_EXT);
+      }
+    });
+    if (files.length == 0) {
       // For nonTransactional table, files can be removed at any point of time.
       // So cannot assume files will be present
       throw new IOException("No files are present in the table location :" + carbonFilePath);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d7773187/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 23d02ef..9ccd772 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2550,7 +2550,8 @@ public final class CarbonUtil {
   /**
    * This method will calculate the data size and index size for carbon table
    */
-  public static Map<String, Long> calculateDataIndexSize(CarbonTable carbonTable)
+  public static Map<String, Long> calculateDataIndexSize(CarbonTable carbonTable,
+      Boolean updateSize)
       throws IOException {
     Map<String, Long> dataIndexSizeMap = new HashMap<String, Long>();
     long dataSize = 0L;
@@ -2565,7 +2566,11 @@ public final class CarbonUtil {
       SegmentStatusManager segmentStatusManager = new SegmentStatusManager(identifier);
       ICarbonLock carbonLock = segmentStatusManager.getTableStatusLock();
       try {
-        if (carbonLock.lockWithRetries()) {
+        boolean lockAcquired = true;
+        if (updateSize) {
+          lockAcquired = carbonLock.lockWithRetries();
+        }
+        if (lockAcquired) {
           LOGGER.info("Acquired lock for table for table status updation");
           String metadataPath = carbonTable.getMetadataPath();
           LoadMetadataDetails[] loadMetadataDetails =
@@ -2593,7 +2598,7 @@ public final class CarbonUtil {
             }
           }
           // If it contains old segment, write new load details
-          if (needUpdate) {
+          if (needUpdate && updateSize) {
             SegmentStatusManager.writeLoadDetailsIntoFile(
                 CarbonTablePath.getTableStatusFilePath(identifier.getTablePath()),
                 loadMetadataDetails);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d7773187/docs/supported-data-types-in-carbondata.md
----------------------------------------------------------------------
diff --git a/docs/supported-data-types-in-carbondata.md b/docs/supported-data-types-in-carbondata.md
index 6c21508..7260afe 100644
--- a/docs/supported-data-types-in-carbondata.md
+++ b/docs/supported-data-types-in-carbondata.md
@@ -38,6 +38,8 @@
   * Complex Types
     * arrays: ARRAY``<data_type>``
     * structs: STRUCT``<col_name : data_type COMMENT col_comment, ...>``
+    
+    **NOTE**: Only 2 level complex type schema is supported for now.
 
   * Other Types
     * BOOLEAN
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d7773187/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
index 69bb91f..7d15cc1 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
@@ -96,7 +96,7 @@ private[sql] case class CarbonDescribeFormattedCommand(
     val tableComment = tblProps.asScala.getOrElse(CarbonCommonConstants.TABLE_COMMENT, "")
     results ++= Seq(("Comment", tableComment, ""))
     results ++= Seq(("Table Block Size ", carbonTable.getBlockSizeInMB + " MB", ""))
-    val dataIndexSize = CarbonUtil.calculateDataIndexSize(carbonTable)
+    val dataIndexSize = CarbonUtil.calculateDataIndexSize(carbonTable, false)
     if (!dataIndexSize.isEmpty) {
       results ++= Seq((CarbonCommonConstants.TABLE_DATA_SIZE,
         dataIndexSize.get(CarbonCommonConstants.CARBON_TOTAL_DATA_SIZE).toString, ""))


[16/50] [abbrv] carbondata git commit: [CARBONDATA-2496] Changed to hadoop bloom implementation and added compress option to compress bloom on disk

Posted by gv...@apache.org.
[CARBONDATA-2496] Changed to the Hadoop bloom implementation and added a compress option to compress the bloom filter on disk

This PR replaces the Guava bloom filter with the Hadoop bloom filter implementation, and adds an option to compress the bloom filter both on disk and in memory.
The user can enable/disable compression with the bloom_compress property; it is enabled by default.

To check bloom performance, 100 million rows were loaded with a bloom datamap on a column with a cardinality of 5 million, using 'BLOOM_SIZE'='5000000' and 'bloom_fpp'='0.001'.

This closes #2324
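
As a back-of-the-envelope check (not part of the commit), the sizing formulas that calculateBloomStats applies in the diff below, fed with the test values above (n = 5,000,000 expected items, p = 0.001 false-positive rate), work out to roughly 72 million bits (about 8.6 MB uncompressed) and 10 hash functions. A minimal Java sketch of that arithmetic; the class name is hypothetical.

public final class BloomSizingSketch {
  public static void main(String[] args) {
    int n = 5_000_000;   // expected distinct values ('BLOOM_SIZE')
    double p = 0.001;    // acceptable false-positive rate ('bloom_fpp')
    // Number of bits   m = -n * ln(p) / (ln 2)^2
    double bits = -n * Math.log(p) / Math.pow(Math.log(2), 2);
    // Number of hashes k = m / n * ln 2
    double hashes = bits / n * Math.log(2);
    System.out.printf("bits=%.0f (~%.1f MB), hashes=%.0f%n",
        Math.ceil(bits), Math.ceil(bits) / 8 / 1024 / 1024, Math.ceil(hashes));
  }
}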


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/77a11107
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/77a11107
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/77a11107

Branch: refs/heads/spark-2.3
Commit: 77a11107c57beebda74925dbb328f7bad6c72136
Parents: d9534c2
Author: ravipesala <ra...@gmail.com>
Authored: Sun May 20 21:52:57 2018 +0530
Committer: manishgupta88 <to...@gmail.com>
Committed: Tue May 22 14:24:58 2018 +0530

----------------------------------------------------------------------
 .../blockletindex/BlockletDataMapFactory.java   |   5 +-
 .../datamap/bloom/BloomCoarseGrainDataMap.java  |   8 +-
 .../bloom/BloomCoarseGrainDataMapFactory.java   |  42 ++++++--
 .../carbondata/datamap/bloom/BloomDMModel.java  |  35 ++++--
 .../datamap/bloom/BloomDataMapBuilder.java      |  12 ++-
 .../datamap/bloom/BloomDataMapCache.java        |  12 +--
 .../datamap/bloom/BloomDataMapWriter.java       |  60 ++++++-----
 .../hadoop/util/bloom/CarbonBloomFilter.java    | 108 +++++++++++++++++++
 8 files changed, 225 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/77a11107/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
index 0188281..318fc6e 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
@@ -18,6 +18,7 @@ package org.apache.carbondata.core.indexstore.blockletindex;
 
 import java.io.IOException;
 import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.carbondata.core.cache.Cache;
 import org.apache.carbondata.core.cache.CacheProvider;
@@ -78,7 +79,7 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
   private AbsoluteTableIdentifier identifier;
 
   // segmentId -> list of index file
-  private Map<String, Set<TableBlockIndexUniqueIdentifier>> segmentMap = new HashMap<>();
+  private Map<String, Set<TableBlockIndexUniqueIdentifier>> segmentMap = new ConcurrentHashMap<>();
 
   private Cache<TableBlockIndexUniqueIdentifier, BlockletDataMapIndexWrapper> cache;
 
@@ -279,7 +280,7 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
   }
 
   @Override
-  public void clear() {
+  public synchronized void clear() {
     if (segmentMap.size() > 0) {
       for (String segmentId : segmentMap.keySet().toArray(new String[segmentMap.size()])) {
         clear(new Segment(segmentId, null, null));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/77a11107/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
index 09de25e..a5a141c 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMap.java
@@ -44,6 +44,7 @@ import org.apache.carbondata.core.util.CarbonUtil;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.bloom.Key;
 
 /**
  * BloomDataCoarseGrainMap is constructed in blocklet level. For each indexed column,
@@ -83,7 +84,7 @@ public class BloomCoarseGrainDataMap extends CoarseGrainDataMap {
 
   @Override
   public List<Blocklet> prune(FilterResolverIntf filterExp, SegmentProperties segmentProperties,
-      List<PartitionSpec> partitions) throws IOException {
+      List<PartitionSpec> partitions) {
     List<Blocklet> hitBlocklets = new ArrayList<Blocklet>();
     if (filterExp == null) {
       // null is different from empty here. Empty means after pruning, no blocklet need to scan.
@@ -97,8 +98,8 @@ public class BloomCoarseGrainDataMap extends CoarseGrainDataMap {
           this.indexPath.toString(), bloomQueryModel.columnName);
       List<BloomDMModel> bloomDMModels = this.bloomDataMapCache.getBloomDMModelByKey(cacheKey);
       for (BloomDMModel bloomDMModel : bloomDMModels) {
-        boolean scanRequired = bloomDMModel.getBloomFilter().mightContain(
-            convertValueToBytes(bloomQueryModel.dataType, bloomQueryModel.filterValue));
+        boolean scanRequired = bloomDMModel.getBloomFilter().membershipTest(new Key(
+            convertValueToBytes(bloomQueryModel.dataType, bloomQueryModel.filterValue)));
         if (scanRequired) {
           LOGGER.debug(String.format("BloomCoarseGrainDataMap: Need to scan -> blocklet#%s",
               String.valueOf(bloomDMModel.getBlockletNo())));
@@ -110,7 +111,6 @@ public class BloomCoarseGrainDataMap extends CoarseGrainDataMap {
         }
       }
     }
-
     return hitBlocklets;
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/77a11107/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
index 16b49f2..3231551 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomCoarseGrainDataMapFactory.java
@@ -28,6 +28,7 @@ import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.core.constants.CarbonV3DataFormatConstants;
 import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.DataMapLevel;
 import org.apache.carbondata.core.datamap.DataMapMeta;
@@ -66,22 +67,33 @@ public class BloomCoarseGrainDataMapFactory extends DataMapFactory<CoarseGrainDa
    */
   private static final String BLOOM_SIZE = "bloom_size";
   /**
-   * default size for bloom filter: suppose one blocklet contains 20 pages
-   * and all the indexed value is distinct.
+   * default size for bloom filter, cardinality of the column.
    */
-  private static final int DEFAULT_BLOOM_FILTER_SIZE = 32000 * 20;
+  private static final int DEFAULT_BLOOM_FILTER_SIZE =
+      CarbonV3DataFormatConstants.NUMBER_OF_ROWS_PER_BLOCKLET_COLUMN_PAGE_DEFAULT;
   /**
    * property for fpp(false-positive-probability) of bloom filter
    */
   private static final String BLOOM_FPP = "bloom_fpp";
   /**
-   * default value for fpp of bloom filter
+   * default value for fpp of bloom filter is 1%
    */
-  private static final double DEFAULT_BLOOM_FILTER_FPP = 0.00001d;
+  private static final double DEFAULT_BLOOM_FILTER_FPP = 0.01d;
+
+  /**
+   * property for compressing bloom while saving to disk.
+   */
+  private static final String COMPRESS_BLOOM = "bloom_compress";
+  /**
+   * Default value of compressing bloom while save to disk.
+   */
+  private static final boolean DEFAULT_BLOOM_COMPRESS = true;
+
   private DataMapMeta dataMapMeta;
   private String dataMapName;
   private int bloomFilterSize;
   private double bloomFilterFpp;
+  private boolean bloomCompress;
 
   public BloomCoarseGrainDataMapFactory(CarbonTable carbonTable, DataMapSchema dataMapSchema)
       throws MalformedDataMapCommandException {
@@ -94,6 +106,7 @@ public class BloomCoarseGrainDataMapFactory extends DataMapFactory<CoarseGrainDa
     List<CarbonColumn> indexedColumns = carbonTable.getIndexedColumns(dataMapSchema);
     this.bloomFilterSize = validateAndGetBloomFilterSize(dataMapSchema);
     this.bloomFilterFpp = validateAndGetBloomFilterFpp(dataMapSchema);
+    this.bloomCompress = validateAndGetBloomCompress(dataMapSchema);
     List<ExpressionType> optimizedOperations = new ArrayList<ExpressionType>();
     // todo: support more optimize operations
     optimizedOperations.add(ExpressionType.EQUALS);
@@ -163,6 +176,21 @@ public class BloomCoarseGrainDataMapFactory extends DataMapFactory<CoarseGrainDa
     return bloomFilterFpp;
   }
 
+  /**
+   * validate bloom DataMap COMPRESS_BLOOM
+   * Default value is true
+   */
+  private boolean validateAndGetBloomCompress(DataMapSchema dmSchema) {
+    String bloomCompress = dmSchema.getProperties().get(COMPRESS_BLOOM);
+    if (StringUtils.isBlank(bloomCompress)) {
+      LOGGER.warn(
+          String.format("Bloom compress is not configured for datamap %s, use default value %b",
+              dataMapName, DEFAULT_BLOOM_COMPRESS));
+      return DEFAULT_BLOOM_COMPRESS;
+    }
+    return Boolean.parseBoolean(bloomCompress);
+  }
+
   @Override
   public DataMapWriter createWriter(Segment segment, String shardName) throws IOException {
     LOGGER.info(
@@ -170,14 +198,14 @@ public class BloomCoarseGrainDataMapFactory extends DataMapFactory<CoarseGrainDa
             this.dataMapName, getCarbonTable().getTableName() , shardName));
     return new BloomDataMapWriter(getCarbonTable().getTablePath(), this.dataMapName,
         this.dataMapMeta.getIndexedColumns(), segment, shardName,
-        this.bloomFilterSize, this.bloomFilterFpp);
+        this.bloomFilterSize, this.bloomFilterFpp, bloomCompress);
   }
 
   @Override
   public DataMapBuilder createBuilder(Segment segment, String shardName) throws IOException {
     return new BloomDataMapBuilder(getCarbonTable().getTablePath(), this.dataMapName,
         this.dataMapMeta.getIndexedColumns(), segment, shardName,
-        this.bloomFilterSize, this.bloomFilterFpp);
+        this.bloomFilterSize, this.bloomFilterFpp, bloomCompress);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/carbondata/blob/77a11107/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDMModel.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDMModel.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDMModel.java
index 3cf2f3b..7317c70 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDMModel.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDMModel.java
@@ -16,22 +16,27 @@
  */
 package org.apache.carbondata.datamap.bloom;
 
-import java.io.Serializable;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 
-import com.google.common.hash.BloomFilter;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.util.bloom.CarbonBloomFilter;
 
 /**
  * This class holds a bloom filter for one blocklet
  */
 @InterfaceAudience.Internal
-public class BloomDMModel implements Serializable {
-  private static final long serialVersionUID = 7281578747306832771L;
+public class BloomDMModel implements Writable {
   private int blockletNo;
-  private BloomFilter<byte[]> bloomFilter;
+  private CarbonBloomFilter bloomFilter;
 
-  public BloomDMModel(int blockletNo, BloomFilter<byte[]> bloomFilter) {
+  public BloomDMModel() {
+  }
+
+  public BloomDMModel(int blockletNo, CarbonBloomFilter bloomFilter) {
     this.blockletNo = blockletNo;
     this.bloomFilter = bloomFilter;
   }
@@ -40,15 +45,29 @@ public class BloomDMModel implements Serializable {
     return blockletNo;
   }
 
-  public BloomFilter<byte[]> getBloomFilter() {
+  public CarbonBloomFilter getBloomFilter() {
     return bloomFilter;
   }
 
-  @Override public String toString() {
+  @Override
+  public String toString() {
     final StringBuilder sb = new StringBuilder("BloomDMModel{");
     sb.append(", blockletNo=").append(blockletNo);
     sb.append(", bloomFilter=").append(bloomFilter);
     sb.append('}');
     return sb.toString();
   }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    out.writeInt(blockletNo);
+    bloomFilter.write(out);
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    blockletNo = in.readInt();
+    bloomFilter = new CarbonBloomFilter();
+    bloomFilter.readFields(in);
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/77a11107/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapBuilder.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapBuilder.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapBuilder.java
index fa1aef7..e9929e3 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapBuilder.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapBuilder.java
@@ -29,6 +29,8 @@ import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
 import org.apache.carbondata.core.util.CarbonUtil;
 
+import org.apache.hadoop.util.bloom.Key;
+
 /**
  * Implementation for BloomFilter DataMap to rebuild the datamap for main table with existing data
  */
@@ -36,10 +38,10 @@ import org.apache.carbondata.core.util.CarbonUtil;
 public class BloomDataMapBuilder extends BloomDataMapWriter implements DataMapBuilder {
 
   BloomDataMapBuilder(String tablePath, String dataMapName, List<CarbonColumn> indexColumns,
-      Segment segment, String shardName, int bloomFilterSize, double bloomFilterFpp)
-      throws IOException {
-    super(tablePath, dataMapName, indexColumns, segment, shardName,
-        bloomFilterSize, bloomFilterFpp);
+      Segment segment, String shardName, int bloomFilterSize, double bloomFilterFpp,
+      boolean bloomCompress) throws IOException {
+    super(tablePath, dataMapName, indexColumns, segment, shardName, bloomFilterSize, bloomFilterFpp,
+        bloomCompress);
   }
 
   @Override
@@ -70,7 +72,7 @@ public class BloomDataMapBuilder extends BloomDataMapWriter implements DataMapBu
       } else {
         indexValue = CarbonUtil.getValueAsBytes(dataType, data);
       }
-      indexBloomFilters.get(i).put(indexValue);
+      indexBloomFilters.get(i).add(new Key(indexValue));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/77a11107/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
index 2411cf4..3de77ad 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapCache.java
@@ -19,7 +19,6 @@ package org.apache.carbondata.datamap.bloom;
 import java.io.DataInputStream;
 import java.io.EOFException;
 import java.io.IOException;
-import java.io.ObjectInputStream;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
@@ -133,15 +132,14 @@ public class BloomDataMapCache implements Serializable {
    */
   private List<BloomDMModel> loadBloomDataMapModel(CacheKey cacheKey) {
     DataInputStream dataInStream = null;
-    ObjectInputStream objectInStream = null;
     List<BloomDMModel> bloomDMModels = new ArrayList<BloomDMModel>();
     try {
       String indexFile = getIndexFileFromCacheKey(cacheKey);
       dataInStream = FileFactory.getDataInputStream(indexFile, FileFactory.getFileType(indexFile));
-      objectInStream = new ObjectInputStream(dataInStream);
       try {
-        BloomDMModel model = null;
-        while ((model = (BloomDMModel) objectInStream.readObject()) != null) {
+        while (dataInStream.available() > 0) {
+          BloomDMModel model = new BloomDMModel();
+          model.readFields(dataInStream);
           bloomDMModels.add(model);
         }
       } catch (EOFException e) {
@@ -150,12 +148,12 @@ public class BloomDataMapCache implements Serializable {
       }
       this.bloomDMCache.put(cacheKey, bloomDMModels);
       return bloomDMModels;
-    } catch (ClassNotFoundException | IOException e) {
+    } catch (IOException e) {
       clear(cacheKey);
       LOGGER.error(e, "Error occurs while reading bloom index");
       throw new RuntimeException("Error occurs while reading bloom index", e);
     } finally {
-      CarbonUtil.closeStreams(objectInStream, dataInStream);
+      CarbonUtil.closeStreams(dataInStream);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/77a11107/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
index f6eb331..b3e69f4 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
@@ -18,7 +18,6 @@ package org.apache.carbondata.datamap.bloom;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
-import java.io.ObjectOutputStream;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -34,8 +33,9 @@ import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
 import org.apache.carbondata.core.util.CarbonUtil;
 
-import com.google.common.hash.BloomFilter;
-import com.google.common.hash.Funnels;
+import org.apache.hadoop.util.bloom.CarbonBloomFilter;
+import org.apache.hadoop.util.bloom.Key;
+import org.apache.hadoop.util.hash.Hash;
 
 /**
  * BloomDataMap is constructed in CG level (blocklet level).
@@ -50,23 +50,23 @@ public class BloomDataMapWriter extends DataMapWriter {
       BloomDataMapWriter.class.getCanonicalName());
   private int bloomFilterSize;
   private double bloomFilterFpp;
+  private boolean compressBloom;
   protected int currentBlockletId;
   private List<String> currentDMFiles;
   private List<DataOutputStream> currentDataOutStreams;
-  private List<ObjectOutputStream> currentObjectOutStreams;
-  protected List<BloomFilter<byte[]>> indexBloomFilters;
+  protected List<CarbonBloomFilter> indexBloomFilters;
 
   BloomDataMapWriter(String tablePath, String dataMapName, List<CarbonColumn> indexColumns,
-      Segment segment, String shardName, int bloomFilterSize, double bloomFilterFpp)
+      Segment segment, String shardName, int bloomFilterSize, double bloomFilterFpp,
+      boolean compressBloom)
       throws IOException {
     super(tablePath, dataMapName, indexColumns, segment, shardName);
     this.bloomFilterSize = bloomFilterSize;
     this.bloomFilterFpp = bloomFilterFpp;
-
-    currentDMFiles = new ArrayList<String>(indexColumns.size());
-    currentDataOutStreams = new ArrayList<DataOutputStream>(indexColumns.size());
-    currentObjectOutStreams = new ArrayList<ObjectOutputStream>(indexColumns.size());
-    indexBloomFilters = new ArrayList<BloomFilter<byte[]>>(indexColumns.size());
+    this.compressBloom = compressBloom;
+    currentDMFiles = new ArrayList<>(indexColumns.size());
+    currentDataOutStreams = new ArrayList<>(indexColumns.size());
+    indexBloomFilters = new ArrayList<>(indexColumns.size());
     initDataMapFile();
     resetBloomFilters();
   }
@@ -86,12 +86,31 @@ public class BloomDataMapWriter extends DataMapWriter {
   protected void resetBloomFilters() {
     indexBloomFilters.clear();
     List<CarbonColumn> indexColumns = getIndexColumns();
+    int[] stats = calculateBloomStats();
     for (int i = 0; i < indexColumns.size(); i++) {
-      indexBloomFilters.add(BloomFilter.create(Funnels.byteArrayFunnel(),
-          bloomFilterSize, bloomFilterFpp));
+      indexBloomFilters
+          .add(new CarbonBloomFilter(stats[0], stats[1], Hash.MURMUR_HASH, compressBloom));
     }
   }
 
+  /**
+   * It calculates the bits size and number of hash functions to calculate bloom.
+   */
+  private int[] calculateBloomStats() {
+    /*
+     * n: how many items you expect to have in your filter
+     * p: your acceptable false positive rate
+     * Number of bits (m) = -n*ln(p) / (ln(2)^2)
+     * Number of hashes(k) = m/n * ln(2)
+     */
+    double sizeinBits = -bloomFilterSize * Math.log(bloomFilterFpp) / (Math.pow(Math.log(2), 2));
+    double numberOfHashes = sizeinBits / bloomFilterSize * Math.log(2);
+    int[] stats = new int[2];
+    stats[0] = (int) Math.ceil(sizeinBits);
+    stats[1] = (int) Math.ceil(numberOfHashes);
+    return stats;
+  }
+
   @Override
   public void onBlockletEnd(int blockletId) {
     writeBloomDataMapFile();
@@ -117,7 +136,7 @@ public class BloomDataMapWriter extends DataMapWriter {
         } else {
           indexValue = CarbonUtil.getValueAsBytes(dataType, data);
         }
-        indexBloomFilters.get(i).put(indexValue);
+        indexBloomFilters.get(i).add(new Key(indexValue));
       }
     }
   }
@@ -140,20 +159,17 @@ public class BloomDataMapWriter extends DataMapWriter {
       String dmFile = BloomCoarseGrainDataMap.getBloomIndexFile(dataMapPath,
           indexColumns.get(indexColId).getColName());
       DataOutputStream dataOutStream = null;
-      ObjectOutputStream objectOutStream = null;
       try {
         FileFactory.createNewFile(dmFile, FileFactory.getFileType(dmFile));
         dataOutStream = FileFactory.getDataOutputStream(dmFile,
             FileFactory.getFileType(dmFile));
-        objectOutStream = new ObjectOutputStream(dataOutStream);
       } catch (IOException e) {
-        CarbonUtil.closeStreams(objectOutStream, dataOutStream);
+        CarbonUtil.closeStreams(dataOutStream);
         throw new IOException(e);
       }
 
       this.currentDMFiles.add(dmFile);
       this.currentDataOutStreams.add(dataOutStream);
-      this.currentObjectOutStreams.add(objectOutStream);
     }
   }
 
@@ -165,14 +181,10 @@ public class BloomDataMapWriter extends DataMapWriter {
             new BloomDMModel(this.currentBlockletId, indexBloomFilters.get(indexColId));
         // only in higher version of guava-bloom-filter, it provides readFrom/writeTo interface.
         // In lower version, we use default java serializer to write bloomfilter.
-        this.currentObjectOutStreams.get(indexColId).writeObject(model);
-        this.currentObjectOutStreams.get(indexColId).flush();
+        model.write(this.currentDataOutStreams.get(indexColId));
         this.currentDataOutStreams.get(indexColId).flush();
       }
     } catch (Exception e) {
-      for (ObjectOutputStream objectOutputStream : currentObjectOutStreams) {
-        CarbonUtil.closeStreams(objectOutputStream);
-      }
       for (DataOutputStream dataOutputStream : currentDataOutStreams) {
         CarbonUtil.closeStreams(dataOutputStream);
       }
@@ -194,7 +206,7 @@ public class BloomDataMapWriter extends DataMapWriter {
     List<CarbonColumn> indexColumns = getIndexColumns();
     for (int indexColId = 0; indexColId < indexColumns.size(); indexColId++) {
       CarbonUtil.closeStreams(
-          currentDataOutStreams.get(indexColId), currentObjectOutStreams.get(indexColId));
+          currentDataOutStreams.get(indexColId));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/77a11107/datamap/bloom/src/main/java/org/apache/hadoop/util/bloom/CarbonBloomFilter.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/hadoop/util/bloom/CarbonBloomFilter.java b/datamap/bloom/src/main/java/org/apache/hadoop/util/bloom/CarbonBloomFilter.java
new file mode 100644
index 0000000..7c39cad
--- /dev/null
+++ b/datamap/bloom/src/main/java/org/apache/hadoop/util/bloom/CarbonBloomFilter.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util.bloom;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.BitSet;
+
+import org.roaringbitmap.RoaringBitmap;
+
+/**
+ * It is the extendable class to hadoop bloomfilter, it is extendable to implement compressed bloom
+ * and fast serialize and deserialize of bloom.
+ */
+public class CarbonBloomFilter extends BloomFilter {
+
+  private RoaringBitmap bitmap;
+
+  private boolean compress;
+
+  public CarbonBloomFilter() {
+  }
+
+  public CarbonBloomFilter(int vectorSize, int nbHash, int hashType, boolean compress) {
+    super(vectorSize, nbHash, hashType);
+    this.compress = compress;
+  }
+
+  @Override
+  public boolean membershipTest(Key key) {
+    if (key == null) {
+      throw new NullPointerException("key cannot be null");
+    }
+
+    int[] h = hash.hash(key);
+    hash.clear();
+    if (compress) {
+      // If it is compressed check in roaring bitmap
+      for (int i = 0; i < nbHash; i++) {
+        if (!bitmap.contains(h[i])) {
+          return false;
+        }
+      }
+    } else {
+      for (int i = 0; i < nbHash; i++) {
+        if (!bits.get(h[i])) {
+          return false;
+        }
+      }
+    }
+    return true;
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    out.writeInt(this.nbHash);
+    out.writeByte(this.hashType);
+    out.writeInt(this.vectorSize);
+    out.writeBoolean(compress);
+    if (!compress) {
+      byte[] bytes = bits.toByteArray();
+      out.writeInt(bytes.length);
+      out.write(bytes);
+    } else {
+      RoaringBitmap bitmap = new RoaringBitmap();
+      int length = bits.cardinality();
+      int nextSetBit = bits.nextSetBit(0);
+      for (int i = 0; i < length; ++i) {
+        bitmap.add(nextSetBit);
+        nextSetBit = bits.nextSetBit(nextSetBit + 1);
+      }
+      bitmap.serialize(out);
+    }
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    this.nbHash = in.readInt();
+    this.hashType = in.readByte();
+    this.vectorSize = in.readInt();
+    this.compress = in.readBoolean();
+    if (!compress) {
+      int len = in.readInt();
+      byte[] bytes = new byte[len];
+      in.readFully(bytes);
+      this.bits = BitSet.valueOf(bytes);
+    } else {
+      this.bitmap = new RoaringBitmap();
+      bitmap.deserialize(in);
+    }
+    this.hash = new HashFunction(this.vectorSize, this.nbHash, this.hashType);
+  }
+}
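
A hedged usage sketch of the new class, tying it to the sizing numbers above; apart from CarbonBloomFilter itself and the standard Hadoop bloom types (Key, Hash.MURMUR_HASH), all names and values are illustrative and not part of the patch:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.util.bloom.CarbonBloomFilter;
import org.apache.hadoop.util.bloom.Key;
import org.apache.hadoop.util.hash.Hash;

public final class CarbonBloomFilterRoundTrip {
  public static void main(String[] args) throws IOException {
    // vectorSize/nbHash as produced by the sizing formula; compress=false keeps the plain BitSet path
    CarbonBloomFilter filter = new CarbonBloomFilter(958506, 7, Hash.MURMUR_HASH, false);
    filter.add(new Key("robot0".getBytes(StandardCharsets.UTF_8)));

    // serialize with the write() added in this patch (no java.io.ObjectOutputStream involved)
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    filter.write(new DataOutputStream(bos));

    // deserialize into a fresh instance and probe it
    CarbonBloomFilter copy = new CarbonBloomFilter();
    copy.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
    System.out.println(copy.membershipTest(new Key("robot0".getBytes(StandardCharsets.UTF_8))));  // true
  }
}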


[10/50] [abbrv] carbondata git commit: [CARBONDATA-2487] Block filters for lucene with more than one text_match udf

Posted by gv...@apache.org.
[CARBONDATA-2487] Block filters for lucene with more than one text_match udf

This closes #2311


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/7cba44b9
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/7cba44b9
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/7cba44b9

Branch: refs/heads/spark-2.3
Commit: 7cba44b9036dfd39cdc8f16b29cdfa5314c85977
Parents: 0e01197
Author: Indhumathi27 <in...@gmail.com>
Authored: Wed May 16 16:48:30 2018 +0530
Committer: Jacky Li <ja...@qq.com>
Committed: Mon May 21 21:58:35 2018 +0800

----------------------------------------------------------------------
 .../lucene/LuceneFineGrainDataMapSuite.scala    | 26 +++++++++++++++-
 .../strategy/CarbonLateDecodeStrategy.scala     | 31 +++++++++++++++-----
 2 files changed, 48 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/7cba44b9/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
index b90d190..0e885de 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
@@ -730,7 +730,7 @@ class LuceneFineGrainDataMapSuite extends QueryTest with BeforeAndAfterAll {
     assert(msg.getCause.getMessage.contains("TEXT_MATCH is not supported on table"))
     sql("DROP TABLE table1")
   }
-
+  
   test("test lucene with flush_cache as true") {
     sql("DROP TABLE IF EXISTS datamap_test_table")
     sql(
@@ -775,6 +775,30 @@ class LuceneFineGrainDataMapSuite extends QueryTest with BeforeAndAfterAll {
     sql("drop datamap if exists dm_split_false on table datamap_test_table")
   }
 
+  test("test text_match filters with more than one text_match udf ") {
+    sql("DROP TABLE IF EXISTS datamap_test_table")
+    sql(
+      """
+        | CREATE TABLE datamap_test_table(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm_text ON TABLE datamap_test_table
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='name , city')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test_table OPTIONS('header'='false')")
+    val msg = intercept[MalformedCarbonCommandException] {
+      sql("SELECT * FROM datamap_test_table WHERE TEXT_MATCH('name:n0*') AND TEXT_MATCH" +
+          "('city:c0*')").show()
+    }
+    assert(msg.getMessage
+      .contains("Specify all search filters for Lucene within a single text_match UDF"))
+    sql("drop datamap if exists dm_text on table datamap_test_table")
+  }
+
   override protected def afterAll(): Unit = {
     LuceneFineGrainDataMapSuite.deleteFile(file2)
     sql("DROP TABLE IF EXISTS normal_test")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7cba44b9/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
index bca0946..df4c742 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/CarbonLateDecodeStrategy.scala
@@ -481,14 +481,29 @@ private[sql] class CarbonLateDecodeStrategy extends SparkStrategy {
 
     // For conciseness, all Catalyst filter expressions of type `expressions.Expression` below are
     // called `predicate`s, while all data source filters of type `sources.Filter` are simply called
-    // `filter`s.
-
-    val translated: Seq[(Expression, Filter)] =
-      for {
-        predicate <- predicatesWithoutComplex
-        filter <- translateFilter(predicate)
-      } yield predicate -> filter
-
+    // `filter`s. And block filters for lucene with more than one text_match udf
+    // Todo: handle when lucene and normal query filter is supported
+
+    var count = 0
+    val translated: Seq[(Expression, Filter)] = predicatesWithoutComplex.flatMap {
+      predicate =>
+        if (predicate.isInstanceOf[ScalaUDF]) {
+          predicate match {
+            case u: ScalaUDF if u.function.isInstanceOf[TextMatchUDF] ||
+                                u.function.isInstanceOf[TextMatchMaxDocUDF] => count = count + 1
+          }
+        }
+        if (count > 1) {
+          throw new MalformedCarbonCommandException(
+            "Specify all search filters for Lucene within a single text_match UDF")
+        }
+        val filter = translateFilter(predicate)
+        if (filter.isDefined) {
+          Some(predicate, filter.get)
+        } else {
+          None
+        }
+    }
 
     // A map from original Catalyst expressions to corresponding translated data source filters.
     val translatedMap: Map[Expression, Filter] = translated.toMap


[36/50] [abbrv] carbondata git commit: [CARBONDATA-2555] SDK reader set default isTransactional as false

Posted by gv...@apache.org.
[CARBONDATA-2555] SDK reader set default isTransactional as false

The SDK writer's default value of isTransactional is false, but the reader's default is not.
Fix the reader so that, by default, the SDK uses the flat folder structure.

This closes #2352
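
A hedged sketch of what the new default means for callers (path and column names are placeholders, not part of the patch): the builder now assumes a flat, non-transactional folder, so reading data that was written with isTransactionalTable(true) requires the explicit opt-in, exactly as the updated tests below do.

CarbonReader reader = CarbonReader.builder(path, "_temp")
    .isTransactionalTable(true)               // needed now that the default is false
    .projection(new String[]{"name", "age"})
    .build();
while (reader.hasNext()) {
  Object[] row = (Object[]) reader.readNextRow();
  // consume row
}
reader.close();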


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/74770aa3
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/74770aa3
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/74770aa3

Branch: refs/heads/spark-2.3
Commit: 74770aa38173985ece5527847f8e2446966e4fd7
Parents: a7faef8
Author: ajantha-bhat <aj...@gmail.com>
Authored: Tue May 29 12:03:55 2018 +0530
Committer: kunal642 <ku...@gmail.com>
Committed: Wed May 30 10:26:19 2018 +0530

----------------------------------------------------------------------
 docs/sdk-guide.md                               |  2 +-
 .../examples/sdk/CarbonReaderExample.java       |  1 -
 .../carbondata/examples/sdk/SDKS3Example.java   | 26 +++-----------------
 .../sdk/file/CarbonReaderBuilder.java           |  4 +--
 .../carbondata/sdk/file/CarbonReaderTest.java   | 20 +++++++++++----
 5 files changed, 21 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/74770aa3/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 3c575fe..1d225a9 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -492,7 +492,7 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
    * Configure the transactional status of table
    * If set to false, then reads the carbondata and carbonindex files from a flat folder structure.
    * If set to true, then reads the carbondata and carbonindex files from segment folder structure.
-   * Default value is true
+   * Default value is false
    *
    * @param isTransactionalTable whether is transactional table or not
    * @return CarbonReaderBuilder object

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74770aa3/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
index 937bfa0..d7886c0 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
@@ -44,7 +44,6 @@ public class CarbonReaderExample {
             fields[1] = new Field("age", DataTypes.INT);
 
             CarbonWriter writer = CarbonWriter.builder()
-                    .isTransactionalTable(true)
                     .outputPath(path)
                     .persistSchemaFile(true)
                     .buildWriterForCSVInput(new Schema(fields));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74770aa3/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
index 7fab2cc..80c56fc 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
@@ -29,8 +29,8 @@ public class SDKS3Example {
     public static void main(String[] args) throws Exception {
         LogService logger = LogServiceFactory.getLogService(SDKS3Example.class.getName());
         if (args == null || args.length < 3) {
-            logger.error("Usage: java CarbonS3Example: <access-key> <secret-key>" +
-                    "<s3-endpoint> [table-path-on-s3] [persistSchema] [transactionalTable]");
+            logger.error("Usage: java CarbonS3Example: <access-key> <secret-key>"
+                + "<s3-endpoint> [table-path-on-s3] [rows]");
             System.exit(0);
         }
 
@@ -44,24 +44,6 @@ public class SDKS3Example {
             num = Integer.parseInt(args[4]);
         }
 
-        Boolean persistSchema = true;
-        if (args.length > 5) {
-            if (args[5].equalsIgnoreCase("true")) {
-                persistSchema = true;
-            } else {
-                persistSchema = false;
-            }
-        }
-
-        Boolean transactionalTable = true;
-        if (args.length > 6) {
-            if (args[6].equalsIgnoreCase("true")) {
-                transactionalTable = true;
-            } else {
-                transactionalTable = false;
-            }
-        }
-
         Field[] fields = new Field[2];
         fields[0] = new Field("name", DataTypes.STRING);
         fields[1] = new Field("age", DataTypes.INT);
@@ -69,9 +51,7 @@ public class SDKS3Example {
                 .setAccessKey(args[0])
                 .setSecretKey(args[1])
                 .setEndPoint(args[2])
-                .outputPath(path)
-                .persistSchemaFile(persistSchema)
-                .isTransactionalTable(transactionalTable);
+                .outputPath(path);
 
         CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74770aa3/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index 1e73e8c..e99ff0d 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -48,7 +48,7 @@ public class CarbonReaderBuilder {
   private String[] projectionColumns;
   private Expression filterExpression;
   private String tableName;
-  private boolean isTransactionalTable = true;
+  private boolean isTransactionalTable;
 
   /**
    * It will be true if use the projectAllColumns method,
@@ -84,7 +84,7 @@ public class CarbonReaderBuilder {
    * Configure the transactional status of table
    * If set to false, then reads the carbondata and carbonindex files from a flat folder structure.
    * If set to true, then reads the carbondata and carbonindex files from segment folder structure.
-   * Default value is true
+   * Default value is false
    *
    * @param isTransactionalTable whether is transactional table or not
    * @return CarbonReaderBuilder object

http://git-wip-us.apache.org/repos/asf/carbondata/blob/74770aa3/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index ee095a1..deb6d06 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -61,7 +61,7 @@ public class CarbonReaderTest extends TestCase {
 
     TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
 
-    CarbonReader reader = CarbonReader.builder(path, "_temp")
+    CarbonReader reader = CarbonReader.builder(path, "_temp").isTransactionalTable(true)
         .projection(new String[]{"name", "age"}).build();
 
     // expected output after sorting
@@ -87,6 +87,7 @@ public class CarbonReaderTest extends TestCase {
     // Read again
     CarbonReader reader2 = CarbonReader
         .builder(path, "_temp")
+        .isTransactionalTable(true)
         .projection(new String[]{"name", "age"})
         .build();
 
@@ -118,6 +119,7 @@ public class CarbonReaderTest extends TestCase {
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
         .projection(new String[]{"name", "name", "age", "name"})
+        .isTransactionalTable(true)
         .build();
 
     // expected output after sorting
@@ -159,11 +161,13 @@ public class CarbonReaderTest extends TestCase {
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
         .projection(new String[]{"name", "age"})
+        .isTransactionalTable(true)
         .build();
     // Reader 2
     CarbonReader reader2 = CarbonReader
         .builder(path, "_temp")
         .projection(new String[]{"name", "age"})
+        .isTransactionalTable(true)
         .build();
 
     while (reader.hasNext()) {
@@ -191,7 +195,7 @@ public class CarbonReaderTest extends TestCase {
 
     TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
 
-    CarbonReader reader = CarbonReader.builder(path, "_temp")
+    CarbonReader reader = CarbonReader.builder(path, "_temp").isTransactionalTable(true)
         .projection(new String[]{"name", "age"}).build();
 
     reader.close();
@@ -305,7 +309,7 @@ public class CarbonReaderTest extends TestCase {
     // Write to a Non Transactional Table
     TestUtil.writeFilesAndVerify(new Schema(fields), path, true, false);
 
-    CarbonReader reader = CarbonReader.builder(path, "_temp")
+    CarbonReader reader = CarbonReader.builder(path, "_temp").isTransactionalTable(true)
         .projection(new String[]{"name", "age"})
         .isTransactionalTable(false)
         .build();
@@ -422,8 +426,8 @@ public class CarbonReaderTest extends TestCase {
     Assert.assertNotNull(dataFiles);
     Assert.assertTrue(dataFiles.length > 0);
 
-    CarbonReader reader = CarbonReader
-        .builder(path, "_temp")
+    CarbonReader reader = CarbonReader.builder(path, "_temp")
+        .isTransactionalTable(true)
         .projection(new String[]{
             "stringField"
             , "shortField"
@@ -548,6 +552,7 @@ public class CarbonReaderTest extends TestCase {
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
+        .isTransactionalTable(true)
         .projection(strings)
         .build();
 
@@ -662,6 +667,7 @@ public class CarbonReaderTest extends TestCase {
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
         .projection(strings)
+        .isTransactionalTable(true)
         .build();
 
     int i = 0;
@@ -766,6 +772,7 @@ public class CarbonReaderTest extends TestCase {
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
+        .isTransactionalTable(true)
         .projection(strings)
         .build();
 
@@ -808,6 +815,7 @@ public class CarbonReaderTest extends TestCase {
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
+        .isTransactionalTable(true)
         .projectAllColumns()
         .build();
 
@@ -846,6 +854,7 @@ public class CarbonReaderTest extends TestCase {
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
+        .isTransactionalTable(true)
         .build();
 
     // expected output after sorting
@@ -881,6 +890,7 @@ public class CarbonReaderTest extends TestCase {
       CarbonReader reader = CarbonReader
           .builder(path, "_temp")
           .projection(new String[]{})
+          .isTransactionalTable(true)
           .build();
       assert (false);
     } catch (RuntimeException e) {


[45/50] [abbrv] carbondata git commit: [CARBONDATA-2559] Task id set for each CarbonReader in ThreadLocal

Posted by gv...@apache.org.
[CARBONDATA-2559] Task id set for each CarbonReader in ThreadLocal

1. A task id is set for each CarbonReader, because every CarbonReader object needs its own ThreadLocal variable.
2. If no sort column is given to CarbonWriter, DESCRIBE FORMATTED no longer shows a default sort_columns value.
3. Issue: CarbonReader was being closed after one iteration, so when the reader iterated over the next batch it threw a NullPointerException because the reader was already closed.
Solution: the reader is closed if an exception is encountered; otherwise it is closed explicitly by the user (a usage sketch follows below).
4. The CarbonProperties API for the SDK is moved to the common API list, because setting properties is common to both CarbonReader and CarbonWriter.
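
A hedged sketch of the close semantics described in point 3 (path and column names are placeholders, not part of the patch): the reader is only auto-closed when an exception is thrown, so the caller closes it once every batch has been consumed.

CarbonReader reader = CarbonReader.builder(path, "_temp")
    .projection(new String[]{"name", "age"})
    .build();
try {
  while (reader.hasNext()) {
    // hasNext() may advance to the next internal RecordReader; the reader stays open across batches
    Object[] row = (Object[]) reader.readNextRow();
    // process row
  }
} finally {
  reader.close();   // explicit close by the user
}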


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/92d9b925
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/92d9b925
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/92d9b925

Branch: refs/heads/spark-2.3
Commit: 92d9b9256373763f05736e29d93b7e835e0da3dd
Parents: 4bb7e27
Author: rahulforallp <ra...@knoldus.in>
Authored: Tue May 29 10:23:46 2018 +0530
Committer: kumarvishal09 <ku...@gmail.com>
Committed: Mon Jun 4 17:49:05 2018 +0530

----------------------------------------------------------------------
 docs/sdk-guide.md                               | 95 ++++++++++----------
 .../TestNonTransactionalCarbonTable.scala       | 13 +--
 .../carbondata/sdk/file/CarbonReader.java       |  5 ++
 .../sdk/file/CarbonReaderBuilder.java           | 10 ++-
 .../sdk/file/CarbonWriterBuilder.java           |  4 +-
 .../sdk/file/CSVCarbonWriterTest.java           |  2 +-
 .../carbondata/sdk/file/CarbonReaderTest.java   | 41 ++++-----
 .../apache/carbondata/sdk/file/TestUtil.java    |  4 +-
 .../carbondata/store/LocalCarbonStoreTest.java  |  2 +-
 9 files changed, 96 insertions(+), 80 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/92d9b925/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 2371b33..5dbb5ac 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -350,52 +350,6 @@ public Schema(Field[] fields);
 public static Schema parseJson(String json);
 ```
 
-### Class org.apache.carbondata.core.util.CarbonProperties
-
-```
-/**
-* This method will be responsible to get the instance of CarbonProperties class
-*
-* @return carbon properties instance
-*/
-public static CarbonProperties getInstance();
-```
-
-```
-/**
-* This method will be used to add a new property
-*
-* @param key is a property name to set for carbon.
-* @param value is valid parameter corresponding to property.
-* @return CarbonProperties object
-*/
-public CarbonProperties addProperty(String key, String value);
-```
-
-```
-/**
-* This method will be used to get the property value. If property is not
-* present, then it will return the default value.
-*
-* @param key is a property name to get user specified value.
-* @return properties value for corresponding key. If not set, then returns null.
-*/
-public String getProperty(String key);
-```
-
-```
-/**
-* This method will be used to get the property value. If property is not
-* present, then it will return the default value.
-*
-* @param key is a property name to get user specified value..
-* @param defaultValue used to be returned by function if corrosponding key not set.
-* @return properties value for corresponding key. If not set, then returns specified defaultValue.
-*/
-public String getProperty(String key, String defaultValue);
-```
-Reference : [list of carbon properties](http://carbondata.apache.org/configuration-parameters.html)
-
 ### Class org.apache.carbondata.sdk.file.AvroCarbonWriter
 ```
 /**
@@ -705,3 +659,52 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
 ```
 
 Find S3 example code at [SDKS3Example](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java) in the CarbonData repo.
+
+
+# Common API List for CarbonReader and CarbonWriter
+
+### Class org.apache.carbondata.core.util.CarbonProperties
+
+```
+/**
+* This method will be responsible to get the instance of CarbonProperties class
+*
+* @return carbon properties instance
+*/
+public static CarbonProperties getInstance();
+```
+
+```
+/**
+* This method will be used to add a new property
+*
+* @param key is a property name to set for carbon.
+* @param value is valid parameter corresponding to property.
+* @return CarbonProperties object
+*/
+public CarbonProperties addProperty(String key, String value);
+```
+
+```
+/**
+* This method will be used to get the property value. If property is not
+* present, then it will return the default value.
+*
+* @param key is a property name to get user specified value.
+* @return properties value for corresponding key. If not set, then returns null.
+*/
+public String getProperty(String key);
+```
+
+```
+/**
+* This method will be used to get the property value. If property is not
+* present, then it will return the default value.
+*
+* @param key is a property name to get user specified value..
+* @param defaultValue used to be returned by function if corrosponding key not set.
+* @return properties value for corresponding key. If not set, then returns specified defaultValue.
+*/
+public String getProperty(String key, String defaultValue);
+```
+Reference : [list of carbon properties](http://carbondata.apache.org/configuration-parameters.html)
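
A short hedged sketch of the common API documented above (the property key is only an example; see the referenced list of carbon properties for valid keys, and note this snippet is not part of the patch):

CarbonProperties props = CarbonProperties.getInstance();
props.addProperty("enable.unsafe.sort", "true");
String enabled = props.getProperty("enable.unsafe.sort", "false");   // returns "true" once set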

http://git-wip-us.apache.org/repos/asf/carbondata/blob/92d9b925/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index 0083733..5beb9c4 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -378,7 +378,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
          |'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
-    checkExistence(sql("describe formatted sdkOutputTable"), true, "name")
+    checkExistence(sql("describe formatted sdkOutputTable"), true, "SORT_COLUMNS                        name")
 
     buildTestDataWithSortColumns(List())
     assert(new File(writerPath).exists())
@@ -390,15 +390,18 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
          |'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
-    sql("describe formatted sdkOutputTable").show(false)
+    checkExistence(sql("describe formatted sdkOutputTable"),false,"SORT_COLUMNS                        name")
     sql("select * from sdkOutputTable").show()
 
+    sql("DROP TABLE sdkOutputTable")
+    // drop table should not delete the files
+    assert(new File(writerPath).exists())
+    cleanTestData()
+
     intercept[RuntimeException] {
       buildTestDataWithSortColumns(List(""))
     }
-
-    sql("DROP TABLE sdkOutputTable")
-    // drop table should not delete the files
+    
     assert(!(new File(writerPath).exists()))
     cleanTestData()
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/92d9b925/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index 81db7b2..9af710f 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -24,6 +24,8 @@ import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
+import org.apache.carbondata.core.util.CarbonTaskInfo;
+import org.apache.carbondata.core.util.ThreadLocalTaskInfo;
 
 import org.apache.hadoop.mapreduce.RecordReader;
 
@@ -54,6 +56,9 @@ public class CarbonReader<T> {
     this.readers = readers;
     this.index = 0;
     this.currentReader = readers.get(0);
+    CarbonTaskInfo carbonTaskInfo = new CarbonTaskInfo();
+    carbonTaskInfo.setTaskId(System.nanoTime());
+    ThreadLocalTaskInfo.setCarbonTaskInfo(carbonTaskInfo);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/92d9b925/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index e99ff0d..9d7470e 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -233,9 +233,13 @@ public class CarbonReaderBuilder {
       TaskAttemptContextImpl attempt =
           new TaskAttemptContextImpl(job.getConfiguration(), new TaskAttemptID());
       RecordReader reader = format.createRecordReader(split, attempt);
-      reader.initialize(split, attempt);
-      reader.close();
-      readers.add(reader);
+      try {
+        reader.initialize(split, attempt);
+        readers.add(reader);
+      } catch (Exception e) {
+        reader.close();
+        throw e;
+      }
     }
 
     return new CarbonReader<>(readers);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/92d9b925/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index e2dc8c2..bd64568 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -379,7 +379,7 @@ public class CarbonWriterBuilder {
     }
 
     List<String> sortColumnsList = new ArrayList<>();
-    if (sortColumns == null || sortColumns.length == 0) {
+    if (sortColumns == null) {
       // If sort columns are not specified, default set all dimensions to sort column.
       // When dimensions are default set to sort column,
       // Inverted index will be supported by default for sort columns.
@@ -484,7 +484,7 @@ public class CarbonWriterBuilder {
           if (isSortColumn > -1) {
             columnSchema.setSortColumn(true);
             sortColumnsSchemaList[isSortColumn] = columnSchema;
-          } else if (sortColumnsList.isEmpty() && columnSchema.isDimensionColumn()
+          } else if (!sortColumnsList.isEmpty() && columnSchema.isDimensionColumn()
               && columnSchema.getNumberOfChild() < 1) {
             columnSchema.setSortColumn(true);
             sortColumnsSchemaList[i] = columnSchema;
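
A hedged sketch of the behaviour this hunk changes (schema and path values are placeholders, and the sortBy call is assumed to be the builder's sort-column setter; none of this is part of the patch): passing an explicit empty array now means "no sort columns", while omitting sortBy (sortColumns == null) keeps the old default of making every dimension a sort column.

Field[] fields = new Field[]{new Field("name", DataTypes.STRING), new Field("age", DataTypes.INT)};
CarbonWriter writer = CarbonWriter.builder()
    .outputPath(path)
    .sortBy(new String[0])                       // explicit empty array: no sort columns
    .buildWriterForCSVInput(new Schema(fields));
writer.write(new String[]{"robot0", "1"});       // CSV input row, as in the SDK tests
writer.close();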

http://git-wip-us.apache.org/repos/asf/carbondata/blob/92d9b925/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
index 1eed47b..865097b 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
@@ -205,7 +205,7 @@ public class CSVCarbonWriterTest {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     String schemaFile = CarbonTablePath.getSchemaFilePath(path);
     Assert.assertTrue(new File(schemaFile).exists());

http://git-wip-us.apache.org/repos/asf/carbondata/blob/92d9b925/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 95c25f8..db118cd 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -59,28 +59,28 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(200, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader.builder(path, "_temp").isTransactionalTable(true)
         .projection(new String[]{"name", "age"}).build();
 
     // expected output after sorting
-    String[] name = new String[100];
-    int[] age = new int[100];
-    for (int i = 0; i < 100; i++) {
+    String[] name = new String[200];
+    Integer[] age = new Integer[200];
+    for (int i = 0; i < 200; i++) {
       name[i] = "robot" + (i / 10);
-      age[i] = (i % 10) * 10 + i / 10;
+      age[i] = i;
     }
 
     int i = 0;
     while (reader.hasNext()) {
       Object[] row = (Object[]) reader.readNextRow();
       // Default sort column is applied for dimensions. So, need  to validate accordingly
-      Assert.assertEquals(name[i], row[0]);
-      Assert.assertEquals(age[i], row[1]);
+      assert(Arrays.asList(name).contains(row[0]));
+      assert(Arrays.asList(age).contains(row[1]));
       i++;
     }
-    Assert.assertEquals(i, 100);
+    Assert.assertEquals(i, 200);
 
     reader.close();
 
@@ -95,11 +95,11 @@ public class CarbonReaderTest extends TestCase {
     while (reader2.hasNext()) {
       Object[] row = (Object[]) reader2.readNextRow();
       // Default sort column is applied for dimensions. So, need  to validate accordingly
-      Assert.assertEquals(name[i], row[0]);
-      Assert.assertEquals(age[i], row[1]);
+      assert(Arrays.asList(name).contains(row[0]));
+      assert(Arrays.asList(age).contains(row[1]));
       i++;
     }
-    Assert.assertEquals(i, 100);
+    Assert.assertEquals(i, 200);
     reader2.close();
 
     FileUtils.deleteDirectory(new File(path));
@@ -114,7 +114,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
@@ -156,7 +156,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
@@ -193,7 +193,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader.builder(path, "_temp").isTransactionalTable(true)
         .projection(new String[]{"name", "age"}).build();
@@ -233,7 +233,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader
         .builder(path)
@@ -309,7 +309,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     File[] dataFiles = new File(path + "/Fact/Part0/Segment_null/").listFiles(new FilenameFilter() {
       @Override public boolean accept(File dir, String name) {
@@ -337,7 +337,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     File[] dataFiles = new File(path + "/Metadata").listFiles(new FilenameFilter() {
       @Override public boolean accept(File dir, String name) {
@@ -887,7 +887,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
@@ -926,7 +926,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
@@ -948,6 +948,7 @@ public class CarbonReaderTest extends TestCase {
       Assert.assertEquals(age[i], row[1]);
       i++;
     }
+    reader.close();
     Assert.assertEquals(i, 100);
   }
 
@@ -960,7 +961,7 @@ public class CarbonReaderTest extends TestCase {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     try {
       CarbonReader reader = CarbonReader

http://git-wip-us.apache.org/repos/asf/carbondata/blob/92d9b925/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
index eb406e2..0f00d61 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
@@ -39,8 +39,8 @@ public class TestUtil {
     writeFilesAndVerify(100, schema, path, sortColumns, false, -1, -1, true);
   }
 
-  public static void writeFilesAndVerify(Schema schema, String path, boolean persistSchema) {
-    writeFilesAndVerify(100, schema, path, null, persistSchema, -1, -1, true);
+  public static void writeFilesAndVerify(int rows, Schema schema, String path, boolean persistSchema) {
+    writeFilesAndVerify(rows, schema, path, null, persistSchema, -1, -1, true);
   }
 
   public static void writeFilesAndVerify(Schema schema, String path, boolean persistSchema,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/92d9b925/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java b/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
index 51d0b27..c885a26 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/store/LocalCarbonStoreTest.java
@@ -56,7 +56,7 @@ public class LocalCarbonStoreTest {
     fields[0] = new Field("name", DataTypes.STRING);
     fields[1] = new Field("age", DataTypes.INT);
 
-    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+    TestUtil.writeFilesAndVerify(100, new Schema(fields), path, true);
 
     CarbonStore store = new LocalCarbonStore();
     Iterator<CarbonRow> rows = store.scan(path, new String[]{"name, age"}, null);


[06/50] [abbrv] carbondata git commit: [CARBONDATA-2489] Coverity scan fixes

Posted by gv...@apache.org.
[CARBONDATA-2489] Coverity scan fixes

https://scan4.coverity.com/reports.htm#v29367/p11911

This closes #2313


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/7ef91645
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/7ef91645
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/7ef91645

Branch: refs/heads/spark-2.3
Commit: 7ef916455d8b490f3e32efd3a0bfeb80ab9127f1
Parents: f184de8
Author: Raghunandan S <ca...@gmail.com>
Authored: Sun Aug 27 23:37:05 2017 +0530
Committer: Venkata Ramana G <ra...@huawei.com>
Committed: Mon May 21 14:02:24 2018 +0530

----------------------------------------------------------------------
 README.md                                       |  21 +-
 .../impl/ExtendedRollingFileAppender.java       |   9 +-
 .../cache/dictionary/ColumnDictionaryInfo.java  |   3 +
 .../dictionary/DoubleArrayTrieDictionary.java   | 387 ---------------
 .../dictionary/ForwardDictionaryCache.java      |   4 +-
 .../core/constants/CarbonCommonConstants.java   |   2 +-
 .../core/datamap/AbstractDataMapJob.java        |   9 -
 .../carbondata/core/datamap/DataMapChooser.java |   4 +-
 .../core/datamap/DataMapStoreManager.java       |   7 +-
 .../core/datamap/dev/BlockletSerializer.java    |  14 +-
 .../chunk/impl/AbstractDimensionColumnPage.java |   8 +
 .../impl/ColumnGroupDimensionColumnPage.java    | 194 --------
 .../impl/FixedLengthDimensionColumnPage.java    |   5 +-
 .../impl/VariableLengthDimensionColumnPage.java |   5 +-
 ...mpressedDimensionChunkFileBasedReaderV1.java |  12 +-
 ...mpressedDimensionChunkFileBasedReaderV2.java |  12 +-
 ...mpressedDimensionChunkFileBasedReaderV3.java |   4 +-
 .../AbstractMeasureChunkReaderV2V3Format.java   |  12 +-
 .../core/datastore/compression/Compressor.java  |  10 -
 .../datastore/compression/SnappyCompressor.java |  74 +--
 .../filesystem/AbstractDFSCarbonFile.java       |   4 +-
 .../impl/btree/AbstractBTreeLeafNode.java       |   2 +-
 .../datastore/page/UnsafeDecimalColumnPage.java |  20 +-
 .../page/UnsafeFixLengthColumnPage.java         |  70 +--
 .../page/encoding/EncodingFactory.java          |   3 +
 .../page/statistics/KeyPageStatsCollector.java  |  17 +-
 .../page/statistics/LVStringStatsCollector.java |  21 +-
 .../IncrementalColumnDictionaryGenerator.java   |   4 +-
 .../generator/TableDictionaryGenerator.java     |   8 +-
 .../blockletindex/BlockletDataMapFactory.java   |  12 +-
 .../blockletindex/SegmentIndexFileStore.java    |  15 +-
 .../carbondata/core/locks/ZookeeperInit.java    |  10 +-
 .../core/memory/UnsafeMemoryManager.java        |  11 +-
 .../core/metadata/datatype/ArrayType.java       |  34 +-
 .../core/metadata/datatype/DecimalType.java     |  31 ++
 .../core/metadata/datatype/StructType.java      |  25 +
 .../schema/table/column/ColumnSchema.java       |  38 +-
 .../carbondata/core/preagg/TimeSeriesUDF.java   |   2 +-
 .../CarbonDictionaryMetadataReaderImpl.java     |   6 +
 .../RestructureBasedRawResultCollector.java     |   2 +-
 .../impl/SearchModeDetailQueryExecutor.java     |   4 +-
 .../SearchModeVectorDetailQueryExecutor.java    |   4 +-
 .../scan/filter/FilterExpressionProcessor.java  |   6 +-
 .../carbondata/core/scan/filter/FilterUtil.java |  89 ++--
 .../ExcludeColGroupFilterExecuterImpl.java      |  48 --
 .../IncludeColGroupFilterExecuterImpl.java      | 232 ---------
 .../executer/RangeValueFilterExecuterImpl.java  |   2 +-
 .../executer/RestructureEvaluatorImpl.java      |   1 -
 .../executer/RowLevelFilterExecuterImpl.java    |  20 +-
 .../RowLevelRangeGrtThanFiterExecuterImpl.java  |   7 +-
 ...elRangeGrtrThanEquaToFilterExecuterImpl.java |   4 +-
 ...velRangeLessThanEqualFilterExecuterImpl.java |   4 +-
 ...RowLevelRangeLessThanFilterExecuterImpl.java |   4 +-
 .../resolver/ConditionalFilterResolverImpl.java |   1 -
 .../AbstractDetailQueryResultIterator.java      |   9 +-
 .../scanner/impl/BlockletFilterScanner.java     |   4 +
 .../core/statusmanager/LoadMetadataDetails.java |   8 +-
 .../SegmentUpdateStatusManager.java             |  60 +--
 .../util/AbstractDataFileFooterConverter.java   |   7 +-
 .../core/util/CarbonMetadataUtil.java           |   2 +-
 .../carbondata/core/util/DataTypeUtil.java      |  12 +-
 .../core/util/path/CarbonTablePath.java         |  14 +-
 .../impl/ColumnGroupDimensionDataChunkTest.java | 118 -----
 .../filesystem/AlluxioCarbonFileTest.java       |   3 +
 .../filesystem/ViewFsCarbonFileTest.java        |   9 +-
 .../apache/carbondata/hadoop/CacheClient.java   |  49 --
 .../hadoop/api/CarbonOutputCommitter.java       |   3 +
 .../hadoop/internal/segment/Segment.java        |  23 -
 .../hive/CarbonDictionaryDecodeReadSupport.java |  11 +-
 .../hive/MapredCarbonInputFormat.java           |  18 +-
 .../presto/CarbondataSplitManager.java          |  27 +-
 .../presto/impl/CarbonLocalInputSplit.java      |  14 +-
 .../presto/readers/BooleanStreamReader.java     |   6 +-
 .../presto/readers/DoubleStreamReader.java      |   8 +-
 .../presto/readers/IntegerStreamReader.java     |   8 +-
 .../presto/readers/LongStreamReader.java        |   8 +-
 .../presto/readers/ObjectStreamReader.java      |  16 +-
 .../presto/readers/ShortStreamReader.java       |  10 +-
 .../presto/readers/SliceStreamReader.java       |   8 +-
 .../presto/readers/TimestampStreamReader.java   |  11 +-
 .../spark/sql/common/util/QueryTest.scala       |   4 +-
 .../server/SecureDictionaryServer.java          |   4 +-
 .../spark/rdd/CarbonCleanFilesRDD.scala         |  83 ----
 .../spark/rdd/CarbonDeleteLoadByDateRDD.scala   |  89 ----
 .../spark/rdd/CarbonDeleteLoadRDD.scala         |  84 ----
 .../spark/rdd/CarbonDropTableRDD.scala          |  71 ---
 .../apache/spark/sql/test/util/QueryTest.scala  |   9 +
 .../VectorizedCarbonRecordReader.java           |  24 +-
 .../processing/loading/BadRecordsLogger.java    |  19 +-
 .../loading/sort/impl/ThreadStatusObserver.java |  15 +-
 .../UnsafeBatchParallelReadMergeSorterImpl.java |   4 +-
 .../loading/sort/unsafe/UnsafeSortDataRows.java |   3 +-
 .../UnsafeInMemoryIntermediateDataMerger.java   |   7 +-
 .../merger/UnsafeIntermediateFileMerger.java    |   8 +-
 .../unsafe/merger/UnsafeIntermediateMerger.java |  20 +-
 .../UnsafeSingleThreadFinalSortFilesMerger.java |   9 +-
 .../loading/steps/InputProcessorStepImpl.java   |   4 +
 .../processing/merger/CarbonDataMergerUtil.java |  82 ++--
 .../merger/RowResultMergerProcessor.java        |  32 +-
 .../partition/impl/QueryPartitionHelper.java    |  74 ---
 .../sort/sortdata/IntermediateFileMerger.java   |   7 +-
 .../SingleThreadFinalSortFilesMerger.java       |   9 +-
 .../store/writer/AbstractFactDataWriter.java    |  32 +-
 .../processing/util/CarbonQueryUtil.java        |  80 ----
 .../carbondata/processing/StoreCreator.java     | 469 -------------------
 .../sdk/file/CarbonReaderBuilder.java           |   1 +
 .../carbondata/store/LocalCarbonStore.java      |  34 +-
 .../store/worker/SearchRequestHandler.java      |   3 +-
 .../streaming/CarbonStreamRecordReader.java     |   3 -
 109 files changed, 686 insertions(+), 2620 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index 4b4577e..be3186c 100644
--- a/README.md
+++ b/README.md
@@ -1,17 +1,17 @@
 <!--
-    Licensed to the Apache Software Foundation (ASF) under one or more 
+    Licensed to the Apache Software Foundation (ASF) under one or more
     contributor license agreements.  See the NOTICE file distributed with
-    this work for additional information regarding copyright ownership. 
+    this work for additional information regarding copyright ownership.
     The ASF licenses this file to you under the Apache License, Version 2.0
-    (the "License"); you may not use this file except in compliance with 
+    (the "License"); you may not use this file except in compliance with
     the License.  You may obtain a copy of the License at
 
       http://www.apache.org/licenses/LICENSE-2.0
 
-    Unless required by applicable law or agreed to in writing, software 
-    distributed under the License is distributed on an "AS IS" BASIS, 
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    See the License for the specific language governing permissions and 
+    See the License for the specific language governing permissions and
     limitations under the License.
 -->
 
@@ -31,6 +31,15 @@ Visit count: [![HitCount](http://hits.dwyl.io/jackylk/apache/carbondata.svg)](ht
 Spark2.2:
 [![Build Status](https://builds.apache.org/buildStatus/icon?job=carbondata-master-spark-2.2)](https://builds.apache.org/view/A-D/view/CarbonData/job/carbondata-master-spark-2.2/lastBuild/testReport)
 [![Coverage Status](https://coveralls.io/repos/github/apache/carbondata/badge.svg?branch=master)](https://coveralls.io/github/apache/carbondata?branch=master)
+<a href="https://scan.coverity.com/projects/carbondata">
+  <img alt="Coverity Scan Build Status"
+       src="https://scan.coverity.com/projects/13444/badge.svg"/>
+</a>
+## Features
+CarbonData file format is a columnar store in HDFS, it has many features that a modern columnar format has, such as splittable, compression schema ,complex data type etc, and CarbonData has following unique features:
+* Stores data along with index: it can significantly accelerate query performance and reduces the I/O scans and CPU resources, where there are filters in the query.  CarbonData index consists of multiple level of indices, a processing framework can leverage this index to reduce the task it needs to schedule and process, and it can also do skip scan in more finer grain unit (called blocklet) in task side scanning instead of scanning the whole file. 
+* Operable encoded data :Through supporting efficient compression and global encoding schemes, can query on compressed/encoded data, the data can be converted just before returning the results to the users, which is "late materialized". 
+* Supports for various use cases with one single Data format : like interactive OLAP-style query, Sequential Access (big scan), Random Access (narrow scan). 
 
 ## Building CarbonData
 CarbonData is built using Apache Maven, to [build CarbonData](https://github.com/apache/carbondata/blob/master/build)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java b/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java
index 828dd14..089865b 100644
--- a/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java
+++ b/common/src/main/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppender.java
@@ -50,7 +50,7 @@ public class ExtendedRollingFileAppender extends RollingFileAppender {
    */
 
   private long nextRollover = 0;
-  private boolean cleanupInProgress = false;
+  private volatile boolean cleanupInProgress = false;
 
   /**
    * Total number of files at any point of time should be Backup number of
@@ -195,7 +195,9 @@ public class ExtendedRollingFileAppender extends RollingFileAppender {
     }
 
     // Do clean up finally
-    cleanUpLogs(startName, folderPath);
+    if (!cleanupInProgress) {
+      cleanUpLogs(startName, folderPath);
+    }
   }
 
   private void cleanUpLogs(final String startName, final String folderPath) {
@@ -204,9 +206,6 @@ public class ExtendedRollingFileAppender extends RollingFileAppender {
       Runnable r = new Runnable() {
 
         public void run() {
-          if (cleanupInProgress) {
-            return;
-          }
           synchronized (ExtendedRollingFileAppender.class) {
             cleanupInProgress = true;
             try {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
index 3b915e0..ad1d201 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
@@ -193,6 +193,9 @@ public class ColumnDictionaryInfo extends AbstractColumnDictionaryInfo {
       int mid = (low + high) >>> 1;
       int surrogateKey = sortedSurrogates.get(mid);
       byte[] dictionaryValue = getDictionaryBytesFromSurrogate(surrogateKey);
+      if (null == dictionaryValue) {
+        return CarbonCommonConstants.INVALID_SURROGATE_KEY;
+      }
       int cmp = -1;
       if (this.getDataType() != DataTypes.STRING) {
         cmp = compareFilterKeyWithDictionaryKey(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
deleted file mode 100644
index ef36d7a..0000000
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
+++ /dev/null
@@ -1,387 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.cache.dictionary;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.nio.charset.Charset;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-
-/**
- * A dictionary based on DoubleArrayTrie data structure that maps enumerations
- * of byte[] to int IDs. With DoubleArrayTrie the memory footprint of the mapping
- * is minimize,d if compared to HashMap.
- * This DAT implementation is inspired by https://linux.thai.net/~thep/datrie/datrie.html
- */
-
-public class DoubleArrayTrieDictionary {
-  private static final byte[] HEAD_MAGIC = new byte[]{
-      0x44, 0x41, 0x54, 0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74
-  }; // "DATTrieDict"
-  private static final int HEAD_LEN = HEAD_MAGIC.length;
-
-  private static final int INIT_CAPA_VALUE = 256;  // init len of double array
-  private static final int BASE_ROOT_VALUE = 1;    // root base value of trie root
-  private static final int CHCK_ROOT_VALUE = -1;   // root check value of trie root
-  private static final int UUSD_ROOM_VALUE = -2;   // unused position, only for zero
-  private static final int EPTY_BACK_VALUE = 0;    // value of empty position
-
-  private static final int ENCODE_BASE_VALUE = 10; // encode start number
-
-  private int[] base;
-  private int[] check;
-  private int size;
-  private int capacity;
-
-  private int id = ENCODE_BASE_VALUE;
-
-  public DoubleArrayTrieDictionary() {
-    base = new int[INIT_CAPA_VALUE];
-    check = new int[INIT_CAPA_VALUE];
-    capacity = INIT_CAPA_VALUE;
-    base[0] = UUSD_ROOM_VALUE;
-    check[0] = UUSD_ROOM_VALUE;
-    base[1] = BASE_ROOT_VALUE;
-    check[1] = CHCK_ROOT_VALUE;
-    size = 2;
-  }
-
-  private void init(int capacity, int size, int[] base, int[] check) {
-    int blen = base.length;
-    int clen = check.length;
-    if (capacity < size || size < 0 || blen != clen) {
-      throw new IllegalArgumentException("Illegal init parameters");
-    }
-    this.base = new int[capacity];
-    this.check = new int[capacity];
-    this.capacity = capacity;
-    System.arraycopy(base, 0, this.base, 0, blen);
-    System.arraycopy(check, 0, this.check, 0, clen);
-    this.size = size;
-  }
-
-  public void clear() {
-    base = null;
-    check = null;
-    size = 0;
-    capacity = 0;
-  }
-
-  private int reSize(int newCapacity) {
-    if (newCapacity < capacity) {
-      return capacity;
-    }
-    int[] newBase = new int[newCapacity];
-    int[] newCheck = new int[newCapacity];
-    if (capacity > 0) {
-      System.arraycopy(base, 0, newBase, 0, capacity);
-      System.arraycopy(check, 0, newCheck, 0, capacity);
-    }
-    base = newBase;
-    check = newCheck;
-    capacity = newCapacity;
-    return capacity;
-  }
-
-  public int getSize() {
-    return size;
-  }
-
-  public int getCapacity() {
-    return capacity;
-  }
-
-  /**
-   * Get apply value of key
-   *
-   * @param key
-   * @return
-   */
-  public int getValue(String key) {
-    String k = key + '\0';
-    byte[] bKeys = k.getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
-    return getValue(bKeys);
-  }
-
-  /**
-   * Get apply value of bKeys
-   *
-   * @param bKeys
-   * @return
-   */
-  private int getValue(byte[] bKeys) {
-    int from = 1;
-    int to;
-    int current;
-    int len = bKeys.length;
-    if (size == 0) return -1;
-    for (int i = 0; i < len; i++) {
-      current = bKeys[i] & 0xFF;
-      to = base[from] + current;
-      if (check[to] != from) return -1;
-      int baseValue = base[to];
-      if (baseValue <= -ENCODE_BASE_VALUE) {
-        if (i == len - 1) {
-          return -1 * baseValue;
-        } else {
-          return -1;
-        }
-      }
-      from = to;
-
-    }
-    return -1;
-  }
-
-  /**
-   * Get all children of one node
-   *
-   * @param pos
-   * @return
-   */
-  private TreeSet<Integer> getChildren(int pos) {
-    TreeSet<Integer> children = new TreeSet<Integer>();
-    for (int i = 0; i < 0xFF; i++) {
-      int cpos = base[pos] + i;
-      if (cpos >= size) break;
-      if (cpos < 0) {
-        return null;
-      }
-      if (check[cpos] == pos) {
-        children.add(i);
-      }
-    }
-    return children;
-  }
-
-  /**
-   * @TODO: need to optimize performance
-   *
-   * Find multiple free position for {values}
-   * the distance between free position should be as same as {values}
-   *
-   * @param values
-   * @return
-   */
-  private int findFreeRoom(SortedSet<Integer> values) {
-    int min = values.first();
-    int max = values.last();
-    for (int i = min + 1; i < capacity; i++) {
-      if (i + max >= capacity) {
-        reSize(capacity + values.size());
-      }
-      int res = 0;
-      for (Integer v : values) {
-        res = res | base[v - min + i];
-      }
-      if (res == EPTY_BACK_VALUE) return i - min;
-    }
-    return -1;
-  }
-
-  /**
-   * Find one empty position for value
-   *
-   * @param value
-   * @return
-   */
-  private int findAvailableHop(int value) {
-    reSize(size + 1);
-    int result = size - 1;
-    for (int i = value + 1; i < capacity; i++) {
-      if (base[i] == EPTY_BACK_VALUE) {
-        result = i - value;
-        break;
-      }
-    }
-    return result;
-  }
-
-  /**
-   * Resolve when conflict and reset current node and its children.
-   *
-   * @param start current conflict position
-   * @param bKey current byte value which for processing
-   * @return
-   */
-  private int conflict(int start, int bKey) {
-    int from = start;
-    TreeSet<Integer> children = getChildren(from);
-    children.add(bKey);
-    int newBasePos = findFreeRoom(children);
-    children.remove(bKey);
-
-    int oldBasePos = base[start];
-    base[start] = newBasePos;
-
-    int oldPos, newPos;
-    for (Integer child : children) {
-      oldPos = oldBasePos + child;
-      newPos = newBasePos + child;
-      if (oldPos == from) from = newPos;
-      base[newPos] = base[oldPos];
-      check[newPos] = check[oldPos];
-      if (newPos >= size) size = newPos + 1;
-      if (base[oldPos] > 0) {
-        TreeSet<Integer> cs = getChildren(oldPos);
-        for (Integer c : cs) {
-          check[base[oldPos] + c] = newPos;
-        }
-      }
-      base[oldPos] = EPTY_BACK_VALUE;
-      check[oldPos] = EPTY_BACK_VALUE;
-    }
-    return from;
-  }
-
-  /**
-   * Insert element (byte[]) into DAT.
-   * 1. if the element has been DAT then return.
-   * 2. if position which is empty then insert directly.
-   * 3. if conflict then resolve it.
-   *
-   * @param bKeys
-   * @return
-   */
-  private boolean insert(byte[] bKeys) {
-    int from = 1;
-    int klen = bKeys.length;
-    for (int i = 0; i < klen; i++) {
-      int c = bKeys[i] & 0xFF;
-      int to = base[from] + c;
-      reSize((int) (to * 1.2) + 1);
-      if (check[to] == from) {
-        if (i == klen - 1) return true;
-        from = to;
-      } else if (check[to] == EPTY_BACK_VALUE) {
-        check[to] = from;
-        if (i == klen - 1) {
-          base[to] = -id;
-          id = id + 1;
-          return true;
-        } else {
-          int next = bKeys[i + 1] & 0xFF;
-          base[to] = findAvailableHop(next);
-          from = to;
-        }
-        if (to >= size) size = to + 1;
-      } else {
-        int rConflict = conflict(from, c);
-        int locate = base[rConflict] + c;
-        if (check[locate] != EPTY_BACK_VALUE) {
-          System.err.println("conflict");
-        }
-        check[locate] = rConflict;
-        if (i == klen - 1) {
-          base[locate] = -id;
-          id = id + 1;
-        } else {
-          int nah = bKeys[i + 1] & 0xFF;
-          base[locate] = findAvailableHop(nah);
-        }
-        if (locate >= size) size = locate + 1;
-        from = locate;
-        if (i == klen - 1) {
-          return true;
-        }
-      }
-    }
-    return false;
-  }
-
-  /**
-   * Insert element (String) into DAT, the element will be transformed to
-   * byte[] firstly then insert into DAT.
-   *
-   * @param key
-   * @return
-   */
-  public boolean insert(String key) {
-    String k = key + '\0';
-    byte[] bKeys = k.getBytes(Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
-    if (!insert(bKeys)) {
-      return false;
-    }
-    return true;
-  }
-
-  /**
-   * Serialize the DAT to data output stream
-   *
-   * @param out
-   * @throws IOException
-   */
-  public void write(DataOutputStream out) throws IOException {
-    out.write(HEAD_MAGIC);
-    out.writeInt(capacity);
-    out.writeInt(size);
-    for (int i = 0; i < size; i++) {
-      out.writeInt(base[i]);
-    }
-    for (int i = 0; i < size; i++) {
-      out.writeInt(check[i]);
-    }
-  }
-
-  /**
-   * Deserialize the DAT from data input stream
-   *
-   * @param in
-   * @throws IOException
-   */
-  public void read(DataInputStream in) throws IOException {
-    byte[] header = new byte[HEAD_LEN];
-    in.readFully(header);
-    int comp = 0;
-    for (int i = 0; i < HEAD_LEN; i++) {
-      comp = HEAD_MAGIC[i] - header[i];
-      if (comp != 0) break;
-    }
-    if (comp != 0) throw new IllegalArgumentException("Illegal file type");
-    int capacity = in.readInt();
-    int size = in.readInt();
-    if (capacity < size || size < 0) throw new IllegalArgumentException("Illegal parameters");
-    int[] base = new int[size];
-    int[] check = new int[size];
-    for (int i = 0; i < size; i++) {
-      base[i] = in.readInt();
-    }
-    for (int i = 0; i < size; i++) {
-      check[i] = in.readInt();
-    }
-    init(capacity, size, base, check);
-  }
-
-  /**
-   * Dump double array value about Trie
-   */
-  public void dump(PrintStream out) {
-    out.println("Capacity = " + capacity + ", Size = " + size);
-    for (int i = 0; i < size; i++) {
-      if (base[i] != EPTY_BACK_VALUE) {
-        out.print(i + ":[" + base[i] + "," + check[i] + "], ");
-      }
-    }
-    out.println();
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
index f43e21b..55a1c05 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
@@ -19,10 +19,10 @@ package org.apache.carbondata.core.cache.dictionary;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
@@ -51,7 +51,7 @@ public class ForwardDictionaryCache<K extends
       LogServiceFactory.getLogService(ForwardDictionaryCache.class.getName());
 
   private static final Map<DictionaryColumnUniqueIdentifier, Object> DICTIONARY_LOCK_OBJECT =
-      new HashMap<>();
+      new ConcurrentHashMap<>();
 
   private static final long sizeOfEmptyDictChunks =
       ObjectSizeCalculator.estimate(new ArrayList<byte[]>(CarbonUtil.getDictionaryChunkSize()), 16);
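
Switching DICTIONARY_LOCK_OBJECT to a ConcurrentHashMap makes concurrent lookups of the
per-column lock object safe. A small sketch of the general pattern, with a plain String key
standing in for DictionaryColumnUniqueIdentifier (computeIfAbsent is shown as the idiom; it is
not necessarily the cache's exact code):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class LockObjectSketch {
      // per-key lock objects; ConcurrentHashMap keeps concurrent get/compute safe
      private static final Map<String, Object> LOCKS = new ConcurrentHashMap<>();

      static Object lockFor(String columnIdentifier) {
        // computeIfAbsent guarantees a single lock object per key under contention;
        // with a plain HashMap two threads could each install their own lock and then
        // load the same dictionary twice
        return LOCKS.computeIfAbsent(columnIdentifier, k -> new Object());
      }

      public static void main(String[] args) {
        Object first = lockFor("col1");
        Object second = lockFor("col1");
        System.out.println(first == second); // true: both callers synchronize on the same object
      }
    }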

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index f3a821b..5ba1fec 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -1003,7 +1003,7 @@ public final class CarbonCommonConstants {
   /**
    * Indicates alter partition
    */
-  public static String ALTER_PARTITION_KEY_WORD = "ALTER_PARTITION";
+  public static final String ALTER_PARTITION_KEY_WORD = "ALTER_PARTITION";
 
   /**
    * hdfs temporary directory key

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java b/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
index bdbf9fc..7d1cb48 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/AbstractDataMapJob.java
@@ -17,12 +17,8 @@
 
 package org.apache.carbondata.core.datamap;
 
-import java.util.List;
-
 import org.apache.carbondata.core.indexstore.BlockletDataMapIndexWrapper;
-import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
 
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 
@@ -34,9 +30,4 @@ public abstract class AbstractDataMapJob implements DataMapJob {
   @Override public void execute(CarbonTable carbonTable,
       FileInputFormat<Void, BlockletDataMapIndexWrapper> format) {
   }
-
-  @Override public List<ExtendedBlocklet> execute(DistributableDataMapFormat dataMapFormat,
-      FilterResolverIntf resolverIntf) {
-    return null;
-  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java
index 4d1c718..cf5dffd 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapChooser.java
@@ -283,8 +283,8 @@ public class DataMapChooser {
       List<ColumnExpression> columnExpressions, Set<ExpressionType> expressionTypes) {
     List<DataMapTuple> tuples = new ArrayList<>();
     for (TableDataMap dataMap : allDataMap) {
-      if (contains(dataMap.getDataMapFactory().getMeta(), columnExpressions, expressionTypes))
-      {
+      if (null != dataMap.getDataMapFactory().getMeta() && contains(
+          dataMap.getDataMapFactory().getMeta(), columnExpressions, expressionTypes)) {
         tuples.add(
             new DataMapTuple(dataMap.getDataMapFactory().getMeta().getIndexedColumns().size(),
                 dataMap));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
index c739dc3..1359e85 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
@@ -207,8 +207,8 @@ public final class DataMapStoreManager {
    * @param providerName
    * @return
    */
-  public DataMapCatalog getDataMapCatalog(DataMapProvider dataMapProvider, String providerName)
-      throws IOException {
+  public synchronized DataMapCatalog getDataMapCatalog(DataMapProvider dataMapProvider,
+      String providerName) throws IOException {
     intializeDataMapCatalogs(dataMapProvider);
     return dataMapCatalogs.get(providerName);
   }
@@ -225,6 +225,9 @@ public final class DataMapStoreManager {
         DataMapCatalog dataMapCatalog = dataMapCatalogs.get(schema.getProviderName());
         if (dataMapCatalog == null) {
           dataMapCatalog = dataMapProvider.createDataMapCatalog();
+          if (null == dataMapCatalog) {
+            throw new RuntimeException("Internal Error.");
+          }
           dataMapCatalogs.put(schema.getProviderName(), dataMapCatalog);
         }
         try {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java
index bd5f994..ebcf972 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/BlockletSerializer.java
@@ -41,8 +41,11 @@ public class BlockletSerializer {
       throws IOException {
     DataOutputStream dataOutputStream =
         FileFactory.getDataOutputStream(writePath, FileFactory.getFileType(writePath));
-    grainBlocklet.write(dataOutputStream);
-    dataOutputStream.close();
+    try {
+      grainBlocklet.write(dataOutputStream);
+    } finally {
+      dataOutputStream.close();
+    }
   }
 
   /**
@@ -55,8 +58,11 @@ public class BlockletSerializer {
     DataInputStream inputStream =
         FileFactory.getDataInputStream(writePath, FileFactory.getFileType(writePath));
     FineGrainBlocklet blocklet = new FineGrainBlocklet();
-    blocklet.readFields(inputStream);
-    inputStream.close();
+    try {
+      blocklet.readFields(inputStream);
+    } finally {
+      inputStream.close();
+    }
     return blocklet;
   }
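
Wrapping the write and read in try/finally guarantees the stream is closed even when
serialization throws. With plain java.io streams the same guarantee reads naturally as
try-with-resources; a self-contained sketch (FileOutputStream replaces FileFactory only to
keep the example runnable):

    import java.io.DataOutputStream;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;

    public class SerializeSketch {
      // close the stream even if write() throws; equivalent to the explicit try/finally above
      static void write(File target, byte[] payload) throws IOException {
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream(target))) {
          out.write(payload);
        }
      }

      public static void main(String[] args) throws IOException {
        File tmp = File.createTempFile("blocklet-sketch", ".bin");
        write(tmp, new byte[] {1, 2, 3});
        System.out.println(tmp.length()); // 3
      }
    }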
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
index 91e55dc..d400952 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/AbstractDimensionColumnPage.java
@@ -29,6 +29,14 @@ public abstract class AbstractDimensionColumnPage implements DimensionColumnPage
    */
   DimensionDataChunkStore dataChunkStore;
 
+
+  /**
+   * @return whether data is explicitly sorted or not
+   */
+  protected boolean isExplicitSorted(int[] invertedIndex) {
+    return (null == invertedIndex || 0 == invertedIndex.length) ? false : true;
+  }
+
   /**
   * @return whether columns were explicitly sorted or not
    */

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionColumnPage.java
deleted file mode 100644
index 741c13d..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionColumnPage.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.datastore.chunk.impl;
-
-import org.apache.carbondata.core.datastore.chunk.store.DimensionChunkStoreFactory;
-import org.apache.carbondata.core.datastore.chunk.store.DimensionChunkStoreFactory.DimensionStoreType;
-import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
-
-/**
- * This class is gives access to column group dimension data chunk store
- */
-public class ColumnGroupDimensionColumnPage extends AbstractDimensionColumnPage {
-
-  /**
-   * Constructor for this class
-   *
-   * @param dataChunk       data chunk
-   * @param columnValueSize chunk attributes
-   * @param numberOfRows
-   */
-  public ColumnGroupDimensionColumnPage(byte[] dataChunk, int columnValueSize, int numberOfRows) {
-    this.dataChunkStore = DimensionChunkStoreFactory.INSTANCE
-        .getDimensionChunkStore(columnValueSize, false, numberOfRows, dataChunk.length,
-        DimensionStoreType.FIXEDLENGTH);
-    this.dataChunkStore.putArray(null, null, dataChunk);
-  }
-
-  /**
-   * Below method will be used to fill the data based on offset and row id
-   *
-   * @param rowId             row id of the chunk
-   * @param offset            offset from which data need to be filed
-   * @param data              data to filed
-   * @param restructuringInfo define the structure of the key
-   * @return how many bytes was copied
-   */
-  @Override public int fillRawData(int rowId, int offset, byte[] data,
-      KeyStructureInfo restructuringInfo) {
-    byte[] row = dataChunkStore.getRow(rowId);
-    byte[] maskedKey = getMaskedKey(row, restructuringInfo);
-    System.arraycopy(maskedKey, 0, data, offset, maskedKey.length);
-    return maskedKey.length;
-  }
-
-  /**
-   * Converts to column dictionary integer value
-   *
-   * @param rowId
-   * @param chunkIndex
-   * @param outputSurrogateKey
-   * @param info          KeyStructureInfo
-   * @return
-   */
-  @Override public int fillSurrogateKey(int rowId, int chunkIndex, int[] outputSurrogateKey,
-      KeyStructureInfo info) {
-    byte[] data = dataChunkStore.getRow(rowId);
-    long[] keyArray = info.getKeyGenerator().getKeyArray(data);
-    int[] ordinal = info.getMdkeyQueryDimensionOrdinal();
-    for (int i = 0; i < ordinal.length; i++) {
-      outputSurrogateKey[chunkIndex++] = (int) keyArray[ordinal[i]];
-    }
-    return chunkIndex;
-  }
-
-  /**
-   * Below method will be used to get the masked key
-   *
-   * @param data   data
-   * @param info
-   * @return
-   */
-  private byte[] getMaskedKey(byte[] data, KeyStructureInfo info) {
-    byte[] maskedKey = new byte[info.getMaskByteRanges().length];
-    int counter = 0;
-    int byteRange = 0;
-    for (int i = 0; i < info.getMaskByteRanges().length; i++) {
-      byteRange = info.getMaskByteRanges()[i];
-      maskedKey[counter++] = (byte) (data[byteRange] & info.getMaxKey()[byteRange]);
-    }
-    return maskedKey;
-  }
-
-  /**
-   * @return inverted index
-   */
-  @Override public int getInvertedIndex(int rowId) {
-    throw new UnsupportedOperationException("Operation not supported in case of cloumn group");
-  }
-
-  /**
-   * @param rowId
-   * @return inverted index reverse
-   */
-  @Override public int getInvertedReverseIndex(int rowId) {
-    throw new UnsupportedOperationException("Operation not supported in case of cloumn group");
-  }
-
-  /**
-   * @return whether columns where explictly sorted or not
-   */
-  @Override public boolean isExplicitSorted() {
-    return false;
-  }
-
-  /**
-   * to compare the data
-   *
-   * @param rowId        row index to be compared
-   * @param compareValue value to compare
-   * @return compare result
-   */
-  @Override public int compareTo(int rowId, byte[] compareValue) {
-    throw new UnsupportedOperationException("Operation not supported in case of cloumn group");
-  }
-
-  /**
-   * Fill the data to vector
-   *
-   * @param vectorInfo
-   * @param chunkIndex
-   * @param restructuringInfo
-   * @return next column index
-   */
-  @Override public int fillVector(ColumnVectorInfo[] vectorInfo, int chunkIndex,
-      KeyStructureInfo restructuringInfo) {
-    ColumnVectorInfo columnVectorInfo = vectorInfo[chunkIndex];
-    int offset = columnVectorInfo.offset;
-    int vectorOffset = columnVectorInfo.vectorOffset;
-    int len = offset + columnVectorInfo.size;
-    int[] ordinal = restructuringInfo.getMdkeyQueryDimensionOrdinal();
-    for (int k = offset; k < len; k++) {
-      long[] keyArray = restructuringInfo.getKeyGenerator().getKeyArray(dataChunkStore.getRow(k));
-      int index = 0;
-      for (int i = chunkIndex; i < chunkIndex + ordinal.length; i++) {
-        if (vectorInfo[i].directDictionaryGenerator == null) {
-          vectorInfo[i].vector.putInt(vectorOffset, (int) keyArray[ordinal[index++]]);
-        } else {
-          vectorInfo[i].vector.putLong(vectorOffset, (long) vectorInfo[i].directDictionaryGenerator
-              .getValueFromSurrogate((int) keyArray[ordinal[index++]]));
-        }
-      }
-      vectorOffset++;
-    }
-    return chunkIndex + ordinal.length;
-  }
-
-  /**
-   * Fill the data to vector
-   *
-   * @param filteredRowId
-   * @param vectorInfo
-   * @param chunkIndex
-   * @param restructuringInfo
-   * @return next column index
-   */
-  @Override public int fillVector(int[] filteredRowId, ColumnVectorInfo[] vectorInfo,
-      int chunkIndex, KeyStructureInfo restructuringInfo) {
-    ColumnVectorInfo columnVectorInfo = vectorInfo[chunkIndex];
-    int offset = columnVectorInfo.offset;
-    int vectorOffset = columnVectorInfo.vectorOffset;
-    int len = offset + columnVectorInfo.size;
-    int[] ordinal = restructuringInfo.getMdkeyQueryDimensionOrdinal();
-    for (int k = offset; k < len; k++) {
-      long[] keyArray =
-          restructuringInfo.getKeyGenerator().getKeyArray(dataChunkStore.getRow(filteredRowId[k]));
-      int index = 0;
-      for (int i = chunkIndex; i < chunkIndex + ordinal.length; i++) {
-        if (vectorInfo[i].directDictionaryGenerator == null) {
-          vectorInfo[i].vector.putInt(vectorOffset, (int) keyArray[ordinal[index++]]);
-        } else {
-          vectorInfo[i].vector.putLong(vectorOffset, (long) vectorInfo[i].directDictionaryGenerator
-              .getValueFromSurrogate((int) keyArray[ordinal[index++]]));
-        }
-      }
-      vectorOffset++;
-    }
-    return chunkIndex + ordinal.length;
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java
index ff54b12..76bcf30 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionColumnPage.java
@@ -41,11 +41,12 @@ public class FixedLengthDimensionColumnPage extends AbstractDimensionColumnPage
    */
   public FixedLengthDimensionColumnPage(byte[] dataChunk, int[] invertedIndex,
       int[] invertedIndexReverse, int numberOfRows, int columnValueSize) {
-    long totalSize = null != invertedIndex ?
+    boolean isExplicitSorted = isExplicitSorted(invertedIndex);
+    long totalSize = isExplicitSorted ?
         dataChunk.length + (2 * numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE) :
         dataChunk.length;
     dataChunkStore = DimensionChunkStoreFactory.INSTANCE
-        .getDimensionChunkStore(columnValueSize, null != invertedIndex, numberOfRows, totalSize,
+        .getDimensionChunkStore(columnValueSize, isExplicitSorted, numberOfRows, totalSize,
             DimensionStoreType.FIXEDLENGTH);
     dataChunkStore.putArray(invertedIndex, invertedIndexReverse, dataChunk);
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
index d03b2de..1c6b7f4 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionColumnPage.java
@@ -37,12 +37,13 @@ public class VariableLengthDimensionColumnPage extends AbstractDimensionColumnPa
    */
   public VariableLengthDimensionColumnPage(byte[] dataChunks, int[] invertedIndex,
       int[] invertedIndexReverse, int numberOfRows) {
-    long totalSize = null != invertedIndex ?
+    boolean isExplicitSorted = isExplicitSorted(invertedIndex);
+    long totalSize = isExplicitSorted ?
         (dataChunks.length + (2 * numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE) + (
             numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE)) :
         (dataChunks.length + (numberOfRows * CarbonCommonConstants.INT_SIZE_IN_BYTE));
     dataChunkStore = DimensionChunkStoreFactory.INSTANCE
-        .getDimensionChunkStore(0, null != invertedIndex, numberOfRows, totalSize,
+        .getDimensionChunkStore(0, isExplicitSorted, numberOfRows, totalSize,
             DimensionStoreType.VARIABLELENGTH);
     dataChunkStore.putArray(invertedIndex, invertedIndexReverse, dataChunks);
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
index 0dc1c1b..6679402 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
@@ -22,7 +22,6 @@ import java.util.List;
 
 import org.apache.carbondata.core.datastore.FileReader;
 import org.apache.carbondata.core.datastore.chunk.DimensionColumnPage;
-import org.apache.carbondata.core.datastore.chunk.impl.ColumnGroupDimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
 import org.apache.carbondata.core.datastore.chunk.impl.FixedLengthDimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.VariableLengthDimensionColumnPage;
@@ -102,8 +101,8 @@ public class CompressedDimensionChunkFileBasedReaderV1 extends AbstractChunkRead
       DimensionRawColumnChunk dimensionRawColumnChunk, int pageNumber) throws IOException {
     int blockIndex = dimensionRawColumnChunk.getColumnIndex();
     byte[] dataPage = null;
-    int[] invertedIndexes = null;
-    int[] invertedIndexesReverse = null;
+    int[] invertedIndexes = new int[0];
+    int[] invertedIndexesReverse = new int[0];
     int[] rlePage = null;
     FileReader fileReader = dimensionRawColumnChunk.getFileReader();
 
@@ -146,14 +145,9 @@ public class CompressedDimensionChunkFileBasedReaderV1 extends AbstractChunkRead
     }
     // fill chunk attributes
     DimensionColumnPage columnDataChunk = null;
-    if (dataChunk.isRowMajor()) {
-      // to store fixed length column chunk values
-      columnDataChunk = new ColumnGroupDimensionColumnPage(
-          dataPage, eachColumnValueSize[blockIndex], numberOfRows);
-    }
     // if no dictionary column then first create a no dictionary column chunk
     // and set to data chunk instance
-    else if (!CarbonUtil
+    if (!CarbonUtil
         .hasEncoding(dataChunk.getEncodingList(), Encoding.DICTIONARY)) {
       columnDataChunk =
           new VariableLengthDimensionColumnPage(dataPage, invertedIndexes, invertedIndexesReverse,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
index 31fa819..8938260 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
@@ -21,7 +21,6 @@ import java.nio.ByteBuffer;
 
 import org.apache.carbondata.core.datastore.FileReader;
 import org.apache.carbondata.core.datastore.chunk.DimensionColumnPage;
-import org.apache.carbondata.core.datastore.chunk.impl.ColumnGroupDimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
 import org.apache.carbondata.core.datastore.chunk.impl.FixedLengthDimensionColumnPage;
 import org.apache.carbondata.core.datastore.chunk.impl.VariableLengthDimensionColumnPage;
@@ -118,8 +117,8 @@ public class CompressedDimensionChunkFileBasedReaderV2 extends AbstractChunkRead
   public DimensionColumnPage decodeColumnPage(
       DimensionRawColumnChunk dimensionRawColumnChunk, int pageNumber) throws IOException {
     byte[] dataPage = null;
-    int[] invertedIndexes = null;
-    int[] invertedIndexesReverse = null;
+    int[] invertedIndexes = new int[0];
+    int[] invertedIndexesReverse = new int[0];
     int[] rlePage = null;
     DataChunk2 dimensionColumnChunk = null;
     int copySourcePoint = (int) dimensionRawColumnChunk.getOffSet();
@@ -171,14 +170,9 @@ public class CompressedDimensionChunkFileBasedReaderV2 extends AbstractChunkRead
     // fill chunk attributes
     DimensionColumnPage columnDataChunk = null;
 
-    if (dimensionColumnChunk.isRowMajor()) {
-      // to store fixed length column chunk values
-      columnDataChunk = new ColumnGroupDimensionColumnPage(
-          dataPage, eachColumnValueSize[blockIndex], numberOfRows);
-    }
     // if no dictionary column then first create a no dictionary column chunk
     // and set to data chunk instance
-    else if (!hasEncoding(dimensionColumnChunk.encoders, Encoding.DICTIONARY)) {
+    if (!hasEncoding(dimensionColumnChunk.encoders, Encoding.DICTIONARY)) {
       columnDataChunk =
           new VariableLengthDimensionColumnPage(dataPage, invertedIndexes, invertedIndexesReverse,
               numberOfRows);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
index 0fdc515..58a9b18 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
@@ -244,8 +244,8 @@ public class CompressedDimensionChunkFileBasedReaderV3 extends AbstractChunkRead
       ByteBuffer pageData, DataChunk2 pageMetadata, int offset) {
     byte[] dataPage;
     int[] rlePage;
-    int[] invertedIndexes = null;
-    int[] invertedIndexesReverse = null;
+    int[] invertedIndexes = new int[0];
+    int[] invertedIndexesReverse = new int[0];
     dataPage = COMPRESSOR.unCompressByte(pageData.array(), offset, pageMetadata.data_page_length);
     offset += pageMetadata.data_page_length;
     // if row id block is present then read the row id chunk and uncompress it

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
index a3ed339..7d59d47 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/AbstractMeasureChunkReaderV2V3Format.java
@@ -103,11 +103,15 @@ public abstract class AbstractMeasureChunkReaderV2V3Format extends AbstractMeasu
    * @param presentMetadataThrift
    * @return wrapper presence meta
    */
-  protected BitSet getNullBitSet(
-      org.apache.carbondata.format.PresenceMeta presentMetadataThrift) {
+  protected BitSet getNullBitSet(org.apache.carbondata.format.PresenceMeta presentMetadataThrift) {
     Compressor compressor = CompressorFactory.getInstance().getCompressor();
-    return BitSet.valueOf(
-        compressor.unCompressByte(presentMetadataThrift.getPresent_bit_stream()));
+    final byte[] present_bit_stream = presentMetadataThrift.getPresent_bit_stream();
+    if (null != present_bit_stream) {
+      return BitSet
+          .valueOf(compressor.unCompressByte(present_bit_stream));
+    } else {
+      return new BitSet(1);
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java b/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java
index a32651a..5c2a5fb 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/compression/Compressor.java
@@ -33,32 +33,22 @@ public interface Compressor {
 
   byte[] compressShort(short[] unCompInput);
 
-  short[] unCompressShort(byte[] compInput);
-
   short[] unCompressShort(byte[] compInput, int offset, int lenght);
 
   byte[] compressInt(int[] unCompInput);
 
-  int[] unCompressInt(byte[] compInput);
-
   int[] unCompressInt(byte[] compInput, int offset, int length);
 
   byte[] compressLong(long[] unCompInput);
 
-  long[] unCompressLong(byte[] compInput);
-
   long[] unCompressLong(byte[] compInput, int offset, int length);
 
   byte[] compressFloat(float[] unCompInput);
 
-  float[] unCompressFloat(byte[] compInput);
-
   float[] unCompressFloat(byte[] compInput, int offset, int length);
 
   byte[] compressDouble(double[] unCompInput);
 
-  double[] unCompressDouble(byte[] compInput);
-
   double[] unCompressDouble(byte[] compInput, int offset, int length);
 
   long rawCompress(long inputAddress, int inputSize, long outputAddress) throws IOException;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java b/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
index f234f80..65244d2 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/compression/SnappyCompressor.java
@@ -59,7 +59,7 @@ public class SnappyCompressor implements Compressor {
       return Snappy.rawCompress(unCompInput, unCompInput.length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
+      throw new RuntimeException(e);
     }
   }
 
@@ -68,7 +68,7 @@ public class SnappyCompressor implements Compressor {
       return Snappy.rawCompress(unCompInput, byteSize);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
+      throw new RuntimeException(e);
     }
   }
 
@@ -77,19 +77,20 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompress(compInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return compInput;
   }
 
   @Override public byte[] unCompressByte(byte[] compInput, int offset, int length) {
     int uncompressedLength = 0;
-    byte[] data = null;
+    byte[] data;
     try {
       uncompressedLength = Snappy.uncompressedLength(compInput, offset, length);
       data = new byte[uncompressedLength];
       Snappy.uncompress(compInput, offset, length, data, 0);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
     return data;
   }
@@ -99,17 +100,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public short[] unCompressShort(byte[] compInput) {
-    try {
-      return Snappy.uncompressShortArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public short[] unCompressShort(byte[] compInput, int offset, int lenght) {
@@ -117,8 +109,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompressShortArray(compInput, offset, lenght);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public byte[] compressInt(int[] unCompInput) {
@@ -126,17 +118,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public int[] unCompressInt(byte[] compInput) {
-    try {
-      return Snappy.uncompressIntArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public int[] unCompressInt(byte[] compInput, int offset, int length) {
@@ -144,8 +127,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompressIntArray(compInput, offset, length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public byte[] compressLong(long[] unCompInput) {
@@ -153,17 +136,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public long[] unCompressLong(byte[] compInput) {
-    try {
-      return Snappy.uncompressLongArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public long[] unCompressLong(byte[] compInput, int offset, int length) {
@@ -171,8 +145,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompressLongArray(compInput, offset, length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public byte[] compressFloat(float[] unCompInput) {
@@ -180,17 +154,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public float[] unCompressFloat(byte[] compInput) {
-    try {
-      return Snappy.uncompressFloatArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public float[] unCompressFloat(byte[] compInput, int offset, int length) {
@@ -198,8 +163,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.uncompressFloatArray(compInput, offset, length);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public byte[] compressDouble(double[] unCompInput) {
@@ -207,17 +172,8 @@ public class SnappyCompressor implements Compressor {
       return Snappy.compress(unCompInput);
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
-      return null;
-    }
-  }
-
-  @Override public double[] unCompressDouble(byte[] compInput) {
-    try {
-      return Snappy.uncompressDoubleArray(compInput);
-    } catch (IOException e) {
-      LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override public double[] unCompressDouble(byte[] compInput, int offset, int length) {
@@ -228,8 +184,8 @@ public class SnappyCompressor implements Compressor {
       return result;
     } catch (IOException e) {
       LOGGER.error(e, e.getMessage());
+      throw new RuntimeException(e);
     }
-    return null;
   }
 
   @Override
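
Throwing instead of returning null makes a failed Snappy call surface at the point of the
error rather than later as a NullPointerException far from the failed decompression. A small
illustrative sketch of that fail-fast wrapping (doDecompress is a stand-in, not the Snappy API):

    import java.io.IOException;

    public class FailFastSketch {
      static byte[] decompress(byte[] input) {
        try {
          return doDecompress(input);
        } catch (IOException e) {
          // rethrow instead of returning null: the caller fails with the real cause attached
          throw new RuntimeException(e);
        }
      }

      // stand-in for a codec call that may fail
      private static byte[] doDecompress(byte[] input) throws IOException {
        if (input == null) {
          throw new IOException("corrupt page");
        }
        return input;
      }

      public static void main(String[] args) {
        System.out.println(decompress(new byte[] {42}).length); // 1
      }
    }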

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
index 7255237..05f96c5 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/filesystem/AbstractDFSCarbonFile.java
@@ -274,9 +274,9 @@ public abstract class AbstractDFSCarbonFile implements CarbonFile {
           int count = dataInputStream.available();
           // create buffer
           byte[] byteStreamBuffer = new byte[count];
-          dataInputStream.read(byteStreamBuffer);
+          int bytesRead = dataInputStream.read(byteStreamBuffer);
           stream = fileSystem.create(pt, true, bufferSize);
-          stream.write(byteStreamBuffer);
+          stream.write(byteStreamBuffer, 0, bytesRead);
         } else {
           stream = fileSystem.append(pt, bufferSize);
         }
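
InputStream.read(byte[]) may fill less than the whole buffer, so only the returned count
should be copied out. A short sketch of the idea using in-memory streams (the real method
copies between HDFS streams):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class PartialReadSketch {
      // copy at most count bytes, writing only what read() actually returned
      static byte[] copyOnce(InputStream in, int count) throws IOException {
        byte[] buffer = new byte[count];
        int bytesRead = in.read(buffer);   // may be < count, or -1 at end of stream
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        if (bytesRead > 0) {
          out.write(buffer, 0, bytesRead); // never write the unfilled tail of the buffer
        }
        return out.toByteArray();
      }

      public static void main(String[] args) throws IOException {
        byte[] copied = copyOnce(new ByteArrayInputStream(new byte[] {1, 2, 3}), 8);
        System.out.println(copied.length); // 3, not 8
      }
    }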

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
index f5a751b..9477dff 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
@@ -202,7 +202,7 @@ public abstract class AbstractBTreeLeafNode implements BTreeNode {
       int[][] columnIndexRange) throws IOException {
     // Not required here: the leaf node implementations that use this class provide their own
     // measure chunk reading
-    return null;
+    throw new UnsupportedOperationException("Unsupported operation");
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java
index 378b51f..1cdefc8 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeDecimalColumnPage.java
@@ -124,13 +124,13 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public void putByte(int rowId, byte value) {
-    long offset = rowId << byteBits;
+    long offset = (long)rowId << byteBits;
     CarbonUnsafe.getUnsafe().putByte(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putShort(int rowId, short value) {
-    long offset = rowId << shortBits;
+    long offset = (long)rowId << shortBits;
     CarbonUnsafe.getUnsafe().putShort(baseAddress, baseOffset + offset, value);
   }
 
@@ -145,13 +145,13 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public void putInt(int rowId, int value) {
-    long offset = rowId << intBits;
+    long offset = (long)rowId << intBits;
     CarbonUnsafe.getUnsafe().putInt(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putLong(int rowId, long value) {
-    long offset = rowId << longBits;
+    long offset = (long)rowId << longBits;
     CarbonUnsafe.getUnsafe().putLong(baseAddress, baseOffset + offset, value);
   }
 
@@ -187,7 +187,7 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public byte getByte(int rowId) {
-    long offset = rowId << byteBits;
+    long offset = (long)rowId << byteBits;
     return CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset);
   }
 
@@ -202,7 +202,7 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public short getShort(int rowId) {
-    long offset = rowId << shortBits;
+    long offset = (long) rowId << shortBits;
     return CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset);
   }
 
@@ -218,13 +218,13 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
 
   @Override
   public int getInt(int rowId) {
-    long offset = rowId << intBits;
+    long offset = (long)rowId << intBits;
     return CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset);
   }
 
   @Override
   public long getLong(int rowId) {
-    long offset = rowId << longBits;
+    long offset = (long) rowId << longBits;
     return CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset);
   }
 
@@ -266,13 +266,13 @@ public class UnsafeDecimalColumnPage extends DecimalColumnPage {
     switch (decimalConverter.getDecimalConverterType()) {
       case DECIMAL_INT:
         for (int i = 0; i < pageSize; i++) {
-          long offset = i << intBits;
+          long offset = (long)i << intBits;
           codec.encode(i, CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset));
         }
         break;
       case DECIMAL_LONG:
         for (int i = 0; i < pageSize; i++) {
-          long offset = i << longBits;
+          long offset = (long)i << longBits;
           codec.encode(i, CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset));
         }
         break;
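
The added (long) casts matter because rowId << intBits (and the other shifts) are otherwise
evaluated in 32-bit int arithmetic, so a large enough row id overflows before the result is
widened to long. A tiny sketch of the arithmetic (the row id is picked only to make the
overflow visible, not a value a page would normally hold):

    public class ShiftOverflowSketch {
      public static void main(String[] args) {
        int rowId = 300_000_000;   // large enough that rowId * 8 exceeds Integer.MAX_VALUE
        int longBits = 3;          // shift used for 8-byte values

        long broken = rowId << longBits;         // shifted as int, wraps around, then widened
        long fixed = (long) rowId << longBits;   // widened first, shifted as long

        System.out.println(broken); // -1894967296
        System.out.println(fixed);  // 2400000000
      }
    }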

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java
index 6847ab9..7965e93 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/UnsafeFixLengthColumnPage.java
@@ -94,13 +94,13 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
 
   @Override
   public void putByte(int rowId, byte value) {
-    long offset = rowId << byteBits;
+    long offset = ((long)rowId) << byteBits;
     CarbonUnsafe.getUnsafe().putByte(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putShort(int rowId, short value) {
-    long offset = rowId << shortBits;
+    long offset = ((long)rowId) << shortBits;
     CarbonUnsafe.getUnsafe().putShort(baseAddress, baseOffset + offset, value);
   }
 
@@ -115,19 +115,19 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
 
   @Override
   public void putInt(int rowId, int value) {
-    long offset = rowId << intBits;
+    long offset = ((long)rowId) << intBits;
     CarbonUnsafe.getUnsafe().putInt(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putLong(int rowId, long value) {
-    long offset = rowId << longBits;
+    long offset = ((long)rowId) << longBits;
     CarbonUnsafe.getUnsafe().putLong(baseAddress, baseOffset + offset, value);
   }
 
   @Override
   public void putDouble(int rowId, double value) {
-    long offset = rowId << doubleBits;
+    long offset = ((long)rowId) << doubleBits;
     CarbonUnsafe.getUnsafe().putDouble(baseAddress, baseOffset + offset, value);
   }
 
@@ -151,13 +151,13 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
 
   @Override
   public byte getByte(int rowId) {
-    long offset = rowId << byteBits;
+    long offset = ((long)rowId) << byteBits;
     return CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset);
   }
 
   @Override
   public short getShort(int rowId) {
-    long offset = rowId << shortBits;
+    long offset = ((long)rowId) << shortBits;
     return CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset);
   }
 
@@ -173,25 +173,25 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
 
   @Override
   public int getInt(int rowId) {
-    long offset = rowId << intBits;
+    long offset = ((long)rowId) << intBits;
     return CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset);
   }
 
   @Override
   public long getLong(int rowId) {
-    long offset = rowId << longBits;
+    long offset = ((long)rowId) << longBits;
     return CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset);
   }
 
   @Override
   public float getFloat(int rowId) {
-    long offset = rowId << floatBits;
+    long offset = ((long)rowId) << floatBits;
     return CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset);
   }
 
   @Override
   public double getDouble(int rowId) {
-    long offset = rowId << doubleBits;
+    long offset = ((long)rowId) << doubleBits;
     return CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset);
   }
 
@@ -219,9 +219,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public byte[] getBytePage() {
     byte[] data = new byte[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << byteBits;
-      data[i] = CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -229,9 +229,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public short[] getShortPage() {
     short[] data = new short[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << shortBits;
-      data[i] = CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -247,9 +247,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public int[] getIntPage() {
     int[] data = new int[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << intBits;
-      data[i] = CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -257,9 +257,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public long[] getLongPage() {
     long[] data = new long[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << longBits;
-      data[i] = CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -267,9 +267,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public float[] getFloatPage() {
     float[] data = new float[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << floatBits;
-      data[i] = CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -277,9 +277,9 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   @Override
   public double[] getDoublePage() {
     double[] data = new double[getPageSize()];
-    for (int i = 0; i < data.length; i++) {
+    for (long i = 0; i < data.length; i++) {
       long offset = i << doubleBits;
-      data[i] = CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset);
+      data[(int)i] = CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset);
     }
     return data;
   }
@@ -363,34 +363,34 @@ public class UnsafeFixLengthColumnPage extends ColumnPage {
   public void convertValue(ColumnPageValueConverter codec) {
     int pageSize = getPageSize();
     if (dataType == DataTypes.BYTE) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << byteBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getByte(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.SHORT) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << shortBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getShort(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.INT) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << intBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getInt(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.LONG) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << longBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getLong(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.FLOAT) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << floatBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getFloat(baseAddress, baseOffset + offset));
       }
     } else if (dataType == DataTypes.DOUBLE) {
-      for (int i = 0; i < pageSize; i++) {
+      for (long i = 0; i < pageSize; i++) {
         long offset = i << doubleBits;
-        codec.encode(i, CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset));
+        codec.encode((int)i, CarbonUnsafe.getUnsafe().getDouble(baseAddress, baseOffset + offset));
       }
     } else {
       throw new UnsupportedOperationException("invalid data type: " + dataType);
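
For readers skimming the UnsafeFixLengthColumnPage hunks above, here is a minimal standalone sketch (not part of the patch) of the int-shift overflow that the added (long) casts guard against. The row id and shift amount below are hypothetical values chosen only to trigger the wrap-around; the fix in the diff is simply to widen to long before shifting.

    public class ShiftOverflowDemo {
      public static void main(String[] args) {
        int rowId = 600_000_000;                  // a large but valid int row id
        int longBits = 3;                         // shift for 8-byte values (1 << 3 = 8 bytes)
        long bad  = rowId << longBits;            // shift happens in int arithmetic and wraps
        long good = ((long) rowId) << longBits;   // widen first, as the patch does
        System.out.println(bad);                  // 505032704  (wrapped, wrong offset)
        System.out.println(good);                 // 4800000000 (correct byte offset)
      }
    }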

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java
index 597def0..318d55d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/encoding/EncodingFactory.java
@@ -118,6 +118,9 @@ public abstract class EncodingFactory {
    * Old way of creating decoder, based on algorithm
    */
   public ColumnPageDecoder createDecoderLegacy(ValueEncoderMeta metadata) {
+    if (null == metadata) {
+      throw new RuntimeException("internal error");
+    }
     SimpleStatsResult stats = PrimitivePageStatsCollector.newInstance(metadata);
     TableSpec.ColumnSpec spec =
         TableSpec.ColumnSpec.newInstanceLegacy("legacy", stats.getDataType(), ColumnType.MEASURE);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
index e6cf29e..22537db 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/KeyPageStatsCollector.java
@@ -73,16 +73,17 @@ public class KeyPageStatsCollector implements ColumnPageStatsCollector {
 
   @Override
   public void update(byte[] value) {
-    if (min == null && max == null) {
+    if (null == min) {
       min = value;
+    }
+    if (null == max) {
+      max = value;
+    }
+    if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, value) > 0) {
+      min = value;
+    }
+    if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, value) < 0) {
       max = value;
-    } else {
-      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, value) > 0) {
-        min = value;
-      }
-      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, value) < 0) {
-        max = value;
-      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
index 23795c5..7958a8d 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/page/statistics/LVStringStatsCollector.java
@@ -84,16 +84,21 @@ public class LVStringStatsCollector implements ColumnPageStatsCollector {
       newValue = new byte[value.length - 2];
       System.arraycopy(value, 2, newValue, 0, newValue.length);
     }
-    if (min == null && max == null) {
+
+    if (null == min) {
       min = newValue;
+    }
+
+    if (null == max) {
+      max = newValue;
+    }
+
+    if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, newValue) > 0) {
+      min = newValue;
+    }
+
+    if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, newValue) < 0) {
       max = newValue;
-    } else {
-      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(min, newValue) > 0) {
-        min = newValue;
-      }
-      if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(max, newValue) < 0) {
-        max = newValue;
-      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
index e0feb04..8a69b80 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
@@ -97,7 +97,9 @@ public class IncrementalColumnDictionaryGenerator implements BiDictionary<Intege
   }
 
   @Override public int size() {
-    return currentDictionarySize;
+    synchronized (lock) {
+      return currentDictionarySize;
+    }
   }
 
   @Override public Integer generateKey(String value) throws DictionaryGenerationException {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
index 5db13b6..7bb8259 100644
--- a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
+++ b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/TableDictionaryGenerator.java
@@ -57,6 +57,9 @@ public class TableDictionaryGenerator
       throws DictionaryGenerationException {
     CarbonDimension dimension = carbonTable.getPrimitiveDimensionByName(value.getColumnName());
 
+    if (null == dimension) {
+      throw new DictionaryGenerationException("Dictionary Generation Failed");
+    }
     DictionaryGenerator<Integer, String> generator =
             columnMap.get(dimension.getColumnId());
     return generator.generateKey(value.getData());
@@ -65,6 +68,9 @@ public class TableDictionaryGenerator
   public Integer size(DictionaryMessage key) {
     CarbonDimension dimension = carbonTable.getPrimitiveDimensionByName(key.getColumnName());
 
+    if (null == dimension) {
+      return 0;
+    }
     DictionaryGenerator<Integer, String> generator =
             columnMap.get(dimension.getColumnId());
     return ((BiDictionary) generator).size();
@@ -91,7 +97,7 @@ public class TableDictionaryGenerator
   public void updateGenerator(DictionaryMessage key) {
     CarbonDimension dimension = carbonTable
         .getPrimitiveDimensionByName(key.getColumnName());
-    if (null == columnMap.get(dimension.getColumnId())) {
+    if (null != dimension && null == columnMap.get(dimension.getColumnId())) {
       synchronized (columnMap) {
         if (null == columnMap.get(dimension.getColumnId())) {
           columnMap.put(dimension.getColumnId(),

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
index 021fb82..0188281 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
@@ -17,11 +17,7 @@
 package org.apache.carbondata.core.indexstore.blockletindex;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 
 import org.apache.carbondata.core.cache.Cache;
 import org.apache.carbondata.core.cache.CacheProvider;
@@ -169,9 +165,11 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
       return false;
     }
     for (int i = 0; i < tableColumnList.size(); i++) {
-      return indexFileColumnList.get(i).equalsWithStrictCheck(tableColumnList.get(i));
+      if (!indexFileColumnList.get(i).equalsWithStrictCheck(tableColumnList.get(i))) {
+        return false;
+      }
     }
-    return false;
+    return true;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
index c6efd77..c2686d0 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
@@ -282,12 +282,15 @@ public class SegmentIndexFileStore {
     DataInputStream dataInputStream =
         FileFactory.getDataInputStream(indexFilePath, FileFactory.getFileType(indexFilePath));
     byte[] bytes = new byte[(int) indexFile.getSize()];
-    dataInputStream.readFully(bytes);
-    carbonIndexMap.put(indexFile.getName(), bytes);
-    carbonIndexMapWithFullPath.put(
-        indexFile.getParentFile().getAbsolutePath() + CarbonCommonConstants.FILE_SEPARATOR
-            + indexFile.getName(), bytes);
-    dataInputStream.close();
+    try {
+      dataInputStream.readFully(bytes);
+      carbonIndexMap.put(indexFile.getName(), bytes);
+      carbonIndexMapWithFullPath.put(
+          indexFile.getParentFile().getAbsolutePath() + CarbonCommonConstants.FILE_SEPARATOR
+              + indexFile.getName(), bytes);
+    } finally {
+      dataInputStream.close();
+    }
   }
 
   private MergedBlockIndexHeader readMergeBlockIndexHeader(ThriftReader thriftReader)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java b/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
index 9a8d3f6..ecdb672 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/ZookeeperInit.java
@@ -54,12 +54,10 @@ public class ZookeeperInit {
 
   public static ZookeeperInit getInstance(String zooKeeperUrl) {
 
-    if (null == zooKeeperInit) {
-      synchronized (ZookeeperInit.class) {
-        if (null == zooKeeperInit) {
-          LOGGER.info("Initiating Zookeeper client.");
-          zooKeeperInit = new ZookeeperInit(zooKeeperUrl);
-        }
+    synchronized (ZookeeperInit.class) {
+      if (null == zooKeeperInit) {
+        LOGGER.info("Initiating Zookeeper client.");
+        zooKeeperInit = new ZookeeperInit(zooKeeperUrl);
       }
     }
     return zooKeeperInit;


[32/50] [abbrv] carbondata git commit: [CARBONDATA-2500] Add new API to read user's schema in SDK

Posted by gv...@apache.org.
[CARBONDATA-2500] Add new API to read user's schema in SDK

The field order in the schema that the SDK returns on read differs from the field order in which the user wrote the data.

This closes #2341


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/8896a633
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/8896a633
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/8896a633

Branch: refs/heads/spark-2.3
Commit: 8896a63342fdc5257665131074b168e24ceb5b72
Parents: 22d5035
Author: xubo245 <xu...@huawei.com>
Authored: Fri May 25 16:07:55 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Tue May 29 15:33:41 2018 +0800

----------------------------------------------------------------------
 .../ThriftWrapperSchemaConverterImpl.java       |  12 +-
 .../core/metadata/schema/table/CarbonTable.java |   6 +-
 .../core/metadata/schema/table/TableInfo.java   |   4 +-
 .../core/metadata/schema/table/TableSchema.java |  14 +-
 .../schema/table/TableSchemaBuilder.java        |   2 +-
 .../core/reader/CarbonHeaderReader.java         |   4 +-
 .../util/AbstractDataFileFooterConverter.java   |   6 +-
 .../apache/carbondata/core/util/CarbonUtil.java |   8 +-
 .../core/util/DataFileFooterConverter.java      |   4 +-
 .../core/util/DataFileFooterConverter2.java     |   2 +-
 .../core/util/DataFileFooterConverterV3.java    |   4 +-
 .../ThriftWrapperSchemaConverterImplTest.java   |   4 +-
 .../hadoop/testutil/StoreCreator.java           |   2 +-
 .../presto/util/CarbonDataStoreCreator.scala    |   2 +-
 .../command/carbonTableSchemaCommon.scala       |   2 +-
 .../org/apache/spark/sql/CarbonSource.scala     |   2 +-
 .../datasources/SparkCarbonFileFormat.scala     |   2 +-
 .../spark/sql/hive/CarbonFileMetastore.scala    |   2 +-
 .../spark/sql/hive/CarbonHiveMetaStore.scala    |   2 +-
 .../carbondata/sdk/file/CarbonReader.java       |  63 +++
 .../carbondata/sdk/file/CarbonReaderTest.java   | 454 ++++++++++++++++++-
 21 files changed, 549 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java b/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
index 644e6a3..f03b997 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImpl.java
@@ -269,7 +269,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
       thriftColumnSchema.add(fromWrapperToExternalColumnSchema(wrapperColumnSchema));
     }
     org.apache.carbondata.format.SchemaEvolution schemaEvolution =
-        fromWrapperToExternalSchemaEvolution(wrapperTableSchema.getSchemaEvalution());
+        fromWrapperToExternalSchemaEvolution(wrapperTableSchema.getSchemaEvolution());
     org.apache.carbondata.format.TableSchema externalTableSchema =
         new org.apache.carbondata.format.TableSchema(
             wrapperTableSchema.getTableId(), thriftColumnSchema, schemaEvolution);
@@ -535,7 +535,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
         externalColumnSchema.getParentColumnTableRelations();
     if (null != parentColumnTableRelation) {
       wrapperColumnSchema.setParentColumnTableRelations(
-          fromExtrenalToWrapperParentTableColumnRelations(parentColumnTableRelation));
+          fromExternalToWrapperParentTableColumnRelations(parentColumnTableRelation));
     }
     return wrapperColumnSchema;
   }
@@ -595,11 +595,11 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
       listOfColumns.add(fromExternalToWrapperColumnSchema(externalColumnSchema));
     }
     wrapperTableSchema.setListOfColumns(listOfColumns);
-    wrapperTableSchema.setSchemaEvalution(
+    wrapperTableSchema.setSchemaEvolution(
         fromExternalToWrapperSchemaEvolution(externalTableSchema.getSchema_evolution()));
     if (externalTableSchema.isSetBucketingInfo()) {
       wrapperTableSchema.setBucketingInfo(
-          fromExternalToWarpperBucketingInfo(externalTableSchema.bucketingInfo));
+          fromExternalToWrapperBucketingInfo(externalTableSchema.bucketingInfo));
     }
     if (externalTableSchema.getPartitionInfo() != null) {
       wrapperTableSchema.setPartitionInfo(
@@ -608,7 +608,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
     return wrapperTableSchema;
   }
 
-  private BucketingInfo fromExternalToWarpperBucketingInfo(
+  private BucketingInfo fromExternalToWrapperBucketingInfo(
       org.apache.carbondata.format.BucketingInfo externalBucketInfo) {
     List<ColumnSchema> listOfColumns = new ArrayList<ColumnSchema>();
     for (org.apache.carbondata.format.ColumnSchema externalColumnSchema :
@@ -661,7 +661,7 @@ public class ThriftWrapperSchemaConverterImpl implements SchemaConverter {
     return childSchema;
   }
 
-  private List<ParentColumnTableRelation> fromExtrenalToWrapperParentTableColumnRelations(
+  private List<ParentColumnTableRelation> fromExternalToWrapperParentTableColumnRelations(
       List<org.apache.carbondata.format.ParentColumnTableRelation> thirftParentColumnRelation) {
     List<ParentColumnTableRelation> parentColumnTableRelationList = new ArrayList<>();
     for (org.apache.carbondata.format.ParentColumnTableRelation carbonTableRelation :

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index b1ed981..ba051be 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -566,15 +566,15 @@ public class CarbonTable implements Serializable {
     List<CarbonDimension> dimensions = tableDimensionsMap.get(tableName);
     List<CarbonMeasure> measures = tableMeasuresMap.get(tableName);
     List<CarbonColumn> columnList = new ArrayList<>(dimensions.size() + measures.size());
-    List<CarbonColumn> complexdimensionList = new ArrayList<>(dimensions.size());
+    List<CarbonColumn> complexDimensionList = new ArrayList<>(dimensions.size());
     for (CarbonColumn column : dimensions) {
       if (column.isComplex()) {
-        complexdimensionList.add(column);
+        complexDimensionList.add(column);
       } else {
         columnList.add(column);
       }
     }
-    columnList.addAll(complexdimensionList);
+    columnList.addAll(complexDimensionList);
     for (CarbonColumn column : measures) {
       if (!(column.getColName().equals("default_dummy_measure"))) {
         columnList.add(column);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
index c7bcf2e..38145e5 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableInfo.java
@@ -124,13 +124,13 @@ public class TableInfo implements Serializable, Writable {
   }
 
   private void updateIsSchemaModified() {
-    if (null != factTable.getSchemaEvalution()) {
+    if (null != factTable.getSchemaEvolution()) {
       // If schema evolution entry list size is > 1 that means an alter operation is performed
       // which has added the new schema entry in the schema evolution list.
       // Currently apart from create table schema evolution entries
       // are getting added only in the alter operations.
       isSchemaModified =
-          factTable.getSchemaEvalution().getSchemaEvolutionEntryList().size() > 1 ? true : false;
+          factTable.getSchemaEvolution().getSchemaEvolutionEntryList().size() > 1 ? true : false;
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
index f008821..3d9e068 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchema.java
@@ -61,7 +61,7 @@ public class TableSchema implements Serializable, Writable {
   /**
    * History of schema evolution of this table
    */
-  private SchemaEvolution schemaEvalution;
+  private SchemaEvolution schemaEvolution;
 
   /**
    * contains all key value pairs for table properties set by user in craete DDL
@@ -112,17 +112,17 @@ public class TableSchema implements Serializable, Writable {
   }
 
   /**
-   * @return the schemaEvalution
+   * @return the schemaEvolution
    */
-  public SchemaEvolution getSchemaEvalution() {
-    return schemaEvalution;
+  public SchemaEvolution getSchemaEvolution() {
+    return schemaEvolution;
   }
 
   /**
-   * @param schemaEvalution the schemaEvalution to set
+   * @param schemaEvolution the schemaEvolution to set
    */
-  public void setSchemaEvalution(SchemaEvolution schemaEvalution) {
-    this.schemaEvalution = schemaEvalution;
+  public void setSchemaEvolution(SchemaEvolution schemaEvolution) {
+    this.schemaEvolution = schemaEvolution;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
index 03d03f8..bb7e901 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/TableSchemaBuilder.java
@@ -90,7 +90,7 @@ public class TableSchemaBuilder {
     schema.setBucketingInfo(null);
     SchemaEvolution schemaEvol = new SchemaEvolution();
     schemaEvol.setSchemaEvolutionEntryList(new ArrayList<SchemaEvolutionEntry>());
-    schema.setSchemaEvalution(schemaEvol);
+    schema.setSchemaEvolution(schemaEvol);
     List<ColumnSchema> allColumns = new LinkedList<>(sortColumns);
     allColumns.addAll(dimension);
     allColumns.addAll(complex);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java b/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java
index 9bbdca9..dfd5815 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/CarbonHeaderReader.java
@@ -23,7 +23,7 @@ import java.util.List;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.format.FileHeader;
 
-import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchmeaToWrapperColumnSchema;
+import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchemaToWrapperColumnSchema;
 
 import org.apache.thrift.TBase;
 
@@ -78,7 +78,7 @@ public class CarbonHeaderReader {
     List<ColumnSchema> columnSchemaList = new ArrayList<>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = fileHeader.getColumn_schema();
     for (org.apache.carbondata.format.ColumnSchema table_column : table_columns) {
-      ColumnSchema col = thriftColumnSchmeaToWrapperColumnSchema(table_column);
+      ColumnSchema col = thriftColumnSchemaToWrapperColumnSchema(table_column);
       col.setColumnReferenceId(col.getColumnUniqueId());
       columnSchemaList.add(col);
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
index e30ad03..f005d88 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
@@ -89,7 +89,7 @@ public abstract class AbstractDataFileFooterConverter {
       List<org.apache.carbondata.format.ColumnSchema> table_columns =
           readIndexHeader.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
       // get the segment info
       SegmentInfo segmentInfo = getSegmentInfo(readIndexHeader.getSegment_info());
@@ -151,7 +151,7 @@ public abstract class AbstractDataFileFooterConverter {
       List<org.apache.carbondata.format.ColumnSchema> table_columns =
           readIndexHeader.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
       // get the segment info
       SegmentInfo segmentInfo = getSegmentInfo(readIndexHeader.getSegment_info());
@@ -284,7 +284,7 @@ public abstract class AbstractDataFileFooterConverter {
     return blockletIndex;
   }
 
-  protected ColumnSchema thriftColumnSchmeaToWrapperColumnSchema(
+  protected ColumnSchema thriftColumnSchemaToWrapperColumnSchema(
       org.apache.carbondata.format.ColumnSchema externalColumnSchema) {
     ColumnSchema wrapperColumnSchema = new ColumnSchema();
     wrapperColumnSchema.setColumnUniqueId(externalColumnSchema.getColumn_id());

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 9ccd772..1526047 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2204,7 +2204,7 @@ public final class CarbonUtil {
     return tableInfo;
   }
 
-  public static ColumnSchema thriftColumnSchmeaToWrapperColumnSchema(
+  public static ColumnSchema thriftColumnSchemaToWrapperColumnSchema(
       org.apache.carbondata.format.ColumnSchema externalColumnSchema) {
     ColumnSchema wrapperColumnSchema = new ColumnSchema();
     wrapperColumnSchema.setColumnUniqueId(externalColumnSchema.getColumn_id());
@@ -2387,7 +2387,7 @@ public final class CarbonUtil {
       List<org.apache.carbondata.format.ColumnSchema> table_columns =
           readIndexHeader.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
       // only columnSchema is the valid entry, reset all dummy entries.
       TableSchema tableSchema = getDummyTableSchema(tableName, columnSchemaList);
@@ -2412,7 +2412,7 @@ public final class CarbonUtil {
     TableSchema tableSchema = new TableSchema();
     tableSchema.setTableName(tableName);
     tableSchema.setBucketingInfo(null);
-    tableSchema.setSchemaEvalution(null);
+    tableSchema.setSchemaEvolution(null);
     tableSchema.setTableId(UUID.randomUUID().toString());
     tableSchema.setListOfColumns(columnSchemaList);
 
@@ -2422,7 +2422,7 @@ public final class CarbonUtil {
     List<SchemaEvolutionEntry> schEntryList = new ArrayList<>();
     schEntryList.add(schemaEvolutionEntry);
     schemaEvol.setSchemaEvolutionEntryList(schEntryList);
-    tableSchema.setSchemaEvalution(schemaEvol);
+    tableSchema.setSchemaEvolution(schemaEvol);
     return tableSchema;
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java
index d665379..670536e 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter.java
@@ -60,7 +60,7 @@ public class DataFileFooterConverter extends AbstractDataFileFooterConverter {
       List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
       List<org.apache.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
       dataFileFooter.setColumnInTable(columnSchemaList);
 
@@ -135,7 +135,7 @@ public class DataFileFooterConverter extends AbstractDataFileFooterConverter {
       FileFooter footer = reader.readFooter();
       List<org.apache.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
       for (int i = 0; i < table_columns.size(); i++) {
-        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
       }
     } finally {
       if (null != fileReader) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java
index 863e1df..07391dc 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverter2.java
@@ -51,7 +51,7 @@ public class DataFileFooterConverter2 extends AbstractDataFileFooterConverter {
     List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
     for (int i = 0; i < table_columns.size(); i++) {
-      columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+      columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
     }
     dataFileFooter.setColumnInTable(columnSchemaList);
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
index 214e217..6a968b4 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataFileFooterConverterV3.java
@@ -61,7 +61,7 @@ public class DataFileFooterConverterV3 extends AbstractDataFileFooterConverter {
     List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = fileHeader.getColumn_schema();
     for (int i = 0; i < table_columns.size(); i++) {
-      columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+      columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
     }
     dataFileFooter.setColumnInTable(columnSchemaList);
     List<org.apache.carbondata.format.BlockletIndex> leaf_node_indices_Thrift =
@@ -91,7 +91,7 @@ public class DataFileFooterConverterV3 extends AbstractDataFileFooterConverter {
     List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
     List<org.apache.carbondata.format.ColumnSchema> table_columns = fileHeader.getColumn_schema();
     for (int i = 0; i < table_columns.size(); i++) {
-      columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+      columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i)));
     }
     return columnSchemaList;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java b/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java
index d4bb344..67c7594 100644
--- a/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/metadata/converter/ThriftWrapperSchemaConverterImplTest.java
@@ -1421,7 +1421,7 @@ public class ThriftWrapperSchemaConverterImplTest {
         return columnSchemas;
       }
 
-      @Mock public SchemaEvolution getSchemaEvalution() {
+      @Mock public SchemaEvolution getSchemaEvolution() {
         return schemaEvolution;
       }
 
@@ -1537,7 +1537,7 @@ public class ThriftWrapperSchemaConverterImplTest {
       final SchemaEvolution schemaEvolution = new SchemaEvolution();
       final Map mapTableProperties = new HashMap<String, String>();
 
-      @Mock public SchemaEvolution getSchemaEvalution() {
+      @Mock public SchemaEvolution getSchemaEvolution() {
         return schemaEvolution;
       }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
index 9fd1812..63acad3 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
@@ -278,7 +278,7 @@ public class StoreCreator {
     tableSchema.setListOfColumns(columnSchemas);
     SchemaEvolution schemaEvol = new SchemaEvolution();
     schemaEvol.setSchemaEvolutionEntryList(new ArrayList<SchemaEvolutionEntry>());
-    tableSchema.setSchemaEvalution(schemaEvol);
+    tableSchema.setSchemaEvolution(schemaEvol);
     tableSchema.setTableId(UUID.randomUUID().toString());
     tableInfo.setTableUniqueName(
         identifier.getCarbonTableIdentifier().getTableUniqueName()

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
----------------------------------------------------------------------
diff --git a/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala b/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
index 6a8c40d..f4415b8 100644
--- a/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
+++ b/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
@@ -310,7 +310,7 @@ object CarbonDataStoreCreator {
     val schemaEvol: SchemaEvolution = new SchemaEvolution()
     schemaEvol.setSchemaEvolutionEntryList(
       new util.ArrayList[SchemaEvolutionEntry]())
-    tableSchema.setSchemaEvalution(schemaEvol)
+    tableSchema.setSchemaEvolution(schemaEvol)
     tableSchema.setTableId(UUID.randomUUID().toString)
     tableInfo.setTableUniqueName(
       absoluteTableIdentifier.getCarbonTableIdentifier.getTableUniqueName

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
index a830185..aa40a1f 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
@@ -694,7 +694,7 @@ class TableNewProcessor(cm: TableModel) {
     }
     tableSchema.setTableName(cm.tableName)
     tableSchema.setListOfColumns(allColumns.asJava)
-    tableSchema.setSchemaEvalution(schemaEvol)
+    tableSchema.setSchemaEvolution(schemaEvol)
     tableInfo.setDatabaseName(cm.databaseNameOp.getOrElse(null))
     tableInfo.setTableUniqueName(CarbonTable.buildUniqueName(cm.databaseNameOp.getOrElse(null),
       cm.tableName))

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
index 8376136..0a23d06 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
@@ -358,7 +358,7 @@ object CarbonSource {
     tableInfo.setDatabaseName(identifier.getDatabaseName)
     val schemaEvolutionEntry = new SchemaEvolutionEntry
     schemaEvolutionEntry.setTimeStamp(tableInfo.getLastUpdatedTime)
-    tableInfo.getFactTable.getSchemaEvalution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
+    tableInfo.getFactTable.getSchemaEvolution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
     val map = if (!metaStore.isReadFromHiveMetaStore && isTransactionalTable) {
       metaStore.saveToDisk(tableInfo, identifier.getTablePath)
       new java.util.HashMap[String, String]()

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
index 1da6507..934f5c7 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
@@ -79,7 +79,7 @@ class SparkCarbonFileFormat extends FileFormat
       .getColumn_schema
     var colArray = ArrayBuffer[StructField]()
     for (i <- 0 to table_columns.size() - 1) {
-      val col = CarbonUtil.thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i))
+      val col = CarbonUtil.thriftColumnSchemaToWrapperColumnSchema(table_columns.get(i))
       colArray += (new StructField(col.getColumnName,
         CarbonScalaUtil.convertCarbonToSparkDataType(col.getDataType), false))
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
index 2d24abf..81a6bed 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonFileMetastore.scala
@@ -369,7 +369,7 @@ class CarbonFileMetastore extends CarbonMetaStore {
       absoluteTableIdentifier: AbsoluteTableIdentifier): String = {
     val schemaEvolutionEntry = new schema.SchemaEvolutionEntry
     schemaEvolutionEntry.setTimeStamp(tableInfo.getLastUpdatedTime)
-    tableInfo.getFactTable.getSchemaEvalution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
+    tableInfo.getFactTable.getSchemaEvolution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
     removeTableFromMetadata(tableInfo.getDatabaseName, tableInfo.getFactTable.getTableName)
     CarbonMetadata.getInstance().loadTableMetadata(tableInfo)
     addTableCache(tableInfo, absoluteTableIdentifier)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
index 1300c22..2e6ebee 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
@@ -186,7 +186,7 @@ class CarbonHiveMetaStore extends CarbonFileMetastore {
       absoluteTableIdentifier: AbsoluteTableIdentifier): String = {
     val schemaEvolutionEntry = new schema.SchemaEvolutionEntry
     schemaEvolutionEntry.setTimeStamp(tableInfo.getLastUpdatedTime)
-    tableInfo.getFactTable.getSchemaEvalution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
+    tableInfo.getFactTable.getSchemaEvolution.getSchemaEvolutionEntryList.add(schemaEvolutionEntry)
     CarbonUtil.convertToMultiGsonStrings(tableInfo, " ", "", ",")
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index d85bf4b..9ae940b 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -17,20 +17,30 @@
 
 package org.apache.carbondata.sdk.file;
 
+import java.io.DataInputStream;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
+import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.metadata.converter.SchemaConverter;
 import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
 import org.apache.carbondata.core.metadata.schema.table.TableInfo;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.reader.CarbonHeaderReader;
+import org.apache.carbondata.core.reader.CarbonIndexFileReader;
 import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
+import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchemaToWrapperColumnSchema;
 
 import org.apache.hadoop.mapreduce.RecordReader;
 
+
 /**
  * Reader for carbondata file
  */
@@ -102,6 +112,59 @@ public class CarbonReader<T> {
   }
 
   /**
+   * Read carbonindex file and return the schema
+   *
+   * @param indexFilePath complete path including index file name
+   * @return null, if the index file is not present in the path.
+   * List<ColumnSchema> from the index file.
+   * @throws IOException
+   */
+  public static List<ColumnSchema> readSchemaInIndexFile(String indexFilePath) throws IOException {
+    CarbonFile indexFile =
+        FileFactory.getCarbonFile(indexFilePath, FileFactory.getFileType(indexFilePath));
+    if (!indexFile.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT)) {
+      throw new IOException("Not an index file name");
+    }
+    // read schema from the first index file
+    DataInputStream dataInputStream =
+        FileFactory.getDataInputStream(indexFilePath, FileFactory.getFileType(indexFilePath));
+    byte[] bytes = new byte[(int) indexFile.getSize()];
+    try {
+      //get the file in byte buffer
+      dataInputStream.readFully(bytes);
+      CarbonIndexFileReader indexReader = new CarbonIndexFileReader();
+      // read from byte buffer.
+      indexReader.openThriftReader(bytes);
+      // get the index header
+      org.apache.carbondata.format.IndexHeader readIndexHeader = indexReader.readIndexHeader();
+      List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
+      List<org.apache.carbondata.format.ColumnSchema> table_columns =
+          readIndexHeader.getTable_columns();
+      for (org.apache.carbondata.format.ColumnSchema columnSchema : table_columns) {
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(columnSchema));
+      }
+      return columnSchemaList;
+    } finally {
+      dataInputStream.close();
+    }
+  }
+
+  /**
+   * Read CarbonData file and return the user schema,
+   * the schema order is the same as user save schema
+   */
+  public static List<ColumnSchema> readUserSchema(String indexFilePath) throws IOException {
+    List<ColumnSchema> columnSchemas = readSchemaInIndexFile(indexFilePath);
+    Collections.sort(columnSchemas, new Comparator<ColumnSchema>() {
+      @Override
+      public int compare(ColumnSchema o1, ColumnSchema o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+    return columnSchemas;
+  }
+
+  /**
    * Read schema file and return table info object
    */
   public static TableInfo readSchemaFile(String schemaFilePath) throws IOException {
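
As an illustration only (not part of the commit), a minimal sketch of calling the new SDK APIs added above; the index file path is an assumption for the example, and any .carbonindex file produced by the SDK writer would do.

    import java.io.IOException;
    import java.util.List;

    import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
    import org.apache.carbondata.sdk.file.CarbonReader;

    public class ReadUserSchemaExample {
      public static void main(String[] args) throws IOException {
        String indexFilePath = "/path/to/some.carbonindex";  // hypothetical path
        // readUserSchema returns the columns sorted by schema ordinal,
        // i.e. in the same order the user declared them at write time
        List<ColumnSchema> userSchema = CarbonReader.readUserSchema(indexFilePath);
        for (ColumnSchema column : userSchema) {
          System.out.println(column.getColumnName() + ": " + column.getDataType());
        }
      }
    }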

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8896a633/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 756dbe4..30d4091 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -17,14 +17,15 @@
 
 package org.apache.carbondata.sdk.file;
 
-import java.io.File;
-import java.io.FileFilter;
-import java.io.FilenameFilter;
-import java.io.IOException;
+import java.io.*;
 import java.sql.Date;
 import java.sql.Timestamp;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
 
+import org.apache.avro.generic.GenericData;
+import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
@@ -36,10 +37,9 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
 
 import junit.framework.TestCase;
 import org.apache.commons.io.FileUtils;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.apache.commons.lang.CharEncoding;
+import org.junit.*;
+import tech.allegro.schema.json2avro.converter.JsonAvroConverter;
 
 public class CarbonReaderTest extends TestCase {
 
@@ -371,7 +371,8 @@ public class CarbonReaderTest extends TestCase {
     Assert.assertNotNull(dataFiles);
     Assert.assertTrue(dataFiles.length > 0);
 
-    CarbonReader reader = CarbonReader.builder(path, "_temp")
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
         .projection(new String[]{
             "stringField"
             , "shortField"
@@ -381,7 +382,343 @@ public class CarbonReaderTest extends TestCase {
             , "boolField"
             , "dateField"
             , "timeField"
-            , "decimalField"}).build();
+            , "decimalField"})
+        .build();
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      int id = (int) row[2];
+      Assert.assertEquals("robot" + (id % 10), row[0]);
+      Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]);
+      Assert.assertEquals(Long.MAX_VALUE - id, row[3]);
+      Assert.assertEquals((double) id / 2, row[4]);
+      Assert.assertEquals(true, (boolean) row[5]);
+      long day = 24L * 3600 * 1000;
+      Assert.assertEquals("2019-03-02", new Date((day * ((int) row[6]))).toString());
+      Assert.assertEquals("2019-02-12 03:03:34.0", new Timestamp((long) row[7] / 1000).toString());
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        timestampFormat);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        badRecordAction);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        badRecordLoc);
+  }
+
+  @Test
+  public void testReadSchemaFileAndSort() throws IOException, InterruptedException {
+    String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+    String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT);
+    String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL);
+    String rootPath = new File(this.getClass().getResource("/").getPath()
+        + "../../").getCanonicalPath();
+    String storeLocation = rootPath + "/target/";
+    carbonProperties
+        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation)
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss")
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT");
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[9];
+    fields[0] = new Field("stringField", DataTypes.STRING);
+    fields[1] = new Field("shortField", DataTypes.SHORT);
+    fields[2] = new Field("intField", DataTypes.INT);
+    fields[3] = new Field("longField", DataTypes.LONG);
+    fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+    fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+    fields[6] = new Field("dateField", DataTypes.DATE);
+    fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+    fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
+
+    try {
+      CarbonWriterBuilder builder = CarbonWriter.builder()
+          .isTransactionalTable(true)
+          .persistSchemaFile(true)
+          .outputPath(path);
+
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
+
+      for (int i = 0; i < 100; i++) {
+        String[] row2 = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2019-03-02",
+            "2019-02-12 03:03:34",
+            "12.345"
+        };
+        writer.write(row2);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      Assert.fail(e.getMessage());
+    }
+
+    File[] dataFiles = new File(path + "/Metadata").listFiles(new FilenameFilter() {
+      @Override public boolean accept(File dir, String name) {
+        return name.endsWith("schema");
+      }
+    });
+    TableInfo tableInfo = CarbonReader.readSchemaFile(dataFiles[0].getAbsolutePath());
+
+    List<ColumnSchema> columns = tableInfo.getFactTable().getListOfColumns();
+
+    // sort the schema
+    Collections.sort(tableInfo.getFactTable().getListOfColumns(), new Comparator<ColumnSchema>() {
+      @Override
+      public int compare(ColumnSchema o1, ColumnSchema o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+
+    // Transform the schema into a projection of column names
+    String[] strings = new String[columns.size()];
+    for (int i = 0; i < columns.size(); i++) {
+      strings[i] = columns.get(i).getColumnName();
+    }
+
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    Assert.assertNotNull(dataFiles);
+    Assert.assertTrue(dataFiles.length > 0);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(strings)
+        .build();
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      int id = (int) row[2];
+      Assert.assertEquals("robot" + (id % 10), row[0]);
+      Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]);
+      Assert.assertEquals(Long.MAX_VALUE - id, row[3]);
+      Assert.assertEquals((double) id / 2, row[4]);
+      Assert.assertEquals(true, (boolean) row[5]);
+      long day = 24L * 3600 * 1000;
+      Assert.assertEquals("2019-03-02", new Date((day * ((int) row[6]))).toString());
+      Assert.assertEquals("2019-02-12 03:03:34.0", new Timestamp((long) row[7] / 1000).toString());
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        timestampFormat);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        badRecordAction);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        badRecordLoc);
+  }
+
+  @Test
+  public void testReadSchemaInDataFileAndSort() throws IOException, InterruptedException {
+    String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+    String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT);
+    String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL);
+    String rootPath = new File(this.getClass().getResource("/").getPath()
+        + "../../").getCanonicalPath();
+    String storeLocation = rootPath + "/target/";
+    carbonProperties
+        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation)
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss")
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT");
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[9];
+    fields[0] = new Field("stringField", DataTypes.STRING);
+    fields[1] = new Field("shortField", DataTypes.SHORT);
+    fields[2] = new Field("intField", DataTypes.INT);
+    fields[3] = new Field("longField", DataTypes.LONG);
+    fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+    fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+    fields[6] = new Field("dateField", DataTypes.DATE);
+    fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+    fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
+
+    try {
+      CarbonWriterBuilder builder = CarbonWriter.builder()
+          .isTransactionalTable(true)
+          .persistSchemaFile(true)
+          .outputPath(path);
+
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
+
+      for (int i = 0; i < 100; i++) {
+        String[] row2 = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2019-03-02",
+            "2019-02-12 03:03:34",
+            "12.345"
+        };
+        writer.write(row2);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      Assert.fail(e.getMessage());
+    }
+
+    File[] dataFiles2 = new File(path + "/Fact/Part0/Segment_null/").listFiles(new FilenameFilter() {
+      @Override public boolean accept(File dir, String name) {
+        return name.endsWith("carbondata");
+      }
+    });
+
+    List<ColumnSchema> columns = CarbonReader.readSchemaInDataFile(dataFiles2[0].getAbsolutePath());
+
+    // sort the schema
+    Collections.sort(columns, new Comparator<ColumnSchema>() {
+      @Override
+      public int compare(ColumnSchema o1, ColumnSchema o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+
+    // Transform the schema into a projection of column names
+    String[] strings = new String[columns.size()];
+    for (int i = 0; i < columns.size(); i++) {
+      strings[i] = columns.get(i).getColumnName();
+    }
+
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(strings)
+        .build();
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      int id = (int) row[2];
+      Assert.assertEquals("robot" + (id % 10), row[0]);
+      Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]);
+      Assert.assertEquals(Long.MAX_VALUE - id, row[3]);
+      Assert.assertEquals((double) id / 2, row[4]);
+      Assert.assertEquals(true, (boolean) row[5]);
+      long day = 24L * 3600 * 1000;
+      Assert.assertEquals("2019-03-02", new Date((day * ((int) row[6]))).toString());
+      Assert.assertEquals("2019-02-12 03:03:34.0", new Timestamp((long) row[7] / 1000).toString());
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        timestampFormat);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        badRecordAction);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        badRecordLoc);
+  }
+
+  @Test
+  public void testReadUserSchema() throws IOException, InterruptedException {
+    String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+    String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT);
+    String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL);
+    String rootPath = new File(this.getClass().getResource("/").getPath()
+        + "../../").getCanonicalPath();
+    String storeLocation = rootPath + "/target/";
+    carbonProperties
+        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation)
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss")
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT");
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[9];
+    fields[0] = new Field("stringField", DataTypes.STRING);
+    fields[1] = new Field("shortField", DataTypes.SHORT);
+    fields[2] = new Field("intField", DataTypes.INT);
+    fields[3] = new Field("longField", DataTypes.LONG);
+    fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+    fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+    fields[6] = new Field("dateField", DataTypes.DATE);
+    fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+    fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
+
+    try {
+      CarbonWriterBuilder builder = CarbonWriter.builder()
+          .isTransactionalTable(true)
+          .persistSchemaFile(true)
+          .outputPath(path);
+
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
+
+      for (int i = 0; i < 100; i++) {
+        String[] row2 = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2019-03-02",
+            "2019-02-12 03:03:34",
+            "12.345"
+        };
+        writer.write(row2);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      Assert.fail(e.getMessage());
+    }
+
+    File[] dataFiles2 = new File(path + "/Fact/Part0/Segment_null/").listFiles(new FilenameFilter() {
+      @Override public boolean accept(File dir, String name) {
+        return name.endsWith("carbonindex");
+      }
+    });
+
+    List<ColumnSchema> columns = CarbonReader.readUserSchema(dataFiles2[0].getAbsolutePath());
+
+    // Transform the schema into a projection of column names
+    String[] strings = new String[columns.size()];
+    for (int i = 0; i < columns.size(); i++) {
+      strings[i] = columns.get(i).getColumnName();
+    }
+
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(strings)
+        .build();
 
     int i = 0;
     while (reader.hasNext()) {
@@ -510,4 +847,101 @@ public class CarbonReaderTest extends TestCase {
       assert(row.length==0);
     }
   }
+
+  private void WriteAvroComplexData(String mySchema, String json, String[] sortColumns, String path)
+      throws IOException, InvalidLoadOptionException {
+
+    // conversion to GenericData.Record
+    org.apache.avro.Schema nn = new org.apache.avro.Schema.Parser().parse(mySchema);
+    JsonAvroConverter converter = new JsonAvroConverter();
+    GenericData.Record record = converter.convertToGenericDataRecord(
+        json.getBytes(CharEncoding.UTF_8), nn);
+
+    try {
+      CarbonWriter writer = CarbonWriter.builder()
+          .outputPath(path)
+          .isTransactionalTable(true)
+          .buildWriterForAvroInput(nn);
+
+      for (int i = 0; i < 100; i++) {
+        writer.write(record);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw e;
+    }
+  }
+
+  // TODO: support getting the schema of complex data types
+  @Ignore
+  public void testReadUserSchemaOfComplex() throws IOException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    String mySchema =
+        "{" +
+            "  \"name\": \"address\", " +
+            "   \"type\": \"record\", " +
+            "    \"fields\": [  " +
+            "  { \"name\": \"name\", \"type\": \"string\"}, " +
+            "  { \"name\": \"age\", \"type\": \"int\"}, " +
+            "  { " +
+            "    \"name\": \"address\", " +
+            "      \"type\": { " +
+            "    \"type\" : \"record\", " +
+            "        \"name\" : \"my_address\", " +
+            "        \"fields\" : [ " +
+            "    {\"name\": \"street\", \"type\": \"string\"}, " +
+            "    {\"name\": \"city\", \"type\": \"string\"} " +
+            "  ]} " +
+            "  }, " +
+            "  {\"name\" :\"doorNum\", " +
+            "   \"type\" : { " +
+            "   \"type\" :\"array\", " +
+            "   \"items\":{ " +
+            "   \"name\" :\"EachdoorNums\", " +
+            "   \"type\" : \"int\", " +
+            "   \"default\":-1} " +
+            "              } " +
+            "  }] " +
+            "}";
+
+    String json = "{\"name\":\"bob\", \"age\":10, \"address\" : {\"street\":\"abc\", \"city\":\"bang\"}, "
+        + "   \"doorNum\" : [1,2,3,4]}";
+
+    try {
+      WriteAvroComplexData(mySchema, json, null, path);
+    } catch (InvalidLoadOptionException e) {
+      e.printStackTrace();
+    }
+
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    File[] dataFiles = segmentFolder.listFiles(new FileFilter() {
+      @Override
+      public boolean accept(File pathname) {
+        return pathname.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT);
+      }
+    });
+    Assert.assertNotNull(dataFiles);
+    Assert.assertEquals(1, dataFiles.length);
+
+
+    File[] dataFiles2 = new File(path + "/Fact/Part0/Segment_null/").listFiles(new FilenameFilter() {
+      @Override
+      public boolean accept(File dir, String name) {
+        return name.endsWith("carbonindex");
+      }
+    });
+
+    List<ColumnSchema> columns = CarbonReader.readUserSchema(dataFiles2[0].getAbsolutePath());
+
+    for (int i = 0; i < columns.size(); i++) {
+      System.out.println(columns.get(i).getColumnName() + "\t" + columns.get(i).getSchemaOrdinal());
+    }
+    FileUtils.deleteDirectory(new File(path));
+  }
+
 }
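
Taken together, the new tests above exercise one read pattern: discover the schema that the SDK writer persisted (from the schema file, a carbondata file, or a carbonindex file), sort it by schema ordinal, and use the column names as the reader projection. The following is a minimal standalone sketch of that pattern distilled from the tests; the write path, import paths, and the class name are illustrative, not part of this commit.

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
import org.apache.carbondata.sdk.file.CarbonReader;

public class ReadWithDiscoveredSchema {
  public static void main(String[] args) throws IOException, InterruptedException {
    // Assumed to already contain the output of a CarbonWriter run, as in the tests above
    String path = "./testWriteFiles";

    // Pick up a carbonindex file from the segment folder written by the SDK
    File[] indexFiles = new File(path + "/Fact/Part0/Segment_null/").listFiles(
        new FilenameFilter() {
          @Override public boolean accept(File dir, String name) {
            return name.endsWith("carbonindex");
          }
        });

    // Read the persisted column schemas and order them by schema ordinal
    List<ColumnSchema> columns = CarbonReader.readUserSchema(indexFiles[0].getAbsolutePath());
    Collections.sort(columns, new Comparator<ColumnSchema>() {
      @Override public int compare(ColumnSchema o1, ColumnSchema o2) {
        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
      }
    });

    // Use the column names as the reader projection
    String[] projection = new String[columns.size()];
    for (int i = 0; i < columns.size(); i++) {
      projection[i] = columns.get(i).getColumnName();
    }

    CarbonReader reader = CarbonReader.builder(path, "_temp")
        .projection(projection)
        .build();
    while (reader.hasNext()) {
      Object[] row = (Object[]) reader.readNextRow();
      System.out.println(java.util.Arrays.toString(row));
    }
    reader.close();
  }
}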


[04/50] [abbrv] carbondata git commit: [CARBONDATA-2489] Coverity scan fixes

Posted by gv...@apache.org.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/IntegerStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/IntegerStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/IntegerStreamReader.java
index 7ddd181..ffe1aef 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/IntegerStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/IntegerStreamReader.java
@@ -49,7 +49,7 @@ public class IntegerStreamReader extends AbstractStreamReader {
       numberOfRows = batchSize;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
       if (columnVector != null) {
-        if(isDictionary) {
+        if (isDictionary) {
           populateDictionaryVector(type, numberOfRows, builder);
         } else {
           if (columnVector.anyNullsSet()) {
@@ -62,10 +62,8 @@ public class IntegerStreamReader extends AbstractStreamReader {
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeLong(builder, ((Integer) streamData[i]).longValue());
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeLong(builder, ((Integer) streamData[i]).longValue());
       }
     }
     return builder.build();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/LongStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/LongStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/LongStreamReader.java
index 015ac80..e1000c5 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/LongStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/LongStreamReader.java
@@ -49,7 +49,7 @@ public class LongStreamReader extends AbstractStreamReader {
       numberOfRows = batchSize;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
       if (columnVector != null) {
-        if(isDictionary) {
+        if (isDictionary) {
           populateDictionaryVector(type, numberOfRows, builder);
         }
         if (columnVector.anyNullsSet()) {
@@ -61,10 +61,8 @@ public class LongStreamReader extends AbstractStreamReader {
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeLong(builder, (Long) streamData[i]);
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeLong(builder, (Long) streamData[i]);
       }
     }
     return builder.build();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ObjectStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ObjectStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ObjectStreamReader.java
index 82d62ad..8952712 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ObjectStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ObjectStreamReader.java
@@ -41,32 +41,26 @@ public class ObjectStreamReader  extends AbstractStreamReader {
    * @return
    * @throws IOException
    */
-  public Block readBlock(Type type)
-      throws IOException
-  {
+  public Block readBlock(Type type) throws IOException {
     int numberOfRows = 0;
     BlockBuilder builder = null;
-    if(isVectorReader) {
+    if (isVectorReader) {
       numberOfRows = batchSize;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
       if (columnVector != null) {
-        for(int i = 0; i < numberOfRows ; i++ ){
+        for (int i = 0; i < numberOfRows; i++) {
           type.writeObject(builder, columnVector.getData(i));
         }
       }
-
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for(int i = 0; i < numberOfRows ; i++ ){
-          type.writeObject(builder, streamData[i]);
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeObject(builder, streamData[i]);
       }
     }
 
     return builder.build();
-
   }
 
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ShortStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ShortStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ShortStreamReader.java
index 00e5485..51f1cd5 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ShortStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/ShortStreamReader.java
@@ -49,7 +49,7 @@ public class ShortStreamReader extends AbstractStreamReader {
       numberOfRows = batchSize;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
       if (columnVector != null) {
-        if(isDictionary) {
+        if (isDictionary) {
           populateDictionaryVector(type, numberOfRows, builder);
         } else {
           if (columnVector.anyNullsSet()) {
@@ -59,13 +59,11 @@ public class ShortStreamReader extends AbstractStreamReader {
           }
         }
       }
-   } else {
+    } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeLong(builder, (Short) streamData[i]);
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeLong(builder, (Short) streamData[i]);
       }
     }
     return builder.build();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/SliceStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/SliceStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/SliceStreamReader.java
index d98afa3..cce35e0 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/SliceStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/SliceStreamReader.java
@@ -72,7 +72,7 @@ public class SliceStreamReader extends AbstractStreamReader {
           }
           return new DictionaryBlock(batchSize, dictionarySliceArrayBlock, values);
         } else {
-          if(columnVector.anyNullsSet()) {
+          if (columnVector.anyNullsSet()) {
             handleNullInVector(type, numberOfRows, builder);
           } else {
             populateVector(type, numberOfRows, builder);
@@ -82,10 +82,8 @@ public class SliceStreamReader extends AbstractStreamReader {
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeSlice(builder, utf8Slice(streamData[i].toString()));
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeSlice(builder, utf8Slice(streamData[i].toString()));
       }
     }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/TimestampStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/TimestampStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/TimestampStreamReader.java
index 01b7939..a22ef29 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/TimestampStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/TimestampStreamReader.java
@@ -39,10 +39,9 @@ public class TimestampStreamReader extends AbstractStreamReader {
       numberOfRows = batchSize;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
       if (columnVector != null) {
-        if(columnVector.anyNullsSet()) {
+        if (columnVector.anyNullsSet()) {
           handleNullInVector(type, numberOfRows, builder);
-        }
-        else {
+        } else {
           populateVector(type, numberOfRows, builder);
         }
       }
@@ -50,10 +49,8 @@ public class TimestampStreamReader extends AbstractStreamReader {
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeLong(builder, (Long) streamData[i]);
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeLong(builder, (Long) streamData[i]);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
index d45e759..49aa7ff 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.common.util
 
-import java.io.{FileInputStream, ObjectInputStream, ObjectOutputStream}
-import java.math
-import java.math.RoundingMode
+import java.io.{ObjectInputStream, ObjectOutputStream}
 import java.util.{Locale, TimeZone}
 
 import org.apache.carbondata.common.logging.LogServiceFactory

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
index 1e98ec2..f4948c4 100644
--- a/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
+++ b/integration/spark-common/src/main/java/org/apache/carbondata/spark/dictionary/server/SecureDictionaryServer.java
@@ -38,7 +38,6 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.network.TransportContext;
 import org.apache.spark.network.netty.SparkTransportConf;
 import org.apache.spark.network.sasl.SaslServerBootstrap;
-import org.apache.spark.network.server.TransportServer;
 import org.apache.spark.network.server.TransportServerBootstrap;
 import org.apache.spark.network.util.TransportConf;
 import scala.Some;
@@ -144,8 +143,7 @@ public class SecureDictionaryServer extends AbstractDictionaryServer implements
         TransportServerBootstrap bootstrap =
             new SaslServerBootstrap(transportConf, securityManager);
         String host = findLocalIpAddress(LOGGER);
-        TransportServer transportServer = context
-            .createServer(host, port, Lists.<TransportServerBootstrap>newArrayList(bootstrap));
+        context.createServer(host, port, Lists.<TransportServerBootstrap>newArrayList(bootstrap));
         LOGGER.audit("Dictionary Server started, Time spent " + (System.currentTimeMillis() - start)
             + " Listening on port " + newPort);
         this.port = newPort;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonCleanFilesRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonCleanFilesRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonCleanFilesRDD.scala
deleted file mode 100644
index 9936a2a..0000000
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonCleanFilesRDD.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.spark.rdd
-
-import scala.collection.JavaConverters._
-import scala.reflect.ClassTag
-
-import org.apache.spark.{Partition, SparkContext, TaskContext}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.execution.command.Partitioner
-
-import org.apache.carbondata.processing.util.CarbonQueryUtil
-import org.apache.carbondata.spark.Value
-
-class CarbonCleanFilesRDD[V: ClassTag](
-    sc: SparkContext,
-    valueClass: Value[V],
-    databaseName: String,
-    tableName: String,
-    partitioner: Partitioner)
-  extends CarbonRDD[V](sc, Nil, sc.hadoopConfiguration) {
-
-  sc.setLocalProperty("spark.scheduler.pool", "DDL")
-
-
-  override def getPartitions: Array[Partition] = {
-    val splits = CarbonQueryUtil.getTableSplits(databaseName, tableName, null)
-    splits.zipWithIndex.map(s => new CarbonLoadPartition(id, s._2, s._1))
-  }
-
-  override def internalCompute(theSplit: Partition, context: TaskContext): Iterator[V] = {
-    val iter = new Iterator[(V)] {
-      val split = theSplit.asInstanceOf[CarbonLoadPartition]
-      logInfo("Input split: " + split.serializableHadoopSplit.value)
-      // TODO call CARBON delete API
-
-
-      var havePair = false
-      var finished = false
-
-      override def hasNext: Boolean = {
-        if (!finished && !havePair) {
-          finished = true
-          havePair = !finished
-        }
-        !finished
-      }
-
-      override def next(): V = {
-        if (!hasNext) {
-          throw new java.util.NoSuchElementException("End of stream")
-        }
-        havePair = false
-        valueClass.getValue(null)
-      }
-
-    }
-    iter
-  }
-
-  override def getPreferredLocations(split: Partition): Seq[String] = {
-    val theSplit = split.asInstanceOf[CarbonLoadPartition]
-    val s = theSplit.serializableHadoopSplit.value.getLocations.asScala
-    logInfo("Host Name: " + s.head + s.length)
-    s
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala
deleted file mode 100644
index b11dfad..0000000
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadByDateRDD.scala
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.spark.rdd
-
-import scala.collection.JavaConverters._
-
-import org.apache.spark.{Partition, SparkContext, TaskContext}
-import org.apache.spark.rdd.RDD
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.statusmanager.LoadMetadataDetails
-import org.apache.carbondata.processing.util.CarbonQueryUtil
-import org.apache.carbondata.spark.DeletedLoadResult
-
-class CarbonDeleteLoadByDateRDD[K, V](
-    sc: SparkContext,
-    result: DeletedLoadResult[K, V],
-    databaseName: String,
-    tableName: String,
-    dateField: String,
-    dateFieldActualName: String,
-    dateValue: String,
-    factTableName: String,
-    dimTableName: String,
-    storePath: String,
-    loadMetadataDetails: List[LoadMetadataDetails])
-  extends CarbonRDD[(K, V)](sc, Nil, sc.hadoopConfiguration) {
-
-  sc.setLocalProperty("spark.scheduler.pool", "DDL")
-
-  override def getPartitions: Array[Partition] = {
-    val splits = CarbonQueryUtil.getTableSplits(databaseName, tableName, null)
-    splits.zipWithIndex.map {s =>
-      new CarbonLoadPartition(id, s._2, s._1)
-    }
-  }
-
-  override def internalCompute(theSplit: Partition, context: TaskContext): Iterator[(K, V)] = {
-    new Iterator[(K, V)] {
-      val split = theSplit.asInstanceOf[CarbonLoadPartition]
-      logInfo("Input split: " + split.serializableHadoopSplit.value)
-
-      logInfo("Input split: " + split.serializableHadoopSplit.value)
-      val partitionID = split.serializableHadoopSplit.value.getPartition.getUniqueID
-
-      // TODO call CARBON delete API
-      logInfo("Applying data retention as per date value " + dateValue)
-      var dateFormat = ""
-      try {
-        dateFormat = CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT
-      } catch {
-        case e: Exception => logInfo("Unable to parse with default time format " + dateValue)
-      }
-      // TODO: Implement it
-      val finished = false
-
-      override def hasNext: Boolean = {
-        finished
-      }
-
-      override def next(): (K, V) = {
-        result.getKey(null, null)
-      }
-    }
-  }
-
-  override def getPreferredLocations(split: Partition): Seq[String] = {
-    val theSplit = split.asInstanceOf[CarbonLoadPartition]
-    val s = theSplit.serializableHadoopSplit.value.getLocations.asScala
-    logInfo("Host Name: " + s.head + s.length)
-    s
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadRDD.scala
deleted file mode 100644
index 759ed42..0000000
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDeleteLoadRDD.scala
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.spark.rdd
-
-import scala.collection.JavaConverters._
-import scala.reflect.ClassTag
-
-import org.apache.spark.{Partition, SparkContext, TaskContext}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.execution.command.Partitioner
-
-import org.apache.carbondata.processing.util.CarbonQueryUtil
-import org.apache.carbondata.spark.Value
-
-class CarbonDeleteLoadRDD[V: ClassTag](
-    sc: SparkContext,
-    valueClass: Value[V],
-    loadId: Int,
-    databaseName: String,
-    tableName: String,
-    partitioner: Partitioner)
-  extends CarbonRDD[V](sc, Nil, sc.hadoopConfiguration) {
-  sc.setLocalProperty("spark.scheduler.pool", "DDL")
-
-  override def getPartitions: Array[Partition] = {
-    val splits = CarbonQueryUtil.getTableSplits(databaseName, tableName, null)
-    splits.zipWithIndex.map {f =>
-      new CarbonLoadPartition(id, f._2, f._1)
-    }
-  }
-
-  override def internalCompute(theSplit: Partition, context: TaskContext): Iterator[V] = {
-    val iter = new Iterator[V] {
-      val split = theSplit.asInstanceOf[CarbonLoadPartition]
-      logInfo("Input split: " + split.serializableHadoopSplit.value)
-      // TODO call CARBON delete API
-
-      var havePair = false
-      var finished = false
-
-      override def hasNext: Boolean = {
-        if (!finished && !havePair) {
-          finished = true
-          havePair = !finished
-        }
-        !finished
-      }
-
-      override def next(): V = {
-        if (!hasNext) {
-          throw new java.util.NoSuchElementException("End of stream")
-        }
-        havePair = false
-        valueClass.getValue(null)
-      }
-
-    }
-    logInfo("********Deleting***************")
-    iter
-  }
-
-  override def getPreferredLocations(split: Partition): Seq[String] = {
-    val theSplit = split.asInstanceOf[CarbonLoadPartition]
-    val s = theSplit.serializableHadoopSplit.value.getLocations.asScala
-    logInfo("Host Name: " + s.head + s.length)
-    s
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropTableRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropTableRDD.scala b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropTableRDD.scala
deleted file mode 100644
index f327d88..0000000
--- a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDropTableRDD.scala
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.spark.rdd
-
-import scala.reflect.ClassTag
-
-import org.apache.spark.{Partition, SparkContext, TaskContext}
-import org.apache.spark.rdd.RDD
-
-import org.apache.carbondata.processing.util.CarbonQueryUtil
-import org.apache.carbondata.spark.Value
-
-class CarbonDropTableRDD[V: ClassTag](
-    sc: SparkContext,
-    valueClass: Value[V],
-    databaseName: String,
-    tableName: String)
-  extends CarbonRDD[V](sc, Nil, sc.hadoopConfiguration) {
-
-  sc.setLocalProperty("spark.scheduler.pool", "DDL")
-
-  override def getPartitions: Array[Partition] = {
-    val splits = CarbonQueryUtil.getTableSplits(databaseName, tableName, null)
-    splits.zipWithIndex.map { s =>
-      new CarbonLoadPartition(id, s._2, s._1)
-    }
-  }
-
-  override def internalCompute(theSplit: Partition, context: TaskContext): Iterator[V] = {
-
-      val iter = new Iterator[V] {
-      // TODO: Clear Btree from memory
-
-      var havePair = false
-      var finished = false
-
-      override def hasNext: Boolean = {
-        if (!finished && !havePair) {
-          finished = true
-          havePair = !finished
-        }
-        !finished
-      }
-
-      override def next(): V = {
-        if (!hasNext) {
-          throw new java.util.NoSuchElementException("End of stream")
-        }
-        havePair = false
-        valueClass.getValue(null)
-      }
-    }
-    iter
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
index c2c4ab3..5c3ace3 100644
--- a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
+++ b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/util/QueryTest.scala
@@ -153,6 +153,15 @@ object QueryTest {
         Row.fromSeq(s.toSeq.map {
           case d: java.math.BigDecimal => BigDecimal(d)
           case b: Array[Byte] => b.toSeq
+          case d: Double =>
+            if (!d.isInfinite && !d.isNaN) {
+              var bd = BigDecimal(d)
+              bd = bd.setScale(5, BigDecimal.RoundingMode.UP)
+              bd.doubleValue()
+            }
+            else {
+              d
+            }
           case o => o
         })
       }
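
For reference, the rounding that the Scala hunk above applies to finite, non-NaN doubles looks like this in Java; the sample values are made up and only show why a tiny floating point difference stops failing the row comparison.

import java.math.BigDecimal;
import java.math.RoundingMode;

public class RoundForComparison {
  // Round a result double to 5 decimal places before comparing, leaving NaN/Infinity untouched
  static double normalize(double d) {
    if (Double.isInfinite(d) || Double.isNaN(d)) {
      return d;
    }
    return BigDecimal.valueOf(d).setScale(5, RoundingMode.UP).doubleValue();
  }

  public static void main(String[] args) {
    // Both values normalize to 0.33334, so a small floating point wobble no longer
    // makes the expected and actual rows unequal
    System.out.println(normalize(1.0 / 3.0));
    System.out.println(normalize(0.333333000001));
  }
}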

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/spark2/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java b/integration/spark2/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
index 903bf44..082ef8b 100644
--- a/integration/spark2/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
+++ b/integration/spark2/src/main/java/org/apache/carbondata/spark/vectorreader/VectorizedCarbonRecordReader.java
@@ -48,6 +48,7 @@ import org.apache.carbondata.hadoop.CarbonMultiBlockSplit;
 import org.apache.carbondata.hadoop.InputMetricsStats;
 import org.apache.carbondata.spark.util.CarbonScalaUtil;
 
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.spark.memory.MemoryMode;
@@ -134,24 +135,17 @@ class VectorizedCarbonRecordReader extends AbstractRecordReader<Object> {
       queryExecutor = QueryExecutorFactory.getQueryExecutor(queryModel);
       iterator = (AbstractDetailQueryResultIterator) queryExecutor.execute(queryModel);
     } catch (QueryExecutionException e) {
-      Throwable ext = e;
-      while (ext != null) {
-        if (ext instanceof FileNotFoundException) {
-          throw new InterruptedException(
-              "Insert overwrite may be in progress.Please check " + e.getMessage());
-        }
-        ext = ext.getCause();
+      if (ExceptionUtils.indexOfThrowable(e, FileNotFoundException.class) > 0) {
+        LOGGER.error(e);
+        throw new InterruptedException(
+            "Insert overwrite may be in progress.Please check " + e.getMessage());
       }
       throw new InterruptedException(e.getMessage());
     } catch (Exception e) {
-      Throwable ext = e;
-      while (ext != null) {
-        if (ext instanceof FileNotFoundException) {
-          LOGGER.error(e);
-          throw new InterruptedException(
-              "Insert overwrite may be in progress.Please check " + e.getMessage());
-        }
-        ext = ext.getCause();
+      if (ExceptionUtils.indexOfThrowable(e, FileNotFoundException.class) > 0) {
+        LOGGER.error(e);
+        throw new InterruptedException(
+            "Insert overwrite may be in progress.Please check " + e.getMessage());
       }
       throw e;
     }
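
The hunk above replaces a hand-written walk of the exception cause chain with commons-lang3's ExceptionUtils.indexOfThrowable. A small illustration of that helper, with made-up exception values:

import java.io.FileNotFoundException;

import org.apache.commons.lang3.exception.ExceptionUtils;

public class CauseChainLookup {
  public static void main(String[] args) {
    // A wrapped failure, roughly the shape a query execution error can arrive in
    Exception wrapped = new RuntimeException("query failed",
        new IllegalStateException("executor error",
            new FileNotFoundException("segment file missing")));

    // indexOfThrowable returns the position of the first matching throwable in the
    // cause chain (0 is the throwable itself) or -1 when the type is not present
    int idx = ExceptionUtils.indexOfThrowable(wrapped, FileNotFoundException.class);
    if (idx >= 0) {
      System.out.println("FileNotFoundException found at depth " + idx);
    }
  }
}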

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java
index d668329..58ec0d5 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/BadRecordsLogger.java
@@ -24,8 +24,8 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.nio.charset.Charset;
-import java.util.HashMap;
 import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
@@ -47,7 +47,7 @@ public class BadRecordsLogger {
    * the status
    */
   private static Map<String, String> badRecordEntry =
-      new HashMap<String, String>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
+      new ConcurrentHashMap<String, String>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
   /**
    * File Name
    */
@@ -121,6 +121,9 @@ public class BadRecordsLogger {
 
   public void addBadRecordsToBuilder(Object[] row, String reason)
       throws CarbonDataLoadingException {
+    // set the partial success entry up front: if any bad record is found, the load
+    // status should be partial success regardless of whether the bad record is logged
+    badRecordEntry.put(taskKey, "Partially");
     if (badRecordsLogRedirect || badRecordLoggerEnable) {
       StringBuilder logStrings = new StringBuilder();
       int size = row.length;
@@ -158,10 +161,6 @@ public class BadRecordsLogger {
         }
         writeBadRecordsToFile(logStrings);
       }
-    } else {
-      // setting partial success entry since even if bad records are there then load
-      // status should be partial success regardless of bad record logged
-      badRecordEntry.put(taskKey, "Partially");
     }
   }
 
@@ -200,11 +199,6 @@ public class BadRecordsLogger {
     } catch (IOException e) {
       LOGGER.error("Error While writing bad record log File");
       throw new CarbonDataLoadingException("Error While writing bad record log File", e);
-    } finally {
-      // if the Bad record file is created means it partially success
-      // if any entry present with key that means its have bad record for
-      // that key
-      badRecordEntry.put(taskKey, "Partially");
     }
   }
 
@@ -246,9 +240,6 @@ public class BadRecordsLogger {
       LOGGER.error("Error While writing bad record csv File");
       throw new CarbonDataLoadingException("Error While writing bad record csv File", e);
     }
-    finally {
-      badRecordEntry.put(taskKey, "Partially");
-    }
   }
 
   public boolean isBadRecordConvertNullDisable() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ThreadStatusObserver.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ThreadStatusObserver.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ThreadStatusObserver.java
index ed35a96..12f6927 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ThreadStatusObserver.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/ThreadStatusObserver.java
@@ -39,17 +39,18 @@ public class ThreadStatusObserver {
     // should assign the throwable object else the actual cause for failure can be overridden as
     // all the running threads will throw interrupted exception on calling shutdownNow and
     // will override the throwable object
-    if (null == this.throwable) {
-      synchronized (lock) {
-        if (null == this.throwable) {
-          executorService.shutdownNow();
-          this.throwable = throwable;
-        }
+    synchronized (lock) {
+      if (null == this.throwable) {
+        executorService.shutdownNow();
+        this.throwable = throwable;
       }
     }
   }
 
   public Throwable getThrowable() {
-    return throwable;
+
+    synchronized (lock) {
+      return throwable;
+    }
   }
 }
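
The change above drops the unsynchronized double-checked read of the non-volatile throwable field and synchronizes the getter as well. The essence of the resulting first-failure-wins pattern, as a hedged standalone sketch (class and field names here are illustrative, and the executor shutdown done in the real class is omitted):

public class FailureHolder {
  private final Object lock = new Object();
  private Throwable throwable;

  public void notifyFailed(Throwable t) {
    synchronized (lock) {
      if (throwable == null) {   // keep only the first recorded failure
        throwable = t;
      }
    }
  }

  public Throwable getThrowable() {
    synchronized (lock) {        // synchronizing the read guarantees visibility to other threads
      return throwable;
    }
  }
}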

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
index 80887c1..cb72f54 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/impl/UnsafeBatchParallelReadMergeSorterImpl.java
@@ -164,7 +164,9 @@ public class UnsafeBatchParallelReadMergeSorterImpl extends AbstractMergeSorter
         LOGGER.error(e);
         this.threadStatusObserver.notifyFailed(e);
       } finally {
-        sortDataRows.finishThread();
+        synchronized (sortDataRows) {
+          sortDataRows.finishThread();
+        }
       }
     }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
index 1c6ce8d..fb0bcc3 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/UnsafeSortDataRows.java
@@ -109,7 +109,6 @@ public class UnsafeSortDataRows {
     // observer of writing file in thread
     this.threadStatusObserver = new ThreadStatusObserver();
     this.taskId = ThreadLocalTaskInfo.getCarbonTaskInfo().getTaskId();
-    this.inMemoryChunkSize = inMemoryChunkSize;
     this.inMemoryChunkSize = inMemoryChunkSize * 1024L * 1024L;
     enableInMemoryIntermediateMerge = Boolean.parseBoolean(CarbonProperties.getInstance()
         .getProperty(CarbonCommonConstants.ENABLE_INMEMORY_MERGE_SORT,
@@ -121,7 +120,7 @@ public class UnsafeSortDataRows {
       // in sort memory size.
       this.maxSizeAllowed = UnsafeMemoryManager.INSTANCE.getUsableMemory() / 2;
     } else {
-      this.maxSizeAllowed = this.maxSizeAllowed * 1024 * 1024;
+      this.maxSizeAllowed = this.maxSizeAllowed * 1024L * 1024L;
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java
index 01e7649..a65de16 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeInMemoryIntermediateDataMerger.java
@@ -21,6 +21,7 @@ import java.io.DataOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.util.AbstractQueue;
+import java.util.NoSuchElementException;
 import java.util.PriorityQueue;
 import java.util.Random;
 import java.util.concurrent.Callable;
@@ -215,7 +216,11 @@ public class UnsafeInMemoryIntermediateDataMerger implements Callable<Void> {
    * @throws CarbonSortKeyAndGroupByException
    */
   private UnsafeCarbonRowForMerge next() throws CarbonSortKeyAndGroupByException {
-    return getSortedRecordFromMemory();
+    if (hasNext()) {
+      return getSortedRecordFromMemory();
+    } else {
+      throw new NoSuchElementException("No more elements to return");
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java
index 22673ff..c5b215e 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateFileMerger.java
@@ -22,6 +22,7 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.AbstractQueue;
+import java.util.NoSuchElementException;
 import java.util.PriorityQueue;
 import java.util.concurrent.Callable;
 
@@ -238,7 +239,12 @@ public class UnsafeIntermediateFileMerger implements Callable<Void> {
    * @throws CarbonSortKeyAndGroupByException
    */
   private IntermediateSortTempRow next() throws CarbonSortKeyAndGroupByException {
-    return getSortedRecordFromFile();
+    if (hasNext()) {
+      return getSortedRecordFromFile();
+    } else {
+      throw new NoSuchElementException("No more elements to return");
+    }
+
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java
index 0c692c7..8d2c52a 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeIntermediateMerger.java
@@ -38,6 +38,8 @@ import org.apache.carbondata.processing.loading.sort.unsafe.UnsafeCarbonRowPage;
 import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
 import org.apache.carbondata.processing.sort.sortdata.SortParameters;
 
+import org.apache.commons.collections.list.SynchronizedList;
+
 /**
  * It does mergesort intermediate files to big file.
  */
@@ -76,7 +78,8 @@ public class UnsafeIntermediateMerger {
     this.mergedPages = new ArrayList<>();
     this.executorService = Executors.newFixedThreadPool(parameters.getNumberOfCores(),
         new CarbonThreadFactory("UnsafeIntermediatePool:" + parameters.getTableName()));
-    this.procFiles = new ArrayList<File>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
+    this.procFiles =
+        SynchronizedList.decorate(new ArrayList<File>(CarbonCommonConstants.CONSTANT_SIZE_TEN));
     this.mergerTask = new ArrayList<>();
 
     Integer spillPercentage;
@@ -111,15 +114,18 @@ public class UnsafeIntermediateMerger {
   }
 
   public void startFileMergingIfPossible() {
-    File[] fileList;
-    if (procFiles.size() >= parameters.getNumberOfIntermediateFileToBeMerged()) {
-      synchronized (lockObject) {
+    File[] fileList = null;
+    synchronized (lockObject) {
+      if (procFiles.size() >= parameters.getNumberOfIntermediateFileToBeMerged()) {
         fileList = procFiles.toArray(new File[procFiles.size()]);
         this.procFiles = new ArrayList<File>();
+        if (LOGGER.isDebugEnabled()) {
+          LOGGER
+              .debug("Submitting request for intermediate merging no of files: " + fileList.length);
+        }
       }
-      if (LOGGER.isDebugEnabled()) {
-        LOGGER.debug("Sumitting request for intermediate merging no of files: " + fileList.length);
-      }
+    }
+    if (null != fileList) {
       startIntermediateMerging(fileList);
     }
   }
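
The reshuffle above moves the size check and the snapshot of procFiles inside the same synchronized block, so the check-then-act is atomic, and decorates the list with commons-collections SynchronizedList so concurrent add() calls are also safe. A hedged sketch of the same shape with hypothetical names (the real code hands fileList to an executor):

  import java.io.File;
  import java.util.ArrayList;
  import java.util.List;

  import org.apache.commons.collections.list.SynchronizedList;

  // Illustrative only: atomic check-then-act over a shared file list.
  class MergeScheduler {
    private final Object lockObject = new Object();
    private List<File> procFiles = SynchronizedList.decorate(new ArrayList<File>());

    void add(File file) {
      procFiles.add(file);                          // safe from multiple writer threads
    }

    void startFileMergingIfPossible(int threshold) {
      File[] fileList = null;
      synchronized (lockObject) {
        if (procFiles.size() >= threshold) {
          fileList = procFiles.toArray(new File[procFiles.size()]);
          // reset under the same lock as the check; keep the new list decorated too
          procFiles = SynchronizedList.decorate(new ArrayList<File>());
        }
      }
      if (fileList != null) {
        submit(fileList);                           // merging happens outside the lock
      }
    }

    private void submit(File[] files) { /* hand off to an executor in the real code */ }
  }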

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java
index 073d13b..6defeb7 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/sort/unsafe/merger/UnsafeSingleThreadFinalSortFilesMerger.java
@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
+import java.util.NoSuchElementException;
 import java.util.PriorityQueue;
 
 import org.apache.carbondata.common.CarbonIterator;
@@ -195,8 +196,12 @@ public class UnsafeSingleThreadFinalSortFilesMerger extends CarbonIterator<Objec
    * @return sorted row
    */
   public Object[] next() {
-    IntermediateSortTempRow sortTempRow =  getSortedRecordFromFile();
-    return sortStepRowHandler.convertIntermediateSortTempRowTo3Parted(sortTempRow);
+    if (hasNext()) {
+      IntermediateSortTempRow sortTempRow = getSortedRecordFromFile();
+      return sortStepRowHandler.convertIntermediateSortTempRowTo3Parted(sortTempRow);
+    } else {
+      throw new NoSuchElementException("No more elements to return");
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepImpl.java
index 4078a13..d0e78fc 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/InputProcessorStepImpl.java
@@ -96,6 +96,10 @@ public class InputProcessorStepImpl extends AbstractDataLoadProcessorStep {
     // to be launched.
     int parallelThreadNumber = Math.min(inputIterators.length, numberOfCores);
 
+    if (parallelThreadNumber <= 0) {
+      parallelThreadNumber = 1;
+    }
+
     List<CarbonIterator<Object[]>>[] iterators = new List[parallelThreadNumber];
     for (int i = 0; i < parallelThreadNumber; i++) {
       iterators[i] = new ArrayList<>();
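
The new guard above covers the degenerate case where inputIterators is empty: Math.min(0, numberOfCores) would yield 0 and no iterator buckets would be created at all. A minimal sketch of the clamping:

  // Illustrative only: clamp computed parallelism so at least one reader slot is launched.
  class ParallelismSketch {
    static int parallelThreads(int inputIteratorCount, int numberOfCores) {
      int parallelThreadNumber = Math.min(inputIteratorCount, numberOfCores);
      if (parallelThreadNumber <= 0) {
        parallelThreadNumber = 1;   // an empty input would otherwise create zero buckets
      }
      return parallelThreadNumber;
    }
  }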

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java b/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java
index 1744675..81031de 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonDataMergerUtil.java
@@ -1108,23 +1108,24 @@ public final class CarbonDataMergerUtil {
 
       CarbonFile[] deleteDeltaFiles =
           segmentUpdateStatusManager.getDeleteDeltaFilesList(seg, blockName);
+      if (null != deleteDeltaFiles) {
+        // The Delete Delta files may have Spill over blocks. Will consider multiple spill over
+        // blocks as one. Currently DeleteDeltaFiles array contains Delete Delta Block name which
+        // lies within Delete Delta Start TimeStamp and End TimeStamp. In order to eliminate
+        // Spill Over Blocks will choose files with unique taskID.
+        for (CarbonFile blocks : deleteDeltaFiles) {
+          // Get Task ID and the Timestamp from the Block name for e.g.
+          // part-0-3-1481084721319.carbondata => "3-1481084721319"
+          String task = CarbonTablePath.DataFileUtil.getTaskNo(blocks.getName());
+          String timestamp =
+              CarbonTablePath.DataFileUtil.getTimeStampFromDeleteDeltaFile(blocks.getName());
+          String taskAndTimeStamp = task + "-" + timestamp;
+          uniqueBlocks.add(taskAndTimeStamp);
+        }
 
-      // The Delete Delta files may have Spill over blocks. Will consider multiple spill over
-      // blocks as one. Currently DeleteDeltaFiles array contains Delete Delta Block name which
-      // lies within Delete Delta Start TimeStamp and End TimeStamp. In order to eliminate
-      // Spill Over Blocks will choose files with unique taskID.
-      for (CarbonFile blocks : deleteDeltaFiles) {
-        // Get Task ID and the Timestamp from the Block name for e.g.
-        // part-0-3-1481084721319.carbondata => "3-1481084721319"
-        String task = CarbonTablePath.DataFileUtil.getTaskNo(blocks.getName());
-        String timestamp =
-            CarbonTablePath.DataFileUtil.getTimeStampFromDeleteDeltaFile(blocks.getName());
-        String taskAndTimeStamp = task + "-" + timestamp;
-        uniqueBlocks.add(taskAndTimeStamp);
-      }
-
-      if (uniqueBlocks.size() > numberDeltaFilesThreshold) {
-        return true;
+        if (uniqueBlocks.size() > numberDeltaFilesThreshold) {
+          return true;
+        }
       }
     }
     return false;
@@ -1152,7 +1153,7 @@ public final class CarbonDataMergerUtil {
       CarbonFile[] deleteDeltaFiles =
           segmentUpdateStatusManager.getDeleteDeltaFilesList(seg, blockName);
 
-      if (deleteDeltaFiles.length > numberDeltaFilesThreshold) {
+      if (null != deleteDeltaFiles && (deleteDeltaFiles.length > numberDeltaFilesThreshold)) {
         blockLists.add(seg.getSegmentNo() + "/" + blockName);
       }
     }
@@ -1200,31 +1201,34 @@ public final class CarbonDataMergerUtil {
 
     String destFileName =
         blockName + "-" + timestamp.toString() + CarbonCommonConstants.DELETE_DELTA_FILE_EXT;
-    String fullBlockFilePath = deleteDeltaFiles[0].getParentFile().getCanonicalPath()
-        + CarbonCommonConstants.FILE_SEPARATOR + destFileName;
-
-    List<String> deleteFilePathList = new ArrayList<String>();
-    for (CarbonFile cFile : deleteDeltaFiles) {
-      deleteFilePathList.add(cFile.getCanonicalPath());
-    }
+    List<String> deleteFilePathList = new ArrayList<>();
+    if (null != deleteDeltaFiles && deleteDeltaFiles.length > 0 && null != deleteDeltaFiles[0]
+        .getParentFile()) {
+      String fullBlockFilePath = deleteDeltaFiles[0].getParentFile().getCanonicalPath()
+          + CarbonCommonConstants.FILE_SEPARATOR + destFileName;
+
+      for (CarbonFile cFile : deleteDeltaFiles) {
+        deleteFilePathList.add(cFile.getCanonicalPath());
+      }
 
-    CarbonDataMergerUtilResult blockDetails = new CarbonDataMergerUtilResult();
-    blockDetails.setBlockName(blockName);
-    blockDetails.setSegmentName(seg);
-    blockDetails.setDeleteDeltaStartTimestamp(timestamp.toString());
-    blockDetails.setDeleteDeltaEndTimestamp(timestamp.toString());
+      CarbonDataMergerUtilResult blockDetails = new CarbonDataMergerUtilResult();
+      blockDetails.setBlockName(blockName);
+      blockDetails.setSegmentName(seg);
+      blockDetails.setDeleteDeltaStartTimestamp(timestamp.toString());
+      blockDetails.setDeleteDeltaEndTimestamp(timestamp.toString());
 
-    try {
-      if (startCompactionDeleteDeltaFiles(deleteFilePathList, blockName, fullBlockFilePath)) {
-        blockDetails.setCompactionStatus(true);
-      } else {
-        blockDetails.setCompactionStatus(false);
+      try {
+        if (startCompactionDeleteDeltaFiles(deleteFilePathList, blockName, fullBlockFilePath)) {
+          blockDetails.setCompactionStatus(true);
+        } else {
+          blockDetails.setCompactionStatus(false);
+        }
+        resultList.add(blockDetails);
+      } catch (IOException e) {
+        LOGGER.error("Compaction of Delete Delta Files failed. The complete file path is "
+            + fullBlockFilePath);
+        throw new IOException();
       }
-      resultList.add(blockDetails);
-    } catch (IOException e) {
-      LOGGER.error("Compaction of Delete Delta Files failed. The complete file path is "
-          + fullBlockFilePath);
-      throw new IOException();
     }
     return resultList;
   }
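
The pattern applied throughout this file is the same: treat a null (or empty) array returned by getDeleteDeltaFilesList as "no delta files" rather than dereferencing it. A hedged sketch of the defensive shape, independent of the CarbonData types; the parsing helper is hypothetical, the real code uses CarbonTablePath.DataFileUtil:

  // Illustrative only: guard a possibly-null array before iterating over it.
  class DeltaFileCounter {
    static int countUniqueTasks(String[] deleteDeltaFiles) {
      java.util.Set<String> uniqueBlocks = new java.util.HashSet<>();
      if (deleteDeltaFiles != null) {
        for (String fileName : deleteDeltaFiles) {
          // e.g. "part-0-3-1481084721319.carbondata" -> task/timestamp key "3-1481084721319"
          uniqueBlocks.add(taskAndTimestampOf(fileName));
        }
      }
      return uniqueBlocks.size();                   // 0 when the lookup returned null
    }

    private static String taskAndTimestampOf(String fileName) {
      // hypothetical parsing helper for the sketch
      String base = fileName.replace(".carbondata", "");
      return base.substring(base.indexOf('-', base.indexOf('-') + 1) + 1);
    }
  }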

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java b/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java
index 442f1c5..9a3258e 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/merger/RowResultMergerProcessor.java
@@ -138,21 +138,23 @@ public class RowResultMergerProcessor extends AbstractResultProcessor {
       // if record holder is not empty then iterator the slice holder from
       // heap
       iterator = this.recordHolderHeap.poll();
-      while (true) {
-        Object[] convertedRow = iterator.next();
-        if (null == convertedRow) {
-          iterator.close();
-          break;
-        }
-        // do it only once
-        if (!isDataPresent) {
-          dataHandler.initialise();
-          isDataPresent = true;
-        }
-        addRow(convertedRow);
-        // check if leaf contains no record
-        if (!iterator.hasNext()) {
-          break;
+      if (null != iterator) {
+        while (true) {
+          Object[] convertedRow = iterator.next();
+          if (null == convertedRow) {
+            iterator.close();
+            break;
+          }
+          // do it only once
+          if (!isDataPresent) {
+            dataHandler.initialise();
+            isDataPresent = true;
+          }
+          addRow(convertedRow);
+          // check if leaf contains no record
+          if (!iterator.hasNext()) {
+            break;
+          }
         }
       }
       if (isDataPresent)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/partition/impl/QueryPartitionHelper.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/partition/impl/QueryPartitionHelper.java b/processing/src/main/java/org/apache/carbondata/processing/partition/impl/QueryPartitionHelper.java
deleted file mode 100644
index b6f12a5..0000000
--- a/processing/src/main/java/org/apache/carbondata/processing/partition/impl/QueryPartitionHelper.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.processing.partition.impl;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import org.apache.carbondata.processing.partition.DataPartitioner;
-import org.apache.carbondata.processing.partition.Partition;
-
-
-public final class QueryPartitionHelper {
-  private static QueryPartitionHelper instance = new QueryPartitionHelper();
-  private Map<String, DataPartitioner> partitionerMap =
-      new HashMap<String, DataPartitioner>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
-  private Map<String, DefaultLoadBalancer> loadBalancerMap =
-      new HashMap<String, DefaultLoadBalancer>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
-
-  private QueryPartitionHelper() {
-
-  }
-
-  public static QueryPartitionHelper getInstance() {
-    return instance;
-  }
-
-  /**
-   * Get partitions applicable for query based on filters applied in query
-   */
-  public List<Partition> getPartitionsForQuery(String databaseName, String tableName) {
-    String tableUniqueName = CarbonTable.buildUniqueName(databaseName, tableName);
-
-    DataPartitioner dataPartitioner = partitionerMap.get(tableUniqueName);
-
-    return dataPartitioner.getPartitions();
-  }
-
-  public List<Partition> getAllPartitions(String databaseName, String tableName) {
-    String tableUniqueName = CarbonTable.buildUniqueName(databaseName, tableName);
-
-    DataPartitioner dataPartitioner = partitionerMap.get(tableUniqueName);
-
-    return dataPartitioner.getAllPartitions();
-  }
-
-  /**
-   * Get the node name where the partition is assigned to.
-   */
-  public String getLocation(Partition partition, String databaseName, String tableName) {
-    String tableUniqueName = CarbonTable.buildUniqueName(databaseName, tableName);
-
-    DefaultLoadBalancer loadBalancer = loadBalancerMap.get(tableUniqueName);
-    return loadBalancer.getNodeForPartitions(partition);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java
index c06819c..364515c 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/IntermediateFileMerger.java
@@ -22,6 +22,7 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.AbstractQueue;
+import java.util.NoSuchElementException;
 import java.util.PriorityQueue;
 import java.util.concurrent.Callable;
 
@@ -237,7 +238,11 @@ public class IntermediateFileMerger implements Callable<Void> {
    * @throws CarbonSortKeyAndGroupByException
    */
   private IntermediateSortTempRow next() throws CarbonSortKeyAndGroupByException {
-    return getSortedRecordFromFile();
+    if (hasNext()) {
+      return getSortedRecordFromFile();
+    } else {
+      throw new NoSuchElementException("No more elements to return");
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
index 1a839a2..09c1920 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/sort/sortdata/SingleThreadFinalSortFilesMerger.java
@@ -23,6 +23,7 @@ import java.util.AbstractQueue;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.NoSuchElementException;
 import java.util.PriorityQueue;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
@@ -233,8 +234,12 @@ public class SingleThreadFinalSortFilesMerger extends CarbonIterator<Object[]> {
    * @throws CarbonSortKeyAndGroupByException
    */
   public Object[] next() {
-    IntermediateSortTempRow sortTempRow = getSortedRecordFromFile();
-    return sortStepRowHandler.convertIntermediateSortTempRowTo3Parted(sortTempRow);
+    if (hasNext()) {
+      IntermediateSortTempRow sortTempRow = getSortedRecordFromFile();
+      return sortStepRowHandler.convertIntermediateSortTempRowTo3Parted(sortTempRow);
+    } else {
+      throw new NoSuchElementException("No more elements to return");
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
index 8115f97..9d0c933 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
@@ -43,7 +43,6 @@ import org.apache.carbondata.core.metadata.converter.SchemaConverter;
 import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
 import org.apache.carbondata.core.metadata.index.BlockIndexInfo;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.core.util.CarbonMergerUtil;
 import org.apache.carbondata.core.util.CarbonMetadataUtil;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonThreadFactory;
@@ -56,8 +55,6 @@ import org.apache.carbondata.format.IndexHeader;
 import org.apache.carbondata.processing.datamap.DataMapWriterListener;
 import org.apache.carbondata.processing.store.CarbonFactDataHandlerModel;
 
-import org.apache.commons.lang3.ArrayUtils;
-
 public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
 
   private static final LogService LOGGER =
@@ -168,13 +165,13 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
             CarbonCommonConstants.CARBON_BLOCK_META_RESERVED_SPACE_DEFAULT));
     this.blockSizeThreshold =
         fileSizeInBytes - (fileSizeInBytes * spaceReservedForBlockMetaSize) / 100;
-    LOGGER.info("Total file size: " + fileSizeInBytes + " and dataBlock Size: " +
-        blockSizeThreshold);
+    LOGGER
+        .info("Total file size: " + fileSizeInBytes + " and dataBlock Size: " + blockSizeThreshold);
 
     // whether to directly write fact data to HDFS
-    String directlyWriteData2Hdfs = propInstance.getProperty(
-        CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_HDFS,
-        CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_HDFS_DEFAULT);
+    String directlyWriteData2Hdfs = propInstance
+        .getProperty(CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_HDFS,
+            CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_HDFS_DEFAULT);
     this.enableDirectlyWriteData2Hdfs = "TRUE".equalsIgnoreCase(directlyWriteData2Hdfs);
 
     if (enableDirectlyWriteData2Hdfs) {
@@ -189,22 +186,9 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
     // in case of compaction we will pass the cardinality.
     this.localCardinality = this.model.getColCardinality();
 
-    //TODO: We should delete the levelmetadata file after reading here.
-    // so only data loading flow will need to read from cardinality file.
-    if (null == this.localCardinality) {
-      this.localCardinality = CarbonMergerUtil
-          .getCardinalityFromLevelMetadata(this.model.getStoreLocation(),
-              this.model.getTableName());
-      List<Integer> cardinalityList = new ArrayList<Integer>();
-      thriftColumnSchemaList = getColumnSchemaListAndCardinality(cardinalityList, localCardinality,
-          this.model.getWrapperColumnSchema());
-      localCardinality =
-          ArrayUtils.toPrimitive(cardinalityList.toArray(new Integer[cardinalityList.size()]));
-    } else { // for compaction case
-      List<Integer> cardinalityList = new ArrayList<Integer>();
-      thriftColumnSchemaList = getColumnSchemaListAndCardinality(cardinalityList, localCardinality,
-          this.model.getWrapperColumnSchema());
-    }
+    List<Integer> cardinalityList = new ArrayList<Integer>();
+    thriftColumnSchemaList = getColumnSchemaListAndCardinality(cardinalityList, localCardinality,
+        this.model.getWrapperColumnSchema());
     this.numberCompressor = new NumberCompressor(Integer.parseInt(CarbonProperties.getInstance()
         .getProperty(CarbonCommonConstants.BLOCKLET_SIZE,
             CarbonCommonConstants.BLOCKLET_SIZE_DEFAULT_VAL)));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/main/java/org/apache/carbondata/processing/util/CarbonQueryUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonQueryUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonQueryUtil.java
index 4abdf3c..8e23489 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonQueryUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonQueryUtil.java
@@ -17,19 +17,8 @@
 
 package org.apache.carbondata.processing.util;
 
-import java.util.ArrayList;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.scan.model.QueryProjection;
-import org.apache.carbondata.processing.partition.Partition;
-import org.apache.carbondata.processing.partition.impl.DefaultLoadBalancer;
-import org.apache.carbondata.processing.partition.impl.PartitionMultiFileImpl;
-import org.apache.carbondata.processing.partition.impl.QueryPartitionHelper;
-import org.apache.carbondata.processing.splits.TableSplit;
 
 import org.apache.commons.lang3.StringUtils;
 
@@ -43,58 +32,6 @@ public class CarbonQueryUtil {
   }
 
   /**
-   * It creates the one split for each region server.
-   */
-  public static synchronized TableSplit[] getTableSplits(String databaseName, String tableName,
-      QueryProjection queryPlan) {
-
-    //Just create splits depends on locations of region servers
-    List<Partition> allPartitions = null;
-    if (queryPlan == null) {
-      allPartitions =
-          QueryPartitionHelper.getInstance().getAllPartitions(databaseName, tableName);
-    } else {
-      allPartitions =
-          QueryPartitionHelper.getInstance().getPartitionsForQuery(databaseName, tableName);
-    }
-    TableSplit[] splits = new TableSplit[allPartitions.size()];
-    for (int i = 0; i < splits.length; i++) {
-      splits[i] = new TableSplit();
-      List<String> locations = new ArrayList<String>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
-      Partition partition = allPartitions.get(i);
-      String location = QueryPartitionHelper.getInstance()
-          .getLocation(partition, databaseName, tableName);
-      locations.add(location);
-      splits[i].setPartition(partition);
-      splits[i].setLocations(locations);
-    }
-
-    return splits;
-  }
-
-  /**
-   * It creates the one split for each region server.
-   */
-  public static TableSplit[] getTableSplitsForDirectLoad(String sourcePath) {
-
-    //Just create splits depends on locations of region servers
-    DefaultLoadBalancer loadBalancer = null;
-    List<Partition> allPartitions = getAllFilesForDataLoad(sourcePath);
-    loadBalancer = new DefaultLoadBalancer(new ArrayList<String>(), allPartitions);
-    TableSplit[] tblSplits = new TableSplit[allPartitions.size()];
-    for (int i = 0; i < tblSplits.length; i++) {
-      tblSplits[i] = new TableSplit();
-      List<String> locations = new ArrayList<String>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
-      Partition partition = allPartitions.get(i);
-      String location = loadBalancer.getNodeForPartitions(partition);
-      locations.add(location);
-      tblSplits[i].setPartition(partition);
-      tblSplits[i].setLocations(locations);
-    }
-    return tblSplits;
-  }
-
-  /**
    * split sourcePath by comma
    */
   public static void splitFilePath(String sourcePath, List<String> partitionsFiles,
@@ -104,21 +41,4 @@ public class CarbonQueryUtil {
       Collections.addAll(partitionsFiles, files);
     }
   }
-
-  private static List<Partition> getAllFilesForDataLoad(String sourcePath) {
-    List<String> files = new ArrayList<String>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
-    splitFilePath(sourcePath, files, CarbonCommonConstants.COMMA);
-    List<Partition> partitionList =
-        new ArrayList<Partition>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
-    Map<Integer, List<String>> partitionFiles = new HashMap<Integer, List<String>>();
-
-    partitionFiles.put(0, new ArrayList<String>(CarbonCommonConstants.CONSTANT_SIZE_TEN));
-    partitionList.add(new PartitionMultiFileImpl(0 + "", partitionFiles.get(0)));
-
-    for (int i = 0; i < files.size(); i++) {
-      partitionFiles.get(0).add(files.get(i));
-    }
-    return partitionList;
-  }
-
 }


[20/50] [abbrv] carbondata git commit: [HOTFIX] Implementing getMemorySize in BlockletDataMapIndexWrapper

Posted by gv...@apache.org.
[HOTFIX] Implementing getMemorySize in BlockletDataMapIndexWrapper

This closes #2330


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/16ed99a1
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/16ed99a1
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/16ed99a1

Branch: refs/heads/spark-2.3
Commit: 16ed99a119f95b70a01c95120b20e632405d0a54
Parents: 9aa3a8c
Author: dhatchayani <dh...@gmail.com>
Authored: Tue May 22 12:33:24 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue May 22 22:03:31 2018 +0530

----------------------------------------------------------------------
 .../core/indexstore/BlockletDataMapIndexStore.java        |  9 ++-------
 .../core/indexstore/BlockletDataMapIndexWrapper.java      | 10 +++++++++-
 2 files changed, 11 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/16ed99a1/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java
index ba4193e..db49976 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java
@@ -78,7 +78,6 @@ public class BlockletDataMapIndexStore
       try {
         SegmentIndexFileStore indexFileStore = new SegmentIndexFileStore();
         Set<String> filesRead = new HashSet<>();
-        long memorySize = 0L;
         String segmentFilePath = identifier.getIndexFilePath();
         Map<String, BlockMetaInfo> carbonDataFileBlockMetaInfoMapping = BlockletDataMapUtil
             .createCarbonDataFileBlockMetaInfoMapping(segmentFilePath);
@@ -89,7 +88,6 @@ public class BlockletDataMapIndexStore
                   carbonDataFileBlockMetaInfoMapping);
           BlockletDataMap blockletDataMap =
               loadAndGetDataMap(identifier, indexFileStore, blockMetaInfoMap);
-          memorySize += blockletDataMap.getMemorySize();
           dataMaps.add(blockletDataMap);
           blockletDataMapIndexWrapper = new BlockletDataMapIndexWrapper(dataMaps);
         } else {
@@ -103,13 +101,12 @@ public class BlockletDataMapIndexStore
                     carbonDataFileBlockMetaInfoMapping);
             BlockletDataMap blockletDataMap =
                 loadAndGetDataMap(blockIndexUniqueIdentifier, indexFileStore, blockMetaInfoMap);
-            memorySize += blockletDataMap.getMemorySize();
             dataMaps.add(blockletDataMap);
           }
           blockletDataMapIndexWrapper = new BlockletDataMapIndexWrapper(dataMaps);
         }
         lruCache.put(identifier.getUniqueTableSegmentIdentifier(), blockletDataMapIndexWrapper,
-            memorySize);
+            blockletDataMapIndexWrapper.getMemorySize());
       } catch (Throwable e) {
         // clear all the memory used by datamaps loaded
         for (DataMap dataMap : dataMaps) {
@@ -189,7 +186,6 @@ public class BlockletDataMapIndexStore
     if (lock == null) {
       lock = addAndGetSegmentLock(uniqueTableSegmentIdentifier);
     }
-    long memorySize = 0L;
     // As dataMap will use unsafe memory, it is not recommended to overwrite an existing entry
     // as in that case clearing unsafe memory need to be taken card. If at all datamap entry
     // in the cache need to be overwritten then use the invalidate interface
@@ -201,10 +197,9 @@ public class BlockletDataMapIndexStore
           try {
             for (BlockletDataMap blockletDataMap: dataMaps) {
               blockletDataMap.convertToUnsafeDMStore();
-              memorySize += blockletDataMap.getMemorySize();
             }
             lruCache.put(tableBlockIndexUniqueIdentifier.getUniqueTableSegmentIdentifier(), wrapper,
-                memorySize);
+                wrapper.getMemorySize());
           } catch (Throwable e) {
             // clear all the memory acquired by data map in case of any failure
             for (DataMap blockletDataMap : dataMaps) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/16ed99a1/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexWrapper.java b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexWrapper.java
index d674cb4..ea85930 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexWrapper.java
@@ -30,8 +30,16 @@ public class BlockletDataMapIndexWrapper implements Cacheable, Serializable {
 
   private List<BlockletDataMap> dataMaps;
 
+  // size of the wrapper. basically the total size of the datamaps this wrapper is holding
+  private long wrapperSize;
+
   public BlockletDataMapIndexWrapper(List<BlockletDataMap> dataMaps) {
     this.dataMaps = dataMaps;
+    this.wrapperSize = 0L;
+    // add the size of each and every datamap in this wrapper
+    for (BlockletDataMap dataMap : dataMaps) {
+      this.wrapperSize += dataMap.getMemorySize();
+    }
   }
 
   @Override public long getFileTimeStamp() {
@@ -43,7 +51,7 @@ public class BlockletDataMapIndexWrapper implements Cacheable, Serializable {
   }
 
   @Override public long getMemorySize() {
-    return 0;
+    return wrapperSize;
   }
 
   public List<BlockletDataMap> getDataMaps() {
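
With getMemorySize() now returning the summed size of the wrapped datamaps, the LRU cache in BlockletDataMapIndexStore (see the change above) can charge the entry with the wrapper's own size instead of maintaining a separate counter. A hedged sketch of the idea using a simplified interface rather than the real Cacheable one:

  import java.util.List;

  // Illustrative only: a wrapper that reports the summed size of the objects it holds.
  interface Sized {
    long getMemorySize();
  }

  class SizedWrapper implements Sized {
    private final List<Sized> parts;
    private final long wrapperSize;

    SizedWrapper(List<Sized> parts) {
      this.parts = parts;
      long total = 0L;
      for (Sized part : parts) {
        total += part.getMemorySize();   // same summation the constructor above performs
      }
      this.wrapperSize = total;
    }

    @Override public long getMemorySize() {
      return wrapperSize;                // the cache can use this value directly
    }
  }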


[44/50] [abbrv] carbondata git commit: [Hotfix] Upgrade dev version to 1.5.0-SNAPSHOT and fix some small issues

Posted by gv...@apache.org.
[Hotfix] Upgrade dev version to 1.5.0-SNAPSHOT and fix some small issues

1. Upgrade dev version to 1.5.0-SNAPSHOT
2. Fix the carbon-spark-sql assembly jar name pattern
3. Remove the hadoop-2.2.0 profile

This closes #2359


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/4bb7e278
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/4bb7e278
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/4bb7e278

Branch: refs/heads/spark-2.3
Commit: 4bb7e2785f961b2697b7de2a3a2556c25a5bb6b3
Parents: 56bf4e4
Author: chenliang613 <ch...@huawei.com>
Authored: Sat Jun 2 12:58:57 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Sat Jun 2 12:39:10 2018 +0530

----------------------------------------------------------------------
 assembly/pom.xml                              | 2 +-
 bin/carbon-spark-sql                          | 4 ++--
 common/pom.xml                                | 2 +-
 core/pom.xml                                  | 2 +-
 datamap/bloom/pom.xml                         | 2 +-
 datamap/examples/pom.xml                      | 2 +-
 datamap/lucene/pom.xml                        | 2 +-
 datamap/mv/core/pom.xml                       | 2 +-
 datamap/mv/plan/pom.xml                       | 2 +-
 examples/flink/pom.xml                        | 2 +-
 examples/spark2/pom.xml                       | 2 +-
 format/pom.xml                                | 2 +-
 hadoop/pom.xml                                | 2 +-
 integration/hive/pom.xml                      | 2 +-
 integration/presto/pom.xml                    | 2 +-
 integration/spark-common-cluster-test/pom.xml | 2 +-
 integration/spark-common-test/pom.xml         | 2 +-
 integration/spark-common/pom.xml              | 2 +-
 integration/spark2/pom.xml                    | 2 +-
 pom.xml                                       | 8 +-------
 processing/pom.xml                            | 2 +-
 store/sdk/pom.xml                             | 2 +-
 store/search/pom.xml                          | 2 +-
 streaming/pom.xml                             | 2 +-
 24 files changed, 25 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/assembly/pom.xml
----------------------------------------------------------------------
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 56522d0..eb3d3a9 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/bin/carbon-spark-sql
----------------------------------------------------------------------
diff --git a/bin/carbon-spark-sql b/bin/carbon-spark-sql
index 4b927d1..9916fef 100755
--- a/bin/carbon-spark-sql
+++ b/bin/carbon-spark-sql
@@ -36,13 +36,13 @@ export CARBON_SOURCE="$(cd "`dirname "$0"`"/..; pwd)"
 ASSEMBLY_DIR="$CARBON_SOURCE/assembly/target/scala-2.11"
 
 GREP_OPTIONS=
-num_jars="$(ls -1 "$ASSEMBLY_DIR" | grep "^carbondata.*hadoop.*\.jar$" | wc -l)"
+num_jars="$(ls -1 "$ASSEMBLY_DIR" | grep "^apache-carbondata.*\.jar$" | wc -l)"
 if [ "$num_jars" -eq "0" -a -z "$ASSEMBLY_DIR" ]; then
   echo "Failed to find Carbondata assembly in $ASSEMBLY_DIR." 1>&2
   echo "You need to build Carbondata before running this program." 1>&2
   exit 1
 fi
-ASSEMBLY_JARS="$(ls -1 "$ASSEMBLY_DIR" | grep "^carbondata.*hadoop.*\.jar$" || true)"
+ASSEMBLY_JARS="$(ls -1 "$ASSEMBLY_DIR" | grep "^apache-carbondata.*\.jar$" || true)"
 if [ "$num_jars" -gt "1" ]; then
   echo "Found multiple Carbondata assembly jars in $ASSEMBLY_DIR:" 1>&2
   echo "$ASSEMBLY_JARS" 1>&2

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index 433d575..1209388 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index d9c756e..7d87037 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/datamap/bloom/pom.xml
----------------------------------------------------------------------
diff --git a/datamap/bloom/pom.xml b/datamap/bloom/pom.xml
index b7db969..f13d477 100644
--- a/datamap/bloom/pom.xml
+++ b/datamap/bloom/pom.xml
@@ -6,7 +6,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/datamap/examples/pom.xml
----------------------------------------------------------------------
diff --git a/datamap/examples/pom.xml b/datamap/examples/pom.xml
index 309828d..be65529 100644
--- a/datamap/examples/pom.xml
+++ b/datamap/examples/pom.xml
@@ -24,7 +24,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/datamap/lucene/pom.xml
----------------------------------------------------------------------
diff --git a/datamap/lucene/pom.xml b/datamap/lucene/pom.xml
index bdb8876..c5c7555 100644
--- a/datamap/lucene/pom.xml
+++ b/datamap/lucene/pom.xml
@@ -6,7 +6,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/datamap/mv/core/pom.xml
----------------------------------------------------------------------
diff --git a/datamap/mv/core/pom.xml b/datamap/mv/core/pom.xml
index 99a8e22..54960a1 100644
--- a/datamap/mv/core/pom.xml
+++ b/datamap/mv/core/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/datamap/mv/plan/pom.xml
----------------------------------------------------------------------
diff --git a/datamap/mv/plan/pom.xml b/datamap/mv/plan/pom.xml
index 6a36fc5..fcf0e51 100644
--- a/datamap/mv/plan/pom.xml
+++ b/datamap/mv/plan/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/examples/flink/pom.xml
----------------------------------------------------------------------
diff --git a/examples/flink/pom.xml b/examples/flink/pom.xml
index b783435..6af8f19 100644
--- a/examples/flink/pom.xml
+++ b/examples/flink/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/examples/spark2/pom.xml
----------------------------------------------------------------------
diff --git a/examples/spark2/pom.xml b/examples/spark2/pom.xml
index 196bc16..7a55333 100644
--- a/examples/spark2/pom.xml
+++ b/examples/spark2/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/format/pom.xml
----------------------------------------------------------------------
diff --git a/format/pom.xml b/format/pom.xml
index 41197cf..039b0a0 100644
--- a/format/pom.xml
+++ b/format/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/hadoop/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop/pom.xml b/hadoop/pom.xml
index 07883cd..1d7fab3 100644
--- a/hadoop/pom.xml
+++ b/hadoop/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/integration/hive/pom.xml
----------------------------------------------------------------------
diff --git a/integration/hive/pom.xml b/integration/hive/pom.xml
index 68245db..c144353 100644
--- a/integration/hive/pom.xml
+++ b/integration/hive/pom.xml
@@ -22,7 +22,7 @@
     <parent>
         <groupId>org.apache.carbondata</groupId>
         <artifactId>carbondata-parent</artifactId>
-        <version>1.4.0-SNAPSHOT</version>
+        <version>1.5.0-SNAPSHOT</version>
         <relativePath>../../pom.xml</relativePath>
     </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/integration/presto/pom.xml
----------------------------------------------------------------------
diff --git a/integration/presto/pom.xml b/integration/presto/pom.xml
index c2d941a..bfa05f9 100644
--- a/integration/presto/pom.xml
+++ b/integration/presto/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/integration/spark-common-cluster-test/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/pom.xml b/integration/spark-common-cluster-test/pom.xml
index d8aecc2..87e08d9 100644
--- a/integration/spark-common-cluster-test/pom.xml
+++ b/integration/spark-common-cluster-test/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/integration/spark-common-test/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/pom.xml b/integration/spark-common-test/pom.xml
index ae78523..b8629bf 100644
--- a/integration/spark-common-test/pom.xml
+++ b/integration/spark-common-test/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/integration/spark-common/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark-common/pom.xml b/integration/spark-common/pom.xml
index f011a75..599c6c8 100644
--- a/integration/spark-common/pom.xml
+++ b/integration/spark-common/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/integration/spark2/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark2/pom.xml b/integration/spark2/pom.xml
index 73d48ef..2bce694 100644
--- a/integration/spark2/pom.xml
+++ b/integration/spark2/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index e9551c0..1413fd1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -35,7 +35,7 @@
   <inceptionYear>2016</inceptionYear>
   <packaging>pom</packaging>
 
-  <version>1.4.0-SNAPSHOT</version>
+  <version>1.5.0-SNAPSHOT</version>
 
   <licenses>
     <license>
@@ -464,12 +464,6 @@
       </build>
     </profile>
     <profile>
-      <id>hadoop-2.2.0</id>
-      <properties>
-        <hadoop.version>2.2.0</hadoop.version>
-      </properties>
-    </profile>
-    <profile>
       <id>spark-2.1</id>
       <properties>
         <spark.version>2.1.0</spark.version>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/processing/pom.xml
----------------------------------------------------------------------
diff --git a/processing/pom.xml b/processing/pom.xml
index b1a103a..ab7c96c 100644
--- a/processing/pom.xml
+++ b/processing/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/store/sdk/pom.xml
----------------------------------------------------------------------
diff --git a/store/sdk/pom.xml b/store/sdk/pom.xml
index af0d079..fbeb562 100644
--- a/store/sdk/pom.xml
+++ b/store/sdk/pom.xml
@@ -7,7 +7,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/store/search/pom.xml
----------------------------------------------------------------------
diff --git a/store/search/pom.xml b/store/search/pom.xml
index 9d833f2..6acbbfb 100644
--- a/store/search/pom.xml
+++ b/store/search/pom.xml
@@ -7,7 +7,7 @@
   <parent>
     <groupId>org.apache.carbondata</groupId>
     <artifactId>carbondata-parent</artifactId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4bb7e278/streaming/pom.xml
----------------------------------------------------------------------
diff --git a/streaming/pom.xml b/streaming/pom.xml
index b8c447d..0883f70 100644
--- a/streaming/pom.xml
+++ b/streaming/pom.xml
@@ -4,7 +4,7 @@
   <parent>
     <artifactId>carbondata-parent</artifactId>
     <groupId>org.apache.carbondata</groupId>
-    <version>1.4.0-SNAPSHOT</version>
+    <version>1.5.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
   <modelVersion>4.0.0</modelVersion>


[11/50] [abbrv] carbondata git commit: [CARBONDATA-2493] DataType.equals() fails for complex types

Posted by gv...@apache.org.
[CARBONDATA-2493] DataType.equals() fails for complex types

This closes #2319


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/33941281
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/33941281
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/33941281

Branch: refs/heads/spark-2.3
Commit: 33941281ea3b0e9a3c623179642dd1f41b263551
Parents: 7cba44b
Author: ajantha-bhat <aj...@gmail.com>
Authored: Fri May 18 13:21:33 2018 +0530
Committer: Venkata Ramana G <ra...@huawei.com>
Committed: Mon May 21 23:54:08 2018 +0530

----------------------------------------------------------------------
 .../core/metadata/datatype/ArrayType.java       | 10 ++++-
 .../core/metadata/datatype/DataType.java        | 24 ++++++++++++
 .../core/metadata/datatype/DecimalType.java     | 11 ++++--
 .../core/metadata/datatype/MapType.java         | 34 +++++++++++++++++
 .../core/metadata/datatype/StructField.java     | 40 ++++++++++++++++++++
 .../core/metadata/datatype/StructType.java      |  9 ++++-
 6 files changed, 120 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/33941281/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java
index 94a4e89..9dea241 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java
@@ -39,10 +39,14 @@ public class ArrayType extends DataType {
     if (obj == null) {
       return false;
     }
-    if (!(obj instanceof ArrayType)) {
+    if (getClass() != obj.getClass()) {
       return false;
     }
-    if (!this.getName().equalsIgnoreCase(((ArrayType) obj).getName())) {
+    ArrayType other = (ArrayType) obj;
+    if (!this.getName().equalsIgnoreCase(other.getName())) {
+      return false;
+    }
+    if (!this.getElementType().equals(other.getElementType())) {
       return false;
     }
     return true;
@@ -53,10 +57,12 @@ public class ArrayType extends DataType {
     final int prime = 31;
     int result = 1;
     result = prime * result + getName().hashCode();
+    result = prime * result + getElementType().hashCode();
     return result;
   }
 
   public DataType getElementType() {
     return elementType;
   }
+
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/33941281/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DataType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DataType.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DataType.java
index 8e08436..d71f984 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DataType.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DataType.java
@@ -115,4 +115,28 @@ public class DataType implements Serializable {
     }
   }
 
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + getName().hashCode();
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    if (!this.getName().equalsIgnoreCase(((DataType) obj).getName())) {
+      return false;
+    }
+    return true;
+  }
 }
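
The practical effect of these overrides is that two complex types compare equal only when their full shape matches, and equal types produce the same hash code. A hedged usage sketch, assuming the DataTypes factory method createArrayType and the constants INT and STRING are available in this module:

  import org.apache.carbondata.core.metadata.datatype.DataType;
  import org.apache.carbondata.core.metadata.datatype.DataTypes;

  // Illustrative only: equals() now compares element types, not just the type name.
  public class ComplexTypeEqualsSketch {
    public static void main(String[] args) {
      DataType arrayOfInt    = DataTypes.createArrayType(DataTypes.INT);
      DataType arrayOfString = DataTypes.createArrayType(DataTypes.STRING);
      DataType anotherInts   = DataTypes.createArrayType(DataTypes.INT);

      System.out.println(arrayOfInt.equals(arrayOfString)); // false: same name, different element type
      System.out.println(arrayOfInt.equals(anotherInts));   // true
      System.out.println(arrayOfInt.hashCode() == anotherInts.hashCode()); // true, contract preserved
    }
  }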

http://git-wip-us.apache.org/repos/asf/carbondata/blob/33941281/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java
index 8536222..a7f7a4e 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java
@@ -37,16 +37,17 @@ public class DecimalType extends DataType {
     if (obj == null) {
       return false;
     }
-    if (!(obj instanceof DecimalType)) {
+    if (getClass() != obj.getClass()) {
       return false;
     }
-    if (!this.getName().equalsIgnoreCase(((DecimalType) obj).getName())) {
+    DecimalType other = (DecimalType)obj;
+    if (!this.getName().equalsIgnoreCase(other.getName())) {
       return false;
     }
-    if (this.precision != ((DecimalType) obj).precision) {
+    if (this.precision != other.precision) {
       return false;
     }
-    if (this.scale != ((DecimalType) obj).scale) {
+    if (this.scale != other.scale) {
       return false;
     }
     return true;
@@ -57,6 +58,8 @@ public class DecimalType extends DataType {
     final int prime = 31;
     int result = 1;
     result = prime * result + getName().hashCode();
+    result = prime * result + getPrecision();
+    result = prime * result + getScale();
     return result;
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/33941281/core/src/main/java/org/apache/carbondata/core/metadata/datatype/MapType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/MapType.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/MapType.java
index 69d49b8..47e536a 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/MapType.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/MapType.java
@@ -32,4 +32,38 @@ public class MapType extends DataType {
   public boolean isComplexType() {
     return true;
   }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    MapType other = (MapType) obj;
+    if (!this.getName().equalsIgnoreCase(other.getName())) {
+      return false;
+    }
+    if (!this.keyType.equals(other.keyType)) {
+      return false;
+    }
+    if (!this.valueType.equals(other.valueType)) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + getName().hashCode();
+    result = prime * result + keyType.hashCode();
+    result = prime * result + valueType.hashCode();
+    return result;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/33941281/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructField.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructField.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructField.java
index bfca057..c076f69 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructField.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructField.java
@@ -54,4 +54,44 @@ public class StructField implements Serializable {
   public List<StructField> getChildren() {
     return children;
   }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + fieldName.hashCode();
+    result = prime * result + dataType.hashCode();
+    result = prime * result + ((children == null) ? 0 : children.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (getClass() != obj.getClass()) {
+      return false;
+    }
+    StructField other = (StructField) obj;
+    if (!this.fieldName.equalsIgnoreCase(other.fieldName)) {
+      return false;
+    }
+    if (!this.dataType.equals(other.dataType)) {
+      return false;
+    }
+    if (children == null) {
+      if (other.children != null) {
+        return false;
+      }
+    } else if (other.children == null) {
+      return false;
+    } else if (!children.equals(other.children)) {
+      return false;
+    }
+    return true;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/33941281/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java
index 90b7374..e8559b2 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java
@@ -41,10 +41,14 @@ public class StructType extends DataType {
     if (obj == null) {
       return false;
     }
-    if (!(obj instanceof StructType)) {
+    if (getClass() != obj.getClass()) {
       return false;
     }
-    if (!this.getName().equalsIgnoreCase(((StructType) obj).getName())) {
+    StructType other = (StructType) obj;
+    if (!this.getName().equalsIgnoreCase(other.getName())) {
+      return false;
+    }
+    if (!this.getFields().equals(other.getFields())) {
       return false;
     }
     return true;
@@ -55,6 +59,7 @@ public class StructType extends DataType {
     final int prime = 31;
     int result = 1;
     result = prime * result + getName().hashCode();
+    result = prime * result + getFields().hashCode();
     return result;
   }
 


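For reference, a minimal sketch (not part of the commit above) of what the value-based equals/hashCode overrides enable. It assumes the DataTypes.createArrayType(elementType) factory method; createDecimalType(precision, scale) is documented in the SDK guide.

```
// Sketch only: logically identical type descriptions now compare equal and
// produce the same hash code, so they can safely be used in sets or as map keys.
import org.apache.carbondata.core.metadata.datatype.DataTypes

object DataTypeEqualityCheck {
  def main(args: Array[String]): Unit = {
    val d1 = DataTypes.createDecimalType(10, 2)
    val d2 = DataTypes.createDecimalType(10, 2)
    val d3 = DataTypes.createDecimalType(10, 3)
    println(d1.equals(d2) && d1.hashCode == d2.hashCode) // true: same precision and scale
    println(d1.equals(d3))                               // false: scale differs

    val a1 = DataTypes.createArrayType(DataTypes.INT)
    val a2 = DataTypes.createArrayType(DataTypes.INT)
    val a3 = DataTypes.createArrayType(DataTypes.STRING)
    println(a1.equals(a2))                               // true: element type matches
    println(a1.equals(a3))                               // false: element type differs
  }
}
```
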
[42/50] [abbrv] carbondata git commit: [CARBONDATA-2355] Support run SQL on carbondata files directly

Posted by gv...@apache.org.
[CARBONDATA-2355] Support run SQL on carbondata files directly

Support running SQL on carbondata files directly

This closes #2181


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/9469e6bd
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/9469e6bd
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/9469e6bd

Branch: refs/heads/spark-2.3
Commit: 9469e6bd4da5c75ba836fb550112cec01f666544
Parents: 4d22ddc
Author: xubo245 <60...@qq.com>
Authored: Wed Apr 18 17:34:12 2018 +0800
Committer: chenliang613 <ch...@huawei.com>
Committed: Fri Jun 1 18:01:33 2018 +0800

----------------------------------------------------------------------
 docs/sdk-guide.md                               |   7 ++
 .../carbondata/examples/DirectSQLExample.scala  | 100 +++++++++++++++++++
 .../carbondata/examples/S3UsingSDkExample.scala |   2 +-
 ...FileInputFormatWithExternalCarbonTable.scala |   2 +-
 ...tCreateTableUsingSparkCarbonFileFormat.scala |  30 +++++-
 .../TestNonTransactionalCarbonTable.scala       |   2 +-
 ...ransactionalCarbonTableWithComplexType.scala |   2 +-
 ...tSparkCarbonFileFormatWithSparkSession.scala |   2 +-
 .../datasources/SparkCarbonFileFormat.scala     |  26 ++++-
 9 files changed, 164 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/9469e6bd/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 360516a..ec70919 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -128,7 +128,14 @@ Each of SQL data types are mapped into data types of SDK. Following are the mapp
 | STRING | DataTypes.STRING |
 | DECIMAL | DataTypes.createDecimalType(precision, scale) |
 
+## Run SQL on files directly
+Instead of creating a table and querying it, you can also query the carbondata file directly with SQL.
 
+### Example
+```
+SELECT * FROM carbonfile.`$Path`
+```
+Find example code at [DirectSQLExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala) in the CarbonData repo.
 ## API List
 
 ### Class org.apache.carbondata.sdk.file.CarbonWriterBuilder

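As a minimal usage sketch of the pattern documented above (the SparkSession name and the output directory are placeholders; the committed DirectSQLExample below shows the complete flow):

```
// Sketch only: query SDK writer output directly, without creating a table first.
// Assumes `spark` is a CarbonSession-backed SparkSession.
val writerOutput = "/tmp/carbonFile/Fact/Part0/Segment_null" // placeholder path
spark.sql(s"SELECT * FROM carbonfile.`$writerOutput` LIMIT 10").show()
```
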
http://git-wip-us.apache.org/repos/asf/carbondata/blob/9469e6bd/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
new file mode 100644
index 0000000..a011d80
--- /dev/null
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import java.io.File
+
+import org.apache.commons.io.FileUtils
+
+import org.apache.carbondata.core.metadata.datatype.DataTypes
+import org.apache.carbondata.examples.util.ExampleUtils
+import org.apache.carbondata.sdk.file.{CarbonWriter, Field, Schema}
+
+/**
+ * Running SQL on carbon files directly
+ * No need to create table first
+ * TODO: support more than one carbon file
+ */
+object DirectSQLExample {
+
+  // prepare SDK writer output
+  def buildTestData(
+      path: String,
+      num: Int = 3,
+      persistSchema: Boolean = false): Any = {
+
+    // getCanonicalPath gives path with \, but the code expects /.
+    val writerPath = path.replace("\\", "/");
+
+    val fields: Array[Field] = new Array[Field](3)
+    fields(0) = new Field("name", DataTypes.STRING)
+    fields(1) = new Field("age", DataTypes.INT)
+    fields(2) = new Field("height", DataTypes.DOUBLE)
+
+    try {
+      val builder = CarbonWriter
+        .builder()
+        .outputPath(writerPath)
+        .isTransactionalTable(true)
+        .uniqueIdentifier(System.currentTimeMillis)
+        .withBlockSize(2)
+      if (persistSchema) {
+        builder.persistSchemaFile(true)
+      }
+      val writer = builder.buildWriterForCSVInput(new Schema(fields))
+      var i = 0
+      while (i < num) {
+        writer.write(Array[String]("robot" + i, String.valueOf(i), String.valueOf(i.toDouble / 2)))
+        i += 1
+      }
+      writer.close()
+    } catch {
+      case e: Exception => throw e
+    }
+  }
+
+  def cleanTestData(path: String): Unit = {
+    FileUtils.deleteDirectory(new File(path))
+  }
+
+  // scalastyle:off
+  def main(args: Array[String]) {
+    val carbonSession = ExampleUtils.createCarbonSession("DirectSQLExample")
+    val rootPath = new File(this.getClass.getResource("/").getPath
+      + "../../../..").getCanonicalPath
+    val path = s"$rootPath/examples/spark2/target/carbonFile/"
+
+    import carbonSession._
+    // 1. generate data file
+    cleanTestData(path)
+    buildTestData(path, 20)
+    val readPath = path + "Fact/Part0/Segment_null"
+
+    println("Running SQL on carbon files directly")
+    try {
+      // 2. run queries directly, no need to create table first
+      sql(s"""select * FROM  carbonfile.`$readPath` limit 10""".stripMargin).show()
+    } catch {
+      case e: Exception => throw e
+    } finally {
+      // 3.delete data files
+      cleanTestData(path)
+    }
+  }
+  // scalastyle:on
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9469e6bd/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
index 022b28e..1795960 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
@@ -36,7 +36,7 @@ object S3UsingSDKExample {
       num: Int = 3,
       persistSchema: Boolean = false): Any = {
 
-    // getCanonicalPath gives path with \, so code expects /. Need to handle in code ?
+    // getCanonicalPath gives path with \, but the code expects /.
     val writerPath = path.replace("\\", "/");
 
     val fields: Array[Field] = new Array[Field](3)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9469e6bd/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
index 019b915..e6d39d3 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
@@ -38,7 +38,7 @@ class TestCarbonFileInputFormatWithExternalCarbonTable extends QueryTest with Be
                             "../." +
                             "./src/test/resources/SparkCarbonFileFormat/WriterOutput/")
     .getCanonicalPath
-  //getCanonicalPath gives path with \, so code expects /. Need to handle in code ?
+  //getCanonicalPath gives path with \, but the code expects /.
   writerPath = writerPath.replace("\\", "/");
 
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9469e6bd/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
index 66be8e4..211bc8c 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
@@ -46,7 +46,7 @@ class TestCreateTableUsingSparkCarbonFileFormat extends QueryTest with BeforeAnd
                             "../." +
                             "./src/test/resources/SparkCarbonFileFormat/WriterOutput/")
     .getCanonicalPath
-  //getCanonicalPath gives path with \, so code expects /. Need to handle in code ?
+  //getCanonicalPath gives path with \, but the code expects /.
   writerPath = writerPath.replace("\\", "/");
 
   val filePath = writerPath + "/Fact/Part0/Segment_null/"
@@ -153,6 +153,34 @@ class TestCreateTableUsingSparkCarbonFileFormat extends QueryTest with BeforeAnd
     cleanTestData()
   }
 
+  test("Running SQL directly and read carbondata files (sdk Writer Output) using the SparkCarbonFileFormat ") {
+    buildTestData(false)
+    assert(new File(filePath).exists())
+    sql("DROP TABLE IF EXISTS sdkOutputTable")
+
+    //data source file format
+    if (sqlContext.sparkContext.version.startsWith("2.1")) {
+      //data source file format
+      sql(s"""CREATE TABLE sdkOutputTable USING carbonfile OPTIONS (PATH '$filePath') """)
+    } else if (sqlContext.sparkContext.version.startsWith("2.2")) {
+      //data source file format
+      sql(
+        s"""CREATE TABLE sdkOutputTable USING carbonfile LOCATION
+           |'$filePath' """.stripMargin)
+    } else {
+      // TO DO
+    }
+
+    val directSQL = sql(s"""select * FROM  carbonfile.`$filePath`""".stripMargin)
+    directSQL.show(false)
+    checkAnswer(sql("select * from sdkOutputTable"), directSQL)
+
+    sql("DROP TABLE sdkOutputTable")
+    // drop table should not delete the files
+    assert(new File(filePath).exists())
+    cleanTestData()
+  }
+
 
   test("should not allow to alter datasource carbontable ") {
     buildTestData(false)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9469e6bd/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index 61b37d5..0083733 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -55,7 +55,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
                             "../." +
                             "./target/SparkCarbonFileFormat/WriterOutput/")
     .getCanonicalPath
-  //getCanonicalPath gives path with \, so code expects /. Need to handle in code ?
+  //getCanonicalPath gives path with \, but the code expects /.
   writerPath = writerPath.replace("\\", "/")
 
   def buildTestDataSingleFile(): Any = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9469e6bd/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala
index d4de428..19aaf72 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala
@@ -39,7 +39,7 @@ class TestNonTransactionalCarbonTableWithComplexType extends QueryTest with Befo
                             "../." +
                             "./src/test/resources/SparkCarbonFileFormat/WriterOutput/")
     .getCanonicalPath
-  //getCanonicalPath gives path with \, so code expects /. Need to handle in code ?
+  //getCanonicalPath gives path with \, but the code expects /.
   writerPath = writerPath.replace("\\", "/")
 
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9469e6bd/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala
index 54b23a5..79b64ae 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala
@@ -36,7 +36,7 @@ object TestSparkCarbonFileFormatWithSparkSession {
                             "../." +
                             "./src/test/resources/SparkCarbonFileFormat/WriterOutput/")
     .getCanonicalPath
-  //getCanonicalPath gives path with \, so code expects /. Need to handle in code ?
+  //getCanonicalPath gives path with \, but the code expects /.
   writerPath = writerPath.replace("\\", "/");
 
   val filePath = writerPath + "/Fact/Part0/Segment_null/"

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9469e6bd/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
index 934f5c7..697eec5 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/datasources/SparkCarbonFileFormat.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.execution.datasources
 
 import java.net.URI
 
+import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
 import org.apache.hadoop.conf.Configuration
@@ -68,8 +69,23 @@ class SparkCarbonFileFormat extends FileFormat
   override def inferSchema(sparkSession: SparkSession,
       options: Map[String, String],
       files: Seq[FileStatus]): Option[StructType] = {
-    val filePaths = CarbonUtil.getFilePathExternalFilePath(
-      options.get("path").get)
+    val filePaths = if (options.isEmpty) {
+      val carbondataFiles = files.seq.filter { each =>
+        if (each.isFile) {
+          each.getPath.getName.contains(".carbondata")
+        } else {
+          false
+        }
+      }
+
+      carbondataFiles.map { each =>
+        each.getPath.toString
+      }.toList.asJava
+    } else {
+      CarbonUtil.getFilePathExternalFilePath(
+        options.get("path").get)
+    }
+
     if (filePaths.size() == 0) {
       throw new SparkException("CarbonData file is not present in the location mentioned in DDL")
     }
@@ -193,7 +209,11 @@ class SparkCarbonFileFormat extends FileFormat
         val fileSplit =
           new FileSplit(new Path(new URI(file.filePath)), file.start, file.length, Array.empty)
 
-        val path: String = options.get("path").get
+        val path: String = if (options.isEmpty) {
+          file.filePath
+        } else {
+          options.get("path").get
+        }
         val endindex: Int = path.indexOf("Fact") - 1
         val tablePath = path.substring(0, endindex)
         lazy val identifier: AbsoluteTableIdentifier = AbsoluteTableIdentifier.from(


[37/50] [abbrv] carbondata git commit: [CARBONDATA-2389] Search mode support FG datamap

Posted by gv...@apache.org.
[CARBONDATA-2389] Search mode support FG datamap

Support FG datamap in search mode

This closes #2290


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/b3384593
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/b3384593
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/b3384593

Branch: refs/heads/spark-2.3
Commit: b3384593640bd941054d37ddc364181785b994d2
Parents: 74770aa
Author: xubo245 <60...@qq.com>
Authored: Wed May 9 21:20:59 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Wed May 30 23:53:55 2018 +0800

----------------------------------------------------------------------
 .../core/constants/CarbonCommonConstants.java   |  12 +
 .../core/datamap/DataMapStoreManager.java       |  20 +-
 .../apache/carbondata/core/datamap/Segment.java |   2 +-
 .../datamap/dev/expr/AndDataMapExprWrapper.java |  16 +
 .../datamap/dev/expr/DataMapExprWrapper.java    |  13 +
 .../dev/expr/DataMapExprWrapperImpl.java        |   8 +
 .../datamap/dev/expr/OrDataMapExprWrapper.java  |  13 +
 .../LatestFilesReadCommittedScope.java          |  43 ++-
 .../core/readcommitter/ReadCommittedScope.java  |   2 +-
 .../TableStatusReadCommittedScope.java          |   2 +-
 .../lucene/LuceneDataMapFactoryBase.java        |   4 +-
 .../examples/LuceneDataMapExample.scala         |   2 -
 .../carbondata/hadoop/CarbonRecordReader.java   |   8 +-
 .../hadoop/api/CarbonInputFormat.java           |   6 +-
 .../lucene/LuceneFineGrainDataMapSuite.scala    |   1 +
 ...eneFineGrainDataMapWithSearchModeSuite.scala | 328 +++++++++++++++++++
 .../detailquery/SearchModeTestCase.scala        |  27 ++
 .../execution/command/CarbonHiveCommands.scala  |   4 +-
 .../spark/sql/optimizer/CarbonFilters.scala     |   2 +
 .../store/worker/SearchRequestHandler.java      |  37 ++-
 .../scala/org/apache/spark/rpc/Master.scala     |  13 +-
 21 files changed, 521 insertions(+), 42 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index 8ebce9e..08aa704 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -1658,6 +1658,18 @@ public final class CarbonCommonConstants {
   public static final String CARBON_SEARCH_MODE_ENABLE_DEFAULT = "false";
 
   /**
+   * It is the timeout threshold of a carbon search query
+   */
+  @CarbonProperty
+  @InterfaceStability.Unstable
+  public static final String CARBON_SEARCH_QUERY_TIMEOUT = "carbon.search.query.timeout";
+
+  /**
+   * Default value is 10 seconds
+   */
+  public static final String CARBON_SEARCH_QUERY_TIMEOUT_DEFAULT = "10s";
+
+  /**
    * The size of thread pool used for reading files in Work for search mode. By default,
    * it is number of cores in Worker
    */

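A minimal sketch of setting the new timeout property through the existing CarbonProperties API (the search-mode test added below does the same with a value of 100s):

```
// Sketch only: raise the search-mode query timeout above the 10s default.
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties

CarbonProperties.getInstance()
  .addProperty(CarbonCommonConstants.CARBON_SEARCH_QUERY_TIMEOUT, "100s")
```
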
http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
index 0fcf4cd..96d2b1c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
@@ -96,13 +96,19 @@ public final class DataMapStoreManager {
       String dbName = carbonTable.getDatabaseName();
       String tableName = carbonTable.getTableName();
       String dmName = dataMap.getDataMapSchema().getDataMapName();
-      boolean isDmVisible = sessionInfo.getSessionParams().getProperty(
-          String.format("%s%s.%s.%s", CarbonCommonConstants.CARBON_DATAMAP_VISIBLE,
-              dbName, tableName, dmName), "true").trim().equalsIgnoreCase("true");
-      if (!isDmVisible) {
-        LOGGER.warn(String.format("Ignore invisible datamap %s on table %s.%s",
-            dmName, dbName, tableName));
-        dataMapIterator.remove();
+      // TODO: need to support getting the visible status of datamap without sessionInfo in the future
+      if (sessionInfo != null) {
+        boolean isDmVisible = sessionInfo.getSessionParams().getProperty(
+            String.format("%s%s.%s.%s", CarbonCommonConstants.CARBON_DATAMAP_VISIBLE,
+                dbName, tableName, dmName), "true").trim().equalsIgnoreCase("true");
+        if (!isDmVisible) {
+          LOGGER.warn(String.format("Ignore invisible datamap %s on table %s.%s",
+              dmName, dbName, tableName));
+          dataMapIterator.remove();
+        }
+      } else {
+        String message = "Carbon session info is null";
+        LOGGER.info(message);
       }
     }
     return allDataMaps;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/core/src/main/java/org/apache/carbondata/core/datamap/Segment.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/Segment.java b/core/src/main/java/org/apache/carbondata/core/datamap/Segment.java
index 85c7176..7b63b84 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/Segment.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/Segment.java
@@ -115,7 +115,7 @@ public class Segment implements Serializable {
 
   public SegmentRefreshInfo getSegmentRefreshInfo(UpdateVO updateVo)
       throws IOException {
-    return readCommittedScope.getCommitedSegmentRefreshInfo(this, updateVo);
+    return readCommittedScope.getCommittedSegmentRefreshInfo(this, updateVo);
   }
 
   public String getSegmentNo() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/AndDataMapExprWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/AndDataMapExprWrapper.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/AndDataMapExprWrapper.java
index 1de16bc..ec674de 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/AndDataMapExprWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/AndDataMapExprWrapper.java
@@ -20,6 +20,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.DataMapLevel;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
@@ -59,6 +60,21 @@ public class AndDataMapExprWrapper implements DataMapExprWrapper {
     return andBlocklets;
   }
 
+  @Override
+  public List<ExtendedBlocklet> prune(DataMapDistributable distributable,
+      List<PartitionSpec> partitionsToPrune)
+          throws IOException {
+    List<ExtendedBlocklet> leftPrune = left.prune(distributable, partitionsToPrune);
+    List<ExtendedBlocklet> rightPrune = right.prune(distributable, partitionsToPrune);
+    List<ExtendedBlocklet> andBlocklets = new ArrayList<>();
+    for (ExtendedBlocklet blocklet : leftPrune) {
+      if (rightPrune.contains(blocklet)) {
+        andBlocklets.add(blocklet);
+      }
+    }
+    return andBlocklets;
+  }
+
   @Override public List<ExtendedBlocklet> pruneBlocklets(List<ExtendedBlocklet> blocklets)
       throws IOException {
     List<ExtendedBlocklet> leftPrune = left.pruneBlocklets(blocklets);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapper.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapper.java
index 5a04529..901cfc7 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapper.java
@@ -20,6 +20,7 @@ import java.io.IOException;
 import java.io.Serializable;
 import java.util.List;
 
+import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.DataMapLevel;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
@@ -41,6 +42,18 @@ public interface DataMapExprWrapper extends Serializable {
       throws IOException;
 
   /**
+   * prune blocklets according to the distributable
+   *
+   * @param distributable     distributable
+   * @param partitionsToPrune partitions to prune
+   * @return the pruned ExtendedBlocklet list
+   * @throws IOException
+   */
+  List<ExtendedBlocklet> prune(DataMapDistributable distributable,
+      List<PartitionSpec> partitionsToPrune)
+          throws IOException;
+
+  /**
    * It is used in case on distributable datamap. First using job it gets all blockets from all
    * related datamaps. These blocklets are passed to this method to apply expression.
    * @param blocklets

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapperImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapperImpl.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapperImpl.java
index 38f2336..6537976 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapperImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/DataMapExprWrapperImpl.java
@@ -25,6 +25,7 @@ import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.DataMapLevel;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.datamap.TableDataMap;
+import org.apache.carbondata.core.datamap.dev.DataMap;
 import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
 import org.apache.carbondata.core.indexstore.PartitionSpec;
 import org.apache.carbondata.core.metadata.schema.table.DataMapSchema;
@@ -52,6 +53,13 @@ public class DataMapExprWrapperImpl implements DataMapExprWrapper {
     return dataMap.prune(segments, expression, partitionsToPrune);
   }
 
+  public List<ExtendedBlocklet> prune(DataMapDistributable distributable,
+      List<PartitionSpec> partitionsToPrune)
+      throws IOException {
+    List<DataMap> dataMaps = dataMap.getTableDataMaps(distributable);
+    return dataMap.prune(dataMaps, distributable, expression, partitionsToPrune);
+  }
+
   @Override public List<ExtendedBlocklet> pruneBlocklets(List<ExtendedBlocklet> blocklets)
       throws IOException {
     List<ExtendedBlocklet> blockletList = new ArrayList<>();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/OrDataMapExprWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/OrDataMapExprWrapper.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/OrDataMapExprWrapper.java
index 4988903..bb98535 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/OrDataMapExprWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/expr/OrDataMapExprWrapper.java
@@ -22,6 +22,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.DataMapLevel;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
@@ -58,6 +59,18 @@ public class OrDataMapExprWrapper implements DataMapExprWrapper {
     return new ArrayList<>(andBlocklets);
   }
 
+  @Override
+  public List<ExtendedBlocklet> prune(DataMapDistributable distributable,
+      List<PartitionSpec> partitionsToPrune)
+          throws IOException {
+    List<ExtendedBlocklet> leftPrune = left.prune(distributable, partitionsToPrune);
+    List<ExtendedBlocklet> rightPrune = right.prune(distributable, partitionsToPrune);
+    Set<ExtendedBlocklet> andBlocklets = new HashSet<>();
+    andBlocklets.addAll(leftPrune);
+    andBlocklets.addAll(rightPrune);
+    return new ArrayList<>(andBlocklets);
+  }
+
   @Override public List<ExtendedBlocklet> pruneBlocklets(List<ExtendedBlocklet> blocklets)
       throws IOException {
     List<ExtendedBlocklet> leftPrune = left.pruneBlocklets(blocklets);

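For intuition, a small sketch (plain Scala, not CarbonData code) of the set semantics the two wrappers implement over the blocklets returned by their children: the AND wrapper keeps the intersection, the OR wrapper the de-duplicated union:

```
// Sketch only: AND = intersection, OR = union of the child prune results.
val leftPrune  = List("blocklet-1", "blocklet-2", "blocklet-3")
val rightPrune = List("blocklet-2", "blocklet-4")
val andResult  = leftPrune.filter(rightPrune.contains)  // List(blocklet-2)
val orResult   = (leftPrune ++ rightPrune).distinct     // all four blocklets, once each
```
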
http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java b/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
index 14bba65..6a1234e 100644
--- a/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
+++ b/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
@@ -17,10 +17,7 @@
 package org.apache.carbondata.core.readcommitter;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
@@ -43,11 +40,20 @@ import org.apache.carbondata.core.util.path.CarbonTablePath;
 public class LatestFilesReadCommittedScope implements ReadCommittedScope {
 
   private String carbonFilePath;
+  private String segmentId;
   private ReadCommittedIndexFileSnapShot readCommittedIndexFileSnapShot;
   private LoadMetadataDetails[] loadMetadataDetails;
 
-  public LatestFilesReadCommittedScope(String path)  {
+  /**
+   * Constructor that takes both the carbon file path and the segment id
+   *
+   * @param path      carbon file path
+   * @param segmentId segment id
+   */
+  public LatestFilesReadCommittedScope(String path, String segmentId) {
+    Objects.requireNonNull(path);
     this.carbonFilePath = path;
+    this.segmentId = segmentId;
     try {
       takeCarbonIndexFileSnapShot();
     } catch (IOException ex) {
@@ -55,6 +61,15 @@ public class LatestFilesReadCommittedScope implements ReadCommittedScope {
     }
   }
 
+  /**
+   * Constructor that takes only the carbon file path
+   *
+   * @param path carbon file path
+   */
+  public LatestFilesReadCommittedScope(String path) {
+    this(path, null);
+  }
+
   private void prepareLoadMetadata() {
     int loadCount = 0;
     Map<String, List<String>> snapshotMap =
@@ -101,13 +116,16 @@ public class LatestFilesReadCommittedScope implements ReadCommittedScope {
       segName = segment.getSegmentFileName();
     }
     List<String> index = snapShot.get(segName);
+    if (null == index) {
+      index = new LinkedList<>();
+    }
     for (String indexPath : index) {
       indexFileStore.put(indexPath, null);
     }
     return indexFileStore;
   }
 
-  @Override public SegmentRefreshInfo getCommitedSegmentRefreshInfo(
+  @Override public SegmentRefreshInfo getCommittedSegmentRefreshInfo(
       Segment segment, UpdateVO updateVo) throws IOException {
     Map<String, SegmentRefreshInfo> snapShot =
         readCommittedIndexFileSnapShot.getSegmentTimestampUpdaterMap();
@@ -140,9 +158,10 @@ public class LatestFilesReadCommittedScope implements ReadCommittedScope {
     // Read the current file Path get the list of indexes from the path.
     CarbonFile file = FileFactory.getCarbonFile(carbonFilePath);
     CarbonFile[] files = file.listFiles(new CarbonFileFilter() {
-      @Override public boolean accept(CarbonFile file) {
+      @Override
+      public boolean accept(CarbonFile file) {
         return file.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT) || file.getName()
-            .endsWith(CarbonTablePath.CARBON_DATA_EXT);
+            .endsWith(CarbonTablePath.CARBON_DATA_EXT) || file.getName().endsWith("Fact");
       }
     });
     if (files.length == 0) {
@@ -152,8 +171,14 @@ public class LatestFilesReadCommittedScope implements ReadCommittedScope {
     }
     Map<String, List<String>> indexFileStore = new HashMap<>();
     Map<String, SegmentRefreshInfo> segmentTimestampUpdaterMap = new HashMap<>();
+    CarbonFile[] carbonIndexFiles = null;
     if (file.isDirectory()) {
-      CarbonFile[] carbonIndexFiles = SegmentIndexFileStore.getCarbonIndexFiles(carbonFilePath);
+      if (segmentId == null) {
+        carbonIndexFiles = SegmentIndexFileStore.getCarbonIndexFiles(carbonFilePath);
+      } else {
+        String segmentPath = CarbonTablePath.getSegmentPath(carbonFilePath, segmentId);
+        carbonIndexFiles = SegmentIndexFileStore.getCarbonIndexFiles(segmentPath);
+      }
       for (int i = 0; i < carbonIndexFiles.length; i++) {
         // TODO. If Required to support merge index, then this code has to be modified.
         // TODO. Nested File Paths.

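A minimal sketch of the two constructors shown above; the table path and segment id are hypothetical. The two-argument form narrows the index file snapshot to a single segment, while the single-argument form keeps the previous whole-path behaviour:

```
// Sketch only: scope the read-committed snapshot to one segment, or to the whole path.
import org.apache.carbondata.core.readcommitter.LatestFilesReadCommittedScope

val tablePath = "/tmp/carbonFile"                                        // hypothetical path
val wholeTableScope = new LatestFilesReadCommittedScope(tablePath)
val segmentScope    = new LatestFilesReadCommittedScope(tablePath, "0")  // hypothetical segment id
```
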
http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/core/src/main/java/org/apache/carbondata/core/readcommitter/ReadCommittedScope.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/readcommitter/ReadCommittedScope.java b/core/src/main/java/org/apache/carbondata/core/readcommitter/ReadCommittedScope.java
index 6ff4b89..d177a00 100644
--- a/core/src/main/java/org/apache/carbondata/core/readcommitter/ReadCommittedScope.java
+++ b/core/src/main/java/org/apache/carbondata/core/readcommitter/ReadCommittedScope.java
@@ -45,7 +45,7 @@ public interface ReadCommittedScope extends Serializable {
    */
   public Map<String, String> getCommittedIndexFile(Segment segment) throws IOException ;
 
-  public SegmentRefreshInfo getCommitedSegmentRefreshInfo(
+  public SegmentRefreshInfo getCommittedSegmentRefreshInfo(
       Segment segment, UpdateVO updateVo) throws IOException;
 
   public void takeCarbonIndexFileSnapShot() throws IOException;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java b/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java
index 91ebd41..1f61aab 100644
--- a/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java
+++ b/core/src/main/java/org/apache/carbondata/core/readcommitter/TableStatusReadCommittedScope.java
@@ -79,7 +79,7 @@ public class TableStatusReadCommittedScope implements ReadCommittedScope {
     return indexFiles;
   }
 
-  public SegmentRefreshInfo getCommitedSegmentRefreshInfo(Segment segment, UpdateVO updateVo)
+  public SegmentRefreshInfo getCommittedSegmentRefreshInfo(Segment segment, UpdateVO updateVo)
       throws IOException {
     SegmentRefreshInfo segmentRefreshInfo;
     if (updateVo != null) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
index fab0565..1da8edd 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
@@ -29,6 +29,7 @@ import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandExcept
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datamap.DataMapDistributable;
+import org.apache.carbondata.core.datamap.DataMapLevel;
 import org.apache.carbondata.core.datamap.DataMapMeta;
 import org.apache.carbondata.core.datamap.DataMapStoreManager;
 import org.apache.carbondata.core.datamap.Segment;
@@ -235,7 +236,8 @@ abstract class LuceneDataMapFactoryBase<T extends DataMap> extends DataMapFactor
     }
     for (CarbonFile indexDir : indexDirs) {
       // Filter out the tasks which are filtered through CG datamap.
-      if (!segment.getFilteredIndexShardNames().contains(indexDir.getName())) {
+      if (getDataMapLevel() != DataMapLevel.FG &&
+          !segment.getFilteredIndexShardNames().contains(indexDir.getName())) {
         continue;
       }
       DataMapDistributable luceneDataMapDistributable = new LuceneDataMapDistributable(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
index efe2a63..fe94f54 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
@@ -61,8 +61,6 @@ object LuceneDataMapExample {
          | DMProperties('INDEX_COLUMNS'='id , name')
       """.stripMargin)
 
-    spark.sql("refresh datamap dm ON TABLE personTable")
-
     // 1. Compare the performance:
 
     def time(code: => Unit): Double = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
index cad20fc..da84c00 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
@@ -78,8 +78,12 @@ public class CarbonRecordReader<T> extends AbstractRecordReader<T> {
     } else {
       throw new RuntimeException("unsupported input split type: " + inputSplit);
     }
-    List<TableBlockInfo> tableBlockInfoList = CarbonInputSplit.createBlocks(splitList);
-    queryModel.setTableBlockInfos(tableBlockInfoList);
+    // Use the existing tableBlockInfos if tableBlockInfos of queryModel is not empty;
+    // otherwise the pruning done before this method would have no effect
+    if (queryModel.getTableBlockInfos().isEmpty()) {
+      List<TableBlockInfo> tableBlockInfoList = CarbonInputSplit.createBlocks(splitList);
+      queryModel.setTableBlockInfos(tableBlockInfoList);
+    }
     readSupport.initialize(queryModel.getProjectionColumns(), queryModel.getTable());
     try {
       carbonIterator = new ChunkRowIterator(queryExecutor.execute(queryModel));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
index cf51162..05c70f8 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
@@ -372,7 +372,7 @@ m filterExpression
     List<ExtendedBlocklet> prunedBlocklets =
         getPrunedBlocklets(job, carbonTable, resolver, segmentIds);
 
-    List<CarbonInputSplit> resultFilterredBlocks = new ArrayList<>();
+    List<CarbonInputSplit> resultFilteredBlocks = new ArrayList<>();
     int partitionIndex = 0;
     List<Integer> partitionIdList = new ArrayList<>();
     if (partitionInfo != null && partitionInfo.getPartitionType() != PartitionType.NATIVE_HIVE) {
@@ -401,7 +401,7 @@ m filterExpression
         if (matchedPartitions == null || matchedPartitions.get(partitionIndex)) {
           CarbonInputSplit inputSplit = convertToCarbonInputSplit(blocklet);
           if (inputSplit != null) {
-            resultFilterredBlocks.add(inputSplit);
+            resultFilteredBlocks.add(inputSplit);
           }
         }
       }
@@ -409,7 +409,7 @@ m filterExpression
     statistic
         .addStatistics(QueryStatisticsConstants.LOAD_BLOCKS_DRIVER, System.currentTimeMillis());
     recorder.recordStatisticsForDriver(statistic, job.getConfiguration().get("query.id"));
-    return resultFilterredBlocks;
+    return resultFilteredBlocks;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
index 638d24d..f64a349 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
@@ -438,6 +438,7 @@ class LuceneFineGrainDataMapSuite extends QueryTest with BeforeAndAfterAll {
       .contains("Unsupported alter operation on hive table"))
     sql("drop datamap if exists dm2 on table datamap_test_table")
   }
+
   test("test Clean Files and check Lucene DataMap") {
     sql("DROP TABLE IF EXISTS datamap_test_table")
     sql(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapWithSearchModeSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapWithSearchModeSuite.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapWithSearchModeSuite.scala
new file mode 100644
index 0000000..0ceead8
--- /dev/null
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapWithSearchModeSuite.scala
@@ -0,0 +1,328 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.datamap.lucene
+
+import java.io.{File, PrintWriter}
+
+import scala.util.Random
+
+import org.apache.spark.sql.{CarbonEnv, CarbonSession, Row}
+import org.apache.spark.sql.test.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.core.datamap.status.DataMapStatusManager
+
+/**
+  * Test lucene fine grain datamap with search mode
+  */
+class LuceneFineGrainDataMapWithSearchModeSuite extends QueryTest with BeforeAndAfterAll {
+
+  val file2 = resourcesPath + "/datamap_input.csv"
+
+  override protected def beforeAll(): Unit = {
+    // n should be about 5000000 if the default size of 1024 is reset
+    val n = 500000
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].startSearchMode()
+    CarbonProperties
+      .getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_SEARCH_QUERY_TIMEOUT, "100s")
+    LuceneFineGrainDataMapSuite.createFile(file2, n)
+    sql("create database if not exists lucene")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_SYSTEM_FOLDER_LOCATION,
+        CarbonEnv.getDatabaseLocation("lucene", sqlContext.sparkSession))
+    sql("use lucene")
+    sql("DROP TABLE IF EXISTS datamap_test")
+    sql(
+      """
+        | CREATE TABLE datamap_test(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+  }
+
+  test("test lucene fine grain data map with search mode") {
+
+    sqlContext.sparkSession.sparkContext.setLogLevel("WARN")
+    sql(
+      s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='Name')
+      """.stripMargin)
+
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test OPTIONS('header'='false')")
+    checkAnswer(sql("SELECT * FROM datamap_test WHERE TEXT_MATCH('name:n10')"),
+      sql(s"select * from datamap_test where name='n10'"))
+
+    sql("drop datamap dm on table datamap_test")
+  }
+
+  // TODO: optimize performance
+  ignore("test lucene fine grain data map with TEXT_MATCH 'AND' Filter") {
+    sql("drop datamap if exists dm on table datamap_test")
+    sql(
+      s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='name')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test OPTIONS('header'='false')")
+    checkAnswer(
+      sql("SELECT count(*) FROM datamap_test WHERE TEXT_MATCH('name:n2*') " +
+        "AND age=28 and id=200149"),
+      sql("SELECT count(*) FROM datamap_test WHERE name like 'n2%' " +
+        "AND age=28 and id=200149"))
+    sql("drop datamap if exists dm on table datamap_test")
+  }
+
+  // TODO: optimize performance
+  ignore("test lucene fine grain data map with TEXT_MATCH 'AND' and 'OR' Filter ") {
+    sql("drop datamap if exists dm on table datamap_test")
+    sql(
+      s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='name , city')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test OPTIONS('header'='false')")
+    checkAnswer(sql("SELECT * FROM datamap_test WHERE TEXT_MATCH('name:n1*') OR TEXT_MATCH ('city:c01*') " +
+      "AND TEXT_MATCH('city:C02*')"),
+      sql("select * from datamap_test where name like 'n1%' OR city like 'c01%' and city like" +
+        " 'c02%'"))
+    sql("drop datamap if exists dm on table datamap_test")
+  }
+
+  test("test lucene fine grain data map with compaction-Major ") {
+    sql("DROP TABLE IF EXISTS datamap_test_table")
+    sql(
+      """
+        | CREATE TABLE datamap_test_table(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test_table
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='name , city')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test_table OPTIONS('header'='false')")
+    checkAnswer(sql("SELECT * FROM datamap_test_table WHERE TEXT_MATCH('name:n10')"),
+      sql("select * from datamap_test_table where name='n10'"))
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test_table OPTIONS('header'='false')")
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test_table OPTIONS('header'='false')")
+    sql("alter table datamap_test_table compact 'major'")
+    checkAnswer(sql("SELECT COUNT(*) FROM datamap_test_table WHERE TEXT_MATCH('name:n10')"),
+      sql("select COUNT(*) from datamap_test_table where name='n10'"))
+    sql("drop datamap if exists dm on table datamap_test_table")
+    sql("DROP TABLE IF EXISTS datamap_test_table")
+  }
+
+  test("test lucene fine grain datamap rebuild") {
+    sql("DROP TABLE IF EXISTS datamap_test5")
+    sql(
+      """
+        | CREATE TABLE datamap_test5(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test5
+         | USING 'lucene'
+         | WITH DEFERRED REBUILD
+         | DMProperties('INDEX_COLUMNS'='city')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test5 OPTIONS('header'='false')")
+    val map = DataMapStatusManager.readDataMapStatusMap()
+    assert(!map.get("dm").isEnabled)
+    sql("REBUILD DATAMAP dm ON TABLE datamap_test5")
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c020')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c020'"))
+    sql("DROP TABLE IF EXISTS datamap_test5")
+  }
+
+  test("test lucene fine grain datamap rebuild with table block size") {
+    sql("DROP TABLE IF EXISTS datamap_test5")
+    sql(
+      """
+        | CREATE TABLE datamap_test5(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'TABLE_BLOCKSIZE'='1')
+      """.stripMargin)
+    sql(
+    s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test5
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='Name , cIty')
+      """.stripMargin)
+
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test5 OPTIONS('header'='false')")
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test5 OPTIONS('header'='false')")
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test5 OPTIONS('header'='false')")
+
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c00')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c00'"))
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c020')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c020'"))
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c0100085')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c0100085'"))
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c09560')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c09560'"))
+    sql("DROP TABLE IF EXISTS datamap_test5")
+  }
+
+  test("test lucene fine grain multiple data map on table") {
+    sql("DROP TABLE IF EXISTS datamap_test5")
+    sql(
+      """
+        | CREATE TABLE datamap_test5(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm2 ON TABLE datamap_test5
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='city')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm1 ON TABLE datamap_test5
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='Name')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test5 OPTIONS('header'='false')")
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('name:n10')"),
+      sql(s"select * from datamap_test5 where name='n10'"))
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c020')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c020'"))
+    sql("DROP TABLE IF EXISTS datamap_test5")
+  }
+
+  // TODO: support it  in the future
+  ignore("test lucene datamap and validate the visible and invisible status of datamap ") {
+    val tableName = "datamap_test2"
+    val dataMapName1 = "ggdatamap1";
+    sql(s"DROP TABLE IF EXISTS $tableName")
+    sql(
+      s"""
+         | CREATE TABLE $tableName(id INT, name STRING, city STRING, age INT)
+         | STORED BY 'org.apache.carbondata.format'
+         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    // register datamap writer
+    sql(
+      s"""
+         | CREATE DATAMAP ggdatamap1 ON TABLE $tableName
+         | USING 'lucene'
+         | DMPROPERTIES('index_columns'='name')
+       """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE $tableName OPTIONS('header'='false')")
+
+    val df1 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE TEXT_MATCH('name:n502670')").collect()
+    sql(s"SELECT * FROM $tableName WHERE TEXT_MATCH('name:n502670')").show()
+    println(df1(0).getString(0))
+    assertResult(
+      s"""== CarbonData Profiler ==
+         |Table Scan on datamap_test2
+         | - total blocklets: 1
+         | - filter: TEXT_MATCH('name:n502670')
+         | - pruned by Main DataMap
+         |    - skipped blocklets: 0
+         | - pruned by FG DataMap
+         |    - name: ggdatamap1
+         |    - provider: lucene
+         |    - skipped blocklets: 1
+         |""".stripMargin)(df1(0).getString(0))
+
+    sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = false")
+
+    val df2 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670'").collect()
+    println(df2(0).getString(0))
+    assertResult(
+      s"""== CarbonData Profiler ==
+         |Table Scan on $tableName
+         | - total blocklets: 1
+         | - filter: (name <> null and name = n502670)
+         | - pruned by Main DataMap
+         |    - skipped blocklets: 0
+         |""".stripMargin)(df2(0).getString(0))
+
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"))
+    sql(s"DROP TABLE IF EXISTS $tableName")
+  }
+
+  ignore("test lucene fine grain datamap rebuild with table block size, rebuild") {
+    sql("DROP TABLE IF EXISTS datamap_test5")
+    sql(
+      """
+        | CREATE TABLE datamap_test5(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'TABLE_BLOCKSIZE'='1')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm ON TABLE datamap_test5
+         | USING 'lucene'
+         | WITH DEFERRED REBUILD
+         | DMProperties('INDEX_COLUMNS'='Name , cIty')
+      """.stripMargin)
+
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test5 OPTIONS('header'='false')")
+    sql("REBUILD DATAMAP dm ON TABLE datamap_test5")
+
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].stopSearchMode()
+    sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c020')").show()
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].startSearchMode()
+    sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c020')").show()
+    checkAnswer(sql("SELECT * FROM datamap_test5 WHERE TEXT_MATCH('city:c020')"),
+      sql(s"SELECT * FROM datamap_test5 WHERE city='c020'"))
+    sql("DROP TABLE IF EXISTS datamap_test5")
+  }
+
+  override protected def afterAll(): Unit = {
+    LuceneFineGrainDataMapSuite.deleteFile(file2)
+    sql("DROP TABLE IF EXISTS datamap_test")
+    sql("DROP TABLE IF EXISTS datamap_test5")
+    sql("use default")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_SYSTEM_FOLDER_LOCATION,
+        CarbonProperties.getStorePath)
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].stopSearchMode()
+  }
+
+  def createFile(fileName: String, line: Int = 10000, start: Int = 0) = {
+    val write = new PrintWriter(new File(fileName))
+    for (i <- start until (start + line)) {
+      write.println(i + "," + "n" + i + "," + "c0" + i + "," + Random.nextInt(80))
+    }
+    write.close()
+  }
+
+  def deleteFile(fileName: String): Unit = {
+    val file = new File(fileName)
+    if (file.exists()) {
+      file.delete()
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
index 2c94dab..d278fc5 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
@@ -109,4 +109,31 @@ class SearchModeTestCase extends QueryTest with BeforeAndAfterAll {
     sql("set carbon.search.enabled = false")
     assert(!sqlContext.sparkSession.asInstanceOf[CarbonSession].isSearchModeEnabled)
   }
+
+  test("test lucene datamap with search mode") {
+    sql("DROP DATAMAP IF EXISTS dm ON TABLE main")
+    sql("CREATE DATAMAP dm ON TABLE main USING 'lucene' DMProperties('INDEX_COLUMNS'='id') ")
+    checkAnswer(sql("SELECT * FROM main WHERE TEXT_MATCH('id:100000')"),
+      sql(s"SELECT * FROM main WHERE id='100000'"))
+    sql("DROP DATAMAP if exists dm ON TABLE main")
+  }
+
+  test("test lucene datamap with search mode 2") {
+    sql("drop datamap if exists dm3 ON TABLE main")
+    sql("CREATE DATAMAP dm3 ON TABLE main USING 'lucene' DMProperties('INDEX_COLUMNS'='city') ")
+    checkAnswer(sql("SELECT * FROM main WHERE TEXT_MATCH('city:city6')"),
+      sql("SELECT * FROM main WHERE city='city6'"))
+    sql("DROP DATAMAP if exists dm3 ON TABLE main")
+  }
+
+  test("test lucene datamap with search mode, two column") {
+    sql("drop datamap if exists dm3 ON TABLE main")
+    sql("CREATE DATAMAP dm3 ON TABLE main USING 'lucene' DMProperties('INDEX_COLUMNS'='city , id') ")
+    checkAnswer(sql("SELECT * FROM main WHERE TEXT_MATCH('city:city6')"),
+      sql("SELECT * FROM main WHERE city='city6'"))
+    checkAnswer(sql("SELECT * FROM main WHERE TEXT_MATCH('id:100000')"),
+      sql(s"SELECT * FROM main WHERE id='100000'"))
+    sql("DROP DATAMAP if exists dm3 ON TABLE main")
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
index 29dcec9..186e39e 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/execution/command/CarbonHiveCommands.scala
@@ -68,10 +68,10 @@ case class CarbonSetCommand(command: SetCommand)
   override val output: Seq[Attribute] = command.output
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
-    val sessionParms = CarbonEnv.getInstance(sparkSession).carbonSessionInfo.getSessionParams
+    val sessionParams = CarbonEnv.getInstance(sparkSession).carbonSessionInfo.getSessionParams
     command.kv match {
       case Some((key, Some(value))) =>
-        CarbonSetCommand.validateAndSetValue(sessionParms, key, value)
+        CarbonSetCommand.validateAndSetValue(sessionParams, key, value)
 
         // handle search mode start/stop for ThriftServer usage
         if (key.equalsIgnoreCase(CarbonCommonConstants.CARBON_SEARCH_MODE_ENABLE)) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala
index 07a444f..c052cd7 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/optimizer/CarbonFilters.scala
@@ -382,6 +382,8 @@ object CarbonFilters {
             CarbonScalaUtil.convertSparkToCarbonDataType(dataType)))
         new AndExpression(l, r)
       case StringTrim(child) => transformExpression(child)
+      case s: ScalaUDF =>
+        new MatchExpression(s.children.head.toString())
       case _ =>
         new SparkUnknownExpression(expr.transform {
           case AttributeReference(name, dataType, _, _) =>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
----------------------------------------------------------------------
diff --git a/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java b/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
index 9727352..f6406c7 100644
--- a/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
+++ b/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
@@ -18,6 +18,7 @@
 package org.apache.carbondata.store.worker;
 
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
@@ -27,7 +28,9 @@ import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datamap.DataMapChooser;
+import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.Segment;
+import org.apache.carbondata.core.datamap.dev.expr.DataMapDistributableWrapper;
 import org.apache.carbondata.core.datamap.dev.expr.DataMapExprWrapper;
 import org.apache.carbondata.core.datastore.block.TableBlockInfo;
 import org.apache.carbondata.core.datastore.row.CarbonRow;
@@ -112,6 +115,8 @@ public class SearchRequestHandler {
     queryModel.setVectorReader(false);
 
     CarbonMultiBlockSplit mbSplit = request.split().value();
+    List<TableBlockInfo> list = CarbonInputSplit.createBlocks(mbSplit.getAllSplits());
+    queryModel.setTableBlockInfos(list);
     long limit = request.limit();
     long rowCount = 0;
 
@@ -158,22 +163,38 @@ public class SearchRequestHandler {
       CarbonMultiBlockSplit mbSplit, DataMapExprWrapper datamap) throws IOException {
     Objects.requireNonNull(datamap);
     List<Segment> segments = new LinkedList<>();
+    HashMap<String, Integer> uniqueSegments = new HashMap<>();
     for (CarbonInputSplit split : mbSplit.getAllSplits()) {
-      segments.add(
-          Segment.toSegment(split.getSegmentId(),
-              new LatestFilesReadCommittedScope(table.getTablePath())));
+      String segmentId = split.getSegmentId();
+      if (uniqueSegments.get(segmentId) == null) {
+        segments.add(Segment.toSegment(
+                segmentId,
+                new LatestFilesReadCommittedScope(table.getTablePath(), segmentId)));
+        uniqueSegments.put(segmentId, 1);
+      } else {
+        uniqueSegments.put(segmentId, uniqueSegments.get(segmentId) + 1);
+      }
+    }
+
+    List<DataMapDistributableWrapper> distributables = datamap.toDistributable(segments);
+    List<ExtendedBlocklet> prunnedBlocklets = new LinkedList<ExtendedBlocklet>();
+    for (int i = 0; i < distributables.size(); i++) {
+      DataMapDistributable dataMapDistributable = distributables.get(i).getDistributable();
+      prunnedBlocklets.addAll(datamap.prune(dataMapDistributable, null));
     }
-    List<ExtendedBlocklet> prunnedBlocklets = datamap.prune(segments, null);
 
-    List<String> pathToRead = new LinkedList<>();
-    for (ExtendedBlocklet prunnedBlocklet : prunnedBlocklets) {
-      pathToRead.add(prunnedBlocklet.getPath());
+    HashMap<String, ExtendedBlocklet> pathToRead = new HashMap<>();
+    for (ExtendedBlocklet prunedBlocklet : prunnedBlocklets) {
+      pathToRead.put(prunedBlocklet.getFilePath(), prunedBlocklet);
     }
 
     List<TableBlockInfo> blocks = queryModel.getTableBlockInfos();
     List<TableBlockInfo> blockToRead = new LinkedList<>();
     for (TableBlockInfo block : blocks) {
-      if (pathToRead.contains(block.getFilePath())) {
+      if (pathToRead.keySet().contains(block.getFilePath())) {
+        // If this is not set, the FineGrainBlocklet object cannot be created in
+        // org.apache.carbondata.core.indexstore.blockletindex.BlockletDataRefNode.getIndexedData
+        block.setDataMapWriterPath(pathToRead.get(block.getFilePath()).getDataMapWriterPath());
         blockToRead.add(block);
       }
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/b3384593/store/search/src/main/scala/org/apache/spark/rpc/Master.scala
----------------------------------------------------------------------
diff --git a/store/search/src/main/scala/org/apache/spark/rpc/Master.scala b/store/search/src/main/scala/org/apache/spark/rpc/Master.scala
index 26de74c..f48f5e4 100644
--- a/store/search/src/main/scala/org/apache/spark/rpc/Master.scala
+++ b/store/search/src/main/scala/org/apache/spark/rpc/Master.scala
@@ -38,8 +38,7 @@ import org.apache.spark.util.ThreadUtils
 
 import org.apache.carbondata.common.annotations.InterfaceAudience
 import org.apache.carbondata.common.logging.LogServiceFactory
-import org.apache.carbondata.core.datamap.DataMapChooser
-import org.apache.carbondata.core.datamap.dev.expr.DataMapExprWrapper
+import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.block.Distributable
 import org.apache.carbondata.core.datastore.row.CarbonRow
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable
@@ -232,10 +231,14 @@ class Master(sparkConf: SparkConf) {
 
       // if we have enough data already, we do not need to collect more result
       if (rowCount < globalLimit) {
-        // wait for worker for 10s
-        ThreadUtils.awaitResult(future, Duration.apply("10s"))
+        // wait for worker
+        val timeout = CarbonProperties
+          .getInstance()
+          .getProperty(CarbonCommonConstants.CARBON_SEARCH_QUERY_TIMEOUT,
+            CarbonCommonConstants.CARBON_SEARCH_QUERY_TIMEOUT_DEFAULT)
+        ThreadUtils.awaitResult(future, Duration.apply(timeout))
         LOG.info(s"[SearchId:$queryId] receive search response from worker " +
-                 s"${worker.address}:${worker.port}")
+          s"${worker.address}:${worker.port}")
         try {
           future.value match {
             case Some(response: Try[SearchResult]) =>


[15/50] [abbrv] carbondata git commit: [CARBONDATA-2495][Doc][BloomDataMap] Add document for bloomfilter datamap

Posted by gv...@apache.org.
[CARBONDATA-2495][Doc][BloomDataMap] Add document for bloomfilter datamap

add document for bloomfilter datamap

This closes #2323


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/d9534c2c
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/d9534c2c
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/d9534c2c

Branch: refs/heads/spark-2.3
Commit: d9534c2c086d4b524061fe265a7c8f17b5593c45
Parents: 604902b
Author: xuchuanyin <xu...@hust.edu.cn>
Authored: Sat May 19 22:33:43 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Tue May 22 15:52:33 2018 +0800

----------------------------------------------------------------------
 docs/datamap/bloomfilter-datamap-guide.md | 98 ++++++++++++++++++++++++++
 1 file changed, 98 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/d9534c2c/docs/datamap/bloomfilter-datamap-guide.md
----------------------------------------------------------------------
diff --git a/docs/datamap/bloomfilter-datamap-guide.md b/docs/datamap/bloomfilter-datamap-guide.md
new file mode 100644
index 0000000..fc46f54
--- /dev/null
+++ b/docs/datamap/bloomfilter-datamap-guide.md
@@ -0,0 +1,98 @@
+# CarbonData BloomFilter DataMap (Alpha feature in 1.4.0)
+
+* [DataMap Management](#datamap-management)
+* [BloomFilter Datamap Introduction](#bloomfilter-datamap-introduction)
+* [Loading Data](#loading-data)
+* [Querying Data](#querying-data)
+* [Data Management](#data-management-with-bloomfilter-datamap)
+
+#### DataMap Management
+Creating BloomFilter DataMap
+  ```
+  CREATE DATAMAP [IF NOT EXISTS] datamap_name
+  ON TABLE main_table
+  USING 'bloomfilter'
+  DMPROPERTIES ('index_columns'='city, name', 'BLOOM_SIZE'='640000', 'BLOOM_FPP'='0.00001')
+  ```
+
+Dropping specified datamap
+  ```
+  DROP DATAMAP [IF EXISTS] datamap_name
+  ON TABLE main_table
+  ```
+
+Showing all DataMaps on this table
+  ```
+  SHOW DATAMAP
+  ON TABLE main_table
+  ```
+It will show all DataMaps created on the main table.
+
+
+## BloomFilter DataMap Introduction
+A Bloom filter is a space-efficient probabilistic data structure that is used to test whether an element is a member of a set.
+CarbonData introduces BloomFilter as an index datamap to enhance the performance of queries with precise values.
+It is well suited for queries that do a precise match on high-cardinality columns (such as Name/ID).
+Internally, CarbonData maintains a BloomFilter per blocklet for each index column to indicate whether a value of the column is in that blocklet.
+Just like the other datamaps, the BloomFilter datamap is managed along with the main tables by CarbonData.
+Users can create a BloomFilter datamap on specified columns with BloomFilter configurations such as size and false-positive probability.
+
+For instance, consider a main table called **datamap_test** which is defined as:
+
+  ```
+  CREATE TABLE datamap_test (
+    id string,
+    name string,
+    age int,
+    city string,
+    country string)
+  STORED BY 'carbondata'
+  TBLPROPERTIES('SORT_COLUMNS'='id')
+  ```
+
+In the above example, `id` and `name` are high-cardinality columns
+and we always query on `id` and `name` with precise values.
+Since `id` is in the sort_columns and is therefore ordered,
+queries on it will be fast because CarbonData can skip all the irrelevant blocklets.
+But queries on `name` may be slow since the blocklet min/max may not help,
+because in each blocklet the range of values of `name` may be the same -- all from A* to z*.
+In this case, the user can create a BloomFilter datamap on column `name`.
+Moreover, the user can also create a BloomFilter datamap on the sort_columns.
+This is useful if the user has many segments and the range of values of the sort_columns is almost the same in each of them.
+
+Users can create a BloomFilter datamap using the CREATE DATAMAP DDL:
+
+  ```
+  CREATE DATAMAP dm
+  ON TABLE datamap_test
+  USING 'bloomfilter'
+  DMPROPERTIES ('INDEX_COLUMNS' = 'name,id', 'BLOOM_SIZE'='640000', 'BLOOM_FPP'='0.00001', 'BLOOM_COMPRESS'='true')
+  ```
+
+**Properties for BloomFilter DataMap**
+
+| Property | Is Required | Default Value | Description |
+|-------------|----------|--------|---------|
+| INDEX_COLUMNS | YES |  | CarbonData will generate a BloomFilter index on these columns. Queries on these columns are usually like 'COL = VAL'. |
+| BLOOM_SIZE | NO | 32000 | This value is internally used by the BloomFilter as the number of expected insertions and affects the size of the BloomFilter index. Since each blocklet has its own BloomFilter, the value should approximate the number of records in a blocklet, i.e. 32000 * #noOfPagesInBlocklet. The value should be an integer. |
+| BLOOM_FPP | NO | 0.01 | This value is internally used by the BloomFilter as the false-positive probability and affects the size of the BloomFilter index as well as the number of hash functions for the BloomFilter. The value should be in the range (0, 1). |
+| BLOOM_COMPRESS | NO | true | Whether to compress the BloomFilter index files. |
+
+
+## Loading Data
+When loading data into the main table, BloomFilter files will be generated for all the
+index_columns given in DMProperties; each file contains the blockletId and a BloomFilter for each index column.
+These index files are written inside a folder named after the datamap
+inside each segment folder.
+
+
+## Querying Data
+A system-level configuration `carbon.query.datamap.bloom.cache.size` can be used to enhance query performance with the BloomFilter datamap by providing a cache for the BloomFilter index files.
+The default value is `512` and its unit is `MB`. Internally the cache will be expired after it's idle for 2 hours.
+
+Users can verify whether a query can leverage the BloomFilter datamap by executing the `EXPLAIN` command,
+which shows the transformed logical plan, so the user can check whether the BloomFilter datamap can skip blocklets during the scan.
+If the datamap does not prune blocklets well, you can try to increase the value of property `BLOOM_SIZE` and decrease the value of property `BLOOM_FPP`.
+
+## Data Management With BloomFilter DataMap
+Data management with the BloomFilter datamap is no different from that with the Lucene datamap. You can refer to the corresponding section in the `CarbonData Lucene DataMap` guide.


[26/50] [abbrv] carbondata git commit: [CARBONDATA-2519] Add document for CarbonReader

Posted by gv...@apache.org.
[CARBONDATA-2519] Add document for CarbonReader

Add document for CarbonReader

This closes #2337


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/ddf3e859
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/ddf3e859
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/ddf3e859

Branch: refs/heads/spark-2.3
Commit: ddf3e85952d066cb82f4963c5f7bce6a8196f095
Parents: d8bafa3
Author: xubo245 <xu...@huawei.com>
Authored: Wed May 23 21:45:49 2018 +0800
Committer: kunal642 <ku...@gmail.com>
Committed: Mon May 28 16:04:01 2018 +0530

----------------------------------------------------------------------
 README.md                                       |   2 +-
 docs/sdk-guide.md                               | 591 +++++++++++++++++++
 docs/sdk-writer-guide.md                        | 400 -------------
 .../carbondata/sdk/file/CarbonReader.java       |   2 +-
 .../sdk/file/CarbonReaderBuilder.java           |  56 +-
 .../sdk/file/CarbonWriterBuilder.java           |   8 +-
 6 files changed, 642 insertions(+), 417 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/ddf3e859/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index be3186c..d8f7226 100644
--- a/README.md
+++ b/README.md
@@ -52,7 +52,7 @@ CarbonData is built using Apache Maven, to [build CarbonData](https://github.com
 * [Cluster Installation and Deployment](https://github.com/apache/carbondata/blob/master/docs/installation-guide.md)
 * [Configuring Carbondata](https://github.com/apache/carbondata/blob/master/docs/configuration-parameters.md)
 * [Streaming Ingestion](https://github.com/apache/carbondata/blob/master/docs/streaming-guide.md)
-* [SDK Writer Guide](https://github.com/apache/carbondata/blob/master/docs/sdk-writer-guide.md)
+* [SDK Guide](https://github.com/apache/carbondata/blob/master/docs/sdk-guide.md)
 * [CarbonData Pre-aggregate DataMap](https://github.com/apache/carbondata/blob/master/docs/datamap/preaggregate-datamap-guide.md)
 * [CarbonData Timeseries DataMap](https://github.com/apache/carbondata/blob/master/docs/datamap/timeseries-datamap-guide.md)
 * [FAQ](https://github.com/apache/carbondata/blob/master/docs/faq.md)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ddf3e859/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
new file mode 100644
index 0000000..4d258f0
--- /dev/null
+++ b/docs/sdk-guide.md
@@ -0,0 +1,591 @@
+# SDK Guide
+The carbon jars package includes carbondata-store-sdk-x.x.x-SNAPSHOT.jar, which contains the SDK writer and reader.
+# SDK Writer
+The SDK writer writes carbondata files and carbonindex files at a given path.
+External clients can use this writer to convert data in other formats, or live data, into carbondata and index files.
+The SDK writer output contains just carbondata and carbonindex files; no metadata folder will be present.
+
+## Quick example
+
+### Example with csv format 
+
+```java
+ import java.io.IOException;
+ 
+ import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
+ import org.apache.carbondata.core.metadata.datatype.DataTypes;
+ import org.apache.carbondata.core.util.CarbonProperties;
+ import org.apache.carbondata.sdk.file.CarbonWriter;
+ import org.apache.carbondata.sdk.file.CarbonWriterBuilder;
+ import org.apache.carbondata.sdk.file.Field;
+ import org.apache.carbondata.sdk.file.Schema;
+ 
+ public class TestSdk {
+
+   // pass true or false while executing the main to use offheap memory or not
+   public static void main(String[] args) throws IOException, InvalidLoadOptionException {
+     if (args.length > 0 && args[0] != null) {
+       testSdkWriter(args[0]);
+     } else {
+       testSdkWriter("true");
+     }
+   }
+ 
+   public static void testSdkWriter(String enableOffheap) throws IOException, InvalidLoadOptionException {
+     String path = "./target/testCSVSdkWriter";
+ 
+     Field[] fields = new Field[2];
+     fields[0] = new Field("name", DataTypes.STRING);
+     fields[1] = new Field("age", DataTypes.INT);
+ 
+     Schema schema = new Schema(fields);
+
+     CarbonProperties.getInstance().addProperty("enable.offheap.sort", enableOffheap);
+ 
+     CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path);
+ 
+     CarbonWriter writer = builder.buildWriterForCSVInput(schema);
+ 
+     int rows = 5;
+     for (int i = 0; i < rows; i++) {
+       writer.write(new String[] { "robot" + (i % 10), String.valueOf(i) });
+     }
+     writer.close();
+   }
+ }
+```
+
+### Example with Avro format
+```java
+import java.io.IOException;
+
+import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.sdk.file.AvroCarbonWriter;
+import org.apache.carbondata.sdk.file.CarbonWriter;
+import org.apache.carbondata.sdk.file.Field;
+
+import org.apache.avro.generic.GenericData;
+import org.apache.commons.lang.CharEncoding;
+
+import tech.allegro.schema.json2avro.converter.JsonAvroConverter;
+
+public class TestSdkAvro {
+
+  public static void main(String[] args) throws IOException, InvalidLoadOptionException {
+    testSdkWriter();
+  }
+
+
+  public static void testSdkWriter() throws IOException, InvalidLoadOptionException {
+    String path = "./AvroCarbonWriterSuiteWriteFiles";
+    // Avro schema
+    String avroSchema =
+        "{" +
+            "   \"type\" : \"record\"," +
+            "   \"name\" : \"Acme\"," +
+            "   \"fields\" : ["
+            + "{ \"name\" : \"fname\", \"type\" : \"string\" },"
+            + "{ \"name\" : \"age\", \"type\" : \"int\" }]" +
+            "}";
+
+    String json = "{\"fname\":\"bob\", \"age\":10}";
+
+    // conversion to GenericData.Record
+    JsonAvroConverter converter = new JsonAvroConverter();
+    GenericData.Record record = converter.convertToGenericDataRecord(
+        json.getBytes(CharEncoding.UTF_8), new org.apache.avro.Schema.Parser().parse(avroSchema));
+
+    try {
+      CarbonWriter writer = CarbonWriter.builder()
+          .outputPath(path)
+          .buildWriterForAvroInput(new org.apache.avro.Schema.Parser().parse(avroSchema));
+
+      for (int i = 0; i < 100; i++) {
+        writer.write(record);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+  }
+}
+```
+
+## Datatypes Mapping
+Each SQL data type is mapped to an SDK data type. The mapping is as follows:
+
+| SQL DataTypes | Mapped SDK DataTypes |
+|---------------|----------------------|
+| BOOLEAN | DataTypes.BOOLEAN |
+| SMALLINT | DataTypes.SHORT |
+| INTEGER | DataTypes.INT |
+| BIGINT | DataTypes.LONG |
+| DOUBLE | DataTypes.DOUBLE |
+| VARCHAR | DataTypes.STRING |
+| DATE | DataTypes.DATE |
+| TIMESTAMP | DataTypes.TIMESTAMP |
+| STRING | DataTypes.STRING |
+| DECIMAL | DataTypes.createDecimalType(precision, scale) |
+
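+As a quick illustration of the mapping, the fragment below builds an SDK Schema using several of the mapped data types. It is only a sketch, not a complete program: the field names are hypothetical and the same imports as in the CSV example above are assumed.
+
+```java
+// Hypothetical fields; the data types follow the mapping table above.
+Field[] fields = new Field[4];
+fields[0] = new Field("name", DataTypes.STRING);                     // VARCHAR / STRING
+fields[1] = new Field("salary", DataTypes.DOUBLE);                   // DOUBLE
+fields[2] = new Field("dob", DataTypes.DATE);                        // DATE
+fields[3] = new Field("price", DataTypes.createDecimalType(10, 2));  // DECIMAL(10,2)
+Schema schema = new Schema(fields);
+```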
+
+## API List
+
+### Class org.apache.carbondata.sdk.file.CarbonWriterBuilder
+```
+/**
+* Sets the output path of the writer builder
+* @param path is the absolute path where output files are written
+*             This method must be called when building CarbonWriterBuilder
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder outputPath(String path);
+```
+
+```
+/**
+* If set to false, writes the carbondata and carbonindex files in a flat folder structure
+* @param isTransactionalTable is a boolean value
+*             if set to false, then writes the carbondata and carbonindex files
+*                                                            in a flat folder structure.
+*             if set to true, then writes the carbondata and carbonindex files
+*                                                            in segment folder structure.
+*             By default set to false.
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder isTransactionalTable(boolean isTransactionalTable);
+```
+
+```
+/**
+* to set the timestamp in the carbondata and carbonindex index files
+* @param UUID is a timestamp to be used in the carbondata and carbonindex index files.
+*             By default set to zero.
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder uniqueIdentifier(long UUID);
+```
+
+```
+/**
+* To set the carbondata file size in MB between 1MB-2048MB
+* @param blockSize is size in MB between 1MB to 2048 MB
+*                  default value is 1024 MB
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder withBlockSize(int blockSize);
+```
+
+```
+/**
+* To set the blocklet size of carbondata file
+* @param blockletSize is blocklet size in MB
+*                     default value is 64 MB
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder withBlockletSize(int blockletSize);
+```
+
+```
+/**
+* sets the list of columns that needs to be in sorted order
+* @param sortColumns is a string array of columns that needs to be sorted.
+*                    If it is null or not specified, all dimensions are selected for sorting.
+*                    If it is an empty array, no columns are sorted.
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder sortBy(String[] sortColumns);
+```
+
+```
+/**
+* If set, create a schema file in metadata folder.
+* @param persist is a boolean value, If set to true, creates a schema file in metadata folder.
+*                By default set to false, which will not create the metadata folder.
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder persistSchemaFile(boolean persist);
+```
+
+```
+/**
+* sets the taskNo for the writer. SDKs concurrently running
+* will set taskNo in order to avoid conflicts in file's name during write.
+* @param taskNo is the TaskNo user wants to specify.
+*               by default it is system time in nano seconds.
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder taskNo(String taskNo);
+```
+
+```
+/**
+* To support the load options for sdk writer
+* @param options key,value pair of load options.
+*                supported keys values are
+*                a. bad_records_logger_enable -- true (write into separate logs), false
+*                b. bad_records_action -- FAIL, FORCE, IGNORE, REDIRECT
+*                c. bad_record_path -- path
+*                d. dateformat -- same as JAVA SimpleDateFormat
+*                e. timestampformat -- same as JAVA SimpleDateFormat
+*                f. complex_delimiter_level_1 -- value to Split the complexTypeData
+*                g. complex_delimiter_level_2 -- value to Split the nested complexTypeData
+*                h. quotechar
+*                i. escapechar
+*
+*                Default values are as follows.
+*
+*                a. bad_records_logger_enable -- "false"
+*                b. bad_records_action -- "FAIL"
+*                c. bad_record_path -- ""
+*                d. dateformat -- "" , uses from carbon.properties file
+*                e. timestampformat -- "", uses from carbon.properties file
+*                f. complex_delimiter_level_1 -- "$"
+*                g. complex_delimiter_level_2 -- ":"
+*                h. quotechar -- "\""
+*                i. escapechar -- "\\"
+*
+* @return updated CarbonWriterBuilder
+*/
+public CarbonWriterBuilder withLoadOptions(Map<String, String> options);
+```
+
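+A hypothetical usage sketch of these load options, reusing the `path` and `schema` variables from the CSV example above; the option values and the bad-record path are only illustrative, and `java.util.Map`/`java.util.HashMap` imports are assumed:
+
+```java
+Map<String, String> options = new HashMap<>();
+options.put("bad_records_logger_enable", "true");
+options.put("bad_records_action", "REDIRECT");
+options.put("bad_record_path", "./target/badrecords");  // hypothetical redirect path
+options.put("dateformat", "yyyy-MM-dd");
+
+CarbonWriter writer = CarbonWriter.builder()
+    .outputPath(path)
+    .withLoadOptions(options)
+    .buildWriterForCSVInput(schema);
+```
+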
+```
+/**
+* Build a {@link CarbonWriter}, which accepts row in CSV format object
+* @param schema carbon Schema object {org.apache.carbondata.sdk.file.Schema}
+* @return CSVCarbonWriter
+* @throws IOException
+* @throws InvalidLoadOptionException
+*/
+public CarbonWriter buildWriterForCSVInput(Schema schema) throws IOException, InvalidLoadOptionException;
+```
+
+```  
+/**
+* Build a {@link CarbonWriter}, which accepts Avro format object
+* @param avroSchema avro Schema object {org.apache.avro.Schema}
+* @return AvroCarbonWriter 
+* @throws IOException
+* @throws InvalidLoadOptionException
+*/
+public CarbonWriter buildWriterForAvroInput(org.apache.avro.Schema avroSchema) throws IOException, InvalidLoadOptionException;
+```
+
+### Class org.apache.carbondata.sdk.file.CarbonWriter
+```
+/**
+* Write an object to the file, the format of the object depends on the implementation
+* If AvroCarbonWriter, object is of type org.apache.avro.generic.GenericData.Record 
+* If CSVCarbonWriter, object is of type String[]
+* Note: This API is not thread safe
+* @param object
+* @throws IOException
+*/
+public abstract void write(Object object) throws IOException;
+```
+
+```
+/**
+* Flush and close the writer
+*/
+public abstract void close() throws IOException;
+```
+
+```
+/**
+* Create a {@link CarbonWriterBuilder} to build a {@link CarbonWriter}
+*/
+public static CarbonWriterBuilder builder() {
+    return new CarbonWriterBuilder();
+}
+```
+
+### Class org.apache.carbondata.sdk.file.Field
+```
+/**
+* Field Constructor
+* @param name name of the field
+* @param type datatype of field, specified in strings.
+*/
+public Field(String name, String type);
+```
+
+```
+/**
+* Field constructor
+* @param name name of the field
+* @param type datatype of the field of class DataType
+*/
+public Field(String name, DataType type);  
+```
+
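+A tiny fragment showing both constructors; it assumes the string form accepts the same type names used by `Schema.parseJson` below (for example "int"), while the second form uses the `DataTypes` constants from the mapping table:
+
+```java
+Field ageField  = new Field("age", "int");              // datatype given as a string
+Field nameField = new Field("name", DataTypes.STRING);  // datatype given as a DataType
+```
+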
+### Class org.apache.carbondata.sdk.file.Schema
+
+```
+/**
+* construct a schema with fields
+* @param fields
+*/
+public Schema(Field[] fields);
+```
+
+```
+/**
+* Create a Schema using JSON string, for example:
+* [
+*   {"name":"string"},
+*   {"age":"int"}
+* ] 
+* @param json specified as string
+* @return Schema
+*/
+public static Schema parseJson(String json);
+```
+
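+A minimal fragment using the JSON form, equivalent to building the same two fields programmatically:
+
+```java
+// Builds a schema with a STRING field "name" and an INT field "age".
+Schema schema = Schema.parseJson("[{\"name\":\"string\"}, {\"age\":\"int\"}]");
+```
+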
+### Class org.apache.carbondata.core.util.CarbonProperties
+
+```
+/**
+* This method will be responsible to get the instance of CarbonProperties class
+*
+* @return carbon properties instance
+*/
+public static CarbonProperties getInstance();
+```
+
+```
+/**
+* This method will be used to add a new property
+*
+* @param key is a property name to set for carbon.
+* @param value is valid parameter corresponding to property.
+* @return CarbonProperties object
+*/
+public CarbonProperties addProperty(String key, String value);
+```
+
+```
+/**
+* This method will be used to get the property value. If the property is not
+* present, then it will return null.
+*
+* @param key is a property name to get user specified value.
+* @return properties value for corresponding key. If not set, then returns null.
+*/
+public String getProperty(String key);
+```
+
+```
+/**
+* This method will be used to get the property value. If property is not
+* present, then it will return the default value.
+*
+* @param key is a property name to get user specified value.
+* @param defaultValue value returned by the function if the corresponding key is not set.
+* @return properties value for corresponding key. If not set, then returns specified defaultValue.
+*/
+public String getProperty(String key, String defaultValue);
+```
+Reference : [list of carbon properties](http://carbondata.apache.org/configuration-parameters.html)
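+
+A short usage sketch of these methods; the key shown ("enable.offheap.sort") is the same property used in the CSV example above, and any key from the configuration reference works the same way:
+
+```java
+CarbonProperties properties = CarbonProperties.getInstance();
+properties.addProperty("enable.offheap.sort", "true");
+// Falls back to "false" if the property has not been set.
+String offheapEnabled = properties.getProperty("enable.offheap.sort", "false");
+```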
+
+### Class org.apache.carbondata.sdk.file.AvroCarbonWriter
+```
+/**
+* converts avro schema to carbon schema, required by carbonWriter
+*
+* @param avroSchemaString json formatted avro schema as string
+* @return carbon sdk schema
+*/
+public static org.apache.carbondata.sdk.file.Schema getCarbonSchemaFromAvroSchema(String avroSchemaString);
+```
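+
+For instance, a hypothetical fragment that derives the carbon schema from the Avro schema string defined in the Avro example above:
+
+```java
+String avroSchema = "...";  // the Avro schema JSON string from the Avro example above
+org.apache.carbondata.sdk.file.Schema carbonSchema =
+    AvroCarbonWriter.getCarbonSchemaFromAvroSchema(avroSchema);
+```
+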
+# SDK Reader
+The SDK reader reads carbondata files and carbonindex files at a given path.
+External clients can use this reader to read CarbonData files without a CarbonSession.
+## Quick example
+```
+    // 1. Create carbon reader
+    String path = "./testWriteFiles";
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(new String[]{"name", "age"})
+        .build();
+
+    // 2. Read data
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      System.out.println(row[0] + "\t" + row[1]);
+      i++;
+    }
+    
+    // 3. Close this reader
+    reader.close();
+```
+
+Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java) in the CarbonData repo.
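+
+Besides reading rows, the reader API (listed below) can also inspect the schema of an existing carbondata file. A minimal fragment, assuming `dataFilePath` points to a .carbondata file written earlier and that `List` and `ColumnSchema` are imported:
+
+```java
+List<ColumnSchema> columns = CarbonReader.readSchemaInDataFile(dataFilePath);
+for (ColumnSchema column : columns) {
+  System.out.println(column.getColumnName());
+}
+```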
+
+## API List
+
+### Class org.apache.carbondata.sdk.file.CarbonReader
+```
+ /**
+  * Return a new CarbonReaderBuilder instance
+  */
+  public static CarbonReaderBuilder builder(String tablePath, String tableName);
+```
+
+```
+  /**
+   * Read carbondata file and return the schema
+   */
+  public static List<ColumnSchema> readSchemaInDataFile(String dataFilePath);
+```
+
+```
+ /**
+  * Read schema file and return table info object
+  */
+  public static TableInfo readSchemaFile(String schemaFilePath);
+```
+
+```
+  /**
+   * Return true if has next row
+   */
+  public boolean hasNext();
+```
+
+```
+  /**
+   * Read and return next row object
+   */
+  public T readNextRow();
+```
+
+```
+  /**
+   * Close reader
+   */
+  public void close();
+```
+
+### Class org.apache.carbondata.sdk.file.CarbonReaderBuilder
+```
+  /**
+   * Construct a CarbonReaderBuilder with table path and table name
+   *
+   * @param tablePath table path
+   * @param tableName table name
+   */
+  CarbonReaderBuilder(String tablePath, String tableName);
+```
+
+```
+  /**
+   * Configure the projection column names of carbon reader
+   *
+   * @param projectionColumnNames projection column names
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder projection(String[] projectionColumnNames);
+```
+
+```
+  /**
+   * Configure the transactional status of table
+   * If set to false, then reads the carbondata and carbonindex files from a flat folder structure.
+   * If set to true, then reads the carbondata and carbonindex files from segment folder structure.
+   * Default value is true
+   *
+   * @param isTransactionalTable whether is transactional table or not
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder isTransactionalTable(boolean isTransactionalTable);
+```
+
+```
+ /**
+  * Configure the filter expression for carbon reader
+  *
+  * @param filterExpression filter expression
+  * @return CarbonReaderBuilder object
+  */
+  public CarbonReaderBuilder filter(Expression filterExpression);
+```
+
+```
+  /**
+   * Set the access key for S3
+   *
+   * @param key   the string of access key for different S3 types, like: fs.s3a.access.key
+   * @param value the value of access key
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder setAccessKey(String key, String value);
+```
+
+```
+  /**
+   * Set the access key for S3.
+   *
+   * @param value the value of access key
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder setAccessKey(String value);
+```
+
+```
+  /**
+   * Set the secret key for S3
+   *
+   * @param key   the string of secret key for different S3 types, like: fs.s3a.secret.key
+   * @param value the value of secret key
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder setSecretKey(String key, String value);
+```
+
+```
+  /**
+   * Set the secret key for S3
+   *
+   * @param value the value of secret key
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder setSecretKey(String value);
+```
+
+```
+ /**
+   * Set the endpoint for S3
+   *
+   * @param key   the string of endpoint for different S3 types, like: fs.s3a.endpoint
+   * @param value the value of endpoint
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder setEndPoint(String key, String value);
+```
+
+``` 
+  /**
+   * Set the endpoint for S3
+   *
+   * @param value the value of endpoint
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder setEndPoint(String value);
+```
+
+```
+ /**
+   * Build CarbonReader
+   *
+   * @param <T>
+   * @return CarbonReader
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public <T> CarbonReader<T> build();
+```
+Find S3 example code at [SDKS3Example](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java) in the CarbonData repo.
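+
+A minimal sketch of the S3 configuration chain, with placeholder bucket path, credentials, and endpoint (see the SDKS3Example linked above for a complete program):
+
+```java
+CarbonReader reader = CarbonReader
+    .builder("s3a://my-bucket/carbon-table", "_temp")  // hypothetical bucket path
+    .projection(new String[]{"name", "age"})
+    .setAccessKey("<access-key>")
+    .setSecretKey("<secret-key>")
+    .setEndPoint("<s3-endpoint>")
+    .build();
+```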

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ddf3e859/docs/sdk-writer-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-writer-guide.md b/docs/sdk-writer-guide.md
deleted file mode 100644
index 3d9a3de..0000000
--- a/docs/sdk-writer-guide.md
+++ /dev/null
@@ -1,400 +0,0 @@
-# SDK Writer Guide
-In the carbon jars package, there exist a carbondata-store-sdk-x.x.x-SNAPSHOT.jar.
-This SDK writer, writes carbondata file and carbonindex file at a given path.
-External client can make use of this writer to convert other format data or live data to create carbondata and index files.
-These SDK writer output contains just a carbondata and carbonindex files. No metadata folder will be present.
-
-## Quick example
-
-### Example with csv format 
-
-```java
- import java.io.IOException;
- 
- import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
- import org.apache.carbondata.core.metadata.datatype.DataTypes;
- import org.apache.carbondata.core.util.CarbonProperties;
- import org.apache.carbondata.sdk.file.CarbonWriter;
- import org.apache.carbondata.sdk.file.CarbonWriterBuilder;
- import org.apache.carbondata.sdk.file.Field;
- import org.apache.carbondata.sdk.file.Schema;
- 
- public class TestSdk {
-
-   // pass true or false while executing the main to use offheap memory or not
-   public static void main(String[] args) throws IOException, InvalidLoadOptionException {
-     if (args.length > 0 && args[0] != null) {
-       testSdkWriter(args[0]);
-     } else {
-       testSdkWriter("true");
-     }
-   }
- 
-   public static void testSdkWriter(String enableOffheap) throws IOException, InvalidLoadOptionException {
-     String path = "./target/testCSVSdkWriter";
- 
-     Field[] fields = new Field[2];
-     fields[0] = new Field("name", DataTypes.STRING);
-     fields[1] = new Field("age", DataTypes.INT);
- 
-     Schema schema = new Schema(fields);
-
-     CarbonProperties.getInstance().addProperty("enable.offheap.sort", enableOffheap);
- 
-     CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path);
- 
-     CarbonWriter writer = builder.buildWriterForCSVInput(schema);
- 
-     int rows = 5;
-     for (int i = 0; i < rows; i++) {
-       writer.write(new String[] { "robot" + (i % 10), String.valueOf(i) });
-     }
-     writer.close();
-   }
- }
-```
-
-### Example with Avro format
-```java
-import java.io.IOException;
-
-import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
-import org.apache.carbondata.core.metadata.datatype.DataTypes;
-import org.apache.carbondata.sdk.file.AvroCarbonWriter;
-import org.apache.carbondata.sdk.file.CarbonWriter;
-import org.apache.carbondata.sdk.file.Field;
-
-import org.apache.avro.generic.GenericData;
-import org.apache.commons.lang.CharEncoding;
-
-import tech.allegro.schema.json2avro.converter.JsonAvroConverter;
-
-public class TestSdkAvro {
-
-  public static void main(String[] args) throws IOException, InvalidLoadOptionException {
-    testSdkWriter();
-  }
-
-
-  public static void testSdkWriter() throws IOException, InvalidLoadOptionException {
-    String path = "./AvroCarbonWriterSuiteWriteFiles";
-    // Avro schema
-    String avroSchema =
-        "{" +
-            "   \"type\" : \"record\"," +
-            "   \"name\" : \"Acme\"," +
-            "   \"fields\" : ["
-            + "{ \"name\" : \"fname\", \"type\" : \"string\" },"
-            + "{ \"name\" : \"age\", \"type\" : \"int\" }]" +
-            "}";
-
-    String json = "{\"fname\":\"bob\", \"age\":10}";
-
-    // conversion to GenericData.Record
-    JsonAvroConverter converter = new JsonAvroConverter();
-    GenericData.Record record = converter.convertToGenericDataRecord(
-        json.getBytes(CharEncoding.UTF_8), new org.apache.avro.Schema.Parser().parse(avroSchema));
-
-    try {
-      CarbonWriter writer = CarbonWriter.builder()
-          .outputPath(path)
-          .buildWriterForAvroInput(new org.apache.avro.Schema.Parser().parse(avroSchema));
-
-      for (int i = 0; i < 100; i++) {
-        writer.write(record);
-      }
-      writer.close();
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-}
-```
-
-## Datatypes Mapping
-Each of SQL data types are mapped into data types of SDK. Following are the mapping:
-
-| SQL DataTypes | Mapped SDK DataTypes |
-|---------------|----------------------|
-| BOOLEAN | DataTypes.BOOLEAN |
-| SMALLINT | DataTypes.SHORT |
-| INTEGER | DataTypes.INT |
-| BIGINT | DataTypes.LONG |
-| DOUBLE | DataTypes.DOUBLE |
-| VARCHAR | DataTypes.STRING |
-| DATE | DataTypes.DATE |
-| TIMESTAMP | DataTypes.TIMESTAMP |
-| STRING | DataTypes.STRING |
-| DECIMAL | DataTypes.createDecimalType(precision, scale) |
-
-
-## API List
-
-### Class org.apache.carbondata.sdk.file.CarbonWriterBuilder
-```
-/**
-* Sets the output path of the writer builder
-* @param path is the absolute path where output files are written
-*             This method must be called when building CarbonWriterBuilder
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder outputPath(String path);
-```
-
-```
-/**
-* If set false, writes the carbondata and carbonindex files in a flat folder structure
-* @param isTransactionalTable is a boolelan value
-*             if set to false, then writes the carbondata and carbonindex files
-*                                                            in a flat folder structure.
-*             if set to true, then writes the carbondata and carbonindex files
-*                                                            in segment folder structure..
-*             By default set to false.
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder isTransactionalTable(boolean isTransactionalTable);
-```
-
-```
-/**
-* to set the timestamp in the carbondata and carbonindex index files
-* @param UUID is a timestamp to be used in the carbondata and carbonindex index files.
-*             By default set to zero.
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder uniqueIdentifier(long UUID);
-```
-
-```
-/**
-* To set the carbondata file size in MB between 1MB-2048MB
-* @param blockSize is size in MB between 1MB to 2048 MB
-*                  default value is 1024 MB
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder withBlockSize(int blockSize);
-```
-
-```
-/**
-* To set the blocklet size of carbondata file
-* @param blockletSize is blocklet size in MB
-*                     default value is 64 MB
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder withBlockletSize(int blockletSize);
-```
-
-```
-/**
-* sets the list of columns that needs to be in sorted order
-* @param sortColumns is a string array of columns that needs to be sorted.
-*                    If it is null or by default all dimensions are selected for sorting
-*                    If it is empty array, no columns are sorted
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder sortBy(String[] sortColumns);
-```
-
-```
-/**
-* If set, create a schema file in metadata folder.
-* @param persist is a boolean value, If set to true, creates a schema file in metadata folder.
-*                By default set to false. will not create metadata folder
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder persistSchemaFile(boolean persist);
-```
-
-```
-/**
-* sets the taskNo for the writer. SDKs concurrently running
-* will set taskNo in order to avoid conflicts in file's name during write.
-* @param taskNo is the TaskNo user wants to specify.
-*               by default it is system time in nano seconds.
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder taskNo(String taskNo);
-```
-
-```
-/**
-* To support the load options for sdk writer
-* @param options key,value pair of load options.
-*                supported keys values are
-*                a. bad_records_logger_enable -- true (write into separate logs), false
-*                b. bad_records_action -- FAIL, FORCE, IGNORE, REDIRECT
-*                c. bad_record_path -- path
-*                d. dateformat -- same as JAVA SimpleDateFormat
-*                e. timestampformat -- same as JAVA SimpleDateFormat
-*                f. complex_delimiter_level_1 -- value to Split the complexTypeData
-*                g. complex_delimiter_level_2 -- value to Split the nested complexTypeData
-*                h. quotechar
-*                i. escapechar
-*
-*                Default values are as follows.
-*
-*                a. bad_records_logger_enable -- "false"
-*                b. bad_records_action -- "FAIL"
-*                c. bad_record_path -- ""
-*                d. dateformat -- "" , uses from carbon.properties file
-*                e. timestampformat -- "", uses from carbon.properties file
-*                f. complex_delimiter_level_1 -- "$"
-*                g. complex_delimiter_level_2 -- ":"
-*                h. quotechar -- "\""
-*                i. escapechar -- "\\"
-*
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder withLoadOptions(Map<String, String> options);
-```
-
-```
-/**
-* Build a {@link CarbonWriter}, which accepts row in CSV format object
-* @param schema carbon Schema object {org.apache.carbondata.sdk.file.Schema}
-* @return CSVCarbonWriter
-* @throws IOException
-* @throws InvalidLoadOptionException
-*/
-public CarbonWriter buildWriterForCSVInput() throws IOException, InvalidLoadOptionException;
-```
-
-```  
-/**
-* Build a {@link CarbonWriter}, which accepts Avro format object
-* @param avroSchema avro Schema object {org.apache.avro.Schema}
-* @return AvroCarbonWriter 
-* @throws IOException
-* @throws InvalidLoadOptionException
-*/
-public CarbonWriter buildWriterForAvroInput() throws IOException, InvalidLoadOptionException;
-```
-
-### Class org.apache.carbondata.sdk.file.CarbonWriter
-```
-/**
-* Write an object to the file, the format of the object depends on the implementation
-* If AvroCarbonWriter, object is of type org.apache.avro.generic.GenericData.Record 
-* If CSVCarbonWriter, object is of type String[]
-* Note: This API is not thread safe
-* @param object
-* @throws IOException
-*/
-public abstract void write(Object object) throws IOException;
-```
-
-```
-/**
-* Flush and close the writer
-*/
-public abstract void close() throws IOException;
-```
-
-```
-/**
-* Create a {@link CarbonWriterBuilder} to build a {@link CarbonWriter}
-*/
-public static CarbonWriterBuilder builder() {
-return new CarbonWriterBuilder();
-}
-```
-
-### Class org.apache.carbondata.sdk.file.Field
-```
-/**
-* Field Constructor
-* @param name name of the field
-* @param type datatype of field, specified in strings.
-*/
-public Field(String name, String type);
-```
-
-```
-/**
-* Field constructor
-* @param name name of the field
-* @param type datatype of the field of class DataType
-*/
-public Field(String name, DataType type);  
-```
-
-### Class org.apache.carbondata.sdk.file.Schema
-
-```
-/**
-* construct a schema with fields
-* @param fields
-*/
-public Schema(Field[] fields);
-```
-
-```
-/**
-* Create a Schema using JSON string, for example:
-* [
-*   {"name":"string"},
-*   {"age":"int"}
-* ] 
-* @param json specified as string
-* @return Schema
-*/
-public static Schema parseJson(String json);
-```
-
-### Class org.apache.carbondata.core.util.CarbonProperties
-
-```
-/**
-* This method will be responsible to get the instance of CarbonProperties class
-*
-* @return carbon properties instance
-*/
-public static CarbonProperties getInstance();
-```
-
-```
-/**
-* This method will be used to add a new property
-*
-* @param key is a property name to set for carbon.
-* @param value is valid parameter corresponding to property.
-* @return CarbonProperties object
-*/
-public CarbonProperties addProperty(String key, String value);
-```
-
-```
-/**
-* This method will be used to get the property value. If property is not
-* present, then it will return the default value.
-*
-* @param key is a property name to get user specified value.
-* @return properties value for corresponding key. If not set, then returns null.
-*/
-public String getProperty(String key);
-```
-
-```
-/**
-* This method will be used to get the property value. If property is not
-* present, then it will return the default value.
-*
-* @param key is a property name to get user specified value..
-* @param defaultValue used to be returned by function if corrosponding key not set.
-* @return properties value for corresponding key. If not set, then returns specified defaultValue.
-*/
-public String getProperty(String key, String defaultValue);
-```
-Reference : [list of carbon properties](http://carbondata.apache.org/configuration-parameters.html)
-
-### Class org.apache.carbondata.sdk.file.AvroCarbonWriter
-```
-/**
-* converts avro schema to carbon schema, required by carbonWriter
-*
-* @param avroSchemaString json formatted avro schema as string
-* @return carbon sdk schema
-*/
-public static org.apache.carbondata.sdk.file.Schema getCarbonSchemaFromAvroSchema(String avroSchemaString);
-```

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ddf3e859/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index 6517e89..d85bf4b 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -102,7 +102,7 @@ public class CarbonReader<T> {
   }
 
   /**
-   * Read schmea file and return table info object
+   * Read schema file and return table info object
    */
   public static TableInfo readSchemaFile(String schemaFilePath) throws IOException {
     org.apache.carbondata.format.TableInfo tableInfo = CarbonUtil.readSchemaFile(schemaFilePath);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ddf3e859/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index 946ea0f..c78cda0 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -50,26 +50,53 @@ public class CarbonReaderBuilder {
   private String tableName;
   private boolean isTransactionalTable = true;
 
+  /**
+   * Construct a CarbonReaderBuilder with table path and table name
+   *
+   * @param tablePath table path
+   * @param tableName table name
+   */
   CarbonReaderBuilder(String tablePath, String tableName) {
     this.tablePath = tablePath;
     this.tableName = tableName;
   }
 
+  /**
+   * Configure the projection column names of carbon reader
+   *
+   * @param projectionColumnNames projection column names
+   * @return CarbonReaderBuilder object
+   */
   public CarbonReaderBuilder projection(String[] projectionColumnNames) {
     Objects.requireNonNull(projectionColumnNames);
     this.projectionColumns = projectionColumnNames;
     return this;
   }
 
+  /**
+   * Configure the transactional status of table
+   * If set to false, then reads the carbondata and carbonindex files from a flat folder structure.
+   * If set to true, then reads the carbondata and carbonindex files from segment folder structure.
+   * Default value is true
+   *
+   * @param isTransactionalTable whether is transactional table or not
+   * @return CarbonReaderBuilder object
+   */
   public CarbonReaderBuilder isTransactionalTable(boolean isTransactionalTable) {
     Objects.requireNonNull(isTransactionalTable);
     this.isTransactionalTable = isTransactionalTable;
     return this;
   }
 
-  public CarbonReaderBuilder filter(Expression fileterExpression) {
-    Objects.requireNonNull(fileterExpression);
-    this.filterExpression = fileterExpression;
+  /**
+   * Configure the filter expression for carbon reader
+   *
+   * @param filterExpression filter expression
+   * @return CarbonReaderBuilder object
+   */
+  public CarbonReaderBuilder filter(Expression filterExpression) {
+    Objects.requireNonNull(filterExpression);
+    this.filterExpression = filterExpression;
     return this;
   }
 
@@ -78,7 +105,7 @@ public class CarbonReaderBuilder {
    *
    * @param key   the string of access key for different S3 type,like: fs.s3a.access.key
    * @param value the value of access key
-   * @return CarbonWriterBuilder
+   * @return CarbonReaderBuilder object
    */
   public CarbonReaderBuilder setAccessKey(String key, String value) {
     FileFactory.getConfiguration().set(key, value);
@@ -89,7 +116,7 @@ public class CarbonReaderBuilder {
    * Set the access key for S3.
    *
    * @param value the value of access key
-   * @return CarbonWriterBuilder
+   * @return CarbonReaderBuilder object
    */
   public CarbonReaderBuilder setAccessKey(String value) {
     return setAccessKey(Constants.ACCESS_KEY, value);
@@ -100,7 +127,7 @@ public class CarbonReaderBuilder {
    *
    * @param key   the string of secret key for different S3 type,like: fs.s3a.secret.key
    * @param value the value of secret key
-   * @return CarbonWriterBuilder
+   * @return CarbonReaderBuilder object
    */
   public CarbonReaderBuilder setSecretKey(String key, String value) {
     FileFactory.getConfiguration().set(key, value);
@@ -111,7 +138,7 @@ public class CarbonReaderBuilder {
    * Set the secret key for S3
    *
    * @param value the value of secret key
-   * @return CarbonWriterBuilder
+   * @return CarbonReaderBuilder object
    */
   public CarbonReaderBuilder setSecretKey(String value) {
     return setSecretKey(Constants.SECRET_KEY, value);
@@ -122,7 +149,7 @@ public class CarbonReaderBuilder {
    *
    * @param key   the string of endpoint for different S3 type,like: fs.s3a.endpoint
    * @param value the value of endpoint
-   * @return CarbonWriterBuilder
+   * @return CarbonReaderBuilder object
    */
   public CarbonReaderBuilder setEndPoint(String key, String value) {
     FileFactory.getConfiguration().set(key, value);
@@ -133,13 +160,20 @@ public class CarbonReaderBuilder {
    * Set the endpoint for S3
    *
    * @param value the value of endpoint
-   * @return CarbonWriterBuilder
+   * @return CarbonReaderBuilder object
    */
   public CarbonReaderBuilder setEndPoint(String value) {
-    FileFactory.getConfiguration().set(Constants.ENDPOINT, value);
-    return this;
+    return setEndPoint(Constants.ENDPOINT, value);
   }
 
+  /**
+   * Build CarbonReader
+   *
+   * @param <T>
+   * @return CarbonReader
+   * @throws IOException
+   * @throws InterruptedException
+   */
   public <T> CarbonReader<T> build() throws IOException, InterruptedException {
     CarbonTable table = CarbonTable.buildFromTablePath(tableName, tablePath, isTransactionalTable);
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/ddf3e859/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index 2277ab0..e2dc8c2 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -120,10 +120,10 @@ public class CarbonWriterBuilder {
   /**
    * If set false, writes the carbondata and carbonindex files in a flat folder structure
    * @param isTransactionalTable is a boolelan value
-   * if set to false, then writes the carbondata and carbonindex files
+   * If set to false, then writes the carbondata and carbonindex files
    * in a flat folder structure.
-   * if set to true, then writes the carbondata and carbonindex files
-   * in segment folder structure..
+   * If set to true, then writes the carbondata and carbonindex files
+   * in segment folder structure.
    * By default set to false.
    * @return updated CarbonWriterBuilder
    */
@@ -285,7 +285,7 @@ public class CarbonWriterBuilder {
   }
 
   /**
-   * To set the blocklet size of carbondata file
+   * To set the blocklet size of CarbonData file
    * @param blockletSize is blocklet size in MB
    * default value is 64 MB
    * @return updated CarbonWriterBuilder


[38/50] [abbrv] carbondata git commit: [CARBONDATA-2521] Support create carbonReader without tableName

Posted by gv...@apache.org.
[CARBONDATA-2521] Support create carbonReader without tableName

Add a new method for creating a CarbonReader without a table name:

1. Add a new interface: public static CarbonReaderBuilder builder(String tablePath)
2. The default value of the table name is "UnknownTable" + the current time

This closes #2336
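
For readers skimming the change, a minimal usage sketch follows. It mirrors the testWriteAndReadFilesWithoutTableName test added in this commit; the ./testWriteFiles path and the name/age projection come from that test and are illustrative only, and it assumes CarbonData files have already been written to that path.

import java.io.IOException;

import org.apache.carbondata.sdk.file.CarbonReader;

public class ReadWithoutTableNameExample {
  public static void main(String[] args) throws IOException, InterruptedException {
    // Build a reader from the table path alone; the table name defaults
    // to "UnknownTable" plus the current timestamp.
    CarbonReader reader = CarbonReader
        .builder("./testWriteFiles")
        .projection(new String[]{"name", "age"})
        .isTransactionalTable(true)
        .build();

    while (reader.hasNext()) {
      Object[] row = (Object[]) reader.readNextRow();
      System.out.println(row[0] + " " + row[1]);
    }
    reader.close();
  }
}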


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/5b2b9130
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/5b2b9130
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/5b2b9130

Branch: refs/heads/spark-2.3
Commit: 5b2b9130411da7737a76b8901f61c59639113e5d
Parents: b338459
Author: xubo245 <xu...@huawei.com>
Authored: Wed May 23 21:08:23 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Thu May 31 14:19:47 2018 +0800

----------------------------------------------------------------------
 docs/sdk-guide.md                               | 21 +++++-
 .../carbondata/sdk/file/CarbonReader.java       | 19 +++++
 .../carbondata/sdk/file/CarbonReaderTest.java   | 76 ++++++++++++++++++++
 3 files changed, 113 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/5b2b9130/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 1d225a9..360516a 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -429,14 +429,29 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
 
 ### Class org.apache.carbondata.sdk.file.CarbonReader
 ```
- /**
-  * Return a new CarbonReaderBuilder instance
-  */
+   /**
+    * Return a new {@link CarbonReaderBuilder} instance
+    *
+    * @param tablePath table store path
+    * @param tableName table name
+    * @return CarbonReaderBuilder object
+    */
   public static CarbonReaderBuilder builder(String tablePath, String tableName);
 ```
 
 ```
   /**
+   * Return a new CarbonReaderBuilder instance
+   * Default value of table name is "UnknownTable" + time
+   *
+   * @param tablePath table path
+   * @return CarbonReaderBuilder object
+   */
+  public static CarbonReaderBuilder builder(String tablePath);
+```
+
+```
+  /**
    * Return true if has next row
    */
   public boolean hasNext();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5b2b9130/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index 60ead05..81db7b2 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -18,6 +18,8 @@
 package org.apache.carbondata.sdk.file;
 
 import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
 import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
@@ -83,12 +85,29 @@ public class CarbonReader<T> {
 
   /**
    * Return a new {@link CarbonReaderBuilder} instance
+   *
+   * @param tablePath table store path
+   * @param tableName table name
+   * @return CarbonReaderBuilder object
    */
   public static CarbonReaderBuilder builder(String tablePath, String tableName) {
     return new CarbonReaderBuilder(tablePath, tableName);
   }
 
   /**
+   * Return a new {@link CarbonReaderBuilder} instance
+   * Default value of table name is "UnknownTable" + time
+   *
+   * @param tablePath table path
+   * @return CarbonReaderBuilder object
+   */
+  public static CarbonReaderBuilder builder(String tablePath) {
+    String time = new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date());
+    String tableName = "UnknownTable" + time;
+    return builder(tablePath, tableName);
+  }
+
+  /**
    * Close reader
    *
    * @throws IOException

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5b2b9130/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index deb6d06..95c25f8 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -225,6 +225,82 @@ public class CarbonReaderTest extends TestCase {
   }
 
   @Test
+  public void testWriteAndReadFilesWithoutTableName() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader
+        .builder(path)
+        .projection(new String[]{"name", "age"})
+        .isTransactionalTable(true)
+        .build();
+
+    // expected output after sorting
+    String[] name = new String[100];
+    int[] age = new int[100];
+    for (int i = 0; i < 100; i++) {
+      name[i] = "robot" + (i / 10);
+      age[i] = (i % 10) * 10 + i / 10;
+    }
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      // Default sort column is applied for dimensions. So, need  to validate accordingly
+      Assert.assertEquals(name[i], row[0]);
+      Assert.assertEquals(age[i], row[1]);
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Test
+  public void testWriteAndReadFilesWithoutTableName2() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true,false);
+
+    CarbonReader reader = CarbonReader
+        .builder(path)
+        .build();
+
+    // expected output after sorting
+    String[] name = new String[100];
+    int[] age = new int[100];
+    for (int i = 0; i < 100; i++) {
+      name[i] = "robot" + (i / 10);
+      age[i] = (i % 10) * 10 + i / 10;
+    }
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      // Default sort column is applied for dimensions. So, need  to validate accordingly
+      Assert.assertEquals(name[i], row[0]);
+      Assert.assertEquals(age[i], row[1]);
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Test
   public void testReadSchemaFromDataFile() throws IOException {
     String path = "./testWriteFiles";
     FileUtils.deleteDirectory(new File(path));


[29/50] [abbrv] carbondata git commit: [CARBONDATA-2524] Support create carbonReader with default projection

Posted by gv...@apache.org.
[CARBONDATA-2524] Support create carbonReader with default projection

This closes #2338
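
As a usage illustration, a minimal sketch of the all-columns projection follows, modeled on the testReadFilesWithProjectAllColumns test added in this commit; the ./testWriteFiles path and the "_temp" table name are the test's placeholders, and data is assumed to have been written there already.

import java.util.Arrays;

import org.apache.carbondata.sdk.file.CarbonReader;

public class ProjectAllColumnsExample {
  public static void main(String[] args) throws Exception {
    // Explicitly project every column; omitting projection() has the same
    // effect now that all columns are projected by default.
    CarbonReader reader = CarbonReader
        .builder("./testWriteFiles", "_temp")
        .projectAllColumns()
        .build();

    while (reader.hasNext()) {
      Object[] row = (Object[]) reader.readNextRow();
      System.out.println(Arrays.toString(row));
    }
    reader.close();
  }
}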


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/8b80b12e
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/8b80b12e
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/8b80b12e

Branch: refs/heads/spark-2.3
Commit: 8b80b12eca261016000f0ab132e8558a4e87fc95
Parents: 7f4bd3d
Author: xubo245 <xu...@huawei.com>
Authored: Thu May 24 09:33:23 2018 +0800
Committer: QiangCai <qi...@qq.com>
Committed: Mon May 28 22:10:18 2018 +0800

----------------------------------------------------------------------
 docs/sdk-guide.md                               |  10 ++
 .../sdk/file/CarbonReaderBuilder.java           |  40 +++++++-
 .../carbondata/sdk/file/CarbonReaderTest.java   | 101 +++++++++++++++++++
 3 files changed, 149 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/8b80b12e/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 4d258f0..328a845 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -493,6 +493,16 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
 
 ```
   /**
+   * Project all Columns for carbon reader
+   *
+   * @return CarbonReaderBuilder object
+   * @throws IOException
+   */
+  public CarbonReaderBuilder projectAllColumns();
+```
+
+```
+  /**
    * Configure the transactional status of table
    * If set to false, then reads the carbondata and carbonindex files from a flat folder structure.
    * If set to true, then reads the carbondata and carbonindex files from segment folder structure.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8b80b12e/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index c78cda0..4103c63 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -26,6 +26,7 @@ import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.scan.expression.Expression;
 import org.apache.carbondata.hadoop.CarbonProjection;
 import org.apache.carbondata.hadoop.api.CarbonFileInputFormat;
@@ -51,6 +52,12 @@ public class CarbonReaderBuilder {
   private boolean isTransactionalTable = true;
 
   /**
+   * It will be true if the projectAllColumns method is used,
+   * and false if the projection method is used
+   */
+  private boolean isProjectAllColumns = true;
+
+  /**
    * Construct a CarbonReaderBuilder with table path and table name
    *
    * @param tablePath table path
@@ -70,6 +77,7 @@ public class CarbonReaderBuilder {
   public CarbonReaderBuilder projection(String[] projectionColumnNames) {
     Objects.requireNonNull(projectionColumnNames);
     this.projectionColumns = projectionColumnNames;
+    isProjectAllColumns = false;
     return this;
   }
 
@@ -89,6 +97,33 @@ public class CarbonReaderBuilder {
   }
 
   /**
+   * Project all Columns for carbon reader
+   *
+   * @return CarbonReaderBuilder object
+   * @throws IOException
+   */
+  public CarbonReaderBuilder projectAllColumns() throws IOException {
+    CarbonTable carbonTable = CarbonTable
+        .buildFromTablePath(tableName, tablePath, isTransactionalTable);
+
+    List<ColumnSchema> colList = carbonTable.getTableInfo().getFactTable().getListOfColumns();
+    List<String> projectColumn = new ArrayList<String>();
+    for (ColumnSchema cols : colList) {
+      if (cols.getSchemaOrdinal() != -1) {
+        projectColumn.add(cols.getColumnUniqueId());
+      }
+    }
+    projectionColumns = new String[projectColumn.size()];
+    int i = 0;
+    for (String columnName : projectColumn) {
+      projectionColumns[i] = columnName;
+      i++;
+    }
+    isProjectAllColumns = true;
+    return this;
+  }
+
+  /**
    * Configure the filter expression for carbon reader
    *
    * @param filterExpression filter expression
@@ -186,9 +221,10 @@ public class CarbonReaderBuilder {
     if (filterExpression != null) {
       format.setFilterPredicates(job.getConfiguration(), filterExpression);
     }
-    if (projectionColumns != null) {
-      format.setColumnProjection(job.getConfiguration(), new CarbonProjection(projectionColumns));
+    if (isProjectAllColumns) {
+      projectAllColumns();
     }
+    format.setColumnProjection(job.getConfiguration(), new CarbonProjection(projectionColumns));
 
     final List<InputSplit> splits =
         format.getSplits(new JobContextImpl(job.getConfiguration(), new JobID()));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/8b80b12e/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 0d2c84e..756dbe4 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -409,4 +409,105 @@ public class CarbonReaderTest extends TestCase {
         badRecordLoc);
   }
 
+  @Test
+  public void testReadFilesWithProjectAllColumns() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projectAllColumns()
+        .build();
+
+    // expected output after sorting
+    String[] name = new String[100];
+    int[] age = new int[100];
+    for (int i = 0; i < 100; i++) {
+      name[i] = "robot" + (i / 10);
+      age[i] = (i % 10) * 10 + i / 10;
+    }
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      // Default sort column is applied for dimensions. So, need  to validate accordingly
+      Assert.assertEquals(name[i], row[0]);
+      Assert.assertEquals(age[i], row[1]);
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Test
+  public void testReadFilesWithDefaultProjection() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .build();
+
+    // expected output after sorting
+    String[] name = new String[100];
+    int[] age = new int[100];
+    for (int i = 0; i < 100; i++) {
+      name[i] = "robot" + (i / 10);
+      age[i] = (i % 10) * 10 + i / 10;
+    }
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      Assert.assertEquals(name[i], row[0]);
+      Assert.assertEquals(age[i], row[1]);
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+  }
+
+  @Test
+  public void testReadFilesWithNullProjection() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(new String[]{})
+        .build();
+
+    // expected output after sorting
+    String[] name = new String[100];
+    int[] age = new int[100];
+    for (int i = 0; i < 100; i++) {
+      name[i] = "robot" + (i / 10);
+      age[i] = (i % 10) * 10 + i / 10;
+    }
+    // Default sort column is applied for dimensions. So, need  to validate accordingly
+
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      assert(row.length==0);
+    }
+  }
 }


[39/50] [abbrv] carbondata git commit: [CARBONDATA-2508] Fix the exception that can't get executorService when start search mode twice

Posted by gv...@apache.org.
[CARBONDATA-2508] Fix the exception that can't get executorService when start search mode twice

This closes #2355
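
The patch replaces the static thread-pool initializer with lazy initialization in the executor constructor, so the pool can be re-created if it is no longer available when search mode is started a second time. A stripped-down illustration of the pattern follows; it is not the actual CarbonData class, and the pool type and shutdownPool helper are placeholders.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

class LazyPoolExecutor {
  private static ExecutorService executorService = null;

  LazyPoolExecutor() {
    // Create the pool on first construction instead of in a static block,
    // so a pool torn down by an earlier stop can be created again.
    if (executorService == null) {
      initThreadPool();
    }
  }

  private static synchronized void initThreadPool() {
    executorService = Executors.newCachedThreadPool();
  }

  static synchronized void shutdownPool() {
    if (executorService != null) {
      executorService.shutdownNow();
      executorService = null;
    }
  }
}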


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/6aadfe70
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/6aadfe70
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/6aadfe70

Branch: refs/heads/spark-2.3
Commit: 6aadfe70a67bfd13ed5efedfaa368de57403a88f
Parents: 5b2b913
Author: xubo245 <xu...@huawei.com>
Authored: Thu May 31 09:15:16 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Thu May 31 14:24:35 2018 +0800

----------------------------------------------------------------------
 .../executor/impl/SearchModeDetailQueryExecutor.java   |  6 ++++--
 .../impl/SearchModeVectorDetailQueryExecutor.java      |  6 ++++--
 .../testsuite/detailquery/SearchModeTestCase.scala     | 13 +++++++++++++
 3 files changed, 21 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/6aadfe70/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
index aed472c..ae14327 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
@@ -37,8 +37,10 @@ public class SearchModeDetailQueryExecutor extends AbstractQueryExecutor<Object>
           LogServiceFactory.getLogService(SearchModeDetailQueryExecutor.class.getName());
   private static ExecutorService executorService = null;
 
-  static {
-    initThreadPool();
+  public SearchModeDetailQueryExecutor() {
+    if (executorService == null) {
+      initThreadPool();
+    }
   }
 
   private static synchronized void initThreadPool() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/6aadfe70/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
index 00fd511..705c451 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
@@ -40,8 +40,10 @@ public class SearchModeVectorDetailQueryExecutor extends AbstractQueryExecutor<O
           LogServiceFactory.getLogService(SearchModeVectorDetailQueryExecutor.class.getName());
   private static ExecutorService executorService = null;
 
-  static {
-    initThreadPool();
+  public SearchModeVectorDetailQueryExecutor() {
+    if (executorService == null) {
+      initThreadPool();
+    }
   }
 
   private static synchronized void initThreadPool() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/6aadfe70/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
index d278fc5..3e6adaf 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/SearchModeTestCase.scala
@@ -136,4 +136,17 @@ class SearchModeTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP DATAMAP if exists dm3 ON TABLE main")
   }
 
+  test("start search mode twice") {
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].startSearchMode()
+    assert(sqlContext.sparkSession.asInstanceOf[CarbonSession].isSearchModeEnabled)
+    checkSearchAnswer("select id from main where id = '3' limit 10")
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].stopSearchMode()
+    assert(!sqlContext.sparkSession.asInstanceOf[CarbonSession].isSearchModeEnabled)
+
+    // start twice
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].startSearchMode()
+    assert(sqlContext.sparkSession.asInstanceOf[CarbonSession].isSearchModeEnabled)
+    checkSearchAnswer("select id from main where id = '3' limit 10")
+    sqlContext.sparkSession.asInstanceOf[CarbonSession].stopSearchMode()
+  }
 }


[35/50] [abbrv] carbondata git commit: [CARBONDATA-2546] Fixed the ArrayIndexOutOfBoundsException when give same column twice in projection of CarbonReader

Posted by gv...@apache.org.
[CARBONDATA-2546] Fixed the ArrayIndexOutOfBoundsException when give same column twice in projection of CarbonReader

This closes #2348
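
With this fix a projection may name the same column more than once. A short sketch based on the testReadColumnTwice test added in this commit follows; the ./testWriteFiles path and "_temp" table name are the test's placeholders, and data is assumed to exist there.

import org.apache.carbondata.sdk.file.CarbonReader;

public class ReadColumnTwiceExample {
  public static void main(String[] args) throws Exception {
    CarbonReader reader = CarbonReader
        .builder("./testWriteFiles", "_temp")
        // The same column may now appear several times in the projection.
        .projection(new String[]{"name", "name", "age", "name"})
        .build();

    while (reader.hasNext()) {
      Object[] row = (Object[]) reader.readNextRow();
      // row[0], row[1] and row[3] hold "name"; row[2] holds "age".
      System.out.println(row[0] + ", " + row[2]);
    }
    reader.close();
  }
}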


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/a7faef8a
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/a7faef8a
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/a7faef8a

Branch: refs/heads/spark-2.3
Commit: a7faef8a08cef32670ae7598fa117bb9fdbb543d
Parents: e740182
Author: xubo245 <xu...@huawei.com>
Authored: Mon May 28 19:24:46 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Wed May 30 00:07:10 2018 +0800

----------------------------------------------------------------------
 .../carbondata/hadoop/CarbonRecordReader.java   |  1 -
 .../hadoop/api/CarbonInputFormat.java           | 27 ++++++++
 .../TestNonTransactionalCarbonTable.scala       | 11 +++-
 .../sdk/file/CarbonReaderBuilder.java           |  3 +-
 .../carbondata/sdk/file/CarbonReaderTest.java   | 66 +++++++++++++++-----
 5 files changed, 87 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/a7faef8a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
index d4b091c..cad20fc 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
@@ -90,7 +90,6 @@ public class CarbonRecordReader<T> extends AbstractRecordReader<T> {
 
   @Override public boolean nextKeyValue() {
     return carbonIterator.hasNext();
-
   }
 
   @Override public Void getCurrentKey() throws IOException, InterruptedException {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a7faef8a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
index 91da93f..cf51162 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
@@ -24,6 +24,7 @@ import java.lang.reflect.Constructor;
 import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.List;
+import java.util.Objects;
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.constants.CarbonCommonConstantsInternal;
@@ -190,6 +191,32 @@ m filterExpression
     }
   }
 
+  /**
+   * Set the column projection column names
+   *
+   * @param configuration     Configuration info
+   * @param projectionColumns projection columns name
+   */
+  public static void setColumnProjection(Configuration configuration, String[] projectionColumns) {
+    Objects.requireNonNull(projectionColumns);
+    if (projectionColumns.length < 1) {
+      throw new RuntimeException("Projection can't be empty");
+    }
+    StringBuilder builder = new StringBuilder();
+    for (String column : projectionColumns) {
+      builder.append(column).append(",");
+    }
+    String columnString = builder.toString();
+    columnString = columnString.substring(0, columnString.length() - 1);
+    configuration.set(COLUMN_PROJECTION, columnString);
+  }
+
+  /**
+   * Set the column projection column names from CarbonProjection
+   *
+   * @param configuration Configuration info
+   * @param projection    CarbonProjection object that includes unique projection column name
+   */
   public static void setColumnProjection(Configuration configuration, CarbonProjection projection) {
     if (projection == null || projection.isEmpty()) {
       return;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a7faef8a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index afb9b2f..61b37d5 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -317,7 +317,10 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
       s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
-
+    checkAnswer(sql("SELECT name,name FROM sdkOutputTable"), Seq(
+      Row("robot0", "robot0"),
+      Row("robot1", "robot1"),
+      Row("robot2", "robot2")))
     checkAnswer(sql("select * from sdkOutputTable"), Seq(
       Row("robot0", 0, 0.0),
       Row("robot1", 1, 0.5),
@@ -1529,6 +1532,12 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
       s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
+    sql("SELECT name,name FROM sdkOutputTable").show()
+    checkAnswer(sql("SELECT name,name FROM sdkOutputTable"), Seq(
+      Row("bob", "bob"),
+      Row("bob", "bob"),
+      Row("bob", "bob")))
+
     sql("select * from sdkOutputTable").show(false)
 
     // TODO: Add a validation

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a7faef8a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index 4103c63..1e73e8c 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -28,7 +28,6 @@ import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.scan.expression.Expression;
-import org.apache.carbondata.hadoop.CarbonProjection;
 import org.apache.carbondata.hadoop.api.CarbonFileInputFormat;
 
 import org.apache.hadoop.conf.Configuration;
@@ -224,7 +223,7 @@ public class CarbonReaderBuilder {
     if (isProjectAllColumns) {
       projectAllColumns();
     }
-    format.setColumnProjection(job.getConfiguration(), new CarbonProjection(projectionColumns));
+    format.setColumnProjection(job.getConfiguration(), projectionColumns);
 
     final List<InputSplit> splits =
         format.getSplits(new JobContextImpl(job.getConfiguration(), new JobID()));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a7faef8a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 7a2a765..ee095a1 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -105,6 +105,47 @@ public class CarbonReaderTest extends TestCase {
   }
 
   @Test
+  public void testReadColumnTwice() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(new String[]{"name", "name", "age", "name"})
+        .build();
+
+    // expected output after sorting
+    String[] name = new String[100];
+    int[] age = new int[100];
+    for (int i = 0; i < 100; i++) {
+      name[i] = "robot" + (i / 10);
+      age[i] = (i % 10) * 10 + i / 10;
+    }
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      // Default sort column is applied for dimensions. So, need  to validate accordingly
+      Assert.assertEquals(name[i], row[0]);
+      Assert.assertEquals(name[i], row[1]);
+      Assert.assertEquals(age[i], row[2]);
+      Assert.assertEquals(name[i], row[3]);
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Test
   public void testReadFilesParallel() throws IOException, InterruptedException {
     String path = "./testWriteFiles";
     FileUtils.deleteDirectory(new File(path));
@@ -836,23 +877,14 @@ public class CarbonReaderTest extends TestCase {
 
     TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
 
-    CarbonReader reader = CarbonReader
-        .builder(path, "_temp")
-        .projection(new String[]{})
-        .build();
-
-    // expected output after sorting
-    String[] name = new String[100];
-    int[] age = new int[100];
-    for (int i = 0; i < 100; i++) {
-      name[i] = "robot" + (i / 10);
-      age[i] = (i % 10) * 10 + i / 10;
-    }
-    // Default sort column is applied for dimensions. So, need  to validate accordingly
-
-    while (reader.hasNext()) {
-      Object[] row = (Object[]) reader.readNextRow();
-      assert(row.length==0);
+    try {
+      CarbonReader reader = CarbonReader
+          .builder(path, "_temp")
+          .projection(new String[]{})
+          .build();
+      assert (false);
+    } catch (RuntimeException e) {
+      assert (e.getMessage().equalsIgnoreCase("Projection can't be empty"));
     }
   }
 


[21/50] [abbrv] carbondata git commit: [CARBONDATA-2514] Added condition to check for duplicate column names

Posted by gv...@apache.org.
[CARBONDATA-2514] Added condition to check for duplicate column names

1. A check for duplicate column names was not present.
2. IndexFileReader was not being closed, so the index file could not be deleted.

This closes #2332
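
A short sketch of the new duplicate-column validation follows, based on the testExceptionForDuplicateColumns test added in this commit; the output path is a placeholder and the duplicated "name" field is intentional.

import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.sdk.file.CarbonWriter;
import org.apache.carbondata.sdk.file.CarbonWriterBuilder;
import org.apache.carbondata.sdk.file.Field;
import org.apache.carbondata.sdk.file.Schema;

public class DuplicateColumnExample {
  public static void main(String[] args) {
    Field[] fields = new Field[2];
    fields[0] = new Field("name", DataTypes.STRING);
    fields[1] = new Field("name", DataTypes.STRING);  // duplicate on purpose
    CarbonWriterBuilder builder = CarbonWriter.builder()
        .isTransactionalTable(false)
        .outputPath("./duplicateColumnTest");          // placeholder path
    try {
      // Building the writer now fails fast instead of producing a broken table.
      builder.buildWriterForCSVInput(new Schema(fields));
    } catch (Exception e) {
      // Expected: "Duplicate column name found in table schema"
      System.out.println(e.getMessage());
    }
  }
}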


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/cf666c17
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/cf666c17
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/cf666c17

Branch: refs/heads/spark-2.3
Commit: cf666c17b8be9f11dd9c0b51503ca194162ee782
Parents: 16ed99a
Author: kunal642 <ku...@gmail.com>
Authored: Tue May 22 15:16:32 2018 +0530
Committer: manishgupta88 <to...@gmail.com>
Committed: Thu May 24 10:19:47 2018 +0530

----------------------------------------------------------------------
 .../apache/carbondata/core/util/CarbonUtil.java | 44 +++++++++++---------
 .../carbondata/core/util/DataTypeUtil.java      |  2 +
 .../sdk/file/CarbonWriterBuilder.java           |  7 ++++
 .../sdk/file/AvroCarbonWriterTest.java          | 40 ++++++++++++++++++
 4 files changed, 73 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf666c17/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 9dc4aa2..23d02ef 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2380,27 +2380,31 @@ public final class CarbonUtil {
   public static org.apache.carbondata.format.TableInfo inferSchemaFromIndexFile(
       String indexFilePath, String tableName) throws IOException {
     CarbonIndexFileReader indexFileReader = new CarbonIndexFileReader();
-    indexFileReader.openThriftReader(indexFilePath);
-    org.apache.carbondata.format.IndexHeader readIndexHeader = indexFileReader.readIndexHeader();
-    List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
-    List<org.apache.carbondata.format.ColumnSchema> table_columns =
-        readIndexHeader.getTable_columns();
-    for (int i = 0; i < table_columns.size(); i++) {
-      columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+    try {
+      indexFileReader.openThriftReader(indexFilePath);
+      org.apache.carbondata.format.IndexHeader readIndexHeader = indexFileReader.readIndexHeader();
+      List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
+      List<org.apache.carbondata.format.ColumnSchema> table_columns =
+          readIndexHeader.getTable_columns();
+      for (int i = 0; i < table_columns.size(); i++) {
+        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+      }
+      // only columnSchema is the valid entry, reset all dummy entries.
+      TableSchema tableSchema = getDummyTableSchema(tableName, columnSchemaList);
+
+      ThriftWrapperSchemaConverterImpl thriftWrapperSchemaConverter =
+          new ThriftWrapperSchemaConverterImpl();
+      org.apache.carbondata.format.TableSchema thriftFactTable =
+          thriftWrapperSchemaConverter.fromWrapperToExternalTableSchema(tableSchema);
+      org.apache.carbondata.format.TableInfo tableInfo =
+          new org.apache.carbondata.format.TableInfo(thriftFactTable,
+              new ArrayList<org.apache.carbondata.format.TableSchema>());
+
+      tableInfo.setDataMapSchemas(null);
+      return tableInfo;
+    } finally {
+      indexFileReader.closeThriftReader();
     }
-    // only columnSchema is the valid entry, reset all dummy entries.
-    TableSchema tableSchema = getDummyTableSchema(tableName, columnSchemaList);
-
-    ThriftWrapperSchemaConverterImpl thriftWrapperSchemaConverter =
-        new ThriftWrapperSchemaConverterImpl();
-    org.apache.carbondata.format.TableSchema thriftFactTable =
-        thriftWrapperSchemaConverter.fromWrapperToExternalTableSchema(tableSchema);
-    org.apache.carbondata.format.TableInfo tableInfo =
-        new org.apache.carbondata.format.TableInfo(thriftFactTable,
-            new ArrayList<org.apache.carbondata.format.TableSchema>());
-
-    tableInfo.setDataMapSchemas(null);
-    return tableInfo;
   }
 
   private static TableSchema getDummyTableSchema(String tableName,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf666c17/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
index f7f71b3..e06c82e 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
@@ -341,6 +341,7 @@ public final class DataTypeUtil {
       try {
         if (null != dateFormat && !dateFormat.trim().isEmpty()) {
           dateFormatter = new SimpleDateFormat(dateFormat);
+          dateFormatter.setLenient(false);
         } else {
           dateFormatter = timeStampformatter.get();
         }
@@ -376,6 +377,7 @@ public final class DataTypeUtil {
       try {
         if (null != dateFormat && !dateFormat.trim().isEmpty()) {
           dateFormatter = new SimpleDateFormat(dateFormat);
+          dateFormatter.setLenient(false);
         } else {
           dateFormatter = timeStampformatter.get();
         }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf666c17/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index e846da4..2277ab0 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -21,9 +21,11 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Set;
 import java.util.TreeMap;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -421,6 +423,7 @@ public class CarbonWriterBuilder {
 
   private void buildTableSchema(Field[] fields, TableSchemaBuilder tableSchemaBuilder,
       List<String> sortColumnsList, ColumnSchema[] sortColumnsSchemaList) {
+    Set<String> uniqueFields = new HashSet<>();
     // a counter which will be used in case of complex array type. This valIndex will be assigned
     // to child of complex array type in the order val1, val2 so that each array type child is
     // differentiated to any level
@@ -442,6 +445,10 @@ public class CarbonWriterBuilder {
     int i = 0;
     for (Field field : fields) {
       if (null != field) {
+        if (!uniqueFields.add(field.getFieldName())) {
+          throw new RuntimeException(
+              "Duplicate column " + field.getFieldName() + " found in table schema");
+        }
         int isSortColumn = sortColumnsList.indexOf(field.getFieldName());
         if (isSortColumn > -1) {
           // unsupported types for ("array", "struct", "double", "float", "decimal")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf666c17/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
index b70e74d..03a4f47 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
@@ -21,9 +21,12 @@ import java.io.File;
 import java.io.FileFilter;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
+import java.util.HashMap;
+import java.util.Map;
 
 import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 
 import org.apache.avro.generic.GenericData;
@@ -450,6 +453,43 @@ public class AvroCarbonWriterTest {
     FileUtils.deleteDirectory(new File(path));
   }
 
+  @Test
+  public void testExceptionForDuplicateColumns() throws IOException, InvalidLoadOptionException {
+    Field[] field = new Field[2];
+    field[0] = new Field("name", DataTypes.STRING);
+    field[1] = new Field("name", DataTypes.STRING);
+    CarbonWriterBuilder writer = CarbonWriter.builder().isTransactionalTable(false)
+        .uniqueIdentifier(System.currentTimeMillis()).outputPath(path);
+
+    try {
+      writer.buildWriterForCSVInput(new org.apache.carbondata.sdk.file.Schema(field));
+      Assert.fail();
+    } catch (Exception e) {
+      assert(e.getMessage().contains("Duplicate column name found in table schema"));
+    }
+    FileUtils.deleteDirectory(new File(path));
+  }
 
+  @Test
+  public void testExceptionForInvalidDate() throws IOException, InvalidLoadOptionException {
+    Field[] field = new Field[2];
+    field[0] = new Field("name", DataTypes.STRING);
+    field[1] = new Field("date", DataTypes.DATE);
+    CarbonWriterBuilder writer = CarbonWriter.builder().isTransactionalTable(false)
+        .uniqueIdentifier(System.currentTimeMillis()).outputPath(path);
+
+    try {
+      Map<String, String> loadOptions = new HashMap<String, String>();
+      loadOptions.put("bad_records_action", "fail");
+      CarbonWriter carbonWriter =
+          writer.isTransactionalTable(false).withLoadOptions(loadOptions).buildWriterForCSVInput(new org.apache.carbondata.sdk.file.Schema(field));
+      carbonWriter.write(new String[] { "k", "20-02-2233" });
+      carbonWriter.close();
+      Assert.fail();
+    } catch (Exception e) {
+      assert(e.getMessage().contains("Data load failed due to bad record"));
+    }
+    FileUtils.deleteDirectory(new File(path));
+  }
 
 }


[08/50] [abbrv] carbondata git commit: [CARBONDATA-2206]add documentation for lucene datamap

Posted by gv...@apache.org.
[CARBONDATA-2206]add documentation for lucene datamap

added documentation for lucene datamap

This closes #2215


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/061871ed
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/061871ed
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/061871ed

Branch: refs/heads/spark-2.3
Commit: 061871eda45adce4bc7501dd303311e54ddf8831
Parents: 26eb2d0
Author: akashrn5 <ak...@gmail.com>
Authored: Mon Apr 23 19:27:56 2018 +0530
Committer: chenliang613 <ch...@huawei.com>
Committed: Mon May 21 20:11:20 2018 +0800

----------------------------------------------------------------------
 docs/datamap/lucene-datamap-guide.md | 159 ++++++++++++++++++++++++++++++
 1 file changed, 159 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/061871ed/docs/datamap/lucene-datamap-guide.md
----------------------------------------------------------------------
diff --git a/docs/datamap/lucene-datamap-guide.md b/docs/datamap/lucene-datamap-guide.md
new file mode 100644
index 0000000..5f7a2e4
--- /dev/null
+++ b/docs/datamap/lucene-datamap-guide.md
@@ -0,0 +1,159 @@
+# CarbonData Lucene DataMap (Alpha feature in 1.4.0)
+  
+* [DataMap Management](#datamap-management)
+* [Lucene Datamap](#lucene-datamap-introduction)
+* [Loading Data](#loading-data)
+* [Querying Data](#querying-data)
+* [Data Management](#data-management-with-lucene-datamap)
+
+#### DataMap Management 
+Lucene DataMap can be created using following DDL
+  ```
+  CREATE DATAMAP [IF NOT EXISTS] datamap_name
+  ON TABLE main_table
+  USING 'lucene'
+  DMPROPERTIES ('index_columns'='city, name', ...)
+  ```
+
+DataMap can be dropped using following DDL:
+  ```
+  DROP DATAMAP [IF EXISTS] datamap_name
+  ON TABLE main_table
+  ```
+To show all DataMaps created, use:
+  ```
+  SHOW DATAMAP 
+  ON TABLE main_table
+  ```
+It will show all DataMaps created on main table.
+
+
+## Lucene DataMap Introduction
+  Lucene is a high performance, full featured text search engine. Lucene is integrated into carbon as
+  an index datamap and managed along with main tables by CarbonData. Users can create a lucene datamap
+  to improve query performance on string columns that hold long text content, so that a tokenized
+  word or a pattern of it can be searched with a lucene query on the text content.
+  
+  For instance, main table called **datamap_test** which is defined as:
+  
+  ```
+  CREATE TABLE datamap_test (
+    name string,
+    age int,
+    city string,
+    country string)
+  STORED BY 'carbondata'
+  ```
+  
+  User can create Lucene datamap using the Create DataMap DDL:
+  
+  ```
+  CREATE DATAMAP dm
+  ON TABLE datamap_test
+  USING 'lucene'
+  DMPROPERTIES ('INDEX_COLUMNS' = 'name, country',)
+  ```
+
+**DMProperties**
+1. INDEX_COLUMNS: The list of string columns on which lucene creates indexes.
+2. FLUSH_CACHE: size of the cache to maintain in the Lucene writer. If specified, unique data is
+   aggregated until the cache limit is reached and then flushed to Lucene. It is best suited for low
+   cardinality dimensions.
+3. SPLIT_BLOCKLET: when set to true, data is stored blocklet-wise in lucene, i.e. a new folder is
+   created for each blocklet. This avoids storing the blocklet id in lucene and keeps the lucene
+   data in small chunks.
+   
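+For instance (the datamap name and property values below are illustrative), a lucene datamap using 
+these properties could be created as:
+```
+CREATE DATAMAP dm2
+ON TABLE datamap_test
+USING 'lucene'
+DMPROPERTIES ('index_columns' = 'name', 'flush_cache' = '1024', 'split_blocklet' = 'true')
+```
+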
+## Loading data
+When data is loaded to the main table, lucene index files are generated for all the
+index_columns (string columns) given in DMProperties; these index files contain information about
+the data location of the index_columns. The index files are written inside a folder named after the
+datamap inside each segment folder.
+
+A system level configuration, carbon.lucene.compression.mode, controls the compression of the
+lucene index files. The default value is speed, which favours faster index writing. If the value is
+compression, the index files are compressed to reduce their size, as shown below.
+
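+For example, to favour smaller index files, the property (typically set in the carbon.properties 
+file) could be configured as:
+```
+carbon.lucene.compression.mode=compression
+```
+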
+## Querying data
+As a technique for query acceleration, Lucene indexes cannot be queried directly.
+Queries are to be made on the main table. When a query with TEXT_MATCH('name:c10') or 
+TEXT_MATCH_WITH_LIMIT('name:n10',10) [the second parameter represents the number of results to be 
+returned; if the user does not specify this value, all results are returned without any limit] is 
+fired, two jobs are launched. The first job writes temporary files, containing lucene's search 
+results, into a folder created at table level; these files are read in the second job to give 
+faster results. The temporary files are cleared once the query finishes.
+
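+For instance, a limited text search as described above could be written as:
+```
+select * from datamap_test where TEXT_MATCH_WITH_LIMIT('name:n10', 10)
+```
+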
+Users can verify whether a query can leverage the Lucene datamap by executing the `EXPLAIN`
+command, which shows the transformed logical plan; from it, users can check whether the TEXT_MATCH()
+filter is applied to the query.
+
+**Note:**
+ 1. The filter columns in TEXT_MATCH or TEXT_MATCH_WITH_LIMIT must always be in lower case, and 
+filter conditions like 'AND' and 'OR' must be in upper case.
+
+      Ex: 
+      ```
+      select * from datamap_test where TEXT_MATCH('name:*10 AND name:*n*')
+      ```
+     
+2. A query supports only one TEXT_MATCH UDF in the filter condition; multiple UDFs are not supported.
+
+   The following query is supported:
+   ```
+   select * from datamap_test where TEXT_MATCH('name:*10 AND name:*n*')
+   ```
+       
+   The following query is not supported:
+   ```
+   select * from datamap_test where TEXT_MATCH('name:*10') AND TEXT_MATCH('name:*n*')
+   ```
+       
+          
+The LIKE queries below can be converted to TEXT_MATCH queries as follows:
+```
+select * from datamap_test where name='n10'
+
+select * from datamap_test where name like 'n1%'
+
+select * from datamap_test where name like '%10'
+
+select * from datamap_test where name like '%n%'
+
+select * from datamap_test where name like '%10' and name not like '%n%'
+```
+Lucene TEXT_MATCH Queries:
+```
+select * from datamap_test where TEXT_MATCH('name:n10')
+
+select * from datamap_test where TEXT_MATCH('name:n1*')
+
+select * from datamap_test where TEXT_MATCH('name:*10')
+
+select * from datamap_test where TEXT_MATCH('name:*n*')
+
+select * from datamap_test where TEXT_MATCH('name:*10 -name:*n*')
+```
+**Note:** For lucene queries and syntax, refer to [lucene-syntax](www.lucenetutorial.com/lucene-query-syntax.html)
+
+## Data Management with lucene datamap
+Once a lucene datamap is created on the main table, the following commands are not supported on
+the main table:
+1. Data management commands: `UPDATE/DELETE`.
+2. Schema management commands: `ALTER TABLE DROP COLUMN`, `ALTER TABLE CHANGE DATATYPE`, 
+`ALTER TABLE RENAME`.
+3. Partition management commands: `ALTER TABLE ADD/DROP PARTITION`.
+
+**Note**: Adding a new column is supported. For the drop column and change datatype commands,
+CarbonData checks whether the operation impacts the lucene datamap; if it does not, the operation
+is allowed, otherwise the operation is rejected by throwing an exception.
+
+However, there is still a way to carry out these operations on the main table. In the current
+CarbonData release, the user can do the following (see the sketch after this list):
+1. Remove the lucene datamap with the `DROP DATAMAP` command.
+2. Carry out the data management operation on the main table.
+3. Create the lucene datamap again with the `CREATE DATAMAP` command.
+Basically, the user manually triggers the operation by re-building the datamap.
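+For example, with the table and datamap names used above (the DELETE statement is just a sample 
+unsupported operation):
+```
+DROP DATAMAP dm ON TABLE datamap_test
+
+DELETE FROM datamap_test WHERE name = 'n10'
+
+CREATE DATAMAP dm
+ON TABLE datamap_test
+USING 'lucene'
+DMPROPERTIES ('index_columns' = 'name, country')
+```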
+
+


[23/50] [abbrv] carbondata git commit: [CARBONDATA-2491] Fix the error when reader read twice with SDK carbonReader

Posted by gv...@apache.org.
[CARBONDATA-2491] Fix the error when reader read twice with SDK carbonReader

This PR includes:
1. Fix the out-of-bounds error when the reader reads twice with the SDK CarbonReader
2. Fix the java.lang.NegativeArraySizeException
3. Add timestamp and bad record test cases
4. Support parallel reads by two readers

This closes #2318
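
A minimal usage sketch of the fixed behaviour, using the reader API exercised in the test changes
below (the path and projection are illustrative):

    // Build a reader, consume all rows, then close it; after close() any further call
    // such as hasNext() is rejected by the new validateReader() guard.
    CarbonReader reader = CarbonReader
        .builder("./testWriteFiles", "_temp")
        .projection(new String[]{"name", "age"})
        .build();
    while (reader.hasNext()) {
      Object[] row = (Object[]) reader.readNextRow();
      System.out.println(row[0] + ", " + row[1]);
    }
    reader.close();

    // Reading the same files again simply means building a second reader,
    // which can also run in parallel with another open reader.
    CarbonReader reader2 = CarbonReader
        .builder("./testWriteFiles", "_temp")
        .projection(new String[]{"name", "age"})
        .build();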


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/a7ac6564
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/a7ac6564
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/a7ac6564

Branch: refs/heads/spark-2.3
Commit: a7ac65648c827be74b77bd6ef1f715508ba53a2c
Parents: 6cc86db
Author: xubo245 <xu...@huawei.com>
Authored: Fri May 18 15:40:16 2018 +0800
Committer: kunal642 <ku...@gmail.com>
Committed: Thu May 24 19:23:26 2018 +0530

----------------------------------------------------------------------
 .../core/datamap/DataMapStoreManager.java       |   2 +-
 .../scan/result/iterator/ChunkRowIterator.java  |  18 +-
 .../carbondata/hadoop/CarbonRecordReader.java   |   3 +
 .../carbondata/sdk/file/CarbonReader.java       |  17 ++
 .../carbondata/sdk/file/CarbonReaderTest.java   | 234 ++++++++++++++++++-
 5 files changed, 263 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/a7ac6564/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
index 1359e85..0fcf4cd 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
@@ -411,7 +411,7 @@ public final class DataMapStoreManager {
   }
 
   /**
-   * this methos clears the datamap of table from memory
+   * this method clears the datamap of the table from memory
    */
   public void clearDataMaps(String tableUniqName) {
     List<TableDataMap> tableIndices = allDataMaps.get(tableUniqName);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a7ac6564/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/ChunkRowIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/ChunkRowIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/ChunkRowIterator.java
index 1235789..0866395 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/ChunkRowIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/ChunkRowIterator.java
@@ -31,14 +31,14 @@ public class ChunkRowIterator extends CarbonIterator<Object[]> {
   private CarbonIterator<RowBatch> iterator;
 
   /**
-   * currect chunk
+   * current chunk
    */
-  private RowBatch currentchunk;
+  private RowBatch currentChunk;
 
   public ChunkRowIterator(CarbonIterator<RowBatch> iterator) {
     this.iterator = iterator;
     if (iterator.hasNext()) {
-      currentchunk = iterator.next();
+      currentChunk = iterator.next();
     }
   }
 
@@ -50,13 +50,13 @@ public class ChunkRowIterator extends CarbonIterator<Object[]> {
    * @return {@code true} if the iteration has more elements
    */
   @Override public boolean hasNext() {
-    if (null != currentchunk) {
-      if ((currentchunk.hasNext())) {
+    if (null != currentChunk) {
+      if ((currentChunk.hasNext())) {
         return true;
-      } else if (!currentchunk.hasNext()) {
+      } else if (!currentChunk.hasNext()) {
         while (iterator.hasNext()) {
-          currentchunk = iterator.next();
-          if (currentchunk != null && currentchunk.hasNext()) {
+          currentChunk = iterator.next();
+          if (currentChunk != null && currentChunk.hasNext()) {
             return true;
           }
         }
@@ -71,7 +71,7 @@ public class ChunkRowIterator extends CarbonIterator<Object[]> {
    * @return the next element in the iteration
    */
   @Override public Object[] next() {
-    return currentchunk.next();
+    return currentChunk.next();
   }
 
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a7ac6564/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
index 1191a38..d4b091c 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
@@ -23,6 +23,7 @@ import java.util.Map;
 
 import org.apache.carbondata.common.CarbonIterator;
 import org.apache.carbondata.core.cache.dictionary.Dictionary;
+import org.apache.carbondata.core.datamap.DataMapStoreManager;
 import org.apache.carbondata.core.datastore.block.TableBlockInfo;
 import org.apache.carbondata.core.scan.executor.QueryExecutor;
 import org.apache.carbondata.core.scan.executor.QueryExecutorFactory;
@@ -118,6 +119,8 @@ public class CarbonRecordReader<T> extends AbstractRecordReader<T> {
         CarbonUtil.clearDictionaryCache(entry.getValue());
       }
     }
+    // Clear the datamap cache
+    DataMapStoreManager.getInstance().getDefaultDataMap(queryModel.getTable()).clear();
     // close read support
     readSupport.close();
     try {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a7ac6564/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index c9cd8f5..6517e89 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -44,6 +44,8 @@ public class CarbonReader<T> {
 
   private int index;
 
+  private boolean initialise;
+
   /**
    * Call {@link #builder(String)} to construct an instance
    */
@@ -51,6 +53,7 @@ public class CarbonReader<T> {
     if (readers.size() == 0) {
       throw new IllegalArgumentException("no reader");
     }
+    this.initialise = true;
     this.readers = readers;
     this.index = 0;
     this.currentReader = readers.get(0);
@@ -60,6 +63,7 @@ public class CarbonReader<T> {
    * Return true if has next row
    */
   public boolean hasNext() throws IOException, InterruptedException {
+    validateReader();
     if (currentReader.nextKeyValue()) {
       return true;
     } else {
@@ -78,6 +82,7 @@ public class CarbonReader<T> {
    * Read and return next row object
    */
   public T readNextRow() throws IOException, InterruptedException {
+    validateReader();
     return currentReader.getCurrentValue();
   }
 
@@ -111,6 +116,18 @@ public class CarbonReader<T> {
    * @throws IOException
    */
   public void close() throws IOException {
+    validateReader();
     this.currentReader.close();
+    this.initialise = false;
+  }
+
+  /**
+   * Validate the reader
+   */
+  private void validateReader() {
+    if (!this.initialise) {
+      throw new RuntimeException(this.getClass().getSimpleName() +
+          " not initialise, please create it first.");
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a7ac6564/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 937dde8..0d2c84e 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -18,21 +18,30 @@
 package org.apache.carbondata.sdk.file;
 
 import java.io.File;
+import java.io.FileFilter;
 import java.io.FilenameFilter;
 import java.io.IOException;
+import java.sql.Date;
+import java.sql.Timestamp;
 import java.util.List;
 
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.schema.table.TableInfo;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
+import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
 
+import junit.framework.TestCase;
 import org.apache.commons.io.FileUtils;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-public class CarbonReaderTest {
+public class CarbonReaderTest extends TestCase {
 
   @Before
   public void cleanFile() {
@@ -77,6 +86,99 @@ public class CarbonReaderTest {
     Assert.assertEquals(i, 100);
 
     reader.close();
+
+    // Read again
+    CarbonReader reader2 = CarbonReader
+        .builder(path, "_temp")
+        .projection(new String[]{"name", "age"})
+        .build();
+
+    i = 0;
+    while (reader2.hasNext()) {
+      Object[] row = (Object[]) reader2.readNextRow();
+      // Default sort column is applied for dimensions. So, need  to validate accordingly
+      Assert.assertEquals(name[i], row[0]);
+      Assert.assertEquals(age[i], row[1]);
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+    reader2.close();
+
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Test
+  public void testReadFilesParallel() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader
+        .builder(path, "_temp")
+        .projection(new String[]{"name", "age"})
+        .build();
+    // Reader 2
+    CarbonReader reader2 = CarbonReader
+        .builder(path, "_temp")
+        .projection(new String[]{"name", "age"})
+        .build();
+
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      Object[] row2 = (Object[]) reader2.readNextRow();
+      // parallel compare
+      Assert.assertEquals(row[0], row2[0]);
+      Assert.assertEquals(row[1], row2[1]);
+    }
+
+    reader.close();
+    reader2.close();
+
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+  @Test
+  public void testReadAfterClose() throws IOException, InterruptedException {
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(new Schema(fields), path, true);
+
+    CarbonReader reader = CarbonReader.builder(path, "_temp")
+        .projection(new String[]{"name", "age"}).build();
+
+    reader.close();
+    String msg = "CarbonReader not initialise, please create it first.";
+    try {
+      reader.hasNext();
+      assert (false);
+    } catch (RuntimeException e) {
+      assert (e.getMessage().equals(msg));
+    }
+
+    try {
+      reader.readNextRow();
+      assert (false);
+    } catch (RuntimeException e) {
+      assert (e.getMessage().equals(msg));
+    }
+
+    try {
+      reader.close();
+      assert (false);
+    } catch (RuntimeException e) {
+      assert (e.getMessage().equals(msg));
+    }
+
     FileUtils.deleteDirectory(new File(path));
   }
 
@@ -177,4 +279,134 @@ public class CarbonReaderTest {
     reader.close();
     FileUtils.deleteDirectory(new File(path));
   }
+
+  CarbonProperties carbonProperties;
+
+  @Override
+  public void setUp() {
+    carbonProperties = CarbonProperties.getInstance();
+  }
+
+  private static final LogService LOGGER =
+      LogServiceFactory.getLogService(CarbonReaderTest.class.getName());
+
+  @Test
+  public void testTimeStampAndBadRecord() throws IOException, InterruptedException {
+    String timestampFormat = carbonProperties.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+    String badRecordAction = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION_DEFAULT);
+    String badRecordLoc = carbonProperties.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL);
+    String rootPath = new File(this.getClass().getResource("/").getPath()
+        + "../../").getCanonicalPath();
+    String storeLocation = rootPath + "/target/";
+    carbonProperties
+        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, storeLocation)
+        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd hh:mm:ss")
+        .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "REDIRECT");
+    String path = "./testWriteFiles";
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[9];
+    fields[0] = new Field("stringField", DataTypes.STRING);
+    fields[1] = new Field("intField", DataTypes.INT);
+    fields[2] = new Field("shortField", DataTypes.SHORT);
+    fields[3] = new Field("longField", DataTypes.LONG);
+    fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+    fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+    fields[6] = new Field("dateField", DataTypes.DATE);
+    fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+    fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
+
+    try {
+      CarbonWriterBuilder builder = CarbonWriter.builder()
+          .isTransactionalTable(true)
+          .persistSchemaFile(true)
+          .outputPath(path);
+
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
+
+      for (int i = 0; i < 100; i++) {
+        String[] row = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2018-05-12",
+            "2018-05-12",
+            "12.345"
+        };
+        writer.write(row);
+        String[] row2 = new String[]{
+            "robot" + (i % 10),
+            String.valueOf(i),
+            String.valueOf(i),
+            String.valueOf(Long.MAX_VALUE - i),
+            String.valueOf((double) i / 2),
+            String.valueOf(true),
+            "2019-03-02",
+            "2019-02-12 03:03:34",
+            "12.345"
+        };
+        writer.write(row2);
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      Assert.fail(e.getMessage());
+    }
+    LOGGER.audit("Bad record location:" + storeLocation);
+    File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+    Assert.assertTrue(segmentFolder.exists());
+
+    File[] dataFiles = segmentFolder.listFiles(new FileFilter() {
+      @Override
+      public boolean accept(File pathname) {
+        return pathname.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT);
+      }
+    });
+    Assert.assertNotNull(dataFiles);
+    Assert.assertTrue(dataFiles.length > 0);
+
+    CarbonReader reader = CarbonReader.builder(path, "_temp")
+        .projection(new String[]{
+            "stringField"
+            , "shortField"
+            , "intField"
+            , "longField"
+            , "doubleField"
+            , "boolField"
+            , "dateField"
+            , "timeField"
+            , "decimalField"}).build();
+
+    int i = 0;
+    while (reader.hasNext()) {
+      Object[] row = (Object[]) reader.readNextRow();
+      int id = (int) row[2];
+      Assert.assertEquals("robot" + (id % 10), row[0]);
+      Assert.assertEquals(Short.parseShort(String.valueOf(id)), row[1]);
+      Assert.assertEquals(Long.MAX_VALUE - id, row[3]);
+      Assert.assertEquals((double) id / 2, row[4]);
+      Assert.assertEquals(true, (boolean) row[5]);
+      long day = 24L * 3600 * 1000;
+      Assert.assertEquals("2019-03-02", new Date((day * ((int) row[6]))).toString());
+      Assert.assertEquals("2019-02-12 03:03:34.0", new Timestamp((long) row[7] / 1000).toString());
+      i++;
+    }
+    Assert.assertEquals(i, 100);
+
+    reader.close();
+    FileUtils.deleteDirectory(new File(path));
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+        timestampFormat);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION,
+        badRecordAction);
+    carbonProperties.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        badRecordLoc);
+  }
+
 }


[02/50] [abbrv] carbondata git commit: [CARBONDATA-2494] Fix lucene datasize and performance

Posted by gv...@apache.org.
[CARBONDATA-2494] Fix lucene datasize and performance

Improved lucene datamap size and performance by using the following parameters.
New DM properties
1. flush_cache: size of the cache to maintain in the Lucene writer. If specified, the writer aggregates the unique data until the cache limit is reached and then flushes it to Lucene. It is best suited for low cardinality dimensions.
2. split_blocklet: when set to true, the data is stored blocklet-wise in lucene, i.e. a new folder is created for each blocklet. This eliminates storing the blocklet id in lucene and also keeps the lucene index in small chunks of data.

This closes #2275


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/f184de88
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/f184de88
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/f184de88

Branch: refs/heads/spark-2.3
Commit: f184de885a4656c654812c1244891732af788a39
Parents: cf55028
Author: ravipesala <ra...@gmail.com>
Authored: Sun May 6 23:42:09 2018 +0530
Committer: Jacky Li <ja...@qq.com>
Committed: Mon May 21 09:10:59 2018 +0800

----------------------------------------------------------------------
 .../datamap/lucene/LuceneDataMapBuilder.java    | 159 +++------
 .../lucene/LuceneDataMapFactoryBase.java        |  68 +++-
 .../datamap/lucene/LuceneDataMapWriter.java     | 345 ++++++++++++++-----
 .../datamap/lucene/LuceneFineGrainDataMap.java  | 206 +++++++----
 .../lucene/LuceneFineGrainDataMapFactory.java   |   4 +-
 .../lucene/LuceneFineGrainDataMapSuite.scala    |  44 +++
 6 files changed, 559 insertions(+), 267 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/f184de88/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
index 35c07f0..eb70220 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapBuilder.java
@@ -17,8 +17,12 @@
 
 package org.apache.carbondata.datamap.lucene;
 
+import java.io.File;
 import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
@@ -26,32 +30,25 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.datamap.dev.DataMapBuilder;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
-import org.apache.carbondata.core.metadata.datatype.DataType;
-import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 
+import static org.apache.carbondata.datamap.lucene.LuceneDataMapWriter.addData;
+import static org.apache.carbondata.datamap.lucene.LuceneDataMapWriter.addToCache;
+import static org.apache.carbondata.datamap.lucene.LuceneDataMapWriter.flushCache;
+
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
 import org.apache.lucene.codecs.lucene62.Lucene62Codec;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.DoublePoint;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.FloatPoint;
-import org.apache.lucene.document.IntPoint;
-import org.apache.lucene.document.IntRangeField;
-import org.apache.lucene.document.LongPoint;
-import org.apache.lucene.document.StoredField;
-import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.solr.store.hdfs.HdfsDirectory;
+import org.roaringbitmap.RoaringBitmap;
 
 public class LuceneDataMapBuilder implements DataMapBuilder {
 
@@ -66,21 +63,38 @@ public class LuceneDataMapBuilder implements DataMapBuilder {
 
   private IndexWriter indexWriter = null;
 
-  private IndexWriter pageIndexWriter = null;
-
   private Analyzer analyzer = null;
 
-  LuceneDataMapBuilder(String tablePath, String dataMapName,
-      Segment segment, String shardName, List<CarbonColumn> indexColumns) {
-    this.dataMapPath = CarbonTablePath.getDataMapStorePathOnShardName(
-        tablePath, segment.getSegmentNo(), dataMapName, shardName);
+  private int writeCacheSize;
+
+  private Map<LuceneDataMapWriter.LuceneColumnKeys, Map<Integer, RoaringBitmap>> cache =
+      new HashMap<>();
+
+  private ByteBuffer intBuffer = ByteBuffer.allocate(4);
+
+  private boolean storeBlockletWise;
+
+  private int currentBlockletId = -1;
+
+  LuceneDataMapBuilder(String tablePath, String dataMapName, Segment segment, String shardName,
+      List<CarbonColumn> indexColumns, int writeCacheSize, boolean storeBlockletWise) {
+    this.dataMapPath = CarbonTablePath
+        .getDataMapStorePathOnShardName(tablePath, segment.getSegmentNo(), dataMapName, shardName);
     this.indexColumns = indexColumns;
     this.columnsCount = indexColumns.size();
+    this.writeCacheSize = writeCacheSize;
+    this.storeBlockletWise = storeBlockletWise;
   }
 
   @Override
   public void initialize() throws IOException {
-    // get index path, put index data into segment's path
+    if (!storeBlockletWise) {
+      // get index path, put index data into segment's path
+      indexWriter = createIndexWriter(dataMapPath);
+    }
+  }
+
+  private IndexWriter createIndexWriter(String dataMapPath) throws IOException {
     Path indexPath = FileFactory.getPath(dataMapPath);
     FileSystem fs = FileFactory.getFileSystem(indexPath);
 
@@ -111,107 +125,44 @@ public class LuceneDataMapBuilder implements DataMapBuilder {
           .setCodec(new Lucene62Codec(Lucene50StoredFieldsFormat.Mode.BEST_COMPRESSION));
     }
 
-    indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(analyzer));
-  }
-
-  private IndexWriter createPageIndexWriter() throws IOException {
-    // save index data into ram, write into disk after one page finished
-    RAMDirectory ramDir = new RAMDirectory();
-    return new IndexWriter(ramDir, new IndexWriterConfig(analyzer));
-  }
-
-  private void addPageIndex(IndexWriter pageIndexWriter) throws IOException {
-
-    Directory directory = pageIndexWriter.getDirectory();
-
-    // close ram writer
-    pageIndexWriter.close();
-
-    // add ram index data into disk
-    indexWriter.addIndexes(directory);
-
-    // delete this ram data
-    directory.close();
+    return new IndexWriter(indexDir, new IndexWriterConfig(analyzer));
   }
 
   @Override
-  public void addRow(int blockletId, int pageId, int rowId, Object[] values) throws IOException {
-    if (rowId == 0) {
-      if (pageIndexWriter != null) {
-        addPageIndex(pageIndexWriter);
+  public void addRow(int blockletId, int pageId, int rowId, Object[] values)
+      throws IOException {
+    if (storeBlockletWise) {
+      if (currentBlockletId != blockletId) {
+        close();
+        indexWriter = createIndexWriter(dataMapPath + File.separator + blockletId);
+        currentBlockletId = blockletId;
       }
-      pageIndexWriter = createPageIndexWriter();
     }
-
-    // create a new document
-    Document doc = new Document();
-
-    // add blocklet Id
-    doc.add(new IntPoint(LuceneDataMapWriter.BLOCKLETID_NAME, (int) values[columnsCount]));
-    doc.add(new StoredField(LuceneDataMapWriter.BLOCKLETID_NAME, (int) values[columnsCount]));
-
-    // add page id
-    doc.add(new IntPoint(LuceneDataMapWriter.PAGEID_NAME, (int) values[columnsCount + 1]));
-    doc.add(new StoredField(LuceneDataMapWriter.PAGEID_NAME, (int) values[columnsCount + 1]));
-
-    // add row id
-    doc.add(new IntPoint(LuceneDataMapWriter.ROWID_NAME, rowId));
-    doc.add(new StoredField(LuceneDataMapWriter.ROWID_NAME, rowId));
-
     // add other fields
+    LuceneDataMapWriter.LuceneColumnKeys columns =
+        new LuceneDataMapWriter.LuceneColumnKeys(columnsCount);
     for (int colIdx = 0; colIdx < columnsCount; colIdx++) {
-      CarbonColumn column = indexColumns.get(colIdx);
-      addField(doc, column.getColName(), column.getDataType(), values[colIdx]);
+      columns.getColValues()[colIdx] = values[colIdx];
+    }
+    if (writeCacheSize > 0) {
+      addToCache(columns, rowId, pageId, blockletId, cache, intBuffer, storeBlockletWise);
+      flushCacheIfPossible();
+    } else {
+      addData(columns, rowId, pageId, blockletId, intBuffer, indexWriter, indexColumns,
+          storeBlockletWise);
     }
 
-    pageIndexWriter.addDocument(doc);
   }
 
-  private boolean addField(Document doc, String fieldName, DataType type, Object value) {
-    if (type == DataTypes.STRING) {
-      doc.add(new TextField(fieldName, (String) value, Field.Store.NO));
-    } else if (type == DataTypes.BYTE) {
-      // byte type , use int range to deal with byte, lucene has no byte type
-      IntRangeField field =
-          new IntRangeField(fieldName, new int[] { Byte.MIN_VALUE }, new int[] { Byte.MAX_VALUE });
-      field.setIntValue((int) value);
-      doc.add(field);
-    } else if (type == DataTypes.SHORT) {
-      // short type , use int range to deal with short type, lucene has no short type
-      IntRangeField field = new IntRangeField(fieldName, new int[] { Short.MIN_VALUE },
-          new int[] { Short.MAX_VALUE });
-      field.setShortValue((short) value);
-      doc.add(field);
-    } else if (type == DataTypes.INT) {
-      // int type , use int point to deal with int type
-      doc.add(new IntPoint(fieldName, (int) value));
-    } else if (type == DataTypes.LONG) {
-      // long type , use long point to deal with long type
-      doc.add(new LongPoint(fieldName, (long) value));
-    } else if (type == DataTypes.FLOAT) {
-      doc.add(new FloatPoint(fieldName, (float) value));
-    } else if (type == DataTypes.DOUBLE) {
-      doc.add(new DoublePoint(fieldName, (double) value));
-    } else if (type == DataTypes.DATE) {
-      // TODO: how to get data value
-    } else if (type == DataTypes.TIMESTAMP) {
-      // TODO: how to get
-    } else if (type == DataTypes.BOOLEAN) {
-      IntRangeField field = new IntRangeField(fieldName, new int[] { 0 }, new int[] { 1 });
-      field.setIntValue((boolean) value ? 1 : 0);
-      doc.add(field);
-    } else {
-      LOGGER.error("unsupport data type " + type);
-      throw new RuntimeException("unsupported data type " + type);
+  private void flushCacheIfPossible() throws IOException {
+    if (cache.size() >= writeCacheSize) {
+      flushCache(cache, indexColumns, indexWriter, storeBlockletWise);
     }
-    return true;
   }
 
   @Override
   public void finish() throws IOException {
-    if (indexWriter != null && pageIndexWriter != null) {
-      addPageIndex(pageIndexWriter);
-    }
+    flushCache(cache, indexColumns, indexWriter, storeBlockletWise);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f184de88/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
index 4bcdebb..fab0565 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapFactoryBase.java
@@ -62,6 +62,29 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 abstract class LuceneDataMapFactoryBase<T extends DataMap> extends DataMapFactory<T> {
 
   /**
+   * Size of the cache to maintain in the Lucene writer. If specified, the writer aggregates the
+   * unique data until the cache limit is reached and then flushes it to Lucene.
+   * It is best suited for low cardinality dimensions.
+   */
+  static final String FLUSH_CACHE = "flush_cache";
+
+  /**
+   * By default it does not use any cache.
+   */
+  static final String FLUSH_CACHE_DEFAULT_SIZE = "-1";
+
+  /**
+   * When set to true, the data is stored blocklet-wise in lucene, i.e. a new folder is
+   * created for each blocklet. This eliminates storing the blocklet id in lucene
+   * and also keeps the lucene index in small chunks of data.
+   */
+  static final String SPLIT_BLOCKLET = "split_blocklet";
+
+  /**
+   * By default it is false
+   */
+  static final String SPLIT_BLOCKLET_DEFAULT = "true";
+  /**
    * Logger
    */
   final LogService LOGGER = LogServiceFactory.getLogService(this.getClass().getName());
@@ -86,6 +109,12 @@ abstract class LuceneDataMapFactoryBase<T extends DataMap> extends DataMapFactor
    */
   AbsoluteTableIdentifier tableIdentifier = null;
 
+  List<CarbonColumn> indexedCarbonColumns = null;
+
+  int flushCacheSize;
+
+  boolean storeBlockletWise;
+
   public LuceneDataMapFactoryBase(CarbonTable carbonTable, DataMapSchema dataMapSchema)
       throws MalformedDataMapCommandException {
     super(carbonTable, dataMapSchema);
@@ -96,7 +125,9 @@ abstract class LuceneDataMapFactoryBase<T extends DataMap> extends DataMapFactor
     this.dataMapName = dataMapSchema.getDataMapName();
 
     // validate DataMapSchema and get index columns
-    List<CarbonColumn> indexedColumns =  carbonTable.getIndexedColumns(dataMapSchema);
+    indexedCarbonColumns = carbonTable.getIndexedColumns(dataMapSchema);
+    flushCacheSize = validateAndGetWriteCacheSize(dataMapSchema);
+    storeBlockletWise = validateAndGetStoreBlockletWise(dataMapSchema);
 
     // add optimizedOperations
     List<ExpressionType> optimizedOperations = new ArrayList<ExpressionType>();
@@ -107,13 +138,39 @@ abstract class LuceneDataMapFactoryBase<T extends DataMap> extends DataMapFactor
     // optimizedOperations.add(ExpressionType.LESSTHAN_EQUALTO);
     // optimizedOperations.add(ExpressionType.NOT);
     optimizedOperations.add(ExpressionType.TEXT_MATCH);
-    this.dataMapMeta = new DataMapMeta(indexedColumns, optimizedOperations);
-
+    this.dataMapMeta = new DataMapMeta(indexedCarbonColumns, optimizedOperations);
     // get analyzer
     // TODO: how to get analyzer ?
     analyzer = new StandardAnalyzer();
   }
 
+  public static int validateAndGetWriteCacheSize(DataMapSchema schema) {
+    String cacheStr = schema.getProperties().get(FLUSH_CACHE);
+    if (cacheStr == null) {
+      cacheStr = FLUSH_CACHE_DEFAULT_SIZE;
+    }
+    int cacheSize;
+    try {
+      cacheSize = Integer.parseInt(cacheStr);
+    } catch (NumberFormatException e) {
+      cacheSize = -1;
+    }
+    return cacheSize;
+  }
+
+  public static boolean validateAndGetStoreBlockletWise(DataMapSchema schema) {
+    String splitBlockletStr = schema.getProperties().get(SPLIT_BLOCKLET);
+    if (splitBlockletStr == null) {
+      splitBlockletStr = SPLIT_BLOCKLET_DEFAULT;
+    }
+    boolean splitBlockletWise;
+    try {
+      splitBlockletWise = Boolean.parseBoolean(splitBlockletStr);
+    } catch (NumberFormatException e) {
+      splitBlockletWise = true;
+    }
+    return splitBlockletWise;
+  }
   /**
    * this method will delete the datamap folders during drop datamap
    * @throws MalformedDataMapCommandException
@@ -149,13 +206,14 @@ abstract class LuceneDataMapFactoryBase<T extends DataMap> extends DataMapFactor
   public DataMapWriter createWriter(Segment segment, String shardName) {
     LOGGER.info("lucene data write to " + shardName);
     return new LuceneDataMapWriter(getCarbonTable().getTablePath(), dataMapName,
-        dataMapMeta.getIndexedColumns(), segment, shardName, true);
+        dataMapMeta.getIndexedColumns(), segment, shardName, flushCacheSize,
+        storeBlockletWise);
   }
 
   @Override
   public DataMapBuilder createBuilder(Segment segment, String shardName) {
     return new LuceneDataMapBuilder(getCarbonTable().getTablePath(), dataMapName,
-        segment, shardName, dataMapMeta.getIndexedColumns());
+        segment, shardName, dataMapMeta.getIndexedColumns(), flushCacheSize, storeBlockletWise);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f184de88/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
index 3615936..759b607 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
@@ -17,9 +17,14 @@
 
 package org.apache.carbondata.datamap.lucene;
 
+import java.io.File;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.logging.LogService;
@@ -55,6 +60,8 @@ import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.solr.store.hdfs.HdfsDirectory;
+import org.roaringbitmap.IntIterator;
+import org.roaringbitmap.RoaringBitmap;
 
 /**
  * Implementation to write lucene index while loading
@@ -74,30 +81,64 @@ public class LuceneDataMapWriter extends DataMapWriter {
 
   private Analyzer analyzer = null;
 
-  private boolean isFineGrain = true;
+  public static final String PAGEID_NAME = "pageId";
 
-  public static final String BLOCKLETID_NAME = "blockletId";
+  public static final String ROWID_NAME = "rowId";
 
-  private String indexShardName = null;
+  private Map<LuceneColumnKeys, Map<Integer, RoaringBitmap>> cache = new HashMap<>();
 
-  public static final String PAGEID_NAME = "pageId";
+  private int cacheSize;
 
-  public static final String ROWID_NAME = "rowId";
+  private ByteBuffer intBuffer = ByteBuffer.allocate(4);
+
+  private boolean storeBlockletWise;
 
   LuceneDataMapWriter(String tablePath, String dataMapName, List<CarbonColumn> indexColumns,
-      Segment segment, String shardName, boolean isFineGrain) {
+      Segment segment, String shardName, int flushSize,
+      boolean storeBlockletWise) {
     super(tablePath, dataMapName, indexColumns, segment, shardName);
-    this.isFineGrain = isFineGrain;
+    this.cacheSize = flushSize;
+    this.storeBlockletWise = storeBlockletWise;
   }
 
   /**
    * Start of new block notification.
    */
   public void onBlockStart(String blockId) throws IOException {
+
+  }
+
+  /**
+   * End of block notification
+   */
+  public void onBlockEnd(String blockId) throws IOException {
+
+  }
+
+  private RAMDirectory ramDir;
+  private IndexWriter ramIndexWriter;
+
+  /**
+   * Start of new blocklet notification.
+   */
+  public void onBlockletStart(int blockletId) throws IOException {
+    if (null == analyzer) {
+      analyzer = new StandardAnalyzer();
+    }
+    // save index data into ram, write into disk after one page finished
+    ramDir = new RAMDirectory();
+    ramIndexWriter = new IndexWriter(ramDir, new IndexWriterConfig(analyzer));
+
     if (indexWriter != null) {
       return;
     }
     // get index path, put index data into segment's path
+    String dataMapPath;
+    if (storeBlockletWise) {
+      dataMapPath = this.dataMapPath + File.separator + blockletId;
+    } else {
+      dataMapPath = this.dataMapPath;
+    }
     Path indexPath = FileFactory.getPath(dataMapPath);
     FileSystem fs = FileFactory.getFileSystem(indexPath);
 
@@ -108,10 +149,6 @@ public class LuceneDataMapWriter extends DataMapWriter {
       }
     }
 
-    if (null == analyzer) {
-      analyzer = new StandardAnalyzer();
-    }
-
     // the indexWriter closes the FileSystem on closing the writer, so for a new configuration
     // and disable the cache for the index writer, it will be closed on closing the writer
     Configuration conf = new Configuration();
@@ -131,26 +168,7 @@ public class LuceneDataMapWriter extends DataMapWriter {
           .setCodec(new Lucene62Codec(Lucene50StoredFieldsFormat.Mode.BEST_COMPRESSION));
     }
 
-    indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(analyzer));
-  }
-
-  /**
-   * End of block notification
-   */
-  public void onBlockEnd(String blockId) throws IOException {
-
-  }
-
-  private RAMDirectory ramDir;
-  private IndexWriter ramIndexWriter;
-
-  /**
-   * Start of new blocklet notification.
-   */
-  public void onBlockletStart(int blockletId) throws IOException {
-    // save index data into ram, write into disk after one page finished
-    ramDir = new RAMDirectory();
-    ramIndexWriter = new IndexWriter(ramDir, new IndexWriterConfig(analyzer));
+    indexWriter = new IndexWriter(indexDir, indexWriterConfig);
   }
 
   /**
@@ -165,6 +183,12 @@ public class LuceneDataMapWriter extends DataMapWriter {
 
     // delete this ram data
     ramDir.close();
+
+    if (storeBlockletWise) {
+      flushCache(cache, getIndexColumns(), indexWriter, storeBlockletWise);
+      indexWriter.close();
+      indexWriter = null;
+    }
   }
 
   /**
@@ -175,52 +199,39 @@ public class LuceneDataMapWriter extends DataMapWriter {
    */
   public void onPageAdded(int blockletId, int pageId, int pageSize, ColumnPage[] pages)
       throws IOException {
+    // save index data into ram, write into disk after one page finished
+    int columnsCount = pages.length;
+    if (columnsCount <= 0) {
+      LOGGER.warn("No data in the page " + pageId + "with blockletid " + blockletId
+          + " to write lucene datamap");
+      return;
+    }
     for (int rowId = 0; rowId < pageSize; rowId++) {
-      // create a new document
-      Document doc = new Document();
-      // add blocklet Id
-      doc.add(new IntPoint(BLOCKLETID_NAME, blockletId));
-      doc.add(new StoredField(BLOCKLETID_NAME, blockletId));
-      //doc.add(new NumericDocValuesField(BLOCKLETID_NAME,blockletId));
-
-      // add page id and row id in Fine Grain data map
-      if (isFineGrain) {
-        // add page Id
-        doc.add(new IntPoint(PAGEID_NAME, pageId));
-        doc.add(new StoredField(PAGEID_NAME, pageId));
-        //doc.add(new NumericDocValuesField(PAGEID_NAME,pageId));
-
-        // add row id
-        doc.add(new IntPoint(ROWID_NAME, rowId));
-        doc.add(new StoredField(ROWID_NAME, rowId));
-        //doc.add(new NumericDocValuesField(ROWID_NAME,rowId));
-      }
-
       // add indexed columns value into the document
-      List<CarbonColumn> indexColumns = getIndexColumns();
-      for (int i = 0; i < pages.length; i++) {
-        // add to lucene only if value is not null
-        if (!pages[i].getNullBits().get(rowId)) {
-          addField(doc, pages[i].getData(rowId), indexColumns.get(i), Field.Store.NO);
+      LuceneColumnKeys columns = new LuceneColumnKeys(getIndexColumns().size());
+      int i = 0;
+      for (ColumnPage page : pages) {
+        if (!page.getNullBits().get(rowId)) {
+          columns.colValues[i++] = getValue(page, rowId);
         }
       }
-
-      // add this document
-      ramIndexWriter.addDocument(doc);
+      if (cacheSize > 0) {
+        addToCache(columns, rowId, pageId, blockletId, cache, intBuffer, storeBlockletWise);
+      } else {
+        addData(columns, rowId, pageId, blockletId, intBuffer, ramIndexWriter, getIndexColumns(),
+            storeBlockletWise);
+      }
+    }
+    if (cacheSize > 0) {
+      flushCacheIfPossible();
     }
-
   }
 
-  private boolean addField(Document doc, Object data, CarbonColumn column, Field.Store store) {
+  private static void addField(Document doc, Object key, String fieldName, Field.Store store) {
     //get field name
-    String fieldName = column.getColName();
-
-    //get field type
-    DataType type = column.getDataType();
-
-    if (type == DataTypes.BYTE) {
+    if (key instanceof Byte) {
       // byte type , use int range to deal with byte, lucene has no byte type
-      byte value = (byte) data;
+      byte value = (Byte) key;
       IntRangeField field =
           new IntRangeField(fieldName, new int[] { Byte.MIN_VALUE }, new int[] { Byte.MAX_VALUE });
       field.setIntValue(value);
@@ -230,9 +241,9 @@ public class LuceneDataMapWriter extends DataMapWriter {
       if (store == Field.Store.YES) {
         doc.add(new StoredField(fieldName, (int) value));
       }
-    } else if (type == DataTypes.SHORT) {
+    } else if (key instanceof Short) {
       // short type , use int range to deal with short type, lucene has no short type
-      short value = (short) data;
+      short value = (Short) key;
       IntRangeField field = new IntRangeField(fieldName, new int[] { Short.MIN_VALUE },
           new int[] { Short.MAX_VALUE });
       field.setShortValue(value);
@@ -242,62 +253,179 @@ public class LuceneDataMapWriter extends DataMapWriter {
       if (store == Field.Store.YES) {
         doc.add(new StoredField(fieldName, (int) value));
       }
-    } else if (type == DataTypes.INT) {
+    } else if (key instanceof Integer) {
       // int type , use int point to deal with int type
-      int value = (int) data;
-      doc.add(new IntPoint(fieldName, value));
+      int value = (Integer) key;
+      doc.add(new IntPoint(fieldName, new int[] { value }));
 
       // if need store it , add StoredField
       if (store == Field.Store.YES) {
         doc.add(new StoredField(fieldName, value));
       }
-    } else if (type == DataTypes.LONG) {
+    } else if (key instanceof Long) {
       // long type , use long point to deal with long type
-      long value = (long) data;
-      doc.add(new LongPoint(fieldName, value));
+      long value = (Long) key;
+      doc.add(new LongPoint(fieldName, new long[] { value }));
 
       // if need store it , add StoredField
       if (store == Field.Store.YES) {
         doc.add(new StoredField(fieldName, value));
       }
-    } else if (type == DataTypes.FLOAT) {
-      float value = (float) data;
-      doc.add(new FloatPoint(fieldName, value));
+    } else if (key instanceof Float) {
+      float value = (Float) key;
+      doc.add(new FloatPoint(fieldName, new float[] { value }));
       if (store == Field.Store.YES) {
         doc.add(new FloatPoint(fieldName, value));
       }
-    } else if (type == DataTypes.DOUBLE) {
-      double value = (double) data;
-      doc.add(new DoublePoint(fieldName, value));
+    } else if (key instanceof Double) {
+      double value = (Double) key;
+      doc.add(new DoublePoint(fieldName, new double[] { value }));
       if (store == Field.Store.YES) {
         doc.add(new DoublePoint(fieldName, value));
       }
+    } else if (key instanceof String) {
+      String strValue = (String) key;
+      doc.add(new TextField(fieldName, strValue, store));
+    } else if (key instanceof Boolean) {
+      boolean value = (Boolean) key;
+      IntRangeField field = new IntRangeField(fieldName, new int[] { 0 }, new int[] { 1 });
+      field.setIntValue(value ? 1 : 0);
+      doc.add(field);
+      if (store == Field.Store.YES) {
+        doc.add(new StoredField(fieldName, value ? 1 : 0));
+      }
+    }
+  }
+
+  private Object getValue(ColumnPage page, int rowId) {
+
+    //get field type
+    DataType type = page.getColumnSpec().getSchemaDataType();
+    Object value = null;
+    if (type == DataTypes.BYTE) {
+      // byte type , use int range to deal with byte, lucene has no byte type
+      value = page.getByte(rowId);
+    } else if (type == DataTypes.SHORT) {
+      // short type , use int range to deal with short type, lucene has no short type
+      value = page.getShort(rowId);
+    } else if (type == DataTypes.INT) {
+      // int type , use int point to deal with int type
+      value = page.getInt(rowId);
+    } else if (type == DataTypes.LONG) {
+      // long type , use long point to deal with long type
+      value = page.getLong(rowId);
+    } else if (type == DataTypes.FLOAT) {
+      value = page.getFloat(rowId);
+    } else if (type == DataTypes.DOUBLE) {
+      value = page.getDouble(rowId);
     } else if (type == DataTypes.STRING) {
-      byte[] value = (byte[]) data;
-      String strValue = null;
+      byte[] bytes = page.getBytes(rowId);
       try {
-        strValue = new String(value, 2, value.length - 2, "UTF-8");
+        value = new String(bytes, 2, bytes.length - 2, "UTF-8");
       } catch (UnsupportedEncodingException e) {
         throw new RuntimeException(e);
       }
-      doc.add(new TextField(fieldName, strValue, store));
     } else if (type == DataTypes.DATE) {
       throw new RuntimeException("unsupported data type " + type);
     } else if (type == DataTypes.TIMESTAMP) {
       throw new RuntimeException("unsupported data type " + type);
     } else if (type == DataTypes.BOOLEAN) {
-      boolean value = (boolean) data;
-      IntRangeField field = new IntRangeField(fieldName, new int[] { 0 }, new int[] { 1 });
-      field.setIntValue(value ? 1 : 0);
-      doc.add(field);
-      if (store == Field.Store.YES) {
-        doc.add(new StoredField(fieldName, value ? 1 : 0));
-      }
+      value = page.getBoolean(rowId);
     } else {
       LOGGER.error("unsupport data type " + type);
       throw new RuntimeException("unsupported data type " + type);
     }
-    return true;
+    return value;
+  }
+
+  public static void addToCache(LuceneColumnKeys key, int rowId, int pageId, int blockletId,
+      Map<LuceneColumnKeys, Map<Integer, RoaringBitmap>> cache, ByteBuffer intBuffer,
+      boolean storeBlockletWise) {
+    Map<Integer, RoaringBitmap> setMap = cache.get(key);
+    if (setMap == null) {
+      setMap = new HashMap<>();
+      cache.put(key, setMap);
+    }
+    int combinKey;
+    if (!storeBlockletWise) {
+      intBuffer.clear();
+      intBuffer.putShort((short) blockletId);
+      intBuffer.putShort((short) pageId);
+      intBuffer.rewind();
+      combinKey = intBuffer.getInt();
+    } else {
+      combinKey = pageId;
+    }
+    RoaringBitmap bitSet = setMap.get(combinKey);
+    if (bitSet == null) {
+      bitSet = new RoaringBitmap();
+      setMap.put(combinKey, bitSet);
+    }
+    bitSet.add(rowId);
+  }
+
+  public static void addData(LuceneColumnKeys key, int rowId, int pageId, int blockletId,
+      ByteBuffer intBuffer, IndexWriter indexWriter, List<CarbonColumn> indexCols,
+      boolean storeBlockletWise) throws IOException {
+
+    Document document = new Document();
+    for (int i = 0; i < key.getColValues().length; i++) {
+      addField(document, key.getColValues()[i], indexCols.get(i).getColName(), Field.Store.NO);
+    }
+    intBuffer.clear();
+    if (storeBlockletWise) {
+      // No need to store blocklet id to it.
+      intBuffer.putShort((short) pageId);
+      intBuffer.putShort((short) rowId);
+      intBuffer.rewind();
+      document.add(new StoredField(ROWID_NAME, intBuffer.getInt()));
+    } else {
+      intBuffer.putShort((short) blockletId);
+      intBuffer.putShort((short) pageId);
+      intBuffer.rewind();
+      document.add(new StoredField(PAGEID_NAME, intBuffer.getInt()));
+      document.add(new StoredField(ROWID_NAME, (short) rowId));
+    }
+    indexWriter.addDocument(document);
+  }
+
+  private void flushCacheIfPossible() throws IOException {
+    if (cache.size() > cacheSize) {
+      flushCache(cache, getIndexColumns(), indexWriter, storeBlockletWise);
+    }
+  }
+
+  public static void flushCache(Map<LuceneColumnKeys, Map<Integer, RoaringBitmap>> cache,
+      List<CarbonColumn> indexCols, IndexWriter indexWriter, boolean storeBlockletWise)
+      throws IOException {
+    for (Map.Entry<LuceneColumnKeys, Map<Integer, RoaringBitmap>> entry : cache.entrySet()) {
+      Document document = new Document();
+      LuceneColumnKeys key = entry.getKey();
+      for (int i = 0; i < key.getColValues().length; i++) {
+        addField(document, key.getColValues()[i], indexCols.get(i).getColName(), Field.Store.NO);
+      }
+      Map<Integer, RoaringBitmap> value = entry.getValue();
+      int count = 0;
+      for (Map.Entry<Integer, RoaringBitmap> pageData : value.entrySet()) {
+        RoaringBitmap bitMap = pageData.getValue();
+        int cardinality = bitMap.getCardinality();
+        // Each row is short and pageid is stored in int
+        ByteBuffer byteBuffer = ByteBuffer.allocate(cardinality * 2 + 4);
+        if (!storeBlockletWise) {
+          byteBuffer.putInt(pageData.getKey());
+        } else {
+          byteBuffer.putShort(pageData.getKey().shortValue());
+        }
+        IntIterator intIterator = bitMap.getIntIterator();
+        while (intIterator.hasNext()) {
+          byteBuffer.putShort((short) intIterator.next());
+        }
+        document.add(new StoredField(PAGEID_NAME + count, byteBuffer.array()));
+        count++;
+      }
+      indexWriter.addDocument(document);
+    }
+    cache.clear();
   }
 
   /**
@@ -305,10 +433,39 @@ public class LuceneDataMapWriter extends DataMapWriter {
    * class.
    */
   public void finish() throws IOException {
+    flushCache(cache, getIndexColumns(), indexWriter, storeBlockletWise);
     // finished a file , close this index writer
     if (indexWriter != null) {
       indexWriter.close();
     }
   }
 
+  /**
+   * Keeps column values of a single row.
+   */
+  public static class LuceneColumnKeys {
+
+    private Object[] colValues;
+
+    public LuceneColumnKeys(int size) {
+      colValues = new Object[size];
+    }
+
+    public Object[] getColValues() {
+      return colValues;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+      if (this == o) return true;
+      if (o == null || getClass() != o.getClass()) return false;
+      LuceneColumnKeys that = (LuceneColumnKeys) o;
+      return Arrays.equals(colValues, that.colValues);
+    }
+
+    @Override
+    public int hashCode() {
+      return Arrays.hashCode(colValues);
+    }
+  }
 }
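
For readers following flushCache above, a standalone Scala sketch (not part of the patch) of how one page's row ids are serialized into a single stored-field value; the org.roaringbitmap dependency is assumed to be on the classpath and the literal ids are illustrative:

```
import java.nio.ByteBuffer
import org.roaringbitmap.RoaringBitmap

object PageRowIdSerializationSketch {
  def main(args: Array[String]): Unit = {
    // Row ids that hit one page are collected in a bitmap while writing.
    val bitMap = RoaringBitmap.bitmapOf(1, 5, 42)
    val pageId = 7
    // On flush, the page id (4 bytes) is written first, followed by one
    // short per row id, matching the layout read back on the query side.
    val buffer = ByteBuffer.allocate(bitMap.getCardinality * 2 + 4)
    buffer.putInt(pageId)
    val it = bitMap.getIntIterator
    while (it.hasNext) {
      buffer.putShort(it.next().toShort)
    }
    println(buffer.array().length) // 4 + 3 * 2 = 10 bytes
  }
}
```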

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f184de88/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
index f8d1b12..3645bb6 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
@@ -18,12 +18,11 @@
 package org.apache.carbondata.datamap.lucene;
 
 import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.logging.LogService;
@@ -32,14 +31,15 @@ import org.apache.carbondata.core.datamap.dev.DataMapModel;
 import org.apache.carbondata.core.datamap.dev.fgdatamap.FineGrainBlocklet;
 import org.apache.carbondata.core.datamap.dev.fgdatamap.FineGrainDataMap;
 import org.apache.carbondata.core.datastore.block.SegmentProperties;
+import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.indexstore.PartitionSpec;
+import org.apache.carbondata.core.metadata.schema.table.DataMapSchema;
 import org.apache.carbondata.core.scan.expression.Expression;
 import org.apache.carbondata.core.scan.expression.MatchExpression;
 import org.apache.carbondata.core.scan.filter.intf.ExpressionType;
 import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
 
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
@@ -55,17 +55,12 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
 import org.apache.solr.store.hdfs.HdfsDirectory;
 
 @InterfaceAudience.Internal
 public class LuceneFineGrainDataMap extends FineGrainDataMap {
 
-  private static final int BLOCKLETID_ID = 0;
-
-  private static final int PAGEID_ID = 1;
-
-  private static final int ROWID_ID = 2;
-
   /**
    * log information
    */
@@ -73,14 +68,9 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
       LogServiceFactory.getLogService(LuceneFineGrainDataMap.class.getName());
 
   /**
-   * index Reader object to create searcher object
-   */
-  private IndexReader indexReader = null;
-
-  /**
    * searcher object for this datamap
    */
-  private IndexSearcher indexSearcher = null;
+  private Map<String, IndexSearcher> indexSearcherMap = null;
 
   /**
    * analyzer for lucene index
@@ -89,14 +79,21 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
 
   private String filePath;
 
-  LuceneFineGrainDataMap(Analyzer analyzer) {
+  private int writeCacheSize;
+
+  private boolean storeBlockletWise;
+
+  LuceneFineGrainDataMap(Analyzer analyzer, DataMapSchema schema) {
     this.analyzer = analyzer;
+    writeCacheSize = LuceneDataMapFactoryBase.validateAndGetWriteCacheSize(schema);
+    storeBlockletWise = LuceneDataMapFactoryBase.validateAndGetStoreBlockletWise(schema);
   }
 
   /**
    * It is called to load the data map to memory or to initialize it.
    */
   public void init(DataMapModel dataMapModel) throws IOException {
+    long startTime = System.currentTimeMillis();
     // get this path from file path
     Path indexPath = FileFactory.getPath(dataMapModel.getFilePath());
 
@@ -104,32 +101,51 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
 
     this.filePath = indexPath.getName();
 
+    this.indexSearcherMap = new HashMap<>();
+
     // get file system , use hdfs file system , realized in solr project
-    FileSystem fs = FileFactory.getFileSystem(indexPath);
+    CarbonFile indexFilePath = FileFactory.getCarbonFile(indexPath.toString());
 
     // check this path valid
-    if (!fs.exists(indexPath)) {
+    if (!indexFilePath.exists()) {
       String errorMessage = String.format("index directory %s not exists.", indexPath);
       LOGGER.error(errorMessage);
       throw new IOException(errorMessage);
     }
 
-    if (!fs.isDirectory(indexPath)) {
+    if (!indexFilePath.isDirectory()) {
       String errorMessage = String.format("error index path %s, must be directory", indexPath);
       LOGGER.error(errorMessage);
       throw new IOException(errorMessage);
     }
 
+    if (storeBlockletWise) {
+      CarbonFile[] blockletDirs = indexFilePath.listFiles();
+      for (CarbonFile blockletDir : blockletDirs) {
+        IndexSearcher indexSearcher = createIndexSearcher(new Path(blockletDir.getAbsolutePath()));
+        indexSearcherMap.put(blockletDir.getName(), indexSearcher);
+      }
+
+    } else {
+      IndexSearcher indexSearcher = createIndexSearcher(indexPath);
+      indexSearcherMap.put("-1", indexSearcher);
+
+    }
+    LOGGER.info(
+        "Time taken to intialize lucene searcher: " + (System.currentTimeMillis() - startTime));
+  }
+
+  private IndexSearcher createIndexSearcher(Path indexPath) throws IOException {
     // open this index path , use HDFS default configuration
     Directory indexDir = new HdfsDirectory(indexPath, FileFactory.getConfiguration());
 
-    indexReader = DirectoryReader.open(indexDir);
+    IndexReader indexReader = DirectoryReader.open(indexDir);
     if (indexReader == null) {
       throw new RuntimeException("failed to create index reader object");
     }
 
     // create a index searcher object
-    indexSearcher = new IndexSearcher(indexReader);
+    return new IndexSearcher(indexReader);
   }
 
   /**
@@ -212,49 +228,40 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
       LOGGER.error(errorMessage);
       return null;
     }
-
-    // execute index search
-    // initialize to null, else ScoreDoc objects will get accumulated in memory
-    TopDocs result = null;
-    try {
-      result = indexSearcher.search(query, maxDocs);
-    } catch (IOException e) {
-      String errorMessage =
-          String.format("failed to search lucene data, detail is %s", e.getMessage());
-      LOGGER.error(errorMessage);
-      throw new IOException(errorMessage);
-    }
-
     // temporary data, delete duplicated data
     // Map<BlockId, Map<BlockletId, Map<PageId, Set<RowId>>>>
-    Map<String, Map<Integer, Set<Integer>>> mapBlocks = new HashMap<>();
-
-    for (ScoreDoc scoreDoc : result.scoreDocs) {
-      // get a document
-      Document doc = indexSearcher.doc(scoreDoc.doc);
+    Map<String, Map<Integer, List<Short>>> mapBlocks = new HashMap<>();
+
+    for (Map.Entry<String, IndexSearcher> searcherEntry : indexSearcherMap.entrySet()) {
+      IndexSearcher indexSearcher = searcherEntry.getValue();
+      // execute index search
+      // initialize to null, else ScoreDoc objects will get accumulated in memory
+      TopDocs result = null;
+      try {
+        result = indexSearcher.search(query, maxDocs);
+      } catch (IOException e) {
+        String errorMessage =
+            String.format("failed to search lucene data, detail is %s", e.getMessage());
+        LOGGER.error(errorMessage);
+        throw new IOException(errorMessage);
+      }
 
-      // get all fields
-      List<IndexableField> fieldsInDoc = doc.getFields();
+      ByteBuffer intBuffer = ByteBuffer.allocate(4);
 
-      // get the blocklet id Map<BlockletId, Map<PageId, Set<RowId>>>
-      String blockletId = fieldsInDoc.get(BLOCKLETID_ID).stringValue();
-      Map<Integer, Set<Integer>> mapPageIds = mapBlocks.get(blockletId);
-      if (mapPageIds == null) {
-        mapPageIds = new HashMap<>();
-        mapBlocks.put(blockletId, mapPageIds);
-      }
+      for (ScoreDoc scoreDoc : result.scoreDocs) {
+        // get a document
+        Document doc = indexSearcher.doc(scoreDoc.doc);
 
-      // get the page id Map<PageId, Set<RowId>>
-      Number pageId = fieldsInDoc.get(PAGEID_ID).numericValue();
-      Set<Integer> setRowId = mapPageIds.get(pageId.intValue());
-      if (setRowId == null) {
-        setRowId = new HashSet<>();
-        mapPageIds.put(pageId.intValue(), setRowId);
+        // get all fields
+        List<IndexableField> fieldsInDoc = doc.getFields();
+        if (writeCacheSize > 0) {
+          // Fill rowids into the map; each document value combines multiple rows.
+          fillMapForCombineRows(intBuffer, mapBlocks, fieldsInDoc, searcherEntry.getKey());
+        } else {
+          // Fill rowids to the map
+          fillMap(intBuffer, mapBlocks, fieldsInDoc, searcherEntry.getKey());
+        }
       }
-
-      // get the row id Set<RowId>
-      Number rowId = fieldsInDoc.get(ROWID_ID).numericValue();
-      setRowId.add(rowId.intValue());
     }
 
     // result blocklets
@@ -262,19 +269,19 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
 
     // transform all blocks into result type blocklets
     // Map<BlockId, Map<BlockletId, Map<PageId, Set<RowId>>>>
-    for (Map.Entry<String, Map<Integer, Set<Integer>>> mapBlocklet :
+    for (Map.Entry<String, Map<Integer, List<Short>>> mapBlocklet :
         mapBlocks.entrySet()) {
       String blockletId = mapBlocklet.getKey();
-      Map<Integer, Set<Integer>> mapPageIds = mapBlocklet.getValue();
+      Map<Integer, List<Short>> mapPageIds = mapBlocklet.getValue();
       List<FineGrainBlocklet.Page> pages = new ArrayList<FineGrainBlocklet.Page>();
 
       // for pages in this blocklet Map<PageId, Set<RowId>>>
-      for (Map.Entry<Integer, Set<Integer>> mapPageId : mapPageIds.entrySet()) {
+      for (Map.Entry<Integer, List<Short>> mapPageId : mapPageIds.entrySet()) {
         // construct array rowid
         int[] rowIds = new int[mapPageId.getValue().size()];
         int i = 0;
         // for rowids in this page Set<RowId>
-        for (Integer rowid : mapPageId.getValue()) {
+        for (Short rowid : mapPageId.getValue()) {
           rowIds[i++] = rowid;
         }
         // construct one page
@@ -293,6 +300,81 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
     return blocklets;
   }
 
+  /**
+   * Fills the rowids into the map. Each map value combines multiple rowids because rows are
+   * grouped and stored together based on the uniqueness of their column values.
+   */
+  private void fillMapForCombineRows(ByteBuffer intBuffer,
+      Map<String, Map<Integer, List<Short>>> mapBlocks, List<IndexableField> fieldsInDoc,
+      String blockletId) {
+    for (int i = 0; i < fieldsInDoc.size(); i++) {
+      BytesRef bytesRef = fieldsInDoc.get(i).binaryValue();
+      ByteBuffer buffer = ByteBuffer.wrap(bytesRef.bytes);
+
+      int pageId;
+      if (storeBlockletWise) {
+        // If stored blockletwise, read only the pageid; the blockletid is not stored
+        pageId = buffer.getShort();
+      } else {
+        int combineKey = buffer.getInt();
+        intBuffer.clear();
+        intBuffer.putInt(combineKey);
+        intBuffer.rewind();
+        blockletId = String.valueOf(intBuffer.getShort());
+        pageId = intBuffer.getShort();
+      }
+
+      Map<Integer, List<Short>> mapPageIds = mapBlocks.get(blockletId);
+      if (mapPageIds == null) {
+        mapPageIds = new HashMap<>();
+        mapBlocks.put(blockletId, mapPageIds);
+      }
+      List<Short> setRowId = mapPageIds.get(pageId);
+      if (setRowId == null) {
+        setRowId = new ArrayList<>();
+        mapPageIds.put(pageId, setRowId);
+      }
+
+      while (buffer.hasRemaining()) {
+        setRowId.add(buffer.getShort());
+      }
+    }
+  }
+
+  /**
+   * Fill the map with rowids from documents
+   */
+  private void fillMap(ByteBuffer intBuffer, Map<String, Map<Integer, List<Short>>> mapBlocks,
+      List<IndexableField> fieldsInDoc, String blockletId) {
+    int combineKey = fieldsInDoc.get(0).numericValue().intValue();
+    intBuffer.clear();
+    intBuffer.putInt(combineKey);
+    intBuffer.rewind();
+    short rowId;
+    int pageId;
+    if (storeBlockletWise) {
+      // If stored blockletwise, read only the pageid and rowid;
+      // the blockletid is not stored
+      pageId = intBuffer.getShort();
+      rowId = intBuffer.getShort();
+    } else {
+      blockletId = String.valueOf(intBuffer.getShort());
+      pageId = intBuffer.getShort();
+      rowId = fieldsInDoc.get(1).numericValue().shortValue();
+    }
+    Map<Integer, List<Short>> mapPageIds = mapBlocks.get(blockletId);
+    if (mapPageIds == null) {
+      mapPageIds = new HashMap<>();
+      mapBlocks.put(blockletId, mapPageIds);
+    }
+    List<Short> setRowId = mapPageIds.get(pageId);
+    if (setRowId == null) {
+      setRowId = new ArrayList<>();
+      mapPageIds.put(pageId, setRowId);
+    }
+    setRowId.add(rowId);
+  }
+
   @Override
   public boolean isScanRequired(FilterResolverIntf filterExp) {
     return true;
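
The fillMap/fillMapForCombineRows logic above decodes a key in which two shorts were packed into one int by the writer. A standalone Scala sketch (not part of the patch) of that round trip; the example values are illustrative:

```
import java.nio.ByteBuffer

object CombinedKeySketch {
  // Mirror of the writer side: blocklet id and page id packed into one int.
  def pack(blockletId: Short, pageId: Short): Int = {
    val buf = ByteBuffer.allocate(4)
    buf.putShort(blockletId)
    buf.putShort(pageId)
    buf.rewind()
    buf.getInt()
  }

  // Mirror of fillMap: decode the combined key back into its two shorts.
  def unpack(combineKey: Int): (Short, Short) = {
    val buf = ByteBuffer.allocate(4)
    buf.putInt(combineKey)
    buf.rewind()
    (buf.getShort(), buf.getShort())
  }

  def main(args: Array[String]): Unit = {
    println(unpack(pack(3, 17))) // prints (3,17)
  }
}
```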

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f184de88/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapFactory.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapFactory.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapFactory.java
index 2d9618c..8c7539f 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapFactory.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapFactory.java
@@ -50,7 +50,7 @@ public class LuceneFineGrainDataMapFactory extends LuceneDataMapFactoryBase<Fine
    */
   @Override public List<FineGrainDataMap> getDataMaps(Segment segment) throws IOException {
     List<FineGrainDataMap> lstDataMap = new ArrayList<>();
-    FineGrainDataMap dataMap = new LuceneFineGrainDataMap(analyzer);
+    FineGrainDataMap dataMap = new LuceneFineGrainDataMap(analyzer, getDataMapSchema());
     try {
       dataMap.init(new DataMapModel(
           DataMapWriter.getDefaultDataMapPath(
@@ -70,7 +70,7 @@ public class LuceneFineGrainDataMapFactory extends LuceneDataMapFactoryBase<Fine
   public List<FineGrainDataMap> getDataMaps(DataMapDistributable distributable)
       throws IOException {
     List<FineGrainDataMap> lstDataMap = new ArrayList<>();
-    FineGrainDataMap dataMap = new LuceneFineGrainDataMap(analyzer);
+    FineGrainDataMap dataMap = new LuceneFineGrainDataMap(analyzer, getDataMapSchema());
     String indexPath = ((LuceneDataMapDistributable) distributable).getIndexPath();
     try {
       dataMap.init(new DataMapModel(indexPath));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f184de88/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
index 89623cf..b90d190 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMapSuite.scala
@@ -731,6 +731,50 @@ class LuceneFineGrainDataMapSuite extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE table1")
   }
 
+  test("test lucene with flush_cache as true") {
+    sql("DROP TABLE IF EXISTS datamap_test_table")
+    sql(
+      """
+        | CREATE TABLE datamap_test_table(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm_flush ON TABLE datamap_test_table
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='name , city', 'flush_cache'='true')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test_table OPTIONS('header'='false')")
+    checkAnswer(sql("SELECT * FROM datamap_test_table WHERE TEXT_MATCH('name:n99*')"),
+      sql("select * from datamap_test_table where name like 'n99%'"))
+    checkAnswer(sql("SELECT * FROM datamap_test_table WHERE TEXT_MATCH('name:n*9')"),
+      sql(s"select * from datamap_test_table where name like 'n%9'"))
+    sql("drop datamap if exists dm_flush on table datamap_test_table")
+  }
+
+  test("test lucene with split_blocklet as false ") {
+    sql("DROP TABLE IF EXISTS datamap_test_table")
+    sql(
+      """
+        | CREATE TABLE datamap_test_table(id INT, name STRING, city STRING, age INT)
+        | STORED BY 'carbondata'
+        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP dm_split_false ON TABLE datamap_test_table
+         | USING 'lucene'
+         | DMProperties('INDEX_COLUMNS'='name , city', 'split_blocklet'='false')
+      """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test_table OPTIONS('header'='false')")
+    checkAnswer(sql("SELECT * FROM datamap_test_table WHERE TEXT_MATCH('name:n99*')"),
+      sql("select * from datamap_test_table where name like 'n99%'"))
+    checkAnswer(sql("SELECT * FROM datamap_test_table WHERE TEXT_MATCH('name:n*9')"),
+      sql(s"select * from datamap_test_table where name like 'n%9'"))
+    sql("drop datamap if exists dm_split_false on table datamap_test_table")
+  }
+
   override protected def afterAll(): Unit = {
     LuceneFineGrainDataMapSuite.deleteFile(file2)
     sql("DROP TABLE IF EXISTS normal_test")


[14/50] [abbrv] carbondata git commit: [CARBONDATA-2227] Added support to show partition details in describe formatted

Posted by gv...@apache.org.
[CARBONDATA-2227] Added support to show partition details in describe formatted

Added detailed information in the describe formatted command, like partition location and partition values.
Example Usage: Describe formatted partition(partition_col_name=partition_value)
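
A minimal usage sketch based on the test added below (a Carbon-enabled SparkSession is assumed to be available as `spark`; table, column and partition values are illustrative):

```
spark.sql("CREATE TABLE a(a STRING) PARTITIONED BY (b INT) STORED BY 'carbondata'")
spark.sql("INSERT INTO a VALUES ('a', 1)")
// Table-level details, including the new #Partition Information section
spark.sql("DESCRIBE FORMATTED a").show(100, false)
// Partition-level details: partition value, location and partition parameters
spark.sql("DESCRIBE FORMATTED a PARTITION (b=1)").show(100, false)
```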

This closes #2033


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/604902b9
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/604902b9
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/604902b9

Branch: refs/heads/spark-2.3
Commit: 604902b9a52ec613c1ec025b4dc33657b179895e
Parents: 2ebfab1
Author: kunal642 <ku...@gmail.com>
Authored: Mon Mar 5 20:33:06 2018 +0530
Committer: manishgupta88 <to...@gmail.com>
Committed: Tue May 22 11:37:07 2018 +0530

----------------------------------------------------------------------
 .../describeTable/TestDescribeTable.scala       | 20 +++++++++++++++
 .../partition/TestDDLForPartitionTable.scala    |  2 +-
 .../table/CarbonDescribeFormattedCommand.scala  | 26 +++++++++++++++++---
 .../sql/execution/strategy/DDLStrategy.scala    |  1 +
 4 files changed, 44 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/604902b9/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/describeTable/TestDescribeTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/describeTable/TestDescribeTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/describeTable/TestDescribeTable.scala
index fe01598..1e333ee 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/describeTable/TestDescribeTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/describeTable/TestDescribeTable.scala
@@ -28,6 +28,7 @@ class TestDescribeTable extends QueryTest with BeforeAndAfterAll {
   override def beforeAll: Unit = {
     sql("DROP TABLE IF EXISTS Desc1")
     sql("DROP TABLE IF EXISTS Desc2")
+    sql("drop table if exists a")
     sql("CREATE TABLE Desc1(Dec1Col1 String, Dec1Col2 String, Dec1Col3 int, Dec1Col4 double) stored by 'carbondata'")
     sql("DESC Desc1")
     sql("DROP TABLE Desc1")
@@ -56,9 +57,28 @@ class TestDescribeTable extends QueryTest with BeforeAndAfterAll {
     assert(sql("desc formatted desc1").count() == 20)
   }
 
+  test("test describe formatted for partition table") {
+    sql("create table a(a string) partitioned by (b int) stored by 'carbondata'")
+    sql("insert into a values('a',1)")
+    sql("insert into a values('a',2)")
+    val desc = sql("describe formatted a").collect()
+    assert(desc(desc.indexWhere(_.get(0).toString.contains("#Partition")) + 2).get(0).toString.contains("b"))
+    val descPar = sql("describe formatted a partition(b=1)").collect
+    descPar.find(_.get(0).toString.contains("Partition Value:")) match {
+      case Some(row) => assert(row.get(1).toString.contains("1"))
+      case None => fail("Partition Value not found in describe formatted")
+    }
+    descPar.find(_.get(0).toString.contains("Location:")) match {
+      case Some(row) => assert(row.get(1).toString.contains("target/warehouse/a/b=1"))
+      case None => fail("Partition Location not found in describe formatted")
+    }
+    assert(descPar.exists(_.toString().contains("Partition Parameters:")))
+  }
+
   override def afterAll: Unit = {
     sql("DROP TABLE Desc1")
     sql("DROP TABLE Desc2")
+    sql("drop table if exists a")
   }
 
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/604902b9/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
index 2cbafa8..cafd465 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/partition/TestDDLForPartitionTable.scala
@@ -145,7 +145,7 @@ class TestDDLForPartitionTable  extends QueryTest with BeforeAndAfterAll {
     sql(
       """create table des(a int, b string) partitioned by (c string) stored by 'carbondata'
         |tblproperties ('partition_type'='list','list_info'='1,2')""".stripMargin)
-    checkExistence(sql("describe formatted des"), true, "Partition Columns")
+    checkExistence(sql("describe formatted des"), true, "Partition Type")
     sql("drop table if exists des")
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/604902b9/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
index ce03959..69bb91f 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonDescribeFormattedCommand.scala
@@ -21,6 +21,7 @@ import scala.collection.JavaConverters._
 
 import org.apache.spark.sql.{CarbonEnv, Row, SparkSession}
 import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.execution.SparkPlan
 import org.apache.spark.sql.execution.command.MetadataCommand
@@ -35,6 +36,7 @@ import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
 private[sql] case class CarbonDescribeFormattedCommand(
     child: SparkPlan,
     override val output: Seq[Attribute],
+    partitionSpec: TablePartitionSpec,
     tblIdentifier: TableIdentifier)
   extends MetadataCommand {
 
@@ -148,12 +150,28 @@ private[sql] case class CarbonDescribeFormattedCommand(
       .map(column => column).mkString(","), ""))
     if (carbonTable.getPartitionInfo(carbonTable.getTableName) != null) {
       results ++=
-      Seq(("Partition Columns", carbonTable.getPartitionInfo(carbonTable.getTableName)
-        .getColumnSchemaList.asScala.map(_.getColumnName).mkString(","), ""))
-      results ++=
-      Seq(("Partition Type", carbonTable.getPartitionInfo(carbonTable.getTableName)
+      Seq(("#Partition Information", "", ""),
+        ("#col_name", "data_type", "comment"))
+      results ++= carbonTable.getPartitionInfo(carbonTable.getTableName)
+        .getColumnSchemaList.asScala.map {
+        col => (col.getColumnName, col.getDataType.getName, "NULL")
+      }
+      results ++= Seq(("Partition Type", carbonTable.getPartitionInfo(carbonTable.getTableName)
         .getPartitionType.toString, ""))
     }
+    if (partitionSpec.nonEmpty) {
+      val partitions = sparkSession.sessionState.catalog.getPartition(tblIdentifier, partitionSpec)
+      results ++=
+      Seq(("", "", ""),
+        ("##Detailed Partition Information", "", ""),
+        ("Partition Value:", partitions.spec.values.mkString("[", ",", "]"), ""),
+        ("Database:", tblIdentifier.database.getOrElse(sparkSession.catalog.currentDatabase), ""),
+        ("Table:", tblIdentifier.table, ""))
+      if (partitions.storage.locationUri.isDefined) {
+        results ++= Seq(("Location:", partitions.storage.locationUri.get.toString, ""))
+      }
+      results ++= Seq(("Partition Parameters:", partitions.parameters.mkString(", "), ""))
+    }
     results.map {
       case (name, dataType, null) =>
         Row(f"$name%-36s", f"$dataType%-80s", null)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/604902b9/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
index ef4d05c..468121b 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
@@ -179,6 +179,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
             CarbonDescribeFormattedCommand(
               resultPlan,
               plan.output,
+              partitionSpec,
               identifier)) :: Nil
         } else {
           Nil


[49/50] [abbrv] carbondata git commit: [CARBONDATA-2575] Add document to explain DataMap Management

Posted by gv...@apache.org.
[CARBONDATA-2575] Add document to explain DataMap Management

Add document to explain DataMap Management

This closes #2360


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/d401e060
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/d401e060
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/d401e060

Branch: refs/heads/spark-2.3
Commit: d401e060adcc531d66468dc61f4d468768cfea3f
Parents: 5f68a79
Author: Jacky Li <ja...@qq.com>
Authored: Mon Jun 4 21:18:31 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Tue Jun 5 19:57:06 2018 +0800

----------------------------------------------------------------------
 docs/datamap/datamap-management.md | 111 ++++++++++++++++++++++++++++++++
 1 file changed, 111 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/d401e060/docs/datamap/datamap-management.md
----------------------------------------------------------------------
diff --git a/docs/datamap/datamap-management.md b/docs/datamap/datamap-management.md
new file mode 100644
index 0000000..01bb69f
--- /dev/null
+++ b/docs/datamap/datamap-management.md
@@ -0,0 +1,111 @@
+# CarbonData DataMap Management
+
+## Overview
+
+DataMap can be created using following DDL
+
+```
+  CREATE DATAMAP [IF NOT EXISTS] datamap_name
+  [ON TABLE main_table]
+  USING "datamap_provider"
+  [WITH DEFERRED REBUILD]
+  DMPROPERTIES ('key'='value', ...)
+  AS
+    SELECT statement
+```
+
+Currently, there are 5 DataMap implementations in CarbonData.
+
+| DataMap Provider | Description                              | DMPROPERTIES                             | Management       |
+| ---------------- | ---------------------------------------- | ---------------------------------------- | ---------------- |
+| preaggregate     | single table pre-aggregate table         | No DMPROPERTY is required                | Automatic        |
+| timeseries       | time dimension rollup table.             | event_time, xx_granularity, please refer to [Timeseries DataMap](https://github.com/apache/carbondata/blob/master/docs/datamap/timeseries-datamap-guide.md) | Automatic        |
+| mv               | multi-table pre-aggregate table,         | No DMPROPERTY is required                | Manual           |
+| lucene           | lucene indexing for text column          | index_columns to specifying the index columns | Manual/Automatic |
+| bloom            | bloom filter for high cardinality column, geospatial column | index_columns to specifying the index columns | Manual/Automatic |
+
+## DataMap Management
+
+There are two kinds of management semantics for DataMap.
+
+1. Automatic Refresh: Create datamap without `WITH DEFERRED REBUILD` in the statement
+2. Manual Refresh: Create datamap with `WITH DEFERRED REBUILD` in the statement
+
+### Automatic Refresh
+
+When user creates a datamap on the main table without using `WITH DEFERRED REBUILD` syntax, the datamap will be managed by system automatically.
+For every data load to the main table, system will immediately trigger a load to the datamap automatically. These two data loads (to main table and datamap) are executed in a transactional manner, meaning that either both succeed or both fail.
+
+The data loading to datamap is incremental, based on the Segment concept, avoiding an expensive total rebuild.
+
+If user performs the following commands on the main table, system will return failure (reject the operation):
+
+1. Data management command: `UPDATE/DELETE/DELETE SEGMENT`.
+2. Schema management command: `ALTER TABLE DROP COLUMN`, `ALTER TABLE CHANGE DATATYPE`,
+   `ALTER TABLE RENAME`. Note that adding a new column is supported; for the drop column and
+   change datatype commands, CarbonData will check whether the operation impacts the pre-aggregate
+   table. If not, the operation is allowed, otherwise it will be rejected by throwing an exception.
+3. Partition management command: `ALTER TABLE ADD/DROP PARTITION`
+
+If user does want to perform above operations on the main table, user can first drop the datamap, perform the operation, and then re-create the datamap.
+
+If user drops the main table, the datamap will be dropped immediately too.
+
+### Manual Refresh
+
+When user creates a datamap specifying manual refresh semantic, the datamap is created with status *disabled* and query will NOT use this datamap until user issues the REBUILD DATAMAP command to build the datamap. For every REBUILD DATAMAP command, system will trigger a full rebuild of the datamap. After rebuild is done, system will change datamap status to *enabled*, so that it can be used in query rewrite.
+
+For every new data load, data update, or delete, the related datamap will be made *disabled*.
+
+If the main table is dropped by user, the related datamap will be dropped immediately.
+
+*Note: If you are creating a datamap on an external table, you need to do manual management of the datamap.*
+
+
+
+## DataMap Catalog
+
+Currently, when user creates a datamap, system will store the datamap metadata in a configurable *system* folder in HDFS or S3.
+
+This *system* folder contains:
+
+- DataMapSchema file. It is a json file containing the schema for one datamap. See DataMapSchema class. If user creates 100 datamaps (on different tables), there will be 100 files in the *system* folder.
+- DataMapStatus file. Only one file, it is in json format, and each entry in the file represents one datamap. See DataMapStatusDetail class.
+
+There is a DataMapCatalog interface to retrieve the schemas of all datamaps; it can be used in the optimizer to get the metadata of a datamap.
+
+
+
+## DataMap Related Commands
+
+### Explain
+
+How can user know whether datamap is used in the query?
+
+User can use the EXPLAIN command to find out; it will print out something like
+
+```text
+== CarbonData Profiler ==
+Hit mv DataMap: datamap1
+Scan Table: default.datamap1_table
++- filter:
++- pruning by CG DataMap
++- all blocklets: 1
+   skipped blocklets: 0
+```
+
+### Show DataMap
+
+There is a SHOW DATAMAPS command; when this is issued, system will read all datamaps from the *system* folder and print their information on screen. The current information includes:
+
+- DataMapName
+- DataMapProviderName like mv, preaggregate, timeseries, etc.
+- Associated Table
+
+### Compaction on DataMap
+
+This feature applies to the preaggregate datamap only.
+
+Running Compaction command (`ALTER TABLE COMPACT`) on main table will **not automatically** compact the pre-aggregate tables created on the main table. User needs to run Compaction command separately on each pre-aggregate table to compact them.
+
+Compaction is an optional operation for pre-aggregate table. If compaction is performed on main table but not performed on pre-aggregate table, all queries still can benefit from pre-aggregate tables. To further improve the query performance, compaction on pre-aggregate tables can be triggered to merge the segments and files in the pre-aggregate tables.
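
As a companion to the document above, a minimal manual-refresh walkthrough (a Carbon-enabled SparkSession is assumed to be available as `spark`; table, datamap and file names are illustrative, and the exact REBUILD/SHOW statement forms are assumptions based on the commands named in the text):

```
spark.sql("CREATE TABLE main_table(id INT, name STRING, city STRING) STORED BY 'carbondata'")
spark.sql(
  """CREATE DATAMAP dm_lucene ON TABLE main_table
    |USING 'lucene'
    |WITH DEFERRED REBUILD
    |DMPROPERTIES('INDEX_COLUMNS'='name,city')""".stripMargin)
// Loads do not refresh dm_lucene automatically; it stays disabled until rebuilt.
spark.sql("LOAD DATA LOCAL INPATH '/tmp/sample.csv' INTO TABLE main_table")
spark.sql("REBUILD DATAMAP dm_lucene")                  // full rebuild, status becomes enabled
spark.sql("SHOW DATAMAP ON TABLE main_table").show(false)
spark.sql("EXPLAIN SELECT * FROM main_table WHERE TEXT_MATCH('name:n10*')").show(false)
```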


[31/50] [abbrv] carbondata git commit: [HOTFIX] Changes in selecting the carbonindex files

Posted by gv...@apache.org.
[HOTFIX] Changes in selecting the carbonindex files

Currently, in the query flow while getting the index files, we check for either the mergeFileName or the list of files. After this change, we will
check for both the files and the mergeFileName.

This closes #2333


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/22d5035c
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/22d5035c
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/22d5035c

Branch: refs/heads/spark-2.3
Commit: 22d5035c84342e0c0b15a87abbdd4dca5e6d4976
Parents: d777318
Author: dhatchayani <dh...@gmail.com>
Authored: Tue May 22 17:26:37 2018 +0530
Committer: manishgupta88 <to...@gmail.com>
Committed: Tue May 29 11:01:08 2018 +0530

----------------------------------------------------------------------
 .../core/metadata/SegmentFileStore.java           | 18 ++++++++++++------
 .../core/writer/CarbonIndexFileMergeWriter.java   |  2 ++
 2 files changed, 14 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/22d5035c/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java b/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
index d72ded3..acfc145 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
@@ -151,7 +151,8 @@ public class SegmentFileStore {
     CarbonFile segmentFolder = FileFactory.getCarbonFile(segmentPath);
     CarbonFile[] indexFiles = segmentFolder.listFiles(new CarbonFileFilter() {
       @Override public boolean accept(CarbonFile file) {
-        return file.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT);
+        return (file.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT) || file.getName()
+            .endsWith(CarbonTablePath.MERGE_INDEX_FILE_EXT));
       }
     });
     if (indexFiles != null && indexFiles.length > 0) {
@@ -160,7 +161,11 @@ public class SegmentFileStore {
       folderDetails.setRelative(true);
       folderDetails.setStatus(SegmentStatus.SUCCESS.getMessage());
       for (CarbonFile file : indexFiles) {
-        folderDetails.getFiles().add(file.getName());
+        if (file.getName().endsWith(CarbonTablePath.MERGE_INDEX_FILE_EXT)) {
+          folderDetails.setMergeFileName(file.getName());
+        } else {
+          folderDetails.getFiles().add(file.getName());
+        }
       }
       String segmentRelativePath = segmentPath.substring(tablePath.length(), segmentPath.length());
       segmentFile.addPath(segmentRelativePath, folderDetails);
@@ -508,10 +513,11 @@ public class SegmentFileStore {
           if (null != mergeFileName) {
             indexFiles.put(location + CarbonCommonConstants.FILE_SEPARATOR + mergeFileName,
                 entry.getValue().mergeFileName);
-          } else {
-            for (String indexFile : entry.getValue().getFiles()) {
-              indexFiles.put(location + CarbonCommonConstants.FILE_SEPARATOR + indexFile,
-                  entry.getValue().mergeFileName);
+          }
+          Set<String> files = entry.getValue().getFiles();
+          if (null != files && !files.isEmpty()) {
+            for (String indexFile : files) {
+              indexFiles.put(location + CarbonCommonConstants.FILE_SEPARATOR + indexFile, null);
             }
           }
         }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/22d5035c/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileMergeWriter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileMergeWriter.java b/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileMergeWriter.java
index ceeb431..cb53c0b 100644
--- a/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileMergeWriter.java
+++ b/core/src/main/java/org/apache/carbondata/core/writer/CarbonIndexFileMergeWriter.java
@@ -21,6 +21,7 @@ import java.io.Serializable;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 
@@ -140,6 +141,7 @@ public class CarbonIndexFileMergeWriter {
         }
         if (new Path(entry.getKey()).equals(new Path(location))) {
           segentry.getValue().setMergeFileName(mergeIndexFile);
+          segentry.getValue().setFiles(new HashSet<String>());
           break;
         }
       }


[40/50] [abbrv] carbondata git commit: [Documentation] Editorial Review comment fixed

Posted by gv...@apache.org.
[Documentation] Editorial Review comment fixed

Editorial Review comment fixed

This closes #2320


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/5ad70095
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/5ad70095
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/5ad70095

Branch: refs/heads/spark-2.3
Commit: 5ad7009573b7a95a181221d6a58df05e1fafbeb6
Parents: 6aadfe7
Author: sgururajshetty <sg...@gmail.com>
Authored: Thu May 31 17:36:26 2018 +0530
Committer: kunal642 <ku...@gmail.com>
Committed: Thu May 31 17:40:30 2018 +0530

----------------------------------------------------------------------
 docs/data-management-on-carbondata.md    | 4 ++--
 docs/datamap/timeseries-datamap-guide.md | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/5ad70095/docs/data-management-on-carbondata.md
----------------------------------------------------------------------
diff --git a/docs/data-management-on-carbondata.md b/docs/data-management-on-carbondata.md
index 51e98ab..706209c 100644
--- a/docs/data-management-on-carbondata.md
+++ b/docs/data-management-on-carbondata.md
@@ -35,11 +35,11 @@ This tutorial is going to introduce all commands and data operations on CarbonDa
   
   ```
   CREATE TABLE [IF NOT EXISTS] [db_name.]table_name[(col_name data_type , ...)]
-  STORED BY 'carbondata'
+  STORED AS carbondata
   [TBLPROPERTIES (property_name=property_value, ...)]
   [LOCATION 'path']
   ```
-  **NOTE:** CarbonData also supports "STORED AS carbondata". Find example code at [CarbonSessionExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala) in the CarbonData repo.
+  **NOTE:** CarbonData also supports "STORED AS carbondata" and "USING carbondata". Find example code at [CarbonSessionExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala) in the CarbonData repo.
 ### Usage Guidelines
 
   Following are the guidelines for TBLPROPERTIES, CarbonData's additional table options can be set via carbon.properties.
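
A quick sketch of the equivalent create-table forms mentioned in the NOTE above (a Carbon-enabled SparkSession is assumed as `spark`; table and column names are illustrative):

```
spark.sql("CREATE TABLE t_stored_as(col1 STRING) STORED AS carbondata")
spark.sql("CREATE TABLE t_using(col1 STRING) USING carbondata")
spark.sql("CREATE TABLE t_stored_by(col1 STRING) STORED BY 'carbondata'")
```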

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5ad70095/docs/datamap/timeseries-datamap-guide.md
----------------------------------------------------------------------
diff --git a/docs/datamap/timeseries-datamap-guide.md b/docs/datamap/timeseries-datamap-guide.md
index 7847312..bea5286 100644
--- a/docs/datamap/timeseries-datamap-guide.md
+++ b/docs/datamap/timeseries-datamap-guide.md
@@ -1,12 +1,12 @@
 # CarbonData Timeseries DataMap
 
-* [Timeseries DataMap](#timeseries-datamap-intoduction-(alpha-feature-in-1.3.0))
+* [Timeseries DataMap Introduction](#timeseries-datamap-introduction)
 * [Compaction](#compacting-pre-aggregate-tables)
 * [Data Management](#data-management-with-pre-aggregate-tables)
 
-## Timeseries DataMap Intoduction (Alpha feature in 1.3.0)
-Timeseries DataMap a pre-aggregate table implementation based on 'preaggregate' DataMap. 
-Difference is that Timerseries DataMap has built-in understanding of time hierarchy and 
+## Timeseries DataMap Introduction (Alpha feature in 1.3.0)
+Timeseries DataMap is a pre-aggregate table implementation based on the 'pre-aggregate' DataMap.
+Difference is that Timeseries DataMap has built-in understanding of time hierarchy and
 levels: year, month, day, hour, minute, so that it supports automatic roll-up in time dimension 
 for query.
 


[50/50] [abbrv] carbondata git commit: [CARBONDATA-2577] [CARBONDATA-2579] Fixed issue in Avro logical type for nested Array and document update

Posted by gv...@apache.org.
[CARBONDATA-2577] [CARBONDATA-2579] Fixed issue in Avro logical type for nested Array and document update

Problem: Nested Array logical types of date, timestamp-millis, timestamp-micros are not working.

Root cause: During the preparation of the carbon schema from the avro schema, logical types were not handled for the array nested type.

Solution: Handle the logical types for array nested type during carbon schema preparation.

This closes #2361


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/041603dc
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/041603dc
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/041603dc

Branch: refs/heads/spark-2.3
Commit: 041603dccf1d98348db36c4bf8e2e60d50a5bcc8
Parents: d401e06
Author: ajantha-bhat <aj...@gmail.com>
Authored: Mon Jun 4 16:12:48 2018 +0530
Committer: kumarvishal09 <ku...@gmail.com>
Committed: Tue Jun 5 19:21:07 2018 +0530

----------------------------------------------------------------------
 docs/data-management-on-carbondata.md           |  7 ++++-
 docs/sdk-guide.md                               | 15 +++++++++++
 .../TestNonTransactionalCarbonTable.scala       | 28 +++++++++-----------
 .../carbondata/sdk/file/AvroCarbonWriter.java   | 26 ++++++++++++++++--
 4 files changed, 58 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/041603dc/docs/data-management-on-carbondata.md
----------------------------------------------------------------------
diff --git a/docs/data-management-on-carbondata.md b/docs/data-management-on-carbondata.md
index 706209c..3326e9b 100644
--- a/docs/data-management-on-carbondata.md
+++ b/docs/data-management-on-carbondata.md
@@ -216,7 +216,12 @@ This tutorial is going to introduce all commands and data operations on CarbonDa
   This can be SDK output. Refer [SDK Writer Guide](https://github.com/apache/carbondata/blob/master/docs/sdk-writer-guide.md). 
   
   **Note:**
-  Dropping of the external table should not delete the files present in the location.
+  1. Dropping of the external table should not delete the files present in the location.
+  2. When an external table is created on non-transactional table data,
+  the external table will be registered with the schema of the carbondata files.
+  If multiple files with different schemas are present, an exception will be thrown.
+  So, if the table is registered with one schema and the files are of a different schema,
+  it is suggested to drop the external table and create it again to register the table with the new schema.
 
 
 ## CREATE DATABASE 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/041603dc/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 0f20dc3..e04698d 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -128,6 +128,21 @@ Each of SQL data types are mapped into data types of SDK. Following are the mapp
 | STRING | DataTypes.STRING |
 | DECIMAL | DataTypes.createDecimalType(precision, scale) |
 
+**NOTE:**
+ Carbon supports the below logical types of AVRO.
+ a. Date
+    The date logical type represents a date within the calendar, with no reference to a particular time zone or time of day.
+    A date logical type annotates an Avro int, where the int stores the number of days from the unix epoch, 1 January 1970 (ISO calendar). 
+ b. Timestamp (millisecond precision)
+    The timestamp-millis logical type represents an instant on the global timeline, independent of a particular time zone or calendar, with a precision of one millisecond.
+    A timestamp-millis logical type annotates an Avro long, where the long stores the number of milliseconds from the unix epoch, 1 January 1970 00:00:00.000 UTC.
+ c. Timestamp (microsecond precision)
+    The timestamp-micros logical type represents an instant on the global timeline, independent of a particular time zone or calendar, with a precision of one microsecond.
+    A timestamp-micros logical type annotates an Avro long, where the long stores the number of microseconds from the unix epoch, 1 January 1970 00:00:00.000000 UTC.
+    
+    Currently the values of logical types are not validated by carbon.
+    It is expected that the avro record passed by the user is already validated by avro record generator tools.
+
 ## Run SQL on files directly
 Instead of creating table and query it, you can also query that file directly with SQL.
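
For illustration, the date logical type described in the NOTE above is declared on an Avro array field as in the fragment below; it mirrors the nested-array test schema added later in this commit, and the field names are illustrative:

```
// Avro field declaration carrying the "date" logical type on an int array item.
val avroFieldJson =
  """{
    |  "name": "doorNum",
    |  "type": {
    |    "type": "array",
    |    "items": {
    |      "name": "EachdoorNums", "type": "int", "logicalType": "date", "default": -1
    |    }
    |  }
    |}""".stripMargin
```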
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/041603dc/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index 14a63ca..b275bb8 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -1825,6 +1825,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
                      |						"items": {
                      |							"name": "EachdoorNums",
                      |							"type": "int",
+                     |              "logicalType": "date",
                      |							"default": -1
                      |						}
                      |					}
@@ -1849,8 +1850,8 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
     buildAvroTestDataMultiLevel3_2(3, null)
   }
 
-  // test multi level -- 3 levels [array of array of array of int]
-  test("test multi level support : array of array of array of int") {
+  // test multi level -- 3 levels [array of array of array of int with logical type]
+  test("test multi level support : array of array of array of int with logical type") {
     buildAvroTestDataMultiLevel3_2Type()
     assert(new File(writerPath).exists())
     sql("DROP TABLE IF EXISTS sdkOutputTable")
@@ -1858,22 +1859,19 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
       s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
          |'$writerPath' """.stripMargin)
 
-    sql("select * from sdkOutputTable").show(false)
+    sql("select * from sdkOutputTable limit 1").show(false)
 
     // TODO: Add a validation
     /*
-    +----+---+---------------------------------------------------------------------------+
-    |name|age|BuildNum
-                                               |
-    +----+---+---------------------------------------------------------------------------+
-    |bob |10 |[WrappedArray(WrappedArray(1, 2, 3), WrappedArray(4, 5, 6)), WrappedArray
-    (WrappedArray(10, 20, 30), WrappedArray(40, 50, 60))]|
-    |bob |10 |[WrappedArray(WrappedArray(1, 2, 3), WrappedArray(4, 5, 6)), WrappedArray
-    (WrappedArray(10, 20, 30), WrappedArray(40, 50, 60))]|
-    |bob |10 |[WrappedArray(WrappedArray(1, 2, 3), WrappedArray(4, 5, 6)), WrappedArray
-    (WrappedArray(10, 20, 30), WrappedArray(40, 50, 60))]|
-    +----+---+---------------------------------------------------------------------------+
-   */
+    +----+---+------------------------------------------------------------------+
+    |name|age|BuildNum                                                          |
+    +----+---+------------------------------------------------------------------+
+    |bob |10 |[WrappedArray(WrappedArray(1970-01-02, 1970-01-03, 1970-01-04),   |
+    |                    WrappedArray(1970-01-05, 1970-01-06, 1970-01-07)),     |
+    |       WrappedArray(WrappedArray(1970-01-11, 1970-01-21, 1970-01-31),      |
+    |                    WrappedArray(1970-02-10, 1970-02-20, 1970-03-02))]     |
+    +----+---+------------------------------------------------------------------+
+     */
 
     sql("DROP TABLE sdkOutputTable")
     // drop table should not delete the files

http://git-wip-us.apache.org/repos/asf/carbondata/blob/041603dc/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
index edecd6b..fdd1f5a 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
@@ -323,13 +323,35 @@ public class AvroCarbonWriter extends CarbonWriter {
   }
 
   private static DataType getMappingDataTypeForArrayRecord(Schema childSchema) {
+    LogicalType logicalType = childSchema.getLogicalType();
     switch (childSchema.getType()) {
       case BOOLEAN:
         return DataTypes.BOOLEAN;
       case INT:
-        return DataTypes.INT;
+        if (logicalType != null) {
+          if (logicalType instanceof LogicalTypes.Date) {
+            return DataTypes.DATE;
+          } else {
+            LOGGER.warn("Unsupported logical type. Considering Data Type as INT for " + childSchema
+                .getName());
+            return DataTypes.INT;
+          }
+        } else {
+          return DataTypes.INT;
+        }
       case LONG:
-        return DataTypes.LONG;
+        if (logicalType != null) {
+          if (logicalType instanceof LogicalTypes.TimestampMillis
+              || logicalType instanceof LogicalTypes.TimestampMicros) {
+            return DataTypes.TIMESTAMP;
+          } else {
+            LOGGER.warn("Unsupported logical type. Considering Data Type as LONG for " + childSchema
+                .getName());
+            return DataTypes.LONG;
+          }
+        } else {
+          return DataTypes.LONG;
+        }
       case DOUBLE:
         return DataTypes.DOUBLE;
       case STRING:


[25/50] [abbrv] carbondata git commit: [CARBONDATA-2507] enable.offheap.sort not validate in CarbonData

Posted by gv...@apache.org.
[CARBONDATA-2507] enable.offheap.sort not validate in CarbonData

In #2274, the value of enable.offheap.sort was transformed to false whenever args[0] was not equal to true, including false and other strings like f, any, and so on.

So we should validate it.
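
A hypothetical snippet of the intended effect (the property key comes from this fix; addProperty/getProperty are the usual CarbonProperties entry points, and the exact default value and the point at which validation runs are assumptions):

```
import org.apache.carbondata.core.util.CarbonProperties

// An invalid boolean such as "f" should now fall back to the default
// instead of being silently treated as false.
CarbonProperties.getInstance().addProperty("enable.offheap.sort", "f")
val effective = CarbonProperties.getInstance().getProperty("enable.offheap.sort")
println(effective) // expected: the default value ("true" is assumed here)
```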

This closes #2331


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/d8bafa34
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/d8bafa34
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/d8bafa34

Branch: refs/heads/spark-2.3
Commit: d8bafa34dd4ce31b0596cabe139ea2022ba9d78b
Parents: 33b825d
Author: xubo245 <xu...@huawei.com>
Authored: Wed May 23 16:08:25 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Mon May 28 17:25:16 2018 +0800

----------------------------------------------------------------------
 .../carbondata/core/util/CarbonProperties.java  | 17 +++++++++++++
 .../carbondata/core/util/SessionParams.java     |  2 ++
 .../core/CarbonPropertiesValidationTest.java    | 25 ++++++++++++++++++++
 3 files changed, 44 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/d8bafa34/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
index 391096d..4ee5199 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
@@ -56,6 +56,7 @@ import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CSV_READ_BUFFER_SIZE;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.ENABLE_AUTO_HANDOFF;
+import static org.apache.carbondata.core.constants.CarbonCommonConstants.ENABLE_OFFHEAP_SORT;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.ENABLE_UNSAFE_SORT;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.ENABLE_VECTOR_READER;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.HANDOFF_SIZE;
@@ -149,6 +150,9 @@ public final class CarbonProperties {
       case ENABLE_UNSAFE_SORT:
         validateEnableUnsafeSort();
         break;
+      case ENABLE_OFFHEAP_SORT:
+        validateEnableOffHeapSort();
+        break;
       case CARBON_CUSTOM_BLOCK_DISTRIBUTION:
         validateCustomBlockDistribution();
         break;
@@ -235,6 +239,7 @@ public final class CarbonProperties {
     validateBlockletGroupSizeInMB();
     validateNumberOfColumnPerIORead();
     validateEnableUnsafeSort();
+    validateEnableOffHeapSort();
     validateCustomBlockDistribution();
     validateEnableVectorReader();
     validateLockType();
@@ -471,6 +476,18 @@ public final class CarbonProperties {
     }
   }
 
+  private void validateEnableOffHeapSort() {
+    String value = carbonProperties.getProperty(ENABLE_OFFHEAP_SORT);
+    boolean isValidBooleanValue = CarbonUtil.validateBoolean(value);
+    if (!isValidBooleanValue) {
+      LOGGER.warn("The enable off heap sort value \"" + value
+          + "\" is invalid. Using the default value \""
+          + CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT);
+      carbonProperties.setProperty(ENABLE_OFFHEAP_SORT,
+          CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT);
+    }
+  }
+
   private void initPropertySet() throws IllegalAccessException {
     Field[] declaredFields = CarbonCommonConstants.class.getDeclaredFields();
     for (Field field : declaredFields) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d8bafa34/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java b/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
index 3823aef..169c003 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/SessionParams.java
@@ -33,6 +33,7 @@ import org.apache.carbondata.core.exception.InvalidConfigurationException;
 
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_CUSTOM_BLOCK_DISTRIBUTION;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.CARBON_SEARCH_MODE_ENABLE;
+import static org.apache.carbondata.core.constants.CarbonCommonConstants.ENABLE_OFFHEAP_SORT;
 import static org.apache.carbondata.core.constants.CarbonCommonConstants.ENABLE_UNSAFE_SORT;
 import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORDS_ACTION;
 import static org.apache.carbondata.core.constants.CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORDS_LOGGER_ENABLE;
@@ -145,6 +146,7 @@ public class SessionParams implements Serializable, Cloneable {
     boolean isValid = false;
     switch (key) {
       case ENABLE_UNSAFE_SORT:
+      case ENABLE_OFFHEAP_SORT:
       case CARBON_CUSTOM_BLOCK_DISTRIBUTION:
       case CARBON_OPTIONS_BAD_RECORDS_LOGGER_ENABLE:
       case CARBON_OPTIONS_IS_EMPTY_DATA_BAD_RECORD:

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d8bafa34/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java b/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java
index bbfe26c..7cc665e 100644
--- a/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/CarbonPropertiesValidationTest.java
@@ -24,6 +24,7 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.util.CarbonProperties;
 
 import junit.framework.TestCase;
+import org.junit.Assert;
 import org.junit.Test;
 
 /**
@@ -65,6 +66,30 @@ public class CarbonPropertiesValidationTest extends TestCase {
         CarbonCommonConstants.ENABLE_UNSAFE_SORT_DEFAULT.equalsIgnoreCase(valueAfterValidation));
   }
 
+  @Test public void testValidateEnableOffHeapSort()
+      throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
+    Method validateMethodType =
+        carbonProperties.getClass().getDeclaredMethod("validateEnableOffHeapSort");
+    validateMethodType.setAccessible(true);
+    carbonProperties.addProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT, "True");
+    assert (carbonProperties.getProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT)
+        .equalsIgnoreCase("true"));
+    carbonProperties.addProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT, "false");
+    assert (carbonProperties.getProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT)
+        .equalsIgnoreCase("false"));
+    carbonProperties.addProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT, "xyz");
+    assert (carbonProperties.getProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT)
+        .equalsIgnoreCase("true"));
+    String valueBeforeValidation =
+        carbonProperties.getProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT);
+    validateMethodType.invoke(carbonProperties);
+    String valueAfterValidation =
+        carbonProperties.getProperty(CarbonCommonConstants.ENABLE_OFFHEAP_SORT);
+    assertTrue(valueBeforeValidation.equals(valueAfterValidation));
+    assertTrue(
+        CarbonCommonConstants.ENABLE_OFFHEAP_SORT_DEFAULT.equalsIgnoreCase(valueAfterValidation));
+  }
+
   @Test public void testValidateCustomBlockDistribution()
       throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
     Method validateMethodType =


[24/50] [abbrv] carbondata git commit: [CARBONDATA-2545] Fix some spelling errors in CarbonData

Posted by gv...@apache.org.
[CARBONDATA-2545] Fix some spelling errors in CarbonData

Rename FileTypeInerface to FileTypeInterface

This closes #2346
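
Illustrative only (the class name CustomFileTypeProvider is hypothetical): a minimal sketch of the renamed extension point, implementing FileTypeInterface and registering it through FileFactory.setFileTypeInterface(...) while delegating to DefaultFileTypeProvider:

    import org.apache.carbondata.core.datastore.FileReader;
    import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
    import org.apache.carbondata.core.datastore.impl.DefaultFileTypeProvider;
    import org.apache.carbondata.core.datastore.impl.FileFactory;
    import org.apache.carbondata.core.datastore.impl.FileTypeInterface;

    import org.apache.hadoop.conf.Configuration;

    public class CustomFileTypeProvider implements FileTypeInterface {
      private final DefaultFileTypeProvider delegate = new DefaultFileTypeProvider();

      public FileReader getFileHolder(FileFactory.FileType fileType) {
        return delegate.getFileHolder(fileType);
      }

      public CarbonFile getCarbonFile(String path, FileFactory.FileType fileType) {
        return delegate.getCarbonFile(path, fileType);
      }

      public CarbonFile getCarbonFile(String path, FileFactory.FileType fileType,
          Configuration configuration) {
        return delegate.getCarbonFile(path, fileType, configuration);
      }

      // Registration point renamed by this commit (was setFileTypeInerface).
      public static void main(String[] args) {
        FileFactory.setFileTypeInterface(new CustomFileTypeProvider());
      }
    }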


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/33b825d7
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/33b825d7
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/33b825d7

Branch: refs/heads/spark-2.3
Commit: 33b825d7fe686fc1985501f2208724aeabc99d9f
Parents: a7ac656
Author: xubo245 <xu...@huawei.com>
Authored: Mon May 28 11:47:27 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Mon May 28 17:16:05 2018 +0800

----------------------------------------------------------------------
 .../datastore/impl/DefaultFileTypeProvider.java |  2 +-
 .../core/datastore/impl/FileFactory.java        | 14 ++++-----
 .../core/datastore/impl/FileTypeInerface.java   | 32 --------------------
 .../core/datastore/impl/FileTypeInterface.java  | 32 ++++++++++++++++++++
 4 files changed, 40 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/33b825d7/core/src/main/java/org/apache/carbondata/core/datastore/impl/DefaultFileTypeProvider.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/DefaultFileTypeProvider.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/DefaultFileTypeProvider.java
index b58a473..f54e9af 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/DefaultFileTypeProvider.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/DefaultFileTypeProvider.java
@@ -26,7 +26,7 @@ import org.apache.carbondata.core.datastore.filesystem.ViewFSCarbonFile;
 
 import org.apache.hadoop.conf.Configuration;
 
-public class DefaultFileTypeProvider implements FileTypeInerface {
+public class DefaultFileTypeProvider implements FileTypeInterface {
 
   public FileReader getFileHolder(FileFactory.FileType fileType) {
     switch (fileType) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/33b825d7/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
index 1529649..5c46bcf 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
@@ -50,9 +50,9 @@ public final class FileFactory {
     configuration.addResource(new Path("../core-default.xml"));
   }
 
-  private static FileTypeInerface fileFileTypeInerface = new DefaultFileTypeProvider();
-  public static void setFileTypeInerface(FileTypeInerface fileTypeInerface) {
-    fileFileTypeInerface = fileTypeInerface;
+  private static FileTypeInterface fileFileTypeInterface = new DefaultFileTypeProvider();
+  public static void setFileTypeInterface(FileTypeInterface fileTypeInterface) {
+    fileFileTypeInterface = fileTypeInterface;
   }
   private FileFactory() {
 
@@ -63,7 +63,7 @@ public final class FileFactory {
   }
 
   public static FileReader getFileHolder(FileType fileType) {
-    return fileFileTypeInerface.getFileHolder(fileType);
+    return fileFileTypeInterface.getFileHolder(fileType);
   }
 
   public static FileType getFileType(String path) {
@@ -83,14 +83,14 @@ public final class FileFactory {
   }
 
   public static CarbonFile getCarbonFile(String path) {
-    return fileFileTypeInerface.getCarbonFile(path, getFileType(path));
+    return fileFileTypeInterface.getCarbonFile(path, getFileType(path));
   }
   public static CarbonFile getCarbonFile(String path, FileType fileType) {
-    return fileFileTypeInerface.getCarbonFile(path, fileType);
+    return fileFileTypeInterface.getCarbonFile(path, fileType);
   }
   public static CarbonFile getCarbonFile(String path, FileType fileType,
       Configuration hadoopConf) {
-    return fileFileTypeInerface.getCarbonFile(path, fileType, hadoopConf);
+    return fileFileTypeInterface.getCarbonFile(path, fileType, hadoopConf);
   }
 
   public static DataInputStream getDataInputStream(String path, FileType fileType)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/33b825d7/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInerface.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInerface.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInerface.java
deleted file mode 100644
index 413261c..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInerface.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.impl;
-
-import org.apache.carbondata.core.datastore.FileReader;
-import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
-
-import org.apache.hadoop.conf.Configuration;
-
-public interface FileTypeInerface {
-
-  FileReader getFileHolder(FileFactory.FileType fileType);
-  CarbonFile getCarbonFile(String path, FileFactory.FileType fileType);
-  CarbonFile getCarbonFile(String path, FileFactory.FileType fileType, Configuration configuration);
-
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/33b825d7/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInterface.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInterface.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInterface.java
new file mode 100644
index 0000000..84da148
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileTypeInterface.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datastore.impl;
+
+import org.apache.carbondata.core.datastore.FileReader;
+import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
+
+import org.apache.hadoop.conf.Configuration;
+
+public interface FileTypeInterface {
+
+  FileReader getFileHolder(FileFactory.FileType fileType);
+  CarbonFile getCarbonFile(String path, FileFactory.FileType fileType);
+  CarbonFile getCarbonFile(String path, FileFactory.FileType fileType, Configuration configuration);
+
+}
+


[19/50] [abbrv] carbondata git commit: [CARBONDATA-2503] Fix data write failure when an empty value is provided for sort columns in the SDK

Posted by gv...@apache.org.
[CARBONDATA-2503] Fix data write failure when an empty value is provided for sort columns in the SDK

A sort column with an empty value was throwing an exception; an empty sort-columns list is now treated like an unspecified one, so all dimensions default to sort columns.

This closes #2326
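
Illustrative only (the output path is hypothetical; the builder calls are those used in the tests below): a minimal sketch of the behaviour after this fix. An empty sort-columns array is now treated like an unspecified one, so all dimensions default to sort columns and the write succeeds:

    import org.apache.carbondata.core.metadata.datatype.DataTypes;
    import org.apache.carbondata.sdk.file.CarbonWriter;
    import org.apache.carbondata.sdk.file.Field;
    import org.apache.carbondata.sdk.file.Schema;

    public class EmptySortColumnsSketch {
      public static void main(String[] args) throws Exception {
        Field[] fields = new Field[2];
        fields[0] = new Field("name", DataTypes.STRING);
        fields[1] = new Field("age", DataTypes.INT);
        CarbonWriter writer = CarbonWriter.builder()
            .outputPath("./sdk_output")            // hypothetical local path
            .isTransactionalTable(false)
            .sortBy(new String[0])                 // empty array: used to fail, now defaults
            .buildWriterForCSVInput(new Schema(fields));
        writer.write(new String[]{"robot0", "0"});
        writer.close();
      }
    }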


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/9aa3a8c2
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/9aa3a8c2
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/9aa3a8c2

Branch: refs/heads/spark-2.3
Commit: 9aa3a8c22460f58691e0de7dee97dade5a567285
Parents: e1ef85a
Author: rahulforallp <ra...@knoldus.in>
Authored: Mon May 21 15:17:10 2018 +0530
Committer: manishgupta88 <to...@gmail.com>
Committed: Tue May 22 17:25:53 2018 +0530

----------------------------------------------------------------------
 .../TestNonTransactionalCarbonTable.scala       | 21 +++++++++++++++++---
 .../sdk/file/CarbonWriterBuilder.java           |  2 +-
 2 files changed, 19 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/9aa3a8c2/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index 1c74adc..afb9b2f 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -322,14 +322,12 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
       Row("robot0", 0, 0.0),
       Row("robot1", 1, 0.5),
       Row("robot2", 2, 1.0)))
-    new File(writerPath).listFiles().map(x => LOGGER.audit(x.getName +" : "+x.lastModified()))
     FileUtils.deleteDirectory(new File(writerPath))
     // Thread.sleep is required because it is possible sometime deletion
     // and creation of new file can happen at same timestamp.
     Thread.sleep(1000)
     assert(!new File(writerPath).exists())
     buildTestDataWithSameUUID(4, false, null, List("name"))
-    new File(writerPath).listFiles().map(x => LOGGER.audit(x.getName +" : "+x.lastModified()))
     checkAnswer(sql("select * from sdkOutputTable"), Seq(
       Row("robot0", 0, 0.0),
       Row("robot1", 1, 0.5),
@@ -379,9 +377,26 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
 
     checkExistence(sql("describe formatted sdkOutputTable"), true, "name")
 
+    buildTestDataWithSortColumns(List())
+    assert(new File(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkOutputTable")
+
+    // with partition
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkOutputTable(name string) PARTITIONED BY (age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    sql("describe formatted sdkOutputTable").show(false)
+    sql("select * from sdkOutputTable").show()
+
+    intercept[RuntimeException] {
+      buildTestDataWithSortColumns(List(""))
+    }
+
     sql("DROP TABLE sdkOutputTable")
     // drop table should not delete the files
-    assert(new File(writerPath).exists())
+    assert(!(new File(writerPath).exists()))
     cleanTestData()
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/9aa3a8c2/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index bf99e05..e846da4 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -377,7 +377,7 @@ public class CarbonWriterBuilder {
     }
 
     List<String> sortColumnsList = new ArrayList<>();
-    if (sortColumns == null) {
+    if (sortColumns == null || sortColumns.length == 0) {
       // If sort columns are not specified, default set all dimensions to sort column.
       // When dimensions are default set to sort column,
       // Inverted index will be supported by default for sort columns.


[07/50] [abbrv] carbondata git commit: [CARBONDATA-2498] Change CarbonWriterBuilder interface to take schema while creating writer

Posted by gv...@apache.org.
[CARBONDATA-2498] Change CarbonWriterBuilder interface to take schema while creating writer

This closes #2316
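
Illustrative only (the record values and output path are hypothetical): a minimal sketch of the new builder contract shown in the diff below. The schema is no longer supplied up front via withSchema(...); it is passed to buildWriterForCSVInput(...) or buildWriterForAvroInput(...) at build time:

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;

    import org.apache.carbondata.sdk.file.CarbonWriter;

    public class SchemaAtBuildTimeSketch {
      public static void main(String[] args) throws Exception {
        String avroSchema = "{\"type\":\"record\",\"name\":\"rec\",\"fields\":["
            + "{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"age\",\"type\":\"int\"}]}";
        Schema parsed = new Schema.Parser().parse(avroSchema);
        GenericData.Record record = new GenericData.Record(parsed);
        record.put("name", "bob");
        record.put("age", 10);
        // The avro schema is handed directly to the build method; no withSchema(...) call.
        CarbonWriter writer = CarbonWriter.builder()
            .outputPath("./sdk_avro_output")
            .isTransactionalTable(false)
            .buildWriterForAvroInput(parsed);
        writer.write(record);
        writer.close();
      }
    }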


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/26eb2d0b
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/26eb2d0b
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/26eb2d0b

Branch: refs/heads/spark-2.3
Commit: 26eb2d0b0e795c098d064471f7387072a74e07e5
Parents: 7ef9164
Author: kunal642 <ku...@gmail.com>
Authored: Thu May 17 21:00:50 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Mon May 21 17:13:12 2018 +0530

----------------------------------------------------------------------
 docs/sdk-writer-guide.md                        |  25 +-
 .../examples/sdk/CarbonReaderExample.java       |   3 +-
 .../carbondata/examples/sdk/SDKS3Example.java   |   3 +-
 .../carbondata/examples/S3UsingSDkExample.scala |   8 +-
 ...FileInputFormatWithExternalCarbonTable.scala |   4 +-
 ...tCreateTableUsingSparkCarbonFileFormat.scala |   4 +-
 .../TestNonTransactionalCarbonTable.scala       | 302 +++----------------
 ...ransactionalCarbonTableWithComplexType.scala |  53 +---
 ...tSparkCarbonFileFormatWithSparkSession.scala |   4 +-
 .../carbondata/sdk/file/AvroCarbonWriter.java   |   8 +-
 .../sdk/file/CarbonWriterBuilder.java           |  24 +-
 .../sdk/file/AvroCarbonWriterTest.java          |  15 +-
 .../sdk/file/CSVCarbonWriterTest.java           |  10 +-
 .../CSVNonTransactionalCarbonWriterTest.java    |   6 +-
 .../apache/carbondata/sdk/file/TestUtil.java    |   3 +-
 15 files changed, 82 insertions(+), 390 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/docs/sdk-writer-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-writer-guide.md b/docs/sdk-writer-guide.md
index 9878b71..682b27a 100644
--- a/docs/sdk-writer-guide.md
+++ b/docs/sdk-writer-guide.md
@@ -33,9 +33,9 @@ These SDK writer output contains just a carbondata and carbonindex files. No met
  
      Schema schema = new Schema(fields);
  
-     CarbonWriterBuilder builder = CarbonWriter.builder().withSchema(schema).outputPath(path);
+     CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path);
  
-     CarbonWriter writer = builder.buildWriterForCSVInput();
+     CarbonWriter writer = builder.buildWriterForCSVInput(schema);
  
      int rows = 5;
      for (int i = 0; i < rows; i++) {
@@ -87,15 +87,10 @@ public class TestSdkAvro {
     GenericData.Record record = converter.convertToGenericDataRecord(
         json.getBytes(CharEncoding.UTF_8), new org.apache.avro.Schema.Parser().parse(avroSchema));
 
-    // prepare carbon schema from avro schema 
-    org.apache.carbondata.sdk.file.Schema carbonSchema =
-            AvroCarbonWriter.getCarbonSchemaFromAvroSchema(avroSchema);
-
     try {
       CarbonWriter writer = CarbonWriter.builder()
-          .withSchema(carbonSchema)
           .outputPath(path)
-          .buildWriterForAvroInput();
+          .buildWriterForAvroInput(new org.apache.avro.Schema.Parser().parse(avroSchema));
 
       for (int i = 0; i < 100; i++) {
         writer.write(record);
@@ -130,16 +125,6 @@ Each of SQL data types are mapped into data types of SDK. Following are the mapp
 ### Class org.apache.carbondata.sdk.file.CarbonWriterBuilder
 ```
 /**
-* prepares the builder with the schema provided
-* @param schema is instance of Schema
-*        This method must be called when building CarbonWriterBuilder
-* @return updated CarbonWriterBuilder
-*/
-public CarbonWriterBuilder withSchema(Schema schema);
-```
-
-```
-/**
 * Sets the output path of the writer builder
 * @param path is the absolute path where output files are written
 *             This method must be called when building CarbonWriterBuilder
@@ -259,6 +244,7 @@ public CarbonWriterBuilder withLoadOptions(Map<String, String> options);
 ```
 /**
 * Build a {@link CarbonWriter}, which accepts row in CSV format object
+* @param schema carbon Schema object {org.apache.carbondata.sdk.file.Schema}
 * @return CSVCarbonWriter
 * @throws IOException
 * @throws InvalidLoadOptionException
@@ -269,6 +255,7 @@ public CarbonWriter buildWriterForCSVInput() throws IOException, InvalidLoadOpti
 ```  
 /**
 * Build a {@link CarbonWriter}, which accepts Avro format object
+* @param avroSchema avro Schema object {org.apache.avro.Schema}
 * @return AvroCarbonWriter 
 * @throws IOException
 * @throws InvalidLoadOptionException
@@ -356,4 +343,4 @@ public static Schema parseJson(String json);
 * @return carbon sdk schema
 */
 public static org.apache.carbondata.sdk.file.Schema getCarbonSchemaFromAvroSchema(String avroSchemaString);
-```
\ No newline at end of file
+```

http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
index 8ea8604..937bfa0 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
@@ -44,11 +44,10 @@ public class CarbonReaderExample {
             fields[1] = new Field("age", DataTypes.INT);
 
             CarbonWriter writer = CarbonWriter.builder()
-                    .withSchema(new Schema(fields))
                     .isTransactionalTable(true)
                     .outputPath(path)
                     .persistSchemaFile(true)
-                    .buildWriterForCSVInput();
+                    .buildWriterForCSVInput(new Schema(fields));
 
             for (int i = 0; i < 10; i++) {
                 writer.write(new String[]{"robot" + (i % 10), String.valueOf(i)});

http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
index 02247cb..7fab2cc 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java
@@ -66,7 +66,6 @@ public class SDKS3Example {
         fields[0] = new Field("name", DataTypes.STRING);
         fields[1] = new Field("age", DataTypes.INT);
         CarbonWriterBuilder builder = CarbonWriter.builder()
-                .withSchema(new Schema(fields))
                 .setAccessKey(args[0])
                 .setSecretKey(args[1])
                 .setEndPoint(args[2])
@@ -74,7 +73,7 @@ public class SDKS3Example {
                 .persistSchemaFile(persistSchema)
                 .isTransactionalTable(transactionalTable);
 
-        CarbonWriter writer = builder.buildWriterForCSVInput();
+        CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
 
         for (int i = 0; i < num; i++) {
             writer.write(new String[]{"robot" + (i % 10), String.valueOf(i)});

http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
index 7ecde88..022b28e 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
@@ -49,15 +49,15 @@ object S3UsingSDKExample {
       val writer =
         if (persistSchema) {
           builder.persistSchemaFile(true)
-          builder.withSchema(new Schema(fields)).outputPath(writerPath).isTransactionalTable(true)
+          builder.outputPath(writerPath).isTransactionalTable(true)
             .uniqueIdentifier(
               System.currentTimeMillis)
-            .buildWriterForCSVInput()
+            .buildWriterForCSVInput(new Schema(fields))
         } else {
-          builder.withSchema(new Schema(fields)).outputPath(writerPath).isTransactionalTable(true)
+          builder.outputPath(writerPath).isTransactionalTable(true)
             .uniqueIdentifier(
               System.currentTimeMillis).withBlockSize(2)
-            .buildWriterForCSVInput()
+            .buildWriterForCSVInput(new Schema(fields))
         }
       var i = 0
       var row = num

http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
index 9646c1d..019b915 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
@@ -59,9 +59,9 @@ class TestCarbonFileInputFormatWithExternalCarbonTable extends QueryTest with Be
       val writer =
       if (persistSchema) {
         builder.persistSchemaFile(true)
-        builder.withSchema(Schema.parseJson(schema)).outputPath(writerPath).buildWriterForCSVInput()
+        builder.outputPath(writerPath).buildWriterForCSVInput(Schema.parseJson(schema))
       } else {
-        builder.withSchema(Schema.parseJson(schema)).outputPath(writerPath).buildWriterForCSVInput()
+        builder.outputPath(writerPath).buildWriterForCSVInput(Schema.parseJson(schema))
       }
 
       var i = 0

http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
index 16f19a7..66be8e4 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
@@ -68,9 +68,9 @@ class TestCreateTableUsingSparkCarbonFileFormat extends QueryTest with BeforeAnd
       val writer =
         if (persistSchema) {
           builder.persistSchemaFile(true)
-          builder.withSchema(Schema.parseJson(schema)).outputPath(writerPath).buildWriterForCSVInput()
+          builder.outputPath(writerPath).buildWriterForCSVInput(Schema.parseJson(schema))
         } else {
-          builder.withSchema(Schema.parseJson(schema)).outputPath(writerPath).buildWriterForCSVInput()
+          builder.outputPath(writerPath).buildWriterForCSVInput(Schema.parseJson(schema))
         }
 
       var i = 0

http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index a15d0c2..1c74adc 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -129,27 +129,27 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
       val writer =
         if (persistSchema) {
           builder.persistSchemaFile(true)
-          builder.withSchema(Schema.parseJson(schema))
+          builder
             .sortBy(sortColumns.toArray)
             .outputPath(writerPath)
             .isTransactionalTable(false)
             .uniqueIdentifier(System.currentTimeMillis)
-            .buildWriterForCSVInput()
+            .buildWriterForCSVInput(Schema.parseJson(schema))
         } else {
           if (options != null) {
-            builder.withSchema(Schema.parseJson(schema)).outputPath(writerPath)
+            builder.outputPath(writerPath)
               .isTransactionalTable(false)
               .sortBy(sortColumns.toArray)
               .uniqueIdentifier(
                 System.currentTimeMillis).withBlockSize(2).withLoadOptions(options)
-              .buildWriterForCSVInput()
+              .buildWriterForCSVInput(Schema.parseJson(schema))
           } else {
-            builder.withSchema(Schema.parseJson(schema)).outputPath(writerPath)
+            builder.outputPath(writerPath)
               .isTransactionalTable(false)
               .sortBy(sortColumns.toArray)
               .uniqueIdentifier(
                 System.currentTimeMillis).withBlockSize(2)
-              .buildWriterForCSVInput()
+              .buildWriterForCSVInput(Schema.parseJson(schema))
           }
         }
       var i = 0
@@ -185,10 +185,10 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
     try {
       val builder = CarbonWriter.builder()
       val writer =
-        builder.withSchema(new Schema(fields)).outputPath(writerPath)
+        builder.outputPath(writerPath)
           .isTransactionalTable(false)
           .uniqueIdentifier(System.currentTimeMillis()).withBlockSize(2).sortBy(sortColumns)
-          .buildWriterForCSVInput()
+          .buildWriterForCSVInput(new Schema(fields))
 
       var i = 0
       while (i < rows) {
@@ -218,12 +218,12 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
     try {
       val builder = CarbonWriter.builder()
       val writer =
-        builder.withSchema(Schema.parseJson(schema)).outputPath(writerPath)
+        builder.outputPath(writerPath)
           .isTransactionalTable(false)
           .sortBy(sortColumns.toArray)
           .uniqueIdentifier(
             123).withBlockSize(2)
-          .buildWriterForCSVInput()
+          .buildWriterForCSVInput(Schema.parseJson(schema))
       var i = 0
       while (i < rows) {
         writer.write(Array[String]("robot" + i, String.valueOf(i), String.valueOf(i.toDouble / 2)))
@@ -915,10 +915,10 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
     fields(2) = new Field("mydate", DataTypes.DATE)
     fields(3) = new Field("mytime", DataTypes.TIMESTAMP)
 
-    val builder: CarbonWriterBuilder = CarbonWriter.builder.withSchema(new Schema(fields))
+    val builder: CarbonWriterBuilder = CarbonWriter.builder
       .outputPath(writerPath).isTransactionalTable(false).withLoadOptions(options)
 
-    val writer: CarbonWriter = builder.buildWriterForCSVInput
+    val writer: CarbonWriter = builder.buildWriterForCSVInput(new Schema(fields))
     writer.write(Array("babu","1","02-01-2002","02-01-2002 01:01:00"));
     writer.close()
 
@@ -1014,8 +1014,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
 
   private def WriteFilesWithAvroWriter(rows: Int,
       mySchema: String,
-      json: String,
-      fields: Array[Field]) = {
+      json: String) = {
     // conversion to GenericData.Record
     val nn = new avro.Schema.Parser().parse(mySchema)
     val converter = new JsonAvroConverter
@@ -1023,9 +1022,9 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
       .convertToGenericDataRecord(json.getBytes(CharEncoding.UTF_8), nn)
 
     try {
-      val writer = CarbonWriter.builder.withSchema(new Schema(fields))
+      val writer = CarbonWriter.builder
         .outputPath(writerPath).isTransactionalTable(false)
-        .uniqueIdentifier(System.currentTimeMillis()).buildWriterForAvroInput
+        .uniqueIdentifier(System.currentTimeMillis()).buildWriterForAvroInput(nn)
       var i = 0
       while (i < rows) {
         writer.write(record)
@@ -1061,15 +1060,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
 
     val json = """ {"name":"bob", "age":10.24, "address" : {"street":"abc", "city":"bang"}} """
 
-    val fields = new Array[Field](3)
-    fields(0) = new Field("name", DataTypes.STRING)
-    fields(1) = new Field("age", DataTypes.DOUBLE)
-    val fld = new util.ArrayList[StructField]
-    fld.add(new StructField("street", DataTypes.STRING))
-    fld.add(new StructField("city", DataTypes.STRING))
-    fields(2) = new Field("address", "struct", fld)
-
-    WriteFilesWithAvroWriter(rows, mySchema, json, fields)
+    WriteFilesWithAvroWriter(rows, mySchema, json)
   }
 
   def buildAvroTestDataStructType(): Any = {
@@ -1109,16 +1100,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
 
     val json: String = """ {"name": "bob","age": 10,"address": ["abc", "defg"]} """
 
-
-    val fields = new Array[Field](3)
-    fields(0) = new Field("name", DataTypes.STRING)
-    fields(1) = new Field("age", DataTypes.INT)
-    // fields[1] = new Field("age", DataTypes.INT);
-    val fld = new util.ArrayList[StructField]
-    fld.add(new StructField("street", DataTypes.STRING))
-    fields(2) = new Field("address", "array", fld)
-
-    WriteFilesWithAvroWriter(rows, mySchema, json, fields)
+    WriteFilesWithAvroWriter(rows, mySchema, json)
   }
 
   def buildAvroTestDataSingleFileArrayType(): Any = {
@@ -1163,17 +1145,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
         |"address" : {"street":"abc", "city":"bang"},
         |"doorNum" : [1,2,3,4]}""".stripMargin
 
-    val fields = new Array[Field](4)
-    fields(0) = new Field("name", DataTypes.STRING)
-    fields(1) = new Field("age", DataTypes.INT)
-    val fld = new util.ArrayList[StructField]
-    fld.add(new StructField("street", DataTypes.STRING))
-    fld.add(new StructField("city", DataTypes.STRING))
-    fields(2) = new Field("address", "struct", fld)
-    val fld1 = new util.ArrayList[StructField]
-    fld1.add(new StructField("eachDoorNum", DataTypes.INT))
-    fields(3) = new Field("doorNum", "array", fld1)
-    WriteFilesWithAvroWriter(rows, mySchema, json, fields)
+    WriteFilesWithAvroWriter(rows, mySchema, json)
   }
 
   def buildAvroTestDataBothStructArrayType(): Any = {
@@ -1227,19 +1199,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
         |{"street":"ghi","city":"city3"},
         |{"street":"jkl","city":"city4"}]} """.stripMargin
 
-    val fields = new Array[Field](3)
-    fields(0) = new Field("name", DataTypes.STRING)
-    fields(1) = new Field("age", DataTypes.INT)
-
-    val fld = new util.ArrayList[StructField]
-    fld.add(new StructField("street", DataTypes.STRING))
-    fld.add(new StructField("city", DataTypes.STRING))
-
-    val fld2 = new util.ArrayList[StructField]
-    fld2.add(new StructField("my_address", DataTypes.createStructType(fld), fld))
-    fields(2) = new Field("doorNum", DataTypes.createArrayType(fld2.get(0).getDataType), fld2)
-
-    WriteFilesWithAvroWriter(rows, mySchema, json, fields)
+    WriteFilesWithAvroWriter(rows, mySchema, json)
   }
 
   def buildAvroTestDataArrayOfStructType(): Any = {
@@ -1310,20 +1270,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
                  |	}
                  |} """.stripMargin
 
-    val fields = new Array[Field](3)
-    fields(0) = new Field("name", DataTypes.STRING)
-    fields(1) = new Field("age", DataTypes.INT)
-
-    val fld2 = new util.ArrayList[StructField]
-    fld2.add(new StructField("street", DataTypes.STRING))
-    fld2.add(new StructField("city", DataTypes.STRING))
-
-    val fld1 = new util.ArrayList[StructField]
-    fld1.add(new StructField("eachDoorNum", DataTypes.INT))
-    fld2.add(new StructField("doorNum", DataTypes.createArrayType(DataTypes.INT), fld1))
-
-    fields(2) = new Field("address","struct",fld2)
-    WriteFilesWithAvroWriter(rows, mySchema, json, fields)
+    WriteFilesWithAvroWriter(rows, mySchema, json)
   }
 
   def buildAvroTestDataStructOfArrayType(): Any = {
@@ -1376,19 +1323,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
         |{"street":"ghi","city":"city3"},
         |{"street":"jkl","city":"city4"}]} """.stripMargin
 
-    val fields = new Array[Field](3)
-    fields(0) = new Field("exp", DataTypes.INT)
-    fields(1) = new Field("age", DataTypes.INT)
-
-    val fld = new util.ArrayList[StructField]
-    fld.add(new StructField("street", DataTypes.STRING))
-    fld.add(new StructField("city", DataTypes.STRING))
-
-    val fld2 = new util.ArrayList[StructField]
-    fld2.add(new StructField("my_address", DataTypes.createStructType(fld), fld))
-    fields(2) = new Field("doorNum", DataTypes.createArrayType(fld2.get(0).getDataType), fld2)
-
-    WriteFilesWithAvroWriter(rows, mySchema, json, fields)
+    WriteFilesWithAvroWriter(rows, mySchema, json)
   }
 
   test("Read sdk writer Avro output Record Type with no sort columns") {
@@ -1486,15 +1421,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
     // skip giving array value to take default values
     val json: String = "{\"name\": \"bob\",\"age\": 10}"
 
-    val fields = new Array[Field](3)
-    fields(0) = new Field("name", DataTypes.STRING)
-    fields(1) = new Field("age", DataTypes.INT)
-    // fields[1] = new Field("age", DataTypes.INT);
-    val fld = new util.ArrayList[StructField]
-    fld.add(new StructField("street", DataTypes.STRING))
-    fields(2) = new Field("address", "array", fld)
-
-    WriteFilesWithAvroWriter(rows, mySchema, json, fields)
+    WriteFilesWithAvroWriter(rows, mySchema, json)
   }
 
   def buildAvroTestDataSingleFileArrayDefaultType(): Any = {
@@ -1680,20 +1607,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
         |	]
         |} """.stripMargin
 
-    val fields = new Array[Field](3)
-    fields(0) = new Field("name", DataTypes.STRING)
-    fields(1) = new Field("age", DataTypes.INT)
-
-    val fld = new util.ArrayList[StructField]
-    fld.add(new StructField("street", DataTypes.STRING))
-    fld.add(new StructField("city", DataTypes.STRING))
-    fld.add(new StructField("FloorNum", DataTypes.createArrayType(DataTypes.INT)))
-
-    val fld2 = new util.ArrayList[StructField]
-    fld2.add(new StructField("my_address", DataTypes.createStructType(fld), fld))
-    fields(2) = new Field("doorNum", DataTypes.createArrayType(fld2.get(0).getDataType), fld2)
-
-    WriteFilesWithAvroWriter(rows, mySchema, json, fields)
+    WriteFilesWithAvroWriter(rows, mySchema, json)
   }
 
   def buildAvroTestDataMultiLevel3Type(): Any = {
@@ -1812,25 +1726,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
         |	]
         |}  """.stripMargin
 
-    val fields = new Array[Field](3)
-    fields(0) = new Field("name", DataTypes.STRING)
-    fields(1) = new Field("age", DataTypes.INT)
-
-    val fld = new util.ArrayList[StructField]
-    fld.add(new StructField("street", DataTypes.STRING))
-    fld.add(new StructField("city", DataTypes.STRING))
-
-    val subFld = new util.ArrayList[StructField]
-    subFld.add(new StructField("wing", DataTypes.STRING))
-    subFld.add(new StructField("number", DataTypes.INT))
-    fld.add(new StructField("FloorNum", DataTypes.createStructType(subFld)))
-
-    // array of struct of struct
-    val fld2 = new util.ArrayList[StructField]
-    fld2.add(new StructField("my_address", DataTypes.createStructType(fld), fld))
-    fields(2) = new Field("doorNum", DataTypes.createArrayType(fld2.get(0).getDataType), fld2)
-
-    WriteFilesWithAvroWriter(rows, mySchema, json, fields)
+    WriteFilesWithAvroWriter(rows, mySchema, json)
   }
 
   def buildAvroTestDataMultiLevel3_1Type(): Any = {
@@ -1910,22 +1806,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
         |        	"BuildNum": [[[1,2,3],[4,5,6]],[[10,20,30],[40,50,60]]]
         |        }   """.stripMargin
 
-    val fields = new Array[Field](3)
-    fields(0) = new Field("name", DataTypes.STRING)
-    fields(1) = new Field("age", DataTypes.INT)
-
-    val subFld = new util.ArrayList[StructField]
-    subFld.add(new StructField("EachDoorNum", DataTypes.INT))
-
-    val fld = new util.ArrayList[StructField]
-    fld.add(new StructField("DoorNum", DataTypes.createArrayType(DataTypes.INT), subFld))
-    // array of struct of struct
-    val doorNum = new util.ArrayList[StructField]
-    doorNum.add(new StructField("FloorNum",
-      DataTypes.createArrayType(DataTypes.createArrayType(DataTypes.INT)), fld))
-    fields(2) = new Field("BuildNum", "array", doorNum)
-
-    WriteFilesWithAvroWriter(rows, mySchema, json, fields)
+    WriteFilesWithAvroWriter(rows, mySchema, json)
   }
 
   def buildAvroTestDataMultiLevel3_2Type(): Any = {
@@ -2045,29 +1926,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
         |	]
         |} """.stripMargin
 
-    val fields = new Array[Field](3)
-    fields(0) = new Field("name", DataTypes.STRING)
-    fields(1) = new Field("age", DataTypes.INT)
-
-    val subFld = new util.ArrayList[StructField]
-    subFld.add(new StructField("EachDoorNum", DataTypes.INT))
-
-    val address = new util.ArrayList[StructField]
-    address.add(new StructField("street", DataTypes.STRING))
-    address.add(new StructField("city", DataTypes.STRING))
-
-    val fld = new util.ArrayList[StructField]
-    fld.add(new StructField("DoorNum",
-        DataTypes.createArrayType(DataTypes.createStructType(address)),
-        subFld))
-    // array of struct of struct
-    val doorNum = new util.ArrayList[StructField]
-    doorNum.add(new StructField("FloorNum",
-      DataTypes.createArrayType(
-        DataTypes.createArrayType(DataTypes.createStructType(address))), fld))
-    fields(2) = new Field("BuildNum", "array", doorNum)
-
-    WriteFilesWithAvroWriter(rows, mySchema, json, fields)
+    WriteFilesWithAvroWriter(rows, mySchema, json)
   }
 
   def buildAvroTestDataMultiLevel4Type(): Any = {
@@ -2130,15 +1989,9 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
     val record = converter
       .convertToGenericDataRecord(json1.getBytes(CharEncoding.UTF_8), nn)
 
-    val fields = new Array[Field](2)
-    fields(0) = new Field("id", DataTypes.INT)
-    val fld_s = new java.util.ArrayList[StructField]
-    fld_s.add(new StructField("course_struct_course_time", DataTypes.STRING))
-    fields(1) = new Field("course_details", "struct", fld_s)
-
     assert(intercept[RuntimeException] {
-      val writer = CarbonWriter.builder.withSchema(new Schema(fields)).sortBy(Array("name", "id"))
-        .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput
+      val writer = CarbonWriter.builder.sortBy(Array("name", "id"))
+        .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
       writer.write(record)
       writer.close()
     }.getMessage.toLowerCase.contains("column: name specified in sort columns"))
@@ -2179,8 +2032,8 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
     val record = converter
       .convertToGenericDataRecord(json1.getBytes(CharEncoding.UTF_8), nn)
 
-    val writer = CarbonWriter.builder.withSchema(AvroCarbonWriter.getCarbonSchemaFromAvroSchema(schema1))
-      .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput
+    val writer = CarbonWriter.builder
+      .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
     writer.write(record)
     writer.close()
   }
@@ -2219,14 +2072,8 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
     val record = converter
       .convertToGenericDataRecord(json1.getBytes(CharEncoding.UTF_8), nn)
 
-    val fields = new Array[Field](2)
-    fields(0) = new Field("id", DataTypes.INT)
-    val fld_s = new java.util.ArrayList[StructField]
-    fld_s.add(new StructField("course_struct_course_time", DataTypes.TIMESTAMP))
-    fields(1) = new Field("course_details", "struct", fld_s)
-
-    val writer = CarbonWriter.builder.withSchema(new Schema(fields)).sortBy(Array("id"))
-      .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput
+    val writer = CarbonWriter.builder.sortBy(Array("id"))
+      .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
     writer.write(record)
     writer.close()
   }
@@ -2271,91 +2118,10 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
     val record = converter
       .convertToGenericDataRecord(json1.getBytes(CharEncoding.UTF_8), nn)
 
-    val fields = new Array[Field](2)
-    fields(0) = new Field("id", DataTypes.LONG)
-    val fld_s = new java.util.ArrayList[StructField]
-    fld_s.add(new StructField("id", DataTypes.LONG))
-    fields(1) = new Field("entries", DataTypes.createArrayType(DataTypes.createStructType(fld_s)))
-    val writer = CarbonWriter.builder.withSchema(new Schema(fields))
-      .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput
+    val writer = CarbonWriter.builder
+      .outputPath(writerPath).isTransactionalTable(false).buildWriterForAvroInput(nn)
     writer.write(record)
     writer.close()
   }
 
-  test("test if data load with various bad_records_action") {
-    val schema =
-      """{
-        |	"namespace": "com.apache.schema",
-        |	"type": "record",
-        |	"name": "StudentActivity",
-        |	"fields": [
-        |		{
-        |			"name": "id",
-        |			"type": "string"
-        |		},
-        |		{
-        |			"name": "course_details",
-        |			"type": {
-        |				"name": "course_details",
-        |				"type": "record",
-        |				"fields": [
-        |					{
-        |						"name": "course_struct_course_string",
-        |						"type": "string"
-        |					}
-        |				]
-        |			}
-        |		},
-        |		{
-        |			"name": "salary_string",
-        |			"type": {
-        |				"type": "array",
-        |				"items": "string"
-        |			}
-        |		}
-        |	]
-        |}""".stripMargin
-    val json1 =
-      """{
-        |	"id": "cust_1",
-        |	"course_details": {
-        |		"course_struct_course_string": "asd"
-        |	},
-        |	"salary_string": [
-        |		"xyz",
-        |		"abc"
-        |	]
-        |}""".stripMargin
-
-    val nn = new org.apache.avro.Schema.Parser().parse(schema)
-    val converter = new JsonAvroConverter
-    val record = converter
-      .convertToGenericDataRecord(json1.getBytes(CharEncoding.UTF_8), nn)
-
-    val fields = new Array[Field](3)
-    fields(0)=new Field("id", DataTypes.STRING)
-    val fld_s = new java.util.ArrayList[StructField]
-    fld_s.add(new StructField("carbon_int", DataTypes.INT))
-    fields(1)=new Field("course_details", "struct",fld_s)
-
-    val fld_a = new java.util.ArrayList[StructField]
-    fld_a.add(new StructField("carbon_array", DataTypes.INT))
-    fields(2)=new Field("salary_string", "array",fld_a)
-
-    val loadOptions = new util.HashMap[String, String]()
-    loadOptions.put("bad_records_action", "fail")
-    assert(intercept[Exception] {
-      val writer = CarbonWriter.builder.withSchema(new Schema(fields)).outputPath(writerPath)
-        .isTransactionalTable(false).withLoadOptions(loadOptions).buildWriterForAvroInput
-      writer.write(record)
-      writer.close()
-    }.getMessage.contains("Data load failed due to bad record"))
-
-    loadOptions.put("bad_records_action", "FORCE")
-      val writer = CarbonWriter.builder.withSchema(new Schema(fields)).outputPath(writerPath)
-        .isTransactionalTable(false).withLoadOptions(loadOptions).buildWriterForAvroInput
-      writer.write(record)
-      writer.close()
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala
index 7f9023b..d4de428 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTableWithComplexType.scala
@@ -19,9 +19,6 @@ package org.apache.carbondata.spark.testsuite.createTable
 
 import java.io.File
 import java.util
-import java.util.ArrayList
-
-import scala.collection.mutable.ArrayBuffer
 
 import org.apache.avro
 import org.apache.commons.io.FileUtils
@@ -32,9 +29,8 @@ import org.scalatest.BeforeAndAfterAll
 import tech.allegro.schema.json2avro.converter.JsonAvroConverter
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.metadata.datatype.{DataTypes, StructField}
-import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
-import org.apache.carbondata.sdk.file.{CarbonWriter, Field, Schema}
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.sdk.file.CarbonWriter
 
 class TestNonTransactionalCarbonTableWithComplexType extends QueryTest with BeforeAndAfterAll {
 
@@ -64,8 +60,7 @@ class TestNonTransactionalCarbonTableWithComplexType extends QueryTest with Befo
 
   private def WriteFilesWithAvroWriter(rows: Int,
       mySchema: String,
-      json: String,
-      fields: Array[Field]) = {
+      json: String) = {
     // conversion to GenericData.Record
     val nn = new avro.Schema.Parser().parse(mySchema)
     val converter = new JsonAvroConverter
@@ -73,9 +68,9 @@ class TestNonTransactionalCarbonTableWithComplexType extends QueryTest with Befo
       .convertToGenericDataRecord(json.getBytes(CharEncoding.UTF_8), nn)
 
     try {
-      val writer = CarbonWriter.builder.withSchema(new Schema(fields))
+      val writer = CarbonWriter.builder
         .outputPath(writerPath).isTransactionalTable(false)
-        .uniqueIdentifier(System.currentTimeMillis()).buildWriterForAvroInput
+        .uniqueIdentifier(System.currentTimeMillis()).buildWriterForAvroInput(nn)
       var i = 0
       while (i < rows) {
         writer.write(record)
@@ -182,32 +177,7 @@ class TestNonTransactionalCarbonTableWithComplexType extends QueryTest with Befo
         |	]
         |} """.stripMargin
 
-    val fields = new Array[Field](3)
-    fields(0) = new Field("name", DataTypes.STRING)
-    fields(1) = new Field("age", DataTypes.INT)
-
-    val subFld = new util.ArrayList[StructField]
-    subFld.add(new StructField("EachDoorNum", DataTypes.INT))
-
-    val address = new util.ArrayList[StructField]
-    address.add(new StructField("street", DataTypes.STRING))
-    address.add(new StructField("city", DataTypes.STRING))
-    address.add(new StructField("Temperature", DataTypes.DOUBLE))
-    address.add(new StructField("WindSpeed", DataTypes.createDecimalType(6,2)))
-    address.add(new StructField("year", DataTypes.DATE))
-
-    val fld = new util.ArrayList[StructField]
-    fld.add(new StructField("DoorNum",
-      DataTypes.createArrayType(DataTypes.createStructType(address)),
-      subFld))
-    // array of struct of struct
-    val doorNum = new util.ArrayList[StructField]
-    doorNum.add(new StructField("FloorNum",
-      DataTypes.createArrayType(
-        DataTypes.createArrayType(DataTypes.createStructType(address))), fld))
-    fields(2) = new Field("BuildNum", "array", doorNum)
-
-    WriteFilesWithAvroWriter(rows, mySchema, json, fields)
+    WriteFilesWithAvroWriter(rows, mySchema, json)
   }
 
   def buildAvroTestDataMultiLevel4Type(): Any = {
@@ -274,16 +244,7 @@ class TestNonTransactionalCarbonTableWithComplexType extends QueryTest with Befo
 
     val records=new JsonAvroConverter().convertToGenericDataRecord(jsonvalue.getBytes(CharEncoding.UTF_8),pschema)
 
-    val fieds = new Array[Field](3)
-    fieds(0)=new Field("name",DataTypes.STRING);
-    fieds(1)=new Field("age",DataTypes.INT)
-
-    val fld = new util.ArrayList[StructField]
-    fld.add(new StructField("Temperature", DataTypes.DOUBLE))
-    fieds(2) = new Field("my_address", "struct", fld)
-
-
-    val writer=CarbonWriter.builder().withSchema(new Schema(fieds)).outputPath(writerPath).buildWriterForAvroInput()
+    val writer=CarbonWriter.builder().outputPath(writerPath).buildWriterForAvroInput(pschema)
     writer.write(records)
     writer.close()
     sql("DROP TABLE IF EXISTS sdkOutputTable")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala
index 53dadf6..54b23a5 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestSparkCarbonFileFormatWithSparkSession.scala
@@ -58,9 +58,9 @@ object TestSparkCarbonFileFormatWithSparkSession {
       val writer =
         if (persistSchema) {
           builder.persistSchemaFile(true)
-          builder.withSchema(Schema.parseJson(schema)).outputPath(writerPath).buildWriterForCSVInput()
+          builder.outputPath(writerPath).buildWriterForCSVInput(Schema.parseJson(schema))
         } else {
-          builder.withSchema(Schema.parseJson(schema)).outputPath(writerPath).buildWriterForCSVInput()
+          builder.outputPath(writerPath).buildWriterForCSVInput(Schema.parseJson(schema))
         }
 
       var i = 0

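For reference, a minimal sketch of the reworked CSV path exercised above, assuming a writable local output directory; the class name, output path and JSON schema literal are illustrative and not part of this patch:

  import java.io.IOException;

  import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
  import org.apache.carbondata.sdk.file.CarbonWriter;
  import org.apache.carbondata.sdk.file.Schema;

  public class ParseJsonWriterSketch {
    public static void main(String[] args) throws IOException, InvalidLoadOptionException {
      String writerPath = "./target/parseJsonWriterSketch";            // illustrative path
      // the schema is now supplied to the build call instead of through withSchema()
      Schema schema = Schema.parseJson("[{\"name\":\"string\"}, {\"age\":\"int\"}]");
      CarbonWriter writer = CarbonWriter.builder()
          .outputPath(writerPath)
          .isTransactionalTable(false)
          .buildWriterForCSVInput(schema);
      for (int i = 0; i < 10; i++) {
        writer.write(new String[]{"robot" + i, String.valueOf(i)});
      }
      writer.close();
    }
  }
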
http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
index 9f2f295..8bbf364 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/AvroCarbonWriter.java
@@ -156,15 +156,11 @@ public class AvroCarbonWriter extends CarbonWriter {
   /**
    * converts avro schema to carbon schema required by carbonWriter
    *
-   * @param avroSchemaString json formatted avro schema as string
+   * @param avroSchema avro schema
    * @return carbon sdk schema
    */
   public static org.apache.carbondata.sdk.file.Schema getCarbonSchemaFromAvroSchema(
-      String avroSchemaString) {
-    if (avroSchemaString == null) {
-      throw new UnsupportedOperationException("avro schema string cannot be null");
-    }
-    Schema avroSchema = new Schema.Parser().parse(avroSchemaString);
+      Schema avroSchema) {
     Field[] carbonField = new Field[avroSchema.getFields().size()];
     int i = 0;
     for (Schema.Field avroField : avroSchema.getFields()) {

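A quick sketch of the changed helper: callers now parse the Avro schema themselves and pass the object instead of handing over a JSON string. Here avroSchemaString is assumed to hold an Avro record schema in JSON form:

  // caller parses the Avro schema first
  org.apache.avro.Schema avroSchema = new org.apache.avro.Schema.Parser().parse(avroSchemaString);
  // helper now accepts the parsed org.apache.avro.Schema directly
  org.apache.carbondata.sdk.file.Schema carbonSchema =
      AvroCarbonWriter.getCarbonSchemaFromAvroSchema(avroSchema);
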
http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index 585975f..bf99e05 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -67,18 +67,6 @@ public class CarbonWriterBuilder {
   private String taskNo;
 
   /**
-   * prepares the builder with the schema provided
-   * @param schema is instance of Schema
-   * This method must be called when building CarbonWriterBuilder
-   * @return updated CarbonWriterBuilder
-   */
-  public CarbonWriterBuilder withSchema(Schema schema) {
-    Objects.requireNonNull(schema, "schema should not be null");
-    this.schema = schema;
-    return this;
-  }
-
-  /**
    * Sets the output path of the writer builder
    * @param path is the absolute path where output files are written
    * This method must be called when building CarbonWriterBuilder
@@ -310,24 +298,30 @@ public class CarbonWriterBuilder {
 
   /**
    * Build a {@link CarbonWriter}, which accepts row in CSV format
+   * @param schema carbon Schema object {org.apache.carbondata.sdk.file.Schema}
    * @return CSVCarbonWriter
    * @throws IOException
    * @throws InvalidLoadOptionException
    */
-  public CarbonWriter buildWriterForCSVInput() throws IOException, InvalidLoadOptionException {
+  public CarbonWriter buildWriterForCSVInput(Schema schema)
+      throws IOException, InvalidLoadOptionException {
     Objects.requireNonNull(schema, "schema should not be null");
     Objects.requireNonNull(path, "path should not be null");
+    this.schema = schema;
     CarbonLoadModel loadModel = createLoadModel();
     return new CSVCarbonWriter(loadModel);
   }
 
   /**
    * Build a {@link CarbonWriter}, which accepts Avro object
+   * @param avroSchema avro Schema object {org.apache.avro.Schema}
    * @return AvroCarbonWriter
    * @throws IOException
    * @throws InvalidLoadOptionException
    */
-  public CarbonWriter buildWriterForAvroInput() throws IOException, InvalidLoadOptionException {
+  public CarbonWriter buildWriterForAvroInput(org.apache.avro.Schema avroSchema)
+      throws IOException, InvalidLoadOptionException {
+    this.schema = AvroCarbonWriter.getCarbonSchemaFromAvroSchema(avroSchema);
     Objects.requireNonNull(schema, "schema should not be null");
     Objects.requireNonNull(path, "path should not be null");
     CarbonLoadModel loadModel = createLoadModel();
@@ -537,4 +531,4 @@ public class CarbonWriterBuilder {
     setCsvHeader(build);
     return build;
   }
-}
\ No newline at end of file
+}

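Net effect of this file: withSchema() is removed and the schema is handed to the build call instead. A rough before/after sketch for callers, assuming path, schema (org.apache.carbondata.sdk.file.Schema) and avroSchema (org.apache.avro.Schema) are already in scope; nothing here is literally part of the patch:

  // before: CarbonWriter.builder().withSchema(schema).outputPath(path).buildWriterForCSVInput();
  // after:
  CarbonWriter csvWriter = CarbonWriter.builder()
      .outputPath(path)
      .buildWriterForCSVInput(schema);          // carbon Schema passed at build time

  // before: CarbonWriter.builder().withSchema(AvroCarbonWriter.getCarbonSchemaFromAvroSchema(...))...
  // after: pass the parsed Avro schema, the carbon schema is derived internally
  CarbonWriter avroWriter = CarbonWriter.builder()
      .outputPath(path)
      .buildWriterForAvroInput(avroSchema);     // org.apache.avro.Schema
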
http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
index 104c6e4..b70e74d 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/AvroCarbonWriterTest.java
@@ -73,10 +73,9 @@ public class AvroCarbonWriterTest {
 
     try {
       CarbonWriter writer = CarbonWriter.builder()
-          .withSchema(AvroCarbonWriter.getCarbonSchemaFromAvroSchema(avroSchema))
           .outputPath(path)
           .isTransactionalTable(true)
-          .buildWriterForAvroInput();
+          .buildWriterForAvroInput(new Schema.Parser().parse(avroSchema));
 
       for (int i = 0; i < 100; i++) {
         writer.write(record);
@@ -144,10 +143,9 @@ public class AvroCarbonWriterTest {
 
     try {
       CarbonWriter writer = CarbonWriter.builder()
-          .withSchema(AvroCarbonWriter.getCarbonSchemaFromAvroSchema(avroSchema))
           .outputPath(path)
           .isTransactionalTable(true)
-          .buildWriterForAvroInput();
+          .buildWriterForAvroInput(new Schema.Parser().parse(avroSchema));
 
       for (int i = 0; i < 100; i++) {
         writer.write(record);
@@ -239,10 +237,9 @@ public class AvroCarbonWriterTest {
 
     try {
       CarbonWriter writer = CarbonWriter.builder()
-          .withSchema(AvroCarbonWriter.getCarbonSchemaFromAvroSchema(mySchema))
           .outputPath(path)
           .isTransactionalTable(true)
-          .buildWriterForAvroInput();
+          .buildWriterForAvroInput(nn);
 
       for (int i = 0; i < 100; i++) {
         writer.write(record);
@@ -303,10 +300,9 @@ public class AvroCarbonWriterTest {
 
     try {
       CarbonWriter writer = CarbonWriter.builder()
-          .withSchema(AvroCarbonWriter.getCarbonSchemaFromAvroSchema(mySchema))
           .outputPath(path)
           .isTransactionalTable(true)
-          .buildWriterForAvroInput();
+          .buildWriterForAvroInput(nn);
 
       for (int i = 0; i < 100; i++) {
         writer.write(record);
@@ -343,10 +339,9 @@ public class AvroCarbonWriterTest {
 
     try {
       CarbonWriter writer = CarbonWriter.builder()
-          .withSchema(AvroCarbonWriter.getCarbonSchemaFromAvroSchema(mySchema))
           .outputPath(path)
           .isTransactionalTable(true).sortBy(sortColumns)
-          .buildWriterForAvroInput();
+          .buildWriterForAvroInput(nn);
 
       for (int i = 0; i < 100; i++) {
         writer.write(record);

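For completeness, a minimal end-to-end Avro write against the new API. GenericRecordBuilder is used here instead of the JsonAvroConverter seen in the Scala tests, and the schema, path and values are illustrative only:

  import java.io.IOException;

  import org.apache.avro.Schema;
  import org.apache.avro.generic.GenericData;
  import org.apache.avro.generic.GenericRecordBuilder;

  import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
  import org.apache.carbondata.sdk.file.CarbonWriter;

  public class AvroWriterSketch {
    public static void main(String[] args) throws IOException, InvalidLoadOptionException {
      String avroSchemaJson =
          "{\"type\":\"record\",\"name\":\"person\",\"fields\":["
              + "{\"name\":\"name\",\"type\":\"string\"},"
              + "{\"name\":\"age\",\"type\":\"int\"}]}";
      Schema nn = new Schema.Parser().parse(avroSchemaJson);

      // build one Avro record to write repeatedly
      GenericData.Record record = new GenericRecordBuilder(nn)
          .set("name", "bob")
          .set("age", 10)
          .build();

      CarbonWriter writer = CarbonWriter.builder()
          .outputPath("./target/avroWriterSketch")   // illustrative output path
          .isTransactionalTable(false)
          .buildWriterForAvroInput(nn);
      for (int i = 0; i < 100; i++) {
        writer.write(record);
      }
      writer.close();
    }
  }
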
http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
index fc283b6..1eed47b 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVCarbonWriterTest.java
@@ -98,11 +98,10 @@ public class CSVCarbonWriterTest {
 
     try {
       CarbonWriterBuilder builder = CarbonWriter.builder()
-          .withSchema(new Schema(fields))
           .isTransactionalTable(true)
           .outputPath(path);
 
-      CarbonWriter writer = builder.buildWriterForCSVInput();
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
 
       for (int i = 0; i < 100; i++) {
         String[] row = new String[]{
@@ -225,7 +224,7 @@ public class CSVCarbonWriterTest {
     fields[1] = new Field("age", DataTypes.INT);
     try {
       carbonWriter = CarbonWriter.builder().isTransactionalTable(false).
-          outputPath(path).withSchema(new Schema(fields)).buildWriterForCSVInput();
+          outputPath(path).buildWriterForCSVInput(new Schema(fields));
     } catch (InvalidLoadOptionException e) {
       e.printStackTrace();
       Assert.assertTrue(false);
@@ -245,7 +244,7 @@ public class CSVCarbonWriterTest {
     fields[1] = new Field("age", DataTypes.INT);
     try {
       carbonWriter = CarbonWriter.builder().isTransactionalTable(false).
-          outputPath(path).withSchema(new Schema(fields)).buildWriterForCSVInput();
+          outputPath(path).buildWriterForCSVInput(new Schema(fields));
     } catch (InvalidLoadOptionException e) {
       e.printStackTrace();
       Assert.assertTrue(false);
@@ -268,11 +267,10 @@ public class CSVCarbonWriterTest {
 
     try {
       CarbonWriterBuilder builder = CarbonWriter.builder()
-          .withSchema(new Schema(fields))
           .isTransactionalTable(true).taskNo(5)
           .outputPath(path);
 
-      CarbonWriter writer = builder.buildWriterForCSVInput();
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
 
       for (int i = 0; i < 2; i++) {
         String[] row = new String[]{

http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVNonTransactionalCarbonWriterTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVNonTransactionalCarbonWriterTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVNonTransactionalCarbonWriterTest.java
index 881b5a5..0393077 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVNonTransactionalCarbonWriterTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CSVNonTransactionalCarbonWriterTest.java
@@ -104,7 +104,6 @@ public class CSVNonTransactionalCarbonWriterTest {
       boolean persistSchema, int blockletSize, int blockSize) {
     try {
       CarbonWriterBuilder builder = CarbonWriter.builder()
-          .withSchema(schema)
           .isTransactionalTable(false)
           .uniqueIdentifier(System.currentTimeMillis())
           .taskNo(System.nanoTime())
@@ -122,7 +121,7 @@ public class CSVNonTransactionalCarbonWriterTest {
         builder = builder.withBlockSize(blockSize);
       }
 
-      CarbonWriter writer = builder.buildWriterForCSVInput();
+      CarbonWriter writer = builder.buildWriterForCSVInput(schema);
 
       for (int i = 0; i < rows; i++) {
         writer.write(new String[]{"robot" + (i % 10), String.valueOf(i), String.valueOf((double) i / 2)});
@@ -168,13 +167,12 @@ public class CSVNonTransactionalCarbonWriterTest {
 
     try {
       CarbonWriterBuilder builder = CarbonWriter.builder()
-          .withSchema(new Schema(fields))
           .uniqueIdentifier(System.currentTimeMillis())
           .isTransactionalTable(false)
           .taskNo(System.nanoTime())
           .outputPath(path);
 
-      CarbonWriter writer = builder.buildWriterForCSVInput();
+      CarbonWriter writer = builder.buildWriterForCSVInput(new Schema(fields));
 
       for (int i = 0; i < 100; i++) {
         String[] row = new String[]{

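The non-transactional knobs in one place, as a hedged sketch; uniqueIdentifier and taskNo mirror the values the test above passes for each writer, while the path and field names are illustrative:

  Field[] fields = new Field[2];
  fields[0] = new Field("name", DataTypes.STRING);
  fields[1] = new Field("age", DataTypes.INT);

  CarbonWriter writer = CarbonWriter.builder()
      .isTransactionalTable(false)                    // SDK output only: carbondata + carbonindex files
      .uniqueIdentifier(System.currentTimeMillis())
      .taskNo(System.nanoTime())
      .outputPath("./target/nonTransactionalSketch")  // illustrative path
      .buildWriterForCSVInput(new Schema(fields));
  writer.write(new String[]{"robot0", "1"});
  writer.close();
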
http://git-wip-us.apache.org/repos/asf/carbondata/blob/26eb2d0b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
index 97de1a0..eb406e2 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
@@ -63,7 +63,6 @@ public class TestUtil {
       boolean persistSchema, int blockletSize, int blockSize, boolean isTransactionalTable) {
     try {
       CarbonWriterBuilder builder = CarbonWriter.builder()
-          .withSchema(schema)
           .isTransactionalTable(isTransactionalTable)
           .outputPath(path);
       if (sortColumns != null) {
@@ -79,7 +78,7 @@ public class TestUtil {
         builder = builder.withBlockSize(blockSize);
       }
 
-      CarbonWriter writer = builder.buildWriterForCSVInput();
+      CarbonWriter writer = builder.buildWriterForCSVInput(schema);
 
       for (int i = 0; i < rows; i++) {
         writer.write(new String[]{"robot" + (i % 10), String.valueOf(i), String.valueOf((double) i / 2)});


[03/50] [abbrv] carbondata git commit: [CARBONDATA-2489] Coverity scan fixes

Posted by gv...@apache.org.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java
----------------------------------------------------------------------
diff --git a/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java b/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java
deleted file mode 100644
index 16d4d53..0000000
--- a/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java
+++ /dev/null
@@ -1,469 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.processing;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.nio.charset.Charset;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.UUID;
-
-import org.apache.carbondata.common.CarbonIterator;
-import org.apache.carbondata.core.cache.Cache;
-import org.apache.carbondata.core.cache.CacheProvider;
-import org.apache.carbondata.core.cache.CacheType;
-import org.apache.carbondata.core.cache.dictionary.Dictionary;
-import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastore.impl.FileFactory;
-import org.apache.carbondata.core.fileoperations.AtomicFileOperations;
-import org.apache.carbondata.core.fileoperations.AtomicFileOperationsImpl;
-import org.apache.carbondata.core.fileoperations.FileWriteOperation;
-import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
-import org.apache.carbondata.core.metadata.CarbonMetadata;
-import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
-import org.apache.carbondata.core.metadata.ColumnIdentifier;
-import org.apache.carbondata.core.metadata.converter.SchemaConverter;
-import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
-import org.apache.carbondata.core.metadata.datatype.DataTypes;
-import org.apache.carbondata.core.metadata.encoder.Encoding;
-import org.apache.carbondata.core.metadata.schema.SchemaEvolution;
-import org.apache.carbondata.core.metadata.schema.SchemaEvolutionEntry;
-import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import org.apache.carbondata.core.metadata.schema.table.TableInfo;
-import org.apache.carbondata.core.metadata.schema.table.TableSchema;
-import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
-import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
-import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
-import org.apache.carbondata.core.statusmanager.SegmentStatus;
-import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.path.CarbonTablePath;
-import org.apache.carbondata.core.writer.CarbonDictionaryWriter;
-import org.apache.carbondata.core.writer.CarbonDictionaryWriterImpl;
-import org.apache.carbondata.core.writer.ThriftWriter;
-import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortIndexWriter;
-import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortIndexWriterImpl;
-import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortInfo;
-import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortInfoPreparator;
-import org.apache.carbondata.processing.loading.DataLoadExecutor;
-import org.apache.carbondata.processing.loading.constants.DataLoadProcessorConstants;
-import org.apache.carbondata.processing.loading.csvinput.BlockDetails;
-import org.apache.carbondata.processing.loading.csvinput.CSVInputFormat;
-import org.apache.carbondata.processing.loading.csvinput.CSVRecordReaderIterator;
-import org.apache.carbondata.processing.loading.csvinput.StringArrayWritable;
-import org.apache.carbondata.processing.loading.model.CarbonDataLoadSchema;
-import org.apache.carbondata.processing.loading.model.CarbonLoadModel;
-import org.apache.carbondata.processing.util.TableOptionConstant;
-
-import com.google.gson.Gson;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapred.TaskAttemptID;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskType;
-import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
-
-/**
- * This class will create store file based on provided schema
- *
- */
-public class StoreCreator {
-
-  private static AbsoluteTableIdentifier identifier;
-  private static String storePath = "";
-  static {
-    try {
-      storePath = new File("target/store").getCanonicalPath();
-      String dbName = "testdb";
-      String tableName = "testtable";
-      identifier =
-          AbsoluteTableIdentifier.from(
-              storePath + "/testdb/testtable",
-              new CarbonTableIdentifier(dbName, tableName, UUID.randomUUID().toString()));
-    } catch (IOException ex) {
-
-    }
-  }
-
-  public static AbsoluteTableIdentifier getIdentifier() {
-    return identifier;
-  }
-
-  /**
-   * Create store without any restructure
-   */
-  public static void createCarbonStore() {
-    try {
-      String factFilePath = new File("../hadoop/src/test/resources/data.csv").getCanonicalPath();
-      File storeDir = new File(storePath);
-      CarbonUtil.deleteFoldersAndFiles(storeDir);
-      CarbonProperties.getInstance().addProperty(CarbonCommonConstants.STORE_LOCATION_HDFS,
-          storePath);
-
-      CarbonTable table = createTable();
-      writeDictionary(factFilePath, table);
-      CarbonDataLoadSchema schema = new CarbonDataLoadSchema(table);
-      CarbonLoadModel loadModel = new CarbonLoadModel();
-      loadModel.setCarbonDataLoadSchema(schema);
-      loadModel.setDatabaseName(identifier.getCarbonTableIdentifier().getDatabaseName());
-      loadModel.setTableName(identifier.getCarbonTableIdentifier().getTableName());
-      loadModel.setTableName(identifier.getCarbonTableIdentifier().getTableName());
-      loadModel.setCarbonTransactionalTable(true);
-      loadModel.setFactFilePath(factFilePath);
-      loadModel.setLoadMetadataDetails(new ArrayList<LoadMetadataDetails>());
-      loadModel.setTablePath(identifier.getTablePath());
-      loadModel.setDateFormat(null);
-      loadModel.setDefaultTimestampFormat(CarbonProperties.getInstance().getProperty(
-          CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
-          CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
-      loadModel.setDefaultDateFormat(CarbonProperties.getInstance().getProperty(
-          CarbonCommonConstants.CARBON_DATE_FORMAT,
-          CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT));
-      loadModel
-          .setSerializationNullFormat(
-              TableOptionConstant.SERIALIZATION_NULL_FORMAT.getName() + "," + "\\N");
-      loadModel
-          .setBadRecordsLoggerEnable(
-              TableOptionConstant.BAD_RECORDS_LOGGER_ENABLE.getName() + "," + "false");
-      loadModel
-          .setBadRecordsAction(
-              TableOptionConstant.BAD_RECORDS_ACTION.getName() + "," + "FORCE");
-      loadModel
-          .setIsEmptyDataBadRecord(
-              DataLoadProcessorConstants.IS_EMPTY_DATA_BAD_RECORD + "," + "false");
-      loadModel.setCsvHeader("ID,date,country,name,phonetype,serialname,salary");
-      loadModel.setCsvHeaderColumns(loadModel.getCsvHeader().split(","));
-      loadModel.setTaskNo("0");
-      loadModel.setSegmentId("0");
-      loadModel.setFactTimeStamp(System.currentTimeMillis());
-      loadModel.setMaxColumns("10");
-
-      loadData(loadModel, storePath);
-
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-
-  private static CarbonTable createTable() throws IOException {
-    TableInfo tableInfo = new TableInfo();
-    tableInfo.setDatabaseName(identifier.getCarbonTableIdentifier().getDatabaseName());
-    TableSchema tableSchema = new TableSchema();
-    tableSchema.setTableName(identifier.getCarbonTableIdentifier().getTableName());
-    List<ColumnSchema> columnSchemas = new ArrayList<ColumnSchema>();
-    ArrayList<Encoding> encodings = new ArrayList<>();
-    encodings.add(Encoding.DICTIONARY);
-    ColumnSchema id = new ColumnSchema();
-    id.setColumnName("ID");
-    id.setColumnar(true);
-    id.setDataType(DataTypes.INT);
-    id.setEncodingList(encodings);
-    id.setColumnUniqueId(UUID.randomUUID().toString());
-    id.setDimensionColumn(true);
-    id.setColumnGroup(1);
-    columnSchemas.add(id);
-
-    ColumnSchema date = new ColumnSchema();
-    date.setColumnName("date");
-    date.setColumnar(true);
-    date.setDataType(DataTypes.STRING);
-    date.setEncodingList(encodings);
-    date.setColumnUniqueId(UUID.randomUUID().toString());
-    date.setDimensionColumn(true);
-    date.setColumnGroup(2);
-    columnSchemas.add(date);
-
-    ColumnSchema country = new ColumnSchema();
-    country.setColumnName("country");
-    country.setColumnar(true);
-    country.setDataType(DataTypes.STRING);
-    country.setEncodingList(encodings);
-    country.setColumnUniqueId(UUID.randomUUID().toString());
-    country.setDimensionColumn(true);
-    country.setColumnGroup(3);
-    columnSchemas.add(country);
-
-    ColumnSchema name = new ColumnSchema();
-    name.setColumnName("name");
-    name.setColumnar(true);
-    name.setDataType(DataTypes.STRING);
-    name.setEncodingList(encodings);
-    name.setColumnUniqueId(UUID.randomUUID().toString());
-    name.setDimensionColumn(true);
-    name.setColumnGroup(4);
-    columnSchemas.add(name);
-
-    ColumnSchema phonetype = new ColumnSchema();
-    phonetype.setColumnName("phonetype");
-    phonetype.setColumnar(true);
-    phonetype.setDataType(DataTypes.STRING);
-    phonetype.setEncodingList(encodings);
-    phonetype.setColumnUniqueId(UUID.randomUUID().toString());
-    phonetype.setDimensionColumn(true);
-    phonetype.setColumnGroup(5);
-    columnSchemas.add(phonetype);
-
-    ColumnSchema serialname = new ColumnSchema();
-    serialname.setColumnName("serialname");
-    serialname.setColumnar(true);
-    serialname.setDataType(DataTypes.STRING);
-    serialname.setEncodingList(encodings);
-    serialname.setColumnUniqueId(UUID.randomUUID().toString());
-    serialname.setDimensionColumn(true);
-    serialname.setColumnGroup(6);
-    columnSchemas.add(serialname);
-
-    ColumnSchema salary = new ColumnSchema();
-    salary.setColumnName("salary");
-    salary.setColumnar(true);
-    salary.setDataType(DataTypes.INT);
-    salary.setEncodingList(new ArrayList<Encoding>());
-    salary.setColumnUniqueId(UUID.randomUUID().toString());
-    salary.setDimensionColumn(false);
-    salary.setColumnGroup(7);
-    columnSchemas.add(salary);
-
-    tableSchema.setListOfColumns(columnSchemas);
-    SchemaEvolution schemaEvol = new SchemaEvolution();
-    schemaEvol.setSchemaEvolutionEntryList(new ArrayList<SchemaEvolutionEntry>());
-    tableSchema.setSchemaEvalution(schemaEvol);
-    tableSchema.setTableId(UUID.randomUUID().toString());
-    tableInfo.setTableUniqueName(
-        identifier.getCarbonTableIdentifier().getTableUniqueName()
-    );
-    tableInfo.setLastUpdatedTime(System.currentTimeMillis());
-    tableInfo.setFactTable(tableSchema);
-    tableInfo.setTablePath(identifier.getTablePath());
-
-    String schemaFilePath = CarbonTablePath.getSchemaFilePath(identifier.getTablePath());
-    String schemaMetadataPath = CarbonTablePath.getFolderContainingFile(schemaFilePath);
-    CarbonMetadata.getInstance().loadTableMetadata(tableInfo);
-
-    SchemaConverter schemaConverter = new ThriftWrapperSchemaConverterImpl();
-    org.apache.carbondata.format.TableInfo thriftTableInfo = schemaConverter
-        .fromWrapperToExternalTableInfo(tableInfo, tableInfo.getDatabaseName(),
-            tableInfo.getFactTable().getTableName());
-    org.apache.carbondata.format.SchemaEvolutionEntry schemaEvolutionEntry =
-        new org.apache.carbondata.format.SchemaEvolutionEntry(tableInfo.getLastUpdatedTime());
-    thriftTableInfo.getFact_table().getSchema_evolution().getSchema_evolution_history()
-        .add(schemaEvolutionEntry);
-
-    FileFactory.FileType fileType = FileFactory.getFileType(schemaMetadataPath);
-    if (!FileFactory.isFileExist(schemaMetadataPath, fileType)) {
-      FileFactory.mkdirs(schemaMetadataPath, fileType);
-    }
-
-    ThriftWriter thriftWriter = new ThriftWriter(schemaFilePath, false);
-    thriftWriter.open();
-    thriftWriter.write(thriftTableInfo);
-    thriftWriter.close();
-    return CarbonMetadata.getInstance().getCarbonTable(tableInfo.getTableUniqueName());
-  }
-
-  private static void writeDictionary(String factFilePath, CarbonTable table) throws Exception {
-    BufferedReader reader = new BufferedReader(new FileReader(factFilePath));
-    String header = reader.readLine();
-    String[] split = header.split(",");
-    List<CarbonColumn> allCols = new ArrayList<CarbonColumn>();
-    List<CarbonDimension> dims = table.getDimensionByTableName(table.getTableName());
-    allCols.addAll(dims);
-    List<CarbonMeasure> msrs = table.getMeasureByTableName(table.getTableName());
-    allCols.addAll(msrs);
-    Set<String>[] set = new HashSet[dims.size()];
-    for (int i = 0; i < set.length; i++) {
-      set[i] = new HashSet<String>();
-    }
-    String line = reader.readLine();
-    while (line != null) {
-      String[] data = line.split(",");
-      for (int i = 0; i < set.length; i++) {
-        set[i].add(data[i]);
-      }
-      line = reader.readLine();
-    }
-
-    Cache dictCache = CacheProvider.getInstance()
-        .createCache(CacheType.REVERSE_DICTIONARY);
-    for (int i = 0; i < set.length; i++) {
-      ColumnIdentifier columnIdentifier =
-          new ColumnIdentifier(dims.get(i).getColumnId(), null, null);
-      DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier =
-          new DictionaryColumnUniqueIdentifier(table.getAbsoluteTableIdentifier(), columnIdentifier,
-              columnIdentifier.getDataType());
-      CarbonDictionaryWriter writer =
-          new CarbonDictionaryWriterImpl(dictionaryColumnUniqueIdentifier);
-      for (String value : set[i]) {
-        writer.write(value);
-      }
-      writer.close();
-      writer.commit();
-      Dictionary dict = (Dictionary) dictCache.get(
-          new DictionaryColumnUniqueIdentifier(identifier,
-        		  columnIdentifier, dims.get(i).getDataType()));
-      CarbonDictionarySortInfoPreparator preparator =
-          new CarbonDictionarySortInfoPreparator();
-      List<String> newDistinctValues = new ArrayList<String>();
-      CarbonDictionarySortInfo dictionarySortInfo =
-          preparator.getDictionarySortInfo(newDistinctValues, dict, dims.get(i).getDataType());
-      CarbonDictionarySortIndexWriter carbonDictionaryWriter =
-          new CarbonDictionarySortIndexWriterImpl(dictionaryColumnUniqueIdentifier);
-      try {
-        carbonDictionaryWriter.writeSortIndex(dictionarySortInfo.getSortIndex());
-        carbonDictionaryWriter.writeInvertedSortIndex(dictionarySortInfo.getSortIndexInverted());
-      } finally {
-        carbonDictionaryWriter.close();
-      }
-    }
-    reader.close();
-  }
-
-  /**
-   * Execute graph which will further load data
-   *
-   * @param loadModel
-   * @param storeLocation
-   * @throws Exception
-   */
-  public static void loadData(CarbonLoadModel loadModel, String storeLocation)
-      throws Exception {
-    new File(storeLocation).mkdirs();
-    String outPutLoc = storeLocation + "/etl";
-    String databaseName = loadModel.getDatabaseName();
-    String tableName = loadModel.getTableName();
-    String tempLocationKey = databaseName + '_' + tableName + "_1";
-    CarbonProperties.getInstance().addProperty(tempLocationKey, storeLocation);
-    CarbonProperties.getInstance().addProperty("store_output_location", outPutLoc);
-    CarbonProperties.getInstance().addProperty("send.signal.load", "false");
-    CarbonProperties.getInstance().addProperty("carbon.is.columnar.storage", "true");
-    CarbonProperties.getInstance().addProperty("carbon.dimension.split.value.in.columnar", "1");
-    CarbonProperties.getInstance().addProperty("carbon.is.fullyfilled.bits", "true");
-    CarbonProperties.getInstance().addProperty("is.int.based.indexer", "true");
-    CarbonProperties.getInstance().addProperty("aggregate.columnar.keyblock", "true");
-    CarbonProperties.getInstance().addProperty("is.compressed.keyblock", "false");
-    CarbonProperties.getInstance().addProperty("carbon.leaf.node.size", "120000");
-
-    String graphPath =
-        outPutLoc + File.separator + loadModel.getDatabaseName() + File.separator + tableName
-            + File.separator + 0 + File.separator + 1 + File.separator + tableName + ".ktr";
-    File path = new File(graphPath);
-    if (path.exists()) {
-      path.delete();
-    }
-
-    BlockDetails blockDetails = new BlockDetails(new Path(loadModel.getFactFilePath()),
-        0, new File(loadModel.getFactFilePath()).length(), new String[] {"localhost"});
-    Configuration configuration = new Configuration();
-    CSVInputFormat.setCommentCharacter(configuration, loadModel.getCommentChar());
-    CSVInputFormat.setCSVDelimiter(configuration, loadModel.getCsvDelimiter());
-    CSVInputFormat.setEscapeCharacter(configuration, loadModel.getEscapeChar());
-    CSVInputFormat.setHeaderExtractionEnabled(configuration, true);
-    CSVInputFormat.setQuoteCharacter(configuration, loadModel.getQuoteChar());
-    CSVInputFormat.setReadBufferSize(configuration, CarbonProperties.getInstance()
-        .getProperty(CarbonCommonConstants.CSV_READ_BUFFER_SIZE,
-            CarbonCommonConstants.CSV_READ_BUFFER_SIZE_DEFAULT));
-    CSVInputFormat.setMaxColumns(configuration, "10");
-    CSVInputFormat.setNumberOfColumns(configuration, "7");
-
-    TaskAttemptContextImpl hadoopAttemptContext = new TaskAttemptContextImpl(configuration, new TaskAttemptID("", 1, TaskType.MAP, 0, 0));
-    CSVInputFormat format = new CSVInputFormat();
-
-    RecordReader<NullWritable, StringArrayWritable> recordReader =
-        format.createRecordReader(blockDetails, hadoopAttemptContext);
-
-    CSVRecordReaderIterator readerIterator = new CSVRecordReaderIterator(recordReader, blockDetails, hadoopAttemptContext);
-    String[] storeLocationArray = new String[] {storeLocation + "/" + databaseName + "/" + tableName};
-    new DataLoadExecutor().execute(loadModel,
-        storeLocationArray,
-        new CarbonIterator[]{readerIterator});
-
-    writeLoadMetadata(loadModel.getCarbonDataLoadSchema(), loadModel.getTableName(), loadModel.getTableName(),
-        new ArrayList<LoadMetadataDetails>());
-  }
-
-  public static void writeLoadMetadata(CarbonDataLoadSchema schema, String databaseName,
-      String tableName, List<LoadMetadataDetails> listOfLoadFolderDetails) throws IOException {
-    LoadMetadataDetails loadMetadataDetails = new LoadMetadataDetails();
-    loadMetadataDetails.setLoadEndTime(System.currentTimeMillis());
-    loadMetadataDetails.setSegmentStatus(SegmentStatus.SUCCESS);
-    loadMetadataDetails.setLoadName(String.valueOf(0));
-    loadMetadataDetails.setLoadStartTime(loadMetadataDetails.getTimeStamp(readCurrentTime()));
-    listOfLoadFolderDetails.add(loadMetadataDetails);
-
-    String dataLoadLocation = schema.getCarbonTable().getMetadataPath() + File.separator
-        + CarbonTablePath.TABLE_STATUS_FILE;
-
-    DataOutputStream dataOutputStream;
-    Gson gsonObjectToWrite = new Gson();
-    BufferedWriter brWriter = null;
-
-    AtomicFileOperations writeOperation =
-        new AtomicFileOperationsImpl(dataLoadLocation, FileFactory.getFileType(dataLoadLocation));
-
-    try {
-
-      dataOutputStream = writeOperation.openForWrite(FileWriteOperation.OVERWRITE);
-      brWriter = new BufferedWriter(new OutputStreamWriter(dataOutputStream,
-              Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET)));
-
-      String metadataInstance = gsonObjectToWrite.toJson(listOfLoadFolderDetails.toArray());
-      brWriter.write(metadataInstance);
-    } catch (Exception ex) {
-      throw ex;
-    } finally {
-      try {
-        if (null != brWriter) {
-          brWriter.flush();
-        }
-      } catch (Exception e) {
-        throw e;
-
-      }
-      CarbonUtil.closeStreams(brWriter);
-
-    }
-    writeOperation.close();
-
-  }
-
-  public static String readCurrentTime() {
-    SimpleDateFormat sdf = new SimpleDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP_MILLIS);
-    String date = null;
-
-    date = sdf.format(new Date());
-
-    return date;
-  }
-
-  public static void main(String[] args) {
-    StoreCreator.createCarbonStore();
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index d15e548..946ea0f 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -165,6 +165,7 @@ public class CarbonReaderBuilder {
           new TaskAttemptContextImpl(job.getConfiguration(), new TaskAttemptID());
       RecordReader reader = format.createRecordReader(split, attempt);
       reader.initialize(split, attempt);
+      reader.close();
       readers.add(reader);
     }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java b/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
index 394ffea..9e338e7 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/store/LocalCarbonStore.java
@@ -24,6 +24,8 @@ import java.util.List;
 import java.util.Objects;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datastore.row.CarbonRow;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.scan.expression.Expression;
@@ -49,13 +51,15 @@ import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
 @InterfaceAudience.Internal
 class LocalCarbonStore extends MetaCachedCarbonStore {
 
+  private static final LogService LOGGER =
+      LogServiceFactory.getLogService(LocalCarbonStore.class.getName());
+
   @Override
   public Iterator<CarbonRow> scan(String path, String[] projectColumns) throws IOException {
     return scan(path, projectColumns, null);
   }
 
-  @Override
-  public Iterator<CarbonRow> scan(String path, String[] projectColumns, Expression filter)
+  @Override public Iterator<CarbonRow> scan(String path, String[] projectColumns, Expression filter)
       throws IOException {
     Objects.requireNonNull(path);
     Objects.requireNonNull(projectColumns);
@@ -73,8 +77,8 @@ class LocalCarbonStore extends MetaCachedCarbonStore {
     CarbonInputFormat.setTableName(job.getConfiguration(), table.getTableName());
     CarbonInputFormat.setDatabaseName(job.getConfiguration(), table.getDatabaseName());
     CarbonInputFormat.setCarbonReadSupport(job.getConfiguration(), CarbonRowReadSupport.class);
-    CarbonInputFormat.setColumnProjection(
-        job.getConfiguration(), new CarbonProjection(projectColumns));
+    CarbonInputFormat
+        .setColumnProjection(job.getConfiguration(), new CarbonProjection(projectColumns));
     if (filter != null) {
       CarbonInputFormat.setFilterPredicates(job.getConfiguration(), filter);
     }
@@ -84,6 +88,8 @@ class LocalCarbonStore extends MetaCachedCarbonStore {
 
     List<RecordReader<Void, Object>> readers = new ArrayList<>(splits.size());
 
+    List<CarbonRow> rows = new ArrayList<>();
+
     try {
       for (InputSplit split : splits) {
         TaskAttemptContextImpl attempt =
@@ -92,19 +98,27 @@ class LocalCarbonStore extends MetaCachedCarbonStore {
         reader.initialize(split, attempt);
         readers.add(reader);
       }
-    } catch (InterruptedException e) {
-      throw new IOException(e);
-    }
 
-    List<CarbonRow> rows = new ArrayList<>();
-    try {
       for (RecordReader<Void, Object> reader : readers) {
         while (reader.nextKeyValue()) {
-          rows.add((CarbonRow)reader.getCurrentValue());
+          rows.add((CarbonRow) reader.getCurrentValue());
+        }
+        try {
+          reader.close();
+        } catch (IOException e) {
+          LOGGER.error(e);
         }
       }
     } catch (InterruptedException e) {
       throw new IOException(e);
+    } finally {
+      for (RecordReader<Void, Object> reader : readers) {
+        try {
+          reader.close();
+        } catch (IOException e) {
+          LOGGER.error(e);
+        }
+      }
     }
     return rows.iterator();
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
----------------------------------------------------------------------
diff --git a/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java b/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
index 445b292..9727352 100644
--- a/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
+++ b/store/search/src/main/java/org/apache/carbondata/store/worker/SearchRequestHandler.java
@@ -128,11 +128,12 @@ public class SearchRequestHandler {
     // In search mode, reader will read multiple blocks by using a thread pool
     CarbonRecordReader<CarbonRow> reader =
         new CarbonRecordReader<>(queryModel, new CarbonRowReadSupport());
-    reader.initialize(mbSplit, null);
 
     // read all rows by the reader
     List<CarbonRow> rows = new LinkedList<>();
     try {
+      reader.initialize(mbSplit, null);
+
       // loop to read required number of rows.
       // By default, if user does not specify the limit value, limit is Long.MaxValue
       while (reader.nextKeyValue() && rowCount < limit) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordReader.java
----------------------------------------------------------------------
diff --git a/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordReader.java b/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordReader.java
index cbf93b8..c4b501d 100644
--- a/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordReader.java
+++ b/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordReader.java
@@ -414,9 +414,6 @@ public class CarbonStreamRecordReader extends RecordReader<Void, Object> {
 
   private boolean isScanRequired(BlockletHeader header) {
     // TODO require to implement min-max index
-    if (null == filter) {
-      return true;
-    }
     return true;
   }
 


[18/50] [abbrv] carbondata git commit: [CARBONDATA-2440] doc updated to set the property for SDK user

Posted by gv...@apache.org.
[CARBONDATA-2440] doc updated to set the property for SDK user

doc updated to set the property for SDK user

This closes #2274


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/e1ef85ac
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/e1ef85ac
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/e1ef85ac

Branch: refs/heads/spark-2.3
Commit: e1ef85ac749e3fade0942d746c9e18533aa6f620
Parents: 07a77fa
Author: rahulforallp <ra...@knoldus.in>
Authored: Fri May 11 16:10:49 2018 +0530
Committer: chenliang613 <ch...@huawei.com>
Committed: Tue May 22 19:02:13 2018 +0800

----------------------------------------------------------------------
 docs/sdk-writer-guide.md | 62 ++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 58 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/e1ef85ac/docs/sdk-writer-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-writer-guide.md b/docs/sdk-writer-guide.md
index 682b27a..3d9a3de 100644
--- a/docs/sdk-writer-guide.md
+++ b/docs/sdk-writer-guide.md
@@ -13,25 +13,33 @@ These SDK writer output contains just a carbondata and carbonindex files. No met
  
  import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
  import org.apache.carbondata.core.metadata.datatype.DataTypes;
+ import org.apache.carbondata.core.util.CarbonProperties;
  import org.apache.carbondata.sdk.file.CarbonWriter;
  import org.apache.carbondata.sdk.file.CarbonWriterBuilder;
  import org.apache.carbondata.sdk.file.Field;
  import org.apache.carbondata.sdk.file.Schema;
  
  public class TestSdk {
- 
+
+   // pass "true" or "false" as the first argument to enable or disable offheap memory
    public static void main(String[] args) throws IOException, InvalidLoadOptionException {
-     testSdkWriter();
+     if (args.length > 0 && args[0] != null) {
+       testSdkWriter(args[0]);
+     } else {
+       testSdkWriter("true");
+     }
    }
  
-   public static void testSdkWriter() throws IOException, InvalidLoadOptionException {
-     String path = "/home/root1/Documents/ab/temp";
+   public static void testSdkWriter(String enableOffheap) throws IOException, InvalidLoadOptionException {
+     String path = "./target/testCSVSdkWriter";
  
      Field[] fields = new Field[2];
      fields[0] = new Field("name", DataTypes.STRING);
      fields[1] = new Field("age", DataTypes.INT);
  
      Schema schema = new Schema(fields);
+
+     CarbonProperties.getInstance().addProperty("enable.offheap.sort", enableOffheap);
  
      CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path);
  
@@ -334,6 +342,52 @@ public Schema(Field[] fields);
 public static Schema parseJson(String json);
 ```
 
+### Class org.apache.carbondata.core.util.CarbonProperties
+
+```
+/**
+* This method returns the instance of the CarbonProperties class
+*
+* @return carbon properties instance
+*/
+public static CarbonProperties getInstance();
+```
+
+```
+/**
+* This method will be used to add a new property
+*
+* @param key is a property name to set for carbon.
+* @param value is the value to be set for the property.
+* @return CarbonProperties object
+*/
+public CarbonProperties addProperty(String key, String value);
+```
+
+```
+/**
+* This method will be used to get the property value. If property is not
+* present, then it will return the default value.
+*
+* @param key is a property name to get user specified value.
+* @return properties value for corresponding key. If not set, then returns null.
+*/
+public String getProperty(String key);
+```
+
+```
+/**
+* This method will be used to get the property value. If the property is not
+* present, then it will return the given default value.
+*
+* @param key is a property name to get the user specified value.
+* @param defaultValue value to be returned if the corresponding key is not set.
+* @return properties value for corresponding key. If not set, then returns specified defaultValue.
+*/
+public String getProperty(String key, String defaultValue);
+```
+Reference : [list of carbon properties](http://carbondata.apache.org/configuration-parameters.html)
+
 ### Class org.apache.carbondata.sdk.file.AvroCarbonWriter
 ```
 /**

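As a hedged usage note for the guide above: the property can also be read back through the defaulted getter. The property name comes from the guide; the values and fallback chosen here are illustrative:

  import org.apache.carbondata.core.util.CarbonProperties;

  CarbonProperties props = CarbonProperties.getInstance();
  props.addProperty("enable.offheap.sort", "false");                 // disable offheap sort for the SDK writer
  String offheap = props.getProperty("enable.offheap.sort", "true"); // falls back to "true" only if the key is unset
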

[27/50] [abbrv] carbondata git commit: [CARBONDATA-2499][Test] Validate the visible/invisible status of datamap

Posted by gv...@apache.org.
[CARBONDATA-2499][Test] Validate the visible/invisible status of datamap

This closes #2325


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/1b6ce8cd
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/1b6ce8cd
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/1b6ce8cd

Branch: refs/heads/spark-2.3
Commit: 1b6ce8cdcd95c7502098b46608c51ab1d0cb3689
Parents: ddf3e85
Author: xubo245 <xu...@huawei.com>
Authored: Mon May 21 15:47:24 2018 +0800
Committer: QiangCai <qi...@qq.com>
Committed: Mon May 28 21:23:50 2018 +0800

----------------------------------------------------------------------
 .../testsuite/datamap/CGDataMapTestCase.scala   | 71 ++++++++++++++----
 .../testsuite/datamap/FGDataMapTestCase.scala   | 78 +++++++++++++++++++-
 .../DataLoadFailAllTypeSortTest.scala           |  4 -
 3 files changed, 130 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/1b6ce8cd/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
index 848acde..b5c3df1 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/CGDataMapTestCase.scala
@@ -22,7 +22,6 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
 import com.sun.xml.internal.messaging.saaj.util.ByteOutputStream
-import org.apache.hadoop.fs.Path
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
@@ -401,30 +400,70 @@ class CGDataMapTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"DROP TABLE IF EXISTS $tableName")
     sql(
       s"""
-        | CREATE TABLE $tableName(id INT, name STRING, city STRING, age INT)
-        | STORED BY 'org.apache.carbondata.format'
-        | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+         | CREATE TABLE $tableName(id INT, name STRING, city STRING, age INT)
+         | STORED BY 'org.apache.carbondata.format'
+         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
       """.stripMargin)
     // register datamap writer
-    sql(s"create datamap $dataMapName1 on table $tableName using '${classOf[CGDataMapFactory].getName}' DMPROPERTIES('index_columns'='name')")
-    sql(s"create datamap $dataMapName2 on table $tableName using '${classOf[CGDataMapFactory].getName}' DMPROPERTIES('index_columns'='city')")
+    sql(
+      s"""
+         | CREATE DATAMAP $dataMapName1
+         | ON TABLE $tableName
+         | USING '${classOf[CGDataMapFactory].getName}'
+         | DMPROPERTIES('index_columns'='name')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP $dataMapName2
+         | ON TABLE $tableName
+         | USING '${classOf[CGDataMapFactory].getName}'
+         | DMPROPERTIES('index_columns'='city')
+       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE $tableName OPTIONS('header'='false')")
+    val df1 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    assert(df1(0).getString(0).contains("CG DataMap"))
+    assert(df1(0).getString(0).contains(dataMapName1))
+    val e11 = intercept[Exception] {
+      assert(df1(0).getString(0).contains(dataMapName2))
+    }
+    assert(e11.getMessage.contains("did not contain \"" + dataMapName2))
 
     // make datamap1 invisible
-    sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = false")
-    checkAnswer(sql(s"select * from $tableName where name='n502670' and city='c2670'"),
-      sql("select * from normal_test where name='n502670' and city='c2670'"))
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = false")
+    val df2 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    val e = intercept[Exception] {
+      assert(df2(0).getString(0).contains(dataMapName1))
+    }
+    assert(e.getMessage.contains("did not contain \"" + dataMapName1))
+    assert(df2(0).getString(0).contains(dataMapName2))
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
 
     // also make datamap2 invisible
-    sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName2 = false")
-    checkAnswer(sql(s"select * from $tableName where name='n502670' and city='c2670'"),
-      sql("select * from normal_test where name='n502670' and city='c2670'"))
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName2 = false")
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
+    val df3 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    val e31 = intercept[Exception] {
+      assert(df3(0).getString(0).contains(dataMapName1))
+    }
+    assert(e31.getMessage.contains("did not contain \"" + dataMapName1))
+    val e32 = intercept[Exception] {
+      assert(df3(0).getString(0).contains(dataMapName2))
+    }
+    assert(e32.getMessage.contains("did not contain \"" + dataMapName2))
 
     // make datamap1,datamap2 visible
-    sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
-    sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
-    checkAnswer(sql(s"select * from $tableName where name='n502670' and city='c2670'"),
-      sql("select * from normal_test where name='n502670' and city='c2670'"))
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
+    val df4 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    assert(df4(0).getString(0).contains(dataMapName1))
+    val e41 = intercept[Exception] {
+      assert(df3(0).getString(0).contains(dataMapName2))
+    }
+    assert(e41.getMessage.contains("did not contain \"" + dataMapName2))
   }
 
   test("test datamap storage in system folder") {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/1b6ce8cd/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
index e2642ff..2d666c3 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
@@ -22,12 +22,10 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
 import com.sun.xml.internal.messaging.saaj.util.ByteOutputStream
+import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-import org.apache.carbondata.core.datamap.{DataMapDistributable, DataMapMeta}
-import org.apache.carbondata.core.datamap.Segment
-import org.apache.carbondata.core.datamap.dev.{DataMapModel, DataMapBuilder, DataMapWriter}
 import org.apache.carbondata.core.datamap.{DataMapDistributable, DataMapMeta, Segment}
 import org.apache.carbondata.core.datamap.dev.{DataMapModel, DataMapBuilder, DataMapWriter}
 import org.apache.carbondata.core.datamap.dev.fgdatamap.{FineGrainBlocklet, FineGrainDataMap, FineGrainDataMapFactory}
@@ -488,9 +486,83 @@ class FGDataMapTestCase extends QueryTest with BeforeAndAfterAll {
       sql("select * from normal_test where name='n502670' and city='c2670'"))
   }
 
+  test("test invisible datamap during query") {
+    val tableName = "datamap_testFG"
+    val dataMapName1 = "datamap1"
+    val dataMapName2 = "datamap2"
+    sql(s"DROP TABLE IF EXISTS $tableName")
+    sql(
+      s"""
+         | CREATE TABLE $tableName(id INT, name STRING, city STRING, age INT)
+         | STORED BY 'org.apache.carbondata.format'
+         | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
+      """.stripMargin)
+    // register datamap writer
+    sql(
+      s"""
+         | CREATE DATAMAP $dataMapName1
+         | ON TABLE $tableName
+         | USING '${classOf[FGDataMapFactory].getName}'
+         | DMPROPERTIES('index_columns'='name')
+      """.stripMargin)
+    sql(
+      s"""
+         | CREATE DATAMAP $dataMapName2
+         | ON TABLE $tableName
+         | USING '${classOf[FGDataMapFactory].getName}'
+         | DMPROPERTIES('index_columns'='city')
+       """.stripMargin)
+    sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE $tableName OPTIONS('header'='false')")
+    val df1 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    assert(df1(0).getString(0).contains("FG DataMap"))
+    assert(df1(0).getString(0).contains(dataMapName1))
+    val e11 = intercept[Exception] {
+      assert(df1(0).getString(0).contains(dataMapName2))
+    }
+    assert(e11.getMessage.contains("did not contain \"" + dataMapName2))
+
+    // make datamap1 invisible
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = false")
+    val df2 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    val e = intercept[Exception] {
+      assert(df2(0).getString(0).contains(dataMapName1))
+    }
+    assert(e.getMessage.contains("did not contain \"" + dataMapName1))
+    assert(df2(0).getString(0).contains(dataMapName2))
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
+
+    // also make datamap2 invisible
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName2 = false")
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
+    val df3 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    val e31 = intercept[Exception] {
+      assert(df3(0).getString(0).contains(dataMapName1))
+    }
+    assert(e31.getMessage.contains("did not contain \"" + dataMapName1))
+    val e32 = intercept[Exception] {
+      assert(df3(0).getString(0).contains(dataMapName2))
+    }
+    assert(e32.getMessage.contains("did not contain \"" + dataMapName2))
+
+    // make datamap1,datamap2 visible
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
+    sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
+    checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
+      sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
+    val df4 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
+    assert(df4(0).getString(0).contains(dataMapName1))
+    val e41 = intercept[Exception] {
+      assert(df3(0).getString(0).contains(dataMapName2))
+    }
+    assert(e41.getMessage.contains("did not contain \"" + dataMapName2))
+  }
+
   override protected def afterAll(): Unit = {
     CompactionSupportGlobalSortBigFileTest.deleteFile(file2)
     sql("DROP TABLE IF EXISTS normal_test")
     sql("DROP TABLE IF EXISTS datamap_test")
+    sql("DROP TABLE IF EXISTS datamap_testFG")
   }
 }
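
The visibility switch exercised in the test above is a session property keyed by database, table and datamap name. A minimal standalone sketch of how such a key is composed (the class below and the literal prefix value are assumptions for illustration; the real prefix is defined by CarbonCommonConstants.CARBON_DATAMAP_VISIBLE):

    public class DataMapVisibilityKey {
      // assumed literal; the actual value comes from CarbonCommonConstants.CARBON_DATAMAP_VISIBLE
      private static final String CARBON_DATAMAP_VISIBLE = "carbon.datamap.visible.";

      // key layout used by the tests: <prefix><database>.<table>.<datamap>
      static String key(String db, String table, String dataMap) {
        return CARBON_DATAMAP_VISIBLE + db + "." + table + "." + dataMap;
      }

      public static void main(String[] args) {
        // e.g. SET carbon.datamap.visible.default.datamap_testFG.datamap1 = false
        System.out.println("SET " + key("default", "datamap_testFG", "datamap1") + " = false");
      }
    }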

http://git-wip-us.apache.org/repos/asf/carbondata/blob/1b6ce8cd/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
index 121150c..a7dceb4 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.carbondata
 
-import java.io.File
-
 import org.apache.spark.sql.common.util.Spark2QueryTest
 import org.apache.spark.sql.hive.HiveContext
 import org.scalatest.BeforeAndAfterAll
@@ -28,8 +26,6 @@ import org.apache.carbondata.core.util.CarbonProperties
 
 /**
  * Test Class for detailed query on timestamp datatypes
- *
- *
  */
 class DataLoadFailAllTypeSortTest extends Spark2QueryTest with BeforeAndAfterAll {
   var hiveContext: HiveContext = _


[05/50] [abbrv] carbondata git commit: [CARBONDATA-2489] Coverity scan fixes

Posted by gv...@apache.org.
http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java b/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
index d3b9b48..2115f82 100644
--- a/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/memory/UnsafeMemoryManager.java
@@ -159,7 +159,8 @@ public class UnsafeMemoryManager {
   /**
    * It tries to allocate memory of `size` bytes, keep retry until it allocates successfully.
    */
-  public static MemoryBlock allocateMemoryWithRetry(long taskId, long size) throws MemoryException {
+  public static MemoryBlock allocateMemoryWithRetry(long taskId, long size)
+      throws MemoryException {
     MemoryBlock baseBlock = null;
     int tries = 0;
     while (tries < 300) {
@@ -177,8 +178,7 @@ public class UnsafeMemoryManager {
       tries++;
     }
     if (baseBlock == null) {
-      LOGGER.error(" Memory Used : " + INSTANCE.memoryUsed + " Tasks running : "
-          + taskIdToMemoryBlockMap.keySet());
+      INSTANCE.printCurrentMemoryUsage();
       throw new MemoryException("Not enough memory");
     }
     return baseBlock;
@@ -187,4 +187,9 @@ public class UnsafeMemoryManager {
   public static boolean isOffHeap() {
     return offHeap;
   }
+
+  private synchronized void printCurrentMemoryUsage() {
+    LOGGER.error(
+        " Memory Used : " + memoryUsed + " Tasks running : " + taskIdToMemoryBlockMap.keySet());
+  }
 }
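
The hunk above keeps the bounded retry loop and moves the usage logging into a single synchronized helper. A minimal sketch of the same retry-then-fail pattern with a stand-in allocator (all names below are hypothetical, not CarbonData APIs):

    import java.util.Random;

    public class RetryingAllocator {
      private static final Random RANDOM = new Random();

      // stand-in for an allocator that can transiently fail and return null
      static byte[] tryAllocate(int size) {
        return RANDOM.nextInt(4) == 0 ? new byte[size] : null;
      }

      static byte[] allocateWithRetry(int size, int maxTries) {
        byte[] block = null;
        int tries = 0;
        while (tries < maxTries && block == null) {
          block = tryAllocate(size);
          tries++;
        }
        if (block == null) {
          // report usage in one place (cf. printCurrentMemoryUsage), then fail with a clear message
          throw new IllegalStateException("Not enough memory after " + maxTries + " tries");
        }
        return block;
      }

      public static void main(String[] args) {
        System.out.println(allocateWithRetry(1024, 300).length + " bytes allocated");
      }
    }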

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java
index 0f0f120..94a4e89 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/ArrayType.java
@@ -31,20 +31,32 @@ public class ArrayType extends DataType {
     return true;
   }
 
-  public DataType getElementType() {
-    return elementType;
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (!(obj instanceof ArrayType)) {
+      return false;
+    }
+    if (!this.getName().equalsIgnoreCase(((ArrayType) obj).getName())) {
+      return false;
+    }
+    return true;
   }
 
-  @Override public boolean equals(Object o) {
-    if (this == o) return true;
-    if (!(o instanceof ArrayType)) return false;
-
-    ArrayType arrayType = (ArrayType) o;
-
-    return elementType.equals(arrayType.elementType);
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + getName().hashCode();
+    return result;
   }
 
-  @Override public int hashCode() {
-    return elementType.hashCode();
+  public DataType getElementType() {
+    return elementType;
   }
 }
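
The rewritten equals/hashCode pairs in ArrayType, DecimalType and StructType follow the standard contract: objects that compare equal must produce the same hash code, otherwise hash-based collections lose them. A small self-contained sketch of that contract (hypothetical class; unlike the commit it case-normalizes the name so the hash stays consistent with the case-insensitive equals):

    import java.util.HashSet;
    import java.util.Set;

    final class TypeName {
      private final String name;

      TypeName(String name) {
        this.name = name;
      }

      @Override
      public boolean equals(Object obj) {
        if (this == obj) {
          return true;
        }
        if (!(obj instanceof TypeName)) {
          return false;
        }
        // case-insensitive comparison, mirroring getName().equalsIgnoreCase(...)
        return this.name.equalsIgnoreCase(((TypeName) obj).name);
      }

      @Override
      public int hashCode() {
        // derived from the same (normalized) field that equals compares
        return 31 + name.toLowerCase().hashCode();
      }

      public static void main(String[] args) {
        Set<TypeName> set = new HashSet<>();
        set.add(new TypeName("ARRAY"));
        // prints true only because hashCode is consistent with equals
        System.out.println(set.contains(new TypeName("array")));
      }
    }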

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java
index b2acd21..8536222 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DecimalType.java
@@ -29,6 +29,37 @@ public class DecimalType extends DataType {
     this.scale = scale;
   }
 
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (!(obj instanceof DecimalType)) {
+      return false;
+    }
+    if (!this.getName().equalsIgnoreCase(((DecimalType) obj).getName())) {
+      return false;
+    }
+    if (this.precision != ((DecimalType) obj).precision) {
+      return false;
+    }
+    if (this.scale != ((DecimalType) obj).scale) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + getName().hashCode();
+    return result;
+  }
+
   public int getPrecision() {
     return precision;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java
index 97cc4f0..90b7374 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/StructType.java
@@ -33,6 +33,31 @@ public class StructType extends DataType {
     return true;
   }
 
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null) {
+      return false;
+    }
+    if (!(obj instanceof StructType)) {
+      return false;
+    }
+    if (!this.getName().equalsIgnoreCase(((StructType) obj).getName())) {
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + getName().hashCode();
+    return result;
+  }
+
   public List<StructField> getFields() {
     return fields;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java
index 80c6a3a..fb4d8e3 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/ColumnSchema.java
@@ -33,15 +33,11 @@ import org.apache.carbondata.core.metadata.schema.table.Writable;
 import org.apache.carbondata.core.metadata.schema.table.WritableUtil;
 import org.apache.carbondata.core.preagg.TimeSeriesUDF;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
 /**
  * Store the information about the column meta data present the table
  */
 public class ColumnSchema implements Serializable, Writable {
 
-  private static final Log LOG = LogFactory.getLog(ColumnSchema.class);
   /**
    * serialization version
    */
@@ -334,8 +330,6 @@ public class ColumnSchema implements Serializable, Writable {
         return false;
       }
     } else if (!columnName.equals(other.columnName)) {
-      LOG.error("column name is " + columnName
-          + " but other column name is " + other.columnName);
       return false;
     }
     if (dataType == null) {
@@ -343,8 +337,6 @@ public class ColumnSchema implements Serializable, Writable {
         return false;
       }
     } else if (!dataType.equals(other.dataType)) {
-      LOG.error("column name is" + columnName + " data type is " + dataType
-          + " but other column data type is " + other.dataType);
       return false;
     }
     return true;
@@ -361,40 +353,16 @@ public class ColumnSchema implements Serializable, Writable {
       return false;
     }
     ColumnSchema other = (ColumnSchema) obj;
-    if (!columnUniqueId.equals(other.columnUniqueId)) {
-      LOG.error("Index file's column " + columnName + " columnUniqueId is " + columnUniqueId
-          + " but table's column columnUniqueId is " + other.columnUniqueId);
-      return false;
-    }
-    if (isDimensionColumn != other.isDimensionColumn) {
-      LOG.error("Index file's column " + columnName + " isDimensionColumn is " + isDimensionColumn
-          + " but table's column isDimensionColumn is " + other.isDimensionColumn);
-      return false;
-    }
-    if (scale != other.scale) {
-      LOG.error("Index file's column " + columnName + " scale is " + scale
-          + " but table's column scale is " + other.scale);
-      return false;
-    }
-    if (precision != other.precision) {
-      LOG.error("Index file's column " + columnName + " precision is " + precision
-          + " but table's column precision is " + other.precision);
-      return false;
-    }
-    if (isSortColumn != other.isSortColumn) {
-      LOG.error("Index file's column " + columnName + " isSortColumn is " + isSortColumn
-          + " but table's column isSortColumn is " + other.isSortColumn);
+    if (!columnUniqueId.equals(other.columnUniqueId) ||
+        (isDimensionColumn != other.isDimensionColumn) ||
+        (isSortColumn != other.isSortColumn)) {
       return false;
     }
     if (encodingList.size() != other.encodingList.size()) {
-      LOG.error("Index file's column " + columnName + " encoding size is " + encodingList.size()
-          + " but table's column encoding size is " + other.encodingList.size());
       return false;
     }
     for (int i = 0; i < encodingList.size(); i++) {
       if (encodingList.get(i).compareTo(other.encodingList.get(i)) != 0) {
-        LOG.error("Index file's column " + columnName + " encoding is " + encodingList.get(i)
-            + " but table's column encoding is " + other.encodingList.get(i));
         return false;
       }
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java b/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java
index df712de..ce0f15d 100644
--- a/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java
+++ b/core/src/main/java/org/apache/carbondata/core/preagg/TimeSeriesUDF.java
@@ -112,7 +112,7 @@ public class TimeSeriesUDF {
   /**
    * Below method will be used to initialize the thread local
    */
-  private synchronized void initialize() {
+  private void initialize() {
     if (calanderThreadLocal.get() == null) {
       calanderThreadLocal.set(new GregorianCalendar());
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
index 6439b36..4f41b92 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
@@ -111,6 +111,9 @@ public class CarbonDictionaryMetadataReaderImpl implements CarbonDictionaryMetad
       // get the thrift object for dictionary chunk
       dictionaryChunkMeta = (ColumnDictionaryChunkMeta) dictionaryMetadataFileReader.read();
     }
+    if (null == dictionaryChunkMeta) {
+      throw new IOException("Last dictionary chunk does not exist");
+    }
     // create a new instance of chunk meta wrapper using thrift object
     return getNewInstanceOfCarbonDictionaryColumnMetaChunk(dictionaryChunkMeta);
   }
@@ -128,6 +131,9 @@ public class CarbonDictionaryMetadataReaderImpl implements CarbonDictionaryMetad
         break;
       }
     }
+    if (null == dictionaryChunkMeta) {
+      throw new IOException("Matching dictionary chunk does not exist");
+    }
     // create a new instance of chunk meta wrapper using thrift object
     return getNewInstanceOfCarbonDictionaryColumnMetaChunk(dictionaryChunkMeta);
   }
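
Both hunks above replace a potential null dereference with an immediate IOException when no dictionary chunk matches. A small sketch of the same fail-fast pattern over a generic chunk list (hypothetical names, not the reader's real API):

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    public class LastMatchingChunk {
      // return the last entry matching the prefix; fail fast instead of returning null
      static String readLastMatching(List<String> chunks, String prefix) throws IOException {
        String match = null;
        for (String chunk : chunks) {
          if (chunk.startsWith(prefix)) {
            match = chunk;   // keep overwriting so the last match wins
          }
        }
        if (match == null) {
          throw new IOException("Matching dictionary chunk does not exist");
        }
        return match;
      }

      public static void main(String[] args) throws IOException {
        List<String> chunks = Arrays.asList("a:1", "b:2", "a:3");
        System.out.println(readLastMatching(chunks, "a:"));   // prints a:3
      }
    }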

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
index 1c440cf..33f0db7 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/RestructureBasedRawResultCollector.java
@@ -197,7 +197,7 @@ public class RestructureBasedRawResultCollector extends RawBasedResultCollector
         if (CarbonUtil.hasEncoding(actualQueryDimensions[i].getDimension().getEncoder(),
             Encoding.DICTIONARY)) {
           // if dimension exists then add the key array value else add the default value
-          if (dimensionInfo.getDimensionExists()[i]) {
+          if (dimensionInfo.getDimensionExists()[i] && null != keyArray && 0 != keyArray.length) {
             keyArrayWithNewAddedColumns[newKeyArrayIndex++] =
                 keyArray[existingColumnKeyArrayIndex++];
           } else {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
index 04669ab..aed472c 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeDetailQueryExecutor.java
@@ -70,9 +70,7 @@ public class SearchModeDetailQueryExecutor extends AbstractQueryExecutor<Object>
   public CarbonIterator<Object> execute(QueryModel queryModel)
       throws QueryExecutionException, IOException {
     List<BlockExecutionInfo> blockExecutionInfoList = getBlockExecutionInfos(queryModel);
-    if (executorService == null) {
-      initThreadPool();
-    }
+
     this.queryIterator = new SearchModeResultIterator(
         blockExecutionInfoList,
         queryModel,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
index 6c9396b..00fd511 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/SearchModeVectorDetailQueryExecutor.java
@@ -74,9 +74,7 @@ public class SearchModeVectorDetailQueryExecutor extends AbstractQueryExecutor<O
   public CarbonIterator<Object> execute(QueryModel queryModel)
       throws QueryExecutionException, IOException {
     List<BlockExecutionInfo> blockExecutionInfoList = getBlockExecutionInfos(queryModel);
-    if (executorService == null) {
-      initThreadPool();
-    }
+
     this.queryIterator = new SearchModeVectorResultIterator(
         blockExecutionInfoList,
         queryModel,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
index dc9415e..74c9ae2 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
@@ -207,12 +207,14 @@ public class FilterExpressionProcessor implements FilterProcessor {
    */
   private void addBlockBasedOnMinMaxValue(FilterExecuter filterExecuter,
       List<DataRefNode> listOfDataBlocksToScan, DataRefNode dataRefNode) {
-
+    if (null == dataRefNode.getColumnsMinValue() || null == dataRefNode.getColumnsMaxValue()) {
+      listOfDataBlocksToScan.add(dataRefNode);
+      return;
+    }
     BitSet bitSet = filterExecuter
         .isScanRequired(dataRefNode.getColumnsMaxValue(), dataRefNode.getColumnsMinValue());
     if (!bitSet.isEmpty()) {
       listOfDataBlocksToScan.add(dataRefNode);
-
     }
   }
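
The guard added above makes block pruning conservative: when a block carries no min/max statistics it is always added to the scan list instead of being dropped. A compact sketch of that decision for a single int column (hypothetical names, equality filter, inclusive bounds):

    public class MinMaxPruning {
      // decide whether a block must be scanned for an equality filter on one int column
      static boolean isScanRequired(Integer blockMin, Integer blockMax, int filterValue) {
        if (blockMin == null || blockMax == null) {
          return true;   // no statistics recorded -> cannot prune, scan the block
        }
        return filterValue >= blockMin && filterValue <= blockMax;
      }

      public static void main(String[] args) {
        System.out.println(isScanRequired(10, 20, 15));     // true, value inside [min, max]
        System.out.println(isScanRequired(10, 20, 42));     // false, block can be pruned
        System.out.println(isScanRequired(null, null, 42)); // true, no min/max available
      }
    }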
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
index 5196f8f..9741915 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
@@ -77,12 +77,10 @@ import org.apache.carbondata.core.scan.expression.logical.AndExpression;
 import org.apache.carbondata.core.scan.expression.logical.TrueExpression;
 import org.apache.carbondata.core.scan.filter.executer.AndFilterExecuterImpl;
 import org.apache.carbondata.core.scan.filter.executer.DimColumnExecuterFilterInfo;
-import org.apache.carbondata.core.scan.filter.executer.ExcludeColGroupFilterExecuterImpl;
 import org.apache.carbondata.core.scan.filter.executer.ExcludeFilterExecuterImpl;
 import org.apache.carbondata.core.scan.filter.executer.FalseFilterExecutor;
 import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
 import org.apache.carbondata.core.scan.filter.executer.ImplicitIncludeFilterExecutorImpl;
-import org.apache.carbondata.core.scan.filter.executer.IncludeColGroupFilterExecuterImpl;
 import org.apache.carbondata.core.scan.filter.executer.IncludeFilterExecuterImpl;
 import org.apache.carbondata.core.scan.filter.executer.MeasureColumnExecuterFilterInfo;
 import org.apache.carbondata.core.scan.filter.executer.OrFilterExecuterImpl;
@@ -232,30 +230,24 @@ public final class FilterUtil {
             msrColResolvedFilterInfo, true);
       }
     }
-    if (null != dimColResolvedFilterInfo) {
-      CarbonDimension dimension = dimColResolvedFilterInfo.getDimension();
-      if (dimension.hasEncoding(Encoding.IMPLICIT)) {
-        return new ImplicitIncludeFilterExecutorImpl(dimColResolvedFilterInfo);
-      } else if (dimension.isColumnar()) {
-        CarbonDimension dimensionFromCurrentBlock =
-            segmentProperties.getDimensionFromCurrentBlock(dimColResolvedFilterInfo.getDimension());
-        if (null != dimensionFromCurrentBlock) {
-          // update dimension and column index according to the dimension position in current block
-          DimColumnResolvedFilterInfo dimColResolvedFilterInfoCopyObject =
-              dimColResolvedFilterInfo.getCopyObject();
-          dimColResolvedFilterInfoCopyObject.setDimension(dimensionFromCurrentBlock);
-          dimColResolvedFilterInfoCopyObject.setColumnIndex(dimensionFromCurrentBlock.getOrdinal());
-          return new IncludeFilterExecuterImpl(dimColResolvedFilterInfoCopyObject, null,
-              segmentProperties, false);
-        } else {
-          return new RestructureIncludeFilterExecutorImpl(dimColResolvedFilterInfo,
-              msrColResolvedFilterInfo, false);
-        }
+    CarbonDimension dimension = dimColResolvedFilterInfo.getDimension();
+    if (dimension.hasEncoding(Encoding.IMPLICIT)) {
+      return new ImplicitIncludeFilterExecutorImpl(dimColResolvedFilterInfo);
+    } else {
+      CarbonDimension dimensionFromCurrentBlock =
+          segmentProperties.getDimensionFromCurrentBlock(dimColResolvedFilterInfo.getDimension());
+      if (null != dimensionFromCurrentBlock) {
+        // update dimension and column index according to the dimension position in current block
+        DimColumnResolvedFilterInfo dimColResolvedFilterInfoCopyObject =
+            dimColResolvedFilterInfo.getCopyObject();
+        dimColResolvedFilterInfoCopyObject.setDimension(dimensionFromCurrentBlock);
+        dimColResolvedFilterInfoCopyObject.setColumnIndex(dimensionFromCurrentBlock.getOrdinal());
+        return new IncludeFilterExecuterImpl(dimColResolvedFilterInfoCopyObject, null,
+            segmentProperties, false);
       } else {
-        return new IncludeColGroupFilterExecuterImpl(dimColResolvedFilterInfo, segmentProperties);
+        return new RestructureIncludeFilterExecutorImpl(dimColResolvedFilterInfo,
+            msrColResolvedFilterInfo, false);
       }
-    } else {
-      return new IncludeColGroupFilterExecuterImpl(null, segmentProperties);
     }
   }
 
@@ -288,24 +280,19 @@ public final class FilterUtil {
             msrColResolvedFilterInfo, true);
       }
     }
-    if ((null != dimColResolvedFilterInfo) && (dimColResolvedFilterInfo.getDimension()
-        .isColumnar())) {
-      CarbonDimension dimensionFromCurrentBlock =
-          segmentProperties.getDimensionFromCurrentBlock(dimColResolvedFilterInfo.getDimension());
-      if (null != dimensionFromCurrentBlock) {
-        // update dimension and column index according to the dimension position in current block
-        DimColumnResolvedFilterInfo dimColResolvedFilterInfoCopyObject =
-            dimColResolvedFilterInfo.getCopyObject();
-        dimColResolvedFilterInfoCopyObject.setDimension(dimensionFromCurrentBlock);
-        dimColResolvedFilterInfoCopyObject.setColumnIndex(dimensionFromCurrentBlock.getOrdinal());
-        return new ExcludeFilterExecuterImpl(dimColResolvedFilterInfoCopyObject, null,
-            segmentProperties, false);
-      } else {
-        return new RestructureExcludeFilterExecutorImpl(dimColResolvedFilterInfo,
-            msrColResolvedFilterInfo, false);
-      }
+    CarbonDimension dimensionFromCurrentBlock =
+        segmentProperties.getDimensionFromCurrentBlock(dimColResolvedFilterInfo.getDimension());
+    if (null != dimensionFromCurrentBlock) {
+      // update dimension and column index according to the dimension position in current block
+      DimColumnResolvedFilterInfo dimColResolvedFilterInfoCopyObject =
+          dimColResolvedFilterInfo.getCopyObject();
+      dimColResolvedFilterInfoCopyObject.setDimension(dimensionFromCurrentBlock);
+      dimColResolvedFilterInfoCopyObject.setColumnIndex(dimensionFromCurrentBlock.getOrdinal());
+      return new ExcludeFilterExecuterImpl(dimColResolvedFilterInfoCopyObject, null,
+          segmentProperties, false);
     } else {
-      return new ExcludeColGroupFilterExecuterImpl(dimColResolvedFilterInfo, segmentProperties);
+      return new RestructureExcludeFilterExecutorImpl(dimColResolvedFilterInfo,
+          msrColResolvedFilterInfo, false);
     }
   }
 
@@ -581,17 +568,15 @@ public final class FilterUtil {
     }
     Collections.sort(surrogates);
     ColumnFilterInfo columnFilterInfo = null;
-    if (surrogates.size() > 0) {
-      columnFilterInfo = new ColumnFilterInfo();
-      if (isExcludeFilterNeedsToApply) {
-        columnFilterInfo.setOptimized(true);
-      }
-      columnFilterInfo.setIncludeFilter(isIncludeFilter);
-      if (!isIncludeFilter) {
-        columnFilterInfo.setExcludeFilterList(surrogates);
-      } else {
-        columnFilterInfo.setFilterList(surrogates);
-      }
+    columnFilterInfo = new ColumnFilterInfo();
+    if (isExcludeFilterNeedsToApply) {
+      columnFilterInfo.setOptimized(true);
+    }
+    columnFilterInfo.setIncludeFilter(isIncludeFilter);
+    if (!isIncludeFilter) {
+      columnFilterInfo.setExcludeFilterList(surrogates);
+    } else {
+      columnFilterInfo.setFilterList(surrogates);
     }
     return columnFilterInfo;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeColGroupFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeColGroupFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeColGroupFilterExecuterImpl.java
deleted file mode 100644
index 44f7c07..0000000
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/ExcludeColGroupFilterExecuterImpl.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.scan.filter.executer;
-
-import java.util.BitSet;
-
-import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
-
-/**
- * It checks if filter is required on given block and if required, it does
- * linear search on block data and set the bitset.
- */
-public class ExcludeColGroupFilterExecuterImpl extends ExcludeFilterExecuterImpl {
-
-  /**
-   * @param dimColResolvedFilterInfo
-   * @param segmentProperties
-   */
-  public ExcludeColGroupFilterExecuterImpl(DimColumnResolvedFilterInfo dimColResolvedFilterInfo,
-      SegmentProperties segmentProperties) {
-    super(dimColResolvedFilterInfo, null, segmentProperties, false);
-  }
-
-  /**
-   * Check if scan is required on given block based on min and max value
-   */
-  public BitSet isScanRequired(byte[][] blkMaxVal, byte[][] blkMinVal) {
-    BitSet bitSet = new BitSet(1);
-    bitSet.flip(0, 1);
-    return bitSet;
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeColGroupFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeColGroupFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeColGroupFilterExecuterImpl.java
deleted file mode 100644
index e4da26f..0000000
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/IncludeColGroupFilterExecuterImpl.java
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.scan.filter.executer;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.BitSet;
-import java.util.List;
-
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.datastore.chunk.DimensionColumnPage;
-import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
-import org.apache.carbondata.core.keygenerator.KeyGenException;
-import org.apache.carbondata.core.keygenerator.KeyGenerator;
-import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.core.scan.executor.util.QueryUtil;
-import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
-import org.apache.carbondata.core.scan.processor.RawBlockletColumnChunks;
-import org.apache.carbondata.core.util.BitSetGroup;
-import org.apache.carbondata.core.util.ByteUtil;
-
-/**
- * It checks if filter is required on given block and if required, it does
- * linear search on block data and set the bitset.
- */
-public class IncludeColGroupFilterExecuterImpl extends IncludeFilterExecuterImpl {
-
-  /**
-   * LOGGER
-   */
-  private static final LogService LOGGER =
-      LogServiceFactory.getLogService(IncludeColGroupFilterExecuterImpl.class.getName());
-
-  /**
-   * @param dimColResolvedFilterInfo
-   * @param segmentProperties
-   */
-  public IncludeColGroupFilterExecuterImpl(DimColumnResolvedFilterInfo dimColResolvedFilterInfo,
-      SegmentProperties segmentProperties) {
-    super(dimColResolvedFilterInfo, null, segmentProperties, false);
-  }
-
-  /**
-   * It fills BitSet with row index which matches filter key
-   */
-  protected BitSet getFilteredIndexes(DimensionColumnPage dimensionColumnPage,
-      int numerOfRows) {
-    BitSet bitSet = new BitSet(numerOfRows);
-
-    try {
-      KeyStructureInfo keyStructureInfo = getKeyStructureInfo();
-      byte[][] filterValues = dimColumnExecuterInfo.getFilterKeys();
-      for (int i = 0; i < filterValues.length; i++) {
-        byte[] filterVal = filterValues[i];
-        for (int rowId = 0; rowId < numerOfRows; rowId++) {
-          byte[] colData = new byte[keyStructureInfo.getMaskByteRanges().length];
-          dimensionColumnPage.fillRawData(rowId, 0, colData, keyStructureInfo);
-          if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterVal, colData) == 0) {
-            bitSet.set(rowId);
-          }
-        }
-      }
-
-    } catch (Exception e) {
-      LOGGER.error(e);
-    }
-
-    return bitSet;
-  }
-
-  @Override
-  public BitSetGroup applyFilter(RawBlockletColumnChunks rawBlockletColumnChunks,
-      boolean useBitsetPipeLine) throws IOException {
-    int chunkIndex = segmentProperties.getDimensionOrdinalToChunkMapping()
-        .get(dimColumnEvaluatorInfo.getColumnIndex());
-    if (null == rawBlockletColumnChunks.getDimensionRawColumnChunks()[chunkIndex]) {
-      rawBlockletColumnChunks.getDimensionRawColumnChunks()[chunkIndex] =
-          rawBlockletColumnChunks.getDataBlock().readDimensionChunk(
-              rawBlockletColumnChunks.getFileReader(), chunkIndex);
-    }
-    DimensionRawColumnChunk dimensionRawColumnChunk =
-        rawBlockletColumnChunks.getDimensionRawColumnChunks()[chunkIndex];
-    BitSetGroup bitSetGroup = new BitSetGroup(dimensionRawColumnChunk.getPagesCount());
-    for (int i = 0; i < dimensionRawColumnChunk.getPagesCount(); i++) {
-      if (dimensionRawColumnChunk.getMaxValues() != null) {
-        BitSet bitSet = getFilteredIndexes(dimensionRawColumnChunk.decodeColumnPage(i),
-            dimensionRawColumnChunk.getRowCount()[i]);
-        bitSetGroup.setBitSet(bitSet, i);
-      }
-    }
-    return bitSetGroup;
-  }
-
-  /**
-   * It is required for extracting column data from columngroup chunk
-   *
-   * @return
-   * @throws KeyGenException
-   */
-  private KeyStructureInfo getKeyStructureInfo() throws KeyGenException {
-    int colGrpId = getColumnGroupId(dimColumnEvaluatorInfo.getColumnIndex());
-    KeyGenerator keyGenerator = segmentProperties.getColumnGroupAndItsKeygenartor().get(colGrpId);
-    List<Integer> mdKeyOrdinal = new ArrayList<Integer>();
-    mdKeyOrdinal.add(getMdkeyOrdinal(dimColumnEvaluatorInfo.getColumnIndex(), colGrpId));
-    int[] maskByteRanges = QueryUtil.getMaskedByteRangeBasedOrdinal(mdKeyOrdinal, keyGenerator);
-    byte[] maxKey = QueryUtil.getMaxKeyBasedOnOrinal(mdKeyOrdinal, keyGenerator);
-    KeyStructureInfo restructureInfos = new KeyStructureInfo();
-    restructureInfos.setKeyGenerator(keyGenerator);
-    restructureInfos.setMaskByteRanges(maskByteRanges);
-    restructureInfos.setMaxKey(maxKey);
-    return restructureInfos;
-  }
-
-  /**
-   * Check if scan is required on given block based on min and max value
-   */
-  public BitSet isScanRequired(byte[][] blkMaxVal, byte[][] blkMinVal) {
-    BitSet bitSet = new BitSet(1);
-    byte[][] filterValues = dimColumnExecuterInfo.getFilterKeys();
-    int columnIndex = dimColumnEvaluatorInfo.getColumnIndex();
-    int chunkIndex = segmentProperties.getDimensionOrdinalToChunkMapping().get(columnIndex);
-    int[] cols = getAllColumns(columnIndex);
-    byte[] maxValue = getMinMaxData(cols, blkMaxVal[chunkIndex], columnIndex);
-    byte[] minValue = getMinMaxData(cols, blkMinVal[chunkIndex], columnIndex);
-    boolean isScanRequired = false;
-    for (int k = 0; k < filterValues.length; k++) {
-      // filter value should be in range of max and min value i.e
-      // max>filtervalue>min
-      // so filter-max should be negative
-      int maxCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], maxValue);
-      // and filter-min should be positive
-      int minCompare = ByteUtil.UnsafeComparer.INSTANCE.compareTo(filterValues[k], minValue);
-
-      // if any filter value is in range than this block needs to be
-      // scanned
-      if (maxCompare <= 0 && minCompare >= 0) {
-        isScanRequired = true;
-        break;
-      }
-    }
-    if (isScanRequired) {
-      bitSet.set(0);
-    }
-    return bitSet;
-  }
-
-  /**
-   * It extract min and max data for given column from stored min max value
-   *
-   * @param colGrpColumns
-   * @param minMaxData
-   * @param columnIndex
-   * @return
-   */
-  private byte[] getMinMaxData(int[] colGrpColumns, byte[] minMaxData, int columnIndex) {
-    int startIndex = 0;
-    int endIndex = 0;
-    if (null != colGrpColumns) {
-      for (int i = 0; i < colGrpColumns.length; i++) {
-        int colGrpId = getColumnGroupId(colGrpColumns[i]);
-        int mdKeyOrdinal = getMdkeyOrdinal(colGrpColumns[i], colGrpId);
-        int[] byteRange = getKeyGenerator(colGrpId).getKeyByteOffsets(mdKeyOrdinal);
-        int colSize = 0;
-        for (int j = byteRange[0]; j <= byteRange[1]; j++) {
-          colSize++;
-        }
-        if (colGrpColumns[i] == columnIndex) {
-          endIndex = startIndex + colSize;
-          break;
-        }
-        startIndex += colSize;
-      }
-    }
-    byte[] data = new byte[endIndex - startIndex];
-    System.arraycopy(minMaxData, startIndex, data, 0, data.length);
-    return data;
-  }
-
-  /**
-   * It returns column groups which have provided column ordinal
-   *
-   * @param columnIndex
-   * @return column group array
-   */
-  private int[] getAllColumns(int columnIndex) {
-    int[][] colGroups = segmentProperties.getColumnGroups();
-    for (int i = 0; i < colGroups.length; i++) {
-      if (QueryUtil.searchInArray(colGroups[i], columnIndex)) {
-        return colGroups[i];
-      }
-    }
-    return null;
-  }
-
-  private int getMdkeyOrdinal(int ordinal, int colGrpId) {
-    return segmentProperties.getColumnGroupMdKeyOrdinal(colGrpId, ordinal);
-  }
-
-  private int getColumnGroupId(int ordinal) {
-    int[][] columnGroups = segmentProperties.getColumnGroups();
-    int colGrpId = -1;
-    for (int i = 0; i < columnGroups.length; i++) {
-      if (columnGroups[i].length > 1) {
-        colGrpId++;
-        if (QueryUtil.searchInArray(columnGroups[i], ordinal)) {
-          break;
-        }
-      }
-    }
-    return colGrpId;
-  }
-
-  public KeyGenerator getKeyGenerator(int colGrpId) {
-    return segmentProperties.getColumnGroupAndItsKeygenartor().get(colGrpId);
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
index 34555e1..b218813 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RangeValueFilterExecuterImpl.java
@@ -113,7 +113,7 @@ public class RangeValueFilterExecuterImpl extends ValueBasedFilterExecuterImpl {
    */
   private void ifDefaultValueMatchesFilter() {
     isDefaultValuePresentInFilter = false;
-    if (!this.isDimensionPresentInCurrentBlock) {
+    if (!this.isDimensionPresentInCurrentBlock && null != filterRangesValues) {
       CarbonDimension dimension = this.dimColEvaluatorInfo.getDimension();
       byte[] defaultValue = dimension.getDefaultValue();
       if (null != defaultValue) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
index e1432b0..f901238 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RestructureEvaluatorImpl.java
@@ -134,5 +134,4 @@ public abstract class RestructureEvaluatorImpl implements FilterExecuter {
     }
     return isDefaultValuePresentInFilterValues;
   }
-
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
index 4bee89b..057a244 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelFilterExecuterImpl.java
@@ -124,8 +124,8 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
       this.dimColEvaluatorInfoList = dimColEvaluatorInfoList;
     }
     if (this.dimColEvaluatorInfoList.size() > 0) {
-      this.isDimensionPresentInCurrentBlock = new boolean[dimColEvaluatorInfoList.size()];
-      this.dimensionChunkIndex = new int[dimColEvaluatorInfoList.size()];
+      this.isDimensionPresentInCurrentBlock = new boolean[this.dimColEvaluatorInfoList.size()];
+      this.dimensionChunkIndex = new int[this.dimColEvaluatorInfoList.size()];
     } else {
       this.isDimensionPresentInCurrentBlock = new boolean[]{false};
       this.dimensionChunkIndex = new int[]{0};
@@ -136,8 +136,8 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
       this.msrColEvalutorInfoList = msrColEvalutorInfoList;
     }
     if (this.msrColEvalutorInfoList.size() > 0) {
-      this.isMeasurePresentInCurrentBlock = new boolean[msrColEvalutorInfoList.size()];
-      this.measureChunkIndex = new int[msrColEvalutorInfoList.size()];
+      this.isMeasurePresentInCurrentBlock = new boolean[this.msrColEvalutorInfoList.size()];
+      this.measureChunkIndex = new int[this.msrColEvalutorInfoList.size()];
     } else {
       this.isMeasurePresentInCurrentBlock = new boolean[]{false};
       this.measureChunkIndex = new int[] {0};
@@ -647,13 +647,11 @@ public class RowLevelFilterExecuterImpl implements FilterExecuter {
       }
     }
 
-    if (null != msrColEvalutorInfoList) {
-      for (MeasureColumnResolvedFilterInfo msrColumnEvalutorInfo : msrColEvalutorInfoList) {
-        if (null == rawBlockletColumnChunks.getMeasureRawColumnChunks()[measureChunkIndex[0]]) {
-          rawBlockletColumnChunks.getMeasureRawColumnChunks()[measureChunkIndex[0]] =
-              rawBlockletColumnChunks.getDataBlock()
-                  .readMeasureChunk(rawBlockletColumnChunks.getFileReader(), measureChunkIndex[0]);
-        }
+    for (MeasureColumnResolvedFilterInfo msrColumnEvalutorInfo : msrColEvalutorInfoList) {
+      if (null == rawBlockletColumnChunks.getMeasureRawColumnChunks()[measureChunkIndex[0]]) {
+        rawBlockletColumnChunks.getMeasureRawColumnChunks()[measureChunkIndex[0]] =
+            rawBlockletColumnChunks.getDataBlock()
+              .readMeasureChunk(rawBlockletColumnChunks.getFileReader(), measureChunkIndex[0]);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
index be871d4..e339a58 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtThanFiterExecuterImpl.java
@@ -67,7 +67,7 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
     this.filterRangeValues = filterRangeValues;
     this.msrFilterRangeValues = msrFilterRangeValues;
     lastDimensionColOrdinal = segmentProperties.getLastDimensionColOrdinal();
-    if (!msrColEvalutorInfoList.isEmpty()) {
+    if (!this.msrColEvalutorInfoList.isEmpty()) {
       CarbonMeasure measure = this.msrColEvalutorInfoList.get(0).getMeasure();
       comparator = Comparator.getComparatorByDataTypeForMeasure(measure.getDataType());
     }
@@ -98,9 +98,12 @@ public class RowLevelRangeGrtThanFiterExecuterImpl extends RowLevelFilterExecute
     } else if (!msrColEvalutorInfoList.isEmpty() && !isMeasurePresentInCurrentBlock[0]) {
       CarbonMeasure measure = this.msrColEvalutorInfoList.get(0).getMeasure();
       byte[] defaultValue = measure.getDefaultValue();
+      SerializableComparator comparatorTmp = (null != comparator ?
+          comparator :
+          Comparator.getComparatorByDataTypeForMeasure(measure.getDataType()));
       if (null != defaultValue) {
         for (int k = 0; k < msrFilterRangeValues.length; k++) {
-          int maxCompare = comparator.compare(msrFilterRangeValues[k],
+          int maxCompare = comparatorTmp.compare(msrFilterRangeValues[k],
               RestructureUtil.getMeasureDefaultValue(measure.getColumnSchema(),
                   measure.getDefaultValue()));
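
This and the following three executer hunks share one fix: the cached comparator may never have been initialized for a measure column missing from the current block, so a comparator is derived from the measure's data type before use. A tiny sketch of the null-safe fallback (hypothetical, using a plain java.util.Comparator):

    import java.util.Comparator;

    public class ComparatorFallback {
      // use the cached comparator when available, otherwise derive one on the spot
      static int compare(Comparator<Integer> cached, Integer left, Integer right) {
        Comparator<Integer> cmp = (cached != null) ? cached : Comparator.naturalOrder();
        return cmp.compare(left, right);
      }

      public static void main(String[] args) {
        System.out.println(compare(null, 5, 9));                      // -1, fallback used
        System.out.println(compare(Comparator.reverseOrder(), 5, 9)); // 1, cached comparator used
      }
    }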
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
index a3359be..2ea3c73 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeGrtrThanEquaToFilterExecuterImpl.java
@@ -98,9 +98,11 @@ public class RowLevelRangeGrtrThanEquaToFilterExecuterImpl extends RowLevelFilte
     } else if (!msrColEvalutorInfoList.isEmpty() && !isMeasurePresentInCurrentBlock[0]) {
       CarbonMeasure measure = this.msrColEvalutorInfoList.get(0).getMeasure();
       byte[] defaultValue = measure.getDefaultValue();
+      SerializableComparator comparatorTmp =
+          Comparator.getComparatorByDataTypeForMeasure(measure.getDataType());
       if (null != defaultValue) {
         for (int k = 0; k < msrFilterRangeValues.length; k++) {
-          int maxCompare = comparator.compare(msrFilterRangeValues[k],
+          int maxCompare = comparatorTmp.compare(msrFilterRangeValues[k],
               RestructureUtil.getMeasureDefaultValue(measure.getColumnSchema(),
                   measure.getDefaultValue()));
           if (maxCompare <= 0) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
index 0c268c9..f52d087 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanEqualFilterExecuterImpl.java
@@ -100,9 +100,11 @@ public class RowLevelRangeLessThanEqualFilterExecuterImpl extends RowLevelFilter
     } else if (!msrColEvalutorInfoList.isEmpty() && !isMeasurePresentInCurrentBlock[0]) {
       CarbonMeasure measure = this.msrColEvalutorInfoList.get(0).getMeasure();
       byte[] defaultValue = measure.getDefaultValue();
+      SerializableComparator comparatorTmp =
+          Comparator.getComparatorByDataTypeForMeasure(measure.getDataType());
       if (null != defaultValue) {
         for (int k = 0; k < msrFilterRangeValues.length; k++) {
-          int maxCompare = comparator.compare(msrFilterRangeValues[k],
+          int maxCompare = comparatorTmp.compare(msrFilterRangeValues[k],
               RestructureUtil.getMeasureDefaultValue(measure.getColumnSchema(),
                   measure.getDefaultValue()));
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
index c5ed77d..97e750a 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/executer/RowLevelRangeLessThanFilterExecuterImpl.java
@@ -100,12 +100,14 @@ public class RowLevelRangeLessThanFilterExecuterImpl extends RowLevelFilterExecu
     } else if (!msrColEvalutorInfoList.isEmpty() && !isMeasurePresentInCurrentBlock[0]) {
       CarbonMeasure measure = this.msrColEvalutorInfoList.get(0).getMeasure();
       byte[] defaultValue = measure.getDefaultValue();
+      SerializableComparator comparatorTmp =
+          Comparator.getComparatorByDataTypeForMeasure(measure.getDataType());
       if (null != defaultValue) {
         for (int k = 0; k < msrFilterRangeValues.length; k++) {
           Object convertedValue = RestructureUtil
               .getMeasureDefaultValue(measure.getColumnSchema(), measure.getDefaultValue());
           int maxCompare =
-              comparator.compare(msrFilterRangeValues[k], convertedValue);
+              comparatorTmp.compare(msrFilterRangeValues[k], convertedValue);
           if (maxCompare > 0) {
             isDefaultValuePresentInFilter = true;
             break;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
index ccbbc32..8ad0c48 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/resolver/ConditionalFilterResolverImpl.java
@@ -303,7 +303,6 @@ public class ConditionalFilterResolverImpl implements FilterResolverIntf {
           this.dimColResolvedFilterInfo.getDimension(), segmentProperties, false);
     }
     return null;
-
   }
 
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
index d975c20..01aa939 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
@@ -135,14 +135,19 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato
       } else {
         DataRefNode startDataBlock =
             finder.findFirstDataBlock(dataRefNode, blockInfo.getStartKey());
-        while (startDataBlock.nodeIndex() < blockInfo.getStartBlockletIndex()) {
+        while ((null != startDataBlock) && (startDataBlock.nodeIndex() < blockInfo
+            .getStartBlockletIndex())) {
           startDataBlock = startDataBlock.getNextDataRefNode();
         }
         long numberOfBlockToScan = blockInfo.getNumberOfBlockletToScan();
         //if number of block is less than 0 then take end block.
         if (numberOfBlockToScan <= 0) {
           DataRefNode endDataBlock = finder.findLastDataBlock(dataRefNode, blockInfo.getEndKey());
-          numberOfBlockToScan = endDataBlock.nodeIndex() - startDataBlock.nodeIndex() + 1;
+          if (null != startDataBlock) {
+            numberOfBlockToScan = endDataBlock.nodeIndex() - startDataBlock.nodeIndex() + 1;
+          } else {
+            numberOfBlockToScan = endDataBlock.nodeIndex() + 1;
+          }
         }
         blockInfo.setFirstDataBlock(startDataBlock);
         blockInfo.setNumberOfBlockToScan(numberOfBlockToScan);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java b/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
index 57d8177..0100c8b 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/BlockletFilterScanner.java
@@ -110,6 +110,10 @@ public class BlockletFilterScanner extends BlockletFullScanner {
         totalPagesScanned.getCount() + dataBlock.numberOfPages());
     // apply min max
     if (isMinMaxEnabled) {
+      if (null == dataBlock.getColumnsMaxValue()
+              || null == dataBlock.getColumnsMinValue()) {
+        return true;
+      }
       BitSet bitSet = null;
       // check for implicit include filter instance
       if (filterExecuter instanceof ImplicitColumnFilterExecutor) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
index d6671b4..9dc8fe6 100644
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/LoadMetadataDetails.java
@@ -230,7 +230,13 @@ public class LoadMetadataDetails implements Serializable {
    * @return
    */
   public long getLoadStartTimeAsLong() {
-    return (!loadStartTime.isEmpty()) ? getTimeStamp(loadStartTime) : 0;
+    if (!loadStartTime.isEmpty()) {
+      Long time = getTimeStamp(loadStartTime);
+      if (null != time) {
+        return time;
+      }
+    }
+    return 0;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
index 363b5bc..1c53fbb 100644
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
@@ -322,13 +322,12 @@ public class SegmentUpdateStatusManager {
    * @return the list of delete file
    */
   private List<String> getDeltaFiles(CarbonFile blockDir, final String blockNameFromTuple,
-      final String extension,
-      String segment) {
+      final String extension, String segment) throws IOException {
     List<String> deleteFileList = new ArrayList<>();
     for (SegmentUpdateDetails block : updateDetails) {
-      if (block.getBlockName().equalsIgnoreCase(blockNameFromTuple) &&
-          block.getSegmentName().equalsIgnoreCase(segment) &&
-          !CarbonUpdateUtil.isBlockInvalid(block.getSegmentStatus())) {
+      if (block.getBlockName().equalsIgnoreCase(blockNameFromTuple) && block.getSegmentName()
+          .equalsIgnoreCase(segment) && !CarbonUpdateUtil
+          .isBlockInvalid(block.getSegmentStatus())) {
         final long deltaStartTimestamp = getStartTimeOfDeltaFile(extension, block);
         // If there is no delete delete file , then return null
         if (deltaStartTimestamp == 0) {
@@ -347,33 +346,38 @@ public class SegmentUpdateStatusManager {
 
   private List<String> getFilePaths(CarbonFile blockDir, final String blockNameFromTuple,
       final String extension, List<String> deleteFileList, final long deltaStartTimestamp,
-      final long deltaEndTimeStamp) {
-    CarbonFile[] files = blockDir.getParentFile().listFiles(new CarbonFileFilter() {
-
-      @Override public boolean accept(CarbonFile pathName) {
-        String fileName = pathName.getName();
-        if (fileName.endsWith(extension) && pathName.getSize() > 0) {
-          String firstPart = fileName.substring(0, fileName.indexOf('.'));
-          String blockName =
-              firstPart.substring(0, firstPart.lastIndexOf(CarbonCommonConstants.HYPHEN));
-          long timestamp = Long.parseLong(firstPart
-              .substring(firstPart.lastIndexOf(CarbonCommonConstants.HYPHEN) + 1,
-                  firstPart.length()));
-          if (blockNameFromTuple.equals(blockName) && (
-              (Long.compare(timestamp, deltaEndTimeStamp) <= 0) && (
-                  Long.compare(timestamp, deltaStartTimestamp) >= 0))) {
-            return true;
+      final long deltaEndTimeStamp) throws IOException {
+    if (null != blockDir.getParentFile()) {
+      CarbonFile[] files = blockDir.getParentFile().listFiles(new CarbonFileFilter() {
+
+        @Override
+        public boolean accept(CarbonFile pathName) {
+          String fileName = pathName.getName();
+          if (fileName.endsWith(extension) && pathName.getSize() > 0) {
+            String firstPart = fileName.substring(0, fileName.indexOf('.'));
+            String blockName =
+                    firstPart.substring(0, firstPart.lastIndexOf(CarbonCommonConstants.HYPHEN));
+            long timestamp = Long.parseLong(firstPart
+                    .substring(firstPart.lastIndexOf(CarbonCommonConstants.HYPHEN) + 1,
+                            firstPart.length()));
+            if (blockNameFromTuple.equals(blockName) && (
+                    (Long.compare(timestamp, deltaEndTimeStamp) <= 0) && (
+                            Long.compare(timestamp, deltaStartTimestamp) >= 0))) {
+              return true;
+            }
           }
+          return false;
         }
-        return false;
-      }
-    });
+      });
 
-    for (CarbonFile cfile : files) {
-      if (null == deleteFileList) {
-        deleteFileList = new ArrayList<String>(files.length);
+      for (CarbonFile cfile : files) {
+        if (null == deleteFileList) {
+          deleteFileList = new ArrayList<String>(files.length);
+        }
+        deleteFileList.add(cfile.getCanonicalPath());
       }
-      deleteFileList.add(cfile.getCanonicalPath());
+    } else {
+      throw new IOException("Parent file could not be found");
     }
     return deleteFileList;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
index b74c279..e30ad03 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/AbstractDataFileFooterConverter.java
@@ -60,7 +60,12 @@ public abstract class AbstractDataFileFooterConverter {
    */
   private static BitSet getPresenceMeta(
       org.apache.carbondata.format.PresenceMeta presentMetadataThrift) {
-    return BitSet.valueOf(presentMetadataThrift.getPresent_bit_stream());
+    final byte[] present_bit_stream = presentMetadataThrift.getPresent_bit_stream();
+    if (null != present_bit_stream) {
+      return BitSet.valueOf(present_bit_stream);
+    } else {
+      return new BitSet(1);
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
index 9880b4d..af5121c 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonMetadataUtil.java
@@ -370,7 +370,7 @@ public class CarbonMetadataUtil {
     } else if (DataTypes.isDecimal(dataType)) {
       return DataTypeUtil.byteToBigDecimal(first).compareTo(DataTypeUtil.byteToBigDecimal(second));
     } else {
-      throw new IllegalArgumentException("Invalid data type");
+      throw new IllegalArgumentException("Invalid data type:" + dataType);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
index 9822167..f7f71b3 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
@@ -587,20 +587,16 @@ public final class DataTypeUtil {
       return null;
     }
     try {
-      Object parsedValue = null;
       if (actualDataType == DataTypes.SHORT) {
-        parsedValue = Short.parseShort(data);
+        Short.parseShort(data);
       } else if (actualDataType == DataTypes.INT) {
-        parsedValue = Integer.parseInt(data);
+        Integer.parseInt(data);
       } else if (actualDataType == DataTypes.LONG) {
-        parsedValue = Long.parseLong(data);
+        Long.parseLong(data);
       } else {
         return data;
       }
-      if (null != parsedValue) {
-        return data;
-      }
-      return null;
+      return data;
     } catch (NumberFormatException ex) {
       return null;
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java b/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
index 62192ff..e8a121c 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
@@ -576,11 +576,15 @@ public class CarbonTablePath {
    */
   public static CarbonFile[] getSortIndexFiles(CarbonFile sortIndexDir,
       final String columnUniqueId) {
-    return sortIndexDir.listFiles(new CarbonFileFilter() {
-      @Override public boolean accept(CarbonFile file) {
-        return file.getName().startsWith(columnUniqueId) && file.getName().endsWith(SORT_INDEX_EXT);
-      }
-    });
+    if (null != sortIndexDir) {
+      return sortIndexDir.listFiles(new CarbonFileFilter() {
+        @Override public boolean accept(CarbonFile file) {
+          return file.getName().startsWith(columnUniqueId) && file.getName()
+              .endsWith(SORT_INDEX_EXT);
+        }
+      });
+    }
+    return null;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/test/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunkTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunkTest.java b/core/src/test/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunkTest.java
deleted file mode 100644
index 54b66a6..0000000
--- a/core/src/test/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunkTest.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.core.datastore.chunk.impl;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.carbondata.core.keygenerator.KeyGenException;
-import org.apache.carbondata.core.keygenerator.KeyGenerator;
-import org.apache.carbondata.core.keygenerator.mdkey.MultiDimKeyVarLengthGenerator;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.core.scan.executor.util.QueryUtil;
-
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-public class ColumnGroupDimensionDataChunkTest {
-
-  static ColumnGroupDimensionColumnPage columnGroupDimensionDataChunk;
-  static KeyGenerator keyGenerator;
-
-  @BeforeClass public static void setup() {
-    int[] bitLength = CarbonUtil.getDimensionBitLength(new int[] { 10, 10, 10 }, new int[] { 3 });
-    // create a key generator
-    keyGenerator = new MultiDimKeyVarLengthGenerator(bitLength);
-    byte[] data = new byte[keyGenerator.getKeySizeInBytes() * 3];
-    int position = 0;
-    for (int i = 1; i <= 3; i++) {
-      try {
-        System.arraycopy(keyGenerator.generateKey(new int[] { i, i, i }), 0, data, position,
-            keyGenerator.getKeySizeInBytes());
-      } catch (KeyGenException e) {
-        assertTrue(false);
-      }
-      position += keyGenerator.getKeySizeInBytes();
-    }
-    columnGroupDimensionDataChunk =
-        new ColumnGroupDimensionColumnPage(data, keyGenerator.getKeySizeInBytes(), 3);
-  }
-
-  @Test public void fillChunkDataTest() {
-    List<Integer> ordinals = new ArrayList<Integer>();
-    ordinals.add(1);
-    KeyStructureInfo keyStructureInfo = getKeyStructureInfo(ordinals, keyGenerator);
-    byte[] buffer = new byte[1];
-    columnGroupDimensionDataChunk.fillRawData(1, 0, buffer, keyStructureInfo);
-    assertEquals(buffer[0], 2);
-  }
-
-  @Test public void getChunkDataTest() {
-    byte[] b = { 34, 2 };
-    byte res[] = columnGroupDimensionDataChunk.getChunkData(1);
-    Assert.assertTrue(Arrays.equals(res, b));
-  }
-
-  @Test public void fillConvertedChunkDataTest() {
-    int[] row = new int[3];
-    int[] expected = { 0, 0, 3 };
-    List<Integer> ordinals = new ArrayList<Integer>();
-    ordinals.add(2);
-    KeyStructureInfo keyStructureInfo = getKeyStructureInfo(ordinals, keyGenerator);
-    keyStructureInfo.setMdkeyQueryDimensionOrdinal(new int[] { 2 });
-    int res = columnGroupDimensionDataChunk.fillSurrogateKey(2, 2, row, keyStructureInfo);
-    Assert.assertTrue(Arrays.equals(row, expected));
-  }
-
-  /**
-   * Below method will be used to get the key structure info for the query
-   *
-   * @param ordinals   query model
-   * @param keyGenerator
-   * @return key structure info
-   */
-  private KeyStructureInfo getKeyStructureInfo(List<Integer> ordinals, KeyGenerator keyGenerator) {
-    // getting the masked byte range for dictionary column
-    int[] maskByteRanges = QueryUtil.getMaskedByteRangeBasedOrdinal(ordinals, keyGenerator);
-
-    // getting the masked bytes for query dimension dictionary column
-    int[] maskedBytes = QueryUtil.getMaskedByte(keyGenerator.getKeySizeInBytes(), maskByteRanges);
-
-    // max key for the dictionary dimension present in the query
-    byte[] maxKey = null;
-    try {
-      // getting the max key which will be used to masked and get the
-      // masked key
-      maxKey = QueryUtil.getMaxKeyBasedOnOrinal(ordinals, keyGenerator);
-    } catch (KeyGenException e) {
-    }
-
-    KeyStructureInfo restructureInfos = new KeyStructureInfo();
-    restructureInfos.setKeyGenerator(keyGenerator);
-    restructureInfos.setMaskByteRanges(maskByteRanges);
-    restructureInfos.setMaxKey(maxKey);
-    return restructureInfos;
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java
index f55cd67..890d36d 100644
--- a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/AlluxioCarbonFileTest.java
@@ -59,8 +59,11 @@ public class AlluxioCarbonFileTest {
             }
         try {
             FileOutputStream oFile = new FileOutputStream(file, true);
+            oFile.close();
         } catch (FileNotFoundException e) {
             e.printStackTrace();
+        } catch (IOException e) {
+            e.printStackTrace();
         }
 
         fileStatus = new FileStatus(12L, true, 60, 120l, 180L, new Path(file.getAbsolutePath()));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/ViewFsCarbonFileTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/ViewFsCarbonFileTest.java b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/ViewFsCarbonFileTest.java
index ba661b1..82d1501 100644
--- a/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/ViewFsCarbonFileTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/datastore/filesystem/ViewFsCarbonFileTest.java
@@ -54,14 +54,17 @@ public class ViewFsCarbonFileTest {
         file = new File("Test.carbondata");
         if (!file.exists())
             try {
-                file.createNewFile();
+              file.createNewFile();
             } catch (IOException e) {
-                e.printStackTrace();
+              e.printStackTrace();
             }
         try {
-            FileOutputStream oFile = new FileOutputStream(file, true);
+          FileOutputStream oFile = new FileOutputStream(file, true);
+          oFile.close();
         } catch (FileNotFoundException e) {
             e.printStackTrace();
+        } catch (IOException e) {
+            e.printStackTrace();
         }
 
         fileStatus = new FileStatus(12L, true, 60, 120l, 180L, new Path(file.getAbsolutePath()));

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java
index 32af8d3..8be1e2e 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java
@@ -16,39 +16,21 @@
  */
 package org.apache.carbondata.hadoop;
 
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.Cache;
 import org.apache.carbondata.core.cache.CacheProvider;
 import org.apache.carbondata.core.cache.CacheType;
-import org.apache.carbondata.core.datastore.SegmentTaskIndexStore;
 import org.apache.carbondata.core.datastore.TableSegmentUniqueIdentifier;
-import org.apache.carbondata.core.datastore.block.SegmentProperties;
 import org.apache.carbondata.core.datastore.block.SegmentTaskIndexWrapper;
-import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 
 /**
  * CacheClient : Holds all the Cache access clients for Btree, Dictionary
  */
 public class CacheClient {
 
-  private static final LogService LOGGER =
-      LogServiceFactory.getLogService(CacheClient.class.getName());
-
-  private final Object lock = new Object();
-
   // segment access client for driver LRU cache
   private CacheAccessClient<TableSegmentUniqueIdentifier, SegmentTaskIndexWrapper>
       segmentAccessClient;
 
-  private static Map<SegmentTaskIndexStore.SegmentPropertiesWrapper, SegmentProperties>
-      segmentProperties = new ConcurrentHashMap<>();
-
   public CacheClient() {
     Cache<TableSegmentUniqueIdentifier, SegmentTaskIndexWrapper> segmentCache =
         CacheProvider.getInstance().createCache(CacheType.DRIVER_BTREE);
@@ -63,35 +45,4 @@ public class CacheClient {
   public void close() {
     segmentAccessClient.close();
   }
-
-  /**
-   * Method to get the segment properties and avoid construction of new segment properties until
-   * the schema is not modified
-   *
-   * @param tableIdentifier
-   * @param columnsInTable
-   * @param columnCardinality
-   */
-  public SegmentProperties getSegmentProperties(AbsoluteTableIdentifier tableIdentifier,
-      List<ColumnSchema> columnsInTable, int[] columnCardinality) {
-    SegmentTaskIndexStore.SegmentPropertiesWrapper segmentPropertiesWrapper =
-        new SegmentTaskIndexStore.SegmentPropertiesWrapper(tableIdentifier, columnsInTable,
-            columnCardinality);
-    SegmentProperties segmentProperties = this.segmentProperties.get(segmentPropertiesWrapper);
-    if (null == segmentProperties) {
-      synchronized (lock) {
-        segmentProperties = this.segmentProperties.get(segmentPropertiesWrapper);
-        if (null == segmentProperties) {
-          // create a metadata details
-          // this will be useful in query handling
-          // all the data file metadata will have common segment properties we
-          // can use first one to get create the segment properties
-          LOGGER.info("Constructing new SegmentProperties");
-          segmentProperties = new SegmentProperties(columnsInTable, columnCardinality);
-          this.segmentProperties.put(segmentPropertiesWrapper, segmentProperties);
-        }
-      }
-    }
-    return segmentProperties;
-  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
index 0bcb188..e5e3165 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonOutputCommitter.java
@@ -124,6 +124,9 @@ public class CarbonOutputCommitter extends FileOutputCommitter {
         .mergeSegmentFiles(readPath, segmentFileName,
             CarbonTablePath.getSegmentFilesLocation(loadModel.getTablePath()));
     if (segmentFile != null) {
+      if (null == newMetaEntry) {
+        throw new RuntimeException("Internal Error");
+      }
       // Move all files from temp directory of each segment to partition directory
       SegmentFileStore.moveFromTempFolder(segmentFile,
           loadModel.getSegmentId() + "_" + loadModel.getFactTimeStamp() + ".tmp",

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java
index 7d9c712..b619158 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java
@@ -18,14 +18,10 @@
 package org.apache.carbondata.hadoop.internal.segment;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
 
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.JobContext;
 
@@ -56,25 +52,6 @@ public abstract class Segment {
   }
 
   /**
-   * return all InputSplit of this segment, each file is a InputSplit
-   * @param job job context
-   * @return all InputSplit
-   * @throws IOException
-   */
-  public List<InputSplit> getAllSplits(JobContext job) throws IOException {
-    List<InputSplit> result = new ArrayList<>();
-    Path p = new Path(path);
-    FileSystem fs = p.getFileSystem(job.getConfiguration());
-
-    //TODO: filter out the hidden files
-    FileStatus[] files = fs.globStatus(p);
-    for (FileStatus file: files) {
-      // make split and add to result
-    }
-    return result;
-  }
-
-  /**
    * get all files, implementation may use the input filter and index to prune files
    * @param job job context
    * @param filterResolver filter

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
index febca50..e95382c 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
@@ -196,12 +196,15 @@ public class CarbonDictionaryDecodeReadSupport<T> implements CarbonReadSupport<T
       if (carbonColumn.isDimension() && carbonColumn.getColumnSchema().getNumberOfChild() > 0) {
         childCarbonDimensions = ((CarbonDimension) carbonColumn).getListOfChildDimensions();
       }
-      Writable[] arr = new Writable[objArray.length];
-      for (int i = 0; i < objArray.length; i++) {
 
-        arr[i] = createWritableObject(objArray[i], childCarbonDimensions.get(i));
+      if (null != childCarbonDimensions) {
+        Writable[] arr = new Writable[objArray.length];
+        for (int i = 0; i < objArray.length; i++) {
+
+          arr[i] = createWritableObject(objArray[i], childCarbonDimensions.get(i));
+        }
+        return new ArrayWritable(Writable.class, arr);
       }
-      return new ArrayWritable(Writable.class, arr);
     }
     throw new IOException("DataType not supported in Carbondata");
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
index 89a5ed6..d4cf480 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
@@ -77,13 +77,17 @@ public class MapredCarbonInputFormat extends CarbonTableInputFormat<ArrayWritabl
         }
       }
     }
-    AbsoluteTableIdentifier absoluteTableIdentifier = AbsoluteTableIdentifier
-        .from(validInputPath, getDatabaseName(configuration), getTableName(configuration));
-    // read the schema file to get the absoluteTableIdentifier having the correct table id
-    // persisted in the schema
-    CarbonTable carbonTable = SchemaReader.readCarbonTableFromStore(absoluteTableIdentifier);
-    configuration.set(CARBON_TABLE, ObjectSerializationUtil.convertObjectToString(carbonTable));
-    setTableInfo(configuration, carbonTable.getTableInfo());
+    if (null != validInputPath) {
+      AbsoluteTableIdentifier absoluteTableIdentifier = AbsoluteTableIdentifier
+          .from(validInputPath, getDatabaseName(configuration), getTableName(configuration));
+      // read the schema file to get the absoluteTableIdentifier having the correct table id
+      // persisted in the schema
+      CarbonTable carbonTable = SchemaReader.readCarbonTableFromStore(absoluteTableIdentifier);
+      configuration.set(CARBON_TABLE, ObjectSerializationUtil.convertObjectToString(carbonTable));
+      setTableInfo(configuration, carbonTable.getTableInfo());
+    } else {
+      throw new InvalidPathException("No input paths specified in job");
+    }
   }
 
   private static CarbonTable getCarbonTable(Configuration configuration, String path)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataSplitManager.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataSplitManager.java b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataSplitManager.java
index 811393f..3a54b22 100755
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataSplitManager.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataSplitManager.java
@@ -67,20 +67,23 @@ public class CarbondataSplitManager implements ConnectorSplitManager {
         getColumnConstraints(layoutHandle.getConstraint());
 
     CarbonTableCacheModel cache = carbonTableReader.getCarbonCache(key);
-    Expression filters = PrestoFilterUtil.parseFilterExpression(layoutHandle.getConstraint());
-    try {
-      List<CarbonLocalInputSplit> splits = carbonTableReader.getInputSplits2(cache, filters, layoutHandle.getConstraint());
-
-      ImmutableList.Builder<ConnectorSplit> cSplits = ImmutableList.builder();
-      for (CarbonLocalInputSplit split : splits) {
-        cSplits.add(new CarbondataSplit(connectorId, tableHandle.getSchemaTableName(),
-            layoutHandle.getConstraint(), split, rebuildConstraints));
+    if (null != cache) {
+      Expression filters = PrestoFilterUtil.parseFilterExpression(layoutHandle.getConstraint());
+      try {
+        List<CarbonLocalInputSplit> splits = carbonTableReader.getInputSplits2(cache, filters,
+                layoutHandle.getConstraint());
+
+        ImmutableList.Builder<ConnectorSplit> cSplits = ImmutableList.builder();
+        for (CarbonLocalInputSplit split : splits) {
+          cSplits.add(new CarbondataSplit(connectorId, tableHandle.getSchemaTableName(),
+              layoutHandle.getConstraint(), split, rebuildConstraints));
+        }
+        return new FixedSplitSource(cSplits.build());
+      } catch (Exception ex) {
+        throw new RuntimeException(ex.getMessage(), ex);
       }
-      return new FixedSplitSource(cSplits.build());
-    } catch (Exception ex) {
-      throw new RuntimeException(ex.getMessage(), ex);
     }
-
+    return null;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonLocalInputSplit.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonLocalInputSplit.java b/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonLocalInputSplit.java
index 3c42d0a..2c6a810 100755
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonLocalInputSplit.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/impl/CarbonLocalInputSplit.java
@@ -115,15 +115,21 @@ public class CarbonLocalInputSplit {
 
   }
 
-  public static  CarbonInputSplit convertSplit(CarbonLocalInputSplit carbonLocalInputSplit) {
+  public static CarbonInputSplit convertSplit(CarbonLocalInputSplit carbonLocalInputSplit) {
     CarbonInputSplit inputSplit = new CarbonInputSplit(carbonLocalInputSplit.getSegmentId(), "0",
         new Path(carbonLocalInputSplit.getPath()), carbonLocalInputSplit.getStart(),
         carbonLocalInputSplit.getLength(), carbonLocalInputSplit.getLocations()
         .toArray(new String[carbonLocalInputSplit.getLocations().size()]),
-        carbonLocalInputSplit.getNumberOfBlocklets(), ColumnarFormatVersion.valueOf(carbonLocalInputSplit.getVersion()),
+        carbonLocalInputSplit.getNumberOfBlocklets(),
+        ColumnarFormatVersion.valueOf(carbonLocalInputSplit.getVersion()),
         carbonLocalInputSplit.getDeleteDeltaFiles());
     Gson gson = new Gson();
-    BlockletDetailInfo blockletDetailInfo = gson.fromJson(carbonLocalInputSplit.detailInfo, BlockletDetailInfo.class);
+    BlockletDetailInfo blockletDetailInfo =
+        gson.fromJson(carbonLocalInputSplit.detailInfo, BlockletDetailInfo.class);
+
+    if (null == blockletDetailInfo) {
+      throw new RuntimeException("Could not read blocklet details");
+    }
     try {
       blockletDetailInfo.readColumnSchema(blockletDetailInfo.getColumnSchemaBinary());
     } catch (IOException e) {
@@ -132,6 +138,4 @@ public class CarbonLocalInputSplit {
     inputSplit.setDetailInfo(blockletDetailInfo);
     return inputSplit;
   }
-
-
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/BooleanStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/BooleanStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/BooleanStreamReader.java
index 4507425..0b7206b 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/BooleanStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/BooleanStreamReader.java
@@ -62,10 +62,8 @@ public class BooleanStreamReader extends AbstractStreamReader {
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeBoolean(builder, byteToBoolean(streamData[i]));
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeBoolean(builder, byteToBoolean(streamData[i]));
       }
     }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7ef91645/integration/presto/src/main/java/org/apache/carbondata/presto/readers/DoubleStreamReader.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/DoubleStreamReader.java b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/DoubleStreamReader.java
index 23db769..3e7fc59 100644
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/readers/DoubleStreamReader.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/readers/DoubleStreamReader.java
@@ -59,7 +59,7 @@ public class DoubleStreamReader extends AbstractStreamReader {
       numberOfRows = batchSize;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
       if (columnVector != null) {
-        if(isDictionary) {
+        if (isDictionary) {
           populateDictionaryVector(type, numberOfRows, builder);
         } else {
           if (columnVector.anyNullsSet()) {
@@ -72,10 +72,8 @@ public class DoubleStreamReader extends AbstractStreamReader {
     } else {
       numberOfRows = streamData.length;
       builder = type.createBlockBuilder(new BlockBuilderStatus(), numberOfRows);
-      if (streamData != null) {
-        for (int i = 0; i < numberOfRows; i++) {
-          type.writeDouble(builder, (Double) streamData[i]);
-        }
+      for (int i = 0; i < numberOfRows; i++) {
+        type.writeDouble(builder, (Double) streamData[i]);
       }
     }
 


[22/50] [abbrv] carbondata git commit: [CARBONDATA-2481] Adding SDV for SDKwriter

Posted by gv...@apache.org.
[CARBONDATA-2481] Adding SDV for SDKwriter

Adding SDV test cases for the SDK writer to the spark-common-cluster-test suite

This closes #2308


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/6cc86db8
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/6cc86db8
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/6cc86db8

Branch: refs/heads/spark-2.3
Commit: 6cc86db8f9a245827b9bcf72e15884722154a616
Parents: cf666c1
Author: Indhumathi27 <in...@gmail.com>
Authored: Fri May 11 10:29:42 2018 +0530
Committer: kunal642 <ku...@gmail.com>
Committed: Thu May 24 17:13:38 2018 +0530

----------------------------------------------------------------------
 integration/spark-common-cluster-test/pom.xml   |  12 +
 .../sdv/generated/SDKwriterTestCase.scala       | 732 +++++++++++++++++++
 .../cluster/sdv/suite/SDVSuites.scala           |   1 +
 3 files changed, 745 insertions(+)
----------------------------------------------------------------------
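For readers scanning the diff, the flow the new SDV suite exercises is: build a CarbonWriter for CSV input against a local output path, write a few rows, then map that path as an external table from Spark SQL and query it. A condensed sketch of that writer flow, with an illustrative schema, path and row values (the full version lives in SDKwriterTestCase.buildTestData below):

    import org.apache.carbondata.sdk.file.{CarbonWriter, Schema}

    // Three-column schema matching the test data: name (string), age (int), height (double).
    val schemaJson = """[{"name":"string"},{"age":"int"},{"height":"double"}]"""
    val writerPath = "/tmp/WriterOutput1"  // illustrative output directory

    val writer = CarbonWriter.builder()
      .outputPath(writerPath)
      .isTransactionalTable(false)               // SDK output, no transactional segment metadata
      .sortBy(Array("name"))
      .uniqueIdentifier(System.currentTimeMillis)
      .buildWriterForCSVInput(Schema.parseJson(schemaJson))

    writer.write(Array[String]("abc0", "0", "0.0"))  // each row is a String[] in schema order
    writer.close()

    // The output directory can then be queried via:
    //   CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION '<writerPath>'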


http://git-wip-us.apache.org/repos/asf/carbondata/blob/6cc86db8/integration/spark-common-cluster-test/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/pom.xml b/integration/spark-common-cluster-test/pom.xml
index 44453b3..d8aecc2 100644
--- a/integration/spark-common-cluster-test/pom.xml
+++ b/integration/spark-common-cluster-test/pom.xml
@@ -68,6 +68,18 @@
       <version>${project.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.carbondata</groupId>
+      <artifactId>carbondata-store-sdk</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>tech.allegro.schema.json2avro</groupId>
+      <artifactId>converter</artifactId>
+      <version>0.2.5</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/6cc86db8/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
new file mode 100644
index 0000000..012091d
--- /dev/null
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SDKwriterTestCase.scala
@@ -0,0 +1,732 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.cluster.sdv.generated
+
+
+import java.util
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.common.util.QueryTest
+import org.scalatest.BeforeAndAfterEach
+import scala.collection.JavaConverters._
+import scala.collection.mutable
+
+import org.apache.avro
+import org.apache.commons.lang.CharEncoding
+import org.junit.Assert
+import tech.allegro.schema.json2avro.converter.JsonAvroConverter
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.filesystem.CarbonFile
+import org.apache.carbondata.core.datastore.impl.FileFactory
+import org.apache.carbondata.core.util.CarbonUtil
+import org.apache.carbondata.sdk.file.{AvroCarbonWriter, CarbonWriter, Schema}
+
+/**
+ * Test Class for SDKwriterTestcase to verify all scenarios
+ */
+
+class SDKwriterTestCase extends QueryTest with BeforeAndAfterEach {
+
+  var writerPath =
+    s"${ resourcesPath }" + "/SparkCarbonFileFormat/WriterOutput1/"
+
+  override def beforeEach: Unit = {
+    sql("DROP TABLE IF EXISTS sdkTable1")
+    sql("DROP TABLE IF EXISTS sdkTable2")
+    sql("DROP TABLE IF EXISTS table1")
+    cleanTestData()
+  }
+
+  override def afterEach(): Unit = {
+    sql("DROP TABLE IF EXISTS sdkTable1")
+    sql("DROP TABLE IF EXISTS sdkTable2")
+    sql("DROP TABLE IF EXISTS table1")
+    cleanTestData()
+  }
+
+  def cleanTestData() = {
+    FileFactory.deleteAllCarbonFilesOfDir(FileFactory.getCarbonFile(writerPath))
+  }
+
+  def buildTestDataSingleFile(): Any = {
+    buildTestData(3, false, null)
+  }
+
+  def buildTestDataWithBadRecordForce(writerPath: String): Any = {
+    var options = Map("bAd_RECords_action" -> "FORCE").asJava
+    buildTestData(3, false, options)
+  }
+
+  def buildTestDataWithBadRecordFail(writerPath: String): Any = {
+    var options = Map("bAd_RECords_action" -> "FAIL").asJava
+    buildTestData(15001, false, options)
+  }
+
+  def buildTestData(rows: Int,
+      persistSchema: Boolean,
+      options: util.Map[String, String]): Any = {
+    buildTestData(rows, persistSchema, options, List("name"), writerPath)
+  }
+
+  // prepare sdk writer output
+  def buildTestData(rows: Int,
+      persistSchema: Boolean,
+      options: util.Map[String, String],
+      sortColumns: List[String],
+      writerPath: String): Any = {
+    val schema = new StringBuilder()
+      .append("[ \n")
+      .append("   {\"name\":\"string\"},\n")
+      .append("   {\"age\":\"int\"},\n")
+      .append("   {\"height\":\"double\"}\n")
+      .append("]")
+      .toString()
+
+    try {
+      val builder = CarbonWriter.builder()
+      val writer =
+        if (persistSchema) {
+          builder.persistSchemaFile(true)
+          builder
+            .sortBy(sortColumns.toArray)
+            .outputPath(writerPath)
+            .isTransactionalTable(false)
+            .uniqueIdentifier(System.currentTimeMillis)
+            .buildWriterForCSVInput(Schema.parseJson(schema))
+        } else {
+          if (options != null) {
+            builder.outputPath(writerPath)
+              .isTransactionalTable(false)
+              .sortBy(sortColumns.toArray)
+              .uniqueIdentifier(
+                System.currentTimeMillis).withBlockSize(2).withLoadOptions(options)
+              .buildWriterForCSVInput(Schema.parseJson(schema))
+          } else {
+            builder.outputPath(writerPath)
+              .isTransactionalTable(false)
+              .sortBy(sortColumns.toArray)
+              .uniqueIdentifier(
+                System.currentTimeMillis).withBlockSize(2)
+              .buildWriterForCSVInput(Schema.parseJson(schema))
+          }
+        }
+      var i = 0
+      while (i < rows) {
+        if ((options != null) && (i < 3)) {
+          // writing a bad record
+          writer.write(Array[String]("abc" + i, String.valueOf(i.toDouble / 2), "abc"))
+        } else {
+          writer.write(Array[String]("abc" + i, String.valueOf(i), String.valueOf(i.toDouble / 2)))
+        }
+        i += 1
+      }
+      if (options != null) {
+        //Keep one valid record. else carbon data file will not generate
+        writer.write(Array[String]("abc" + i, String.valueOf(i), String.valueOf(i.toDouble / 2)))
+      }
+      writer.close()
+    } catch {
+      case ex: Exception => throw new RuntimeException(ex)
+
+      case _ => None
+    }
+  }
+
+  def buildTestDataWithBadRecordIgnore(writerPath: String): Any = {
+    var options = Map("bAd_RECords_action" -> "IGNORE").asJava
+    buildTestData(3, false, options)
+  }
+
+  def buildTestDataWithBadRecordRedirect(writerPath: String): Any = {
+    var options = Map("bAd_RECords_action" -> "REDIRECT").asJava
+    buildTestData(3, false, options)
+  }
+
+  def deleteFile(path: String, extension: String): Unit = {
+    val file: CarbonFile = FileFactory
+      .getCarbonFile(path, FileFactory.getFileType(path))
+
+    for (eachDir <- file.listFiles) {
+      if (!eachDir.isDirectory) {
+        if (eachDir.getName.endsWith(extension)) {
+          CarbonUtil.deleteFoldersAndFilesSilent(eachDir)
+        }
+      } else {
+        deleteFile(eachDir.getPath, extension)
+      }
+    }
+  }
+
+  test("test create External Table with WriterPath") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+  }
+
+  test("test create External Table with Comment") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable comment 'this is comment' STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+  }
+
+  test("test create External Table and test files written from sdk writer") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable(name string,age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+
+    checkAnswer(sql("select name from sdkTable"), Seq(Row("abc0"),
+      Row("abc1"),
+      Row("abc2")))
+
+    checkAnswer(sql("select age from sdkTable"), Seq(Row(0), Row(1), Row(2)))
+    checkAnswer(sql("select * from sdkTable where age > 1 and age < 8"),
+      Seq(Row("abc2", 2, 1.0)))
+
+    checkAnswer(sql("select * from sdkTable where name = 'abc2'"),
+      Seq(Row("abc2", 2, 1.0)))
+
+    checkAnswer(sql("select * from sdkTable where name like '%b%' limit 2"),
+      Seq(Row("abc0", 0, 0.0),
+        Row("abc1", 1, 0.5)))
+
+    checkAnswer(sql("select sum(age) from sdkTable where name like 'abc%'"), Seq(Row(3)))
+    checkAnswer(sql("select count(*) from sdkTable where name like 'abc%' "), Seq(Row(3)))
+    checkAnswer(sql("select count(*) from sdkTable"), Seq(Row(3)))
+
+  }
+
+  test("test create External Table and test insert into external table") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable(name string,age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql(s"""select count(*) from sdkTable where age = 1"""),
+      Seq(Row(1)))
+
+    sql("insert into sdktable select 'def0',1,5.5")
+    sql("insert into sdktable select 'def1',5,6.6")
+
+    checkAnswer(sql(s"""select count(*) from sdkTable where age = 1"""),
+      Seq(Row(2)))
+  }
+
+  test("test create External Table and test insert into normal table with different schema") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql("DROP TABLE IF EXISTS table1")
+
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    sql(
+      "create table if not exists table1 (name string, age int) STORED BY 'carbondata'")
+    sql("insert into table1 select * from sdkTable")
+    checkAnswer(sql("select * from table1"), Seq(Row("abc0", 0),
+      Row("abc1", 1),
+      Row("abc2", 2)))
+  }
+
+  test("test Insert into External Table from another External Table with Same Schema") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable1")
+    sql("DROP TABLE IF EXISTS sdkTable2")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable1(name string,age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable2(name string,age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    sql("insert into sdkTable1 select *from sdkTable2")
+    checkAnswer(sql("select count(*) from sdkTable1"), Seq(Row(6)))
+  }
+
+  test("test create External Table with Schema with partition, external table should " +
+       "ignore schema and partition") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable(name string) PARTITIONED BY (age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+  }
+
+  test("test External Table with insert overwrite") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql("DROP TABLE IF EXISTS table1")
+
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable(name string) PARTITIONED BY (age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+
+    sql(
+      "create table if not exists table1 (name string, age int, height double) STORED BY 'org" +
+      ".apache.carbondata.format'")
+    sql(s"""insert into table1 values ("aaaaa", 12, 20)""")
+
+    checkAnswer(sql(s"""select count(*) from sdkTable where age = 1"""),
+      Seq(Row(1)))
+
+    sql("insert overwrite table sdkTable select * from table1")
+
+    checkAnswer(sql(s"""select count(*) from sdkTable where age = 1"""),
+      Seq(Row(0)))
+  }
+
+  test("test create External Table with Table properties should ignore tblproperties") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable(name string,age int) STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' TBLPROPERTIES('sort_scope'='batch_sort') """.stripMargin)
+
+    checkExistence(sql("Describe formatted sdkTable "), false, "batch_sort")
+  }
+
+  test("Read sdk writer output file and test without carbondata and carbonindex files should fail")
+  {
+    buildTestDataSingleFile()
+    deleteFile(writerPath, CarbonCommonConstants.FACT_FILE_EXT)
+    deleteFile(writerPath, CarbonCommonConstants.UPDATE_INDEX_FILE_EXT)
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+
+    val exception = intercept[Exception] {
+      //data source file format
+      sql(
+        s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+           |'$writerPath' """.stripMargin)
+    }
+    assert(exception.getMessage()
+      .contains("Operation not allowed: Invalid table path provided:"))
+  }
+
+  test("test create External Table and test CTAS") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql("DROP TABLE IF EXISTS table1")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+
+    sql("create table table1 stored by 'carbondata' as select *from sdkTable")
+
+    checkAnswer(sql("select * from table1"), Seq(Row("abc0", 0, 0.0),
+      Row("abc1", 1, 0.5),
+      Row("abc2", 2, 1.0)))
+  }
+
+  test("test create External Table and test JOIN on External Tables") {
+    buildTestDataSingleFile()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql("DROP TABLE IF EXISTS sdkTable1")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable1 STORED BY
+         |'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable JOIN sdkTable1 on (sdkTable.age=sdkTable1.age)"),
+      Seq(Row("abc0", 0, 0.0, "abc0", 0, 0.0),
+        Row("abc1", 1, 0.5, "abc1", 1, 0.5),
+        Row("abc2", 2, 1.0, "abc2", 2, 1.0)))
+    checkAnswer(sql(
+      "select * from sdkTable LEFT OUTER JOIN sdkTable1 on (sdkTable.age=sdkTable1.age)"),
+      Seq(Row("abc0", 0, 0.0, "abc0", 0, 0.0),
+        Row("abc1", 1, 0.5, "abc1", 1, 0.5),
+        Row("abc2", 2, 1.0, "abc2", 2, 1.0)))
+    checkAnswer(sql(
+      "select * from sdkTable RIGHT OUTER JOIN sdkTable1 on (sdkTable.age=sdkTable1.age)"),
+      Seq(Row("abc0", 0, 0.0, "abc0", 0, 0.0),
+        Row("abc1", 1, 0.5, "abc1", 1, 0.5),
+        Row("abc2", 2, 1.0, "abc2", 2, 1.0)))
+  }
+
+  test("test create external table and test bad record") {
+    //1. Action = FORCE
+    buildTestDataWithBadRecordForce(writerPath)
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkTable"), Seq(
+      Row("abc0", null, null),
+      Row("abc1", null, null),
+      Row("abc2", null, null),
+      Row("abc3", 3, 1.5)))
+
+    sql("DROP TABLE sdkTable")
+    cleanTestData()
+
+    //2. Action = REDIRECT
+    buildTestDataWithBadRecordRedirect(writerPath)
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(
+      Row("abc3", 3, 1.5)))
+
+    sql("DROP TABLE sdkTable")
+    cleanTestData()
+
+    //3. Action = IGNORE
+    buildTestDataWithBadRecordIgnore(writerPath)
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkTable"), Seq(
+      Row("abc3", 3, 1.5)))
+
+  }
+
+  def buildAvroTestDataStructType(): Any = {
+    buildAvroTestDataStruct(3, null)
+  }
+
+  def buildAvroTestDataStruct(rows: Int,
+      options: util.Map[String, String]): Any = {
+
+    val mySchema =
+      """
+        |{"name": "address",
+        | "type": "record",
+        | "fields": [
+        |  { "name": "name", "type": "string"},
+        |  { "name": "age", "type": "int"},
+        |  { "name": "address",  "type": {
+        |    "type" : "record",  "name" : "my_address",
+        |        "fields" : [
+        |    {"name": "street", "type": "string"},
+        |    {"name": "city", "type": "string"}]}}
+        |]}
+      """.stripMargin
+
+    val json = """ {"name":"bob", "age":10, "address" : {"street":"abc", "city":"bang"}} """
+    WriteFilesWithAvroWriter(rows, mySchema, json)
+  }
+
+  def buildAvroTestDataBothStructArrayType(): Any = {
+    buildAvroTestDataStructWithArrayType(3, null)
+  }
+
+  def buildAvroTestDataStructWithArrayType(rows: Int,
+      options: util.Map[String, String]): Any = {
+
+    val mySchema =
+      """
+                     {
+                     |     "name": "address",
+                     |     "type": "record",
+                     |     "fields": [
+                     |     { "name": "name", "type": "string"},
+                     |     { "name": "age", "type": "int"},
+                     |     {
+                     |     "name": "address",
+                     |     "type": {
+                     |     "type" : "record",
+                     |     "name" : "my_address",
+                     |     "fields" : [
+                     |     {"name": "street", "type": "string"},
+                     |     {"name": "city", "type": "string"}
+                     |     ]}
+                     |     },
+                     |     {"name" :"doorNum",
+                     |     "type" : {
+                     |     "type" :"array",
+                     |     "items":{
+                     |     "name" :"EachdoorNums",
+                     |     "type" : "int",
+                     |     "default":-1
+                     |     }}
+                     |     }]}
+                     """.stripMargin
+
+    val json =
+      """ {"name":"bob", "age":10,
+        |"address" : {"street":"abc", "city":"bang"},
+        |"doorNum" : [1,2,3,4]}""".stripMargin
+    WriteFilesWithAvroWriter(rows, mySchema, json)
+  }
+
+  private def WriteFilesWithAvroWriter(rows: Int,
+      mySchema: String,
+      json: String): Unit = {
+    // conversion to GenericData.Record
+    val nn = new avro.Schema.Parser().parse(mySchema)
+    val converter = new JsonAvroConverter
+    val record = converter
+      .convertToGenericDataRecord(json.getBytes(CharEncoding.UTF_8), nn)
+
+    try {
+      val writer = CarbonWriter.builder
+        .outputPath(writerPath).isTransactionalTable(false)
+        .uniqueIdentifier(System.currentTimeMillis()).buildWriterForAvroInput(nn)
+      var i = 0
+      while (i < rows) {
+        writer.write(record)
+        i = i + 1
+      }
+      writer.close()
+    }
+    catch {
+      case e: Exception => {
+        e.printStackTrace()
+        Assert.fail(e.getMessage)
+      }
+    }
+  }
+
+  def buildAvroTestDataArrayOfStructType(): Any = {
+    buildAvroTestDataArrayOfStruct(3, null)
+  }
+
+  def buildAvroTestDataArrayOfStruct(rows: Int,
+      options: util.Map[String, String]): Any = {
+
+    val mySchema =
+      """ {
+        |	"name": "address",
+        |	"type": "record",
+        |	"fields": [
+        |		{
+        |			"name": "name",
+        |			"type": "string"
+        |		},
+        |		{
+        |			"name": "age",
+        |			"type": "int"
+        |		},
+        |		{
+        |			"name": "doorNum",
+        |			"type": {
+        |				"type": "array",
+        |				"items": {
+        |					"type": "record",
+        |					"name": "my_address",
+        |					"fields": [
+        |						{
+        |							"name": "street",
+        |							"type": "string"
+        |						},
+        |						{
+        |							"name": "city",
+        |							"type": "string"
+        |						}
+        |					]
+        |				}
+        |			}
+        |		}
+        |	]
+        |} """.stripMargin
+    val json =
+      """ {"name":"bob","age":10,"doorNum" :
+        |[{"street":"abc","city":"city1"},
+        |{"street":"def","city":"city2"},
+        |{"street":"ghi","city":"city3"},
+        |{"street":"jkl","city":"city4"}]} """.stripMargin
+    WriteFilesWithAvroWriter(rows, mySchema, json)
+  }
+
+  def buildAvroTestDataStructOfArrayType(): Any = {
+    buildAvroTestDataStructOfArray(3, null)
+  }
+
+  def buildAvroTestDataStructOfArray(rows: Int,
+      options: util.Map[String, String]): Any = {
+
+    val mySchema =
+      """ {
+        |	"name": "address",
+        |	"type": "record",
+        |	"fields": [
+        |		{
+        |			"name": "name",
+        |			"type": "string"
+        |		},
+        |		{
+        |			"name": "age",
+        |			"type": "int"
+        |		},
+        |		{
+        |			"name": "address",
+        |			"type": {
+        |				"type": "record",
+        |				"name": "my_address",
+        |				"fields": [
+        |					{
+        |						"name": "street",
+        |						"type": "string"
+        |					},
+        |					{
+        |						"name": "city",
+        |						"type": "string"
+        |					},
+        |					{
+        |						"name": "doorNum",
+        |						"type": {
+        |							"type": "array",
+        |							"items": {
+        |								"name": "EachdoorNums",
+        |								"type": "int",
+        |								"default": -1
+        |							}
+        |						}
+        |					}
+        |				]
+        |			}
+        |		}
+        |	]
+        |} """.stripMargin
+
+    val json =
+      """ {
+        |	"name": "bob",
+        |	"age": 10,
+        |	"address": {
+        |		"street": "abc",
+        |		"city": "bang",
+        |		"doorNum": [
+        |			1,
+        |			2,
+        |			3,
+        |			4
+        |		]
+        |	}
+        |} """.stripMargin
+    WriteFilesWithAvroWriter(rows, mySchema, json)
+  }
+
+  test("Read sdk writer Avro output Record Type for nontransactional table") {
+    buildAvroTestDataStructType()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql("select * from sdkTable"), Seq(
+      Row("bob", 10, Row("abc", "bang")),
+      Row("bob", 10, Row("abc", "bang")),
+      Row("bob", 10, Row("abc", "bang"))))
+
+  }
+
+  test("Read sdk writer Avro output with both Array and Struct Type for nontransactional table") {
+    buildAvroTestDataBothStructArrayType()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+    checkAnswer(sql("select * from sdkTable"), Seq(
+      Row("bob", 10, Row("abc", "bang"), mutable.WrappedArray.newBuilder[Int].+=(1, 2, 3, 4)),
+      Row("bob", 10, Row("abc", "bang"), mutable.WrappedArray.newBuilder[Int].+=(1, 2, 3, 4)),
+      Row("bob", 10, Row("abc", "bang"), mutable.WrappedArray.newBuilder[Int].+=(1, 2, 3, 4))))
+  }
+
+  test("Read sdk writer Avro output with Array of struct for external table") {
+    buildAvroTestDataArrayOfStructType()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql(s"""select count(*) from sdkTable"""),
+      Seq(Row(3)))
+  }
+
+  test("Read sdk writer Avro output with struct of Array for nontransactional table") {
+    buildAvroTestDataStructOfArrayType()
+    assert(FileFactory.getCarbonFile(writerPath).exists())
+    sql("DROP TABLE IF EXISTS sdkTable")
+    sql(
+      s"""CREATE EXTERNAL TABLE sdkTable STORED BY 'carbondata' LOCATION
+         |'$writerPath' """.stripMargin)
+
+    checkAnswer(sql(s"""select count(*) from sdkTable"""),
+      Seq(Row(3)))
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/6cc86db8/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
index 2f7d98b..c5aceaa 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/suite/SDVSuites.scala
@@ -150,6 +150,7 @@ class SDVSuites3 extends Suites with BeforeAndAfterAll {
                     new LuceneTestCase ::
                     new TimeSeriesPreAggregateTestCase :: 
                     new TestPartitionWithGlobalSort ::
+                    new SDKwriterTestCase ::
                     new SetParameterTestCase ::
                     new PartitionWithPreAggregateTestCase :: Nil
 


[43/50] [abbrv] carbondata git commit: [CARBONDATA-2566] Optimize CarbonReaderExample

Posted by gv...@apache.org.
[CARBONDATA-2566] Optimize CarbonReaderExample

Optimize CarbonReaderExample
1. Add different data types, including date and timestamp
2. Update the doc
3. Invoke
Schema schema = CarbonSchemaReader
    .readSchemaInSchemaFile(dataFiles[0].getAbsolutePath())
    .asOriginOrder();

This closes #2356
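
For context, a minimal sketch of the pattern this commit adopts (assuming the
readSchemaInIndexFile variant used in the example diff below; the class name,
output path and lambda-based file filter here are illustrative only and not
part of the commit):

import java.io.File;
import org.apache.carbondata.sdk.file.CarbonReader;
import org.apache.carbondata.sdk.file.CarbonSchemaReader;
import org.apache.carbondata.sdk.file.Schema;

public class SchemaDrivenReadSketch {
  public static void main(String[] args) throws Exception {
    String path = "./testWriteFiles";   // hypothetical SDK writer output directory
    // pick any carbonindex file; every file of the table carries the same schema
    File[] indexFiles = new File(path)
        .listFiles((dir, name) -> name != null && name.endsWith("carbonindex"));
    Schema schema = CarbonSchemaReader
        .readSchemaInIndexFile(indexFiles[0].getAbsolutePath())
        .asOriginOrder();
    // turn the schema into a projection covering every column in create order
    String[] projection = new String[schema.getFields().length];
    for (int i = 0; i < schema.getFields().length; i++) {
      projection[i] = schema.getFields()[i].getFieldName();
    }
    CarbonReader reader = CarbonReader.builder(path, "_temp")
        .projection(projection)
        .build();
    while (reader.hasNext()) {
      Object[] row = (Object[]) reader.readNextRow();
      System.out.println(java.util.Arrays.toString(row));
    }
    reader.close();
  }
}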


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/56bf4e42
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/56bf4e42
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/56bf4e42

Branch: refs/heads/spark-2.3
Commit: 56bf4e420747ddeb800fc7f004a6ec0d9f5e7d3f
Parents: 9469e6b
Author: xubo245 <xu...@huawei.com>
Authored: Thu May 31 15:52:57 2018 +0800
Committer: kumarvishal09 <ku...@gmail.com>
Committed: Fri Jun 1 16:33:28 2018 +0530

----------------------------------------------------------------------
 docs/sdk-guide.md                               | 15 ++--
 .../examples/sdk/CarbonReaderExample.java       | 92 +++++++++++++++++---
 2 files changed, 89 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/56bf4e42/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index ec70919..2371b33 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -415,17 +415,22 @@ External client can make use of this reader to read CarbonData files without Car
     String path = "./testWriteFiles";
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
-        .projection(new String[]{"name", "age"})
+        .projection(new String[]{"stringField", "shortField", "intField", "longField", 
+                "doubleField", "boolField", "dateField", "timeField", "decimalField"})
         .build();
 
     // 2. Read data
+    long day = 24L * 3600 * 1000;
     int i = 0;
     while (reader.hasNext()) {
-      Object[] row = (Object[]) reader.readNextRow();
-      System.out.println(row[0] + "\t" + row[1]);
-      i++;
+        Object[] row = (Object[]) reader.readNextRow();
+        System.out.println(String.format("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t",
+            i, row[0], row[1], row[2], row[3], row[4], row[5],
+            new Date((day * ((int) row[6]))), new Timestamp((long) row[7] / 1000), row[8]
+        ));
+        i++;
     }
-    
+
     // 3. Close this reader
     reader.close();
 ```

http://git-wip-us.apache.org/repos/asf/carbondata/blob/56bf4e42/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
index d7886c0..8d3ff0d 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
@@ -18,16 +18,19 @@
 package org.apache.carbondata.examples.sdk;
 
 import java.io.File;
+import java.io.FilenameFilter;
+import java.sql.Date;
+import java.sql.Timestamp;
 
 import org.apache.commons.io.FileUtils;
 
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.sdk.file.CarbonReader;
+import org.apache.carbondata.sdk.file.CarbonSchemaReader;
 import org.apache.carbondata.sdk.file.CarbonWriter;
 import org.apache.carbondata.sdk.file.Field;
 import org.apache.carbondata.sdk.file.Schema;
 
-
 /**
  * Example fo CarbonReader with close method
  * After readNextRow of CarbonReader, User should close the reader,
@@ -39,36 +42,99 @@ public class CarbonReaderExample {
         try {
             FileUtils.deleteDirectory(new File(path));
 
-            Field[] fields = new Field[2];
-            fields[0] = new Field("name", DataTypes.STRING);
-            fields[1] = new Field("age", DataTypes.INT);
+            Field[] fields = new Field[9];
+            fields[0] = new Field("stringField", DataTypes.STRING);
+            fields[1] = new Field("shortField", DataTypes.SHORT);
+            fields[2] = new Field("intField", DataTypes.INT);
+            fields[3] = new Field("longField", DataTypes.LONG);
+            fields[4] = new Field("doubleField", DataTypes.DOUBLE);
+            fields[5] = new Field("boolField", DataTypes.BOOLEAN);
+            fields[6] = new Field("dateField", DataTypes.DATE);
+            fields[7] = new Field("timeField", DataTypes.TIMESTAMP);
+            fields[8] = new Field("decimalField", DataTypes.createDecimalType(8, 2));
 
             CarbonWriter writer = CarbonWriter.builder()
-                    .outputPath(path)
-                    .persistSchemaFile(true)
-                    .buildWriterForCSVInput(new Schema(fields));
+                .outputPath(path)
+                .buildWriterForCSVInput(new Schema(fields));
 
             for (int i = 0; i < 10; i++) {
-                writer.write(new String[]{"robot" + (i % 10), String.valueOf(i)});
+                String[] row2 = new String[]{
+                    "robot" + (i % 10),
+                    String.valueOf(i),
+                    String.valueOf(i),
+                    String.valueOf(Long.MAX_VALUE - i),
+                    String.valueOf((double) i / 2),
+                    String.valueOf(true),
+                    "2019-03-02",
+                    "2019-02-12 03:03:34",
+                    "12.345"
+                };
+                writer.write(row2);
             }
             writer.close();
 
+            File[] dataFiles = new File(path).listFiles(new FilenameFilter() {
+                @Override
+                public boolean accept(File dir, String name) {
+                    if (name == null) {
+                        return false;
+                    }
+                    return name.endsWith("carbonindex");
+                }
+            });
+            if (dataFiles == null || dataFiles.length < 1) {
+                throw new RuntimeException("Carbon index file not exists.");
+            }
+            Schema schema = CarbonSchemaReader
+                .readSchemaInIndexFile(dataFiles[0].getAbsolutePath())
+                .asOriginOrder();
+            // Transform the schema
+            String[] strings = new String[schema.getFields().length];
+            for (int i = 0; i < schema.getFields().length; i++) {
+                strings[i] = (schema.getFields())[i].getFieldName();
+            }
+
             // Read data
             CarbonReader reader = CarbonReader
-                    .builder(path, "_temp")
-                    .projection(new String[]{"name", "age"})
-                    .build();
+                .builder(path, "_temp")
+                .projection(strings)
+                .build();
 
             System.out.println("\nData:");
+            long day = 24L * 3600 * 1000;
+            int i = 0;
             while (reader.hasNext()) {
                 Object[] row = (Object[]) reader.readNextRow();
-                System.out.println(row[0] + " " + row[1]);
+                System.out.println(String.format("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t",
+                    i, row[0], row[1], row[2], row[3], row[4], row[5],
+                    new Date((day * ((int) row[6]))), new Timestamp((long) row[7] / 1000), row[8]
+                ));
+                i++;
+            }
+            System.out.println("\nFinished");
+
+            // Read data
+            CarbonReader reader2 = CarbonReader
+                .builder(path, "_temp")
+                .projectAllColumns()
+                .build();
+
+            System.out.println("\nData:");
+            i = 0;
+            while (reader2.hasNext()) {
+              Object[] row = (Object[]) reader2.readNextRow();
+              System.out.println(String.format("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t",
+                  i, row[0], new Date((day * ((int) row[1]))), new Timestamp((long) row[2] / 1000),
+                  row[3], row[4], row[5], row[6], row[7], row[8]
+              ));
+              i++;
             }
             System.out.println("\nFinished");
             reader.close();
             FileUtils.deleteDirectory(new File(path));
-        } catch (Exception e) {
+        } catch (Throwable e) {
             e.printStackTrace();
+            System.out.println(e.getMessage());
         }
     }
 }


[48/50] [abbrv] carbondata git commit: [CARBONDATA-2557] [CARBONDATA-2472] [CARBONDATA-2570] Improve Carbon Reader performance on S3 and fixed datamap clear issue in reader

Posted by gv...@apache.org.
[CARBONDATA-2557] [CARBONDATA-2472] [CARBONDATA-2570] Improve Carbon Reader performance on S3 and fixed datamap clear issue in reader

[CARBONDATA-2557] [CARBONDATA-2472] Problem: CarbonReaderBuilder.build() is slow on S3; it takes around 8 seconds to finish build().
Solution: S3 is slow for the listFiles, open, FileExist and getCarbonFile operations, so list down all the calls to those APIs in the reader flow and remove the redundant checks.

[CARBONDATA-2570] Problem: A second Carbon SDK reader instance has an issue in the cluster test.
Solution: The blocklet datamaps of the first reader are not cleared properly in the cluster, so the API used to clear the blocklet datamap needs to change.

So change
DataMapStoreManager.getInstance().getDefaultDataMap(queryModel.getTable()).clear();
to
DataMapStoreManager.getInstance().clearDataMaps(queryModel.getTable().getAbsoluteTableIdentifier());

This closes #2345
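
As a minimal sketch of that change (the enclosing reader class, its queryModel
field and the rest of close() are assumed here and not shown in this message):

@Override
public void close() throws IOException {
  // clear every datamap registered for the table's identifier, not only the
  // default blocklet datamap, so a second SDK reader instance in the cluster
  // does not reuse stale blocklet datamaps
  DataMapStoreManager.getInstance()
      .clearDataMaps(queryModel.getTable().getAbsoluteTableIdentifier());
  // ... remaining close logic unchanged ...
}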


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/5f68a792
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/5f68a792
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/5f68a792

Branch: refs/heads/spark-2.3
Commit: 5f68a792f2e83d15379740f715cf05d7ae9aaa05
Parents: 2f23486
Author: ajantha-bhat <aj...@gmail.com>
Authored: Sun May 27 22:49:23 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Tue Jun 5 16:23:27 2018 +0530

----------------------------------------------------------------------
 .../core/datamap/dev/CacheableDataMap.java      |   6 +-
 .../core/datastore/SegmentTaskIndexStore.java   |   2 +-
 .../indexstore/BlockletDataMapIndexStore.java   |  84 +++++++------
 .../TableBlockIndexUniqueIdentifierWrapper.java |  52 ++++++++
 .../blockletindex/BlockletDataMapFactory.java   | 122 ++++++++-----------
 .../blockletindex/SegmentIndexFileStore.java    |  15 +++
 .../core/metadata/schema/table/CarbonTable.java |  60 ++++-----
 .../LatestFilesReadCommittedScope.java          |  19 +--
 .../SegmentUpdateStatusManager.java             |  15 ++-
 .../core/util/BlockletDataMapUtil.java          |  50 +++++++-
 .../apache/carbondata/core/util/CarbonUtil.java |  30 +++++
 .../TestBlockletDataMapFactory.java             |  13 +-
 docs/sdk-guide.md                               |  10 --
 .../examples/sdk/CarbonReaderExample.java       |   1 -
 .../carbondata/hadoop/CarbonRecordReader.java   |   3 +-
 .../hadoop/api/CarbonFileInputFormat.java       |  97 ++++-----------
 .../hadoop/api/CarbonInputFormat.java           |  24 ++++
 ...FileInputFormatWithExternalCarbonTable.scala |   2 +-
 ...tCreateTableUsingSparkCarbonFileFormat.scala |   2 +-
 .../TestNonTransactionalCarbonTable.scala       |  11 +-
 .../sdk/file/CarbonReaderBuilder.java           |  51 ++------
 .../carbondata/sdk/file/CarbonReaderTest.java   |   4 +-
 22 files changed, 375 insertions(+), 298 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/core/src/main/java/org/apache/carbondata/core/datamap/dev/CacheableDataMap.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/CacheableDataMap.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/CacheableDataMap.java
index dba0840..e292c60 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/CacheableDataMap.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/CacheableDataMap.java
@@ -22,7 +22,7 @@ import java.util.List;
 
 import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.indexstore.BlockletDataMapIndexWrapper;
-import org.apache.carbondata.core.indexstore.TableBlockIndexUniqueIdentifier;
+import org.apache.carbondata.core.indexstore.TableBlockIndexUniqueIdentifierWrapper;
 import org.apache.carbondata.core.memory.MemoryException;
 
 /**
@@ -33,10 +33,10 @@ public interface CacheableDataMap {
   /**
    * Add the blockletDataMapIndexWrapper to cache for key tableBlockIndexUniqueIdentifier
    *
-   * @param tableBlockIndexUniqueIdentifier
+   * @param tableBlockIndexUniqueIdentifierWrapper
    * @param blockletDataMapIndexWrapper
    */
-  void cache(TableBlockIndexUniqueIdentifier tableBlockIndexUniqueIdentifier,
+  void cache(TableBlockIndexUniqueIdentifierWrapper tableBlockIndexUniqueIdentifierWrapper,
       BlockletDataMapIndexWrapper blockletDataMapIndexWrapper) throws IOException, MemoryException;
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java
index d325f21..c642091 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java
@@ -91,7 +91,7 @@ public class SegmentTaskIndexStore
       segmentTaskIndexWrapper =
           loadAndGetTaskIdToSegmentsMap(
               tableSegmentUniqueIdentifier.getSegmentToTableBlocksInfos(),
-              CarbonTable.buildFromTablePath("name", "path", false),
+              CarbonTable.buildDummyTable("path"),
               tableSegmentUniqueIdentifier);
     } catch (IndexBuilderException e) {
       throw new IOException(e.getMessage(), e);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java
index db49976..71a9b5a 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/BlockletDataMapIndexStore.java
@@ -41,7 +41,7 @@ import org.apache.carbondata.core.util.BlockletDataMapUtil;
  * blocks
  */
 public class BlockletDataMapIndexStore
-    implements Cache<TableBlockIndexUniqueIdentifier, BlockletDataMapIndexWrapper> {
+    implements Cache<TableBlockIndexUniqueIdentifierWrapper, BlockletDataMapIndexWrapper> {
   private static final LogService LOGGER =
       LogServiceFactory.getLogService(BlockletDataMapIndexStore.class.getName());
   /**
@@ -68,8 +68,10 @@ public class BlockletDataMapIndexStore
   }
 
   @Override
-  public BlockletDataMapIndexWrapper get(TableBlockIndexUniqueIdentifier identifier)
+  public BlockletDataMapIndexWrapper get(TableBlockIndexUniqueIdentifierWrapper identifierWrapper)
       throws IOException {
+    TableBlockIndexUniqueIdentifier identifier =
+        identifierWrapper.getTableBlockIndexUniqueIdentifier();
     String lruCacheKey = identifier.getUniqueTableSegmentIdentifier();
     BlockletDataMapIndexWrapper blockletDataMapIndexWrapper =
         (BlockletDataMapIndexWrapper) lruCache.get(lruCacheKey);
@@ -84,7 +86,7 @@ public class BlockletDataMapIndexStore
         // if the identifier is not a merge file we can directly load the datamaps
         if (identifier.getMergeIndexFileName() == null) {
           Map<String, BlockMetaInfo> blockMetaInfoMap = BlockletDataMapUtil
-              .getBlockMetaInfoMap(identifier, indexFileStore, filesRead,
+              .getBlockMetaInfoMap(identifierWrapper, indexFileStore, filesRead,
                   carbonDataFileBlockMetaInfoMapping);
           BlockletDataMap blockletDataMap =
               loadAndGetDataMap(identifier, indexFileStore, blockMetaInfoMap);
@@ -96,9 +98,10 @@ public class BlockletDataMapIndexStore
               BlockletDataMapUtil.getIndexFileIdentifiersFromMergeFile(identifier, indexFileStore);
           for (TableBlockIndexUniqueIdentifier blockIndexUniqueIdentifier :
               tableBlockIndexUniqueIdentifiers) {
-            Map<String, BlockMetaInfo> blockMetaInfoMap = BlockletDataMapUtil
-                .getBlockMetaInfoMap(blockIndexUniqueIdentifier, indexFileStore, filesRead,
-                    carbonDataFileBlockMetaInfoMapping);
+            Map<String, BlockMetaInfo> blockMetaInfoMap = BlockletDataMapUtil.getBlockMetaInfoMap(
+                new TableBlockIndexUniqueIdentifierWrapper(blockIndexUniqueIdentifier,
+                    identifierWrapper.getCarbonTable()), indexFileStore, filesRead,
+                carbonDataFileBlockMetaInfoMapping);
             BlockletDataMap blockletDataMap =
                 loadAndGetDataMap(blockIndexUniqueIdentifier, indexFileStore, blockMetaInfoMap);
             dataMaps.add(blockletDataMap);
@@ -119,26 +122,28 @@ public class BlockletDataMapIndexStore
     return blockletDataMapIndexWrapper;
   }
 
-  @Override
-  public List<BlockletDataMapIndexWrapper> getAll(
-      List<TableBlockIndexUniqueIdentifier> tableSegmentUniqueIdentifiers) throws IOException {
+  @Override public List<BlockletDataMapIndexWrapper> getAll(
+      List<TableBlockIndexUniqueIdentifierWrapper> tableSegmentUniqueIdentifiers)
+      throws IOException {
     List<BlockletDataMapIndexWrapper> blockletDataMapIndexWrappers =
         new ArrayList<>(tableSegmentUniqueIdentifiers.size());
-    List<TableBlockIndexUniqueIdentifier> missedIdentifiers = new ArrayList<>();
+    List<TableBlockIndexUniqueIdentifierWrapper> missedIdentifiersWrapper = new ArrayList<>();
     BlockletDataMapIndexWrapper blockletDataMapIndexWrapper = null;
     // Get the datamaps for each indexfile from cache.
     try {
-      for (TableBlockIndexUniqueIdentifier identifier : tableSegmentUniqueIdentifiers) {
-        BlockletDataMapIndexWrapper dataMapIndexWrapper = getIfPresent(identifier);
+      for (TableBlockIndexUniqueIdentifierWrapper
+               identifierWrapper : tableSegmentUniqueIdentifiers) {
+        BlockletDataMapIndexWrapper dataMapIndexWrapper =
+            getIfPresent(identifierWrapper);
         if (dataMapIndexWrapper != null) {
           blockletDataMapIndexWrappers.add(dataMapIndexWrapper);
         } else {
-          missedIdentifiers.add(identifier);
+          missedIdentifiersWrapper.add(identifierWrapper);
         }
       }
-      if (missedIdentifiers.size() > 0) {
-        for (TableBlockIndexUniqueIdentifier identifier : missedIdentifiers) {
-          blockletDataMapIndexWrapper = get(identifier);
+      if (missedIdentifiersWrapper.size() > 0) {
+        for (TableBlockIndexUniqueIdentifierWrapper identifierWrapper : missedIdentifiersWrapper) {
+          blockletDataMapIndexWrapper = get(identifierWrapper);
           blockletDataMapIndexWrappers.add(blockletDataMapIndexWrapper);
         }
       }
@@ -151,37 +156,40 @@ public class BlockletDataMapIndexStore
       }
       throw new IOException("Problem in loading segment blocks.", e);
     }
+
     return blockletDataMapIndexWrappers;
   }
 
   /**
    * returns the SegmentTaskIndexWrapper
    *
-   * @param tableSegmentUniqueIdentifier
+   * @param tableSegmentUniqueIdentifierWrapper
    * @return
    */
-  @Override
-  public BlockletDataMapIndexWrapper getIfPresent(
-      TableBlockIndexUniqueIdentifier tableSegmentUniqueIdentifier) {
+  @Override public BlockletDataMapIndexWrapper getIfPresent(
+      TableBlockIndexUniqueIdentifierWrapper tableSegmentUniqueIdentifierWrapper) {
     return (BlockletDataMapIndexWrapper) lruCache.get(
-        tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
+        tableSegmentUniqueIdentifierWrapper.getTableBlockIndexUniqueIdentifier()
+            .getUniqueTableSegmentIdentifier());
   }
 
   /**
    * method invalidate the segment cache for segment
    *
-   * @param tableSegmentUniqueIdentifier
+   * @param tableSegmentUniqueIdentifierWrapper
    */
-  @Override
-  public void invalidate(TableBlockIndexUniqueIdentifier tableSegmentUniqueIdentifier) {
-    lruCache.remove(tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
+  @Override public void invalidate(
+      TableBlockIndexUniqueIdentifierWrapper tableSegmentUniqueIdentifierWrapper) {
+    lruCache.remove(tableSegmentUniqueIdentifierWrapper.getTableBlockIndexUniqueIdentifier()
+        .getUniqueTableSegmentIdentifier());
   }
 
   @Override
-  public void put(TableBlockIndexUniqueIdentifier tableBlockIndexUniqueIdentifier,
+  public void put(TableBlockIndexUniqueIdentifierWrapper tableBlockIndexUniqueIdentifierWrapper,
       BlockletDataMapIndexWrapper wrapper) throws IOException, MemoryException {
     String uniqueTableSegmentIdentifier =
-        tableBlockIndexUniqueIdentifier.getUniqueTableSegmentIdentifier();
+        tableBlockIndexUniqueIdentifierWrapper.getTableBlockIndexUniqueIdentifier()
+            .getUniqueTableSegmentIdentifier();
     Object lock = segmentLockMap.get(uniqueTableSegmentIdentifier);
     if (lock == null) {
       lock = addAndGetSegmentLock(uniqueTableSegmentIdentifier);
@@ -190,16 +198,16 @@ public class BlockletDataMapIndexStore
     // as in that case clearing unsafe memory need to be taken card. If at all datamap entry
     // in the cache need to be overwritten then use the invalidate interface
     // and then use the put interface
-    if (null == getIfPresent(tableBlockIndexUniqueIdentifier)) {
+    if (null == getIfPresent(tableBlockIndexUniqueIdentifierWrapper)) {
       synchronized (lock) {
-        if (null == getIfPresent(tableBlockIndexUniqueIdentifier)) {
+        if (null == getIfPresent(tableBlockIndexUniqueIdentifierWrapper)) {
           List<BlockletDataMap> dataMaps = wrapper.getDataMaps();
           try {
             for (BlockletDataMap blockletDataMap: dataMaps) {
               blockletDataMap.convertToUnsafeDMStore();
             }
-            lruCache.put(tableBlockIndexUniqueIdentifier.getUniqueTableSegmentIdentifier(), wrapper,
-                wrapper.getMemorySize());
+            lruCache.put(tableBlockIndexUniqueIdentifierWrapper.getTableBlockIndexUniqueIdentifier()
+                .getUniqueTableSegmentIdentifier(), wrapper, wrapper.getMemorySize());
           } catch (Throwable e) {
             // clear all the memory acquired by data map in case of any failure
             for (DataMap blockletDataMap : dataMaps) {
@@ -264,14 +272,14 @@ public class BlockletDataMapIndexStore
   /**
    * The method clears the access count of table segments
    *
-   * @param tableSegmentUniqueIdentifiers
+   * @param tableSegmentUniqueIdentifiersWrapper
    */
-  @Override
-  public void clearAccessCount(
-      List<TableBlockIndexUniqueIdentifier> tableSegmentUniqueIdentifiers) {
-    for (TableBlockIndexUniqueIdentifier identifier : tableSegmentUniqueIdentifiers) {
-      BlockletDataMap cacheable =
-          (BlockletDataMap) lruCache.get(identifier.getUniqueTableSegmentIdentifier());
+  @Override public void clearAccessCount(
+      List<TableBlockIndexUniqueIdentifierWrapper> tableSegmentUniqueIdentifiersWrapper) {
+    for (TableBlockIndexUniqueIdentifierWrapper
+             identifierWrapper : tableSegmentUniqueIdentifiersWrapper) {
+      BlockletDataMap cacheable = (BlockletDataMap) lruCache.get(
+          identifierWrapper.getTableBlockIndexUniqueIdentifier().getUniqueTableSegmentIdentifier());
       cacheable.clear();
     }
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/core/src/main/java/org/apache/carbondata/core/indexstore/TableBlockIndexUniqueIdentifierWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/TableBlockIndexUniqueIdentifierWrapper.java b/core/src/main/java/org/apache/carbondata/core/indexstore/TableBlockIndexUniqueIdentifierWrapper.java
new file mode 100644
index 0000000..3411397
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/TableBlockIndexUniqueIdentifierWrapper.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.indexstore;
+
+import java.io.Serializable;
+
+import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
+
+/**
+ * Class holds reference to TableBlockIndexUniqueIdentifier and carbonTable related info
+ * This is just a wrapper passed between methods like a context, This object must never be cached.
+ *
+ */
+public class TableBlockIndexUniqueIdentifierWrapper implements Serializable {
+
+  private static final long serialVersionUID = 1L;
+
+  // holds the reference to tableBlockIndexUniqueIdentifier
+  private TableBlockIndexUniqueIdentifier tableBlockIndexUniqueIdentifier;
+
+  // holds the reference to CarbonTable
+  private CarbonTable carbonTable;
+
+  public TableBlockIndexUniqueIdentifierWrapper(
+      TableBlockIndexUniqueIdentifier tableBlockIndexUniqueIdentifier, CarbonTable carbonTable) {
+    this.tableBlockIndexUniqueIdentifier = tableBlockIndexUniqueIdentifier;
+    this.carbonTable = carbonTable;
+  }
+
+  public TableBlockIndexUniqueIdentifier getTableBlockIndexUniqueIdentifier() {
+    return tableBlockIndexUniqueIdentifier;
+  }
+
+  public CarbonTable getCarbonTable() {
+    return carbonTable;
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
index 318fc6e..c434e2e 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataMapFactory.java
@@ -44,16 +44,12 @@ import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
 import org.apache.carbondata.core.indexstore.PartitionSpec;
 import org.apache.carbondata.core.indexstore.SegmentPropertiesFetcher;
 import org.apache.carbondata.core.indexstore.TableBlockIndexUniqueIdentifier;
+import org.apache.carbondata.core.indexstore.TableBlockIndexUniqueIdentifierWrapper;
 import org.apache.carbondata.core.memory.MemoryException;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
-import org.apache.carbondata.core.metadata.converter.SchemaConverter;
-import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.metadata.schema.table.DataMapSchema;
-import org.apache.carbondata.core.metadata.schema.table.TableInfo;
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.util.BlockletDataMapUtil;
-import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 import org.apache.carbondata.events.Event;
 
@@ -81,7 +77,7 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
   // segmentId -> list of index file
   private Map<String, Set<TableBlockIndexUniqueIdentifier>> segmentMap = new ConcurrentHashMap<>();
 
-  private Cache<TableBlockIndexUniqueIdentifier, BlockletDataMapIndexWrapper> cache;
+  private Cache<TableBlockIndexUniqueIdentifierWrapper, BlockletDataMapIndexWrapper> cache;
 
   public BlockletDataMapFactory(CarbonTable carbonTable, DataMapSchema dataMapSchema) {
     super(carbonTable, dataMapSchema);
@@ -104,11 +100,15 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
     List<CoarseGrainDataMap> dataMaps = new ArrayList<>();
     Set<TableBlockIndexUniqueIdentifier> identifiers =
         getTableBlockIndexUniqueIdentifiers(segment);
-    List<TableBlockIndexUniqueIdentifier> tableBlockIndexUniqueIdentifiers =
+    List<TableBlockIndexUniqueIdentifierWrapper> tableBlockIndexUniqueIdentifierWrappers =
         new ArrayList<>(identifiers.size());
-    tableBlockIndexUniqueIdentifiers.addAll(identifiers);
+    for (TableBlockIndexUniqueIdentifier tableBlockIndexUniqueIdentifier : identifiers) {
+      tableBlockIndexUniqueIdentifierWrappers.add(
+          new TableBlockIndexUniqueIdentifierWrapper(tableBlockIndexUniqueIdentifier,
+              this.getCarbonTable()));
+    }
     List<BlockletDataMapIndexWrapper> blockletDataMapIndexWrappers =
-        cache.getAll(tableBlockIndexUniqueIdentifiers);
+        cache.getAll(tableBlockIndexUniqueIdentifierWrappers);
     for (BlockletDataMapIndexWrapper wrapper : blockletDataMapIndexWrappers) {
       dataMaps.addAll(wrapper.getDataMaps());
     }
@@ -120,12 +120,6 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
     Set<TableBlockIndexUniqueIdentifier> tableBlockIndexUniqueIdentifiers =
         segmentMap.get(segment.getSegmentNo());
     if (tableBlockIndexUniqueIdentifiers == null) {
-      CarbonTable carbonTable = this.getCarbonTable();
-      if (!carbonTable.getTableInfo().isTransactionalTable()) {
-        // For NonTransactional table, compare the schema of all index files with inferred schema.
-        // If there is a mismatch throw exception. As all files must be of same schema.
-        validateSchemaForNewTranscationalTableFiles(segment, carbonTable);
-      }
       tableBlockIndexUniqueIdentifiers =
           BlockletDataMapUtil.getTableBlockUniqueIdentifiers(segment);
       segmentMap.put(segment.getSegmentNo(), tableBlockIndexUniqueIdentifiers);
@@ -133,46 +127,6 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
     return tableBlockIndexUniqueIdentifiers;
   }
 
-  private void validateSchemaForNewTranscationalTableFiles(Segment segment, CarbonTable carbonTable)
-      throws IOException {
-    SchemaConverter schemaConverter = new ThriftWrapperSchemaConverterImpl();
-    Map<String, String> indexFiles = segment.getCommittedIndexFile();
-    for (Map.Entry<String, String> indexFileEntry : indexFiles.entrySet()) {
-      Path indexFile = new Path(indexFileEntry.getKey());
-      org.apache.carbondata.format.TableInfo tableInfo = CarbonUtil.inferSchemaFromIndexFile(
-          indexFile.toString(), carbonTable.getTableName());
-      TableInfo wrapperTableInfo = schemaConverter.fromExternalToWrapperTableInfo(
-          tableInfo, identifier.getDatabaseName(),
-          identifier.getTableName(),
-          identifier.getTablePath());
-      List<ColumnSchema> indexFileColumnList =
-          wrapperTableInfo.getFactTable().getListOfColumns();
-      List<ColumnSchema> tableColumnList =
-          carbonTable.getTableInfo().getFactTable().getListOfColumns();
-      if (!isSameColumnSchemaList(indexFileColumnList, tableColumnList)) {
-        LOG.error("Schema of " + indexFile.getName()
-            + " doesn't match with the table's schema");
-        throw new IOException("All the files doesn't have same schema. "
-            + "Unsupported operation on nonTransactional table. Check logs.");
-      }
-    }
-  }
-
-  private boolean isSameColumnSchemaList(List<ColumnSchema> indexFileColumnList,
-      List<ColumnSchema> tableColumnList) {
-    if (indexFileColumnList.size() != tableColumnList.size()) {
-      LOG.error("Index file's column size is " + indexFileColumnList.size()
-          + " but table's column size is " + tableColumnList.size());
-      return false;
-    }
-    for (int i = 0; i < tableColumnList.size(); i++) {
-      if (!indexFileColumnList.get(i).equalsWithStrictCheck(tableColumnList.get(i))) {
-        return false;
-      }
-    }
-    return true;
-  }
-
   /**
    * Get the blocklet detail information based on blockletid, blockid and segmentid. This method is
    * exclusively for BlockletDataMapFactory as detail information is only available in this
@@ -191,9 +145,16 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
     }
     Set<TableBlockIndexUniqueIdentifier> identifiers =
         getTableBlockIndexUniqueIdentifiers(segment);
+    Set<TableBlockIndexUniqueIdentifierWrapper> tableBlockIndexUniqueIdentifierWrappers =
+        new HashSet<>(identifiers.size());
+    for (TableBlockIndexUniqueIdentifier tableBlockIndexUniqueIdentifier : identifiers) {
+      tableBlockIndexUniqueIdentifierWrappers.add(
+          new TableBlockIndexUniqueIdentifierWrapper(tableBlockIndexUniqueIdentifier,
+              this.getCarbonTable()));
+    }
     // Retrieve each blocklets detail information from blocklet datamap
     for (Blocklet blocklet : blocklets) {
-      detailedBlocklets.add(getExtendedBlocklet(identifiers, blocklet));
+      detailedBlocklets.add(getExtendedBlocklet(tableBlockIndexUniqueIdentifierWrappers, blocklet));
     }
     return detailedBlocklets;
   }
@@ -204,14 +165,24 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
     if (blocklet instanceof ExtendedBlocklet) {
       return (ExtendedBlocklet) blocklet;
     }
-    Set<TableBlockIndexUniqueIdentifier> identifiers = getTableBlockIndexUniqueIdentifiers(segment);
-    return getExtendedBlocklet(identifiers, blocklet);
+    Set<TableBlockIndexUniqueIdentifier> identifiers =
+        getTableBlockIndexUniqueIdentifiers(segment);
+
+    Set<TableBlockIndexUniqueIdentifierWrapper> tableBlockIndexUniqueIdentifierWrappers =
+        new HashSet<>(identifiers.size());
+    for (TableBlockIndexUniqueIdentifier tableBlockIndexUniqueIdentifier : identifiers) {
+      tableBlockIndexUniqueIdentifierWrappers.add(
+          new TableBlockIndexUniqueIdentifierWrapper(tableBlockIndexUniqueIdentifier,
+              this.getCarbonTable()));
+    }
+    return getExtendedBlocklet(tableBlockIndexUniqueIdentifierWrappers, blocklet);
   }
 
-  private ExtendedBlocklet getExtendedBlocklet(Set<TableBlockIndexUniqueIdentifier> identifiers,
-      Blocklet blocklet) throws IOException {
-    for (TableBlockIndexUniqueIdentifier identifier : identifiers) {
-      BlockletDataMapIndexWrapper wrapper = cache.get(identifier);
+  private ExtendedBlocklet getExtendedBlocklet(
+      Set<TableBlockIndexUniqueIdentifierWrapper> identifiersWrapper, Blocklet blocklet)
+      throws IOException {
+    for (TableBlockIndexUniqueIdentifierWrapper identifierWrapper : identifiersWrapper) {
+      BlockletDataMapIndexWrapper wrapper = cache.get(identifierWrapper);
       List<BlockletDataMap> dataMaps = wrapper.getDataMaps();
       for (DataMap dataMap : dataMaps) {
         if (((BlockletDataMap) dataMap).getIndexFileName().startsWith(blocklet.getFilePath())) {
@@ -265,12 +236,14 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
     Set<TableBlockIndexUniqueIdentifier> blockIndexes = segmentMap.remove(segment.getSegmentNo());
     if (blockIndexes != null) {
       for (TableBlockIndexUniqueIdentifier blockIndex : blockIndexes) {
-        BlockletDataMapIndexWrapper wrapper = cache.getIfPresent(blockIndex);
+        TableBlockIndexUniqueIdentifierWrapper blockIndexWrapper =
+            new TableBlockIndexUniqueIdentifierWrapper(blockIndex, this.getCarbonTable());
+        BlockletDataMapIndexWrapper wrapper = cache.getIfPresent(blockIndexWrapper);
         if (null != wrapper) {
           List<BlockletDataMap> dataMaps = wrapper.getDataMaps();
           for (DataMap dataMap : dataMaps) {
             if (dataMap != null) {
-              cache.invalidate(blockIndex);
+              cache.invalidate(blockIndexWrapper);
               dataMap.clear();
             }
           }
@@ -292,27 +265,28 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
   public List<CoarseGrainDataMap> getDataMaps(DataMapDistributable distributable)
       throws IOException {
     BlockletDataMapDistributable mapDistributable = (BlockletDataMapDistributable) distributable;
-    List<TableBlockIndexUniqueIdentifier> identifiers = new ArrayList<>();
+    List<TableBlockIndexUniqueIdentifierWrapper> identifiersWrapper = new ArrayList<>();
     Path indexPath = new Path(mapDistributable.getFilePath());
     String segmentNo = mapDistributable.getSegment().getSegmentNo();
     if (indexPath.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT)) {
       String parent = indexPath.getParent().toString();
-      identifiers
-          .add(new TableBlockIndexUniqueIdentifier(parent, indexPath.getName(), null, segmentNo));
+      identifiersWrapper.add(new TableBlockIndexUniqueIdentifierWrapper(
+          new TableBlockIndexUniqueIdentifier(parent, indexPath.getName(), null, segmentNo),
+          this.getCarbonTable()));
     } else if (indexPath.getName().endsWith(CarbonTablePath.MERGE_INDEX_FILE_EXT)) {
       SegmentIndexFileStore fileStore = new SegmentIndexFileStore();
       CarbonFile carbonFile = FileFactory.getCarbonFile(indexPath.toString());
       String parentPath = carbonFile.getParentFile().getAbsolutePath();
       List<String> indexFiles = fileStore.getIndexFilesFromMergeFile(carbonFile.getAbsolutePath());
       for (String indexFile : indexFiles) {
-        identifiers.add(
+        identifiersWrapper.add(new TableBlockIndexUniqueIdentifierWrapper(
             new TableBlockIndexUniqueIdentifier(parentPath, indexFile, carbonFile.getName(),
-                segmentNo));
+                segmentNo), this.getCarbonTable()));
       }
     }
     List<CoarseGrainDataMap> dataMaps = new ArrayList<>();
     try {
-      List<BlockletDataMapIndexWrapper> wrappers = cache.getAll(identifiers);
+      List<BlockletDataMapIndexWrapper> wrappers = cache.getAll(identifiersWrapper);
       for (BlockletDataMapIndexWrapper wrapper : wrappers) {
         dataMaps.addAll(wrapper.getDataMaps());
       }
@@ -356,9 +330,10 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
     return false;
   }
 
-  @Override public void cache(TableBlockIndexUniqueIdentifier tableBlockIndexUniqueIdentifier,
+  @Override
+  public void cache(TableBlockIndexUniqueIdentifierWrapper tableBlockIndexUniqueIdentifierWrapper,
       BlockletDataMapIndexWrapper blockletDataMapIndexWrapper) throws IOException, MemoryException {
-    cache.put(tableBlockIndexUniqueIdentifier, blockletDataMapIndexWrapper);
+    cache.put(tableBlockIndexUniqueIdentifierWrapper, blockletDataMapIndexWrapper);
   }
 
   @Override
@@ -373,7 +348,8 @@ public class BlockletDataMapFactory extends CoarseGrainDataMapFactory
       TableBlockIndexUniqueIdentifier validIdentifier = BlockletDataMapUtil
           .filterIdentifiersBasedOnDistributable(tableBlockIndexUniqueIdentifiers,
               (BlockletDataMapDistributable) distributable);
-      if (null == cache.getIfPresent(validIdentifier)) {
+      if (null == cache.getIfPresent(
+          new TableBlockIndexUniqueIdentifierWrapper(validIdentifier, this.getCarbonTable()))) {
         ((BlockletDataMapDistributable) distributable)
             .setTableBlockIndexUniqueIdentifier(validIdentifier);
         distributablesToBeLoaded.add(distributable);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
index c2686d0..35e512d 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/SegmentIndexFileStore.java
@@ -323,6 +323,21 @@ public class SegmentIndexFileStore {
   /**
    * List all the index files of the segment.
    *
+   * @param carbonFile directory
+   * @return
+   */
+  public static CarbonFile[] getCarbonIndexFiles(CarbonFile carbonFile) {
+    return carbonFile.listFiles(new CarbonFileFilter() {
+      @Override public boolean accept(CarbonFile file) {
+        return ((file.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT) || file.getName()
+            .endsWith(CarbonTablePath.MERGE_INDEX_FILE_EXT)) && file.getSize() > 0);
+      }
+    });
+  }
+
+  /**
+   * List all the index files of the segment.
+   *
    * @param segmentPath
    * @return
    */

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index ba051be..6949643 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -218,17 +218,9 @@ public class CarbonTable implements Serializable {
     }
   }
 
-  public static CarbonTable buildFromTablePath(String tableName, String tablePath,
-      boolean isTransactionalTable) throws IOException {
-    if (isTransactionalTable) {
-      return SchemaReader
-          .readCarbonTableFromStore(AbsoluteTableIdentifier.from(tablePath, "default", tableName));
-    } else {
-      // Infer the schema from the Carbondata file.
-      TableInfo tableInfoInfer =
-          SchemaReader.inferSchema(AbsoluteTableIdentifier.from(tablePath, "null", "null"), false);
-      return CarbonTable.buildFromTableInfo(tableInfoInfer);
-    }
+  public static CarbonTable buildDummyTable(String tablePath) throws IOException {
+    TableInfo tableInfoInfer = CarbonUtil.buildDummyTableInfo(tablePath, "null", "null");
+    return CarbonTable.buildFromTableInfo(tableInfoInfer);
   }
 
   public static CarbonTable buildFromTablePath(String tableName, String dbName, String tablePath)
@@ -241,24 +233,7 @@ public class CarbonTable implements Serializable {
    */
   public static CarbonTable buildFromTableInfo(TableInfo tableInfo) {
     CarbonTable table = new CarbonTable();
-    updateTableInfo(tableInfo);
-    table.tableInfo = tableInfo;
-    table.blockSize = tableInfo.getTableBlockSizeInMB();
-    table.tableLastUpdatedTime = tableInfo.getLastUpdatedTime();
-    table.tableUniqueName = tableInfo.getTableUniqueName();
-    table.setTransactionalTable(tableInfo.isTransactionalTable());
-    table.fillDimensionsAndMeasuresForTables(tableInfo.getFactTable());
-    table.fillCreateOrderColumn(tableInfo.getFactTable().getTableName());
-    if (tableInfo.getFactTable().getBucketingInfo() != null) {
-      table.tableBucketMap.put(tableInfo.getFactTable().getTableName(),
-          tableInfo.getFactTable().getBucketingInfo());
-    }
-    if (tableInfo.getFactTable().getPartitionInfo() != null) {
-      table.tablePartitionMap.put(tableInfo.getFactTable().getTableName(),
-          tableInfo.getFactTable().getPartitionInfo());
-    }
-    table.hasDataMapSchema =
-        null != tableInfo.getDataMapSchemaList() && tableInfo.getDataMapSchemaList().size() > 0;
+    updateTableByTableInfo(table, tableInfo);
     return table;
   }
 
@@ -996,4 +971,31 @@ public class CarbonTable implements Serializable {
     }
     return indexColumn;
   }
+
+  /**
+   * update the carbon table by using the passed tableInfo
+   *
+   * @param table
+   * @param tableInfo
+   */
+  public static void updateTableByTableInfo(CarbonTable table, TableInfo tableInfo) {
+    updateTableInfo(tableInfo);
+    table.tableInfo = tableInfo;
+    table.blockSize = tableInfo.getTableBlockSizeInMB();
+    table.tableLastUpdatedTime = tableInfo.getLastUpdatedTime();
+    table.tableUniqueName = tableInfo.getTableUniqueName();
+    table.setTransactionalTable(tableInfo.isTransactionalTable());
+    table.fillDimensionsAndMeasuresForTables(tableInfo.getFactTable());
+    table.fillCreateOrderColumn(tableInfo.getFactTable().getTableName());
+    if (tableInfo.getFactTable().getBucketingInfo() != null) {
+      table.tableBucketMap.put(tableInfo.getFactTable().getTableName(),
+          tableInfo.getFactTable().getBucketingInfo());
+    }
+    if (tableInfo.getFactTable().getPartitionInfo() != null) {
+      table.tablePartitionMap.put(tableInfo.getFactTable().getTableName(),
+          tableInfo.getFactTable().getPartitionInfo());
+    }
+    table.hasDataMapSchema =
+        null != tableInfo.getDataMapSchemaList() && tableInfo.getDataMapSchemaList().size() > 0;
+  }
 }
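
A note on the refactor above: buildFromTablePath(tableName, tablePath, isTransactionalTable) is replaced
by buildDummyTable(tablePath) for the non-transactional (SDK) path, and the table-population logic is
extracted into updateTableByTableInfo so it can be re-run once the real schema is known. A minimal sketch
of how the two methods appear to fit together (the path and the inferredColumns variable below are
illustrative assumptions, not part of the patch):

    // build a placeholder table with an empty column list at reader-build time
    CarbonTable table = CarbonTable.buildDummyTable("/tmp/sdk_output");
    // ... later, once the column schema has been inferred from a carbondata/index file footer ...
    table.getTableInfo().getFactTable().setListOfColumns(inferredColumns);
    // refresh the cached dimension/measure metadata from the updated TableInfo
    CarbonTable.updateTableByTableInfo(table, table.getTableInfo());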

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java b/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
index 6a1234e..63cfa21 100644
--- a/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
+++ b/core/src/main/java/org/apache/carbondata/core/readcommitter/LatestFilesReadCommittedScope.java
@@ -23,7 +23,6 @@ import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
-import org.apache.carbondata.core.datastore.filesystem.CarbonFileFilter;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.indexstore.blockletindex.SegmentIndexFileStore;
 import org.apache.carbondata.core.mutate.UpdateVO;
@@ -157,28 +156,20 @@ public class LatestFilesReadCommittedScope implements ReadCommittedScope {
   @Override public void takeCarbonIndexFileSnapShot() throws IOException {
     // Read the current file Path get the list of indexes from the path.
     CarbonFile file = FileFactory.getCarbonFile(carbonFilePath);
-    CarbonFile[] files = file.listFiles(new CarbonFileFilter() {
-      @Override
-      public boolean accept(CarbonFile file) {
-        return file.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT) || file.getName()
-            .endsWith(CarbonTablePath.CARBON_DATA_EXT) || file.getName().endsWith("Fact");
-      }
-    });
-    if (files.length == 0) {
-      // For nonTransactional table, files can be removed at any point of time.
-      // So cannot assume files will be present
-      throw new IOException("No files are present in the table location :" + carbonFilePath);
-    }
     Map<String, List<String>> indexFileStore = new HashMap<>();
     Map<String, SegmentRefreshInfo> segmentTimestampUpdaterMap = new HashMap<>();
     CarbonFile[] carbonIndexFiles = null;
     if (file.isDirectory()) {
       if (segmentId == null) {
-        carbonIndexFiles = SegmentIndexFileStore.getCarbonIndexFiles(carbonFilePath);
+        carbonIndexFiles = SegmentIndexFileStore.getCarbonIndexFiles(file);
       } else {
         String segmentPath = CarbonTablePath.getSegmentPath(carbonFilePath, segmentId);
         carbonIndexFiles = SegmentIndexFileStore.getCarbonIndexFiles(segmentPath);
       }
+      if (carbonIndexFiles.length == 0) {
+        throw new IOException(
+            "No Index files are present in the table location :" + carbonFilePath);
+      }
       for (int i = 0; i < carbonIndexFiles.length; i++) {
         // TODO. If Required to support merge index, then this code has to be modified.
         // TODO. Nested File Paths.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
index 1c53fbb..c2faadc 100644
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java
@@ -86,10 +86,19 @@ public class SegmentUpdateStatusManager {
     this.identifier = table.getAbsoluteTableIdentifier();
     // current it is used only for read function scenarios, as file update always requires to work
     // on latest file status.
-    segmentDetails = SegmentStatusManager.readLoadMetadata(
-        CarbonTablePath.getMetadataPath(identifier.getTablePath()));
+    if (!table.getTableInfo().isTransactionalTable()) {
+      // fileExist is costly operation, so check based on table Type
+      segmentDetails = new LoadMetadataDetails[0];
+    } else {
+      segmentDetails = SegmentStatusManager.readLoadMetadata(
+          CarbonTablePath.getMetadataPath(identifier.getTablePath()));
+    }
     isPartitionTable = table.isHivePartitionTable();
-    updateDetails = readLoadMetadata();
+    if (segmentDetails.length != 0) {
+      updateDetails = readLoadMetadata();
+    } else {
+      updateDetails = new SegmentUpdateDetails[0];
+    }
     populateMap();
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/core/src/main/java/org/apache/carbondata/core/util/BlockletDataMapUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/BlockletDataMapUtil.java b/core/src/main/java/org/apache/carbondata/core/util/BlockletDataMapUtil.java
index 0d28b9f..518cd03 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/BlockletDataMapUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/BlockletDataMapUtil.java
@@ -33,20 +33,31 @@ import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.indexstore.BlockMetaInfo;
 import org.apache.carbondata.core.indexstore.TableBlockIndexUniqueIdentifier;
+import org.apache.carbondata.core.indexstore.TableBlockIndexUniqueIdentifierWrapper;
 import org.apache.carbondata.core.indexstore.blockletindex.BlockletDataMapDistributable;
 import org.apache.carbondata.core.indexstore.blockletindex.SegmentIndexFileStore;
 import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
+import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 
 public class BlockletDataMapUtil {
 
+  private static final Log LOG = LogFactory.getLog(BlockletDataMapUtil.class);
+
   public static Map<String, BlockMetaInfo> getBlockMetaInfoMap(
-      TableBlockIndexUniqueIdentifier identifier, SegmentIndexFileStore indexFileStore,
-      Set<String> filesRead, Map<String, BlockMetaInfo> fileNameToMetaInfoMapping)
-      throws IOException {
+      TableBlockIndexUniqueIdentifierWrapper identifierWrapper,
+      SegmentIndexFileStore indexFileStore, Set<String> filesRead,
+      Map<String, BlockMetaInfo> fileNameToMetaInfoMapping) throws IOException {
+    boolean isTransactionalTable = true;
+    TableBlockIndexUniqueIdentifier identifier =
+        identifierWrapper.getTableBlockIndexUniqueIdentifier();
+    List<ColumnSchema> tableColumnList = null;
     if (identifier.getMergeIndexFileName() != null
         && indexFileStore.getFileData(identifier.getIndexFileName()) == null) {
       CarbonFile indexMergeFile = FileFactory.getCarbonFile(
@@ -67,7 +78,25 @@ public class BlockletDataMapUtil {
     List<DataFileFooter> indexInfo = fileFooterConverter.getIndexInfo(
         identifier.getIndexFilePath() + CarbonCommonConstants.FILE_SEPARATOR + identifier
             .getIndexFileName(), indexFileStore.getFileData(identifier.getIndexFileName()));
+    CarbonTable carbonTable = identifierWrapper.getCarbonTable();
+    if (carbonTable != null) {
+      isTransactionalTable = carbonTable.getTableInfo().isTransactionalTable();
+      tableColumnList =
+          carbonTable.getTableInfo().getFactTable().getListOfColumns();
+    }
     for (DataFileFooter footer : indexInfo) {
+      if ((!isTransactionalTable) && (tableColumnList.size() != 0) &&
+          !isSameColumnSchemaList(footer.getColumnInTable(), tableColumnList)) {
+        LOG.error("Schema of " + identifier.getIndexFileName()
+            + " doesn't match with the table's schema");
+        throw new IOException("All the files doesn't have same schema. "
+            + "Unsupported operation on nonTransactional table. Check logs.");
+      }
+      if ((tableColumnList != null) && (tableColumnList.size() == 0)) {
+        // Carbon reader have used dummy columnSchema. Update it with inferred schema now
+        carbonTable.getTableInfo().getFactTable().setListOfColumns(footer.getColumnInTable());
+        CarbonTable.updateTableByTableInfo(carbonTable, carbonTable.getTableInfo());
+      }
       String blockPath = footer.getBlockInfo().getTableBlockInfo().getFilePath();
       if (null == blockMetaInfoMap.get(blockPath)) {
         blockMetaInfoMap.put(blockPath, createBlockMetaInfo(fileNameToMetaInfoMapping, blockPath));
@@ -156,6 +185,7 @@ public class BlockletDataMapUtil {
    * This method will the index files tableBlockIndexUniqueIdentifiers of a merge index file
    *
    * @param identifier
+   * @param segmentIndexFileStore
    * @return
    * @throws IOException
    */
@@ -177,4 +207,18 @@ public class BlockletDataMapUtil {
     return tableBlockIndexUniqueIdentifiers;
   }
 
+  private static boolean isSameColumnSchemaList(List<ColumnSchema> indexFileColumnList,
+      List<ColumnSchema> tableColumnList) {
+    if (indexFileColumnList.size() != tableColumnList.size()) {
+      LOG.error("Index file's column size is " + indexFileColumnList.size()
+          + " but table's column size is " + tableColumnList.size());
+      return false;
+    }
+    for (int i = 0; i < tableColumnList.size(); i++) {
+      if (!indexFileColumnList.get(i).equalsWithStrictCheck(tableColumnList.get(i))) {
+        return false;
+      }
+    }
+    return true;
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 5a7bce3..e1e5e16 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -53,6 +53,7 @@ import org.apache.carbondata.core.metadata.SegmentFileStore;
 import org.apache.carbondata.core.metadata.ValueEncoderMeta;
 import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
 import org.apache.carbondata.core.metadata.blocklet.SegmentInfo;
+import org.apache.carbondata.core.metadata.converter.SchemaConverter;
 import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.datatype.DataTypeAdapter;
@@ -2371,6 +2372,35 @@ public final class CarbonUtil {
   }
 
   /**
+   * This method will prepare dummy tableInfo
+   *
+   * @param carbonDataFilePath
+   * @param tableName
+   * @return
+   */
+  public static TableInfo buildDummyTableInfo(String carbonDataFilePath,
+      String tableName, String dbName) {
+    // During SDK carbon Reader, This method will be called.
+    // This API will avoid IO operation to get the columnSchema list.
+    // ColumnSchema list will be filled during blocklet loading (where actual IO happens)
+    List<ColumnSchema> columnSchemaList = new ArrayList<>();
+    TableSchema tableSchema = getDummyTableSchema(tableName,columnSchemaList);
+    ThriftWrapperSchemaConverterImpl thriftWrapperSchemaConverter =
+        new ThriftWrapperSchemaConverterImpl();
+    org.apache.carbondata.format.TableSchema thriftFactTable =
+        thriftWrapperSchemaConverter.fromWrapperToExternalTableSchema(tableSchema);
+    org.apache.carbondata.format.TableInfo tableInfo =
+        new org.apache.carbondata.format.TableInfo(thriftFactTable,
+            new ArrayList<org.apache.carbondata.format.TableSchema>());
+    tableInfo.setDataMapSchemas(null);
+    SchemaConverter schemaConverter = new ThriftWrapperSchemaConverterImpl();
+    TableInfo wrapperTableInfo = schemaConverter.fromExternalToWrapperTableInfo(
+        tableInfo, dbName, tableName, carbonDataFilePath);
+    wrapperTableInfo.setTransactionalTable(false);
+    return wrapperTableInfo;
+  }
+
+  /**
    * This method will infer the schema file from a given index file path
    * @param indexFilePath
    * @param tableName

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletDataMapFactory.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletDataMapFactory.java b/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletDataMapFactory.java
index dfbdd29..526f630 100644
--- a/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletDataMapFactory.java
+++ b/core/src/test/java/org/apache/carbondata/core/indexstore/blockletindex/TestBlockletDataMapFactory.java
@@ -33,6 +33,7 @@ import org.apache.carbondata.core.datamap.DataMapDistributable;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.indexstore.BlockletDataMapIndexWrapper;
 import org.apache.carbondata.core.indexstore.TableBlockIndexUniqueIdentifier;
+import org.apache.carbondata.core.indexstore.TableBlockIndexUniqueIdentifierWrapper;
 import org.apache.carbondata.core.memory.MemoryException;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
@@ -57,7 +58,9 @@ public class TestBlockletDataMapFactory {
 
   private TableBlockIndexUniqueIdentifier tableBlockIndexUniqueIdentifier;
 
-  private Cache<TableBlockIndexUniqueIdentifier, BlockletDataMapIndexWrapper> cache;
+  private TableBlockIndexUniqueIdentifierWrapper tableBlockIndexUniqueIdentifierWrapper;
+
+  private Cache<TableBlockIndexUniqueIdentifierWrapper, BlockletDataMapIndexWrapper> cache;
 
   @Before public void setUp()
       throws ClassNotFoundException, IllegalAccessException, InvocationTargetException,
@@ -78,6 +81,8 @@ public class TestBlockletDataMapFactory {
     tableBlockIndexUniqueIdentifier =
         new TableBlockIndexUniqueIdentifier("/opt/store/default/carbon_table/Fact/Part0/Segment_0",
             "0_batchno0-0-1521012756709.carbonindex", null, "0");
+    tableBlockIndexUniqueIdentifierWrapper =
+        new TableBlockIndexUniqueIdentifierWrapper(tableBlockIndexUniqueIdentifier, carbonTable);
     cache = CacheProvider.getInstance().createCache(CacheType.DRIVER_BLOCKLET_DATAMAP);
   }
 
@@ -86,12 +91,12 @@ public class TestBlockletDataMapFactory {
       IllegalAccessException {
     List<BlockletDataMap> dataMaps = new ArrayList<>();
     Method method = BlockletDataMapFactory.class
-        .getDeclaredMethod("cache", TableBlockIndexUniqueIdentifier.class,
+        .getDeclaredMethod("cache", TableBlockIndexUniqueIdentifierWrapper.class,
             BlockletDataMapIndexWrapper.class);
     method.setAccessible(true);
-    method.invoke(blockletDataMapFactory, tableBlockIndexUniqueIdentifier,
+    method.invoke(blockletDataMapFactory, tableBlockIndexUniqueIdentifierWrapper,
         new BlockletDataMapIndexWrapper(dataMaps));
-    BlockletDataMapIndexWrapper result = cache.getIfPresent(tableBlockIndexUniqueIdentifier);
+    BlockletDataMapIndexWrapper result = cache.getIfPresent(tableBlockIndexUniqueIdentifierWrapper);
     assert null != result;
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 5dbb5ac..0f20dc3 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -460,16 +460,6 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
 
 ```
   /**
-   * Project all Columns for carbon reader
-   *
-   * @return CarbonReaderBuilder object
-   * @throws IOException
-   */
-  public CarbonReaderBuilder projectAllColumns();
-```
-
-```
-  /**
    * Configure the transactional status of table
    * If set to false, then reads the carbondata and carbonindex files from a flat folder structure.
    * If set to true, then reads the carbondata and carbonindex files from segment folder structure.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
index 8d3ff0d..ada1a8c 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java
@@ -116,7 +116,6 @@ public class CarbonReaderExample {
             // Read data
             CarbonReader reader2 = CarbonReader
                 .builder(path, "_temp")
-                .projectAllColumns()
                 .build();
 
             System.out.println("\nData:");

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
index da84c00..4911e41 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/CarbonRecordReader.java
@@ -123,7 +123,8 @@ public class CarbonRecordReader<T> extends AbstractRecordReader<T> {
       }
     }
     // Clear the datamap cache
-    DataMapStoreManager.getInstance().getDefaultDataMap(queryModel.getTable()).clear();
+    DataMapStoreManager.getInstance()
+        .clearDataMaps(queryModel.getTable().getAbsoluteTableIdentifier());
     // close read support
     readSupport.close();
     try {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonFileInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonFileInputFormat.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonFileInputFormat.java
index 8ed89d5..8755176 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonFileInputFormat.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonFileInputFormat.java
@@ -23,26 +23,21 @@ import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Map;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
-import org.apache.carbondata.core.indexstore.blockletindex.SegmentIndexFileStore;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
 import org.apache.carbondata.core.metadata.schema.PartitionInfo;
 import org.apache.carbondata.core.metadata.schema.SchemaReader;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.metadata.schema.table.TableInfo;
-import org.apache.carbondata.core.mutate.UpdateVO;
 import org.apache.carbondata.core.readcommitter.LatestFilesReadCommittedScope;
 import org.apache.carbondata.core.readcommitter.ReadCommittedScope;
 import org.apache.carbondata.core.scan.expression.Expression;
 import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
 import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
-import org.apache.carbondata.core.statusmanager.SegmentUpdateStatusManager;
-import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 import org.apache.carbondata.hadoop.CarbonInputSplit;
 
@@ -105,8 +100,10 @@ public class CarbonFileInputFormat<T> extends CarbonInputFormat<T> implements Se
    */
   @Override
   public List<InputSplit> getSplits(JobContext job) throws IOException {
+
     AbsoluteTableIdentifier identifier = getAbsoluteTableIdentifier(job.getConfiguration());
     CarbonTable carbonTable = getOrCreateCarbonTable(job.getConfiguration());
+
     if (null == carbonTable) {
       throw new IOException("Missing/Corrupt schema file for table.");
     }
@@ -115,6 +112,7 @@ public class CarbonFileInputFormat<T> extends CarbonInputFormat<T> implements Se
       // get all valid segments and set them into the configuration
       // check for externalTable segment (Segment_null)
       // process and resolve the expression
+
       ReadCommittedScope readCommittedScope = null;
       if (carbonTable.isTransactionalTable()) {
         readCommittedScope = new LatestFilesReadCommittedScope(
@@ -129,44 +127,33 @@ public class CarbonFileInputFormat<T> extends CarbonInputFormat<T> implements Se
 
       FilterResolverIntf filterInterface = carbonTable.resolveFilter(filter);
 
-      String segmentDir = null;
+      // if external table Segments are found, add it to the List
+      List<Segment> externalTableSegments = new ArrayList<Segment>();
+      Segment seg;
       if (carbonTable.isTransactionalTable()) {
-        segmentDir = CarbonTablePath.getSegmentPath(identifier.getTablePath(), "null");
+        // SDK some cases write into the Segment Path instead of Table Path i.e. inside
+        // the "Fact/Part0/Segment_null". The segment in this case is named as "null".
+        // The table is denoted by default as a transactional table and goes through
+        // the path of CarbonFileInputFormat. The above scenario is handled in the below code.
+        seg = new Segment("null", null, readCommittedScope);
+        externalTableSegments.add(seg);
       } else {
-        segmentDir = identifier.getTablePath();
-      }
-      FileFactory.FileType fileType = FileFactory.getFileType(segmentDir);
-      if (FileFactory.isFileExist(segmentDir, fileType)) {
-        // if external table Segments are found, add it to the List
-        List<Segment> externalTableSegments = new ArrayList<Segment>();
-        Segment seg;
-        if (carbonTable.isTransactionalTable()) {
-          // SDK some cases write into the Segment Path instead of Table Path i.e. inside
-          // the "Fact/Part0/Segment_null". The segment in this case is named as "null".
-          // The table is denoted by default as a transactional table and goes through
-          // the path of CarbonFileInputFormat. The above scenario is handled in the below code.
-          seg = new Segment("null", null, readCommittedScope);
+        LoadMetadataDetails[] loadMetadataDetails = readCommittedScope.getSegmentList();
+        for (LoadMetadataDetails load : loadMetadataDetails) {
+          seg = new Segment(load.getLoadName(), null, readCommittedScope);
           externalTableSegments.add(seg);
-        } else {
-          LoadMetadataDetails[] loadMetadataDetails = readCommittedScope.getSegmentList();
-          for (LoadMetadataDetails load : loadMetadataDetails) {
-            seg = new Segment(load.getLoadName(), null, readCommittedScope);
-            externalTableSegments.add(seg);
-          }
         }
-
-        Map<String, String> indexFiles =
-            new SegmentIndexFileStore().getIndexFilesFromSegment(segmentDir);
-
-        if (indexFiles.size() == 0) {
-          throw new RuntimeException("Index file not present to read the carbondata file");
-        }
-        // do block filtering and get split
-        List<InputSplit> splits =
-            getSplits(job, filterInterface, externalTableSegments, null, partitionInfo, null);
-
-        return splits;
       }
+      // do block filtering and get split
+      List<InputSplit> splits =
+          getSplits(job, filterInterface, externalTableSegments, null, partitionInfo, null);
+      if (getColumnProjection(job.getConfiguration()) == null) {
+        // If the user projection is empty, use default all columns as projections.
+        // All column name will be filled inside getSplits, so can update only here.
+        String[]  projectionColumns = projectAllColumns(carbonTable);
+        setColumnProjection(job.getConfiguration(), projectionColumns);
+      }
+      return splits;
     }
     return null;
   }
@@ -185,45 +172,13 @@ public class CarbonFileInputFormat<T> extends CarbonInputFormat<T> implements Se
 
     numSegments = validSegments.size();
     List<InputSplit> result = new LinkedList<InputSplit>();
-    UpdateVO invalidBlockVOForSegmentId = null;
-    Boolean isIUDTable = false;
-
-    SegmentUpdateStatusManager updateStatusManager = new SegmentUpdateStatusManager(carbonTable);
-
-    isIUDTable = (updateStatusManager.getUpdateStatusDetails().length != 0);
 
     // for each segment fetch blocks matching filter in Driver BTree
     List<CarbonInputSplit> dataBlocksOfSegment =
         getDataBlocksOfSegment(job, carbonTable, filterResolver, matchedPartitions,
             validSegments, partitionInfo, oldPartitionIdList);
     numBlocks = dataBlocksOfSegment.size();
-    for (CarbonInputSplit inputSplit : dataBlocksOfSegment) {
-
-      // Get the UpdateVO for those tables on which IUD operations being performed.
-      if (isIUDTable) {
-        invalidBlockVOForSegmentId =
-            updateStatusManager.getInvalidTimestampRange(inputSplit.getSegmentId());
-      }
-      String[] deleteDeltaFilePath = null;
-      if (isIUDTable) {
-        // In case IUD is not performed in this table avoid searching for
-        // invalidated blocks.
-        if (CarbonUtil
-            .isInvalidTableBlock(inputSplit.getSegmentId(), inputSplit.getPath().toString(),
-                invalidBlockVOForSegmentId, updateStatusManager)) {
-          continue;
-        }
-        // When iud is done then only get delete delta files for a block
-        try {
-          deleteDeltaFilePath = updateStatusManager
-              .getDeleteDeltaFilePath(inputSplit.getPath().toString(), inputSplit.getSegmentId());
-        } catch (Exception e) {
-          throw new IOException(e);
-        }
-      }
-      inputSplit.setDeleteDeltaFiles(deleteDeltaFilePath);
-      result.add(inputSplit);
-    }
+    result.addAll(dataBlocksOfSegment);
     return result;
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
index 05c70f8..485b087 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonInputFormat.java
@@ -42,6 +42,7 @@ import org.apache.carbondata.core.metadata.schema.PartitionInfo;
 import org.apache.carbondata.core.metadata.schema.partition.PartitionType;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.metadata.schema.table.TableInfo;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.mutate.UpdateVO;
 import org.apache.carbondata.core.profiler.ExplainCollector;
 import org.apache.carbondata.core.scan.expression.Expression;
@@ -675,4 +676,27 @@ m filterExpression
       return false;
     }
   }
+
+  /**
+   * Project all Columns for carbon reader
+   *
+   * @return String array of columnNames
+   * @param carbonTable
+   */
+  public String[] projectAllColumns(CarbonTable carbonTable) {
+    List<ColumnSchema> colList = carbonTable.getTableInfo().getFactTable().getListOfColumns();
+    List<String> projectColumn = new ArrayList<>();
+    for (ColumnSchema cols : colList) {
+      if (cols.getSchemaOrdinal() != -1) {
+        projectColumn.add(cols.getColumnUniqueId());
+      }
+    }
+    String[] projectionColumns = new String[projectColumn.size()];
+    int i = 0;
+    for (String columnName : projectColumn) {
+      projectionColumns[i] = columnName;
+      i++;
+    }
+    return projectionColumns;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
index e6d39d3..0e6f0c7 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCarbonFileInputFormatWithExternalCarbonTable.scala
@@ -184,7 +184,7 @@ class TestCarbonFileInputFormatWithExternalCarbonTable extends QueryTest with Be
     {
       sql("select * from sdkOutputTable").show(false)
     }
-    assert(exception.getMessage().contains("Index file not present to read the carbondata file"))
+    assert(exception.getMessage().contains("Error while taking index snapshot"))
 
     sql("DROP TABLE sdkOutputTable")
     // drop table should not delete the files

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
index 211bc8c..d7e500e 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableUsingSparkCarbonFileFormat.scala
@@ -346,7 +346,7 @@ class TestCreateTableUsingSparkCarbonFileFormat extends QueryTest with BeforeAnd
       {
         sql("select * from sdkOutputTable").show(false)
       }
-    assert(exception.getMessage().contains("Index file not present to read the carbondata file"))
+    assert(exception.getMessage().contains("Error while taking index snapshot"))
 
     sql("DROP TABLE sdkOutputTable")
     // drop table should not delete the files

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index 095d12d..14a63ca 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -993,7 +993,14 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
       sql("select * from sdkOutputTable").show(false)
     }
     assert(exception.getMessage()
-      .contains("All the files doesn't have same schema"))
+      .contains("Problem in loading segment blocks."))
+
+    val exception1 =
+      intercept[IOException] {
+        sql("select count(*) from sdkOutputTable").show(false)
+      }
+    assert(exception1.getMessage()
+      .contains("Problem in loading segment blocks."))
 
     sql("DROP TABLE sdkOutputTable")
     // drop table should not delete the files
@@ -1025,7 +1032,7 @@ class TestNonTransactionalCarbonTable extends QueryTest with BeforeAndAfterAll {
         sql("select * from sdkOutputTable").show(false)
       }
     assert(exception.getMessage()
-      .contains("All the files doesn't have same schema"))
+      .contains("Problem in loading segment blocks."))
 
 
     sql("DROP TABLE sdkOutputTable")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
index 9d7470e..98aa6e0 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReaderBuilder.java
@@ -26,7 +26,6 @@ import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.scan.expression.Expression;
 import org.apache.carbondata.hadoop.api.CarbonFileInputFormat;
 
@@ -51,12 +50,6 @@ public class CarbonReaderBuilder {
   private boolean isTransactionalTable;
 
   /**
-   * It will be true if use the projectAllColumns method,
-   * it will be false if use the projection method
-   */
-  private boolean isProjectAllColumns = true;
-
-  /**
    * Construct a CarbonReaderBuilder with table path and table name
    *
    * @param tablePath table path
@@ -76,7 +69,6 @@ public class CarbonReaderBuilder {
   public CarbonReaderBuilder projection(String[] projectionColumnNames) {
     Objects.requireNonNull(projectionColumnNames);
     this.projectionColumns = projectionColumnNames;
-    isProjectAllColumns = false;
     return this;
   }
 
@@ -96,33 +88,6 @@ public class CarbonReaderBuilder {
   }
 
   /**
-   * Project all Columns for carbon reader
-   *
-   * @return CarbonReaderBuilder object
-   * @throws IOException
-   */
-  public CarbonReaderBuilder projectAllColumns() throws IOException {
-    CarbonTable carbonTable = CarbonTable
-        .buildFromTablePath(tableName, tablePath, isTransactionalTable);
-
-    List<ColumnSchema> colList = carbonTable.getTableInfo().getFactTable().getListOfColumns();
-    List<String> projectColumn = new ArrayList<String>();
-    for (ColumnSchema cols : colList) {
-      if (cols.getSchemaOrdinal() != -1) {
-        projectColumn.add(cols.getColumnUniqueId());
-      }
-    }
-    projectionColumns = new String[projectColumn.size()];
-    int i = 0;
-    for (String columnName : projectColumn) {
-      projectionColumns[i] = columnName;
-      i++;
-    }
-    isProjectAllColumns = true;
-    return this;
-  }
-
-  /**
    * Configure the filter expression for carbon reader
    *
    * @param filterExpression filter expression
@@ -209,8 +174,13 @@ public class CarbonReaderBuilder {
    * @throws InterruptedException
    */
   public <T> CarbonReader<T> build() throws IOException, InterruptedException {
-    CarbonTable table = CarbonTable.buildFromTablePath(tableName, tablePath, isTransactionalTable);
-
+    // DB name is not applicable for SDK reader as, table will be never registered.
+    CarbonTable table;
+    if (isTransactionalTable) {
+      table = CarbonTable.buildFromTablePath(tableName, "default", tablePath);
+    } else {
+      table = CarbonTable.buildDummyTable(tablePath);
+    }
     final CarbonFileInputFormat format = new CarbonFileInputFormat();
     final Job job = new Job(new Configuration());
     format.setTableInfo(job.getConfiguration(), table.getTableInfo());
@@ -220,10 +190,11 @@ public class CarbonReaderBuilder {
     if (filterExpression != null) {
       format.setFilterPredicates(job.getConfiguration(), filterExpression);
     }
-    if (isProjectAllColumns) {
-      projectAllColumns();
+
+    if (projectionColumns != null) {
+      // set the user projection
+      format.setColumnProjection(job.getConfiguration(), projectionColumns);
     }
-    format.setColumnProjection(job.getConfiguration(), projectionColumns);
 
     final List<InputSplit> splits =
         format.getSplits(new JobContextImpl(job.getConfiguration(), new JobID()));
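
With this change the SDK reader no longer needs projectAllColumns(): when no projection is configured,
CarbonFileInputFormat fills in all columns while computing splits. A minimal usage sketch under that
assumption (path, table name and column names below are examples only, not taken from the patch):

    CarbonReader reader = CarbonReader
        .builder("/tmp/sdk_output", "_temp")
        .projection(new String[]{"name", "age"})   // optional; omit it to read every column
        .build();
    while (reader.hasNext()) {
      Object[] row = (Object[]) reader.readNextRow();
      // ... consume row ...
    }
    reader.close();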

http://git-wip-us.apache.org/repos/asf/carbondata/blob/5f68a792/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index db118cd..a8aa795 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -385,9 +385,8 @@ public class CarbonReaderTest extends TestCase {
     // Write to a Non Transactional Table
     TestUtil.writeFilesAndVerify(new Schema(fields), path, true, false);
 
-    CarbonReader reader = CarbonReader.builder(path, "_temp").isTransactionalTable(true)
+    CarbonReader reader = CarbonReader.builder(path, "_temp")
         .projection(new String[]{"name", "age"})
-        .isTransactionalTable(false)
         .build();
 
     // expected output after sorting
@@ -892,7 +891,6 @@ public class CarbonReaderTest extends TestCase {
     CarbonReader reader = CarbonReader
         .builder(path, "_temp")
         .isTransactionalTable(true)
-        .projectAllColumns()
         .build();
 
     // expected output after sorting


[28/50] [abbrv] carbondata git commit: [CARBONDATA-2520] Clean and close datamap writers on any task failure during load

Posted by gv...@apache.org.
[CARBONDATA-2520] Clean and close datamap writers on any task failure during load

Problem: The datamap writers registered to the listener are closed/finished only when the load succeeds, not when it fails. While testing Lucene it was found that, after a task failed, the writer was not closed, so the write.lock file written in the Lucene index folder still existed; when the next task tried to write an index into the same directory, it failed with an error that the lock file already exists.

Solution: close the writers if any load task fails.

This closes #2321
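
A rough sketch of the cleanup idea (the surrounding step class and LOGGER are assumed from the patch
context; this is illustrative, not the exact implementation):

    DataMapWriterListener listener = getDataMapWriterListener(0);
    try {
      // ... write fact data and datamap index content for this task ...
    } finally {
      try {
        // always close the registered writers so index-side locks
        // (e.g. Lucene's write.lock) are released even on task failure
        listener.finish();
      } catch (IOException e) {
        LOGGER.error(e, "error while closing the datamap writers");
      }
    }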


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/7f4bd3d0
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/7f4bd3d0
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/7f4bd3d0

Branch: refs/heads/spark-2.3
Commit: 7f4bd3d06517b551aa14a1054dffa9ae7ca9ad57
Parents: 1b6ce8c
Author: akashrn5 <ak...@gmail.com>
Authored: Thu May 17 11:37:22 2018 +0530
Committer: Jacky Li <ja...@qq.com>
Committed: Mon May 28 21:27:25 2018 +0800

----------------------------------------------------------------------
 .../core/datamap/dev/DataMapWriter.java         | 10 ++++++++
 .../datamap/bloom/BloomDataMapWriter.java       |  9 +++++---
 .../datamap/lucene/LuceneDataMapWriter.java     | 12 ++++++----
 .../loading/AbstractDataLoadProcessorStep.java  | 24 ++++++++++++++++++++
 .../CarbonRowDataWriterProcessorStepImpl.java   |  4 +++-
 .../steps/DataWriterBatchProcessorStepImpl.java |  4 +++-
 .../steps/DataWriterProcessorStepImpl.java      | 23 +++++++++++++++++--
 .../store/CarbonFactDataHandlerModel.java       | 22 ++++++++++--------
 8 files changed, 87 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/7f4bd3d0/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMapWriter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMapWriter.java b/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMapWriter.java
index 03a369a..89d5d76 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMapWriter.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/dev/DataMapWriter.java
@@ -46,6 +46,8 @@ public abstract class DataMapWriter {
 
   private List<CarbonColumn> indexColumns;
 
+  private boolean isWritingFinished;
+
   public DataMapWriter(String tablePath, String dataMapName, List<CarbonColumn> indexColumns,
       Segment segment, String shardName) {
     this.tablePath = tablePath;
@@ -133,4 +135,12 @@ public abstract class DataMapWriter {
       String tablePath, String segmentId, String dataMapName) {
     return CarbonTablePath.getSegmentPath(tablePath, segmentId) + File.separator + dataMapName;
   }
+
+  public boolean isWritingFinished() {
+    return isWritingFinished;
+  }
+
+  public void setWritingFinished(boolean writingFinished) {
+    isWritingFinished = writingFinished;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7f4bd3d0/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
----------------------------------------------------------------------
diff --git a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
index b3e69f4..2791a6c 100644
--- a/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
+++ b/datamap/bloom/src/main/java/org/apache/carbondata/datamap/bloom/BloomDataMapWriter.java
@@ -196,10 +196,13 @@ public class BloomDataMapWriter extends DataMapWriter {
 
   @Override
   public void finish() throws IOException {
-    if (indexBloomFilters.size() > 0) {
-      writeBloomDataMapFile();
+    if (!isWritingFinished()) {
+      if (indexBloomFilters.size() > 0) {
+        writeBloomDataMapFile();
+      }
+      releaseResouce();
+      setWritingFinished(true);
     }
-    releaseResouce();
   }
 
   protected void releaseResouce() {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7f4bd3d0/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
index c7eb3d8..605ec89 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneDataMapWriter.java
@@ -447,10 +447,14 @@ public class LuceneDataMapWriter extends DataMapWriter {
    * class.
    */
   public void finish() throws IOException {
-    flushCache(cache, getIndexColumns(), indexWriter, storeBlockletWise);
-    // finished a file , close this index writer
-    if (indexWriter != null) {
-      indexWriter.close();
+    if (!isWritingFinished()) {
+      flushCache(cache, getIndexColumns(), indexWriter, storeBlockletWise);
+      // finished a file , close this index writer
+      if (indexWriter != null) {
+        indexWriter.close();
+        indexWriter = null;
+      }
+      setWritingFinished(true);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7f4bd3d0/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java b/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java
index 9f2482b..eb02ede 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/AbstractDataLoadProcessorStep.java
@@ -25,8 +25,12 @@ import org.apache.carbondata.common.CarbonIterator;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.datastore.row.CarbonRow;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.carbondata.processing.datamap.DataMapWriterListener;
+import org.apache.carbondata.processing.loading.constants.DataLoadProcessorConstants;
 import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
 import org.apache.carbondata.processing.loading.row.CarbonRowBatch;
+import org.apache.carbondata.processing.store.CarbonDataFileAttributes;
 
 /**
  * This base abstract class for data loading.
@@ -149,6 +153,26 @@ public abstract class AbstractDataLoadProcessorStep {
    */
   protected abstract String getStepName();
 
+  /**
+   * This method registers all writer listeners and returns the listener
+   * @param bucketId bucketId
+   * @return
+   */
+  protected DataMapWriterListener getDataMapWriterListener(int bucketId) {
+    CarbonDataFileAttributes carbonDataFileAttributes =
+        new CarbonDataFileAttributes(Long.parseLong(configuration.getTaskNo()),
+            (Long) configuration.getDataLoadProperty(DataLoadProcessorConstants.FACT_TIME_STAMP));
+    DataMapWriterListener listener = new DataMapWriterListener();
+    listener.registerAllWriter(
+        configuration.getTableSpec().getCarbonTable(),
+        configuration.getSegmentId(),
+        CarbonTablePath.getShardName(
+            carbonDataFileAttributes.getTaskId(),
+            bucketId,
+            0,
+            String.valueOf(carbonDataFileAttributes.getFactTimeStamp())));
+    return listener;
+  }
 
   /**
    * Close all resources.This method is called after execute() is finished.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7f4bd3d0/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
index edf67a7..e465471 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
@@ -33,6 +33,7 @@ import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.util.CarbonThreadFactory;
 import org.apache.carbondata.core.util.CarbonTimeStatisticsFactory;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.carbondata.processing.datamap.DataMapWriterListener;
 import org.apache.carbondata.processing.loading.AbstractDataLoadProcessorStep;
 import org.apache.carbondata.processing.loading.CarbonDataLoadConfiguration;
 import org.apache.carbondata.processing.loading.DataField;
@@ -156,8 +157,9 @@ public class CarbonRowDataWriterProcessorStepImpl extends AbstractDataLoadProces
 
   private void doExecute(Iterator<CarbonRowBatch> iterator, int iteratorIndex) throws IOException {
     String[] storeLocation = getStoreLocation(tableIdentifier);
+    DataMapWriterListener listener = getDataMapWriterListener(0);
     CarbonFactDataHandlerModel model = CarbonFactDataHandlerModel.createCarbonFactDataHandlerModel(
-        configuration, storeLocation, 0, iteratorIndex);
+        configuration, storeLocation, 0, iteratorIndex, listener);
     CarbonFactHandler dataHandler = null;
     boolean rowsNotExist = true;
     while (iterator.hasNext()) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7f4bd3d0/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
index 369c1f2..78777ce 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterBatchProcessorStepImpl.java
@@ -25,6 +25,7 @@ import org.apache.carbondata.core.datastore.row.CarbonRow;
 import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
 import org.apache.carbondata.core.util.CarbonTimeStatisticsFactory;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.carbondata.processing.datamap.DataMapWriterListener;
 import org.apache.carbondata.processing.loading.AbstractDataLoadProcessorStep;
 import org.apache.carbondata.processing.loading.CarbonDataLoadConfiguration;
 import org.apache.carbondata.processing.loading.DataField;
@@ -85,8 +86,9 @@ public class DataWriterBatchProcessorStepImpl extends AbstractDataLoadProcessorS
           CarbonRowBatch next = iterator.next();
           // If no rows from merge sorter, then don't create a file in fact column handler
           if (next.hasNext()) {
+            DataMapWriterListener listener = getDataMapWriterListener(0);
             CarbonFactDataHandlerModel model = CarbonFactDataHandlerModel
-                .createCarbonFactDataHandlerModel(configuration, storeLocation, 0, k++);
+                .createCarbonFactDataHandlerModel(configuration, storeLocation, 0, k++, listener);
             CarbonFactHandler dataHandler = CarbonFactHandlerFactory
                 .createCarbonFactHandler(model, CarbonFactHandlerFactory.FactHandlerType.COLUMNAR);
             dataHandler.initialise();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7f4bd3d0/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java
index b09fb7d..a0f29fa 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataWriterProcessorStepImpl.java
@@ -36,6 +36,7 @@ import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
 import org.apache.carbondata.core.util.CarbonThreadFactory;
 import org.apache.carbondata.core.util.CarbonTimeStatisticsFactory;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.carbondata.processing.datamap.DataMapWriterListener;
 import org.apache.carbondata.processing.loading.AbstractDataLoadProcessorStep;
 import org.apache.carbondata.processing.loading.CarbonDataLoadConfiguration;
 import org.apache.carbondata.processing.loading.DataField;
@@ -57,6 +58,8 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
 
   private long readCounter;
 
+  private DataMapWriterListener listener;
+
   public DataWriterProcessorStepImpl(CarbonDataLoadConfiguration configuration,
       AbstractDataLoadProcessorStep child) {
     super(configuration, child);
@@ -88,8 +91,9 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
     CarbonTableIdentifier tableIdentifier =
         configuration.getTableIdentifier().getCarbonTableIdentifier();
     String[] storeLocation = getStoreLocation(tableIdentifier);
+    listener = getDataMapWriterListener(0);
     return CarbonFactDataHandlerModel.createCarbonFactDataHandlerModel(configuration,
-        storeLocation, 0, 0);
+        storeLocation, 0, 0, listener);
   }
 
   @Override public Iterator<CarbonRowBatch>[] execute() throws CarbonDataLoadingException {
@@ -162,8 +166,9 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
       CarbonTableIdentifier tableIdentifier, int rangeId) {
     String[] storeLocation = getStoreLocation(tableIdentifier);
 
+    listener = getDataMapWriterListener(rangeId);
     CarbonFactDataHandlerModel model = CarbonFactDataHandlerModel
-        .createCarbonFactDataHandlerModel(configuration, storeLocation, rangeId, 0);
+        .createCarbonFactDataHandlerModel(configuration, storeLocation, rangeId, 0, listener);
     CarbonFactHandler dataHandler = null;
     boolean rowsNotExist = true;
     while (insideRangeIterator.hasNext()) {
@@ -247,4 +252,18 @@ public class DataWriterProcessorStepImpl extends AbstractDataLoadProcessorStep {
     return null;
   }
 
+  @Override public void close() {
+    if (!closed) {
+      super.close();
+      if (listener != null) {
+        try {
+          LOGGER.info("closing all the DataMap writers registered to DataMap writer listener");
+          listener.finish();
+        } catch (IOException e) {
+          LOGGER.error(e, "error while closing the datamap writers");
+          // ignoring the exception
+        }
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/7f4bd3d0/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
index a725936..87a6de0 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerModel.java
@@ -168,7 +168,7 @@ public class CarbonFactDataHandlerModel {
    */
   public static CarbonFactDataHandlerModel createCarbonFactDataHandlerModel(
       CarbonDataLoadConfiguration configuration, String[] storeLocation, int bucketId,
-      int taskExtension) {
+      int taskExtension, DataMapWriterListener listener) {
     CarbonTableIdentifier identifier =
         configuration.getTableIdentifier().getCarbonTableIdentifier();
     boolean[] isUseInvertedIndex =
@@ -258,15 +258,17 @@ public class CarbonFactDataHandlerModel {
     carbonFactDataHandlerModel.tableSpec = configuration.getTableSpec();
     carbonFactDataHandlerModel.sortScope = CarbonDataProcessorUtil.getSortScope(configuration);
 
-    DataMapWriterListener listener = new DataMapWriterListener();
-    listener.registerAllWriter(
-        configuration.getTableSpec().getCarbonTable(),
-        configuration.getSegmentId(),
-        CarbonTablePath.getShardName(
-            carbonDataFileAttributes.getTaskId(),
-            bucketId,
-            0,
-            String.valueOf(carbonDataFileAttributes.getFactTimeStamp())));
+    if (listener == null) {
+      listener = new DataMapWriterListener();
+      listener.registerAllWriter(
+          configuration.getTableSpec().getCarbonTable(),
+          configuration.getSegmentId(),
+          CarbonTablePath.getShardName(
+              carbonDataFileAttributes.getTaskId(),
+              bucketId,
+              0,
+              String.valueOf(carbonDataFileAttributes.getFactTimeStamp())));
+    }
     carbonFactDataHandlerModel.dataMapWriterlistener = listener;
     carbonFactDataHandlerModel.writingCoresCount = configuration.getWritingCoresCount();
 


[12/50] [abbrv] carbondata git commit: [CARBONDATA-2198] Fixed bug for streaming data for bad_records_action as REDIRECT or IGNORE

Posted by gv...@apache.org.
[CARBONDATA-2198] Fixed bug for streaming data for bad_records_action as REDIRECT or IGNORE

1. Refactored streaming functionality for bad_records_action as IGNORE or REDIRECT
2. Added related test cases

This closes #2014
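
Example (illustrative only; the source stream, checkpoint path and bad-record path are
placeholders, the sink format name is assumed, and the option names are the ones exercised
by the new test cases):

  // Scala sketch of a streaming ingest that redirects bad records
  import org.apache.spark.sql.streaming.ProcessingTime
  val query = inputStream.writeStream
    .format("carbondata")                              // CarbonData streaming sink (assumed)
    .trigger(ProcessingTime("1 seconds"))
    .option("checkpointLocation", "/tmp/stream_ckpt")  // placeholder path
    .option("bad_records_action", "redirect")          // or "ignore"
    .option("BAD_RECORD_PATH", "/tmp/badRecords")      // placeholder redirect location
    .option("dbName", "streaming")
    .option("tableName", "bad_record_redirect")
    .start()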


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/59693123
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/59693123
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/59693123

Branch: refs/heads/spark-2.3
Commit: 59693123da1c7aad17c284887e7819235427af74
Parents: 3394128
Author: Geetika Gupta <ge...@knoldus.in>
Authored: Wed Feb 28 16:09:48 2018 +0530
Committer: kunal642 <ku...@gmail.com>
Committed: Tue May 22 10:30:00 2018 +0530

----------------------------------------------------------------------
 .../core/datastore/row/CarbonRow.java           |   4 +
 .../TestStreamingTableOperation.scala           |  76 ++++++++++-
 .../streaming/CarbonStreamRecordWriter.java     | 126 ++++++++++---------
 .../streaming/StreamBlockletWriter.java         |   5 +
 4 files changed, 148 insertions(+), 63 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/59693123/core/src/main/java/org/apache/carbondata/core/datastore/row/CarbonRow.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/row/CarbonRow.java b/core/src/main/java/org/apache/carbondata/core/datastore/row/CarbonRow.java
index bb624af..82f004f 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/row/CarbonRow.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/row/CarbonRow.java
@@ -91,4 +91,8 @@ public class CarbonRow implements Serializable {
   public void setRangeId(short rangeId) {
     this.rangeId = rangeId;
   }
+
+  public void clearData() {
+    this.data = null;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/59693123/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOperation.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOperation.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOperation.scala
index f46505a..325722d 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOperation.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/TestStreamingTableOperation.scala
@@ -45,8 +45,12 @@ class TestStreamingTableOperation extends QueryTest with BeforeAndAfterAll {
 
   private val spark = sqlContext.sparkSession
   private val dataFilePath = s"$resourcesPath/streamSample.csv"
+  def currentPath: String = new File(this.getClass.getResource("/").getPath + "../../")
+    .getCanonicalPath
+  val badRecordFilePath: File =new File(currentPath + "/target/test/badRecords")
 
   override def beforeAll {
+    badRecordFilePath.mkdirs()
     CarbonProperties.getInstance().addProperty(
       CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
       CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
@@ -1562,6 +1566,68 @@ class TestStreamingTableOperation extends QueryTest with BeforeAndAfterAll {
     assertResult("true")(resultStreaming(0).getString(1).trim)
   }
 
+
+  test("test bad_record_action IGNORE on streaming table") {
+
+sql("drop table if exists streaming.bad_record_ignore")
+    sql(
+      s"""
+         | CREATE TABLE streaming.bad_record_ignore(
+         | id INT,
+         | name STRING,
+         | city STRING,
+         | salary FLOAT
+         | )
+         | STORED BY 'carbondata'
+         | TBLPROPERTIES('streaming'='true')
+         | """.stripMargin)
+
+    executeStreamingIngest(
+      tableName = "bad_record_ignore",
+      batchNums = 2,
+      rowNumsEachBatch = 10,
+      intervalOfSource = 1,
+      intervalOfIngest = 1,
+      continueSeconds = 8,
+      generateBadRecords = true,
+      badRecordAction = "ignore",
+      autoHandoff = false
+    )
+
+    checkAnswer(sql("select count(*) from streaming.bad_record_ignore"), Seq(Row(19)))
+  }
+
+  test("test bad_record_action REDIRECT on streaming table") {
+    sql("drop table if exists streaming.bad_record_redirect")
+    sql(
+      s"""
+         | CREATE TABLE streaming.bad_record_redirect(
+         | id INT,
+         | name STRING,
+         | city STRING,
+         | salary FLOAT
+         | )
+         | STORED BY 'carbondata'
+         | TBLPROPERTIES('streaming'='true')
+         | """.stripMargin)
+
+    executeStreamingIngest(
+      tableName = "bad_record_redirect",
+      batchNums = 2,
+      rowNumsEachBatch = 10,
+      intervalOfSource = 1,
+      intervalOfIngest = 1,
+      continueSeconds = 8,
+      generateBadRecords = true,
+      badRecordAction = "redirect",
+      autoHandoff = false,
+      badRecordsPath = badRecordFilePath.getCanonicalPath
+    )
+    assert(new File(badRecordFilePath.getCanonicalFile + "/streaming/bad_record_redirect").isDirectory)
+    checkAnswer(sql("select count(*) from streaming.bad_record_redirect"), Seq(Row(19)))
+  }
+
+
   def createWriteSocketThread(
       serverSocket: ServerSocket,
       writeNums: Int,
@@ -1625,7 +1691,8 @@ class TestStreamingTableOperation extends QueryTest with BeforeAndAfterAll {
       badRecordAction: String = "force",
       intervalSecond: Int = 2,
       handoffSize: Long = CarbonCommonConstants.HANDOFF_SIZE_DEFAULT,
-      autoHandoff: Boolean = CarbonCommonConstants.ENABLE_AUTO_HANDOFF_DEFAULT.toBoolean
+      autoHandoff: Boolean = CarbonCommonConstants.ENABLE_AUTO_HANDOFF_DEFAULT.toBoolean,
+      badRecordsPath: String = CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL
   ): Thread = {
     new Thread() {
       override def run(): Unit = {
@@ -1643,6 +1710,7 @@ class TestStreamingTableOperation extends QueryTest with BeforeAndAfterAll {
             .trigger(ProcessingTime(s"$intervalSecond seconds"))
             .option("checkpointLocation", CarbonTablePath.getStreamingCheckpointDir(carbonTable.getTablePath))
             .option("bad_records_action", badRecordAction)
+            .option("BAD_RECORD_PATH", badRecordsPath)
             .option("dbName", tableIdentifier.database.get)
             .option("tableName", tableIdentifier.table)
             .option(CarbonCommonConstants.HANDOFF_SIZE, handoffSize)
@@ -1676,7 +1744,8 @@ class TestStreamingTableOperation extends QueryTest with BeforeAndAfterAll {
       generateBadRecords: Boolean,
       badRecordAction: String,
       handoffSize: Long = CarbonCommonConstants.HANDOFF_SIZE_DEFAULT,
-      autoHandoff: Boolean = CarbonCommonConstants.ENABLE_AUTO_HANDOFF_DEFAULT.toBoolean
+      autoHandoff: Boolean = CarbonCommonConstants.ENABLE_AUTO_HANDOFF_DEFAULT.toBoolean,
+      badRecordsPath: String = CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL
   ): Unit = {
     val identifier = new TableIdentifier(tableName, Option("streaming"))
     val carbonTable = CarbonEnv.getInstance(spark).carbonMetastore.lookupRelation(identifier)(spark)
@@ -1698,7 +1767,8 @@ class TestStreamingTableOperation extends QueryTest with BeforeAndAfterAll {
         badRecordAction = badRecordAction,
         intervalSecond = intervalOfIngest,
         handoffSize = handoffSize,
-        autoHandoff = autoHandoff)
+        autoHandoff = autoHandoff,
+        badRecordsPath = badRecordsPath)
       thread1.start()
       thread2.start()
       Thread.sleep(continueSeconds * 1000)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/59693123/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordWriter.java
----------------------------------------------------------------------
diff --git a/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordWriter.java b/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordWriter.java
index 4e555d3..4653445 100644
--- a/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordWriter.java
+++ b/streaming/src/main/java/org/apache/carbondata/streaming/CarbonStreamRecordWriter.java
@@ -179,75 +179,81 @@ public class CarbonStreamRecordWriter extends RecordWriter<Void, Object> {
       initializeAtFirstRow();
     }
 
-    // parse and convert row
-    currentRow.setData(rowParser.parseRow((Object[]) value));
-    converter.convert(currentRow);
-
     // null bit set
     nullBitSet.clear();
-    for (int i = 0; i < dataFields.length; i++) {
-      if (null == currentRow.getObject(i)) {
-        nullBitSet.set(i);
+    Object[] rowData = (Object[]) value;
+    currentRow.setRawData(rowData);
+    // parse and convert row
+    currentRow.setData(rowParser.parseRow(rowData));
+    CarbonRow updatedCarbonRow = converter.convert(currentRow);
+    if (updatedCarbonRow == null) {
+      output.skipRow();
+      currentRow.clearData();
+    } else {
+      for (int i = 0; i < dataFields.length; i++) {
+        if (null == currentRow.getObject(i)) {
+          nullBitSet.set(i);
+        }
       }
-    }
-    output.nextRow();
-    byte[] b = nullBitSet.toByteArray();
-    output.writeShort(b.length);
-    if (b.length > 0) {
-      output.writeBytes(b);
-    }
-    int dimCount = 0;
-    Object columnValue;
+      output.nextRow();
+      byte[] b = nullBitSet.toByteArray();
+      output.writeShort(b.length);
+      if (b.length > 0) {
+        output.writeBytes(b);
+      }
+      int dimCount = 0;
+      Object columnValue;
 
-    // primitive type dimension
-    for (; dimCount < isNoDictionaryDimensionColumn.length; dimCount++) {
-      columnValue = currentRow.getObject(dimCount);
-      if (null != columnValue) {
-        if (isNoDictionaryDimensionColumn[dimCount]) {
+      // primitive type dimension
+      for (; dimCount < isNoDictionaryDimensionColumn.length; dimCount++) {
+        columnValue = currentRow.getObject(dimCount);
+        if (null != columnValue) {
+          if (isNoDictionaryDimensionColumn[dimCount]) {
+            byte[] col = (byte[]) columnValue;
+            output.writeShort(col.length);
+            output.writeBytes(col);
+          } else {
+            output.writeInt((int) columnValue);
+          }
+        }
+      }
+      // complex type dimension
+      for (; dimCount < dimensionWithComplexCount; dimCount++) {
+        columnValue = currentRow.getObject(dimCount);
+        if (null != columnValue) {
           byte[] col = (byte[]) columnValue;
           output.writeShort(col.length);
           output.writeBytes(col);
-        } else {
-          output.writeInt((int) columnValue);
         }
       }
-    }
-    // complex type dimension
-    for (; dimCount < dimensionWithComplexCount; dimCount++) {
-      columnValue = currentRow.getObject(dimCount);
-      if (null != columnValue) {
-        byte[] col = (byte[]) columnValue;
-        output.writeShort(col.length);
-        output.writeBytes(col);
-      }
-    }
-    // measure
-    DataType dataType;
-    for (int msrCount = 0; msrCount < measureCount; msrCount++) {
-      columnValue = currentRow.getObject(dimCount + msrCount);
-      if (null != columnValue) {
-        dataType = measureDataTypes[msrCount];
-        if (dataType == DataTypes.BOOLEAN) {
-          output.writeBoolean((boolean) columnValue);
-        } else if (dataType == DataTypes.SHORT) {
-          output.writeShort((short) columnValue);
-        } else if (dataType == DataTypes.INT) {
-          output.writeInt((int) columnValue);
-        } else if (dataType == DataTypes.LONG) {
-          output.writeLong((long) columnValue);
-        } else if (dataType == DataTypes.DOUBLE) {
-          output.writeDouble((double) columnValue);
-        } else if (DataTypes.isDecimal(dataType)) {
-          BigDecimal val = (BigDecimal) columnValue;
-          byte[] bigDecimalInBytes = DataTypeUtil.bigDecimalToByte(val);
-          output.writeShort(bigDecimalInBytes.length);
-          output.writeBytes(bigDecimalInBytes);
-        } else {
-          String msg =
-              "unsupported data type:" + dataFields[dimCount + msrCount].getColumn().getDataType()
-                  .getName();
-          LOGGER.error(msg);
-          throw new IOException(msg);
+      // measure
+      DataType dataType;
+      for (int msrCount = 0; msrCount < measureCount; msrCount++) {
+        columnValue = currentRow.getObject(dimCount + msrCount);
+        if (null != columnValue) {
+          dataType = measureDataTypes[msrCount];
+          if (dataType == DataTypes.BOOLEAN) {
+            output.writeBoolean((boolean) columnValue);
+          } else if (dataType == DataTypes.SHORT) {
+            output.writeShort((short) columnValue);
+          } else if (dataType == DataTypes.INT) {
+            output.writeInt((int) columnValue);
+          } else if (dataType == DataTypes.LONG) {
+            output.writeLong((long) columnValue);
+          } else if (dataType == DataTypes.DOUBLE) {
+            output.writeDouble((double) columnValue);
+          } else if (DataTypes.isDecimal(dataType)) {
+            BigDecimal val = (BigDecimal) columnValue;
+            byte[] bigDecimalInBytes = DataTypeUtil.bigDecimalToByte(val);
+            output.writeShort(bigDecimalInBytes.length);
+            output.writeBytes(bigDecimalInBytes);
+          } else {
+            String msg =
+                "unsupported data type:" + dataFields[dimCount + msrCount].getColumn().getDataType()
+                .getName();
+            LOGGER.error(msg);
+            throw new IOException(msg);
+          }
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/59693123/streaming/src/main/java/org/apache/carbondata/streaming/StreamBlockletWriter.java
----------------------------------------------------------------------
diff --git a/streaming/src/main/java/org/apache/carbondata/streaming/StreamBlockletWriter.java b/streaming/src/main/java/org/apache/carbondata/streaming/StreamBlockletWriter.java
index 509e2aa..7b2176b 100644
--- a/streaming/src/main/java/org/apache/carbondata/streaming/StreamBlockletWriter.java
+++ b/streaming/src/main/java/org/apache/carbondata/streaming/StreamBlockletWriter.java
@@ -76,6 +76,11 @@ public class StreamBlockletWriter {
     rowIndex++;
   }
 
+  void skipRow() {
+    maxSize -- ;
+    maxRowNum -- ;
+  }
+
   boolean isFull() {
     return rowIndex == maxRowNum || count >= maxSize;
   }


[13/50] [abbrv] carbondata git commit: [CARBONDATA-2136] Fixed bug related to data load for bad_record_action as REDIRECT or IGNORE and sort scope as NO_SORT

Posted by gv...@apache.org.
[CARBONDATA-2136] Fixed bug related to data load for bad_record_action as REDIRECT or IGNORE and sort scope as NO_SORT

Problem: When data loading was performed with bad_records_action as REDIRECT or IGNORE and
the sort_scope option as NO_SORT, the load threw an error because the row batch was getting filled with null rows.

Solution: Refactored code for creating carbon row batch for bad_record_action as REDIRECT or IGNORE and sort scope as NO_SORT

This closes #1942
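
Example of the scenario this fixes (illustrative only; the table name, CSV path and timestamp
format follow the new test cases):

  // Scala sketch: NO_SORT table loaded with bad_records_action IGNORE
  sql("""CREATE TABLE IF NOT EXISTS sales_no_sort(ID BigInt, date Timestamp, country String,
         actual_price Double, Quantity int, sold_price Decimal(19,2))
         STORED BY 'carbondata' TBLPROPERTIES('sort_scope'='NO_SORT')""")
  sql("LOAD DATA local inpath '/path/to/datasample.csv' INTO TABLE sales_no_sort OPTIONS" +
      "('bad_records_action'='IGNORE', 'DELIMITER'=',', 'QUOTECHAR'='\"'," +
      "'timestampformat'='yyyy/MM/dd')")
  // bad rows are now dropped from the row batch instead of leaving null entries behind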


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/2ebfab15
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/2ebfab15
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/2ebfab15

Branch: refs/heads/spark-2.3
Commit: 2ebfab151dbf79a6e0cd19198f53e14a165a2759
Parents: 5969312
Author: Geetika Gupta <ge...@knoldus.in>
Authored: Wed Feb 7 16:12:09 2018 +0530
Committer: kunal642 <ku...@gmail.com>
Committed: Tue May 22 10:36:35 2018 +0530

----------------------------------------------------------------------
 .../badrecordloger/BadRecordActionTest.scala    | 189 +++++++++++++++----
 .../processing/loading/row/CarbonRowBatch.java  |   5 +-
 .../steps/DataConverterProcessorStepImpl.java   |  12 +-
 3 files changed, 161 insertions(+), 45 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/2ebfab15/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
index d85ee49..959aa6a 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
@@ -1,30 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.carbondata.spark.testsuite.badrecordloger
 
 import java.io.File
 
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.test.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
+import org.scalatest.BeforeAndAfterEach
 
 import org.apache.carbondata.common.constants.LoggerAction
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 
-class BadRecordActionTest extends QueryTest with BeforeAndAfterAll {
+class BadRecordActionTest extends QueryTest {
 
 
   val csvFilePath = s"$resourcesPath/badrecords/datasample.csv"
-  def currentPath: String = new File(this.getClass.getResource("/").getPath + "../../")
-    .getCanonicalPath
-  val badRecordFilePath: File =new File(currentPath + "/target/test/badRecords")
+  val badRecordFilePath = new File(currentPath + "/target/test/badRecords")
+  initCarbonProperties
 
-  override def beforeAll = {
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
+  private def initCarbonProperties = {
     CarbonProperties.getInstance().addProperty(
       CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, LoggerAction.FORCE.name())
-        badRecordFilePath.mkdirs()
-    sql("drop table if exists sales")
+    badRecordFilePath.mkdirs()
   }
 
   test("test load for bad_record_action=force") {
@@ -34,7 +47,7 @@ class BadRecordActionTest extends QueryTest with BeforeAndAfterAll {
           actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'""")
     sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS" +
         "('bad_records_action'='force', 'DELIMITER'=" +
-        " ',', 'QUOTECHAR'= '\"')")
+        " ',', 'QUOTECHAR'= '\"','timestampformat'='yyyy/MM/dd')")
     checkAnswer(sql("select count(*) from sales"),
       Seq(Row(6)))
 
@@ -47,7 +60,7 @@ class BadRecordActionTest extends QueryTest with BeforeAndAfterAll {
           actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'""")
     sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS" +
         "('bad_records_action'='FORCE', 'DELIMITER'=" +
-        " ',', 'QUOTECHAR'= '\"')")
+        " ',', 'QUOTECHAR'= '\"','timestampformat'='yyyy/MM/dd')")
     checkAnswer(sql("select count(*) from sales"),
       Seq(Row(6)))
   }
@@ -57,11 +70,17 @@ class BadRecordActionTest extends QueryTest with BeforeAndAfterAll {
     sql(
       """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country String,
           actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'""")
-    intercept[Exception] {
+    val exception = intercept[Exception] {
       sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS" +
           "('bad_records_action'='fail', 'DELIMITER'=" +
-          " ',', 'QUOTECHAR'= '\"')")
+          " ',', 'QUOTECHAR'= '\"','timestampformat'='yyyy/MM/dd')")
     }
+    assert(exception.getMessage
+      .contains(
+        "Data load failed due to bad record: The value with column name date and column data " +
+        "type TIMESTAMP is not a valid TIMESTAMP type.Please enable bad record logger to know" +
+        " the detail reason"))
+
   }
 
   test("test load for bad_record_action=FAIL") {
@@ -69,13 +88,19 @@ class BadRecordActionTest extends QueryTest with BeforeAndAfterAll {
     sql(
       """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country String,
           actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'""")
-    intercept[Exception] {
+    val exception = intercept[Exception] {
       sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS" +
           "('bad_records_action'='FAIL', 'DELIMITER'=" +
-          " ',', 'QUOTECHAR'= '\"')")
+          " ',', 'QUOTECHAR'= '\"','timestampformat'='yyyy/MM/dd')")
     }
+    assert(exception.getMessage
+      .contains(
+        "Data load failed due to bad record: The value with column name date and column data " +
+        "type TIMESTAMP is not a valid TIMESTAMP type.Please enable bad record logger to know" +
+        " the detail reason"))
   }
 
+
   test("test load for bad_record_action=ignore") {
     sql("drop table if exists sales")
     sql(
@@ -83,7 +108,7 @@ class BadRecordActionTest extends QueryTest with BeforeAndAfterAll {
           actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'""")
     sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS" +
         "('bad_records_action'='ignore', 'DELIMITER'=" +
-        " ',', 'QUOTECHAR'= '\"')")
+        " ',', 'QUOTECHAR'= '\"','timestampformat'='yyyy/MM/dd')")
     checkAnswer(sql("select count(*) from sales"),
       Seq(Row(2)))
   }
@@ -95,7 +120,7 @@ class BadRecordActionTest extends QueryTest with BeforeAndAfterAll {
           actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'""")
     sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS" +
         "('bad_records_action'='IGNORE', 'DELIMITER'=" +
-        " ',', 'QUOTECHAR'= '\"')")
+        " ',', 'QUOTECHAR'= '\"','timestampformat'='yyyy/MM/dd')")
     checkAnswer(sql("select count(*) from sales"),
       Seq(Row(2)))
   }
@@ -108,25 +133,25 @@ class BadRecordActionTest extends QueryTest with BeforeAndAfterAll {
     val exMessage = intercept[Exception] {
       sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS" +
           "('bad_records_action'='REDIRECT', 'DELIMITER'=" +
-          " ',', 'QUOTECHAR'= '\"', 'BAD_RECORD_PATH'='')")
+          " ',', 'QUOTECHAR'= '\"', 'BAD_RECORD_PATH'='','timestampformat'='yyyy/MM/dd')")
     }
     assert(exMessage.getMessage.contains("Invalid bad records location."))
   }
 
   test("test bad record REDIRECT but not having empty location in option should throw exception") {
+    sql("drop table if exists sales")
+    sql(
+      """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country String,
+          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'""")
     val badRecordLocation = CarbonProperties.getInstance()
       .getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC)
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
       CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL)
-    sql("drop table if exists sales")
     try {
-      sql(
-        """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country String,
-          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'""")
       val exMessage = intercept[Exception] {
         sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS" +
             "('bad_records_action'='REDIRECT', 'DELIMITER'=" +
-            " ',', 'QUOTECHAR'= '\"')")
+            " ',', 'QUOTECHAR'= '\"','timestampformat'='yyyy/MM/dd')")
       }
       assert(exMessage.getMessage.contains("Invalid bad records location."))
     }
@@ -138,29 +163,113 @@ class BadRecordActionTest extends QueryTest with BeforeAndAfterAll {
 
   test("test bad record is REDIRECT with location in carbon properties should pass") {
     sql("drop table if exists sales")
-      sql(
-        """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country String,
+    sql(
+      """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country String,
           actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'""")
-      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS" +
-          "('bad_records_action'='REDIRECT', 'DELIMITER'=" +
-          " ',', 'QUOTECHAR'= '\"')")
+    sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS" +
+        "('bad_records_action'='REDIRECT', 'DELIMITER'=" +
+        " ',', 'QUOTECHAR'= '\"','timestampformat'='yyyy/MM/dd')")
+    checkAnswer(sql("select count(*) from sales"),
+      Seq(Row(2)))
   }
 
   test("test bad record is redirect with location in option while data loading should pass") {
     sql("drop table if exists sales")
-         sql(
-        """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country String,
+    sql(
+      """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country String,
           actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata'""")
-      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS" +
-          "('bad_records_action'='REDIRECT', 'DELIMITER'=" +
-          " ',', 'QUOTECHAR'= '\"', 'BAD_RECORD_PATH'='" + {badRecordFilePath.getCanonicalPath} +
-          "')")
-      checkAnswer(sql("select count(*) from sales"),
-        Seq(Row(2)))
+    sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales OPTIONS" +
+        "('bad_records_action'='REDIRECT', 'DELIMITER'=" +
+        " ',', 'QUOTECHAR'= '\"', 'BAD_RECORD_PATH'='" + { badRecordFilePath.getCanonicalPath } +
+        "','timestampformat'='yyyy/MM/dd')")
+    checkAnswer(sql("select count(*) from sales"),
+      Seq(Row(2)))
   }
 
-  override def afterAll() = {
-    sql("drop table if exists sales")
+  test("test bad record FORCE option with no_sort as sort scope ") {
+    sql("drop table if exists sales_no_sort")
+    sql(
+      """CREATE TABLE IF NOT EXISTS sales_no_sort(ID BigInt, date Timestamp, country String,
+          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata' tblproperties('sort_scope'='NO_SORT')""")
+
+    sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales_no_sort OPTIONS" +
+        "('bad_records_action'='FORCE', 'DELIMITER'=" +
+        " ',', 'QUOTECHAR'= '\"','timestampformat'='yyyy/MM/dd')")
+    checkAnswer(sql("select count(*) from sales_no_sort"),
+      Seq(Row(6)))
+  }
+
+  test("test bad record REDIRECT option with location and no_sort as sort scope ") {
+    sql("drop table if exists sales_no_sort")
+    sql(
+      """CREATE TABLE IF NOT EXISTS sales_no_sort(ID BigInt, date Timestamp, country String,
+          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata' tblproperties('sort_scope'='NO_SORT')""")
+
+    sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales_no_sort OPTIONS" +
+        "('bad_records_action'='REDIRECT', 'DELIMITER'=" +
+        " ',', 'QUOTECHAR'= '\"', 'BAD_RECORD_PATH'='" + { badRecordFilePath.getCanonicalPath } +
+        "','timestampformat'='yyyy/MM/dd')")
+    checkAnswer(sql("select count(*) from sales_no_sort"),
+      Seq(Row(2)))
+  }
+
+  test("test bad record IGNORE option with no_sort as sort scope ") {
+    sql("drop table if exists sales_no_sort")
+    sql(
+      """CREATE TABLE IF NOT EXISTS sales_no_sort(ID BigInt, date Timestamp, country String,
+          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata' tblproperties('sort_scope'='NO_SORT')""")
+
+    sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales_no_sort OPTIONS" +
+        "('bad_records_action'='IGNORE', 'DELIMITER'=" +
+        " ',', 'QUOTECHAR'= '\"','timestampformat'='yyyy/MM/dd')")
+    checkAnswer(sql("select count(*) from sales_no_sort"),
+      Seq(Row(2)))
   }
 
-}
+  test("test bad record with FAIL option with location and no_sort as sort scope ") {
+    sql("drop table if exists sales_no_sort")
+    sql(
+      """CREATE TABLE IF NOT EXISTS sales_no_sort(ID BigInt, date Timestamp, country String,
+          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata' tblproperties('sort_scope'='NO_SORT')""")
+
+    val exception = intercept[Exception] {
+      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales_no_sort OPTIONS" +
+          "('bad_records_action'='FAIL', 'DELIMITER'=" +
+          " ',', 'QUOTECHAR'= '\"','timestampformat'='yyyy/MM/dd')")
+    }
+    assert(exception.getMessage
+      .contains(
+        "Data load failed due to bad record: The value with column name date and column data " +
+        "type TIMESTAMP is not a valid TIMESTAMP type.Please enable bad record logger to know" +
+        " the detail reason"))
+  }
+
+  test("test bad record with IGNORE option and sort scope as NO_SORT for bucketed table") {
+    sql("drop table if exists sales_bucket")
+    sql("CREATE TABLE IF NOT EXISTS sales_bucket(ID BigInt, date Timestamp, country String," +
+          "actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata' TBLPROPERTIES ('BUCKETNUMBER'='2', 'BUCKETCOLUMNS'='country','sort_scope'='NO_SORT')")
+    sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales_bucket OPTIONS" +
+        "('bad_records_action'='IGNORE', 'DELIMITER'=" +
+        " ',', 'QUOTECHAR'= '\"','timestampformat'='yyyy/MM/dd')")
+    checkAnswer(sql("select count(*) from sales_bucket"),
+      Seq(Row(2)))
+  }
+
+  test("test bad record with REDIRECT option and sort scope as NO_SORT for bucketed table") {
+    sql("drop table if exists sales_bucket")
+    sql("CREATE TABLE IF NOT EXISTS sales_bucket(ID BigInt, date Timestamp, country String," +
+        "actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED BY 'carbondata' TBLPROPERTIES ('BUCKETNUMBER'='2', 'BUCKETCOLUMNS'='country', 'sort_scope'='NO_SORT')")
+    sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales_bucket OPTIONS" +
+        "('bad_records_action'='REDIRECT', 'DELIMITER'=" +
+        " ',', 'QUOTECHAR'= '\"', 'BAD_RECORD_PATH'='" + { badRecordFilePath.getCanonicalPath } +
+        "','timestampformat'='yyyy/MM/dd')")
+    checkAnswer(sql("select count(*) from sales_bucket"),
+      Seq(Row(2)))
+  }
+
+
+  private def currentPath: String = {
+    new File(this.getClass.getResource("/").getPath + "../../")
+      .getCanonicalPath
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2ebfab15/processing/src/main/java/org/apache/carbondata/processing/loading/row/CarbonRowBatch.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/row/CarbonRowBatch.java b/processing/src/main/java/org/apache/carbondata/processing/loading/row/CarbonRowBatch.java
index e819dcd..6b75aa3 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/row/CarbonRowBatch.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/row/CarbonRowBatch.java
@@ -22,6 +22,7 @@ import java.util.NoSuchElementException;
 import org.apache.carbondata.common.CarbonIterator;
 import org.apache.carbondata.core.datastore.row.CarbonRow;
 
+import org.apache.commons.lang.ArrayUtils;
 
 /**
  * Batch of rows.
@@ -59,7 +60,9 @@ public class CarbonRowBatch extends CarbonIterator<CarbonRow> {
   }
 
   @Override public void remove() {
-
+    rowBatch = (CarbonRow[]) ArrayUtils.remove(rowBatch, index - 1);
+    --size;
+    --index;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2ebfab15/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataConverterProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataConverterProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataConverterProcessorStepImpl.java
index 72a8c25..43b2278 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataConverterProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/DataConverterProcessorStepImpl.java
@@ -192,11 +192,15 @@ public class DataConverterProcessorStepImpl extends AbstractDataLoadProcessorSte
   protected CarbonRowBatch processRowBatch(CarbonRowBatch rowBatch, RowConverter localConverter) {
     while (rowBatch.hasNext()) {
       CarbonRow convertRow = localConverter.convert(rowBatch.next());
-      if (isSortColumnRangeEnabled || isBucketColumnEnabled) {
-        short rangeNumber = (short) partitioner.getPartition(convertRow);
-        convertRow.setRangeId(rangeNumber);
+      if (convertRow == null) {
+        rowBatch.remove();
+      } else {
+        if (isSortColumnRangeEnabled || isBucketColumnEnabled) {
+          short rangeNumber = (short) partitioner.getPartition(convertRow);
+          convertRow.setRangeId(rangeNumber);
+        }
+        rowBatch.setPreviousRow(convertRow);
       }
-      rowBatch.setPreviousRow(convertRow);
     }
     rowCounter.getAndAdd(rowBatch.getSize());
     // reuse the origin batch


[33/50] [abbrv] carbondata git commit: [CARBONDATA-2552]Fix Data Mismatch for Complex Data type Array of Timestamp with Dictionary Include

Posted by gv...@apache.org.
[CARBONDATA-2552]Fix Data Mismatch for Complex Data type Array of Timestamp with Dictionary Include

Fix data mismatch for the complex data types Array and Struct of Timestamp when dictionary_include is specified
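
Example of the affected scenario (illustrative; mirrors the test case added below and assumes
carbon.timestamp.format is set to yyyy/MM/dd, as in the test):

  // Scala sketch: array/struct of timestamp with dictionary_include
  sql("create table array_timestamp (date1 array<timestamp>, date2 struct<date:timestamp>) " +
      "stored by 'carbondata' tblproperties('dictionary_include'='date1,date2')")
  sql("insert into array_timestamp values('2015/01/01$2016/01/01','2017/01/01')")
  sql("select * from array_timestamp").show(false)
  // before this fix the timestamps read back did not match the inserted values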


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/2993034e
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/2993034e
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/2993034e

Branch: refs/heads/spark-2.3
Commit: 2993034e8b4435cf506bd03c73416236b6fb2106
Parents: 8896a63
Author: Indhumathi27 <in...@gmail.com>
Authored: Mon May 28 22:17:36 2018 +0530
Committer: kumarvishal09 <ku...@gmail.com>
Committed: Tue May 29 17:20:36 2018 +0530

----------------------------------------------------------------------
 .../core/scan/complextypes/PrimitiveQueryType.java  |  2 +-
 .../TestLoadDataWithHiveSyntaxDefaultFormat.scala   | 16 ++++++++++++++++
 2 files changed, 17 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/2993034e/core/src/main/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryType.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryType.java b/core/src/main/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryType.java
index 899957e..d7723b3 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryType.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryType.java
@@ -95,7 +95,7 @@ public class PrimitiveQueryType extends ComplexQueryType implements GenericQuery
       DimensionRawColumnChunk[] rawColumnChunks, int rowNumber,
       int pageNumber, DataOutputStream dataOutputStream) throws IOException {
     byte[] currentVal = copyBlockDataChunk(rawColumnChunks, rowNumber, pageNumber);
-    if (!this.isDictionary) {
+    if (!this.isDictionary && !this.isDirectDictionary) {
       dataOutputStream.writeShort(currentVal.length);
     }
     dataOutputStream.write(currentVal);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/2993034e/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
index d0d578d..7f150be 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
@@ -365,6 +365,22 @@ class TestLoadDataWithHiveSyntaxDefaultFormat extends QueryTest with BeforeAndAf
     sql("drop table if exists complexcarbontable")
   }
 
+  test("test Complex Data type - Array and Struct of timestamp with dictionary include") {
+    sql("DROP TABLE IF EXISTS array_timestamp")
+    sql(
+      "create table array_timestamp (date1 array<timestamp>,date2 struct<date:timestamp> ) stored" +
+      " by 'carbondata' tblproperties" +
+      "('dictionary_include'='date1,date2')")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
+    sql("insert into array_timestamp values('2015/01/01$2016/01/01','2017/01/01')")
+    checkExistence(sql("select * from array_timestamp "),
+      true, "2015-01-01 00:00:00.0, 2016-01-01 00:00:00.0")
+    checkExistence(sql("select * from array_timestamp "),
+      true, "2017-01-01 00:00:00.0")
+    sql("DROP TABLE IF EXISTS array_timestamp")
+  }
+
   test("array<string> and string datatype for same column is not working properly") {
     sql("drop table if exists complexcarbontable")
     sql("create table complexcarbontable(deviceInformationId int, MAC array<string>, channelsId string, "+


[46/50] [abbrv] carbondata git commit: [CARBONDATA-2571] Calculating the carbonindex and carbondata file size of a table is wrong

Posted by gv...@apache.org.
[CARBONDATA-2571] Calculating the carbonindex and carbondata file size of a table is wrong

Problem:
While calculating the carbonindex file size, only the index file or the merge file was checked. But in PR#2333 the implementation was changed to fill both
the file name and the merge file name, so both fields have to be considered.

Solution:
While calculating the carbonindex file size, consider both the files and mergeFileName fields: get the list of index files from
these two fields and then sum the sizes of those files.

This closes #2358
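
The corrected accounting, as a compact sketch (Scala; it reuses the SegmentFileStore fields and
FileFactory calls shown in the patch below, everything else is schematic):

  import scala.collection.JavaConverters._
  var carbonIndexSize = 0L
  for ((folder, details) <- fileStore.getLocationMap.asScala) {
    // count the merged index file, if the segment has one
    Option(details.getMergeFileName).foreach { mergeFile =>
      carbonIndexSize += FileFactory.getCarbonFile(
        fileStore.getTablePath + folder + "/" + mergeFile).getSize
    }
    // and also count every individual .carbonindex file listed under 'files'
    for (indexFile <- details.getFiles.asScala) {
      carbonIndexSize += FileFactory.getCarbonFile(
        fileStore.getTablePath + folder + "/" + indexFile).getSize
    }
  }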


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/27d70599
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/27d70599
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/27d70599

Branch: refs/heads/spark-2.3
Commit: 27d7059984962b97bcaf576fed496653932ea743
Parents: 92d9b92
Author: dhatchayani <dh...@gmail.com>
Authored: Fri Jun 1 15:13:38 2018 +0530
Committer: manishgupta88 <to...@gmail.com>
Committed: Tue Jun 5 11:51:52 2018 +0530

----------------------------------------------------------------------
 .../apache/carbondata/core/util/CarbonUtil.java | 37 +++++++++++---------
 1 file changed, 20 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/27d70599/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index 1526047..5a7bce3 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2688,27 +2688,30 @@ public final class CarbonUtil {
       throws IOException {
     long carbonDataSize = 0L;
     long carbonIndexSize = 0L;
-    List<String> listOfFilesRead = new ArrayList<>();
     HashMap<String, Long> dataAndIndexSize = new HashMap<String, Long>();
-    if (fileStore.getLocationMap() != null) {
+    Map<String, SegmentFileStore.FolderDetails> locationMap = fileStore.getLocationMap();
+    if (locationMap != null) {
       fileStore.readIndexFiles();
-      Map<String, String> indexFiles = fileStore.getIndexFiles();
       Map<String, List<String>> indexFilesMap = fileStore.getIndexFilesMap();
-      for (Map.Entry<String, List<String>> entry : indexFilesMap.entrySet()) {
-        // get the size of carbonindex file
-        String indexFile = entry.getKey();
-        String mergeIndexFile = indexFiles.get(indexFile);
-        if (null != mergeIndexFile) {
-          String mergeIndexPath = indexFile
-              .substring(0, indexFile.lastIndexOf(CarbonCommonConstants.FILE_SEPARATOR) + 1)
-              + mergeIndexFile;
-          if (!listOfFilesRead.contains(mergeIndexPath)) {
-            carbonIndexSize += FileFactory.getCarbonFile(mergeIndexPath).getSize();
-            listOfFilesRead.add(mergeIndexPath);
-          }
-        } else {
-          carbonIndexSize += FileFactory.getCarbonFile(indexFile).getSize();
+      // get the size of carbonindex file
+      for (Map.Entry<String, SegmentFileStore.FolderDetails> entry : locationMap.entrySet()) {
+        SegmentFileStore.FolderDetails folderDetails = entry.getValue();
+        Set<String> carbonindexFiles = folderDetails.getFiles();
+        String mergeFileName = folderDetails.getMergeFileName();
+        if (null != mergeFileName) {
+          String mergeIndexPath =
+              fileStore.getTablePath() + entry.getKey() + CarbonCommonConstants.FILE_SEPARATOR
+                  + mergeFileName;
+          carbonIndexSize += FileFactory.getCarbonFile(mergeIndexPath).getSize();
         }
+        for (String indexFile : carbonindexFiles) {
+          String indexPath =
+              fileStore.getTablePath() + entry.getKey() + CarbonCommonConstants.FILE_SEPARATOR
+                  + indexFile;
+          carbonIndexSize += FileFactory.getCarbonFile(indexPath).getSize();
+        }
+      }
+      for (Map.Entry<String, List<String>> entry : indexFilesMap.entrySet()) {
         // get the size of carbondata files
         for (String blockFile : entry.getValue()) {
           carbonDataSize += FileFactory.getCarbonFile(blockFile).getSize();


[09/50] [abbrv] carbondata git commit: [CARBONDATA-2433] [Lucene GC Issue] Executor OOM because of GC when blocklet pruning is done using Lucene datamap

Posted by gv...@apache.org.
[CARBONDATA-2433] [Lucene GC Issue] Executor OOM because of GC when blocklet pruning is done using Lucene datamap

Problem
Executor OOM because of GC when blocklet pruning is done using Lucene datamap

Analysis
While searching, Lucene creates a PriorityQueue to hold the documents. As the size is not specified, by default the PriorityQueue size is
equal to the number of Lucene documents. As the documents start getting added to the heap, the GC time increases and after some time the task fails due
to excessive GC and executor OOM occurs.
Reference blog: http://lucene.472066.n3.nabble.com/Optimization-of-memory-usage-in-PriorityQueue-td590355.html

Fix
Specify a limit for the first search and after that use the searchAfter API to search incrementally with the given PriorityQueue size.

This closes #2267
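
The paging pattern, restated as a compact Scala sketch (it mirrors the loop added below;
indexSearcher, query and maxDocs stand for the existing datamap fields, and the per-document
handling is elided):

  val SEARCH_LIMIT = 100
  var toQuery = math.min(maxDocs, SEARCH_LIMIT)
  var result = indexSearcher.search(query, toQuery)             // bounded first search
  var hitCount = toQuery
  var last: org.apache.lucene.search.ScoreDoc = null
  var done = false
  while (!done) {
    for (sd <- result.scoreDocs) {
      val doc = indexSearcher.doc(sd.doc)                       // process the document fields here
      last = sd
    }
    val remaining = result.totalHits - hitCount
    if (remaining <= 0 || hitCount >= maxDocs) {
      done = true
    } else {
      toQuery = math.min(remaining, SEARCH_LIMIT)
      result = indexSearcher.searchAfter(last, query, toQuery)  // page through the rest
      hitCount += toQuery
    }
  }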


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/0e011977
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/0e011977
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/0e011977

Branch: refs/heads/spark-2.3
Commit: 0e011977e5562f4b033f503bb53a845616523dc7
Parents: 061871e
Author: manishgupta88 <to...@gmail.com>
Authored: Thu May 3 20:40:41 2018 +0530
Committer: ravipesala <ra...@gmail.com>
Committed: Mon May 21 19:12:54 2018 +0530

----------------------------------------------------------------------
 .../datamap/lucene/LuceneFineGrainDataMap.java  | 65 +++++++++++++++-----
 1 file changed, 50 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/0e011977/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
index 3645bb6..742f8d0 100644
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
+++ b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneFineGrainDataMap.java
@@ -68,6 +68,15 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
       LogServiceFactory.getLogService(LuceneFineGrainDataMap.class.getName());
 
   /**
+   * search limit will help in deciding the size of priority queue which is used by lucene to store
+   * the documents in heap. By default it is 100, which means in one search a max of 100 documents
+   * can be stored in heap by lucene. This way it helps in reducing the GC.
+   * Note: If it is removed or its value is increased beyond a limit, it can lead to almost 90%
+   * of the query time being spent in GC in worst case scenarios
+   */
+  private static final int SEARCH_LIMIT = 100;
+
+  /**
    * searcher object for this datamap
    */
   private Map<String, IndexSearcher> indexSearcherMap = null;
@@ -232,13 +241,23 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
     // Map<BlockId, Map<BlockletId, Map<PageId, Set<RowId>>>>
     Map<String, Map<Integer, List<Short>>> mapBlocks = new HashMap<>();
 
+    long luceneSearchStartTime = System.currentTimeMillis();
     for (Map.Entry<String, IndexSearcher> searcherEntry : indexSearcherMap.entrySet()) {
       IndexSearcher indexSearcher = searcherEntry.getValue();
+      // take the min of total documents available in the reader and limit if set by the user
+      maxDocs = Math.min(maxDocs, indexSearcher.getIndexReader().maxDoc());
       // execute index search
       // initialize to null, else ScoreDoc objects will get accumulated in memory
       TopDocs result = null;
+      // the number of documents to be queried in one search. It will always be minimum of
+      // search result and maxDocs
+      int numberOfDocumentsToBeQueried = 0;
+      // counter for maintaining the total number of documents finished querying
+      int documentHitCounter = 0;
       try {
-        result = indexSearcher.search(query, maxDocs);
+        numberOfDocumentsToBeQueried = Math.min(maxDocs, SEARCH_LIMIT);
+        result = indexSearcher.search(query, numberOfDocumentsToBeQueried);
+        documentHitCounter += numberOfDocumentsToBeQueried;
       } catch (IOException e) {
         String errorMessage =
             String.format("failed to search lucene data, detail is %s", e.getMessage());
@@ -247,22 +266,38 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
       }
 
       ByteBuffer intBuffer = ByteBuffer.allocate(4);
-
-      for (ScoreDoc scoreDoc : result.scoreDocs) {
-        // get a document
-        Document doc = indexSearcher.doc(scoreDoc.doc);
-
-        // get all fields
-        List<IndexableField> fieldsInDoc = doc.getFields();
-        if (writeCacheSize > 0) {
-          // It fills rowids to the map, its value is combined with multiple rows.
-          fillMapForCombineRows(intBuffer, mapBlocks, fieldsInDoc, searcherEntry.getKey());
-        } else {
-          // Fill rowids to the map
-          fillMap(intBuffer, mapBlocks, fieldsInDoc, searcherEntry.getKey());
+      // last scoreDoc in a result to be used in searchAfter API
+      ScoreDoc lastScoreDoc = null;
+      while (true) {
+        for (ScoreDoc scoreDoc : result.scoreDocs) {
+          // get a document
+          Document doc = indexSearcher.doc(scoreDoc.doc);
+          // get all fields
+          List<IndexableField> fieldsInDoc = doc.getFields();
+          if (writeCacheSize > 0) {
+            // It fills rowids to the map, its value is combined with multiple rows.
+            fillMapForCombineRows(intBuffer, mapBlocks, fieldsInDoc, searcherEntry.getKey());
+          } else {
+            // Fill rowids to the map
+            fillMap(intBuffer, mapBlocks, fieldsInDoc, searcherEntry.getKey());
+          }
+          lastScoreDoc = scoreDoc;
+        }
+        // result will have the total number of hits therefore we always need to query on the
+        // left over documents
+        int remainingHits = result.totalHits - documentHitCounter;
+        // break the loop if count reaches maxDocs to be searched or remaining hits become <=0
+        if (remainingHits <= 0 || documentHitCounter >= maxDocs) {
+          break;
         }
+        numberOfDocumentsToBeQueried = Math.min(remainingHits, SEARCH_LIMIT);
+        result = indexSearcher.searchAfter(lastScoreDoc, query, numberOfDocumentsToBeQueried);
+        documentHitCounter += numberOfDocumentsToBeQueried;
       }
     }
+    LOGGER.info(
+        "Time taken for lucene search: " + (System.currentTimeMillis() - luceneSearchStartTime)
+            + " ms");
 
     // result blocklets
     List<FineGrainBlocklet> blocklets = new ArrayList<>();
@@ -388,4 +423,4 @@ public class LuceneFineGrainDataMap extends FineGrainDataMap {
 
   }
 
-}
+}
\ No newline at end of file


[34/50] [abbrv] carbondata git commit: [CARBONDATA-2558] Optimize carbon schema reader interface of SDK

Posted by gv...@apache.org.
[CARBONDATA-2558] Optimize carbon schema reader interface of SDK

Optimize carbon schema reader interface of SDK

1. Create CarbonSchemaReader and move the schema read interfaces from CarbonReader to CarbonSchemaReader
2. Change the return type from List<ColumnSchema> to the SDK Schema, and remove the TableInfo return type
3. Update the documentation

This closes #2353
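
Usage sketch (Scala calling the Java SDK; the file paths are placeholders, the method names are
the ones documented in the updated sdk-guide below):

  import org.apache.carbondata.sdk.file.CarbonSchemaReader

  // read the schema from a carbondata file, a carbonindex file, or the schema file
  val schemaFromData  = CarbonSchemaReader.readSchemaInDataFile("/path/to/file.carbondata")
  val schemaFromIndex = CarbonSchemaReader.readSchemaInIndexFile("/path/to/file.carbonindex")
  val schemaFromMeta  = CarbonSchemaReader.readSchemaInSchemaFile("/path/to/Metadata/schema")
  // re-sort the fields back to the original column order if needed
  val ordered = schemaFromData.asOriginOrder()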


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/e7401824
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/e7401824
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/e7401824

Branch: refs/heads/spark-2.3
Commit: e740182439357fda6b3616f05d25e90b880eca57
Parents: 2993034
Author: xubo245 <xu...@huawei.com>
Authored: Tue May 29 17:07:10 2018 +0800
Committer: Jacky Li <ja...@qq.com>
Committed: Wed May 30 00:01:54 2018 +0800

----------------------------------------------------------------------
 docs/sdk-guide.md                               | 107 +++++++++++++++---
 .../carbondata/sdk/file/CarbonReader.java       |  85 ---------------
 .../carbondata/sdk/file/CarbonSchemaReader.java | 108 +++++++++++++++++++
 .../org/apache/carbondata/sdk/file/Field.java   |  16 +++
 .../org/apache/carbondata/sdk/file/Schema.java  |  31 ++++++
 .../carbondata/store/MetaCachedCarbonStore.java |  13 ++-
 .../carbondata/sdk/file/CarbonReaderTest.java   |  88 ++++++++-------
 7 files changed, 305 insertions(+), 143 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/e7401824/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 328a845..3c575fe 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -437,20 +437,6 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
 
 ```
   /**
-   * Read carbondata file and return the schema
-   */
-  public static List<ColumnSchema> readSchemaInDataFile(String dataFilePath);
-```
-
-```
- /**
-  * Read schema file and return table info object
-  */
-  public static TableInfo readSchemaFile(String schemaFilePath);
-```
-
-```
-  /**
    * Return true if has next row
    */
   public boolean hasNext();
@@ -598,4 +584,97 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
    */
   public <T> CarbonReader<T> build();
 ```
+### Class org.apache.carbondata.sdk.file.CarbonSchemaReader
+```
+  /**
+   * Read schema file and return the schema
+   *
+   * @param schemaFilePath complete path including schema file name
+   * @return schema object
+   * @throws IOException
+   */
+  public static Schema readSchemaInSchemaFile(String schemaFilePath);
+```
+
+```
+  /**
+   * Read carbondata file and return the schema
+   *
+   * @param dataFilePath complete path including carbondata file name
+   * @return Schema object
+   * @throws IOException
+   */
+  public static Schema readSchemaInDataFile(String dataFilePath);
+```
+
+```
+  /**
+   * Read carbonindex file and return the schema
+   *
+   * @param indexFilePath complete path including index file name
+   * @return schema object
+   * @throws IOException
+   */
+  public static Schema readSchemaInIndexFile(String indexFilePath);
+```
+
+### Class org.apache.carbondata.sdk.file.Schema
+```
+  /**
+   * construct a schema with fields
+   * @param fields
+   */
+  public Schema(Field[] fields);
+```
+
+```
+  /**
+   * construct a schema with List<ColumnSchema>
+   *
+   * @param columnSchemaList column schema list
+   */
+  public Schema(List<ColumnSchema> columnSchemaList);
+```
+
+```
+  /**
+   * Create a Schema using JSON string, for example:
+   * [
+   *   {"name":"string"},
+   *   {"age":"int"}
+   * ]
+   * @param json specified as string
+   * @return Schema
+   */
+  public static Schema parseJson(String json);
+```
+
+```
+  /**
+   * Sort the schema order as original order
+   *
+   * @return Schema object
+   */
+  public Schema asOriginOrder();
+```
+
+### Class org.apache.carbondata.sdk.file.Field
+```
+  /**
+   * Field Constructor
+   * @param name name of the field
+   * @param type datatype of field, specified in strings.
+   */
+  public Field(String name, String type);
+```
+
+```
+  /**
+   * Construct Field from ColumnSchema
+   *
+   * @param columnSchema ColumnSchema, Store the information about the column meta data
+   */
+  public Field(ColumnSchema columnSchema);
+```
+
 Find S3 example code at [SDKS3Example](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java) in the CarbonData repo.

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e7401824/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
index 9ae940b..60ead05 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonReader.java
@@ -17,26 +17,11 @@
 
 package org.apache.carbondata.sdk.file;
 
-import java.io.DataInputStream;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
 import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
-import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
-import org.apache.carbondata.core.datastore.impl.FileFactory;
-import org.apache.carbondata.core.metadata.converter.SchemaConverter;
-import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
-import org.apache.carbondata.core.metadata.schema.table.TableInfo;
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.core.reader.CarbonHeaderReader;
-import org.apache.carbondata.core.reader.CarbonIndexFileReader;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.path.CarbonTablePath;
-import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchemaToWrapperColumnSchema;
 
 import org.apache.hadoop.mapreduce.RecordReader;
 
@@ -104,76 +89,6 @@ public class CarbonReader<T> {
   }
 
   /**
-   * Read carbondata file and return the schema
-   */
-  public static List<ColumnSchema> readSchemaInDataFile(String dataFilePath) throws IOException {
-    CarbonHeaderReader reader = new CarbonHeaderReader(dataFilePath);
-    return reader.readSchema();
-  }
-
-  /**
-   * Read carbonindex file and return the schema
-   *
-   * @param indexFilePath complete path including index file name
-   * @return null, if the index file is not present in the path.
-   * List<ColumnSchema> from the index file.
-   * @throws IOException
-   */
-  public static List<ColumnSchema> readSchemaInIndexFile(String indexFilePath) throws IOException {
-    CarbonFile indexFile =
-        FileFactory.getCarbonFile(indexFilePath, FileFactory.getFileType(indexFilePath));
-    if (!indexFile.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT)) {
-      throw new IOException("Not an index file name");
-    }
-    // read schema from the first index file
-    DataInputStream dataInputStream =
-        FileFactory.getDataInputStream(indexFilePath, FileFactory.getFileType(indexFilePath));
-    byte[] bytes = new byte[(int) indexFile.getSize()];
-    try {
-      //get the file in byte buffer
-      dataInputStream.readFully(bytes);
-      CarbonIndexFileReader indexReader = new CarbonIndexFileReader();
-      // read from byte buffer.
-      indexReader.openThriftReader(bytes);
-      // get the index header
-      org.apache.carbondata.format.IndexHeader readIndexHeader = indexReader.readIndexHeader();
-      List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
-      List<org.apache.carbondata.format.ColumnSchema> table_columns =
-          readIndexHeader.getTable_columns();
-      for (org.apache.carbondata.format.ColumnSchema columnSchema : table_columns) {
-        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(columnSchema));
-      }
-      return columnSchemaList;
-    } finally {
-      dataInputStream.close();
-    }
-  }
-
-  /**
-   * Read CarbonData file and return the user schema,
-   * the schema order is the same as user save schema
-   */
-  public static List<ColumnSchema> readUserSchema(String indexFilePath) throws IOException {
-    List<ColumnSchema> columnSchemas = readSchemaInIndexFile(indexFilePath);
-    Collections.sort(columnSchemas, new Comparator<ColumnSchema>() {
-      @Override
-      public int compare(ColumnSchema o1, ColumnSchema o2) {
-        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
-      }
-    });
-    return columnSchemas;
-  }
-
-  /**
-   * Read schema file and return table info object
-   */
-  public static TableInfo readSchemaFile(String schemaFilePath) throws IOException {
-    org.apache.carbondata.format.TableInfo tableInfo = CarbonUtil.readSchemaFile(schemaFilePath);
-    SchemaConverter schemaConverter = new ThriftWrapperSchemaConverterImpl();
-    return schemaConverter.fromExternalToWrapperTableInfo(tableInfo, "", "", "");
-  }
-
-  /**
    * Close reader
    *
    * @throws IOException

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e7401824/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java
new file mode 100644
index 0000000..d8882bc
--- /dev/null
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonSchemaReader.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.sdk.file;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.metadata.converter.SchemaConverter;
+import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
+import org.apache.carbondata.core.reader.CarbonHeaderReader;
+import org.apache.carbondata.core.reader.CarbonIndexFileReader;
+import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
+
+import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchemaToWrapperColumnSchema;
+
+/**
+ * Schema reader for carbon files, including carbondata file, carbonindex file, and schema file
+ */
+public class CarbonSchemaReader {
+
+  /**
+   * Read schema file and return the schema
+   *
+   * @param schemaFilePath complete path including schema file name
+   * @return schema object
+   * @throws IOException
+   */
+  public static Schema readSchemaInSchemaFile(String schemaFilePath) throws IOException {
+    org.apache.carbondata.format.TableInfo tableInfo = CarbonUtil.readSchemaFile(schemaFilePath);
+    SchemaConverter schemaConverter = new ThriftWrapperSchemaConverterImpl();
+    List<ColumnSchema> schemaList = schemaConverter
+        .fromExternalToWrapperTableInfo(tableInfo, "", "", "")
+        .getFactTable()
+        .getListOfColumns();
+    return new Schema(schemaList);
+  }
+
+  /**
+   * Read carbondata file and return the schema
+   *
+   * @param dataFilePath complete path including carbondata file name
+   * @return Schema object
+   * @throws IOException
+   */
+  public static Schema readSchemaInDataFile(String dataFilePath) throws IOException {
+    CarbonHeaderReader reader = new CarbonHeaderReader(dataFilePath);
+    return new Schema(reader.readSchema());
+  }
+
+  /**
+   * Read carbonindex file and return the schema
+   *
+   * @param indexFilePath complete path including index file name
+   * @return schema object
+   * @throws IOException
+   */
+  public static Schema readSchemaInIndexFile(String indexFilePath) throws IOException {
+    CarbonFile indexFile =
+        FileFactory.getCarbonFile(indexFilePath, FileFactory.getFileType(indexFilePath));
+    if (!indexFile.getName().endsWith(CarbonTablePath.INDEX_FILE_EXT)) {
+      throw new IOException("Not an index file name");
+    }
+    // read schema from the first index file
+    DataInputStream dataInputStream =
+        FileFactory.getDataInputStream(indexFilePath, FileFactory.getFileType(indexFilePath));
+    byte[] bytes = new byte[(int) indexFile.getSize()];
+    try {
+      //get the file in byte buffer
+      dataInputStream.readFully(bytes);
+      CarbonIndexFileReader indexReader = new CarbonIndexFileReader();
+      // read from byte buffer.
+      indexReader.openThriftReader(bytes);
+      // get the index header
+      org.apache.carbondata.format.IndexHeader readIndexHeader = indexReader.readIndexHeader();
+      List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
+      List<org.apache.carbondata.format.ColumnSchema> table_columns =
+          readIndexHeader.getTable_columns();
+      for (org.apache.carbondata.format.ColumnSchema columnSchema : table_columns) {
+        columnSchemaList.add(thriftColumnSchemaToWrapperColumnSchema(columnSchema));
+      }
+      return new Schema(columnSchemaList);
+    } finally {
+      dataInputStream.close();
+    }
+  }
+
+}
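
The schema-reading helpers removed from CarbonReader above are replaced by this class; a minimal migration sketch, assuming a hypothetical SDK output directory:

```
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;

import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.carbondata.sdk.file.CarbonSchemaReader;
import org.apache.carbondata.sdk.file.Field;
import org.apache.carbondata.sdk.file.Schema;

public class IndexSchemaSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical output directory of a previous SDK write
    File writePath = new File("./testWriteFiles");
    File[] indexFiles = writePath.listFiles(new FilenameFilter() {
      @Override public boolean accept(File dir, String name) {
        return name.endsWith(CarbonTablePath.INDEX_FILE_EXT);
      }
    });
    if (indexFiles == null || indexFiles.length == 0) {
      throw new IOException("no carbonindex file found under " + writePath);
    }
    // readSchemaInIndexFile replaces CarbonReader.readSchemaInIndexFile;
    // asOriginOrder() replaces the manual sort done by the old readUserSchema
    Schema schema =
        CarbonSchemaReader.readSchemaInIndexFile(indexFiles[0].getAbsolutePath()).asOriginOrder();
    for (Field field : schema.getFields()) {
      System.out.println(field.getSchemaOrdinal() + "\t" + field.getFieldName());
    }
  }
}
```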

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e7401824/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java
index 0db3bc5..6d4cfd9 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Field.java
@@ -17,6 +17,7 @@
 
 package org.apache.carbondata.sdk.file;
 
+import java.util.LinkedList;
 import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
@@ -24,6 +25,7 @@ import org.apache.carbondata.common.annotations.InterfaceStability;
 import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.datatype.StructField;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 
 /**
  * A field represent one column
@@ -126,6 +128,20 @@ public class Field {
     this.type = type;
   }
 
+  /**
+   * Construct Field from ColumnSchema
+   *
+   * @param columnSchema ColumnSchema that stores the column metadata
+   */
+  public Field(ColumnSchema columnSchema) {
+    this.name = columnSchema.getColumnName();
+    this.type = columnSchema.getDataType();
+    children = new LinkedList<>();
+    schemaOrdinal = columnSchema.getSchemaOrdinal();
+    precision = columnSchema.getPrecision();
+    scale = columnSchema.getScale();
+  }
+
   public String getFieldName() {
     return name;
   }
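
The new Field(ColumnSchema) constructor rebuilds on the read path what the existing name/type constructor expresses on the write path; a small sketch of that write-side counterpart (field names here are illustrative):

```
import org.apache.carbondata.sdk.file.Field;
import org.apache.carbondata.sdk.file.Schema;

public class FieldSketch {
  public static void main(String[] args) {
    // Write-side construction: Field(name, type) with the type given as a string.
    // The new Field(ColumnSchema) constructor rebuilds the same object on the read
    // side from stored column metadata (name, type, schema ordinal, precision, scale).
    Field[] fields = new Field[2];
    fields[0] = new Field("name", "string");
    fields[1] = new Field("age", "int");
    Schema schema = new Schema(fields);
    System.out.println(schema.getFields().length); // 2
  }
}
```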

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e7401824/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java
index 31c202d..6131d45 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/Schema.java
@@ -18,9 +18,13 @@
 package org.apache.carbondata.sdk.file;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
 import org.apache.carbondata.common.annotations.InterfaceStability;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 
 import com.google.gson.GsonBuilder;
 import com.google.gson.TypeAdapter;
@@ -45,6 +49,18 @@ public class Schema {
   }
 
   /**
+   * construct a schema with List<ColumnSchema>
+   *
+   * @param columnSchemaList column schema list
+   */
+  public Schema(List<ColumnSchema> columnSchemaList) {
+    fields = new Field[columnSchemaList.size()];
+    for (int i = 0; i < columnSchemaList.size(); i++) {
+      fields[i] = new Field(columnSchemaList.get(i));
+    }
+  }
+
+  /**
    * Create a Schema using JSON string, for example:
    * [
    *   {"name":"string"},
@@ -77,4 +93,19 @@ public class Schema {
   public Field[] getFields() {
     return fields;
   }
+
+  /**
+   * Sort the fields back to the original (user-declared) schema order
+   *
+   * @return Schema object
+   */
+  public Schema asOriginOrder() {
+    Arrays.sort(fields, new Comparator<Field>() {
+      @Override
+      public int compare(Field o1, Field o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+    return this;
+  }
 }
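
A short sketch of the JSON constructor, using the JSON form given in the parseJson javadoc above; the class name is illustrative only.

```
import org.apache.carbondata.sdk.file.Field;
import org.apache.carbondata.sdk.file.Schema;

public class SchemaJsonSketch {
  public static void main(String[] args) {
    // Build a Schema from the JSON example shown in the parseJson javadoc
    Schema schema = Schema.parseJson("[{\"name\":\"string\"}, {\"age\":\"int\"}]");
    for (Field field : schema.getFields()) {
      System.out.println(field.getFieldName());
    }
  }
}
```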

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e7401824/store/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java b/store/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java
index d847e67..e43f750 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/store/MetaCachedCarbonStore.java
@@ -22,10 +22,12 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.carbondata.common.annotations.InterfaceAudience;
+import org.apache.carbondata.core.metadata.converter.SchemaConverter;
+import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.metadata.schema.table.TableInfo;
+import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
-import org.apache.carbondata.sdk.file.CarbonReader;
 
 /**
  * A CarbonStore base class that caches CarbonTable object
@@ -40,9 +42,12 @@ abstract class MetaCachedCarbonStore implements CarbonStore {
     if (cache.containsKey(path)) {
       return cache.get(path);
     }
-    TableInfo schema = CarbonReader.readSchemaFile(CarbonTablePath.getSchemaFilePath(path));
-    schema.setTablePath(path);
-    CarbonTable table = CarbonTable.buildFromTableInfo(schema);
+    org.apache.carbondata.format.TableInfo tableInfo = CarbonUtil
+        .readSchemaFile(CarbonTablePath.getSchemaFilePath(path));
+    SchemaConverter schemaConverter = new ThriftWrapperSchemaConverterImpl();
+    TableInfo tableInfo1 = schemaConverter.fromExternalToWrapperTableInfo(tableInfo, "", "", "");
+    tableInfo1.setTablePath(path);
+    CarbonTable table = CarbonTable.buildFromTableInfo(tableInfo1);
     cache.put(path, table);
     return table;
   }
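
Related note: callers that only need the column layout, rather than a full CarbonTable as built here, can now go through CarbonSchemaReader for the same schema file. A minimal sketch, with a hypothetical table path:

```
import java.io.IOException;

import org.apache.carbondata.core.util.path.CarbonTablePath;
import org.apache.carbondata.sdk.file.CarbonSchemaReader;
import org.apache.carbondata.sdk.file.Schema;

public class SchemaFileSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical table location containing the Metadata/schema file
    String tablePath = "./testWriteFiles";
    Schema schema =
        CarbonSchemaReader.readSchemaInSchemaFile(CarbonTablePath.getSchemaFilePath(tablePath));
    System.out.println(schema.getFields().length + " columns");
  }
}
```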

http://git-wip-us.apache.org/repos/asf/carbondata/blob/e7401824/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index 30d4091..7a2a765 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -20,9 +20,8 @@ package org.apache.carbondata.sdk.file;
 import java.io.*;
 import java.sql.Date;
 import java.sql.Timestamp;
-import java.util.Collections;
+import java.util.Arrays;
 import java.util.Comparator;
-import java.util.List;
 
 import org.apache.avro.generic.GenericData;
 import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
@@ -30,8 +29,6 @@ import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
-import org.apache.carbondata.core.metadata.schema.table.TableInfo;
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 
@@ -200,12 +197,12 @@ public class CarbonReaderTest extends TestCase {
     });
     Assert.assertTrue(dataFiles != null);
     Assert.assertTrue(dataFiles.length > 0);
-    List<ColumnSchema> columns = CarbonReader.readSchemaInDataFile(dataFiles[0].getAbsolutePath());
-    Assert.assertTrue(columns.size() == 2);
-    Assert.assertEquals("name", columns.get(0).getColumnName());
-    Assert.assertEquals("age", columns.get(1).getColumnName());
-    Assert.assertEquals(DataTypes.STRING, columns.get(0).getDataType());
-    Assert.assertEquals(DataTypes.INT, columns.get(1).getDataType());
+    Schema schema = CarbonSchemaReader.readSchemaInDataFile(dataFiles[0].getAbsolutePath());
+    Assert.assertTrue(schema.getFields().length == 2);
+    Assert.assertEquals("name", (schema.getFields())[0].getFieldName());
+    Assert.assertEquals("age", (schema.getFields())[1].getFieldName());
+    Assert.assertEquals(DataTypes.STRING, (schema.getFields())[0].getDataType());
+    Assert.assertEquals(DataTypes.INT, (schema.getFields())[1].getDataType());
 
     FileUtils.deleteDirectory(new File(path));
   }
@@ -228,20 +225,33 @@ public class CarbonReaderTest extends TestCase {
     });
     Assert.assertTrue(dataFiles != null);
     Assert.assertTrue(dataFiles.length > 0);
-    TableInfo tableInfo = CarbonReader.readSchemaFile(dataFiles[0].getAbsolutePath());
-    Assert.assertEquals(2, tableInfo.getFactTable().getListOfColumns().size());
 
-    List<ColumnSchema> columns = tableInfo.getFactTable().getListOfColumns();
-    Assert.assertEquals(2, columns.size());
-    Assert.assertEquals("name", columns.get(0).getColumnName());
-    Assert.assertEquals("age", columns.get(1).getColumnName());
-    Assert.assertEquals(DataTypes.STRING, columns.get(0).getDataType());
-    Assert.assertEquals(DataTypes.INT, columns.get(1).getDataType());
+    Schema schema = CarbonSchemaReader.readSchemaInSchemaFile(dataFiles[0].getAbsolutePath());
+
+    // sort the schema
+    Arrays.sort(schema.getFields(), new Comparator<Field>() {
+      @Override
+      public int compare(Field o1, Field o2) {
+        return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
+      }
+    });
+
+    // Transform the schema
+    String[] strings = new String[schema.getFields().length];
+    for (int i = 0; i < schema.getFields().length; i++) {
+      strings[i] = (schema.getFields())[i].getFieldName();
+    }
+
+    Assert.assertEquals(2, schema.getFields().length);
+
+    Assert.assertEquals("name", (schema.getFields())[0].getFieldName());
+    Assert.assertEquals("age", (schema.getFields())[1].getFieldName());
+    Assert.assertEquals(DataTypes.STRING, (schema.getFields())[0].getDataType());
+    Assert.assertEquals(DataTypes.INT, (schema.getFields())[1].getDataType());
 
     FileUtils.deleteDirectory(new File(path));
   }
 
-
   @Test
   public void testWriteAndReadFilesNonTransactional() throws IOException, InterruptedException {
     String path = "./testWriteFiles";
@@ -473,22 +483,20 @@ public class CarbonReaderTest extends TestCase {
         return name.endsWith("schema");
       }
     });
-    TableInfo tableInfo = CarbonReader.readSchemaFile(dataFiles[0].getAbsolutePath());
-
-    List<ColumnSchema> columns = tableInfo.getFactTable().getListOfColumns();
+    Schema schema = CarbonSchemaReader.readSchemaInSchemaFile(dataFiles[0].getAbsolutePath());
 
     // sort the schema
-    Collections.sort(tableInfo.getFactTable().getListOfColumns(), new Comparator<ColumnSchema>() {
+    Arrays.sort(schema.getFields(), new Comparator<Field>() {
       @Override
-      public int compare(ColumnSchema o1, ColumnSchema o2) {
+      public int compare(Field o1, Field o2) {
         return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
       }
     });
 
     // Transform the schema
-    String[] strings= new String[columns.size()];
-    for (int i = 0; i < columns.size(); i++) {
-      strings[i]= columns.get(i).getColumnName();
+    String[] strings = new String[schema.getFields().length];
+    for (int i = 0; i < schema.getFields().length; i++) {
+      strings[i] = (schema.getFields())[i].getFieldName();
     }
 
     File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
@@ -591,20 +599,20 @@ public class CarbonReaderTest extends TestCase {
       }
     });
 
-    List<ColumnSchema> columns = CarbonReader.readSchemaInDataFile(dataFiles2[0].getAbsolutePath());
+    Schema schema = CarbonSchemaReader.readSchemaInDataFile(dataFiles2[0].getAbsolutePath());
 
     // sort the schema
-    Collections.sort(columns, new Comparator<ColumnSchema>() {
+    Arrays.sort(schema.getFields(), new Comparator<Field>() {
       @Override
-      public int compare(ColumnSchema o1, ColumnSchema o2) {
+      public int compare(Field o1, Field o2) {
         return Integer.compare(o1.getSchemaOrdinal(), o2.getSchemaOrdinal());
       }
     });
 
     // Transform the schema
-    String[] strings= new String[columns.size()];
-    for (int i = 0; i < columns.size(); i++) {
-      strings[i]= columns.get(i).getColumnName();
+    String[] strings = new String[schema.getFields().length];
+    for (int i = 0; i < schema.getFields().length; i++) {
+      strings[i] = (schema.getFields())[i].getFieldName();
     }
 
     File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
@@ -704,12 +712,12 @@ public class CarbonReaderTest extends TestCase {
       }
     });
 
-    List<ColumnSchema> columns = CarbonReader.readUserSchema(dataFiles2[0].getAbsolutePath());
+    Schema schema = CarbonSchemaReader.readSchemaInIndexFile(dataFiles2[0].getAbsolutePath()).asOriginOrder();
 
     // Transform the schema
-    String[] strings= new String[columns.size()];
-    for (int i = 0; i < columns.size(); i++) {
-      strings[i]= columns.get(i).getColumnName();
+    String[] strings = new String[schema.getFields().length];
+    for (int i = 0; i < schema.getFields().length; i++) {
+      strings[i] = (schema.getFields())[i].getFieldName();
     }
 
     File segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
@@ -936,10 +944,10 @@ public class CarbonReaderTest extends TestCase {
       }
     });
 
-    List<ColumnSchema> columns = CarbonReader.readUserSchema(dataFiles2[0].getAbsolutePath());
+    Schema schema = CarbonSchemaReader.readSchemaInIndexFile(dataFiles2[0].getAbsolutePath()).asOriginOrder();
 
-    for (int i = 0; i < columns.size(); i++) {
-      System.out.println(columns.get(i).getColumnName() + "\t" + columns.get(i).getSchemaOrdinal());
+    for (int i = 0; i < schema.getFields().length; i++) {
+      System.out.println((schema.getFields())[i].getFieldName() + "\t" + schema.getFields()[i].getSchemaOrdinal());
     }
     FileUtils.deleteDirectory(new File(path));
   }